Merge branch 'main' into zeta-record-recently-active-files-when-data-collection-is-enabled

Michael Sloan created

Change summary

CONTRIBUTING.md                                               |  16 
Cargo.lock                                                    | 166 
Cargo.toml                                                    |  14 
assets/keymaps/default-linux.json                             |   2 
assets/keymaps/vim.json                                       |   9 
assets/settings/default.json                                  |  23 
crates/acp_thread/Cargo.toml                                  |   1 
crates/acp_thread/src/acp_thread.rs                           |  35 
crates/acp_thread/src/connection.rs                           |  17 
crates/acp_thread/src/diff.rs                                 |   8 
crates/agent2/src/agent.rs                                    |  13 
crates/agent2/src/tests/mod.rs                                |   1 
crates/agent2/src/thread.rs                                   |  10 
crates/agent2/src/tools/context_server_registry.rs            |   5 
crates/agent_servers/src/acp.rs                               |  36 
crates/agent_servers/src/agent_servers.rs                     | 197 +
crates/agent_servers/src/claude.rs                            |  40 
crates/agent_servers/src/e2e_tests.rs                         |   2 
crates/agent_servers/src/gemini.rs                            |  39 
crates/agent_servers/src/settings.rs                          |   4 
crates/agent_settings/src/agent_settings.rs                   |  17 
crates/agent_ui/Cargo.toml                                    |   1 
crates/agent_ui/src/acp/message_editor.rs                     |   4 
crates/agent_ui/src/acp/thread_view.rs                        |  69 
crates/agent_ui/src/agent_configuration.rs                    |  49 
crates/agent_ui/src/agent_diff.rs                             |   3 
crates/agent_ui/src/agent_panel.rs                            |  37 
crates/agent_ui/src/slash_command_settings.rs                 |   4 
crates/assistant_tools/src/delete_path_tool.rs                |   2 
crates/assistant_tools/src/edit_file_tool.rs                  |  10 
crates/audio/src/audio_settings.rs                            |   4 
crates/auto_update/src/auto_update.rs                         |   3 
crates/auto_update_helper/src/updater.rs                      |  26 
crates/bedrock/src/bedrock.rs                                 |  18 
crates/bedrock/src/models.rs                                  |  28 
crates/call/src/call_settings.rs                              |   4 
crates/cli/src/cli.rs                                         |   1 
crates/cli/src/main.rs                                        |  76 
crates/client/src/client.rs                                   |  23 
crates/collab/src/tests/editor_tests.rs                       |  52 
crates/collab/src/tests/integration_tests.rs                  |  26 
crates/collab/src/tests/random_project_collaboration_tests.rs |   2 
crates/collab_ui/src/panel_settings.rs                        |  10 
crates/copilot/src/copilot.rs                                 |  12 
crates/copilot/src/copilot_chat.rs                            |   2 
crates/copilot/src/request.rs                                 |   2 
crates/dap/src/debugger_settings.rs                           |   8 
crates/deepseek/src/deepseek.rs                               |   4 
crates/diagnostics/src/diagnostics.rs                         |   6 
crates/diagnostics/src/diagnostics_tests.rs                   |  45 
crates/editor/src/code_context_menus.rs                       |  43 
crates/editor/src/editor.rs                                   |  12 
crates/editor/src/editor_settings.rs                          |  23 
crates/editor/src/editor_tests.rs                             |  38 
crates/editor/src/element.rs                                  | 547 +++
crates/editor/src/hover_links.rs                              |  18 
crates/editor/src/hover_popover.rs                            |   2 
crates/editor/src/inlay_hint_cache.rs                         |  28 
crates/editor/src/scroll/scroll_amount.rs                     |   2 
crates/editor/src/test/editor_lsp_test_context.rs             |   6 
crates/extension_host/src/extension_host.rs                   |  11 
crates/extension_host/src/extension_settings.rs               |   4 
crates/file_finder/src/file_finder_settings.rs                |   4 
crates/git_hosting_providers/src/settings.rs                  |   4 
crates/git_ui/src/commit_view.rs                              |   4 
crates/git_ui/src/git_panel_settings.rs                       |   4 
crates/git_ui/src/project_diff.rs                             |   3 
crates/go_to_line/src/cursor_position.rs                      |   4 
crates/gpui/examples/image/image.rs                           |  88 
crates/gpui/src/assets.rs                                     |  11 
crates/gpui/src/elements/img.rs                               |  23 
crates/gpui/src/platform.rs                                   |   1 
crates/gpui/src/platform/linux/platform.rs                    |   1 
crates/gpui/src/platform/mac/text_system.rs                   | 170 +
crates/gpui/src/platform/mac/window.rs                        |  21 
crates/gpui/src/platform/windows/alpha_correction.hlsl        |  28 
crates/gpui/src/platform/windows/color_text_raster.hlsl       |  14 
crates/gpui/src/platform/windows/direct_write.rs              | 141 
crates/gpui/src/platform/windows/directx_renderer.rs          |  78 
crates/gpui/src/platform/windows/events.rs                    |   2 
crates/gpui/src/platform/windows/platform.rs                  |  15 
crates/gpui/src/platform/windows/shaders.hlsl                 |   9 
crates/gpui/src/platform/windows/vsync.rs                     |   2 
crates/gpui/src/text_system.rs                                | 102 
crates/gpui/src/text_system/line_layout.rs                    |  15 
crates/gpui/src/window.rs                                     |   7 
crates/gpui_macros/src/derive_action.rs                       |   7 
crates/image_viewer/src/image_viewer_settings.rs              |   4 
crates/journal/src/journal.rs                                 |   4 
crates/keymap_editor/Cargo.toml                               |  53 
crates/keymap_editor/LICENSE-GPL                              |   1 
crates/keymap_editor/src/keymap_editor.rs                     |   6 
crates/keymap_editor/src/ui_components/keystroke_input.rs     |   0 
crates/keymap_editor/src/ui_components/mod.rs                 |   0 
crates/keymap_editor/src/ui_components/table.rs               |   0 
crates/language/src/buffer.rs                                 |   2 
crates/language/src/language_settings.rs                      |   4 
crates/language/src/proto.rs                                  |   2 
crates/language/src/toolchain.rs                              |   8 
crates/language_models/src/provider/anthropic.rs              |   2 
crates/language_models/src/provider/copilot_chat.rs           |  30 
crates/language_models/src/settings.rs                        |   4 
crates/language_tools/src/language_tools.rs                   |   2 
crates/language_tools/src/lsp_log_view.rs                     |  96 
crates/language_tools/src/lsp_log_view_tests.rs               |   2 
crates/languages/src/python.rs                                | 178 
crates/languages/src/rust.rs                                  |   2 
crates/lsp/src/lsp.rs                                         |  38 
crates/markdown_preview/Cargo.toml                            |   6 
crates/markdown_preview/src/markdown_elements.rs              |  17 
crates/markdown_preview/src/markdown_parser.rs                | 372 ++
crates/markdown_preview/src/markdown_renderer.rs              | 137 
crates/outline_panel/src/outline_panel_settings.rs            |   4 
crates/paths/src/paths.rs                                     |   5 
crates/project/src/buffer_store.rs                            |  22 
crates/project/src/debugger/dap_store.rs                      |  11 
crates/project/src/lsp_command.rs                             |  30 
crates/project/src/lsp_store.rs                               |  54 
crates/project/src/lsp_store/log_store.rs                     |  66 
crates/project/src/lsp_store/lsp_ext_command.rs               |   2 
crates/project/src/project.rs                                 |  22 
crates/project/src/project_settings.rs                        |   4 
crates/project/src/project_tests.rs                           |  91 
crates/project/src/terminals.rs                               | 127 
crates/project_panel/src/project_panel_settings.rs            |   4 
crates/project_symbols/src/project_symbols.rs                 |  59 
crates/recent_projects/src/disconnected_overlay.rs            |  31 
crates/recent_projects/src/recent_projects.rs                 |  33 
crates/recent_projects/src/remote_connections.rs              | 142 
crates/recent_projects/src/remote_servers.rs                  | 123 
crates/remote/src/remote.rs                                   |   3 
crates/remote/src/remote_client.rs                            | 131 
crates/remote/src/transport.rs                                | 335 ++
crates/remote/src/transport/ssh.rs                            | 362 --
crates/remote/src/transport/wsl.rs                            | 494 +++
crates/remote_server/src/headless_project.rs                  |   6 
crates/remote_server/src/remote_editing_tests.rs              |   2 
crates/repl/src/jupyter_settings.rs                           |   4 
crates/search/src/project_search.rs                           |   3 
crates/settings/Cargo.toml                                    |   2 
crates/settings/src/base_keymap_setting.rs                    |   8 
crates/settings/src/settings.rs                               |   4 
crates/settings/src/settings_json.rs                          |  15 
crates/settings/src/settings_store.rs                         | 192 
crates/settings/src/settings_ui.rs                            | 118 
crates/settings/src/vscode_import.rs                          |   4 
crates/settings_ui/Cargo.toml                                 |  41 
crates/settings_ui/src/settings_ui.rs                         | 500 +++
crates/settings_ui_macros/Cargo.toml                          |  22 
crates/settings_ui_macros/LICENSE-GPL                         |   1 
crates/settings_ui_macros/src/settings_ui_macros.rs           | 201 +
crates/supermaven/src/supermaven_completion_provider.rs       |  93 
crates/terminal/src/terminal.rs                               |  20 
crates/terminal/src/terminal_settings.rs                      |   4 
crates/terminal_view/src/terminal_element.rs                  |   9 
crates/theme/src/settings.rs                                  |   4 
crates/title_bar/Cargo.toml                                   |   2 
crates/title_bar/src/system_window_tabs.rs                    |  49 
crates/title_bar/src/title_bar.rs                             |  19 
crates/title_bar/src/title_bar_settings.rs                    |   5 
crates/vim/src/normal/scroll.rs                               |   6 
crates/vim/src/test.rs                                        |  87 
crates/vim/src/test/vim_test_context.rs                       |   4 
crates/vim/src/vim.rs                                         |   4 
crates/vim_mode_setting/src/vim_mode_setting.rs               |   4 
crates/workspace/src/item.rs                                  |   6 
crates/workspace/src/persistence.rs                           | 444 ++
crates/workspace/src/persistence/model.rs                     |  32 
crates/workspace/src/workspace.rs                             |  98 
crates/workspace/src/workspace_settings.rs                    |   6 
crates/worktree/src/worktree_settings.rs                      |   4 
crates/zed/Cargo.toml                                         |   1 
crates/zed/resources/windows/zed-wsl                          |  25 
crates/zed/src/main.rs                                        |  80 
crates/zed/src/zed.rs                                         |   6 
crates/zed/src/zed/app_menus.rs                               |   3 
crates/zed/src/zed/open_listener.rs                           |  91 
crates/zed/src/zed/windows_only_instance.rs                   |   1 
crates/zeta/Cargo.toml                                        |   3 
crates/zeta/license_examples/0bsd.txt                         |  13 
crates/zeta/license_examples/apache-2.0-ex0.txt               |   0 
crates/zeta/license_examples/apache-2.0-ex1.txt               |  42 
crates/zeta/license_examples/apache-2.0-ex2.txt               | 152 
crates/zeta/license_examples/apache-2.0-ex3.txt               |  13 
crates/zeta/license_examples/apache-2.0-ex4.txt               | 187 +
crates/zeta/license_examples/bsd-1-clause.txt                 |  20 
crates/zeta/license_examples/bsd-2-clause-ex0.txt             |  26 
crates/zeta/license_examples/bsd-3-clause-ex0.txt             |  29 
crates/zeta/license_examples/bsd-3-clause-ex1.txt             |  27 
crates/zeta/license_examples/bsd-3-clause-ex2.txt             |  31 
crates/zeta/license_examples/bsd-3-clause-ex3.txt             |  30 
crates/zeta/license_examples/bsd-3-clause-ex4.txt             |  27 
crates/zeta/license_examples/isc.txt                          |  15 
crates/zeta/license_examples/mit-ex0.txt                      |   0 
crates/zeta/license_examples/mit-ex1.txt                      |  26 
crates/zeta/license_examples/mit-ex2.txt                      |  22 
crates/zeta/license_examples/mit-ex3.txt                      |  21 
crates/zeta/license_examples/upl-1.0.txt                      |  35 
crates/zeta/license_examples/zlib-ex0.txt                     |  19 
crates/zeta/license_patterns/0bsd-pattern                     |  11 
crates/zeta/license_patterns/apache-2.0-pattern               |  65 
crates/zeta/license_patterns/apache-2.0-reference-pattern     |  14 
crates/zeta/license_patterns/bsd-pattern                      |  32 
crates/zeta/license_patterns/isc-pattern                      |  11 
crates/zeta/license_patterns/mit-pattern                      |  13 
crates/zeta/license_patterns/upl-1.0-pattern                  |  31 
crates/zeta/license_patterns/zlib-pattern                     |  21 
crates/zeta/src/license_detection.rs                          | 701 ++--
crates/zlog_settings/src/zlog_settings.rs                     |   4 
docs/src/accounts.md                                          |   5 
docs/src/ai/llm-providers.md                                  |   4 
docs/src/configuring-zed.md                                   |  16 
docs/src/debugger.md                                          |   9 
docs/src/development/releases.md                              |   2 
docs/src/development/windows.md                               |  15 
docs/src/visual-customization.md                              |   6 
flake.lock                                                    |  18 
nix/build.nix                                                 |   1 
script/bundle-windows.ps1                                     |   1 
script/danger/pnpm-lock.yaml                                  |  64 
script/issue_response/package.json                            |  10 
script/issue_response/pnpm-lock.yaml                          | 418 +-
tooling/workspace-hack/Cargo.toml                             |   4 
typos.toml                                                    |   5 
224 files changed, 7,390 insertions(+), 2,862 deletions(-)

Detailed changes

CONTRIBUTING.md 🔗

@@ -27,6 +27,22 @@ By effectively engaging with the Zed team and community early in your process, w
 
 We plan to set aside time each week to pair program with contributors on promising pull requests in Zed. This will be an experiment. We tend to prefer pairing over async code review on our team, and we'd like to see how well it works in an open source setting. If we're finding it difficult to get on the same page with async review, we may ask you to pair with us if you're open to it. The closer a contribution is to the goals outlined in our roadmap, the more likely we'll be to spend time pairing on it.
 
+## Mandatory PR contents
+
+Please ensure the PR contains
+
+- Before & after screenshots, if there are visual adjustments introduced.
+
+Examples of visual adjustments: tree-sitter query updates, UI changes, etc.
+
+- A disclosure of the AI assistance usage, if any was used.
+
+Any kind of AI assistance must be disclosed in the PR, along with the extent to which AI assistance was used (e.g. docs only vs. code generation).
+
+If the PR responses are being generated by an AI, disclose that as well.
+
+As a small exception, trivial tab-completion doesn't need to be disclosed, as long as it's limited to single keywords or short phrases.
+
 ## Tips to improve the chances of your PR getting reviewed and merged
 
 - Discuss your plans ahead of time with the team

Cargo.lock 🔗

@@ -8,6 +8,7 @@ version = "0.1.0"
 dependencies = [
  "action_log",
  "agent-client-protocol",
+ "agent_settings",
  "anyhow",
  "buffer_diff",
  "collections",
@@ -414,6 +415,7 @@ dependencies = [
  "serde_json",
  "serde_json_lenient",
  "settings",
+ "shlex",
  "smol",
  "streaming_diff",
  "task",
@@ -505,7 +507,7 @@ dependencies = [
  "parking_lot",
  "piper",
  "polling",
- "regex-automata 0.4.9",
+ "regex-automata",
  "rustix-openpty",
  "serde",
  "signal-hook",
@@ -2455,7 +2457,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4"
 dependencies = [
  "memchr",
- "regex-automata 0.4.9",
+ "regex-automata",
  "serde",
 ]
 
@@ -4730,7 +4732,7 @@ version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b545b8c50194bdd008283985ab0b31dba153cfd5b3066a92770634fbc0d7d291"
 dependencies = [
- "nu-ansi-term 0.50.1",
+ "nu-ansi-term",
 ]
 
 [[package]]
@@ -5629,8 +5631,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2"
 dependencies = [
  "bit-set 0.5.3",
- "regex-automata 0.4.9",
- "regex-syntax 0.8.5",
+ "regex-automata",
+ "regex-syntax",
 ]
 
 [[package]]
@@ -5640,8 +5642,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
 dependencies = [
  "bit-set 0.8.0",
- "regex-automata 0.4.9",
- "regex-syntax 0.8.5",
+ "regex-automata",
+ "regex-syntax",
 ]
 
 [[package]]
@@ -7291,8 +7293,8 @@ dependencies = [
  "aho-corasick",
  "bstr",
  "log",
- "regex-automata 0.4.9",
- "regex-syntax 0.8.5",
+ "regex-automata",
+ "regex-syntax",
 ]
 
 [[package]]
@@ -8297,7 +8299,7 @@ dependencies = [
  "globset",
  "log",
  "memchr",
- "regex-automata 0.4.9",
+ "regex-automata",
  "same-file",
  "walkdir",
  "winapi-util",
@@ -8896,7 +8898,7 @@ dependencies = [
  "percent-encoding",
  "referencing",
  "regex",
- "regex-syntax 0.8.5",
+ "regex-syntax",
  "reqwest 0.12.15 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde",
  "serde_json",
@@ -8949,6 +8951,44 @@ dependencies = [
  "uuid",
 ]
 
+[[package]]
+name = "keymap_editor"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "collections",
+ "command_palette",
+ "component",
+ "db",
+ "editor",
+ "fs",
+ "fuzzy",
+ "gpui",
+ "itertools 0.14.0",
+ "language",
+ "log",
+ "menu",
+ "notifications",
+ "paths",
+ "project",
+ "search",
+ "serde",
+ "serde_json",
+ "settings",
+ "telemetry",
+ "tempfile",
+ "theme",
+ "tree-sitter-json",
+ "tree-sitter-rust",
+ "ui",
+ "ui_input",
+ "util",
+ "vim",
+ "workspace",
+ "workspace-hack",
+ "zed_actions",
+]
+
 [[package]]
 name = "khronos-egl"
 version = "6.0.0"
@@ -9698,7 +9738,7 @@ dependencies = [
  "lazy_static",
  "proc-macro2",
  "quote",
- "regex-syntax 0.8.5",
+ "regex-syntax",
  "rustc_version",
  "syn 2.0.101",
 ]
@@ -9770,7 +9810,7 @@ dependencies = [
 [[package]]
 name = "lsp-types"
 version = "0.95.1"
-source = "git+https://github.com/zed-industries/lsp-types?rev=39f629bdd03d59abd786ed9fc27e8bca02c0c0ec#39f629bdd03d59abd786ed9fc27e8bca02c0c0ec"
+source = "git+https://github.com/zed-industries/lsp-types?rev=0874f8742fe55b4dc94308c1e3c0069710d8eeaf#0874f8742fe55b4dc94308c1e3c0069710d8eeaf"
 dependencies = [
  "bitflags 1.3.2",
  "serde",
@@ -9913,9 +9953,11 @@ dependencies = [
  "editor",
  "fs",
  "gpui",
+ "html5ever 0.27.0",
  "language",
  "linkify",
  "log",
+ "markup5ever_rcdom",
  "pretty_assertions",
  "pulldown-cmark 0.12.2",
  "settings",
@@ -9976,11 +10018,11 @@ dependencies = [
 
 [[package]]
 name = "matchers"
-version = "0.1.0"
+version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9"
 dependencies = [
- "regex-automata 0.1.10",
+ "regex-automata",
 ]
 
 [[package]]
@@ -10681,16 +10723,6 @@ dependencies = [
  "winapi",
 ]
 
-[[package]]
-name = "nu-ansi-term"
-version = "0.46.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
-dependencies = [
- "overload",
- "winapi",
-]
-
 [[package]]
 name = "nu-ansi-term"
 version = "0.50.1"
@@ -11384,12 +11416,6 @@ version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e"
 
-[[package]]
-name = "overload"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
-
 [[package]]
 name = "p256"
 version = "0.11.1"
@@ -13380,17 +13406,8 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-automata 0.4.9",
- "regex-syntax 0.8.5",
-]
-
-[[package]]
-name = "regex-automata"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
-dependencies = [
- "regex-syntax 0.6.29",
+ "regex-automata",
+ "regex-syntax",
 ]
 
 [[package]]
@@ -13401,7 +13418,7 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-syntax 0.8.5",
+ "regex-syntax",
 ]
 
 [[package]]
@@ -13410,12 +13427,6 @@ version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a"
 
-[[package]]
-name = "regex-syntax"
-version = "0.6.29"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
-
 [[package]]
 name = "regex-syntax"
 version = "0.8.5"
@@ -14854,6 +14865,8 @@ dependencies = [
  "serde_derive",
  "serde_json",
  "serde_json_lenient",
+ "serde_path_to_error",
+ "settings_ui_macros",
  "smallvec",
  "tree-sitter",
  "tree-sitter-json",
@@ -14889,39 +14902,28 @@ name = "settings_ui"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "collections",
- "command_palette",
  "command_palette_hooks",
- "component",
- "db",
  "editor",
  "feature_flags",
- "fs",
- "fuzzy",
  "gpui",
- "itertools 0.14.0",
- "language",
- "log",
- "menu",
- "notifications",
- "paths",
- "project",
- "search",
  "serde",
  "serde_json",
  "settings",
- "telemetry",
- "tempfile",
+ "smallvec",
  "theme",
- "tree-sitter-json",
- "tree-sitter-rust",
  "ui",
- "ui_input",
- "util",
- "vim",
  "workspace",
  "workspace-hack",
- "zed_actions",
+]
+
+[[package]]
+name = "settings_ui_macros"
+version = "0.1.0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.101",
+ "workspace-hack",
 ]
 
 [[package]]
@@ -16737,6 +16739,7 @@ dependencies = [
  "db",
  "gpui",
  "http_client",
+ "keymap_editor",
  "notifications",
  "pretty_assertions",
  "project",
@@ -16745,7 +16748,6 @@ dependencies = [
  "schemars",
  "serde",
  "settings",
- "settings_ui",
  "smallvec",
  "story",
  "telemetry",
@@ -17114,14 +17116,14 @@ dependencies = [
 
 [[package]]
 name = "tracing-subscriber"
-version = "0.3.19"
+version = "0.3.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
+checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
 dependencies = [
  "matchers",
- "nu-ansi-term 0.46.0",
+ "nu-ansi-term",
  "once_cell",
- "regex",
+ "regex-automata",
  "serde",
  "serde_json",
  "sharded-slab",
@@ -17152,7 +17154,7 @@ checksum = "a7cf18d43cbf0bfca51f657132cc616a5097edc4424d538bae6fa60142eaf9f0"
 dependencies = [
  "cc",
  "regex",
- "regex-syntax 0.8.5",
+ "regex-syntax",
  "serde_json",
  "streaming-iterator",
  "tree-sitter-language",
@@ -19950,8 +19952,8 @@ dependencies = [
  "rand_core 0.6.4",
  "regalloc2",
  "regex",
- "regex-automata 0.4.9",
- "regex-syntax 0.8.5",
+ "regex-automata",
+ "regex-syntax",
  "ring",
  "rust_decimal",
  "rustc-hash 1.1.0",
@@ -20456,6 +20458,7 @@ dependencies = [
  "itertools 0.14.0",
  "jj_ui",
  "journal",
+ "keymap_editor",
  "language",
  "language_extension",
  "language_model",
@@ -20785,6 +20788,7 @@ dependencies = [
  "gpui",
  "http_client",
  "indoc",
+ "itertools 0.14.0",
  "language",
  "language_model",
  "log",
@@ -20800,6 +20804,7 @@ dependencies = [
  "serde",
  "serde_json",
  "settings",
+ "strum 0.27.1",
  "telemetry",
  "telemetry_events",
  "theme",
@@ -20807,7 +20812,6 @@ dependencies = [
  "tree-sitter-go",
  "tree-sitter-rust",
  "ui",
- "unindent",
  "util",
  "uuid",
  "workspace",

Cargo.toml 🔗

@@ -54,6 +54,8 @@ members = [
     "crates/deepseek",
     "crates/diagnostics",
     "crates/docs_preprocessor",
+    "crates/edit_prediction",
+    "crates/edit_prediction_button",
     "crates/editor",
     "crates/eval",
     "crates/explorer_command_injector",
@@ -82,13 +84,12 @@ members = [
     "crates/http_client_tls",
     "crates/icons",
     "crates/image_viewer",
-    "crates/edit_prediction",
-    "crates/edit_prediction_button",
     "crates/inspector_ui",
     "crates/install_cli",
     "crates/jj",
     "crates/jj_ui",
     "crates/journal",
+    "crates/keymap_editor",
     "crates/language",
     "crates/language_extension",
     "crates/language_model",
@@ -146,6 +147,7 @@ members = [
     "crates/settings",
     "crates/settings_profile_selector",
     "crates/settings_ui",
+    "crates/settings_ui_macros",
     "crates/snippet",
     "crates/snippet_provider",
     "crates/snippets_ui",
@@ -156,9 +158,9 @@ members = [
     "crates/streaming_diff",
     "crates/sum_tree",
     "crates/supermaven",
-    "crates/system_specs",
     "crates/supermaven_api",
     "crates/svg_preview",
+    "crates/system_specs",
     "crates/tab_switcher",
     "crates/task",
     "crates/tasks_ui",
@@ -314,6 +316,7 @@ install_cli = { path = "crates/install_cli" }
 jj = { path = "crates/jj" }
 jj_ui = { path = "crates/jj_ui" }
 journal = { path = "crates/journal" }
+keymap_editor = { path = "crates/keymap_editor" }
 language = { path = "crates/language" }
 language_extension = { path = "crates/language_extension" }
 language_model = { path = "crates/language_model" }
@@ -373,6 +376,7 @@ semantic_version = { path = "crates/semantic_version" }
 session = { path = "crates/session" }
 settings = { path = "crates/settings" }
 settings_ui = { path = "crates/settings_ui" }
+settings_ui_macros = { path = "crates/settings_ui_macros" }
 snippet = { path = "crates/snippet" }
 snippet_provider = { path = "crates/snippet_provider" }
 snippets_ui = { path = "crates/snippets_ui" }
@@ -519,7 +523,7 @@ libc = "0.2"
 libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
 linkify = "0.10.0"
 log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
-lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "39f629bdd03d59abd786ed9fc27e8bca02c0c0ec" }
+lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "0874f8742fe55b4dc94308c1e3c0069710d8eeaf" }
 mach2 = "0.5"
 markup5ever_rcdom = "0.3.0"
 metal = "0.29"
@@ -588,6 +592,7 @@ serde_json_lenient = { version = "0.2", features = [
     "preserve_order",
     "raw_value",
 ] }
+serde_path_to_error = "0.1.17"
 serde_repr = "0.1"
 serde_urlencoded = "0.7"
 sha2 = "0.10"
@@ -691,6 +696,7 @@ features = [
     "Win32_Graphics_Dxgi_Common",
     "Win32_Graphics_Gdi",
     "Win32_Graphics_Imaging",
+    "Win32_Graphics_Hlsl",
     "Win32_Networking_WinSock",
     "Win32_Security",
     "Win32_Security_Credentials",

assets/keymaps/default-linux.json 🔗

@@ -170,6 +170,7 @@
     "context": "Markdown",
     "bindings": {
       "copy": "markdown::Copy",
+      "ctrl-insert": "markdown::Copy",
       "ctrl-c": "markdown::Copy"
     }
   },
@@ -258,6 +259,7 @@
     "context": "AgentPanel > Markdown",
     "bindings": {
       "copy": "markdown::CopyAsMarkdown",
+      "ctrl-insert": "markdown::CopyAsMarkdown",
       "ctrl-c": "markdown::CopyAsMarkdown"
     }
   },

assets/keymaps/vim.json 🔗

@@ -354,6 +354,15 @@
       "ctrl-s": "editor::ShowSignatureHelp"
     }
   },
+  {
+    "context": "showing_completions",
+    "bindings": {
+      "ctrl-d": "vim::ScrollDown",
+      "ctrl-u": "vim::ScrollUp",
+      "ctrl-e": "vim::LineDown",
+      "ctrl-y": "vim::LineUp"
+    }
+  },
   {
     "context": "(vim_mode == normal || vim_mode == helix_normal) && !menu",
     "bindings": {

assets/settings/default.json 🔗

@@ -223,9 +223,25 @@
   "current_line_highlight": "all",
   // Whether to highlight all occurrences of the selected text in an editor.
   "selection_highlight": true,
+  // Whether the text selection should have rounded corners.
+  "rounded_selection": true,
   // The debounce delay before querying highlights from the language
   // server based on the current cursor location.
   "lsp_highlight_debounce": 75,
+  // The minimum APCA perceptual contrast between foreground and background colors.
+  // APCA (Accessible Perceptual Contrast Algorithm) is more accurate than WCAG 2.x,
+  // especially for dark mode. Values range from 0 to 106.
+  //
+  // Based on APCA Readability Criterion (ARC) Bronze Simple Mode:
+  // https://readtech.org/ARC/tests/bronze-simple-mode/
+  // - 0: No contrast adjustment
+  // - 45: Minimum for large fluent text (36px+)
+  // - 60: Minimum for other content text
+  // - 75: Minimum for body text
+  // - 90: Preferred for body text
+  //
+  // This only affects text drawn over highlight backgrounds in the editor.
+  "minimum_contrast_for_highlights": 45,
   // Whether to pop the completions menu while typing in an editor without
   // explicitly requesting it.
   "show_completions_on_input": true,
@@ -279,6 +295,8 @@
   "redact_private_values": false,
   // The default number of lines to expand excerpts in the multibuffer by.
   "expand_excerpt_lines": 5,
+  // The default number of context lines shown in multibuffer excerpts.
+  "excerpt_context_lines": 2,
   // Globs to match against file paths to determine if a file is private.
   "private_files": ["**/.env*", "**/*.pem", "**/*.key", "**/*.cert", "**/*.crt", "**/secrets.yml"],
   // Whether to use additional LSP queries to format (and amend) the code after
@@ -1758,7 +1776,7 @@
       "api_url": "http://localhost:1234/api/v0"
     },
     "deepseek": {
-      "api_url": "https://api.deepseek.com"
+      "api_url": "https://api.deepseek.com/v1"
     },
     "mistral": {
       "api_url": "https://api.mistral.ai/v1"
@@ -1906,7 +1924,10 @@
   "debugger": {
     "stepping_granularity": "line",
     "save_breakpoints": true,
+    "timeout": 2000,
     "dock": "bottom",
+    "log_dap_communications": true,
+    "format_dap_log_messages": true,
     "button": true
   },
   // Configures any number of settings profiles that are temporarily applied on

crates/acp_thread/Cargo.toml 🔗

@@ -19,6 +19,7 @@ test-support = ["gpui/test-support", "project/test-support", "dep:parking_lot"]
 action_log.workspace = true
 agent-client-protocol.workspace = true
 anyhow.workspace = true
+agent_settings.workspace = true
 buffer_diff.workspace = true
 collections.workspace = true
 editor.workspace = true

crates/acp_thread/src/acp_thread.rs 🔗

@@ -3,6 +3,7 @@ mod diff;
 mod mention;
 mod terminal;
 
+use agent_settings::AgentSettings;
 use collections::HashSet;
 pub use connection::*;
 pub use diff::*;
@@ -10,6 +11,7 @@ use language::language_settings::FormatOnSave;
 pub use mention::*;
 use project::lsp_store::{FormatTrigger, LspFormatTarget};
 use serde::{Deserialize, Serialize};
+use settings::Settings as _;
 pub use terminal::*;
 
 use action_log::ActionLog;
@@ -1230,9 +1232,29 @@ impl AcpThread {
         tool_call: acp::ToolCallUpdate,
         options: Vec<acp::PermissionOption>,
         cx: &mut Context<Self>,
-    ) -> Result<oneshot::Receiver<acp::PermissionOptionId>, acp::Error> {
+    ) -> Result<BoxFuture<'static, acp::RequestPermissionOutcome>> {
         let (tx, rx) = oneshot::channel();
 
+        if AgentSettings::get_global(cx).always_allow_tool_actions {
+            // Don't use AllowAlways, because then if you were to turn off always_allow_tool_actions,
+            // some tools would (incorrectly) continue to auto-accept.
+            if let Some(allow_once_option) = options.iter().find_map(|option| {
+                if matches!(option.kind, acp::PermissionOptionKind::AllowOnce) {
+                    Some(option.id.clone())
+                } else {
+                    None
+                }
+            }) {
+                self.upsert_tool_call_inner(tool_call, ToolCallStatus::Pending, cx)?;
+                return Ok(async {
+                    acp::RequestPermissionOutcome::Selected {
+                        option_id: allow_once_option,
+                    }
+                }
+                .boxed());
+            }
+        }
+
         let status = ToolCallStatus::WaitingForConfirmation {
             options,
             respond_tx: tx,
@@ -1240,7 +1262,16 @@ impl AcpThread {
 
         self.upsert_tool_call_inner(tool_call, status, cx)?;
         cx.emit(AcpThreadEvent::ToolAuthorizationRequired);
-        Ok(rx)
+
+        let fut = async {
+            match rx.await {
+                Ok(option) => acp::RequestPermissionOutcome::Selected { option_id: option },
+                Err(oneshot::Canceled) => acp::RequestPermissionOutcome::Cancelled,
+            }
+        }
+        .boxed();
+
+        Ok(fut)
     }
 
     pub fn authorize_tool_call(

crates/acp_thread/src/connection.rs 🔗

@@ -393,14 +393,15 @@ mod test_support {
                     };
                     let task = cx.spawn(async move |cx| {
                         if let Some((tool_call, options)) = permission_request {
-                            let permission = thread.update(cx, |thread, cx| {
-                                thread.request_tool_call_authorization(
-                                    tool_call.clone().into(),
-                                    options.clone(),
-                                    cx,
-                                )
-                            })?;
-                            permission?.await?;
+                            thread
+                                .update(cx, |thread, cx| {
+                                    thread.request_tool_call_authorization(
+                                        tool_call.clone().into(),
+                                        options.clone(),
+                                        cx,
+                                    )
+                                })??
+                                .await;
                         }
                         thread.update(cx, |thread, cx| {
                             thread.handle_session_update(update.clone(), cx).unwrap();

crates/acp_thread/src/diff.rs 🔗

@@ -1,6 +1,6 @@
 use anyhow::Result;
 use buffer_diff::{BufferDiff, BufferDiffSnapshot};
-use editor::{MultiBuffer, PathKey};
+use editor::{MultiBuffer, PathKey, multibuffer_context_lines};
 use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task};
 use itertools::Itertools;
 use language::{
@@ -64,7 +64,7 @@ impl Diff {
                             PathKey::for_buffer(&buffer, cx),
                             buffer.clone(),
                             hunk_ranges,
-                            editor::DEFAULT_MULTIBUFFER_CONTEXT,
+                            multibuffer_context_lines(cx),
                             cx,
                         );
                         multibuffer.add_diff(diff, cx);
@@ -279,7 +279,7 @@ impl PendingDiff {
                         path_key,
                         buffer,
                         ranges,
-                        editor::DEFAULT_MULTIBUFFER_CONTEXT,
+                        multibuffer_context_lines(cx),
                         cx,
                     );
                     multibuffer.add_diff(buffer_diff.clone(), cx);
@@ -305,7 +305,7 @@ impl PendingDiff {
                 PathKey::for_buffer(&self.new_buffer, cx),
                 self.new_buffer.clone(),
                 ranges,
-                editor::DEFAULT_MULTIBUFFER_CONTEXT,
+                multibuffer_context_lines(cx),
                 cx,
             );
             let end = multibuffer.len(cx);

crates/agent2/src/agent.rs 🔗

@@ -762,18 +762,15 @@ impl NativeAgentConnection {
                                 options,
                                 response,
                             }) => {
-                                let recv = acp_thread.update(cx, |thread, cx| {
+                                let outcome_task = acp_thread.update(cx, |thread, cx| {
                                     thread.request_tool_call_authorization(tool_call, options, cx)
-                                })?;
+                                })??;
                                 cx.background_spawn(async move {
-                                    if let Some(recv) = recv.log_err()
-                                        && let Some(option) = recv
-                                            .await
-                                            .context("authorization sender was dropped")
-                                            .log_err()
+                                    if let acp::RequestPermissionOutcome::Selected { option_id } =
+                                        outcome_task.await
                                     {
                                         response
-                                            .send(option)
+                                            .send(option_id)
                                             .map(|_| anyhow!("authorization receiver was dropped"))
                                             .log_err();
                                     }

crates/agent2/src/tests/mod.rs 🔗

@@ -950,6 +950,7 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
         paths::settings_file(),
         json!({
             "agent": {
+                "always_allow_tool_actions": true,
                 "profiles": {
                     "test": {
                         "name": "Test Profile",

crates/agent2/src/thread.rs 🔗

@@ -484,11 +484,15 @@ impl AgentMessage {
         };
 
         for tool_result in self.tool_results.values() {
+            let mut tool_result = tool_result.clone();
+            // Surprisingly, the API fails if we return an empty string here.
+            // It thinks we are sending a tool use without a tool result.
+            if tool_result.content.is_empty() {
+                tool_result.content = "<Tool returned an empty string>".into();
+            }
             user_message
                 .content
-                .push(language_model::MessageContent::ToolResult(
-                    tool_result.clone(),
-                ));
+                .push(language_model::MessageContent::ToolResult(tool_result));
         }
 
         let mut messages = Vec::new();

crates/agent2/src/tools/context_server_registry.rs 🔗

@@ -169,15 +169,18 @@ impl AnyAgentTool for ContextServerTool {
     fn run(
         self: Arc<Self>,
         input: serde_json::Value,
-        _event_stream: ToolCallEventStream,
+        event_stream: ToolCallEventStream,
         cx: &mut App,
     ) -> Task<Result<AgentToolOutput>> {
         let Some(server) = self.store.read(cx).get_running_server(&self.server_id) else {
             return Task::ready(Err(anyhow!("Context server not found")));
         };
         let tool_name = self.tool.name.clone();
+        let authorize = event_stream.authorize(self.initial_title(input.clone()), cx);
 
         cx.spawn(async move |_cx| {
+            authorize.await?;
+
             let Some(protocol) = server.client() else {
                 bail!("Context server not initialized");
             };

crates/agent_servers/src/acp.rs 🔗

@@ -3,15 +3,13 @@ use acp_thread::AgentConnection;
 use acp_tools::AcpConnectionRegistry;
 use action_log::ActionLog;
 use agent_client_protocol::{self as acp, Agent as _, ErrorCode};
-use agent_settings::AgentSettings;
 use anyhow::anyhow;
 use collections::HashMap;
 use futures::AsyncBufReadExt as _;
-use futures::channel::oneshot;
 use futures::io::BufReader;
 use project::Project;
 use serde::Deserialize;
-use settings::Settings as _;
+
 use std::{any::Any, cell::RefCell};
 use std::{path::Path, rc::Rc};
 use thiserror::Error;
@@ -345,28 +343,7 @@ impl acp::Client for ClientDelegate {
     ) -> Result<acp::RequestPermissionResponse, acp::Error> {
         let cx = &mut self.cx.clone();
 
-        // If always_allow_tool_actions is enabled, then auto-choose the first "Allow" button
-        if AgentSettings::try_read_global(cx, |settings| settings.always_allow_tool_actions)
-            .unwrap_or(false)
-        {
-            // Don't use AllowAlways, because then if you were to turn off always_allow_tool_actions,
-            // some tools would (incorrectly) continue to auto-accept.
-            if let Some(allow_once_option) = arguments.options.iter().find_map(|option| {
-                if matches!(option.kind, acp::PermissionOptionKind::AllowOnce) {
-                    Some(option.id.clone())
-                } else {
-                    None
-                }
-            }) {
-                return Ok(acp::RequestPermissionResponse {
-                    outcome: acp::RequestPermissionOutcome::Selected {
-                        option_id: allow_once_option,
-                    },
-                });
-            }
-        }
-
-        let rx = self
+        let task = self
             .sessions
             .borrow()
             .get(&arguments.session_id)
@@ -374,14 +351,9 @@ impl acp::Client for ClientDelegate {
             .thread
             .update(cx, |thread, cx| {
                 thread.request_tool_call_authorization(arguments.tool_call, arguments.options, cx)
-            })?;
+            })??;
 
-        let result = rx?.await;
-
-        let outcome = match result {
-            Ok(option) => acp::RequestPermissionOutcome::Selected { option_id: option },
-            Err(oneshot::Canceled) => acp::RequestPermissionOutcome::Cancelled,
-        };
+        let outcome = task.await;
 
         Ok(acp::RequestPermissionResponse { outcome })
     }

crates/agent_servers/src/agent_servers.rs 🔗

@@ -7,20 +7,24 @@ mod settings;
 #[cfg(any(test, feature = "test-support"))]
 pub mod e2e_tests;
 
+use anyhow::Context as _;
 pub use claude::*;
 pub use custom::*;
+use fs::Fs;
+use fs::RemoveOptions;
+use fs::RenameOptions;
+use futures::StreamExt as _;
 pub use gemini::*;
+use gpui::AppContext;
+use node_runtime::NodeRuntime;
 pub use settings::*;
 
 use acp_thread::AgentConnection;
 use acp_thread::LoadError;
 use anyhow::Result;
 use anyhow::anyhow;
-use anyhow::bail;
 use collections::HashMap;
-use gpui::AppContext as _;
 use gpui::{App, AsyncApp, Entity, SharedString, Task};
-use node_runtime::VersionStrategy;
 use project::Project;
 use schemars::JsonSchema;
 use semver::Version;
@@ -40,11 +44,11 @@ pub fn init(cx: &mut App) {
 
 pub struct AgentServerDelegate {
     project: Entity<Project>,
-    status_tx: watch::Sender<SharedString>,
+    status_tx: Option<watch::Sender<SharedString>>,
 }
 
 impl AgentServerDelegate {
-    pub fn new(project: Entity<Project>, status_tx: watch::Sender<SharedString>) -> Self {
+    pub fn new(project: Entity<Project>, status_tx: Option<watch::Sender<SharedString>>) -> Self {
         Self { project, status_tx }
     }
 
@@ -64,70 +68,163 @@ impl AgentServerDelegate {
         let project = self.project;
         let fs = project.read(cx).fs().clone();
         let Some(node_runtime) = project.read(cx).node_runtime().cloned() else {
-            return Task::ready(Err(anyhow!("Missing node runtime")));
+            return Task::ready(Err(anyhow!(
+                "External agents are not yet available in remote projects."
+            )));
         };
-        let mut status_tx = self.status_tx;
+        let status_tx = self.status_tx;
 
         cx.spawn(async move |cx| {
             if !ignore_system_version {
                 if let Some(bin) = find_bin_in_path(binary_name.clone(), &project, cx).await {
-                    return Ok(AgentServerCommand { path: bin, args: Vec::new(), env: Default::default() })
+                    return Ok(AgentServerCommand {
+                        path: bin,
+                        args: Vec::new(),
+                        env: Default::default(),
+                    });
                 }
             }
 
-            cx.background_spawn(async move {
+            cx.spawn(async move |cx| {
                 let node_path = node_runtime.binary_path().await?;
-                let dir = paths::data_dir().join("external_agents").join(binary_name.as_str());
+                let dir = paths::data_dir()
+                    .join("external_agents")
+                    .join(binary_name.as_str());
                 fs.create_dir(&dir).await?;
-                let local_executable_path = dir.join(entrypoint_path);
-                let command = AgentServerCommand {
-                    path: node_path,
-                    args: vec![local_executable_path.to_string_lossy().to_string()],
-                    env: Default::default(),
-                };
 
-                let installed_version = node_runtime
-                    .npm_package_installed_version(&dir, &package_name)
-                    .await?
-                    .filter(|version| {
-                        Version::from_str(&version)
-                            .is_ok_and(|version| Some(version) >= minimum_version)
-                    });
+                let mut stream = fs.read_dir(&dir).await?;
+                let mut versions = Vec::new();
+                let mut to_delete = Vec::new();
+                while let Some(entry) = stream.next().await {
+                    let Ok(entry) = entry else { continue };
+                    let Some(file_name) = entry.file_name() else {
+                        continue;
+                    };
 
-                status_tx.send("Checking for latest version…".into())?;
-                let latest_version = match node_runtime.npm_package_latest_version(&package_name).await
+                    if let Some(version) = file_name
+                        .to_str()
+                        .and_then(|name| semver::Version::from_str(&name).ok())
+                    {
+                        versions.push((version, file_name.to_owned()));
+                    } else {
+                        to_delete.push(file_name.to_owned())
+                    }
+                }
+
+                versions.sort();
+                let newest_version = if let Some((version, file_name)) = versions.last().cloned()
+                    && minimum_version.is_none_or(|minimum_version| version >= minimum_version)
                 {
-                    Ok(latest_version) => latest_version,
-                    Err(e) => {
-                        if let Some(installed_version) = installed_version {
-                            log::error!("{e}");
-                            log::warn!("failed to fetch latest version of {package_name}, falling back to cached version {installed_version}");
-                            return Ok(command);
-                        } else {
-                            bail!(e);
+                    versions.pop();
+                    Some(file_name)
+                } else {
+                    None
+                };
+                log::debug!("existing version of {package_name}: {newest_version:?}");
+                to_delete.extend(versions.into_iter().map(|(_, file_name)| file_name));
+
+                cx.background_spawn({
+                    let fs = fs.clone();
+                    let dir = dir.clone();
+                    async move {
+                        for file_name in to_delete {
+                            fs.remove_dir(
+                                &dir.join(file_name),
+                                RemoveOptions {
+                                    recursive: true,
+                                    ignore_if_not_exists: false,
+                                },
+                            )
+                            .await
+                            .ok();
+                        }
+                    }
+                })
+                .detach();
+
+                let version = if let Some(file_name) = newest_version {
+                    cx.background_spawn({
+                        let file_name = file_name.clone();
+                        let dir = dir.clone();
+                        async move {
+                            let latest_version =
+                                node_runtime.npm_package_latest_version(&package_name).await;
+                            if let Ok(latest_version) = latest_version
+                                && &latest_version != &file_name.to_string_lossy()
+                            {
+                                Self::download_latest_version(
+                                    fs,
+                                    dir.clone(),
+                                    node_runtime,
+                                    package_name,
+                                )
+                                .await
+                                .log_err();
+                            }
                         }
+                    })
+                    .detach();
+                    file_name
+                } else {
+                    if let Some(mut status_tx) = status_tx {
+                        status_tx.send("Installing…".into()).ok();
                     }
+                    let dir = dir.clone();
+                    cx.background_spawn(Self::download_latest_version(
+                        fs,
+                        dir.clone(),
+                        node_runtime,
+                        package_name,
+                    ))
+                    .await?
+                    .into()
                 };
+                anyhow::Ok(AgentServerCommand {
+                    path: node_path,
+                    args: vec![
+                        dir.join(version)
+                            .join(entrypoint_path)
+                            .to_string_lossy()
+                            .to_string(),
+                    ],
+                    env: Default::default(),
+                })
+            })
+            .await
+            .map_err(|e| LoadError::FailedToInstall(e.to_string().into()).into())
+        })
+    }
 
-                let should_install = node_runtime
-                    .should_install_npm_package(
-                        &package_name,
-                        &local_executable_path,
-                        &dir,
-                        VersionStrategy::Latest(&latest_version),
-                    )
-                    .await;
+    async fn download_latest_version(
+        fs: Arc<dyn Fs>,
+        dir: PathBuf,
+        node_runtime: NodeRuntime,
+        package_name: SharedString,
+    ) -> Result<String> {
+        log::debug!("downloading latest version of {package_name}");
 
-                if should_install {
-                    status_tx.send("Installing latest version…".into())?;
-                    node_runtime
-                        .npm_install_packages(&dir, &[(&package_name, &latest_version)])
-                        .await?;
-                }
+        let tmp_dir = tempfile::tempdir_in(&dir)?;
 
-                Ok(command)
-            }).await.map_err(|e| LoadError::FailedToInstall(e.to_string().into()).into())
-        })
+        node_runtime
+            .npm_install_packages(tmp_dir.path(), &[(&package_name, "latest")])
+            .await?;
+
+        let version = node_runtime
+            .npm_package_installed_version(tmp_dir.path(), &package_name)
+            .await?
+            .context("expected package to be installed")?;
+
+        fs.rename(
+            &tmp_dir.keep(),
+            &dir.join(&version),
+            RenameOptions {
+                ignore_if_exists: true,
+                overwrite: false,
+            },
+        )
+        .await?;
+
+        anyhow::Ok(version)
     }
 }
 

crates/agent_servers/src/claude.rs 🔗

@@ -1,8 +1,8 @@
 use language_models::provider::anthropic::AnthropicLanguageModelProvider;
 use settings::SettingsStore;
-use std::any::Any;
 use std::path::Path;
 use std::rc::Rc;
+use std::{any::Any, path::PathBuf};
 
 use anyhow::Result;
 use gpui::{App, AppContext as _, SharedString, Task};
@@ -13,9 +13,47 @@ use acp_thread::AgentConnection;
 #[derive(Clone)]
 pub struct ClaudeCode;
 
+pub struct ClaudeCodeLoginCommand {
+    pub path: PathBuf,
+    pub arguments: Vec<String>,
+}
+
 impl ClaudeCode {
     const BINARY_NAME: &'static str = "claude-code-acp";
     const PACKAGE_NAME: &'static str = "@zed-industries/claude-code-acp";
+
+    pub fn login_command(
+        delegate: AgentServerDelegate,
+        cx: &mut App,
+    ) -> Task<Result<ClaudeCodeLoginCommand>> {
+        let settings = cx.read_global(|settings: &SettingsStore, _| {
+            settings.get::<AllAgentServersSettings>(None).claude.clone()
+        });
+
+        cx.spawn(async move |cx| {
+            let mut command = if let Some(settings) = settings {
+                settings.command
+            } else {
+                cx.update(|cx| {
+                    delegate.get_or_npm_install_builtin_agent(
+                        Self::BINARY_NAME.into(),
+                        Self::PACKAGE_NAME.into(),
+                        "node_modules/@anthropic-ai/claude-code/cli.js".into(),
+                        true,
+                        None,
+                        cx,
+                    )
+                })?
+                .await?
+            };
+            command.args.push("/login".into());
+
+            Ok(ClaudeCodeLoginCommand {
+                path: command.path,
+                arguments: command.args,
+            })
+        })
+    }
 }
 
 impl AgentServer for ClaudeCode {

crates/agent_servers/src/e2e_tests.rs 🔗

@@ -498,7 +498,7 @@ pub async fn new_test_thread(
     current_dir: impl AsRef<Path>,
     cx: &mut TestAppContext,
 ) -> Entity<AcpThread> {
-    let delegate = AgentServerDelegate::new(project.clone(), watch::channel("".into()).0);
+    let delegate = AgentServerDelegate::new(project.clone(), None);
 
     let connection = cx
         .update(|cx| server.connect(current_dir.as_ref(), delegate, cx))

crates/agent_servers/src/gemini.rs 🔗

@@ -63,7 +63,9 @@ impl AgentServer for Gemini {
                 })?
                 .await?
             };
-            command.args.push("--experimental-acp".into());
+            if !command.args.contains(&ACP_ARG.into()) {
+                command.args.push(ACP_ARG.into());
+            }
 
             if let Some(api_key) = cx.update(GoogleLanguageModelProvider::api_key)?.await.ok() {
                 command
@@ -86,17 +88,17 @@ impl AgentServer for Gemini {
                             .await;
                         let current_version =
                             String::from_utf8(version_output?.stdout)?.trim().to_owned();
-                        if !connection.prompt_capabilities().image {
-                            return Err(LoadError::Unsupported {
-                                current_version: current_version.into(),
-                                command: command.path.to_string_lossy().to_string().into(),
-                                minimum_version: Self::MINIMUM_VERSION.into(),
-                            }
-                            .into());
+
+                        log::error!("connected to gemini, but missing prompt_capabilities.image (version is {current_version})");
+                        return Err(LoadError::Unsupported {
+                            current_version: current_version.into(),
+                            command: command.path.to_string_lossy().to_string().into(),
+                            minimum_version: Self::MINIMUM_VERSION.into(),
                         }
+                        .into());
                     }
                 }
-                Err(_) => {
+                Err(e) => {
                     let version_fut = util::command::new_smol_command(&command.path)
                         .args(command.args.iter())
                         .arg("--version")
@@ -111,12 +113,19 @@ impl AgentServer for Gemini {
 
                     let (version_output, help_output) =
                         futures::future::join(version_fut, help_fut).await;
-
-                    let current_version = std::str::from_utf8(&version_output?.stdout)?
-                        .trim()
-                        .to_string();
-                    let supported = String::from_utf8(help_output?.stdout)?.contains(ACP_ARG);
-
+                    let Some(version_output) = version_output.ok().and_then(|output| String::from_utf8(output.stdout).ok()) else {
+                        return result;
+                    };
+                    let Some((help_stdout, help_stderr)) = help_output.ok().and_then(|output| String::from_utf8(output.stdout).ok().zip(String::from_utf8(output.stderr).ok())) else  {
+                        return result;
+                    };
+
+                    let current_version = version_output.trim().to_string();
+                    let supported = help_stdout.contains(ACP_ARG) || current_version.parse::<semver::Version>().is_ok_and(|version| version >= Self::MINIMUM_VERSION.parse::<semver::Version>().unwrap());
+
+                    log::error!("failed to create ACP connection to gemini (version is {current_version}, supported: {supported}): {e}");
+                    log::debug!("gemini --help stdout: {help_stdout:?}");
+                    log::debug!("gemini --help stderr: {help_stderr:?}");
                     if !supported {
                         return Err(LoadError::Unsupported {
                             current_version: current_version.into(),

crates/agent_servers/src/settings.rs 🔗

@@ -6,13 +6,13 @@ use collections::HashMap;
 use gpui::{App, SharedString};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
 pub fn init(cx: &mut App) {
     AllAgentServersSettings::register(cx);
 }
 
-#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, Debug)]
+#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, Debug, SettingsUi)]
 pub struct AllAgentServersSettings {
     pub gemini: Option<BuiltinAgentServerSettings>,
     pub claude: Option<CustomAgentServerSettings>,

crates/agent_settings/src/agent_settings.rs 🔗

@@ -8,7 +8,7 @@ use gpui::{App, Pixels, SharedString};
 use language_model::LanguageModel;
 use schemars::{JsonSchema, json_schema};
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 use std::borrow::Cow;
 
 pub use crate::agent_profile::*;
@@ -48,7 +48,7 @@ pub enum NotifyWhenAgentWaiting {
     Never,
 }
 
-#[derive(Default, Clone, Debug)]
+#[derive(Default, Clone, Debug, SettingsUi)]
 pub struct AgentSettings {
     pub enabled: bool,
     pub button: bool,
@@ -352,18 +352,19 @@ impl JsonSchema for LanguageModelProviderSetting {
     fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
         json_schema!({
             "enum": [
-                "anthropic",
                 "amazon-bedrock",
+                "anthropic",
+                "copilot_chat",
+                "deepseek",
                 "google",
                 "lmstudio",
+                "mistral",
                 "ollama",
                 "openai",
-                "zed.dev",
-                "copilot_chat",
-                "deepseek",
                 "openrouter",
-                "mistral",
-                "vercel"
+                "vercel",
+                "x_ai",
+                "zed.dev"
             ]
         })
     }

crates/agent_ui/Cargo.toml 🔗

@@ -80,6 +80,7 @@ serde.workspace = true
 serde_json.workspace = true
 serde_json_lenient.workspace = true
 settings.workspace = true
+shlex.workspace = true
 smol.workspace = true
 streaming_diff.workspace = true
 task.workspace = true

crates/agent_ui/src/acp/message_editor.rs 🔗

@@ -648,7 +648,7 @@ impl MessageEditor {
             self.project.read(cx).fs().clone(),
             self.history_store.clone(),
         ));
-        let delegate = AgentServerDelegate::new(self.project.clone(), watch::channel("".into()).0);
+        let delegate = AgentServerDelegate::new(self.project.clone(), None);
         let connection = server.connect(Path::new(""), delegate, cx);
         cx.spawn(async move |_, cx| {
             let agent = connection.await?;
@@ -2131,7 +2131,7 @@ mod tests {
                     lsp::SymbolInformation {
                         name: "MySymbol".into(),
                         location: lsp::Location {
-                            uri: lsp::Url::from_file_path(path!("/dir/a/one.txt")).unwrap(),
+                            uri: lsp::Uri::from_file_path(path!("/dir/a/one.txt")).unwrap(),
                             range: lsp::Range::new(
                                 lsp::Position::new(0, 0),
                                 lsp::Position::new(0, 1),

crates/agent_ui/src/acp/thread_view.rs 🔗

@@ -9,7 +9,7 @@ use agent_client_protocol::{self as acp, PromptCapabilities};
 use agent_servers::{AgentServer, AgentServerDelegate, ClaudeCode};
 use agent_settings::{AgentProfileId, AgentSettings, CompletionMode, NotifyWhenAgentWaiting};
 use agent2::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore};
-use anyhow::{Result, anyhow, bail};
+use anyhow::{Context as _, Result, anyhow, bail};
 use audio::{Audio, Sound};
 use buffer_diff::BufferDiff;
 use client::zed_urls;
@@ -423,7 +423,7 @@ impl AcpThreadView {
             .map(|worktree| worktree.read(cx).abs_path())
             .unwrap_or_else(|| paths::home_dir().as_path().into());
         let (tx, mut rx) = watch::channel("Loading…".into());
-        let delegate = AgentServerDelegate::new(project.clone(), tx);
+        let delegate = AgentServerDelegate::new(project.clone(), Some(tx));
 
         let connect_task = agent.connect(&root_dir, delegate, cx);
         let load_task = cx.spawn_in(window, async move |this, cx| {
@@ -1386,31 +1386,52 @@ impl AcpThreadView {
         let Some(terminal_panel) = workspace.read(cx).panel::<TerminalPanel>(cx) else {
             return Task::ready(Ok(()));
         };
-        let project = workspace.read(cx).project().read(cx);
+        let project_entity = workspace.read(cx).project();
+        let project = project_entity.read(cx);
         let cwd = project.first_project_directory(cx);
         let shell = project.terminal_settings(&cwd, cx).shell.clone();
 
-        let terminal = terminal_panel.update(cx, |terminal_panel, cx| {
-            terminal_panel.spawn_task(
-                &SpawnInTerminal {
-                    id: task::TaskId("claude-login".into()),
-                    full_label: "claude /login".to_owned(),
-                    label: "claude /login".to_owned(),
-                    command: Some("claude".to_owned()),
-                    args: vec!["/login".to_owned()],
-                    command_label: "claude /login".to_owned(),
-                    cwd,
-                    use_new_terminal: true,
-                    allow_concurrent_runs: true,
-                    hide: task::HideStrategy::Always,
-                    shell,
-                    ..Default::default()
-                },
-                window,
-                cx,
-            )
-        });
-        cx.spawn(async move |cx| {
+        let delegate = AgentServerDelegate::new(project_entity.clone(), None);
+        let command = ClaudeCode::login_command(delegate, cx);
+
+        window.spawn(cx, async move |cx| {
+            let login_command = command.await?;
+            let command = login_command
+                .path
+                .to_str()
+                .with_context(|| format!("invalid login command: {:?}", login_command.path))?;
+            let command = shlex::try_quote(command)?;
+            let args = login_command
+                .arguments
+                .iter()
+                .map(|arg| {
+                    Ok(shlex::try_quote(arg)
+                        .context("Failed to quote argument")?
+                        .to_string())
+                })
+                .collect::<Result<Vec<_>>>()?;
+
+            let terminal = terminal_panel.update_in(cx, |terminal_panel, window, cx| {
+                terminal_panel.spawn_task(
+                    &SpawnInTerminal {
+                        id: task::TaskId("claude-login".into()),
+                        full_label: "claude /login".to_owned(),
+                        label: "claude /login".to_owned(),
+                        command: Some(command.into()),
+                        args,
+                        command_label: "claude /login".to_owned(),
+                        cwd,
+                        use_new_terminal: true,
+                        allow_concurrent_runs: true,
+                        hide: task::HideStrategy::Always,
+                        shell,
+                        ..Default::default()
+                    },
+                    window,
+                    cx,
+                )
+            })?;
+
             let terminal = terminal.await?;
             let mut exit_status = terminal
                 .read_with(cx, |terminal, cx| terminal.wait_for_completed_task(cx))?

crates/agent_ui/src/agent_configuration.rs 🔗

@@ -331,6 +331,7 @@ impl AgentConfiguration {
                             .gap_0p5()
                             .child(
                                 h_flex()
+                                    .pr_1()
                                     .w_full()
                                     .gap_2()
                                     .justify_between()
@@ -1022,6 +1023,7 @@ impl AgentConfiguration {
                             .gap_0p5()
                             .child(
                                 h_flex()
+                                    .pr_1()
                                     .w_full()
                                     .gap_2()
                                     .justify_between()
@@ -1052,7 +1054,7 @@ impl AgentConfiguration {
                             )
                             .child(
                                 Label::new(
-                                    "Bring the agent of your choice to Zed via our new Agent Client Protocol.",
+                                    "All agents connected through the Agent Client Protocol.",
                                 )
                                 .color(Color::Muted),
                             ),
@@ -1063,7 +1065,12 @@ impl AgentConfiguration {
                         ExternalAgent::Gemini,
                         cx,
                     ))
-                    // TODO add CC
+                    .child(self.render_agent_server(
+                        IconName::AiClaude,
+                        "Claude Code",
+                        ExternalAgent::ClaudeCode,
+                        cx,
+                    ))
                     .children(user_defined_agents),
             )
     }
@@ -1093,26 +1100,24 @@ impl AgentConfiguration {
                     .child(Label::new(name.clone())),
             )
             .child(
-                h_flex().gap_1().child(
-                    Button::new(
-                        SharedString::from(format!("start_acp_thread-{name}")),
-                        "Start New Thread",
-                    )
-                    .label_size(LabelSize::Small)
-                    .icon(IconName::Thread)
-                    .icon_position(IconPosition::Start)
-                    .icon_size(IconSize::XSmall)
-                    .icon_color(Color::Muted)
-                    .on_click(move |_, window, cx| {
-                        window.dispatch_action(
-                            NewExternalAgentThread {
-                                agent: Some(agent.clone()),
-                            }
-                            .boxed_clone(),
-                            cx,
-                        );
-                    }),
-                ),
+                Button::new(
+                    SharedString::from(format!("start_acp_thread-{name}")),
+                    "Start New Thread",
+                )
+                .label_size(LabelSize::Small)
+                .icon(IconName::Thread)
+                .icon_position(IconPosition::Start)
+                .icon_size(IconSize::XSmall)
+                .icon_color(Color::Muted)
+                .on_click(move |_, window, cx| {
+                    window.dispatch_action(
+                        NewExternalAgentThread {
+                            agent: Some(agent.clone()),
+                        }
+                        .boxed_clone(),
+                        cx,
+                    );
+                }),
             )
     }
 }

crates/agent_ui/src/agent_diff.rs 🔗

@@ -10,6 +10,7 @@ use editor::{
     Direction, Editor, EditorEvent, EditorSettings, MultiBuffer, MultiBufferSnapshot,
     SelectionEffects, ToPoint,
     actions::{GoToHunk, GoToPreviousHunk},
+    multibuffer_context_lines,
     scroll::Autoscroll,
 };
 use gpui::{
@@ -257,7 +258,7 @@ impl AgentDiffPane {
                         path_key.clone(),
                         buffer.clone(),
                         diff_hunk_ranges,
-                        editor::DEFAULT_MULTIBUFFER_CONTEXT,
+                        multibuffer_context_lines(cx),
                         cx,
                     );
                     multibuffer.add_diff(diff_handle, cx);

crates/agent_ui/src/agent_panel.rs 🔗

@@ -86,7 +86,7 @@ use zed_actions::{
 
 const AGENT_PANEL_KEY: &str = "agent_panel";
 
-#[derive(Serialize, Deserialize)]
+#[derive(Serialize, Deserialize, Debug)]
 struct SerializedAgentPanel {
     width: Option<Pixels>,
     selected_agent: Option<AgentType>,
@@ -284,6 +284,17 @@ impl AgentType {
     }
 }
 
+impl From<ExternalAgent> for AgentType {
+    fn from(value: ExternalAgent) -> Self {
+        match value {
+            ExternalAgent::Gemini => Self::Gemini,
+            ExternalAgent::ClaudeCode => Self::ClaudeCode,
+            ExternalAgent::Custom { name, command } => Self::Custom { name, command },
+            ExternalAgent::NativeAgent => Self::NativeAgent,
+        }
+    }
+}
+
 impl ActiveView {
     pub fn which_font_size_used(&self) -> WhichFontSize {
         match self {
@@ -592,7 +603,7 @@ impl AgentPanel {
                 .log_err()
                 .flatten()
             {
-                Some(serde_json::from_str::<SerializedAgentPanel>(&panel)?)
+                serde_json::from_str::<SerializedAgentPanel>(&panel).log_err()
             } else {
                 None
             };
@@ -1049,6 +1060,11 @@ impl AgentPanel {
             editor
         });
 
+        if self.selected_agent != AgentType::TextThread {
+            self.selected_agent = AgentType::TextThread;
+            self.serialize(cx);
+        }
+
         self.set_active_view(
             ActiveView::prompt_editor(
                 context_editor.clone(),
@@ -1140,6 +1156,12 @@ impl AgentPanel {
                     }
                 }
 
+                let selected_agent = ext_agent.into();
+                if this.selected_agent != selected_agent {
+                    this.selected_agent = selected_agent;
+                    this.serialize(cx);
+                }
+
                 let thread_view = cx.new(|cx| {
                     crate::acp::AcpThreadView::new(
                         server,
@@ -1235,6 +1257,12 @@ impl AgentPanel {
                 cx,
             )
         });
+
+        if self.selected_agent != AgentType::TextThread {
+            self.selected_agent = AgentType::TextThread;
+            self.serialize(cx);
+        }
+
         self.set_active_view(
             ActiveView::prompt_editor(
                 editor,
@@ -1860,11 +1888,6 @@ impl AgentPanel {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        if self.selected_agent != agent {
-            self.selected_agent = agent.clone();
-            self.serialize(cx);
-        }
-
         match agent {
             AgentType::Zed => {
                 window.dispatch_action(

crates/agent_ui/src/slash_command_settings.rs 🔗

@@ -2,10 +2,10 @@ use anyhow::Result;
 use gpui::App;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
 /// Settings for slash commands.
-#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
+#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema, SettingsUi)]
 pub struct SlashCommandSettings {
     /// Settings for the `/cargo-workspace` slash command.
     #[serde(default)]

crates/assistant_tools/src/delete_path_tool.rs 🔗

@@ -35,7 +35,7 @@ impl Tool for DeletePathTool {
     }
 
     fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity<Project>, _: &App) -> bool {
-        false
+        true
     }
 
     fn may_perform_edits(&self) -> bool {

crates/assistant_tools/src/edit_file_tool.rs 🔗

@@ -11,7 +11,9 @@ use assistant_tool::{
     AnyToolCard, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput, ToolUseStatus,
 };
 use buffer_diff::{BufferDiff, BufferDiffSnapshot};
-use editor::{Editor, EditorMode, MinimapVisibility, MultiBuffer, PathKey};
+use editor::{
+    Editor, EditorMode, MinimapVisibility, MultiBuffer, PathKey, multibuffer_context_lines,
+};
 use futures::StreamExt;
 use gpui::{
     Animation, AnimationExt, AnyWindowHandle, App, AppContext, AsyncApp, Entity, Task,
@@ -474,7 +476,7 @@ impl Tool for EditFileTool {
                             PathKey::for_buffer(&buffer, cx),
                             buffer,
                             diff_hunk_ranges,
-                            editor::DEFAULT_MULTIBUFFER_CONTEXT,
+                            multibuffer_context_lines(cx),
                             cx,
                         );
                         multibuffer.add_diff(buffer_diff, cx);
@@ -703,7 +705,7 @@ impl EditFileToolCard {
                 PathKey::for_buffer(buffer, cx),
                 buffer.clone(),
                 ranges,
-                editor::DEFAULT_MULTIBUFFER_CONTEXT,
+                multibuffer_context_lines(cx),
                 cx,
             );
             let end = multibuffer.len(cx);
@@ -791,7 +793,7 @@ impl EditFileToolCard {
                         path_key,
                         buffer,
                         ranges,
-                        editor::DEFAULT_MULTIBUFFER_CONTEXT,
+                        multibuffer_context_lines(cx),
                         cx,
                     );
                     multibuffer.add_diff(buffer_diff.clone(), cx);

crates/audio/src/audio_settings.rs 🔗

@@ -2,9 +2,9 @@ use anyhow::Result;
 use gpui::App;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
-#[derive(Deserialize, Debug)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)]
 pub struct AudioSettings {
     /// Opt into the new audio system.
     #[serde(rename = "experimental.rodio_audio", default)]

crates/auto_update/src/auto_update.rs 🔗

@@ -10,7 +10,7 @@ use paths::remote_servers_dir;
 use release_channel::{AppCommitSha, ReleaseChannel};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources, SettingsStore};
+use settings::{Settings, SettingsSources, SettingsStore, SettingsUi};
 use smol::{fs, io::AsyncReadExt};
 use smol::{fs::File, process::Command};
 use std::{
@@ -113,6 +113,7 @@ impl Drop for MacOsUnmounter {
     }
 }
 
+#[derive(SettingsUi)]
 struct AutoUpdateSetting(bool);
 
 /// Whether or not to automatically check for updates.

crates/auto_update_helper/src/updater.rs 🔗

@@ -16,7 +16,7 @@ use crate::windows_impl::WM_JOB_UPDATED;
 type Job = fn(&Path) -> Result<()>;
 
 #[cfg(not(test))]
-pub(crate) const JOBS: [Job; 6] = [
+pub(crate) const JOBS: &[Job] = &[
     // Delete old files
     |app_dir| {
         let zed_executable = app_dir.join("Zed.exe");
@@ -32,6 +32,12 @@ pub(crate) const JOBS: [Job; 6] = [
         std::fs::remove_file(&zed_cli)
             .context(format!("Failed to remove old file {}", zed_cli.display()))
     },
+    |app_dir| {
+        let zed_wsl = app_dir.join("bin\\zed");
+        log::info!("Removing old file: {}", zed_wsl.display());
+        std::fs::remove_file(&zed_wsl)
+            .context(format!("Failed to remove old file {}", zed_wsl.display()))
+    },
     // Copy new files
     |app_dir| {
         let zed_executable_source = app_dir.join("install\\Zed.exe");
@@ -65,6 +71,22 @@ pub(crate) const JOBS: [Job; 6] = [
                 zed_cli_dest.display()
             ))
     },
+    |app_dir| {
+        let zed_wsl_source = app_dir.join("install\\bin\\zed");
+        let zed_wsl_dest = app_dir.join("bin\\zed");
+        log::info!(
+            "Copying new file {} to {}",
+            zed_wsl_source.display(),
+            zed_wsl_dest.display()
+        );
+        std::fs::copy(&zed_wsl_source, &zed_wsl_dest)
+            .map(|_| ())
+            .context(format!(
+                "Failed to copy new file {} to {}",
+                zed_wsl_source.display(),
+                zed_wsl_dest.display()
+            ))
+    },
     // Clean up installer folder and updates folder
     |app_dir| {
         let updates_folder = app_dir.join("updates");
@@ -85,7 +107,7 @@ pub(crate) const JOBS: [Job; 6] = [
 ];
 
 #[cfg(test)]
-pub(crate) const JOBS: [Job; 2] = [
+pub(crate) const JOBS: &[Job] = &[
     |_| {
         std::thread::sleep(Duration::from_millis(1000));
         if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") {

crates/bedrock/src/bedrock.rs 🔗

@@ -3,6 +3,7 @@ mod models;
 use anyhow::{Context, Error, Result, anyhow};
 use aws_sdk_bedrockruntime as bedrock;
 pub use aws_sdk_bedrockruntime as bedrock_client;
+use aws_sdk_bedrockruntime::types::InferenceConfiguration;
 pub use aws_sdk_bedrockruntime::types::{
     AnyToolChoice as BedrockAnyToolChoice, AutoToolChoice as BedrockAutoToolChoice,
     ContentBlock as BedrockInnerContent, Tool as BedrockTool, ToolChoice as BedrockToolChoice,
@@ -17,7 +18,8 @@ pub use bedrock::types::{
     ConverseOutput as BedrockResponse, ConverseStreamOutput as BedrockStreamingResponse,
     ImageBlock as BedrockImageBlock, Message as BedrockMessage,
     ReasoningContentBlock as BedrockThinkingBlock, ReasoningTextBlock as BedrockThinkingTextBlock,
-    ResponseStream as BedrockResponseStream, ToolResultBlock as BedrockToolResultBlock,
+    ResponseStream as BedrockResponseStream, SystemContentBlock as BedrockSystemContentBlock,
+    ToolResultBlock as BedrockToolResultBlock,
     ToolResultContentBlock as BedrockToolResultContentBlock,
     ToolResultStatus as BedrockToolResultStatus, ToolUseBlock as BedrockToolUseBlock,
 };
@@ -58,6 +60,20 @@ pub async fn stream_completion(
         response = response.set_tool_config(request.tools);
     }
 
+    let inference_config = InferenceConfiguration::builder()
+        .max_tokens(request.max_tokens as i32)
+        .set_temperature(request.temperature)
+        .set_top_p(request.top_p)
+        .build();
+
+    response = response.inference_config(inference_config);
+
+    if let Some(system) = request.system {
+        if !system.is_empty() {
+            response = response.system(BedrockSystemContentBlock::Text(system));
+        }
+    }
+
     let output = response
         .send()
         .await

crates/bedrock/src/models.rs 🔗

@@ -151,12 +151,12 @@ impl Model {
 
     pub fn id(&self) -> &str {
         match self {
-            Model::ClaudeSonnet4 => "claude-4-sonnet",
-            Model::ClaudeSonnet4Thinking => "claude-4-sonnet-thinking",
-            Model::ClaudeOpus4 => "claude-4-opus",
-            Model::ClaudeOpus4_1 => "claude-4-opus-1",
-            Model::ClaudeOpus4Thinking => "claude-4-opus-thinking",
-            Model::ClaudeOpus4_1Thinking => "claude-4-opus-1-thinking",
+            Model::ClaudeSonnet4 => "claude-sonnet-4",
+            Model::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking",
+            Model::ClaudeOpus4 => "claude-opus-4",
+            Model::ClaudeOpus4_1 => "claude-opus-4-1",
+            Model::ClaudeOpus4Thinking => "claude-opus-4-thinking",
+            Model::ClaudeOpus4_1Thinking => "claude-opus-4-1-thinking",
             Model::Claude3_5SonnetV2 => "claude-3-5-sonnet-v2",
             Model::Claude3_5Sonnet => "claude-3-5-sonnet",
             Model::Claude3Opus => "claude-3-opus",
@@ -359,14 +359,12 @@ impl Model {
     pub fn max_output_tokens(&self) -> u64 {
         match self {
             Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096,
-            Self::Claude3_7Sonnet
-            | Self::Claude3_7SonnetThinking
-            | Self::ClaudeSonnet4
-            | Self::ClaudeSonnet4Thinking
-            | Self::ClaudeOpus4
-            | Model::ClaudeOpus4Thinking
+            Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => 128_000,
+            Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => 64_000,
+            Self::ClaudeOpus4
+            | Self::ClaudeOpus4Thinking
             | Self::ClaudeOpus4_1
-            | Model::ClaudeOpus4_1Thinking => 128_000,
+            | Self::ClaudeOpus4_1Thinking => 32_000,
             Self::Claude3_5SonnetV2 | Self::PalmyraWriterX4 | Self::PalmyraWriterX5 => 8_192,
             Self::Custom {
                 max_output_tokens, ..
@@ -784,10 +782,10 @@ mod tests {
         );
 
         // Test thinking models have different friendly IDs but same request IDs
-        assert_eq!(Model::ClaudeSonnet4.id(), "claude-4-sonnet");
+        assert_eq!(Model::ClaudeSonnet4.id(), "claude-sonnet-4");
         assert_eq!(
             Model::ClaudeSonnet4Thinking.id(),
-            "claude-4-sonnet-thinking"
+            "claude-sonnet-4-thinking"
         );
         assert_eq!(
             Model::ClaudeSonnet4.request_id(),

crates/call/src/call_settings.rs 🔗

@@ -2,9 +2,9 @@ use anyhow::Result;
 use gpui::App;
 use schemars::JsonSchema;
 use serde_derive::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
-#[derive(Deserialize, Debug)]
+#[derive(Deserialize, Debug, SettingsUi)]
 pub struct CallSettings {
     pub mute_on_join: bool,
     pub share_on_join: bool,

crates/cli/src/cli.rs 🔗

@@ -14,6 +14,7 @@ pub enum CliRequest {
         paths: Vec<String>,
         urls: Vec<String>,
         diff_paths: Vec<[String; 2]>,
+        wsl: Option<String>,
         wait: bool,
         open_new_workspace: Option<bool>,
         env: Option<HashMap<String, String>>,

crates/cli/src/main.rs 🔗

@@ -6,7 +6,6 @@
 use anyhow::{Context as _, Result};
 use clap::Parser;
 use cli::{CliRequest, CliResponse, IpcHandshake, ipc::IpcOneShotServer};
-use collections::HashMap;
 use parking_lot::Mutex;
 use std::{
     env, fs, io,
@@ -85,6 +84,17 @@ struct Args {
     /// Run zed in dev-server mode
     #[arg(long)]
     dev_server_token: Option<String>,
+    /// The username and WSL distribution to use when opening paths. If not specified,
+    /// Zed will attempt to open the paths directly.
+    ///
+    /// The username is optional, and if not specified, the default user for the distribution
+    /// will be used.
+    ///
+    /// Example: `me@Ubuntu` or `Ubuntu`.
+    ///
+    /// WARN: You should not fill in this field by hand.
+    #[arg(long, value_name = "USER@DISTRO")]
+    wsl: Option<String>,
     /// Not supported in Zed CLI, only supported on Zed binary
     /// Will attempt to give the correct command to run
     #[arg(long)]
@@ -129,14 +139,41 @@ fn parse_path_with_position(argument_str: &str) -> anyhow::Result<String> {
     Ok(canonicalized.to_string(|path| path.to_string_lossy().to_string()))
 }
 
-fn main() -> Result<()> {
-    #[cfg(all(not(debug_assertions), target_os = "windows"))]
-    unsafe {
-        use ::windows::Win32::System::Console::{ATTACH_PARENT_PROCESS, AttachConsole};
+fn parse_path_in_wsl(source: &str, wsl: &str) -> Result<String> {
+    let mut command = util::command::new_std_command("wsl.exe");
+
+    let (user, distro_name) = if let Some((user, distro)) = wsl.split_once('@') {
+        if user.is_empty() {
+            anyhow::bail!("user is empty in wsl argument");
+        }
+        (Some(user), distro)
+    } else {
+        (None, wsl)
+    };
 
-        let _ = AttachConsole(ATTACH_PARENT_PROCESS);
+    if let Some(user) = user {
+        command.arg("--user").arg(user);
     }
 
+    let output = command
+        .arg("--distribution")
+        .arg(distro_name)
+        .arg("wslpath")
+        .arg("-m")
+        .arg(source)
+        .output()?;
+
+    let result = String::from_utf8_lossy(&output.stdout);
+    let prefix = format!("//wsl.localhost/{}", distro_name);
+
+    Ok(result
+        .trim()
+        .strip_prefix(&prefix)
+        .unwrap_or(&result)
+        .to_string())
+}
+
+fn main() -> Result<()> {
     #[cfg(unix)]
     util::prevent_root_execution();
 
@@ -223,6 +260,8 @@ fn main() -> Result<()> {
     let env = {
         #[cfg(any(target_os = "linux", target_os = "freebsd"))]
         {
+            use collections::HashMap;
+
             // On Linux, the desktop entry uses `cli` to spawn `zed`.
             // We need to handle env vars correctly since std::env::vars() may not contain
             // project-specific vars (e.g. those set by direnv).
@@ -235,8 +274,19 @@ fn main() -> Result<()> {
             }
         }
 
-        #[cfg(not(any(target_os = "linux", target_os = "freebsd")))]
-        Some(std::env::vars().collect::<HashMap<_, _>>())
+        #[cfg(target_os = "windows")]
+        {
+            // On Windows, by default, a child process inherits a copy of the environment block of the parent process.
+            // So we don't need to pass env vars explicitly.
+            None
+        }
+
+        #[cfg(not(any(target_os = "linux", target_os = "freebsd", target_os = "windows")))]
+        {
+            use collections::HashMap;
+
+            Some(std::env::vars().collect::<HashMap<_, _>>())
+        }
     };
 
     let exit_status = Arc::new(Mutex::new(None));
@@ -271,8 +321,10 @@ fn main() -> Result<()> {
             paths.push(tmp_file.path().to_string_lossy().to_string());
             let (tmp_file, _) = tmp_file.keep()?;
             anonymous_fd_tmp_files.push((file, tmp_file));
+        } else if let Some(wsl) = &args.wsl {
+            urls.push(format!("file://{}", parse_path_in_wsl(path, wsl)?));
         } else {
-            paths.push(parse_path_with_position(path)?)
+            paths.push(parse_path_with_position(path)?);
         }
     }
 
@@ -292,6 +344,7 @@ fn main() -> Result<()> {
                 paths,
                 urls,
                 diff_paths,
+                wsl: args.wsl,
                 wait: args.wait,
                 open_new_workspace,
                 env,
@@ -644,15 +697,15 @@ mod windows {
             Storage::FileSystem::{
                 CreateFileW, FILE_FLAGS_AND_ATTRIBUTES, FILE_SHARE_MODE, OPEN_EXISTING, WriteFile,
             },
-            System::Threading::CreateMutexW,
+            System::Threading::{CREATE_NEW_PROCESS_GROUP, CreateMutexW},
         },
         core::HSTRING,
     };
 
     use crate::{Detect, InstalledApp};
-    use std::io;
     use std::path::{Path, PathBuf};
     use std::process::ExitStatus;
+    use std::{io, os::windows::process::CommandExt};
 
     fn check_single_instance() -> bool {
         let mutex = unsafe {
@@ -691,6 +744,7 @@ mod windows {
         fn launch(&self, ipc_url: String) -> anyhow::Result<()> {
             if check_single_instance() {
                 std::process::Command::new(self.0.clone())
+                    .creation_flags(CREATE_NEW_PROCESS_GROUP.0)
                     .arg(ipc_url)
                     .spawn()?;
             } else {

crates/client/src/client.rs 🔗

@@ -31,7 +31,7 @@ use release_channel::{AppVersion, ReleaseChannel};
 use rpc::proto::{AnyTypedEnvelope, EnvelopedMessage, PeerId, RequestMessage};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 use std::{
     any::TypeId,
     convert::TryFrom,
@@ -101,7 +101,7 @@ pub struct ClientSettingsContent {
     server_url: Option<String>,
 }
 
-#[derive(Deserialize)]
+#[derive(Deserialize, SettingsUi)]
 pub struct ClientSettings {
     pub server_url: String,
 }
@@ -127,7 +127,7 @@ pub struct ProxySettingsContent {
     proxy: Option<String>,
 }
 
-#[derive(Deserialize, Default)]
+#[derive(Deserialize, Default, SettingsUi)]
 pub struct ProxySettings {
     pub proxy: Option<String>,
 }
@@ -520,7 +520,7 @@ impl<T: 'static> Drop for PendingEntitySubscription<T> {
     }
 }
 
-#[derive(Copy, Clone, Deserialize, Debug)]
+#[derive(Copy, Clone, Deserialize, Debug, SettingsUi)]
 pub struct TelemetrySettings {
     pub diagnostics: bool,
     pub metrics: bool,
@@ -1696,21 +1696,10 @@ impl Client {
             );
             cx.spawn(async move |_| match future.await {
                 Ok(()) => {
-                    log::debug!(
-                        "rpc message handled. client_id:{}, sender_id:{:?}, type:{}",
-                        client_id,
-                        original_sender_id,
-                        type_name
-                    );
+                    log::debug!("rpc message handled. client_id:{client_id}, sender_id:{original_sender_id:?}, type:{type_name}");
                 }
                 Err(error) => {
-                    log::error!(
-                        "error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}",
-                        client_id,
-                        original_sender_id,
-                        type_name,
-                        error
-                    );
+                    log::error!("error handling message. client_id:{client_id}, sender_id:{original_sender_id:?}, type:{type_name}, error:{error:#}");
                 }
             })
             .detach();

crates/collab/src/tests/editor_tests.rs 🔗

@@ -369,7 +369,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
         .set_request_handler::<lsp::request::Completion, _, _>(|params, _| async move {
             assert_eq!(
                 params.text_document_position.text_document.uri,
-                lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
             );
             assert_eq!(
                 params.text_document_position.position,
@@ -488,7 +488,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
         .set_request_handler::<lsp::request::Completion, _, _>(|params, _| async move {
             assert_eq!(
                 params.text_document_position.text_document.uri,
-                lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
             );
             assert_eq!(
                 params.text_document_position.position,
@@ -615,7 +615,7 @@ async fn test_collaborating_with_code_actions(
         .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
             assert_eq!(
                 params.text_document.uri,
-                lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
             );
             assert_eq!(params.range.start, lsp::Position::new(0, 0));
             assert_eq!(params.range.end, lsp::Position::new(0, 0));
@@ -637,7 +637,7 @@ async fn test_collaborating_with_code_actions(
         .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
             assert_eq!(
                 params.text_document.uri,
-                lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
             );
             assert_eq!(params.range.start, lsp::Position::new(1, 31));
             assert_eq!(params.range.end, lsp::Position::new(1, 31));
@@ -649,7 +649,7 @@ async fn test_collaborating_with_code_actions(
                         changes: Some(
                             [
                                 (
-                                    lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                                    lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
                                     vec![lsp::TextEdit::new(
                                         lsp::Range::new(
                                             lsp::Position::new(1, 22),
@@ -659,7 +659,7 @@ async fn test_collaborating_with_code_actions(
                                     )],
                                 ),
                                 (
-                                    lsp::Url::from_file_path(path!("/a/other.rs")).unwrap(),
+                                    lsp::Uri::from_file_path(path!("/a/other.rs")).unwrap(),
                                     vec![lsp::TextEdit::new(
                                         lsp::Range::new(
                                             lsp::Position::new(0, 0),
@@ -721,7 +721,7 @@ async fn test_collaborating_with_code_actions(
                     changes: Some(
                         [
                             (
-                                lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                                lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
                                 vec![lsp::TextEdit::new(
                                     lsp::Range::new(
                                         lsp::Position::new(1, 22),
@@ -731,7 +731,7 @@ async fn test_collaborating_with_code_actions(
                                 )],
                             ),
                             (
-                                lsp::Url::from_file_path(path!("/a/other.rs")).unwrap(),
+                                lsp::Uri::from_file_path(path!("/a/other.rs")).unwrap(),
                                 vec![lsp::TextEdit::new(
                                     lsp::Range::new(
                                         lsp::Position::new(0, 0),
@@ -949,14 +949,14 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
                 changes: Some(
                     [
                         (
-                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
+                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                             vec![lsp::TextEdit::new(
                                 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                 "THREE".to_string(),
                             )],
                         ),
                         (
-                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
+                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                             vec![
                                 lsp::TextEdit::new(
                                     lsp::Range::new(
@@ -1574,7 +1574,7 @@ async fn test_on_input_format_from_host_to_guest(
         |params, _| async move {
             assert_eq!(
                 params.text_document_position.text_document.uri,
-                lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
             );
             assert_eq!(
                 params.text_document_position.position,
@@ -1717,7 +1717,7 @@ async fn test_on_input_format_from_guest_to_host(
         .set_request_handler::<lsp::request::OnTypeFormatting, _, _>(|params, _| async move {
             assert_eq!(
                 params.text_document_position.text_document.uri,
-                lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
             );
             assert_eq!(
                 params.text_document_position.position,
@@ -1901,7 +1901,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
             async move {
                 assert_eq!(
                     params.text_document.uri,
-                    lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                    lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
                 );
                 let edits_made = task_edits_made.load(atomic::Ordering::Acquire);
                 Ok(Some(vec![lsp::InlayHint {
@@ -2151,7 +2151,7 @@ async fn test_inlay_hint_refresh_is_forwarded(
             async move {
                 assert_eq!(
                     params.text_document.uri,
-                    lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                    lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
                 );
                 let other_hints = task_other_hints.load(atomic::Ordering::Acquire);
                 let character = if other_hints { 0 } else { 2 };
@@ -2332,7 +2332,7 @@ async fn test_lsp_document_color(cx_a: &mut TestAppContext, cx_b: &mut TestAppCo
             async move {
                 assert_eq!(
                     params.text_document.uri,
-                    lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                    lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
                 );
                 requests_made.fetch_add(1, atomic::Ordering::Release);
                 Ok(vec![lsp::ColorInformation {
@@ -2621,11 +2621,11 @@ async fn test_lsp_pull_diagnostics(
             let requests_made = closure_diagnostics_pulls_made.clone();
             let diagnostics_pulls_result_ids = closure_diagnostics_pulls_result_ids.clone();
             async move {
-                let message = if lsp::Url::from_file_path(path!("/a/main.rs")).unwrap()
+                let message = if lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap()
                     == params.text_document.uri
                 {
                     expected_pull_diagnostic_main_message.to_string()
-                } else if lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap()
+                } else if lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap()
                     == params.text_document.uri
                 {
                     expected_pull_diagnostic_lib_message.to_string()
@@ -2717,7 +2717,7 @@ async fn test_lsp_pull_diagnostics(
                         items: vec![
                             lsp::WorkspaceDocumentDiagnosticReport::Full(
                                 lsp::WorkspaceFullDocumentDiagnosticReport {
-                                    uri: lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                                    uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
                                     version: None,
                                     full_document_diagnostic_report:
                                         lsp::FullDocumentDiagnosticReport {
@@ -2746,7 +2746,7 @@ async fn test_lsp_pull_diagnostics(
                             ),
                             lsp::WorkspaceDocumentDiagnosticReport::Full(
                                 lsp::WorkspaceFullDocumentDiagnosticReport {
-                                    uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(),
+                                    uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(),
                                     version: None,
                                     full_document_diagnostic_report:
                                         lsp::FullDocumentDiagnosticReport {
@@ -2821,7 +2821,7 @@ async fn test_lsp_pull_diagnostics(
 
     fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
         &lsp::PublishDiagnosticsParams {
-            uri: lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+            uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
             diagnostics: vec![lsp::Diagnostic {
                 range: lsp::Range {
                     start: lsp::Position {
@@ -2842,7 +2842,7 @@ async fn test_lsp_pull_diagnostics(
     );
     fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
         &lsp::PublishDiagnosticsParams {
-            uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(),
+            uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(),
             diagnostics: vec![lsp::Diagnostic {
                 range: lsp::Range {
                     start: lsp::Position {
@@ -2870,7 +2870,7 @@ async fn test_lsp_pull_diagnostics(
                     items: vec![
                         lsp::WorkspaceDocumentDiagnosticReport::Full(
                             lsp::WorkspaceFullDocumentDiagnosticReport {
-                                uri: lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                                uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
                                 version: None,
                                 full_document_diagnostic_report:
                                     lsp::FullDocumentDiagnosticReport {
@@ -2902,7 +2902,7 @@ async fn test_lsp_pull_diagnostics(
                         ),
                         lsp::WorkspaceDocumentDiagnosticReport::Full(
                             lsp::WorkspaceFullDocumentDiagnosticReport {
-                                uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(),
+                                uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(),
                                 version: None,
                                 full_document_diagnostic_report:
                                     lsp::FullDocumentDiagnosticReport {
@@ -3051,7 +3051,7 @@ async fn test_lsp_pull_diagnostics(
                 lsp::WorkspaceDiagnosticReportResult::Report(lsp::WorkspaceDiagnosticReport {
                     items: vec![lsp::WorkspaceDocumentDiagnosticReport::Full(
                         lsp::WorkspaceFullDocumentDiagnosticReport {
-                            uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(),
+                            uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(),
                             version: None,
                             full_document_diagnostic_report: lsp::FullDocumentDiagnosticReport {
                                 result_id: Some(format!(
@@ -4040,7 +4040,7 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes
         |params, _| async move {
             assert_eq!(
                 params.text_document.uri,
-                lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
             );
             assert_eq!(params.position, lsp::Position::new(0, 0));
             Ok(Some(ExpandedMacro {
@@ -4075,7 +4075,7 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes
         |params, _| async move {
             assert_eq!(
                 params.text_document.uri,
-                lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
             );
             assert_eq!(
                 params.position,

crates/collab/src/tests/integration_tests.rs 🔗

@@ -4070,7 +4070,7 @@ async fn test_collaborating_with_diagnostics(
         .await;
     fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
         &lsp::PublishDiagnosticsParams {
-            uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
+            uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
             version: None,
             diagnostics: vec![lsp::Diagnostic {
                 severity: Some(lsp::DiagnosticSeverity::WARNING),
@@ -4090,7 +4090,7 @@ async fn test_collaborating_with_diagnostics(
         .unwrap();
     fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
         &lsp::PublishDiagnosticsParams {
-            uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
+            uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
             version: None,
             diagnostics: vec![lsp::Diagnostic {
                 severity: Some(lsp::DiagnosticSeverity::ERROR),
@@ -4164,7 +4164,7 @@ async fn test_collaborating_with_diagnostics(
     // Simulate a language server reporting more errors for a file.
     fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
         &lsp::PublishDiagnosticsParams {
-            uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
+            uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
             version: None,
             diagnostics: vec![
                 lsp::Diagnostic {
@@ -4260,7 +4260,7 @@ async fn test_collaborating_with_diagnostics(
     // Simulate a language server reporting no errors for a file.
     fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
         &lsp::PublishDiagnosticsParams {
-            uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
+            uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
             version: None,
             diagnostics: Vec::new(),
         },
@@ -4367,7 +4367,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
     for file_name in file_names {
         fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
             &lsp::PublishDiagnosticsParams {
-                uri: lsp::Url::from_file_path(Path::new(path!("/test")).join(file_name)).unwrap(),
+                uri: lsp::Uri::from_file_path(Path::new(path!("/test")).join(file_name)).unwrap(),
                 version: None,
                 diagnostics: vec![lsp::Diagnostic {
                     severity: Some(lsp::DiagnosticSeverity::WARNING),
@@ -4833,7 +4833,7 @@ async fn test_definition(
         |_, _| async move {
             Ok(Some(lsp::GotoDefinitionResponse::Scalar(
                 lsp::Location::new(
-                    lsp::Url::from_file_path(path!("/root/dir-2/b.rs")).unwrap(),
+                    lsp::Uri::from_file_path(path!("/root/dir-2/b.rs")).unwrap(),
                     lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                 ),
             )))
@@ -4871,7 +4871,7 @@ async fn test_definition(
         |_, _| async move {
             Ok(Some(lsp::GotoDefinitionResponse::Scalar(
                 lsp::Location::new(
-                    lsp::Url::from_file_path(path!("/root/dir-2/b.rs")).unwrap(),
+                    lsp::Uri::from_file_path(path!("/root/dir-2/b.rs")).unwrap(),
                     lsp::Range::new(lsp::Position::new(1, 6), lsp::Position::new(1, 11)),
                 ),
             )))
@@ -4909,7 +4909,7 @@ async fn test_definition(
             );
             Ok(Some(lsp::GotoDefinitionResponse::Scalar(
                 lsp::Location::new(
-                    lsp::Url::from_file_path(path!("/root/dir-2/c.rs")).unwrap(),
+                    lsp::Uri::from_file_path(path!("/root/dir-2/c.rs")).unwrap(),
                     lsp::Range::new(lsp::Position::new(0, 5), lsp::Position::new(0, 7)),
                 ),
             )))
@@ -5044,15 +5044,15 @@ async fn test_references(
     lsp_response_tx
         .unbounded_send(Ok(Some(vec![
             lsp::Location {
-                uri: lsp::Url::from_file_path(path!("/root/dir-1/two.rs")).unwrap(),
+                uri: lsp::Uri::from_file_path(path!("/root/dir-1/two.rs")).unwrap(),
                 range: lsp::Range::new(lsp::Position::new(0, 24), lsp::Position::new(0, 27)),
             },
             lsp::Location {
-                uri: lsp::Url::from_file_path(path!("/root/dir-1/two.rs")).unwrap(),
+                uri: lsp::Uri::from_file_path(path!("/root/dir-1/two.rs")).unwrap(),
                 range: lsp::Range::new(lsp::Position::new(0, 35), lsp::Position::new(0, 38)),
             },
             lsp::Location {
-                uri: lsp::Url::from_file_path(path!("/root/dir-2/three.rs")).unwrap(),
+                uri: lsp::Uri::from_file_path(path!("/root/dir-2/three.rs")).unwrap(),
                 range: lsp::Range::new(lsp::Position::new(0, 37), lsp::Position::new(0, 40)),
             },
         ])))
@@ -5620,7 +5620,7 @@ async fn test_project_symbols(
                 lsp::SymbolInformation {
                     name: "TWO".into(),
                     location: lsp::Location {
-                        uri: lsp::Url::from_file_path(path!("/code/crate-2/two.rs")).unwrap(),
+                        uri: lsp::Uri::from_file_path(path!("/code/crate-2/two.rs")).unwrap(),
                         range: lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                     },
                     kind: lsp::SymbolKind::CONSTANT,
@@ -5732,7 +5732,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
         |_, _| async move {
             Ok(Some(lsp::GotoDefinitionResponse::Scalar(
                 lsp::Location::new(
-                    lsp::Url::from_file_path(path!("/root/b.rs")).unwrap(),
+                    lsp::Uri::from_file_path(path!("/root/b.rs")).unwrap(),
                     lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                 ),
             )))

crates/collab/src/tests/random_project_collaboration_tests.rs 🔗

@@ -1101,7 +1101,7 @@ impl RandomizedTest for ProjectCollaborationTest {
                                         files
                                             .into_iter()
                                             .map(|file| lsp::Location {
-                                                uri: lsp::Url::from_file_path(file).unwrap(),
+                                                uri: lsp::Uri::from_file_path(file).unwrap(),
                                                 range: Default::default(),
                                             })
                                             .collect(),

crates/collab_ui/src/panel_settings.rs 🔗

@@ -1,10 +1,10 @@
 use gpui::Pixels;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 use workspace::dock::DockPosition;
 
-#[derive(Deserialize, Debug)]
+#[derive(Deserialize, Debug, SettingsUi)]
 pub struct CollaborationPanelSettings {
     pub button: bool,
     pub dock: DockPosition,
@@ -20,7 +20,7 @@ pub enum ChatPanelButton {
     WhenInCall,
 }
 
-#[derive(Deserialize, Debug)]
+#[derive(Deserialize, Debug, SettingsUi)]
 pub struct ChatPanelSettings {
     pub button: ChatPanelButton,
     pub dock: DockPosition,
@@ -43,7 +43,7 @@ pub struct ChatPanelSettingsContent {
     pub default_width: Option<f32>,
 }
 
-#[derive(Deserialize, Debug)]
+#[derive(Deserialize, Debug, SettingsUi)]
 pub struct NotificationPanelSettings {
     pub button: bool,
     pub dock: DockPosition,
@@ -66,7 +66,7 @@ pub struct PanelSettingsContent {
     pub default_width: Option<f32>,
 }
 
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)]
 pub struct MessageEditorSettings {
     /// Whether to automatically replace emoji shortcodes with emoji characters.
     /// For example: typing `:wave:` gets replaced with `👋`.

crates/copilot/src/copilot.rs 🔗

@@ -197,7 +197,7 @@ impl Status {
 }
 
 struct RegisteredBuffer {
-    uri: lsp::Url,
+    uri: lsp::Uri,
     language_id: String,
     snapshot: BufferSnapshot,
     snapshot_version: i32,
@@ -1108,9 +1108,9 @@ fn id_for_language(language: Option<&Arc<Language>>) -> String {
         .unwrap_or_else(|| "plaintext".to_string())
 }
 
-fn uri_for_buffer(buffer: &Entity<Buffer>, cx: &App) -> Result<lsp::Url, ()> {
+fn uri_for_buffer(buffer: &Entity<Buffer>, cx: &App) -> Result<lsp::Uri, ()> {
     if let Some(file) = buffer.read(cx).file().and_then(|file| file.as_local()) {
-        lsp::Url::from_file_path(file.abs_path(cx))
+        lsp::Uri::from_file_path(file.abs_path(cx))
     } else {
         format!("buffer://{}", buffer.entity_id())
             .parse()
@@ -1201,7 +1201,7 @@ mod tests {
         let (copilot, mut lsp) = Copilot::fake(cx);
 
         let buffer_1 = cx.new(|cx| Buffer::local("Hello", cx));
-        let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.entity_id().as_u64())
+        let buffer_1_uri: lsp::Uri = format!("buffer://{}", buffer_1.entity_id().as_u64())
             .parse()
             .unwrap();
         copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx));
@@ -1219,7 +1219,7 @@ mod tests {
         );
 
         let buffer_2 = cx.new(|cx| Buffer::local("Goodbye", cx));
-        let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.entity_id().as_u64())
+        let buffer_2_uri: lsp::Uri = format!("buffer://{}", buffer_2.entity_id().as_u64())
             .parse()
             .unwrap();
         copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_2, cx));
@@ -1270,7 +1270,7 @@ mod tests {
                 text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri),
             }
         );
-        let buffer_1_uri = lsp::Url::from_file_path(path!("/root/child/buffer-1")).unwrap();
+        let buffer_1_uri = lsp::Uri::from_file_path(path!("/root/child/buffer-1")).unwrap();
         assert_eq!(
             lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
                 .await,

crates/copilot/src/copilot_chat.rs 🔗

@@ -164,6 +164,8 @@ pub enum ModelVendor {
     OpenAI,
     Google,
     Anthropic,
+    #[serde(rename = "xAI")]
+    XAI,
 }
 
 #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]

crates/copilot/src/request.rs 🔗

@@ -102,7 +102,7 @@ pub struct GetCompletionsDocument {
     pub tab_size: u32,
     pub indent_size: u32,
     pub insert_spaces: bool,
-    pub uri: lsp::Url,
+    pub uri: lsp::Uri,
     pub relative_path: String,
     pub position: lsp::Position,
     pub version: usize,

crates/dap/src/debugger_settings.rs 🔗

@@ -2,9 +2,9 @@ use dap_types::SteppingGranularity;
 use gpui::{App, Global};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, SettingsUi)]
 #[serde(rename_all = "snake_case")]
 pub enum DebugPanelDockPosition {
     Left,
@@ -12,12 +12,14 @@ pub enum DebugPanelDockPosition {
     Right,
 }
 
-#[derive(Serialize, Deserialize, JsonSchema, Clone, Copy)]
+#[derive(Serialize, Deserialize, JsonSchema, Clone, Copy, SettingsUi)]
 #[serde(default)]
+#[settings_ui(group = "Debugger", path = "debugger")]
 pub struct DebuggerSettings {
     /// Determines the stepping granularity.
     ///
     /// Default: line
+    #[settings_ui(skip)]
     pub stepping_granularity: SteppingGranularity,
     /// Whether the breakpoints should be reused across Zed sessions.
     ///

crates/deepseek/src/deepseek.rs 🔗

@@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use std::convert::TryFrom;
 
-pub const DEEPSEEK_API_URL: &str = "https://api.deepseek.com";
+pub const DEEPSEEK_API_URL: &str = "https://api.deepseek.com/v1";
 
 #[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
 #[serde(rename_all = "lowercase")]
@@ -263,7 +263,7 @@ pub async fn stream_completion(
     api_key: &str,
     request: Request,
 ) -> Result<BoxStream<'static, Result<StreamResponse>>> {
-    let uri = format!("{api_url}/v1/chat/completions");
+    let uri = format!("{api_url}/chat/completions");
     let request_builder = HttpRequest::builder()
         .method(Method::POST)
         .uri(uri)

crates/diagnostics/src/diagnostics.rs 🔗

@@ -10,8 +10,9 @@ use anyhow::Result;
 use collections::{BTreeSet, HashMap};
 use diagnostic_renderer::DiagnosticBlock;
 use editor::{
-    DEFAULT_MULTIBUFFER_CONTEXT, Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey,
+    Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey,
     display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId},
+    multibuffer_context_lines,
 };
 use gpui::{
     AnyElement, AnyView, App, AsyncApp, Context, Entity, EventEmitter, FocusHandle, Focusable,
@@ -493,10 +494,11 @@ impl ProjectDiagnosticsEditor {
             }
 
             let mut excerpt_ranges: Vec<ExcerptRange<Point>> = Vec::new();
+            let context_lines = cx.update(|_, cx| multibuffer_context_lines(cx))?;
             for b in blocks.iter() {
                 let excerpt_range = context_range_for_entry(
                     b.initial_range.clone(),
-                    DEFAULT_MULTIBUFFER_CONTEXT,
+                    context_lines,
                     buffer_snapshot.clone(),
                     cx,
                 )

crates/diagnostics/src/diagnostics_tests.rs 🔗

@@ -24,6 +24,7 @@ use settings::SettingsStore;
 use std::{
     env,
     path::{Path, PathBuf},
+    str::FromStr,
 };
 use unindent::Unindent as _;
 use util::{RandomCharIter, path, post_inc};
@@ -70,7 +71,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
     let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
     let cx = &mut VisualTestContext::from_window(*window, cx);
     let workspace = window.root(cx).unwrap();
-    let uri = lsp::Url::from_file_path(path!("/test/main.rs")).unwrap();
+    let uri = lsp::Uri::from_file_path(path!("/test/main.rs")).unwrap();
 
     // Create some diagnostics
     lsp_store.update(cx, |lsp_store, cx| {
@@ -167,7 +168,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
             .update_diagnostics(
                 language_server_id,
                 lsp::PublishDiagnosticsParams {
-                    uri: lsp::Url::from_file_path(path!("/test/consts.rs")).unwrap(),
+                    uri: lsp::Uri::from_file_path(path!("/test/consts.rs")).unwrap(),
                     diagnostics: vec![lsp::Diagnostic {
                         range: lsp::Range::new(
                             lsp::Position::new(0, 15),
@@ -243,7 +244,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
             .update_diagnostics(
                 language_server_id,
                 lsp::PublishDiagnosticsParams {
-                    uri: lsp::Url::from_file_path(path!("/test/consts.rs")).unwrap(),
+                    uri: lsp::Uri::from_file_path(path!("/test/consts.rs")).unwrap(),
                     diagnostics: vec![
                         lsp::Diagnostic {
                             range: lsp::Range::new(
@@ -356,14 +357,14 @@ async fn test_diagnostics_with_folds(cx: &mut TestAppContext) {
             .update_diagnostics(
                 server_id_1,
                 lsp::PublishDiagnosticsParams {
-                    uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(),
+                    uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(),
                     diagnostics: vec![lsp::Diagnostic {
                         range: lsp::Range::new(lsp::Position::new(4, 0), lsp::Position::new(4, 4)),
                         severity: Some(lsp::DiagnosticSeverity::WARNING),
                         message: "no method `tset`".to_string(),
                         related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                             location: lsp::Location::new(
-                                lsp::Url::from_file_path(path!("/test/main.js")).unwrap(),
+                                lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(),
                                 lsp::Range::new(
                                     lsp::Position::new(0, 9),
                                     lsp::Position::new(0, 13),
@@ -465,7 +466,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
             .update_diagnostics(
                 server_id_1,
                 lsp::PublishDiagnosticsParams {
-                    uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(),
+                    uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(),
                     diagnostics: vec![lsp::Diagnostic {
                         range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)),
                         severity: Some(lsp::DiagnosticSeverity::WARNING),
@@ -509,7 +510,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
             .update_diagnostics(
                 server_id_2,
                 lsp::PublishDiagnosticsParams {
-                    uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(),
+                    uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(),
                     diagnostics: vec![lsp::Diagnostic {
                         range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(1, 1)),
                         severity: Some(lsp::DiagnosticSeverity::ERROR),
@@ -552,7 +553,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
             .update_diagnostics(
                 server_id_1,
                 lsp::PublishDiagnosticsParams {
-                    uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(),
+                    uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(),
                     diagnostics: vec![lsp::Diagnostic {
                         range: lsp::Range::new(lsp::Position::new(2, 0), lsp::Position::new(2, 1)),
                         severity: Some(lsp::DiagnosticSeverity::WARNING),
@@ -571,7 +572,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
             .update_diagnostics(
                 server_id_2,
                 lsp::PublishDiagnosticsParams {
-                    uri: lsp::Url::from_file_path(path!("/test/main.rs")).unwrap(),
+                    uri: lsp::Uri::from_file_path(path!("/test/main.rs")).unwrap(),
                     diagnostics: vec![],
                     version: None,
                 },
@@ -608,7 +609,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
             .update_diagnostics(
                 server_id_2,
                 lsp::PublishDiagnosticsParams {
-                    uri: lsp::Url::from_file_path(path!("/test/main.js")).unwrap(),
+                    uri: lsp::Uri::from_file_path(path!("/test/main.js")).unwrap(),
                     diagnostics: vec![lsp::Diagnostic {
                         range: lsp::Range::new(lsp::Position::new(3, 0), lsp::Position::new(3, 1)),
                         severity: Some(lsp::DiagnosticSeverity::WARNING),
@@ -745,8 +746,8 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
                         .update_diagnostics(
                             server_id,
                             lsp::PublishDiagnosticsParams {
-                                uri: lsp::Url::from_file_path(&path).unwrap_or_else(|_| {
-                                    lsp::Url::parse("file:///test/fallback.rs").unwrap()
+                                uri: lsp::Uri::from_file_path(&path).unwrap_or_else(|_| {
+                                    lsp::Uri::from_str("file:///test/fallback.rs").unwrap()
                                 }),
                                 diagnostics: diagnostics.clone(),
                                 version: None,
@@ -934,8 +935,8 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
                         .update_diagnostics(
                             server_id,
                             lsp::PublishDiagnosticsParams {
-                                uri: lsp::Url::from_file_path(&path).unwrap_or_else(|_| {
-                                    lsp::Url::parse("file:///test/fallback.rs").unwrap()
+                                uri: lsp::Uri::from_file_path(&path).unwrap_or_else(|_| {
+                                    lsp::Uri::from_str("file:///test/fallback.rs").unwrap()
                                 }),
                                 diagnostics: diagnostics.clone(),
                                 version: None,
@@ -985,7 +986,7 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext)
                 .update_diagnostics(
                     LanguageServerId(0),
                     lsp::PublishDiagnosticsParams {
-                        uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
+                        uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(),
                         version: None,
                         diagnostics: vec![lsp::Diagnostic {
                             range: lsp::Range::new(
@@ -1028,7 +1029,7 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext)
                 .update_diagnostics(
                     LanguageServerId(0),
                     lsp::PublishDiagnosticsParams {
-                        uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
+                        uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(),
                         version: None,
                         diagnostics: Vec::new(),
                     },
@@ -1078,7 +1079,7 @@ async fn cycle_through_same_place_diagnostics(cx: &mut TestAppContext) {
                 .update_diagnostics(
                     LanguageServerId(0),
                     lsp::PublishDiagnosticsParams {
-                        uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
+                        uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(),
                         version: None,
                         diagnostics: vec![
                             lsp::Diagnostic {
@@ -1246,7 +1247,7 @@ async fn test_diagnostics_with_links(cx: &mut TestAppContext) {
             lsp_store.update_diagnostics(
                 LanguageServerId(0),
                 lsp::PublishDiagnosticsParams {
-                    uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
+                    uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(),
                     version: None,
                     diagnostics: vec![lsp::Diagnostic {
                         range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 12)),
@@ -1299,7 +1300,7 @@ async fn test_hover_diagnostic_and_info_popovers(cx: &mut gpui::TestAppContext)
             lsp_store.update_diagnostics(
                 LanguageServerId(0),
                 lsp::PublishDiagnosticsParams {
-                    uri: lsp::Url::from_file_path(path!("/root/dir/file.rs")).unwrap(),
+                    uri: lsp::Uri::from_file_path(path!("/root/dir/file.rs")).unwrap(),
                     version: None,
                     diagnostics: vec![lsp::Diagnostic {
                         range,
@@ -1376,7 +1377,7 @@ async fn test_diagnostics_with_code(cx: &mut TestAppContext) {
     let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
     let cx = &mut VisualTestContext::from_window(*window, cx);
     let workspace = window.root(cx).unwrap();
-    let uri = lsp::Url::from_file_path(path!("/root/main.js")).unwrap();
+    let uri = lsp::Uri::from_file_path(path!("/root/main.js")).unwrap();
 
     // Create diagnostics with code fields
     lsp_store.update(cx, |lsp_store, cx| {
@@ -1460,7 +1461,7 @@ async fn go_to_diagnostic_with_severity(cx: &mut TestAppContext) {
                 .update_diagnostics(
                     LanguageServerId(0),
                     lsp::PublishDiagnosticsParams {
-                        uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
+                        uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(),
                         version: None,
                         diagnostics: vec![
                             lsp::Diagnostic {
@@ -1673,7 +1674,7 @@ fn random_lsp_diagnostic(
             );
 
             related_info.push(lsp::DiagnosticRelatedInformation {
-                location: lsp::Location::new(lsp::Url::from_file_path(path).unwrap(), info_range),
+                location: lsp::Location::new(lsp::Uri::from_file_path(path).unwrap(), info_range),
                 message: format!("related info {i} for diagnostic {unique_id}"),
             });
         }

crates/editor/src/code_context_menus.rs 🔗

@@ -1,7 +1,9 @@
+use crate::scroll::ScrollAmount;
 use fuzzy::{StringMatch, StringMatchCandidate};
 use gpui::{
-    AnyElement, Entity, Focusable, FontWeight, ListSizingBehavior, ScrollStrategy, SharedString,
-    Size, StrikethroughStyle, StyledText, Task, UniformListScrollHandle, div, px, uniform_list,
+    AnyElement, Entity, Focusable, FontWeight, ListSizingBehavior, ScrollHandle, ScrollStrategy,
+    SharedString, Size, StrikethroughStyle, StyledText, Task, UniformListScrollHandle, div, px,
+    uniform_list,
 };
 use itertools::Itertools;
 use language::CodeLabel;
@@ -184,6 +186,20 @@ impl CodeContextMenu {
             CodeContextMenu::CodeActions(_) => false,
         }
     }
+
+    pub fn scroll_aside(
+        &mut self,
+        scroll_amount: ScrollAmount,
+        window: &mut Window,
+        cx: &mut Context<Editor>,
+    ) {
+        match self {
+            CodeContextMenu::Completions(completions_menu) => {
+                completions_menu.scroll_aside(scroll_amount, window, cx)
+            }
+            CodeContextMenu::CodeActions(_) => (),
+        }
+    }
 }
 
 pub enum ContextMenuOrigin {
@@ -207,6 +223,9 @@ pub struct CompletionsMenu {
     filter_task: Task<()>,
     cancel_filter: Arc<AtomicBool>,
     scroll_handle: UniformListScrollHandle,
+    /// The `ScrollHandle` used on the Markdown documentation rendered on the
+    /// side of the completions menu.
+    pub scroll_handle_aside: ScrollHandle,
     resolve_completions: bool,
     show_completion_documentation: bool,
     last_rendered_range: Rc<RefCell<Option<Range<usize>>>>,
@@ -279,6 +298,7 @@ impl CompletionsMenu {
             filter_task: Task::ready(()),
             cancel_filter: Arc::new(AtomicBool::new(false)),
             scroll_handle: UniformListScrollHandle::new(),
+            scroll_handle_aside: ScrollHandle::new(),
             resolve_completions: true,
             last_rendered_range: RefCell::new(None).into(),
             markdown_cache: RefCell::new(VecDeque::new()).into(),
@@ -348,6 +368,7 @@ impl CompletionsMenu {
             filter_task: Task::ready(()),
             cancel_filter: Arc::new(AtomicBool::new(false)),
             scroll_handle: UniformListScrollHandle::new(),
+            scroll_handle_aside: ScrollHandle::new(),
             resolve_completions: false,
             show_completion_documentation: false,
             last_rendered_range: RefCell::new(None).into(),
@@ -911,6 +932,7 @@ impl CompletionsMenu {
                         .max_w(max_size.width)
                         .max_h(max_size.height)
                         .overflow_y_scroll()
+                        .track_scroll(&self.scroll_handle_aside)
                         .occlude(),
                 )
                 .into_any_element(),
@@ -1175,6 +1197,23 @@ impl CompletionsMenu {
                 }
             });
     }
+
+    pub fn scroll_aside(
+        &mut self,
+        amount: ScrollAmount,
+        window: &mut Window,
+        cx: &mut Context<Editor>,
+    ) {
+        let mut offset = self.scroll_handle_aside.offset();
+
+        offset.y -= amount.pixels(
+            window.line_height(),
+            self.scroll_handle_aside.bounds().size.height - px(16.),
+        ) / 2.0;
+
+        cx.notify();
+        self.scroll_handle_aside.set_offset(offset);
+    }
 }
 
 #[derive(Clone)]

crates/editor/src/editor.rs 🔗

@@ -219,7 +219,6 @@ use crate::{
 
 pub const FILE_HEADER_HEIGHT: u32 = 2;
 pub const MULTI_BUFFER_EXCERPT_HEADER_HEIGHT: u32 = 1;
-pub const DEFAULT_MULTIBUFFER_CONTEXT: u32 = 2;
 const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500);
 const MAX_LINE_LEN: usize = 1024;
 const MIN_NAVIGATION_HISTORY_ROW_DELTA: i64 = 10;
@@ -6402,7 +6401,7 @@ impl Editor {
                     PathKey::for_buffer(buffer_handle, cx),
                     buffer_handle.clone(),
                     edited_ranges,
-                    DEFAULT_MULTIBUFFER_CONTEXT,
+                    multibuffer_context_lines(cx),
                     cx,
                 );
 
@@ -16237,7 +16236,7 @@ impl Editor {
                     PathKey::for_buffer(&location.buffer, cx),
                     location.buffer.clone(),
                     ranges_for_buffer,
-                    DEFAULT_MULTIBUFFER_CONTEXT,
+                    multibuffer_context_lines(cx),
                     cx,
                 );
                 ranges.extend(new_ranges)
@@ -24078,3 +24077,10 @@ fn render_diff_hunk_controls(
         )
         .into_any_element()
 }
+
+pub fn multibuffer_context_lines(cx: &App) -> u32 {
+    EditorSettings::try_get(cx)
+        .map(|settings| settings.excerpt_context_lines)
+        .unwrap_or(2)
+        .clamp(1, 32)
+}

crates/editor/src/editor_settings.rs 🔗

@@ -6,17 +6,18 @@ use language::CursorShape;
 use project::project_settings::DiagnosticSeverity;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources, VsCodeSettings};
+use settings::{Settings, SettingsSources, SettingsUi, VsCodeSettings};
 use util::serde::default_true;
 
 /// Imports from the VSCode settings at
 /// https://code.visualstudio.com/docs/reference/default-settings
-#[derive(Deserialize, Clone)]
+#[derive(Deserialize, Clone, SettingsUi)]
 pub struct EditorSettings {
     pub cursor_blink: bool,
     pub cursor_shape: Option<CursorShape>,
     pub current_line_highlight: CurrentLineHighlight,
     pub selection_highlight: bool,
+    pub rounded_selection: bool,
     pub lsp_highlight_debounce: u64,
     pub hover_popover_enabled: bool,
     pub hover_popover_delay: u64,
@@ -37,6 +38,7 @@ pub struct EditorSettings {
     pub multi_cursor_modifier: MultiCursorModifier,
     pub redact_private_values: bool,
     pub expand_excerpt_lines: u32,
+    pub excerpt_context_lines: u32,
     pub middle_click_paste: bool,
     #[serde(default)]
     pub double_click_in_multibuffer: DoubleClickInMultibuffer,
@@ -55,6 +57,7 @@ pub struct EditorSettings {
     pub inline_code_actions: bool,
     pub drag_and_drop_selection: DragAndDropSelection,
     pub lsp_document_colors: DocumentColorsRenderMode,
+    pub minimum_contrast_for_highlights: f32,
 }
 
 /// How to render LSP `textDocument/documentColor` colors in the editor.
@@ -439,6 +442,10 @@ pub struct EditorSettingsContent {
     ///
     /// Default: true
     pub selection_highlight: Option<bool>,
+    /// Whether the text selection should have rounded corners.
+    ///
+    /// Default: true
+    pub rounded_selection: Option<bool>,
     /// The debounce delay before querying highlights from the language
     /// server based on the current cursor location.
     ///
@@ -515,6 +522,11 @@ pub struct EditorSettingsContent {
     /// Default: 3
     pub expand_excerpt_lines: Option<u32>,
 
+    /// How many lines of context to provide in multibuffer excerpts by default.
+    ///
+    /// Default: 2
+    pub excerpt_context_lines: Option<u32>,
+
     /// Whether to enable middle-click paste on Linux
     ///
     /// Default: true
@@ -544,6 +556,12 @@ pub struct EditorSettingsContent {
     ///
     /// Default: false
     pub show_signature_help_after_edits: Option<bool>,
+    /// The minimum APCA perceptual contrast to maintain when
+    /// rendering text over highlight backgrounds in the editor.
+    ///
+    /// Values range from 0 to 106. Set to 0 to disable adjustments.
+    /// Default: 45
+    pub minimum_contrast_for_highlights: Option<f32>,
 
     /// Whether to follow-up empty go to definition responses from the language server or not.
     /// `FindAllReferences` allows to look up references of the same symbol instead.
@@ -781,6 +799,7 @@ impl Settings for EditorSettings {
             "editor.selectionHighlight",
             &mut current.selection_highlight,
         );
+        vscode.bool_setting("editor.roundedSelection", &mut current.rounded_selection);
         vscode.bool_setting("editor.hover.enabled", &mut current.hover_popover_enabled);
         vscode.u64_setting("editor.hover.delay", &mut current.hover_popover_delay);
 

crates/editor/src/editor_tests.rs 🔗

@@ -9909,7 +9909,7 @@ async fn test_document_format_during_save(cx: &mut TestAppContext) {
             move |params, _| async move {
                 assert_eq!(
                     params.text_document.uri,
-                    lsp::Url::from_file_path(path!("/file.rs")).unwrap()
+                    lsp::Uri::from_file_path(path!("/file.rs")).unwrap()
                 );
                 assert_eq!(params.options.tab_size, 4);
                 Ok(Some(vec![lsp::TextEdit::new(
@@ -9952,7 +9952,7 @@ async fn test_document_format_during_save(cx: &mut TestAppContext) {
             move |params, _| async move {
                 assert_eq!(
                     params.text_document.uri,
-                    lsp::Url::from_file_path(path!("/file.rs")).unwrap()
+                    lsp::Uri::from_file_path(path!("/file.rs")).unwrap()
                 );
                 futures::future::pending::<()>().await;
                 unreachable!()
@@ -10000,7 +10000,7 @@ async fn test_document_format_during_save(cx: &mut TestAppContext) {
             .set_request_handler::<lsp::request::Formatting, _, _>(move |params, _| async move {
                 assert_eq!(
                     params.text_document.uri,
-                    lsp::Url::from_file_path(path!("/file.rs")).unwrap()
+                    lsp::Uri::from_file_path(path!("/file.rs")).unwrap()
                 );
                 assert_eq!(params.options.tab_size, 8);
                 Ok(Some(vec![]))
@@ -10548,7 +10548,7 @@ async fn test_range_format_on_save_success(cx: &mut TestAppContext) {
         .set_request_handler::<lsp::request::RangeFormatting, _, _>(move |params, _| async move {
             assert_eq!(
                 params.text_document.uri,
-                lsp::Url::from_file_path(path!("/file.rs")).unwrap()
+                lsp::Uri::from_file_path(path!("/file.rs")).unwrap()
             );
             assert_eq!(params.options.tab_size, 4);
             Ok(Some(vec![lsp::TextEdit::new(
@@ -10581,7 +10581,7 @@ async fn test_range_format_on_save_timeout(cx: &mut TestAppContext) {
         move |params, _| async move {
             assert_eq!(
                 params.text_document.uri,
-                lsp::Url::from_file_path(path!("/file.rs")).unwrap()
+                lsp::Uri::from_file_path(path!("/file.rs")).unwrap()
             );
             futures::future::pending::<()>().await;
             unreachable!()
@@ -10674,7 +10674,7 @@ async fn test_range_format_respects_language_tab_size_override(cx: &mut TestAppC
         .set_request_handler::<lsp::request::RangeFormatting, _, _>(move |params, _| async move {
             assert_eq!(
                 params.text_document.uri,
-                lsp::Url::from_file_path(path!("/file.rs")).unwrap()
+                lsp::Uri::from_file_path(path!("/file.rs")).unwrap()
             );
             assert_eq!(params.options.tab_size, 8);
             Ok(Some(Vec::new()))
@@ -10761,7 +10761,7 @@ async fn test_document_format_manual_trigger(cx: &mut TestAppContext) {
         .set_request_handler::<lsp::request::Formatting, _, _>(move |params, _| async move {
             assert_eq!(
                 params.text_document.uri,
-                lsp::Url::from_file_path(path!("/file.rs")).unwrap()
+                lsp::Uri::from_file_path(path!("/file.rs")).unwrap()
             );
             assert_eq!(params.options.tab_size, 4);
             Ok(Some(vec![lsp::TextEdit::new(
@@ -10786,7 +10786,7 @@ async fn test_document_format_manual_trigger(cx: &mut TestAppContext) {
         move |params, _| async move {
             assert_eq!(
                 params.text_document.uri,
-                lsp::Url::from_file_path(path!("/file.rs")).unwrap()
+                lsp::Uri::from_file_path(path!("/file.rs")).unwrap()
             );
             futures::future::pending::<()>().await;
             unreachable!()
@@ -10882,7 +10882,7 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) {
                 params.context.only,
                 Some(vec!["code-action-1".into(), "code-action-2".into()])
             );
-            let uri = lsp::Url::from_file_path(path!("/file.rs")).unwrap();
+            let uri = lsp::Uri::from_file_path(path!("/file.rs")).unwrap();
             Ok(Some(vec![
                 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                     kind: Some("code-action-1".into()),
@@ -10942,7 +10942,7 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) {
                         edit: lsp::WorkspaceEdit {
                             changes: Some(
                                 [(
-                                    lsp::Url::from_file_path(path!("/file.rs")).unwrap(),
+                                    lsp::Uri::from_file_path(path!("/file.rs")).unwrap(),
                                     vec![lsp::TextEdit {
                                         range: lsp::Range::new(
                                             lsp::Position::new(0, 0),
@@ -11153,7 +11153,7 @@ async fn test_organize_imports_manual_trigger(cx: &mut TestAppContext) {
         .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |params, _| async move {
             assert_eq!(
                 params.text_document.uri,
-                lsp::Url::from_file_path(path!("/file.ts")).unwrap()
+                lsp::Uri::from_file_path(path!("/file.ts")).unwrap()
             );
             Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
                 lsp::CodeAction {
@@ -11201,7 +11201,7 @@ async fn test_organize_imports_manual_trigger(cx: &mut TestAppContext) {
         move |params, _| async move {
             assert_eq!(
                 params.text_document.uri,
-                lsp::Url::from_file_path(path!("/file.ts")).unwrap()
+                lsp::Uri::from_file_path(path!("/file.ts")).unwrap()
             );
             futures::future::pending::<()>().await;
             unreachable!()
@@ -15478,7 +15478,7 @@ async fn go_to_prev_overlapping_diagnostic(executor: BackgroundExecutor, cx: &mu
                 .update_diagnostics(
                     LanguageServerId(0),
                     lsp::PublishDiagnosticsParams {
-                        uri: lsp::Url::from_file_path(path!("/root/file")).unwrap(),
+                        uri: lsp::Uri::from_file_path(path!("/root/file")).unwrap(),
                         version: None,
                         diagnostics: vec![
                             lsp::Diagnostic {
@@ -15874,7 +15874,7 @@ async fn test_on_type_formatting_not_triggered(cx: &mut TestAppContext) {
         |params, _| async move {
             assert_eq!(
                 params.text_document_position.text_document.uri,
-                lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
             );
             assert_eq!(
                 params.text_document_position.position,
@@ -16399,7 +16399,7 @@ async fn test_context_menus_hide_hover_popover(cx: &mut gpui::TestAppContext) {
                     edit: Some(lsp::WorkspaceEdit {
                         changes: Some(
                             [(
-                                lsp::Url::from_file_path(path!("/file.rs")).unwrap(),
+                                lsp::Uri::from_file_path(path!("/file.rs")).unwrap(),
                                 vec![lsp::TextEdit {
                                     range: lsp::Range::new(
                                         lsp::Position::new(5, 4),
@@ -19867,7 +19867,7 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) {
                 PathKey::namespaced(0, buffer.read(cx).file().unwrap().path().clone()),
                 buffer.clone(),
                 vec![text::Anchor::MIN.to_point(&snapshot)..text::Anchor::MAX.to_point(&snapshot)],
-                DEFAULT_MULTIBUFFER_CONTEXT,
+                2,
                 cx,
             );
         }
@@ -22067,7 +22067,7 @@ async fn test_apply_code_lens_actions_with_commands(cx: &mut gpui::TestAppContex
                                 edit: lsp::WorkspaceEdit {
                                     changes: Some(
                                         [(
-                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
+                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                             vec![lsp::TextEdit {
                                                 range: lsp::Range::new(
                                                     lsp::Position::new(0, 0),
@@ -24039,7 +24039,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) {
             let result_id = Some(new_result_id.to_string());
             assert_eq!(
                 params.text_document.uri,
-                lsp::Url::from_file_path(path!("/a/first.rs")).unwrap()
+                lsp::Uri::from_file_path(path!("/a/first.rs")).unwrap()
             );
             async move {
                 Ok(lsp::DocumentDiagnosticReportResult::Report(
@@ -24254,7 +24254,7 @@ async fn test_document_colors(cx: &mut TestAppContext) {
             async move {
                 assert_eq!(
                     params.text_document.uri,
-                    lsp::Url::from_file_path(path!("/a/first.rs")).unwrap()
+                    lsp::Uri::from_file_path(path!("/a/first.rs")).unwrap()
                 );
                 requests_made.fetch_add(1, atomic::Ordering::Release);
                 Ok(vec![

crates/editor/src/element.rs 🔗

@@ -82,6 +82,7 @@ use std::{
 use sum_tree::Bias;
 use text::{BufferId, SelectionGoal};
 use theme::{ActiveTheme, Appearance, BufferLineHeight, PlayerColor};
+use ui::utils::ensure_minimum_contrast;
 use ui::{
     ButtonLike, ContextMenu, Indicator, KeyBinding, POPOVER_Y_PADDING, Tooltip, h_flex, prelude::*,
     right_click_menu,
@@ -3260,12 +3261,161 @@ impl EditorElement {
             .collect()
     }
 
+    fn bg_segments_per_row(
+        rows: Range<DisplayRow>,
+        selections: &[(PlayerColor, Vec<SelectionLayout>)],
+        highlight_ranges: &[(Range<DisplayPoint>, Hsla)],
+        base_background: Hsla,
+    ) -> Vec<Vec<(Range<DisplayPoint>, Hsla)>> {
+        if rows.start >= rows.end {
+            return Vec::new();
+        }
+        let highlight_iter = highlight_ranges.iter().cloned();
+        let selection_iter = selections.iter().flat_map(|(player_color, layouts)| {
+            let color = player_color.selection;
+            layouts.iter().filter_map(move |selection_layout| {
+                if selection_layout.range.start != selection_layout.range.end {
+                    Some((selection_layout.range.clone(), color))
+                } else {
+                    None
+                }
+            })
+        });
+        let mut per_row_map = vec![Vec::new(); rows.len()];
+        for (range, color) in highlight_iter.chain(selection_iter) {
+            let covered_rows = if range.end.column() == 0 {
+                cmp::max(range.start.row(), rows.start)..cmp::min(range.end.row(), rows.end)
+            } else {
+                cmp::max(range.start.row(), rows.start)
+                    ..cmp::min(range.end.row().next_row(), rows.end)
+            };
+            for row in covered_rows.iter_rows() {
+                let seg_start = if row == range.start.row() {
+                    range.start
+                } else {
+                    DisplayPoint::new(row, 0)
+                };
+                let seg_end = if row == range.end.row() && range.end.column() != 0 {
+                    range.end
+                } else {
+                    DisplayPoint::new(row, u32::MAX)
+                };
+                let ix = row.minus(rows.start) as usize;
+                debug_assert!(row >= rows.start && row < rows.end);
+                debug_assert!(ix < per_row_map.len());
+                per_row_map[ix].push((seg_start..seg_end, color));
+            }
+        }
+        for row_segments in per_row_map.iter_mut() {
+            if row_segments.is_empty() {
+                continue;
+            }
+            let segments = mem::take(row_segments);
+            let merged = Self::merge_overlapping_ranges(segments, base_background);
+            *row_segments = merged;
+        }
+        per_row_map
+    }
+
+    /// Merge overlapping ranges by splitting at all range boundaries and blending colors where
+    /// multiple ranges overlap. The result contains non-overlapping ranges ordered from left to right.
+    ///
+    /// Expects `start.row() == end.row()` for each range.
+    fn merge_overlapping_ranges(
+        ranges: Vec<(Range<DisplayPoint>, Hsla)>,
+        base_background: Hsla,
+    ) -> Vec<(Range<DisplayPoint>, Hsla)> {
+        struct Boundary {
+            pos: DisplayPoint,
+            is_start: bool,
+            index: usize,
+            color: Hsla,
+        }
+
+        let mut boundaries: SmallVec<[Boundary; 16]> = SmallVec::with_capacity(ranges.len() * 2);
+        for (index, (range, color)) in ranges.iter().enumerate() {
+            debug_assert!(
+                range.start.row() == range.end.row(),
+                "expects single-row ranges"
+            );
+            if range.start < range.end {
+                boundaries.push(Boundary {
+                    pos: range.start,
+                    is_start: true,
+                    index,
+                    color: *color,
+                });
+                boundaries.push(Boundary {
+                    pos: range.end,
+                    is_start: false,
+                    index,
+                    color: *color,
+                });
+            }
+        }
+
+        if boundaries.is_empty() {
+            return Vec::new();
+        }
+
+        boundaries
+            .sort_unstable_by(|a, b| a.pos.cmp(&b.pos).then_with(|| a.is_start.cmp(&b.is_start)));
+
+        let mut processed_ranges: Vec<(Range<DisplayPoint>, Hsla)> = Vec::new();
+        let mut active_ranges: SmallVec<[(usize, Hsla); 8]> = SmallVec::new();
+
+        let mut i = 0;
+        let mut start_pos = boundaries[0].pos;
+
+        let boundaries_len = boundaries.len();
+        while i < boundaries_len {
+            let current_boundary_pos = boundaries[i].pos;
+            if start_pos < current_boundary_pos {
+                if !active_ranges.is_empty() {
+                    let mut color = base_background;
+                    for &(_, c) in &active_ranges {
+                        color = Hsla::blend(color, c);
+                    }
+                    if let Some((last_range, last_color)) = processed_ranges.last_mut() {
+                        if *last_color == color && last_range.end == start_pos {
+                            last_range.end = current_boundary_pos;
+                        } else {
+                            processed_ranges.push((start_pos..current_boundary_pos, color));
+                        }
+                    } else {
+                        processed_ranges.push((start_pos..current_boundary_pos, color));
+                    }
+                }
+            }
+            while i < boundaries_len && boundaries[i].pos == current_boundary_pos {
+                let active_range = &boundaries[i];
+                if active_range.is_start {
+                    let idx = active_range.index;
+                    let pos = active_ranges
+                        .binary_search_by_key(&idx, |(i, _)| *i)
+                        .unwrap_or_else(|p| p);
+                    active_ranges.insert(pos, (idx, active_range.color));
+                } else {
+                    let idx = active_range.index;
+                    if let Ok(pos) = active_ranges.binary_search_by_key(&idx, |(i, _)| *i) {
+                        active_ranges.remove(pos);
+                    }
+                }
+                i += 1;
+            }
+            start_pos = current_boundary_pos;
+        }
+
+        processed_ranges
+    }
+
     fn layout_lines(
         rows: Range<DisplayRow>,
         snapshot: &EditorSnapshot,
         style: &EditorStyle,
         editor_width: Pixels,
         is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
+        bg_segments_per_row: &[Vec<(Range<DisplayPoint>, Hsla)>],
         window: &mut Window,
         cx: &mut App,
     ) -> Vec<LineWithInvisibles> {
@@ -3321,6 +3471,7 @@ impl EditorElement {
                 &snapshot.mode,
                 editor_width,
                 is_row_soft_wrapped,
+                bg_segments_per_row,
                 window,
                 cx,
             )
@@ -5912,7 +6063,7 @@ impl EditorElement {
                 };
 
                 self.paint_lines_background(layout, window, cx);
-                let invisible_display_ranges = self.paint_highlights(layout, window);
+                let invisible_display_ranges = self.paint_highlights(layout, window, cx);
                 self.paint_document_colors(layout, window);
                 self.paint_lines(&invisible_display_ranges, layout, window, cx);
                 self.paint_redactions(layout, window);
@@ -5934,6 +6085,7 @@ impl EditorElement {
         &mut self,
         layout: &mut EditorLayout,
         window: &mut Window,
+        cx: &mut App,
     ) -> SmallVec<[Range<DisplayPoint>; 32]> {
         window.paint_layer(layout.position_map.text_hitbox.bounds, |window| {
             let mut invisible_display_ranges = SmallVec::<[Range<DisplayPoint>; 32]>::new();
@@ -5950,7 +6102,11 @@ impl EditorElement {
                 );
             }
 
-            let corner_radius = 0.15 * layout.position_map.line_height;
+            let corner_radius = if EditorSettings::get_global(cx).rounded_selection {
+                0.15 * layout.position_map.line_height
+            } else {
+                Pixels::ZERO
+            };
 
             for (player_color, selections) in &layout.selections {
                 for selection in selections.iter() {
@@ -7340,6 +7496,7 @@ impl LineWithInvisibles {
         editor_mode: &EditorMode,
         text_width: Pixels,
         is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
+        bg_segments_per_row: &[Vec<(Range<DisplayPoint>, Hsla)>],
         window: &mut Window,
         cx: &mut App,
     ) -> Vec<Self> {
@@ -7355,6 +7512,7 @@ impl LineWithInvisibles {
         let mut row = 0;
         let mut line_exceeded_max_len = false;
         let font_size = text_style.font_size.to_pixels(window.rem_size());
+        let min_contrast = EditorSettings::get_global(cx).minimum_contrast_for_highlights;
 
         let ellipsis = SharedString::from("⋯");
 
@@ -7367,10 +7525,16 @@ impl LineWithInvisibles {
         }]) {
             if let Some(replacement) = highlighted_chunk.replacement {
                 if !line.is_empty() {
+                    let segments = bg_segments_per_row.get(row).map(|v| &v[..]).unwrap_or(&[]);
+                    let text_runs: &[TextRun] = if segments.is_empty() {
+                        &styles
+                    } else {
+                        &Self::split_runs_by_bg_segments(&styles, segments, min_contrast)
+                    };
                     let shaped_line = window.text_system().shape_line(
                         line.clone().into(),
                         font_size,
-                        &styles,
+                        text_runs,
                         None,
                     );
                     width += shaped_line.width;
@@ -7448,10 +7612,16 @@ impl LineWithInvisibles {
             } else {
                 for (ix, mut line_chunk) in highlighted_chunk.text.split('\n').enumerate() {
                     if ix > 0 {
+                        let segments = bg_segments_per_row.get(row).map(|v| &v[..]).unwrap_or(&[]);
+                        let text_runs = if segments.is_empty() {
+                            &styles
+                        } else {
+                            &Self::split_runs_by_bg_segments(&styles, segments, min_contrast)
+                        };
                         let shaped_line = window.text_system().shape_line(
                             line.clone().into(),
                             font_size,
-                            &styles,
+                            text_runs,
                             None,
                         );
                         width += shaped_line.width;
@@ -7539,6 +7709,81 @@ impl LineWithInvisibles {
         layouts
     }
 
+    /// Takes text runs and a left-to-right ordered list of non-overlapping background ranges, each with its background color.
+    /// Returns new text runs, split at range boundaries, with the text color of each overlapping slice adjusted to maintain the minimum contrast against that range's background.
+    fn split_runs_by_bg_segments(
+        text_runs: &[TextRun],
+        bg_segments: &[(Range<DisplayPoint>, Hsla)],
+        min_contrast: f32,
+    ) -> Vec<TextRun> {
+        let mut output_runs: Vec<TextRun> = Vec::with_capacity(text_runs.len());
+        let mut line_col = 0usize;
+        let mut segment_ix = 0usize;
+
+        for text_run in text_runs.iter() {
+            let run_start_col = line_col;
+            let run_end_col = run_start_col + text_run.len;
+            while segment_ix < bg_segments.len()
+                && (bg_segments[segment_ix].0.end.column() as usize) <= run_start_col
+            {
+                segment_ix += 1;
+            }
+            let mut cursor_col = run_start_col;
+            let mut local_segment_ix = segment_ix;
+            while local_segment_ix < bg_segments.len() {
+                let (range, segment_color) = &bg_segments[local_segment_ix];
+                let segment_start_col = range.start.column() as usize;
+                let segment_end_col = range.end.column() as usize;
+                if segment_start_col >= run_end_col {
+                    break;
+                }
+                if segment_start_col > cursor_col {
+                    let span_len = segment_start_col - cursor_col;
+                    output_runs.push(TextRun {
+                        len: span_len,
+                        font: text_run.font.clone(),
+                        color: text_run.color,
+                        background_color: text_run.background_color,
+                        underline: text_run.underline,
+                        strikethrough: text_run.strikethrough,
+                    });
+                    cursor_col = segment_start_col;
+                }
+                let segment_slice_end_col = segment_end_col.min(run_end_col);
+                if segment_slice_end_col > cursor_col {
+                    let new_text_color =
+                        ensure_minimum_contrast(text_run.color, *segment_color, min_contrast);
+                    output_runs.push(TextRun {
+                        len: segment_slice_end_col - cursor_col,
+                        font: text_run.font.clone(),
+                        color: new_text_color,
+                        background_color: text_run.background_color,
+                        underline: text_run.underline,
+                        strikethrough: text_run.strikethrough,
+                    });
+                    cursor_col = segment_slice_end_col;
+                }
+                if segment_end_col >= run_end_col {
+                    break;
+                }
+                local_segment_ix += 1;
+            }
+            if cursor_col < run_end_col {
+                output_runs.push(TextRun {
+                    len: run_end_col - cursor_col,
+                    font: text_run.font.clone(),
+                    color: text_run.color,
+                    background_color: text_run.background_color,
+                    underline: text_run.underline,
+                    strikethrough: text_run.strikethrough,
+                });
+            }
+            line_col = run_end_col;
+            segment_ix = local_segment_ix;
+        }
+        output_runs
+    }
+
     fn prepaint(
         &mut self,
         line_height: Pixels,
@@ -8452,12 +8697,20 @@ impl Element for EditorElement {
                         cx,
                     );
 
+                    let bg_segments_per_row = Self::bg_segments_per_row(
+                        start_row..end_row,
+                        &selections,
+                        &highlighted_ranges,
+                        self.style.background,
+                    );
+
                     let mut line_layouts = Self::layout_lines(
                         start_row..end_row,
                         &snapshot,
                         &self.style,
                         editor_width,
                         is_row_soft_wrapped,
+                        &bg_segments_per_row,
                         window,
                         cx,
                     );
@@ -9817,6 +10070,7 @@ pub fn layout_line(
         &snapshot.mode,
         text_width,
         is_row_soft_wrapped,
+        &[],
         window,
         cx,
     )
@@ -10717,4 +10971,289 @@ mod tests {
             .cloned()
             .collect()
     }
+
+    #[gpui::test]
+    fn test_merge_overlapping_ranges() {
+        let base_bg = Hsla::default();
+        let color1 = Hsla {
+            h: 0.0,
+            s: 0.5,
+            l: 0.5,
+            a: 0.5,
+        };
+        let color2 = Hsla {
+            h: 120.0,
+            s: 0.5,
+            l: 0.5,
+            a: 0.5,
+        };
+
+        let display_point = |col| DisplayPoint::new(DisplayRow(0), col);
+        let cols = |v: &Vec<(Range<DisplayPoint>, Hsla)>| -> Vec<(u32, u32)> {
+            v.iter()
+                .map(|(r, _)| (r.start.column(), r.end.column()))
+                .collect()
+        };
+
+        // Test overlapping ranges blend colors
+        let overlapping = vec![
+            (display_point(5)..display_point(15), color1),
+            (display_point(10)..display_point(20), color2),
+        ];
+        let result = EditorElement::merge_overlapping_ranges(overlapping, base_bg);
+        assert_eq!(cols(&result), vec![(5, 10), (10, 15), (15, 20)]);
+
+        // Test middle segment should have blended color
+        let blended = Hsla::blend(Hsla::blend(base_bg, color1), color2);
+        assert_eq!(result[1].1, blended);
+
+        // Test adjacent same-color ranges merge
+        let adjacent_same = vec![
+            (display_point(5)..display_point(10), color1),
+            (display_point(10)..display_point(15), color1),
+        ];
+        let result = EditorElement::merge_overlapping_ranges(adjacent_same, base_bg);
+        assert_eq!(cols(&result), vec![(5, 15)]);
+
+        // Test contained range splits
+        let contained = vec![
+            (display_point(5)..display_point(20), color1),
+            (display_point(10)..display_point(15), color2),
+        ];
+        let result = EditorElement::merge_overlapping_ranges(contained, base_bg);
+        assert_eq!(cols(&result), vec![(5, 10), (10, 15), (15, 20)]);
+
+        // Test multiple overlaps split at every boundary
+        let color3 = Hsla {
+            h: 240.0,
+            s: 0.5,
+            l: 0.5,
+            a: 0.5,
+        };
+        let complex = vec![
+            (display_point(5)..display_point(12), color1),
+            (display_point(8)..display_point(16), color2),
+            (display_point(10)..display_point(14), color3),
+        ];
+        let result = EditorElement::merge_overlapping_ranges(complex, base_bg);
+        assert_eq!(
+            cols(&result),
+            vec![(5, 8), (8, 10), (10, 12), (12, 14), (14, 16)]
+        );
+    }
+
+    #[gpui::test]
+    fn test_bg_segments_per_row() {
+        let base_bg = Hsla::default();
+
+        // Case A: selection spans three display rows: row 1 [5, end), full row 2, row 3 [0, 7)
+        {
+            let selection_color = Hsla {
+                h: 200.0,
+                s: 0.5,
+                l: 0.5,
+                a: 0.5,
+            };
+            let player_color = PlayerColor {
+                cursor: selection_color,
+                background: selection_color,
+                selection: selection_color,
+            };
+
+            let spanning_selection = SelectionLayout {
+                head: DisplayPoint::new(DisplayRow(3), 7),
+                cursor_shape: CursorShape::Bar,
+                is_newest: true,
+                is_local: true,
+                range: DisplayPoint::new(DisplayRow(1), 5)..DisplayPoint::new(DisplayRow(3), 7),
+                active_rows: DisplayRow(1)..DisplayRow(4),
+                user_name: None,
+            };
+
+            let selections = vec![(player_color, vec![spanning_selection])];
+            let result = EditorElement::bg_segments_per_row(
+                DisplayRow(0)..DisplayRow(5),
+                &selections,
+                &[],
+                base_bg,
+            );
+
+            assert_eq!(result.len(), 5);
+            assert!(result[0].is_empty());
+            assert_eq!(result[1].len(), 1);
+            assert_eq!(result[2].len(), 1);
+            assert_eq!(result[3].len(), 1);
+            assert!(result[4].is_empty());
+
+            assert_eq!(result[1][0].0.start, DisplayPoint::new(DisplayRow(1), 5));
+            assert_eq!(result[1][0].0.end.row(), DisplayRow(1));
+            assert_eq!(result[1][0].0.end.column(), u32::MAX);
+            assert_eq!(result[2][0].0.start, DisplayPoint::new(DisplayRow(2), 0));
+            assert_eq!(result[2][0].0.end.row(), DisplayRow(2));
+            assert_eq!(result[2][0].0.end.column(), u32::MAX);
+            assert_eq!(result[3][0].0.start, DisplayPoint::new(DisplayRow(3), 0));
+            assert_eq!(result[3][0].0.end, DisplayPoint::new(DisplayRow(3), 7));
+        }
+
+        // Case B: selection ends exactly at the start of row 3, excluding row 3
+        {
+            let selection_color = Hsla {
+                h: 120.0,
+                s: 0.5,
+                l: 0.5,
+                a: 0.5,
+            };
+            let player_color = PlayerColor {
+                cursor: selection_color,
+                background: selection_color,
+                selection: selection_color,
+            };
+
+            let selection = SelectionLayout {
+                head: DisplayPoint::new(DisplayRow(2), 0),
+                cursor_shape: CursorShape::Bar,
+                is_newest: true,
+                is_local: true,
+                range: DisplayPoint::new(DisplayRow(1), 5)..DisplayPoint::new(DisplayRow(3), 0),
+                active_rows: DisplayRow(1)..DisplayRow(3),
+                user_name: None,
+            };
+
+            let selections = vec![(player_color, vec![selection])];
+            let result = EditorElement::bg_segments_per_row(
+                DisplayRow(0)..DisplayRow(4),
+                &selections,
+                &[],
+                base_bg,
+            );
+
+            assert_eq!(result.len(), 4);
+            assert!(result[0].is_empty());
+            assert_eq!(result[1].len(), 1);
+            assert_eq!(result[2].len(), 1);
+            assert!(result[3].is_empty());
+
+            assert_eq!(result[1][0].0.start, DisplayPoint::new(DisplayRow(1), 5));
+            assert_eq!(result[1][0].0.end.row(), DisplayRow(1));
+            assert_eq!(result[1][0].0.end.column(), u32::MAX);
+            assert_eq!(result[2][0].0.start, DisplayPoint::new(DisplayRow(2), 0));
+            assert_eq!(result[2][0].0.end.row(), DisplayRow(2));
+            assert_eq!(result[2][0].0.end.column(), u32::MAX);
+        }
+    }
+
+    #[cfg(test)]
+    fn generate_test_run(len: usize, color: Hsla) -> TextRun {
+        TextRun {
+            len,
+            font: gpui::font(".SystemUIFont"),
+            color,
+            background_color: None,
+            underline: None,
+            strikethrough: None,
+        }
+    }
+
+    #[gpui::test]
+    fn test_split_runs_by_bg_segments(cx: &mut gpui::TestAppContext) {
+        init_test(cx, |_| {});
+
+        let text_color = Hsla {
+            h: 210.0,
+            s: 0.1,
+            l: 0.4,
+            a: 1.0,
+        };
+        let bg1 = Hsla {
+            h: 30.0,
+            s: 0.6,
+            l: 0.8,
+            a: 1.0,
+        };
+        let bg2 = Hsla {
+            h: 200.0,
+            s: 0.6,
+            l: 0.2,
+            a: 1.0,
+        };
+        let min_contrast = 45.0;
+
+        // Case A: single run; disjoint segments inside the run
+        let runs = vec![generate_test_run(20, text_color)];
+        let segs = vec![
+            (
+                DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 10),
+                bg1,
+            ),
+            (
+                DisplayPoint::new(DisplayRow(0), 12)..DisplayPoint::new(DisplayRow(0), 16),
+                bg2,
+            ),
+        ];
+        let out = LineWithInvisibles::split_runs_by_bg_segments(&runs, &segs, min_contrast);
+        // Expected slices: [0,5) [5,10) [10,12) [12,16) [16,20)
+        assert_eq!(
+            out.iter().map(|r| r.len).collect::<Vec<_>>(),
+            vec![5, 5, 2, 4, 4]
+        );
+        assert_eq!(out[0].color, text_color);
+        assert_eq!(
+            out[1].color,
+            ensure_minimum_contrast(text_color, bg1, min_contrast)
+        );
+        assert_eq!(out[2].color, text_color);
+        assert_eq!(
+            out[3].color,
+            ensure_minimum_contrast(text_color, bg2, min_contrast)
+        );
+        assert_eq!(out[4].color, text_color);
+
+        // Case B: multiple runs; segment extends to end of line (u32::MAX)
+        let runs = vec![
+            generate_test_run(8, text_color),
+            generate_test_run(7, text_color),
+        ];
+        let segs = vec![(
+            DisplayPoint::new(DisplayRow(0), 6)..DisplayPoint::new(DisplayRow(0), u32::MAX),
+            bg1,
+        )];
+        let out = LineWithInvisibles::split_runs_by_bg_segments(&runs, &segs, min_contrast);
+        // Expected slices across runs: [0,6) [6,8) | [0,7)
+        assert_eq!(out.iter().map(|r| r.len).collect::<Vec<_>>(), vec![6, 2, 7]);
+        let adjusted = ensure_minimum_contrast(text_color, bg1, min_contrast);
+        assert_eq!(out[0].color, text_color);
+        assert_eq!(out[1].color, adjusted);
+        assert_eq!(out[2].color, adjusted);
+
+        // Case C: multi-byte characters
+        // for text: "Hello 🌍 世界!"
+        let runs = vec![
+            generate_test_run(5, text_color), // "Hello"
+            generate_test_run(6, text_color), // " 🌍 "
+            generate_test_run(6, text_color), // "世界"
+            generate_test_run(1, text_color), // "!"
+        ];
+        // selecting "🌍 世"
+        let segs = vec![(
+            DisplayPoint::new(DisplayRow(0), 6)..DisplayPoint::new(DisplayRow(0), 14),
+            bg1,
+        )];
+        let out = LineWithInvisibles::split_runs_by_bg_segments(&runs, &segs, min_contrast);
+        // "Hello" | " " | "🌍 " | "世" | "界" | "!"
+        assert_eq!(
+            out.iter().map(|r| r.len).collect::<Vec<_>>(),
+            vec![5, 1, 5, 3, 3, 1]
+        );
+        assert_eq!(out[0].color, text_color); // "Hello"
+        assert_eq!(
+            out[2].color,
+            ensure_minimum_contrast(text_color, bg1, min_contrast)
+        ); // "🌍 "
+        assert_eq!(
+            out[3].color,
+            ensure_minimum_contrast(text_color, bg1, min_contrast)
+        ); // "世"
+        assert_eq!(out[4].color, text_color); // "界"
+        assert_eq!(out[5].color, text_color); // "!"
+    }
 }

crates/editor/src/hover_links.rs 🔗

@@ -188,22 +188,26 @@ impl Editor {
 
     pub fn scroll_hover(
         &mut self,
-        amount: &ScrollAmount,
+        amount: ScrollAmount,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> bool {
         let selection = self.selections.newest_anchor().head();
         let snapshot = self.snapshot(window, cx);
 
-        let Some(popover) = self.hover_state.info_popovers.iter().find(|popover| {
+        if let Some(popover) = self.hover_state.info_popovers.iter().find(|popover| {
             popover
                 .symbol_range
                 .point_within_range(&TriggerPoint::Text(selection), &snapshot)
-        }) else {
-            return false;
-        };
-        popover.scroll(amount, window, cx);
-        true
+        }) {
+            popover.scroll(amount, window, cx);
+            true
+        } else if let Some(context_menu) = self.context_menu.borrow_mut().as_mut() {
+            context_menu.scroll_aside(amount, window, cx);
+            true
+        } else {
+            false
+        }
     }
 
     fn cmd_click_reveal_task(

crates/editor/src/hover_popover.rs 🔗

@@ -896,7 +896,7 @@ impl InfoPopover {
             .into_any_element()
     }
 
-    pub fn scroll(&self, amount: &ScrollAmount, window: &mut Window, cx: &mut Context<Editor>) {
+    pub fn scroll(&self, amount: ScrollAmount, window: &mut Window, cx: &mut Context<Editor>) {
         let mut current = self.scroll_handle.offset();
         current.y -= amount.pixels(
             window.line_height(),

crates/editor/src/inlay_hint_cache.rs 🔗

@@ -1339,7 +1339,7 @@ pub mod tests {
                         let i = task_lsp_request_count.fetch_add(1, Ordering::Release) + 1;
                         assert_eq!(
                             params.text_document.uri,
-                            lsp::Url::from_file_path(file_with_hints).unwrap(),
+                            lsp::Uri::from_file_path(file_with_hints).unwrap(),
                         );
                         Ok(Some(vec![lsp::InlayHint {
                             position: lsp::Position::new(0, i),
@@ -1449,7 +1449,7 @@ pub mod tests {
                     async move {
                         assert_eq!(
                             params.text_document.uri,
-                            lsp::Url::from_file_path(file_with_hints).unwrap(),
+                            lsp::Uri::from_file_path(file_with_hints).unwrap(),
                         );
                         let current_call_id =
                             Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst);
@@ -1594,7 +1594,7 @@ pub mod tests {
                                             "Rust" => {
                                                 assert_eq!(
                                                     params.text_document.uri,
-                                                    lsp::Url::from_file_path(path!("/a/main.rs"))
+                                                    lsp::Uri::from_file_path(path!("/a/main.rs"))
                                                         .unwrap(),
                                                 );
                                                 rs_lsp_request_count.fetch_add(1, Ordering::Release)
@@ -1603,7 +1603,7 @@ pub mod tests {
                                             "Markdown" => {
                                                 assert_eq!(
                                                     params.text_document.uri,
-                                                    lsp::Url::from_file_path(path!("/a/other.md"))
+                                                    lsp::Uri::from_file_path(path!("/a/other.md"))
                                                         .unwrap(),
                                                 );
                                                 md_lsp_request_count.fetch_add(1, Ordering::Release)
@@ -1789,7 +1789,7 @@ pub mod tests {
                         async move {
                             assert_eq!(
                                 params.text_document.uri,
-                                lsp::Url::from_file_path(file_with_hints).unwrap(),
+                                lsp::Uri::from_file_path(file_with_hints).unwrap(),
                             );
                             Ok(Some(vec![
                                 lsp::InlayHint {
@@ -2127,7 +2127,7 @@ pub mod tests {
                             let i = lsp_request_count.fetch_add(1, Ordering::SeqCst) + 1;
                             assert_eq!(
                                 params.text_document.uri,
-                                lsp::Url::from_file_path(file_with_hints).unwrap(),
+                                lsp::Uri::from_file_path(file_with_hints).unwrap(),
                             );
                             Ok(Some(vec![lsp::InlayHint {
                                 position: lsp::Position::new(0, i),
@@ -2290,7 +2290,7 @@ pub mod tests {
                                 async move {
                                     assert_eq!(
                                         params.text_document.uri,
-                                        lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                                        lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
                                     );
 
                                     task_lsp_request_ranges.lock().push(params.range);
@@ -2633,11 +2633,11 @@ pub mod tests {
                 let task_editor_edited = Arc::clone(&closure_editor_edited);
                 async move {
                     let hint_text = if params.text_document.uri
-                        == lsp::Url::from_file_path(path!("/a/main.rs")).unwrap()
+                        == lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap()
                     {
                         "main hint"
                     } else if params.text_document.uri
-                        == lsp::Url::from_file_path(path!("/a/other.rs")).unwrap()
+                        == lsp::Uri::from_file_path(path!("/a/other.rs")).unwrap()
                     {
                         "other hint"
                     } else {
@@ -2944,11 +2944,11 @@ pub mod tests {
                 let task_editor_edited = Arc::clone(&closure_editor_edited);
                 async move {
                     let hint_text = if params.text_document.uri
-                        == lsp::Url::from_file_path(path!("/a/main.rs")).unwrap()
+                        == lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap()
                     {
                         "main hint"
                     } else if params.text_document.uri
-                        == lsp::Url::from_file_path(path!("/a/other.rs")).unwrap()
+                        == lsp::Uri::from_file_path(path!("/a/other.rs")).unwrap()
                     {
                         "other hint"
                     } else {
@@ -3116,7 +3116,7 @@ pub mod tests {
                             async move {
                                 assert_eq!(
                                     params.text_document.uri,
-                                    lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                                    lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
                                 );
                                 let query_start = params.range.start;
                                 Ok(Some(vec![lsp::InlayHint {
@@ -3188,7 +3188,7 @@ pub mod tests {
                     async move {
                         assert_eq!(
                             params.text_document.uri,
-                            lsp::Url::from_file_path(file_with_hints).unwrap(),
+                            lsp::Uri::from_file_path(file_with_hints).unwrap(),
                         );
 
                         let i = lsp_request_count.fetch_add(1, Ordering::SeqCst) + 1;
@@ -3351,7 +3351,7 @@ pub mod tests {
                         move |params, _| async move {
                             assert_eq!(
                                 params.text_document.uri,
-                                lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
+                                lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
                             );
                             Ok(Some(
                                 serde_json::from_value(json!([

crates/editor/src/scroll/scroll_amount.rs 🔗

@@ -15,7 +15,7 @@ impl ScrollDirection {
     }
 }
 
-#[derive(Debug, Clone, PartialEq, Deserialize)]
+#[derive(Debug, Clone, Copy, PartialEq, Deserialize)]
 pub enum ScrollAmount {
     // Scroll N lines (positive is towards the end of the document)
     Line(f32),

crates/editor/src/test/editor_lsp_test_context.rs 🔗

@@ -29,7 +29,7 @@ pub struct EditorLspTestContext {
     pub cx: EditorTestContext,
     pub lsp: lsp::FakeLanguageServer,
     pub workspace: Entity<Workspace>,
-    pub buffer_lsp_url: lsp::Url,
+    pub buffer_lsp_url: lsp::Uri,
 }
 
 pub(crate) fn rust_lang() -> Arc<Language> {
@@ -189,7 +189,7 @@ impl EditorLspTestContext {
             },
             lsp,
             workspace,
-            buffer_lsp_url: lsp::Url::from_file_path(root.join("dir").join(file_name)).unwrap(),
+            buffer_lsp_url: lsp::Uri::from_file_path(root.join("dir").join(file_name)).unwrap(),
         }
     }
 
@@ -358,7 +358,7 @@ impl EditorLspTestContext {
     where
         T: 'static + request::Request,
         T::Params: 'static + Send,
-        F: 'static + Send + FnMut(lsp::Url, T::Params, gpui::AsyncApp) -> Fut,
+        F: 'static + Send + FnMut(lsp::Uri, T::Params, gpui::AsyncApp) -> Fut,
         Fut: 'static + Future<Output = Result<T::Result>>,
     {
         let url = self.buffer_lsp_url.clone();

crates/extension_host/src/extension_host.rs 🔗

@@ -43,7 +43,7 @@ use language::{
 use node_runtime::NodeRuntime;
 use project::ContextProviderWithTasks;
 use release_channel::ReleaseChannel;
-use remote::RemoteClient;
+use remote::{RemoteClient, RemoteConnectionOptions};
 use semantic_version::SemanticVersion;
 use serde::{Deserialize, Serialize};
 use settings::Settings;
@@ -117,7 +117,7 @@ pub struct ExtensionStore {
     pub wasm_host: Arc<WasmHost>,
     pub wasm_extensions: Vec<(Arc<ExtensionManifest>, WasmExtension)>,
     pub tasks: Vec<Task<()>>,
-    pub remote_clients: HashMap<String, WeakEntity<RemoteClient>>,
+    pub remote_clients: HashMap<RemoteConnectionOptions, WeakEntity<RemoteClient>>,
     pub ssh_registered_tx: UnboundedSender<()>,
 }
 
@@ -1779,16 +1779,15 @@ impl ExtensionStore {
     }
 
     pub fn register_remote_client(&mut self, client: Entity<RemoteClient>, cx: &mut Context<Self>) {
-        let connection_options = client.read(cx).connection_options();
-        let ssh_url = connection_options.ssh_url();
+        let options = client.read(cx).connection_options();
 
-        if let Some(existing_client) = self.remote_clients.get(&ssh_url)
+        if let Some(existing_client) = self.remote_clients.get(&options)
             && existing_client.upgrade().is_some()
         {
             return;
         }
 
-        self.remote_clients.insert(ssh_url, client.downgrade());
+        self.remote_clients.insert(options, client.downgrade());
         self.ssh_registered_tx.unbounded_send(()).ok();
     }
 }

crates/extension_host/src/extension_settings.rs 🔗

@@ -3,10 +3,10 @@ use collections::HashMap;
 use gpui::App;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 use std::sync::Arc;
 
-#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
+#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema, SettingsUi)]
 pub struct ExtensionSettings {
     /// The extensions that should be automatically installed by Zed.
     ///

crates/file_finder/src/file_finder_settings.rs 🔗

@@ -1,9 +1,9 @@
 use anyhow::Result;
 use schemars::JsonSchema;
 use serde_derive::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
-#[derive(Deserialize, Debug, Clone, Copy, PartialEq)]
+#[derive(Deserialize, Debug, Clone, Copy, PartialEq, SettingsUi)]
 pub struct FileFinderSettings {
     pub file_icons: bool,
     pub modal_max_width: Option<FileFinderWidth>,

crates/git_hosting_providers/src/settings.rs 🔗

@@ -5,7 +5,7 @@ use git::GitHostingProviderRegistry;
 use gpui::App;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsStore};
+use settings::{Settings, SettingsStore, SettingsUi};
 use url::Url;
 use util::ResultExt as _;
 
@@ -78,7 +78,7 @@ pub struct GitHostingProviderConfig {
     pub name: String,
 }
 
-#[derive(Default, Debug, Clone, Serialize, Deserialize, JsonSchema)]
+#[derive(Default, Debug, Clone, Serialize, Deserialize, JsonSchema, SettingsUi)]
 pub struct GitHostingProviderSettings {
     /// The list of custom Git hosting providers.
     #[serde(default)]

crates/git_ui/src/commit_view.rs 🔗

@@ -1,6 +1,6 @@
 use anyhow::{Context as _, Result};
 use buffer_diff::{BufferDiff, BufferDiffSnapshot};
-use editor::{Editor, EditorEvent, MultiBuffer, SelectionEffects};
+use editor::{Editor, EditorEvent, MultiBuffer, SelectionEffects, multibuffer_context_lines};
 use git::repository::{CommitDetails, CommitDiff, CommitSummary, RepoPath};
 use gpui::{
     AnyElement, AnyView, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter,
@@ -195,7 +195,7 @@ impl CommitView {
                             PathKey::namespaced(FILE_NAMESPACE, path),
                             buffer,
                             diff_hunk_ranges,
-                            editor::DEFAULT_MULTIBUFFER_CONTEXT,
+                            multibuffer_context_lines(cx),
                             cx,
                         );
                         multibuffer.add_diff(buffer_diff, cx);

crates/git_ui/src/git_panel_settings.rs 🔗

@@ -2,7 +2,7 @@ use editor::ShowScrollbar;
 use gpui::Pixels;
 use schemars::JsonSchema;
 use serde_derive::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 use workspace::dock::DockPosition;
 
 #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
@@ -77,7 +77,7 @@ pub struct GitPanelSettingsContent {
     pub collapse_untracked_diff: Option<bool>,
 }
 
-#[derive(Deserialize, Debug, Clone, PartialEq)]
+#[derive(Deserialize, Debug, Clone, PartialEq, SettingsUi)]
 pub struct GitPanelSettings {
     pub button: bool,
     pub dock: DockPosition,

crates/git_ui/src/project_diff.rs 🔗

@@ -10,6 +10,7 @@ use collections::HashSet;
 use editor::{
     Editor, EditorEvent, SelectionEffects,
     actions::{GoToHunk, GoToPreviousHunk},
+    multibuffer_context_lines,
     scroll::Autoscroll,
 };
 use futures::StreamExt;
@@ -465,7 +466,7 @@ impl ProjectDiff {
                 path_key.clone(),
                 buffer,
                 excerpt_ranges,
-                editor::DEFAULT_MULTIBUFFER_CONTEXT,
+                multibuffer_context_lines(cx),
                 cx,
             );
             (was_empty, is_newly_added)

crates/go_to_line/src/cursor_position.rs 🔗

@@ -2,7 +2,7 @@ use editor::{Editor, EditorSettings, MultiBufferSnapshot};
 use gpui::{App, Entity, FocusHandle, Focusable, Subscription, Task, WeakEntity};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 use std::{fmt::Write, num::NonZeroU32, time::Duration};
 use text::{Point, Selection};
 use ui::{
@@ -293,7 +293,7 @@ impl StatusItemView for CursorPosition {
     }
 }
 
-#[derive(Clone, Copy, Default, PartialEq, JsonSchema, Deserialize, Serialize)]
+#[derive(Clone, Copy, Default, PartialEq, JsonSchema, Deserialize, Serialize, SettingsUi)]
 #[serde(rename_all = "snake_case")]
 pub(crate) enum LineIndicatorFormat {
     Short,

crates/gpui/examples/image/image.rs 🔗

@@ -75,65 +75,71 @@ impl Render for ImageShowcase {
     fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
         div()
             .id("main")
+            .bg(gpui::white())
             .overflow_y_scroll()
             .p_5()
             .size_full()
-            .flex()
-            .flex_col()
-            .justify_center()
-            .items_center()
-            .gap_8()
-            .bg(rgb(0xffffff))
             .child(
                 div()
                     .flex()
-                    .flex_row()
+                    .flex_col()
                     .justify_center()
                     .items_center()
                     .gap_8()
-                    .child(ImageContainer::new(
-                        "Image loaded from a local file",
-                        self.local_resource.clone(),
-                    ))
-                    .child(ImageContainer::new(
-                        "Image loaded from a remote resource",
-                        self.remote_resource.clone(),
+                    .child(img(
+                        "https://github.com/zed-industries/zed/actions/workflows/ci.yml/badge.svg",
                     ))
-                    .child(ImageContainer::new(
-                        "Image loaded from an asset",
-                        self.asset_resource.clone(),
-                    )),
-            )
-            .child(
-                div()
-                    .flex()
-                    .flex_row()
-                    .gap_8()
                     .child(
                         div()
-                            .flex_col()
-                            .child("Auto Width")
-                            .child(img("https://picsum.photos/800/400").h(px(180.))),
+                            .flex()
+                            .flex_row()
+                            .justify_center()
+                            .items_center()
+                            .gap_8()
+                            .child(ImageContainer::new(
+                                "Image loaded from a local file",
+                                self.local_resource.clone(),
+                            ))
+                            .child(ImageContainer::new(
+                                "Image loaded from a remote resource",
+                                self.remote_resource.clone(),
+                            ))
+                            .child(ImageContainer::new(
+                                "Image loaded from an asset",
+                                self.asset_resource.clone(),
+                            )),
+                    )
+                    .child(
+                        div()
+                            .flex()
+                            .flex_row()
+                            .gap_8()
+                            .child(
+                                div()
+                                    .flex_col()
+                                    .child("Auto Width")
+                                    .child(img("https://picsum.photos/800/400").h(px(180.))),
+                            )
+                            .child(
+                                div()
+                                    .flex_col()
+                                    .child("Auto Height")
+                                    .child(img("https://picsum.photos/800/400").w(px(180.))),
+                            ),
                     )
                     .child(
                         div()
+                            .flex()
                             .flex_col()
-                            .child("Auto Height")
-                            .child(img("https://picsum.photos/800/400").w(px(180.))),
+                            .justify_center()
+                            .items_center()
+                            .w_full()
+                            .border_1()
+                            .border_color(rgb(0xC0C0C0))
+                            .child("image with max width 100%")
+                            .child(img("https://picsum.photos/800/400").max_w_full()),
                     ),
             )
-            .child(
-                div()
-                    .flex()
-                    .flex_col()
-                    .justify_center()
-                    .items_center()
-                    .w_full()
-                    .border_1()
-                    .border_color(rgb(0xC0C0C0))
-                    .child("image with max width 100%")
-                    .child(img("https://picsum.photos/800/400").max_w_full()),
-            )
     }
 }
 

crates/gpui/src/assets.rs 🔗

@@ -1,4 +1,4 @@
-use crate::{DevicePixels, Result, SharedString, Size, size};
+use crate::{DevicePixels, Pixels, Result, SharedString, Size, size};
 use smallvec::SmallVec;
 
 use image::{Delay, Frame};
@@ -42,6 +42,8 @@ pub(crate) struct RenderImageParams {
 pub struct RenderImage {
     /// The ID associated with this image
     pub id: ImageId,
+    /// The scale factor of this image on render.
+    pub(crate) scale_factor: f32,
     data: SmallVec<[Frame; 1]>,
 }
 
@@ -60,6 +62,7 @@ impl RenderImage {
 
         Self {
             id: ImageId(NEXT_ID.fetch_add(1, SeqCst)),
+            scale_factor: 1.0,
             data: data.into(),
         }
     }
@@ -77,6 +80,12 @@ impl RenderImage {
         size(width.into(), height.into())
     }
 
+    /// Get the size of this image, in pixels for display, adjusted for the scale factor.
+    pub(crate) fn render_size(&self, frame_index: usize) -> Size<Pixels> {
+        self.size(frame_index)
+            .map(|v| (v.0 as f32 / self.scale_factor).into())
+    }
+
     /// Get the delay of this frame from the previous
     pub fn delay(&self, frame_index: usize) -> Delay {
         self.data[frame_index].delay()

crates/gpui/src/elements/img.rs 🔗

@@ -332,20 +332,18 @@ impl Element for Img {
                                 state.started_loading = None;
                             }
 
-                            let image_size = data.size(frame_index);
-                            style.aspect_ratio =
-                                Some(image_size.width.0 as f32 / image_size.height.0 as f32);
+                            let image_size = data.render_size(frame_index);
+                            style.aspect_ratio = Some(image_size.width / image_size.height);
 
                             if let Length::Auto = style.size.width {
                                 style.size.width = match style.size.height {
                                     Length::Definite(DefiniteLength::Absolute(
                                         AbsoluteLength::Pixels(height),
                                     )) => Length::Definite(
-                                        px(image_size.width.0 as f32 * height.0
-                                            / image_size.height.0 as f32)
-                                        .into(),
+                                        px(image_size.width.0 * height.0 / image_size.height.0)
+                                            .into(),
                                     ),
-                                    _ => Length::Definite(px(image_size.width.0 as f32).into()),
+                                    _ => Length::Definite(image_size.width.into()),
                                 };
                             }
 
@@ -354,11 +352,10 @@ impl Element for Img {
                                     Length::Definite(DefiniteLength::Absolute(
                                         AbsoluteLength::Pixels(width),
                                     )) => Length::Definite(
-                                        px(image_size.height.0 as f32 * width.0
-                                            / image_size.width.0 as f32)
-                                        .into(),
+                                        px(image_size.height.0 * width.0 / image_size.width.0)
+                                            .into(),
                                     ),
-                                    _ => Length::Definite(px(image_size.height.0 as f32).into()),
+                                    _ => Length::Definite(image_size.height.into()),
                                 };
                             }
 
@@ -701,7 +698,9 @@ impl Asset for ImageAssetLoader {
                     swap_rgba_pa_to_bgra(pixel);
                 }
 
-                RenderImage::new(SmallVec::from_elem(Frame::new(buffer), 1))
+                let mut image = RenderImage::new(SmallVec::from_elem(Frame::new(buffer), 1));
+                image.scale_factor = SMOOTH_SVG_SCALE_FACTOR;
+                image
             };
 
             Ok(Arc::new(data))

crates/gpui/src/platform.rs 🔗

@@ -522,6 +522,7 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle {
     fn merge_all_windows(&self) {}
     fn move_tab_to_new_window(&self) {}
     fn toggle_window_tab_overview(&self) {}
+    fn set_tabbing_identifier(&self, _identifier: Option<String>) {}
 
     #[cfg(target_os = "windows")]
     fn get_raw_handle(&self) -> windows::HWND;

crates/gpui/src/platform/linux/platform.rs 🔗

@@ -848,6 +848,7 @@ impl crate::Keystroke {
             Keysym::Down => "down".to_owned(),
             Keysym::Home => "home".to_owned(),
             Keysym::End => "end".to_owned(),
+            Keysym::Insert => "insert".to_owned(),
 
             _ => {
                 let name = xkb::keysym_get_name(key_sym).to_lowercase();

crates/gpui/src/platform/mac/text_system.rs 🔗

@@ -43,7 +43,7 @@ use pathfinder_geometry::{
     vector::{Vector2F, Vector2I},
 };
 use smallvec::SmallVec;
-use std::{borrow::Cow, char, cmp, convert::TryFrom, sync::Arc};
+use std::{borrow::Cow, char, convert::TryFrom, sync::Arc};
 
 use super::open_type::apply_features_and_fallbacks;
 
@@ -67,6 +67,7 @@ struct MacTextSystemState {
     font_ids_by_postscript_name: HashMap<String, FontId>,
     font_ids_by_font_key: HashMap<FontKey, SmallVec<[FontId; 4]>>,
     postscript_names_by_font_id: HashMap<FontId, String>,
+    zwnjs_scratch_space: Vec<(usize, usize)>,
 }
 
 impl MacTextSystem {
@@ -79,6 +80,7 @@ impl MacTextSystem {
             font_ids_by_postscript_name: HashMap::default(),
             font_ids_by_font_key: HashMap::default(),
             postscript_names_by_font_id: HashMap::default(),
+            zwnjs_scratch_space: Vec::new(),
         }))
     }
 }
@@ -424,29 +426,41 @@ impl MacTextSystemState {
     }
 
     fn layout_line(&mut self, text: &str, font_size: Pixels, font_runs: &[FontRun]) -> LineLayout {
+        const ZWNJ: char = '\u{200C}';
+        const ZWNJ_STR: &str = "\u{200C}";
+        const ZWNJ_SIZE_16: usize = ZWNJ.len_utf16();
+
+        self.zwnjs_scratch_space.clear();
         // Construct the attributed string, converting UTF8 ranges to UTF16 ranges.
         let mut string = CFMutableAttributedString::new();
-        {
-            string.replace_str(&CFString::new(text), CFRange::init(0, 0));
-            let utf16_line_len = string.char_len() as usize;
 
-            let mut ix_converter = StringIndexConverter::new(text);
+        {
+            let mut ix_converter = StringIndexConverter::new(&text);
+            let mut last_font_run = None;
             for run in font_runs {
-                let utf8_end = ix_converter.utf8_ix + run.len;
-                let utf16_start = ix_converter.utf16_ix;
-
-                if utf16_start >= utf16_line_len {
-                    break;
+                let text = &text[ix_converter.utf8_ix..][..run.len];
+                // if the fonts are the same, we need to disconnect the text with a ZWNJ
+                // to prevent core text from forming ligatures between them
+                let needs_zwnj = last_font_run.replace(run.font_id) == Some(run.font_id);
+
+                let n_zwnjs = self.zwnjs_scratch_space.len();
+                let utf16_start = ix_converter.utf16_ix + n_zwnjs * ZWNJ_SIZE_16;
+                ix_converter.advance_to_utf8_ix(ix_converter.utf8_ix + run.len);
+
+                string.replace_str(&CFString::new(text), CFRange::init(utf16_start as isize, 0));
+                if needs_zwnj {
+                    let zwnjs_pos = string.char_len();
+                    self.zwnjs_scratch_space.push((n_zwnjs, zwnjs_pos as usize));
+                    string.replace_str(
+                        &CFString::from_static_string(ZWNJ_STR),
+                        CFRange::init(zwnjs_pos, 0),
+                    );
                 }
-
-                ix_converter.advance_to_utf8_ix(utf8_end);
-                let utf16_end = cmp::min(ix_converter.utf16_ix, utf16_line_len);
+                let utf16_end = string.char_len() as usize;
 
                 let cf_range =
                     CFRange::init(utf16_start as isize, (utf16_end - utf16_start) as isize);
-
-                let font: &FontKitFont = &self.fonts[run.font_id.0];
-
+                let font = &self.fonts[run.font_id.0];
                 unsafe {
                     string.set_attribute(
                         cf_range,
@@ -454,17 +468,12 @@ impl MacTextSystemState {
                         &font.native_font().clone_with_font_size(font_size.into()),
                     );
                 }
-
-                if utf16_end == utf16_line_len {
-                    break;
-                }
             }
         }
-
         // Retrieve the glyphs from the shaped line, converting UTF16 offsets to UTF8 offsets.
         let line = CTLine::new_with_attributed_string(string.as_concrete_TypeRef());
         let glyph_runs = line.glyph_runs();
-        let mut runs = Vec::with_capacity(glyph_runs.len() as usize);
+        let mut runs = <Vec<ShapedRun>>::with_capacity(glyph_runs.len() as usize);
         let mut ix_converter = StringIndexConverter::new(text);
         for run in glyph_runs.into_iter() {
             let attributes = run.attributes().unwrap();
@@ -476,28 +485,44 @@ impl MacTextSystemState {
             };
             let font_id = self.id_for_native_font(font);
 
-            let mut glyphs = Vec::with_capacity(run.glyph_count().try_into().unwrap_or(0));
-            for ((glyph_id, position), glyph_utf16_ix) in run
+            let mut glyphs = match runs.last_mut() {
+                Some(run) if run.font_id == font_id => &mut run.glyphs,
+                _ => {
+                    runs.push(ShapedRun {
+                        font_id,
+                        glyphs: Vec::with_capacity(run.glyph_count().try_into().unwrap_or(0)),
+                    });
+                    &mut runs.last_mut().unwrap().glyphs
+                }
+            };
+            for ((&glyph_id, position), &glyph_utf16_ix) in run
                 .glyphs()
                 .iter()
                 .zip(run.positions().iter())
                 .zip(run.string_indices().iter())
             {
-                let glyph_utf16_ix = usize::try_from(*glyph_utf16_ix).unwrap();
+                let mut glyph_utf16_ix = usize::try_from(glyph_utf16_ix).unwrap();
+                let r = self
+                    .zwnjs_scratch_space
+                    .binary_search_by(|&(_, it)| it.cmp(&glyph_utf16_ix));
+                match r {
+                    // this glyph is a ZWNJ, skip it
+                    Ok(_) => continue,
+                    // adjust the index to account for the ZWNJs we've inserted
+                    Err(idx) => glyph_utf16_ix -= idx * ZWNJ_SIZE_16,
+                }
                 if ix_converter.utf16_ix > glyph_utf16_ix {
                     // We cannot reuse current index converter, as it can only seek forward. Restart the search.
                     ix_converter = StringIndexConverter::new(text);
                 }
                 ix_converter.advance_to_utf16_ix(glyph_utf16_ix);
                 glyphs.push(ShapedGlyph {
-                    id: GlyphId(*glyph_id as u32),
+                    id: GlyphId(glyph_id as u32),
                     position: point(position.x as f32, position.y as f32).map(px),
                     index: ix_converter.utf8_ix,
                     is_emoji: self.is_emoji(font_id),
                 });
             }
-
-            runs.push(ShapedRun { font_id, glyphs });
         }
         let typographic_bounds = line.get_typographic_bounds();
         LineLayout {
@@ -696,4 +721,93 @@ mod tests {
         // There's no glyph for \u{feff}
         assert_eq!(layout.runs[0].glyphs[1].id, GlyphId(69u32)); // b
     }
+
+    #[test]
+    fn test_layout_line_zwnj_insertion() {
+        let fonts = MacTextSystem::new();
+        let font_id = fonts.font_id(&font("Helvetica")).unwrap();
+
+        let text = "hello world";
+        let font_runs = &[
+            FontRun { font_id, len: 5 }, // "hello"
+            FontRun { font_id, len: 6 }, // " world"
+        ];
+
+        let layout = fonts.layout_line(text, px(16.), font_runs);
+        assert_eq!(layout.len, text.len());
+
+        for run in &layout.runs {
+            for glyph in &run.glyphs {
+                assert!(
+                    glyph.index < text.len(),
+                    "Glyph index {} is out of bounds for text length {}",
+                    glyph.index,
+                    text.len()
+                );
+            }
+        }
+
+        // Test with different font runs - should not insert ZWNJ
+        let font_id2 = fonts.font_id(&font("Times")).unwrap_or(font_id);
+        let font_runs_different = &[
+            FontRun { font_id, len: 5 }, // "hello"
+            // " world"
+            FontRun {
+                font_id: font_id2,
+                len: 6,
+            },
+        ];
+
+        let layout2 = fonts.layout_line(text, px(16.), font_runs_different);
+        assert_eq!(layout2.len, text.len());
+
+        for run in &layout2.runs {
+            for glyph in &run.glyphs {
+                assert!(
+                    glyph.index < text.len(),
+                    "Glyph index {} is out of bounds for text length {}",
+                    glyph.index,
+                    text.len()
+                );
+            }
+        }
+    }
+
+    #[test]
+    fn test_layout_line_zwnj_edge_cases() {
+        let fonts = MacTextSystem::new();
+        let font_id = fonts.font_id(&font("Helvetica")).unwrap();
+
+        let text = "hello";
+        let font_runs = &[FontRun { font_id, len: 5 }];
+        let layout = fonts.layout_line(text, px(16.), font_runs);
+        assert_eq!(layout.len, text.len());
+
+        let text = "abc";
+        let font_runs = &[
+            FontRun { font_id, len: 1 }, // "a"
+            FontRun { font_id, len: 1 }, // "b"
+            FontRun { font_id, len: 1 }, // "c"
+        ];
+        let layout = fonts.layout_line(text, px(16.), font_runs);
+        assert_eq!(layout.len, text.len());
+
+        for run in &layout.runs {
+            for glyph in &run.glyphs {
+                assert!(
+                    glyph.index < text.len(),
+                    "Glyph index {} is out of bounds for text length {}",
+                    glyph.index,
+                    text.len()
+                );
+            }
+        }
+
+        // Test with empty text
+        let text = "";
+        let font_runs = &[];
+        let layout = fonts.layout_line(text, px(16.), font_runs);
+        assert_eq!(layout.len, 0);
+        assert!(layout.runs.is_empty());
+    }
 }

crates/gpui/src/platform/mac/window.rs 🔗

@@ -781,6 +781,8 @@ impl MacWindow {
                     if let Some(tabbing_identifier) = tabbing_identifier {
                         let tabbing_id = NSString::alloc(nil).init_str(tabbing_identifier.as_str());
                         let _: () = msg_send![native_window, setTabbingIdentifier: tabbing_id];
+                    } else {
+                        let _: () = msg_send![native_window, setTabbingIdentifier:nil];
                     }
                 }
                 WindowKind::PopUp => {
@@ -1018,6 +1020,25 @@ impl PlatformWindow for MacWindow {
         }
     }
 
+    fn set_tabbing_identifier(&self, tabbing_identifier: Option<String>) {
+        let native_window = self.0.lock().native_window;
+        unsafe {
+            let allows_automatic_window_tabbing = tabbing_identifier.is_some();
+            if allows_automatic_window_tabbing {
+                let () = msg_send![class!(NSWindow), setAllowsAutomaticWindowTabbing: YES];
+            } else {
+                let () = msg_send![class!(NSWindow), setAllowsAutomaticWindowTabbing: NO];
+            }
+
+            if let Some(tabbing_identifier) = tabbing_identifier {
+                let tabbing_id = NSString::alloc(nil).init_str(tabbing_identifier.as_str());
+                let _: () = msg_send![native_window, setTabbingIdentifier: tabbing_id];
+            } else {
+                let _: () = msg_send![native_window, setTabbingIdentifier:nil];
+            }
+        }
+    }
+
     fn scale_factor(&self) -> f32 {
         self.0.as_ref().lock().scale_factor()
     }

crates/gpui/src/platform/windows/alpha_correction.hlsl 🔗

@@ -0,0 +1,28 @@
+float color_brightness(float3 color) {
+    // REC. 601 luminance coefficients for perceived brightness
+    return dot(color, float3(0.30f, 0.59f, 0.11f));
+}
+
+float light_on_dark_contrast(float enhancedContrast, float3 color) {
+    float brightness = color_brightness(color);
+    float multiplier = saturate(4.0f * (0.75f - brightness));
+    return enhancedContrast * multiplier;
+}
+
+float enhance_contrast(float alpha, float k) {
+    return alpha * (k + 1.0f) / (alpha * k + 1.0f);
+}
+
+float apply_alpha_correction(float a, float b, float4 g) {
+    float brightness_adjustment = g.x * b + g.y;
+    float correction = brightness_adjustment * a + (g.z * b + g.w);
+    return a + a * (1.0f - a) * correction;
+}
+
+float apply_contrast_and_gamma_correction(float sample, float3 color, float enhanced_contrast_factor, float4 gamma_ratios) {
+    float enhanced_contrast = light_on_dark_contrast(enhanced_contrast_factor, color);
+    float brightness = color_brightness(color);
+
+    float contrasted = enhance_contrast(sample, enhanced_contrast);
+    return apply_alpha_correction(contrasted, brightness, gamma_ratios);
+}

crates/gpui/src/platform/windows/color_text_raster.hlsl 🔗

@@ -1,3 +1,5 @@
+#include "alpha_correction.hlsl"
+
 struct RasterVertexOutput {
     float4 position : SV_Position;
     float2 texcoord : TEXCOORD0;
@@ -23,17 +25,19 @@ struct Bounds {
     int2 size;
 };
 
-Texture2D<float4> t_layer : register(t0);
+Texture2D<float> t_layer : register(t0);
 SamplerState s_layer : register(s0);
 
 cbuffer GlyphLayerTextureParams : register(b0) {
     Bounds bounds;
     float4 run_color;
+    float4 gamma_ratios;
+    float grayscale_enhanced_contrast;
+    float3 _pad;
 };
 
 float4 emoji_rasterization_fragment(PixelInput input): SV_Target {
-    float3 sampled = t_layer.Sample(s_layer, input.texcoord.xy).rgb;
-    float alpha = (sampled.r + sampled.g + sampled.b) / 3;
-
-    return float4(run_color.rgb, alpha);
+    float sample = t_layer.Sample(s_layer, input.texcoord.xy).r;
+    float alpha_corrected = apply_contrast_and_gamma_correction(sample, run_color.rgb, grayscale_enhanced_contrast, gamma_ratios);
+    return float4(run_color.rgb, alpha_corrected * run_color.a);
 }

crates/gpui/src/platform/windows/direct_write.rs 🔗

@@ -10,12 +10,8 @@ use windows::{
         Foundation::*,
         Globalization::GetUserDefaultLocaleName,
         Graphics::{
-            Direct3D::D3D_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP,
-            Direct3D11::*,
-            DirectWrite::*,
-            Dxgi::Common::*,
-            Gdi::{IsRectEmpty, LOGFONTW},
-            Imaging::*,
+            Direct3D::D3D_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP, Direct3D11::*, DirectWrite::*,
+            Dxgi::Common::*, Gdi::LOGFONTW,
         },
         System::SystemServices::LOCALE_NAME_MAX_LENGTH,
         UI::WindowsAndMessaging::*,
@@ -40,12 +36,10 @@ pub(crate) struct DirectWriteTextSystem(RwLock<DirectWriteState>);
 struct DirectWriteComponent {
     locale: String,
     factory: IDWriteFactory5,
-    bitmap_factory: AgileReference<IWICImagingFactory>,
     in_memory_loader: IDWriteInMemoryFontFileLoader,
     builder: IDWriteFontSetBuilder1,
     text_renderer: Arc<TextRendererWrapper>,
 
-    render_params: IDWriteRenderingParams3,
     gpu_state: GPUState,
 }
 
@@ -76,11 +70,10 @@ struct FontIdentifier {
 }
 
 impl DirectWriteComponent {
-    pub fn new(bitmap_factory: &IWICImagingFactory, gpu_context: &DirectXDevices) -> Result<Self> {
+    pub fn new(gpu_context: &DirectXDevices) -> Result<Self> {
         // todo: ideally this would not be a large unsafe block but smaller isolated ones for easier auditing
         unsafe {
             let factory: IDWriteFactory5 = DWriteCreateFactory(DWRITE_FACTORY_TYPE_SHARED)?;
-            let bitmap_factory = AgileReference::new(bitmap_factory)?;
             // The `IDWriteInMemoryFontFileLoader` here is supported starting from
             // Windows 10 Creators Update, which consequently requires the entire
             // `DirectWriteTextSystem` to run on `win10 1703`+.
@@ -92,36 +85,14 @@ impl DirectWriteComponent {
             let locale = String::from_utf16_lossy(&locale_vec);
             let text_renderer = Arc::new(TextRendererWrapper::new(&locale));
 
-            let render_params = {
-                let default_params: IDWriteRenderingParams3 =
-                    factory.CreateRenderingParams()?.cast()?;
-                let gamma = default_params.GetGamma();
-                let enhanced_contrast = default_params.GetEnhancedContrast();
-                let gray_contrast = default_params.GetGrayscaleEnhancedContrast();
-                let cleartype_level = default_params.GetClearTypeLevel();
-                let grid_fit_mode = default_params.GetGridFitMode();
-
-                factory.CreateCustomRenderingParams(
-                    gamma,
-                    enhanced_contrast,
-                    gray_contrast,
-                    cleartype_level,
-                    DWRITE_PIXEL_GEOMETRY_RGB,
-                    DWRITE_RENDERING_MODE1_NATURAL_SYMMETRIC,
-                    grid_fit_mode,
-                )?
-            };
-
             let gpu_state = GPUState::new(gpu_context)?;
 
             Ok(DirectWriteComponent {
                 locale,
                 factory,
-                bitmap_factory,
                 in_memory_loader,
                 builder,
                 text_renderer,
-                render_params,
                 gpu_state,
             })
         }
@@ -212,11 +183,8 @@ impl GPUState {
 }
 
 impl DirectWriteTextSystem {
-    pub(crate) fn new(
-        gpu_context: &DirectXDevices,
-        bitmap_factory: &IWICImagingFactory,
-    ) -> Result<Self> {
-        let components = DirectWriteComponent::new(bitmap_factory, gpu_context)?;
+    pub(crate) fn new(gpu_context: &DirectXDevices) -> Result<Self> {
+        let components = DirectWriteComponent::new(gpu_context)?;
         let system_font_collection = unsafe {
             let mut result = std::mem::zeroed();
             components
@@ -762,14 +730,14 @@ impl DirectWriteState {
         unsafe {
             font.font_face.GetRecommendedRenderingMode(
                 params.font_size.0,
-                // The dpi here seems that it has the same effect with `Some(&transform)`
-                1.0,
-                1.0,
+                // Using 96 as scale is applied by the transform
+                96.0,
+                96.0,
                 Some(&transform),
                 false,
                 DWRITE_OUTLINE_THRESHOLD_ANTIALIASED,
                 DWRITE_MEASURING_MODE_NATURAL,
-                &self.components.render_params,
+                None,
                 &mut rendering_mode,
                 &mut grid_fit_mode,
             )?;
@@ -782,8 +750,7 @@ impl DirectWriteState {
                 rendering_mode,
                 DWRITE_MEASURING_MODE_NATURAL,
                 grid_fit_mode,
-                // We're using cleartype not grayscale for monochrome is because it provides better quality
-                DWRITE_TEXT_ANTIALIAS_MODE_CLEARTYPE,
+                DWRITE_TEXT_ANTIALIAS_MODE_GRAYSCALE,
                 baseline_origin_x,
                 baseline_origin_y,
             )
@@ -794,10 +761,14 @@ impl DirectWriteState {
     fn raster_bounds(&self, params: &RenderGlyphParams) -> Result<Bounds<DevicePixels>> {
         let glyph_analysis = self.create_glyph_run_analysis(params)?;
 
-        let bounds = unsafe { glyph_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_CLEARTYPE_3x1)? };
-        // Some glyphs cannot be drawn with ClearType, such as bitmap fonts. In that case
-        // GetAlphaTextureBounds() supposedly returns an empty RECT, but I haven't tested that yet.
-        if !unsafe { IsRectEmpty(&bounds) }.as_bool() {
+        let bounds = unsafe { glyph_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_ALIASED_1x1)? };
+
+        if bounds.right < bounds.left {
+            Ok(Bounds {
+                origin: point(0.into(), 0.into()),
+                size: size(0.into(), 0.into()),
+            })
+        } else {
             Ok(Bounds {
                 origin: point(bounds.left.into(), bounds.top.into()),
                 size: size(
@@ -805,25 +776,6 @@ impl DirectWriteState {
                     (bounds.bottom - bounds.top).into(),
                 ),
             })
-        } else {
-            // If it's empty, retry with grayscale AA.
-            let bounds =
-                unsafe { glyph_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_ALIASED_1x1)? };
-
-            if bounds.right < bounds.left {
-                Ok(Bounds {
-                    origin: point(0.into(), 0.into()),
-                    size: size(0.into(), 0.into()),
-                })
-            } else {
-                Ok(Bounds {
-                    origin: point(bounds.left.into(), bounds.top.into()),
-                    size: size(
-                        (bounds.right - bounds.left).into(),
-                        (bounds.bottom - bounds.top).into(),
-                    ),
-                })
-            }
         }
     }
 
@@ -872,13 +824,12 @@ impl DirectWriteState {
         glyph_bounds: Bounds<DevicePixels>,
     ) -> Result<Vec<u8>> {
         let mut bitmap_data =
-            vec![0u8; glyph_bounds.size.width.0 as usize * glyph_bounds.size.height.0 as usize * 3];
+            vec![0u8; glyph_bounds.size.width.0 as usize * glyph_bounds.size.height.0 as usize];
 
         let glyph_analysis = self.create_glyph_run_analysis(params)?;
         unsafe {
             glyph_analysis.CreateAlphaTexture(
-                // We're using cleartype not grayscale for monochrome is because it provides better quality
-                DWRITE_TEXTURE_CLEARTYPE_3x1,
+                DWRITE_TEXTURE_ALIASED_1x1,
                 &RECT {
                     left: glyph_bounds.origin.x.0,
                     top: glyph_bounds.origin.y.0,
@@ -889,30 +840,6 @@ impl DirectWriteState {
             )?;
         }
 
-        let bitmap_factory = self.components.bitmap_factory.resolve()?;
-        let bitmap = unsafe {
-            bitmap_factory.CreateBitmapFromMemory(
-                glyph_bounds.size.width.0 as u32,
-                glyph_bounds.size.height.0 as u32,
-                &GUID_WICPixelFormat24bppRGB,
-                glyph_bounds.size.width.0 as u32 * 3,
-                &bitmap_data,
-            )
-        }?;
-
-        let grayscale_bitmap =
-            unsafe { WICConvertBitmapSource(&GUID_WICPixelFormat8bppGray, &bitmap) }?;
-
-        let mut bitmap_data =
-            vec![0u8; glyph_bounds.size.width.0 as usize * glyph_bounds.size.height.0 as usize];
-        unsafe {
-            grayscale_bitmap.CopyPixels(
-                std::ptr::null() as _,
-                glyph_bounds.size.width.0 as u32,
-                &mut bitmap_data,
-            )
-        }?;
-
         Ok(bitmap_data)
     }
 
@@ -981,25 +908,24 @@ impl DirectWriteState {
                         DWRITE_RENDERING_MODE1_NATURAL_SYMMETRIC,
                         DWRITE_MEASURING_MODE_NATURAL,
                         DWRITE_GRID_FIT_MODE_DEFAULT,
-                        DWRITE_TEXT_ANTIALIAS_MODE_CLEARTYPE,
+                        DWRITE_TEXT_ANTIALIAS_MODE_GRAYSCALE,
                         baseline_origin_x,
                         baseline_origin_y,
                     )
                 }?;
 
                 let color_bounds =
-                    unsafe { color_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_CLEARTYPE_3x1) }?;
+                    unsafe { color_analysis.GetAlphaTextureBounds(DWRITE_TEXTURE_ALIASED_1x1) }?;
 
                 let color_size = size(
                     color_bounds.right - color_bounds.left,
                     color_bounds.bottom - color_bounds.top,
                 );
                 if color_size.width > 0 && color_size.height > 0 {
-                    let mut alpha_data =
-                        vec![0u8; (color_size.width * color_size.height * 3) as usize];
+                    let mut alpha_data = vec![0u8; (color_size.width * color_size.height) as usize];
                     unsafe {
                         color_analysis.CreateAlphaTexture(
-                            DWRITE_TEXTURE_CLEARTYPE_3x1,
+                            DWRITE_TEXTURE_ALIASED_1x1,
                             &color_bounds,
                             &mut alpha_data,
                         )
@@ -1015,10 +941,6 @@ impl DirectWriteState {
                         }
                     };
                     let bounds = bounds(point(color_bounds.left, color_bounds.top), color_size);
-                    let alpha_data = alpha_data
-                        .chunks_exact(3)
-                        .flat_map(|chunk| [chunk[0], chunk[1], chunk[2], 255])
-                        .collect::<Vec<_>>();
                     glyph_layers.push(GlyphLayerTexture::new(
                         &self.components.gpu_state,
                         run_color,
@@ -1135,10 +1057,18 @@ impl DirectWriteState {
         unsafe { device_context.PSSetSamplers(0, Some(&gpu_state.sampler)) };
         unsafe { device_context.OMSetBlendState(&gpu_state.blend_state, None, 0xffffffff) };
 
+        let crate::FontInfo {
+            gamma_ratios,
+            grayscale_enhanced_contrast,
+        } = DirectXRenderer::get_font_info();
+
         for layer in glyph_layers {
             let params = GlyphLayerTextureParams {
                 run_color: layer.run_color,
                 bounds: layer.bounds,
+                gamma_ratios: *gamma_ratios,
+                grayscale_enhanced_contrast: *grayscale_enhanced_contrast,
+                _pad: [0f32; 3],
             };
             unsafe {
                 let mut dest = std::mem::zeroed();
@@ -1298,7 +1228,7 @@ impl GlyphLayerTexture {
             Height: texture_size.height as u32,
             MipLevels: 1,
             ArraySize: 1,
-            Format: DXGI_FORMAT_R8G8B8A8_UNORM,
+            Format: DXGI_FORMAT_R8_UNORM,
             SampleDesc: DXGI_SAMPLE_DESC {
                 Count: 1,
                 Quality: 0,
@@ -1334,7 +1264,7 @@ impl GlyphLayerTexture {
                 0,
                 None,
                 alpha_data.as_ptr() as _,
-                (texture_size.width * 4) as u32,
+                texture_size.width as u32,
                 0,
             )
         };
@@ -1352,6 +1282,9 @@ impl GlyphLayerTexture {
 struct GlyphLayerTextureParams {
     bounds: Bounds<i32>,
     run_color: Rgba,
+    gamma_ratios: [f32; 4],
+    grayscale_enhanced_contrast: f32,
+    _pad: [f32; 3],
 }
 
 struct TextRendererWrapper(pub IDWriteTextRenderer);

crates/gpui/src/platform/windows/directx_renderer.rs 🔗

@@ -1,4 +1,7 @@
-use std::{mem::ManuallyDrop, sync::Arc};
+use std::{
+    mem::ManuallyDrop,
+    sync::{Arc, OnceLock},
+};
 
 use ::util::ResultExt;
 use anyhow::{Context, Result};
@@ -9,6 +12,7 @@ use windows::{
             Direct3D::*,
             Direct3D11::*,
             DirectComposition::*,
+            DirectWrite::*,
             Dxgi::{Common::*, *},
         },
     },
@@ -27,6 +31,11 @@ const RENDER_TARGET_FORMAT: DXGI_FORMAT = DXGI_FORMAT_B8G8R8A8_UNORM;
 // This configuration is used for MSAA rendering on paths only, and it's guaranteed to be supported by DirectX 11.
 const PATH_MULTISAMPLE_COUNT: u32 = 4;
 
+pub(crate) struct FontInfo {
+    pub gamma_ratios: [f32; 4],
+    pub grayscale_enhanced_contrast: f32,
+}
+
 pub(crate) struct DirectXRenderer {
     hwnd: HWND,
     atlas: Arc<DirectXAtlas>,
@@ -35,6 +44,7 @@ pub(crate) struct DirectXRenderer {
     globals: DirectXGlobalElements,
     pipelines: DirectXRenderPipelines,
     direct_composition: Option<DirectComposition>,
+    font_info: &'static FontInfo,
 }
 
 /// Direct3D objects
@@ -171,6 +181,7 @@ impl DirectXRenderer {
             globals,
             pipelines,
             direct_composition,
+            font_info: Self::get_font_info(),
         })
     }
 
@@ -183,10 +194,12 @@ impl DirectXRenderer {
             &self.devices.device_context,
             self.globals.global_params_buffer[0].as_ref().unwrap(),
             &[GlobalParams {
+                gamma_ratios: self.font_info.gamma_ratios,
                 viewport_size: [
                     self.resources.viewport[0].Width,
                     self.resources.viewport[0].Height,
                 ],
+                grayscale_enhanced_contrast: self.font_info.grayscale_enhanced_contrast,
                 _pad: 0,
             }],
         )?;
@@ -617,6 +630,52 @@ impl DirectXRenderer {
             driver_info: driver_version,
         })
     }
+
+    pub(crate) fn get_font_info() -> &'static FontInfo {
+        static CACHED_FONT_INFO: OnceLock<FontInfo> = OnceLock::new();
+        CACHED_FONT_INFO.get_or_init(|| unsafe {
+            let factory: IDWriteFactory5 = DWriteCreateFactory(DWRITE_FACTORY_TYPE_SHARED).unwrap();
+            let render_params: IDWriteRenderingParams1 =
+                factory.CreateRenderingParams().unwrap().cast().unwrap();
+            FontInfo {
+                gamma_ratios: Self::get_gamma_ratios(render_params.GetGamma()),
+                grayscale_enhanced_contrast: render_params.GetGrayscaleEnhancedContrast(),
+            }
+        })
+    }
+
+    // Gamma ratios for brightening/darkening edges for better contrast
+    // https://github.com/microsoft/terminal/blob/1283c0f5b99a2961673249fa77c6b986efb5086c/src/renderer/atlas/dwrite.cpp#L50
+    fn get_gamma_ratios(gamma: f32) -> [f32; 4] {
+        const GAMMA_INCORRECT_TARGET_RATIOS: [[f32; 4]; 13] = [
+            [0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0], // gamma = 1.0
+            [0.0166 / 4.0, -0.0807 / 4.0, 0.2227 / 4.0, -0.0751 / 4.0], // gamma = 1.1
+            [0.0350 / 4.0, -0.1760 / 4.0, 0.4325 / 4.0, -0.1370 / 4.0], // gamma = 1.2
+            [0.0543 / 4.0, -0.2821 / 4.0, 0.6302 / 4.0, -0.1876 / 4.0], // gamma = 1.3
+            [0.0739 / 4.0, -0.3963 / 4.0, 0.8167 / 4.0, -0.2287 / 4.0], // gamma = 1.4
+            [0.0933 / 4.0, -0.5161 / 4.0, 0.9926 / 4.0, -0.2616 / 4.0], // gamma = 1.5
+            [0.1121 / 4.0, -0.6395 / 4.0, 1.1588 / 4.0, -0.2877 / 4.0], // gamma = 1.6
+            [0.1300 / 4.0, -0.7649 / 4.0, 1.3159 / 4.0, -0.3080 / 4.0], // gamma = 1.7
+            [0.1469 / 4.0, -0.8911 / 4.0, 1.4644 / 4.0, -0.3234 / 4.0], // gamma = 1.8
+            [0.1627 / 4.0, -1.0170 / 4.0, 1.6051 / 4.0, -0.3347 / 4.0], // gamma = 1.9
+            [0.1773 / 4.0, -1.1420 / 4.0, 1.7385 / 4.0, -0.3426 / 4.0], // gamma = 2.0
+            [0.1908 / 4.0, -1.2652 / 4.0, 1.8650 / 4.0, -0.3476 / 4.0], // gamma = 2.1
+            [0.2031 / 4.0, -1.3864 / 4.0, 1.9851 / 4.0, -0.3501 / 4.0], // gamma = 2.2
+        ];
+
+        const NORM13: f32 = ((0x10000 as f64) / (255.0 * 255.0) * 4.0) as f32;
+        const NORM24: f32 = ((0x100 as f64) / (255.0) * 4.0) as f32;
+
+        let index = ((gamma * 10.0).round() as usize).clamp(10, 22) - 10;
+        let ratios = GAMMA_INCORRECT_TARGET_RATIOS[index];
+
+        [
+            ratios[0] * NORM13,
+            ratios[1] * NORM24,
+            ratios[2] * NORM13,
+            ratios[3] * NORM24,
+        ]
+    }
 }
 
 impl DirectXResources {
@@ -822,8 +881,10 @@ impl DirectXGlobalElements {
 #[derive(Debug, Default)]
 #[repr(C)]
 struct GlobalParams {
+    gamma_ratios: [f32; 4],
     viewport_size: [f32; 2],
-    _pad: u64,
+    grayscale_enhanced_contrast: f32,
+    _pad: u32,
 }
 
 struct PipelineState<T> {
@@ -1544,6 +1605,10 @@ pub(crate) mod shader_resources {
     #[cfg(debug_assertions)]
     pub(super) fn build_shader_blob(entry: ShaderModule, target: ShaderTarget) -> Result<ID3DBlob> {
         unsafe {
+            use windows::Win32::Graphics::{
+                Direct3D::ID3DInclude, Hlsl::D3D_COMPILE_STANDARD_FILE_INCLUDE,
+            };
+
             let shader_name = if matches!(entry, ShaderModule::EmojiRasterization) {
                 "color_text_raster.hlsl"
             } else {
@@ -1572,10 +1637,15 @@ pub(crate) mod shader_resources {
             let entry_point = PCSTR::from_raw(entry.as_ptr());
             let target_cstr = PCSTR::from_raw(target.as_ptr());
 
+            // really dirty trick because winapi bindings are unhappy otherwise
+            let include_handler = &std::mem::transmute::<usize, ID3DInclude>(
+                D3D_COMPILE_STANDARD_FILE_INCLUDE as usize,
+            );
+
             let ret = D3DCompileFromFile(
                 &HSTRING::from(shader_path.to_str().unwrap()),
                 None,
-                None,
+                include_handler,
                 entry_point,
                 target_cstr,
                 D3DCOMPILE_DEBUG | D3DCOMPILE_SKIP_OPTIMIZATION,
@@ -1760,7 +1830,7 @@ mod amd {
                 anyhow::bail!("Failed to initialize AMD AGS, error code: {}", result);
             }
 
-            // Vulkan acctually returns this as the driver version
+            // Vulkan actually returns this as the driver version
             let software_version = if !gpu_info.radeon_software_version.is_null() {
                 std::ffi::CStr::from_ptr(gpu_info.radeon_software_version)
                     .to_string_lossy()

crates/gpui/src/platform/windows/events.rs 🔗

@@ -708,7 +708,7 @@ impl WindowsWindowInner {
                 .system_settings
                 .auto_hide_taskbar_position
         {
-            // Fot the auto-hide taskbar, adjust in by 1 pixel on taskbar edge,
+            // For the auto-hide taskbar, adjust in by 1 pixel on taskbar edge,
             // so the window isn't treated as a "fullscreen app", which would cause
             // the taskbar to disappear.
             match taskbar_position {

crates/gpui/src/platform/windows/platform.rs 🔗

@@ -1,7 +1,6 @@
 use std::{
     cell::RefCell,
     ffi::OsStr,
-    mem::ManuallyDrop,
     path::{Path, PathBuf},
     rc::Rc,
     sync::Arc,
@@ -18,10 +17,7 @@ use windows::{
     UI::ViewManagement::UISettings,
     Win32::{
         Foundation::*,
-        Graphics::{
-            Gdi::*,
-            Imaging::{CLSID_WICImagingFactory, IWICImagingFactory},
-        },
+        Graphics::Gdi::*,
         Security::Credentials::*,
         System::{Com::*, LibraryLoader::*, Ole::*, SystemInformation::*, Threading::*},
         UI::{Input::KeyboardAndMouse::*, Shell::*, WindowsAndMessaging::*},
@@ -41,7 +37,6 @@ pub(crate) struct WindowsPlatform {
     foreground_executor: ForegroundExecutor,
     text_system: Arc<DirectWriteTextSystem>,
     windows_version: WindowsVersion,
-    bitmap_factory: ManuallyDrop<IWICImagingFactory>,
     drop_target_helper: IDropTargetHelper,
     validation_number: usize,
     main_thread_id_win32: u32,
@@ -101,12 +96,8 @@ impl WindowsPlatform {
         let foreground_executor = ForegroundExecutor::new(dispatcher);
         let directx_devices = DirectXDevices::new(disable_direct_composition)
             .context("Unable to init directx devices.")?;
-        let bitmap_factory = ManuallyDrop::new(unsafe {
-            CoCreateInstance(&CLSID_WICImagingFactory, None, CLSCTX_INPROC_SERVER)
-                .context("Error creating bitmap factory.")?
-        });
         let text_system = Arc::new(
-            DirectWriteTextSystem::new(&directx_devices, &bitmap_factory)
+            DirectWriteTextSystem::new(&directx_devices)
                 .context("Error creating DirectWriteTextSystem")?,
         );
         let drop_target_helper: IDropTargetHelper = unsafe {
@@ -128,7 +119,6 @@ impl WindowsPlatform {
             text_system,
             disable_direct_composition,
             windows_version,
-            bitmap_factory,
             drop_target_helper,
             validation_number,
             main_thread_id_win32,
@@ -716,7 +706,6 @@ impl Platform for WindowsPlatform {
 impl Drop for WindowsPlatform {
     fn drop(&mut self) {
         unsafe {
-            ManuallyDrop::drop(&mut self.bitmap_factory);
             OleUninitialize();
         }
     }

crates/gpui/src/platform/windows/shaders.hlsl 🔗

@@ -1,6 +1,10 @@
+#include "alpha_correction.hlsl"
+
 cbuffer GlobalParams: register(b0) {
+    float4 gamma_ratios;
     float2 global_viewport_size;
-    uint2 _pad;
+    float grayscale_enhanced_contrast;
+    uint _pad;
 };
 
 Texture2D<float4> t_sprite: register(t0);
@@ -1098,7 +1102,8 @@ MonochromeSpriteVertexOutput monochrome_sprite_vertex(uint vertex_id: SV_VertexI
 
 float4 monochrome_sprite_fragment(MonochromeSpriteFragmentInput input): SV_Target {
     float sample = t_sprite.Sample(s_sprite, input.tile_position).r;
-    return float4(input.color.rgb, input.color.a * sample);
+    float alpha_corrected = apply_contrast_and_gamma_correction(sample, input.color.rgb, grayscale_enhanced_contrast, gamma_ratios);
+    return float4(input.color.rgb, input.color.a * alpha_corrected);
 }
 
 /*

crates/gpui/src/platform/windows/vsync.rs 🔗

@@ -94,7 +94,7 @@ impl VSyncProvider {
         // DwmFlush and DCompositionWaitForCompositorClock returns very early
         // instead of waiting until vblank when the monitor goes to sleep or is
         // unplugged (nothing to present due to desktop occlusion). We use 1ms as
-        // a threshhold for the duration of the wait functions and fallback to
+        // a threshold for the duration of the wait functions and fallback to
         // Sleep() if it returns before that. This could happen during normal
         // operation for the first call after the vsync thread becomes non-idle,
         // but it shouldn't happen often.

crates/gpui/src/text_system.rs 🔗

@@ -413,9 +413,10 @@ impl WindowTextSystem {
         let mut wrapped_lines = 0;
 
         let mut process_line = |line_text: SharedString| {
+            font_runs.clear();
             let line_end = line_start + line_text.len();
 
-            let mut last_font: Option<Font> = None;
+            let mut last_font: Option<FontId> = None;
             let mut decoration_runs = SmallVec::<[DecorationRun; 32]>::new();
             let mut run_start = line_start;
             while run_start < line_end {
@@ -425,23 +426,14 @@ impl WindowTextSystem {
 
                 let run_len_within_line = cmp::min(line_end, run_start + run.len) - run_start;
 
-                if last_font == Some(run.font.clone()) {
-                    font_runs.last_mut().unwrap().len += run_len_within_line;
-                } else {
-                    last_font = Some(run.font.clone());
-                    font_runs.push(FontRun {
-                        len: run_len_within_line,
-                        font_id: self.resolve_font(&run.font),
-                    });
-                }
-
-                if decoration_runs.last().is_some_and(|last_run| {
-                    last_run.color == run.color
-                        && last_run.underline == run.underline
-                        && last_run.strikethrough == run.strikethrough
-                        && last_run.background_color == run.background_color
-                }) {
-                    decoration_runs.last_mut().unwrap().len += run_len_within_line as u32;
+                let decoration_changed = if let Some(last_run) = decoration_runs.last_mut()
+                    && last_run.color == run.color
+                    && last_run.underline == run.underline
+                    && last_run.strikethrough == run.strikethrough
+                    && last_run.background_color == run.background_color
+                {
+                    last_run.len += run_len_within_line as u32;
+                    false
                 } else {
                     decoration_runs.push(DecorationRun {
                         len: run_len_within_line as u32,
@@ -450,6 +442,21 @@ impl WindowTextSystem {
                         underline: run.underline,
                         strikethrough: run.strikethrough,
                     });
+                    true
+                };
+
+                if let Some(font_run) = font_runs.last_mut()
+                    && Some(font_run.font_id) == last_font
+                    && !decoration_changed
+                {
+                    font_run.len += run_len_within_line;
+                } else {
+                    let font_id = self.resolve_font(&run.font);
+                    last_font = Some(font_id);
+                    font_runs.push(FontRun {
+                        len: run_len_within_line,
+                        font_id,
+                    });
                 }
 
                 if run_len_within_line == run.len {
@@ -484,8 +491,6 @@ impl WindowTextSystem {
                     runs.next();
                 }
             }
-
-            font_runs.clear();
         };
 
         let mut split_lines = text.split('\n');
@@ -519,37 +524,54 @@ impl WindowTextSystem {
     /// Subsets of the line can be styled independently with the `runs` parameter.
     /// Generally, you should prefer to use `TextLayout::shape_line` instead, which
     /// can be painted directly.
-    pub fn layout_line<Text>(
+    pub fn layout_line(
         &self,
-        text: Text,
+        text: &str,
         font_size: Pixels,
         runs: &[TextRun],
         force_width: Option<Pixels>,
-    ) -> Arc<LineLayout>
-    where
-        Text: AsRef<str>,
-        SharedString: From<Text>,
-    {
+    ) -> Arc<LineLayout> {
+        let mut last_run = None::<&TextRun>;
+        let mut last_font: Option<FontId> = None;
         let mut font_runs = self.font_runs_pool.lock().pop().unwrap_or_default();
+        font_runs.clear();
+
         for run in runs.iter() {
-            let font_id = self.resolve_font(&run.font);
-            if let Some(last_run) = font_runs.last_mut()
-                && last_run.font_id == font_id
+            let decoration_changed = if let Some(last_run) = last_run
+                && last_run.color == run.color
+                && last_run.underline == run.underline
+                && last_run.strikethrough == run.strikethrough
+            // we do not consider differing background color relevant, as it does not affect glyphs
+            // && last_run.background_color == run.background_color
             {
-                last_run.len += run.len;
-                continue;
+                false
+            } else {
+                last_run = Some(run);
+                true
+            };
+
+            if let Some(font_run) = font_runs.last_mut()
+                && Some(font_run.font_id) == last_font
+                && !decoration_changed
+            {
+                font_run.len += run.len;
+            } else {
+                let font_id = self.resolve_font(&run.font);
+                last_font = Some(font_id);
+                font_runs.push(FontRun {
+                    len: run.len,
+                    font_id,
+                });
             }
-            font_runs.push(FontRun {
-                len: run.len,
-                font_id,
-            });
         }
 
-        let layout =
-            self.line_layout_cache
-                .layout_line_internal(text, font_size, &font_runs, force_width);
+        let layout = self.line_layout_cache.layout_line(
+            &SharedString::new(text),
+            font_size,
+            &font_runs,
+            force_width,
+        );
 
-        font_runs.clear();
         self.font_runs_pool.lock().push(font_runs);
 
         layout

crates/gpui/src/text_system/line_layout.rs 🔗

@@ -501,7 +501,7 @@ impl LineLayoutCache {
         } else {
             drop(current_frame);
             let text = SharedString::from(text);
-            let unwrapped_layout = self.layout_line::<&SharedString>(&text, font_size, runs);
+            let unwrapped_layout = self.layout_line::<&SharedString>(&text, font_size, runs, None);
             let wrap_boundaries = if let Some(wrap_width) = wrap_width {
                 unwrapped_layout.compute_wrap_boundaries(text.as_ref(), wrap_width, max_lines)
             } else {
@@ -535,19 +535,6 @@ impl LineLayoutCache {
         text: Text,
         font_size: Pixels,
         runs: &[FontRun],
-    ) -> Arc<LineLayout>
-    where
-        Text: AsRef<str>,
-        SharedString: From<Text>,
-    {
-        self.layout_line_internal(text, font_size, runs, None)
-    }
-
-    pub fn layout_line_internal<Text>(
-        &self,
-        text: Text,
-        font_size: Pixels,
-        runs: &[FontRun],
         force_width: Option<Pixels>,
     ) -> Arc<LineLayout>
     where

crates/gpui/src/window.rs 🔗

@@ -4390,6 +4390,13 @@ impl Window {
         self.platform_window.toggle_window_tab_overview()
     }
 
+    /// Sets the tabbing identifier for the window.
+    /// This is macOS specific.
+    pub fn set_tabbing_identifier(&self, tabbing_identifier: Option<String>) {
+        self.platform_window
+            .set_tabbing_identifier(tabbing_identifier)
+    }
+
     /// Toggles the inspector mode on this window.
     #[cfg(any(feature = "inspector", debug_assertions))]
     pub fn toggle_inspector(&mut self, cx: &mut App) {

crates/gpui_macros/src/derive_action.rs 🔗

@@ -16,6 +16,13 @@ pub(crate) fn derive_action(input: TokenStream) -> TokenStream {
     let mut deprecated = None;
     let mut doc_str: Option<String> = None;
 
+    /*
+     * Example of the shape being parsed:
+     *
+     * #[action()]
+     * struct Foo {
+     *     bar: bool // is `bar` considered an attribute here?
+     * }
+     */
     for attr in &input.attrs {
         if attr.path().is_ident("action") {
             attr.parse_nested_meta(|meta| {

crates/image_viewer/src/image_viewer_settings.rs 🔗

@@ -1,10 +1,10 @@
 use gpui::App;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
 /// The settings for the image viewer.
-#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Default)]
+#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Default, SettingsUi)]
 pub struct ImageViewerSettings {
     /// The unit to use for displaying image file sizes.
     ///

crates/journal/src/journal.rs 🔗

@@ -5,7 +5,7 @@ use editor::{Editor, SelectionEffects};
 use gpui::{App, AppContext as _, Context, Window, actions};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 use std::{
     fs::OpenOptions,
     path::{Path, PathBuf},
@@ -22,7 +22,7 @@ actions!(
 );
 
 /// Settings specific to journaling
-#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, SettingsUi)]
 pub struct JournalSettings {
     /// The path of the directory where journal entries are stored.
     ///

crates/keymap_editor/Cargo.toml 🔗

@@ -0,0 +1,53 @@
+[package]
+name = "keymap_editor"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/keymap_editor.rs"
+
+[dependencies]
+anyhow.workspace = true
+collections.workspace = true
+command_palette.workspace = true
+component.workspace = true
+db.workspace = true
+editor.workspace = true
+fs.workspace = true
+fuzzy.workspace = true
+gpui.workspace = true
+itertools.workspace = true
+language.workspace = true
+log.workspace = true
+menu.workspace = true
+notifications.workspace = true
+paths.workspace = true
+project.workspace = true
+search.workspace = true
+serde.workspace = true
+serde_json.workspace = true
+settings.workspace = true
+telemetry.workspace = true
+tempfile.workspace = true
+theme.workspace = true
+tree-sitter-json.workspace = true
+tree-sitter-rust.workspace = true
+ui.workspace = true
+ui_input.workspace = true
+util.workspace = true
+vim.workspace = true
+workspace-hack.workspace = true
+workspace.workspace = true
+zed_actions.workspace = true
+
+[dev-dependencies]
+db = { workspace = true, features = ["test-support"] }
+fs = { workspace = true, features = ["test-support"] }
+gpui = { workspace = true, features = ["test-support"] }
+project = { workspace = true, features = ["test-support"] }
+workspace = { workspace = true, features = ["test-support"] }

crates/settings_ui/src/keybindings.rs → crates/keymap_editor/src/keymap_editor.rs 🔗

@@ -5,6 +5,8 @@ use std::{
     time::Duration,
 };
 
+mod ui_components;
+
 use anyhow::{Context as _, anyhow};
 use collections::{HashMap, HashSet};
 use editor::{CompletionProvider, Editor, EditorEvent};
@@ -34,8 +36,10 @@ use workspace::{
     register_serializable_item,
 };
 
+pub use ui_components::*;
+
 use crate::{
-    keybindings::persistence::KEYBINDING_EDITORS,
+    persistence::KEYBINDING_EDITORS,
     ui_components::{
         keystroke_input::{ClearKeystrokes, KeystrokeInput, StartRecording, StopRecording},
         table::{ColumnWidths, ResizeBehavior, Table, TableInteractionState},

crates/language/src/buffer.rs 🔗

@@ -202,7 +202,7 @@ pub struct Diagnostic {
     pub source: Option<String>,
     /// A machine-readable code that identifies this diagnostic.
     pub code: Option<NumberOrString>,
-    pub code_description: Option<lsp::Url>,
+    pub code_description: Option<lsp::Uri>,
     /// Whether this diagnostic is a hint, warning, or error.
     pub severity: DiagnosticSeverity,
     /// The human-readable message associated with this diagnostic.

crates/language/src/language_settings.rs 🔗

@@ -17,7 +17,7 @@ use serde::{
 };
 
 use settings::{
-    ParameterizedJsonSchema, Settings, SettingsLocation, SettingsSources, SettingsStore,
+    ParameterizedJsonSchema, Settings, SettingsLocation, SettingsSources, SettingsStore, SettingsUi,
 };
 use shellexpand;
 use std::{borrow::Cow, num::NonZeroU32, path::Path, slice, sync::Arc};
@@ -55,7 +55,7 @@ pub fn all_language_settings<'a>(
 }
 
 /// The settings for all languages.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, SettingsUi)]
 pub struct AllLanguageSettings {
     /// The edit prediction settings.
     pub edit_predictions: EditPredictionSettings,

crates/language/src/proto.rs 🔗

@@ -431,7 +431,7 @@ pub fn deserialize_diagnostics(
                     code: diagnostic.code.map(lsp::NumberOrString::from_string),
                     code_description: diagnostic
                         .code_description
-                        .and_then(|s| lsp::Url::parse(&s).ok()),
+                        .and_then(|s| lsp::Uri::from_str(&s).ok()),
                     is_primary: diagnostic.is_primary,
                     is_disk_based: diagnostic.is_disk_based,
                     is_unnecessary: diagnostic.is_unnecessary,

crates/language/src/toolchain.rs 🔗

@@ -14,6 +14,7 @@ use collections::HashMap;
 use fs::Fs;
 use gpui::{AsyncApp, SharedString};
 use settings::WorktreeId;
+use task::ShellKind;
 
 use crate::{LanguageName, ManifestName};
 
@@ -68,7 +69,12 @@ pub trait ToolchainLister: Send + Sync {
     fn term(&self) -> SharedString;
     /// Returns the name of the manifest file for this toolchain.
     fn manifest_name(&self) -> ManifestName;
-    async fn activation_script(&self, toolchain: &Toolchain, fs: &dyn Fs) -> Option<String>;
+    async fn activation_script(
+        &self,
+        toolchain: &Toolchain,
+        shell: ShellKind,
+        fs: &dyn Fs,
+    ) -> Vec<String>;
 }
 
 #[async_trait(?Send)]

crates/language_models/src/provider/anthropic.rs 🔗

@@ -197,7 +197,7 @@ impl AnthropicLanguageModelProvider {
         })
     }
 
-    pub fn api_key(cx: &mut App) -> Task<Result<ApiKey>> {
+    pub fn api_key(cx: &mut App) -> Task<Result<ApiKey, AuthenticateError>> {
         let credentials_provider = <dyn CredentialsProvider>::global(cx);
         let api_url = AllLanguageModelSettings::get_global(cx)
             .anthropic

crates/language_models/src/provider/copilot_chat.rs 🔗

@@ -32,6 +32,8 @@ use std::time::Duration;
 use ui::prelude::*;
 use util::debug_panic;
 
+use crate::provider::x_ai::count_xai_tokens;
+
 use super::anthropic::count_anthropic_tokens;
 use super::google::count_google_tokens;
 use super::open_ai::count_open_ai_tokens;
@@ -228,7 +230,9 @@ impl LanguageModel for CopilotChatLanguageModel {
             ModelVendor::OpenAI | ModelVendor::Anthropic => {
                 LanguageModelToolSchemaFormat::JsonSchema
             }
-            ModelVendor::Google => LanguageModelToolSchemaFormat::JsonSchemaSubset,
+            ModelVendor::Google | ModelVendor::XAI => {
+                LanguageModelToolSchemaFormat::JsonSchemaSubset
+            }
         }
     }
 
@@ -256,6 +260,10 @@ impl LanguageModel for CopilotChatLanguageModel {
         match self.model.vendor() {
             ModelVendor::Anthropic => count_anthropic_tokens(request, cx),
             ModelVendor::Google => count_google_tokens(request, cx),
+            ModelVendor::XAI => {
+                let model = x_ai::Model::from_id(self.model.id()).unwrap_or_default();
+                count_xai_tokens(request, model, cx)
+            }
             ModelVendor::OpenAI => {
                 let model = open_ai::Model::from_id(self.model.id()).unwrap_or_default();
                 count_open_ai_tokens(request, model, cx)
@@ -475,7 +483,6 @@ fn into_copilot_chat(
         }
     }
 
-    let mut tool_called = false;
     let mut messages: Vec<ChatMessage> = Vec::new();
     for message in request_messages {
         match message.role {
@@ -545,7 +552,6 @@ fn into_copilot_chat(
                 let mut tool_calls = Vec::new();
                 for content in &message.content {
                     if let MessageContent::ToolUse(tool_use) = content {
-                        tool_called = true;
                         tool_calls.push(ToolCall {
                             id: tool_use.id.to_string(),
                             content: copilot::copilot_chat::ToolCallContent::Function {
@@ -590,7 +596,7 @@ fn into_copilot_chat(
         }
     }
 
-    let mut tools = request
+    let tools = request
         .tools
         .iter()
         .map(|tool| Tool::Function {
@@ -602,22 +608,6 @@ fn into_copilot_chat(
         })
         .collect::<Vec<_>>();
 
-    // The API will return a Bad Request (with no error message) when tools
-    // were used previously in the conversation but no tools are provided as
-    // part of this request. Inserting a dummy tool seems to circumvent this
-    // error.
-    if tool_called && tools.is_empty() {
-        tools.push(Tool::Function {
-            function: copilot::copilot_chat::Function {
-                name: "noop".to_string(),
-                description: "No operation".to_string(),
-                parameters: serde_json::json!({
-                    "type": "object"
-                }),
-            },
-        });
-    }
-
     Ok(CopilotChatRequest {
         intent: true,
         n: 1,

crates/language_models/src/settings.rs 🔗

@@ -5,7 +5,7 @@ use collections::HashMap;
 use gpui::App;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
 use crate::provider::{
     self,
@@ -29,7 +29,7 @@ pub fn init_settings(cx: &mut App) {
     AllLanguageModelSettings::register(cx);
 }
 
-#[derive(Default)]
+#[derive(Default, SettingsUi)]
 pub struct AllLanguageModelSettings {
     pub anthropic: AnthropicSettings,
     pub bedrock: AmazonBedrockSettings,

crates/language_tools/src/language_tools.rs 🔗

@@ -14,7 +14,7 @@ use ui::{Context, Window};
 use workspace::{Item, ItemHandle, SplitDirection, Workspace};
 
 pub fn init(cx: &mut App) {
-    lsp_log_view::init(true, cx);
+    lsp_log_view::init(false, cx);
     syntax_tree_view::init(cx);
     key_context_view::init(cx);
 }

crates/language_tools/src/lsp_log_view.rs 🔗

@@ -16,6 +16,7 @@ use project::{
     lsp_store::log_store::{self, Event, LanguageServerKind, LogKind, LogStore, Message},
     search::SearchQuery,
 };
+use proto::toggle_lsp_logs::LogType;
 use std::{any::TypeId, borrow::Cow, sync::Arc};
 use ui::{Button, Checkbox, ContextMenu, Label, PopoverMenu, ToggleState, prelude::*};
 use util::ResultExt as _;
@@ -111,8 +112,8 @@ actions!(
     ]
 );
 
-pub fn init(store_logs: bool, cx: &mut App) {
-    let log_store = log_store::init(store_logs, cx);
+pub fn init(on_headless_host: bool, cx: &mut App) {
+    let log_store = log_store::init(on_headless_host, cx);
 
     log_store.update(cx, |_, cx| {
         Copilot::global(cx).map(|copilot| {
@@ -266,6 +267,19 @@ impl LspLogView {
             window.focus(&log_view.editor.focus_handle(cx));
         });
 
+        cx.on_release(|log_view, cx| {
+            log_view.log_store.update(cx, |log_store, cx| {
+                for (server_id, state) in &log_store.language_servers {
+                    if let Some(log_kind) = state.toggled_log_kind {
+                        if let Some(log_type) = log_type(log_kind) {
+                            send_toggle_log_message(state, *server_id, false, log_type, cx);
+                        }
+                    }
+                }
+            });
+        })
+        .detach();
+
         let mut lsp_log_view = Self {
             focus_handle,
             editor,
@@ -436,6 +450,12 @@ impl LspLogView {
             cx.notify();
         }
         self.editor.read(cx).focus_handle(cx).focus(window);
+        self.log_store.update(cx, |log_store, cx| {
+            let state = log_store.get_language_server_state(server_id)?;
+            state.toggled_log_kind = Some(LogKind::Logs);
+            send_toggle_log_message(state, server_id, true, LogType::Log, cx);
+            Some(())
+        });
     }
 
     fn update_log_level(
@@ -472,8 +492,8 @@ impl LspLogView {
     ) {
         let trace_level = self
             .log_store
-            .update(cx, |this, _| {
-                Some(this.get_language_server_state(server_id)?.trace_level)
+            .update(cx, |log_store, _| {
+                Some(log_store.get_language_server_state(server_id)?.trace_level)
             })
             .unwrap_or(TraceValue::Messages);
         let log_contents = self
@@ -487,6 +507,12 @@ impl LspLogView {
             let (editor, editor_subscriptions) = Self::editor_for_logs(log_contents, window, cx);
             self.editor = editor;
             self.editor_subscriptions = editor_subscriptions;
+            self.log_store.update(cx, |log_store, cx| {
+                let state = log_store.get_language_server_state(server_id)?;
+                state.toggled_log_kind = Some(LogKind::Trace);
+                send_toggle_log_message(state, server_id, true, LogType::Trace, cx);
+                Some(())
+            });
             cx.notify();
         }
         self.editor.read(cx).focus_handle(cx).focus(window);
@@ -551,24 +577,7 @@ impl LspLogView {
             }
 
             if let Some(server_state) = log_store.language_servers.get(&server_id) {
-                if let LanguageServerKind::Remote { project } = &server_state.kind {
-                    project
-                        .update(cx, |project, cx| {
-                            if let Some((client, project_id)) =
-                                project.lsp_store().read(cx).upstream_client()
-                            {
-                                client
-                                    .send(proto::ToggleLspLogs {
-                                        project_id,
-                                        log_type: proto::toggle_lsp_logs::LogType::Rpc as i32,
-                                        server_id: server_id.to_proto(),
-                                        enabled,
-                                    })
-                                    .log_err();
-                            }
-                        })
-                        .ok();
-                }
+                send_toggle_log_message(server_state, server_id, enabled, LogType::Rpc, cx);
             };
         });
         if !enabled && Some(server_id) == self.current_server_id {
@@ -644,6 +653,49 @@ impl LspLogView {
         self.editor_subscriptions = editor_subscriptions;
         cx.notify();
         self.editor.read(cx).focus_handle(cx).focus(window);
+        self.log_store.update(cx, |log_store, cx| {
+            let state = log_store.get_language_server_state(server_id)?;
+            if let Some(log_kind) = state.toggled_log_kind.take() {
+                if let Some(log_type) = log_type(log_kind) {
+                    send_toggle_log_message(state, server_id, false, log_type, cx);
+                }
+            };
+            Some(())
+        });
+    }
+}
+
+fn log_type(log_kind: LogKind) -> Option<LogType> {
+    match log_kind {
+        LogKind::Rpc => Some(LogType::Rpc),
+        LogKind::Trace => Some(LogType::Trace),
+        LogKind::Logs => Some(LogType::Log),
+        LogKind::ServerInfo => None,
+    }
+}
+
+fn send_toggle_log_message(
+    server_state: &log_store::LanguageServerState,
+    server_id: LanguageServerId,
+    enabled: bool,
+    log_type: LogType,
+    cx: &mut App,
+) {
+    if let LanguageServerKind::Remote { project } = &server_state.kind {
+        project
+            .update(cx, |project, cx| {
+                if let Some((client, project_id)) = project.lsp_store().read(cx).upstream_client() {
+                    client
+                        .send(proto::ToggleLspLogs {
+                            project_id,
+                            log_type: log_type as i32,
+                            server_id: server_id.to_proto(),
+                            enabled,
+                        })
+                        .log_err();
+                }
+            })
+            .ok();
     }
 }
 

crates/language_tools/src/lsp_log_view_tests.rs 🔗

@@ -53,7 +53,7 @@ async fn test_lsp_log_view(cx: &mut TestAppContext) {
         },
     );
 
-    let log_store = cx.new(|cx| LogStore::new(true, cx));
+    let log_store = cx.new(|cx| LogStore::new(false, cx));
     log_store.update(cx, |store, cx| store.add_project(&project, cx));
 
     let _rust_buffer = project

crates/languages/src/python.rs 🔗

@@ -34,7 +34,7 @@ use std::{
     path::{Path, PathBuf},
     sync::Arc,
 };
-use task::{TaskTemplate, TaskTemplates, VariableName};
+use task::{ShellKind, TaskTemplate, TaskTemplates, VariableName};
 use util::ResultExt;
 
 pub(crate) struct PyprojectTomlManifestProvider;
@@ -328,41 +328,35 @@ impl LspAdapter for PythonLspAdapter {
                     .unwrap_or_default();
 
             // If we have a detected toolchain, configure Pyright to use it
-            if let Some(toolchain) = toolchain {
+            if let Some(toolchain) = toolchain
+                && let Ok(env) = serde_json::from_value::<
+                    pet_core::python_environment::PythonEnvironment,
+                >(toolchain.as_json.clone())
+            {
                 if user_settings.is_null() {
                     user_settings = Value::Object(serde_json::Map::default());
                 }
                 let object = user_settings.as_object_mut().unwrap();
 
                 let interpreter_path = toolchain.path.to_string();
+                if let Some(venv_dir) = env.prefix {
+                    // Set venvPath and venv at the root level
+                    // This matches the format of a pyrightconfig.json file
+                    if let Some(parent) = venv_dir.parent() {
+                        // Use relative path if the venv is inside the workspace
+                        let venv_path = if parent == adapter.worktree_root_path() {
+                            ".".to_string()
+                        } else {
+                            parent.to_string_lossy().into_owned()
+                        };
+                        object.insert("venvPath".to_string(), Value::String(venv_path));
+                    }
 
-                // Detect if this is a virtual environment
-                if let Some(interpreter_dir) = Path::new(&interpreter_path).parent()
-                    && let Some(venv_dir) = interpreter_dir.parent()
-                {
-                    // Check if this looks like a virtual environment
-                    if venv_dir.join("pyvenv.cfg").exists()
-                        || venv_dir.join("bin/activate").exists()
-                        || venv_dir.join("Scripts/activate.bat").exists()
-                    {
-                        // Set venvPath and venv at the root level
-                        // This matches the format of a pyrightconfig.json file
-                        if let Some(parent) = venv_dir.parent() {
-                            // Use relative path if the venv is inside the workspace
-                            let venv_path = if parent == adapter.worktree_root_path() {
-                                ".".to_string()
-                            } else {
-                                parent.to_string_lossy().into_owned()
-                            };
-                            object.insert("venvPath".to_string(), Value::String(venv_path));
-                        }
-
-                        if let Some(venv_name) = venv_dir.file_name() {
-                            object.insert(
-                                "venv".to_owned(),
-                                Value::String(venv_name.to_string_lossy().into_owned()),
-                            );
-                        }
+                    if let Some(venv_name) = venv_dir.file_name() {
+                        object.insert(
+                            "venv".to_owned(),
+                            Value::String(venv_name.to_string_lossy().into_owned()),
+                        );
                     }
                 }
 
@@ -894,20 +888,66 @@ impl ToolchainLister for PythonToolchainProvider {
     fn term(&self) -> SharedString {
         self.term.clone()
     }
-    async fn activation_script(&self, toolchain: &Toolchain, fs: &dyn Fs) -> Option<String> {
-        let toolchain = serde_json::from_value::<pet_core::python_environment::PythonEnvironment>(
+    async fn activation_script(
+        &self,
+        toolchain: &Toolchain,
+        shell: ShellKind,
+        fs: &dyn Fs,
+    ) -> Vec<String> {
+        let Ok(toolchain) = serde_json::from_value::<pet_core::python_environment::PythonEnvironment>(
             toolchain.as_json.clone(),
-        )
-        .ok()?;
-        let mut activation_script = None;
-        if let Some(prefix) = &toolchain.prefix {
-            #[cfg(not(target_os = "windows"))]
-            let path = prefix.join(BINARY_DIR).join("activate");
-            #[cfg(target_os = "windows")]
-            let path = prefix.join(BINARY_DIR).join("activate.ps1");
-            if fs.is_file(&path).await {
-                activation_script = Some(format!(". {}", path.display()));
+        ) else {
+            return vec![];
+        };
+        let mut activation_script = vec![];
+
+        match toolchain.kind {
+            Some(PythonEnvironmentKind::Pixi) => {
+                let env = toolchain.name.as_deref().unwrap_or("default");
+                activation_script.push(format!("pixi shell -e {env}"))
+            }
+            Some(PythonEnvironmentKind::Venv | PythonEnvironmentKind::VirtualEnv) => {
+                if let Some(prefix) = &toolchain.prefix {
+                    let activate_keyword = match shell {
+                        ShellKind::Cmd => ".",
+                        ShellKind::Nushell => "overlay use",
+                        ShellKind::Powershell => ".",
+                        ShellKind::Fish => "source",
+                        ShellKind::Csh => "source",
+                        ShellKind::Posix => "source",
+                    };
+                    let activate_script_name = match shell {
+                        ShellKind::Posix => "activate",
+                        ShellKind::Csh => "activate.csh",
+                        ShellKind::Fish => "activate.fish",
+                        ShellKind::Nushell => "activate.nu",
+                        ShellKind::Powershell => "activate.ps1",
+                        ShellKind::Cmd => "activate.bat",
+                    };
+                    let path = prefix.join(BINARY_DIR).join(activate_script_name);
+                    if fs.is_file(&path).await {
+                        activation_script
+                            .push(format!("{activate_keyword} \"{}\"", path.display()));
+                    }
+                }
+            }
+            Some(PythonEnvironmentKind::Pyenv) => {
+                let Some(manager) = toolchain.manager else {
+                    return vec![];
+                };
+                let version = toolchain.version.as_deref().unwrap_or("system");
+                let pyenv = manager.executable;
+                let pyenv = pyenv.display();
+                activation_script.extend(match shell {
+                    ShellKind::Fish => Some(format!("\"{pyenv}\" shell - fish {version}")),
+                    ShellKind::Posix => Some(format!("\"{pyenv}\" shell - sh {version}")),
+                    ShellKind::Nushell => Some(format!("\"{pyenv}\" shell - nu {version}")),
+                    ShellKind::Powershell => None,
+                    ShellKind::Csh => None,
+                    ShellKind::Cmd => None,
+                })
             }
+            _ => {}
         }
         activation_script
     }
@@ -1063,10 +1103,10 @@ impl LspAdapter for PyLspAdapter {
                 arguments: vec![],
             })
         } else {
-            let venv = toolchain?;
-            let pylsp_path = Path::new(venv.path.as_ref()).parent()?.join("pylsp");
+            let toolchain = toolchain?;
+            let pylsp_path = Path::new(toolchain.path.as_ref()).parent()?.join("pylsp");
             pylsp_path.exists().then(|| LanguageServerBinary {
-                path: venv.path.to_string().into(),
+                path: toolchain.path.to_string().into(),
                 arguments: vec![pylsp_path.into()],
                 env: None,
             })
@@ -1530,41 +1570,35 @@ impl LspAdapter for BasedPyrightLspAdapter {
                     .unwrap_or_default();
 
             // If we have a detected toolchain, configure Pyright to use it
-            if let Some(toolchain) = toolchain {
+            if let Some(toolchain) = toolchain
+                && let Ok(env) = serde_json::from_value::<
+                    pet_core::python_environment::PythonEnvironment,
+                >(toolchain.as_json.clone())
+            {
                 if user_settings.is_null() {
                     user_settings = Value::Object(serde_json::Map::default());
                 }
                 let object = user_settings.as_object_mut().unwrap();
 
                 let interpreter_path = toolchain.path.to_string();
+                if let Some(venv_dir) = env.prefix {
+                    // Set venvPath and venv at the root level
+                    // This matches the format of a pyrightconfig.json file
+                    if let Some(parent) = venv_dir.parent() {
+                        // Use relative path if the venv is inside the workspace
+                        let venv_path = if parent == adapter.worktree_root_path() {
+                            ".".to_string()
+                        } else {
+                            parent.to_string_lossy().into_owned()
+                        };
+                        object.insert("venvPath".to_string(), Value::String(venv_path));
+                    }
 
-                // Detect if this is a virtual environment
-                if let Some(interpreter_dir) = Path::new(&interpreter_path).parent()
-                    && let Some(venv_dir) = interpreter_dir.parent()
-                {
-                    // Check if this looks like a virtual environment
-                    if venv_dir.join("pyvenv.cfg").exists()
-                        || venv_dir.join("bin/activate").exists()
-                        || venv_dir.join("Scripts/activate.bat").exists()
-                    {
-                        // Set venvPath and venv at the root level
-                        // This matches the format of a pyrightconfig.json file
-                        if let Some(parent) = venv_dir.parent() {
-                            // Use relative path if the venv is inside the workspace
-                            let venv_path = if parent == adapter.worktree_root_path() {
-                                ".".to_string()
-                            } else {
-                                parent.to_string_lossy().into_owned()
-                            };
-                            object.insert("venvPath".to_string(), Value::String(venv_path));
-                        }
-
-                        if let Some(venv_name) = venv_dir.file_name() {
-                            object.insert(
-                                "venv".to_owned(),
-                                Value::String(venv_name.to_string_lossy().into_owned()),
-                            );
-                        }
+                    if let Some(venv_name) = venv_dir.file_name() {
+                        object.insert(
+                            "venv".to_owned(),
+                            Value::String(venv_name.to_string_lossy().into_owned()),
+                        );
                     }
                 }
 

crates/languages/src/rust.rs 🔗

@@ -1058,7 +1058,7 @@ mod tests {
     #[gpui::test]
     async fn test_process_rust_diagnostics() {
         let mut params = lsp::PublishDiagnosticsParams {
-            uri: lsp::Url::from_file_path(path!("/a")).unwrap(),
+            uri: lsp::Uri::from_file_path(path!("/a")).unwrap(),
             version: None,
             diagnostics: vec![
                 // no newlines

crates/lsp/src/lsp.rs 🔗

@@ -100,8 +100,8 @@ pub struct LanguageServer {
     io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
     output_done_rx: Mutex<Option<barrier::Receiver>>,
     server: Arc<Mutex<Option<Child>>>,
-    workspace_folders: Option<Arc<Mutex<BTreeSet<Url>>>>,
-    root_uri: Url,
+    workspace_folders: Option<Arc<Mutex<BTreeSet<Uri>>>>,
+    root_uri: Uri,
 }
 
 #[derive(Clone, Debug, PartialEq, Eq, Hash)]
@@ -310,7 +310,7 @@ impl LanguageServer {
         binary: LanguageServerBinary,
         root_path: &Path,
         code_action_kinds: Option<Vec<CodeActionKind>>,
-        workspace_folders: Option<Arc<Mutex<BTreeSet<Url>>>>,
+        workspace_folders: Option<Arc<Mutex<BTreeSet<Uri>>>>,
         cx: &mut AsyncApp,
     ) -> Result<Self> {
         let working_dir = if root_path.is_dir() {
@@ -318,7 +318,7 @@ impl LanguageServer {
         } else {
             root_path.parent().unwrap_or_else(|| Path::new("/"))
         };
-        let root_uri = Url::from_file_path(&working_dir)
+        let root_uri = Uri::from_file_path(&working_dir)
             .map_err(|()| anyhow!("{working_dir:?} is not a valid URI"))?;
 
         log::info!(
@@ -384,8 +384,8 @@ impl LanguageServer {
         server: Option<Child>,
         code_action_kinds: Option<Vec<CodeActionKind>>,
         binary: LanguageServerBinary,
-        root_uri: Url,
-        workspace_folders: Option<Arc<Mutex<BTreeSet<Url>>>>,
+        root_uri: Uri,
+        workspace_folders: Option<Arc<Mutex<BTreeSet<Uri>>>>,
         cx: &mut AsyncApp,
         on_unhandled_notification: F,
     ) -> Self
@@ -1350,7 +1350,7 @@ impl LanguageServer {
     }
 
     /// Add new workspace folder to the list.
-    pub fn add_workspace_folder(&self, uri: Url) {
+    pub fn add_workspace_folder(&self, uri: Uri) {
         if self
             .capabilities()
             .workspace
@@ -1385,7 +1385,7 @@ impl LanguageServer {
     }
 
     /// Remove existing workspace folder from the list.
-    pub fn remove_workspace_folder(&self, uri: Url) {
+    pub fn remove_workspace_folder(&self, uri: Uri) {
         if self
             .capabilities()
             .workspace
@@ -1417,7 +1417,7 @@ impl LanguageServer {
             self.notify::<DidChangeWorkspaceFolders>(&params).ok();
         }
     }
-    pub fn set_workspace_folders(&self, folders: BTreeSet<Url>) {
+    pub fn set_workspace_folders(&self, folders: BTreeSet<Uri>) {
         let Some(workspace_folders) = self.workspace_folders.as_ref() else {
             return;
         };
@@ -1450,7 +1450,7 @@ impl LanguageServer {
         }
     }
 
-    pub fn workspace_folders(&self) -> BTreeSet<Url> {
+    pub fn workspace_folders(&self) -> BTreeSet<Uri> {
         self.workspace_folders.as_ref().map_or_else(
             || BTreeSet::from_iter([self.root_uri.clone()]),
             |folders| folders.lock().clone(),
@@ -1459,7 +1459,7 @@ impl LanguageServer {
 
     pub fn register_buffer(
         &self,
-        uri: Url,
+        uri: Uri,
         language_id: String,
         version: i32,
         initial_text: String,
@@ -1470,7 +1470,7 @@ impl LanguageServer {
         .ok();
     }
 
-    pub fn unregister_buffer(&self, uri: Url) {
+    pub fn unregister_buffer(&self, uri: Uri) {
         self.notify::<notification::DidCloseTextDocument>(&DidCloseTextDocumentParams {
             text_document: TextDocumentIdentifier::new(uri),
         })
@@ -1587,7 +1587,7 @@ impl FakeLanguageServer {
         let server_name = LanguageServerName(name.clone().into());
         let process_name = Arc::from(name.as_str());
         let root = Self::root_path();
-        let workspace_folders: Arc<Mutex<BTreeSet<Url>>> = Default::default();
+        let workspace_folders: Arc<Mutex<BTreeSet<Uri>>> = Default::default();
         let mut server = LanguageServer::new_internal(
             server_id,
             server_name.clone(),
@@ -1657,13 +1657,13 @@ impl FakeLanguageServer {
         (server, fake)
     }
     #[cfg(target_os = "windows")]
-    fn root_path() -> Url {
-        Url::from_file_path("C:/").unwrap()
+    fn root_path() -> Uri {
+        Uri::from_file_path("C:/").unwrap()
     }
 
     #[cfg(not(target_os = "windows"))]
-    fn root_path() -> Url {
-        Url::from_file_path("/").unwrap()
+    fn root_path() -> Uri {
+        Uri::from_file_path("/").unwrap()
     }
 }
 
@@ -1865,7 +1865,7 @@ mod tests {
         server
             .notify::<notification::DidOpenTextDocument>(&DidOpenTextDocumentParams {
                 text_document: TextDocumentItem::new(
-                    Url::from_str("file://a/b").unwrap(),
+                    Uri::from_str("file://a/b").unwrap(),
                     "rust".to_string(),
                     0,
                     "".to_string(),
@@ -1886,7 +1886,7 @@ mod tests {
             message: "ok".to_string(),
         });
         fake.notify::<notification::PublishDiagnostics>(&PublishDiagnosticsParams {
-            uri: Url::from_str("file://b/c").unwrap(),
+            uri: Uri::from_str("file://b/c").unwrap(),
             version: Some(5),
             diagnostics: vec![],
         });

crates/markdown_preview/Cargo.toml 🔗

@@ -19,19 +19,21 @@ anyhow.workspace = true
 async-recursion.workspace = true
 collections.workspace = true
 editor.workspace = true
+fs.workspace = true
 gpui.workspace = true
+html5ever.workspace = true
 language.workspace = true
 linkify.workspace = true
 log.workspace = true
+markup5ever_rcdom.workspace = true
 pretty_assertions.workspace = true
 pulldown-cmark.workspace = true
 settings.workspace = true
 theme.workspace = true
 ui.workspace = true
 util.workspace = true
-workspace.workspace = true
 workspace-hack.workspace = true
-fs.workspace = true
+workspace.workspace = true
 
 [dev-dependencies]
 editor = { workspace = true, features = ["test-support"] }

crates/markdown_preview/src/markdown_elements.rs 🔗

@@ -1,5 +1,6 @@
 use gpui::{
-    FontStyle, FontWeight, HighlightStyle, SharedString, StrikethroughStyle, UnderlineStyle, px,
+    DefiniteLength, FontStyle, FontWeight, HighlightStyle, SharedString, StrikethroughStyle,
+    UnderlineStyle, px,
 };
 use language::HighlightId;
 use std::{fmt::Display, ops::Range, path::PathBuf};
@@ -15,6 +16,7 @@ pub enum ParsedMarkdownElement {
     /// A paragraph of text and other inline elements.
     Paragraph(MarkdownParagraph),
     HorizontalRule(Range<usize>),
+    Image(Image),
 }
 
 impl ParsedMarkdownElement {
@@ -30,6 +32,7 @@ impl ParsedMarkdownElement {
                 MarkdownParagraphChunk::Image(image) => image.source_range.clone(),
             },
             Self::HorizontalRule(range) => range.clone(),
+            Self::Image(image) => image.source_range.clone(),
         })
     }
 
@@ -290,6 +293,8 @@ pub struct Image {
     pub link: Link,
     pub source_range: Range<usize>,
     pub alt_text: Option<SharedString>,
+    pub width: Option<DefiniteLength>,
+    pub height: Option<DefiniteLength>,
 }
 
 impl Image {
@@ -303,10 +308,20 @@ impl Image {
             source_range,
             link,
             alt_text: None,
+            width: None,
+            height: None,
         })
     }
 
     pub fn set_alt_text(&mut self, alt_text: SharedString) {
         self.alt_text = Some(alt_text);
     }
+
+    pub fn set_width(&mut self, width: DefiniteLength) {
+        self.width = Some(width);
+    }
+
+    pub fn set_height(&mut self, height: DefiniteLength) {
+        self.height = Some(height);
+    }
 }

crates/markdown_preview/src/markdown_parser.rs 🔗

@@ -1,10 +1,12 @@
 use crate::markdown_elements::*;
 use async_recursion::async_recursion;
 use collections::FxHashMap;
-use gpui::FontWeight;
+use gpui::{DefiniteLength, FontWeight, px, relative};
+use html5ever::{ParseOpts, local_name, parse_document, tendril::TendrilSink};
 use language::LanguageRegistry;
+use markup5ever_rcdom::RcDom;
 use pulldown_cmark::{Alignment, Event, Options, Parser, Tag, TagEnd};
-use std::{ops::Range, path::PathBuf, sync::Arc, vec};
+use std::{cell::RefCell, collections::HashMap, ops::Range, path::PathBuf, rc::Rc, sync::Arc, vec};
 
 pub async fn parse_markdown(
     markdown_input: &str,
@@ -172,9 +174,14 @@ impl<'a> MarkdownParser<'a> {
 
                     self.cursor += 1;
 
-                    let code_block = self.parse_code_block(language).await;
+                    let code_block = self.parse_code_block(language).await?;
                     Some(vec![ParsedMarkdownElement::CodeBlock(code_block)])
                 }
+                Tag::HtmlBlock => {
+                    self.cursor += 1;
+
+                    Some(self.parse_html_block().await)
+                }
                 _ => None,
             },
             Event::Rule => {
@@ -378,7 +385,7 @@ impl<'a> MarkdownParser<'a> {
                     TagEnd::Image => {
                         if let Some(mut image) = image.take() {
                             if !text.is_empty() {
-                                image.alt_text = Some(std::mem::take(&mut text).into());
+                                image.set_alt_text(std::mem::take(&mut text).into());
                             }
                             markdown_text_like.push(MarkdownParagraphChunk::Image(image));
                         }
@@ -695,13 +702,22 @@ impl<'a> MarkdownParser<'a> {
         }
     }
 
-    async fn parse_code_block(&mut self, language: Option<String>) -> ParsedMarkdownCodeBlock {
-        let (_event, source_range) = self.previous().unwrap();
+    async fn parse_code_block(
+        &mut self,
+        language: Option<String>,
+    ) -> Option<ParsedMarkdownCodeBlock> {
+        let Some((_event, source_range)) = self.previous() else {
+            return None;
+        };
+
         let source_range = source_range.clone();
         let mut code = String::new();
 
         while !self.eof() {
-            let (current, _source_range) = self.current().unwrap();
+            let Some((current, _source_range)) = self.current() else {
+                break;
+            };
+
             match current {
                 Event::Text(text) => {
                     code.push_str(text);
@@ -734,23 +750,190 @@ impl<'a> MarkdownParser<'a> {
             None
         };
 
-        ParsedMarkdownCodeBlock {
+        Some(ParsedMarkdownCodeBlock {
             source_range,
             contents: code.into(),
             language,
             highlights,
+        })
+    }
+
+    async fn parse_html_block(&mut self) -> Vec<ParsedMarkdownElement> {
+        let mut elements = Vec::new();
+        let Some((_event, _source_range)) = self.previous() else {
+            return elements;
+        };
+
+        while !self.eof() {
+            let Some((current, source_range)) = self.current() else {
+                break;
+            };
+            let source_range = source_range.clone();
+            match current {
+                Event::Html(html) => {
+                    let mut cursor = std::io::Cursor::new(html.as_bytes());
+                    let Some(dom) = parse_document(RcDom::default(), ParseOpts::default())
+                        .from_utf8()
+                        .read_from(&mut cursor)
+                        .ok()
+                    else {
+                        self.cursor += 1;
+                        continue;
+                    };
+
+                    self.cursor += 1;
+
+                    self.parse_html_node(source_range, &dom.document, &mut elements);
+                }
+                Event::End(TagEnd::CodeBlock) => {
+                    self.cursor += 1;
+                    break;
+                }
+                _ => {
+                    break;
+                }
+            }
+        }
+
+        elements
+    }
+
+    fn parse_html_node(
+        &self,
+        source_range: Range<usize>,
+        node: &Rc<markup5ever_rcdom::Node>,
+        elements: &mut Vec<ParsedMarkdownElement>,
+    ) {
+        match &node.data {
+            markup5ever_rcdom::NodeData::Document => {
+                self.consume_children(source_range, node, elements);
+            }
+            markup5ever_rcdom::NodeData::Doctype { .. } => {}
+            markup5ever_rcdom::NodeData::Text { contents } => {
+                elements.push(ParsedMarkdownElement::Paragraph(vec![
+                    MarkdownParagraphChunk::Text(ParsedMarkdownText {
+                        source_range,
+                        contents: contents.borrow().to_string(),
+                        highlights: Vec::default(),
+                        region_ranges: Vec::default(),
+                        regions: Vec::default(),
+                    }),
+                ]));
+            }
+            markup5ever_rcdom::NodeData::Comment { .. } => {}
+            markup5ever_rcdom::NodeData::Element { name, attrs, .. } => {
+                if local_name!("img") == name.local {
+                    if let Some(image) = self.extract_image(source_range, attrs) {
+                        elements.push(ParsedMarkdownElement::Image(image));
+                    }
+                } else {
+                    self.consume_children(source_range, node, elements);
+                }
+            }
+            markup5ever_rcdom::NodeData::ProcessingInstruction { .. } => {}
+        }
+    }
+
+    fn consume_children(
+        &self,
+        source_range: Range<usize>,
+        node: &Rc<markup5ever_rcdom::Node>,
+        elements: &mut Vec<ParsedMarkdownElement>,
+    ) {
+        for node in node.children.borrow().iter() {
+            self.parse_html_node(source_range.clone(), node, elements);
+        }
+    }
+
+    fn attr_value(
+        attrs: &RefCell<Vec<html5ever::Attribute>>,
+        name: html5ever::LocalName,
+    ) -> Option<String> {
+        attrs.borrow().iter().find_map(|attr| {
+            if attr.name.local == name {
+                Some(attr.value.to_string())
+            } else {
+                None
+            }
+        })
+    }
+
+    fn extract_styles_from_attributes(
+        attrs: &RefCell<Vec<html5ever::Attribute>>,
+    ) -> HashMap<String, String> {
+        let mut styles = HashMap::new();
+
+        if let Some(style) = Self::attr_value(attrs, local_name!("style")) {
+            for decl in style.split(';') {
+                let mut parts = decl.splitn(2, ':');
+                if let Some((key, value)) = parts.next().zip(parts.next()) {
+                    styles.insert(
+                        key.trim().to_lowercase().to_string(),
+                        value.trim().to_string(),
+                    );
+                }
+            }
+        }
+
+        styles
+    }
+
+    fn extract_image(
+        &self,
+        source_range: Range<usize>,
+        attrs: &RefCell<Vec<html5ever::Attribute>>,
+    ) -> Option<Image> {
+        let src = Self::attr_value(attrs, local_name!("src"))?;
+
+        let mut image = Image::identify(src, source_range, self.file_location_directory.clone())?;
+
+        if let Some(alt) = Self::attr_value(attrs, local_name!("alt")) {
+            image.set_alt_text(alt.into());
+        }
+
+        let styles = Self::extract_styles_from_attributes(attrs);
+
+        if let Some(width) = Self::attr_value(attrs, local_name!("width"))
+            .or_else(|| styles.get("width").cloned())
+            .and_then(|width| Self::parse_length(&width))
+        {
+            image.set_width(width);
+        }
+
+        if let Some(height) = Self::attr_value(attrs, local_name!("height"))
+            .or_else(|| styles.get("height").cloned())
+            .and_then(|height| Self::parse_length(&height))
+        {
+            image.set_height(height);
+        }
+
+        Some(image)
+    }
+
+    /// Parses the width/height attribute value of an html element (e.g. img element)
+    fn parse_length(value: &str) -> Option<DefiniteLength> {
+        if value.ends_with("%") {
+            value
+                .trim_end_matches("%")
+                .parse::<f32>()
+                .ok()
+                .map(|value| relative(value / 100.))
+        } else {
+            value
+                .trim_end_matches("px")
+                .parse()
+                .ok()
+                .map(|value| px(value).into())
         }
     }
 }
 
 #[cfg(test)]
 mod tests {
-    use core::panic;
-
     use super::*;
-
     use ParsedMarkdownListItemType::*;
-    use gpui::BackgroundExecutor;
+    use core::panic;
+    use gpui::{AbsoluteLength, BackgroundExecutor, DefiniteLength};
     use language::{
         HighlightId, Language, LanguageConfig, LanguageMatcher, LanguageRegistry, tree_sitter_rust,
     };
@@ -925,6 +1108,8 @@ mod tests {
                     url: "https://blog.logrocket.com/wp-content/uploads/2024/04/exploring-zed-open-source-code-editor-rust-2.png".to_string(),
                 },
                 alt_text: Some("test".into()),
+                height: None,
+                width: None,
             },)
         );
     }
@@ -946,6 +1131,8 @@ mod tests {
                     url: "http://example.com/foo.png".to_string(),
                 },
                 alt_text: None,
+                height: None,
+                width: None,
             },)
         );
     }
@@ -965,6 +1152,8 @@ mod tests {
                     url: "http://example.com/foo.png".to_string(),
                 },
                 alt_text: Some("foo bar baz".into()),
+                height: None,
+                width: None,
             }),],
         );
     }
@@ -990,6 +1179,8 @@ mod tests {
                         url: "http://example.com/foo.png".to_string(),
                     },
                     alt_text: Some("foo".into()),
+                    height: None,
+                    width: None,
                 }),
                 MarkdownParagraphChunk::Text(ParsedMarkdownText {
                     source_range: 0..81,
@@ -1004,11 +1195,168 @@ mod tests {
                         url: "http://example.com/bar.png".to_string(),
                     },
                     alt_text: Some("bar".into()),
+                    height: None,
+                    width: None,
                 })
             ]
         );
     }
 
+    #[test]
+    fn test_parse_length() {
+        // Test percentage values
+        assert_eq!(
+            MarkdownParser::parse_length("50%"),
+            Some(DefiniteLength::Fraction(0.5))
+        );
+        assert_eq!(
+            MarkdownParser::parse_length("100%"),
+            Some(DefiniteLength::Fraction(1.0))
+        );
+        assert_eq!(
+            MarkdownParser::parse_length("25%"),
+            Some(DefiniteLength::Fraction(0.25))
+        );
+        assert_eq!(
+            MarkdownParser::parse_length("0%"),
+            Some(DefiniteLength::Fraction(0.0))
+        );
+
+        // Test pixel values
+        assert_eq!(
+            MarkdownParser::parse_length("100px"),
+            Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.0))))
+        );
+        assert_eq!(
+            MarkdownParser::parse_length("50px"),
+            Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(50.0))))
+        );
+        assert_eq!(
+            MarkdownParser::parse_length("0px"),
+            Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(0.0))))
+        );
+
+        // Test values without units (should be treated as pixels)
+        assert_eq!(
+            MarkdownParser::parse_length("100"),
+            Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.0))))
+        );
+        assert_eq!(
+            MarkdownParser::parse_length("42"),
+            Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(42.0))))
+        );
+
+        // Test invalid values
+        assert_eq!(MarkdownParser::parse_length("invalid"), None);
+        assert_eq!(MarkdownParser::parse_length("px"), None);
+        assert_eq!(MarkdownParser::parse_length("%"), None);
+        assert_eq!(MarkdownParser::parse_length(""), None);
+        assert_eq!(MarkdownParser::parse_length("abc%"), None);
+        assert_eq!(MarkdownParser::parse_length("abcpx"), None);
+
+        // Test decimal values
+        assert_eq!(
+            MarkdownParser::parse_length("50.5%"),
+            Some(DefiniteLength::Fraction(0.505))
+        );
+        assert_eq!(
+            MarkdownParser::parse_length("100.25px"),
+            Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.25))))
+        );
+        assert_eq!(
+            MarkdownParser::parse_length("42.0"),
+            Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(42.0))))
+        );
+    }
+
+    #[gpui::test]
+    async fn test_html_image_tag() {
+        let parsed = parse("<img src=\"http://example.com/foo.png\" />").await;
+
+        let ParsedMarkdownElement::Image(image) = &parsed.children[0] else {
+            panic!("Expected a image element");
+        };
+        assert_eq!(
+            image.clone(),
+            Image {
+                source_range: 0..40,
+                link: Link::Web {
+                    url: "http://example.com/foo.png".to_string(),
+                },
+                alt_text: None,
+                height: None,
+                width: None,
+            },
+        );
+    }
+
+    #[gpui::test]
+    async fn test_html_image_tag_with_alt_text() {
+        let parsed = parse("<img src=\"http://example.com/foo.png\" alt=\"Foo\" />").await;
+
+        let ParsedMarkdownElement::Image(image) = &parsed.children[0] else {
+            panic!("Expected a image element");
+        };
+        assert_eq!(
+            image.clone(),
+            Image {
+                source_range: 0..50,
+                link: Link::Web {
+                    url: "http://example.com/foo.png".to_string(),
+                },
+                alt_text: Some("Foo".into()),
+                height: None,
+                width: None,
+            },
+        );
+    }
+
+    #[gpui::test]
+    async fn test_html_image_tag_with_height_and_width() {
+        let parsed =
+            parse("<img src=\"http://example.com/foo.png\" height=\"100\" width=\"200\" />").await;
+
+        let ParsedMarkdownElement::Image(image) = &parsed.children[0] else {
+            panic!("Expected a image element");
+        };
+        assert_eq!(
+            image.clone(),
+            Image {
+                source_range: 0..65,
+                link: Link::Web {
+                    url: "http://example.com/foo.png".to_string(),
+                },
+                alt_text: None,
+                height: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.)))),
+                width: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(200.)))),
+            },
+        );
+    }
+
+    #[gpui::test]
+    async fn test_html_image_style_tag_with_height_and_width() {
+        let parsed = parse(
+            "<img src=\"http://example.com/foo.png\" style=\"height:100px; width:200px;\" />",
+        )
+        .await;
+
+        let ParsedMarkdownElement::Image(image) = &parsed.children[0] else {
+            panic!("Expected a image element");
+        };
+        assert_eq!(
+            image.clone(),
+            Image {
+                source_range: 0..75,
+                link: Link::Web {
+                    url: "http://example.com/foo.png".to_string(),
+                },
+                alt_text: None,
+                height: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(100.)))),
+                width: Some(DefiniteLength::Absolute(AbsoluteLength::Pixels(px(200.)))),
+            },
+        );
+    }
+
     #[gpui::test]
     async fn test_header_only_table() {
         let markdown = "\

crates/markdown_preview/src/markdown_renderer.rs 🔗

@@ -1,5 +1,5 @@
 use crate::markdown_elements::{
-    HeadingLevel, Link, MarkdownParagraph, MarkdownParagraphChunk, ParsedMarkdown,
+    HeadingLevel, Image, Link, MarkdownParagraph, MarkdownParagraphChunk, ParsedMarkdown,
     ParsedMarkdownBlockQuote, ParsedMarkdownCodeBlock, ParsedMarkdownElement,
     ParsedMarkdownHeading, ParsedMarkdownListItem, ParsedMarkdownListItemType, ParsedMarkdownTable,
     ParsedMarkdownTableAlignment, ParsedMarkdownTableRow,
@@ -164,6 +164,7 @@ pub fn render_markdown_block(block: &ParsedMarkdownElement, cx: &mut RenderConte
         BlockQuote(block_quote) => render_markdown_block_quote(block_quote, cx),
         CodeBlock(code_block) => render_markdown_code_block(code_block, cx),
         HorizontalRule(_) => render_markdown_rule(cx),
+        Image(image) => render_markdown_image(image, cx),
     }
 }
 
@@ -722,65 +723,7 @@ fn render_markdown_text(parsed_new: &MarkdownParagraph, cx: &mut RenderContext)
             }
 
             MarkdownParagraphChunk::Image(image) => {
-                let image_resource = match image.link.clone() {
-                    Link::Web { url } => Resource::Uri(url.into()),
-                    Link::Path { path, .. } => Resource::Path(Arc::from(path)),
-                };
-
-                let element_id = cx.next_id(&image.source_range);
-
-                let image_element = div()
-                    .id(element_id)
-                    .cursor_pointer()
-                    .child(
-                        img(ImageSource::Resource(image_resource))
-                            .max_w_full()
-                            .with_fallback({
-                                let alt_text = image.alt_text.clone();
-                                move || div().children(alt_text.clone()).into_any_element()
-                            }),
-                    )
-                    .tooltip({
-                        let link = image.link.clone();
-                        move |_, cx| {
-                            InteractiveMarkdownElementTooltip::new(
-                                Some(link.to_string()),
-                                "open image",
-                                cx,
-                            )
-                            .into()
-                        }
-                    })
-                    .on_click({
-                        let workspace = workspace_clone.clone();
-                        let link = image.link.clone();
-                        move |_, window, cx| {
-                            if window.modifiers().secondary() {
-                                match &link {
-                                    Link::Web { url } => cx.open_url(url),
-                                    Link::Path { path, .. } => {
-                                        if let Some(workspace) = &workspace {
-                                            _ = workspace.update(cx, |workspace, cx| {
-                                                workspace
-                                                    .open_abs_path(
-                                                        path.clone(),
-                                                        OpenOptions {
-                                                            visible: Some(OpenVisible::None),
-                                                            ..Default::default()
-                                                        },
-                                                        window,
-                                                        cx,
-                                                    )
-                                                    .detach();
-                                            });
-                                        }
-                                    }
-                                }
-                            }
-                        }
-                    })
-                    .into_any();
-                any_element.push(image_element);
+                any_element.push(render_markdown_image(image, cx));
             }
         }
     }
@@ -793,18 +736,86 @@ fn render_markdown_rule(cx: &mut RenderContext) -> AnyElement {
     div().py(cx.scaled_rems(0.5)).child(rule).into_any()
 }
 
+fn render_markdown_image(image: &Image, cx: &mut RenderContext) -> AnyElement {
+    let image_resource = match image.link.clone() {
+        Link::Web { url } => Resource::Uri(url.into()),
+        Link::Path { path, .. } => Resource::Path(Arc::from(path)),
+    };
+
+    let element_id = cx.next_id(&image.source_range);
+    let workspace = cx.workspace.clone();
+
+    div()
+        .id(element_id)
+        .cursor_pointer()
+        .child(
+            img(ImageSource::Resource(image_resource))
+                .max_w_full()
+                .with_fallback({
+                    let alt_text = image.alt_text.clone();
+                    move || div().children(alt_text.clone()).into_any_element()
+                })
+                .when_some(image.height, |this, height| this.h(height))
+                .when_some(image.width, |this, width| this.w(width)),
+        )
+        .tooltip({
+            let link = image.link.clone();
+            let alt_text = image.alt_text.clone();
+            move |_, cx| {
+                InteractiveMarkdownElementTooltip::new(
+                    Some(alt_text.clone().unwrap_or(link.to_string().into())),
+                    "open image",
+                    cx,
+                )
+                .into()
+            }
+        })
+        .on_click({
+            let link = image.link.clone();
+            move |_, window, cx| {
+                if window.modifiers().secondary() {
+                    match &link {
+                        Link::Web { url } => cx.open_url(url),
+                        Link::Path { path, .. } => {
+                            if let Some(workspace) = &workspace {
+                                _ = workspace.update(cx, |workspace, cx| {
+                                    workspace
+                                        .open_abs_path(
+                                            path.clone(),
+                                            OpenOptions {
+                                                visible: Some(OpenVisible::None),
+                                                ..Default::default()
+                                            },
+                                            window,
+                                            cx,
+                                        )
+                                        .detach();
+                                });
+                            }
+                        }
+                    }
+                }
+            }
+        })
+        .into_any()
+}
+
 struct InteractiveMarkdownElementTooltip {
     tooltip_text: Option<SharedString>,
-    action_text: String,
+    action_text: SharedString,
 }
 
 impl InteractiveMarkdownElementTooltip {
-    pub fn new(tooltip_text: Option<String>, action_text: &str, cx: &mut App) -> Entity<Self> {
+    pub fn new(
+        tooltip_text: Option<SharedString>,
+        action_text: impl Into<SharedString>,
+        cx: &mut App,
+    ) -> Entity<Self> {
         let tooltip_text = tooltip_text.map(|t| util::truncate_and_trailoff(&t, 50).into());
 
         cx.new(|_cx| Self {
             tooltip_text,
-            action_text: action_text.to_string(),
+            action_text: action_text.into(),
         })
     }
 }

crates/outline_panel/src/outline_panel_settings.rs 🔗

@@ -2,7 +2,7 @@ use editor::ShowScrollbar;
 use gpui::Pixels;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
 #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Copy, PartialEq)]
 #[serde(rename_all = "snake_case")]
@@ -18,7 +18,7 @@ pub enum ShowIndentGuides {
     Never,
 }
 
-#[derive(Deserialize, Debug, Clone, Copy, PartialEq)]
+#[derive(Deserialize, Debug, Clone, Copy, PartialEq, SettingsUi)]
 pub struct OutlinePanelSettings {
     pub button: bool,
     pub default_width: Pixels,

crates/paths/src/paths.rs 🔗

@@ -33,6 +33,11 @@ pub fn remote_server_dir_relative() -> &'static Path {
     Path::new(".zed_server")
 }
 
+/// Returns the relative path to the zed_wsl_server directory on the wsl host.
+pub fn remote_wsl_server_dir_relative() -> &'static Path {
+    Path::new(".zed_wsl_server")
+}
+
 /// Sets a custom directory for all user data, overriding the default data directory.
 /// This function must be called before any other path operations that depend on the data directory.
 /// The directory's path will be canonicalized to an absolute path by a blocking FS operation.

crates/project/src/buffer_store.rs 🔗

@@ -20,7 +20,7 @@ use language::{
     },
 };
 use rpc::{
-    AnyProtoClient, ErrorExt as _, TypedEnvelope,
+    AnyProtoClient, ErrorCode, ErrorExt as _, TypedEnvelope,
     proto::{self, ToProto},
 };
 use smol::channel::Receiver;
@@ -837,7 +837,15 @@ impl BufferStore {
             }
         };
 
-        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
+        cx.background_spawn(async move {
+            task.await.map_err(|e| {
+                if e.error_code() != ErrorCode::Internal {
+                    anyhow!(e.error_code())
+                } else {
+                    anyhow!("{e}")
+                }
+            })
+        })
     }
 
     pub fn create_buffer(&mut self, cx: &mut Context<Self>) -> Task<Result<Entity<Buffer>>> {
@@ -944,7 +952,15 @@ impl BufferStore {
     ) -> impl Iterator<Item = (&ProjectPath, impl Future<Output = Result<Entity<Buffer>>>)> {
         self.loading_buffers.iter().map(|(path, task)| {
             let task = task.clone();
-            (path, async move { task.await.map_err(|e| anyhow!("{e}")) })
+            (path, async move {
+                task.await.map_err(|e| {
+                    if e.error_code() != ErrorCode::Internal {
+                        anyhow!(e.error_code())
+                    } else {
+                        anyhow!("{e}")
+                    }
+                })
+            })
         })
     }
 

crates/project/src/debugger/dap_store.rs 🔗

@@ -258,8 +258,14 @@ impl DapStore {
                     let connection;
                     if let Some(c) = binary.connection {
                         let host = Ipv4Addr::LOCALHOST;
-                        let port = dap::transport::TcpTransport::unused_port(host).await?;
-                        port_forwarding = Some((port, c.host.to_string(), c.port));
+                        let port;
+                        if remote.read_with(cx, |remote, _cx| remote.shares_network_interface())? {
+                            port = c.port;
+                            port_forwarding = None;
+                        } else {
+                            port = dap::transport::TcpTransport::unused_port(host).await?;
+                            port_forwarding = Some((port, c.host.to_string(), c.port));
+                        }
                         connection = Some(TcpArguments {
                             port,
                             host,
@@ -276,7 +282,6 @@ impl DapStore {
                             &binary.arguments,
                             &binary.envs,
                             binary.cwd.map(|path| path.display().to_string()),
-                            None,
                             port_forwarding,
                         )
                     })??;

crates/project/src/lsp_command.rs 🔗

@@ -50,8 +50,8 @@ pub fn lsp_formatting_options(settings: &LanguageSettings) -> lsp::FormattingOpt
     }
 }
 
-pub fn file_path_to_lsp_url(path: &Path) -> Result<lsp::Url> {
-    match lsp::Url::from_file_path(path) {
+pub fn file_path_to_lsp_url(path: &Path) -> Result<lsp::Uri> {
+    match lsp::Uri::from_file_path(path) {
         Ok(url) => Ok(url),
         Err(()) => anyhow::bail!("Invalid file path provided to LSP request: {path:?}"),
     }
@@ -3135,7 +3135,7 @@ impl InlayHints {
                                     Some(((uri, range), server_id)) => Some((
                                         LanguageServerId(server_id as usize),
                                         lsp::Location {
-                                            uri: lsp::Url::parse(&uri)
+                                            uri: lsp::Uri::from_str(&uri)
                                                 .context("invalid uri in hint part {part:?}")?,
                                             range: lsp::Range::new(
                                                 point_to_lsp(PointUtf16::new(
@@ -3733,7 +3733,7 @@ impl GetDocumentDiagnostics {
             .filter_map(|diagnostics| {
                 Some(LspPullDiagnostics::Response {
                     server_id: LanguageServerId::from_proto(diagnostics.server_id),
-                    uri: lsp::Url::from_str(diagnostics.uri.as_str()).log_err()?,
+                    uri: lsp::Uri::from_str(diagnostics.uri.as_str()).log_err()?,
                     diagnostics: if diagnostics.changed {
                         PulledDiagnostics::Unchanged {
                             result_id: diagnostics.result_id?,
@@ -3788,7 +3788,7 @@ impl GetDocumentDiagnostics {
                             start: point_to_lsp(PointUtf16::new(start.row, start.column)),
                             end: point_to_lsp(PointUtf16::new(end.row, end.column)),
                         },
-                        uri: lsp::Url::parse(&info.location_url.unwrap()).unwrap(),
+                        uri: lsp::Uri::from_str(&info.location_url.unwrap()).unwrap(),
                     },
                     message: info.message,
                 }
@@ -3821,7 +3821,7 @@ impl GetDocumentDiagnostics {
             code_description: diagnostic
                 .code_description
                 .map(|code_description| CodeDescription {
-                    href: Some(lsp::Url::parse(&code_description).unwrap()),
+                    href: Some(lsp::Uri::from_str(&code_description).unwrap()),
                 }),
             related_information: Some(related_information),
             tags: Some(tags),
@@ -3961,7 +3961,7 @@ pub struct WorkspaceLspPullDiagnostics {
 }
 
 fn process_full_workspace_diagnostics_report(
-    diagnostics: &mut HashMap<lsp::Url, WorkspaceLspPullDiagnostics>,
+    diagnostics: &mut HashMap<lsp::Uri, WorkspaceLspPullDiagnostics>,
     server_id: LanguageServerId,
     report: lsp::WorkspaceFullDocumentDiagnosticReport,
 ) {
@@ -3984,7 +3984,7 @@ fn process_full_workspace_diagnostics_report(
 }
 
 fn process_unchanged_workspace_diagnostics_report(
-    diagnostics: &mut HashMap<lsp::Url, WorkspaceLspPullDiagnostics>,
+    diagnostics: &mut HashMap<lsp::Uri, WorkspaceLspPullDiagnostics>,
     server_id: LanguageServerId,
     report: lsp::WorkspaceUnchangedDocumentDiagnosticReport,
 ) {
@@ -4343,9 +4343,9 @@ impl LspCommand for GetDocumentColor {
 }
 
 fn process_related_documents(
-    diagnostics: &mut HashMap<lsp::Url, LspPullDiagnostics>,
+    diagnostics: &mut HashMap<lsp::Uri, LspPullDiagnostics>,
     server_id: LanguageServerId,
-    documents: impl IntoIterator<Item = (lsp::Url, lsp::DocumentDiagnosticReportKind)>,
+    documents: impl IntoIterator<Item = (lsp::Uri, lsp::DocumentDiagnosticReportKind)>,
 ) {
     for (url, report_kind) in documents {
         match report_kind {
@@ -4360,9 +4360,9 @@ fn process_related_documents(
 }
 
 fn process_unchanged_diagnostics_report(
-    diagnostics: &mut HashMap<lsp::Url, LspPullDiagnostics>,
+    diagnostics: &mut HashMap<lsp::Uri, LspPullDiagnostics>,
     server_id: LanguageServerId,
-    uri: lsp::Url,
+    uri: lsp::Uri,
     report: lsp::UnchangedDocumentDiagnosticReport,
 ) {
     let result_id = report.result_id;
@@ -4404,9 +4404,9 @@ fn process_unchanged_diagnostics_report(
 }
 
 fn process_full_diagnostics_report(
-    diagnostics: &mut HashMap<lsp::Url, LspPullDiagnostics>,
+    diagnostics: &mut HashMap<lsp::Uri, LspPullDiagnostics>,
     server_id: LanguageServerId,
-    uri: lsp::Url,
+    uri: lsp::Uri,
     report: lsp::FullDocumentDiagnosticReport,
 ) {
     let result_id = report.result_id;
@@ -4540,7 +4540,7 @@ mod tests {
     fn test_related_information() {
         let related_info = lsp::DiagnosticRelatedInformation {
             location: lsp::Location {
-                uri: lsp::Url::parse("file:///test.rs").unwrap(),
+                uri: lsp::Uri::from_str("file:///test.rs").unwrap(),
                 range: lsp::Range {
                     start: lsp::Position::new(1, 1),
                     end: lsp::Position::new(1, 5),

crates/project/src/lsp_store.rs 🔗

@@ -79,7 +79,7 @@ use lsp::{
     LSP_REQUEST_TIMEOUT, LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions,
     LanguageServerId, LanguageServerName, LanguageServerSelector, LspRequestFuture,
     MessageActionItem, MessageType, OneOf, RenameFilesParams, SymbolKind,
-    TextDocumentSyncSaveOptions, TextEdit, WillRenameFiles, WorkDoneProgressCancelParams,
+    TextDocumentSyncSaveOptions, TextEdit, Uri, WillRenameFiles, WorkDoneProgressCancelParams,
     WorkspaceFolder, notification::DidRenameFiles,
 };
 use node_runtime::read_package_installed_version;
@@ -114,7 +114,7 @@ use std::{
 };
 use sum_tree::Dimensions;
 use text::{Anchor, BufferId, LineEnding, OffsetRangeExt};
-use url::Url;
+
 use util::{
     ConnectionResult, ResultExt as _, debug_panic, defer, maybe, merge_json_value_into,
     paths::{PathExt, SanitizedPath},
@@ -314,7 +314,7 @@ impl LocalLspStore {
             true,
             cx,
         );
-        let pending_workspace_folders: Arc<Mutex<BTreeSet<Url>>> = Default::default();
+        let pending_workspace_folders: Arc<Mutex<BTreeSet<Uri>>> = Default::default();
 
         let pending_server = cx.spawn({
             let adapter = adapter.clone();
@@ -2405,7 +2405,7 @@ impl LocalLspStore {
 
                     {
                         let uri =
-                            Url::from_file_path(worktree.read(cx).abs_path().join(&path.path));
+                            Uri::from_file_path(worktree.read(cx).abs_path().join(&path.path));
 
                         let server_id = self.get_or_insert_language_server(
                             &worktree,
@@ -2565,7 +2565,7 @@ impl LocalLspStore {
             None => return,
         };
 
-        let Ok(file_url) = lsp::Url::from_file_path(old_path.as_path()) else {
+        let Ok(file_url) = lsp::Uri::from_file_path(old_path.as_path()) else {
             debug_panic!(
                 "`{}` is not parseable as an URI",
                 old_path.to_string_lossy()
@@ -2578,7 +2578,7 @@ impl LocalLspStore {
     pub(crate) fn unregister_buffer_from_language_servers(
         &mut self,
         buffer: &Entity<Buffer>,
-        file_url: &lsp::Url,
+        file_url: &lsp::Uri,
         cx: &mut App,
     ) {
         buffer.update(cx, |buffer, cx| {
@@ -4694,7 +4694,7 @@ impl LspStore {
                     for node in nodes {
                         let server_id = node.server_id_or_init(|disposition| {
                             let path = &disposition.path;
-                            let uri = Url::from_file_path(worktree_root.join(&path.path));
+                            let uri = Uri::from_file_path(worktree_root.join(&path.path));
                             let key = LanguageServerSeed {
                                 worktree_id,
                                 name: disposition.server_name.clone(),
@@ -6578,7 +6578,7 @@ impl LspStore {
                                 File::from_dyn(buffer.file())
                                     .and_then(|file| {
                                         let abs_path = file.as_local()?.abs_path(cx);
-                                        lsp::Url::from_file_path(abs_path).ok()
+                                        lsp::Uri::from_file_path(abs_path).ok()
                                     })
                                     .is_none_or(|buffer_uri| {
                                         unchanged_buffers.contains(&buffer_uri)
@@ -7179,7 +7179,7 @@ impl LspStore {
         let buffer = buffer.read(cx);
         let file = File::from_dyn(buffer.file())?;
         let abs_path = file.as_local()?.abs_path(cx);
-        let uri = lsp::Url::from_file_path(abs_path).unwrap();
+        let uri = lsp::Uri::from_file_path(abs_path).unwrap();
         let next_snapshot = buffer.text_snapshot();
         for language_server in language_servers {
             let language_server = language_server.clone();
@@ -7816,7 +7816,7 @@ impl LspStore {
             };
 
             let symbol_abs_path = resolve_path(&worktree_abs_path, &symbol.path.path);
-            let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
+            let symbol_uri = if let Ok(uri) = lsp::Uri::from_file_path(symbol_abs_path) {
                 uri
             } else {
                 return Task::ready(Err(anyhow!("invalid symbol path")));
@@ -7830,14 +7830,14 @@ impl LspStore {
 
     pub(crate) fn open_local_buffer_via_lsp(
         &mut self,
-        mut abs_path: lsp::Url,
+        abs_path: lsp::Uri,
         language_server_id: LanguageServerId,
         cx: &mut Context<Self>,
     ) -> Task<Result<Entity<Buffer>>> {
         cx.spawn(async move |lsp_store, cx| {
             // Escape percent-encoded string.
             let current_scheme = abs_path.scheme().to_owned();
-            let _ = abs_path.set_scheme("file");
+            // Uri is immutable, so we can't modify the scheme
 
             let abs_path = abs_path
                 .to_file_path()
@@ -9230,8 +9230,12 @@ impl LspStore {
         maybe!({
             let local_store = self.as_local()?;
 
-            let old_uri = lsp::Url::from_file_path(old_path).ok().map(String::from)?;
-            let new_uri = lsp::Url::from_file_path(new_path).ok().map(String::from)?;
+            let old_uri = lsp::Uri::from_file_path(old_path)
+                .ok()
+                .map(|uri| uri.to_string())?;
+            let new_uri = lsp::Uri::from_file_path(new_path)
+                .ok()
+                .map(|uri| uri.to_string())?;
 
             for language_server in local_store.language_servers_for_worktree(worktree_id) {
                 let Some(filter) = local_store
@@ -9264,8 +9268,12 @@ impl LspStore {
         is_dir: bool,
         cx: AsyncApp,
     ) -> Task<ProjectTransaction> {
-        let old_uri = lsp::Url::from_file_path(old_path).ok().map(String::from);
-        let new_uri = lsp::Url::from_file_path(new_path).ok().map(String::from);
+        let old_uri = lsp::Uri::from_file_path(old_path)
+            .ok()
+            .map(|uri| uri.to_string());
+        let new_uri = lsp::Uri::from_file_path(new_path)
+            .ok()
+            .map(|uri| uri.to_string());
         cx.spawn(async move |cx| {
             let mut tasks = vec![];
             this.update(cx, |this, cx| {
@@ -10878,7 +10886,7 @@ impl LspStore {
         language_server: Arc<LanguageServer>,
         server_id: LanguageServerId,
         key: LanguageServerSeed,
-        workspace_folders: Arc<Mutex<BTreeSet<Url>>>,
+        workspace_folders: Arc<Mutex<BTreeSet<Uri>>>,
         cx: &mut Context<Self>,
     ) {
         let Some(local) = self.as_local_mut() else {
@@ -11038,7 +11046,7 @@ impl LspStore {
                     let snapshot = versions.last().unwrap();
                     let version = snapshot.version;
                     let initial_snapshot = &snapshot.snapshot;
-                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
+                    let uri = lsp::Uri::from_file_path(file.abs_path(cx)).unwrap();
                     language_server.register_buffer(
                         uri,
                         adapter.language_id(&language.name()),
@@ -11277,7 +11285,7 @@ impl LspStore {
                                 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
                             };
                             Some(lsp::FileEvent {
-                                uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
+                                uri: lsp::Uri::from_file_path(abs_path.join(path)).unwrap(),
                                 typ,
                             })
                         })
@@ -11689,7 +11697,7 @@ impl LspStore {
                     File::from_dyn(buffer.file())
                         .and_then(|file| {
                             let abs_path = file.as_local()?.abs_path(cx);
-                            lsp::Url::from_file_path(abs_path).ok()
+                            lsp::Uri::from_file_path(abs_path).ok()
                         })
                         .is_none_or(|buffer_uri| {
                             unchanged_buffers.contains(&buffer_uri)
@@ -12821,7 +12829,7 @@ pub enum LanguageServerState {
     Starting {
         startup: Task<Option<Arc<LanguageServer>>>,
         /// List of language servers that will be added to the workspace once it's initialization completes.
-        pending_workspace_folders: Arc<Mutex<BTreeSet<Url>>>,
+        pending_workspace_folders: Arc<Mutex<BTreeSet<Uri>>>,
     },
 
     Running {
@@ -12833,7 +12841,7 @@ pub enum LanguageServerState {
 }
 
 impl LanguageServerState {
-    fn add_workspace_folder(&self, uri: Url) {
+    fn add_workspace_folder(&self, uri: Uri) {
         match self {
             LanguageServerState::Starting {
                 pending_workspace_folders,
@@ -12846,7 +12854,7 @@ impl LanguageServerState {
             }
         }
     }
-    fn _remove_workspace_folder(&self, uri: Url) {
+    fn _remove_workspace_folder(&self, uri: Uri) {
         match self {
             LanguageServerState::Starting {
                 pending_workspace_folders,

crates/project/src/lsp_store/log_store.rs 🔗

@@ -21,8 +21,8 @@ const SERVER_LOGS: &str = "Server Logs";
 const SERVER_TRACE: &str = "Server Trace";
 const SERVER_INFO: &str = "Server Info";
 
-pub fn init(store_logs: bool, cx: &mut App) -> Entity<LogStore> {
-    let log_store = cx.new(|cx| LogStore::new(store_logs, cx));
+pub fn init(on_headless_host: bool, cx: &mut App) -> Entity<LogStore> {
+    let log_store = cx.new(|cx| LogStore::new(on_headless_host, cx));
     cx.set_global(GlobalLogStore(log_store.clone()));
     log_store
 }
@@ -43,7 +43,7 @@ pub enum Event {
 impl EventEmitter<Event> for LogStore {}
 
 pub struct LogStore {
-    store_logs: bool,
+    on_headless_host: bool,
     projects: HashMap<WeakEntity<Project>, ProjectState>,
     pub copilot_log_subscription: Option<lsp::Subscription>,
     pub language_servers: HashMap<LanguageServerId, LanguageServerState>,
@@ -138,6 +138,7 @@ pub struct LanguageServerState {
     pub trace_level: TraceValue,
     pub log_level: MessageType,
     io_logs_subscription: Option<lsp::Subscription>,
+    pub toggled_log_kind: Option<LogKind>,
 }
 
 impl std::fmt::Debug for LanguageServerState {
@@ -151,6 +152,7 @@ impl std::fmt::Debug for LanguageServerState {
             .field("rpc_state", &self.rpc_state)
             .field("trace_level", &self.trace_level)
             .field("log_level", &self.log_level)
+            .field("toggled_log_kind", &self.toggled_log_kind)
             .finish_non_exhaustive()
     }
 }
@@ -226,14 +228,14 @@ impl LogKind {
 }
 
 impl LogStore {
-    pub fn new(store_logs: bool, cx: &mut Context<Self>) -> Self {
+    pub fn new(on_headless_host: bool, cx: &mut Context<Self>) -> Self {
         let (io_tx, mut io_rx) = mpsc::unbounded();
 
         let log_store = Self {
             projects: HashMap::default(),
             language_servers: HashMap::default(),
             copilot_log_subscription: None,
-            store_logs,
+            on_headless_host,
             io_tx,
         };
         cx.spawn(async move |log_store, cx| {
@@ -351,12 +353,26 @@ impl LogStore {
                                     }
                                 }
                             }
-                            crate::Event::ToggleLspLogs { server_id, enabled } => {
-                                // we do not support any other log toggling yet
-                                if *enabled {
-                                    log_store.enable_rpc_trace_for_language_server(*server_id);
-                                } else {
-                                    log_store.disable_rpc_trace_for_language_server(*server_id);
+                            crate::Event::ToggleLspLogs {
+                                server_id,
+                                enabled,
+                                toggled_log_kind,
+                            } => {
+                                if let Some(server_state) =
+                                    log_store.get_language_server_state(*server_id)
+                                {
+                                    if *enabled {
+                                        server_state.toggled_log_kind = Some(*toggled_log_kind);
+                                    } else {
+                                        server_state.toggled_log_kind = None;
+                                    }
+                                }
+                                if LogKind::Rpc == *toggled_log_kind {
+                                    if *enabled {
+                                        log_store.enable_rpc_trace_for_language_server(*server_id);
+                                    } else {
+                                        log_store.disable_rpc_trace_for_language_server(*server_id);
+                                    }
                                 }
                             }
                             _ => {}
@@ -395,6 +411,7 @@ impl LogStore {
                 trace_level: TraceValue::Off,
                 log_level: MessageType::LOG,
                 io_logs_subscription: None,
+                toggled_log_kind: None,
             }
         });
 
@@ -425,7 +442,7 @@ impl LogStore {
         message: &str,
         cx: &mut Context<Self>,
     ) -> Option<()> {
-        let store_logs = self.store_logs;
+        let store_logs = !self.on_headless_host;
         let language_server_state = self.get_language_server_state(id)?;
 
         let log_lines = &mut language_server_state.log_messages;
@@ -464,7 +481,7 @@ impl LogStore {
         verbose_info: Option<String>,
         cx: &mut Context<Self>,
     ) -> Option<()> {
-        let store_logs = self.store_logs;
+        let store_logs = !self.on_headless_host;
         let language_server_state = self.get_language_server_state(id)?;
 
         let log_lines = &mut language_server_state.trace_messages;
@@ -530,7 +547,7 @@ impl LogStore {
         message: &str,
         cx: &mut Context<'_, Self>,
     ) {
-        let store_logs = self.store_logs;
+        let store_logs = !self.on_headless_host;
         let Some(state) = self
             .get_language_server_state(language_server_id)
             .and_then(|state| state.rpc_state.as_mut())
@@ -673,6 +690,7 @@ impl LogStore {
     }
 
     fn emit_event(&mut self, e: Event, cx: &mut Context<Self>) {
+        let on_headless_host = self.on_headless_host;
         match &e {
             Event::NewServerLogEntry { id, kind, text } => {
                 if let Some(state) = self.get_language_server_state(*id) {
@@ -686,14 +704,18 @@ impl LogStore {
                     }
                     .and_then(|lsp_store| lsp_store.read(cx).downstream_client());
                     if let Some((client, project_id)) = downstream_client {
-                        client
-                            .send(proto::LanguageServerLog {
-                                project_id,
-                                language_server_id: id.to_proto(),
-                                message: text.clone(),
-                                log_type: Some(kind.to_proto()),
-                            })
-                            .ok();
+                        if on_headless_host
+                            || Some(LogKind::from_server_log_type(kind)) == state.toggled_log_kind
+                        {
+                            client
+                                .send(proto::LanguageServerLog {
+                                    project_id,
+                                    language_server_id: id.to_proto(),
+                                    message: text.clone(),
+                                    log_type: Some(kind.to_proto()),
+                                })
+                                .ok();
+                        }
                     }
                 }
             }

crates/project/src/lsp_store/lsp_ext_command.rs 🔗

@@ -213,7 +213,7 @@ impl LspCommand for OpenDocs {
     ) -> Result<OpenDocsParams> {
         Ok(OpenDocsParams {
             text_document: lsp::TextDocumentIdentifier {
-                uri: lsp::Url::from_file_path(path).unwrap(),
+                uri: lsp::Uri::from_file_path(path).unwrap(),
             },
             position: point_to_lsp(self.position),
         })

crates/project/src/project.rs 🔗

@@ -33,7 +33,7 @@ mod yarn;
 
 use dap::inline_value::{InlineValueLocation, VariableLookupKind, VariableScope};
 
-use crate::git_store::GitStore;
+use crate::{git_store::GitStore, lsp_store::log_store::LogKind};
 pub use git_store::{
     ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate,
     git_traversal::{ChildEntriesGitIter, GitEntry, GitEntryRef, GitTraversal},
@@ -87,7 +87,7 @@ use node_runtime::NodeRuntime;
 use parking_lot::Mutex;
 pub use prettier_store::PrettierStore;
 use project_settings::{ProjectSettings, SettingsObserver, SettingsObserverEvent};
-use remote::{RemoteClient, SshConnectionOptions};
+use remote::{RemoteClient, RemoteConnectionOptions};
 use rpc::{
     AnyProtoClient, ErrorCode,
     proto::{FromProto, LanguageServerPromptResponse, REMOTE_SERVER_PROJECT_ID, ToProto},
@@ -285,6 +285,7 @@ pub enum Event {
     ToggleLspLogs {
         server_id: LanguageServerId,
         enabled: bool,
+        toggled_log_kind: LogKind,
     },
     Toast {
         notification_id: SharedString,
@@ -929,7 +930,7 @@ pub enum LspPullDiagnostics {
         /// The id of the language server that produced diagnostics.
         server_id: LanguageServerId,
         /// URI of the resource,
-        uri: lsp::Url,
+        uri: lsp::Uri,
         /// The diagnostics produced by this language server.
         diagnostics: PulledDiagnostics,
     },
@@ -951,7 +952,7 @@ pub enum PulledDiagnostics {
 /// Whether to disable all AI features in Zed.
 ///
 /// Default: false
-#[derive(Copy, Clone, Debug)]
+#[derive(Copy, Clone, Debug, settings::SettingsUi)]
 pub struct DisableAiSettings {
     pub disable_ai: bool,
 }
@@ -1915,7 +1916,7 @@ impl Project {
             .map(|remote| remote.read(cx).connection_state())
     }
 
-    pub fn remote_connection_options(&self, cx: &App) -> Option<SshConnectionOptions> {
+    pub fn remote_connection_options(&self, cx: &App) -> Option<RemoteConnectionOptions> {
         self.remote_client
             .as_ref()
             .map(|remote| remote.read(cx).connection_options())
@@ -3598,7 +3599,7 @@ impl Project {
 
     pub fn open_local_buffer_via_lsp(
         &mut self,
-        abs_path: lsp::Url,
+        abs_path: lsp::Uri,
         language_server_id: LanguageServerId,
         cx: &mut Context<Self>,
     ) -> Task<Result<Entity<Buffer>>> {
@@ -4719,10 +4720,19 @@ impl Project {
         envelope: TypedEnvelope<proto::ToggleLspLogs>,
         mut cx: AsyncApp,
     ) -> Result<()> {
+        let toggled_log_kind =
+            match proto::toggle_lsp_logs::LogType::from_i32(envelope.payload.log_type)
+                .context("invalid log type")?
+            {
+                proto::toggle_lsp_logs::LogType::Log => LogKind::Logs,
+                proto::toggle_lsp_logs::LogType::Trace => LogKind::Trace,
+                proto::toggle_lsp_logs::LogType::Rpc => LogKind::Rpc,
+            };
         project.update(&mut cx, |_, cx| {
             cx.emit(Event::ToggleLspLogs {
                 server_id: LanguageServerId::from_proto(envelope.payload.server_id),
                 enabled: envelope.payload.enabled,
+                toggled_log_kind,
             })
         })?;
         Ok(())

crates/project/src/project_settings.rs 🔗

@@ -19,7 +19,7 @@ use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use settings::{
     InvalidSettingsError, LocalSettingsKind, Settings, SettingsLocation, SettingsSources,
-    SettingsStore, parse_json_with_comments, watch_config_file,
+    SettingsStore, SettingsUi, parse_json_with_comments, watch_config_file,
 };
 use std::{
     collections::BTreeMap,
@@ -36,7 +36,7 @@ use crate::{
     worktree_store::{WorktreeStore, WorktreeStoreEvent},
 };
 
-#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)]
+#[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema, SettingsUi)]
 pub struct ProjectSettings {
     /// Configuration for language servers.
     ///

crates/project/src/project_tests.rs 🔗

@@ -18,7 +18,6 @@ use git::{
 };
 use git2::RepositoryInitOptions;
 use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
-use http_client::Url;
 use itertools::Itertools;
 use language::{
     Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
@@ -29,7 +28,7 @@ use language::{
 };
 use lsp::{
     DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
-    WillRenameFiles, notification::DidRenameFiles,
+    Uri, WillRenameFiles, notification::DidRenameFiles,
 };
 use parking_lot::Mutex;
 use paths::{config_dir, tasks_file};
@@ -40,7 +39,7 @@ use serde_json::json;
 #[cfg(not(windows))]
 use std::os;
 use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
-use task::{ResolvedTask, TaskContext};
+use task::{ResolvedTask, ShellKind, TaskContext};
 use unindent::Unindent as _;
 use util::{
     TryFutureExt as _, assert_set_eq, maybe, path,
@@ -701,7 +700,7 @@ async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
     assert_eq!(
         server.workspace_folders(),
         BTreeSet::from_iter(
-            [Url::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
+            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
         )
     );
 
@@ -891,7 +890,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
             .await
             .text_document,
         lsp::TextDocumentItem {
-            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
+            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
             version: 0,
             text: "const A: i32 = 1;".to_string(),
             language_id: "rust".to_string(),
@@ -921,7 +920,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
             .await
             .text_document,
         lsp::VersionedTextDocumentIdentifier::new(
-            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
+            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
             1
         )
     );
@@ -942,7 +941,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
             .await
             .text_document,
         lsp::TextDocumentItem {
-            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
+            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
             version: 0,
             text: "{\"a\": 1}".to_string(),
             language_id: "json".to_string(),
@@ -992,7 +991,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
             .await
             .text_document,
         lsp::VersionedTextDocumentIdentifier::new(
-            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
+            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
             1
         )
     );
@@ -1008,7 +1007,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
             .await
             .text_document,
         lsp::TextDocumentIdentifier::new(
-            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
+            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
         )
     );
     assert_eq!(
@@ -1017,7 +1016,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
             .await
             .text_document,
         lsp::TextDocumentIdentifier::new(
-            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
+            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
         )
     );
 
@@ -1034,7 +1033,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
             .receive_notification::<lsp::notification::DidCloseTextDocument>()
             .await
             .text_document,
-        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
+        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
     );
     assert_eq!(
         fake_rust_server
@@ -1042,7 +1041,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
             .await
             .text_document,
         lsp::TextDocumentItem {
-            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
+            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
             version: 0,
             text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
             language_id: "rust".to_string(),
@@ -1084,7 +1083,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
             .receive_notification::<lsp::notification::DidCloseTextDocument>()
             .await
             .text_document,
-        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
+        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
     );
     assert_eq!(
         fake_json_server
@@ -1092,7 +1091,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
             .await
             .text_document,
         lsp::TextDocumentItem {
-            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
+            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
             version: 0,
             text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
             language_id: "json".to_string(),
@@ -1118,7 +1117,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
             .await
             .text_document,
         lsp::VersionedTextDocumentIdentifier::new(
-            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
+            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
             1
         )
     );
@@ -1148,7 +1147,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
             .await
             .text_document,
         lsp::TextDocumentItem {
-            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
+            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
             version: 0,
             text: rust_buffer.update(cx, |buffer, _| buffer.text()),
             language_id: "rust".to_string(),
@@ -1169,13 +1168,13 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
         ],
         [
             lsp::TextDocumentItem {
-                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
+                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                 version: 0,
                 text: json_buffer.update(cx, |buffer, _| buffer.text()),
                 language_id: "json".to_string(),
             },
             lsp::TextDocumentItem {
-                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
+                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                 version: 0,
                 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                 language_id: "json".to_string(),
@@ -1187,7 +1186,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
     cx.update(|_| drop(_json_handle));
     let close_message = lsp::DidCloseTextDocumentParams {
         text_document: lsp::TextDocumentIdentifier::new(
-            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
+            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
         ),
     };
     assert_eq!(
@@ -1316,7 +1315,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
     let _out_of_worktree_buffer = project
         .update(cx, |project, cx| {
             project.open_local_buffer_via_lsp(
-                lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
+                lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
                 server_id,
                 cx,
             )
@@ -1476,23 +1475,23 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
         &*file_changes.lock(),
         &[
             lsp::FileEvent {
-                uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
+                uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
                 typ: lsp::FileChangeType::CHANGED,
             },
             lsp::FileEvent {
-                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
+                uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                 typ: lsp::FileChangeType::DELETED,
             },
             lsp::FileEvent {
-                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
+                uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                 typ: lsp::FileChangeType::CREATED,
             },
             lsp::FileEvent {
-                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
+                uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                 typ: lsp::FileChangeType::CREATED,
             },
             lsp::FileEvent {
-                uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
+                uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
                 typ: lsp::FileChangeType::CHANGED,
             },
         ]
@@ -1539,7 +1538,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
             .update_diagnostics(
                 LanguageServerId(0),
                 lsp::PublishDiagnosticsParams {
-                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+                    uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                     version: None,
                     diagnostics: vec![lsp::Diagnostic {
                         range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
@@ -1558,7 +1557,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
             .update_diagnostics(
                 LanguageServerId(0),
                 lsp::PublishDiagnosticsParams {
-                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
+                    uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
                     version: None,
                     diagnostics: vec![lsp::Diagnostic {
                         range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
@@ -1650,7 +1649,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
             .update_diagnostics(
                 server_id,
                 lsp::PublishDiagnosticsParams {
-                    uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
+                    uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
                     version: None,
                     diagnostics: vec![lsp::Diagnostic {
                         range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
@@ -1669,7 +1668,7 @@ async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
             .update_diagnostics(
                 server_id,
                 lsp::PublishDiagnosticsParams {
-                    uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
+                    uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
                     version: None,
                     diagnostics: vec![lsp::Diagnostic {
                         range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
@@ -1813,7 +1812,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
     );
 
     fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
-        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
         version: None,
         diagnostics: vec![lsp::Diagnostic {
             range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
@@ -1866,7 +1865,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
 
     // Ensure publishing empty diagnostics twice only results in one update event.
     fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
-        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
         version: None,
         diagnostics: Default::default(),
     });
@@ -1879,7 +1878,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
     );
 
     fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
-        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
         version: None,
         diagnostics: Default::default(),
     });
@@ -2011,7 +2010,7 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp
     // Publish diagnostics
     let fake_server = fake_servers.next().await.unwrap();
     fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
-        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
         version: None,
         diagnostics: vec![lsp::Diagnostic {
             range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
@@ -2092,7 +2091,7 @@ async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::T
     // Before restarting the server, report diagnostics with an unknown buffer version.
     let fake_server = fake_servers.next().await.unwrap();
     fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
-        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
         version: Some(10000),
         diagnostics: Vec::new(),
     });
@@ -2343,7 +2342,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
 
     // Report some diagnostics for the initial version of the buffer
     fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
-        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
         version: Some(open_notification.text_document.version),
         diagnostics: vec![
             lsp::Diagnostic {
@@ -2431,7 +2430,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
 
     // Ensure overlapping diagnostics are highlighted correctly.
     fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
-        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
         version: Some(open_notification.text_document.version),
         diagnostics: vec![
             lsp::Diagnostic {
@@ -2525,7 +2524,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
 
     // Handle out-of-order diagnostics
     fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
-        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
         version: Some(change_notification_2.text_document.version),
         diagnostics: vec![
             lsp::Diagnostic {
@@ -3206,7 +3205,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
 
         Ok(Some(lsp::GotoDefinitionResponse::Scalar(
             lsp::Location::new(
-                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
+                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
             ),
         )))
@@ -3765,7 +3764,7 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
                                 edit: lsp::WorkspaceEdit {
                                     changes: Some(
                                         [(
-                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
+                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                             vec![lsp::TextEdit {
                                                 range: lsp::Range::new(
                                                     lsp::Position::new(0, 0),
@@ -3904,7 +3903,7 @@ async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
             .await
             .text_document,
         lsp::TextDocumentItem {
-            uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
+            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
             version: 0,
             text: "".to_string(),
             language_id: "rust".to_string(),
@@ -4742,7 +4741,7 @@ async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
         .await
         .unwrap();
 
-    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
+    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
     let message = lsp::PublishDiagnosticsParams {
         uri: buffer_uri.clone(),
         diagnostics: vec![
@@ -5064,7 +5063,7 @@ async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
                     new_text: "This is not a drill".to_owned(),
                 })],
                 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
-                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
+                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                     version: Some(1337),
                 },
             }]
@@ -5189,14 +5188,14 @@ async fn test_rename(cx: &mut gpui::TestAppContext) {
                 changes: Some(
                     [
                         (
-                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
+                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                             vec![lsp::TextEdit::new(
                                 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                 "THREE".to_string(),
                             )],
                         ),
                         (
-                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
+                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                             vec![
                                 lsp::TextEdit::new(
                                     lsp::Range::new(
@@ -9216,8 +9215,8 @@ fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
         fn manifest_name(&self) -> ManifestName {
             SharedString::new_static("pyproject.toml").into()
         }
-        async fn activation_script(&self, _: &Toolchain, _: &dyn Fs) -> Option<String> {
-            None
+        async fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &dyn Fs) -> Vec<String> {
+            vec![]
         }
     }
     Arc::new(

crates/project/src/terminals.rs 🔗

@@ -1,7 +1,8 @@
 use anyhow::Result;
 use collections::HashMap;
 use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity};
-use itertools::Itertools;
+
+use itertools::Itertools as _;
 use language::LanguageName;
 use remote::RemoteClient;
 use settings::{Settings, SettingsLocation};
@@ -11,7 +12,7 @@ use std::{
     path::{Path, PathBuf},
     sync::Arc,
 };
-use task::{Shell, ShellBuilder, SpawnInTerminal};
+use task::{Shell, ShellBuilder, ShellKind, SpawnInTerminal};
 use terminal::{
     TaskState, TaskStatus, Terminal, TerminalBuilder, terminal_settings::TerminalSettings,
 };
@@ -131,33 +132,62 @@ impl Project {
         cx.spawn(async move |project, cx| {
             let activation_script = maybe!(async {
                 let toolchain = toolchain?.await?;
-                lang_registry
-                    .language_for_name(&toolchain.language_name.0)
-                    .await
-                    .ok()?
-                    .toolchain_lister()?
-                    .activation_script(&toolchain, fs.as_ref())
-                    .await
+                Some(
+                    lang_registry
+                        .language_for_name(&toolchain.language_name.0)
+                        .await
+                        .ok()?
+                        .toolchain_lister()?
+                        .activation_script(&toolchain, ShellKind::new(&shell), fs.as_ref())
+                        .await,
+                )
             })
-            .await;
+            .await
+            .unwrap_or_default();
 
             project.update(cx, move |this, cx| {
                 let shell = {
                     env.extend(spawn_task.env);
                     match remote_client {
-                        Some(remote_client) => create_remote_shell(
-                            spawn_task
-                                .command
-                                .as_ref()
-                                .map(|command| (command, &spawn_task.args)),
-                            &mut env,
-                            path,
-                            remote_client,
-                            activation_script.clone(),
-                            cx,
-                        )?,
+                        Some(remote_client) => match activation_script.clone() {
+                            activation_script if !activation_script.is_empty() => {
+                                let activation_script = activation_script.join("; ");
+                                let to_run = if let Some(command) = spawn_task.command {
+                                    let command: Option<Cow<str>> = shlex::try_quote(&command).ok();
+                                    let args = spawn_task
+                                        .args
+                                        .iter()
+                                        .filter_map(|arg| shlex::try_quote(arg).ok());
+                                    command.into_iter().chain(args).join(" ")
+                                } else {
+                                    format!("exec {shell} -l")
+                                };
+                                let args = vec![
+                                    "-c".to_owned(),
+                                    format!("{activation_script}; {to_run}",),
+                                ];
+                                create_remote_shell(
+                                    Some((&shell, &args)),
+                                    &mut env,
+                                    path,
+                                    remote_client,
+                                    cx,
+                                )?
+                            }
+                            _ => create_remote_shell(
+                                spawn_task
+                                    .command
+                                    .as_ref()
+                                    .map(|command| (command, &spawn_task.args)),
+                                &mut env,
+                                path,
+                                remote_client,
+                                cx,
+                            )?,
+                        },
                         None => match activation_script.clone() {
-                            Some(activation_script) => {
+                            activation_script if !activation_script.is_empty() => {
+                                let activation_script = activation_script.join("; ");
                                 let to_run = if let Some(command) = spawn_task.command {
                                     let command: Option<Cow<str>> = shlex::try_quote(&command).ok();
                                     let args = spawn_task
@@ -169,7 +199,7 @@ impl Project {
                                     format!("exec {shell} -l")
                                 };
                                 Shell::WithArguments {
-                                    program: get_default_system_shell(),
+                                    program: shell,
                                     args: vec![
                                         "-c".to_owned(),
                                         format!("{activation_script}; {to_run}",),
@@ -177,7 +207,7 @@ impl Project {
                                     title_override: None,
                                 }
                             }
-                            None => {
+                            _ => {
                                 if let Some(program) = spawn_task.command {
                                     Shell::WithArguments {
                                         program,
@@ -302,31 +332,21 @@ impl Project {
                     .await
                     .ok();
                 let lister = language?.toolchain_lister();
-                lister?.activation_script(&toolchain, fs.as_ref()).await
+                Some(
+                    lister?
+                        .activation_script(&toolchain, ShellKind::new(&shell), fs.as_ref())
+                        .await,
+                )
             })
-            .await;
+            .await
+            .unwrap_or_default();
             project.update(cx, move |this, cx| {
                 let shell = {
                     match remote_client {
-                        Some(remote_client) => create_remote_shell(
-                            None,
-                            &mut env,
-                            path,
-                            remote_client,
-                            activation_script.clone(),
-                            cx,
-                        )?,
-                        None => match activation_script.clone() {
-                            Some(activation_script) => Shell::WithArguments {
-                                program: get_default_system_shell(),
-                                args: vec![
-                                    "-c".to_owned(),
-                                    format!("{activation_script}; exec {shell} -l",),
-                                ],
-                                title_override: Some(shell.into()),
-                            },
-                            None => settings.shell,
-                        },
+                        Some(remote_client) => {
+                            create_remote_shell(None, &mut env, path, remote_client, cx)?
+                        }
+                        None => settings.shell,
                     }
                 };
                 TerminalBuilder::new(
@@ -437,15 +457,10 @@ impl Project {
 
         match remote_client {
             Some(remote_client) => {
-                let command_template = remote_client.read(cx).build_command(
-                    Some(command),
-                    &args,
-                    &env,
-                    None,
-                    // todo
-                    None,
-                    None,
-                )?;
+                let command_template =
+                    remote_client
+                        .read(cx)
+                        .build_command(Some(command), &args, &env, None, None)?;
                 let mut command = std::process::Command::new(command_template.program);
                 command.args(command_template.args);
                 command.envs(command_template.env);
@@ -473,7 +488,6 @@ fn create_remote_shell(
     env: &mut HashMap<String, String>,
     working_directory: Option<Arc<Path>>,
     remote_client: Entity<RemoteClient>,
-    activation_script: Option<String>,
     cx: &mut App,
 ) -> Result<Shell> {
     // Alacritty sets its terminfo to `alacritty`, this requiring hosts to have it installed
@@ -493,13 +507,12 @@ fn create_remote_shell(
         args.as_slice(),
         env,
         working_directory.map(|path| path.display().to_string()),
-        activation_script,
         None,
     )?;
     *env = command.env;
 
     log::debug!("Connecting to a remote server: {:?}", command.program);
-    let host = remote_client.read(cx).connection_options().host;
+    let host = remote_client.read(cx).connection_options().display_name();
 
     Ok(Shell::WithArguments {
         program: command.program,

crates/project_panel/src/project_panel_settings.rs 🔗

@@ -2,7 +2,7 @@ use editor::ShowScrollbar;
 use gpui::Pixels;
 use schemars::JsonSchema;
 use serde_derive::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
 #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Copy, PartialEq)]
 #[serde(rename_all = "snake_case")]
@@ -28,7 +28,7 @@ pub enum EntrySpacing {
     Standard,
 }
 
-#[derive(Deserialize, Debug, Clone, Copy, PartialEq)]
+#[derive(Deserialize, Debug, Clone, Copy, PartialEq, SettingsUi)]
 pub struct ProjectPanelSettings {
     pub button: bool,
     pub hide_gitignore: bool,

crates/project_symbols/src/project_symbols.rs 🔗

@@ -1,18 +1,19 @@
 use editor::{Bias, Editor, SelectionEffects, scroll::Autoscroll, styled_runs_for_code_label};
 use fuzzy::{StringMatch, StringMatchCandidate};
 use gpui::{
-    App, Context, DismissEvent, Entity, FontWeight, ParentElement, StyledText, Task, WeakEntity,
-    Window, rems,
+    App, Context, DismissEvent, Entity, HighlightStyle, ParentElement, StyledText, Task, TextStyle,
+    WeakEntity, Window, relative, rems,
 };
 use ordered_float::OrderedFloat;
 use picker::{Picker, PickerDelegate};
 use project::{Project, Symbol};
+use settings::Settings;
 use std::{borrow::Cow, cmp::Reverse, sync::Arc};
-use theme::ActiveTheme;
+use theme::{ActiveTheme, ThemeSettings};
 use util::ResultExt;
 use workspace::{
     Workspace,
-    ui::{Color, Label, LabelCommon, LabelLike, ListItem, ListItemSpacing, Toggleable, v_flex},
+    ui::{LabelLike, ListItem, ListItemSpacing, prelude::*},
 };
 
 pub fn init(cx: &mut App) {
@@ -213,7 +214,7 @@ impl PickerDelegate for ProjectSymbolsDelegate {
         &self,
         ix: usize,
         selected: bool,
-        window: &mut Window,
+        _window: &mut Window,
         cx: &mut Context<Picker<Self>>,
     ) -> Option<Self::ListItem> {
         let string_match = &self.matches[ix];
@@ -235,18 +236,29 @@ impl PickerDelegate for ProjectSymbolsDelegate {
         let label = symbol.label.text.clone();
         let path = path.to_string();
 
-        let highlights = gpui::combine_highlights(
-            string_match
-                .positions
-                .iter()
-                .map(|pos| (*pos..pos + 1, FontWeight::BOLD.into())),
-            syntax_runs.map(|(range, mut highlight)| {
-                // Ignore font weight for syntax highlighting, as we'll use it
-                // for fuzzy matches.
-                highlight.font_weight = None;
-                (range, highlight)
-            }),
-        );
+        let settings = ThemeSettings::get_global(cx);
+
+        let text_style = TextStyle {
+            color: cx.theme().colors().text,
+            font_family: settings.buffer_font.family.clone(),
+            font_features: settings.buffer_font.features.clone(),
+            font_fallbacks: settings.buffer_font.fallbacks.clone(),
+            font_size: settings.buffer_font_size(cx).into(),
+            font_weight: settings.buffer_font.weight,
+            line_height: relative(1.),
+            ..Default::default()
+        };
+
+        let highlight_style = HighlightStyle {
+            background_color: Some(cx.theme().colors().text_accent.alpha(0.3)),
+            ..Default::default()
+        };
+        let custom_highlights = string_match
+            .positions
+            .iter()
+            .map(|pos| (*pos..pos + 1, highlight_style));
+
+        let highlights = gpui::combine_highlights(custom_highlights, syntax_runs);
 
         Some(
             ListItem::new(ix)
@@ -255,13 +267,10 @@ impl PickerDelegate for ProjectSymbolsDelegate {
                 .toggle_state(selected)
                 .child(
                     v_flex()
-                        .child(
-                            LabelLike::new().child(
-                                StyledText::new(label)
-                                    .with_default_highlights(&window.text_style(), highlights),
-                            ),
-                        )
-                        .child(Label::new(path).color(Color::Muted)),
+                        .child(LabelLike::new().child(
+                            StyledText::new(label).with_default_highlights(&text_style, highlights),
+                        ))
+                        .child(Label::new(path).size(LabelSize::Small).color(Color::Muted)),
                 ),
         )
     }
@@ -437,7 +446,7 @@ mod tests {
             deprecated: None,
             container_name: None,
             location: lsp::Location::new(
-                lsp::Url::from_file_path(path.as_ref()).unwrap(),
+                lsp::Uri::from_file_path(path.as_ref()).unwrap(),
                 lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
             ),
         }

crates/recent_projects/src/disconnected_overlay.rs 🔗

@@ -1,6 +1,6 @@
 use gpui::{ClickEvent, DismissEvent, EventEmitter, FocusHandle, Focusable, Render, WeakEntity};
 use project::project_settings::ProjectSettings;
-use remote::SshConnectionOptions;
+use remote::RemoteConnectionOptions;
 use settings::Settings;
 use ui::{
     Button, ButtonCommon, ButtonStyle, Clickable, Context, ElevationIndex, FluentBuilder, Headline,
@@ -9,11 +9,11 @@ use ui::{
 };
 use workspace::{ModalView, OpenOptions, Workspace, notifications::DetachAndPromptErr};
 
-use crate::open_ssh_project;
+use crate::open_remote_project;
 
 enum Host {
-    RemoteProject,
-    SshRemoteProject(SshConnectionOptions),
+    CollabGuestProject,
+    RemoteServerProject(RemoteConnectionOptions),
 }
 
 pub struct DisconnectedOverlay {
@@ -66,9 +66,9 @@ impl DisconnectedOverlay {
 
                 let remote_connection_options = project.read(cx).remote_connection_options(cx);
                 let host = if let Some(ssh_connection_options) = remote_connection_options {
-                    Host::SshRemoteProject(ssh_connection_options)
+                    Host::RemoteServerProject(ssh_connection_options)
                 } else {
-                    Host::RemoteProject
+                    Host::CollabGuestProject
                 };
 
                 workspace.toggle_modal(window, cx, |_, cx| DisconnectedOverlay {
@@ -86,14 +86,14 @@ impl DisconnectedOverlay {
         self.finished = true;
         cx.emit(DismissEvent);
 
-        if let Host::SshRemoteProject(ssh_connection_options) = &self.host {
-            self.reconnect_to_ssh_remote(ssh_connection_options.clone(), window, cx);
+        if let Host::RemoteServerProject(ssh_connection_options) = &self.host {
+            self.reconnect_to_remote_project(ssh_connection_options.clone(), window, cx);
         }
     }
 
-    fn reconnect_to_ssh_remote(
+    fn reconnect_to_remote_project(
         &self,
-        connection_options: SshConnectionOptions,
+        connection_options: RemoteConnectionOptions,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
@@ -114,7 +114,7 @@ impl DisconnectedOverlay {
             .collect();
 
         cx.spawn_in(window, async move |_, cx| {
-            open_ssh_project(
+            open_remote_project(
                 connection_options,
                 paths,
                 app_state,
@@ -138,13 +138,13 @@ impl DisconnectedOverlay {
 
 impl Render for DisconnectedOverlay {
     fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        let can_reconnect = matches!(self.host, Host::SshRemoteProject(_));
+        let can_reconnect = matches!(self.host, Host::RemoteServerProject(_));
 
         let message = match &self.host {
-            Host::RemoteProject => {
+            Host::CollabGuestProject => {
                 "Your connection to the remote project has been lost.".to_string()
             }
-            Host::SshRemoteProject(options) => {
+            Host::RemoteServerProject(options) => {
                 let autosave = if ProjectSettings::get_global(cx)
                     .session
                     .restore_unsaved_buffers
@@ -155,7 +155,8 @@ impl Render for DisconnectedOverlay {
                 };
                 format!(
                     "Your connection to {} has been lost.{}",
-                    options.host, autosave
+                    options.display_name(),
+                    autosave
                 )
             }
         };

crates/recent_projects/src/recent_projects.rs 🔗

@@ -1,9 +1,10 @@
 pub mod disconnected_overlay;
+mod remote_connections;
 mod remote_servers;
 mod ssh_config;
-mod ssh_connections;
 
-pub use ssh_connections::{is_connecting_over_ssh, open_ssh_project};
+use remote::RemoteConnectionOptions;
+pub use remote_connections::open_remote_project;
 
 use disconnected_overlay::DisconnectedOverlay;
 use fuzzy::{StringMatch, StringMatchCandidate};
@@ -16,9 +17,9 @@ use picker::{
     Picker, PickerDelegate,
     highlighted_match_with_paths::{HighlightedMatch, HighlightedMatchWithPaths},
 };
+pub use remote_connections::SshSettings;
 pub use remote_servers::RemoteServerProjects;
 use settings::Settings;
-pub use ssh_connections::SshSettings;
 use std::{path::Path, sync::Arc};
 use ui::{KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*, tooltip_container};
 use util::{ResultExt, paths::PathExt};
@@ -290,7 +291,7 @@ impl PickerDelegate for RecentProjectsDelegate {
                     if workspace.database_id() == Some(*candidate_workspace_id) {
                         Task::ready(Ok(()))
                     } else {
-                        match candidate_workspace_location {
+                        match candidate_workspace_location.clone() {
                             SerializedWorkspaceLocation::Local => {
                                 let paths = candidate_workspace_paths.paths().to_vec();
                                 if replace_current_window {
@@ -320,7 +321,7 @@ impl PickerDelegate for RecentProjectsDelegate {
                                     workspace.open_workspace_for_paths(false, paths, window, cx)
                                 }
                             }
-                            SerializedWorkspaceLocation::Ssh(connection) => {
+                            SerializedWorkspaceLocation::Remote(mut connection) => {
                                 let app_state = workspace.app_state().clone();
 
                                 let replace_window = if replace_current_window {
@@ -334,18 +335,16 @@ impl PickerDelegate for RecentProjectsDelegate {
                                     ..Default::default()
                                 };
 
-                                let connection_options = SshSettings::get_global(cx)
-                                    .connection_options_for(
-                                        connection.host.clone(),
-                                        connection.port,
-                                        connection.user.clone(),
-                                    );
+                                if let RemoteConnectionOptions::Ssh(connection) = &mut connection {
+                                    SshSettings::get_global(cx)
+                                        .fill_connection_options_from_settings(connection);
+                                };
 
                                 let paths = candidate_workspace_paths.paths().to_vec();
 
                                 cx.spawn_in(window, async move |_, cx| {
-                                    open_ssh_project(
-                                        connection_options,
+                                    open_remote_project(
+                                        connection.clone(),
                                         paths,
                                         app_state,
                                         open_options,
@@ -418,9 +417,11 @@ impl PickerDelegate for RecentProjectsDelegate {
                                 SerializedWorkspaceLocation::Local => Icon::new(IconName::Screen)
                                     .color(Color::Muted)
                                     .into_any_element(),
-                                SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Server)
-                                    .color(Color::Muted)
-                                    .into_any_element(),
+                                SerializedWorkspaceLocation::Remote(_) => {
+                                    Icon::new(IconName::Server)
+                                        .color(Color::Muted)
+                                        .into_any_element()
+                                }
                             })
                         })
                         .child({

crates/recent_projects/src/ssh_connections.rs → crates/recent_projects/src/remote_connections.rs 🔗

@@ -16,11 +16,12 @@ use language::CursorShape;
 use markdown::{Markdown, MarkdownElement, MarkdownStyle};
 use release_channel::ReleaseChannel;
 use remote::{
-    ConnectionIdentifier, RemoteClient, RemotePlatform, SshConnectionOptions, SshPortForwardOption,
+    ConnectionIdentifier, RemoteClient, RemoteConnectionOptions, RemotePlatform,
+    SshConnectionOptions, SshPortForwardOption,
 };
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 use theme::ThemeSettings;
 use ui::{
     ActiveTheme, Color, Context, Icon, IconName, IconSize, InteractiveElement, IntoElement, Label,
@@ -29,7 +30,7 @@ use ui::{
 use util::serde::default_true;
 use workspace::{AppState, ModalView, Workspace};
 
-#[derive(Deserialize)]
+#[derive(Deserialize, SettingsUi)]
 pub struct SshSettings {
     pub ssh_connections: Option<Vec<SshConnection>>,
     /// Whether to read ~/.ssh/config for ssh connection sources.
@@ -42,32 +43,35 @@ impl SshSettings {
         self.ssh_connections.clone().into_iter().flatten()
     }
 
+    pub fn fill_connection_options_from_settings(&self, options: &mut SshConnectionOptions) {
+        for conn in self.ssh_connections() {
+            if conn.host == options.host
+                && conn.username == options.username
+                && conn.port == options.port
+            {
+                options.nickname = conn.nickname;
+                options.upload_binary_over_ssh = conn.upload_binary_over_ssh.unwrap_or_default();
+                options.args = Some(conn.args);
+                options.port_forwards = conn.port_forwards;
+                break;
+            }
+        }
+    }
+
     pub fn connection_options_for(
         &self,
         host: String,
         port: Option<u16>,
         username: Option<String>,
     ) -> SshConnectionOptions {
-        for conn in self.ssh_connections() {
-            if conn.host == host && conn.username == username && conn.port == port {
-                return SshConnectionOptions {
-                    nickname: conn.nickname,
-                    upload_binary_over_ssh: conn.upload_binary_over_ssh.unwrap_or_default(),
-                    args: Some(conn.args),
-                    host,
-                    port,
-                    username,
-                    port_forwards: conn.port_forwards,
-                    password: None,
-                };
-            }
-        }
-        SshConnectionOptions {
+        let mut options = SshConnectionOptions {
             host,
             port,
             username,
             ..Default::default()
-        }
+        };
+        self.fill_connection_options_from_settings(&mut options);
+        options
     }
 }
 
@@ -135,7 +139,7 @@ impl Settings for SshSettings {
     fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
 }
 
-pub struct SshPrompt {
+pub struct RemoteConnectionPrompt {
     connection_string: SharedString,
     nickname: Option<SharedString>,
     status_message: Option<SharedString>,
@@ -144,7 +148,7 @@ pub struct SshPrompt {
     editor: Entity<Editor>,
 }
 
-impl Drop for SshPrompt {
+impl Drop for RemoteConnectionPrompt {
     fn drop(&mut self) {
         if let Some(cancel) = self.cancellation.take() {
             cancel.send(()).ok();
@@ -152,24 +156,22 @@ impl Drop for SshPrompt {
     }
 }
 
-pub struct SshConnectionModal {
-    pub(crate) prompt: Entity<SshPrompt>,
+pub struct RemoteConnectionModal {
+    pub(crate) prompt: Entity<RemoteConnectionPrompt>,
     paths: Vec<PathBuf>,
     finished: bool,
 }
 
-impl SshPrompt {
+impl RemoteConnectionPrompt {
     pub(crate) fn new(
-        connection_options: &SshConnectionOptions,
+        connection_string: String,
+        nickname: Option<String>,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Self {
-        let connection_string = connection_options.connection_string().into();
-        let nickname = connection_options.nickname.clone().map(|s| s.into());
-
         Self {
-            connection_string,
-            nickname,
+            connection_string: connection_string.into(),
+            nickname: nickname.map(|nickname| nickname.into()),
             editor: cx.new(|cx| Editor::single_line(window, cx)),
             status_message: None,
             cancellation: None,
@@ -232,7 +234,7 @@ impl SshPrompt {
     }
 }
 
-impl Render for SshPrompt {
+impl Render for RemoteConnectionPrompt {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
         let theme = ThemeSettings::get_global(cx);
 
@@ -297,15 +299,22 @@ impl Render for SshPrompt {
     }
 }
 
-impl SshConnectionModal {
+impl RemoteConnectionModal {
     pub(crate) fn new(
-        connection_options: &SshConnectionOptions,
+        connection_options: &RemoteConnectionOptions,
         paths: Vec<PathBuf>,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Self {
+        let (connection_string, nickname) = match connection_options {
+            RemoteConnectionOptions::Ssh(options) => {
+                (options.connection_string(), options.nickname.clone())
+            }
+            RemoteConnectionOptions::Wsl(options) => (options.distro_name.clone(), None),
+        };
         Self {
-            prompt: cx.new(|cx| SshPrompt::new(connection_options, window, cx)),
+            prompt: cx
+                .new(|cx| RemoteConnectionPrompt::new(connection_string, nickname, window, cx)),
             finished: false,
             paths,
         }
@@ -386,7 +395,7 @@ impl RenderOnce for SshConnectionHeader {
     }
 }
 
-impl Render for SshConnectionModal {
+impl Render for RemoteConnectionModal {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl ui::IntoElement {
         let nickname = self.prompt.read(cx).nickname.clone();
         let connection_string = self.prompt.read(cx).connection_string.clone();
@@ -423,15 +432,15 @@ impl Render for SshConnectionModal {
     }
 }
 
-impl Focusable for SshConnectionModal {
+impl Focusable for RemoteConnectionModal {
     fn focus_handle(&self, cx: &gpui::App) -> gpui::FocusHandle {
         self.prompt.read(cx).editor.focus_handle(cx)
     }
 }
 
-impl EventEmitter<DismissEvent> for SshConnectionModal {}
+impl EventEmitter<DismissEvent> for RemoteConnectionModal {}
 
-impl ModalView for SshConnectionModal {
+impl ModalView for RemoteConnectionModal {
     fn on_before_dismiss(
         &mut self,
         _window: &mut Window,
@@ -446,13 +455,13 @@ impl ModalView for SshConnectionModal {
 }
 
 #[derive(Clone)]
-pub struct SshClientDelegate {
+pub struct RemoteClientDelegate {
     window: AnyWindowHandle,
-    ui: WeakEntity<SshPrompt>,
+    ui: WeakEntity<RemoteConnectionPrompt>,
     known_password: Option<String>,
 }
 
-impl remote::RemoteClientDelegate for SshClientDelegate {
+impl remote::RemoteClientDelegate for RemoteClientDelegate {
     fn ask_password(&self, prompt: String, tx: oneshot::Sender<String>, cx: &mut AsyncApp) {
         let mut known_password = self.known_password.clone();
         if let Some(password) = known_password.take() {
@@ -522,7 +531,7 @@ impl remote::RemoteClientDelegate for SshClientDelegate {
     }
 }
 
-impl SshClientDelegate {
+impl RemoteClientDelegate {
     fn update_status(&self, status: Option<&str>, cx: &mut AsyncApp) {
         self.window
             .update(cx, |_, _, cx| {
@@ -534,14 +543,10 @@ impl SshClientDelegate {
     }
 }
 
-pub fn is_connecting_over_ssh(workspace: &Workspace, cx: &App) -> bool {
-    workspace.active_modal::<SshConnectionModal>(cx).is_some()
-}
-
 pub fn connect_over_ssh(
     unique_identifier: ConnectionIdentifier,
     connection_options: SshConnectionOptions,
-    ui: Entity<SshPrompt>,
+    ui: Entity<RemoteConnectionPrompt>,
     window: &mut Window,
     cx: &mut App,
 ) -> Task<Result<Option<Entity<RemoteClient>>>> {
@@ -554,7 +559,7 @@ pub fn connect_over_ssh(
         unique_identifier,
         connection_options,
         rx,
-        Arc::new(SshClientDelegate {
+        Arc::new(RemoteClientDelegate {
             window,
             ui: ui.downgrade(),
             known_password,
@@ -563,8 +568,8 @@ pub fn connect_over_ssh(
     )
 }
 
-pub async fn open_ssh_project(
-    connection_options: SshConnectionOptions,
+pub async fn open_remote_project(
+    connection_options: RemoteConnectionOptions,
     paths: Vec<PathBuf>,
     app_state: Arc<AppState>,
     open_options: workspace::OpenOptions,
@@ -575,13 +580,7 @@ pub async fn open_ssh_project(
     } else {
         let workspace_position = cx
             .update(|cx| {
-                workspace::ssh_workspace_position_from_db(
-                    connection_options.host.clone(),
-                    connection_options.port,
-                    connection_options.username.clone(),
-                    &paths,
-                    cx,
-                )
+                workspace::remote_workspace_position_from_db(connection_options.clone(), &paths, cx)
             })?
             .await
             .context("fetching ssh workspace position from db")?;
@@ -611,16 +610,16 @@ pub async fn open_ssh_project(
     loop {
         let (cancel_tx, cancel_rx) = oneshot::channel();
         let delegate = window.update(cx, {
-            let connection_options = connection_options.clone();
             let paths = paths.clone();
+            let connection_options = connection_options.clone();
             move |workspace, window, cx| {
                 window.activate_window();
                 workspace.toggle_modal(window, cx, |window, cx| {
-                    SshConnectionModal::new(&connection_options, paths, window, cx)
+                    RemoteConnectionModal::new(&connection_options, paths, window, cx)
                 });
 
                 let ui = workspace
-                    .active_modal::<SshConnectionModal>(cx)?
+                    .active_modal::<RemoteConnectionModal>(cx)?
                     .read(cx)
                     .prompt
                     .clone();
@@ -629,19 +628,25 @@ pub async fn open_ssh_project(
                     ui.set_cancellation_tx(cancel_tx);
                 });
 
-                Some(Arc::new(SshClientDelegate {
+                Some(Arc::new(RemoteClientDelegate {
                     window: window.window_handle(),
                     ui: ui.downgrade(),
-                    known_password: connection_options.password.clone(),
+                    known_password: if let RemoteConnectionOptions::Ssh(options) =
+                        &connection_options
+                    {
+                        options.password.clone()
+                    } else {
+                        None
+                    },
                 }))
             }
         })?;
 
         let Some(delegate) = delegate else { break };
 
-        let did_open_ssh_project = cx
+        let did_open_project = cx
             .update(|cx| {
-                workspace::open_ssh_project_with_new_connection(
+                workspace::open_remote_project_with_new_connection(
                     window,
                     connection_options.clone(),
                     cancel_rx,
@@ -655,19 +660,22 @@ pub async fn open_ssh_project(
 
         window
             .update(cx, |workspace, _, cx| {
-                if let Some(ui) = workspace.active_modal::<SshConnectionModal>(cx) {
+                if let Some(ui) = workspace.active_modal::<RemoteConnectionModal>(cx) {
                     ui.update(cx, |modal, cx| modal.finished(cx))
                 }
             })
             .ok();
 
-        if let Err(e) = did_open_ssh_project {
+        if let Err(e) = did_open_project {
             log::error!("Failed to open project: {e:?}");
             let response = window
                 .update(cx, |_, window, cx| {
                     window.prompt(
                         PromptLevel::Critical,
-                        "Failed to connect over SSH",
+                        match connection_options {
+                            RemoteConnectionOptions::Ssh(_) => "Failed to connect over SSH",
+                            RemoteConnectionOptions::Wsl(_) => "Failed to connect to WSL",
+                        },
                         Some(&e.to_string()),
                         &["Retry", "Ok"],
                         cx,

crates/recent_projects/src/remote_servers.rs 🔗

@@ -1,70 +1,52 @@
-use std::any::Any;
-use std::borrow::Cow;
-use std::collections::BTreeSet;
-use std::path::PathBuf;
-use std::rc::Rc;
-use std::sync::Arc;
-use std::sync::atomic;
-use std::sync::atomic::AtomicUsize;
-
+use crate::{
+    remote_connections::{
+        RemoteConnectionModal, RemoteConnectionPrompt, RemoteSettingsContent, SshConnection,
+        SshConnectionHeader, SshProject, SshSettings, connect_over_ssh, open_remote_project,
+    },
+    ssh_config::parse_ssh_config_hosts,
+};
 use editor::Editor;
 use file_finder::OpenPathDelegate;
-use futures::FutureExt;
-use futures::channel::oneshot;
-use futures::future::Shared;
-use futures::select;
-use gpui::ClickEvent;
-use gpui::ClipboardItem;
-use gpui::Subscription;
-use gpui::Task;
-use gpui::WeakEntity;
-use gpui::canvas;
+use futures::{FutureExt, channel::oneshot, future::Shared, select};
 use gpui::{
-    AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
-    PromptLevel, ScrollHandle, Window,
+    AnyElement, App, ClickEvent, ClipboardItem, Context, DismissEvent, Entity, EventEmitter,
+    FocusHandle, Focusable, PromptLevel, ScrollHandle, Subscription, Task, WeakEntity, Window,
+    canvas,
 };
-use paths::global_ssh_config_file;
-use paths::user_ssh_config_file;
+use paths::{global_ssh_config_file, user_ssh_config_file};
 use picker::Picker;
-use project::Fs;
-use project::Project;
-use remote::remote_client::ConnectionIdentifier;
-use remote::{RemoteClient, SshConnectionOptions};
-use settings::Settings;
-use settings::SettingsStore;
-use settings::update_settings_file;
-use settings::watch_config_file;
+use project::{Fs, Project};
+use remote::{
+    RemoteClient, RemoteConnectionOptions, SshConnectionOptions,
+    remote_client::ConnectionIdentifier,
+};
+use settings::{Settings, SettingsStore, update_settings_file, watch_config_file};
 use smol::stream::StreamExt as _;
-use ui::Navigable;
-use ui::NavigableEntry;
+use std::{
+    any::Any,
+    borrow::Cow,
+    collections::BTreeSet,
+    path::PathBuf,
+    rc::Rc,
+    sync::{
+        Arc,
+        atomic::{self, AtomicUsize},
+    },
+};
 use ui::{
-    IconButtonShape, List, ListItem, ListSeparator, Modal, ModalHeader, Scrollbar, ScrollbarState,
-    Section, Tooltip, prelude::*,
+    IconButtonShape, List, ListItem, ListSeparator, Modal, ModalHeader, Navigable, NavigableEntry,
+    Scrollbar, ScrollbarState, Section, Tooltip, prelude::*,
 };
 use util::{
     ResultExt,
     paths::{PathStyle, RemotePathBuf},
 };
-use workspace::OpenOptions;
-use workspace::Toast;
-use workspace::notifications::NotificationId;
 use workspace::{
-    ModalView, Workspace, notifications::DetachAndPromptErr,
-    open_ssh_project_with_existing_connection,
+    ModalView, OpenOptions, Toast, Workspace,
+    notifications::{DetachAndPromptErr, NotificationId},
+    open_remote_project_with_existing_connection,
 };
 
-use crate::ssh_config::parse_ssh_config_hosts;
-use crate::ssh_connections::RemoteSettingsContent;
-use crate::ssh_connections::SshConnection;
-use crate::ssh_connections::SshConnectionHeader;
-use crate::ssh_connections::SshConnectionModal;
-use crate::ssh_connections::SshProject;
-use crate::ssh_connections::SshPrompt;
-use crate::ssh_connections::SshSettings;
-use crate::ssh_connections::connect_over_ssh;
-use crate::ssh_connections::open_ssh_project;
-
-mod navigation_base {}
 pub struct RemoteServerProjects {
     mode: Mode,
     focus_handle: FocusHandle,
@@ -79,7 +61,7 @@ pub struct RemoteServerProjects {
 struct CreateRemoteServer {
     address_editor: Entity<Editor>,
     address_error: Option<SharedString>,
-    ssh_prompt: Option<Entity<SshPrompt>>,
+    ssh_prompt: Option<Entity<RemoteConnectionPrompt>>,
     _creating: Option<Task<Option<()>>>,
 }
 
@@ -222,8 +204,13 @@ impl ProjectPicker {
                         })
                         .log_err()?;
 
-                    open_ssh_project_with_existing_connection(
-                        connection, project, paths, app_state, window, cx,
+                    open_remote_project_with_existing_connection(
+                        RemoteConnectionOptions::Ssh(connection),
+                        project,
+                        paths,
+                        app_state,
+                        window,
+                        cx,
                     )
                     .await
                     .log_err();
@@ -472,7 +459,14 @@ impl RemoteServerProjects {
                 return;
             }
         };
-        let ssh_prompt = cx.new(|cx| SshPrompt::new(&connection_options, window, cx));
+        let ssh_prompt = cx.new(|cx| {
+            RemoteConnectionPrompt::new(
+                connection_options.connection_string(),
+                connection_options.nickname.clone(),
+                window,
+                cx,
+            )
+        });
 
         let connection = connect_over_ssh(
             ConnectionIdentifier::setup(),
@@ -552,15 +546,20 @@ impl RemoteServerProjects {
         };
 
         let create_new_window = self.create_new_window;
-        let connection_options = ssh_connection.into();
+        let connection_options: SshConnectionOptions = ssh_connection.into();
         workspace.update(cx, |_, cx| {
             cx.defer_in(window, move |workspace, window, cx| {
                 let app_state = workspace.app_state().clone();
                 workspace.toggle_modal(window, cx, |window, cx| {
-                    SshConnectionModal::new(&connection_options, Vec::new(), window, cx)
+                    RemoteConnectionModal::new(
+                        &RemoteConnectionOptions::Ssh(connection_options.clone()),
+                        Vec::new(),
+                        window,
+                        cx,
+                    )
                 });
                 let prompt = workspace
-                    .active_modal::<SshConnectionModal>(cx)
+                    .active_modal::<RemoteConnectionModal>(cx)
                     .unwrap()
                     .read(cx)
                     .prompt
@@ -579,7 +578,7 @@ impl RemoteServerProjects {
                     let session = connect.await;
 
                     workspace.update(cx, |workspace, cx| {
-                        if let Some(prompt) = workspace.active_modal::<SshConnectionModal>(cx) {
+                        if let Some(prompt) = workspace.active_modal::<RemoteConnectionModal>(cx) {
                             prompt.update(cx, |prompt, cx| prompt.finished(cx))
                         }
                     })?;
@@ -898,8 +897,8 @@ impl RemoteServerProjects {
                 };
 
                 cx.spawn_in(window, async move |_, cx| {
-                    let result = open_ssh_project(
-                        server.into(),
+                    let result = open_remote_project(
+                        RemoteConnectionOptions::Ssh(server.into()),
                         project.paths.into_iter().map(PathBuf::from).collect(),
                         app_state,
                         OpenOptions {

crates/remote/src/remote.rs 🔗

@@ -6,6 +6,7 @@ mod transport;
 
 pub use remote_client::{
     ConnectionIdentifier, ConnectionState, RemoteClient, RemoteClientDelegate, RemoteClientEvent,
-    RemotePlatform,
+    RemoteConnectionOptions, RemotePlatform,
 };
 pub use transport::ssh::{SshConnectionOptions, SshPortForwardOption};
+pub use transport::wsl::WslConnectionOptions;

crates/remote/src/remote_client.rs 🔗

@@ -1,6 +1,11 @@
 use crate::{
-    SshConnectionOptions, protocol::MessageId, proxy::ProxyLaunchError,
-    transport::ssh::SshRemoteConnection,
+    SshConnectionOptions,
+    protocol::MessageId,
+    proxy::ProxyLaunchError,
+    transport::{
+        ssh::SshRemoteConnection,
+        wsl::{WslConnectionOptions, WslRemoteConnection},
+    },
 };
 use anyhow::{Context as _, Result, anyhow};
 use async_trait::async_trait;
@@ -237,7 +242,7 @@ impl From<&State> for ConnectionState {
 pub struct RemoteClient {
     client: Arc<ChannelClient>,
     unique_identifier: String,
-    connection_options: SshConnectionOptions,
+    connection_options: RemoteConnectionOptions,
     path_style: PathStyle,
     state: Option<State>,
 }
@@ -290,6 +295,22 @@ impl RemoteClient {
         cancellation: oneshot::Receiver<()>,
         delegate: Arc<dyn RemoteClientDelegate>,
         cx: &mut App,
+    ) -> Task<Result<Option<Entity<Self>>>> {
+        Self::new(
+            unique_identifier,
+            RemoteConnectionOptions::Ssh(connection_options),
+            cancellation,
+            delegate,
+            cx,
+        )
+    }
+
+    pub fn new(
+        unique_identifier: ConnectionIdentifier,
+        connection_options: RemoteConnectionOptions,
+        cancellation: oneshot::Receiver<()>,
+        delegate: Arc<dyn RemoteClientDelegate>,
+        cx: &mut App,
     ) -> Task<Result<Option<Entity<Self>>>> {
         let unique_identifier = unique_identifier.to_string(cx);
         cx.spawn(async move |cx| {
@@ -424,7 +445,7 @@ impl RemoteClient {
         }
 
         let state = self.state.take().unwrap();
-        let (attempts, ssh_connection, delegate) = match state {
+        let (attempts, remote_connection, delegate) = match state {
             State::Connected {
                 ssh_connection,
                 delegate,
@@ -482,15 +503,15 @@ impl RemoteClient {
                 };
             }
 
-            if let Err(error) = ssh_connection
+            if let Err(error) = remote_connection
                 .kill()
                 .await
                 .context("Failed to kill ssh process")
             {
-                failed!(error, attempts, ssh_connection, delegate);
+                failed!(error, attempts, remote_connection, delegate);
             };
 
-            let connection_options = ssh_connection.connection_options();
+            let connection_options = remote_connection.connection_options();
 
             let (outgoing_tx, outgoing_rx) = mpsc::unbounded::<Envelope>();
             let (incoming_tx, incoming_rx) = mpsc::unbounded::<Envelope>();
@@ -519,7 +540,7 @@ impl RemoteClient {
             {
                 Ok((ssh_connection, io_task)) => (ssh_connection, io_task),
                 Err(error) => {
-                    failed!(error, attempts, ssh_connection, delegate);
+                    failed!(error, attempts, remote_connection, delegate);
                 }
             };
 
@@ -751,13 +772,19 @@ impl RemoteClient {
         Some(self.state.as_ref()?.remote_connection()?.shell())
     }
 
+    pub fn shares_network_interface(&self) -> bool {
+        self.state
+            .as_ref()
+            .and_then(|state| state.remote_connection())
+            .map_or(false, |connection| connection.shares_network_interface())
+    }
+
     pub fn build_command(
         &self,
         program: Option<String>,
         args: &[String],
         env: &HashMap<String, String>,
         working_dir: Option<String>,
-        activation_script: Option<String>,
         port_forward: Option<(u16, String, u16)>,
     ) -> Result<CommandTemplate> {
         let Some(connection) = self
@@ -767,14 +794,7 @@ impl RemoteClient {
         else {
             return Err(anyhow!("no connection"));
         };
-        connection.build_command(
-            program,
-            args,
-            env,
-            working_dir,
-            activation_script,
-            port_forward,
-        )
+        connection.build_command(program, args, env, working_dir, port_forward)
     }
 
     pub fn upload_directory(
@@ -797,11 +817,7 @@ impl RemoteClient {
         self.client.clone().into()
     }
 
-    pub fn host(&self) -> String {
-        self.connection_options.host.clone()
-    }
-
-    pub fn connection_options(&self) -> SshConnectionOptions {
+    pub fn connection_options(&self) -> RemoteConnectionOptions {
         self.connection_options.clone()
     }
 
@@ -844,14 +860,14 @@ impl RemoteClient {
     pub fn fake_server(
         client_cx: &mut gpui::TestAppContext,
         server_cx: &mut gpui::TestAppContext,
-    ) -> (SshConnectionOptions, AnyProtoClient) {
+    ) -> (RemoteConnectionOptions, AnyProtoClient) {
         let port = client_cx
             .update(|cx| cx.default_global::<ConnectionPool>().connections.len() as u16 + 1);
-        let opts = SshConnectionOptions {
+        let opts = RemoteConnectionOptions::Ssh(SshConnectionOptions {
             host: "<fake>".to_string(),
             port: Some(port),
             ..Default::default()
-        };
+        });
         let (outgoing_tx, _) = mpsc::unbounded::<Envelope>();
         let (_, incoming_rx) = mpsc::unbounded::<Envelope>();
         let server_client =
@@ -882,13 +898,13 @@ impl RemoteClient {
 
     #[cfg(any(test, feature = "test-support"))]
     pub async fn fake_client(
-        opts: SshConnectionOptions,
+        opts: RemoteConnectionOptions,
         client_cx: &mut gpui::TestAppContext,
     ) -> Entity<Self> {
         let (_tx, rx) = oneshot::channel();
         client_cx
             .update(|cx| {
-                Self::ssh(
+                Self::new(
                     ConnectionIdentifier::setup(),
                     opts,
                     rx,
@@ -909,7 +925,7 @@ enum ConnectionPoolEntry {
 
 #[derive(Default)]
 struct ConnectionPool {
-    connections: HashMap<SshConnectionOptions, ConnectionPoolEntry>,
+    connections: HashMap<RemoteConnectionOptions, ConnectionPoolEntry>,
 }
 
 impl Global for ConnectionPool {}
@@ -917,7 +933,7 @@ impl Global for ConnectionPool {}
 impl ConnectionPool {
     pub fn connect(
         &mut self,
-        opts: SshConnectionOptions,
+        opts: RemoteConnectionOptions,
         delegate: &Arc<dyn RemoteClientDelegate>,
         cx: &mut App,
     ) -> Shared<Task<Result<Arc<dyn RemoteConnection>, Arc<anyhow::Error>>>> {
@@ -947,9 +963,18 @@ impl ConnectionPool {
                 let opts = opts.clone();
                 let delegate = delegate.clone();
                 async move |cx| {
-                    let connection = SshRemoteConnection::new(opts.clone(), delegate, cx)
-                        .await
-                        .map(|connection| Arc::new(connection) as Arc<dyn RemoteConnection>);
+                    let connection = match opts.clone() {
+                        RemoteConnectionOptions::Ssh(opts) => {
+                            SshRemoteConnection::new(opts, delegate, cx)
+                                .await
+                                .map(|connection| Arc::new(connection) as Arc<dyn RemoteConnection>)
+                        }
+                        RemoteConnectionOptions::Wsl(opts) => {
+                            WslRemoteConnection::new(opts, delegate, cx)
+                                .await
+                                .map(|connection| Arc::new(connection) as Arc<dyn RemoteConnection>)
+                        }
+                    };
 
                     cx.update_global(|pool: &mut Self, _| {
                         debug_assert!(matches!(
@@ -980,6 +1005,33 @@ impl ConnectionPool {
     }
 }
 
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum RemoteConnectionOptions {
+    Ssh(SshConnectionOptions),
+    Wsl(WslConnectionOptions),
+}
+
+impl RemoteConnectionOptions {
+    pub fn display_name(&self) -> String {
+        match self {
+            RemoteConnectionOptions::Ssh(opts) => opts.host.clone(),
+            RemoteConnectionOptions::Wsl(opts) => opts.distro_name.clone(),
+        }
+    }
+}
+
+impl From<SshConnectionOptions> for RemoteConnectionOptions {
+    fn from(opts: SshConnectionOptions) -> Self {
+        RemoteConnectionOptions::Ssh(opts)
+    }
+}
+
+impl From<WslConnectionOptions> for RemoteConnectionOptions {
+    fn from(opts: WslConnectionOptions) -> Self {
+        RemoteConnectionOptions::Wsl(opts)
+    }
+}
+
 #[async_trait(?Send)]
 pub(crate) trait RemoteConnection: Send + Sync {
     fn start_proxy(
@@ -1000,16 +1052,18 @@ pub(crate) trait RemoteConnection: Send + Sync {
     ) -> Task<Result<()>>;
     async fn kill(&self) -> Result<()>;
     fn has_been_killed(&self) -> bool;
+    fn shares_network_interface(&self) -> bool {
+        false
+    }
     fn build_command(
         &self,
         program: Option<String>,
         args: &[String],
         env: &HashMap<String, String>,
         working_dir: Option<String>,
-        activation_script: Option<String>,
         port_forward: Option<(u16, String, u16)>,
     ) -> Result<CommandTemplate>;
-    fn connection_options(&self) -> SshConnectionOptions;
+    fn connection_options(&self) -> RemoteConnectionOptions;
     fn path_style(&self) -> PathStyle;
     fn shell(&self) -> String;
 
@@ -1126,7 +1180,7 @@ impl ChannelClient {
                                     }
                                     Err(error) => {
                                         log::error!(
-                                            "{}:error handling message. type:{}, error:{}",
+                                            "{}:error handling message. type:{}, error:{:#}",
                                             this.name,
                                             type_name,
                                             format!("{error:#}").lines().fold(
@@ -1316,7 +1370,7 @@ impl ProtoClient for ChannelClient {
 #[cfg(any(test, feature = "test-support"))]
 mod fake {
     use super::{ChannelClient, RemoteClientDelegate, RemoteConnection, RemotePlatform};
-    use crate::{SshConnectionOptions, remote_client::CommandTemplate};
+    use crate::remote_client::{CommandTemplate, RemoteConnectionOptions};
     use anyhow::Result;
     use async_trait::async_trait;
     use collections::HashMap;
@@ -1335,7 +1389,7 @@ mod fake {
     use util::paths::{PathStyle, RemotePathBuf};
 
     pub(super) struct FakeRemoteConnection {
-        pub(super) connection_options: SshConnectionOptions,
+        pub(super) connection_options: RemoteConnectionOptions,
         pub(super) server_channel: Arc<ChannelClient>,
         pub(super) server_cx: SendableCx,
     }
@@ -1373,7 +1427,6 @@ mod fake {
             args: &[String],
             env: &HashMap<String, String>,
             _: Option<String>,
-            _: Option<String>,
             _: Option<(u16, String, u16)>,
         ) -> Result<CommandTemplate> {
             let ssh_program = program.unwrap_or_else(|| "sh".to_string());
@@ -1396,7 +1449,7 @@ mod fake {
             unreachable!()
         }
 
-        fn connection_options(&self) -> SshConnectionOptions {
+        fn connection_options(&self) -> RemoteConnectionOptions {
             self.connection_options.clone()
         }
 

crates/remote/src/transport.rs 🔗

@@ -1 +1,336 @@
+use crate::{
+    json_log::LogRecord,
+    protocol::{MESSAGE_LEN_SIZE, message_len_from_buffer, read_message_with_len, write_message},
+};
+use anyhow::{Context as _, Result};
+use futures::{
+    AsyncReadExt as _, FutureExt as _, StreamExt as _,
+    channel::mpsc::{Sender, UnboundedReceiver, UnboundedSender},
+};
+use gpui::{AppContext as _, AsyncApp, Task};
+use rpc::proto::Envelope;
+use smol::process::Child;
+
 pub mod ssh;
+pub mod wsl;
+
+fn handle_rpc_messages_over_child_process_stdio(
+    mut ssh_proxy_process: Child,
+    incoming_tx: UnboundedSender<Envelope>,
+    mut outgoing_rx: UnboundedReceiver<Envelope>,
+    mut connection_activity_tx: Sender<()>,
+    cx: &AsyncApp,
+) -> Task<Result<i32>> {
+    let mut child_stderr = ssh_proxy_process.stderr.take().unwrap();
+    let mut child_stdout = ssh_proxy_process.stdout.take().unwrap();
+    let mut child_stdin = ssh_proxy_process.stdin.take().unwrap();
+
+    let mut stdin_buffer = Vec::new();
+    let mut stdout_buffer = Vec::new();
+    let mut stderr_buffer = Vec::new();
+    let mut stderr_offset = 0;
+
+    let stdin_task = cx.background_spawn(async move {
+        while let Some(outgoing) = outgoing_rx.next().await {
+            write_message(&mut child_stdin, &mut stdin_buffer, outgoing).await?;
+        }
+        anyhow::Ok(())
+    });
+
+    let stdout_task = cx.background_spawn({
+        let mut connection_activity_tx = connection_activity_tx.clone();
+        async move {
+            loop {
+                stdout_buffer.resize(MESSAGE_LEN_SIZE, 0);
+                let len = child_stdout.read(&mut stdout_buffer).await?;
+
+                if len == 0 {
+                    return anyhow::Ok(());
+                }
+
+                if len < MESSAGE_LEN_SIZE {
+                    child_stdout.read_exact(&mut stdout_buffer[len..]).await?;
+                }
+
+                let message_len = message_len_from_buffer(&stdout_buffer);
+                let envelope =
+                    read_message_with_len(&mut child_stdout, &mut stdout_buffer, message_len)
+                        .await?;
+                connection_activity_tx.try_send(()).ok();
+                incoming_tx.unbounded_send(envelope).ok();
+            }
+        }
+    });
+
+    let stderr_task: Task<anyhow::Result<()>> = cx.background_spawn(async move {
+        loop {
+            stderr_buffer.resize(stderr_offset + 1024, 0);
+
+            let len = child_stderr
+                .read(&mut stderr_buffer[stderr_offset..])
+                .await?;
+            if len == 0 {
+                return anyhow::Ok(());
+            }
+
+            stderr_offset += len;
+            let mut start_ix = 0;
+            while let Some(ix) = stderr_buffer[start_ix..stderr_offset]
+                .iter()
+                .position(|b| b == &b'\n')
+            {
+                let line_ix = start_ix + ix;
+                let content = &stderr_buffer[start_ix..line_ix];
+                start_ix = line_ix + 1;
+                if let Ok(record) = serde_json::from_slice::<LogRecord>(content) {
+                    record.log(log::logger())
+                } else {
+                    eprintln!("(remote) {}", String::from_utf8_lossy(content));
+                }
+            }
+            stderr_buffer.drain(0..start_ix);
+            stderr_offset -= start_ix;
+
+            connection_activity_tx.try_send(()).ok();
+        }
+    });
+
+    cx.background_spawn(async move {
+        let result = futures::select! {
+            result = stdin_task.fuse() => {
+                result.context("stdin")
+            }
+            result = stdout_task.fuse() => {
+                result.context("stdout")
+            }
+            result = stderr_task.fuse() => {
+                result.context("stderr")
+            }
+        };
+
+        let status = ssh_proxy_process.status().await?.code().unwrap_or(1);
+        match result {
+            Ok(_) => Ok(status),
+            Err(error) => Err(error),
+        }
+    })
+}
+
+#[cfg(debug_assertions)]
+async fn build_remote_server_from_source(
+    platform: &crate::RemotePlatform,
+    delegate: &dyn crate::RemoteClientDelegate,
+    cx: &mut AsyncApp,
+) -> Result<Option<std::path::PathBuf>> {
+    use std::path::Path;
+
+    let Some(build_remote_server) = std::env::var("ZED_BUILD_REMOTE_SERVER").ok() else {
+        return Ok(None);
+    };
+
+    use smol::process::{Command, Stdio};
+    use std::env::VarError;
+
+    async fn run_cmd(command: &mut Command) -> Result<()> {
+        let output = command
+            .kill_on_drop(true)
+            .stderr(Stdio::inherit())
+            .output()
+            .await?;
+        anyhow::ensure!(
+            output.status.success(),
+            "Failed to run command: {command:?}"
+        );
+        Ok(())
+    }
+
+    let use_musl = !build_remote_server.contains("nomusl");
+    let triple = format!(
+        "{}-{}",
+        platform.arch,
+        match platform.os {
+            "linux" =>
+                if use_musl {
+                    "unknown-linux-musl"
+                } else {
+                    "unknown-linux-gnu"
+                },
+            "macos" => "apple-darwin",
+            _ => anyhow::bail!("can't cross compile for: {:?}", platform),
+        }
+    );
+    let mut rust_flags = match std::env::var("RUSTFLAGS") {
+        Ok(val) => val,
+        Err(VarError::NotPresent) => String::new(),
+        Err(e) => {
+            log::error!("Failed to get env var `RUSTFLAGS` value: {e}");
+            String::new()
+        }
+    };
+    if platform.os == "linux" && use_musl {
+        rust_flags.push_str(" -C target-feature=+crt-static");
+    }
+    if build_remote_server.contains("mold") {
+        rust_flags.push_str(" -C link-arg=-fuse-ld=mold");
+    }
+
+    if platform.arch == std::env::consts::ARCH && platform.os == std::env::consts::OS {
+        delegate.set_status(Some("Building remote server binary from source"), cx);
+        log::info!("building remote server binary from source");
+        run_cmd(
+            Command::new("cargo")
+                .args([
+                    "build",
+                    "--package",
+                    "remote_server",
+                    "--features",
+                    "debug-embed",
+                    "--target-dir",
+                    "target/remote_server",
+                    "--target",
+                    &triple,
+                ])
+                .env("RUSTFLAGS", &rust_flags),
+        )
+        .await?;
+    } else if build_remote_server.contains("cross") {
+        #[cfg(target_os = "windows")]
+        use util::paths::SanitizedPath;
+
+        delegate.set_status(Some("Installing cross.rs for cross-compilation"), cx);
+        log::info!("installing cross");
+        run_cmd(Command::new("cargo").args([
+            "install",
+            "cross",
+            "--git",
+            "https://github.com/cross-rs/cross",
+        ]))
+        .await?;
+
+        delegate.set_status(
+            Some(&format!(
+                "Building remote server binary from source for {} with Docker",
+                &triple
+            )),
+            cx,
+        );
+        log::info!("building remote server binary from source for {}", &triple);
+
+        // On Windows, the binding needs to be set to the canonical path
+        #[cfg(target_os = "windows")]
+        let src = SanitizedPath::new(&smol::fs::canonicalize("./target").await?).to_glob_string();
+        #[cfg(not(target_os = "windows"))]
+        let src = "./target";
+
+        run_cmd(
+            Command::new("cross")
+                .args([
+                    "build",
+                    "--package",
+                    "remote_server",
+                    "--features",
+                    "debug-embed",
+                    "--target-dir",
+                    "target/remote_server",
+                    "--target",
+                    &triple,
+                ])
+                .env(
+                    "CROSS_CONTAINER_OPTS",
+                    format!("--mount type=bind,src={src},dst=/app/target"),
+                )
+                .env("RUSTFLAGS", &rust_flags),
+        )
+        .await?;
+    } else {
+        let which = cx
+            .background_spawn(async move { which::which("zig") })
+            .await;
+
+        if which.is_err() {
+            #[cfg(not(target_os = "windows"))]
+            {
+                anyhow::bail!(
+                    "zig not found on $PATH, install zig (see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross"
+                )
+            }
+            #[cfg(target_os = "windows")]
+            {
+                anyhow::bail!(
+                    "zig not found on $PATH, install zig (use `winget install -e --id zig.zig` or see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross"
+                )
+            }
+        }
+
+        delegate.set_status(Some("Adding rustup target for cross-compilation"), cx);
+        log::info!("adding rustup target");
+        run_cmd(Command::new("rustup").args(["target", "add"]).arg(&triple)).await?;
+
+        delegate.set_status(Some("Installing cargo-zigbuild for cross-compilation"), cx);
+        log::info!("installing cargo-zigbuild");
+        run_cmd(Command::new("cargo").args(["install", "--locked", "cargo-zigbuild"])).await?;
+
+        delegate.set_status(
+            Some(&format!(
+                "Building remote binary from source for {triple} with Zig"
+            )),
+            cx,
+        );
+        log::info!("building remote binary from source for {triple} with Zig");
+        run_cmd(
+            Command::new("cargo")
+                .args([
+                    "zigbuild",
+                    "--package",
+                    "remote_server",
+                    "--features",
+                    "debug-embed",
+                    "--target-dir",
+                    "target/remote_server",
+                    "--target",
+                    &triple,
+                ])
+                .env("RUSTFLAGS", &rust_flags),
+        )
+        .await?;
+    };
+    let bin_path = Path::new("target")
+        .join("remote_server")
+        .join(&triple)
+        .join("debug")
+        .join("remote_server");
+
+    let path = if !build_remote_server.contains("nocompress") {
+        delegate.set_status(Some("Compressing binary"), cx);
+
+        #[cfg(not(target_os = "windows"))]
+        {
+            run_cmd(Command::new("gzip").args(["-f", &bin_path.to_string_lossy()])).await?;
+        }
+
+        #[cfg(target_os = "windows")]
+        {
+            // On Windows, we use 7z to compress the binary
+            let seven_zip = which::which("7z.exe").context("7z.exe not found on $PATH, install it (e.g. with `winget install -e --id 7zip.7zip`) or, if you don't want this behaviour, set $env:ZED_BUILD_REMOTE_SERVER=\"nocompress\"")?;
+            let gz_path = format!("target/remote_server/{}/debug/remote_server.gz", triple);
+            if smol::fs::metadata(&gz_path).await.is_ok() {
+                smol::fs::remove_file(&gz_path).await?;
+            }
+            run_cmd(Command::new(seven_zip).args([
+                "a",
+                "-tgzip",
+                &gz_path,
+                &bin_path.to_string_lossy(),
+            ]))
+            .await?;
+        }
+
+        let mut archive_path = bin_path;
+        archive_path.set_extension("gz");
+        std::env::current_dir()?.join(archive_path)
+    } else {
+        bin_path
+    };
+
+    Ok(Some(path))
+}

crates/remote/src/transport/ssh.rs 🔗

@@ -1,14 +1,12 @@
 use crate::{
     RemoteClientDelegate, RemotePlatform,
-    json_log::LogRecord,
-    protocol::{MESSAGE_LEN_SIZE, message_len_from_buffer, read_message_with_len, write_message},
-    remote_client::{CommandTemplate, RemoteConnection},
+    remote_client::{CommandTemplate, RemoteConnection, RemoteConnectionOptions},
 };
 use anyhow::{Context as _, Result, anyhow};
 use async_trait::async_trait;
 use collections::HashMap;
 use futures::{
-    AsyncReadExt as _, FutureExt as _, StreamExt as _,
+    AsyncReadExt as _, FutureExt as _,
     channel::mpsc::{Sender, UnboundedReceiver, UnboundedSender},
     select_biased,
 };
@@ -30,10 +28,7 @@ use std::{
     time::Instant,
 };
 use tempfile::TempDir;
-use util::{
-    get_default_system_shell,
-    paths::{PathStyle, RemotePathBuf},
-};
+use util::paths::{PathStyle, RemotePathBuf};
 
 pub(crate) struct SshRemoteConnection {
     socket: SshSocket,
@@ -102,8 +97,8 @@ impl RemoteConnection for SshRemoteConnection {
         self.master_process.lock().is_none()
     }
 
-    fn connection_options(&self) -> SshConnectionOptions {
-        self.socket.connection_options.clone()
+    fn connection_options(&self) -> RemoteConnectionOptions {
+        RemoteConnectionOptions::Ssh(self.socket.connection_options.clone())
     }
 
     fn shell(&self) -> String {
@@ -116,7 +111,6 @@ impl RemoteConnection for SshRemoteConnection {
         input_args: &[String],
         input_env: &HashMap<String, String>,
         working_dir: Option<String>,
-        activation_script: Option<String>,
         port_forward: Option<(u16, String, u16)>,
     ) -> Result<CommandTemplate> {
         use std::fmt::Write as _;
@@ -129,18 +123,16 @@ impl RemoteConnection for SshRemoteConnection {
             // shlex will wrap the command in single quotes (''), disabling ~ expansion,
             // replace ith with something that works
             const TILDE_PREFIX: &'static str = "~/";
-            if working_dir.starts_with(TILDE_PREFIX) {
+            let working_dir = if working_dir.starts_with(TILDE_PREFIX) {
                 let working_dir = working_dir.trim_start_matches("~").trim_start_matches("/");
-                write!(&mut script, "cd \"$HOME/{working_dir}\"; ").unwrap();
+                format!("$HOME/{working_dir}")
             } else {
-                write!(&mut script, "cd \"{working_dir}\"; ").unwrap();
-            }
+                working_dir
+            };
+            write!(&mut script, "cd \"{working_dir}\"; ",).unwrap();
         } else {
             write!(&mut script, "cd; ").unwrap();
         };
-        if let Some(activation_script) = activation_script {
-            write!(&mut script, " {activation_script};").unwrap();
-        }
 
         for (k, v) in input_env.iter() {
             if let Some((k, v)) = shlex::try_quote(k).ok().zip(shlex::try_quote(v).ok()) {
@@ -162,8 +154,7 @@ impl RemoteConnection for SshRemoteConnection {
             write!(&mut script, "exec {shell} -l").unwrap();
         };
 
-        let sys_shell = get_default_system_shell();
-        let shell_invocation = format!("{sys_shell} -c {}", shlex::try_quote(&script).unwrap());
+        let shell_invocation = format!("{shell} -c {}", shlex::try_quote(&script).unwrap());
 
         let mut args = Vec::new();
         args.extend(self.socket.ssh_args());
@@ -274,7 +265,7 @@ impl RemoteConnection for SshRemoteConnection {
             }
         };
 
-        Self::multiplex(
+        super::handle_rpc_messages_over_child_process_stdio(
             ssh_proxy_process,
             incoming_tx,
             outgoing_rx,
@@ -422,109 +413,6 @@ impl SshRemoteConnection {
         Ok(this)
     }
 
-    fn multiplex(
-        mut ssh_proxy_process: Child,
-        incoming_tx: UnboundedSender<Envelope>,
-        mut outgoing_rx: UnboundedReceiver<Envelope>,
-        mut connection_activity_tx: Sender<()>,
-        cx: &AsyncApp,
-    ) -> Task<Result<i32>> {
-        let mut child_stderr = ssh_proxy_process.stderr.take().unwrap();
-        let mut child_stdout = ssh_proxy_process.stdout.take().unwrap();
-        let mut child_stdin = ssh_proxy_process.stdin.take().unwrap();
-
-        let mut stdin_buffer = Vec::new();
-        let mut stdout_buffer = Vec::new();
-        let mut stderr_buffer = Vec::new();
-        let mut stderr_offset = 0;
-
-        let stdin_task = cx.background_spawn(async move {
-            while let Some(outgoing) = outgoing_rx.next().await {
-                write_message(&mut child_stdin, &mut stdin_buffer, outgoing).await?;
-            }
-            anyhow::Ok(())
-        });
-
-        let stdout_task = cx.background_spawn({
-            let mut connection_activity_tx = connection_activity_tx.clone();
-            async move {
-                loop {
-                    stdout_buffer.resize(MESSAGE_LEN_SIZE, 0);
-                    let len = child_stdout.read(&mut stdout_buffer).await?;
-
-                    if len == 0 {
-                        return anyhow::Ok(());
-                    }
-
-                    if len < MESSAGE_LEN_SIZE {
-                        child_stdout.read_exact(&mut stdout_buffer[len..]).await?;
-                    }
-
-                    let message_len = message_len_from_buffer(&stdout_buffer);
-                    let envelope =
-                        read_message_with_len(&mut child_stdout, &mut stdout_buffer, message_len)
-                            .await?;
-                    connection_activity_tx.try_send(()).ok();
-                    incoming_tx.unbounded_send(envelope).ok();
-                }
-            }
-        });
-
-        let stderr_task: Task<anyhow::Result<()>> = cx.background_spawn(async move {
-            loop {
-                stderr_buffer.resize(stderr_offset + 1024, 0);
-
-                let len = child_stderr
-                    .read(&mut stderr_buffer[stderr_offset..])
-                    .await?;
-                if len == 0 {
-                    return anyhow::Ok(());
-                }
-
-                stderr_offset += len;
-                let mut start_ix = 0;
-                while let Some(ix) = stderr_buffer[start_ix..stderr_offset]
-                    .iter()
-                    .position(|b| b == &b'\n')
-                {
-                    let line_ix = start_ix + ix;
-                    let content = &stderr_buffer[start_ix..line_ix];
-                    start_ix = line_ix + 1;
-                    if let Ok(record) = serde_json::from_slice::<LogRecord>(content) {
-                        record.log(log::logger())
-                    } else {
-                        eprintln!("(remote) {}", String::from_utf8_lossy(content));
-                    }
-                }
-                stderr_buffer.drain(0..start_ix);
-                stderr_offset -= start_ix;
-
-                connection_activity_tx.try_send(()).ok();
-            }
-        });
-
-        cx.background_spawn(async move {
-            let result = futures::select! {
-                result = stdin_task.fuse() => {
-                    result.context("stdin")
-                }
-                result = stdout_task.fuse() => {
-                    result.context("stdout")
-                }
-                result = stderr_task.fuse() => {
-                    result.context("stderr")
-                }
-            };
-
-            let status = ssh_proxy_process.status().await?.code().unwrap_or(1);
-            match result {
-                Ok(_) => Ok(status),
-                Err(error) => Err(error),
-            }
-        })
-    }
-
-    #[allow(unused)]
     async fn ensure_server_binary(
         &self,
         delegate: &Arc<dyn RemoteClientDelegate>,
@@ -551,19 +439,20 @@ impl SshRemoteConnection {
             self.ssh_path_style,
         );
 
-        let build_remote_server = std::env::var("ZED_BUILD_REMOTE_SERVER").ok();
         #[cfg(debug_assertions)]
-        if let Some(build_remote_server) = build_remote_server {
-            let src_path = self.build_local(build_remote_server, delegate, cx).await?;
+        if let Some(remote_server_path) =
+            super::build_remote_server_from_source(&self.ssh_platform, delegate.as_ref(), cx)
+                .await?
+        {
             let tmp_path = RemotePathBuf::new(
                 paths::remote_server_dir_relative().join(format!(
                     "download-{}-{}",
                     std::process::id(),
-                    src_path.file_name().unwrap().to_string_lossy()
+                    remote_server_path.file_name().unwrap().to_string_lossy()
                 )),
                 self.ssh_path_style,
             );
-            self.upload_local_server_binary(&src_path, &tmp_path, delegate, cx)
+            self.upload_local_server_binary(&remote_server_path, &tmp_path, delegate, cx)
                 .await?;
             self.extract_server_binary(&dst_path, &tmp_path, delegate, cx)
                 .await?;
@@ -801,221 +690,6 @@ impl SshRemoteConnection {
         );
         Ok(())
     }
-
-    #[cfg(debug_assertions)]
-    async fn build_local(
-        &self,
-        build_remote_server: String,
-        delegate: &Arc<dyn RemoteClientDelegate>,
-        cx: &mut AsyncApp,
-    ) -> Result<PathBuf> {
-        use smol::process::{Command, Stdio};
-        use std::env::VarError;
-
-        async fn run_cmd(command: &mut Command) -> Result<()> {
-            let output = command
-                .kill_on_drop(true)
-                .stderr(Stdio::inherit())
-                .output()
-                .await?;
-            anyhow::ensure!(
-                output.status.success(),
-                "Failed to run command: {command:?}"
-            );
-            Ok(())
-        }
-
-        let use_musl = !build_remote_server.contains("nomusl");
-        let triple = format!(
-            "{}-{}",
-            self.ssh_platform.arch,
-            match self.ssh_platform.os {
-                "linux" =>
-                    if use_musl {
-                        "unknown-linux-musl"
-                    } else {
-                        "unknown-linux-gnu"
-                    },
-                "macos" => "apple-darwin",
-                _ => anyhow::bail!("can't cross compile for: {:?}", self.ssh_platform),
-            }
-        );
-        let mut rust_flags = match std::env::var("RUSTFLAGS") {
-            Ok(val) => val,
-            Err(VarError::NotPresent) => String::new(),
-            Err(e) => {
-                log::error!("Failed to get env var `RUSTFLAGS` value: {e}");
-                String::new()
-            }
-        };
-        if self.ssh_platform.os == "linux" && use_musl {
-            rust_flags.push_str(" -C target-feature=+crt-static");
-        }
-        if build_remote_server.contains("mold") {
-            rust_flags.push_str(" -C link-arg=-fuse-ld=mold");
-        }
-
-        if self.ssh_platform.arch == std::env::consts::ARCH
-            && self.ssh_platform.os == std::env::consts::OS
-        {
-            delegate.set_status(Some("Building remote server binary from source"), cx);
-            log::info!("building remote server binary from source");
-            run_cmd(
-                Command::new("cargo")
-                    .args([
-                        "build",
-                        "--package",
-                        "remote_server",
-                        "--features",
-                        "debug-embed",
-                        "--target-dir",
-                        "target/remote_server",
-                        "--target",
-                        &triple,
-                    ])
-                    .env("RUSTFLAGS", &rust_flags),
-            )
-            .await?;
-        } else if build_remote_server.contains("cross") {
-            #[cfg(target_os = "windows")]
-            use util::paths::SanitizedPath;
-
-            delegate.set_status(Some("Installing cross.rs for cross-compilation"), cx);
-            log::info!("installing cross");
-            run_cmd(Command::new("cargo").args([
-                "install",
-                "cross",
-                "--git",
-                "https://github.com/cross-rs/cross",
-            ]))
-            .await?;
-
-            delegate.set_status(
-                Some(&format!(
-                    "Building remote server binary from source for {} with Docker",
-                    &triple
-                )),
-                cx,
-            );
-            log::info!("building remote server binary from source for {}", &triple);
-
-            // On Windows, the binding needs to be set to the canonical path
-            #[cfg(target_os = "windows")]
-            let src =
-                SanitizedPath::new(&smol::fs::canonicalize("./target").await?).to_glob_string();
-            #[cfg(not(target_os = "windows"))]
-            let src = "./target";
-            run_cmd(
-                Command::new("cross")
-                    .args([
-                        "build",
-                        "--package",
-                        "remote_server",
-                        "--features",
-                        "debug-embed",
-                        "--target-dir",
-                        "target/remote_server",
-                        "--target",
-                        &triple,
-                    ])
-                    .env(
-                        "CROSS_CONTAINER_OPTS",
-                        format!("--mount type=bind,src={src},dst=/app/target"),
-                    )
-                    .env("RUSTFLAGS", &rust_flags),
-            )
-            .await?;
-        } else {
-            let which = cx
-                .background_spawn(async move { which::which("zig") })
-                .await;
-
-            if which.is_err() {
-                #[cfg(not(target_os = "windows"))]
-                {
-                    anyhow::bail!(
-                        "zig not found on $PATH, install zig (see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross"
-                    )
-                }
-                #[cfg(target_os = "windows")]
-                {
-                    anyhow::bail!(
-                        "zig not found on $PATH, install zig (use `winget install -e --id zig.zig` or see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross"
-                    )
-                }
-            }
-
-            delegate.set_status(Some("Adding rustup target for cross-compilation"), cx);
-            log::info!("adding rustup target");
-            run_cmd(Command::new("rustup").args(["target", "add"]).arg(&triple)).await?;
-
-            delegate.set_status(Some("Installing cargo-zigbuild for cross-compilation"), cx);
-            log::info!("installing cargo-zigbuild");
-            run_cmd(Command::new("cargo").args(["install", "--locked", "cargo-zigbuild"])).await?;
-
-            delegate.set_status(
-                Some(&format!(
-                    "Building remote binary from source for {triple} with Zig"
-                )),
-                cx,
-            );
-            log::info!("building remote binary from source for {triple} with Zig");
-            run_cmd(
-                Command::new("cargo")
-                    .args([
-                        "zigbuild",
-                        "--package",
-                        "remote_server",
-                        "--features",
-                        "debug-embed",
-                        "--target-dir",
-                        "target/remote_server",
-                        "--target",
-                        &triple,
-                    ])
-                    .env("RUSTFLAGS", &rust_flags),
-            )
-            .await?;
-        };
-        let bin_path = Path::new("target")
-            .join("remote_server")
-            .join(&triple)
-            .join("debug")
-            .join("remote_server");
-
-        let path = if !build_remote_server.contains("nocompress") {
-            delegate.set_status(Some("Compressing binary"), cx);
-
-            #[cfg(not(target_os = "windows"))]
-            {
-                run_cmd(Command::new("gzip").args(["-f", &bin_path.to_string_lossy()])).await?;
-            }
-            #[cfg(target_os = "windows")]
-            {
-                // On Windows, we use 7z to compress the binary
-                let seven_zip = which::which("7z.exe").context("7z.exe not found on $PATH, install it (e.g. with `winget install -e --id 7zip.7zip`) or, if you don't want this behaviour, set $env:ZED_BUILD_REMOTE_SERVER=\"nocompress\"")?;
-                let gz_path = format!("target/remote_server/{}/debug/remote_server.gz", triple);
-                if smol::fs::metadata(&gz_path).await.is_ok() {
-                    smol::fs::remove_file(&gz_path).await?;
-                }
-                run_cmd(Command::new(seven_zip).args([
-                    "a",
-                    "-tgzip",
-                    &gz_path,
-                    &bin_path.to_string_lossy(),
-                ]))
-                .await?;
-            }
-
-            let mut archive_path = bin_path;
-            archive_path.set_extension("gz");
-            std::env::current_dir()?.join(archive_path)
-        } else {
-            bin_path
-        };
-
-        Ok(path)
-    }
 }
 
 impl SshSocket {

crates/remote/src/transport/wsl.rs 🔗

@@ -0,0 +1,494 @@
+use crate::{
+    RemoteClientDelegate, RemotePlatform,
+    remote_client::{CommandTemplate, RemoteConnection, RemoteConnectionOptions},
+};
+use anyhow::{Result, anyhow, bail};
+use async_trait::async_trait;
+use collections::HashMap;
+use futures::channel::mpsc::{Sender, UnboundedReceiver, UnboundedSender};
+use gpui::{App, AppContext as _, AsyncApp, SemanticVersion, Task};
+use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
+use rpc::proto::Envelope;
+use smol::{fs, process};
+use std::{
+    fmt::Write as _,
+    path::{Path, PathBuf},
+    process::Stdio,
+    sync::Arc,
+    time::Instant,
+};
+use util::paths::{PathStyle, RemotePathBuf};
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WslConnectionOptions {
+    pub distro_name: String,
+    pub user: Option<String>,
+}
+
+pub(crate) struct WslRemoteConnection {
+    remote_binary_path: Option<RemotePathBuf>,
+    platform: RemotePlatform,
+    shell: String,
+    connection_options: WslConnectionOptions,
+}
+
+impl WslRemoteConnection {
+    pub(crate) async fn new(
+        connection_options: WslConnectionOptions,
+        delegate: Arc<dyn RemoteClientDelegate>,
+        cx: &mut AsyncApp,
+    ) -> Result<Self> {
+        log::info!(
+            "Connecting to WSL distro {} with user {:?}",
+            connection_options.distro_name,
+            connection_options.user
+        );
+        let (release_channel, version, commit) = cx.update(|cx| {
+            (
+                ReleaseChannel::global(cx),
+                AppVersion::global(cx),
+                AppCommitSha::try_global(cx),
+            )
+        })?;
+
+        let mut this = Self {
+            connection_options,
+            remote_binary_path: None,
+            platform: RemotePlatform { os: "", arch: "" },
+            shell: String::new(),
+        };
+        delegate.set_status(Some("Detecting WSL environment"), cx);
+        this.platform = this.detect_platform().await?;
+        this.shell = this.detect_shell().await?;
+        this.remote_binary_path = Some(
+            this.ensure_server_binary(&delegate, release_channel, version, commit, cx)
+                .await?,
+        );
+
+        Ok(this)
+    }
+
+    async fn detect_platform(&self) -> Result<RemotePlatform> {
+        let arch_str = self.run_wsl_command("uname", &["-m"]).await?;
+        let arch_str = arch_str.trim().to_string();
+        let arch = match arch_str.as_str() {
+            "x86_64" => "x86_64",
+            "aarch64" | "arm64" => "aarch64",
+            _ => "x86_64",
+        };
+        Ok(RemotePlatform { os: "linux", arch })
+    }
+
+    async fn detect_shell(&self) -> Result<String> {
+        Ok(self
+            .run_wsl_command("sh", &["-c", "echo $SHELL"])
+            .await
+            .ok()
+            .and_then(|shell_path| shell_path.trim().split('/').next_back().map(str::to_string))
+            .unwrap_or_else(|| "bash".to_string()))
+    }
+
+    async fn windows_path_to_wsl_path(&self, source: &Path) -> Result<String> {
+        windows_path_to_wsl_path_impl(&self.connection_options, source).await
+    }
+
+    fn wsl_command(&self, program: &str, args: &[&str]) -> process::Command {
+        wsl_command_impl(&self.connection_options, program, args)
+    }
+
+    async fn run_wsl_command(&self, program: &str, args: &[&str]) -> Result<String> {
+        run_wsl_command_impl(&self.connection_options, program, args).await
+    }
+
+    async fn ensure_server_binary(
+        &self,
+        delegate: &Arc<dyn RemoteClientDelegate>,
+        release_channel: ReleaseChannel,
+        version: SemanticVersion,
+        commit: Option<AppCommitSha>,
+        cx: &mut AsyncApp,
+    ) -> Result<RemotePathBuf> {
+        let version_str = match release_channel {
+            ReleaseChannel::Nightly => {
+                let commit = commit.map(|s| s.full()).unwrap_or_default();
+                format!("{}-{}", version, commit)
+            }
+            ReleaseChannel::Dev => "build".to_string(),
+            _ => version.to_string(),
+        };
+
+        let binary_name = format!(
+            "zed-remote-server-{}-{}",
+            release_channel.dev_name(),
+            version_str
+        );
+
+        let dst_path = RemotePathBuf::new(
+            paths::remote_wsl_server_dir_relative().join(binary_name),
+            PathStyle::Posix,
+        );
+
+        if let Some(parent) = dst_path.parent() {
+            self.run_wsl_command("mkdir", &["-p", &parent.to_string()])
+                .await
+                .map_err(|e| anyhow!("Failed to create directory: {}", e))?;
+        }
+
+        #[cfg(debug_assertions)]
+        if let Some(remote_server_path) =
+            super::build_remote_server_from_source(&self.platform, delegate.as_ref(), cx).await?
+        {
+            let tmp_path = RemotePathBuf::new(
+                paths::remote_wsl_server_dir_relative().join(format!(
+                    "download-{}-{}",
+                    std::process::id(),
+                    remote_server_path.file_name().unwrap().to_string_lossy()
+                )),
+                PathStyle::Posix,
+            );
+            self.upload_file(&remote_server_path, &tmp_path, delegate, cx)
+                .await?;
+            self.extract_and_install(&tmp_path, &dst_path, delegate, cx)
+                .await?;
+            return Ok(dst_path);
+        }
+
+        if self
+            .run_wsl_command(&dst_path.to_string(), &["version"])
+            .await
+            .is_ok()
+        {
+            return Ok(dst_path);
+        }
+
+        delegate.set_status(Some("Installing remote server"), cx);
+
+        let wanted_version = match release_channel {
+            ReleaseChannel::Nightly => None,
+            ReleaseChannel::Dev => {
+                return Err(anyhow!("Dev builds require manual installation"));
+            }
+            _ => Some(cx.update(|cx| AppVersion::global(cx))?),
+        };
+
+        let src_path = delegate
+            .download_server_binary_locally(self.platform, release_channel, wanted_version, cx)
+            .await?;
+
+        let tmp_path = RemotePathBuf::new(
+            PathBuf::from(format!("{}.{}.tmp", dst_path, std::process::id())),
+            PathStyle::Posix,
+        );
+
+        self.upload_file(&src_path, &tmp_path, delegate, cx).await?;
+        self.extract_and_install(&tmp_path, &dst_path, delegate, cx)
+            .await?;
+
+        Ok(dst_path)
+    }
+
+    async fn upload_file(
+        &self,
+        src_path: &Path,
+        dst_path: &RemotePathBuf,
+        delegate: &Arc<dyn RemoteClientDelegate>,
+        cx: &mut AsyncApp,
+    ) -> Result<()> {
+        delegate.set_status(Some("Uploading remote server to WSL"), cx);
+
+        if let Some(parent) = dst_path.parent() {
+            self.run_wsl_command("mkdir", &["-p", &parent.to_string()])
+                .await
+                .map_err(|e| anyhow!("Failed to create directory when uploading file: {}", e))?;
+        }
+
+        let t0 = Instant::now();
+        let src_stat = fs::metadata(&src_path).await?;
+        let size = src_stat.len();
+        log::info!(
+            "uploading remote server to WSL {:?} ({}kb)",
+            dst_path,
+            size / 1024
+        );
+
+        let src_path_in_wsl = self.windows_path_to_wsl_path(src_path).await?;
+        self.run_wsl_command("cp", &["-f", &src_path_in_wsl, &dst_path.to_string()])
+            .await
+            .map_err(|e| {
+                anyhow!(
+                    "Failed to copy file {}({}) to WSL {:?}: {}",
+                    src_path.display(),
+                    src_path_in_wsl,
+                    dst_path,
+                    e
+                )
+            })?;
+
+        log::info!("uploaded remote server in {:?}", t0.elapsed());
+        Ok(())
+    }
+
+    async fn extract_and_install(
+        &self,
+        tmp_path: &RemotePathBuf,
+        dst_path: &RemotePathBuf,
+        delegate: &Arc<dyn RemoteClientDelegate>,
+        cx: &mut AsyncApp,
+    ) -> Result<()> {
+        delegate.set_status(Some("Extracting remote server"), cx);
+
+        let tmp_path_str = tmp_path.to_string();
+        let dst_path_str = dst_path.to_string();
+
+        // Build extraction script with proper error handling
+        let script = if tmp_path_str.ends_with(".gz") {
+            let uncompressed = tmp_path_str.trim_end_matches(".gz");
+            format!(
+                "set -e; gunzip -f '{}' && chmod 755 '{}' && mv -f '{}' '{}'",
+                tmp_path_str, uncompressed, uncompressed, dst_path_str
+            )
+        } else {
+            format!(
+                "set -e; chmod 755 '{}' && mv -f '{}' '{}'",
+                tmp_path_str, tmp_path_str, dst_path_str
+            )
+        };
+
+        self.run_wsl_command("sh", &["-c", &script])
+            .await
+            .map_err(|e| anyhow!("Failed to extract server binary: {}", e))?;
+        Ok(())
+    }
+}
+
+#[async_trait(?Send)]
+impl RemoteConnection for WslRemoteConnection {
+    fn start_proxy(
+        &self,
+        unique_identifier: String,
+        reconnect: bool,
+        incoming_tx: UnboundedSender<Envelope>,
+        outgoing_rx: UnboundedReceiver<Envelope>,
+        connection_activity_tx: Sender<()>,
+        delegate: Arc<dyn RemoteClientDelegate>,
+        cx: &mut AsyncApp,
+    ) -> Task<Result<i32>> {
+        delegate.set_status(Some("Starting proxy"), cx);
+
+        let Some(remote_binary_path) = &self.remote_binary_path else {
+            return Task::ready(Err(anyhow!("Remote binary path not set")));
+        };
+
+        let mut proxy_command = format!(
+            "exec {} proxy --identifier {}",
+            remote_binary_path, unique_identifier
+        );
+
+        if reconnect {
+            proxy_command.push_str(" --reconnect");
+        }
+
+        for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] {
+            if let Some(value) = std::env::var(env_var).ok() {
+                proxy_command = format!("{}='{}' {}", env_var, value, proxy_command);
+            }
+        }
+        let proxy_process = match self
+            .wsl_command("sh", &["-lc", &proxy_command])
+            .kill_on_drop(true)
+            .spawn()
+        {
+            Ok(process) => process,
+            Err(error) => {
+                return Task::ready(Err(anyhow!("failed to spawn remote server: {}", error)));
+            }
+        };
+
+        super::handle_rpc_messages_over_child_process_stdio(
+            proxy_process,
+            incoming_tx,
+            outgoing_rx,
+            connection_activity_tx,
+            cx,
+        )
+    }
+
+    fn upload_directory(
+        &self,
+        src_path: PathBuf,
+        dest_path: RemotePathBuf,
+        cx: &App,
+    ) -> Task<Result<()>> {
+        cx.background_spawn({
+            let options = self.connection_options.clone();
+            async move {
+                let wsl_src = windows_path_to_wsl_path_impl(&options, &src_path).await?;
+
+                run_wsl_command_impl(&options, "cp", &["-r", &wsl_src, &dest_path.to_string()])
+                    .await
+                    .map_err(|e| {
+                        anyhow!(
+                            "failed to upload directory {} -> {}: {}",
+                            src_path.display(),
+                            dest_path.to_string(),
+                            e
+                        )
+                    })?;
+
+                Ok(())
+            }
+        })
+    }
+
+    async fn kill(&self) -> Result<()> {
+        Ok(())
+    }
+
+    fn has_been_killed(&self) -> bool {
+        false
+    }
+
+    fn shares_network_interface(&self) -> bool {
+        true
+    }
+
+    fn build_command(
+        &self,
+        program: Option<String>,
+        args: &[String],
+        env: &HashMap<String, String>,
+        working_dir: Option<String>,
+        port_forward: Option<(u16, String, u16)>,
+    ) -> Result<CommandTemplate> {
+        if port_forward.is_some() {
+            bail!("WSL shares the network interface with the host system");
+        }
+
+        let working_dir = working_dir
+            .map(|working_dir| RemotePathBuf::new(working_dir.into(), PathStyle::Posix).to_string())
+            .unwrap_or("~".to_string());
+
+        let mut script = String::new();
+
+        for (k, v) in env.iter() {
+            write!(&mut script, "{}='{}' ", k, v).unwrap();
+        }
+
+        if let Some(program) = program {
+            let command = shlex::try_quote(&program)?;
+            script.push_str(&command);
+            for arg in args {
+                let arg = shlex::try_quote(&arg)?;
+                script.push_str(" ");
+                script.push_str(&arg);
+            }
+        } else {
+            write!(&mut script, "exec {} -l", self.shell).unwrap();
+        }
+
+        let wsl_args = if let Some(user) = &self.connection_options.user {
+            vec![
+                "--distribution".to_string(),
+                self.connection_options.distro_name.clone(),
+                "--user".to_string(),
+                user.clone(),
+                "--cd".to_string(),
+                working_dir,
+                "--".to_string(),
+                self.shell.clone(),
+                "-c".to_string(),
+                shlex::try_quote(&script)?.to_string(),
+            ]
+        } else {
+            vec![
+                "--distribution".to_string(),
+                self.connection_options.distro_name.clone(),
+                "--cd".to_string(),
+                working_dir,
+                "--".to_string(),
+                self.shell.clone(),
+                "-c".to_string(),
+                shlex::try_quote(&script)?.to_string(),
+            ]
+        };
+
+        Ok(CommandTemplate {
+            program: "wsl.exe".to_string(),
+            args: wsl_args,
+            env: HashMap::default(),
+        })
+    }
+
+    fn connection_options(&self) -> RemoteConnectionOptions {
+        RemoteConnectionOptions::Wsl(self.connection_options.clone())
+    }
+
+    fn path_style(&self) -> PathStyle {
+        PathStyle::Posix
+    }
+
+    fn shell(&self) -> String {
+        self.shell.clone()
+    }
+}
+
+/// `wslpath` is an executable available in WSL; it's a Linux binary,
+/// so it doesn't support Windows-style paths.
+async fn sanitize_path(path: &Path) -> Result<String> {
+    let path = smol::fs::canonicalize(path).await?;
+    let path_str = path.to_string_lossy();
+
+    let sanitized = path_str.strip_prefix(r"\\?\").unwrap_or(&path_str);
+    Ok(sanitized.replace('\\', "/"))
+}
+
+async fn windows_path_to_wsl_path_impl(
+    options: &WslConnectionOptions,
+    source: &Path,
+) -> Result<String> {
+    let source = sanitize_path(source).await?;
+    run_wsl_command_impl(options, "wslpath", &["-u", &source]).await
+}
+
+fn wsl_command_impl(
+    options: &WslConnectionOptions,
+    program: &str,
+    args: &[&str],
+) -> process::Command {
+    let mut command = util::command::new_smol_command("wsl.exe");
+
+    if let Some(user) = &options.user {
+        command.arg("--user").arg(user);
+    }
+
+    command
+        .stdin(Stdio::piped())
+        .stdout(Stdio::piped())
+        .stderr(Stdio::piped())
+        .arg("--distribution")
+        .arg(&options.distro_name)
+        .arg("--cd")
+        .arg("~")
+        .arg(program)
+        .args(args);
+
+    command
+}
+
+async fn run_wsl_command_impl(
+    options: &WslConnectionOptions,
+    program: &str,
+    args: &[&str],
+) -> Result<String> {
+    let output = wsl_command_impl(options, program, args).output().await?;
+
+    if !output.status.success() {
+        return Err(anyhow!(
+            "Command '{}' failed: {}",
+            program,
+            String::from_utf8_lossy(&output.stderr).trim()
+        ));
+    }
+
+    Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
+}

crates/remote_server/src/headless_project.rs 🔗

@@ -67,7 +67,7 @@ impl HeadlessProject {
         settings::init(cx);
         language::init(cx);
         project::Project::init_settings(cx);
-        log_store::init(false, cx);
+        log_store::init(true, cx);
     }
 
     pub fn new(
@@ -546,7 +546,9 @@ impl HeadlessProject {
             .context("lsp logs store is missing")?;
 
         lsp_logs.update(&mut cx, |lsp_logs, _| {
-            // we do not support any other log toggling yet
+            // RPC logs are very noisy, so they need to be toggled on the headless server too.
+            // The rest of the logs for the ssh project should always stay enabled,
+            // e.g. so language server error logs reach the client before anything is toggled.
             if envelope.payload.enabled {
                 lsp_logs.enable_rpc_trace_for_language_server(server_id);
             } else {

crates/remote_server/src/remote_editing_tests.rs 🔗

@@ -533,7 +533,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext
         Ok(Some(lsp::WorkspaceEdit {
             changes: Some(
                 [(
-                    lsp::Url::from_file_path(path!("/code/project1/src/lib.rs")).unwrap(),
+                    lsp::Uri::from_file_path(path!("/code/project1/src/lib.rs")).unwrap(),
                     vec![lsp::TextEdit::new(
                         lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 6)),
                         "two".to_string(),

crates/repl/src/jupyter_settings.rs 🔗

@@ -4,9 +4,9 @@ use editor::EditorSettings;
 use gpui::App;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
-#[derive(Debug, Default)]
+#[derive(Debug, Default, SettingsUi)]
 pub struct JupyterSettings {
     pub kernel_selections: HashMap<String, String>,
 }

crates/search/src/project_search.rs 🔗

@@ -11,6 +11,7 @@ use editor::{
     Anchor, Editor, EditorEvent, EditorSettings, MAX_TAB_TITLE_LEN, MultiBuffer, SelectionEffects,
     actions::{Backtab, SelectAll, Tab},
     items::active_match_index,
+    multibuffer_context_lines,
 };
 use futures::{StreamExt, stream::FuturesOrdered};
 use gpui::{
@@ -345,7 +346,7 @@ impl ProjectSearch {
                                     excerpts.set_anchored_excerpts_for_path(
                                         buffer,
                                         ranges,
-                                        editor::DEFAULT_MULTIBUFFER_CONTEXT,
+                                        multibuffer_context_lines(cx),
                                         cx,
                                     )
                                 })

crates/settings/Cargo.toml 🔗

@@ -31,7 +31,9 @@ schemars.workspace = true
 serde.workspace = true
 serde_derive.workspace = true
 serde_json.workspace = true
+settings_ui_macros.workspace = true
 serde_json_lenient.workspace = true
+serde_path_to_error.workspace = true
 smallvec.workspace = true
 tree-sitter-json.workspace = true
 tree-sitter.workspace = true

crates/settings/src/base_keymap_setting.rs 🔗

@@ -1,13 +1,17 @@
 use std::fmt::{Display, Formatter};
 
-use crate::{Settings, SettingsSources, VsCodeSettings};
+use crate as settings;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
+use settings::{Settings, SettingsSources, VsCodeSettings};
+use settings_ui_macros::SettingsUi;
 
 /// Base key bindings scheme. Base keymaps can be overridden with user keymaps.
 ///
 /// Default: VSCode
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Default)]
+#[derive(
+    Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Default, SettingsUi,
+)]
 pub enum BaseKeymap {
     #[default]
     VSCode,

crates/settings/src/settings.rs 🔗

@@ -4,6 +4,7 @@ mod keymap_file;
 mod settings_file;
 mod settings_json;
 mod settings_store;
+mod settings_ui;
 mod vscode_import;
 
 use gpui::{App, Global};
@@ -23,6 +24,9 @@ pub use settings_store::{
     InvalidSettingsError, LocalSettingsKind, Settings, SettingsLocation, SettingsSources,
     SettingsStore,
 };
+pub use settings_ui::*;
+// Re-export the derive macro
+pub use settings_ui_macros::SettingsUi;
 pub use vscode_import::{VsCodeSettings, VsCodeSettingsSource};
 
 #[derive(Clone, Debug, PartialEq)]

crates/settings/src/settings_json.rs 🔗

@@ -87,9 +87,9 @@ pub fn update_value_in_json_text<'a>(
 }
 
 /// * `replace_key` - When an exact key match according to `key_path` is found, replace the key with `replace_key` if `Some`.
-fn replace_value_in_json_text(
+pub fn replace_value_in_json_text<T: AsRef<str>>(
     text: &str,
-    key_path: &[&str],
+    key_path: &[T],
     tab_size: usize,
     new_value: Option<&Value>,
     replace_key: Option<&str>,
@@ -141,7 +141,7 @@ fn replace_value_in_json_text(
         let found_key = text
             .get(key_range.clone())
             .map(|key_text| {
-                depth < key_path.len() && key_text == format!("\"{}\"", key_path[depth])
+                depth < key_path.len() && key_text == format!("\"{}\"", key_path[depth].as_ref())
             })
             .unwrap_or(false);
 
@@ -226,13 +226,13 @@ fn replace_value_in_json_text(
         }
     } else {
         // We have key paths, construct the sub objects
-        let new_key = key_path[depth];
+        let new_key = key_path[depth].as_ref();
 
         // We don't have the key, construct the nested objects
         let mut new_value =
             serde_json::to_value(new_value.unwrap_or(&serde_json::Value::Null)).unwrap();
         for key in key_path[(depth + 1)..].iter().rev() {
-            new_value = serde_json::json!({ key.to_string(): new_value });
+            new_value = serde_json::json!({ key.as_ref().to_string(): new_value });
         }
 
         if let Some(first_key_start) = first_key_start {
@@ -465,7 +465,7 @@ pub fn append_top_level_array_value_in_json_text(
     }
 
     let (mut replace_range, mut replace_value) =
-        replace_value_in_json_text("", &[], tab_size, Some(new_value), None);
+        replace_value_in_json_text::<&str>("", &[], tab_size, Some(new_value), None);
 
     replace_range.start = close_bracket_start;
     replace_range.end = close_bracket_start;
@@ -563,7 +563,8 @@ pub fn to_pretty_json(
 }
 
 pub fn parse_json_with_comments<T: DeserializeOwned>(content: &str) -> Result<T> {
-    Ok(serde_json_lenient::from_str(content)?)
+    let mut deserializer = serde_json_lenient::Deserializer::from_str(content);
+    Ok(serde_path_to_error::deserialize(&mut deserializer)?)
 }
 
 #[cfg(test)]

crates/settings/src/settings_store.rs 🔗

@@ -7,11 +7,11 @@ use futures::{
     channel::{mpsc, oneshot},
     future::LocalBoxFuture,
 };
-use gpui::{App, AsyncApp, BorrowAppContext, Global, Task, UpdateGlobal};
+use gpui::{App, AsyncApp, BorrowAppContext, Global, SharedString, Task, UpdateGlobal};
 
 use paths::{EDITORCONFIG_NAME, local_settings_file_relative_path, task_file_name};
 use schemars::JsonSchema;
-use serde::{Deserialize, Serialize, de::DeserializeOwned};
+use serde::{Serialize, de::DeserializeOwned};
 use serde_json::{Value, json};
 use smallvec::SmallVec;
 use std::{
@@ -31,14 +31,15 @@ use util::{
 pub type EditorconfigProperties = ec4rs::Properties;
 
 use crate::{
-    ActiveSettingsProfileName, ParameterizedJsonSchema, SettingsJsonSchemaParams, VsCodeSettings,
-    WorktreeId, parse_json_with_comments, update_value_in_json_text,
+    ActiveSettingsProfileName, ParameterizedJsonSchema, SettingsJsonSchemaParams, SettingsUiEntry,
+    VsCodeSettings, WorktreeId, parse_json_with_comments, replace_value_in_json_text,
+    settings_ui::SettingsUi, update_value_in_json_text,
 };
 
 /// A value that can be defined as a user setting.
 ///
 /// Settings can be loaded from a combination of multiple JSON files.
-pub trait Settings: 'static + Send + Sync {
+pub trait Settings: SettingsUi + 'static + Send + Sync {
     /// The name of a key within the JSON file from which this setting should
     /// be deserialized. If this is `None`, then the setting will be deserialized
     /// from the root object.
@@ -103,6 +104,18 @@ pub trait Settings: 'static + Send + Sync {
         cx.global::<SettingsStore>().get(None)
     }
 
+    #[track_caller]
+    fn try_get(cx: &App) -> Option<&Self>
+    where
+        Self: Sized,
+    {
+        if cx.has_global::<SettingsStore>() {
+            cx.global::<SettingsStore>().try_get(None)
+        } else {
+            None
+        }
+    }
+
     #[track_caller]
     fn try_read_global<R>(cx: &AsyncApp, f: impl FnOnce(&Self) -> R) -> Option<R>
     where
@@ -272,6 +285,7 @@ trait AnySettingValue: 'static + Send + Sync {
         text: &mut String,
         edits: &mut Vec<(Range<usize>, String)>,
     );
+    fn settings_ui_item(&self) -> SettingsUiEntry;
 }
 
 struct DeserializedSetting(Box<dyn Any>);
@@ -407,6 +421,16 @@ impl SettingsStore {
             .expect("no default value for setting type")
     }
 
+    /// Get the value of a setting.
+    ///
+    /// Unlike `get`, returns `None` instead of panicking when the setting type is not registered.
+    pub fn try_get<T: Settings>(&self, path: Option<SettingsLocation>) -> Option<&T> {
+        self.setting_values
+            .get(&TypeId::of::<T>())
+            .map(|value| value.value_for_path(path))
+            .and_then(|value| value.downcast_ref::<T>())
+    }
+
     /// Get all values from project specific settings
     pub fn get_all_locals<T: Settings>(&self) -> Vec<(WorktreeId, Arc<Path>, &T)> {
         self.setting_values
@@ -458,6 +482,11 @@ impl SettingsStore {
         self.raw_global_settings.as_ref()
     }
 
+    /// Access the raw JSON value of the default settings.
+    pub fn raw_default_settings(&self) -> &Value {
+        &self.raw_default_settings
+    }
+
     #[cfg(any(test, feature = "test-support"))]
     pub fn test(cx: &mut App) -> Self {
         let mut this = Self::new(cx);
@@ -510,49 +539,10 @@ impl SettingsStore {
         }
     }
 
-    pub fn update_settings_file<T: Settings>(
+    fn update_settings_file_inner(
         &self,
         fs: Arc<dyn Fs>,
-        update: impl 'static + Send + FnOnce(&mut T::FileContent, &App),
-    ) {
-        self.setting_file_updates_tx
-            .unbounded_send(Box::new(move |cx: AsyncApp| {
-                async move {
-                    let old_text = Self::load_settings(&fs).await?;
-                    let new_text = cx.read_global(|store: &SettingsStore, cx| {
-                        store.new_text_for_update::<T>(old_text, |content| update(content, cx))
-                    })?;
-                    let settings_path = paths::settings_file().as_path();
-                    if fs.is_file(settings_path).await {
-                        let resolved_path =
-                            fs.canonicalize(settings_path).await.with_context(|| {
-                                format!("Failed to canonicalize settings path {:?}", settings_path)
-                            })?;
-
-                        fs.atomic_write(resolved_path.clone(), new_text)
-                            .await
-                            .with_context(|| {
-                                format!("Failed to write settings to file {:?}", resolved_path)
-                            })?;
-                    } else {
-                        fs.atomic_write(settings_path.to_path_buf(), new_text)
-                            .await
-                            .with_context(|| {
-                                format!("Failed to write settings to file {:?}", settings_path)
-                            })?;
-                    }
-
-                    anyhow::Ok(())
-                }
-                .boxed_local()
-            }))
-            .ok();
-    }
-
-    pub fn import_vscode_settings(
-        &self,
-        fs: Arc<dyn Fs>,
-        vscode_settings: VsCodeSettings,
+        update: impl 'static + Send + FnOnce(String, AsyncApp) -> Result<String>,
     ) -> oneshot::Receiver<Result<()>> {
         let (tx, rx) = oneshot::channel::<Result<()>>();
         self.setting_file_updates_tx
@@ -560,9 +550,7 @@ impl SettingsStore {
                 async move {
                     let res = async move {
                         let old_text = Self::load_settings(&fs).await?;
-                        let new_text = cx.read_global(|store: &SettingsStore, _cx| {
-                            store.get_vscode_edits(old_text, &vscode_settings)
-                        })?;
+                        let new_text = update(old_text, cx)?;
                         let settings_path = paths::settings_file().as_path();
                         if fs.is_file(settings_path).await {
                             let resolved_path =
@@ -585,7 +573,6 @@ impl SettingsStore {
                                     format!("Failed to write settings to file {:?}", settings_path)
                                 })?;
                         }
-
                         anyhow::Ok(())
                     }
                     .await;
@@ -600,9 +587,67 @@ impl SettingsStore {
                 }
                 .boxed_local()
             }))
-            .ok();
+            .map_err(|err| anyhow::format_err!("Failed to update settings file: {}", err))
+            .log_with_level(log::Level::Warn);
+        return rx;
+    }
+
+    pub fn update_settings_file_at_path(
+        &self,
+        fs: Arc<dyn Fs>,
+        path: &[&str],
+        new_value: serde_json::Value,
+    ) -> oneshot::Receiver<Result<()>> {
+        let key_path = path
+            .into_iter()
+            .cloned()
+            .map(SharedString::new)
+            .collect::<Vec<_>>();
+        let update = move |mut old_text: String, cx: AsyncApp| {
+            cx.read_global(|store: &SettingsStore, _cx| {
+                // todo(settings_ui) use `update_value_in_json_text` for merging new and old objects with comment preservation, needs old value though...
+                let (range, replacement) = replace_value_in_json_text(
+                    &old_text,
+                    key_path.as_slice(),
+                    store.json_tab_size(),
+                    Some(&new_value),
+                    None,
+                );
+                old_text.replace_range(range, &replacement);
+                old_text
+            })
+        };
+        self.update_settings_file_inner(fs, update)
+    }
+
+    pub fn update_settings_file<T: Settings>(
+        &self,
+        fs: Arc<dyn Fs>,
+        update: impl 'static + Send + FnOnce(&mut T::FileContent, &App),
+    ) {
+        _ = self.update_settings_file_inner(fs, move |old_text: String, cx: AsyncApp| {
+            cx.read_global(|store: &SettingsStore, cx| {
+                store.new_text_for_update::<T>(old_text, |content| update(content, cx))
+            })
+        });
+    }
+
+    pub fn import_vscode_settings(
+        &self,
+        fs: Arc<dyn Fs>,
+        vscode_settings: VsCodeSettings,
+    ) -> oneshot::Receiver<Result<()>> {
+        self.update_settings_file_inner(fs, move |old_text: String, cx: AsyncApp| {
+            cx.read_global(|store: &SettingsStore, _cx| {
+                store.get_vscode_edits(old_text, &vscode_settings)
+            })
+        })
+    }
 
-        rx
+    pub fn settings_ui_items(&self) -> impl IntoIterator<Item = SettingsUiEntry> {
+        self.setting_values
+            .values()
+            .map(|item| item.settings_ui_item())
     }
 }
 
@@ -1419,9 +1464,29 @@ impl<T: Settings> AnySettingValue for SettingValue<T> {
                 return (T::KEY, Ok(DeserializedSetting(Box::new(value))));
             }
         }
-        let value = T::FileContent::deserialize(json)
+        let value = serde_path_to_error::deserialize::<_, T::FileContent>(json)
             .map(|value| DeserializedSetting(Box::new(value)))
-            .map_err(anyhow::Error::from);
+            .map_err(|err| {
+                // construct a path using the key and reported error path if possible.
+                // Unfortunately, serde_path_to_error does not expose the necessary
+                // methods and data to simply add the key to the path
+                let mut path = String::new();
+                if let Some(key) = key {
+                    path.push_str(key);
+                }
+                let err_path = err.path().to_string();
+                // when the path is empty, serde_path_to_error stringifies the path as ".",
+                // when the path is unknown, serde_path_to_error stringifies the path as an empty string
+                if !err_path.is_empty() && !err_path.starts_with('.') {
+                    path.push('.');
+                    path.push_str(&err_path);
+                }
+                if path.is_empty() {
+                    anyhow::Error::from(err.into_inner())
+                } else {
+                    anyhow::anyhow!("'{}': {}", path, err.into_inner())
+                }
+            });
         (key, value)
     }
 
@@ -1498,6 +1563,10 @@ impl<T: Settings> AnySettingValue for SettingValue<T> {
             edits,
         );
     }
+
+    fn settings_ui_item(&self) -> SettingsUiEntry {
+        <T as SettingsUi>::settings_ui_entry()
+    }
 }
 
 #[cfg(test)]
@@ -1505,7 +1574,10 @@ mod tests {
     use crate::VsCodeSettingsSource;
 
     use super::*;
+    // This is so the SettingsUi macro can still work properly
+    use crate as settings;
     use serde_derive::Deserialize;
+    use settings_ui_macros::SettingsUi;
     use unindent::Unindent;
 
     #[gpui::test]
@@ -2048,14 +2120,14 @@ mod tests {
         pretty_assertions::assert_eq!(new, expected);
     }
 
-    #[derive(Debug, PartialEq, Deserialize)]
+    #[derive(Debug, PartialEq, Deserialize, SettingsUi)]
     struct UserSettings {
         name: String,
         age: u32,
         staff: bool,
     }
 
-    #[derive(Default, Clone, Serialize, Deserialize, JsonSchema)]
+    #[derive(Default, Clone, Serialize, Deserialize, JsonSchema, SettingsUi)]
     struct UserSettingsContent {
         name: Option<String>,
         age: Option<u32>,
@@ -2075,7 +2147,7 @@ mod tests {
         }
     }
 
-    #[derive(Debug, Deserialize, PartialEq)]
+    #[derive(Debug, Deserialize, PartialEq, SettingsUi)]
     struct TurboSetting(bool);
 
     impl Settings for TurboSetting {
@@ -2089,7 +2161,7 @@ mod tests {
         fn import_from_vscode(_vscode: &VsCodeSettings, _current: &mut Self::FileContent) {}
     }
 
-    #[derive(Clone, Debug, PartialEq, Deserialize)]
+    #[derive(Clone, Debug, PartialEq, Deserialize, SettingsUi)]
     struct MultiKeySettings {
         #[serde(default)]
         key1: String,
@@ -2122,7 +2194,7 @@ mod tests {
         }
     }
 
-    #[derive(Debug, Deserialize)]
+    #[derive(Debug, Deserialize, SettingsUi)]
     struct JournalSettings {
         pub path: String,
         pub hour_format: HourFormat,
@@ -2223,7 +2295,7 @@ mod tests {
         );
     }
 
-    #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
+    #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, SettingsUi)]
     struct LanguageSettings {
         #[serde(default)]
         languages: HashMap<String, LanguageSettingEntry>,

crates/settings/src/settings_ui.rs 🔗

@@ -0,0 +1,118 @@
+use anyhow::Context as _;
+use fs::Fs;
+use gpui::{AnyElement, App, AppContext as _, ReadGlobal as _, Window};
+use smallvec::SmallVec;
+
+use crate::SettingsStore;
+
+pub trait SettingsUi {
+    fn settings_ui_item() -> SettingsUiItem {
+        SettingsUiItem::None
+    }
+    fn settings_ui_entry() -> SettingsUiEntry;
+}
+
+pub struct SettingsUiEntry {
+    // todo(settings_ui): move this back here once there isn't a None variant
+    // pub path: &'static str,
+    // pub title: &'static str,
+    pub item: SettingsUiEntryVariant,
+}
+
+pub enum SettingsUiEntryVariant {
+    Group {
+        path: &'static str,
+        title: &'static str,
+        items: Vec<SettingsUiEntry>,
+    },
+    Item {
+        path: &'static str,
+        item: SettingsUiItemSingle,
+    },
+    // todo(settings_ui): remove
+    None,
+}
+
+pub enum SettingsUiItemSingle {
+    SwitchField,
+    NumericStepper,
+    ToggleGroup(&'static [&'static str]),
+    /// This should be used when toggle group size > 6
+    DropDown(&'static [&'static str]),
+    Custom(Box<dyn Fn(SettingsValue<serde_json::Value>, &mut Window, &mut App) -> AnyElement>),
+}
+
+pub struct SettingsValue<T> {
+    pub title: &'static str,
+    pub path: SmallVec<[&'static str; 1]>,
+    pub value: Option<T>,
+    pub default_value: T,
+}
+
+impl<T> SettingsValue<T> {
+    pub fn read(&self) -> &T {
+        match &self.value {
+            Some(value) => value,
+            None => &self.default_value,
+        }
+    }
+}
+
+impl SettingsValue<serde_json::Value> {
+    pub fn write_value(path: &SmallVec<[&'static str; 1]>, value: serde_json::Value, cx: &mut App) {
+        let settings_store = SettingsStore::global(cx);
+        let fs = <dyn Fs>::global(cx);
+
+        let rx = settings_store.update_settings_file_at_path(fs.clone(), path.as_slice(), value);
+        let path = path.clone();
+        cx.background_spawn(async move {
+            rx.await?
+                .with_context(|| format!("Failed to update setting at path `{}`", path.join(".")))
+        })
+        .detach_and_log_err(cx);
+    }
+}
+
+impl<T: serde::Serialize> SettingsValue<T> {
+    pub fn write(
+        path: &SmallVec<[&'static str; 1]>,
+        value: T,
+        cx: &mut App,
+    ) -> Result<(), serde_json::Error> {
+        SettingsValue::write_value(path, serde_json::to_value(value)?, cx);
+        Ok(())
+    }
+}
+
+pub enum SettingsUiItem {
+    Group {
+        title: &'static str,
+        items: Vec<SettingsUiEntry>,
+    },
+    Single(SettingsUiItemSingle),
+    None,
+}
+
+impl SettingsUi for bool {
+    fn settings_ui_item() -> SettingsUiItem {
+        SettingsUiItem::Single(SettingsUiItemSingle::SwitchField)
+    }
+
+    fn settings_ui_entry() -> SettingsUiEntry {
+        SettingsUiEntry {
+            item: SettingsUiEntryVariant::None,
+        }
+    }
+}
+
+impl SettingsUi for u64 {
+    fn settings_ui_item() -> SettingsUiItem {
+        SettingsUiItem::Single(SettingsUiItemSingle::NumericStepper)
+    }
+
+    fn settings_ui_entry() -> SettingsUiEntry {
+        SettingsUiEntry {
+            item: SettingsUiEntryVariant::None,
+        }
+    }
+}

crates/settings/src/vscode_import.rs 🔗

@@ -2,7 +2,7 @@ use anyhow::{Context as _, Result, anyhow};
 use fs::Fs;
 use paths::{cursor_settings_file_paths, vscode_settings_file_paths};
 use serde_json::{Map, Value};
-use std::{path::Path, rc::Rc, sync::Arc};
+use std::{path::Path, sync::Arc};
 
 #[derive(Clone, Copy, PartialEq, Eq, Debug)]
 pub enum VsCodeSettingsSource {
@@ -21,7 +21,7 @@ impl std::fmt::Display for VsCodeSettingsSource {
 
 pub struct VsCodeSettings {
     pub source: VsCodeSettingsSource,
-    pub path: Rc<Path>,
+    pub path: Arc<Path>,
     content: Map<String, Value>,
 }
 

crates/settings_ui/Cargo.toml 🔗

@@ -11,45 +11,26 @@ workspace = true
 [lib]
 path = "src/settings_ui.rs"
 
+[features]
+default = []
+
 [dependencies]
 anyhow.workspace = true
-collections.workspace = true
-command_palette.workspace = true
 command_palette_hooks.workspace = true
-component.workspace = true
-db.workspace = true
 editor.workspace = true
 feature_flags.workspace = true
-fs.workspace = true
-fuzzy.workspace = true
 gpui.workspace = true
-itertools.workspace = true
-language.workspace = true
-log.workspace = true
-menu.workspace = true
-notifications.workspace = true
-paths.workspace = true
-project.workspace = true
-search.workspace = true
-serde.workspace = true
 serde_json.workspace = true
+serde.workspace = true
 settings.workspace = true
-telemetry.workspace = true
-tempfile.workspace = true
+smallvec.workspace = true
 theme.workspace = true
-tree-sitter-json.workspace = true
-tree-sitter-rust.workspace = true
 ui.workspace = true
-ui_input.workspace = true
-util.workspace = true
-vim.workspace = true
-workspace-hack.workspace = true
 workspace.workspace = true
-zed_actions.workspace = true
+workspace-hack.workspace = true
 
-[dev-dependencies]
-db = {"workspace"= true, "features" = ["test-support"]}
-fs = { workspace = true, features = ["test-support"] }
-gpui = { workspace = true, features = ["test-support"] }
-project = { workspace = true, features = ["test-support"] }
-workspace = { workspace = true, features = ["test-support"] }
+# Uncomment other workspace dependencies as needed
+# assistant.workspace = true
+# client.workspace = true
+# project.workspace = true
+# settings.workspace = true

crates/settings_ui/src/settings_ui.rs 🔗

@@ -1,20 +1,24 @@
 mod appearance_settings_controls;
 
 use std::any::TypeId;
+use std::ops::{Not, Range};
 
+use anyhow::Context as _;
 use command_palette_hooks::CommandPaletteFilter;
 use editor::EditorSettingsControls;
 use feature_flags::{FeatureFlag, FeatureFlagViewExt};
-use gpui::{App, Entity, EventEmitter, FocusHandle, Focusable, actions};
-use ui::prelude::*;
-use workspace::item::{Item, ItemEvent};
-use workspace::{Workspace, with_active_or_new_workspace};
+use gpui::{App, Entity, EventEmitter, FocusHandle, Focusable, ReadGlobal, actions};
+use settings::{SettingsStore, SettingsUiEntryVariant, SettingsUiItemSingle, SettingsValue};
+use smallvec::SmallVec;
+use ui::{NumericStepper, SwitchField, ToggleButtonGroup, ToggleButtonSimple, prelude::*};
+use workspace::{
+    Workspace,
+    item::{Item, ItemEvent},
+    with_active_or_new_workspace,
+};
 
 use crate::appearance_settings_controls::AppearanceSettingsControls;
 
-pub mod keybindings;
-pub mod ui_components;
-
 pub struct SettingsUiFeatureFlag;
 
 impl FeatureFlag for SettingsUiFeatureFlag {
@@ -75,18 +79,18 @@ pub fn init(cx: &mut App) {
         .detach();
     })
     .detach();
-
-    keybindings::init(cx);
 }
 
 pub struct SettingsPage {
     focus_handle: FocusHandle,
+    settings_tree: SettingsUiTree,
 }
 
 impl SettingsPage {
     pub fn new(_workspace: &Workspace, cx: &mut Context<Workspace>) -> Entity<Self> {
         cx.new(|cx| Self {
             focus_handle: cx.focus_handle(),
+            settings_tree: SettingsUiTree::new(cx),
         })
     }
 }
@@ -119,26 +123,472 @@ impl Item for SettingsPage {
     }
 }
 
+// We want to iterate over the side bar with root groups
+// - this is a loop over top level groups, and if any are expanded, recursively displaying their items
+// - Should be able to get all items from a group (flatten a group)
+// - Should be able to toggle/untoggle groups in UI (at least in sidebar)
+// - Search should be available
+//  - there should be an index of text -> item mappings, for using fuzzy::match
+//   - Do we want to show the parent groups when a item is matched?
+
+struct UIEntry {
+    title: &'static str,
+    path: &'static str,
+    _depth: usize,
+    // a
+    //  b     < a descendant range < a total descendant range
+    //    f   |                    |
+    //    g   |                    |
+    //  c     <                    |
+    //    d                        |
+    //    e                        <
+    descendant_range: Range<usize>,
+    total_descendant_range: Range<usize>,
+    next_sibling: Option<usize>,
+    // expanded: bool,
+    render: Option<SettingsUiItemSingle>,
+}
+
+struct SettingsUiTree {
+    root_entry_indices: Vec<usize>,
+    entries: Vec<UIEntry>,
+    active_entry_index: usize,
+}
+
+fn build_tree_item(
+    tree: &mut Vec<UIEntry>,
+    group: SettingsUiEntryVariant,
+    depth: usize,
+    prev_index: Option<usize>,
+) {
+    let index = tree.len();
+    tree.push(UIEntry {
+        title: "",
+        path: "",
+        _depth: depth,
+        descendant_range: index + 1..index + 1,
+        total_descendant_range: index + 1..index + 1,
+        render: None,
+        next_sibling: None,
+    });
+    if let Some(prev_index) = prev_index {
+        tree[prev_index].next_sibling = Some(index);
+    }
+    match group {
+        SettingsUiEntryVariant::Group {
+            path,
+            title,
+            items: group_items,
+        } => {
+            tree[index].path = path;
+            tree[index].title = title;
+            for group_item in group_items {
+                let prev_index = tree[index]
+                    .descendant_range
+                    .is_empty()
+                    .not()
+                    .then_some(tree[index].descendant_range.end - 1);
+                tree[index].descendant_range.end = tree.len() + 1;
+                build_tree_item(tree, group_item.item, depth + 1, prev_index);
+                tree[index].total_descendant_range.end = tree.len();
+            }
+        }
+        SettingsUiEntryVariant::Item { path, item } => {
+            tree[index].path = path;
+            // todo(settings_ui) create title from path in macro, and use here
+            tree[index].title = path;
+            tree[index].render = Some(item);
+        }
+        SettingsUiEntryVariant::None => {
+            return;
+        }
+    }
+}
+
+impl SettingsUiTree {
+    fn new(cx: &App) -> Self {
+        let settings_store = SettingsStore::global(cx);
+        let mut tree = vec![];
+        let mut root_entry_indices = vec![];
+        for item in settings_store.settings_ui_items() {
+            if matches!(item.item, SettingsUiEntryVariant::None) {
+                continue;
+            }
+
+            assert!(
+                matches!(item.item, SettingsUiEntryVariant::Group { .. }),
+                "top level items must be groups: {:?}",
+                match item.item {
+                    SettingsUiEntryVariant::Item { path, .. } => path,
+                    _ => unreachable!(),
+                }
+            );
+            let prev_root_entry_index = root_entry_indices.last().copied();
+            root_entry_indices.push(tree.len());
+            build_tree_item(&mut tree, item.item, 0, prev_root_entry_index);
+        }
+
+        root_entry_indices.sort_by_key(|i| tree[*i].title);
+
+        let active_entry_index = root_entry_indices[0];
+        Self {
+            entries: tree,
+            root_entry_indices,
+            active_entry_index,
+        }
+    }
+}
+
+fn render_nav(tree: &SettingsUiTree, _window: &mut Window, cx: &mut Context<SettingsPage>) -> Div {
+    let mut nav = v_flex().p_4().gap_2();
+    for &index in &tree.root_entry_indices {
+        nav = nav.child(
+            div()
+                .id(index)
+                .on_click(cx.listener(move |settings, _, _, _| {
+                    settings.settings_tree.active_entry_index = index;
+                }))
+                .child(
+                    Label::new(SharedString::new_static(tree.entries[index].title))
+                        .size(LabelSize::Large)
+                        .when(tree.active_entry_index == index, |this| {
+                            this.color(Color::Selected)
+                        }),
+                ),
+        );
+    }
+    nav
+}
+
+fn render_content(
+    tree: &SettingsUiTree,
+    window: &mut Window,
+    cx: &mut Context<SettingsPage>,
+) -> impl IntoElement {
+    let Some(entry) = tree.entries.get(tree.active_entry_index) else {
+        return div()
+            .size_full()
+            .child(Label::new(SharedString::new_static("No settings found")).color(Color::Error));
+    };
+    let mut content = v_flex().size_full().gap_4();
+
+    let mut child_index = entry
+        .descendant_range
+        .is_empty()
+        .not()
+        .then_some(entry.descendant_range.start);
+    let mut path = smallvec::smallvec![entry.path];
+
+    while let Some(index) = child_index {
+        let child = &tree.entries[index];
+        child_index = child.next_sibling;
+        if child.render.is_none() {
+            // todo(settings_ui): subgroups?
+            continue;
+        }
+        path.push(child.path);
+        let settings_value = settings_value_from_settings_and_path(
+            path.clone(),
+            // PERF: how to structure this better? There feels like there's a way to avoid the clone
+            // and every value lookup
+            SettingsStore::global(cx).raw_user_settings(),
+            SettingsStore::global(cx).raw_default_settings(),
+        );
+        content = content.child(
+            div()
+                .child(
+                    Label::new(SharedString::new_static(tree.entries[index].title))
+                        .size(LabelSize::Large)
+                        .when(tree.active_entry_index == index, |this| {
+                            this.color(Color::Selected)
+                        }),
+                )
+                .child(render_item_single(
+                    settings_value,
+                    child.render.as_ref().unwrap(),
+                    window,
+                    cx,
+                )),
+        );
+
+        path.pop();
+    }
+
+    return content;
+}
+
 impl Render for SettingsPage {
-    fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
-        v_flex()
+    fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+        div()
+            .grid()
+            .grid_cols(16)
             .p_4()
+            .bg(cx.theme().colors().editor_background)
             .size_full()
-            .gap_4()
-            .child(Label::new("Settings").size(LabelSize::Large))
-            .child(
-                v_flex().gap_1().child(Label::new("Appearance")).child(
-                    v_flex()
-                        .elevation_2(cx)
-                        .child(AppearanceSettingsControls::new()),
-                ),
-            )
             .child(
-                v_flex().gap_1().child(Label::new("Editor")).child(
-                    v_flex()
-                        .elevation_2(cx)
-                        .child(EditorSettingsControls::new()),
-                ),
+                div()
+                    .col_span(2)
+                    .h_full()
+                    .child(render_nav(&self.settings_tree, window, cx)),
             )
+            .child(div().col_span(4).h_full().child(render_content(
+                &self.settings_tree,
+                window,
+                cx,
+            )))
     }
 }
+
+// todo(settings_ui): remove, only here as inspiration
+#[allow(dead_code)]
+fn render_old_appearance_settings(cx: &mut App) -> impl IntoElement {
+    v_flex()
+        .p_4()
+        .size_full()
+        .gap_4()
+        .child(Label::new("Settings").size(LabelSize::Large))
+        .child(
+            v_flex().gap_1().child(Label::new("Appearance")).child(
+                v_flex()
+                    .elevation_2(cx)
+                    .child(AppearanceSettingsControls::new()),
+            ),
+        )
+        .child(
+            v_flex().gap_1().child(Label::new("Editor")).child(
+                v_flex()
+                    .elevation_2(cx)
+                    .child(EditorSettingsControls::new()),
+            ),
+        )
+}
+
+fn element_id_from_path(path: &[&'static str]) -> ElementId {
+    if path.is_empty() {
+        panic!("Path length must not be zero");
+    } else if path.len() == 1 {
+        ElementId::Name(SharedString::new_static(path[0]))
+    } else {
+        ElementId::from((
+            ElementId::from(SharedString::new_static(path[path.len() - 2])),
+            SharedString::new_static(path[path.len() - 1]),
+        ))
+    }
+}
+
+fn render_item_single(
+    settings_value: SettingsValue<serde_json::Value>,
+    item: &SettingsUiItemSingle,
+    window: &mut Window,
+    cx: &mut App,
+) -> AnyElement {
+    match item {
+        SettingsUiItemSingle::Custom(_) => div()
+            .child(format!("Item: {}", settings_value.path.join(".")))
+            .into_any_element(),
+        SettingsUiItemSingle::SwitchField => {
+            render_any_item(settings_value, render_switch_field, window, cx)
+        }
+        SettingsUiItemSingle::NumericStepper => {
+            render_any_item(settings_value, render_numeric_stepper, window, cx)
+        }
+        SettingsUiItemSingle::ToggleGroup(variants) => {
+            render_toggle_button_group(settings_value, variants, window, cx)
+        }
+        SettingsUiItemSingle::DropDown(_) => {
+            unimplemented!("DropDown settings UI items are not implemented yet")
+        }
+    }
+}
+
+fn read_settings_value_from_path<'a>(
+    settings_contents: &'a serde_json::Value,
+    path: &[&'static str],
+) -> Option<&'a serde_json::Value> {
+    let Some((key, remaining)) = path.split_first() else {
+        return Some(settings_contents);
+    };
+    let Some(value) = settings_contents.get(key) else {
+        return None;
+    };
+
+    read_settings_value_from_path(value, remaining)
+}
+
+fn downcast_any_item<T: serde::de::DeserializeOwned>(
+    settings_value: SettingsValue<serde_json::Value>,
+) -> SettingsValue<T> {
+    let value = settings_value
+        .value
+        .map(|value| serde_json::from_value::<T>(value).expect("value is not a T"));
+    // todo(settings_ui) Create test that constructs UI tree, and asserts that all elements have default values
+    let default_value = serde_json::from_value::<T>(settings_value.default_value)
+        .expect("default value is not an Option<T>");
+    let deserialized_setting_value = SettingsValue {
+        title: settings_value.title,
+        path: settings_value.path,
+        value,
+        default_value,
+    };
+    deserialized_setting_value
+}
+
+fn render_any_item<T: serde::de::DeserializeOwned>(
+    settings_value: SettingsValue<serde_json::Value>,
+    render_fn: impl Fn(SettingsValue<T>, &mut Window, &mut App) -> AnyElement + 'static,
+    window: &mut Window,
+    cx: &mut App,
+) -> AnyElement {
+    let deserialized_setting_value = downcast_any_item(settings_value);
+    render_fn(deserialized_setting_value, window, cx)
+}
+
+fn render_numeric_stepper(
+    value: SettingsValue<u64>,
+    _window: &mut Window,
+    _cx: &mut App,
+) -> AnyElement {
+    let id = element_id_from_path(&value.path);
+    let path = value.path.clone();
+    let num = value.value.unwrap_or(value.default_value);
+
+    NumericStepper::new(
+        id,
+        num.to_string(),
+        {
+            let path = value.path.clone();
+            move |_, _, cx| {
+                let Some(number) = serde_json::Number::from_u128(num.saturating_sub(1) as u128)
+                else {
+                    return;
+                };
+                let new_value = serde_json::Value::Number(number);
+                SettingsValue::write_value(&path, new_value, cx);
+            }
+        },
+        move |_, _, cx| {
+            let Some(number) = serde_json::Number::from_u128(num.saturating_add(1) as u128) else {
+                return;
+            };
+
+            let new_value = serde_json::Value::Number(number);
+
+            SettingsValue::write_value(&path, new_value, cx);
+        },
+    )
+    .style(ui::NumericStepperStyle::Outlined)
+    .into_any_element()
+}
+
+fn render_switch_field(
+    value: SettingsValue<bool>,
+    _window: &mut Window,
+    _cx: &mut App,
+) -> AnyElement {
+    let id = element_id_from_path(&value.path);
+    let path = value.path.clone();
+    SwitchField::new(
+        id,
+        SharedString::new_static(value.title),
+        None,
+        match value.read() {
+            true => ToggleState::Selected,
+            false => ToggleState::Unselected,
+        },
+        move |toggle_state, _, cx| {
+            let new_value = serde_json::Value::Bool(match toggle_state {
+                ToggleState::Indeterminate => {
+                    return;
+                }
+                ToggleState::Selected => true,
+                ToggleState::Unselected => false,
+            });
+
+            SettingsValue::write_value(&path, new_value, cx);
+        },
+    )
+    .into_any_element()
+}
+
+fn render_toggle_button_group(
+    value: SettingsValue<serde_json::Value>,
+    variants: &'static [&'static str],
+    _: &mut Window,
+    _: &mut App,
+) -> AnyElement {
+    let value = downcast_any_item::<String>(value);
+
+    fn make_toggle_group<const LEN: usize>(
+        group_name: &'static str,
+        value: SettingsValue<String>,
+        variants: &'static [&'static str],
+    ) -> AnyElement {
+        let mut variants_array: [&'static str; LEN] = ["default"; LEN];
+        variants_array.copy_from_slice(variants);
+        let active_value = value.read();
+
+        let selected_idx = variants_array
+            .iter()
+            .enumerate()
+            .find_map(|(idx, variant)| {
+                if variant == &active_value {
+                    Some(idx)
+                } else {
+                    None
+                }
+            });
+
+        ToggleButtonGroup::single_row(
+            group_name,
+            variants_array.map(|variant| {
+                let path = value.path.clone();
+                ToggleButtonSimple::new(variant, move |_, _, cx| {
+                    SettingsValue::write_value(
+                        &path,
+                        serde_json::Value::String(variant.to_string()),
+                        cx,
+                    );
+                })
+            }),
+        )
+        .when_some(selected_idx, |this, ix| this.selected_index(ix))
+        .style(ui::ToggleButtonGroupStyle::Filled)
+        .into_any_element()
+    }
+
+    macro_rules! templ_toggl_with_const_param {
+        ($len:expr) => {
+            if variants.len() == $len {
+                return make_toggle_group::<$len>(value.title, value, variants);
+            }
+        };
+    }
+    templ_toggl_with_const_param!(1);
+    templ_toggl_with_const_param!(2);
+    templ_toggl_with_const_param!(3);
+    templ_toggl_with_const_param!(4);
+    templ_toggl_with_const_param!(5);
+    templ_toggl_with_const_param!(6);
+    unreachable!("Too many variants");
+}
+
+fn settings_value_from_settings_and_path(
+    path: SmallVec<[&'static str; 1]>,
+    user_settings: &serde_json::Value,
+    default_settings: &serde_json::Value,
+) -> SettingsValue<serde_json::Value> {
+    let default_value = read_settings_value_from_path(default_settings, &path)
+        .with_context(|| format!("No default value for item at path {:?}", path.join(".")))
+        .expect("Default value set for item")
+        .clone();
+
+    let value = read_settings_value_from_path(user_settings, &path).cloned();
+    let settings_value = SettingsValue {
+        default_value,
+        value,
+        path: path.clone(),
+        // todo(settings_ui) title for items
+        title: path.last().expect("path non empty"),
+    };
+    return settings_value;
+}

crates/settings_ui_macros/Cargo.toml 🔗

@@ -0,0 +1,22 @@
+[package]
+name = "settings_ui_macros"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lib]
+path = "src/settings_ui_macros.rs"
+proc-macro = true
+
+[lints]
+workspace = true
+
+[features]
+default = []
+
+[dependencies]
+proc-macro2.workspace = true
+quote.workspace = true
+syn.workspace = true
+workspace-hack.workspace = true

crates/settings_ui_macros/src/settings_ui_macros.rs 🔗

@@ -0,0 +1,201 @@
+use proc_macro2::TokenStream;
+use quote::{ToTokens, quote};
+use syn::{Data, DeriveInput, LitStr, Token, parse_macro_input};
+
+/// Derive macro for the `SettingsUi` marker trait.
+///
+/// This macro automatically implements the `SettingsUi` trait for the annotated type.
+/// The `SettingsUi` trait is a marker trait used to indicate that a type can be
+/// displayed in the settings UI.
+///
+/// # Example
+///
+/// ```
+/// use settings::SettingsUi;
+/// use settings_ui_macros::SettingsUi;
+///
+/// #[derive(SettingsUi)]
+/// #[settings_ui(group = "Standard", path = "standard")]
+/// struct MySettings {
+///     enabled: bool,
+///     count: u64,
+/// }
+/// ```
+#[proc_macro_derive(SettingsUi, attributes(settings_ui))]
+pub fn derive_settings_ui(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+    let input = parse_macro_input!(input as DeriveInput);
+    let name = &input.ident;
+
+    // Handle generic parameters if present
+    let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
+
+    let mut group_name = Option::<String>::None;
+    let mut path_name = Option::<String>::None;
+
+    for attr in &input.attrs {
+        if attr.path().is_ident("settings_ui") {
+            attr.parse_nested_meta(|meta| {
+                if meta.path.is_ident("group") {
+                    if group_name.is_some() {
+                        return Err(meta.error("Only one 'group' path can be specified"));
+                    }
+                    meta.input.parse::<Token![=]>()?;
+                    let lit: LitStr = meta.input.parse()?;
+                    group_name = Some(lit.value());
+                } else if meta.path.is_ident("path") {
+                    // todo(settings_ui) try get KEY from Settings if possible, and once we do,
+                    // if can get key from settings, throw error if path also passed
+                    if path_name.is_some() {
+                        return Err(meta.error("Only one 'path' can be specified"));
+                    }
+                    meta.input.parse::<Token![=]>()?;
+                    let lit: LitStr = meta.input.parse()?;
+                    path_name = Some(lit.value());
+                }
+                Ok(())
+            })
+            .unwrap_or_else(|e| panic!("in #[settings_ui] attribute: {}", e));
+        }
+    }
+
+    if path_name.is_none() && group_name.is_some() {
+        // todo(settings_ui) derive path from settings
+        panic!("path is required when group is specified");
+    }
+
+    let ui_render_fn_body = generate_ui_item_body(group_name.as_ref(), path_name.as_ref(), &input);
+
+    let settings_ui_item_fn_body = path_name
+        .as_ref()
+        .map(|path_name| map_ui_item_to_render(path_name, quote! { Self }))
+        .unwrap_or(quote! {
+            settings::SettingsUiEntry {
+                item: settings::SettingsUiEntryVariant::None
+            }
+        });
+
+    let expanded = quote! {
+        impl #impl_generics settings::SettingsUi for #name #ty_generics #where_clause {
+            fn settings_ui_item() -> settings::SettingsUiItem {
+                #ui_render_fn_body
+            }
+
+            fn settings_ui_entry() -> settings::SettingsUiEntry {
+                #settings_ui_item_fn_body
+            }
+        }
+    };
+
+    proc_macro::TokenStream::from(expanded)
+}
+
+fn map_ui_item_to_render(path: &str, ty: TokenStream) -> TokenStream {
+    quote! {
+        settings::SettingsUiEntry {
+            item: match #ty::settings_ui_item() {
+                settings::SettingsUiItem::Group{title, items} => settings::SettingsUiEntryVariant::Group {
+                    title,
+                    path: #path,
+                    items,
+                },
+                settings::SettingsUiItem::Single(item) => settings::SettingsUiEntryVariant::Item {
+                    path: #path,
+                    item,
+                },
+                settings::SettingsUiItem::None => settings::SettingsUiEntryVariant::None,
+            }
+        }
+    }
+}
+
+fn generate_ui_item_body(
+    group_name: Option<&String>,
+    path_name: Option<&String>,
+    input: &syn::DeriveInput,
+) -> TokenStream {
+    match (group_name, path_name, &input.data) {
+        (_, _, Data::Union(_)) => unimplemented!("Derive SettingsUi for Unions"),
+        (None, None, Data::Struct(_)) => quote! {
+            settings::SettingsUiItem::None
+        },
+        (Some(_), None, Data::Struct(_)) => quote! {
+            settings::SettingsUiItem::None
+        },
+        (None, Some(_), Data::Struct(_)) => quote! {
+            settings::SettingsUiItem::None
+        },
+        (Some(group_name), _, Data::Struct(data_struct)) => {
+            let fields = data_struct
+                .fields
+                .iter()
+                .filter(|field| {
+                    !field.attrs.iter().any(|attr| {
+                        let mut has_skip = false;
+                        if attr.path().is_ident("settings_ui") {
+                            let _ = attr.parse_nested_meta(|meta| {
+                                if meta.path.is_ident("skip") {
+                                    has_skip = true;
+                                }
+                                Ok(())
+                            });
+                        }
+
+                        has_skip
+                    })
+                })
+                .map(|field| {
+                    (
+                        field.ident.clone().expect("tuple fields").to_string(),
+                        field.ty.to_token_stream(),
+                    )
+                })
+                .map(|(name, ty)| map_ui_item_to_render(&name, ty));
+
+            quote! {
+                settings::SettingsUiItem::Group{ title: #group_name, items: vec![#(#fields),*] }
+            }
+        }
+        (None, _, Data::Enum(data_enum)) => {
+            let mut lowercase = false;
+            for attr in &input.attrs {
+                if attr.path().is_ident("serde") {
+                    attr.parse_nested_meta(|meta| {
+                        if meta.path.is_ident("rename_all") {
+                            meta.input.parse::<Token![=]>()?;
+                            let lit = meta.input.parse::<LitStr>()?.value();
+                            // todo(settings_ui) snake case
+                            lowercase = lit == "lowercase" || lit == "snake_case";
+                        }
+                        Ok(())
+                    })
+                    .ok();
+                }
+            }
+            let length = data_enum.variants.len();
+
+            let variants = data_enum.variants.iter().map(|variant| {
+                let string = variant.ident.clone().to_string();
+
+                if lowercase {
+                    string.to_lowercase()
+                } else {
+                    string
+                }
+            });
+
+            if length > 6 {
+                quote! {
+                    settings::SettingsUiItem::Single(settings::SettingsUiItemSingle::DropDown(&[#(#variants),*]))
+                }
+            } else {
+                quote! {
+                    settings::SettingsUiItem::Single(settings::SettingsUiItemSingle::ToggleGroup(&[#(#variants),*]))
+                }
+            }
+        }
+        // todo(settings_ui) discriminated unions
+        (_, _, Data::Enum(_)) => quote! {
+            settings::SettingsUiItem::None
+        },
+    }
+}

crates/supermaven/src/supermaven_completion_provider.rs 🔗

@@ -19,8 +19,10 @@ pub struct SupermavenCompletionProvider {
     supermaven: Entity<Supermaven>,
     buffer_id: Option<EntityId>,
     completion_id: Option<SupermavenCompletionStateId>,
+    completion_text: Option<String>,
     file_extension: Option<String>,
     pending_refresh: Option<Task<Result<()>>>,
+    completion_position: Option<language::Anchor>,
 }
 
 impl SupermavenCompletionProvider {
@@ -29,16 +31,19 @@ impl SupermavenCompletionProvider {
             supermaven,
             buffer_id: None,
             completion_id: None,
+            completion_text: None,
             file_extension: None,
             pending_refresh: None,
+            completion_position: None,
         }
     }
 }
 
 // Computes the edit prediction from the difference between the completion text.
-// this is defined by greedily matching the buffer text against the completion text, with any leftover buffer placed at the end.
-// for example, given the completion text "moo cows are cool" and the buffer text "cowsre pool", the completion state would be
-// the inlays "moo ", " a", and "cool" which will render as "[moo ]cows[ a]re [cool]pool" in the editor.
+// This is defined by greedily matching the buffer text against the completion text.
+// Inlays are inserted for parts of the completion text that are not present in the buffer text.
+// For example, given the completion text "axbyc" and the buffer text "xy", the rendered output in the editor would be "[a]x[b]y[c]".
+// The parts in brackets are the inlays.
 fn completion_from_diff(
     snapshot: BufferSnapshot,
     completion_text: &str,
@@ -133,6 +138,14 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
         debounce: bool,
         cx: &mut Context<Self>,
     ) {
+        // Only make new completion requests when debounce is true (i.e., when text is typed)
+        // When debounce is false (i.e., cursor movement), we should not make new requests
+        if !debounce {
+            return;
+        }
+
+        reset_completion_cache(self, cx);
+
         let Some(mut completion) = self.supermaven.update(cx, |supermaven, cx| {
             supermaven.complete(&buffer_handle, cursor_position, cx)
         }) else {
@@ -146,6 +159,17 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
 
             while let Some(()) = completion.updates.next().await {
                 this.update(cx, |this, cx| {
+                    // Get the completion text and cache it
+                    if let Some(text) =
+                        this.supermaven
+                            .read(cx)
+                            .completion(&buffer_handle, cursor_position, cx)
+                    {
+                        this.completion_text = Some(text.to_string());
+
+                        this.completion_position = Some(cursor_position);
+                    }
+
                     this.completion_id = Some(completion.id);
                     this.buffer_id = Some(buffer_handle.entity_id());
                     this.file_extension = buffer_handle.read(cx).file().and_then(|file| {
@@ -156,7 +180,6 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
                                 .to_string(),
                         )
                     });
-                    this.pending_refresh = None;
                     cx.notify();
                 })?;
             }
@@ -174,13 +197,11 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
     }
 
     fn accept(&mut self, _cx: &mut Context<Self>) {
-        self.pending_refresh = None;
-        self.completion_id = None;
+        reset_completion_cache(self, _cx);
     }
 
     fn discard(&mut self, _cx: &mut Context<Self>) {
-        self.pending_refresh = None;
-        self.completion_id = None;
+        reset_completion_cache(self, _cx);
     }
 
     fn suggest(
@@ -189,10 +210,34 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
         cursor_position: Anchor,
         cx: &mut Context<Self>,
     ) -> Option<EditPrediction> {
-        let completion_text = self
-            .supermaven
-            .read(cx)
-            .completion(buffer, cursor_position, cx)?;
+        if self.buffer_id != Some(buffer.entity_id()) {
+            return None;
+        }
+
+        if self.completion_id.is_none() {
+            return None;
+        }
+
+        let completion_text = if let Some(cached_text) = &self.completion_text {
+            cached_text.as_str()
+        } else {
+            let text = self
+                .supermaven
+                .read(cx)
+                .completion(buffer, cursor_position, cx)?;
+            self.completion_text = Some(text.to_string());
+            text
+        };
+
+        // Check if the cursor is still at the same position as the completion request
+        // If we don't have a completion position stored, don't show the completion
+        if let Some(completion_position) = self.completion_position {
+            if cursor_position != completion_position {
+                return None;
+            }
+        } else {
+            return None;
+        }
 
         let completion_text = trim_to_end_of_line_unless_leading_newline(completion_text);
 
@@ -200,15 +245,20 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
 
         if !completion_text.trim().is_empty() {
             let snapshot = buffer.read(cx).snapshot();
-            let mut point = cursor_position.to_point(&snapshot);
-            point.column = snapshot.line_len(point.row);
-            let range = cursor_position..snapshot.anchor_after(point);
+
+            // Calculate the range from cursor to end of line correctly
+            let cursor_point = cursor_position.to_point(&snapshot);
+            let end_of_line = snapshot.anchor_after(language::Point::new(
+                cursor_point.row,
+                snapshot.line_len(cursor_point.row),
+            ));
+            let delete_range = cursor_position..end_of_line;
 
             Some(completion_from_diff(
                 snapshot,
                 completion_text,
                 cursor_position,
-                range,
+                delete_range,
             ))
         } else {
             None
@@ -216,6 +266,17 @@ impl EditPredictionProvider for SupermavenCompletionProvider {
     }
 }
 
+fn reset_completion_cache(
+    provider: &mut SupermavenCompletionProvider,
+    _cx: &mut Context<SupermavenCompletionProvider>,
+) {
+    provider.pending_refresh = None;
+    provider.completion_id = None;
+    provider.completion_text = None;
+    provider.completion_position = None;
+    provider.buffer_id = None;
+}
+
 fn trim_to_end_of_line_unless_leading_newline(text: &str) -> &str {
     if has_leading_newline(text) {
         text

crates/terminal/src/terminal.rs 🔗

@@ -354,7 +354,7 @@ impl TerminalBuilder {
         window_id: u64,
         completion_tx: Option<Sender<Option<ExitStatus>>>,
         cx: &App,
-        activation_script: Option<String>,
+        activation_script: Vec<String>,
     ) -> Result<TerminalBuilder> {
         // If the parent environment doesn't have a locale set
         // (As is the case when launched from a .app on MacOS),
@@ -493,7 +493,9 @@ impl TerminalBuilder {
         let pty_tx = event_loop.channel();
         let _io_thread = event_loop.spawn(); // DANGER
 
-        let terminal = Terminal {
+        let no_task = task.is_none();
+
+        let mut terminal = Terminal {
             task,
             pty_tx: Notifier(pty_tx),
             completion_tx,
@@ -518,7 +520,7 @@ impl TerminalBuilder {
             last_hyperlink_search_position: None,
             #[cfg(windows)]
             shell_program,
-            activation_script,
+            activation_script: activation_script.clone(),
             template: CopyTemplate {
                 shell,
                 env,
@@ -529,6 +531,14 @@ impl TerminalBuilder {
             },
         };
 
+        if !activation_script.is_empty() && no_task {
+            for activation_script in activation_script {
+                terminal.input(activation_script.into_bytes());
+                terminal.write_to_pty(b"\n");
+            }
+            terminal.clear();
+        }
+
         Ok(TerminalBuilder {
             terminal,
             events_rx,
@@ -712,7 +722,7 @@ pub struct Terminal {
     #[cfg(windows)]
     shell_program: Option<String>,
     template: CopyTemplate,
-    activation_script: Option<String>,
+    activation_script: Vec<String>,
 }
 
 struct CopyTemplate {
@@ -2218,7 +2228,7 @@ mod tests {
                 0,
                 Some(completion_tx),
                 cx,
-                None,
+                vec![],
             )
             .unwrap()
             .subscribe(cx)

crates/terminal/src/terminal_settings.rs 🔗

@@ -6,7 +6,7 @@ use gpui::{AbsoluteLength, App, FontFallbacks, FontFeatures, FontWeight, Pixels,
 use schemars::JsonSchema;
 use serde_derive::{Deserialize, Serialize};
 
-use settings::SettingsSources;
+use settings::{SettingsSources, SettingsUi};
 use std::path::PathBuf;
 use task::Shell;
 use theme::FontFamilyName;
@@ -24,7 +24,7 @@ pub struct Toolbar {
     pub breadcrumbs: bool,
 }
 
-#[derive(Clone, Debug, Deserialize)]
+#[derive(Clone, Debug, Deserialize, SettingsUi)]
 pub struct TerminalSettings {
     pub shell: Shell,
     pub working_directory: WorkingDirectory,

crates/terminal_view/src/terminal_element.rs 🔗

@@ -1,4 +1,4 @@
-use editor::{CursorLayout, HighlightedRange, HighlightedRangeLine};
+use editor::{CursorLayout, EditorSettings, HighlightedRange, HighlightedRangeLine};
 use gpui::{
     AbsoluteLength, AnyElement, App, AvailableSpace, Bounds, ContentMask, Context, DispatchPhase,
     Element, ElementId, Entity, FocusHandle, Font, FontFeatures, FontStyle, FontWeight,
@@ -1257,12 +1257,17 @@ impl Element for TerminalElement {
                         if let Some((start_y, highlighted_range_lines)) =
                             to_highlighted_range_lines(relative_highlighted_range, layout, origin)
                         {
+                            let corner_radius = if EditorSettings::get_global(cx).rounded_selection {
+                                0.15 * layout.dimensions.line_height
+                            } else {
+                                Pixels::ZERO
+                            };
                             let hr = HighlightedRange {
                                 start_y,
                                 line_height: layout.dimensions.line_height,
                                 lines: highlighted_range_lines,
                                 color: *color,
-                                corner_radius: 0.15 * layout.dimensions.line_height,
+                                corner_radius: corner_radius,
                             };
                             hr.paint(true, bounds, window);
                         }

crates/theme/src/settings.rs 🔗

@@ -13,7 +13,7 @@ use gpui::{
 use refineable::Refineable;
 use schemars::{JsonSchema, json_schema};
 use serde::{Deserialize, Serialize};
-use settings::{ParameterizedJsonSchema, Settings, SettingsSources};
+use settings::{ParameterizedJsonSchema, Settings, SettingsSources, SettingsUi};
 use std::sync::Arc;
 use util::ResultExt as _;
 use util::schemars::replace_subschema;
@@ -87,7 +87,7 @@ impl From<UiDensity> for String {
 }
 
 /// Customizable settings for the UI and theme system.
-#[derive(Clone, PartialEq)]
+#[derive(Clone, PartialEq, SettingsUi)]
 pub struct ThemeSettings {
     /// The UI font size. Determines the size of text in the UI,
     /// as well as the size of a [gpui::Rems] unit.

crates/title_bar/Cargo.toml 🔗

@@ -42,7 +42,7 @@ rpc.workspace = true
 schemars.workspace = true
 serde.workspace = true
 settings.workspace = true
-settings_ui.workspace = true
+keymap_editor.workspace = true
 smallvec.workspace = true
 story = { workspace = true, optional = true }
 telemetry.workspace = true

crates/title_bar/src/system_window_tabs.rs 🔗

@@ -1,4 +1,4 @@
-use settings::Settings;
+use settings::{Settings, SettingsStore};
 
 use gpui::{
     AnyWindowHandle, Context, Hsla, InteractiveElement, MouseButton, ParentElement, ScrollHandle,
@@ -11,7 +11,7 @@ use ui::{
     LabelSize, Tab, h_flex, prelude::*, right_click_menu,
 };
 use workspace::{
-    CloseWindow, ItemSettings, Workspace,
+    CloseWindow, ItemSettings, Workspace, WorkspaceSettings,
     item::{ClosePosition, ShowCloseButton},
 };
 
@@ -53,6 +53,46 @@ impl SystemWindowTabs {
     }
 
     pub fn init(cx: &mut App) {
+        let mut was_use_system_window_tabs =
+            WorkspaceSettings::get_global(cx).use_system_window_tabs;
+
+        cx.observe_global::<SettingsStore>(move |cx| {
+            let use_system_window_tabs = WorkspaceSettings::get_global(cx).use_system_window_tabs;
+            if use_system_window_tabs == was_use_system_window_tabs {
+                return;
+            }
+            was_use_system_window_tabs = use_system_window_tabs;
+
+            let tabbing_identifier = if use_system_window_tabs {
+                Some(String::from("zed"))
+            } else {
+                None
+            };
+
+            if use_system_window_tabs {
+                SystemWindowTabController::init(cx);
+            }
+
+            cx.windows().iter().for_each(|handle| {
+                let _ = handle.update(cx, |_, window, cx| {
+                    window.set_tabbing_identifier(tabbing_identifier.clone());
+                    if use_system_window_tabs {
+                        let tabs = if let Some(tabs) = window.tabbed_windows() {
+                            tabs
+                        } else {
+                            vec![SystemWindowTab::new(
+                                SharedString::from(window.window_title()),
+                                window.window_handle(),
+                            )]
+                        };
+
+                        SystemWindowTabController::add_tab(cx, handle.window_id(), tabs);
+                    }
+                });
+            });
+        })
+        .detach();
+
         cx.observe_new(|workspace: &mut Workspace, _, _| {
             workspace.register_action_renderer(|div, _, window, cx| {
                 let window_id = window.window_handle().window_id();
@@ -336,6 +376,7 @@ impl SystemWindowTabs {
 
 impl Render for SystemWindowTabs {
     fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+        let use_system_window_tabs = WorkspaceSettings::get_global(cx).use_system_window_tabs;
         let active_background_color = cx.theme().colors().title_bar_background;
         let inactive_background_color = cx.theme().colors().tab_bar_background;
         let entity = cx.entity();
@@ -368,7 +409,9 @@ impl Render for SystemWindowTabs {
             .collect::<Vec<_>>();
 
         let number_of_tabs = tab_items.len().max(1);
-        if !window.tab_bar_visible() && !visible {
+        if (!window.tab_bar_visible() && !visible)
+            || (!use_system_window_tabs && number_of_tabs == 1)
+        {
             return h_flex().into_any_element();
         }
 

crates/title_bar/src/title_bar.rs 🔗

@@ -29,10 +29,11 @@ use gpui::{
     IntoElement, MouseButton, ParentElement, Render, StatefulInteractiveElement, Styled,
     Subscription, WeakEntity, Window, actions, div,
 };
+use keymap_editor;
 use onboarding_banner::OnboardingBanner;
 use project::Project;
+use remote::RemoteConnectionOptions;
 use settings::Settings as _;
-use settings_ui::keybindings;
 use std::sync::Arc;
 use theme::ActiveTheme;
 use title_bar_settings::TitleBarSettings;
@@ -304,12 +305,14 @@ impl TitleBar {
 
     fn render_remote_project_connection(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
         let options = self.project.read(cx).remote_connection_options(cx)?;
-        let host: SharedString = options.connection_string().into();
+        let host: SharedString = options.display_name().into();
 
-        let nickname = options
-            .nickname
-            .map(|nick| nick.into())
-            .unwrap_or_else(|| host.clone());
+        let nickname = if let RemoteConnectionOptions::Ssh(options) = options {
+            options.nickname.map(|nick| nick.into())
+        } else {
+            None
+        };
+        let nickname = nickname.unwrap_or_else(|| host.clone());
 
         let (indicator_color, meta) = match self.project.read(cx).remote_connection_state(cx)? {
             remote::ConnectionState::Connecting => (Color::Info, format!("Connecting to: {host}")),
@@ -684,7 +687,7 @@ impl TitleBar {
                             "Settings Profiles",
                             zed_actions::settings_profile_selector::Toggle.boxed_clone(),
                         )
-                        .action("Key Bindings", Box::new(keybindings::OpenKeymapEditor))
+                        .action("Key Bindings", Box::new(keymap_editor::OpenKeymapEditor))
                         .action(
                             "Themes…",
                             zed_actions::theme_selector::Toggle::default().boxed_clone(),
@@ -732,7 +735,7 @@ impl TitleBar {
                                 "Settings Profiles",
                                 zed_actions::settings_profile_selector::Toggle.boxed_clone(),
                             )
-                            .action("Key Bindings", Box::new(keybindings::OpenKeymapEditor))
+                            .action("Key Bindings", Box::new(keymap_editor::OpenKeymapEditor))
                             .action(
                                 "Themes…",
                                 zed_actions::theme_selector::Toggle::default().boxed_clone(),

crates/title_bar/src/title_bar_settings.rs 🔗

@@ -1,9 +1,10 @@
 use db::anyhow;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
-#[derive(Copy, Clone, Deserialize, Debug)]
+#[derive(Copy, Clone, Deserialize, Debug, SettingsUi)]
+#[settings_ui(group = "Title Bar", path = "title_bar")]
 pub struct TitleBarSettings {
     pub show_branch_icon: bool,
     pub show_onboarding_banner: bool,

crates/vim/src/normal/scroll.rs 🔗

@@ -98,7 +98,7 @@ impl Vim {
         Vim::take_forced_motion(cx);
         self.exit_temporary_normal(window, cx);
         self.update_editor(cx, |_, editor, cx| {
-            scroll_editor(editor, move_cursor, &amount, window, cx)
+            scroll_editor(editor, move_cursor, amount, window, cx)
         });
     }
 }
@@ -106,7 +106,7 @@ impl Vim {
 fn scroll_editor(
     editor: &mut Editor,
     preserve_cursor_position: bool,
-    amount: &ScrollAmount,
+    amount: ScrollAmount,
     window: &mut Window,
     cx: &mut Context<Editor>,
 ) {
@@ -126,7 +126,7 @@ fn scroll_editor(
                 ScrollAmount::Line(amount.lines(visible_line_count) - 1.0)
             }
         }
-        _ => amount.clone(),
+        _ => amount,
     };
 
     editor.scroll_screen(&amount, window, cx);

crates/vim/src/test.rs 🔗

@@ -8,13 +8,15 @@ use collections::HashMap;
 use command_palette::CommandPalette;
 use editor::{
     AnchorRangeExt, DisplayPoint, Editor, EditorMode, MultiBuffer, actions::DeleteLine,
-    display_map::DisplayRow, test::editor_test_context::EditorTestContext,
+    code_context_menus::CodeContextMenu, display_map::DisplayRow,
+    test::editor_test_context::EditorTestContext,
 };
 use futures::StreamExt;
-use gpui::{KeyBinding, Modifiers, MouseButton, TestAppContext};
+use gpui::{KeyBinding, Modifiers, MouseButton, TestAppContext, px};
 use language::Point;
 pub use neovim_backed_test_context::*;
 use settings::SettingsStore;
+use ui::Pixels;
 use util::test::marked_text_ranges;
 pub use vim_test_context::*;
 
@@ -971,6 +973,87 @@ async fn test_comma_w(cx: &mut gpui::TestAppContext) {
         .assert_eq("hellˇo hello\nhello hello");
 }
 
+#[gpui::test]
+async fn test_completion_menu_scroll_aside(cx: &mut TestAppContext) {
+    let mut cx = VimTestContext::new_typescript(cx).await;
+
+    cx.lsp
+        .set_request_handler::<lsp::request::Completion, _, _>(move |_, _| async move {
+            Ok(Some(lsp::CompletionResponse::Array(vec![
+                lsp::CompletionItem {
+                    label: "Test Item".to_string(),
+                    documentation: Some(lsp::Documentation::String(
+                        "This is some very long documentation content that will be displayed in the aside panel for scrolling.\n".repeat(50)
+                    )),
+                    ..Default::default()
+                },
+            ])))
+        });
+
+    cx.set_state("variableˇ", Mode::Insert);
+    cx.simulate_keystroke(".");
+    cx.executor().run_until_parked();
+
+    let mut initial_offset: Pixels = px(0.0);
+
+    cx.update_editor(|editor, _, _| {
+        let binding = editor.context_menu().borrow();
+        let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else {
+            panic!("Should have completions menu open");
+        };
+
+        initial_offset = menu.scroll_handle_aside.offset().y;
+    });
+
+    // The `ctrl-e` shortcut should scroll the completion menu's aside content
+    // down, so the updated offset should be lower than the initial offset.
+    cx.simulate_keystroke("ctrl-e");
+    cx.update_editor(|editor, _, _| {
+        let binding = editor.context_menu().borrow();
+        let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else {
+            panic!("Should have completions menu open");
+        };
+
+        assert!(menu.scroll_handle_aside.offset().y < initial_offset);
+    });
+
+    // The `ctrl-y` shortcut should do the inverse scrolling of `ctrl-e`, so the
+    // offset should now be the same as the initial offset.
+    cx.simulate_keystroke("ctrl-y");
+    cx.update_editor(|editor, _, _| {
+        let binding = editor.context_menu().borrow();
+        let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else {
+            panic!("Should have completions menu open");
+        };
+
+        assert_eq!(menu.scroll_handle_aside.offset().y, initial_offset);
+    });
+
+    // The `ctrl-d` shortcut should scroll the completion menu's aside content
+    // down, so the updated offset should be lower than the initial offset.
+    cx.simulate_keystroke("ctrl-d");
+    cx.update_editor(|editor, _, _| {
+        let binding = editor.context_menu().borrow();
+        let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else {
+            panic!("Should have completions menu open");
+        };
+
+        assert!(menu.scroll_handle_aside.offset().y < initial_offset);
+    });
+
+    // The `ctrl-u` shortcut should do the inverse scrolling of `ctrl-d`, so the
+    // offset should now be the same as the initial offset.
+    cx.simulate_keystroke("ctrl-u");
+    cx.update_editor(|editor, _, _| {
+        let binding = editor.context_menu().borrow();
+        let Some(CodeContextMenu::Completions(menu)) = binding.as_ref() else {
+            panic!("Should have completions menu open");
+        };
+
+        assert_eq!(menu.scroll_handle_aside.offset().y, initial_offset);
+    });
+}
+
 #[gpui::test]
 async fn test_rename(cx: &mut gpui::TestAppContext) {
     let mut cx = VimTestContext::new_typescript(cx).await;

crates/vim/src/test/vim_test_context.rs 🔗

@@ -49,6 +49,10 @@ impl VimTestContext {
         Self::new_with_lsp(
             EditorLspTestContext::new_typescript(
                 lsp::ServerCapabilities {
+                    completion_provider: Some(lsp::CompletionOptions {
+                        trigger_characters: Some(vec![".".to_string()]),
+                        ..Default::default()
+                    }),
                     rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                         prepare_provider: Some(true),
                         work_done_progress_options: Default::default(),

crates/vim/src/vim.rs 🔗

@@ -39,7 +39,7 @@ use object::Object;
 use schemars::JsonSchema;
 use serde::Deserialize;
 use serde_derive::Serialize;
-use settings::{Settings, SettingsSources, SettingsStore, update_settings_file};
+use settings::{Settings, SettingsSources, SettingsStore, SettingsUi, update_settings_file};
 use state::{Mode, Operator, RecordedSelection, SearchState, VimGlobals};
 use std::{mem, ops::Range, sync::Arc};
 use surrounds::SurroundsType;
@@ -1774,7 +1774,7 @@ struct CursorShapeSettings {
     pub insert: Option<CursorShape>,
 }
 
-#[derive(Deserialize)]
+#[derive(Deserialize, SettingsUi)]
 struct VimSettings {
     pub default_mode: Mode,
     pub toggle_relative_line_numbers: bool,

crates/vim_mode_setting/src/vim_mode_setting.rs 🔗

@@ -6,7 +6,7 @@
 
 use anyhow::Result;
 use gpui::App;
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
 /// Initializes the `vim_mode_setting` crate.
 pub fn init(cx: &mut App) {
@@ -17,6 +17,7 @@ pub fn init(cx: &mut App) {
 /// Whether or not to enable Vim mode.
 ///
 /// Default: false
+#[derive(SettingsUi)]
 pub struct VimModeSetting(pub bool);
 
 impl Settings for VimModeSetting {
@@ -43,6 +44,7 @@ impl Settings for VimModeSetting {
 /// Whether or not to enable Helix mode.
 ///
 /// Default: false
+#[derive(SettingsUi)]
 pub struct HelixModeSetting(pub bool);
 
 impl Settings for HelixModeSetting {

crates/workspace/src/item.rs 🔗

@@ -17,7 +17,7 @@ use gpui::{
 use project::{Project, ProjectEntryId, ProjectPath};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsLocation, SettingsSources};
+use settings::{Settings, SettingsLocation, SettingsSources, SettingsUi};
 use smallvec::SmallVec;
 use std::{
     any::{Any, TypeId},
@@ -49,7 +49,7 @@ impl Default for SaveOptions {
     }
 }
 
-#[derive(Deserialize)]
+#[derive(Deserialize, SettingsUi)]
 pub struct ItemSettings {
     pub git_status: bool,
     pub close_position: ClosePosition,
@@ -59,7 +59,7 @@ pub struct ItemSettings {
     pub show_close_button: ShowCloseButton,
 }
 
-#[derive(Deserialize)]
+#[derive(Deserialize, SettingsUi)]
 pub struct PreviewTabsSettings {
     pub enabled: bool,
     pub enable_preview_from_file_finder: bool,

crates/workspace/src/persistence.rs 🔗

@@ -20,6 +20,7 @@ use project::debugger::breakpoint_store::{BreakpointState, SourceBreakpoint};
 
 use language::{LanguageName, Toolchain};
 use project::WorktreeId;
+use remote::{RemoteConnectionOptions, SshConnectionOptions, WslConnectionOptions};
 use sqlez::{
     bindable::{Bind, Column, StaticColumnCount},
     statement::{SqlType, Statement},
@@ -33,11 +34,12 @@ use uuid::Uuid;
 use crate::{
     WorkspaceId,
     path_list::{PathList, SerializedPathList},
+    persistence::model::RemoteConnectionKind,
 };
 
 use model::{
-    GroupId, ItemId, PaneId, SerializedItem, SerializedPane, SerializedPaneGroup,
-    SerializedSshConnection, SerializedWorkspace, SshConnectionId,
+    GroupId, ItemId, PaneId, RemoteConnectionId, SerializedItem, SerializedPane,
+    SerializedPaneGroup, SerializedWorkspace,
 };
 
 use self::model::{DockStructure, SerializedWorkspaceLocation};
@@ -627,6 +629,88 @@ impl Domain for WorkspaceDb {
             END
             WHERE paths IS NOT NULL
         ),
+        sql!(
+            CREATE TABLE remote_connections(
+                id INTEGER PRIMARY KEY,
+                kind TEXT NOT NULL,
+                host TEXT,
+                port INTEGER,
+                user TEXT,
+                distro TEXT
+            );
+
+            CREATE TABLE workspaces_2(
+                workspace_id INTEGER PRIMARY KEY,
+                paths TEXT,
+                paths_order TEXT,
+                remote_connection_id INTEGER REFERENCES remote_connections(id),
+                timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL,
+                window_state TEXT,
+                window_x REAL,
+                window_y REAL,
+                window_width REAL,
+                window_height REAL,
+                display BLOB,
+                left_dock_visible INTEGER,
+                left_dock_active_panel TEXT,
+                right_dock_visible INTEGER,
+                right_dock_active_panel TEXT,
+                bottom_dock_visible INTEGER,
+                bottom_dock_active_panel TEXT,
+                left_dock_zoom INTEGER,
+                right_dock_zoom INTEGER,
+                bottom_dock_zoom INTEGER,
+                fullscreen INTEGER,
+                centered_layout INTEGER,
+                session_id TEXT,
+                window_id INTEGER
+            ) STRICT;
+
+            INSERT INTO remote_connections
+            SELECT
+                id,
+                "ssh" as kind,
+                host,
+                port,
+                user,
+                NULL as distro
+            FROM ssh_connections;
+
+            INSERT
+            INTO workspaces_2
+            SELECT
+                workspace_id,
+                paths,
+                paths_order,
+                ssh_connection_id as remote_connection_id,
+                timestamp,
+                window_state,
+                window_x,
+                window_y,
+                window_width,
+                window_height,
+                display,
+                left_dock_visible,
+                left_dock_active_panel,
+                right_dock_visible,
+                right_dock_active_panel,
+                bottom_dock_visible,
+                bottom_dock_active_panel,
+                left_dock_zoom,
+                right_dock_zoom,
+                bottom_dock_zoom,
+                fullscreen,
+                centered_layout,
+                session_id,
+                window_id
+            FROM
+                workspaces;
+
+            DROP TABLE workspaces;
+            ALTER TABLE workspaces_2 RENAME TO workspaces;
+
+            CREATE UNIQUE INDEX ix_workspaces_location ON workspaces(remote_connection_id, paths);
+        ),
     ];
 
     // Allow recovering from bad migration that was initially shipped to nightly
@@ -650,10 +734,10 @@ impl WorkspaceDb {
         self.workspace_for_roots_internal(worktree_roots, None)
     }
 
-    pub(crate) fn ssh_workspace_for_roots<P: AsRef<Path>>(
+    pub(crate) fn remote_workspace_for_roots<P: AsRef<Path>>(
         &self,
         worktree_roots: &[P],
-        ssh_project_id: SshConnectionId,
+        ssh_project_id: RemoteConnectionId,
     ) -> Option<SerializedWorkspace> {
         self.workspace_for_roots_internal(worktree_roots, Some(ssh_project_id))
     }
@@ -661,7 +745,7 @@ impl WorkspaceDb {
     pub(crate) fn workspace_for_roots_internal<P: AsRef<Path>>(
         &self,
         worktree_roots: &[P],
-        ssh_connection_id: Option<SshConnectionId>,
+        remote_connection_id: Option<RemoteConnectionId>,
     ) -> Option<SerializedWorkspace> {
         // paths are sorted before db interactions to ensure that the order of the paths
         // doesn't affect the workspace selection for existing workspaces
@@ -713,13 +797,13 @@ impl WorkspaceDb {
                 FROM workspaces
                 WHERE
                     paths IS ? AND
-                    ssh_connection_id IS ?
+                    remote_connection_id IS ?
                 LIMIT 1
             })
             .map(|mut prepared_statement| {
                 (prepared_statement)((
                     root_paths.serialize().paths,
-                    ssh_connection_id.map(|id| id.0 as i32),
+                    remote_connection_id.map(|id| id.0 as i32),
                 ))
                 .unwrap()
             })
@@ -803,14 +887,12 @@ impl WorkspaceDb {
         log::debug!("Saving workspace at location: {:?}", workspace.location);
         self.write(move |conn| {
             conn.with_savepoint("update_worktrees", || {
-                let ssh_connection_id = match &workspace.location {
+                let remote_connection_id = match workspace.location.clone() {
                     SerializedWorkspaceLocation::Local => None,
-                    SerializedWorkspaceLocation::Ssh(connection) => {
-                        Some(Self::get_or_create_ssh_connection_query(
+                    SerializedWorkspaceLocation::Remote(connection_options) => {
+                        Some(Self::get_or_create_remote_connection_internal(
                             conn,
-                            connection.host.clone(),
-                            connection.port,
-                            connection.user.clone(),
+                            connection_options
                         )?.0)
                     }
                 };
@@ -860,11 +942,11 @@ impl WorkspaceDb {
                     WHERE
                         workspace_id != ?1 AND
                         paths IS ?2 AND
-                        ssh_connection_id IS ?3
+                        remote_connection_id IS ?3
                 ))?((
                     workspace.id,
                     paths.paths.clone(),
-                    ssh_connection_id,
+                    remote_connection_id,
                 ))
                 .context("clearing out old locations")?;
 
@@ -874,7 +956,7 @@ impl WorkspaceDb {
                         workspace_id,
                         paths,
                         paths_order,
-                        ssh_connection_id,
+                        remote_connection_id,
                         left_dock_visible,
                         left_dock_active_panel,
                         left_dock_zoom,
@@ -893,7 +975,7 @@ impl WorkspaceDb {
                     UPDATE SET
                         paths = ?2,
                         paths_order = ?3,
-                        ssh_connection_id = ?4,
+                        remote_connection_id = ?4,
                         left_dock_visible = ?5,
                         left_dock_active_panel = ?6,
                         left_dock_zoom = ?7,
@@ -912,7 +994,7 @@ impl WorkspaceDb {
                     workspace.id,
                     paths.paths.clone(),
                     paths.order.clone(),
-                    ssh_connection_id,
+                    remote_connection_id,
                     workspace.docks,
                     workspace.session_id,
                     workspace.window_id,
@@ -931,39 +1013,78 @@ impl WorkspaceDb {
         .await;
     }
 
-    pub(crate) async fn get_or_create_ssh_connection(
+    pub(crate) async fn get_or_create_remote_connection(
         &self,
-        host: String,
-        port: Option<u16>,
-        user: Option<String>,
-    ) -> Result<SshConnectionId> {
-        self.write(move |conn| Self::get_or_create_ssh_connection_query(conn, host, port, user))
+        options: RemoteConnectionOptions,
+    ) -> Result<RemoteConnectionId> {
+        self.write(move |conn| Self::get_or_create_remote_connection_internal(conn, options))
             .await
     }
 
-    fn get_or_create_ssh_connection_query(
+    fn get_or_create_remote_connection_internal(
+        this: &Connection,
+        options: RemoteConnectionOptions,
+    ) -> Result<RemoteConnectionId> {
+        let kind;
+        let user;
+        let mut host = None;
+        let mut port = None;
+        let mut distro = None;
+        match options {
+            RemoteConnectionOptions::Ssh(options) => {
+                kind = RemoteConnectionKind::Ssh;
+                host = Some(options.host);
+                port = options.port;
+                user = options.username;
+            }
+            RemoteConnectionOptions::Wsl(options) => {
+                kind = RemoteConnectionKind::Wsl;
+                distro = Some(options.distro_name);
+                user = options.user;
+            }
+        }
+        Self::get_or_create_remote_connection_query(this, kind, host, port, user, distro)
+    }
+
+    fn get_or_create_remote_connection_query(
         this: &Connection,
-        host: String,
+        kind: RemoteConnectionKind,
+        host: Option<String>,
         port: Option<u16>,
         user: Option<String>,
-    ) -> Result<SshConnectionId> {
+        distro: Option<String>,
+    ) -> Result<RemoteConnectionId> {
         if let Some(id) = this.select_row_bound(sql!(
-            SELECT id FROM ssh_connections WHERE host IS ? AND port IS ? AND user IS ? LIMIT 1
-        ))?((host.clone(), port, user.clone()))?
-        {
-            Ok(SshConnectionId(id))
+            SELECT id
+            FROM remote_connections
+            WHERE
+                kind IS ? AND
+                host IS ? AND
+                port IS ? AND
+                user IS ? AND
+                distro IS ?
+            LIMIT 1
+        ))?((
+            kind.serialize(),
+            host.clone(),
+            port,
+            user.clone(),
+            distro.clone(),
+        ))? {
+            Ok(RemoteConnectionId(id))
         } else {
-            log::debug!("Inserting SSH project at host {host}");
             let id = this.select_row_bound(sql!(
-                INSERT INTO ssh_connections (
+                INSERT INTO remote_connections (
+                    kind,
                     host,
                     port,
-                    user
-                ) VALUES (?1, ?2, ?3)
+                    user,
+                    distro
+                ) VALUES (?1, ?2, ?3, ?4, ?5)
                 RETURNING id
-            ))?((host, port, user))?
-            .context("failed to insert ssh project")?;
-            Ok(SshConnectionId(id))
+            ))?((kind.serialize(), host, port, user, distro))?
+            .context("failed to insert remote project")?;
+            Ok(RemoteConnectionId(id))
         }
     }
 
@@ -973,15 +1094,17 @@ impl WorkspaceDb {
         }
     }
 
-    fn recent_workspaces(&self) -> Result<Vec<(WorkspaceId, PathList, Option<u64>)>> {
+    fn recent_workspaces(
+        &self,
+    ) -> Result<Vec<(WorkspaceId, PathList, Option<RemoteConnectionId>)>> {
         Ok(self
             .recent_workspaces_query()?
             .into_iter()
-            .map(|(id, paths, order, ssh_connection_id)| {
+            .map(|(id, paths, order, remote_connection_id)| {
                 (
                     id,
                     PathList::deserialize(&SerializedPathList { paths, order }),
-                    ssh_connection_id,
+                    remote_connection_id.map(RemoteConnectionId),
                 )
             })
             .collect())
@@ -989,11 +1112,11 @@ impl WorkspaceDb {
 
     query! {
         fn recent_workspaces_query() -> Result<Vec<(WorkspaceId, String, String, Option<u64>)>> {
-            SELECT workspace_id, paths, paths_order, ssh_connection_id
+            SELECT workspace_id, paths, paths_order, remote_connection_id
             FROM workspaces
             WHERE
                 paths IS NOT NULL OR
-                ssh_connection_id IS NOT NULL
+                remote_connection_id IS NOT NULL
             ORDER BY timestamp DESC
         }
     }
@@ -1001,15 +1124,15 @@ impl WorkspaceDb {
     fn session_workspaces(
         &self,
         session_id: String,
-    ) -> Result<Vec<(PathList, Option<u64>, Option<SshConnectionId>)>> {
+    ) -> Result<Vec<(PathList, Option<u64>, Option<RemoteConnectionId>)>> {
         Ok(self
             .session_workspaces_query(session_id)?
             .into_iter()
-            .map(|(paths, order, window_id, ssh_connection_id)| {
+            .map(|(paths, order, window_id, remote_connection_id)| {
                 (
                     PathList::deserialize(&SerializedPathList { paths, order }),
                     window_id,
-                    ssh_connection_id.map(SshConnectionId),
+                    remote_connection_id.map(RemoteConnectionId),
                 )
             })
             .collect())
@@ -1017,7 +1140,7 @@ impl WorkspaceDb {
 
     query! {
         fn session_workspaces_query(session_id: String) -> Result<Vec<(String, String, Option<u64>, Option<u64>)>> {
-            SELECT paths, paths_order, window_id, ssh_connection_id
+            SELECT paths, paths_order, window_id, remote_connection_id
             FROM workspaces
             WHERE session_id = ?1
             ORDER BY timestamp DESC
@@ -1039,40 +1162,55 @@ impl WorkspaceDb {
         }
     }
 
-    fn ssh_connections(&self) -> Result<HashMap<SshConnectionId, SerializedSshConnection>> {
-        Ok(self
-            .ssh_connections_query()?
-            .into_iter()
-            .map(|(id, host, port, user)| {
-                (
-                    SshConnectionId(id),
-                    SerializedSshConnection { host, port, user },
-                )
-            })
-            .collect())
-    }
-
-    query! {
-        pub fn ssh_connections_query() -> Result<Vec<(u64, String, Option<u16>, Option<String>)>> {
-            SELECT id, host, port, user
-            FROM ssh_connections
-        }
-    }
-
-    pub(crate) fn ssh_connection(&self, id: SshConnectionId) -> Result<SerializedSshConnection> {
-        let row = self.ssh_connection_query(id.0)?;
-        Ok(SerializedSshConnection {
-            host: row.0,
-            port: row.1,
-            user: row.2,
+    fn remote_connections(&self) -> Result<HashMap<RemoteConnectionId, RemoteConnectionOptions>> {
+        Ok(self.select(sql!(
+            SELECT
+                id, kind, host, port, user, distro
+            FROM
+                remote_connections
+        ))?()?
+        .into_iter()
+        .filter_map(|(id, kind, host, port, user, distro)| {
+            Some((
+                RemoteConnectionId(id),
+                Self::remote_connection_from_row(kind, host, port, user, distro)?,
+            ))
         })
+        .collect())
     }
 
-    query! {
-        fn ssh_connection_query(id: u64) -> Result<(String, Option<u16>, Option<String>)> {
-            SELECT host, port, user
-            FROM ssh_connections
+    pub(crate) fn remote_connection(
+        &self,
+        id: RemoteConnectionId,
+    ) -> Result<RemoteConnectionOptions> {
+        let (kind, host, port, user, distro) = self.select_row_bound(sql!(
+            SELECT kind, host, port, user, distro
+            FROM remote_connections
             WHERE id = ?
+        ))?(id.0)?
+        .context("no such remote connection")?;
+        Self::remote_connection_from_row(kind, host, port, user, distro)
+            .context("invalid remote_connection row")
+    }
+
+    fn remote_connection_from_row(
+        kind: String,
+        host: Option<String>,
+        port: Option<u16>,
+        user: Option<String>,
+        distro: Option<String>,
+    ) -> Option<RemoteConnectionOptions> {
+        match RemoteConnectionKind::deserialize(&kind)? {
+            RemoteConnectionKind::Wsl => Some(RemoteConnectionOptions::Wsl(WslConnectionOptions {
+                distro_name: distro?,
+                user: user,
+            })),
+            RemoteConnectionKind::Ssh => Some(RemoteConnectionOptions::Ssh(SshConnectionOptions {
+                host: host?,
+                port,
+                username: user,
+                ..Default::default()
+            })),
         }
     }
 
@@ -1108,14 +1246,14 @@ impl WorkspaceDb {
     ) -> Result<Vec<(WorkspaceId, SerializedWorkspaceLocation, PathList)>> {
         let mut result = Vec::new();
         let mut delete_tasks = Vec::new();
-        let ssh_connections = self.ssh_connections()?;
+        let remote_connections = self.remote_connections()?;
 
-        for (id, paths, ssh_connection_id) in self.recent_workspaces()? {
-            if let Some(ssh_connection_id) = ssh_connection_id.map(SshConnectionId) {
-                if let Some(ssh_connection) = ssh_connections.get(&ssh_connection_id) {
+        for (id, paths, remote_connection_id) in self.recent_workspaces()? {
+            if let Some(remote_connection_id) = remote_connection_id {
+                if let Some(connection_options) = remote_connections.get(&remote_connection_id) {
                     result.push((
                         id,
-                        SerializedWorkspaceLocation::Ssh(ssh_connection.clone()),
+                        SerializedWorkspaceLocation::Remote(connection_options.clone()),
                         paths,
                     ));
                 } else {
@@ -1157,12 +1295,14 @@ impl WorkspaceDb {
     ) -> Result<Vec<(SerializedWorkspaceLocation, PathList)>> {
         let mut workspaces = Vec::new();
 
-        for (paths, window_id, ssh_connection_id) in
+        for (paths, window_id, remote_connection_id) in
             self.session_workspaces(last_session_id.to_owned())?
         {
-            if let Some(ssh_connection_id) = ssh_connection_id {
+            if let Some(remote_connection_id) = remote_connection_id {
                 workspaces.push((
-                    SerializedWorkspaceLocation::Ssh(self.ssh_connection(ssh_connection_id)?),
+                    SerializedWorkspaceLocation::Remote(
+                        self.remote_connection(remote_connection_id)?,
+                    ),
                     paths,
                     window_id.map(WindowId::from),
                 ));
@@ -1545,6 +1685,7 @@ mod tests {
     };
     use gpui;
     use pretty_assertions::assert_eq;
+    use remote::SshConnectionOptions;
     use std::{thread, time::Duration};
 
     #[gpui::test]
@@ -2196,14 +2337,20 @@ mod tests {
         };
 
         let connection_id = db
-            .get_or_create_ssh_connection("my-host".to_string(), Some(1234), None)
+            .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions {
+                host: "my-host".to_string(),
+                port: Some(1234),
+                ..Default::default()
+            }))
             .await
             .unwrap();
 
         let workspace_5 = SerializedWorkspace {
             id: WorkspaceId(5),
             paths: PathList::default(),
-            location: SerializedWorkspaceLocation::Ssh(db.ssh_connection(connection_id).unwrap()),
+            location: SerializedWorkspaceLocation::Remote(
+                db.remote_connection(connection_id).unwrap(),
+            ),
             center_group: Default::default(),
             window_bounds: Default::default(),
             display: Default::default(),
@@ -2362,13 +2509,12 @@ mod tests {
     }
 
     #[gpui::test]
-    async fn test_last_session_workspace_locations_ssh_projects() {
-        let db = WorkspaceDb::open_test_db(
-            "test_serializing_workspaces_last_session_workspaces_ssh_projects",
-        )
-        .await;
+    async fn test_last_session_workspace_locations_remote() {
+        let db =
+            WorkspaceDb::open_test_db("test_serializing_workspaces_last_session_workspaces_remote")
+                .await;
 
-        let ssh_connections = [
+        let remote_connections = [
             ("host-1", "my-user-1"),
             ("host-2", "my-user-2"),
             ("host-3", "my-user-3"),
@@ -2376,30 +2522,31 @@ mod tests {
         ]
         .into_iter()
         .map(|(host, user)| async {
-            db.get_or_create_ssh_connection(host.to_string(), None, Some(user.to_string()))
+            let options = RemoteConnectionOptions::Ssh(SshConnectionOptions {
+                host: host.to_string(),
+                username: Some(user.to_string()),
+                ..Default::default()
+            });
+            db.get_or_create_remote_connection(options.clone())
                 .await
                 .unwrap();
-            SerializedSshConnection {
-                host: host.into(),
-                port: None,
-                user: Some(user.into()),
-            }
+            options
         })
         .collect::<Vec<_>>();
 
-        let ssh_connections = futures::future::join_all(ssh_connections).await;
+        let remote_connections = futures::future::join_all(remote_connections).await;
 
         let workspaces = [
-            (1, ssh_connections[0].clone(), 9),
-            (2, ssh_connections[1].clone(), 5),
-            (3, ssh_connections[2].clone(), 8),
-            (4, ssh_connections[3].clone(), 2),
+            (1, remote_connections[0].clone(), 9),
+            (2, remote_connections[1].clone(), 5),
+            (3, remote_connections[2].clone(), 8),
+            (4, remote_connections[3].clone(), 2),
         ]
         .into_iter()
-        .map(|(id, ssh_connection, window_id)| SerializedWorkspace {
+        .map(|(id, remote_connection, window_id)| SerializedWorkspace {
             id: WorkspaceId(id),
             paths: PathList::default(),
-            location: SerializedWorkspaceLocation::Ssh(ssh_connection),
+            location: SerializedWorkspaceLocation::Remote(remote_connection),
             center_group: Default::default(),
             window_bounds: Default::default(),
             display: Default::default(),
@@ -2429,28 +2576,28 @@ mod tests {
         assert_eq!(
             have[0],
             (
-                SerializedWorkspaceLocation::Ssh(ssh_connections[3].clone()),
+                SerializedWorkspaceLocation::Remote(remote_connections[3].clone()),
                 PathList::default()
             )
         );
         assert_eq!(
             have[1],
             (
-                SerializedWorkspaceLocation::Ssh(ssh_connections[2].clone()),
+                SerializedWorkspaceLocation::Remote(remote_connections[2].clone()),
                 PathList::default()
             )
         );
         assert_eq!(
             have[2],
             (
-                SerializedWorkspaceLocation::Ssh(ssh_connections[1].clone()),
+                SerializedWorkspaceLocation::Remote(remote_connections[1].clone()),
                 PathList::default()
             )
         );
         assert_eq!(
             have[3],
             (
-                SerializedWorkspaceLocation::Ssh(ssh_connections[0].clone()),
+                SerializedWorkspaceLocation::Remote(remote_connections[0].clone()),
                 PathList::default()
             )
         );
@@ -2465,13 +2612,23 @@ mod tests {
         let user = Some("user".to_string());
 
         let connection_id = db
-            .get_or_create_ssh_connection(host.clone(), port, user.clone())
+            .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions {
+                host: host.clone(),
+                port,
+                username: user.clone(),
+                ..Default::default()
+            }))
             .await
             .unwrap();
 
         // Test that calling the function again with the same parameters returns the same project
         let same_connection = db
-            .get_or_create_ssh_connection(host.clone(), port, user.clone())
+            .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions {
+                host: host.clone(),
+                port,
+                username: user.clone(),
+                ..Default::default()
+            }))
             .await
             .unwrap();
 
@@ -2483,7 +2640,12 @@ mod tests {
         let user2 = Some("otheruser".to_string());
 
         let different_connection = db
-            .get_or_create_ssh_connection(host2.clone(), port2, user2.clone())
+            .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions {
+                host: host2.clone(),
+                port: port2,
+                username: user2.clone(),
+                ..Default::default()
+            }))
             .await
             .unwrap();
 
@@ -2497,12 +2659,22 @@ mod tests {
         let (host, port, user) = ("example.com".to_string(), None, None);
 
         let connection_id = db
-            .get_or_create_ssh_connection(host.clone(), port, None)
+            .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions {
+                host: host.clone(),
+                port,
+                username: None,
+                ..Default::default()
+            }))
             .await
             .unwrap();
 
         let same_connection_id = db
-            .get_or_create_ssh_connection(host.clone(), port, user.clone())
+            .get_or_create_remote_connection(RemoteConnectionOptions::Ssh(SshConnectionOptions {
+                host: host.clone(),
+                port,
+                username: user.clone(),
+                ..Default::default()
+            }))
             .await
             .unwrap();
 
@@ -2510,8 +2682,8 @@ mod tests {
     }
 
     #[gpui::test]
-    async fn test_get_ssh_connections() {
-        let db = WorkspaceDb::open_test_db("test_get_ssh_connections").await;
+    async fn test_get_remote_connections() {
+        let db = WorkspaceDb::open_test_db("test_get_remote_connections").await;
 
         let connections = [
             ("example.com".to_string(), None, None),
@@ -2526,39 +2698,49 @@ mod tests {
         let mut ids = Vec::new();
         for (host, port, user) in connections.iter() {
             ids.push(
-                db.get_or_create_ssh_connection(host.clone(), *port, user.clone())
-                    .await
-                    .unwrap(),
+                db.get_or_create_remote_connection(RemoteConnectionOptions::Ssh(
+                    SshConnectionOptions {
+                        host: host.clone(),
+                        port: *port,
+                        username: user.clone(),
+                        ..Default::default()
+                    },
+                ))
+                .await
+                .unwrap(),
             );
         }
 
-        let stored_projects = db.ssh_connections().unwrap();
+        let stored_connections = db.remote_connections().unwrap();
         assert_eq!(
-            stored_projects,
+            stored_connections,
             [
                 (
                     ids[0],
-                    SerializedSshConnection {
+                    RemoteConnectionOptions::Ssh(SshConnectionOptions {
                         host: "example.com".into(),
                         port: None,
-                        user: None,
-                    }
+                        username: None,
+                        ..Default::default()
+                    }),
                 ),
                 (
                     ids[1],
-                    SerializedSshConnection {
+                    RemoteConnectionOptions::Ssh(SshConnectionOptions {
                         host: "anotherexample.com".into(),
                         port: Some(123),
-                        user: Some("user2".into()),
-                    }
+                        username: Some("user2".into()),
+                        ..Default::default()
+                    }),
                 ),
                 (
                     ids[2],
-                    SerializedSshConnection {
+                    RemoteConnectionOptions::Ssh(SshConnectionOptions {
                         host: "yetanother.com".into(),
                         port: Some(345),
-                        user: None,
-                    }
+                        username: None,
+                        ..Default::default()
+                    }),
                 ),
             ]
             .into_iter()

crates/workspace/src/persistence/model.rs 🔗

@@ -12,7 +12,7 @@ use db::sqlez::{
 use gpui::{AsyncWindowContext, Entity, WeakEntity};
 
 use project::{Project, debugger::breakpoint_store::SourceBreakpoint};
-use serde::{Deserialize, Serialize};
+use remote::RemoteConnectionOptions;
 use std::{
     collections::BTreeMap,
     path::{Path, PathBuf},
@@ -24,19 +24,18 @@ use uuid::Uuid;
 #[derive(
     Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize,
 )]
-pub(crate) struct SshConnectionId(pub u64);
+pub(crate) struct RemoteConnectionId(pub u64);
 
-#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
-pub struct SerializedSshConnection {
-    pub host: String,
-    pub port: Option<u16>,
-    pub user: Option<String>,
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub(crate) enum RemoteConnectionKind {
+    Ssh,
+    Wsl,
 }
 
 #[derive(Debug, PartialEq, Clone)]
 pub enum SerializedWorkspaceLocation {
     Local,
-    Ssh(SerializedSshConnection),
+    Remote(RemoteConnectionOptions),
 }
 
 impl SerializedWorkspaceLocation {
@@ -68,6 +67,23 @@ pub struct DockStructure {
     pub(crate) bottom: DockData,
 }
 
+impl RemoteConnectionKind {
+    pub(crate) fn serialize(&self) -> &'static str {
+        match self {
+            RemoteConnectionKind::Ssh => "ssh",
+            RemoteConnectionKind::Wsl => "wsl",
+        }
+    }
+
+    pub(crate) fn deserialize(text: &str) -> Option<Self> {
+        match text {
+            "ssh" => Some(Self::Ssh),
+            "wsl" => Some(Self::Wsl),
+            _ => None,
+        }
+    }
+}
+
 impl Column for DockStructure {
     fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
         let (left, next_index) = DockData::column(statement, start_index)?;

crates/workspace/src/workspace.rs 🔗

@@ -67,14 +67,14 @@ pub use pane_group::*;
 use persistence::{DB, SerializedWindowBounds, model::SerializedWorkspace};
 pub use persistence::{
     DB as WORKSPACE_DB, WorkspaceDb, delete_unloaded_items,
-    model::{ItemId, SerializedSshConnection, SerializedWorkspaceLocation},
+    model::{ItemId, SerializedWorkspaceLocation},
 };
 use postage::stream::Stream;
 use project::{
     DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId,
     debugger::{breakpoint_store::BreakpointStoreEvent, session::ThreadStatus},
 };
-use remote::{RemoteClientDelegate, SshConnectionOptions, remote_client::ConnectionIdentifier};
+use remote::{RemoteClientDelegate, RemoteConnectionOptions, remote_client::ConnectionIdentifier};
 use schemars::JsonSchema;
 use serde::Deserialize;
 use session::AppSession;
@@ -648,23 +648,30 @@ impl ProjectItemRegistry {
                             ) as Box<_>;
                             Ok((project_entry_id, build_workspace_item))
                         }
-                        Err(e) => match entry_abs_path.as_deref().filter(|_| is_file) {
-                            Some(abs_path) => match cx.update(|window, cx| {
-                                T::for_broken_project_item(abs_path, is_local, &e, window, cx)
-                            })? {
-                                Some(broken_project_item_view) => {
-                                    let build_workspace_item = Box::new(
-                                    move |_: &mut Pane, _: &mut Window, cx: &mut Context<Pane>| {
-                                        cx.new(|_| broken_project_item_view).boxed_clone()
-                                    },
-                                )
-                                    as Box<_>;
-                                    Ok((None, build_workspace_item))
+                        Err(e) => {
+                            if e.error_code() == ErrorCode::Internal {
+                                if let Some(abs_path) =
+                                    entry_abs_path.as_deref().filter(|_| is_file)
+                                {
+                                    if let Some(broken_project_item_view) =
+                                        cx.update(|window, cx| {
+                                            T::for_broken_project_item(
+                                                abs_path, is_local, &e, window, cx,
+                                            )
+                                        })?
+                                    {
+                                        let build_workspace_item = Box::new(
+                                            move |_: &mut Pane, _: &mut Window, cx: &mut Context<Pane>| {
+                                                cx.new(|_| broken_project_item_view).boxed_clone()
+                                            },
+                                        )
+                                        as Box<_>;
+                                        return Ok((None, build_workspace_item));
+                                    }
                                 }
-                                None => Err(e)?,
-                            },
-                            None => Err(e)?,
-                        },
+                            }
+                            Err(e)
+                        }
                     }
                 }))
             });
@@ -5255,14 +5262,7 @@ impl Workspace {
     fn serialize_workspace_location(&self, cx: &App) -> WorkspaceLocation {
         let paths = PathList::new(&self.root_paths(cx));
         if let Some(connection) = self.project.read(cx).remote_connection_options(cx) {
-            WorkspaceLocation::Location(
-                SerializedWorkspaceLocation::Ssh(SerializedSshConnection {
-                    host: connection.host,
-                    port: connection.port,
-                    user: connection.username,
-                }),
-                paths,
-            )
+            WorkspaceLocation::Location(SerializedWorkspaceLocation::Remote(connection), paths)
         } else if self.project.read(cx).is_local() {
             if !paths.is_empty() {
                 WorkspaceLocation::Location(SerializedWorkspaceLocation::Local, paths)
@@ -7275,9 +7275,9 @@ pub fn create_and_open_local_file(
     })
 }
 
-pub fn open_ssh_project_with_new_connection(
+pub fn open_remote_project_with_new_connection(
     window: WindowHandle<Workspace>,
-    connection_options: SshConnectionOptions,
+    connection_options: RemoteConnectionOptions,
     cancel_rx: oneshot::Receiver<()>,
     delegate: Arc<dyn RemoteClientDelegate>,
     app_state: Arc<AppState>,
@@ -7286,11 +7286,11 @@ pub fn open_ssh_project_with_new_connection(
 ) -> Task<Result<()>> {
     cx.spawn(async move |cx| {
         let (workspace_id, serialized_workspace) =
-            serialize_ssh_project(connection_options.clone(), paths.clone(), cx).await?;
+            serialize_remote_project(connection_options.clone(), paths.clone(), cx).await?;
 
         let session = match cx
             .update(|cx| {
-                remote::RemoteClient::ssh(
+                remote::RemoteClient::new(
                     ConnectionIdentifier::Workspace(workspace_id.0),
                     connection_options,
                     cancel_rx,
@@ -7316,7 +7316,7 @@ pub fn open_ssh_project_with_new_connection(
             )
         })?;
 
-        open_ssh_project_inner(
+        open_remote_project_inner(
             project,
             paths,
             workspace_id,
@@ -7329,8 +7329,8 @@ pub fn open_ssh_project_with_new_connection(
     })
 }
 
-pub fn open_ssh_project_with_existing_connection(
-    connection_options: SshConnectionOptions,
+pub fn open_remote_project_with_existing_connection(
+    connection_options: RemoteConnectionOptions,
     project: Entity<Project>,
     paths: Vec<PathBuf>,
     app_state: Arc<AppState>,
@@ -7339,9 +7339,9 @@ pub fn open_ssh_project_with_existing_connection(
 ) -> Task<Result<()>> {
     cx.spawn(async move |cx| {
         let (workspace_id, serialized_workspace) =
-            serialize_ssh_project(connection_options.clone(), paths.clone(), cx).await?;
+            serialize_remote_project(connection_options.clone(), paths.clone(), cx).await?;
 
-        open_ssh_project_inner(
+        open_remote_project_inner(
             project,
             paths,
             workspace_id,
@@ -7354,7 +7354,7 @@ pub fn open_ssh_project_with_existing_connection(
     })
 }
 
-async fn open_ssh_project_inner(
+async fn open_remote_project_inner(
     project: Entity<Project>,
     paths: Vec<PathBuf>,
     workspace_id: WorkspaceId,
@@ -7441,22 +7441,18 @@ async fn open_ssh_project_inner(
     Ok(())
 }
 
-fn serialize_ssh_project(
-    connection_options: SshConnectionOptions,
+fn serialize_remote_project(
+    connection_options: RemoteConnectionOptions,
     paths: Vec<PathBuf>,
     cx: &AsyncApp,
 ) -> Task<Result<(WorkspaceId, Option<SerializedWorkspace>)>> {
     cx.background_spawn(async move {
-        let ssh_connection_id = persistence::DB
-            .get_or_create_ssh_connection(
-                connection_options.host.clone(),
-                connection_options.port,
-                connection_options.username.clone(),
-            )
+        let remote_connection_id = persistence::DB
+            .get_or_create_remote_connection(connection_options)
             .await?;
 
         let serialized_workspace =
-            persistence::DB.ssh_workspace_for_roots(&paths, ssh_connection_id);
+            persistence::DB.remote_workspace_for_roots(&paths, remote_connection_id);
 
         let workspace_id = if let Some(workspace_id) =
             serialized_workspace.as_ref().map(|workspace| workspace.id)
@@ -8006,22 +8002,20 @@ pub struct WorkspacePosition {
     pub centered_layout: bool,
 }
 
-pub fn ssh_workspace_position_from_db(
-    host: String,
-    port: Option<u16>,
-    user: Option<String>,
+pub fn remote_workspace_position_from_db(
+    connection_options: RemoteConnectionOptions,
     paths_to_open: &[PathBuf],
     cx: &App,
 ) -> Task<Result<WorkspacePosition>> {
     let paths = paths_to_open.to_vec();
 
     cx.background_spawn(async move {
-        let ssh_connection_id = persistence::DB
-            .get_or_create_ssh_connection(host, port, user)
+        let remote_connection_id = persistence::DB
+            .get_or_create_remote_connection(connection_options)
             .await
             .context("fetching serialized ssh project")?;
         let serialized_workspace =
-            persistence::DB.ssh_workspace_for_roots(&paths, ssh_connection_id);
+            persistence::DB.remote_workspace_for_roots(&paths, remote_connection_id);
 
         let (window_bounds, display) = if let Some(bounds) = window_bounds_env_override() {
             (Some(WindowBounds::Windowed(bounds)), None)

crates/workspace/src/workspace_settings.rs 🔗

@@ -6,9 +6,9 @@ use collections::HashMap;
 use gpui::App;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 
-#[derive(Deserialize)]
+#[derive(Deserialize, SettingsUi)]
 pub struct WorkspaceSettings {
     pub active_pane_modifiers: ActivePanelModifiers,
     pub bottom_dock_layout: BottomDockLayout,
@@ -216,7 +216,7 @@ pub struct WorkspaceSettingsContent {
     pub zoomed_padding: Option<bool>,
 }
 
-#[derive(Deserialize)]
+#[derive(Deserialize, SettingsUi)]
 pub struct TabBarSettings {
     pub show: bool,
     pub show_nav_history_buttons: bool,

crates/worktree/src/worktree_settings.rs 🔗

@@ -4,10 +4,10 @@ use anyhow::Context as _;
 use gpui::App;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources};
+use settings::{Settings, SettingsSources, SettingsUi};
 use util::paths::PathMatcher;
 
-#[derive(Clone, PartialEq, Eq)]
+#[derive(Clone, PartialEq, Eq, SettingsUi)]
 pub struct WorktreeSettings {
     pub file_scan_inclusions: PathMatcher,
     pub file_scan_exclusions: PathMatcher,

crates/zed/Cargo.toml 🔗

@@ -131,6 +131,7 @@ serde_json.workspace = true
 session.workspace = true
 settings.workspace = true
 settings_ui.workspace = true
+keymap_editor.workspace = true
 shellexpand.workspace = true
 smol.workspace = true
 snippet_provider.workspace = true

crates/zed/resources/windows/zed-wsl 🔗

@@ -0,0 +1,25 @@
+#!/usr/bin/env sh
+
+if [ "$ZED_WSL_DEBUG_INFO" = true ]; then
+	set -x
+fi
+
+ZED_PATH="$(dirname "$(realpath "$0")")"
+
+IN_WSL=false
+if [ -n "$WSL_DISTRO_NAME" ]; then
+	# $WSL_DISTRO_NAME is available since WSL builds 18362, also for WSL2
+	IN_WSL=true
+fi
+
+if [ "$IN_WSL" = true ]; then
+	WSL_USER="$USER"
+	if [ -z "$WSL_USER" ]; then
+		WSL_USER="$USERNAME"
+	fi
+	"$ZED_PATH/zed.exe" --wsl "$WSL_USER@$WSL_DISTRO_NAME" "$@"
+	exit $?
+else
+	echo "Only WSL is supported for now" >&2
+	exit 1
+fi

crates/zed/src/main.rs 🔗

@@ -23,13 +23,14 @@ use http_client::{Url, read_proxy_from_env};
 use language::LanguageRegistry;
 use onboarding::{FIRST_OPEN, show_onboarding_view};
 use prompt_store::PromptBuilder;
+use remote::RemoteConnectionOptions;
 use reqwest_client::ReqwestClient;
 
 use assets::Assets;
 use node_runtime::{NodeBinaryOptions, NodeRuntime};
 use parking_lot::Mutex;
 use project::project_settings::ProjectSettings;
-use recent_projects::{SshSettings, open_ssh_project};
+use recent_projects::{SshSettings, open_remote_project};
 use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
 use session::{AppSession, Session};
 use settings::{BaseKeymap, Settings, SettingsStore, watch_config_file};
@@ -360,6 +361,7 @@ pub fn main() {
             open_listener.open(RawOpenRequest {
                 urls,
                 diff_paths: Vec::new(),
+                ..Default::default()
             })
         }
     });
@@ -632,6 +634,7 @@ pub fn main() {
         svg_preview::init(cx);
         onboarding::init(cx);
         settings_ui::init(cx);
+        keymap_editor::init(cx);
         extensions_ui::init(cx);
         zeta::init(cx);
         inspector_ui::init(app_state.clone(), cx);
@@ -695,7 +698,7 @@ pub fn main() {
         let urls: Vec<_> = args
             .paths_or_urls
             .iter()
-            .filter_map(|arg| parse_url_arg(arg, cx).log_err())
+            .map(|arg| parse_url_arg(arg, cx))
             .collect();
 
         let diff_paths: Vec<[String; 2]> = args
@@ -705,7 +708,11 @@ pub fn main() {
             .collect();
 
         if !urls.is_empty() || !diff_paths.is_empty() {
-            open_listener.open(RawOpenRequest { urls, diff_paths })
+            open_listener.open(RawOpenRequest {
+                urls,
+                diff_paths,
+                wsl: args.wsl,
+            })
         }
 
         match open_rx
@@ -791,10 +798,10 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
         return;
     }
 
-    if let Some(connection_options) = request.ssh_connection {
+    if let Some(connection_options) = request.remote_connection {
         cx.spawn(async move |cx| {
             let paths: Vec<PathBuf> = request.open_paths.into_iter().map(PathBuf::from).collect();
-            open_ssh_project(
+            open_remote_project(
                 connection_options,
                 paths,
                 app_state,
@@ -948,7 +955,7 @@ async fn installation_id() -> Result<IdType> {
 async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp) -> Result<()> {
     if let Some(locations) = restorable_workspace_locations(cx, &app_state).await {
         let use_system_window_tabs = cx
-            .update(|cx| WorkspaceSettings::get(None, cx).use_system_window_tabs)
+            .update(|cx| WorkspaceSettings::get_global(cx).use_system_window_tabs)
             .unwrap_or(false);
         let mut results: Vec<Result<(), Error>> = Vec::new();
         let mut tasks = Vec::new();
@@ -977,31 +984,24 @@ async fn restore_or_create_workspace(app_state: Arc<AppState>, cx: &mut AsyncApp
                         tasks.push(task);
                     }
                 }
-                SerializedWorkspaceLocation::Ssh(ssh) => {
+                SerializedWorkspaceLocation::Remote(mut connection_options) => {
                     let app_state = app_state.clone();
-                    let ssh_host = ssh.host.clone();
-                    let task = cx.spawn(async move |cx| {
-                        let connection_options = cx.update(|cx| {
+                    if let RemoteConnectionOptions::Ssh(options) = &mut connection_options {
+                        cx.update(|cx| {
                             SshSettings::get_global(cx)
-                                .connection_options_for(ssh.host, ssh.port, ssh.user)
-                        });
-
-                        match connection_options {
-                            Ok(connection_options) => recent_projects::open_ssh_project(
-                                connection_options,
-                                paths.paths().into_iter().map(PathBuf::from).collect(),
-                                app_state,
-                                workspace::OpenOptions::default(),
-                                cx,
-                            )
-                            .await
-                            .map_err(|e| anyhow::anyhow!(e)),
-                            Err(e) => Err(anyhow::anyhow!(
-                                "Failed to get SSH connection options for {}: {}",
-                                ssh_host,
-                                e
-                            )),
-                        }
+                                .fill_connection_options_from_settings(options)
+                        })?;
+                    }
+                    let task = cx.spawn(async move |cx| {
+                        recent_projects::open_remote_project(
+                            connection_options,
+                            paths.paths().into_iter().map(PathBuf::from).collect(),
+                            app_state,
+                            workspace::OpenOptions::default(),
+                            cx,
+                        )
+                        .await
+                        .map_err(|e| anyhow::anyhow!(e))
                     });
                     tasks.push(task);
                 }
@@ -1183,6 +1183,18 @@ struct Args {
     #[arg(long, value_name = "DIR")]
     user_data_dir: Option<String>,
 
+    /// The username and WSL distribution to use when opening paths. If not specified,
+    /// Zed will attempt to open the paths directly.
+    ///
+    /// The username is optional, and if not specified, the default user for the distribution
+    /// will be used.
+    ///
+    /// Example: `me@Ubuntu` or `Ubuntu`.
+    ///
+    /// WARNING: This flag is passed automatically by the WSL launcher script; avoid setting it by hand.
+    #[arg(long, value_name = "USER@DISTRO")]
+    wsl: Option<String>,
+
     /// Instructs zed to run as a dev server on this machine. (not implemented)
     #[arg(long)]
     dev_server_token: Option<String>,
@@ -1241,18 +1253,18 @@ impl ToString for IdType {
     }
 }
 
-fn parse_url_arg(arg: &str, cx: &App) -> Result<String> {
+fn parse_url_arg(arg: &str, cx: &App) -> String {
     match std::fs::canonicalize(Path::new(&arg)) {
-        Ok(path) => Ok(format!("file://{}", path.display())),
-        Err(error) => {
+        Ok(path) => format!("file://{}", path.display()),
+        Err(_) => {
             if arg.starts_with("file://")
                 || arg.starts_with("zed-cli://")
                 || arg.starts_with("ssh://")
                 || parse_zed_link(arg, cx).is_some()
             {
-                Ok(arg.into())
+                arg.into()
             } else {
-                anyhow::bail!("error parsing path argument: {error}")
+                format!("file://{arg}")
             }
         }
     }

crates/zed/src/zed.rs 🔗

@@ -48,7 +48,7 @@ use project::{DirectoryLister, ProjectItem};
 use project_panel::ProjectPanel;
 use prompt_store::PromptBuilder;
 use quick_action_bar::QuickActionBar;
-use recent_projects::open_ssh_project;
+use recent_projects::open_remote_project;
 use release_channel::{AppCommitSha, ReleaseChannel};
 use rope::Rope;
 use search::project_search::ProjectSearchBar;
@@ -1491,7 +1491,7 @@ fn reload_keymaps(cx: &mut App, mut user_key_bindings: Vec<KeyBinding>) {
         workspace::NewWindow,
     )]);
     // todo: nicer api here?
-    settings_ui::keybindings::KeymapEventChannel::trigger_keymap_changed(cx);
+    keymap_editor::KeymapEventChannel::trigger_keymap_changed(cx);
 }
 
 pub fn load_default_keymap(cx: &mut App) {
@@ -1557,7 +1557,7 @@ pub fn open_new_ssh_project_from_project(
     };
     let connection_options = ssh_client.read(cx).connection_options();
     cx.spawn_in(window, async move |_, cx| {
-        open_ssh_project(
+        open_remote_project(
             connection_options,
             paths,
             app_state,

crates/zed/src/zed/app_menus.rs 🔗

@@ -1,6 +1,5 @@
 use collab_ui::collab_panel;
 use gpui::{Menu, MenuItem, OsAction};
-use settings_ui::keybindings;
 use terminal_view::terminal_panel;
 
 pub fn app_menus() -> Vec<Menu> {
@@ -17,7 +16,7 @@ pub fn app_menus() -> Vec<Menu> {
                     name: "Settings".into(),
                     items: vec![
                         MenuItem::action("Open Settings", super::OpenSettings),
-                        MenuItem::action("Open Key Bindings", keybindings::OpenKeymapEditor),
+                        MenuItem::action("Open Key Bindings", keymap_editor::OpenKeymapEditor),
                         MenuItem::action("Open Default Settings", super::OpenDefaultSettings),
                         MenuItem::action(
                             "Open Default Key Bindings",

crates/zed/src/zed/open_listener.rs 🔗

@@ -17,8 +17,8 @@ use gpui::{App, AsyncApp, Global, WindowHandle};
 use language::Point;
 use onboarding::FIRST_OPEN;
 use onboarding::show_onboarding_view;
-use recent_projects::{SshSettings, open_ssh_project};
-use remote::SshConnectionOptions;
+use recent_projects::{SshSettings, open_remote_project};
+use remote::{RemoteConnectionOptions, WslConnectionOptions};
 use settings::Settings;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
@@ -37,7 +37,7 @@ pub struct OpenRequest {
     pub diff_paths: Vec<[String; 2]>,
     pub open_channel_notes: Vec<(u64, Option<String>)>,
     pub join_channel: Option<u64>,
-    pub ssh_connection: Option<SshConnectionOptions>,
+    pub remote_connection: Option<RemoteConnectionOptions>,
 }
 
 #[derive(Debug)]
@@ -51,6 +51,23 @@ pub enum OpenRequestKind {
 impl OpenRequest {
     pub fn parse(request: RawOpenRequest, cx: &App) -> Result<Self> {
         let mut this = Self::default();
+
+        this.diff_paths = request.diff_paths;
+        if let Some(wsl) = request.wsl {
+            let (user, distro_name) = if let Some((user, distro)) = wsl.split_once('@') {
+                if user.is_empty() {
+                    anyhow::bail!("user is empty in wsl argument (expected USER@DISTRO or DISTRO)");
+                }
+                (Some(user.to_string()), distro.to_string())
+            } else {
+                (None, wsl)
+            };
+            this.remote_connection = Some(RemoteConnectionOptions::Wsl(WslConnectionOptions {
+                distro_name,
+                user,
+            }));
+        }
+
         for url in request.urls {
             if let Some(server_name) = url.strip_prefix("zed-cli://") {
                 this.kind = Some(OpenRequestKind::CliConnection(connect_to_cli(server_name)?));
@@ -80,8 +97,6 @@ impl OpenRequest {
             }
         }
 
-        this.diff_paths = request.diff_paths;
-
         Ok(this)
     }
 
@@ -108,13 +123,15 @@ impl OpenRequest {
         if let Some(password) = url.password() {
             connection_options.password = Some(password.to_string());
         }
-        if let Some(ssh_connection) = &self.ssh_connection {
+
+        let connection_options = RemoteConnectionOptions::Ssh(connection_options);
+        if let Some(ssh_connection) = &self.remote_connection {
             anyhow::ensure!(
                 *ssh_connection == connection_options,
-                "cannot open multiple ssh connections"
+                "cannot open multiple different remote connections"
             );
         }
-        self.ssh_connection = Some(connection_options);
+        self.remote_connection = Some(connection_options);
         self.parse_file_path(url.path());
         Ok(())
     }
@@ -152,6 +169,7 @@ pub struct OpenListener(UnboundedSender<RawOpenRequest>);
 pub struct RawOpenRequest {
     pub urls: Vec<String>,
     pub diff_paths: Vec<[String; 2]>,
+    pub wsl: Option<String>,
 }
 
 impl Global for OpenListener {}
@@ -303,13 +321,21 @@ pub async fn handle_cli_connection(
                 paths,
                 diff_paths,
                 wait,
+                wsl,
                 open_new_workspace,
                 env,
                 user_data_dir: _,
             } => {
                 if !urls.is_empty() {
                     cx.update(|cx| {
-                        match OpenRequest::parse(RawOpenRequest { urls, diff_paths }, cx) {
+                        match OpenRequest::parse(
+                            RawOpenRequest {
+                                urls,
+                                diff_paths,
+                                wsl,
+                            },
+                            cx,
+                        ) {
                             Ok(open_request) => {
                                 handle_open_request(open_request, app_state.clone(), cx);
                                 responses.send(CliResponse::Exit { status: 0 }).log_err();
@@ -422,30 +448,26 @@ async fn open_workspaces(
                         errored = true
                     }
                 }
-                SerializedWorkspaceLocation::Ssh(ssh) => {
+                SerializedWorkspaceLocation::Remote(mut connection) => {
                     let app_state = app_state.clone();
-                    let connection_options = cx.update(|cx| {
-                        SshSettings::get_global(cx)
-                            .connection_options_for(ssh.host, ssh.port, ssh.user)
-                    });
-                    if let Ok(connection_options) = connection_options {
-                        cx.spawn(async move |cx| {
-                            open_ssh_project(
-                                connection_options,
-                                workspace_paths.paths().to_vec(),
-                                app_state,
-                                OpenOptions::default(),
-                                cx,
-                            )
-                            .await
-                            .log_err();
-                        })
-                        .detach();
-                        // We don't set `errored` here if `open_ssh_project` fails, because for ssh projects, the
-                        // error is displayed in the window.
-                    } else {
-                        errored = false;
+                    if let RemoteConnectionOptions::Ssh(options) = &mut connection {
+                        cx.update(|cx| {
+                            SshSettings::get_global(cx)
+                                .fill_connection_options_from_settings(options)
+                        })?;
                     }
+                    cx.spawn(async move |cx| {
+                        open_remote_project(
+                            connection,
+                            workspace_paths.paths().to_vec(),
+                            app_state,
+                            OpenOptions::default(),
+                            cx,
+                        )
+                        .await
+                        .log_err();
+                    })
+                    .detach();
                 }
             }
         }
@@ -587,6 +609,7 @@ mod tests {
     };
     use editor::Editor;
     use gpui::TestAppContext;
+    use remote::SshConnectionOptions;
     use serde_json::json;
     use std::sync::Arc;
     use util::path;
@@ -609,8 +632,8 @@ mod tests {
             .unwrap()
         });
         assert_eq!(
-            request.ssh_connection.unwrap(),
-            SshConnectionOptions {
+            request.remote_connection.unwrap(),
+            RemoteConnectionOptions::Ssh(SshConnectionOptions {
                 host: "localhost".into(),
                 username: Some("me".into()),
                 port: None,
@@ -619,7 +642,7 @@ mod tests {
                 port_forwards: None,
                 nickname: None,
                 upload_binary_over_ssh: false,
-            }
+            })
         );
         assert_eq!(request.open_paths, vec!["/"]);
     }

crates/zed/src/zed/windows_only_instance.rs 🔗

@@ -153,6 +153,7 @@ fn send_args_to_instance(args: &Args) -> anyhow::Result<()> {
             urls,
             diff_paths,
             wait: false,
+            wsl: args.wsl.clone(),
             open_new_workspace: None,
             env: None,
             user_data_dir: args.user_data_dir.clone(),

crates/zeta/Cargo.toml 🔗

@@ -35,6 +35,7 @@ futures.workspace = true
 gpui.workspace = true
 http_client.workspace = true
 indoc.workspace = true
+itertools.workspace = true
 language.workspace = true
 language_model.workspace = true
 log.workspace = true
@@ -48,6 +49,7 @@ release_channel.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 settings.workspace = true
+strum.workspace = true
 telemetry.workspace = true
 telemetry_events.workspace = true
 theme.workspace = true
@@ -78,7 +80,6 @@ settings = { workspace = true, features = ["test-support"] }
 theme = { workspace = true, features = ["test-support"] }
 tree-sitter-go.workspace = true
 tree-sitter-rust.workspace = true
-unindent.workspace = true
 workspace = { workspace = true, features = ["test-support"] }
 worktree = { workspace = true, features = ["test-support"] }
 zlog.workspace = true

crates/zeta/license_examples/0bsd.txt 🔗

@@ -0,0 +1,13 @@
+Zero-Clause BSD
+=============
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES
+OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE
+FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
+DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
+AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

crates/zeta/license_examples/apache-2.0-ex1.txt 🔗

@@ -0,0 +1,55 @@
+Apache License
+
+Version 2.0, January 2004
+
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+You must give any other recipients of the Work or Derivative Works a copy of this License; and
+
+You must cause any modified files to carry prominent notices stating that You changed the files; and
+
+You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+

crates/zeta/src/license_detection/apache.regex → crates/zeta/license_examples/apache-2.0-ex2.txt 🔗

@@ -1,109 +1,110 @@
-                                 ^Apache License
-                           Version 2\.0, January 2004
-                        http://www\.apache\.org/licenses/
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
 
    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
 
-   1\. Definitions\.
+   1. Definitions.
 
       "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document\.
+      and distribution as defined by Sections 1 through 9 of this document.
 
       "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License\.
+      the copyright owner that is granting the License.
 
       "Legal Entity" shall mean the union of the acting entity and all
       other entities that control, are controlled by, or are under common
-      control with that entity\. For the purposes of this definition,
-      "control" means \(i\) the power, direct or indirect, to cause the
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
       direction or management of such entity, whether by contract or
-      otherwise, or \(ii\) ownership of fifty percent \(50%\) or more of the
-      outstanding shares, or \(iii\) beneficial ownership of such entity\.
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
 
-      "You" \(or "Your"\) shall mean an individual or Legal Entity
-      exercising permissions granted by this License\.
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
 
       "Source" form shall mean the preferred form for making modifications,
       including but not limited to software source code, documentation
-      source, and configuration files\.
+      source, and configuration files.
 
       "Object" form shall mean any form resulting from mechanical
       transformation or translation of a Source form, including but
       not limited to compiled object code, generated documentation,
-      and conversions to other media types\.
+      and conversions to other media types.
 
       "Work" shall mean the work of authorship, whether in Source or
       Object form, made available under the License, as indicated by a
       copyright notice that is included in or attached to the work
-      \(an example is provided in the Appendix below\)\.
+      (an example is provided in the Appendix below).
 
       "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on \(or derived from\) the Work and for which the
+      form, that is based on (or derived from) the Work and for which the
       editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship\. For the purposes
+      represent, as a whole, an original work of authorship. For the purposes
       of this License, Derivative Works shall not include works that remain
-      separable from, or merely link \(or bind by name\) to the interfaces of,
-      the Work and Derivative Works thereof\.
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
 
       "Contribution" shall mean any work of authorship, including
       the original version of the Work and any modifications or additions
       to that Work or Derivative Works thereof, that is intentionally
       submitted to Licensor for inclusion in the Work by the copyright owner
       or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner\. For the purposes of this definition, "submitted"
+      the copyright owner. For the purposes of this definition, "submitted"
       means any form of electronic, verbal, or written communication sent
       to the Licensor or its representatives, including but not limited to
       communication on electronic mailing lists, source code control systems,
       and issue tracking systems that are managed by, or on behalf of, the
       Licensor for the purpose of discussing and improving the Work, but
       excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution\."
+      designated in writing by the copyright owner as "Not a Contribution."
 
       "Contributor" shall mean Licensor and any individual or Legal Entity
       on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work\.
+      subsequently incorporated within the Work.
 
-   2\. Grant of Copyright License\. Subject to the terms and conditions of
+   2. Grant of Copyright License. Subject to the terms and conditions of
       this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non\-exclusive, no\-charge, royalty\-free, irrevocable
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
       copyright license to reproduce, prepare Derivative Works of,
       publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form\.
+      Work and such Derivative Works in Source or Object form.
 
-   3\. Grant of Patent License\. Subject to the terms and conditions of
+   3. Grant of Patent License. Subject to the terms and conditions of
       this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non\-exclusive, no\-charge, royalty\-free, irrevocable
-      \(except as stated in this section\) patent license to make, have made,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
       use, offer to sell, sell, import, and otherwise transfer the Work,
       where such license applies only to those patent claims licensable
       by such Contributor that are necessarily infringed by their
-      Contribution\(s\) alone or by combination of their Contribution\(s\)
-      with the Work to which such Contribution\(s\) was submitted\. If You
-      institute patent litigation against any entity \(including a
-      cross\-claim or counterclaim in a lawsuit\) alleging that the Work
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
       or a Contribution incorporated within the Work constitutes direct
       or contributory patent infringement, then any patent licenses
       granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed\.
+      as of the date such litigation is filed.
 
-   4\. Redistribution\. You may reproduce and distribute copies of the
+   4. Redistribution. You may reproduce and distribute copies of the
       Work or Derivative Works thereof in any medium, with or without
       modifications, and in Source or Object form, provided that You
       meet the following conditions:
 
-      \(a\) You must give any other recipients of the Work or
+      (a) You must give any other recipients of the Work or
           Derivative Works a copy of this License; and
 
-      \(b\) You must cause any modified files to carry prominent notices
+      (b) You must cause any modified files to carry prominent notices
           stating that You changed the files; and
 
-      \(c\) You must retain, in the Source form of any Derivative Works
+      (c) You must retain, in the Source form of any Derivative Works
           that You distribute, all copyright, patent, trademark, and
           attribution notices from the Source form of the Work,
           excluding those notices that do not pertain to any part of
           the Derivative Works; and
 
-      \(d\) If the Work includes a "NOTICE" text file as part of its
+      (d) If the Work includes a "NOTICE" text file as part of its
           distribution, then any Derivative Works that You distribute must
           include a readable copy of the attribution notices contained
           within such NOTICE file, excluding those notices that do not
@@ -112,90 +113,77 @@
           as part of the Derivative Works; within the Source form or
           documentation, if provided along with the Derivative Works; or,
           within a display generated by the Derivative Works, if and
-          wherever such third\-party notices normally appear\. The contents
+          wherever such third-party notices normally appear. The contents
           of the NOTICE file are for informational purposes only and
-          do not modify the License\. You may add Your own attribution
+          do not modify the License. You may add Your own attribution
           notices within Derivative Works that You distribute, alongside
           or as an addendum to the NOTICE text from the Work, provided
           that such additional attribution notices cannot be construed
-          as modifying the License\.
+          as modifying the License.
 
       You may add Your own copyright statement to Your modifications and
       may provide additional or different license terms and conditions
       for use, reproduction, or distribution of Your modifications, or
       for any such Derivative Works as a whole, provided Your use,
       reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License\.
+      the conditions stated in this License.
 
-   5\. Submission of Contributions\. Unless You explicitly state otherwise,
+   5. Submission of Contributions. Unless You explicitly state otherwise,
       any Contribution intentionally submitted for inclusion in the Work
       by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions\.
+      this License, without any additional terms or conditions.
       Notwithstanding the above, nothing herein shall supersede or modify
       the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions\.
+      with Licensor regarding such Contributions.
 
-   6\. Trademarks\. This License does not grant permission to use the trade
+   6. Trademarks. This License does not grant permission to use the trade
       names, trademarks, service marks, or product names of the Licensor,
       except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file\.
+      origin of the Work and reproducing the content of the NOTICE file.
 
-   7\. Disclaimer of Warranty\. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work \(and each
-      Contributor provides its Contributions\) on an "AS IS" BASIS,
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
       WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
       implied, including, without limitation, any warranties or conditions
-      of TITLE, NON\-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE\. You are solely responsible for determining the
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
       appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License\.
+      risks associated with Your exercise of permissions under this License.
 
-   8\. Limitation of Liability\. In no event and under no legal theory,
-      whether in tort \(including negligence\), contract, or otherwise,
-      unless required by applicable law \(such as deliberate and grossly
-      negligent acts\) or agreed to in writing, shall any Contributor be
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
       liable to You for damages, including any direct, indirect, special,
       incidental, or consequential damages of any character arising as a
       result of this License or out of the use or inability to use the
-      Work \(including but not limited to damages for loss of goodwill,
+      Work (including but not limited to damages for loss of goodwill,
       work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses\), even if such Contributor
-      has been advised of the possibility of such damages\.
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
 
-   9\. Accepting Warranty or Additional Liability\. While redistributing
+   9. Accepting Warranty or Additional Liability. While redistributing
       the Work or Derivative Works thereof, You may choose to offer,
       and charge a fee for, acceptance of support, warranty, indemnity,
       or other liability obligations and/or rights consistent with this
-      License\. However, in accepting such obligations, You may act only
+      License. However, in accepting such obligations, You may act only
       on Your own behalf and on Your sole responsibility, not on behalf
       of any other Contributor, and only if You agree to indemnify,
       defend, and hold each Contributor harmless for any liability
       incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability\.(:?
-
-   END OF TERMS AND CONDITIONS)?(:?
-
-   APPENDIX: How to apply the Apache License to your work\.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "\[\]"
-      replaced with your own identifying information\. \(Don't include
-      the brackets!\)  The text should be enclosed in the appropriate
-      comment syntax for the file format\. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third\-party archives\.)?(:?
+      of your accepting any such warranty or additional liability.
 
-   Copyright .*)?(:?
+   END OF TERMS AND CONDITIONS
 
-   Licensed under the Apache License, Version 2\.0 \(the "License"\);
-   you may not use this file except in compliance with the License\.
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
    You may obtain a copy of the License at
 
-       http://www\.apache\.org/licenses/LICENSE\-2\.0
+       http://www.apache.org/licenses/LICENSE-2.0
 
    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied\.
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
-   limitations under the License\.)?$
+   limitations under the License.

crates/zeta/license_examples/apache-2.0-ex3.txt 🔗

@@ -0,0 +1,13 @@
+Copyright 2011 Someone
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+   https://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.

crates/zeta/license_examples/apache-2.0-ex4.txt 🔗

@@ -0,0 +1,187 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   Copyright (c) 2017, The Android Open Source Project
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

crates/zeta/license_examples/bsd-1-clause.txt 🔗

@@ -0,0 +1,20 @@
+Copyright (c) 2024 John Doe
+Some Organization
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this list
+of conditions and the following disclaimer.
+
+THIS SOFTWARE IS PROVIDED BY [Name of Organization] “AS IS” AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
+SHALL [Name of Organisation] BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
+OF SUCH DAMAGE.

crates/zeta/license_examples/bsd-2-clause-ex0.txt 🔗

@@ -0,0 +1,26 @@
+Copyright (c) 2024
+
+John Doe (john.doe@gmail.com)
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation and/or
+other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

crates/zeta/license_examples/bsd-3-clause-ex0.txt 🔗

@@ -0,0 +1,29 @@
+BSD 3-Clause License
+
+Copyright (c) 2025, John Doe
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation and/or
+other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its contributors
+may be used to endorse or promote products derived from this software without
+specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

crates/zeta/license_examples/bsd-3-clause-ex1.txt 🔗

@@ -0,0 +1,27 @@
+// Copyright 2024 (this is copy modified from chromium)
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//    * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//    * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//    * Neither the name of da company nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

crates/zeta/license_examples/bsd-3-clause-ex2.txt 🔗

@@ -0,0 +1,31 @@
+The Glasgow Haskell Compiler License
+
+Copyright 2002, The University Court of the University of Glasgow.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+- Redistributions of source code must retain the above copyright notice,
+this list of conditions and the following disclaimer.
+
+- Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+- Neither name of the University nor the names of its contributors may be
+used to endorse or promote products derived from this software without
+specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY COURT OF THE UNIVERSITY OF
+GLASGOW AND THE CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+UNIVERSITY COURT OF THE UNIVERSITY OF GLASGOW OR THE CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGE.

crates/zeta/license_examples/bsd-3-clause-ex3.txt 🔗

@@ -0,0 +1,30 @@
+Copyright (c) 2019 Someone
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * Neither the name of Someone nor the names of other
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

crates/zeta/license_examples/bsd-3-clause-ex4.txt 🔗

@@ -0,0 +1,27 @@
+Copyright (c) 2009-2011, Mozilla Foundation and contributors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+* Neither the names of the Mozilla Foundation nor the names of project
+  contributors may be used to endorse or promote products derived from this
+  software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

crates/zeta/license_examples/isc.txt 🔗

@@ -0,0 +1,15 @@
+ISC License
+
+Copyright (c) 2024, John Doe
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

crates/zeta/license_examples/mit-ex1.txt 🔗

@@ -0,0 +1,26 @@
+Copyright (c) 2006-2009 Someone
+Copyright (c) 2009-2013 Some organization
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.

crates/zeta/license_examples/mit-ex2.txt 🔗

@@ -0,0 +1,22 @@
+(The MIT License)
+
+Copyright (c) someone
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

crates/zeta/license_examples/mit-ex3.txt 🔗

@@ -0,0 +1,21 @@
+    MIT License
+
+    Copyright (c) Someone.
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE

crates/zeta/license_examples/upl-1.0.txt 🔗

@@ -0,0 +1,35 @@
+Copyright (c) 2025, John Doe
+
+The Universal Permissive License (UPL), Version 1.0
+
+Subject to the condition set forth below, permission is hereby granted to any person
+obtaining a copy of this software, associated documentation and/or data (collectively
+the "Software"), free of charge and under any and all copyright rights in the
+Software, and any and all patent rights owned or freely licensable by each licensor
+hereunder covering either (i) the unmodified Software as contributed to or provided
+by such licensor, or (ii) the Larger Works (as defined below), to deal in both
+
+(a) the Software, and
+
+(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is
+    included with the Software (each a "Larger Work" to which the Software is
+    contributed by such licensors),
+
+without restriction, including without limitation the rights to copy, create
+derivative works of, display, perform, and distribute the Software and make, use,
+sell, offer for sale, import, export, have made, and have sold the Software and the
+Larger Work(s), and to sublicense the foregoing rights on either these or other
+terms.
+
+This license is subject to the following condition:
+
+The above copyright notice and either this complete permission notice or at a minimum
+a reference to the UPL must be included in all copies or substantial portions of the
+Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
+OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

crates/zeta/license_examples/zlib-ex0.txt 🔗

@@ -0,0 +1,19 @@
+Copyright (c) 2021 Someone
+
+This software is provided 'as-is', without any express or implied warranty. In
+no event will the authors be held liable for any damages arising from the use of
+this software.
+
+Permission is granted to anyone to use this software for any purpose, including
+commercial applications, and to alter it and redistribute it freely, subject to
+the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not claim
+    that you wrote the original software. If you use this software in a product,
+    an acknowledgment in the product documentation would be appreciated but is
+    not required.
+
+ 2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+
+ 3. This notice may not be removed or altered from any source distribution.

crates/zeta/license_patterns/0bsd-pattern 🔗

@@ -0,0 +1,11 @@
+-- 0..512
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES
+OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE
+FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
+DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
+AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

crates/zeta/license_patterns/apache-2.0-pattern 🔗

@@ -0,0 +1,109 @@
+-- 0..512
+-- 0..0 optional:
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http
+-- 0..1 optional:
+://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+-- 0..5
+Apache License
+
+Version 2.0, January 2004
+
+http
+-- 0..1
+://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+-- 1..5
+You must give any other recipients of the Work or Derivative Works a copy of this License; and
+
+-- 1..5
+You must cause any modified files to carry prominent notices stating that You changed the files; and
+
+-- 1..5
+You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+
+-- 1..5

crates/zeta/license_patterns/apache-2.0-reference-pattern 🔗

@@ -0,0 +1,14 @@
+-- 0..512
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http
+-- 0..1
+://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.

crates/zeta/license_patterns/bsd-pattern 🔗

@@ -0,0 +1,32 @@
+-- 0..512
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+-- 1..5
+Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+-- 1..5 optional:
+Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation and/or
+other materials provided with the distribution.
+
+-- 1..128 optional:
+may be used to endorse or promote products derived from this software without
+specific prior written permission.
+
+-- 1..5
+THIS SOFTWARE IS PROVIDED
+-- 1..128
+“AS IS” AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL
+-- 1..128
+BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

crates/zeta/src/license_detection/isc.regex → crates/zeta/license_patterns/isc-pattern 🔗

@@ -1,15 +1,12 @@
-^.*ISC License.*
-
-Copyright.*
-
+-- 0..512
 Permission to use, copy, modify, and/or distribute this software for any
 purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies\.
+copyright notice and this permission notice appear in all copies.
 
 THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS\. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE\.$
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

crates/zeta/src/license_detection/mit.regex → crates/zeta/license_patterns/mit-pattern 🔗

@@ -1,21 +1,18 @@
-^.*MIT License.*
-
-Copyright.*
-
+-- 0..512
 Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files \(the "Software"\), to deal
+of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the Software is
 furnished to do so, subject to the following conditions:
 
 The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software\.
+copies or substantial portions of the Software.
 
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\. IN NO EVENT SHALL THE
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE\.$
+SOFTWARE.

crates/zeta/src/license_detection/upl.regex → crates/zeta/license_patterns/upl-1.0-pattern 🔗

@@ -1,35 +1,32 @@
-^Copyright.*
-
-The Universal Permissive License.*
-
+-- 0..512
 Subject to the condition set forth below, permission is hereby granted to any person
-obtaining a copy of this software, associated documentation and/or data \(collectively
-the "Software"\), free of charge and under any and all copyright rights in the
+obtaining a copy of this software, associated documentation and/or data (collectively
+the "Software"), free of charge and under any and all copyright rights in the
 Software, and any and all patent rights owned or freely licensable by each licensor
-hereunder covering either \(i\) the unmodified Software as contributed to or provided
-by such licensor, or \(ii\) the Larger Works \(as defined below\), to deal in both
+hereunder covering either (i) the unmodified Software as contributed to or provided
+by such licensor, or (ii) the Larger Works (as defined below), to deal in both
 
-\(a\) the Software, and
+(a) the Software, and
 
-\(b\) any piece of software and/or hardware listed in the lrgrwrks\.txt file if one is
-    included with the Software \(each a "Larger Work" to which the Software is
-    contributed by such licensors\),
+(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is
+    included with the Software (each a "Larger Work" to which the Software is
+    contributed by such licensors),
 
 without restriction, including without limitation the rights to copy, create
 derivative works of, display, perform, and distribute the Software and make, use,
 sell, offer for sale, import, export, have made, and have sold the Software and the
-Larger Work\(s\), and to sublicense the foregoing rights on either these or other
-terms\.
+Larger Work(s), and to sublicense the foregoing rights on either these or other
+terms.
 
 This license is subject to the following condition:
 
 The above copyright notice and either this complete permission notice or at a minimum
 a reference to the UPL must be included in all copies or substantial portions of the
-Software\.
+Software.
 
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT\. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
 HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
-OR THE USE OR OTHER DEALINGS IN THE SOFTWARE\.$
+OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

crates/zeta/license_patterns/zlib-pattern 🔗

@@ -0,0 +1,21 @@
+-- 0..512
+This software is provided 'as-is', without any express or implied warranty. In
+no event will the authors be held liable for any damages arising from the use of
+this software.
+
+Permission is granted to anyone to use this software for any purpose, including
+commercial applications, and to alter it and redistribute it freely, subject to
+the following restrictions:
+
+-- 1..5
+The origin of this software must not be misrepresented; you must not claim
+that you wrote the original software. If you use this software in a product,
+an acknowledgment in the product documentation would be appreciated but is
+not required.
+
+-- 1..5
+Altered source versions must be plainly marked as such, and must not be
+misrepresented as being the original software.
+
+-- 1..5
+This notice may not be removed or altered from any source distribution.

crates/zeta/src/license_detection.rs 🔗

@@ -1,24 +1,35 @@
 use std::{
     collections::BTreeSet,
+    fmt::{Display, Formatter},
+    ops::Range,
     path::{Path, PathBuf},
     sync::{Arc, LazyLock},
 };
 
+use anyhow::{Result, anyhow};
 use fs::Fs;
 use futures::StreamExt as _;
 use gpui::{App, AppContext as _, Entity, Subscription, Task};
+use itertools::Itertools;
 use postage::watch;
 use project::Worktree;
-use regex::Regex;
-use util::ResultExt as _;
+use strum::VariantArray;
+use util::{ResultExt as _, maybe};
 use worktree::ChildEntriesOptions;
 
 /// Matches the most common license locations, with US and UK English spelling.
 static LICENSE_FILE_NAME_REGEX: LazyLock<regex::bytes::Regex> = LazyLock::new(|| {
     regex::bytes::RegexBuilder::new(
         "^ \
-        (?: license | licence) \
-        (?: [\\-._] (?: apache | isc | mit | upl))? \
+        (?: license | licence)? \
+        (?: [\\-._]? \
+            (?: apache (?: [\\-._] (?: 2.0 | 2 ))? | \
+                0? bsd (?: [\\-._] [0123])? (?: [\\-._] clause)? | \
+                isc | \
+                mit | \
+                upl | \
+                zlib))? \
+        (?: [\\-._]? (?: license | licence))? \
         (?: \\.txt | \\.md)? \
         $",
     )
@@ -28,40 +39,206 @@ static LICENSE_FILE_NAME_REGEX: LazyLock<regex::bytes::Regex> = LazyLock::new(||
     .unwrap()
 });
 
-fn is_license_eligible_for_data_collection(license: &str) -> bool {
-    static LICENSE_REGEXES: LazyLock<Vec<Regex>> = LazyLock::new(|| {
-        [
-            include_str!("license_detection/apache.regex"),
-            include_str!("license_detection/isc.regex"),
-            include_str!("license_detection/mit.regex"),
-            include_str!("license_detection/upl.regex"),
-        ]
-        .into_iter()
-        .map(|pattern| Regex::new(&canonicalize_license_text(pattern)).unwrap())
-        .collect()
-    });
-
-    let license = canonicalize_license_text(license);
-    LICENSE_REGEXES.iter().any(|regex| regex.is_match(&license))
+#[derive(Debug, Clone, Copy, Eq, Ord, PartialOrd, PartialEq, VariantArray)]
+pub enum OpenSourceLicense {
+    Apache2_0,
+    BSDZero,
+    BSD,
+    ISC,
+    MIT,
+    UPL1_0,
+    Zlib,
 }
 
-/// Canonicalizes the whitespace of license text and license regexes.
+impl Display for OpenSourceLicense {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.spdx_identifier())
+    }
+}
+
+impl OpenSourceLicense {
+    /// These are SPDX identifiers for the licenses, except for BSD, where the variants are not
+    /// distinguished.
+    pub fn spdx_identifier(&self) -> &'static str {
+        match self {
+            OpenSourceLicense::Apache2_0 => "apache-2.0",
+            OpenSourceLicense::BSDZero => "0bsd",
+            OpenSourceLicense::BSD => "bsd",
+            OpenSourceLicense::ISC => "isc",
+            OpenSourceLicense::MIT => "mit",
+            OpenSourceLicense::UPL1_0 => "upl-1.0",
+            OpenSourceLicense::Zlib => "zlib",
+        }
+    }
+
+    pub fn patterns(&self) -> &'static [&'static str] {
+        match self {
+            OpenSourceLicense::Apache2_0 => &[
+                include_str!("../license_patterns/apache-2.0-pattern"),
+                include_str!("../license_patterns/apache-2.0-reference-pattern"),
+            ],
+            OpenSourceLicense::BSDZero => &[include_str!("../license_patterns/0bsd-pattern")],
+            OpenSourceLicense::BSD => &[include_str!("../license_patterns/bsd-pattern")],
+            OpenSourceLicense::ISC => &[include_str!("../license_patterns/isc-pattern")],
+            OpenSourceLicense::MIT => &[include_str!("../license_patterns/mit-pattern")],
+            OpenSourceLicense::UPL1_0 => &[include_str!("../license_patterns/upl-1.0-pattern")],
+            OpenSourceLicense::Zlib => &[include_str!("../license_patterns/zlib-pattern")],
+        }
+    }
+}
+
+// TODO: Consider using databake or similar to not parse at runtime.
+static LICENSE_PATTERNS: LazyLock<LicensePatterns> = LazyLock::new(|| {
+    let mut approximate_max_length = 0;
+    let mut patterns = Vec::new();
+    for license in OpenSourceLicense::VARIANTS {
+        for pattern in license.patterns() {
+            let (pattern, length) = parse_pattern(pattern).unwrap();
+            patterns.push((*license, pattern));
+            approximate_max_length = approximate_max_length.max(length);
+        }
+    }
+    LicensePatterns {
+        patterns,
+        approximate_max_length,
+    }
+});
+
+fn detect_license(text: &str) -> Option<OpenSourceLicense> {
+    let text = canonicalize_license_text(text);
+    for (license, pattern) in LICENSE_PATTERNS.patterns.iter() {
+        log::trace!("Checking if license is {}", license);
+        if check_pattern(&pattern, &text) {
+            return Some(*license);
+        }
+    }
+
+    None
+}
+
+struct LicensePatterns {
+    patterns: Vec<(OpenSourceLicense, Vec<PatternPart>)>,
+    approximate_max_length: usize,
+}
+
+#[derive(Debug, Clone, Default, PartialEq, Eq)]
+struct PatternPart {
+    /// Indicates that matching `text` is optional. Skipping `match_any_chars` is conditional on
+    /// matching `text`.
+    optional: bool,
+    /// Indicates the number of characters that can be skipped before matching `text`.
+    match_any_chars: Range<usize>,
+    /// The text to match, may be empty.
+    text: String,
+}
+
+/// Lines that start with "-- " begin a `PatternPart`. `-- 1..10` specifies `match_any_chars:
+/// 1..10`. `-- 1..10 optional:` additionally specifies `optional: true`. It's a parse error for a
+/// line to start with `--` without matching this format.
+///
+/// Text that does not have `--` prefixes participate in the `text` field and are canonicalized by
+/// lowercasing, replacing all runs of whitespace with a single space, and otherwise only keeping
+/// ascii alphanumeric characters.
+fn parse_pattern(pattern_source: &str) -> Result<(Vec<PatternPart>, usize)> {
+    let mut pattern = Vec::new();
+    let mut part = PatternPart::default();
+    let mut approximate_max_length = 0;
+    for line in pattern_source.lines() {
+        if let Some(directive) = line.trim().strip_prefix("--") {
+            if part != PatternPart::default() {
+                pattern.push(part);
+                part = PatternPart::default();
+            }
+            let valid = maybe!({
+                let directive_chunks = directive.split_whitespace().collect::<Vec<_>>();
+                if !(1..=2).contains(&directive_chunks.len()) {
+                    return None;
+                }
+                if directive_chunks.len() == 2 {
+                    part.optional = true;
+                }
+                let range_chunks = directive_chunks[0].split("..").collect::<Vec<_>>();
+                if range_chunks.len() != 2 {
+                    return None;
+                }
+                part.match_any_chars.start = range_chunks[0].parse::<usize>().ok()?;
+                part.match_any_chars.end = range_chunks[1].parse::<usize>().ok()?;
+                if part.match_any_chars.start > part.match_any_chars.end {
+                    return None;
+                }
+                approximate_max_length += part.match_any_chars.end;
+                Some(())
+            });
+            if valid.is_none() {
+                return Err(anyhow!("Invalid pattern directive: {}", line));
+            }
+            continue;
+        }
+        approximate_max_length += line.len() + 1;
+        let line = canonicalize_license_text(line);
+        if line.is_empty() {
+            continue;
+        }
+        if !part.text.is_empty() {
+            part.text.push(' ');
+        }
+        part.text.push_str(&line);
+    }
+    if part != PatternPart::default() {
+        pattern.push(part);
+    }
+    Ok((pattern, approximate_max_length))
+}
+
+/// Checks a pattern against text by iterating over the pattern parts in reverse order, and checking
+/// matches with the end of a prefix of the input. Assumes that `canonicalize_license_text` has
+/// already been applied to the input.
+fn check_pattern(pattern: &[PatternPart], input: &str) -> bool {
+    let mut input_ix = input.len();
+    let mut match_any_chars = 0..0;
+    for part in pattern.iter().rev() {
+        if part.text.is_empty() {
+            match_any_chars.start += part.match_any_chars.start;
+            match_any_chars.end += part.match_any_chars.end;
+            continue;
+        }
+        let mut matched = false;
+        for skip_count in match_any_chars.start..=match_any_chars.end {
+            let end_ix = input_ix.saturating_sub(skip_count);
+            if end_ix < part.text.len() {
+                break;
+            }
+            if input[..end_ix].ends_with(&part.text) {
+                matched = true;
+                input_ix = end_ix - part.text.len();
+                match_any_chars = part.match_any_chars.clone();
+                break;
+            }
+        }
+        if !matched && !part.optional {
+            log::trace!(
+                "Failed to match pattern `...{}` against input `...{}`",
+                &part.text[part.text.len().saturating_sub(128)..],
+                &input[input_ix.saturating_sub(128)..]
+            );
+            return false;
+        }
+    }
+    match_any_chars.contains(&input_ix)
+}
+
+/// Canonicalizes license text by removing all non-alphanumeric characters, lowercasing, and turning
+/// runs of whitespace into a single space. Unicode alphanumeric characters are intentionally
+/// preserved since these should cause license mismatch when not within a portion of the license
+/// where arbitrary text is allowed.
 fn canonicalize_license_text(license: &str) -> String {
-    static PARAGRAPH_SEPARATOR_REGEX: LazyLock<Regex> =
-        LazyLock::new(|| Regex::new(r"\s*\n\s*\n\s*").unwrap());
-
-    PARAGRAPH_SEPARATOR_REGEX
-        .split(license)
-        .filter(|paragraph| !paragraph.trim().is_empty())
-        .map(|paragraph| {
-            paragraph
-                .trim()
-                .split_whitespace()
-                .collect::<Vec<_>>()
-                .join(" ")
-        })
-        .collect::<Vec<_>>()
-        .join("\n\n")
+    license
+        .chars()
+        .filter(|c| c.is_ascii_whitespace() || c.is_alphanumeric())
+        .map(|c| c.to_ascii_lowercase())
+        .collect::<String>()
+        .split_ascii_whitespace()
+        .join(" ")
 }
 
 pub enum LicenseDetectionWatcher {
@@ -144,7 +321,7 @@ impl LicenseDetectionWatcher {
 
     async fn is_path_eligible(fs: &Arc<dyn Fs>, abs_path: PathBuf) -> Option<bool> {
         log::debug!("checking if `{abs_path:?}` is an open source license");
-        // Resolve symlinks so that the file size from metadata is correct.
+        // resolve symlinks so that the file size from metadata is correct
         let Some(abs_path) = fs.canonicalize(&abs_path).await.ok() else {
             log::debug!(
                 "`{abs_path:?}` license file probably deleted (error canonicalizing the path)"
@@ -152,12 +329,17 @@ impl LicenseDetectionWatcher {
             return None;
         };
         let metadata = fs.metadata(&abs_path).await.log_err()??;
-        // If the license file is >32kb it's unlikely to legitimately match any eligible license.
-        if metadata.len > 32768 {
+        if metadata.len > LICENSE_PATTERNS.approximate_max_length as u64 {
+            log::debug!(
+                "`{abs_path:?}` license file was skipped \
+                because its size of {} bytes was larger than the max size of {} bytes",
+                metadata.len,
+                LICENSE_PATTERNS.approximate_max_length
+            );
             return None;
         }
         let text = fs.load(&abs_path).await.log_err()?;
-        let is_eligible = is_license_eligible_for_data_collection(&text);
+        let is_eligible = detect_license(&text).is_some();
         if is_eligible {
             log::debug!(
                 "`{abs_path:?}` matches a license that is eligible for data collection (if enabled)"
@@ -188,233 +370,220 @@ mod tests {
     use gpui::TestAppContext;
     use serde_json::json;
     use settings::{Settings as _, SettingsStore};
-    use unindent::unindent;
     use worktree::WorktreeSettings;
 
     use super::*;
 
-    const MIT_LICENSE: &str = include_str!("license_detection/mit-text");
-    const APACHE_LICENSE: &str = include_str!("license_detection/apache-text");
+    const APACHE_2_0_TXT: &str = include_str!("../license_examples/apache-2.0-ex0.txt");
+    const ISC_TXT: &str = include_str!("../license_examples/isc.txt");
+    const MIT_TXT: &str = include_str!("../license_examples/mit-ex0.txt");
+    const UPL_1_0_TXT: &str = include_str!("../license_examples/upl-1.0.txt");
+    const BSD_0_TXT: &str = include_str!("../license_examples/0bsd.txt");
 
-    #[test]
-    fn test_mit_positive_detection() {
-        assert!(is_license_eligible_for_data_collection(MIT_LICENSE));
+    #[track_caller]
+    fn assert_matches_license(text: &str, license: OpenSourceLicense) {
+        assert_eq!(detect_license(text), Some(license));
+        assert!(text.len() < LICENSE_PATTERNS.approximate_max_length);
     }
 
+    /*
+    // Uncomment this and run with `cargo test -p zeta -- --no-capture &> licenses-output` to
+    // traverse your entire home directory and run license detection on every file that has a
+    // license-like name.
     #[test]
-    fn test_mit_negative_detection() {
-        let example_license = format!(
-            r#"{MIT_LICENSE}
-
-            This project is dual licensed under the MIT License and the Apache License, Version 2.0."#
+    fn test_check_all_licenses_in_home_dir() {
+        let mut detected = Vec::new();
+        let mut unrecognized = Vec::new();
+        let mut walked_entries = 0;
+        let homedir = std::env::home_dir().unwrap();
+        for entry in walkdir::WalkDir::new(&homedir) {
+            walked_entries += 1;
+            if walked_entries % 10000 == 0 {
+                println!(
+                    "So far visited {} files in {}",
+                    walked_entries,
+                    homedir.display()
+                );
+            }
+            let Ok(entry) = entry else {
+                continue;
+            };
+            if !LICENSE_FILE_NAME_REGEX.is_match(entry.file_name().as_encoded_bytes()) {
+                continue;
+            }
+            let Ok(contents) = std::fs::read_to_string(entry.path()) else {
+                continue;
+            };
+            let path_string = entry.path().to_string_lossy().to_string();
+            let license = detect_license(&contents);
+            match license {
+                Some(license) => detected.push((license, path_string)),
+                None => unrecognized.push(path_string),
+            }
+        }
+        println!("\nDetected licenses:\n");
+        detected.sort();
+        for (license, path) in &detected {
+            println!("{}: {}", license.spdx_identifier(), path);
+        }
+        println!("\nUnrecognized licenses:\n");
+        for path in &unrecognized {
+            println!("{}", path);
+        }
+        panic!(
+            "{} licenses detected, {} unrecognized",
+            detected.len(),
+            unrecognized.len()
         );
-        assert!(!is_license_eligible_for_data_collection(&example_license));
+        println!("This line has a warning to make sure this test is always commented out");
     }
+    */
 
     #[test]
-    fn test_isc_positive_detection() {
-        let example_license = unindent(
-            r#"
-                ISC License
-
-                Copyright (c) 2024, John Doe
-
-                Permission to use, copy, modify, and/or distribute this software for any
-                purpose with or without fee is hereby granted, provided that the above
-                copyright notice and this permission notice appear in all copies.
-
-                THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-                WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-                MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-                ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-                WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-                ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-                OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-            "#
-            .trim(),
+    fn test_apache_positive_detection() {
+        assert_matches_license(APACHE_2_0_TXT, OpenSourceLicense::Apache2_0);
+        assert_matches_license(
+            include_str!("../license_examples/apache-2.0-ex1.txt"),
+            OpenSourceLicense::Apache2_0,
+        );
+        assert_matches_license(
+            include_str!("../license_examples/apache-2.0-ex2.txt"),
+            OpenSourceLicense::Apache2_0,
+        );
+        assert_matches_license(
+            include_str!("../license_examples/apache-2.0-ex3.txt"),
+            OpenSourceLicense::Apache2_0,
+        );
+        assert_matches_license(
+            include_str!("../license_examples/apache-2.0-ex4.txt"),
+            OpenSourceLicense::Apache2_0,
+        );
+        assert_matches_license(
+            include_str!("../../../LICENSE-APACHE"),
+            OpenSourceLicense::Apache2_0,
         );
-
-        assert!(is_license_eligible_for_data_collection(&example_license));
     }
 
     #[test]
-    fn test_isc_negative_detection() {
-        let example_license = unindent(
-            r#"
-                ISC License
-
-                Copyright (c) 2024, John Doe
-
-                Permission to use, copy, modify, and/or distribute this software for any
-                purpose with or without fee is hereby granted, provided that the above
-                copyright notice and this permission notice appear in all copies.
-
-                THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-                WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-                MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-                ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-                WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-                ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-                OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-                This project is dual licensed under the ISC License and the MIT License.
-            "#
-            .trim(),
+    fn test_apache_negative_detection() {
+        assert_eq!(
+            detect_license(&format!(
+                "{APACHE_2_0_TXT}\n\nThe terms in this license are void if P=NP."
+            )),
+            None
         );
-
-        assert!(!is_license_eligible_for_data_collection(&example_license));
     }
 
     #[test]
-    fn test_upl_positive_detection() {
-        let example_license = unindent(
-            r#"
-                Copyright (c) 2025, John Doe
-
-                The Universal Permissive License (UPL), Version 1.0
-
-                Subject to the condition set forth below, permission is hereby granted to any person
-                obtaining a copy of this software, associated documentation and/or data (collectively
-                the "Software"), free of charge and under any and all copyright rights in the
-                Software, and any and all patent rights owned or freely licensable by each licensor
-                hereunder covering either (i) the unmodified Software as contributed to or provided
-                by such licensor, or (ii) the Larger Works (as defined below), to deal in both
-
-                (a) the Software, and
-
-                (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is
-                    included with the Software (each a "Larger Work" to which the Software is
-                    contributed by such licensors),
-
-                without restriction, including without limitation the rights to copy, create
-                derivative works of, display, perform, and distribute the Software and make, use,
-                sell, offer for sale, import, export, have made, and have sold the Software and the
-                Larger Work(s), and to sublicense the foregoing rights on either these or other
-                terms.
-
-                This license is subject to the following condition:
-
-                The above copyright notice and either this complete permission notice or at a minimum
-                a reference to the UPL must be included in all copies or substantial portions of the
-                Software.
-
-                THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
-                INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-                PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-                HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
-                CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
-                OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-            "#
-            .trim(),
+    fn test_bsd_1_clause_positive_detection() {
+        assert_matches_license(
+            include_str!("../license_examples/bsd-1-clause.txt"),
+            OpenSourceLicense::BSD,
         );
-
-        assert!(is_license_eligible_for_data_collection(&example_license));
     }
 
     #[test]
-    fn test_upl_negative_detection() {
-        let example_license = unindent(
-            r#"
-                UPL License
-
-                Copyright (c) 2024, John Doe
-
-                The Universal Permissive License (UPL), Version 1.0
-
-                Subject to the condition set forth below, permission is hereby granted to any person
-                obtaining a copy of this software, associated documentation and/or data (collectively
-                the "Software"), free of charge and under any and all copyright rights in the
-                Software, and any and all patent rights owned or freely licensable by each licensor
-                hereunder covering either (i) the unmodified Software as contributed to or provided
-                by such licensor, or (ii) the Larger Works (as defined below), to deal in both
-
-                (a) the Software, and
-
-                (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is
-                    included with the Software (each a "Larger Work" to which the Software is
-                    contributed by such licensors),
+    fn test_bsd_2_clause_positive_detection() {
+        assert_matches_license(
+            include_str!("../license_examples/bsd-2-clause-ex0.txt"),
+            OpenSourceLicense::BSD,
+        );
+    }
 
-                without restriction, including without limitation the rights to copy, create
-                derivative works of, display, perform, and distribute the Software and make, use,
-                sell, offer for sale, import, export, have made, and have sold the Software and the
-                Larger Work(s), and to sublicense the foregoing rights on either these or other
-                terms.
+    #[test]
+    fn test_bsd_3_clause_positive_detection() {
+        assert_matches_license(
+            include_str!("../license_examples/bsd-3-clause-ex0.txt"),
+            OpenSourceLicense::BSD,
+        );
+        assert_matches_license(
+            include_str!("../license_examples/bsd-3-clause-ex1.txt"),
+            OpenSourceLicense::BSD,
+        );
+        assert_matches_license(
+            include_str!("../license_examples/bsd-3-clause-ex2.txt"),
+            OpenSourceLicense::BSD,
+        );
+        assert_matches_license(
+            include_str!("../license_examples/bsd-3-clause-ex3.txt"),
+            OpenSourceLicense::BSD,
+        );
+        assert_matches_license(
+            include_str!("../license_examples/bsd-3-clause-ex4.txt"),
+            OpenSourceLicense::BSD,
+        );
+    }
 
-                This license is subject to the following condition:
+    #[test]
+    fn test_bsd_0_positive_detection() {
+        assert_matches_license(BSD_0_TXT, OpenSourceLicense::BSDZero);
+    }
 
-                The above copyright notice and either this complete permission notice or at a minimum
-                a reference to the UPL must be included in all copies or substantial portions of the
-                Software.
+    #[test]
+    fn test_isc_positive_detection() {
+        assert_matches_license(ISC_TXT, OpenSourceLicense::ISC);
+    }
 
-                THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
-                INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-                PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-                HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
-                CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
-                OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+    #[test]
+    fn test_isc_negative_detection() {
+        let license_text = format!(
+            r#"{ISC_TXT}
 
-                This project is dual licensed under the ISC License and the MIT License.
-            "#
-            .trim(),
+            This project is dual licensed under the ISC License and the MIT License."#
         );
 
-        assert!(!is_license_eligible_for_data_collection(&example_license));
+        assert_eq!(detect_license(&license_text), None);
     }
 
     #[test]
-    fn test_apache_positive_detection() {
-        assert!(is_license_eligible_for_data_collection(APACHE_LICENSE));
-
-        let license_with_appendix = format!(
-            r#"{APACHE_LICENSE}
-
-            END OF TERMS AND CONDITIONS
-
-            APPENDIX: How to apply the Apache License to your work.
+    fn test_mit_positive_detection() {
+        assert_matches_license(MIT_TXT, OpenSourceLicense::MIT);
+        assert_matches_license(
+            include_str!("../license_examples/mit-ex1.txt"),
+            OpenSourceLicense::MIT,
+        );
+        assert_matches_license(
+            include_str!("../license_examples/mit-ex2.txt"),
+            OpenSourceLicense::MIT,
+        );
+        assert_matches_license(
+            include_str!("../license_examples/mit-ex3.txt"),
+            OpenSourceLicense::MIT,
+        );
+    }
 
-               To apply the Apache License to your work, attach the following
-               boilerplate notice, with the fields enclosed by brackets "[]"
-               replaced with your own identifying information. (Don't include
-               the brackets!)  The text should be enclosed in the appropriate
-               comment syntax for the file format. We also recommend that a
-               file or class name and description of purpose be included on the
-               same "printed page" as the copyright notice for easier
-               identification within third-party archives.
+    #[test]
+    fn test_mit_negative_detection() {
+        let license_text = format!(
+            r#"{MIT_TXT}
 
-            Copyright [yyyy] [name of copyright owner]
+            This project is dual licensed under the MIT License and the Apache License, Version 2.0."#
+        );
+        assert_eq!(detect_license(&license_text), None);
+    }
 
-            Licensed under the Apache License, Version 2.0 (the "License");
-            you may not use this file except in compliance with the License.
-            You may obtain a copy of the License at
+    #[test]
+    fn test_upl_positive_detection() {
+        assert_matches_license(UPL_1_0_TXT, OpenSourceLicense::UPL1_0);
+    }
 
-                http://www.apache.org/licenses/LICENSE-2.0
+    #[test]
+    fn test_upl_negative_detection() {
+        let license_text = format!(
+            r#"{UPL_1_0_TXT}
 
-            Unless required by applicable law or agreed to in writing, software
-            distributed under the License is distributed on an "AS IS" BASIS,
-            WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-            See the License for the specific language governing permissions and
-            limitations under the License."#
+            This project is dual licensed under the UPL License and the MIT License."#
         );
-        assert!(is_license_eligible_for_data_collection(
-            &license_with_appendix
-        ));
-
-        // Sometimes people fill in the appendix with copyright info.
-        let license_with_copyright = license_with_appendix.replace(
-            "Copyright [yyyy] [name of copyright owner]",
-            "Copyright 2025 John Doe",
-        );
-        assert!(license_with_copyright != license_with_appendix);
-        assert!(is_license_eligible_for_data_collection(
-            &license_with_copyright
-        ));
+
+        assert_eq!(detect_license(&license_text), None);
     }
 
     #[test]
-    fn test_apache_negative_detection() {
-        assert!(!is_license_eligible_for_data_collection(&format!(
-            "{APACHE_LICENSE}\n\nThe terms in this license are void if P=NP."
-        )));
+    fn test_zlib_positive_detection() {
+        assert_matches_license(
+            include_str!("../license_examples/zlib-ex0.txt"),
+            OpenSourceLicense::Zlib,
+        );
     }
 
     #[test]
@@ -439,10 +608,22 @@ mod tests {
         assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE-ISC"));
         assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE-UPL"));
 
+        // Test with "license" coming after
+        assert!(LICENSE_FILE_NAME_REGEX.is_match(b"APACHE-LICENSE"));
+
+        // Test version numbers
+        assert!(LICENSE_FILE_NAME_REGEX.is_match(b"APACHE-2"));
+        assert!(LICENSE_FILE_NAME_REGEX.is_match(b"APACHE-2.0"));
+        assert!(LICENSE_FILE_NAME_REGEX.is_match(b"BSD-1"));
+        assert!(LICENSE_FILE_NAME_REGEX.is_match(b"BSD-2"));
+        assert!(LICENSE_FILE_NAME_REGEX.is_match(b"BSD-3"));
+        assert!(LICENSE_FILE_NAME_REGEX.is_match(b"BSD-3-CLAUSE"));
+
         // Test combinations
         assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE-MIT.txt"));
         assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENCE.ISC.md"));
         assert!(LICENSE_FILE_NAME_REGEX.is_match(b"license_upl"));
+        assert!(LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE.APACHE.2.0"));
 
         // Test case insensitive
         assert!(LICENSE_FILE_NAME_REGEX.is_match(b"License"));
@@ -461,82 +642,20 @@ mod tests {
         assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE.old"));
         assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE-GPL"));
         assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"LICENSEABC"));
-        assert!(!LICENSE_FILE_NAME_REGEX.is_match(b""));
     }
 
     #[test]
     fn test_canonicalize_license_text() {
-        // Test basic whitespace normalization
-        let input = "Line 1\n   Line 2   \n\n\n  Line 3  ";
-        let expected = "Line 1 Line 2\n\nLine 3";
-        assert_eq!(canonicalize_license_text(input), expected);
-
-        // Test paragraph separation
-        let input = "Paragraph 1\nwith multiple lines\n\n\n\nParagraph 2\nwith more lines";
-        let expected = "Paragraph 1 with multiple lines\n\nParagraph 2 with more lines";
-        assert_eq!(canonicalize_license_text(input), expected);
-
-        // Test empty paragraphs are filtered out
-        let input = "\n\n\nParagraph 1\n\n\n   \n\n\nParagraph 2\n\n\n";
-        let expected = "Paragraph 1\n\nParagraph 2";
-        assert_eq!(canonicalize_license_text(input), expected);
-
-        // Test single line
-        let input = "   Single line with spaces   ";
-        let expected = "Single line with spaces";
-        assert_eq!(canonicalize_license_text(input), expected);
-
-        // Test multiple consecutive spaces within lines
-        let input = "Word1    Word2\n\nWord3     Word4";
-        let expected = "Word1 Word2\n\nWord3 Word4";
+        let input = "  Paragraph 1\nwith multiple lines\n\n\n\nParagraph 2\nwith more lines\n  ";
+        let expected = "paragraph 1 with multiple lines paragraph 2 with more lines";
         assert_eq!(canonicalize_license_text(input), expected);
 
         // Test tabs and mixed whitespace
         let input = "Word1\t\tWord2\n\n   Word3\r\n\r\n\r\nWord4   ";
-        let expected = "Word1 Word2\n\nWord3\n\nWord4";
+        let expected = "word1 word2 word3 word4";
         assert_eq!(canonicalize_license_text(input), expected);
     }
 
-    #[test]
-    fn test_license_detection_canonicalizes_whitespace() {
-        let mit_with_weird_spacing = unindent(
-            r#"
-                MIT License
-
-
-                Copyright (c) 2024 John Doe
-
-
-                Permission is hereby granted, free of charge, to any person obtaining a copy
-                of this software   and   associated   documentation files (the "Software"), to deal
-                in the Software without restriction, including without limitation the rights
-                to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-                copies of the Software, and to permit persons to whom the Software is
-                furnished to do so, subject to the following conditions:
-
-
-
-                The above copyright notice and this permission notice shall be included in all
-                copies or substantial portions of the Software.
-
-
-
-                THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-                IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-                FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-                AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-                LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-                OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-                SOFTWARE.
-            "#
-            .trim(),
-        );
-
-        assert!(is_license_eligible_for_data_collection(
-            &mit_with_weird_spacing
-        ));
-    }
-
     fn init_test(cx: &mut TestAppContext) {
         cx.update(|cx| {
             let settings_store = SettingsStore::test(cx);
@@ -590,14 +709,14 @@ mod tests {
         assert!(matches!(watcher, LicenseDetectionWatcher::Local { .. }));
         assert!(!watcher.is_project_open_source());
 
-        fs.write(Path::new("/root/LICENSE-MIT"), MIT_LICENSE.as_bytes())
+        fs.write(Path::new("/root/LICENSE-MIT"), MIT_TXT.as_bytes())
             .await
             .unwrap();
 
         cx.background_executor.run_until_parked();
         assert!(watcher.is_project_open_source());
 
-        fs.write(Path::new("/root/LICENSE-APACHE"), APACHE_LICENSE.as_bytes())
+        fs.write(Path::new("/root/LICENSE-APACHE"), APACHE_2_0_TXT.as_bytes())
             .await
             .unwrap();
 
@@ -630,7 +749,7 @@ mod tests {
         let fs = FakeFs::new(cx.background_executor.clone());
         fs.insert_tree(
             "/root",
-            json!({ "main.rs": "fn main() {}", "LICENSE-MIT": MIT_LICENSE }),
+            json!({ "main.rs": "fn main() {}", "LICENSE-MIT": MIT_TXT }),
         )
         .await;
 

crates/zlog_settings/src/zlog_settings.rs 🔗

@@ -3,7 +3,7 @@ use anyhow::Result;
 use gpui::App;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsStore};
+use settings::{Settings, SettingsStore, SettingsUi};
 
 pub fn init(cx: &mut App) {
     ZlogSettings::register(cx);
@@ -15,7 +15,7 @@ pub fn init(cx: &mut App) {
     .detach();
 }
 
-#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, SettingsUi)]
 pub struct ZlogSettings {
     #[serde(default, flatten)]
     pub scopes: std::collections::HashMap<String, String>,

docs/src/accounts.md 🔗

@@ -30,3 +30,8 @@ To sign out of Zed, you can use either of these methods:
 Your Zed account's email address is the address provided by GitHub OAuth. If you have a public email address then it will be used, otherwise your primary GitHub email address will be used. Changes to your email address on GitHub can be synced to your Zed account by [signing in to zed.dev](https://zed.dev/sign_in).
 
 Stripe is used for billing, and will use your Zed account's email address when starting a subscription. Changes to your Zed account email address do not currently update the email address used in Stripe. See [Updating Billing Information](./ai/billing.md#updating-billing-info) for how to change this email address.
+
+## Hiding Sign In button from the interface
+
+If the Sign In feature is not used, you can hide it from the interface with the `show_sign_in` settings property.
+Refer to [Visual Customization page](./visual-customization.md) for more details.

docs/src/ai/llm-providers.md 🔗

@@ -40,7 +40,6 @@ Ensure your credentials have the following permissions set up:
 
 - `bedrock:InvokeModelWithResponseStream`
 - `bedrock:InvokeModel`
-- `bedrock:ConverseStream`
 
 Your IAM policy should look similar to:
 
@@ -52,8 +51,7 @@ Your IAM policy should look similar to:
       "Effect": "Allow",
       "Action": [
         "bedrock:InvokeModel",
-        "bedrock:InvokeModelWithResponseStream",
-        "bedrock:ConverseStream"
+        "bedrock:InvokeModelWithResponseStream"
       ],
       "Resource": "*"
     }

docs/src/configuring-zed.md 🔗

@@ -685,6 +685,12 @@ List of `string` values
 - Setting: `selection_highlight`
 - Default: `true`
 
+## Rounded Selection
+
+- Description: Whether the text selection should have rounded corners.
+- Setting: `rounded_selection`
+- Default: `true`
+
 ## Cursor Blink
 
 - Description: Whether or not the cursor blinks.
@@ -1461,6 +1467,16 @@ This setting enables integration with macOS’s native window tabbing feature. W
 
 Positive `integer` values
 
+## Excerpt Context Lines
+
+- Description: The number of lines of context to provide when showing excerpts in the multibuffer.
+- Setting: `excerpt_context_lines`
+- Default: `2`
+
+**Options**
+
+Positive `integer` value between 1 and 32. Values outside of this range will be clamped to this range.
+
 ## Extend Comment On Newline
 
 - Description: Whether to start a new line with a comment when a previous line is a comment as well.

docs/src/debugger.md 🔗

@@ -78,11 +78,10 @@ While configuration fields are debug adapter-dependent, most adapters support th
     // The debug adapter that Zed should use to debug the program
     "adapter": "Example adapter name",
     // Request:
-    //  - launch: Zed will launch the program if specified or shows a debug terminal with the right configuration
-    //  - attach: Zed will attach to a running program to debug it or when the process_id is not specified we will show a process picker (only supported for node currently)
+    //  - launch: Zed will launch the program if specified, or show a debug terminal with the right configuration
+    //  - attach: Zed will attach to a running program to debug it, or when the process_id is not specified, will show a process picker (only supported for node currently)
     "request": "launch",
-    // program: The program that you want to debug
-    // This field supports path resolution with ~ or . symbols
+    // The program to debug. This field supports path resolution with ~ or . symbols.
     "program": "path_to_program",
     // cwd: defaults to the current working directory of your project ($ZED_WORKTREE_ROOT)
     "cwd": "$ZED_WORKTREE_ROOT"
@@ -148,6 +147,8 @@ The debug adapter will then stop whenever an exception of a given kind occurs. W
 
 ## Settings
 
+The settings for the debugger are grouped under the `debugger` key in `settings.json`:
+
 - `dock`: Determines the position of the debug panel in the UI.
 - `stepping_granularity`: Determines the stepping granularity.
 - `save_breakpoints`: Whether the breakpoints should be reused across Zed sessions.

docs/src/development/releases.md 🔗

@@ -51,7 +51,6 @@ Credentials for various services used in this process can be found in 1Password.
    - We sometimes correct things here and there that didn't translate from GitHub's renderer to Kit's.
 
 1. Build social media posts based on the popular items in stable.
-
    - You can use the [prior week's post chain](https://zed.dev/channel/tweets-23331) as your outline.
    - Stage the copy and assets using [Buffer](https://buffer.com), for both X and BlueSky.
    - Publish both, one at a time, ensuring both are posted to each respective platform.
@@ -89,7 +88,6 @@ You will need write access to the Zed repository to do this:
    - Download the artifacts for each release draft and test that you can run them locally.
 
 1. Publish stable / preview drafts, one at a time.
-
    - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild.
      The release will be public once the rebuild has completed.
 

docs/src/development/windows.md 🔗

@@ -114,20 +114,7 @@ cargo test --workspace
 
 ## Installing from msys2
 
-[MSYS2](https://msys2.org/) distribution provides Zed as a package [mingw-w64-zed](https://packages.msys2.org/base/mingw-w64-zed). The package is available for UCRT64, CLANG64 and CLANGARM64 repositories. To download it, run
-
-```sh
-pacman -Syu
-pacman -S $MINGW_PACKAGE_PREFIX-zed
-```
-
-You can see the [build script](https://github.com/msys2/MINGW-packages/blob/master/mingw-w64-zed/PKGBUILD) for more details on build process.
-
-> Please, report any issue in [msys2/MINGW-packages/issues](https://github.com/msys2/MINGW-packages/issues?q=is%3Aissue+is%3Aopen+zed) first.
-
-See also MSYS2 [documentation page](https://www.msys2.org/docs/ides-editors).
-
-Note that `collab` is not supported for MSYS2.
+Zed does not support unofficial MSYS2 Zed packages built for Mingw-w64. Please report any issues you may have with [mingw-w64-zed](https://packages.msys2.org/base/mingw-w64-zed) to [msys2/MINGW-packages/issues](https://github.com/msys2/MINGW-packages/issues?q=is%3Aissue+is%3Aopen+zed).
 
 ## Troubleshooting
 

docs/src/visual-customization.md 🔗

@@ -8,7 +8,7 @@ See [Configuring Zed](./configuring-zed.md) for additional information and other
 
 You may install Zed extensions providing [Themes](./themes.md) and [Icon Themes](./icon-themes.md) via {#action zed::Extensions} from the command palette or menu.
 
-You can preview/choose amongsts your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and ({#action icon_theme_selector::Toggle}) which will modify the following settings:
+You can preview/choose amongst your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and ({#action icon_theme_selector::Toggle}) which will modify the following settings:
 
 ```json
 {
@@ -334,7 +334,9 @@ TBD: Centered layout related settings
 ```json
 {
   // The default number of lines to expand excerpts in the multibuffer by.
-  "expand_excerpt_lines": 5
+  "expand_excerpt_lines": 5,
+  // The default number of context lines to provide for excerpts in the multibuffer.
+  "excerpt_context_lines": 2
 }
 ```
 

flake.lock 🔗

@@ -2,11 +2,11 @@
   "nodes": {
     "crane": {
       "locked": {
-        "lastModified": 1754269165,
-        "narHash": "sha256-0tcS8FHd4QjbCVoxN9jI+PjHgA4vc/IjkUSp+N3zy0U=",
+        "lastModified": 1755993354,
+        "narHash": "sha256-FCRRAzSaL/+umLIm3RU3O/+fJ2ssaPHseI2SSFL8yZU=",
         "owner": "ipetkov",
         "repo": "crane",
-        "rev": "444e81206df3f7d92780680e45858e31d2f07a08",
+        "rev": "25bd41b24426c7734278c2ff02e53258851db914",
         "type": "github"
       },
       "original": {
@@ -33,10 +33,10 @@
     "nixpkgs": {
       "locked": {
         "lastModified": 315532800,
-        "narHash": "sha256-5VYevX3GccubYeccRGAXvCPA1ktrGmIX1IFC0icX07g=",
-        "rev": "a683adc19ff5228af548c6539dbc3440509bfed3",
+        "narHash": "sha256-E8CyvVDZuIsF7puIw+OLkrFmhj3qUV+iwPcNbBhdcxM=",
+        "rev": "a918bb3594dd243c2f8534b3be01b3cb4ed35fd1",
         "type": "tarball",
-        "url": "https://releases.nixos.org/nixpkgs/nixpkgs-25.11pre840248.a683adc19ff5/nixexprs.tar.xz"
+        "url": "https://releases.nixos.org/nixpkgs/nixpkgs-25.11pre854010.a918bb3594dd/nixexprs.tar.xz"
       },
       "original": {
         "type": "tarball",
@@ -58,11 +58,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1754575663,
-        "narHash": "sha256-afOx8AG0KYtw7mlt6s6ahBBy7eEHZwws3iCRoiuRQS4=",
+        "lastModified": 1756607787,
+        "narHash": "sha256-ciwAdgtlAN1PCaidWK6RuWsTBL8DVuyDCGM+X3ein5Q=",
         "owner": "oxalica",
         "repo": "rust-overlay",
-        "rev": "6db0fb0e9cec2e9729dc52bf4898e6c135bb8a0f",
+        "rev": "f46d294b87ebb9f7124f1ce13aa2a5f5acc0f3eb",
         "type": "github"
       },
       "original": {

nix/build.nix 🔗

@@ -145,7 +145,6 @@ let
         ]
         ++ lib.optionals stdenv'.hostPlatform.isDarwin [
           apple-sdk_15
-          darwin.apple_sdk.frameworks.System
           (darwinMinVersionHook "10.15")
         ];
 

script/bundle-windows.ps1 🔗

@@ -150,6 +150,7 @@ function CollectFiles {
     Move-Item -Path "$innoDir\zed_explorer_command_injector.appx" -Destination "$innoDir\appx\zed_explorer_command_injector.appx" -Force
     Move-Item -Path "$innoDir\zed_explorer_command_injector.dll" -Destination "$innoDir\appx\zed_explorer_command_injector.dll" -Force
     Move-Item -Path "$innoDir\cli.exe" -Destination "$innoDir\bin\zed.exe" -Force
+    Move-Item -Path "$innoDir\zed-wsl" -Destination "$innoDir\bin\zed" -Force
     Move-Item -Path "$innoDir\auto_update_helper.exe" -Destination "$innoDir\tools\auto_update_helper.exe" -Force
     Move-Item -Path ".\AGS_SDK-6.3.0\ags_lib\lib\amd_ags_x64.dll" -Destination "$innoDir\amd_ags_x64.dll" -Force
 }

script/danger/pnpm-lock.yaml 🔗

@@ -33,8 +33,8 @@ packages:
     resolution: {integrity: sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==}
     engines: {node: '>= 18'}
 
-  '@octokit/core@5.2.1':
-    resolution: {integrity: sha512-dKYCMuPO1bmrpuogcjQ8z7ICCH3FP6WmxpwC03yjzGfZhj9fTJg6+bS1+UAplekbN2C+M61UNllGOOoAfGCrdQ==}
+  '@octokit/core@5.2.2':
+    resolution: {integrity: sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==}
     engines: {node: '>= 18'}
 
   '@octokit/endpoint@9.0.6':
@@ -131,8 +131,8 @@ packages:
   commander@2.20.3:
     resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==}
 
-  core-js@3.41.0:
-    resolution: {integrity: sha512-SJ4/EHwS36QMJd6h/Rg+GyR4A5xE0FSI3eZ+iBVpfqf1x0eTSg1smWLHrA+2jQThZSh97fmSgFSU8B61nxosxA==}
+  core-js@3.45.1:
+    resolution: {integrity: sha512-L4NPsJlCfZsPeXukyzHFlg/i7IIVwHSItR0wg0FLNqYClJ4MQYTYLbC7EkjKYRLZF2iof2MUgN0EGy7MdQFChg==}
 
   danger-plugin-pr-hygiene@0.6.1:
     resolution: {integrity: sha512-nb+iUQvirE3BlKXI1WoOND6sujyGzHar590mJm5tt4RLi65HXFaU5hqONxgDoWFujJNHYnXse9yaZdxnxEi4QA==}
@@ -142,8 +142,8 @@ packages:
     engines: {node: '>=18'}
     hasBin: true
 
-  debug@4.4.0:
-    resolution: {integrity: sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==}
+  debug@4.4.1:
+    resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==}
     engines: {node: '>=6.0'}
     peerDependencies:
       supports-color: '*'
@@ -252,8 +252,8 @@ packages:
     resolution: {integrity: sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==}
     engines: {node: '>=12', npm: '>=6'}
 
-  jwa@1.4.1:
-    resolution: {integrity: sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==}
+  jwa@1.4.2:
+    resolution: {integrity: sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw==}
 
   jws@3.2.2:
     resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==}
@@ -385,8 +385,8 @@ packages:
   safe-buffer@5.2.1:
     resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
 
-  semver@7.7.1:
-    resolution: {integrity: sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==}
+  semver@7.7.2:
+    resolution: {integrity: sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==}
     engines: {node: '>=10'}
     hasBin: true
 
@@ -460,7 +460,7 @@ snapshots:
 
   '@octokit/auth-token@4.0.0': {}
 
-  '@octokit/core@5.2.1':
+  '@octokit/core@5.2.2':
     dependencies:
       '@octokit/auth-token': 4.0.0
       '@octokit/graphql': 7.1.1
@@ -483,18 +483,18 @@ snapshots:
 
   '@octokit/openapi-types@24.2.0': {}
 
-  '@octokit/plugin-paginate-rest@11.4.4-cjs.2(@octokit/core@5.2.1)':
+  '@octokit/plugin-paginate-rest@11.4.4-cjs.2(@octokit/core@5.2.2)':
     dependencies:
-      '@octokit/core': 5.2.1
+      '@octokit/core': 5.2.2
       '@octokit/types': 13.10.0
 
-  '@octokit/plugin-request-log@4.0.1(@octokit/core@5.2.1)':
+  '@octokit/plugin-request-log@4.0.1(@octokit/core@5.2.2)':
     dependencies:
-      '@octokit/core': 5.2.1
+      '@octokit/core': 5.2.2
 
-  '@octokit/plugin-rest-endpoint-methods@13.3.2-cjs.1(@octokit/core@5.2.1)':
+  '@octokit/plugin-rest-endpoint-methods@13.3.2-cjs.1(@octokit/core@5.2.2)':
     dependencies:
-      '@octokit/core': 5.2.1
+      '@octokit/core': 5.2.2
       '@octokit/types': 13.10.0
 
   '@octokit/request-error@5.1.1':
@@ -512,10 +512,10 @@ snapshots:
 
   '@octokit/rest@20.1.2':
     dependencies:
-      '@octokit/core': 5.2.1
-      '@octokit/plugin-paginate-rest': 11.4.4-cjs.2(@octokit/core@5.2.1)
-      '@octokit/plugin-request-log': 4.0.1(@octokit/core@5.2.1)
-      '@octokit/plugin-rest-endpoint-methods': 13.3.2-cjs.1(@octokit/core@5.2.1)
+      '@octokit/core': 5.2.2
+      '@octokit/plugin-paginate-rest': 11.4.4-cjs.2(@octokit/core@5.2.2)
+      '@octokit/plugin-request-log': 4.0.1(@octokit/core@5.2.2)
+      '@octokit/plugin-rest-endpoint-methods': 13.3.2-cjs.1(@octokit/core@5.2.2)
 
   '@octokit/types@13.10.0':
     dependencies:
@@ -525,7 +525,7 @@ snapshots:
 
   agent-base@6.0.2:
     dependencies:
-      debug: 4.4.0
+      debug: 4.4.1
     transitivePeerDependencies:
       - supports-color
 
@@ -571,7 +571,7 @@ snapshots:
 
   commander@2.20.3: {}
 
-  core-js@3.41.0: {}
+  core-js@3.45.1: {}
 
   danger-plugin-pr-hygiene@0.6.1: {}
 
@@ -582,8 +582,8 @@ snapshots:
       async-retry: 1.2.3
       chalk: 2.4.2
       commander: 2.20.3
-      core-js: 3.41.0
-      debug: 4.4.0
+      core-js: 3.45.1
+      debug: 4.4.1
       fast-json-patch: 3.1.1
       get-stdin: 6.0.0
       http-proxy-agent: 5.0.0
@@ -618,7 +618,7 @@ snapshots:
       - encoding
       - supports-color
 
-  debug@4.4.0:
+  debug@4.4.1:
     dependencies:
       ms: 2.1.3
 
@@ -688,14 +688,14 @@ snapshots:
     dependencies:
       '@tootallnate/once': 2.0.0
       agent-base: 6.0.2
-      debug: 4.4.0
+      debug: 4.4.1
     transitivePeerDependencies:
       - supports-color
 
   https-proxy-agent@5.0.1:
     dependencies:
       agent-base: 6.0.2
-      debug: 4.4.0
+      debug: 4.4.1
     transitivePeerDependencies:
       - supports-color
 
@@ -720,9 +720,9 @@ snapshots:
       lodash.isstring: 4.0.1
       lodash.once: 4.1.1
       ms: 2.1.3
-      semver: 7.7.1
+      semver: 7.7.2
 
-  jwa@1.4.1:
+  jwa@1.4.2:
     dependencies:
       buffer-equal-constant-time: 1.0.1
       ecdsa-sig-formatter: 1.0.11
@@ -730,7 +730,7 @@ snapshots:
 
   jws@3.2.2:
     dependencies:
-      jwa: 1.4.1
+      jwa: 1.4.2
       safe-buffer: 5.2.1
 
   lodash.find@4.6.0: {}
@@ -823,7 +823,7 @@ snapshots:
 
   safe-buffer@5.2.1: {}
 
-  semver@7.7.1: {}
+  semver@7.7.2: {}
 
   side-channel-list@1.0.0:
     dependencies:

script/issue_response/package.json 🔗

@@ -9,14 +9,14 @@
     "start": "node main.js"
   },
   "dependencies": {
-    "@octokit/rest": "^21.1.0",
-    "@slack/webhook": "^7.0.4",
+    "@octokit/rest": "^21.1.1",
+    "@slack/webhook": "^7.0.6",
     "date-fns": "^4.1.0",
-    "octokit": "^4.1.1"
+    "octokit": "^4.1.4"
   },
   "devDependencies": {
-    "@octokit/types": "^13.8.0",
-    "@slack/types": "^2.14.0",
+    "@octokit/types": "^13.10.0",
+    "@slack/types": "^2.16.0",
     "@tsconfig/node20": "20.1.5",
     "@tsconfig/strictest": "2.0.5",
     "typescript": "5.7.3"

script/issue_response/pnpm-lock.yaml 🔗

@@ -9,24 +9,24 @@ importers:
   .:
     dependencies:
       '@octokit/rest':
-        specifier: ^21.1.0
+        specifier: ^21.1.1
         version: 21.1.1
       '@slack/webhook':
-        specifier: ^7.0.4
-        version: 7.0.5
+        specifier: ^7.0.6
+        version: 7.0.6
       date-fns:
         specifier: ^4.1.0
         version: 4.1.0
       octokit:
-        specifier: ^4.1.1
-        version: 4.1.2
+        specifier: ^4.1.4
+        version: 4.1.4
     devDependencies:
       '@octokit/types':
-        specifier: ^13.8.0
-        version: 13.8.0
+        specifier: ^13.10.0
+        version: 13.10.0
       '@slack/types':
-        specifier: ^2.14.0
-        version: 2.14.0
+        specifier: ^2.16.0
+        version: 2.16.0
       '@tsconfig/node20':
         specifier: 20.1.5
         version: 20.1.5
@@ -39,44 +39,44 @@ importers:
 
 packages:
 
-  '@octokit/app@15.1.4':
-    resolution: {integrity: sha512-PM1MqlPAnItjQIKWRmSoJu02+m7Eif4Am3w5C+Ctkw0//QETWMbW2ejBZhcw3aS7wRcFSbS+lH3NoYm614aZVQ==}
+  '@octokit/app@15.1.6':
+    resolution: {integrity: sha512-WELCamoCJo9SN0lf3SWZccf68CF0sBNPQuLYmZ/n87p5qvBJDe9aBtr5dHkh7T9nxWZ608pizwsUbypSzZAiUw==}
     engines: {node: '>= 18'}
 
-  '@octokit/auth-app@7.1.5':
-    resolution: {integrity: sha512-boklS4E6LpbA3nRx+SU2fRKRGZJdOGoSZne/i3Y0B5rfHOcGwFgcXrwDLdtbv4igfDSnAkZaoNBv1GYjPDKRNw==}
+  '@octokit/auth-app@7.2.2':
+    resolution: {integrity: sha512-p6hJtEyQDCJEPN9ijjhEC/kpFHMHN4Gca9r+8S0S8EJi7NaWftaEmexjxxpT1DFBeJpN4u/5RE22ArnyypupJw==}
     engines: {node: '>= 18'}
 
-  '@octokit/auth-oauth-app@8.1.3':
-    resolution: {integrity: sha512-4e6OjVe5rZ8yBe8w7byBjpKtSXFuro7gqeGAAZc7QYltOF8wB93rJl2FE0a4U1Mt88xxPv/mS+25/0DuLk0Ewg==}
+  '@octokit/auth-oauth-app@8.1.4':
+    resolution: {integrity: sha512-71iBa5SflSXcclk/OL3lJzdt4iFs56OJdpBGEBl1wULp7C58uiswZLV6TdRaiAzHP1LT8ezpbHlKuxADb+4NkQ==}
     engines: {node: '>= 18'}
 
-  '@octokit/auth-oauth-device@7.1.3':
-    resolution: {integrity: sha512-BECO/N4B/Uikj0w3GCvjf/odMujtYTP3q82BJSjxC2J3rxTEiZIJ+z2xnRlDb0IE9dQSaTgRqUPVOieSbFcVzg==}
+  '@octokit/auth-oauth-device@7.1.5':
+    resolution: {integrity: sha512-lR00+k7+N6xeECj0JuXeULQ2TSBB/zjTAmNF2+vyGPDEFx1dgk1hTDmL13MjbSmzusuAmuJD8Pu39rjp9jH6yw==}
     engines: {node: '>= 18'}
 
-  '@octokit/auth-oauth-user@5.1.3':
-    resolution: {integrity: sha512-zNPByPn9K7TC+OOHKGxU+MxrE9SZAN11UHYEFLsK2NRn3akJN2LHRl85q+Eypr3tuB2GrKx3rfj2phJdkYCvzw==}
+  '@octokit/auth-oauth-user@5.1.6':
+    resolution: {integrity: sha512-/R8vgeoulp7rJs+wfJ2LtXEVC7pjQTIqDab7wPKwVG6+2v/lUnCOub6vaHmysQBbb45FknM3tbHW8TOVqYHxCw==}
     engines: {node: '>= 18'}
 
   '@octokit/auth-token@5.1.2':
     resolution: {integrity: sha512-JcQDsBdg49Yky2w2ld20IHAlwr8d/d8N6NiOXbtuoPCqzbsiJgF633mVUw3x4mo0H5ypataQIX7SFu3yy44Mpw==}
     engines: {node: '>= 18'}
 
-  '@octokit/auth-unauthenticated@6.1.2':
-    resolution: {integrity: sha512-07DlUGcz/AAVdzu3EYfi/dOyMSHp9YsOxPl/MPmtlVXWiD//GlV8HgZsPhud94DEyx+RfrW0wSl46Lx+AWbOlg==}
+  '@octokit/auth-unauthenticated@6.1.3':
+    resolution: {integrity: sha512-d5gWJla3WdSl1yjbfMpET+hUSFCE15qM0KVSB0H1shyuJihf/RL1KqWoZMIaonHvlNojkL9XtLFp8QeLe+1iwA==}
     engines: {node: '>= 18'}
 
-  '@octokit/core@6.1.4':
-    resolution: {integrity: sha512-lAS9k7d6I0MPN+gb9bKDt7X8SdxknYqAMh44S5L+lNqIN2NuV8nvv3g8rPp7MuRxcOpxpUIATWprO0C34a8Qmg==}
+  '@octokit/core@6.1.6':
+    resolution: {integrity: sha512-kIU8SLQkYWGp3pVKiYzA5OSaNF5EE03P/R8zEmmrG6XwOg5oBjXyQVVIauQ0dgau4zYhpZEhJrvIYt6oM+zZZA==}
     engines: {node: '>= 18'}
 
-  '@octokit/endpoint@10.1.3':
-    resolution: {integrity: sha512-nBRBMpKPhQUxCsQQeW+rCJ/OPSMcj3g0nfHn01zGYZXuNDvvXudF/TYY6APj5THlurerpFN4a/dQAIAaM6BYhA==}
+  '@octokit/endpoint@10.1.4':
+    resolution: {integrity: sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA==}
     engines: {node: '>= 18'}
 
-  '@octokit/graphql@8.2.1':
-    resolution: {integrity: sha512-n57hXtOoHrhwTWdvhVkdJHdhTv0JstjDbDRhJfwIRNfFqmSo1DaK/mD2syoNUoLCyqSjBpGAKOG0BuwF392slw==}
+  '@octokit/graphql@8.2.2':
+    resolution: {integrity: sha512-Yi8hcoqsrXGdt0yObxbebHXFOiUA+2v3n53epuOg1QUgOB6c4XzvisBNVXJSl8RYA5KrDuSL2yq9Qmqe5N0ryA==}
     engines: {node: '>= 18'}
 
   '@octokit/oauth-app@7.1.6':
@@ -87,15 +87,18 @@ packages:
     resolution: {integrity: sha512-ooXV8GBSabSWyhLUowlMIVd9l1s2nsOGQdlP2SQ4LnkEsGXzeCvbSbCPdZThXhEFzleGPwbapT0Sb+YhXRyjCA==}
     engines: {node: '>= 18'}
 
-  '@octokit/oauth-methods@5.1.4':
-    resolution: {integrity: sha512-Jc/ycnePClOvO1WL7tlC+TRxOFtyJBGuTDsL4dzXNiVZvzZdrPuNw7zHI3qJSUX2n6RLXE5L0SkFmYyNaVUFoQ==}
+  '@octokit/oauth-methods@5.1.5':
+    resolution: {integrity: sha512-Ev7K8bkYrYLhoOSZGVAGsLEscZQyq7XQONCBBAl2JdMg7IT3PQn/y8P0KjloPoYpI5UylqYrLeUcScaYWXwDvw==}
     engines: {node: '>= 18'}
 
-  '@octokit/openapi-types@23.0.1':
-    resolution: {integrity: sha512-izFjMJ1sir0jn0ldEKhZ7xegCTj/ObmEDlEfpFrx4k/JyZSMRHbO3/rBwgE7f3m2DHt+RrNGIVw4wSmwnm3t/g==}
+  '@octokit/openapi-types@24.2.0':
+    resolution: {integrity: sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==}
 
-  '@octokit/openapi-webhooks-types@9.1.0':
-    resolution: {integrity: sha512-bO1D2jLdU8qEvqmbWjNxJzDYSFT4wesiYKIKP6f4LaM0XUGtn/0LBv/20hu9YqcnpdX38X5o/xANTMtIAqdwYw==}
+  '@octokit/openapi-types@25.1.0':
+    resolution: {integrity: sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==}
+
+  '@octokit/openapi-webhooks-types@11.0.0':
+    resolution: {integrity: sha512-ZBzCFj98v3SuRM7oBas6BHZMJRadlnDoeFfvm1olVxZnYeU6Vh97FhPxyS5aLh5pN51GYv2I51l/hVUAVkGBlA==}
 
   '@octokit/plugin-paginate-graphql@5.2.4':
     resolution: {integrity: sha512-pLZES1jWaOynXKHOqdnwZ5ULeVR6tVVCMm+AUbp0htdcyXDU95WbkYdU4R2ej1wKj5Tu94Mee2Ne0PjPO9cCyA==}
@@ -103,8 +106,14 @@ packages:
     peerDependencies:
       '@octokit/core': '>=6'
 
-  '@octokit/plugin-paginate-rest@11.4.2':
-    resolution: {integrity: sha512-BXJ7XPCTDXFF+wxcg/zscfgw2O/iDPtNSkwwR1W1W5c4Mb3zav/M2XvxQ23nVmKj7jpweB4g8viMeCQdm7LMVA==}
+  '@octokit/plugin-paginate-rest@11.6.0':
+    resolution: {integrity: sha512-n5KPteiF7pWKgBIBJSk8qzoZWcUkza2O6A0za97pMGVrGfPdltxrfmfF5GucHYvHGZD8BdaZmmHGz5cX/3gdpw==}
+    engines: {node: '>= 18'}
+    peerDependencies:
+      '@octokit/core': '>=6'
+
+  '@octokit/plugin-paginate-rest@12.0.0':
+    resolution: {integrity: sha512-MPd6WK1VtZ52lFrgZ0R2FlaoiWllzgqFHaSZxvp72NmoDeZ0m8GeJdg4oB6ctqMTYyrnDYp592Xma21mrgiyDA==}
     engines: {node: '>= 18'}
     peerDependencies:
       '@octokit/core': '>=6'
@@ -115,53 +124,62 @@ packages:
     peerDependencies:
       '@octokit/core': '>=6'
 
-  '@octokit/plugin-rest-endpoint-methods@13.3.1':
-    resolution: {integrity: sha512-o8uOBdsyR+WR8MK9Cco8dCgvG13H1RlM1nWnK/W7TEACQBFux/vPREgKucxUfuDQ5yi1T3hGf4C5ZmZXAERgwQ==}
+  '@octokit/plugin-rest-endpoint-methods@13.5.0':
+    resolution: {integrity: sha512-9Pas60Iv9ejO3WlAX3maE1+38c5nqbJXV5GrncEfkndIpZrJ/WPMRd2xYDcPPEt5yzpxcjw9fWNoPhsSGzqKqw==}
+    engines: {node: '>= 18'}
+    peerDependencies:
+      '@octokit/core': '>=6'
+
+  '@octokit/plugin-rest-endpoint-methods@14.0.0':
+    resolution: {integrity: sha512-iQt6ovem4b7zZYZQtdv+PwgbL5VPq37th1m2x2TdkgimIDJpsi2A6Q/OI/23i/hR6z5mL0EgisNR4dcbmckSZQ==}
     engines: {node: '>= 18'}
     peerDependencies:
       '@octokit/core': '>=6'
 
-  '@octokit/plugin-retry@7.1.4':
-    resolution: {integrity: sha512-7AIP4p9TttKN7ctygG4BtR7rrB0anZqoU9ThXFk8nETqIfvgPUANTSYHqWYknK7W3isw59LpZeLI8pcEwiJdRg==}
+  '@octokit/plugin-retry@7.2.1':
+    resolution: {integrity: sha512-wUc3gv0D6vNHpGxSaR3FlqJpTXGWgqmk607N9L3LvPL4QjaxDgX/1nY2mGpT37Khn+nlIXdljczkRnNdTTV3/A==}
     engines: {node: '>= 18'}
     peerDependencies:
       '@octokit/core': '>=6'
 
-  '@octokit/plugin-throttling@9.4.0':
-    resolution: {integrity: sha512-IOlXxXhZA4Z3m0EEYtrrACkuHiArHLZ3CvqWwOez/pURNqRuwfoFlTPbN5Muf28pzFuztxPyiUiNwz8KctdZaQ==}
+  '@octokit/plugin-throttling@10.0.0':
+    resolution: {integrity: sha512-Kuq5/qs0DVYTHZuBAzCZStCzo2nKvVRo/TDNhCcpC2TKiOGz/DisXMCvjt3/b5kr6SCI1Y8eeeJTHBxxpFvZEg==}
     engines: {node: '>= 18'}
     peerDependencies:
       '@octokit/core': ^6.1.3
 
-  '@octokit/request-error@6.1.7':
-    resolution: {integrity: sha512-69NIppAwaauwZv6aOzb+VVLwt+0havz9GT5YplkeJv7fG7a40qpLt/yZKyiDxAhgz0EtgNdNcb96Z0u+Zyuy2g==}
+  '@octokit/request-error@6.1.8':
+    resolution: {integrity: sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==}
     engines: {node: '>= 18'}
 
-  '@octokit/request@9.2.2':
-    resolution: {integrity: sha512-dZl0ZHx6gOQGcffgm1/Sf6JfEpmh34v3Af2Uci02vzUYz6qEN6zepoRtmybWXIGXFIK8K9ylE3b+duCWqhArtg==}
+  '@octokit/request@9.2.4':
+    resolution: {integrity: sha512-q8ybdytBmxa6KogWlNa818r0k1wlqzNC+yNkcQDECHvQo8Vmstrg18JwqJHdJdUiHD2sjlwBgSm9kHkOKe2iyA==}
     engines: {node: '>= 18'}
 
   '@octokit/rest@21.1.1':
     resolution: {integrity: sha512-sTQV7va0IUVZcntzy1q3QqPm/r8rWtDCqpRAmb8eXXnKkjoQEtFe3Nt5GTVsHft+R6jJoHeSiVLcgcvhtue/rg==}
     engines: {node: '>= 18'}
 
-  '@octokit/types@13.8.0':
-    resolution: {integrity: sha512-x7DjTIbEpEWXK99DMd01QfWy0hd5h4EN+Q7shkdKds3otGQP+oWE/y0A76i1OvH9fygo4ddvNf7ZvF0t78P98A==}
+  '@octokit/types@13.10.0':
+    resolution: {integrity: sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==}
+
+  '@octokit/types@14.1.0':
+    resolution: {integrity: sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==}
 
   '@octokit/webhooks-methods@5.1.1':
     resolution: {integrity: sha512-NGlEHZDseJTCj8TMMFehzwa9g7On4KJMPVHDSrHxCQumL6uSQR8wIkP/qesv52fXqV1BPf4pTxwtS31ldAt9Xg==}
     engines: {node: '>= 18'}
 
-  '@octokit/webhooks@13.6.1':
-    resolution: {integrity: sha512-vk0jnc5k0/mLMUI4IA9LfSYkLs3OHtfa7B3h4aRG6to912V3wIG8lS/wKwatwYxRkAug4oE8is0ERRI8pzoYTw==}
+  '@octokit/webhooks@13.9.1':
+    resolution: {integrity: sha512-Nss2b4Jyn4wB3EAqAPJypGuCJFalz/ZujKBQQ5934To7Xw9xjf4hkr/EAByxQY7hp7MKd790bWGz7XYSTsHmaw==}
     engines: {node: '>= 18'}
 
-  '@slack/types@2.14.0':
-    resolution: {integrity: sha512-n0EGm7ENQRxlXbgKSrQZL69grzg1gHLAVd+GlRVQJ1NSORo0FrApR7wql/gaKdu2n4TO83Sq/AmeUOqD60aXUA==}
+  '@slack/types@2.16.0':
+    resolution: {integrity: sha512-bICnyukvdklXhwxprR3uF1+ZFkTvWTZge4evlCS4G1H1HU6QLY68AcjqzQRymf7/5gNt6Y4OBb4NdviheyZcAg==}
     engines: {node: '>= 12.13.0', npm: '>= 6.12.0'}
 
-  '@slack/webhook@7.0.5':
-    resolution: {integrity: sha512-PmbZx89+SmH4zt78FUwe4If8hWX2MAIRmGXjmlF0A8PwyJb/H7CWaQYV6DDlZn1+7Zs6CEytKH0ejEE/idVSDw==}
+  '@slack/webhook@7.0.6':
+    resolution: {integrity: sha512-RvNCcOjNbzl5uQ2TZsbTJ+A+5ptoWMwnyd/W4lKzeXFToIwebeaZiuntcP0usmhZHj1LH9H1T9WN6Bt1B/DLyg==}
     engines: {node: '>= 18', npm: '>= 8.6.0'}
 
   '@tsconfig/node20@20.1.5':
@@ -170,17 +188,17 @@ packages:
   '@tsconfig/strictest@2.0.5':
     resolution: {integrity: sha512-ec4tjL2Rr0pkZ5hww65c+EEPYwxOi4Ryv+0MtjeaSQRJyq322Q27eOQiFbuNgw2hpL4hB1/W/HBGk3VKS43osg==}
 
-  '@types/aws-lambda@8.10.147':
-    resolution: {integrity: sha512-nD0Z9fNIZcxYX5Mai2CTmFD7wX7UldCkW2ezCF8D1T5hdiLsnTWDGRpfRYntU6VjTdLQjOvyszru7I1c1oCQew==}
+  '@types/aws-lambda@8.10.152':
+    resolution: {integrity: sha512-soT/c2gYBnT5ygwiHPmd9a1bftj462NWVk2tKCc1PYHSIacB2UwbTS2zYG4jzag1mRDuzg/OjtxQjQ2NKRB6Rw==}
 
-  '@types/node@22.13.13':
-    resolution: {integrity: sha512-ClsL5nMwKaBRwPcCvH8E7+nU4GxHVx1axNvMZTFHMEfNI7oahimt26P5zjVCRrjiIWj6YFXfE1v3dEp94wLcGQ==}
+  '@types/node@24.3.0':
+    resolution: {integrity: sha512-aPTXCrfwnDLj4VvXrm+UUCQjNEvJgNA8s5F1cvwQU+3KNltTOkBm1j30uNLyqqPNe7gE3KFzImYoZEfLhp4Yow==}
 
   asynckit@0.4.0:
     resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
 
-  axios@1.8.4:
-    resolution: {integrity: sha512-eBSYY4Y68NNlHbHBMdeDmKNtDgXWhQsJcGqzO3iLUM0GraQFSS9cVgPX5I9b3lbdFKyYoAEGAZF1DwhTaljNAw==}
+  axios@1.11.0:
+    resolution: {integrity: sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==}
 
   before-after-hook@3.0.2:
     resolution: {integrity: sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A==}
@@ -226,8 +244,8 @@ packages:
   fast-content-type-parse@2.0.1:
     resolution: {integrity: sha512-nGqtvLrj5w0naR6tDPfB4cUmYCqouzyQiz6C5y/LtcDllJdrcc6WaWW6iXyIIOErTa/XRybj28aasdn4LkVk6Q==}
 
-  follow-redirects@1.15.9:
-    resolution: {integrity: sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==}
+  follow-redirects@1.15.11:
+    resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==}
     engines: {node: '>=4.0'}
     peerDependencies:
       debug: '*'
@@ -235,8 +253,8 @@ packages:
       debug:
         optional: true
 
-  form-data@4.0.2:
-    resolution: {integrity: sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==}
+  form-data@4.0.4:
+    resolution: {integrity: sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==}
     engines: {node: '>= 6'}
 
   function-bind@1.1.2:
@@ -278,8 +296,8 @@ packages:
     resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
     engines: {node: '>= 0.6'}
 
-  octokit@4.1.2:
-    resolution: {integrity: sha512-0kcTxJOK3yQrJsRb8wKa28hlTze4QOz4sLuUnfXXnhboDhFKgv8LxS86tFwbsafDW9JZ08ByuVAE8kQbYJIZkA==}
+  octokit@4.1.4:
+    resolution: {integrity: sha512-cRvxRte6FU3vAHRC9+PMSY3D+mRAs2Rd9emMoqp70UGRvJRM3sbAoim2IXRZNNsf8wVfn4sGxVBHRAP+JBVX/g==}
     engines: {node: '>= 18'}
 
   proxy-from-env@1.1.0:
@@ -294,182 +312,198 @@ packages:
     engines: {node: '>=14.17'}
     hasBin: true
 
-  undici-types@6.20.0:
-    resolution: {integrity: sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==}
+  undici-types@7.10.0:
+    resolution: {integrity: sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==}
 
-  universal-github-app-jwt@2.2.0:
-    resolution: {integrity: sha512-G5o6f95b5BggDGuUfKDApKaCgNYy2x7OdHY0zSMF081O0EJobw+1130VONhrA7ezGSV2FNOGyM+KQpQZAr9bIQ==}
+  universal-github-app-jwt@2.2.2:
+    resolution: {integrity: sha512-dcmbeSrOdTnsjGjUfAlqNDJrhxXizjAz94ija9Qw8YkZ1uu0d+GoZzyH+Jb9tIIqvGsadUfwg+22k5aDqqwzbw==}
 
-  universal-user-agent@7.0.2:
-    resolution: {integrity: sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==}
+  universal-user-agent@7.0.3:
+    resolution: {integrity: sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==}
 
 snapshots:
 
-  '@octokit/app@15.1.4':
+  '@octokit/app@15.1.6':
     dependencies:
-      '@octokit/auth-app': 7.1.5
-      '@octokit/auth-unauthenticated': 6.1.2
-      '@octokit/core': 6.1.4
+      '@octokit/auth-app': 7.2.2
+      '@octokit/auth-unauthenticated': 6.1.3
+      '@octokit/core': 6.1.6
       '@octokit/oauth-app': 7.1.6
-      '@octokit/plugin-paginate-rest': 11.4.2(@octokit/core@6.1.4)
-      '@octokit/types': 13.8.0
-      '@octokit/webhooks': 13.6.1
+      '@octokit/plugin-paginate-rest': 12.0.0(@octokit/core@6.1.6)
+      '@octokit/types': 14.1.0
+      '@octokit/webhooks': 13.9.1
 
-  '@octokit/auth-app@7.1.5':
+  '@octokit/auth-app@7.2.2':
     dependencies:
-      '@octokit/auth-oauth-app': 8.1.3
-      '@octokit/auth-oauth-user': 5.1.3
-      '@octokit/request': 9.2.2
-      '@octokit/request-error': 6.1.7
-      '@octokit/types': 13.8.0
+      '@octokit/auth-oauth-app': 8.1.4
+      '@octokit/auth-oauth-user': 5.1.6
+      '@octokit/request': 9.2.4
+      '@octokit/request-error': 6.1.8
+      '@octokit/types': 14.1.0
       toad-cache: 3.7.0
-      universal-github-app-jwt: 2.2.0
-      universal-user-agent: 7.0.2
+      universal-github-app-jwt: 2.2.2
+      universal-user-agent: 7.0.3
 
-  '@octokit/auth-oauth-app@8.1.3':
+  '@octokit/auth-oauth-app@8.1.4':
     dependencies:
-      '@octokit/auth-oauth-device': 7.1.3
-      '@octokit/auth-oauth-user': 5.1.3
-      '@octokit/request': 9.2.2
-      '@octokit/types': 13.8.0
-      universal-user-agent: 7.0.2
+      '@octokit/auth-oauth-device': 7.1.5
+      '@octokit/auth-oauth-user': 5.1.6
+      '@octokit/request': 9.2.4
+      '@octokit/types': 14.1.0
+      universal-user-agent: 7.0.3
 
-  '@octokit/auth-oauth-device@7.1.3':
+  '@octokit/auth-oauth-device@7.1.5':
     dependencies:
-      '@octokit/oauth-methods': 5.1.4
-      '@octokit/request': 9.2.2
-      '@octokit/types': 13.8.0
-      universal-user-agent: 7.0.2
+      '@octokit/oauth-methods': 5.1.5
+      '@octokit/request': 9.2.4
+      '@octokit/types': 14.1.0
+      universal-user-agent: 7.0.3
 
-  '@octokit/auth-oauth-user@5.1.3':
+  '@octokit/auth-oauth-user@5.1.6':
     dependencies:
-      '@octokit/auth-oauth-device': 7.1.3
-      '@octokit/oauth-methods': 5.1.4
-      '@octokit/request': 9.2.2
-      '@octokit/types': 13.8.0
-      universal-user-agent: 7.0.2
+      '@octokit/auth-oauth-device': 7.1.5
+      '@octokit/oauth-methods': 5.1.5
+      '@octokit/request': 9.2.4
+      '@octokit/types': 14.1.0
+      universal-user-agent: 7.0.3
 
   '@octokit/auth-token@5.1.2': {}
 
-  '@octokit/auth-unauthenticated@6.1.2':
+  '@octokit/auth-unauthenticated@6.1.3':
     dependencies:
-      '@octokit/request-error': 6.1.7
-      '@octokit/types': 13.8.0
+      '@octokit/request-error': 6.1.8
+      '@octokit/types': 14.1.0
 
-  '@octokit/core@6.1.4':
+  '@octokit/core@6.1.6':
     dependencies:
       '@octokit/auth-token': 5.1.2
-      '@octokit/graphql': 8.2.1
-      '@octokit/request': 9.2.2
-      '@octokit/request-error': 6.1.7
-      '@octokit/types': 13.8.0
+      '@octokit/graphql': 8.2.2
+      '@octokit/request': 9.2.4
+      '@octokit/request-error': 6.1.8
+      '@octokit/types': 14.1.0
       before-after-hook: 3.0.2
-      universal-user-agent: 7.0.2
+      universal-user-agent: 7.0.3
 
-  '@octokit/endpoint@10.1.3':
+  '@octokit/endpoint@10.1.4':
     dependencies:
-      '@octokit/types': 13.8.0
-      universal-user-agent: 7.0.2
+      '@octokit/types': 14.1.0
+      universal-user-agent: 7.0.3
 
-  '@octokit/graphql@8.2.1':
+  '@octokit/graphql@8.2.2':
     dependencies:
-      '@octokit/request': 9.2.2
-      '@octokit/types': 13.8.0
-      universal-user-agent: 7.0.2
+      '@octokit/request': 9.2.4
+      '@octokit/types': 14.1.0
+      universal-user-agent: 7.0.3
 
   '@octokit/oauth-app@7.1.6':
     dependencies:
-      '@octokit/auth-oauth-app': 8.1.3
-      '@octokit/auth-oauth-user': 5.1.3
-      '@octokit/auth-unauthenticated': 6.1.2
-      '@octokit/core': 6.1.4
+      '@octokit/auth-oauth-app': 8.1.4
+      '@octokit/auth-oauth-user': 5.1.6
+      '@octokit/auth-unauthenticated': 6.1.3
+      '@octokit/core': 6.1.6
       '@octokit/oauth-authorization-url': 7.1.1
-      '@octokit/oauth-methods': 5.1.4
-      '@types/aws-lambda': 8.10.147
-      universal-user-agent: 7.0.2
+      '@octokit/oauth-methods': 5.1.5
+      '@types/aws-lambda': 8.10.152
+      universal-user-agent: 7.0.3
 
   '@octokit/oauth-authorization-url@7.1.1': {}
 
-  '@octokit/oauth-methods@5.1.4':
+  '@octokit/oauth-methods@5.1.5':
     dependencies:
       '@octokit/oauth-authorization-url': 7.1.1
-      '@octokit/request': 9.2.2
-      '@octokit/request-error': 6.1.7
-      '@octokit/types': 13.8.0
+      '@octokit/request': 9.2.4
+      '@octokit/request-error': 6.1.8
+      '@octokit/types': 14.1.0
+
+  '@octokit/openapi-types@24.2.0': {}
 
-  '@octokit/openapi-types@23.0.1': {}
+  '@octokit/openapi-types@25.1.0': {}
 
-  '@octokit/openapi-webhooks-types@9.1.0': {}
+  '@octokit/openapi-webhooks-types@11.0.0': {}
 
-  '@octokit/plugin-paginate-graphql@5.2.4(@octokit/core@6.1.4)':
+  '@octokit/plugin-paginate-graphql@5.2.4(@octokit/core@6.1.6)':
     dependencies:
-      '@octokit/core': 6.1.4
+      '@octokit/core': 6.1.6
 
-  '@octokit/plugin-paginate-rest@11.4.2(@octokit/core@6.1.4)':
+  '@octokit/plugin-paginate-rest@11.6.0(@octokit/core@6.1.6)':
     dependencies:
-      '@octokit/core': 6.1.4
-      '@octokit/types': 13.8.0
+      '@octokit/core': 6.1.6
+      '@octokit/types': 13.10.0
 
-  '@octokit/plugin-request-log@5.3.1(@octokit/core@6.1.4)':
+  '@octokit/plugin-paginate-rest@12.0.0(@octokit/core@6.1.6)':
     dependencies:
-      '@octokit/core': 6.1.4
+      '@octokit/core': 6.1.6
+      '@octokit/types': 14.1.0
 
-  '@octokit/plugin-rest-endpoint-methods@13.3.1(@octokit/core@6.1.4)':
+  '@octokit/plugin-request-log@5.3.1(@octokit/core@6.1.6)':
     dependencies:
-      '@octokit/core': 6.1.4
-      '@octokit/types': 13.8.0
+      '@octokit/core': 6.1.6
 
-  '@octokit/plugin-retry@7.1.4(@octokit/core@6.1.4)':
+  '@octokit/plugin-rest-endpoint-methods@13.5.0(@octokit/core@6.1.6)':
     dependencies:
-      '@octokit/core': 6.1.4
-      '@octokit/request-error': 6.1.7
-      '@octokit/types': 13.8.0
+      '@octokit/core': 6.1.6
+      '@octokit/types': 13.10.0
+
+  '@octokit/plugin-rest-endpoint-methods@14.0.0(@octokit/core@6.1.6)':
+    dependencies:
+      '@octokit/core': 6.1.6
+      '@octokit/types': 14.1.0
+
+  '@octokit/plugin-retry@7.2.1(@octokit/core@6.1.6)':
+    dependencies:
+      '@octokit/core': 6.1.6
+      '@octokit/request-error': 6.1.8
+      '@octokit/types': 14.1.0
       bottleneck: 2.19.5
 
-  '@octokit/plugin-throttling@9.4.0(@octokit/core@6.1.4)':
+  '@octokit/plugin-throttling@10.0.0(@octokit/core@6.1.6)':
     dependencies:
-      '@octokit/core': 6.1.4
-      '@octokit/types': 13.8.0
+      '@octokit/core': 6.1.6
+      '@octokit/types': 14.1.0
       bottleneck: 2.19.5
 
-  '@octokit/request-error@6.1.7':
+  '@octokit/request-error@6.1.8':
     dependencies:
-      '@octokit/types': 13.8.0
+      '@octokit/types': 14.1.0
 
-  '@octokit/request@9.2.2':
+  '@octokit/request@9.2.4':
     dependencies:
-      '@octokit/endpoint': 10.1.3
-      '@octokit/request-error': 6.1.7
-      '@octokit/types': 13.8.0
+      '@octokit/endpoint': 10.1.4
+      '@octokit/request-error': 6.1.8
+      '@octokit/types': 14.1.0
       fast-content-type-parse: 2.0.1
-      universal-user-agent: 7.0.2
+      universal-user-agent: 7.0.3
 
   '@octokit/rest@21.1.1':
     dependencies:
-      '@octokit/core': 6.1.4
-      '@octokit/plugin-paginate-rest': 11.4.2(@octokit/core@6.1.4)
-      '@octokit/plugin-request-log': 5.3.1(@octokit/core@6.1.4)
-      '@octokit/plugin-rest-endpoint-methods': 13.3.1(@octokit/core@6.1.4)
+      '@octokit/core': 6.1.6
+      '@octokit/plugin-paginate-rest': 11.6.0(@octokit/core@6.1.6)
+      '@octokit/plugin-request-log': 5.3.1(@octokit/core@6.1.6)
+      '@octokit/plugin-rest-endpoint-methods': 13.5.0(@octokit/core@6.1.6)
+
+  '@octokit/types@13.10.0':
+    dependencies:
+      '@octokit/openapi-types': 24.2.0
 
-  '@octokit/types@13.8.0':
+  '@octokit/types@14.1.0':
     dependencies:
-      '@octokit/openapi-types': 23.0.1
+      '@octokit/openapi-types': 25.1.0
 
   '@octokit/webhooks-methods@5.1.1': {}
 
-  '@octokit/webhooks@13.6.1':
+  '@octokit/webhooks@13.9.1':
     dependencies:
-      '@octokit/openapi-webhooks-types': 9.1.0
-      '@octokit/request-error': 6.1.7
+      '@octokit/openapi-webhooks-types': 11.0.0
+      '@octokit/request-error': 6.1.8
       '@octokit/webhooks-methods': 5.1.1
 
-  '@slack/types@2.14.0': {}
+  '@slack/types@2.16.0': {}
 
-  '@slack/webhook@7.0.5':
+  '@slack/webhook@7.0.6':
     dependencies:
-      '@slack/types': 2.14.0
-      '@types/node': 22.13.13
-      axios: 1.8.4
+      '@slack/types': 2.16.0
+      '@types/node': 24.3.0
+      axios: 1.11.0
     transitivePeerDependencies:
       - debug
 
@@ -477,18 +511,18 @@ snapshots:
 
   '@tsconfig/strictest@2.0.5': {}
 
-  '@types/aws-lambda@8.10.147': {}
+  '@types/aws-lambda@8.10.152': {}
 
-  '@types/node@22.13.13':
+  '@types/node@24.3.0':
     dependencies:
-      undici-types: 6.20.0
+      undici-types: 7.10.0
 
   asynckit@0.4.0: {}
 
-  axios@1.8.4:
+  axios@1.11.0:
     dependencies:
-      follow-redirects: 1.15.9
-      form-data: 4.0.2
+      follow-redirects: 1.15.11
+      form-data: 4.0.4
       proxy-from-env: 1.1.0
     transitivePeerDependencies:
       - debug
@@ -533,13 +567,14 @@ snapshots:
 
   fast-content-type-parse@2.0.1: {}
 
-  follow-redirects@1.15.9: {}
+  follow-redirects@1.15.11: {}
 
-  form-data@4.0.2:
+  form-data@4.0.4:
     dependencies:
       asynckit: 0.4.0
       combined-stream: 1.0.8
       es-set-tostringtag: 2.1.0
+      hasown: 2.0.2
       mime-types: 2.1.35
 
   function-bind@1.1.2: {}
@@ -582,18 +617,19 @@ snapshots:
     dependencies:
       mime-db: 1.52.0
 
-  octokit@4.1.2:
+  octokit@4.1.4:
     dependencies:
-      '@octokit/app': 15.1.4
-      '@octokit/core': 6.1.4
+      '@octokit/app': 15.1.6
+      '@octokit/core': 6.1.6
       '@octokit/oauth-app': 7.1.6
-      '@octokit/plugin-paginate-graphql': 5.2.4(@octokit/core@6.1.4)
-      '@octokit/plugin-paginate-rest': 11.4.2(@octokit/core@6.1.4)
-      '@octokit/plugin-rest-endpoint-methods': 13.3.1(@octokit/core@6.1.4)
-      '@octokit/plugin-retry': 7.1.4(@octokit/core@6.1.4)
-      '@octokit/plugin-throttling': 9.4.0(@octokit/core@6.1.4)
-      '@octokit/request-error': 6.1.7
-      '@octokit/types': 13.8.0
+      '@octokit/plugin-paginate-graphql': 5.2.4(@octokit/core@6.1.6)
+      '@octokit/plugin-paginate-rest': 12.0.0(@octokit/core@6.1.6)
+      '@octokit/plugin-rest-endpoint-methods': 14.0.0(@octokit/core@6.1.6)
+      '@octokit/plugin-retry': 7.2.1(@octokit/core@6.1.6)
+      '@octokit/plugin-throttling': 10.0.0(@octokit/core@6.1.6)
+      '@octokit/request-error': 6.1.8
+      '@octokit/types': 14.1.0
+      '@octokit/webhooks': 13.9.1
 
   proxy-from-env@1.1.0: {}
 
@@ -601,8 +637,8 @@ snapshots:
 
   typescript@5.7.3: {}
 
-  undici-types@6.20.0: {}
+  undici-types@7.10.0: {}
 
-  universal-github-app-jwt@2.2.0: {}
+  universal-github-app-jwt@2.2.2: {}
 
-  universal-user-agent@7.0.2: {}
+  universal-user-agent@7.0.3: {}

tooling/workspace-hack/Cargo.toml 🔗

@@ -568,7 +568,7 @@ tokio-rustls = { version = "0.26", default-features = false, features = ["loggin
 tokio-socks = { version = "0.5", features = ["futures-io"] }
 tokio-stream = { version = "0.1", features = ["fs"] }
 tower = { version = "0.5", default-features = false, features = ["timeout", "util"] }
-winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "errhandlingapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] }
+winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] }
 windows-core = { version = "0.61" }
 windows-numerics = { version = "0.2" }
 windows-sys-73dcd821b1037cfd = { package = "windows-sys", version = "0.59", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_Globalization", "Win32_NetworkManagement_IpHelper", "Win32_Networking_WinSock", "Win32_Security_Authentication_Identity", "Win32_Security_Credentials", "Win32_Security_Cryptography", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Ioctl", "Win32_System_Kernel", "Win32_System_LibraryLoader", "Win32_System_Memory", "Win32_System_Performance", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell", "Win32_UI_WindowsAndMessaging"] }
@@ -592,7 +592,7 @@ tokio-rustls = { version = "0.26", default-features = false, features = ["loggin
 tokio-socks = { version = "0.5", features = ["futures-io"] }
 tokio-stream = { version = "0.1", features = ["fs"] }
 tower = { version = "0.5", default-features = false, features = ["timeout", "util"] }
-winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "errhandlingapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] }
+winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] }
 windows-core = { version = "0.61" }
 windows-numerics = { version = "0.2" }
 windows-sys-73dcd821b1037cfd = { package = "windows-sys", version = "0.59", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_Globalization", "Win32_NetworkManagement_IpHelper", "Win32_Networking_WinSock", "Win32_Security_Authentication_Identity", "Win32_Security_Credentials", "Win32_Security_Cryptography", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Ioctl", "Win32_System_Kernel", "Win32_System_LibraryLoader", "Win32_System_Memory", "Win32_System_Performance", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell", "Win32_UI_WindowsAndMessaging"] }

typos.toml 🔗

@@ -36,7 +36,10 @@ extend-exclude = [
     # glsl isn't recognized by this tool.
     "extensions/glsl/languages/glsl/",
     # Windows likes its abbreviations.
-    "crates/gpui/src/platform/windows/",
+    "crates/gpui/src/platform/windows/directx_renderer.rs",
+    "crates/gpui/src/platform/windows/events.rs",
+    "crates/gpui/src/platform/windows/direct_write.rs",
+    "crates/gpui/src/platform/windows/window.rs",
     # Some typos in the base mdBook CSS.
     "docs/theme/css/",
     # Spellcheck triggers on `|Fixe[sd]|` regex part.