Merge remote-tracking branch 'origin/main' into provider-extensions

Created by Richard Feldman

Change summary

.github/ISSUE_TEMPLATE/10_bug_report.yml                   |  16 
.github/workflows/after_release.yml                        |  25 
Cargo.lock                                                 |   9 
Cargo.toml                                                 |   2 
assets/keymaps/default-linux.json                          |  12 
assets/keymaps/default-macos.json                          |   8 
assets/keymaps/default-windows.json                        |  17 
assets/keymaps/vim.json                                    |   9 
assets/settings/default.json                               | 368 +++---
crates/agent_servers/src/acp.rs                            |  21 
crates/context_server/src/transport/stdio_transport.rs     |   6 
crates/diagnostics/src/diagnostics.rs                      |  81 
crates/edit_prediction/Cargo.toml                          |   2 
crates/edit_prediction/src/edit_prediction.rs              | 117 +
crates/edit_prediction/src/edit_prediction_tests.rs        |  65 
crates/edit_prediction/src/license_detection.rs            |   3 
crates/edit_prediction/src/udiff.rs                        |  55 
crates/edit_prediction/src/zed_edit_prediction_delegate.rs | 120 +-
crates/edit_prediction/src/zeta2.rs                        |  20 
crates/edit_prediction_cli/Cargo.toml                      |   4 
crates/edit_prediction_cli/src/distill.rs                  |  14 
crates/edit_prediction_cli/src/example.rs                  |  64 
crates/edit_prediction_cli/src/format_prompt.rs            | 150 +-
crates/edit_prediction_cli/src/headless.rs                 |  22 
crates/edit_prediction_cli/src/load_project.rs             | 150 +
crates/edit_prediction_cli/src/main.rs                     |  62 
crates/edit_prediction_cli/src/predict.rs                  |  48 
crates/edit_prediction_cli/src/progress.rs                 | 474 ++++++++
crates/edit_prediction_cli/src/retrieve_context.rs         | 211 +--
crates/edit_prediction_cli/src/score.rs                    |   3 
crates/edit_prediction_cli/src/teacher.prompt.md           |   1 
crates/editor/src/bracket_colorization.rs                  |   4 
crates/editor/src/display_map.rs                           |  42 
crates/editor/src/display_map/block_map.rs                 |  12 
crates/editor/src/display_map/wrap_map.rs                  |   6 
crates/editor/src/editor.rs                                |  21 
crates/editor/src/editor_tests.rs                          |  71 
crates/editor/src/highlight_matching_bracket.rs            |   1 
crates/editor/src/hover_popover.rs                         |   5 
crates/eval/src/instance.rs                                |   2 
crates/fs/src/fake_git_repo.rs                             |  13 
crates/fs/src/fs.rs                                        |  23 
crates/git/src/blame.rs                                    |  10 
crates/git/src/repository.rs                               |  16 
crates/git_ui/src/blame_ui.rs                              |  14 
crates/git_ui/src/commit_modal.rs                          |  59 
crates/git_ui/src/commit_tooltip.rs                        |  46 
crates/git_ui/src/commit_view.rs                           |  80 -
crates/git_ui/src/conflict_view.rs                         |   3 
crates/git_ui/src/git_panel.rs                             | 181 ++
crates/gpui/Cargo.toml                                     |   5 
crates/gpui/build.rs                                       |   2 
crates/gpui/src/app.rs                                     |  27 
crates/gpui/src/app/context.rs                             |  23 
crates/gpui/src/executor.rs                                | 220 +++
crates/gpui/src/geometry.rs                                |   4 
crates/gpui/src/gpui.rs                                    |   9 
crates/gpui/src/platform.rs                                |  19 
crates/gpui/src/platform/linux/dispatcher.rs               | 329 ++++
crates/gpui/src/platform/linux/platform.rs                 |  10 
crates/gpui/src/platform/linux/wayland/client.rs           |  10 
crates/gpui/src/platform/linux/x11/client.rs               |   8 
crates/gpui/src/platform/mac/dispatcher.rs                 | 145 ++
crates/gpui/src/platform/mac/display.rs                    |  53 
crates/gpui/src/platform/test/dispatcher.rs                |  12 
crates/gpui/src/platform/windows/dispatcher.rs             |  85 
crates/gpui/src/platform/windows/display.rs                |  32 
crates/gpui/src/platform/windows/events.rs                 |   3 
crates/gpui/src/platform/windows/platform.rs               |  24 
crates/gpui/src/platform/windows/window.rs                 |   4 
crates/gpui/src/profiler.rs                                |  16 
crates/gpui/src/queue.rs                                   | 329 +++++
crates/gpui/src/styled.rs                                  |  15 
crates/gpui/src/window.rs                                  | 200 +-
crates/image_viewer/src/image_info.rs                      |   4 
crates/language/src/buffer.rs                              | 273 ++--
crates/language/src/buffer/row_chunk.rs                    |   2 
crates/language/src/language.rs                            |  23 
crates/language_models/src/provider/open_ai.rs             |   8 
crates/languages/src/javascript/highlights.scm             | 107 +
crates/languages/src/jsdoc/highlights.scm                  |   1 
crates/languages/src/markdown/config.toml                  |   6 
crates/languages/src/markdown/indents.scm                  |   2 
crates/languages/src/rust.rs                               |   6 
crates/languages/src/tsx/highlights-jsx.scm                |   0 
crates/languages/src/tsx/highlights.scm                    | 161 ++
crates/languages/src/typescript/highlights.scm             | 134 ++
crates/lsp/src/lsp.rs                                      |   6 
crates/markdown_preview/src/markdown_preview.rs            |  14 
crates/markdown_preview/src/markdown_preview_view.rs       |  67 +
crates/markdown_preview/src/markdown_renderer.rs           |  10 
crates/multi_buffer/src/multi_buffer.rs                    |   4 
crates/open_ai/src/open_ai.rs                              |   8 
crates/project/src/buffer_store.rs                         |  18 
crates/project/src/debugger/locators/cargo.rs              |  23 
crates/project/src/git_store.rs                            |   3 
crates/project/src/lsp_store.rs                            |  28 
crates/project/src/project.rs                              |   8 
crates/project/src/project_tests.rs                        |   6 
crates/project/src/worktree_store.rs                       |   9 
crates/remote/src/transport/ssh.rs                         |   2 
crates/remote_server/src/headless_project.rs               |   1 
crates/repl/src/repl.rs                                    |   4 
crates/search/Cargo.toml                                   |   6 
crates/search/src/project_search.rs                        |   1 
crates/terminal/Cargo.toml                                 |   2 
crates/terminal/src/terminal_hyperlinks.rs                 |  33 
crates/terminal_view/src/terminal_panel.rs                 |   2 
crates/text/src/text.rs                                    |  28 
crates/util/src/paths.rs                                   |  11 
crates/util/src/shell.rs                                   |   9 
crates/util/src/shell_builder.rs                           | 131 +
crates/util/src/shell_env.rs                               |  20 
crates/vim/Cargo.toml                                      |   1 
crates/vim/src/test/vim_test_context.rs                    |   1 
crates/workspace/src/workspace.rs                          |   6 
crates/worktree/src/worktree.rs                            |  60 
crates/worktree/src/worktree_tests.rs                      |  25 
crates/zed/src/main.rs                                     |   4 
crates/ztracing/Cargo.toml                                 |   1 
crates/ztracing/src/lib.rs                                 |  38 
script/prettier                                            |   4 
tooling/xtask/src/tasks/workflows/after_release.rs         |  74 
typos.toml                                                 |   2 
124 files changed, 4,232 insertions(+), 1,517 deletions(-)

Detailed changes

.github/ISSUE_TEMPLATE/10_bug_report.yml

@@ -75,6 +75,22 @@ body:
         </details>
     validations:
       required: false
+  - type: textarea
+    attributes:
+      label: Relevant Keymap
+      description: |
+        Open the command palette in Zed, then type “zed: open keymap file” and copy/paste the file's contents.
+      value: |
+        <details><summary>keymap.json</summary>
+
+        <!-- Paste your keymap file inside the code block. -->
+        ```json
+
+        ```
+
+        </details>
+    validations:
+      required: false
   - type: textarea
     attributes:
       label: (for AI issues) Model provider details

.github/workflows/after_release.yml

@@ -5,13 +5,27 @@ on:
   release:
     types:
     - published
+  workflow_dispatch:
+    inputs:
+      tag_name:
+        description: tag_name
+        required: true
+        type: string
+      prerelease:
+        description: prerelease
+        required: true
+        type: boolean
+      body:
+        description: body
+        type: string
+        default: ''
 jobs:
   rebuild_releases_page:
     if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
     runs-on: namespace-profile-2x4-ubuntu-2404
     steps:
     - name: after_release::rebuild_releases_page::refresh_cloud_releases
-      run: curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag=${{ github.event.release.tag_name }}
+      run: curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag=${{ github.event.release.tag_name || inputs.tag_name }}
       shell: bash -euxo pipefail {0}
     - name: after_release::rebuild_releases_page::redeploy_zed_dev
       run: npm exec --yes -- vercel@37 --token="$VERCEL_TOKEN" --scope zed-industries redeploy https://zed.dev
@@ -27,7 +41,7 @@ jobs:
     - id: get-release-url
       name: after_release::post_to_discord::get_release_url
       run: |
-        if [ "${{ github.event.release.prerelease }}" == "true" ]; then
+        if [ "${{ github.event.release.prerelease || inputs.prerelease }}" == "true" ]; then
             URL="https://zed.dev/releases/preview"
         else
             URL="https://zed.dev/releases/stable"
@@ -40,9 +54,9 @@ jobs:
       uses: 2428392/gh-truncate-string-action@b3ff790d21cf42af3ca7579146eedb93c8fb0757
       with:
         stringToTruncate: |
-          📣 Zed [${{ github.event.release.tag_name }}](<${{ steps.get-release-url.outputs.URL }}>) was just released!
+          📣 Zed [${{ github.event.release.tag_name || inputs.tag_name }}](<${{ steps.get-release-url.outputs.URL }}>)  was just released!
 
-          ${{ github.event.release.body }}
+          ${{ github.event.release.body || inputs.body }}
         maxLength: 2000
         truncationSymbol: '...'
     - name: after_release::post_to_discord::discord_webhook_action
@@ -56,7 +70,7 @@ jobs:
     - id: set-package-name
       name: after_release::publish_winget::set_package_name
       run: |
-        if ("${{ github.event.release.prerelease }}" -eq "true") {
+        if ("${{ github.event.release.prerelease || inputs.prerelease }}" -eq "true") {
             $PACKAGE_NAME = "ZedIndustries.Zed.Preview"
         } else {
             $PACKAGE_NAME = "ZedIndustries.Zed"
@@ -68,6 +82,7 @@ jobs:
       uses: vedantmgoyal9/winget-releaser@19e706d4c9121098010096f9c495a70a7518b30f
       with:
         identifier: ${{ steps.set-package-name.outputs.PACKAGE_NAME }}
+        release-tag: ${{ github.event.release.tag_name || inputs.tag_name }}
         max-versions-to-keep: 5
         token: ${{ secrets.WINGET_TOKEN }}
   create_sentry_release:

Cargo.lock

@@ -5179,6 +5179,7 @@ dependencies = [
  "language_model",
  "language_models",
  "languages",
+ "libc",
  "log",
  "node_runtime",
  "paths",
@@ -5200,7 +5201,6 @@ dependencies = [
  "wasmtime",
  "watch",
  "zeta_prompt",
- "zlog",
 ]
 
 [[package]]
@@ -7247,6 +7247,7 @@ dependencies = [
  "libc",
  "log",
  "lyon",
+ "mach2 0.5.0",
  "media",
  "metal",
  "naga",
@@ -14455,12 +14456,14 @@ dependencies = [
  "settings",
  "smol",
  "theme",
+ "tracing",
  "ui",
  "unindent",
  "util",
  "util_macros",
  "workspace",
  "zed_actions",
+ "ztracing",
 ]
 
 [[package]]
@@ -16373,13 +16376,13 @@ dependencies = [
  "alacritty_terminal",
  "anyhow",
  "collections",
- "fancy-regex",
  "futures 0.3.31",
  "gpui",
  "itertools 0.14.0",
  "libc",
  "log",
  "rand 0.9.2",
+ "regex",
  "release_channel",
  "schemars",
  "serde",
@@ -18107,6 +18110,7 @@ dependencies = [
  "language",
  "log",
  "lsp",
+ "markdown_preview",
  "menu",
  "multi_buffer",
  "nvim-rs",
@@ -21031,6 +21035,7 @@ dependencies = [
  "tracing",
  "tracing-subscriber",
  "tracing-tracy",
+ "zlog",
  "ztracing_macro",
 ]
 

Cargo.toml

@@ -631,7 +631,7 @@ shellexpand = "2.1.0"
 shlex = "1.3.0"
 simplelog = "0.12.2"
 slotmap = "1.0.6"
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { version = "1.6", features = ["union", "const_new"] }
 smol = "2.0"
 sqlformat = "0.2"
 stacksafe = "0.1"

assets/keymaps/default-linux.json

@@ -25,7 +25,8 @@
       "ctrl-shift-w": "workspace::CloseWindow",
       "shift-escape": "workspace::ToggleZoom",
       "open": "workspace::Open",
-      "ctrl-o": "workspace::Open",
+      "ctrl-o": "workspace::OpenFiles",
+      "ctrl-k ctrl-o": "workspace::Open",
       "ctrl-=": ["zed::IncreaseBufferFontSize", { "persist": false }],
       "ctrl-+": ["zed::IncreaseBufferFontSize", { "persist": false }],
       "ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }],
@@ -814,7 +815,6 @@
       "ctrl-]": "agent::CycleNextInlineAssist",
       "ctrl-shift-enter": "inline_assistant::ThumbsUpResult",
       "ctrl-shift-backspace": "inline_assistant::ThumbsDownResult"
-
     }
   },
   {
@@ -1192,8 +1192,12 @@
   {
     "context": "MarkdownPreview",
     "bindings": {
-      "pageup": "markdown::MovePageUp",
-      "pagedown": "markdown::MovePageDown"
+      "pageup": "markdown::ScrollPageUp",
+      "pagedown": "markdown::ScrollPageDown",
+      "up": "markdown::ScrollUp",
+      "down": "markdown::ScrollDown",
+      "alt-up": "markdown::ScrollUpByItem",
+      "alt-down": "markdown::ScrollDownByItem"
     }
   },
   {

assets/keymaps/default-macos.json

@@ -1296,8 +1296,12 @@
   {
     "context": "MarkdownPreview",
     "bindings": {
-      "pageup": "markdown::MovePageUp",
-      "pagedown": "markdown::MovePageDown"
+      "pageup": "markdown::ScrollPageUp",
+      "pagedown": "markdown::ScrollPageDown",
+      "up": "markdown::ScrollUp",
+      "down": "markdown::ScrollDown",
+      "alt-up": "markdown::ScrollUpByItem",
+      "alt-down": "markdown::ScrollDownByItem"
     }
   },
   {

assets/keymaps/default-windows.json

@@ -489,8 +489,8 @@
     "bindings": {
       "ctrl-[": "editor::Outdent",
       "ctrl-]": "editor::Indent",
-      "ctrl-shift-alt-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], // Insert Cursor Above
-      "ctrl-shift-alt-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], // Insert Cursor Below
+      "ctrl-alt-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], // Insert Cursor Above
+      "ctrl-alt-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], // Insert Cursor Below
       "ctrl-shift-k": "editor::DeleteLine",
       "alt-up": "editor::MoveLineUp",
       "alt-down": "editor::MoveLineDown",
@@ -501,9 +501,12 @@
       "ctrl-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
       "ctrl-f2": "editor::SelectAllMatches", // Select all occurrences of current word
       "ctrl-d": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch  / find_under_expand
+      "ctrl-f3": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch  / find_under_expand
       "ctrl-k ctrl-d": ["editor::SelectNext", { "replace_newest": true }], // editor.action.moveSelectionToNextFindMatch  / find_under_expand_skip
+      "ctrl-shift-f3": ["editor::SelectPrevious", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch  / find_under_expand
       "ctrl-k ctrl-i": "editor::Hover",
       "ctrl-k ctrl-b": "editor::BlameHover",
+      "ctrl-k ctrl-f": "editor::FormatSelections",
       "ctrl-/": ["editor::ToggleComments", { "advance_downwards": false }],
       "f8": ["editor::GoToDiagnostic", { "severity": { "min": "hint", "max": "error" } }],
       "shift-f8": ["editor::GoToPreviousDiagnostic", { "severity": { "min": "hint", "max": "error" } }],
@@ -536,7 +539,7 @@
       "ctrl-k p": "editor::CopyPath",
       "ctrl-\\": "pane::SplitRight",
       "alt-.": "editor::GoToHunk",
-      "alt-,": "editor::GoToPreviousHunk"
+      "alt-,": "editor::GoToPreviousHunk",
     }
   },
   {
@@ -1220,8 +1223,12 @@
     "context": "MarkdownPreview",
     "use_key_equivalents": true,
     "bindings": {
-      "pageup": "markdown::MovePageUp",
-      "pagedown": "markdown::MovePageDown"
+      "pageup": "markdown::ScrollPageUp",
+      "pagedown": "markdown::ScrollPageDown",
+      "up": "markdown::ScrollUp",
+      "down": "markdown::ScrollDown",
+      "alt-up": "markdown::ScrollUpByItem",
+      "alt-down": "markdown::ScrollDownByItem"
     }
   },
   {

assets/keymaps/vim.json

@@ -1046,5 +1046,14 @@
       "g g": "settings_editor::FocusFirstNavEntry",
       "shift-g": "settings_editor::FocusLastNavEntry"
     }
+  },
+  {
+    "context": "MarkdownPreview",
+    "bindings": {
+      "ctrl-u": "markdown::ScrollPageUp",
+      "ctrl-d": "markdown::ScrollPageDown",
+      "ctrl-y": "markdown::ScrollUp",
+      "ctrl-e": "markdown::ScrollDown"
+    }
   }
 ]

assets/settings/default.json

@@ -12,7 +12,7 @@
   "theme": {
     "mode": "system",
     "light": "One Light",
-    "dark": "One Dark"
+    "dark": "One Dark",
   },
   "icon_theme": "Zed (Default)",
   // The name of a base set of key bindings to use.
@@ -29,7 +29,7 @@
   // Features that can be globally enabled or disabled
   "features": {
     // Which edit prediction provider to use.
-    "edit_prediction_provider": "zed"
+    "edit_prediction_provider": "zed",
   },
   // The name of a font to use for rendering text in the editor
   // ".ZedMono" currently aliases to Lilex
@@ -69,7 +69,7 @@
   // The OpenType features to enable for text in the UI
   "ui_font_features": {
     // Disable ligatures:
-    "calt": false
+    "calt": false,
   },
   // The weight of the UI font in standard CSS units from 100 to 900.
   "ui_font_weight": 400,
@@ -87,7 +87,7 @@
     "border_size": 0.0,
     // Opacity of the inactive panes. 0 means transparent, 1 means opaque.
     // Values are clamped to the [0.0, 1.0] range.
-    "inactive_opacity": 1.0
+    "inactive_opacity": 1.0,
   },
   // Layout mode of the bottom dock. Defaults to "contained"
   //   choices: contained, full, left_aligned, right_aligned
@@ -103,12 +103,12 @@
     "left_padding": 0.2,
     // The relative width of the right padding of the central pane from the
     // workspace when the centered layout is used.
-    "right_padding": 0.2
+    "right_padding": 0.2,
   },
   // Image viewer settings
   "image_viewer": {
     // The unit for image file sizes: "binary" (KiB, MiB) or decimal (KB, MB)
-    "unit": "binary"
+    "unit": "binary",
   },
   // Determines the modifier to be used to add multiple cursors with the mouse. The open hover link mouse gestures will adapt such that it do not conflict with the multicursor modifier.
   //
@@ -296,7 +296,7 @@
     // When true, enables drag and drop text selection in buffer.
     "enabled": true,
     // The delay in milliseconds that must elapse before drag and drop is allowed. Otherwise, a new text selection is created.
-    "delay": 300
+    "delay": 300,
   },
   // What to do when go to definition yields no results.
   //
@@ -400,14 +400,14 @@
   // Visible characters used to render whitespace when show_whitespaces is enabled.
   "whitespace_map": {
     "space": "•",
-    "tab": "→"
+    "tab": "→",
   },
   // Settings related to calls in Zed
   "calls": {
     // Join calls with the microphone live by default
     "mute_on_join": false,
     // Share your project when you are the first to join a channel
-    "share_on_join": false
+    "share_on_join": false,
   },
   // Toolbar related settings
   "toolbar": {
@@ -420,7 +420,7 @@
     // Whether to show agent review buttons in the editor toolbar.
     "agent_review": true,
     // Whether to show code action buttons in the editor toolbar.
-    "code_actions": false
+    "code_actions": false,
   },
   // Whether to allow windows to tab together based on the user’s tabbing preference (macOS only).
   "use_system_window_tabs": false,
@@ -439,7 +439,7 @@
     // Whether to show the sign in button in the titlebar.
     "show_sign_in": true,
     // Whether to show the menus in the titlebar.
-    "show_menus": false
+    "show_menus": false,
   },
   "audio": {
     // Opt into the new audio system.
@@ -472,7 +472,7 @@
     // the future we will migrate by setting this to false
     //
     // You need to rejoin a call for this setting to apply
-    "experimental.legacy_audio_compatible": true
+    "experimental.legacy_audio_compatible": true,
   },
   // Scrollbar related settings
   "scrollbar": {
@@ -511,8 +511,8 @@
       // When false, forcefully disables the horizontal scrollbar. Otherwise, obey other settings.
       "horizontal": true,
       // When false, forcefully disables the vertical scrollbar. Otherwise, obey other settings.
-      "vertical": true
-    }
+      "vertical": true,
+    },
   },
   // Minimap related settings
   "minimap": {
@@ -560,7 +560,7 @@
     // 3. "gutter" or "none" to not highlight the current line in the minimap.
     "current_line_highlight": null,
     // Maximum number of columns to display in the minimap.
-    "max_width_columns": 80
+    "max_width_columns": 80,
   },
   // Enable middle-click paste on Linux.
   "middle_click_paste": true,
@@ -583,7 +583,7 @@
     // Whether to show fold buttons in the gutter.
     "folds": true,
     // Minimum number of characters to reserve space for in the gutter.
-    "min_line_number_digits": 4
+    "min_line_number_digits": 4,
   },
   "indent_guides": {
     // Whether to show indent guides in the editor.
@@ -604,7 +604,7 @@
     //
     // 1. "disabled"
     // 2. "indent_aware"
-    "background_coloring": "disabled"
+    "background_coloring": "disabled",
   },
   // Whether the editor will scroll beyond the last line.
   "scroll_beyond_last_line": "one_page",
@@ -623,7 +623,7 @@
   "fast_scroll_sensitivity": 4.0,
   "sticky_scroll": {
     // Whether to stick scopes to the top of the editor.
-    "enabled": false
+    "enabled": false,
   },
   "relative_line_numbers": "disabled",
   // If 'search_wrap' is disabled, search result do not wrap around the end of the file.
@@ -641,7 +641,7 @@
     // Whether to interpret the search query as a regular expression.
     "regex": false,
     // Whether to center the cursor on each search match when navigating.
-    "center_on_match": false
+    "center_on_match": false,
   },
   // When to populate a new search's query based on the text under the cursor.
   // This setting can take the following three values:
@@ -684,8 +684,8 @@
       "shift": false,
       "alt": false,
       "platform": false,
-      "function": false
-    }
+      "function": false,
+    },
   },
   // Whether to resize all the panels in a dock when resizing the dock.
   // Can be a combination of "left", "right" and "bottom".
@@ -733,7 +733,7 @@
       //    "always"
       // 5. Never show the scrollbar:
       //    "never"
-      "show": null
+      "show": null,
     },
     // Which files containing diagnostic errors/warnings to mark in the project panel.
     // This setting can take the following three values:
@@ -756,7 +756,7 @@
       //    "always"
       // 2. Never show indent guides:
       //    "never"
-      "show": "always"
+      "show": "always",
     },
     // Sort order for entries in the project panel.
     // This setting can take three values:
@@ -781,8 +781,8 @@
       // Whether to automatically open files after pasting or duplicating them.
       "on_paste": true,
       // Whether to automatically open files dropped from external sources.
-      "on_drop": true
-    }
+      "on_drop": true,
+    },
   },
   "outline_panel": {
     // Whether to show the outline panel button in the status bar
@@ -815,7 +815,7 @@
       //    "always"
       // 2. Never show indent guides:
       //    "never"
-      "show": "always"
+      "show": "always",
     },
     // Scrollbar-related settings
     "scrollbar": {
@@ -832,11 +832,11 @@
       //    "always"
       // 5. Never show the scrollbar:
       //    "never"
-      "show": null
+      "show": null,
     },
     // Default depth to expand outline items in the current file.
     // Set to 0 to collapse all items that have children, 1 or higher to collapse items at that depth or deeper.
-    "expand_outlines_with_depth": 100
+    "expand_outlines_with_depth": 100,
   },
   "collaboration_panel": {
     // Whether to show the collaboration panel button in the status bar.
@@ -844,7 +844,7 @@
     // Where to dock the collaboration panel. Can be 'left' or 'right'.
     "dock": "left",
     // Default width of the collaboration panel.
-    "default_width": 240
+    "default_width": 240,
   },
   "git_panel": {
     // Whether to show the git panel button in the status bar.
@@ -880,12 +880,12 @@
       // Choices: always, auto, never, system
       // Default: inherits editor scrollbar settings
       // "show": null
-    }
+    },
   },
   "message_editor": {
     // Whether to automatically replace emoji shortcodes with emoji characters.
     // For example: typing `:wave:` gets replaced with `👋`.
-    "auto_replace_emoji_shortcode": true
+    "auto_replace_emoji_shortcode": true,
   },
   "notification_panel": {
     // Whether to show the notification panel button in the status bar.
@@ -893,7 +893,7 @@
     // Where to dock the notification panel. Can be 'left' or 'right'.
     "dock": "right",
     // Default width of the notification panel.
-    "default_width": 380
+    "default_width": 380,
   },
   "agent": {
     // Whether the agent is enabled.
@@ -915,7 +915,7 @@
       // The provider to use.
       "provider": "zed.dev",
       // The model to use.
-      "model": "claude-sonnet-4"
+      "model": "claude-sonnet-4",
     },
     // Additional parameters for language model requests. When making a request to a model, parameters will be taken
     // from the last entry in this list that matches the model's provider and name. In each entry, both provider
@@ -970,8 +970,8 @@
           "grep": true,
           "terminal": true,
           "thinking": true,
-          "web_search": true
-        }
+          "web_search": true,
+        },
       },
       "ask": {
         "name": "Ask",
@@ -988,14 +988,14 @@
           "open": true,
           "grep": true,
           "thinking": true,
-          "web_search": true
-        }
+          "web_search": true,
+        },
       },
       "minimal": {
         "name": "Minimal",
         "enable_all_context_servers": false,
-        "tools": {}
-      }
+        "tools": {},
+      },
     },
     // Where to show notifications when the agent has either completed
     // its response, or else needs confirmation before it can run a
@@ -1024,7 +1024,7 @@
     // Minimum number of lines to display in the agent message editor.
     //
     // Default: 4
-    "message_editor_min_lines": 4
+    "message_editor_min_lines": 4,
   },
   // Whether the screen sharing icon is shown in the os status bar.
   "show_call_status_icon": true,
@@ -1059,7 +1059,7 @@
     // Whether or not to show the navigation history buttons.
     "show_nav_history_buttons": true,
     // Whether or not to show the tab bar buttons.
-    "show_tab_bar_buttons": true
+    "show_tab_bar_buttons": true,
   },
   // Settings related to the editor's tabs
   "tabs": {
@@ -1098,7 +1098,7 @@
     //    "errors"
     // 3. Mark files with errors and warnings:
     //    "all"
-    "show_diagnostics": "off"
+    "show_diagnostics": "off",
   },
   // Settings related to preview tabs.
   "preview_tabs": {
@@ -1119,7 +1119,7 @@
     "enable_preview_file_from_code_navigation": true,
     // Whether to keep tabs in preview mode when code navigation is used to navigate away from them.
     // If `enable_preview_file_from_code_navigation` or `enable_preview_multibuffer_from_code_navigation` is also true, the new tab may replace the existing one.
-    "enable_keep_preview_on_code_navigation": false
+    "enable_keep_preview_on_code_navigation": false,
   },
   // Settings related to the file finder.
   "file_finder": {
@@ -1163,7 +1163,7 @@
     //   * "all": Use all gitignored files
     //   * "indexed": Use only the files Zed had indexed
     //   * "smart": Be smart and search for ignored when called from a gitignored worktree
-    "include_ignored": "smart"
+    "include_ignored": "smart",
   },
   // Whether or not to remove any trailing whitespace from lines of a buffer
   // before saving it.
@@ -1234,7 +1234,7 @@
     // Send debug info like crash reports.
     "diagnostics": true,
     // Send anonymized usage data like what languages you're using Zed with.
-    "metrics": true
+    "metrics": true,
   },
   // Whether to disable all AI features in Zed.
   //
@@ -1268,7 +1268,7 @@
       "enabled": true,
       // Minimum time to wait before pulling diagnostics from the language server(s).
       // 0 turns the debounce off.
-      "debounce_ms": 50
+      "debounce_ms": 50,
     },
     // Settings for inline diagnostics
     "inline": {
@@ -1286,8 +1286,8 @@
       "min_column": 0,
       // The minimum severity of the diagnostics to show inline.
       // Inherits editor's diagnostics' max severity settings when `null`.
-      "max_severity": null
-    }
+      "max_severity": null,
+    },
   },
   // Files or globs of files that will be excluded by Zed entirely. They will be skipped during file
   // scans, file searches, and not be displayed in the project file tree. Takes precedence over `file_scan_inclusions`.
@@ -1301,7 +1301,7 @@
     "**/.DS_Store",
     "**/Thumbs.db",
     "**/.classpath",
-    "**/.settings"
+    "**/.settings",
   ],
   // Files or globs of files that will be included by Zed, even when ignored by git. This is useful
   // for files that are not tracked by git, but are still important to your project. Note that globs
@@ -1336,14 +1336,14 @@
       // Whether or not to display the git commit summary on the same line.
       "show_commit_summary": false,
       // The minimum column number to show the inline blame information at
-      "min_column": 0
+      "min_column": 0,
     },
     "blame": {
-      "show_avatar": true
+      "show_avatar": true,
     },
     // Control which information is shown in the branch picker.
     "branch_picker": {
-      "show_author_name": true
+      "show_author_name": true,
     },
     // How git hunks are displayed visually in the editor.
     // This setting can take two values:
@@ -1355,7 +1355,7 @@
     "hunk_style": "staged_hollow",
     // Should the name or path be displayed first in the git view.
     // "path_style": "file_name_first" or "file_path_first"
-    "path_style": "file_name_first"
+    "path_style": "file_name_first",
   },
   // The list of custom Git hosting providers.
   "git_hosting_providers": [
@@ -1389,7 +1389,7 @@
       "**/secrets.yml",
       "**/.zed/settings.json", // zed project settings
       "/**/zed/settings.json", // zed user settings
-      "/**/zed/keymap.json"
+      "/**/zed/keymap.json",
     ],
     // When to show edit predictions previews in buffer.
     // This setting takes two possible values:
@@ -1407,15 +1407,15 @@
     "copilot": {
       "enterprise_uri": null,
       "proxy": null,
-      "proxy_no_verify": null
+      "proxy_no_verify": null,
     },
     "codestral": {
       "model": null,
-      "max_tokens": null
+      "max_tokens": null,
     },
     // Whether edit predictions are enabled when editing text threads in the agent panel.
     // This setting has no effect if globally disabled.
-    "enabled_in_text_threads": true
+    "enabled_in_text_threads": true,
   },
   // Settings specific to journaling
   "journal": {
@@ -1425,7 +1425,7 @@
     // May take 2 values:
     // 1. hour12
     // 2. hour24
-    "hour_format": "hour12"
+    "hour_format": "hour12",
   },
   // Status bar-related settings.
   "status_bar": {
@@ -1436,7 +1436,7 @@
     // Whether to show the cursor position button in the status bar.
     "cursor_position_button": true,
     // Whether to show active line endings button in the status bar.
-    "line_endings_button": false
+    "line_endings_button": false,
   },
   // Settings specific to the terminal
   "terminal": {
@@ -1557,8 +1557,8 @@
         // Preferred Conda manager to use when activating Conda environments.
         // Values: "auto", "conda", "mamba", "micromamba"
         // Default: "auto"
-        "conda_manager": "auto"
-      }
+        "conda_manager": "auto",
+      },
     },
     "toolbar": {
       // Whether to display the terminal title in its toolbar's breadcrumbs.
@@ -1566,7 +1566,7 @@
       //
       // The shell running in the terminal needs to be configured to emit the title.
       // Example: `echo -e "\e]2;New Title\007";`
-      "breadcrumbs": false
+      "breadcrumbs": false,
     },
     // Scrollbar-related settings
     "scrollbar": {
@@ -1583,7 +1583,7 @@
       //    "always"
       // 5. Never show the scrollbar:
       //    "never"
-      "show": null
+      "show": null,
     },
     // Set the terminal's font size. If this option is not included,
     // the terminal will default to matching the buffer's font size.
@@ -1646,30 +1646,26 @@
       // surrounding symbols or quotes
       [
         "(?x)",
-        "# optionally starts with 0-2 opening prefix symbols",
-        "[({\\[<]{0,2}",
-        "# which may be followed by an opening quote",
-        "(?<quote>[\"'`])?",
-        "# `path` is the shortest sequence of any non-space character",
-        "(?<link>(?<path>[^ ]+?",
-        "    # which may end with a line and optionally a column,",
-        "    (?<line_column>:+[0-9]+(:[0-9]+)?|:?\\([0-9]+([,:][0-9]+)?\\))?",
-        "))",
-        "# which must be followed by a matching quote",
-        "(?(<quote>)\\k<quote>)",
-        "# and optionally a single closing symbol",
-        "[)}\\]>]?",
-        "# if line/column matched, may be followed by a description",
-        "(?(<line_column>):[^ 0-9][^ ]*)?",
-        "# which may be followed by trailing punctuation",
-        "[.,:)}\\]>]*",
-        "# and always includes trailing whitespace or end of line",
-        "([ ]+|$)"
-      ]
+        "(?<path>",
+        "    (",
+        "        # multi-char path: first char (not opening delimiter or space)",
+        "        [^({\\[<\"'`\\ ]",
+        "        # middle chars: non-space, and colon/paren only if not followed by digit/paren",
+        "        ([^\\ :(]|[:(][^0-9()])*",
+        "        # last char: not closing delimiter or colon",
+        "        [^()}\\]>\"'`.,;:\\ ]",
+        "    |",
+        "        # single-char path: not delimiter, punctuation, or space",
+        "        [^(){}\\[\\]<>\"'`.,;:\\ ]",
+        "    )",
+        "    # optional line/column suffix (included in path for PathWithPosition::parse_str)",
+        "    (:+[0-9]+(:[0-9]+)?|:?\\([0-9]+([,:]?[0-9]+)?\\))?",
+        ")",
+      ],
     ],
     // Timeout for hover and Cmd-click path hyperlink discovery in milliseconds. Specifying a
     // timeout of `0` will disable path hyperlinking in terminal.
-    "path_hyperlink_timeout_ms": 1
+    "path_hyperlink_timeout_ms": 1,
   },
   "code_actions_on_format": {},
   // Settings related to running tasks.
@@ -1685,7 +1681,7 @@
     // * Zed task from history (e.g. one-off task was spawned before)
     //
     // Default: true
-    "prefer_lsp": true
+    "prefer_lsp": true,
   },
   // An object whose keys are language names, and whose values
   // are arrays of filenames or extensions of files that should
@@ -1702,7 +1698,7 @@
   "file_types": {
     "JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "**/.vscode/**/*.json", "tsconfig*.json"],
     "Markdown": [".rules", ".cursorrules", ".windsurfrules", ".clinerules"],
-    "Shell Script": [".env.*"]
+    "Shell Script": [".env.*"],
   },
   // Settings for which version of Node.js and NPM to use when installing
   // language servers and Copilot.
@@ -1718,7 +1714,7 @@
     // `path`, but not `npm_path`, Zed will assume that `npm` is located at
     // `${path}/../npm`.
     "path": null,
-    "npm_path": null
+    "npm_path": null,
   },
   // The extensions that Zed should automatically install on startup.
   //
@@ -1730,7 +1726,7 @@
     "anthropic": true,
     "google-ai": true,
     "openai": true,
-    "openrouter": true
+    "openrouter": true,
   },
   // The capabilities granted to extensions.
   //
@@ -1738,7 +1734,7 @@
   "granted_extension_capabilities": [
     { "kind": "process:exec", "command": "*", "args": ["**"] },
     { "kind": "download_file", "host": "*", "path": ["**"] },
-    { "kind": "npm:install", "package": "*" }
+    { "kind": "npm:install", "package": "*" },
   ],
   // Controls how completions are processed for this language.
   "completions": {
@@ -1789,7 +1785,7 @@
     // 4. "replace_suffix"
     //   Behaves like `"replace"` if the text after the cursor is a suffix of the completion, and like
     //   `"insert"` otherwise.
-    "lsp_insert_mode": "replace_suffix"
+    "lsp_insert_mode": "replace_suffix",
   },
   // Different settings for specific languages.
   "languages": {
@@ -1797,116 +1793,116 @@
       "language_servers": ["astro-language-server", "..."],
       "prettier": {
         "allowed": true,
-        "plugins": ["prettier-plugin-astro"]
-      }
+        "plugins": ["prettier-plugin-astro"],
+      },
     },
     "Blade": {
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "C": {
       "format_on_save": "off",
       "use_on_type_format": false,
       "prettier": {
-        "allowed": false
-      }
+        "allowed": false,
+      },
     },
     "C++": {
       "format_on_save": "off",
       "use_on_type_format": false,
       "prettier": {
-        "allowed": false
-      }
+        "allowed": false,
+      },
     },
     "CSharp": {
-      "language_servers": ["roslyn", "!omnisharp", "..."]
+      "language_servers": ["roslyn", "!omnisharp", "..."],
     },
     "CSS": {
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "Dart": {
-      "tab_size": 2
+      "tab_size": 2,
     },
     "Diff": {
       "show_edit_predictions": false,
       "remove_trailing_whitespace_on_save": false,
-      "ensure_final_newline_on_save": false
+      "ensure_final_newline_on_save": false,
     },
     "Elixir": {
-      "language_servers": ["elixir-ls", "!expert", "!next-ls", "!lexical", "..."]
+      "language_servers": ["elixir-ls", "!expert", "!next-ls", "!lexical", "..."],
     },
     "Elm": {
-      "tab_size": 4
+      "tab_size": 4,
     },
     "Erlang": {
-      "language_servers": ["erlang-ls", "!elp", "..."]
+      "language_servers": ["erlang-ls", "!elp", "..."],
     },
     "Git Commit": {
       "allow_rewrap": "anywhere",
       "soft_wrap": "editor_width",
-      "preferred_line_length": 72
+      "preferred_line_length": 72,
     },
     "Go": {
       "hard_tabs": true,
       "code_actions_on_format": {
-        "source.organizeImports": true
+        "source.organizeImports": true,
       },
-      "debuggers": ["Delve"]
+      "debuggers": ["Delve"],
     },
     "GraphQL": {
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "HEEX": {
-      "language_servers": ["elixir-ls", "!expert", "!next-ls", "!lexical", "..."]
+      "language_servers": ["elixir-ls", "!expert", "!next-ls", "!lexical", "..."],
     },
     "HTML": {
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "HTML+ERB": {
-      "language_servers": ["herb", "!ruby-lsp", "..."]
+      "language_servers": ["herb", "!ruby-lsp", "..."],
     },
     "Java": {
       "prettier": {
         "allowed": true,
-        "plugins": ["prettier-plugin-java"]
-      }
+        "plugins": ["prettier-plugin-java"],
+      },
     },
     "JavaScript": {
       "language_servers": ["!typescript-language-server", "vtsls", "..."],
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "JSON": {
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "JSONC": {
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "JS+ERB": {
-      "language_servers": ["!ruby-lsp", "..."]
+      "language_servers": ["!ruby-lsp", "..."],
     },
     "Kotlin": {
-      "language_servers": ["!kotlin-language-server", "kotlin-lsp", "..."]
+      "language_servers": ["!kotlin-language-server", "kotlin-lsp", "..."],
     },
     "LaTeX": {
       "formatter": "language_server",
       "language_servers": ["texlab", "..."],
       "prettier": {
         "allowed": true,
-        "plugins": ["prettier-plugin-latex"]
-      }
+        "plugins": ["prettier-plugin-latex"],
+      },
     },
     "Markdown": {
       "format_on_save": "off",
@@ -1914,136 +1910,142 @@
       "remove_trailing_whitespace_on_save": false,
       "allow_rewrap": "anywhere",
       "soft_wrap": "editor_width",
+      "completions": {
+        "words": "disabled",
+      },
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "PHP": {
       "language_servers": ["phpactor", "!intelephense", "!phptools", "..."],
       "prettier": {
         "allowed": true,
         "plugins": ["@prettier/plugin-php"],
-        "parser": "php"
-      }
+        "parser": "php",
+      },
     },
     "Plain Text": {
       "allow_rewrap": "anywhere",
-      "soft_wrap": "editor_width"
+      "soft_wrap": "editor_width",
+      "completions": {
+        "words": "disabled",
+      },
     },
     "Python": {
       "code_actions_on_format": {
-        "source.organizeImports.ruff": true
+        "source.organizeImports.ruff": true,
       },
       "formatter": {
         "language_server": {
-          "name": "ruff"
-        }
+          "name": "ruff",
+        },
       },
       "debuggers": ["Debugpy"],
-      "language_servers": ["basedpyright", "ruff", "!ty", "!pyrefly", "!pyright", "!pylsp", "..."]
+      "language_servers": ["basedpyright", "ruff", "!ty", "!pyrefly", "!pyright", "!pylsp", "..."],
     },
     "Ruby": {
-      "language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "!sorbet", "!steep", "..."]
+      "language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "!sorbet", "!steep", "..."],
     },
     "Rust": {
-      "debuggers": ["CodeLLDB"]
+      "debuggers": ["CodeLLDB"],
     },
     "SCSS": {
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "Starlark": {
-      "language_servers": ["starpls", "!buck2-lsp", "..."]
+      "language_servers": ["starpls", "!buck2-lsp", "..."],
     },
     "Svelte": {
       "language_servers": ["svelte-language-server", "..."],
       "prettier": {
         "allowed": true,
-        "plugins": ["prettier-plugin-svelte"]
-      }
+        "plugins": ["prettier-plugin-svelte"],
+      },
     },
     "TSX": {
       "language_servers": ["!typescript-language-server", "vtsls", "..."],
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "Twig": {
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "TypeScript": {
       "language_servers": ["!typescript-language-server", "vtsls", "..."],
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "SystemVerilog": {
       "format_on_save": "off",
       "language_servers": ["!slang", "..."],
-      "use_on_type_format": false
+      "use_on_type_format": false,
     },
     "Vue.js": {
       "language_servers": ["vue-language-server", "vtsls", "..."],
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "XML": {
       "prettier": {
         "allowed": true,
-        "plugins": ["@prettier/plugin-xml"]
-      }
+        "plugins": ["@prettier/plugin-xml"],
+      },
     },
     "YAML": {
       "prettier": {
-        "allowed": true
-      }
+        "allowed": true,
+      },
     },
     "YAML+ERB": {
-      "language_servers": ["!ruby-lsp", "..."]
+      "language_servers": ["!ruby-lsp", "..."],
     },
     "Zig": {
-      "language_servers": ["zls", "..."]
-    }
+      "language_servers": ["zls", "..."],
+    },
   },
   // Different settings for specific language models.
   "language_models": {
     "anthropic": {
-      "api_url": "https://api.anthropic.com"
+      "api_url": "https://api.anthropic.com",
     },
     "bedrock": {},
     "google": {
-      "api_url": "https://generativelanguage.googleapis.com"
+      "api_url": "https://generativelanguage.googleapis.com",
     },
     "ollama": {
-      "api_url": "http://localhost:11434"
+      "api_url": "http://localhost:11434",
     },
     "openai": {
-      "api_url": "https://api.openai.com/v1"
+      "api_url": "https://api.openai.com/v1",
     },
     "openai_compatible": {},
     "open_router": {
-      "api_url": "https://openrouter.ai/api/v1"
+      "api_url": "https://openrouter.ai/api/v1",
     },
     "lmstudio": {
-      "api_url": "http://localhost:1234/api/v0"
+      "api_url": "http://localhost:1234/api/v0",
     },
     "deepseek": {
-      "api_url": "https://api.deepseek.com/v1"
+      "api_url": "https://api.deepseek.com/v1",
     },
     "mistral": {
-      "api_url": "https://api.mistral.ai/v1"
+      "api_url": "https://api.mistral.ai/v1",
     },
     "vercel": {
-      "api_url": "https://api.v0.dev/v1"
+      "api_url": "https://api.v0.dev/v1",
     },
     "x_ai": {
-      "api_url": "https://api.x.ai/v1"
+      "api_url": "https://api.x.ai/v1",
     },
-    "zed.dev": {}
+    "zed.dev": {},
   },
   "session": {
     // Whether or not to restore unsaved buffers on restart.
@@ -2052,7 +2054,7 @@
     // dirty files when closing the application.
     //
     // Default: true
-    "restore_unsaved_buffers": true
+    "restore_unsaved_buffers": true,
   },
   // Zed's Prettier integration settings.
   // Allows to enable/disable formatting with Prettier
@@ -2070,11 +2072,11 @@
     // "singleQuote": true
     // Forces Prettier integration to use a specific parser name when formatting files with the language
     // when set to a non-empty string.
-    "parser": ""
+    "parser": "",
   },
   // Settings for auto-closing of JSX tags.
   "jsx_tag_auto_close": {
-    "enabled": true
+    "enabled": true,
   },
   // LSP Specific settings.
   "lsp": {
@@ -2095,19 +2097,19 @@
     // Specify the DAP name as a key here.
     "CodeLLDB": {
       "env": {
-        "RUST_LOG": "info"
-      }
-    }
+        "RUST_LOG": "info",
+      },
+    },
   },
   // Common language server settings.
   "global_lsp_settings": {
     // Whether to show the LSP servers button in the status bar.
-    "button": true
+    "button": true,
   },
   // Jupyter settings
   "jupyter": {
     "enabled": true,
-    "kernel_selections": {}
+    "kernel_selections": {},
     // Specify the language name as the key and the kernel name as the value.
     // "kernel_selections": {
     //    "python": "conda-base"
@@ -2121,7 +2123,7 @@
     "max_columns": 128,
     // Maximum number of lines to keep in REPL's scrollback buffer.
     // Clamped with [4, 256] range.
-    "max_lines": 32
+    "max_lines": 32,
   },
   // Vim settings
   "vim": {
@@ -2135,7 +2137,7 @@
     // Specify the mode as the key and the shape as the value.
     // The mode can be one of the following: "normal", "replace", "insert", "visual".
     // The shape can be one of the following: "block", "bar", "underline", "hollow".
-    "cursor_shape": {}
+    "cursor_shape": {},
   },
   // The server to connect to. If the environment variable
   // ZED_SERVER_URL is set, it will override this setting.
@@ -2168,9 +2170,9 @@
   "windows": {
     "languages": {
       "PHP": {
-        "language_servers": ["intelephense", "!phpactor", "!phptools", "..."]
-      }
-    }
+        "language_servers": ["intelephense", "!phpactor", "!phptools", "..."],
+      },
+    },
   },
   // Whether to show full labels in line indicator or short ones
   //

crates/agent_servers/src/acp.rs

@@ -11,8 +11,6 @@ use project::agent_server_store::AgentServerCommand;
 use serde::Deserialize;
 use settings::Settings as _;
 use task::ShellBuilder;
-#[cfg(windows)]
-use task::ShellKind;
 use util::ResultExt as _;
 
 use std::path::PathBuf;
@@ -92,23 +90,8 @@ impl AcpConnection {
     ) -> Result<Self> {
         let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone())?;
         let builder = ShellBuilder::new(&shell, cfg!(windows));
-        #[cfg(windows)]
-        let kind = builder.kind();
-        let (cmd, args) = builder.build(Some(command.path.display().to_string()), &command.args);
-
-        let mut child = util::command::new_smol_command(cmd);
-        #[cfg(windows)]
-        if kind == ShellKind::Cmd {
-            use smol::process::windows::CommandExt;
-            for arg in args {
-                child.raw_arg(arg);
-            }
-        } else {
-            child.args(args);
-        }
-        #[cfg(not(windows))]
-        child.args(args);
-
+        let mut child =
+            builder.build_command(Some(command.path.display().to_string()), &command.args);
         child
             .envs(command.env.iter().flatten())
             .stdin(std::process::Stdio::piped())

crates/context_server/src/transport/stdio_transport.rs

@@ -33,12 +33,10 @@ impl StdioTransport {
     ) -> Result<Self> {
         let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone())?;
         let builder = ShellBuilder::new(&shell, cfg!(windows));
-        let (command, args) =
-            builder.build(Some(binary.executable.display().to_string()), &binary.args);
+        let mut command =
+            builder.build_command(Some(binary.executable.display().to_string()), &binary.args);
 
-        let mut command = util::command::new_smol_command(command);
         command
-            .args(args)
             .envs(binary.env.unwrap_or_default())
             .stdin(std::process::Stdio::piped())
             .stdout(std::process::Stdio::piped())

crates/diagnostics/src/diagnostics.rs

@@ -1045,54 +1045,47 @@ async fn heuristic_syntactic_expand(
         let node_range = node_start..node_end;
         let row_count = node_end.row - node_start.row + 1;
         let mut ancestor_range = None;
-        let reached_outline_node = cx.background_executor().scoped({
-            let node_range = node_range.clone();
-            let outline_range = outline_range.clone();
-            let ancestor_range = &mut ancestor_range;
-            |scope| {
-                scope.spawn(async move {
-                    // Stop if we've exceeded the row count or reached an outline node. Then, find the interval
-                    // of node children which contains the query range. For example, this allows just returning
-                    // the header of a declaration rather than the entire declaration.
-                    if row_count > max_row_count || outline_range == Some(node_range.clone()) {
-                        let mut cursor = node.walk();
-                        let mut included_child_start = None;
-                        let mut included_child_end = None;
-                        let mut previous_end = node_start;
-                        if cursor.goto_first_child() {
-                            loop {
-                                let child_node = cursor.node();
-                                let child_range =
-                                    previous_end..Point::from_ts_point(child_node.end_position());
-                                if included_child_start.is_none()
-                                    && child_range.contains(&input_range.start)
-                                {
-                                    included_child_start = Some(child_range.start);
-                                }
-                                if child_range.contains(&input_range.end) {
-                                    included_child_end = Some(child_range.end);
-                                }
-                                previous_end = child_range.end;
-                                if !cursor.goto_next_sibling() {
-                                    break;
-                                }
+        cx.background_executor()
+            .await_on_background(async {
+                // Stop if we've exceeded the row count or reached an outline node. Then, find the interval
+                // of node children which contains the query range. For example, this allows just returning
+                // the header of a declaration rather than the entire declaration.
+                if row_count > max_row_count || outline_range == Some(node_range.clone()) {
+                    let mut cursor = node.walk();
+                    let mut included_child_start = None;
+                    let mut included_child_end = None;
+                    let mut previous_end = node_start;
+                    if cursor.goto_first_child() {
+                        loop {
+                            let child_node = cursor.node();
+                            let child_range =
+                                previous_end..Point::from_ts_point(child_node.end_position());
+                            if included_child_start.is_none()
+                                && child_range.contains(&input_range.start)
+                            {
+                                included_child_start = Some(child_range.start);
                             }
-                        }
-                        let end = included_child_end.unwrap_or(node_range.end);
-                        if let Some(start) = included_child_start {
-                            let row_count = end.row - start.row;
-                            if row_count < max_row_count {
-                                *ancestor_range =
-                                    Some(Some(RangeInclusive::new(start.row, end.row)));
-                                return;
+                            if child_range.contains(&input_range.end) {
+                                included_child_end = Some(child_range.end);
+                            }
+                            previous_end = child_range.end;
+                            if !cursor.goto_next_sibling() {
+                                break;
                             }
                         }
-                        *ancestor_range = Some(None);
                     }
-                })
-            }
-        });
-        reached_outline_node.await;
+                    let end = included_child_end.unwrap_or(node_range.end);
+                    if let Some(start) = included_child_start {
+                        let row_count = end.row - start.row;
+                        if row_count < max_row_count {
+                            ancestor_range = Some(Some(RangeInclusive::new(start.row, end.row)));
+                            return;
+                        }
+                    }
+                    ancestor_range = Some(None);
+                }
+            })
+            .await;
         if let Some(node) = ancestor_range {
             return node;
         }

crates/edit_prediction/Cargo.toml

@@ -12,7 +12,7 @@ workspace = true
 path = "src/edit_prediction.rs"
 
 [features]
-eval-support = []
+cli-support = []
 
 [dependencies]
 ai_onboarding.workspace = true

crates/edit_prediction/src/edit_prediction.rs 🔗

@@ -55,7 +55,7 @@ pub mod open_ai_response;
 mod prediction;
 pub mod sweep_ai;
 
-#[cfg(any(test, feature = "test-support", feature = "eval-support"))]
+#[cfg(any(test, feature = "test-support", feature = "cli-support"))]
 pub mod udiff;
 
 mod zed_edit_prediction_delegate;
@@ -158,7 +158,7 @@ pub struct EditPredictionStore {
     use_context: bool,
     options: ZetaOptions,
     update_required: bool,
-    #[cfg(feature = "eval-support")]
+    #[cfg(feature = "cli-support")]
     eval_cache: Option<Arc<dyn EvalCache>>,
     edit_prediction_model: EditPredictionModel,
     pub sweep_ai: SweepAi,
@@ -283,6 +283,18 @@ impl ProjectState {
         })
         .detach()
     }
+
+    fn active_buffer(
+        &self,
+        project: &Entity<Project>,
+        cx: &App,
+    ) -> Option<(Entity<Buffer>, Option<Anchor>)> {
+        let project = project.read(cx);
+        let active_path = project.path_for_entry(project.active_entry()?, cx)?;
+        let active_buffer = project.buffer_store().read(cx).get_by_path(&active_path)?;
+        let registered_buffer = self.registered_buffers.get(&active_buffer.entity_id())?;
+        Some((active_buffer, registered_buffer.last_position))
+    }
 }
 
 #[derive(Debug, Clone)]
@@ -373,6 +385,7 @@ impl std::ops::Deref for BufferEditPrediction<'_> {
 
 struct RegisteredBuffer {
     snapshot: BufferSnapshot,
+    last_position: Option<Anchor>,
     _subscriptions: [gpui::Subscription; 2],
 }
 
@@ -492,7 +505,7 @@ impl EditPredictionStore {
                 },
             ),
             update_required: false,
-            #[cfg(feature = "eval-support")]
+            #[cfg(feature = "cli-support")]
             eval_cache: None,
             edit_prediction_model: EditPredictionModel::Zeta2,
             sweep_ai: SweepAi::new(cx),
@@ -541,7 +554,7 @@ impl EditPredictionStore {
             .is_some()
     }
 
-    #[cfg(feature = "eval-support")]
+    #[cfg(feature = "cli-support")]
     pub fn with_eval_cache(&mut self, cache: Arc<dyn EvalCache>) {
         self.eval_cache = Some(cache);
     }
@@ -564,6 +577,12 @@ impl EditPredictionStore {
         }
     }
 
+    pub fn clear_history_for_project(&mut self, project: &Entity<Project>) {
+        if let Some(project_state) = self.projects.get_mut(&project.entity_id()) {
+            project_state.events.clear();
+        }
+    }
+
     pub fn edit_history_for_project(
         &self,
         project: &Entity<Project>,
@@ -795,6 +814,7 @@ impl EditPredictionStore {
                 let project_entity_id = project.entity_id();
                 entry.insert(RegisteredBuffer {
                     snapshot,
+                    last_position: None,
                     _subscriptions: [
                         cx.subscribe(buffer, {
                             let project = project.downgrade();
@@ -882,13 +902,21 @@ impl EditPredictionStore {
         });
     }
 
-    fn current_prediction_for_buffer(
-        &self,
+    fn prediction_at(
+        &mut self,
         buffer: &Entity<Buffer>,
+        position: Option<language::Anchor>,
         project: &Entity<Project>,
         cx: &App,
     ) -> Option<BufferEditPrediction<'_>> {
-        let project_state = self.projects.get(&project.entity_id())?;
+        let project_state = self.projects.get_mut(&project.entity_id())?;
+        if let Some(position) = position
+            && let Some(buffer) = project_state
+                .registered_buffers
+                .get_mut(&buffer.entity_id())
+        {
+            buffer.last_position = Some(position);
+        }
 
         let CurrentEditPrediction {
             requested_by,
@@ -1131,12 +1159,21 @@ impl EditPredictionStore {
         };
 
         self.queue_prediction_refresh(project.clone(), project.entity_id(), cx, move |this, cx| {
-            let Some(open_buffer_task) = project
-                .update(cx, |project, cx| {
-                    project
-                        .active_entry()
-                        .and_then(|entry| project.path_for_entry(entry, cx))
-                        .map(|path| project.open_buffer(path, cx))
+            let Some((active_buffer, snapshot, cursor_point)) = this
+                .read_with(cx, |this, cx| {
+                    let project_state = this.projects.get(&project.entity_id())?;
+                    let (buffer, position) = project_state.active_buffer(&project, cx)?;
+                    let snapshot = buffer.read(cx).snapshot();
+
+                    if !Self::predictions_enabled_at(&snapshot, position, cx) {
+                        return None;
+                    }
+
+                    let cursor_point = position
+                        .map(|pos| pos.to_point(&snapshot))
+                        .unwrap_or_default();
+
+                    Some((buffer, snapshot, cursor_point))
                 })
                 .log_err()
                 .flatten()
@@ -1145,14 +1182,11 @@ impl EditPredictionStore {
             };
 
             cx.spawn(async move |cx| {
-                let active_buffer = open_buffer_task.await?;
-                let snapshot = active_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
-
                 let Some((jump_buffer, jump_position)) = Self::next_diagnostic_location(
                     active_buffer,
                     &snapshot,
                     Default::default(),
-                    Default::default(),
+                    cursor_point,
                     &project,
                     cx,
                 )
@@ -1197,6 +1231,37 @@ impl EditPredictionStore {
         });
     }
 
+    fn predictions_enabled_at(
+        snapshot: &BufferSnapshot,
+        position: Option<language::Anchor>,
+        cx: &App,
+    ) -> bool {
+        let file = snapshot.file();
+        let all_settings = all_language_settings(file, cx);
+        if !all_settings.show_edit_predictions(snapshot.language(), cx)
+            || file.is_some_and(|file| !all_settings.edit_predictions_enabled_for_file(file, cx))
+        {
+            return false;
+        }
+
+        if let Some(last_position) = position {
+            let settings = snapshot.settings_at(last_position, cx);
+
+            if !settings.edit_predictions_disabled_in.is_empty()
+                && let Some(scope) = snapshot.language_scope_at(last_position)
+                && let Some(scope_name) = scope.override_name()
+                && settings
+                    .edit_predictions_disabled_in
+                    .iter()
+                    .any(|s| s == scope_name)
+            {
+                return false;
+            }
+        }
+
+        true
+    }
+
     #[cfg(not(test))]
     pub const THROTTLE_TIMEOUT: Duration = Duration::from_millis(300);
     #[cfg(test)]
@@ -1531,8 +1596,8 @@ impl EditPredictionStore {
         client: Arc<Client>,
         llm_token: LlmApiToken,
         app_version: Version,
-        #[cfg(feature = "eval-support")] eval_cache: Option<Arc<dyn EvalCache>>,
-        #[cfg(feature = "eval-support")] eval_cache_kind: EvalCacheEntryKind,
+        #[cfg(feature = "cli-support")] eval_cache: Option<Arc<dyn EvalCache>>,
+        #[cfg(feature = "cli-support")] eval_cache_kind: EvalCacheEntryKind,
     ) -> Result<(open_ai::Response, Option<EditPredictionUsage>)> {
         let url = if let Some(predict_edits_url) = PREDICT_EDITS_URL.as_ref() {
             http_client::Url::parse(&predict_edits_url)?
@@ -1542,7 +1607,7 @@ impl EditPredictionStore {
                 .build_zed_llm_url("/predict_edits/raw", &[])?
         };
 
-        #[cfg(feature = "eval-support")]
+        #[cfg(feature = "cli-support")]
         let cache_key = if let Some(cache) = eval_cache {
             use collections::FxHasher;
             use std::hash::{Hash, Hasher};
@@ -1576,7 +1641,7 @@ impl EditPredictionStore {
         )
         .await?;
 
-        #[cfg(feature = "eval-support")]
+        #[cfg(feature = "cli-support")]
         if let Some((cache, request, key)) = cache_key {
             cache.write(key, &request, &serde_json::to_string_pretty(&response)?);
         }
@@ -1708,7 +1773,7 @@ impl EditPredictionStore {
         }
     }
 
-    #[cfg(feature = "eval-support")]
+    #[cfg(feature = "cli-support")]
     pub fn set_context_for_buffer(
         &mut self,
         project: &Entity<Project>,
@@ -1833,10 +1898,10 @@ pub struct ZedUpdateRequiredError {
     minimum_version: Version,
 }
 
-#[cfg(feature = "eval-support")]
+#[cfg(feature = "cli-support")]
 pub type EvalCacheKey = (EvalCacheEntryKind, u64);
 
-#[cfg(feature = "eval-support")]
+#[cfg(feature = "cli-support")]
 #[derive(Debug, Clone, Copy, PartialEq)]
 pub enum EvalCacheEntryKind {
     Context,
@@ -1844,7 +1909,7 @@ pub enum EvalCacheEntryKind {
     Prediction,
 }
 
-#[cfg(feature = "eval-support")]
+#[cfg(feature = "cli-support")]
 impl std::fmt::Display for EvalCacheEntryKind {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
@@ -1855,7 +1920,7 @@ impl std::fmt::Display for EvalCacheEntryKind {
     }
 }
 
-#[cfg(feature = "eval-support")]
+#[cfg(feature = "cli-support")]
 pub trait EvalCache: Send + Sync {
     fn read(&self, key: EvalCacheKey) -> Option<String>;
     fn write(&self, key: EvalCacheKey, input: &str, value: &str);

crates/edit_prediction/src/edit_prediction_tests.rs 🔗

@@ -45,10 +45,6 @@ async fn test_current_state(cx: &mut TestAppContext) {
     .await;
     let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;
 
-    ep_store.update(cx, |ep_store, cx| {
-        ep_store.register_project(&project, cx);
-    });
-
     let buffer1 = project
         .update(cx, |project, cx| {
             let path = project.find_project_path(path!("/root/1.txt"), cx).unwrap();
@@ -60,6 +56,11 @@ async fn test_current_state(cx: &mut TestAppContext) {
     let snapshot1 = buffer1.read_with(cx, |buffer, _cx| buffer.snapshot());
     let position = snapshot1.anchor_before(language::Point::new(1, 3));
 
+    ep_store.update(cx, |ep_store, cx| {
+        ep_store.register_project(&project, cx);
+        ep_store.register_buffer(&buffer1, &project, cx);
+    });
+
     // Prediction for current file
 
     ep_store.update(cx, |ep_store, cx| {
@@ -84,9 +85,9 @@ async fn test_current_state(cx: &mut TestAppContext) {
 
     cx.run_until_parked();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         let prediction = ep_store
-            .current_prediction_for_buffer(&buffer1, &project, cx)
+            .prediction_at(&buffer1, None, &project, cx)
             .unwrap();
         assert_matches!(prediction, BufferEditPrediction::Local { .. });
     });
@@ -140,9 +141,9 @@ async fn test_current_state(cx: &mut TestAppContext) {
         .unwrap();
     cx.run_until_parked();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         let prediction = ep_store
-            .current_prediction_for_buffer(&buffer1, &project, cx)
+            .prediction_at(&buffer1, None, &project, cx)
             .unwrap();
         assert_matches!(
             prediction,
@@ -158,9 +159,9 @@ async fn test_current_state(cx: &mut TestAppContext) {
         .await
         .unwrap();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         let prediction = ep_store
-            .current_prediction_for_buffer(&buffer2, &project, cx)
+            .prediction_at(&buffer2, None, &project, cx)
             .unwrap();
         assert_matches!(prediction, BufferEditPrediction::Local { .. });
     });
@@ -344,10 +345,10 @@ async fn test_empty_prediction(cx: &mut TestAppContext) {
 
     cx.run_until_parked();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         assert!(
             ep_store
-                .current_prediction_for_buffer(&buffer, &project, cx)
+                .prediction_at(&buffer, None, &project, cx)
                 .is_none()
         );
     });
@@ -404,10 +405,10 @@ async fn test_interpolated_empty(cx: &mut TestAppContext) {
 
     cx.run_until_parked();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         assert!(
             ep_store
-                .current_prediction_for_buffer(&buffer, &project, cx)
+                .prediction_at(&buffer, None, &project, cx)
                 .is_none()
         );
     });
@@ -469,10 +470,10 @@ async fn test_replace_current(cx: &mut TestAppContext) {
 
     cx.run_until_parked();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         assert_eq!(
             ep_store
-                .current_prediction_for_buffer(&buffer, &project, cx)
+                .prediction_at(&buffer, None, &project, cx)
                 .unwrap()
                 .id
                 .0,
@@ -492,11 +493,11 @@ async fn test_replace_current(cx: &mut TestAppContext) {
 
     cx.run_until_parked();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         // second replaces first
         assert_eq!(
             ep_store
-                .current_prediction_for_buffer(&buffer, &project, cx)
+                .prediction_at(&buffer, None, &project, cx)
                 .unwrap()
                 .id
                 .0,
@@ -551,10 +552,10 @@ async fn test_current_preferred(cx: &mut TestAppContext) {
 
     cx.run_until_parked();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         assert_eq!(
             ep_store
-                .current_prediction_for_buffer(&buffer, &project, cx)
+                .prediction_at(&buffer, None, &project, cx)
                 .unwrap()
                 .id
                 .0,
@@ -586,11 +587,11 @@ async fn test_current_preferred(cx: &mut TestAppContext) {
 
     cx.run_until_parked();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         // first is preferred over second
         assert_eq!(
             ep_store
-                .current_prediction_for_buffer(&buffer, &project, cx)
+                .prediction_at(&buffer, None, &project, cx)
                 .unwrap()
                 .id
                 .0,
@@ -657,11 +658,11 @@ async fn test_cancel_earlier_pending_requests(cx: &mut TestAppContext) {
 
     cx.run_until_parked();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         // current prediction is second
         assert_eq!(
             ep_store
-                .current_prediction_for_buffer(&buffer, &project, cx)
+                .prediction_at(&buffer, None, &project, cx)
                 .unwrap()
                 .id
                 .0,
@@ -675,11 +676,11 @@ async fn test_cancel_earlier_pending_requests(cx: &mut TestAppContext) {
 
     cx.run_until_parked();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         // current prediction is still second, since first was cancelled
         assert_eq!(
             ep_store
-                .current_prediction_for_buffer(&buffer, &project, cx)
+                .prediction_at(&buffer, None, &project, cx)
                 .unwrap()
                 .id
                 .0,
@@ -768,11 +769,11 @@ async fn test_cancel_second_on_third_request(cx: &mut TestAppContext) {
 
     cx.run_until_parked();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         // current prediction is first
         assert_eq!(
             ep_store
-                .current_prediction_for_buffer(&buffer, &project, cx)
+                .prediction_at(&buffer, None, &project, cx)
                 .unwrap()
                 .id
                 .0,
@@ -786,11 +787,11 @@ async fn test_cancel_second_on_third_request(cx: &mut TestAppContext) {
 
     cx.run_until_parked();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         // current prediction is still first, since second was cancelled
         assert_eq!(
             ep_store
-                .current_prediction_for_buffer(&buffer, &project, cx)
+                .prediction_at(&buffer, None, &project, cx)
                 .unwrap()
                 .id
                 .0,
@@ -804,11 +805,11 @@ async fn test_cancel_second_on_third_request(cx: &mut TestAppContext) {
 
     cx.run_until_parked();
 
-    ep_store.read_with(cx, |ep_store, cx| {
+    ep_store.update(cx, |ep_store, cx| {
         // third completes and replaces first
         assert_eq!(
             ep_store
-                .current_prediction_for_buffer(&buffer, &project, cx)
+                .prediction_at(&buffer, None, &project, cx)
                 .unwrap()
                 .id
                 .0,

crates/edit_prediction/src/license_detection.rs 🔗

@@ -735,6 +735,7 @@ mod tests {
             true,
             fs.clone(),
             Default::default(),
+            true,
             &mut cx.to_async(),
         )
         .await
@@ -758,6 +759,7 @@ mod tests {
             true,
             fs.clone(),
             Default::default(),
+            true,
             &mut cx.to_async(),
         )
         .await
@@ -816,6 +818,7 @@ mod tests {
             true,
             fs.clone(),
             Default::default(),
+            true,
             &mut cx.to_async(),
         )
         .await

crates/edit_prediction/src/udiff.rs 🔗

@@ -15,7 +15,9 @@ use collections::HashMap;
 use gpui::AsyncApp;
 use gpui::Entity;
 use language::{Anchor, Buffer, OffsetRangeExt as _, TextBufferSnapshot};
-use project::Project;
+use project::{Project, ProjectPath};
+use util::paths::PathStyle;
+use util::rel_path::RelPath;
 
 #[derive(Clone, Debug)]
 pub struct OpenedBuffers(#[allow(unused)] HashMap<String, Entity<Buffer>>);
@@ -28,18 +30,27 @@ pub async fn apply_diff(
 ) -> Result<OpenedBuffers> {
     let mut included_files = HashMap::default();
 
+    let worktree_id = project.read_with(cx, |project, cx| {
+        anyhow::Ok(
+            project
+                .visible_worktrees(cx)
+                .next()
+                .context("no worktrees")?
+                .read(cx)
+                .id(),
+        )
+    })??;
+
     for line in diff_str.lines() {
         let diff_line = DiffLine::parse(line);
 
         if let DiffLine::OldPath { path } = diff_line {
             let buffer = project
                 .update(cx, |project, cx| {
-                    let project_path =
-                        project
-                            .find_project_path(path.as_ref(), cx)
-                            .with_context(|| {
-                                format!("Failed to find worktree for new path: {}", path)
-                            })?;
+                    let project_path = ProjectPath {
+                        worktree_id,
+                        path: RelPath::new(Path::new(path.as_ref()), PathStyle::Posix)?.into_arc(),
+                    };
                     anyhow::Ok(project.open_buffer(project_path, cx))
                 })??
                 .await?;
@@ -127,7 +138,7 @@ pub fn apply_diff_to_string(diff_str: &str, text: &str) -> Result<String> {
             DiffEvent::Hunk { hunk, .. } => {
                 let hunk_offset = text
                     .find(&hunk.context)
-                    .ok_or_else(|| anyhow!("couldn't result hunk {:?}", hunk.context))?;
+                    .ok_or_else(|| anyhow!("couldn't resolve hunk {:?}", hunk.context))?;
                 for edit in hunk.edits.iter().rev() {
                     let range = (hunk_offset + edit.range.start)..(hunk_offset + edit.range.end);
                     text.replace_range(range, &edit.text);
@@ -726,38 +737,38 @@ mod tests {
         let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
 
         let diff = indoc! {r#"
-            --- a/root/file1
-            +++ b/root/file1
+            --- a/file1
+            +++ b/file1
              one
              two
             -three
             +3
              four
              five
-            --- a/root/file1
-            +++ b/root/file1
+            --- a/file1
+            +++ b/file1
              3
             -four
             -five
             +4
             +5
-            --- a/root/file1
-            +++ b/root/file1
+            --- a/file1
+            +++ b/file1
             -one
             -two
              3
              4
-            --- a/root/file2
-            +++ b/root/file2
+            --- a/file2
+            +++ b/file2
             +5
              six
-            --- a/root/file2
-            +++ b/root/file2
+            --- a/file2
+            +++ b/file2
              seven
             +7.5
              eight
-            --- a/root/file2
-            +++ b/root/file2
+            --- a/file2
+            +++ b/file2
              ten
             +11
         "#};
@@ -826,8 +837,8 @@ mod tests {
         let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
 
         let diff = indoc! {r#"
-            --- a/root/file1
-            +++ b/root/file1
+            --- a/file1
+            +++ b/file1
              one
              two
             -three

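For reference, the hunk-application strategy visible in `apply_diff_to_string` above is: locate the hunk's context string in the target text, then apply the hunk's edits back to front so earlier byte offsets stay valid. A hedged standalone sketch of that step, assuming `anyhow` is available as it is elsewhere in this crate (the `Edit` type is a stand-in, not the crate's real one):

struct Edit {
    range: std::ops::Range<usize>,
    text: String,
}

fn apply_hunk(text: &mut String, context: &str, edits: &[Edit]) -> anyhow::Result<()> {
    // Find where this hunk's context begins in the current text.
    let hunk_offset = text
        .find(context)
        .ok_or_else(|| anyhow::anyhow!("couldn't resolve hunk {:?}", context))?;
    // Apply edits in reverse so that earlier edit offsets are not shifted.
    for edit in edits.iter().rev() {
        let range = (hunk_offset + edit.range.start)..(hunk_offset + edit.range.end);
        text.replace_range(range, &edit.text);
    }
    Ok(())
}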
crates/edit_prediction/src/zed_edit_prediction_delegate.rs 🔗

@@ -125,14 +125,15 @@ impl EditPredictionDelegate for ZedEditPredictionDelegate {
             return;
         }
 
-        if let Some(current) = store.current_prediction_for_buffer(&buffer, &self.project, cx)
-            && let BufferEditPrediction::Local { prediction } = current
-            && prediction.interpolate(buffer.read(cx)).is_some()
-        {
-            return;
-        }
-
         self.store.update(cx, |store, cx| {
+            if let Some(current) =
+                store.prediction_at(&buffer, Some(cursor_position), &self.project, cx)
+                && let BufferEditPrediction::Local { prediction } = current
+                && prediction.interpolate(buffer.read(cx)).is_some()
+            {
+                return;
+            }
+
             store.refresh_context(&self.project, &buffer, cursor_position, cx);
             store.refresh_prediction_from_buffer(self.project.clone(), buffer, cursor_position, cx)
         });
@@ -171,69 +172,68 @@ impl EditPredictionDelegate for ZedEditPredictionDelegate {
         cursor_position: language::Anchor,
         cx: &mut Context<Self>,
     ) -> Option<edit_prediction_types::EditPrediction> {
-        let prediction =
-            self.store
-                .read(cx)
-                .current_prediction_for_buffer(buffer, &self.project, cx)?;
-
-        let prediction = match prediction {
-            BufferEditPrediction::Local { prediction } => prediction,
-            BufferEditPrediction::Jump { prediction } => {
-                return Some(edit_prediction_types::EditPrediction::Jump {
-                    id: Some(prediction.id.to_string().into()),
-                    snapshot: prediction.snapshot.clone(),
-                    target: prediction.edits.first().unwrap().0.start,
-                });
-            }
-        };
+        self.store.update(cx, |store, cx| {
+            let prediction =
+                store.prediction_at(buffer, Some(cursor_position), &self.project, cx)?;
+
+            let prediction = match prediction {
+                BufferEditPrediction::Local { prediction } => prediction,
+                BufferEditPrediction::Jump { prediction } => {
+                    return Some(edit_prediction_types::EditPrediction::Jump {
+                        id: Some(prediction.id.to_string().into()),
+                        snapshot: prediction.snapshot.clone(),
+                        target: prediction.edits.first().unwrap().0.start,
+                    });
+                }
+            };
 
-        let buffer = buffer.read(cx);
-        let snapshot = buffer.snapshot();
+            let buffer = buffer.read(cx);
+            let snapshot = buffer.snapshot();
 
-        let Some(edits) = prediction.interpolate(&snapshot) else {
-            self.store.update(cx, |store, _cx| {
+            let Some(edits) = prediction.interpolate(&snapshot) else {
                 store.reject_current_prediction(
                     EditPredictionRejectReason::InterpolatedEmpty,
                     &self.project,
                 );
-            });
-            return None;
-        };
-
-        let cursor_row = cursor_position.to_point(&snapshot).row;
-        let (closest_edit_ix, (closest_edit_range, _)) =
-            edits.iter().enumerate().min_by_key(|(_, (range, _))| {
-                let distance_from_start = cursor_row.abs_diff(range.start.to_point(&snapshot).row);
-                let distance_from_end = cursor_row.abs_diff(range.end.to_point(&snapshot).row);
-                cmp::min(distance_from_start, distance_from_end)
-            })?;
-
-        let mut edit_start_ix = closest_edit_ix;
-        for (range, _) in edits[..edit_start_ix].iter().rev() {
-            let distance_from_closest_edit = closest_edit_range.start.to_point(&snapshot).row
-                - range.end.to_point(&snapshot).row;
-            if distance_from_closest_edit <= 1 {
-                edit_start_ix -= 1;
-            } else {
-                break;
+                return None;
+            };
+
+            let cursor_row = cursor_position.to_point(&snapshot).row;
+            let (closest_edit_ix, (closest_edit_range, _)) =
+                edits.iter().enumerate().min_by_key(|(_, (range, _))| {
+                    let distance_from_start =
+                        cursor_row.abs_diff(range.start.to_point(&snapshot).row);
+                    let distance_from_end = cursor_row.abs_diff(range.end.to_point(&snapshot).row);
+                    cmp::min(distance_from_start, distance_from_end)
+                })?;
+
+            let mut edit_start_ix = closest_edit_ix;
+            for (range, _) in edits[..edit_start_ix].iter().rev() {
+                let distance_from_closest_edit = closest_edit_range.start.to_point(&snapshot).row
+                    - range.end.to_point(&snapshot).row;
+                if distance_from_closest_edit <= 1 {
+                    edit_start_ix -= 1;
+                } else {
+                    break;
+                }
             }
-        }
 
-        let mut edit_end_ix = closest_edit_ix + 1;
-        for (range, _) in &edits[edit_end_ix..] {
-            let distance_from_closest_edit =
-                range.start.to_point(buffer).row - closest_edit_range.end.to_point(&snapshot).row;
-            if distance_from_closest_edit <= 1 {
-                edit_end_ix += 1;
-            } else {
-                break;
+            let mut edit_end_ix = closest_edit_ix + 1;
+            for (range, _) in &edits[edit_end_ix..] {
+                let distance_from_closest_edit = range.start.to_point(buffer).row
+                    - closest_edit_range.end.to_point(&snapshot).row;
+                if distance_from_closest_edit <= 1 {
+                    edit_end_ix += 1;
+                } else {
+                    break;
+                }
             }
-        }
 
-        Some(edit_prediction_types::EditPrediction::Local {
-            id: Some(prediction.id.to_string().into()),
-            edits: edits[edit_start_ix..edit_end_ix].to_vec(),
-            edit_preview: Some(prediction.edit_preview.clone()),
+            Some(edit_prediction_types::EditPrediction::Local {
+                id: Some(prediction.id.to_string().into()),
+                edits: edits[edit_start_ix..edit_end_ix].to_vec(),
+                edit_preview: Some(prediction.edit_preview.clone()),
+            })
         })
     }
 }

crates/edit_prediction/src/zeta2.rs 🔗

@@ -1,4 +1,4 @@
-#[cfg(feature = "eval-support")]
+#[cfg(feature = "cli-support")]
 use crate::EvalCacheEntryKind;
 use crate::open_ai_response::text_from_response;
 use crate::prediction::EditPredictionResult;
@@ -44,7 +44,7 @@ pub fn request_prediction_with_zeta2(
     let llm_token = store.llm_token.clone();
     let app_version = AppVersion::global(cx);
 
-    #[cfg(feature = "eval-support")]
+    #[cfg(feature = "cli-support")]
     let eval_cache = store.eval_cache.clone();
 
     let request_task = cx.background_spawn({
@@ -95,9 +95,9 @@ pub fn request_prediction_with_zeta2(
                 client,
                 llm_token,
                 app_version,
-                #[cfg(feature = "eval-support")]
+                #[cfg(feature = "cli-support")]
                 eval_cache,
-                #[cfg(feature = "eval-support")]
+                #[cfg(feature = "cli-support")]
                 EvalCacheEntryKind::Prediction,
             )
             .await;
@@ -226,3 +226,15 @@ pub fn zeta2_prompt_input(
     };
     (editable_offset_range, prompt_input)
 }
+
+#[cfg(feature = "cli-support")]
+pub fn zeta2_output_for_patch(input: &zeta_prompt::ZetaPromptInput, patch: &str) -> String {
+    eprintln!("{}", patch);
+    eprintln!("---------------------");
+    eprintln!("{}", input.cursor_excerpt);
+    crate::udiff::apply_diff_to_string(
+        patch,
+        &input.cursor_excerpt[input.editable_range_in_excerpt.clone()],
+    )
+    .unwrap()
+}

crates/edit_prediction_cli/Cargo.toml 🔗

@@ -34,6 +34,7 @@ language_extension.workspace = true
 language_model.workspace = true
 language_models.workspace = true
 languages = { workspace = true, features = ["load-grammars"] }
+libc.workspace = true
 log.workspace = true
 node_runtime.workspace = true
 paths.workspace = true
@@ -52,10 +53,9 @@ sqlez_macros.workspace = true
 terminal_view.workspace = true
 util.workspace = true
 watch.workspace = true
-edit_prediction = { workspace = true, features = ["eval-support"] }
+edit_prediction = { workspace = true, features = ["cli-support"] }
 wasmtime.workspace = true
 zeta_prompt.workspace = true
-zlog.workspace = true
 
 # Wasmtime is included as a dependency in order to enable the same
 # features that are enabled in Zed.

crates/edit_prediction_cli/src/distill.rs 🔗

@@ -0,0 +1,14 @@
+use std::mem;
+
+use crate::example::Example;
+
+pub async fn run_distill(example: &mut Example) {
+    let [prediction]: [_; 1] = mem::take(&mut example.predictions)
+        .try_into()
+        .expect("Run predict first with a single repetition");
+
+    example.expected_patch = prediction.actual_patch;
+    example.prompt = None;
+    example.predictions = Vec::new();
+    example.score = Vec::new();
+}

crates/edit_prediction_cli/src/example.rs 🔗

@@ -1,9 +1,6 @@
-use crate::{
-    PredictionProvider, PromptFormat,
-    metrics::ClassificationMetrics,
-    paths::{REPOS_DIR, WORKTREES_DIR},
-};
+use crate::{PredictionProvider, PromptFormat, metrics::ClassificationMetrics};
 use anyhow::{Context as _, Result};
+use collections::HashMap;
 use edit_prediction::udiff::OpenedBuffers;
 use gpui::Entity;
 use http_client::Url;
@@ -25,6 +22,7 @@ pub struct Example {
     pub name: String,
     pub repository_url: String,
     pub revision: String,
+    #[serde(default)]
     pub uncommitted_diff: String,
     pub cursor_path: Arc<Path>,
     pub cursor_position: String,
@@ -101,7 +99,7 @@ pub struct ExampleScore {
 }
 
 impl Example {
-    fn repo_name(&self) -> Result<(Cow<'_, str>, Cow<'_, str>)> {
+    pub fn repo_name(&self) -> Result<(Cow<'_, str>, Cow<'_, str>)> {
         // git@github.com:owner/repo.git
         if self.repository_url.contains('@') {
             let (owner, repo) = self
@@ -133,17 +131,6 @@ impl Example {
             Ok((owner.into(), repo.into()))
         }
     }
-
-    pub fn worktree_path(&self) -> PathBuf {
-        WORKTREES_DIR
-            .join(&self.name)
-            .join(self.repo_name().unwrap().1.as_ref())
-    }
-
-    pub fn repo_path(&self) -> PathBuf {
-        let (repo_owner, repo_name) = self.repo_name().expect("failed to get repo name");
-        REPOS_DIR.join(repo_owner.as_ref()).join(repo_name.as_ref())
-    }
 }
 
 pub fn read_examples(inputs: &[PathBuf]) -> Vec<Example> {
@@ -195,9 +182,9 @@ pub fn read_examples(inputs: &[PathBuf]) -> Vec<Example> {
                     .enumerate()
                     .map(|(line_ix, line)| {
                         let mut example =
-                            serde_json::from_str::<Example>(line).unwrap_or_else(|_| {
+                            serde_json::from_str::<Example>(line).unwrap_or_else(|error| {
                                 panic!(
-                                    "Failed to parse example on {}:{}",
+                                    "Failed to parse example on {}:{}\n{error}",
                                     path.display(),
                                     line_ix + 1
                                 )
@@ -217,6 +204,8 @@ pub fn read_examples(inputs: &[PathBuf]) -> Vec<Example> {
             }
         }
     }
+
+    sort_examples_by_repo_and_rev(&mut examples);
     examples
 }
 
@@ -234,6 +223,25 @@ pub fn write_examples(examples: &[Example], output_path: Option<&PathBuf>) {
     }
 }
 
+pub fn sort_examples_by_repo_and_rev(examples: &mut [Example]) {
+    examples.sort_by(|a, b| {
+        a.repository_url
+            .cmp(&b.repository_url)
+            .then(b.revision.cmp(&a.revision))
+    });
+}
+
+pub fn group_examples_by_repo(examples: &mut [Example]) -> Vec<Vec<&mut Example>> {
+    let mut examples_by_repo = HashMap::default();
+    for example in examples.iter_mut() {
+        examples_by_repo
+            .entry(example.repository_url.clone())
+            .or_insert_with(Vec::new)
+            .push(example);
+    }
+    examples_by_repo.into_values().collect()
+}
+
 fn parse_markdown_example(id: String, input: &str) -> Result<Example> {
     use pulldown_cmark::{CodeBlockKind, CowStr, Event, HeadingLevel, Parser, Tag, TagEnd};
 
@@ -264,12 +272,12 @@ fn parse_markdown_example(id: String, input: &str) -> Result<Example> {
         state: None,
     };
 
-    let mut name = String::new();
     let mut text = String::new();
     let mut block_info: CowStr = "".into();
 
     #[derive(PartialEq)]
     enum Section {
+        Start,
         UncommittedDiff,
         EditHistory,
         CursorPosition,
@@ -278,14 +286,16 @@ fn parse_markdown_example(id: String, input: &str) -> Result<Example> {
         Other,
     }
 
-    let mut current_section = Section::Other;
+    let mut current_section = Section::Start;
 
     for event in parser {
         match event {
             Event::Text(line) => {
                 text.push_str(&line);
 
-                if let Some((field, value)) = line.split_once('=') {
+                if let Section::Start = current_section
+                    && let Some((field, value)) = line.split_once('=')
+                {
                     match field.trim() {
                         REPOSITORY_URL_FIELD => {
                             example.repository_url = value.trim().to_string();
@@ -297,14 +307,6 @@ fn parse_markdown_example(id: String, input: &str) -> Result<Example> {
                     }
                 }
             }
-            Event::End(TagEnd::Heading(HeadingLevel::H1)) => {
-                if !name.is_empty() {
-                    anyhow::bail!(
-                        "Found multiple H1 headings. There should only be one with the name of the example."
-                    );
-                }
-                name = mem::take(&mut text);
-            }
             Event::End(TagEnd::Heading(HeadingLevel::H2)) => {
                 let title = mem::take(&mut text);
                 current_section = if title.eq_ignore_ascii_case(UNCOMMITTED_DIFF_HEADING) {
@@ -363,7 +365,7 @@ fn parse_markdown_example(id: String, input: &str) -> Result<Example> {
                     Section::ExpectedPatch => {
                         example.expected_patch = mem::take(&mut text);
                     }
-                    Section::Other => {}
+                    Section::Start | Section::Other => {}
                 }
             }
             _ => {}

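A small illustration of the ordering `sort_examples_by_repo_and_rev` above produces: ascending by repository URL, then descending by revision, presumably so that examples sharing a repository (and revision) are processed back to back. `mk` is a hypothetical helper that fills in the remaining `Example` fields:

let mut examples = vec![
    mk("https://github.com/a/a", "beta"),
    mk("https://github.com/b/b", "v1"),
    mk("https://github.com/a/a", "alpha"),
];
sort_examples_by_repo_and_rev(&mut examples);
let order: Vec<_> = examples
    .iter()
    .map(|e| (e.repository_url.as_str(), e.revision.as_str()))
    .collect();
assert_eq!(
    order,
    vec![
        ("https://github.com/a/a", "beta"),
        ("https://github.com/a/a", "alpha"),
        ("https://github.com/b/b", "v1"),
    ]
);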
crates/edit_prediction_cli/src/format_prompt.rs 🔗

@@ -2,9 +2,14 @@ use crate::{
     PromptFormat,
     example::{Example, ExamplePrompt},
     headless::EpAppState,
+    load_project::run_load_project,
+    progress::{Progress, Step},
     retrieve_context::run_context_retrieval,
 };
-use edit_prediction::{EditPredictionStore, zeta2::zeta2_prompt_input};
+use edit_prediction::{
+    EditPredictionStore,
+    zeta2::{zeta2_output_for_patch, zeta2_prompt_input},
+};
 use gpui::AsyncApp;
 use std::sync::Arc;
 use zeta_prompt::format_zeta_prompt;
@@ -15,11 +20,22 @@ pub async fn run_format_prompt(
     app_state: Arc<EpAppState>,
     mut cx: AsyncApp,
 ) {
-    run_context_retrieval(example, app_state, cx.clone()).await;
-
-    let prompt = match prompt_format {
-        PromptFormat::Teacher => TeacherPrompt::format(example),
+    run_context_retrieval(example, app_state.clone(), cx.clone()).await;
+
+    let _step_progress = Progress::global().start(Step::FormatPrompt, &example.name);
+
+    match prompt_format {
+        PromptFormat::Teacher => {
+            let prompt = TeacherPrompt::format_prompt(example);
+            example.prompt = Some(ExamplePrompt {
+                input: prompt,
+                expected_output: example.expected_patch.clone(), // TODO
+                format: prompt_format,
+            });
+        }
         PromptFormat::Zeta2 => {
+            run_load_project(example, app_state, cx.clone()).await;
+
             let ep_store = cx
                 .update(|cx| EditPredictionStore::try_global(cx).unwrap())
                 .unwrap();
@@ -41,30 +57,28 @@ pub async fn run_format_prompt(
                     )
                 })
                 .unwrap();
-            format_zeta_prompt(&input)
+            let prompt = format_zeta_prompt(&input);
+            let expected_output = zeta2_output_for_patch(&input, &example.expected_patch);
+            example.prompt = Some(ExamplePrompt {
+                input: prompt,
+                expected_output,
+                format: prompt_format,
+            });
         }
     };
-
-    example.prompt = Some(ExamplePrompt {
-        input: prompt,
-        expected_output: example.expected_patch.clone(), // TODO
-        format: prompt_format,
-    });
 }
 
-pub trait PromptFormatter {
-    fn format(example: &Example) -> String;
-}
+pub struct TeacherPrompt;
 
-pub trait PromptParser {
-    /// Return unified diff patch of prediction given raw LLM response
-    fn parse(example: &Example, response: &str) -> String;
-}
+impl TeacherPrompt {
+    const PROMPT: &str = include_str!("teacher.prompt.md");
+    pub(crate) const EDITABLE_REGION_START: &str = "<|editable_region_start|>\n";
+    pub(crate) const EDITABLE_REGION_END: &str = "<|editable_region_end|>";
 
-pub struct TeacherPrompt;
+    /// Truncate edit history to this number of last lines
+    const MAX_HISTORY_LINES: usize = 128;
 
-impl PromptFormatter for TeacherPrompt {
-    fn format(example: &Example) -> String {
+    pub fn format_prompt(example: &Example) -> String {
         let edit_history = Self::format_edit_history(&example.edit_history);
         let context = Self::format_context(example);
         let editable_region = Self::format_editable_region(example);
@@ -76,15 +90,46 @@ impl PromptFormatter for TeacherPrompt {
 
         prompt
     }
-}
 
-impl TeacherPrompt {
-    const PROMPT: &str = include_str!("teacher.prompt.md");
-    pub(crate) const EDITABLE_REGION_START: &str = "<|editable_region_start|>\n";
-    pub(crate) const EDITABLE_REGION_END: &str = "<|editable_region_end|>";
+    pub fn parse(example: &Example, response: &str) -> String {
+        // Ideally, we should always be able to find cursor position in the retrieved context.
+        // In reality, sometimes we don't find it for these reasons:
+        // 1. `example.cursor_position` contains _more_ context than included in the retrieved context
+        //    (can be fixed by getting cursor coordinates at the load_example stage)
+        // 2. Context retriever just didn't include cursor line.
+        //
+        // In that case, fall back to using `cursor_position` as the excerpt.
+        let cursor_file = &example
+            .buffer
+            .as_ref()
+            .expect("`buffer` should be filled in in the context collection step")
+            .content;
 
-    /// Truncate edit history to this number of last lines
-    const MAX_HISTORY_LINES: usize = 128;
+        // Extract updated (new) editable region from the model response
+        let new_editable_region = extract_last_codeblock(response);
+
+        // Reconstruct old editable region we sent to the model
+        let old_editable_region = Self::format_editable_region(example);
+        let old_editable_region = Self::extract_editable_region(&old_editable_region);
+        if !cursor_file.contains(&old_editable_region) {
+            panic!("Something's wrong: editable_region is not found in the cursor file")
+        }
+
+        // Apply editable region to a larger context and compute diff.
+        // This is needed to get better context lines around the editable region
+        let edited_file = cursor_file.replace(&old_editable_region, &new_editable_region);
+        let diff = language::unified_diff(&cursor_file, &edited_file);
+
+        let diff = indoc::formatdoc! {"
+            --- a/{path}
+            +++ b/{path}
+            {diff}",
+            path = example.cursor_path.to_string_lossy(),
+            diff = diff,
+        };
+
+        diff
+    }
 
     fn format_edit_history(edit_history: &str) -> String {
         // Strip comments ("garbage lines") from edit history
@@ -157,49 +202,6 @@ impl TeacherPrompt {
     }
 }
 
-impl PromptParser for TeacherPrompt {
-    fn parse(example: &Example, response: &str) -> String {
-        // Ideally, we should always be able to find cursor position in the retrieved context.
-        // In reality, sometimes we don't find it for these reasons:
-        // 1. `example.cursor_position` contains _more_ context than included in the retrieved context
-        //    (can be fixed by getting cursor coordinates at the load_example stage)
-        // 2. Context retriever just didn't include cursor line.
-        //
-        // In that case, fallback to using `cursor_position` as excerpt.
-        let cursor_file = &example
-            .buffer
-            .as_ref()
-            .expect("`buffer` should be filled in in the context collection step")
-            .content;
-
-        // Extract updated (new) editable region from the model response
-        let new_editable_region = extract_last_codeblock(response);
-
-        // Reconstruct old editable region we sent to the model
-        let old_editable_region = Self::format_editable_region(example);
-        let old_editable_region = Self::extract_editable_region(&old_editable_region);
-        if !cursor_file.contains(&old_editable_region) {
-            panic!("Something's wrong: editable_region is not found in the cursor file")
-        }
-
-        // Apply editable region to a larger context and compute diff.
-        // This is needed to get a better context lines around the editable region
-        let edited_file = cursor_file.replace(&old_editable_region, &new_editable_region);
-        let diff = language::unified_diff(&cursor_file, &edited_file);
-
-        let diff = indoc::formatdoc! {"
-            --- a/{path}
-            +++ b/{path}
-            {diff}
-            ",
-            path = example.cursor_path.to_string_lossy(),
-            diff = diff,
-        };
-
-        diff
-    }
-}
-
 fn extract_last_codeblock(text: &str) -> String {
     let mut last_block = None;
     let mut search_start = 0;
@@ -221,7 +223,7 @@ fn extract_last_codeblock(text: &str) -> String {
         }
 
         if let Some(end_pos) = text[backtick_end..].find(&closing_backticks) {
-            let code_block = &text[backtick_end + 1..backtick_end + end_pos - 1];
+            let code_block = &text[backtick_end + 1..backtick_end + end_pos];
             last_block = Some(code_block.to_string());
             search_start = backtick_end + end_pos + backtick_count;
         } else {
@@ -250,7 +252,7 @@ mod tests {
             `````
             "};
         let last_block = extract_last_codeblock(text);
-        assert_eq!(last_block, "last block");
+        assert_eq!(last_block, "last block\n");
     }
 
     #[test]

crates/edit_prediction_cli/src/headless.rs 🔗

@@ -1,4 +1,5 @@
 use client::{Client, ProxySettings, UserStore};
+use collections::HashMap;
 use extension::ExtensionHostProxy;
 use fs::RealFs;
 use gpui::http_client::read_proxy_from_env;
@@ -7,12 +8,13 @@ use gpui_tokio::Tokio;
 use language::LanguageRegistry;
 use language_extension::LspAccess;
 use node_runtime::{NodeBinaryOptions, NodeRuntime};
+use project::Project;
 use project::project_settings::ProjectSettings;
 use release_channel::{AppCommitSha, AppVersion};
 use reqwest_client::ReqwestClient;
 use settings::{Settings, SettingsStore};
 use std::path::PathBuf;
-use std::sync::Arc;
+use std::sync::{Arc, Mutex};
 use util::ResultExt as _;
 
 /// Headless subset of `workspace::AppState`.
@@ -22,9 +24,22 @@ pub struct EpAppState {
     pub user_store: Entity<UserStore>,
     pub fs: Arc<dyn fs::Fs>,
     pub node_runtime: NodeRuntime,
+    pub project_cache: ProjectCache,
+}
+
+#[derive(Default)]
+pub struct ProjectCache(Mutex<HashMap<String, Entity<Project>>>);
+
+impl ProjectCache {
+    pub fn insert(&self, repository_url: String, project: Entity<Project>) {
+        self.0.lock().unwrap().insert(repository_url, project);
+    }
+
+    pub fn get(&self, repository_url: &String) -> Option<Entity<Project>> {
+        self.0.lock().unwrap().get(repository_url).cloned()
+    }
 }
 
-// TODO: dedupe with crates/eval/src/eval.rs
 pub fn init(cx: &mut App) -> EpAppState {
     let app_commit_sha = option_env!("ZED_COMMIT_SHA").map(|s| AppCommitSha::new(s.to_owned()));
 
@@ -112,11 +127,14 @@ pub fn init(cx: &mut App) -> EpAppState {
     prompt_store::init(cx);
     terminal_view::init(cx);
 
+    let project_cache = ProjectCache::default();
+
     EpAppState {
         languages,
         client,
         user_store,
         fs,
         node_runtime,
+        project_cache,
     }
 }

crates/edit_prediction_cli/src/load_project.rs 🔗

@@ -1,6 +1,8 @@
 use crate::{
     example::{Example, ExampleBuffer, ExampleState},
     headless::EpAppState,
+    paths::{REPOS_DIR, WORKTREES_DIR},
+    progress::{InfoStyle, Progress, Step, StepProgress},
 };
 use anyhow::{Result, anyhow};
 use collections::HashMap;
@@ -11,7 +13,7 @@ use futures::{
     lock::{Mutex, OwnedMutexGuard},
 };
 use gpui::{AsyncApp, Entity};
-use language::{Anchor, Buffer, ToOffset, ToPoint};
+use language::{Anchor, Buffer, LanguageNotFound, ToOffset, ToPoint};
 use project::buffer_store::BufferStoreEvent;
 use project::{Project, ProjectPath};
 use std::{
@@ -28,42 +30,37 @@ pub async fn run_load_project(example: &mut Example, app_state: Arc<EpAppState>,
         return;
     }
 
-    let project = setup_project(example, &app_state, &mut cx).await;
-    let buffer_store = project
-        .read_with(&cx, |project, _| project.buffer_store().clone())
-        .unwrap();
+    let progress = Progress::global().start(Step::LoadProject, &example.name);
 
-    let ep_store = cx
-        .update(|cx| EditPredictionStore::try_global(cx).unwrap())
-        .unwrap();
-
-    cx.subscribe(&buffer_store, {
-        let project = project.clone();
-        move |_, event, cx| match event {
-            BufferStoreEvent::BufferAdded(buffer) => {
-                ep_store.update(cx, |store, cx| store.register_buffer(&buffer, &project, cx));
-            }
-            _ => {}
-        }
-    })
-    .unwrap()
-    .detach();
+    let project = setup_project(example, &app_state, &progress, &mut cx).await;
 
     let _open_buffers = apply_edit_history(example, &project, &mut cx)
         .await
         .unwrap();
+
     let (buffer, cursor_position) = cursor_position(example, &project, &mut cx).await;
-    example.buffer = buffer
+    let (example_buffer, language_name) = buffer
         .read_with(&cx, |buffer, _cx| {
             let cursor_point = cursor_position.to_point(&buffer);
-            Some(ExampleBuffer {
-                content: buffer.text(),
-                cursor_row: cursor_point.row,
-                cursor_column: cursor_point.column,
-                cursor_offset: cursor_position.to_offset(&buffer),
-            })
+            let language_name = buffer
+                .language()
+                .map(|l| l.name().to_string())
+                .unwrap_or_else(|| "Unknown".to_string());
+            (
+                ExampleBuffer {
+                    content: buffer.text(),
+                    cursor_row: cursor_point.row,
+                    cursor_column: cursor_point.column,
+                    cursor_offset: cursor_position.to_offset(&buffer),
+                },
+                language_name,
+            )
         })
         .unwrap();
+
+    progress.set_info(language_name, InfoStyle::Normal);
+
+    example.buffer = Some(example_buffer);
     example.state = Some(ExampleState {
         buffer,
         project,
@@ -77,6 +74,19 @@ async fn cursor_position(
     project: &Entity<Project>,
     cx: &mut AsyncApp,
 ) -> (Entity<Buffer>, Anchor) {
+    let language_registry = project
+        .read_with(cx, |project, _| project.languages().clone())
+        .unwrap();
+    let result = language_registry
+        .load_language_for_file_path(&example.cursor_path)
+        .await;
+
+    if let Err(error) = result
+        && !error.is::<LanguageNotFound>()
+    {
+        panic!("Failed to load language for file path: {}", error);
+    }
+
     let worktree = project
         .read_with(cx, |project, cx| {
             project.visible_worktrees(cx).next().unwrap()
@@ -115,7 +125,8 @@ async fn cursor_position(
         let mut matches = text.match_indices(&cursor_excerpt);
         let (excerpt_offset, _) = matches.next().unwrap_or_else(|| {
             panic!(
-                "\nExcerpt:\n\n{cursor_excerpt}\nBuffer text:\n{text}\n.Cursor excerpt did not exist in buffer."
+                "\nExcerpt:\n\n{cursor_excerpt}\nBuffer text:\n{text}\nExample: {}\nCursor excerpt did not exist in buffer.",
+                example.name
             );
         });
         assert!(matches.next().is_none(), "More than one cursor position match found for {}", &example.name);
@@ -133,9 +144,38 @@ async fn cursor_position(
 async fn setup_project(
     example: &mut Example,
     app_state: &Arc<EpAppState>,
+    step_progress: &StepProgress,
     cx: &mut AsyncApp,
 ) -> Entity<Project> {
-    setup_worktree(example).await;
+    let ep_store = cx
+        .update(|cx| EditPredictionStore::try_global(cx).unwrap())
+        .unwrap();
+
+    let worktree_path = setup_worktree(example, step_progress).await;
+
+    if let Some(project) = app_state.project_cache.get(&example.repository_url) {
+        ep_store
+            .update(cx, |ep_store, _| {
+                ep_store.clear_history_for_project(&project);
+            })
+            .unwrap();
+        let buffer_store = project
+            .read_with(cx, |project, _| project.buffer_store().clone())
+            .unwrap();
+        let buffers = buffer_store
+            .read_with(cx, |buffer_store, _| {
+                buffer_store.buffers().collect::<Vec<_>>()
+            })
+            .unwrap();
+        for buffer in buffers {
+            buffer
+                .update(cx, |buffer, cx| buffer.reload(cx))
+                .unwrap()
+                .await
+                .ok();
+        }
+        return project;
+    }
 
     let project = cx
         .update(|cx| {
@@ -151,27 +191,47 @@ async fn setup_project(
         })
         .unwrap();
 
-    let worktree = project
+    project
         .update(cx, |project, cx| {
-            project.create_worktree(&example.worktree_path(), true, cx)
+            project.disable_worktree_scanner(cx);
+            project.create_worktree(&worktree_path, true, cx)
         })
         .unwrap()
         .await
         .unwrap();
-    worktree
-        .read_with(cx, |worktree, _cx| {
-            worktree.as_local().unwrap().scan_complete()
-        })
-        .unwrap()
-        .await;
+
+    app_state
+        .project_cache
+        .insert(example.repository_url.clone(), project.clone());
+
+    let buffer_store = project
+        .read_with(cx, |project, _| project.buffer_store().clone())
+        .unwrap();
+    cx.subscribe(&buffer_store, {
+        let project = project.clone();
+        move |_, event, cx| match event {
+            BufferStoreEvent::BufferAdded(buffer) => {
+                ep_store.update(cx, |store, cx| store.register_buffer(&buffer, &project, cx));
+            }
+            _ => {}
+        }
+    })
+    .unwrap()
+    .detach();
+
     project
 }
 
-pub async fn setup_worktree(example: &Example) {
-    let repo_dir = example.repo_path();
+async fn setup_worktree(example: &Example, step_progress: &StepProgress) -> PathBuf {
+    let (repo_owner, repo_name) = example.repo_name().expect("failed to get repo name");
+    let repo_dir = REPOS_DIR.join(repo_owner.as_ref()).join(repo_name.as_ref());
+    let worktree_path = WORKTREES_DIR
+        .join(repo_owner.as_ref())
+        .join(repo_name.as_ref());
     let repo_lock = lock_repo(&repo_dir).await;
 
     if !repo_dir.is_dir() {
+        step_progress.set_substatus(format!("cloning {}", repo_name));
         fs::create_dir_all(&repo_dir).unwrap();
         run_git(&repo_dir, &["init"]).await.unwrap();
         run_git(
@@ -191,6 +251,7 @@ pub async fn setup_worktree(example: &Example) {
     let revision = if let Ok(revision) = revision {
         revision
     } else {
+        step_progress.set_substatus("fetching");
         if run_git(
             &repo_dir,
             &["fetch", "--depth", "1", "origin", &example.revision],
@@ -203,16 +264,11 @@ pub async fn setup_worktree(example: &Example) {
         let revision = run_git(&repo_dir, &["rev-parse", "FETCH_HEAD"])
             .await
             .unwrap();
-        if revision != example.revision {
-            run_git(&repo_dir, &["tag", &example.revision, &revision])
-                .await
-                .unwrap();
-        }
         revision
     };
 
     // Create the worktree for this example if needed.
-    let worktree_path = example.worktree_path();
+    step_progress.set_substatus("preparing worktree");
     if worktree_path.is_dir() {
         run_git(&worktree_path, &["clean", "--force", "-d"])
             .await
@@ -248,6 +304,7 @@ pub async fn setup_worktree(example: &Example) {
 
     // Apply the uncommitted diff for this example.
     if !example.uncommitted_diff.is_empty() {
+        step_progress.set_substatus("applying diff");
         let mut apply_process = smol::process::Command::new("git")
             .current_dir(&worktree_path)
             .args(&["apply", "-"])
@@ -273,6 +330,9 @@ pub async fn setup_worktree(example: &Example) {
             );
         }
     }
+
+    step_progress.clear_substatus();
+    worktree_path
 }
 
 async fn apply_edit_history(

crates/edit_prediction_cli/src/main.rs 🔗

@@ -1,4 +1,5 @@
 mod anthropic_client;
+mod distill;
 mod example;
 mod format_prompt;
 mod headless;
@@ -6,6 +7,7 @@ mod load_project;
 mod metrics;
 mod paths;
 mod predict;
+mod progress;
 mod retrieve_context;
 mod score;
 
@@ -16,10 +18,12 @@ use reqwest_client::ReqwestClient;
 use serde::{Deserialize, Serialize};
 use std::{path::PathBuf, sync::Arc};
 
-use crate::example::{read_examples, write_examples};
+use crate::distill::run_distill;
+use crate::example::{group_examples_by_repo, read_examples, write_examples};
 use crate::format_prompt::run_format_prompt;
 use crate::load_project::run_load_project;
 use crate::predict::run_prediction;
+use crate::progress::Progress;
 use crate::retrieve_context::run_context_retrieval;
 use crate::score::run_scoring;
 
@@ -28,7 +32,7 @@ use crate::score::run_scoring;
 struct EpArgs {
     #[arg(long, default_value_t = false)]
     printenv: bool,
-    #[clap(long, default_value_t = 10)]
+    #[clap(long, default_value_t = 10, global = true)]
     max_parallelism: usize,
     #[command(subcommand)]
     command: Option<Command>,
@@ -45,7 +49,7 @@ enum Command {
     /// Parse markdown examples and output a combined .jsonl file
     ParseExample,
     /// Create git worktrees for each example and load file contents
-    LoadBuffer,
+    LoadProject,
     /// Retrieve context for input examples.
     Context,
     /// Generate a prompt string for a specific model
@@ -54,6 +58,9 @@ enum Command {
     Predict(PredictArgs),
     /// Computes a score based on actual and expected patches
     Score(PredictArgs),
+    /// Prepares a distillation dataset by copying expected outputs to
+    /// predicted outputs and removing actual outputs and prompts.
+    Distill,
     /// Print aggregated scores
     Eval(PredictArgs),
     /// Remove git repositories and worktrees
@@ -87,6 +94,7 @@ enum PredictionProvider {
     Zeta1,
     Zeta2,
     Teacher,
+    TeacherNonBatching,
 }
 
 impl EpArgs {
@@ -104,8 +112,6 @@ impl EpArgs {
 }
 
 fn main() {
-    zlog::init();
-    zlog::init_output_stderr();
     let args = EpArgs::parse();
 
     if args.printenv {
@@ -139,27 +145,35 @@ fn main() {
         EditPredictionStore::global(&app_state.client, &app_state.user_store, cx);
 
         cx.spawn(async move |cx| {
-            match &command {
-                Command::Predict(args) => predict::sync_batches(&args.provider).await,
-                _ => (),
+            if let Command::Predict(args) = &command {
+                predict::sync_batches(&args.provider).await
             };
 
-            for data in examples.chunks_mut(args.max_parallelism) {
-                let mut futures = Vec::new();
-                for example in data.iter_mut() {
-                    let cx = cx.clone();
-                    let app_state = app_state.clone();
-                    futures.push(async {
+            let total_examples = examples.len();
+            Progress::global().set_total_examples(total_examples);
+
+            let mut grouped_examples = group_examples_by_repo(&mut examples);
+            let example_batches = grouped_examples.chunks_mut(args.max_parallelism);
+
+            for example_batch in example_batches {
+                let futures = example_batch.into_iter().map(|repo_examples| async {
+                    for example in repo_examples.iter_mut() {
                         match &command {
                             Command::ParseExample => {}
-                            Command::LoadBuffer => {
-                                run_load_project(example, app_state.clone(), cx).await;
+                            Command::LoadProject => {
+                                run_load_project(example, app_state.clone(), cx.clone()).await;
                             }
                             Command::Context => {
-                                run_context_retrieval(example, app_state, cx).await;
+                                run_context_retrieval(example, app_state.clone(), cx.clone()).await;
                             }
                             Command::FormatPrompt(args) => {
-                                run_format_prompt(example, args.prompt_format, app_state, cx).await;
+                                run_format_prompt(
+                                    example,
+                                    args.prompt_format,
+                                    app_state.clone(),
+                                    cx.clone(),
+                                )
+                                .await;
                             }
                             Command::Predict(args) => {
                                 run_prediction(
@@ -167,21 +181,25 @@ fn main() {
                                     Some(args.provider),
                                     args.repetitions,
                                     app_state.clone(),
-                                    cx,
+                                    cx.clone(),
                                 )
                                 .await;
                             }
+                            Command::Distill => {
+                                run_distill(example).await;
+                            }
                             Command::Score(args) | Command::Eval(args) => {
-                                run_scoring(example, &args, app_state, cx).await;
+                                run_scoring(example, &args, app_state.clone(), cx.clone()).await;
                             }
                             Command::Clean => {
                                 unreachable!()
                             }
                         }
-                    });
-                }
+                    }
+                });
                 futures::future::join_all(futures).await;
             }
+            Progress::global().clear();
 
             if args.output.is_some() || !matches!(command, Command::Eval(_)) {
                 write_examples(&examples, output.as_ref());

crates/edit_prediction_cli/src/predict.rs 🔗

@@ -2,10 +2,11 @@ use crate::{
     PredictionProvider, PromptFormat,
     anthropic_client::AnthropicClient,
     example::{Example, ExamplePrediction},
-    format_prompt::{PromptParser, TeacherPrompt, run_format_prompt},
+    format_prompt::{TeacherPrompt, run_format_prompt},
     headless::EpAppState,
     load_project::run_load_project,
     paths::{LATEST_EXAMPLE_RUN_DIR, RUN_DIR},
+    progress::{InfoStyle, Progress, Step},
     retrieve_context::run_context_retrieval,
 };
 use edit_prediction::{DebugEvent, EditPredictionStore};
@@ -30,20 +31,28 @@ pub async fn run_prediction(
         return;
     }
 
-    run_load_project(example, app_state.clone(), cx.clone()).await;
+    let provider = provider.unwrap();
+
     run_context_retrieval(example, app_state.clone(), cx.clone()).await;
 
-    let provider = provider.unwrap();
+    if matches!(
+        provider,
+        PredictionProvider::Teacher | PredictionProvider::TeacherNonBatching
+    ) {
+        let _step_progress = Progress::global().start(Step::Predict, &example.name);
 
-    if matches!(provider, PredictionProvider::Teacher) {
         if example.prompt.is_none() {
             run_format_prompt(example, PromptFormat::Teacher, app_state.clone(), cx).await;
         }
 
-        let batched = true;
+        let batched = matches!(provider, PredictionProvider::Teacher);
         return predict_anthropic(example, repetition_count, batched).await;
     }
 
+    run_load_project(example, app_state.clone(), cx.clone()).await;
+
+    let _step_progress = Progress::global().start(Step::Predict, &example.name);
+
     if matches!(
         provider,
         PredictionProvider::Zeta1 | PredictionProvider::Zeta2
@@ -75,7 +84,9 @@ pub async fn run_prediction(
                 PredictionProvider::Zeta2 => edit_prediction::EditPredictionModel::Zeta2,
                 PredictionProvider::Sweep => edit_prediction::EditPredictionModel::Sweep,
                 PredictionProvider::Mercury => edit_prediction::EditPredictionModel::Mercury,
-                PredictionProvider::Teacher => unreachable!(),
+                PredictionProvider::Teacher | PredictionProvider::TeacherNonBatching => {
+                    unreachable!()
+                }
             };
             store.set_edit_prediction_model(model);
         })
@@ -175,18 +186,31 @@ pub async fn run_prediction(
             .await
             .unwrap();
 
+        let actual_patch = prediction
+            .and_then(|prediction| {
+                let prediction = prediction.prediction.ok()?;
+                prediction.edit_preview.as_unified_diff(&prediction.edits)
+            })
+            .unwrap_or_default();
+
+        let has_prediction = !actual_patch.is_empty();
+
         updated_example
             .lock()
             .unwrap()
             .predictions
             .last_mut()
             .unwrap()
-            .actual_patch = prediction
-            .and_then(|prediction| {
-                let prediction = prediction.prediction.ok()?;
-                prediction.edit_preview.as_unified_diff(&prediction.edits)
-            })
-            .unwrap_or_default();
+            .actual_patch = actual_patch;
+
+        if ix == repetition_count - 1 {
+            let (info, style) = if has_prediction {
+                ("predicted", InfoStyle::Normal)
+            } else {
+                ("no prediction", InfoStyle::Warning)
+            };
+            _step_progress.set_info(info, style);
+        }
     }
 
     ep_store

crates/edit_prediction_cli/src/progress.rs 🔗

@@ -0,0 +1,474 @@
+use std::{
+    borrow::Cow,
+    collections::HashMap,
+    io::{IsTerminal, Write},
+    sync::{Arc, Mutex, OnceLock},
+    time::{Duration, Instant},
+};
+
+use log::{Level, Log, Metadata, Record};
+
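+/// Renders per-example pipeline progress on stderr: completed steps are printed as regular
+/// lines, while in-progress steps are shown in a status block pinned to the bottom on a TTY.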
+pub struct Progress {
+    inner: Mutex<ProgressInner>,
+}
+
+struct ProgressInner {
+    completed: Vec<CompletedTask>,
+    in_progress: HashMap<String, InProgressTask>,
+    is_tty: bool,
+    terminal_width: usize,
+    max_example_name_len: usize,
+    status_lines_displayed: usize,
+    total_examples: usize,
+    last_line_is_logging: bool,
+}
+
+#[derive(Clone)]
+struct InProgressTask {
+    step: Step,
+    started_at: Instant,
+    substatus: Option<String>,
+    info: Option<(String, InfoStyle)>,
+}
+
+struct CompletedTask {
+    step: Step,
+    example_name: String,
+    duration: Duration,
+    info: Option<(String, InfoStyle)>,
+}
+
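+/// The pipeline stage a task is in; determines the label and color of its status line.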
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum Step {
+    LoadProject,
+    Context,
+    FormatPrompt,
+    Predict,
+    Score,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum InfoStyle {
+    Normal,
+    Warning,
+}
+
+impl Step {
+    pub fn label(&self) -> &'static str {
+        match self {
+            Step::LoadProject => "Load",
+            Step::Context => "Context",
+            Step::FormatPrompt => "Format",
+            Step::Predict => "Predict",
+            Step::Score => "Score",
+        }
+    }
+
+    fn color_code(&self) -> &'static str {
+        match self {
+            Step::LoadProject => "\x1b[33m",
+            Step::Context => "\x1b[35m",
+            Step::FormatPrompt => "\x1b[34m",
+            Step::Predict => "\x1b[32m",
+            Step::Score => "\x1b[31m",
+        }
+    }
+}
+
+static GLOBAL: OnceLock<Arc<Progress>> = OnceLock::new();
+static LOGGER: ProgressLogger = ProgressLogger;
+
+const RIGHT_MARGIN: usize = 4;
+const MAX_STATUS_LINES: usize = 10;
+
+impl Progress {
+    /// Returns the global Progress instance, initializing it if necessary.
+    pub fn global() -> Arc<Progress> {
+        GLOBAL
+            .get_or_init(|| {
+                let progress = Arc::new(Self {
+                    inner: Mutex::new(ProgressInner {
+                        completed: Vec::new(),
+                        in_progress: HashMap::new(),
+                        is_tty: std::io::stderr().is_terminal(),
+                        terminal_width: get_terminal_width(),
+                        max_example_name_len: 0,
+                        status_lines_displayed: 0,
+                        total_examples: 0,
+                        last_line_is_logging: false,
+                    }),
+                });
+                let _ = log::set_logger(&LOGGER);
+                log::set_max_level(log::LevelFilter::Error);
+                progress
+            })
+            .clone()
+    }
+
+    pub fn set_total_examples(&self, total: usize) {
+        let mut inner = self.inner.lock().unwrap();
+        inner.total_examples = total;
+    }
+
+    /// Prints a message to stderr, clearing the status lines first so the output is not
+    /// interleaved with them; the status lines are redrawn on the next progress update.
+    /// This should be used for any output that needs to appear above the status lines.
+    fn log(&self, message: &str) {
+        let mut inner = self.inner.lock().unwrap();
+        Self::clear_status_lines(&mut inner);
+
+        if !inner.last_line_is_logging {
+            let reset = "\x1b[0m";
+            let dim = "\x1b[2m";
+            let divider = "─".repeat(inner.terminal_width.saturating_sub(RIGHT_MARGIN));
+            eprintln!("{dim}{divider}{reset}");
+            inner.last_line_is_logging = true;
+        }
+
+        eprintln!("{}", message);
+    }
+
+    pub fn start(self: &Arc<Self>, step: Step, example_name: &str) -> StepProgress {
+        let mut inner = self.inner.lock().unwrap();
+
+        Self::clear_status_lines(&mut inner);
+
+        inner.max_example_name_len = inner.max_example_name_len.max(example_name.len());
+        inner.in_progress.insert(
+            example_name.to_string(),
+            InProgressTask {
+                step,
+                started_at: Instant::now(),
+                substatus: None,
+                info: None,
+            },
+        );
+
+        Self::print_status_lines(&mut inner);
+
+        StepProgress {
+            progress: self.clone(),
+            step,
+            example_name: example_name.to_string(),
+        }
+    }
+
+    fn finish(&self, step: Step, example_name: &str) {
+        let mut inner = self.inner.lock().unwrap();
+
+        let Some(task) = inner.in_progress.remove(example_name) else {
+            return;
+        };
+
+        if task.step == step {
+            inner.completed.push(CompletedTask {
+                step: task.step,
+                example_name: example_name.to_string(),
+                duration: task.started_at.elapsed(),
+                info: task.info,
+            });
+
+            Self::clear_status_lines(&mut inner);
+            Self::print_logging_closing_divider(&mut inner);
+            Self::print_completed(&inner, inner.completed.last().unwrap());
+            Self::print_status_lines(&mut inner);
+        } else {
+            inner.in_progress.insert(example_name.to_string(), task);
+        }
+    }
+
+    fn print_logging_closing_divider(inner: &mut ProgressInner) {
+        if inner.last_line_is_logging {
+            let reset = "\x1b[0m";
+            let dim = "\x1b[2m";
+            let divider = "─".repeat(inner.terminal_width.saturating_sub(RIGHT_MARGIN));
+            eprintln!("{dim}{divider}{reset}");
+            inner.last_line_is_logging = false;
+        }
+    }
+
+    fn clear_status_lines(inner: &mut ProgressInner) {
+        if inner.is_tty && inner.status_lines_displayed > 0 {
+            // Move up and clear each line we previously displayed
+            for _ in 0..inner.status_lines_displayed {
+                eprint!("\x1b[A\x1b[K");
+            }
+            let _ = std::io::stderr().flush();
+            inner.status_lines_displayed = 0;
+        }
+    }
+
+    fn print_completed(inner: &ProgressInner, task: &CompletedTask) {
+        let duration = format_duration(task.duration);
+        let name_width = inner.max_example_name_len;
+
+        if inner.is_tty {
+            let reset = "\x1b[0m";
+            let bold = "\x1b[1m";
+            let dim = "\x1b[2m";
+
+            let yellow = "\x1b[33m";
+            let info_part = task
+                .info
+                .as_ref()
+                .map(|(s, style)| {
+                    if *style == InfoStyle::Warning {
+                        format!("{yellow}{s}{reset}")
+                    } else {
+                        s.to_string()
+                    }
+                })
+                .unwrap_or_default();
+
+            let prefix = format!(
+                "{bold}{color}{label:>12}{reset} {name:<name_width$} {dim}│{reset} {info_part}",
+                color = task.step.color_code(),
+                label = task.step.label(),
+                name = task.example_name,
+            );
+
+            let duration_with_margin = format!("{duration} ");
+            let padding_needed = inner
+                .terminal_width
+                .saturating_sub(RIGHT_MARGIN)
+                .saturating_sub(duration_with_margin.len())
+                .saturating_sub(strip_ansi_len(&prefix));
+            let padding = " ".repeat(padding_needed);
+
+            eprintln!("{prefix}{padding}{dim}{duration_with_margin}{reset}");
+        } else {
+            let info_part = task
+                .info
+                .as_ref()
+                .map(|(s, _)| format!(" | {}", s))
+                .unwrap_or_default();
+
+            eprintln!(
+                "{label:>12} {name:<name_width$}{info_part} {duration}",
+                label = task.step.label(),
+                name = task.example_name,
+            );
+        }
+    }
+
+    fn print_status_lines(inner: &mut ProgressInner) {
+        if !inner.is_tty || inner.in_progress.is_empty() {
+            inner.status_lines_displayed = 0;
+            return;
+        }
+
+        let reset = "\x1b[0m";
+        let bold = "\x1b[1m";
+        let dim = "\x1b[2m";
+
+        // Build the done/in-progress/total label
+        let done_count = inner.completed.len();
+        let in_progress_count = inner.in_progress.len();
+        let range_label = format!(
+            " {}/{}/{} ",
+            done_count, in_progress_count, inner.total_examples
+        );
+
+        // Print a divider line with range label aligned with timestamps
+        let range_visible_len = range_label.len();
+        let left_divider_len = inner
+            .terminal_width
+            .saturating_sub(RIGHT_MARGIN)
+            .saturating_sub(range_visible_len);
+        let left_divider = "─".repeat(left_divider_len);
+        let right_divider = "─".repeat(RIGHT_MARGIN);
+        eprintln!("{dim}{left_divider}{reset}{range_label}{dim}{right_divider}{reset}");
+
+        let mut tasks: Vec<_> = inner.in_progress.iter().collect();
+        tasks.sort_by_key(|(name, _)| *name);
+
+        let total_tasks = tasks.len();
+        let mut lines_printed = 0;
+
+        for (name, task) in tasks.iter().take(MAX_STATUS_LINES) {
+            let elapsed = format_duration(task.started_at.elapsed());
+            let substatus_part = task
+                .substatus
+                .as_ref()
+                .map(|s| truncate_with_ellipsis(s, 30))
+                .unwrap_or_default();
+
+            let step_label = task.step.label();
+            let step_color = task.step.color_code();
+            let name_width = inner.max_example_name_len;
+
+            let prefix = format!(
+                "{bold}{step_color}{step_label:>12}{reset} {name:<name_width$} {dim}│{reset} {substatus_part}",
+                name = name,
+            );
+
+            let duration_with_margin = format!("{elapsed} ");
+            let padding_needed = inner
+                .terminal_width
+                .saturating_sub(RIGHT_MARGIN)
+                .saturating_sub(duration_with_margin.len())
+                .saturating_sub(strip_ansi_len(&prefix));
+            let padding = " ".repeat(padding_needed);
+
+            eprintln!("{prefix}{padding}{dim}{duration_with_margin}{reset}");
+            lines_printed += 1;
+        }
+
+        // Show "+N more" on its own line if there are more tasks
+        if total_tasks > MAX_STATUS_LINES {
+            let remaining = total_tasks - MAX_STATUS_LINES;
+            eprintln!("{:>12} +{remaining} more", "");
+            lines_printed += 1;
+        }
+
+        inner.status_lines_displayed = lines_printed + 1; // +1 for the divider line
+        let _ = std::io::stderr().flush();
+    }
+
+    pub fn clear(&self) {
+        let mut inner = self.inner.lock().unwrap();
+        Self::clear_status_lines(&mut inner);
+    }
+}
+
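+/// RAII handle for one step of one example; the step is reported as completed when this is dropped.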
+pub struct StepProgress {
+    progress: Arc<Progress>,
+    step: Step,
+    example_name: String,
+}
+
+impl StepProgress {
+    pub fn set_substatus(&self, substatus: impl Into<Cow<'static, str>>) {
+        let mut inner = self.progress.inner.lock().unwrap();
+        if let Some(task) = inner.in_progress.get_mut(&self.example_name) {
+            task.substatus = Some(substatus.into().into_owned());
+            Progress::clear_status_lines(&mut inner);
+            Progress::print_status_lines(&mut inner);
+        }
+    }
+
+    pub fn clear_substatus(&self) {
+        let mut inner = self.progress.inner.lock().unwrap();
+        if let Some(task) = inner.in_progress.get_mut(&self.example_name) {
+            task.substatus = None;
+            Progress::clear_status_lines(&mut inner);
+            Progress::print_status_lines(&mut inner);
+        }
+    }
+
+    pub fn set_info(&self, info: impl Into<String>, style: InfoStyle) {
+        let mut inner = self.progress.inner.lock().unwrap();
+        if let Some(task) = inner.in_progress.get_mut(&self.example_name) {
+            task.info = Some((info.into(), style));
+        }
+    }
+}
+
+impl Drop for StepProgress {
+    fn drop(&mut self) {
+        self.progress.finish(self.step, &self.example_name);
+    }
+}
+
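+/// `log::Log` implementation that routes records through `Progress` so messages print above the status lines.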
+struct ProgressLogger;
+
+impl Log for ProgressLogger {
+    fn enabled(&self, metadata: &Metadata) -> bool {
+        metadata.level() <= Level::Info
+    }
+
+    fn log(&self, record: &Record) {
+        if !self.enabled(record.metadata()) {
+            return;
+        }
+
+        let level_color = match record.level() {
+            Level::Error => "\x1b[31m",
+            Level::Warn => "\x1b[33m",
+            Level::Info => "\x1b[32m",
+            Level::Debug => "\x1b[34m",
+            Level::Trace => "\x1b[35m",
+        };
+        let reset = "\x1b[0m";
+        let bold = "\x1b[1m";
+
+        let level_label = match record.level() {
+            Level::Error => "Error",
+            Level::Warn => "Warn",
+            Level::Info => "Info",
+            Level::Debug => "Debug",
+            Level::Trace => "Trace",
+        };
+
+        let message = format!(
+            "{bold}{level_color}{level_label:>12}{reset} {}",
+            record.args()
+        );
+
+        if let Some(progress) = GLOBAL.get() {
+            progress.log(&message);
+        } else {
+            eprintln!("{}", message);
+        }
+    }
+
+    fn flush(&self) {
+        let _ = std::io::stderr().flush();
+    }
+}
+
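+/// Returns the width of the terminal attached to stderr, falling back to 80 columns.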
+#[cfg(unix)]
+fn get_terminal_width() -> usize {
+    unsafe {
+        let mut winsize: libc::winsize = std::mem::zeroed();
+        if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) == 0
+            && winsize.ws_col > 0
+        {
+            winsize.ws_col as usize
+        } else {
+            80
+        }
+    }
+}
+
+#[cfg(not(unix))]
+fn get_terminal_width() -> usize {
+    80
+}
+
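+/// Counts the visible characters in `s`, treating ANSI escape sequences (ESC through `m`) as zero-width.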
+fn strip_ansi_len(s: &str) -> usize {
+    let mut len = 0;
+    let mut in_escape = false;
+    for c in s.chars() {
+        if c == '\x1b' {
+            in_escape = true;
+        } else if in_escape {
+            if c == 'm' {
+                in_escape = false;
+            }
+        } else {
+            len += 1;
+        }
+    }
+    len
+}
+
+/// Truncates `s` to at most `max_len` characters, appending an ellipsis when shortened.
+fn truncate_with_ellipsis(s: &str, max_len: usize) -> String {
+    if s.chars().count() <= max_len {
+        s.to_string()
+    } else {
+        // Truncate on character boundaries so multi-byte UTF-8 input can't split a char and panic.
+        let truncated: String = s.chars().take(max_len.saturating_sub(1)).collect();
+        format!("{truncated}…")
+    }
+}
+
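+/// Formats a duration as milliseconds, seconds, or minutes for display in status lines.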
+fn format_duration(duration: Duration) -> String {
+    const MINUTE_IN_MILLIS: f32 = 60. * 1000.;
+
+    let millis = duration.as_millis() as f32;
+    if millis < 1000.0 {
+        format!("{}ms", millis)
+    } else if millis < MINUTE_IN_MILLIS {
+        format!("{:.1}s", millis / 1_000.0)
+    } else {
+        format!("{:.1}m", millis / MINUTE_IN_MILLIS)
+    }
+}
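
A minimal usage sketch of the Progress API defined above, mirroring how the new calls in
main.rs, predict.rs, and retrieve_context.rs drive it (the example name here is hypothetical):

    let progress = Progress::global();
    progress.set_total_examples(1);
    {
        // `start` returns an RAII guard; the step is reported as completed when it is dropped.
        let step = progress.start(Step::Predict, "example-name");
        step.set_substatus("waiting for response");
        step.set_info("predicted", InfoStyle::Normal);
    } // guard dropped here: a completed line is printed and the status block is redrawn
    // Remove the status block once all examples have been processed.
    progress.clear();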

crates/edit_prediction_cli/src/retrieve_context.rs 🔗

@@ -2,15 +2,16 @@ use crate::{
     example::{Example, ExampleContext},
     headless::EpAppState,
     load_project::run_load_project,
+    progress::{InfoStyle, Progress, Step, StepProgress},
 };
-use anyhow::Result;
 use collections::HashSet;
 use edit_prediction::{DebugEvent, EditPredictionStore};
 use futures::{FutureExt as _, StreamExt as _, channel::mpsc};
-use gpui::{AsyncApp, Entity, Task};
-use language::{Buffer, LanguageNotFound};
+use gpui::{AsyncApp, Entity};
+use language::Buffer;
 use project::Project;
-use std::{sync::Arc, time::Duration};
+use std::sync::Arc;
+use std::time::Duration;
 
 pub async fn run_context_retrieval(
     example: &mut Example,
@@ -23,6 +24,10 @@ pub async fn run_context_retrieval(
 
     run_load_project(example, app_state.clone(), cx.clone()).await;
 
+    let step_progress: Arc<StepProgress> = Progress::global()
+        .start(Step::Context, &example.name)
+        .into();
+
     let state = example.state.as_ref().unwrap();
     let project = state.project.clone();
 
@@ -31,8 +36,7 @@ pub async fn run_context_retrieval(
             project.register_buffer_with_language_servers(&state.buffer, cx)
         })
         .unwrap();
-
-    wait_for_language_server_to_start(example, &project, &state.buffer, &mut cx).await;
+    wait_for_language_servers_to_start(&project, &state.buffer, &step_progress, &mut cx).await;
 
     let ep_store = cx
         .update(|cx| EditPredictionStore::try_global(cx).unwrap())
@@ -60,113 +64,83 @@ pub async fn run_context_retrieval(
         .update(&mut cx, |store, cx| store.context_for_project(&project, cx))
         .unwrap();
 
+    let excerpt_count: usize = context_files.iter().map(|f| f.excerpts.len()).sum();
+    step_progress.set_info(format!("{} excerpts", excerpt_count), InfoStyle::Normal);
+
     example.context = Some(ExampleContext {
         files: context_files,
     });
 }
 
-async fn wait_for_language_server_to_start(
-    example: &Example,
+async fn wait_for_language_servers_to_start(
     project: &Entity<Project>,
     buffer: &Entity<Buffer>,
+    step_progress: &Arc<StepProgress>,
     cx: &mut AsyncApp,
 ) {
-    let language_registry = project
-        .read_with(cx, |project, _| project.languages().clone())
-        .unwrap();
-    let result = language_registry
-        .load_language_for_file_path(&example.cursor_path)
-        .await;
-
-    if let Err(error) = result
-        && !error.is::<LanguageNotFound>()
-    {
-        panic!("Failed to load language for file path: {}", error);
-    }
-
-    let Some(language_id) = buffer
-        .read_with(cx, |buffer, _cx| {
-            buffer.language().map(|language| language.id())
-        })
-        .unwrap()
-    else {
-        panic!("No language for {:?}", example.cursor_path);
-    };
-
-    let mut ready_languages = HashSet::default();
-    let log_prefix = format!("{} | ", example.name);
-    if !ready_languages.contains(&language_id) {
-        wait_for_lang_server(&project, &buffer, log_prefix, cx)
-            .await
-            .unwrap();
-        ready_languages.insert(language_id);
-    }
-
-    let lsp_store = project
-        .read_with(cx, |project, _cx| project.lsp_store())
-        .unwrap();
-
-    // hacky wait for buffer to be registered with the language server
-    for _ in 0..100 {
-        if lsp_store
-            .update(cx, |lsp_store, cx| {
-                buffer.update(cx, |buffer, cx| {
-                    lsp_store
-                        .language_servers_for_local_buffer(&buffer, cx)
-                        .next()
-                        .map(|(_, language_server)| language_server.server_id())
-                })
-            })
-            .unwrap()
-            .is_some()
-        {
-            return;
-        } else {
-            cx.background_executor()
-                .timer(Duration::from_millis(10))
-                .await;
-        }
-    }
-
-    panic!("No language server found for buffer");
-}
-
-pub fn wait_for_lang_server(
-    project: &Entity<Project>,
-    buffer: &Entity<Buffer>,
-    log_prefix: String,
-    cx: &mut AsyncApp,
-) -> Task<Result<()>> {
-    eprintln!("{}⏵ Waiting for language server", log_prefix);
-
-    let (mut tx, mut rx) = mpsc::channel(1);
-
     let lsp_store = project
         .read_with(cx, |project, _| project.lsp_store())
         .unwrap();
 
-    let has_lang_server = buffer
+    let (language_server_ids, mut starting_language_server_ids) = buffer
         .update(cx, |buffer, cx| {
             lsp_store.update(cx, |lsp_store, cx| {
-                lsp_store
-                    .language_servers_for_local_buffer(buffer, cx)
-                    .next()
-                    .is_some()
+                let ids = lsp_store.language_servers_for_local_buffer(buffer, cx);
+                let starting_ids = ids
+                    .iter()
+                    .copied()
+                    .filter(|id| !lsp_store.language_server_statuses.contains_key(&id))
+                    .collect::<HashSet<_>>();
+                (ids, starting_ids)
             })
         })
-        .unwrap_or(false);
+        .unwrap_or_default();
+
+    step_progress.set_substatus(format!("waiting for {} LSPs", language_server_ids.len()));
+
+    let timeout = cx
+        .background_executor()
+        .timer(Duration::from_secs(60 * 5))
+        .shared();
+
+    let (mut tx, mut rx) = mpsc::channel(language_server_ids.len());
+    let added_subscription = cx.subscribe(project, {
+        let step_progress = step_progress.clone();
+        move |_, event, _| match event {
+            project::Event::LanguageServerAdded(language_server_id, name, _) => {
+                step_progress.set_substatus(format!("LSP started: {}", name));
+                tx.try_send(*language_server_id).ok();
+            }
+            _ => {}
+        }
+    });
+
+    while !starting_language_server_ids.is_empty() {
+        futures::select! {
+            language_server_id = rx.next() => {
+                if let Some(id) = language_server_id {
+                    starting_language_server_ids.remove(&id);
+                }
+            },
+            _ = timeout.clone().fuse() => {
+                panic!("LSP wait timed out after 5 minutes");
+            }
+        }
+    }
+
+    drop(added_subscription);
 
-    if has_lang_server {
+    if !language_server_ids.is_empty() {
         project
             .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
             .unwrap()
             .detach();
     }
-    let (mut added_tx, mut added_rx) = mpsc::channel(1);
 
+    let (mut tx, mut rx) = mpsc::channel(language_server_ids.len());
     let subscriptions = [
         cx.subscribe(&lsp_store, {
-            let log_prefix = log_prefix.clone();
+            let step_progress = step_progress.clone();
             move |_, event, _| {
                 if let project::LspStoreEvent::LanguageServerUpdate {
                     message:
@@ -179,50 +153,47 @@ pub fn wait_for_lang_server(
                     ..
                 } = event
                 {
-                    eprintln!("{}⟲ {message}", log_prefix)
+                    step_progress.set_substatus(message.clone());
                 }
             }
         }),
         cx.subscribe(project, {
-            let buffer = buffer.clone();
-            move |project, event, cx| match event {
-                project::Event::LanguageServerAdded(_, _, _) => {
-                    let buffer = buffer.clone();
-                    project
-                        .update(cx, |project, cx| project.save_buffer(buffer, cx))
-                        .detach();
-                    added_tx.try_send(()).ok();
-                }
-                project::Event::DiskBasedDiagnosticsFinished { .. } => {
-                    tx.try_send(()).ok();
+            let step_progress = step_progress.clone();
+            move |_, event, cx| match event {
+                project::Event::DiskBasedDiagnosticsFinished { language_server_id } => {
+                    let lsp_store = lsp_store.read(cx);
+                    let name = lsp_store
+                        .language_server_adapter_for_id(*language_server_id)
+                        .unwrap()
+                        .name();
+                    step_progress.set_substatus(format!("LSP idle: {}", name));
+                    tx.try_send(*language_server_id).ok();
                 }
                 _ => {}
             }
         }),
     ];
 
-    cx.spawn(async move |cx| {
-        if !has_lang_server {
-            // some buffers never have a language server, so this aborts quickly in that case.
-            let timeout = cx.background_executor().timer(Duration::from_secs(500));
-            futures::select! {
-                _ = added_rx.next() => {},
-                _ = timeout.fuse() => {
-                    anyhow::bail!("Waiting for language server add timed out after 5 seconds");
+    project
+        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
+        .unwrap()
+        .await
+        .unwrap();
+
+    let mut pending_language_server_ids = HashSet::from_iter(language_server_ids.into_iter());
+    while !pending_language_server_ids.is_empty() {
+        futures::select! {
+            language_server_id = rx.next() => {
+                if let Some(id) = language_server_id {
+                    pending_language_server_ids.remove(&id);
                 }
-            };
-        }
-        let timeout = cx.background_executor().timer(Duration::from_secs(60 * 5));
-        let result = futures::select! {
-            _ = rx.next() => {
-                eprintln!("{}⚑ Language server idle", log_prefix);
-                anyhow::Ok(())
             },
-            _ = timeout.fuse() => {
-                anyhow::bail!("LSP wait timed out after 5 minutes");
+            _ = timeout.clone().fuse() => {
+                panic!("LSP wait timed out after 5 minutes");
             }
-        };
-        drop(subscriptions);
-        result
-    })
+        }
+    }
+
+    drop(subscriptions);
+    step_progress.clear_substatus();
 }

crates/edit_prediction_cli/src/score.rs 🔗

@@ -4,6 +4,7 @@ use crate::{
     headless::EpAppState,
     metrics::{self, ClassificationMetrics},
     predict::run_prediction,
+    progress::{Progress, Step},
 };
 use edit_prediction::udiff::DiffLine;
 use gpui::AsyncApp;
@@ -24,6 +25,8 @@ pub async fn run_scoring(
     )
     .await;
 
+    let _progress = Progress::global().start(Step::Score, &example.name);
+
     let expected_patch = parse_patch(&example.expected_patch);
 
     let mut scores = vec![];

crates/edit_prediction_cli/src/teacher.prompt.md 🔗

@@ -18,6 +18,7 @@ Focus on:
 Rules:
 - Do not just mechanically apply patterns - reason about what changes make sense given the context and the programmer's apparent goals.
 - Do not just fix syntax errors - look for the broader refactoring pattern and apply it systematically throughout the code.
+- Keep existing formatting unless changing it is absolutely necessary.
 
 Input format:
 - You receive small code fragments called context (structs, field definitions, function signatures, etc.). They may or may not be relevant.

crates/editor/src/bracket_colorization.rs 🔗

@@ -45,7 +45,7 @@ impl Editor {
 
         let bracket_matches_by_accent = self.visible_excerpts(false, cx).into_iter().fold(
             HashMap::default(),
-            |mut acc, (excerpt_id, (buffer, buffer_version, buffer_range))| {
+            |mut acc, (excerpt_id, (buffer, _, buffer_range))| {
                 let buffer_snapshot = buffer.read(cx).snapshot();
                 if language_settings::language_settings(
                     buffer_snapshot.language().map(|language| language.name()),
@@ -62,7 +62,7 @@ impl Editor {
                     let brackets_by_accent = buffer_snapshot
                         .fetch_bracket_ranges(
                             buffer_range.start..buffer_range.end,
-                            Some((&buffer_version, fetched_chunks)),
+                            Some(fetched_chunks),
                         )
                         .into_iter()
                         .flat_map(|(chunk_range, pairs)| {

crates/editor/src/display_map.rs 🔗

@@ -56,6 +56,7 @@ use sum_tree::{Bias, TreeMap};
 use text::{BufferId, LineIndent};
 use ui::{SharedString, px};
 use unicode_segmentation::UnicodeSegmentation;
+use ztracing::instrument;
 
 use std::{
     any::TypeId,
@@ -168,6 +169,7 @@ impl DisplayMap {
         }
     }
 
+    #[instrument(skip_all)]
     pub fn snapshot(&mut self, cx: &mut Context<Self>) -> DisplaySnapshot {
         let tab_size = Self::tab_size(&self.buffer, cx);
 
@@ -195,6 +197,7 @@ impl DisplayMap {
         }
     }
 
+    #[instrument(skip_all)]
     pub fn set_state(&mut self, other: &DisplaySnapshot, cx: &mut Context<Self>) {
         self.fold(
             other
@@ -211,6 +214,7 @@ impl DisplayMap {
     }
 
     /// Creates folds for the given creases.
+    #[instrument(skip_all)]
     pub fn fold<T: Clone + ToOffset>(&mut self, creases: Vec<Crease<T>>, cx: &mut Context<Self>) {
         let buffer_snapshot = self.buffer.read(cx).snapshot(cx);
         let edits = self.buffer_subscription.consume().into_inner();
@@ -279,6 +283,7 @@ impl DisplayMap {
     }
 
     /// Removes any folds with the given ranges.
+    #[instrument(skip_all)]
     pub fn remove_folds_with_type<T: ToOffset>(
         &mut self,
         ranges: impl IntoIterator<Item = Range<T>>,
@@ -304,6 +309,7 @@ impl DisplayMap {
     }
 
     /// Removes any folds whose ranges intersect any of the given ranges.
+    #[instrument(skip_all)]
     pub fn unfold_intersecting<T: ToOffset>(
         &mut self,
         ranges: impl IntoIterator<Item = Range<T>>,
@@ -335,6 +341,7 @@ impl DisplayMap {
         block_map.remove_intersecting_replace_blocks(offset_ranges, inclusive);
     }
 
+    #[instrument(skip_all)]
     pub fn disable_header_for_buffer(&mut self, buffer_id: BufferId, cx: &mut Context<Self>) {
         let snapshot = self.buffer.read(cx).snapshot(cx);
         let edits = self.buffer_subscription.consume().into_inner();
@@ -349,6 +356,7 @@ impl DisplayMap {
         block_map.disable_header_for_buffer(buffer_id)
     }
 
+    #[instrument(skip_all)]
     pub fn fold_buffers(
         &mut self,
         buffer_ids: impl IntoIterator<Item = language::BufferId>,
@@ -367,6 +375,7 @@ impl DisplayMap {
         block_map.fold_buffers(buffer_ids, self.buffer.read(cx), cx)
     }
 
+    #[instrument(skip_all)]
     pub fn unfold_buffers(
         &mut self,
         buffer_ids: impl IntoIterator<Item = language::BufferId>,
@@ -385,14 +394,17 @@ impl DisplayMap {
         block_map.unfold_buffers(buffer_ids, self.buffer.read(cx), cx)
     }
 
+    #[instrument(skip_all)]
     pub(crate) fn is_buffer_folded(&self, buffer_id: language::BufferId) -> bool {
         self.block_map.folded_buffers.contains(&buffer_id)
     }
 
+    #[instrument(skip_all)]
     pub(crate) fn folded_buffers(&self) -> &HashSet<BufferId> {
         &self.block_map.folded_buffers
     }
 
+    #[instrument(skip_all)]
     pub fn insert_creases(
         &mut self,
         creases: impl IntoIterator<Item = Crease<Anchor>>,
@@ -402,6 +414,7 @@ impl DisplayMap {
         self.crease_map.insert(creases, &snapshot)
     }
 
+    #[instrument(skip_all)]
     pub fn remove_creases(
         &mut self,
         crease_ids: impl IntoIterator<Item = CreaseId>,
@@ -411,6 +424,7 @@ impl DisplayMap {
         self.crease_map.remove(crease_ids, &snapshot)
     }
 
+    #[instrument(skip_all)]
     pub fn insert_blocks(
         &mut self,
         blocks: impl IntoIterator<Item = BlockProperties<Anchor>>,
@@ -429,6 +443,7 @@ impl DisplayMap {
         block_map.insert(blocks)
     }
 
+    #[instrument(skip_all)]
     pub fn resize_blocks(&mut self, heights: HashMap<CustomBlockId, u32>, cx: &mut Context<Self>) {
         let snapshot = self.buffer.read(cx).snapshot(cx);
         let edits = self.buffer_subscription.consume().into_inner();
@@ -443,10 +458,12 @@ impl DisplayMap {
         block_map.resize(heights);
     }
 
+    #[instrument(skip_all)]
     pub fn replace_blocks(&mut self, renderers: HashMap<CustomBlockId, RenderBlock>) {
         self.block_map.replace_blocks(renderers);
     }
 
+    #[instrument(skip_all)]
     pub fn remove_blocks(&mut self, ids: HashSet<CustomBlockId>, cx: &mut Context<Self>) {
         let snapshot = self.buffer.read(cx).snapshot(cx);
         let edits = self.buffer_subscription.consume().into_inner();
@@ -461,6 +478,7 @@ impl DisplayMap {
         block_map.remove(ids);
     }
 
+    #[instrument(skip_all)]
     pub fn row_for_block(
         &mut self,
         block_id: CustomBlockId,
@@ -480,6 +498,7 @@ impl DisplayMap {
         Some(DisplayRow(block_row.0))
     }
 
+    #[instrument(skip_all)]
     pub fn highlight_text(
         &mut self,
         key: HighlightKey,
@@ -507,6 +526,7 @@ impl DisplayMap {
         self.text_highlights.insert(key, to_insert);
     }
 
+    #[instrument(skip_all)]
     pub(crate) fn highlight_inlays(
         &mut self,
         type_id: TypeId,
@@ -526,6 +546,7 @@ impl DisplayMap {
         }
     }
 
+    #[instrument(skip_all)]
     pub fn text_highlights(&self, type_id: TypeId) -> Option<(HighlightStyle, &[Range<Anchor>])> {
         let highlights = self.text_highlights.get(&HighlightKey::Type(type_id))?;
         Some((highlights.0, &highlights.1))
@@ -538,6 +559,7 @@ impl DisplayMap {
         self.text_highlights.values()
     }
 
+    #[instrument(skip_all)]
     pub fn clear_highlights(&mut self, type_id: TypeId) -> bool {
         let mut cleared = self
             .text_highlights
@@ -566,6 +588,7 @@ impl DisplayMap {
             .update(cx, |map, cx| map.set_wrap_width(width, cx))
     }
 
+    #[instrument(skip_all)]
     pub fn update_fold_widths(
         &mut self,
         widths: impl IntoIterator<Item = (ChunkRendererId, Pixels)>,
@@ -597,6 +620,7 @@ impl DisplayMap {
         self.inlay_map.current_inlays()
     }
 
+    #[instrument(skip_all)]
     pub(crate) fn splice_inlays(
         &mut self,
         to_remove: &[InlayId],
@@ -626,6 +650,7 @@ impl DisplayMap {
         self.block_map.read(snapshot, edits);
     }
 
+    #[instrument(skip_all)]
     fn tab_size(buffer: &Entity<MultiBuffer>, cx: &App) -> NonZeroU32 {
         let buffer = buffer.read(cx).as_singleton().map(|buffer| buffer.read(cx));
         let language = buffer
@@ -675,6 +700,7 @@ pub struct HighlightedChunk<'a> {
 }
 
 impl<'a> HighlightedChunk<'a> {
+    #[instrument(skip_all)]
     fn highlight_invisibles(
         self,
         editor_style: &'a EditorStyle,
@@ -832,6 +858,7 @@ impl DisplaySnapshot {
         self.buffer_snapshot().widest_line_number()
     }
 
+    #[instrument(skip_all)]
     pub fn prev_line_boundary(&self, mut point: MultiBufferPoint) -> (Point, DisplayPoint) {
         loop {
             let mut inlay_point = self.inlay_snapshot().to_inlay_point(point);
@@ -850,6 +877,7 @@ impl DisplaySnapshot {
         }
     }
 
+    #[instrument(skip_all)]
     pub fn next_line_boundary(
         &self,
         mut point: MultiBufferPoint,
@@ -888,6 +916,7 @@ impl DisplaySnapshot {
         new_start..new_end
     }
 
+    #[instrument(skip_all)]
     pub fn point_to_display_point(&self, point: MultiBufferPoint, bias: Bias) -> DisplayPoint {
         let inlay_point = self.inlay_snapshot().to_inlay_point(point);
         let fold_point = self.fold_snapshot().to_fold_point(inlay_point, bias);
@@ -917,6 +946,7 @@ impl DisplaySnapshot {
             .anchor_at(point.to_offset(self, bias), bias)
     }
 
+    #[instrument(skip_all)]
     fn display_point_to_inlay_point(&self, point: DisplayPoint, bias: Bias) -> InlayPoint {
         let block_point = point.0;
         let wrap_point = self.block_snapshot.to_wrap_point(block_point, bias);
@@ -928,6 +958,7 @@ impl DisplaySnapshot {
         fold_point.to_inlay_point(self.fold_snapshot())
     }
 
+    #[instrument(skip_all)]
     pub fn display_point_to_fold_point(&self, point: DisplayPoint, bias: Bias) -> FoldPoint {
         let block_point = point.0;
         let wrap_point = self.block_snapshot.to_wrap_point(block_point, bias);
@@ -937,6 +968,7 @@ impl DisplaySnapshot {
             .0
     }
 
+    #[instrument(skip_all)]
     pub fn fold_point_to_display_point(&self, fold_point: FoldPoint) -> DisplayPoint {
         let tab_point = self.tab_snapshot().fold_point_to_tab_point(fold_point);
         let wrap_point = self.wrap_snapshot().tab_point_to_wrap_point(tab_point);
@@ -949,6 +981,7 @@ impl DisplaySnapshot {
     }
 
     /// Returns text chunks starting at the given display row until the end of the file
+    #[instrument(skip_all)]
     pub fn text_chunks(&self, display_row: DisplayRow) -> impl Iterator<Item = &str> {
         self.block_snapshot
             .chunks(
@@ -961,6 +994,7 @@ impl DisplaySnapshot {
     }
 
     /// Returns text chunks starting at the end of the given display row in reverse until the start of the file
+    #[instrument(skip_all)]
     pub fn reverse_text_chunks(&self, display_row: DisplayRow) -> impl Iterator<Item = &str> {
         (0..=display_row.0).rev().flat_map(move |row| {
             self.block_snapshot
@@ -977,6 +1011,7 @@ impl DisplaySnapshot {
         })
     }
 
+    #[instrument(skip_all)]
     pub fn chunks(
         &self,
         display_rows: Range<DisplayRow>,
@@ -995,6 +1030,7 @@ impl DisplaySnapshot {
         )
     }
 
+    #[instrument(skip_all)]
     pub fn highlighted_chunks<'a>(
         &'a self,
         display_rows: Range<DisplayRow>,
@@ -1071,6 +1107,7 @@ impl DisplaySnapshot {
         })
     }
 
+    #[instrument(skip_all)]
     pub fn layout_row(
         &self,
         display_row: DisplayRow,
@@ -1132,6 +1169,7 @@ impl DisplaySnapshot {
         layout_line.closest_index_for_x(x) as u32
     }
 
+    #[instrument(skip_all)]
     pub fn grapheme_at(&self, mut point: DisplayPoint) -> Option<SharedString> {
         point = DisplayPoint(self.block_snapshot.clip_point(point.0, Bias::Left));
         let chars = self
@@ -1321,6 +1359,7 @@ impl DisplaySnapshot {
             .unwrap_or(false)
     }
 
+    #[instrument(skip_all)]
     pub fn crease_for_buffer_row(&self, buffer_row: MultiBufferRow) -> Option<Crease<Point>> {
         let start =
             MultiBufferPoint::new(buffer_row.0, self.buffer_snapshot().line_len(buffer_row));
@@ -1407,6 +1446,7 @@ impl DisplaySnapshot {
     }
 
     #[cfg(any(test, feature = "test-support"))]
+    #[instrument(skip_all)]
     pub fn text_highlight_ranges<Tag: ?Sized + 'static>(
         &self,
     ) -> Option<Arc<(HighlightStyle, Vec<Range<Anchor>>)>> {
@@ -1417,6 +1457,7 @@ impl DisplaySnapshot {
     }
 
     #[cfg(any(test, feature = "test-support"))]
+    #[instrument(skip_all)]
     pub fn all_text_highlight_ranges<Tag: ?Sized + 'static>(
         &self,
     ) -> Vec<(gpui::Hsla, Range<Point>)> {
@@ -1466,6 +1507,7 @@ impl DisplaySnapshot {
     ///
     /// This moves by buffer rows instead of display rows, a distinction that is
     /// important when soft wrapping is enabled.
+    #[instrument(skip_all)]
     pub fn start_of_relative_buffer_row(&self, point: DisplayPoint, times: isize) -> DisplayPoint {
         let start = self.display_point_to_fold_point(point, Bias::Left);
         let target = start.row() as isize + times;

crates/editor/src/display_map/block_map.rs 🔗

@@ -529,7 +529,7 @@ impl BlockMap {
         BlockMapWriter(self)
     }
 
-    #[ztracing::instrument(skip_all, fields(edits))]
+    #[ztracing::instrument(skip_all, fields(edits = ?edits))]
     fn sync(&self, wrap_snapshot: &WrapSnapshot, mut edits: WrapPatch) {
         let _timer = zlog::time!("BlockMap::sync").warn_if_gt(std::time::Duration::from_millis(50));
 
@@ -570,6 +570,9 @@ impl BlockMap {
         let mut wrap_point_cursor = wrap_snapshot.wrap_point_cursor();
 
         while let Some(edit) = edits.next() {
+            let span = ztracing::debug_span!("while edits", edit = ?edit);
+            let _enter = span.enter();
+
             let mut old_start = edit.old.start;
             let mut new_start = edit.new.start;
 
@@ -628,6 +631,8 @@ impl BlockMap {
             let mut old_end = edit.old.end;
             let mut new_end = edit.new.end;
             loop {
+                let span = ztracing::debug_span!("decide where edit ends loop");
+                let _enter = span.enter();
                 // Seek to the transform starting at or after the end of the edit
                 cursor.seek(&old_end, Bias::Left);
                 cursor.next();
@@ -736,6 +741,10 @@ impl BlockMap {
             // and then insert the block itself.
             let mut just_processed_folded_buffer = false;
             for (block_placement, block) in blocks_in_edit.drain(..) {
+                let span =
+                    ztracing::debug_span!("for block in edits", block_height = block.height());
+                let _enter = span.enter();
+
                 let mut summary = TransformSummary {
                     input_rows: WrapRow(0),
                     output_rows: BlockRow(block.height()),
@@ -957,6 +966,7 @@ impl BlockMap {
     }
 }
 
+#[ztracing::instrument(skip(tree, wrap_snapshot))]
 fn push_isomorphic(tree: &mut SumTree<Transform>, rows: RowDelta, wrap_snapshot: &WrapSnapshot) {
     if rows == RowDelta(0) {
         return;

crates/editor/src/display_map/wrap_map.rs 🔗

@@ -840,7 +840,7 @@ impl WrapSnapshot {
         self.tab_point_to_wrap_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias))
     }
 
-    #[ztracing::instrument(skip_all, fields(point, ret))]
+    #[ztracing::instrument(skip_all, fields(point=?point, ret))]
     pub fn prev_row_boundary(&self, mut point: WrapPoint) -> WrapRow {
         if self.transforms.is_empty() {
             return WrapRow(0);
@@ -851,11 +851,14 @@ impl WrapSnapshot {
         let mut cursor = self
             .transforms
             .cursor::<Dimensions<WrapPoint, TabPoint>>(());
+        // start
         cursor.seek(&point, Bias::Right);
+        // end
         if cursor.item().is_none() {
             cursor.prev();
         }
 
+        // start
         while let Some(transform) = cursor.item() {
             if transform.is_isomorphic() && cursor.start().1.column() == 0 {
                 return cmp::min(cursor.end().0.row(), point.row());
@@ -863,6 +866,7 @@ impl WrapSnapshot {
                 cursor.prev();
             }
         }
+        // end
 
         unreachable!()
     }

crates/editor/src/editor.rs 🔗

@@ -7135,6 +7135,7 @@ impl Editor {
         Some((query, selection_anchor_range))
     }
 
+    #[ztracing::instrument(skip_all)]
     fn update_selection_occurrence_highlights(
         &mut self,
         query_text: String,
@@ -7279,6 +7280,7 @@ impl Editor {
         });
     }
 
+    #[ztracing::instrument(skip_all)]
     fn refresh_selected_text_highlights(
         &mut self,
         on_buffer_edit: bool,
@@ -20973,9 +20975,22 @@ impl Editor {
                 buffer_ranges.last()
             }?;
 
-            let selection = text::ToPoint::to_point(&range.start, buffer).row
-                ..text::ToPoint::to_point(&range.end, buffer).row;
-            Some((multi_buffer.buffer(buffer.remote_id()).unwrap(), selection))
+            let start_row_in_buffer = text::ToPoint::to_point(&range.start, buffer).row;
+            let end_row_in_buffer = text::ToPoint::to_point(&range.end, buffer).row;
+
+            let Some(buffer_diff) = multi_buffer.diff_for(buffer.remote_id()) else {
+                let selection = start_row_in_buffer..end_row_in_buffer;
+
+                return Some((multi_buffer.buffer(buffer.remote_id()).unwrap(), selection));
+            };
+
+            let buffer_diff_snapshot = buffer_diff.read(cx).snapshot(cx);
+
+            Some((
+                multi_buffer.buffer(buffer.remote_id()).unwrap(),
+                buffer_diff_snapshot.row_to_base_text_row(start_row_in_buffer, buffer)
+                    ..buffer_diff_snapshot.row_to_base_text_row(end_row_in_buffer, buffer),
+            ))
         });
 
         let Some((buffer, selection)) = buffer_and_selection else {

crates/editor/src/editor_tests.rs 🔗

@@ -27701,6 +27701,7 @@ async fn test_markdown_indents(cx: &mut gpui::TestAppContext) {
     cx.update_editor(|editor, window, cx| {
         editor.handle_input("x", window, cx);
     });
+    cx.run_until_parked();
     cx.assert_editor_state(indoc! {"
         - [ ] Item 1
             - [ ] Item 1.a
@@ -27716,8 +27717,7 @@ async fn test_markdown_indents(cx: &mut gpui::TestAppContext) {
             - [ ] Item 1.a
         - [x] Item 2
             - [x] Item 2.a
-            - [x] Item 2.bˇ
-        "
+            - [x] Item 2.bˇ"
     });
     cx.update_editor(|editor, window, cx| {
         editor.newline(&Newline, window, cx);
@@ -27728,34 +27728,41 @@ async fn test_markdown_indents(cx: &mut gpui::TestAppContext) {
         - [x] Item 2
             - [x] Item 2.a
             - [x] Item 2.b
-            ˇ
-        "
+            ˇ"
     });
 
     // Case 3: Test adding a new nested list item preserves indent
+    cx.set_state(&indoc! {"
+        - [ ] Item 1
+            - [ ] Item 1.a
+        - [x] Item 2
+            - [x] Item 2.a
+            - [x] Item 2.b
+            ˇ"
+    });
     cx.update_editor(|editor, window, cx| {
         editor.handle_input("-", window, cx);
     });
+    cx.run_until_parked();
     cx.assert_editor_state(indoc! {"
         - [ ] Item 1
             - [ ] Item 1.a
         - [x] Item 2
             - [x] Item 2.a
             - [x] Item 2.b
-            -ˇ
-        "
+            -ˇ"
     });
     cx.update_editor(|editor, window, cx| {
         editor.handle_input(" [x] Item 2.c", window, cx);
     });
+    cx.run_until_parked();
     cx.assert_editor_state(indoc! {"
         - [ ] Item 1
             - [ ] Item 1.a
         - [x] Item 2
             - [x] Item 2.a
             - [x] Item 2.b
-            - [x] Item 2.cˇ
-        "
+            - [x] Item 2.cˇ"
     });
 
     // Case 4: Test adding new line after nested ordered list preserves indent of previous line
@@ -27764,8 +27771,7 @@ async fn test_markdown_indents(cx: &mut gpui::TestAppContext) {
             1. Item 1.a
         2. Item 2
             1. Item 2.a
-            2. Item 2.bˇ
-        "
+            2. Item 2.bˇ"
     });
     cx.update_editor(|editor, window, cx| {
         editor.newline(&Newline, window, cx);
@@ -27776,60 +27782,81 @@ async fn test_markdown_indents(cx: &mut gpui::TestAppContext) {
         2. Item 2
             1. Item 2.a
             2. Item 2.b
-            ˇ
-        "
+            ˇ"
     });
 
     // Case 5: Adding new ordered list item preserves indent
+    cx.set_state(indoc! {"
+        1. Item 1
+            1. Item 1.a
+        2. Item 2
+            1. Item 2.a
+            2. Item 2.b
+            ˇ"
+    });
     cx.update_editor(|editor, window, cx| {
         editor.handle_input("3", window, cx);
     });
+    cx.run_until_parked();
     cx.assert_editor_state(indoc! {"
         1. Item 1
             1. Item 1.a
         2. Item 2
             1. Item 2.a
             2. Item 2.b
-            3ˇ
-        "
+            3ˇ"
     });
     cx.update_editor(|editor, window, cx| {
         editor.handle_input(".", window, cx);
     });
+    cx.run_until_parked();
     cx.assert_editor_state(indoc! {"
         1. Item 1
             1. Item 1.a
         2. Item 2
             1. Item 2.a
             2. Item 2.b
-            3.ˇ
-        "
+            3.ˇ"
     });
     cx.update_editor(|editor, window, cx| {
         editor.handle_input(" Item 2.c", window, cx);
     });
+    cx.run_until_parked();
     cx.assert_editor_state(indoc! {"
         1. Item 1
             1. Item 1.a
         2. Item 2
             1. Item 2.a
             2. Item 2.b
-            3. Item 2.cˇ
-        "
+            3. Item 2.cˇ"
     });
 
+    // Case 6: Test adding a new unordered list item at the start of a line does not inherit the previous item's indent
+    cx.set_state(indoc! {"
+        - Item 1
+            - Item 1.a
+            - Item 1.a
+        ˇ"});
+    cx.update_editor(|editor, window, cx| {
+        editor.handle_input("-", window, cx);
+    });
+    cx.run_until_parked();
+    cx.assert_editor_state(indoc! {"
+        - Item 1
+            - Item 1.a
+            - Item 1.a
+        -ˇ"});
+
+    // Case 7: Test that a newline after a blockquote line does not continue the blockquote prefix
     cx.set_state(indoc! {"
-        > Item 1ˇ
-        "
+        > Item 1ˇ"
     });
     cx.update_editor(|editor, window, cx| {
         editor.newline(&Newline, window, cx);
     });
     cx.assert_editor_state(indoc! {"
         > Item 1
-        ˇ
-        "
+        ˇ"
     });
 }
 

crates/editor/src/hover_popover.rs 🔗

@@ -623,7 +623,10 @@ pub fn hover_markdown_style(window: &Window, cx: &App) -> MarkdownStyle {
     });
     MarkdownStyle {
         base_text_style,
-        code_block: StyleRefinement::default().my(rems(1.)).font_buffer(cx),
+        code_block: StyleRefinement::default()
+            .my(rems(1.))
+            .font_buffer(cx)
+            .font_features(buffer_font_features.clone()),
         inline_code: TextStyleRefinement {
             background_color: Some(cx.theme().colors().background),
             font_family: Some(buffer_font_family),

crates/eval/src/instance.rs 🔗

@@ -892,7 +892,7 @@ pub fn wait_for_lang_server(
         .update(cx, |buffer, cx| {
             lsp_store.update(cx, |lsp_store, cx| {
                 lsp_store
-                    .language_servers_for_local_buffer(buffer, cx)
+                    .running_language_servers_for_local_buffer(buffer, cx)
                     .next()
                     .is_some()
             })

crates/fs/src/fake_git_repo.rs 🔗

@@ -23,6 +23,7 @@ use std::{
     path::PathBuf,
     sync::{Arc, LazyLock},
 };
+use text::LineEnding;
 use util::{paths::PathStyle, rel_path::RelPath};
 
 pub static LOAD_INDEX_TEXT_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
@@ -200,6 +201,7 @@ impl GitRepository for FakeGitRepository {
         async {
             Ok(CommitDetails {
                 sha: commit.into(),
+                message: "initial commit".into(),
                 ..Default::default()
             })
         }
@@ -451,7 +453,12 @@ impl GitRepository for FakeGitRepository {
         })
     }
 
-    fn blame(&self, path: RepoPath, _content: Rope) -> BoxFuture<'_, Result<git::blame::Blame>> {
+    fn blame(
+        &self,
+        path: RepoPath,
+        _content: Rope,
+        _line_ending: LineEnding,
+    ) -> BoxFuture<'_, Result<git::blame::Blame>> {
         self.with_state_async(false, move |state| {
             state
                 .blames
@@ -568,7 +575,7 @@ impl GitRepository for FakeGitRepository {
         _askpass: AskPassDelegate,
         _env: Arc<HashMap<String, String>>,
     ) -> BoxFuture<'_, Result<()>> {
-        unimplemented!()
+        async { Ok(()) }.boxed()
     }
 
     fn run_hook(
@@ -576,7 +583,7 @@ impl GitRepository for FakeGitRepository {
         _hook: RunHook,
         _env: Arc<HashMap<String, String>>,
     ) -> BoxFuture<'_, Result<()>> {
-        unimplemented!()
+        async { Ok(()) }.boxed()
     }
 
     fn push(

crates/fs/src/fs.rs 🔗

@@ -803,7 +803,7 @@ impl Fs for RealFs {
         }
         let file = smol::fs::File::create(path).await?;
         let mut writer = smol::io::BufWriter::with_capacity(buffer_size, file);
-        for chunk in chunks(text, line_ending) {
+        for chunk in text::chunks_with_line_ending(text, line_ending) {
             writer.write_all(chunk.as_bytes()).await?;
         }
         writer.flush().await?;
@@ -2555,7 +2555,7 @@ impl Fs for FakeFs {
     async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()> {
         self.simulate_random_delay().await;
         let path = normalize_path(path);
-        let content = chunks(text, line_ending).collect::<String>();
+        let content = text::chunks_with_line_ending(text, line_ending).collect::<String>();
         if let Some(path) = path.parent() {
             self.create_dir(path).await?;
         }
@@ -2773,25 +2773,6 @@ impl Fs for FakeFs {
     }
 }
 
-fn chunks(rope: &Rope, line_ending: LineEnding) -> impl Iterator<Item = &str> {
-    rope.chunks().flat_map(move |chunk| {
-        let mut newline = false;
-        let end_with_newline = chunk.ends_with('\n').then_some(line_ending.as_str());
-        chunk
-            .lines()
-            .flat_map(move |line| {
-                let ending = if newline {
-                    Some(line_ending.as_str())
-                } else {
-                    None
-                };
-                newline = true;
-                ending.into_iter().chain([line])
-            })
-            .chain(end_with_newline)
-    })
-}
-
 pub fn normalize_path(path: &Path) -> PathBuf {
     let mut components = path.components().peekable();
     let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
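
The removed local `chunks` helper is superseded by a shared `text::chunks_with_line_ending`, which both `RealFs::save` and `FakeFs::save` now call. A minimal sketch of the same pattern in isolation, assuming the `(&Rope, LineEnding) -> impl Iterator<Item = &str>` shape visible at the call sites above; the helper function name is illustrative:

    use text::{LineEnding, Rope};

    // Hypothetical caller: render a Rope as a single String using Windows line
    // endings, mirroring the FakeFs::save call site above.
    fn rope_to_crlf_string(rope: &Rope) -> String {
        text::chunks_with_line_ending(rope, LineEnding::Windows).collect::<String>()
    }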

crates/git/src/blame.rs 🔗

@@ -8,7 +8,7 @@ use gpui::SharedString;
 use serde::{Deserialize, Serialize};
 use std::process::Stdio;
 use std::{ops::Range, path::Path};
-use text::Rope;
+use text::{LineEnding, Rope};
 use time::OffsetDateTime;
 use time::UtcOffset;
 use time::macros::format_description;
@@ -35,8 +35,10 @@ impl Blame {
         working_directory: &Path,
         path: &RepoPath,
         content: &Rope,
+        line_ending: LineEnding,
     ) -> Result<Self> {
-        let output = run_git_blame(git_binary, working_directory, path, content).await?;
+        let output =
+            run_git_blame(git_binary, working_directory, path, content, line_ending).await?;
         let mut entries = parse_git_blame(&output)?;
         entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start));
 
@@ -63,12 +65,12 @@ async fn run_git_blame(
     working_directory: &Path,
     path: &RepoPath,
     contents: &Rope,
+    line_ending: LineEnding,
 ) -> Result<String> {
     let mut child = util::command::new_smol_command(git_binary)
         .current_dir(working_directory)
         .arg("blame")
         .arg("--incremental")
-        .arg("-w")
         .arg("--contents")
         .arg("-")
         .arg(path.as_unix_str())
@@ -83,7 +85,7 @@ async fn run_git_blame(
         .as_mut()
         .context("failed to get pipe to stdin of git blame command")?;
 
-    for chunk in contents.chunks() {
+    for chunk in text::chunks_with_line_ending(contents, line_ending) {
         stdin.write_all(chunk.as_bytes()).await?;
     }
     stdin.flush().await?;

crates/git/src/repository.rs 🔗

@@ -14,6 +14,7 @@ use rope::Rope;
 use schemars::JsonSchema;
 use serde::Deserialize;
 use smol::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
+use text::LineEnding;
 
 use std::collections::HashSet;
 use std::ffi::{OsStr, OsString};
@@ -487,7 +488,12 @@ pub trait GitRepository: Send + Sync {
     fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>>;
 
     fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<'_, Result<CommitDiff>>;
-    fn blame(&self, path: RepoPath, content: Rope) -> BoxFuture<'_, Result<crate::blame::Blame>>;
+    fn blame(
+        &self,
+        path: RepoPath,
+        content: Rope,
+        line_ending: LineEnding,
+    ) -> BoxFuture<'_, Result<crate::blame::Blame>>;
     fn file_history(&self, path: RepoPath) -> BoxFuture<'_, Result<FileHistory>>;
     fn file_history_paginated(
         &self,
@@ -1512,7 +1518,12 @@ impl GitRepository for RealGitRepository {
             .boxed()
     }
 
-    fn blame(&self, path: RepoPath, content: Rope) -> BoxFuture<'_, Result<crate::blame::Blame>> {
+    fn blame(
+        &self,
+        path: RepoPath,
+        content: Rope,
+        line_ending: LineEnding,
+    ) -> BoxFuture<'_, Result<crate::blame::Blame>> {
         let working_directory = self.working_directory();
         let git_binary_path = self.any_git_binary_path.clone();
         let executor = self.executor.clone();
@@ -1524,6 +1535,7 @@ impl GitRepository for RealGitRepository {
                     &working_directory?,
                     &path,
                     &content,
+                    line_ending,
                 )
                 .await
             })
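
`GitRepository::blame` now threads the buffer's line ending through to `git blame --contents -`, so the piped contents match what is on disk. A hedged sketch of a call site against the trait as declared above; the wrapper function itself is an assumption:

    use git::repository::{GitRepository, RepoPath};
    use rope::Rope;
    use text::LineEnding;

    // Hypothetical wrapper: blame a buffer's current contents, passing along the
    // line ending that was detected when the buffer was loaded.
    async fn blame_buffer(
        repo: &dyn GitRepository,
        path: RepoPath,
        content: Rope,
        line_ending: LineEnding,
    ) -> anyhow::Result<git::blame::Blame> {
        repo.blame(path, content, line_ending).await
    }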

crates/git_ui/src/blame_ui.rs 🔗

@@ -47,11 +47,13 @@ impl BlameRenderer for GitBlameRenderer {
         let name = util::truncate_and_trailoff(author_name, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED);
 
         let avatar = if ProjectSettings::get_global(cx).git.blame.show_avatar {
-            CommitAvatar::new(
-                &blame_entry.sha.to_string().into(),
-                details.as_ref().and_then(|it| it.remote.as_ref()),
+            Some(
+                CommitAvatar::new(
+                    &blame_entry.sha.to_string().into(),
+                    details.as_ref().and_then(|it| it.remote.as_ref()),
+                )
+                .render(window, cx),
             )
-            .render(window, cx)
         } else {
             None
         };
@@ -65,7 +67,7 @@ impl BlameRenderer for GitBlameRenderer {
                         .w_full()
                         .gap_2()
                         .justify_between()
-                        .font_family(style.font().family)
+                        .font(style.font())
                         .line_height(style.line_height)
                         .text_color(cx.theme().status().hint)
                         .child(
@@ -264,7 +266,7 @@ impl BlameRenderer for GitBlameRenderer {
                                     .flex_wrap()
                                     .border_b_1()
                                     .border_color(cx.theme().colors().border_variant)
-                                    .children(avatar)
+                                    .child(avatar)
                                     .child(author)
                                     .when(!author_email.is_empty(), |this| {
                                         this.child(

crates/git_ui/src/commit_modal.rs 🔗

@@ -139,7 +139,7 @@ impl CommitModal {
                             && !git_panel.amend_pending()
                         {
                             git_panel.set_amend_pending(true, cx);
-                            git_panel.load_last_commit_message_if_empty(cx);
+                            git_panel.load_last_commit_message(cx);
                         }
                     }
                     ForceMode::Commit => {
@@ -492,53 +492,20 @@ impl CommitModal {
         }
     }
 
-    fn commit(&mut self, _: &git::Commit, window: &mut Window, cx: &mut Context<Self>) {
-        if self.git_panel.read(cx).amend_pending() {
-            return;
+    fn on_commit(&mut self, _: &git::Commit, window: &mut Window, cx: &mut Context<Self>) {
+        if self.git_panel.update(cx, |git_panel, cx| {
+            git_panel.commit(&self.commit_editor.focus_handle(cx), window, cx)
+        }) {
+            telemetry::event!("Git Committed", source = "Git Modal");
+            cx.emit(DismissEvent);
         }
-        telemetry::event!("Git Committed", source = "Git Modal");
-        self.git_panel.update(cx, |git_panel, cx| {
-            git_panel.commit_changes(
-                CommitOptions {
-                    amend: false,
-                    signoff: git_panel.signoff_enabled(),
-                },
-                window,
-                cx,
-            )
-        });
-        cx.emit(DismissEvent);
     }
 
-    fn amend(&mut self, _: &git::Amend, window: &mut Window, cx: &mut Context<Self>) {
-        if self
-            .git_panel
-            .read(cx)
-            .active_repository
-            .as_ref()
-            .and_then(|repo| repo.read(cx).head_commit.as_ref())
-            .is_none()
-        {
-            return;
-        }
-        if !self.git_panel.read(cx).amend_pending() {
-            self.git_panel.update(cx, |git_panel, cx| {
-                git_panel.set_amend_pending(true, cx);
-                git_panel.load_last_commit_message_if_empty(cx);
-            });
-        } else {
+    fn on_amend(&mut self, _: &git::Amend, window: &mut Window, cx: &mut Context<Self>) {
+        if self.git_panel.update(cx, |git_panel, cx| {
+            git_panel.amend(&self.commit_editor.focus_handle(cx), window, cx)
+        }) {
             telemetry::event!("Git Amended", source = "Git Modal");
-            self.git_panel.update(cx, |git_panel, cx| {
-                git_panel.set_amend_pending(false, cx);
-                git_panel.commit_changes(
-                    CommitOptions {
-                        amend: true,
-                        signoff: git_panel.signoff_enabled(),
-                    },
-                    window,
-                    cx,
-                );
-            });
             cx.emit(DismissEvent);
         }
     }
@@ -564,8 +531,8 @@ impl Render for CommitModal {
             .id("commit-modal")
             .key_context("GitCommit")
             .on_action(cx.listener(Self::dismiss))
-            .on_action(cx.listener(Self::commit))
-            .on_action(cx.listener(Self::amend))
+            .on_action(cx.listener(Self::on_commit))
+            .on_action(cx.listener(Self::on_amend))
             .when(!DisableAiSettings::get_global(cx).disable_ai, |this| {
                 this.on_action(cx.listener(|this, _: &GenerateCommitMessage, _, cx| {
                     this.git_panel.update(cx, |panel, cx| {

crates/git_ui/src/commit_tooltip.rs 🔗

@@ -29,11 +29,16 @@ pub struct CommitDetails {
 pub struct CommitAvatar<'a> {
     sha: &'a SharedString,
     remote: Option<&'a GitRemote>,
+    size: Option<IconSize>,
 }
 
 impl<'a> CommitAvatar<'a> {
     pub fn new(sha: &'a SharedString, remote: Option<&'a GitRemote>) -> Self {
-        Self { sha, remote }
+        Self {
+            sha,
+            remote,
+            size: None,
+        }
     }
 
     pub fn from_commit_details(details: &'a CommitDetails) -> Self {
@@ -43,28 +48,37 @@ impl<'a> CommitAvatar<'a> {
                 .message
                 .as_ref()
                 .and_then(|details| details.remote.as_ref()),
+            size: None,
         }
     }
-}
 
-impl<'a> CommitAvatar<'a> {
-    pub fn render(&'a self, window: &mut Window, cx: &mut App) -> Option<impl IntoElement + use<>> {
+    pub fn size(mut self, size: IconSize) -> Self {
+        self.size = Some(size);
+        self
+    }
+
+    pub fn render(&'a self, window: &mut Window, cx: &mut App) -> AnyElement {
+        match self.avatar(window, cx) {
+            // Loading or no avatar found
+            None => Icon::new(IconName::Person)
+                .color(Color::Muted)
+                .when_some(self.size, |this, size| this.size(size))
+                .into_any_element(),
+            // Found
+            Some(avatar) => avatar
+                .when_some(self.size, |this, size| this.size(size.rems()))
+                .into_any_element(),
+        }
+    }
+
+    pub fn avatar(&'a self, window: &mut Window, cx: &mut App) -> Option<Avatar> {
         let remote = self
             .remote
             .filter(|remote| remote.host_supports_avatars())?;
-
         let avatar_url = CommitAvatarAsset::new(remote.clone(), self.sha.clone());
 
-        let element = match window.use_asset::<CommitAvatarAsset>(&avatar_url, cx) {
-            // Loading or no avatar found
-            None | Some(None) => Icon::new(IconName::Person)
-                .color(Color::Muted)
-                .into_element()
-                .into_any(),
-            // Found
-            Some(Some(url)) => Avatar::new(url.to_string()).into_element().into_any(),
-        };
-        Some(element)
+        let url = window.use_asset::<CommitAvatarAsset>(&avatar_url, cx)??;
+        Some(Avatar::new(url.to_string()))
     }
 }
 
@@ -253,7 +267,7 @@ impl Render for CommitTooltip {
                                 .gap_x_2()
                                 .overflow_x_hidden()
                                 .flex_wrap()
-                                .children(avatar)
+                                .child(avatar)
                                 .child(author)
                                 .when(!author_email.is_empty(), |this| {
                                     this.child(
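
`CommitAvatar` is now a small builder: `render` always yields an element (falling back to the person icon while the avatar loads), and the new `avatar` method exposes the raw `Avatar` so callers such as `CommitView` can supply their own fallback. A sketch of a call site inside `git_ui`, assuming the usual `window`/`cx` pair; the helper function is illustrative:

    use crate::commit_tooltip::CommitAvatar;
    use git::GitRemote;
    use gpui::{AnyElement, App, SharedString, Window};
    use ui::prelude::*;

    // Hypothetical helper: render a commit author's avatar at a small size,
    // falling back to the generic person icon while it loads.
    fn commit_avatar_element(
        sha: &SharedString,
        remote: Option<&GitRemote>,
        window: &mut Window,
        cx: &mut App,
    ) -> AnyElement {
        CommitAvatar::new(sha, remote)
            .size(IconSize::Small) // optional; omit to use the default size
            .render(window, cx)
    }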

crates/git_ui/src/commit_view.rs 🔗

@@ -5,8 +5,8 @@ use editor::{Editor, EditorEvent, ExcerptRange, MultiBuffer, multibuffer_context
 use git::repository::{CommitDetails, CommitDiff, RepoPath};
 use git::{GitHostingProviderRegistry, GitRemote, parse_git_remote_url};
 use gpui::{
-    AnyElement, App, AppContext as _, Asset, AsyncApp, AsyncWindowContext, Context, Element,
-    Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement, ParentElement,
+    AnyElement, App, AppContext as _, AsyncApp, AsyncWindowContext, Context, Element, Entity,
+    EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement, ParentElement,
     PromptLevel, Render, Styled, Task, WeakEntity, Window, actions,
 };
 use language::{
@@ -21,7 +21,7 @@ use std::{
     sync::Arc,
 };
 use theme::ActiveTheme;
-use ui::{Avatar, DiffStat, Tooltip, prelude::*};
+use ui::{DiffStat, Tooltip, prelude::*};
 use util::{ResultExt, paths::PathStyle, rel_path::RelPath, truncate_and_trailoff};
 use workspace::item::TabTooltipContent;
 use workspace::{
@@ -33,6 +33,7 @@ use workspace::{
     searchable::SearchableItemHandle,
 };
 
+use crate::commit_tooltip::CommitAvatar;
 use crate::git_panel::GitPanel;
 
 actions!(git, [ApplyCurrentStash, PopCurrentStash, DropCurrentStash,]);
@@ -318,17 +319,7 @@ impl CommitView {
         cx: &mut App,
     ) -> AnyElement {
         let size = size.into();
-        let remote = self.remote.as_ref().filter(|r| r.host_supports_avatars());
-
-        if let Some(remote) = remote {
-            let avatar_asset = CommitAvatarAsset::new(remote.clone(), sha.clone());
-            if let Some(Some(url)) = window.use_asset::<CommitAvatarAsset>(&avatar_asset, cx) {
-                return Avatar::new(url.to_string())
-                    .size(size)
-                    .into_element()
-                    .into_any();
-            }
-        }
+        let avatar = CommitAvatar::new(sha, self.remote.as_ref());
 
         v_flex()
             .w(size)
@@ -339,10 +330,15 @@ impl CommitView {
             .justify_center()
             .items_center()
             .child(
-                Icon::new(IconName::Person)
-                    .color(Color::Muted)
-                    .size(IconSize::Medium)
-                    .into_element(),
+                avatar
+                    .avatar(window, cx)
+                    .map(|a| a.size(size).into_any_element())
+                    .unwrap_or_else(|| {
+                        Icon::new(IconName::Person)
+                            .color(Color::Muted)
+                            .size(IconSize::Medium)
+                            .into_any_element()
+                    }),
             )
             .into_any()
     }
@@ -647,54 +643,6 @@ impl CommitView {
     }
 }
 
-#[derive(Clone, Debug)]
-struct CommitAvatarAsset {
-    sha: SharedString,
-    remote: GitRemote,
-}
-
-impl std::hash::Hash for CommitAvatarAsset {
-    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
-        self.sha.hash(state);
-        self.remote.host.name().hash(state);
-    }
-}
-
-impl CommitAvatarAsset {
-    fn new(remote: GitRemote, sha: SharedString) -> Self {
-        Self { remote, sha }
-    }
-}
-
-impl Asset for CommitAvatarAsset {
-    type Source = Self;
-    type Output = Option<SharedString>;
-
-    fn load(
-        source: Self::Source,
-        cx: &mut App,
-    ) -> impl Future<Output = Self::Output> + Send + 'static {
-        let client = cx.http_client();
-        async move {
-            match source
-                .remote
-                .host
-                .commit_author_avatar_url(
-                    &source.remote.owner,
-                    &source.remote.repo,
-                    source.sha.clone(),
-                    client,
-                )
-                .await
-            {
-                Ok(Some(url)) => Some(SharedString::from(url.to_string())),
-                Ok(None) => None,
-                Err(_) => None,
-            }
-        }
-    }
-}
-
 impl language::File for GitBlob {
     fn as_local(&self) -> Option<&dyn language::LocalFile> {
         None

crates/git_ui/src/conflict_view.rs 🔗

@@ -111,6 +111,7 @@ fn excerpt_for_buffer_updated(
     );
 }
 
+#[ztracing::instrument(skip_all)]
 fn buffer_added(editor: &mut Editor, buffer: Entity<Buffer>, cx: &mut Context<Editor>) {
     let Some(project) = editor.project() else {
         return;
@@ -166,6 +167,7 @@ fn buffers_removed(editor: &mut Editor, removed_buffer_ids: &[BufferId], cx: &mu
     editor.remove_blocks(removed_block_ids, None, cx);
 }
 
+#[ztracing::instrument(skip_all)]
 fn conflicts_updated(
     editor: &mut Editor,
     conflict_set: Entity<ConflictSet>,
@@ -311,6 +313,7 @@ fn conflicts_updated(
     }
 }
 
+#[ztracing::instrument(skip_all)]
 fn update_conflict_highlighting(
     editor: &mut Editor,
     conflict: &ConflictRegion,

crates/git_ui/src/git_panel.rs 🔗

@@ -1934,16 +1934,26 @@ impl GitPanel {
         }
     }
 
-    fn commit(&mut self, _: &git::Commit, window: &mut Window, cx: &mut Context<Self>) {
+    fn on_commit(&mut self, _: &git::Commit, window: &mut Window, cx: &mut Context<Self>) {
+        if self.commit(&self.commit_editor.focus_handle(cx), window, cx) {
+            telemetry::event!("Git Committed", source = "Git Panel");
+        }
+    }
+
+    /// Commits staged changes with the current commit message.
+    ///
+    /// Returns `true` if the commit was executed, `false` otherwise.
+    pub(crate) fn commit(
+        &mut self,
+        commit_editor_focus_handle: &FocusHandle,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) -> bool {
         if self.amend_pending {
-            return;
+            return false;
         }
-        if self
-            .commit_editor
-            .focus_handle(cx)
-            .contains_focused(window, cx)
-        {
-            telemetry::event!("Git Committed", source = "Git Panel");
+
+        if commit_editor_focus_handle.contains_focused(window, cx) {
             self.commit_changes(
                 CommitOptions {
                     amend: false,
@@ -1951,24 +1961,39 @@ impl GitPanel {
                 },
                 window,
                 cx,
-            )
+            );
+            true
         } else {
             cx.propagate();
+            false
         }
     }
 
-    fn amend(&mut self, _: &git::Amend, window: &mut Window, cx: &mut Context<Self>) {
-        if self
-            .commit_editor
-            .focus_handle(cx)
-            .contains_focused(window, cx)
-        {
+    fn on_amend(&mut self, _: &git::Amend, window: &mut Window, cx: &mut Context<Self>) {
+        if self.amend(&self.commit_editor.focus_handle(cx), window, cx) {
+            telemetry::event!("Git Amended", source = "Git Panel");
+        }
+    }
+
+    /// Amends the most recent commit with staged changes and/or an updated commit message.
+    ///
+    /// Uses a two-stage workflow: the first invocation loads the commit
+    /// message for editing, and the second performs the amend. Returns
+    /// `true` if the amend was executed, `false` otherwise.
+    pub(crate) fn amend(
+        &mut self,
+        commit_editor_focus_handle: &FocusHandle,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) -> bool {
+        if commit_editor_focus_handle.contains_focused(window, cx) {
             if self.head_commit(cx).is_some() {
                 if !self.amend_pending {
                     self.set_amend_pending(true, cx);
-                    self.load_last_commit_message_if_empty(cx);
+                    self.load_last_commit_message(cx);
+
+                    return false;
                 } else {
-                    telemetry::event!("Git Amended", source = "Git Panel");
                     self.commit_changes(
                         CommitOptions {
                             amend: true,
@@ -1977,13 +2002,16 @@ impl GitPanel {
                         window,
                         cx,
                     );
+
+                    return true;
                 }
             }
+            return false;
         } else {
             cx.propagate();
+            return false;
         }
     }
-
     pub fn head_commit(&self, cx: &App) -> Option<CommitDetails> {
         self.active_repository
             .as_ref()
@@ -1991,13 +2019,11 @@ impl GitPanel {
             .cloned()
     }
 
-    pub fn load_last_commit_message_if_empty(&mut self, cx: &mut Context<Self>) {
-        if !self.commit_editor.read(cx).is_empty(cx) {
-            return;
-        }
+    pub fn load_last_commit_message(&mut self, cx: &mut Context<Self>) {
         let Some(head_commit) = self.head_commit(cx) else {
             return;
         };
+
         let recent_sha = head_commit.sha.to_string();
         let detail_task = self.load_commit_details(recent_sha, cx);
         cx.spawn(async move |this, cx| {
@@ -2133,11 +2159,16 @@ impl GitPanel {
             let result = task.await;
             this.update_in(cx, |this, window, cx| {
                 this.pending_commit.take();
+
                 match result {
                     Ok(()) => {
-                        this.commit_editor
-                            .update(cx, |editor, cx| editor.clear(window, cx));
-                        this.original_commit_message = None;
+                        if options.amend {
+                            this.set_amend_pending(false, cx);
+                        } else {
+                            this.commit_editor
+                                .update(cx, |editor, cx| editor.clear(window, cx));
+                            this.original_commit_message = None;
+                        }
                     }
                     Err(e) => this.show_error_toast("commit", e, cx),
                 }
@@ -2146,9 +2177,6 @@ impl GitPanel {
         });
 
         self.pending_commit = Some(task);
-        if options.amend {
-            self.set_amend_pending(false, cx);
-        }
     }
 
     pub(crate) fn uncommit(&mut self, window: &mut Window, cx: &mut Context<Self>) {
@@ -5067,6 +5095,9 @@ impl GitPanel {
         self.amend_pending
     }
 
+    /// Sets the pending amend state, ensuring that the original commit message
+    /// is either saved, when `value` is `true` and there's no pending amend, or
+    /// restored, when `value` is `false` and there's a pending amend.
     pub fn set_amend_pending(&mut self, value: bool, cx: &mut Context<Self>) {
         if value && !self.amend_pending {
             let current_message = self.commit_message_buffer(cx).read(cx).text();
@@ -5184,7 +5215,7 @@ impl GitPanel {
     pub(crate) fn toggle_amend_pending(&mut self, cx: &mut Context<Self>) {
         self.set_amend_pending(!self.amend_pending, cx);
         if self.amend_pending {
-            self.load_last_commit_message_if_empty(cx);
+            self.load_last_commit_message(cx);
         }
     }
 }
@@ -5215,8 +5246,8 @@ impl Render for GitPanel {
             .when(has_write_access && !project.is_read_only(cx), |this| {
                 this.on_action(cx.listener(Self::toggle_staged_for_selected))
                     .on_action(cx.listener(Self::stage_range))
-                    .on_action(cx.listener(GitPanel::commit))
-                    .on_action(cx.listener(GitPanel::amend))
+                    .on_action(cx.listener(GitPanel::on_commit))
+                    .on_action(cx.listener(GitPanel::on_amend))
                     .on_action(cx.listener(GitPanel::toggle_signoff_enabled))
                     .on_action(cx.listener(Self::stage_all))
                     .on_action(cx.listener(Self::unstage_all))
@@ -6557,6 +6588,94 @@ mod tests {
         });
     }
 
+    #[gpui::test]
+    async fn test_amend(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            "/root",
+            json!({
+                "project": {
+                    ".git": {},
+                    "src": {
+                        "main.rs": "fn main() {}"
+                    }
+                }
+            }),
+        )
+        .await;
+
+        fs.set_status_for_repo(
+            Path::new(path!("/root/project/.git")),
+            &[("src/main.rs", StatusCode::Modified.worktree())],
+        );
+
+        let project = Project::test(fs.clone(), [Path::new(path!("/root/project"))], cx).await;
+        let workspace =
+            cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
+        let cx = &mut VisualTestContext::from_window(*workspace, cx);
+
+        // Wait for the project scan to finish so that `head_commit(cx)` is
+        // actually set; otherwise there would be no head commit from which
+        // to fetch the latest commit message.
+        cx.executor().run_until_parked();
+
+        let panel = workspace.update(cx, GitPanel::new).unwrap();
+        panel.read_with(cx, |panel, cx| {
+            assert!(panel.active_repository.is_some());
+            assert!(panel.head_commit(cx).is_some());
+        });
+
+        panel.update_in(cx, |panel, window, cx| {
+            // Update the commit editor's message to ensure that its contents
+            // are later restored, after amending is finished.
+            panel.commit_message_buffer(cx).update(cx, |buffer, cx| {
+                buffer.set_text("refactor: update main.rs", cx);
+            });
+
+            // Start amending the previous commit.
+            panel.focus_editor(&Default::default(), window, cx);
+            panel.on_amend(&Amend, window, cx);
+        });
+
+        // Since `GitPanel.amend` attempts to fetch the latest commit message in
+        // a background task, we need to wait for it to complete before being
+        // able to assert that the commit message editor's state has been
+        // updated.
+        cx.run_until_parked();
+
+        panel.update_in(cx, |panel, window, cx| {
+            assert_eq!(
+                panel.commit_message_buffer(cx).read(cx).text(),
+                "initial commit"
+            );
+            assert_eq!(
+                panel.original_commit_message,
+                Some("refactor: update main.rs".to_string())
+            );
+
+            // Finish amending the previous commit.
+            panel.focus_editor(&Default::default(), window, cx);
+            panel.on_amend(&Amend, window, cx);
+        });
+
+        // Since the actual commit logic is run in a background task, we need to
+        // await its completion to actually ensure that the commit message
+        // editor's contents are set to the original message and haven't been
+        // cleared.
+        cx.run_until_parked();
+
+        panel.update_in(cx, |panel, _window, cx| {
+            // After amending, the commit editor's message should be restored to
+            // the original message.
+            assert_eq!(
+                panel.commit_message_buffer(cx).read(cx).text(),
+                "refactor: update main.rs"
+            );
+            assert!(panel.original_commit_message.is_none());
+        });
+    }
+
     #[gpui::test]
     async fn test_open_diff(cx: &mut TestAppContext) {
         init_test(cx);

crates/gpui/Cargo.toml 🔗

@@ -21,7 +21,6 @@ default = ["font-kit", "wayland", "x11", "windows-manifest"]
 test-support = [
     "leak-detection",
     "collections/test-support",
-    "rand",
     "util/test-support",
     "http_client/test-support",
     "wayland",
@@ -109,7 +108,7 @@ parking = "2.0.0"
 parking_lot.workspace = true
 postage.workspace = true
 profiling.workspace = true
-rand = { optional = true, workspace = true }
+rand.workspace = true
 raw-window-handle = "0.6"
 refineable.workspace = true
 resvg = { version = "0.45.0", default-features = false, features = [
@@ -158,8 +157,10 @@ media.workspace = true
 objc.workspace = true
 objc2 = { version = "0.6", optional = true }
 objc2-metal = { version = "0.3", optional = true }
+mach2.workspace = true
 #TODO: replace with "objc2"
 metal.workspace = true
+flume = "0.11"
 
 [target.'cfg(any(target_os = "linux", target_os = "freebsd", target_os = "macos"))'.dependencies]
 pathfinder_geometry = "0.5"

crates/gpui/build.rs 🔗

@@ -84,6 +84,8 @@ mod macos {
             .allowlist_var("_dispatch_main_q")
             .allowlist_var("_dispatch_source_type_data_add")
             .allowlist_var("DISPATCH_QUEUE_PRIORITY_HIGH")
+            .allowlist_var("DISPATCH_QUEUE_PRIORITY_DEFAULT")
+            .allowlist_var("DISPATCH_QUEUE_PRIORITY_LOW")
             .allowlist_var("DISPATCH_TIME_NOW")
             .allowlist_function("dispatch_get_global_queue")
             .allowlist_function("dispatch_async_f")

crates/gpui/src/app.rs 🔗

@@ -38,10 +38,11 @@ use crate::{
     AssetSource, BackgroundExecutor, Bounds, ClipboardItem, CursorStyle, DispatchPhase, DisplayId,
     EventEmitter, FocusHandle, FocusMap, ForegroundExecutor, Global, KeyBinding, KeyContext,
     Keymap, Keystroke, LayoutId, Menu, MenuItem, OwnedMenu, PathPromptOptions, Pixels, Platform,
-    PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper, Point, PromptBuilder,
-    PromptButton, PromptHandle, PromptLevel, Render, RenderImage, RenderablePromptHandle,
-    Reservation, ScreenCaptureSource, SharedString, SubscriberSet, Subscription, SvgRenderer, Task,
-    TextSystem, Window, WindowAppearance, WindowHandle, WindowId, WindowInvalidator,
+    PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper, Point, Priority,
+    PromptBuilder, PromptButton, PromptHandle, PromptLevel, Render, RenderImage,
+    RenderablePromptHandle, Reservation, ScreenCaptureSource, SharedString, SubscriberSet,
+    Subscription, SvgRenderer, Task, TextSystem, Window, WindowAppearance, WindowHandle, WindowId,
+    WindowInvalidator,
     colors::{Colors, GlobalColors},
     current_platform, hash, init_app_menus,
 };
@@ -1494,6 +1495,24 @@ impl App {
             .spawn(async move { f(&mut cx).await })
     }
 
+    /// Spawns the future returned by the given function on the main thread with
+    /// the given priority. The closure will be invoked with [AsyncApp], which
+    /// allows the application state to be accessed across await points.
+    pub fn spawn_with_priority<AsyncFn, R>(&self, priority: Priority, f: AsyncFn) -> Task<R>
+    where
+        AsyncFn: AsyncFnOnce(&mut AsyncApp) -> R + 'static,
+        R: 'static,
+    {
+        if self.quitting {
+            debug_panic!("Can't spawn on main thread after on_app_quit")
+        };
+
+        let mut cx = self.to_async();
+
+        self.foreground_executor
+            .spawn_with_priority(priority, async move { f(&mut cx).await })
+    }
+
     /// Schedules the given function to be run at the end of the current effect cycle, allowing entities
     /// that are currently on the stack to be returned to the app.
     pub fn defer(&mut self, f: impl FnOnce(&mut App) + 'static) {
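
A short sketch of the new entry point from application code, assuming a `cx: &App` in scope; the function name and body are placeholders:

    use gpui::{App, Priority};

    // Hypothetical caller: schedule non-urgent foreground work at low priority.
    fn schedule_background_refresh(cx: &App) {
        cx.spawn_with_priority(Priority::Low, async move |cx| {
            // `cx` is an `&mut AsyncApp` here; app state can be read across await points.
            let _ = cx;
        })
        .detach();
    }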

crates/gpui/src/app/context.rs 🔗

@@ -1,7 +1,7 @@
 use crate::{
     AnyView, AnyWindowHandle, AppContext, AsyncApp, DispatchPhase, Effect, EntityId, EventEmitter,
-    FocusHandle, FocusOutEvent, Focusable, Global, KeystrokeObserver, Reservation, SubscriberSet,
-    Subscription, Task, WeakEntity, WeakFocusHandle, Window, WindowHandle,
+    FocusHandle, FocusOutEvent, Focusable, Global, KeystrokeObserver, Priority, Reservation,
+    SubscriberSet, Subscription, Task, WeakEntity, WeakFocusHandle, Window, WindowHandle,
 };
 use anyhow::Result;
 use futures::FutureExt;
@@ -667,6 +667,25 @@ impl<'a, T: 'static> Context<'a, T> {
         window.spawn(self, async move |cx| f(view, cx).await)
     }
 
+    /// Schedule a future to be run asynchronously with the given priority.
+    /// The given callback is invoked with a [`WeakEntity<T>`] to avoid leaking the entity for a long-running process.
+    /// It's also given an [`AsyncWindowContext`], which can be used to access the state of the entity across await points.
+    /// The returned future will be polled on the main thread.
+    #[track_caller]
+    pub fn spawn_in_with_priority<AsyncFn, R>(
+        &self,
+        priority: Priority,
+        window: &Window,
+        f: AsyncFn,
+    ) -> Task<R>
+    where
+        R: 'static,
+        AsyncFn: AsyncFnOnce(WeakEntity<T>, &mut AsyncWindowContext) -> R + 'static,
+    {
+        let view = self.weak_entity();
+        window.spawn_with_priority(priority, self, async move |cx| f(view, cx).await)
+    }
+
     /// Register a callback to be invoked when the given global state changes.
     pub fn observe_global_in<G: Global>(
         &mut self,
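
The entity-scoped variant follows the same shape. A sketch under the assumption of a throwaway `Thing` entity that owns the work; the body is a placeholder:

    use gpui::{Context, Priority, Window};

    struct Thing;

    impl Thing {
        // Hypothetical method: kick off low-priority work tied to this entity and window.
        fn start_low_priority_work(&mut self, window: &Window, cx: &mut Context<Self>) {
            cx.spawn_in_with_priority(Priority::Low, window, async move |this, cx| {
                // `this` is a WeakEntity<Thing>; `cx` is an &mut AsyncWindowContext.
                this.update(cx, |_thing, cx| cx.notify()).ok();
            })
            .detach();
        }
    }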

crates/gpui/src/executor.rs 🔗

@@ -1,6 +1,7 @@
-use crate::{App, PlatformDispatcher, RunnableMeta, RunnableVariant};
+use crate::{App, PlatformDispatcher, RunnableMeta, RunnableVariant, TaskTiming, profiler};
 use async_task::Runnable;
 use futures::channel::mpsc;
+use parking_lot::{Condvar, Mutex};
 use smol::prelude::*;
 use std::{
     fmt::Debug,
@@ -46,6 +47,52 @@ pub struct ForegroundExecutor {
     not_send: PhantomData<Rc<()>>,
 }
 
+/// Realtime task priority
+#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
+#[repr(u8)]
+pub enum RealtimePriority {
+    /// Audio task
+    Audio,
+    /// Other realtime task
+    #[default]
+    Other,
+}
+
+/// Task priority
+#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
+#[repr(u8)]
+pub enum Priority {
+    /// Realtime priority
+    ///
+    /// Spawning a task with this priority will spin it off on a separate thread dedicated just to that task.
+    Realtime(RealtimePriority),
+    /// High priority
+    ///
+    /// Only use for tasks that are critical to the user experience / responsiveness of the editor.
+    High,
+    /// Medium priority, probably suits most of your use cases.
+    #[default]
+    Medium,
+    /// Low priority
+    ///
+    /// Prioritize this for background work that can come in large quantities
+    /// to not starve the executor of resources for high priority tasks
+    Low,
+}
+
+impl Priority {
+    #[allow(dead_code)]
+    pub(crate) const fn probability(&self) -> u32 {
+        match self {
+            // realtime priorities are not considered for probability scheduling
+            Priority::Realtime(_) => 0,
+            Priority::High => 60,
+            Priority::Medium => 30,
+            Priority::Low => 10,
+        }
+    }
+}
+
 /// Task is a primitive that allows work to happen in the background.
 ///
 /// It implements [`Future`] so you can `.await` on it.
@@ -151,7 +198,77 @@ impl BackgroundExecutor {
     where
         R: Send + 'static,
     {
-        self.spawn_internal::<R>(Box::pin(future), None)
+        self.spawn_with_priority(Priority::default(), future)
+    }
+
+    /// Enqueues the given future to be run to completion on a background thread with the given priority.
+    #[track_caller]
+    pub fn spawn_with_priority<R>(
+        &self,
+        priority: Priority,
+        future: impl Future<Output = R> + Send + 'static,
+    ) -> Task<R>
+    where
+        R: Send + 'static,
+    {
+        self.spawn_internal::<R>(Box::pin(future), None, priority)
+    }
+
+    /// Enqueues the given future to be run to completion on a background thread, blocking the current task on it.
+    ///
+    /// This allows spawning background work that borrows from its scope. Note that the supplied future will run to
+    /// completion before the current task is resumed, even if the current task is slated for cancellation.
+    pub async fn await_on_background<R>(&self, future: impl Future<Output = R> + Send) -> R
+    where
+        R: Send,
+    {
+        // We need to ensure that cancellation of the parent task does not drop the environment
+        // before our own task has completed or been cancelled.
+        struct NotifyOnDrop<'a>(&'a (Condvar, Mutex<bool>));
+
+        impl Drop for NotifyOnDrop<'_> {
+            fn drop(&mut self) {
+                *self.0.1.lock() = true;
+                self.0.0.notify_all();
+            }
+        }
+
+        struct WaitOnDrop<'a>(&'a (Condvar, Mutex<bool>));
+
+        impl Drop for WaitOnDrop<'_> {
+            fn drop(&mut self) {
+                let mut done = self.0.1.lock();
+                if !*done {
+                    self.0.0.wait(&mut done);
+                }
+            }
+        }
+
+        let dispatcher = self.dispatcher.clone();
+        let location = core::panic::Location::caller();
+
+        let pair = &(Condvar::new(), Mutex::new(false));
+        let _wait_guard = WaitOnDrop(pair);
+
+        let (runnable, task) = unsafe {
+            async_task::Builder::new()
+                .metadata(RunnableMeta { location })
+                .spawn_unchecked(
+                    move |_| async {
+                        let _notify_guard = NotifyOnDrop(pair);
+                        future.await
+                    },
+                    move |runnable| {
+                        dispatcher.dispatch(
+                            RunnableVariant::Meta(runnable),
+                            None,
+                            Priority::default(),
+                        )
+                    },
+                )
+        };
+        runnable.schedule();
+        task.await
     }
 
     /// Enqueues the given future to be run to completion on a background thread.
@@ -165,7 +282,7 @@ impl BackgroundExecutor {
     where
         R: Send + 'static,
     {
-        self.spawn_internal::<R>(Box::pin(future), Some(label))
+        self.spawn_internal::<R>(Box::pin(future), Some(label), Priority::default())
     }
 
     #[track_caller]
@@ -173,15 +290,55 @@ impl BackgroundExecutor {
         &self,
         future: AnyFuture<R>,
         label: Option<TaskLabel>,
+        priority: Priority,
     ) -> Task<R> {
         let dispatcher = self.dispatcher.clone();
-        let location = core::panic::Location::caller();
-        let (runnable, task) = async_task::Builder::new()
-            .metadata(RunnableMeta { location })
-            .spawn(
-                move |_| future,
-                move |runnable| dispatcher.dispatch(RunnableVariant::Meta(runnable), label),
+        let (runnable, task) = if let Priority::Realtime(realtime) = priority {
+            let location = core::panic::Location::caller();
+            let (mut tx, rx) = flume::bounded::<Runnable<RunnableMeta>>(1);
+
+            dispatcher.spawn_realtime(
+                realtime,
+                Box::new(move || {
+                    while let Ok(runnable) = rx.recv() {
+                        let start = Instant::now();
+                        let location = runnable.metadata().location;
+                        let mut timing = TaskTiming {
+                            location,
+                            start,
+                            end: None,
+                        };
+                        profiler::add_task_timing(timing);
+
+                        runnable.run();
+
+                        let end = Instant::now();
+                        timing.end = Some(end);
+                        profiler::add_task_timing(timing);
+                    }
+                }),
             );
+
+            async_task::Builder::new()
+                .metadata(RunnableMeta { location })
+                .spawn(
+                    move |_| future,
+                    move |runnable| {
+                        let _ = tx.send(runnable);
+                    },
+                )
+        } else {
+            let location = core::panic::Location::caller();
+            async_task::Builder::new()
+                .metadata(RunnableMeta { location })
+                .spawn(
+                    move |_| future,
+                    move |runnable| {
+                        dispatcher.dispatch(RunnableVariant::Meta(runnable), label, priority)
+                    },
+                )
+        };
+
         runnable.schedule();
         Task(TaskState::Spawned(task))
     }
@@ -354,11 +511,28 @@ impl BackgroundExecutor {
     where
         F: FnOnce(&mut Scope<'scope>),
     {
-        let mut scope = Scope::new(self.clone());
+        let mut scope = Scope::new(self.clone(), Priority::default());
         (scheduler)(&mut scope);
         let spawned = mem::take(&mut scope.futures)
             .into_iter()
-            .map(|f| self.spawn(f))
+            .map(|f| self.spawn_with_priority(scope.priority, f))
+            .collect::<Vec<_>>();
+        for task in spawned {
+            task.await;
+        }
+    }
+
+    /// Like `scoped`, but spawns every task in the scope with the given
+    /// priority, waiting for all of them to complete before returning.
+    pub async fn scoped_priority<'scope, F>(&self, priority: Priority, scheduler: F)
+    where
+        F: FnOnce(&mut Scope<'scope>),
+    {
+        let mut scope = Scope::new(self.clone(), priority);
+        (scheduler)(&mut scope);
+        let spawned = mem::take(&mut scope.futures)
+            .into_iter()
+            .map(|f| self.spawn_with_priority(scope.priority, f))
             .collect::<Vec<_>>();
         for task in spawned {
             task.await;
@@ -494,6 +668,19 @@ impl ForegroundExecutor {
     /// Enqueues the given Task to run on the main thread at some point in the future.
     #[track_caller]
     pub fn spawn<R>(&self, future: impl Future<Output = R> + 'static) -> Task<R>
+    where
+        R: 'static,
+    {
+        self.spawn_with_priority(Priority::default(), future)
+    }
+
+    /// Enqueues the given Task to run on the main thread at some point in the future with the given priority.
+    #[track_caller]
+    pub fn spawn_with_priority<R>(
+        &self,
+        priority: Priority,
+        future: impl Future<Output = R> + 'static,
+    ) -> Task<R>
     where
         R: 'static,
     {
@@ -505,16 +692,19 @@ impl ForegroundExecutor {
             dispatcher: Arc<dyn PlatformDispatcher>,
             future: AnyLocalFuture<R>,
             location: &'static core::panic::Location<'static>,
+            priority: Priority,
         ) -> Task<R> {
             let (runnable, task) = spawn_local_with_source_location(
                 future,
-                move |runnable| dispatcher.dispatch_on_main_thread(RunnableVariant::Meta(runnable)),
+                move |runnable| {
+                    dispatcher.dispatch_on_main_thread(RunnableVariant::Meta(runnable), priority)
+                },
                 RunnableMeta { location },
             );
             runnable.schedule();
             Task(TaskState::Spawned(task))
         }
-        inner::<R>(dispatcher, Box::pin(future), location)
+        inner::<R>(dispatcher, Box::pin(future), location, priority)
     }
 }
 
@@ -590,6 +780,7 @@ where
 /// Scope manages a set of tasks that are enqueued and waited on together. See [`BackgroundExecutor::scoped`].
 pub struct Scope<'a> {
     executor: BackgroundExecutor,
+    priority: Priority,
     futures: Vec<Pin<Box<dyn Future<Output = ()> + Send + 'static>>>,
     tx: Option<mpsc::Sender<()>>,
     rx: mpsc::Receiver<()>,
@@ -597,10 +788,11 @@ pub struct Scope<'a> {
 }
 
 impl<'a> Scope<'a> {
-    fn new(executor: BackgroundExecutor) -> Self {
+    fn new(executor: BackgroundExecutor, priority: Priority) -> Self {
         let (tx, rx) = mpsc::channel(1);
         Self {
             executor,
+            priority,
             tx: Some(tx),
             rx,
             futures: Default::default(),
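
Taken together, the executor additions let callers opt individual tasks into the priority model. A sketch of the main entry points, assuming a `BackgroundExecutor` handle (for example from `cx.background_executor()`); the workloads are placeholders:

    use gpui::{BackgroundExecutor, Priority, RealtimePriority};

    async fn demo(executor: BackgroundExecutor) {
        // Bulk background work that should not crowd out interactive tasks.
        let indexing = executor.spawn_with_priority(Priority::Low, async move { 42 });

        // Realtime tasks are spun off onto a dedicated thread (see `spawn_realtime` below).
        let _audio = executor
            .spawn_with_priority(Priority::Realtime(RealtimePriority::Audio), async move {});

        // Scoped variant: every task in the scope inherits the given priority.
        executor
            .scoped_priority(Priority::Low, |scope| {
                scope.spawn(async { /* may borrow from the enclosing scope */ });
            })
            .await;

        // Borrowing variant: the future may borrow locals; the caller blocks on it.
        let local = vec![1, 2, 3];
        let _sum: i32 = executor.await_on_background(async { local.iter().sum() }).await;

        let _ = indexing.await;
    }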

crates/gpui/src/geometry.rs 🔗

@@ -1416,9 +1416,9 @@ where
     /// ```
     pub fn contains(&self, point: &Point<T>) -> bool {
         point.x >= self.origin.x
-            && point.x <= self.origin.x.clone() + self.size.width.clone()
+            && point.x < self.origin.x.clone() + self.size.width.clone()
             && point.y >= self.origin.y
-            && point.y <= self.origin.y.clone() + self.size.height.clone()
+            && point.y < self.origin.y.clone() + self.size.height.clone()
     }
 
     /// Checks if this bounds is completely contained within another bounds.
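
`Bounds::contains` is now half-open: points on the right and bottom edges no longer count as inside, so adjacent bounds tile without both claiming the shared edge. A small illustration with arbitrary values:

    use gpui::{Bounds, point, px, size};

    #[test]
    fn contains_is_half_open() {
        let bounds = Bounds {
            origin: point(px(0.), px(0.)),
            size: size(px(10.), px(10.)),
        };
        assert!(bounds.contains(&point(px(0.), px(0.))));    // origin is inside
        assert!(bounds.contains(&point(px(9.), px(9.))));    // interior point
        assert!(!bounds.contains(&point(px(10.), px(10.)))); // far corner is now excluded
    }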

crates/gpui/src/gpui.rs 🔗

@@ -31,6 +31,8 @@ mod path_builder;
 mod platform;
 pub mod prelude;
 mod profiler;
+#[cfg(any(target_os = "windows", target_os = "linux"))]
+mod queue;
 mod scene;
 mod shared_string;
 mod shared_uri;
@@ -89,16 +91,20 @@ pub use keymap::*;
 pub use path_builder::*;
 pub use platform::*;
 pub use profiler::*;
+#[cfg(any(target_os = "windows", target_os = "linux"))]
+pub(crate) use queue::{PriorityQueueReceiver, PriorityQueueSender};
 pub use refineable::*;
 pub use scene::*;
 pub use shared_string::*;
 pub use shared_uri::*;
 pub use smol::Timer;
+use std::{any::Any, future::Future};
 pub use style::*;
 pub use styled::*;
 pub use subscription::*;
 pub use svg_renderer::*;
 pub(crate) use tab_stop::*;
+use taffy::TaffyLayoutEngine;
 pub use taffy::{AvailableSpace, LayoutId};
 #[cfg(any(test, feature = "test-support"))]
 pub use test::*;
@@ -109,9 +115,6 @@ pub use util::{FutureExt, Timeout, arc_cow::ArcCow};
 pub use view::*;
 pub use window::*;
 
-use std::{any::Any, future::Future};
-use taffy::TaffyLayoutEngine;
-
 /// The context trait, allows the different contexts in GPUI to be used
 /// interchangeably for certain operations.
 pub trait AppContext {

crates/gpui/src/platform.rs 🔗

@@ -39,9 +39,10 @@ use crate::{
     Action, AnyWindowHandle, App, AsyncWindowContext, BackgroundExecutor, Bounds,
     DEFAULT_WINDOW_SIZE, DevicePixels, DispatchEventResult, Font, FontId, FontMetrics, FontRun,
     ForegroundExecutor, GlyphId, GpuSpecs, ImageSource, Keymap, LineLayout, Pixels, PlatformInput,
-    Point, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Scene, ShapedGlyph,
-    ShapedRun, SharedString, Size, SvgRenderer, SystemWindowTab, Task, TaskLabel, TaskTiming,
-    ThreadTaskTimings, Window, WindowControlArea, hash, point, px, size,
+    Point, Priority, RealtimePriority, RenderGlyphParams, RenderImage, RenderImageParams,
+    RenderSvgParams, Scene, ShapedGlyph, ShapedRun, SharedString, Size, SvgRenderer,
+    SystemWindowTab, Task, TaskLabel, TaskTiming, ThreadTaskTimings, Window, WindowControlArea,
+    hash, point, px, size,
 };
 use anyhow::Result;
 use async_task::Runnable;
@@ -289,6 +290,13 @@ pub trait PlatformDisplay: Send + Sync + Debug {
     /// Get the bounds for this display
     fn bounds(&self) -> Bounds<Pixels>;
 
+    /// Get the visible bounds for this display, excluding taskbar/dock areas.
+    /// This is the usable area where windows can be placed without being obscured.
+    /// Defaults to the full display bounds if not overridden.
+    fn visible_bounds(&self) -> Bounds<Pixels> {
+        self.bounds()
+    }
+
     /// Get the default bounds for this display to place a window
     fn default_bounds(&self) -> Bounds<Pixels> {
         let bounds = self.bounds();
@@ -580,9 +588,10 @@ pub trait PlatformDispatcher: Send + Sync {
     fn get_all_timings(&self) -> Vec<ThreadTaskTimings>;
     fn get_current_thread_timings(&self) -> Vec<TaskTiming>;
     fn is_main_thread(&self) -> bool;
-    fn dispatch(&self, runnable: RunnableVariant, label: Option<TaskLabel>);
-    fn dispatch_on_main_thread(&self, runnable: RunnableVariant);
+    fn dispatch(&self, runnable: RunnableVariant, label: Option<TaskLabel>, priority: Priority);
+    fn dispatch_on_main_thread(&self, runnable: RunnableVariant, priority: Priority);
     fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant);
+    fn spawn_realtime(&self, priority: RealtimePriority, f: Box<dyn FnOnce() + Send>);
 
     fn now(&self) -> Instant {
         Instant::now()

crates/gpui/src/platform/linux/dispatcher.rs 🔗

@@ -1,9 +1,10 @@
 use crate::{
-    GLOBAL_THREAD_TIMINGS, PlatformDispatcher, RunnableVariant, THREAD_TIMINGS, TaskLabel,
-    TaskTiming, ThreadTaskTimings,
+    GLOBAL_THREAD_TIMINGS, PlatformDispatcher, Priority, PriorityQueueReceiver,
+    PriorityQueueSender, RealtimePriority, RunnableVariant, THREAD_TIMINGS, TaskLabel, TaskTiming,
+    ThreadTaskTimings, profiler,
 };
 use calloop::{
-    EventLoop,
+    EventLoop, PostAction,
     channel::{self, Sender},
     timer::TimeoutAction,
 };
@@ -19,9 +20,9 @@ struct TimerAfter {
 }
 
 pub(crate) struct LinuxDispatcher {
-    main_sender: Sender<RunnableVariant>,
+    main_sender: PriorityQueueCalloopSender<RunnableVariant>,
     timer_sender: Sender<TimerAfter>,
-    background_sender: flume::Sender<RunnableVariant>,
+    background_sender: PriorityQueueSender<RunnableVariant>,
     _background_threads: Vec<thread::JoinHandle<()>>,
     main_thread_id: thread::ThreadId,
 }
@@ -29,18 +30,20 @@ pub(crate) struct LinuxDispatcher {
 const MIN_THREADS: usize = 2;
 
 impl LinuxDispatcher {
-    pub fn new(main_sender: Sender<RunnableVariant>) -> Self {
-        let (background_sender, background_receiver) = flume::unbounded::<RunnableVariant>();
+    pub fn new(main_sender: PriorityQueueCalloopSender<RunnableVariant>) -> Self {
+        let (background_sender, background_receiver) = PriorityQueueReceiver::new();
         let thread_count =
             std::thread::available_parallelism().map_or(MIN_THREADS, |i| i.get().max(MIN_THREADS));
 
+        // These threads should really run at lower priority than the foreground
+        // executor.
         let mut background_threads = (0..thread_count)
             .map(|i| {
-                let receiver = background_receiver.clone();
+                let mut receiver = background_receiver.clone();
                 std::thread::Builder::new()
                     .name(format!("Worker-{i}"))
                     .spawn(move || {
-                        for runnable in receiver {
+                        for runnable in receiver.iter() {
                             let start = Instant::now();
 
                             let mut location = match runnable {
@@ -51,7 +54,7 @@ impl LinuxDispatcher {
                                         start,
                                         end: None,
                                     };
-                                    Self::add_task_timing(timing);
+                                    profiler::add_task_timing(timing);
 
                                     runnable.run();
                                     timing
@@ -63,7 +66,7 @@ impl LinuxDispatcher {
                                         start,
                                         end: None,
                                     };
-                                    Self::add_task_timing(timing);
+                                    profiler::add_task_timing(timing);
 
                                     runnable.run();
                                     timing
@@ -72,7 +75,7 @@ impl LinuxDispatcher {
 
                             let end = Instant::now();
                             location.end = Some(end);
-                            Self::add_task_timing(location);
+                            profiler::add_task_timing(location);
 
                             log::trace!(
                                 "background thread {}: ran runnable. took: {:?}",
@@ -113,7 +116,7 @@ impl LinuxDispatcher {
                                                         start,
                                                         end: None,
                                                     };
-                                                    Self::add_task_timing(timing);
+                                                    profiler::add_task_timing(timing);
 
                                                     runnable.run();
                                                     timing
@@ -124,7 +127,7 @@ impl LinuxDispatcher {
                                                         start,
                                                         end: None,
                                                     };
-                                                    Self::add_task_timing(timing);
+                                                    profiler::add_task_timing(timing);
 
                                                     runnable.run();
                                                     timing
@@ -133,7 +136,7 @@ impl LinuxDispatcher {
                                             let end = Instant::now();
 
                                             timing.end = Some(end);
-                                            Self::add_task_timing(timing);
+                                            profiler::add_task_timing(timing);
                                         }
                                         TimeoutAction::Drop
                                     },
@@ -157,22 +160,6 @@ impl LinuxDispatcher {
             main_thread_id: thread::current().id(),
         }
     }
-
-    pub(crate) fn add_task_timing(timing: TaskTiming) {
-        THREAD_TIMINGS.with(|timings| {
-            let mut timings = timings.lock();
-            let timings = &mut timings.timings;
-
-            if let Some(last_timing) = timings.iter_mut().rev().next() {
-                if last_timing.location == timing.location {
-                    last_timing.end = timing.end;
-                    return;
-                }
-            }
-
-            timings.push_back(timing);
-        });
-    }
 }
 
 impl PlatformDispatcher for LinuxDispatcher {
@@ -199,22 +186,26 @@ impl PlatformDispatcher for LinuxDispatcher {
         thread::current().id() == self.main_thread_id
     }
 
-    fn dispatch(&self, runnable: RunnableVariant, _: Option<TaskLabel>) {
-        self.background_sender.send(runnable).unwrap();
+    fn dispatch(&self, runnable: RunnableVariant, _: Option<TaskLabel>, priority: Priority) {
+        self.background_sender
+            .send(priority, runnable)
+            .unwrap_or_else(|_| panic!("blocking sender returned without value"));
     }
 
-    fn dispatch_on_main_thread(&self, runnable: RunnableVariant) {
-        self.main_sender.send(runnable).unwrap_or_else(|runnable| {
-            // NOTE: Runnable may wrap a Future that is !Send.
-            //
-            // This is usually safe because we only poll it on the main thread.
-            // However if the send fails, we know that:
-            // 1. main_receiver has been dropped (which implies the app is shutting down)
-            // 2. we are on a background thread.
-            // It is not safe to drop something !Send on the wrong thread, and
-            // the app will exit soon anyway, so we must forget the runnable.
-            std::mem::forget(runnable);
-        });
+    fn dispatch_on_main_thread(&self, runnable: RunnableVariant, priority: Priority) {
+        self.main_sender
+            .send(priority, runnable)
+            .unwrap_or_else(|runnable| {
+                // NOTE: Runnable may wrap a Future that is !Send.
+                //
+                // This is usually safe because we only poll it on the main thread.
+                // However if the send fails, we know that:
+                // 1. main_receiver has been dropped (which implies the app is shutting down)
+                // 2. we are on a background thread.
+                // It is not safe to drop something !Send on the wrong thread, and
+                // the app will exit soon anyway, so we must forget the runnable.
+                std::mem::forget(runnable);
+            });
     }
 
     fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) {
@@ -222,4 +213,252 @@ impl PlatformDispatcher for LinuxDispatcher {
             .send(TimerAfter { duration, runnable })
             .ok();
     }
+
+    fn spawn_realtime(&self, priority: RealtimePriority, f: Box<dyn FnOnce() + Send>) {
+        std::thread::spawn(move || {
+            // SAFETY: always safe to call
+            let thread_id = unsafe { libc::pthread_self() };
+
+            let policy = match priority {
+                RealtimePriority::Audio => libc::SCHED_FIFO,
+                RealtimePriority::Other => libc::SCHED_RR,
+            };
+            let sched_priority = match priority {
+                RealtimePriority::Audio => 65,
+                RealtimePriority::Other => 45,
+            };
+
+            let sched_param = libc::sched_param { sched_priority };
+            // SAFETY: sched_param is a valid initialized structure
+            let result = unsafe { libc::pthread_setschedparam(thread_id, policy, &sched_param) };
+            if result != 0 {
+                log::warn!("failed to set realtime thread priority to {:?}", priority);
+            }
+
+            f();
+        });
+    }
+}
+
+pub struct PriorityQueueCalloopSender<T> {
+    sender: PriorityQueueSender<T>,
+    ping: calloop::ping::Ping,
+}
+
+impl<T> PriorityQueueCalloopSender<T> {
+    fn new(tx: PriorityQueueSender<T>, ping: calloop::ping::Ping) -> Self {
+        Self { sender: tx, ping }
+    }
+
+    fn send(&self, priority: Priority, item: T) -> Result<(), crate::queue::SendError<T>> {
+        let res = self.sender.send(priority, item);
+        if res.is_ok() {
+            self.ping.ping();
+        }
+        res
+    }
+}
+
+impl<T> Drop for PriorityQueueCalloopSender<T> {
+    fn drop(&mut self) {
+        self.ping.ping();
+    }
+}
+
+pub struct PriorityQueueCalloopReceiver<T> {
+    receiver: PriorityQueueReceiver<T>,
+    source: calloop::ping::PingSource,
+    ping: calloop::ping::Ping,
+}
+
+impl<T> PriorityQueueCalloopReceiver<T> {
+    pub fn new() -> (PriorityQueueCalloopSender<T>, Self) {
+        let (ping, source) = calloop::ping::make_ping().expect("Failed to create a Ping.");
+
+        let (tx, rx) = PriorityQueueReceiver::new();
+
+        (
+            PriorityQueueCalloopSender::new(tx, ping.clone()),
+            Self {
+                receiver: rx,
+                source,
+                ping,
+            },
+        )
+    }
 }
+
+use calloop::channel::Event;
+
+#[derive(Debug)]
+pub struct ChannelError(calloop::ping::PingError);
+
+impl std::fmt::Display for ChannelError {
+    #[cfg_attr(feature = "nightly_coverage", coverage(off))]
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(&self.0, f)
+    }
+}
+
+impl std::error::Error for ChannelError {
+    #[cfg_attr(feature = "nightly_coverage", coverage(off))]
+    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+        Some(&self.0)
+    }
+}
+
+impl<T> calloop::EventSource for PriorityQueueCalloopReceiver<T> {
+    type Event = Event<T>;
+    type Metadata = ();
+    type Ret = ();
+    type Error = ChannelError;
+
+    fn process_events<F>(
+        &mut self,
+        readiness: calloop::Readiness,
+        token: calloop::Token,
+        mut callback: F,
+    ) -> Result<calloop::PostAction, Self::Error>
+    where
+        F: FnMut(Self::Event, &mut Self::Metadata) -> Self::Ret,
+    {
+        let mut clear_readiness = false;
+        let mut disconnected = false;
+
+        let action = self
+            .source
+            .process_events(readiness, token, |(), &mut ()| {
+                let mut is_empty = true;
+
+                let mut receiver = self.receiver.clone();
+                for runnable in receiver.try_iter() {
+                    match runnable {
+                        Ok(r) => {
+                            callback(Event::Msg(r), &mut ());
+                            is_empty = false;
+                        }
+                        Err(_) => {
+                            disconnected = true;
+                        }
+                    }
+                }
+
+                if disconnected {
+                    callback(Event::Closed, &mut ());
+                }
+
+                if is_empty {
+                    clear_readiness = true;
+                }
+            })
+            .map_err(ChannelError)?;
+
+        if disconnected {
+            Ok(PostAction::Remove)
+        } else if clear_readiness {
+            Ok(action)
+        } else {
+            // Re-notify the ping source so we can try again.
+            self.ping.ping();
+            Ok(PostAction::Continue)
+        }
+    }
+
+    fn register(
+        &mut self,
+        poll: &mut calloop::Poll,
+        token_factory: &mut calloop::TokenFactory,
+    ) -> calloop::Result<()> {
+        self.source.register(poll, token_factory)
+    }
+
+    fn reregister(
+        &mut self,
+        poll: &mut calloop::Poll,
+        token_factory: &mut calloop::TokenFactory,
+    ) -> calloop::Result<()> {
+        self.source.reregister(poll, token_factory)
+    }
+
+    fn unregister(&mut self, poll: &mut calloop::Poll) -> calloop::Result<()> {
+        self.source.unregister(poll)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn calloop_works() {
+        let mut event_loop = calloop::EventLoop::try_new().unwrap();
+        let handle = event_loop.handle();
+
+        let (tx, rx) = PriorityQueueCalloopReceiver::new();
+
+        struct Data {
+            got_msg: bool,
+            got_closed: bool,
+        }
+
+        let mut data = Data {
+            got_msg: false,
+            got_closed: false,
+        };
+
+        let _channel_token = handle
+            .insert_source(rx, move |evt, &mut (), data: &mut Data| match evt {
+                Event::Msg(()) => {
+                    data.got_msg = true;
+                }
+
+                Event::Closed => {
+                    data.got_closed = true;
+                }
+            })
+            .unwrap();
+
+        // nothing is sent, nothing is received
+        event_loop
+            .dispatch(Some(::std::time::Duration::ZERO), &mut data)
+            .unwrap();
+
+        assert!(!data.got_msg);
+        assert!(!data.got_closed);
+        // a message is sent
+
+        tx.send(Priority::Medium, ()).unwrap();
+        event_loop
+            .dispatch(Some(::std::time::Duration::ZERO), &mut data)
+            .unwrap();
+
+        assert!(data.got_msg);
+        assert!(!data.got_closed);
+
+        // the sender is dropped
+        drop(tx);
+        event_loop
+            .dispatch(Some(::std::time::Duration::ZERO), &mut data)
+            .unwrap();
+
+        assert!(data.got_msg);
+        assert!(data.got_closed);
+    }
+}
+

crates/gpui/src/platform/linux/platform.rs 🔗

@@ -14,7 +14,7 @@ use std::{
 };
 
 use anyhow::{Context as _, anyhow};
-use calloop::{LoopSignal, channel::Channel};
+use calloop::LoopSignal;
 use futures::channel::oneshot;
 use util::ResultExt as _;
 use util::command::{new_smol_command, new_std_command};
@@ -25,8 +25,8 @@ use crate::{
     Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DisplayId,
     ForegroundExecutor, Keymap, LinuxDispatcher, Menu, MenuItem, OwnedMenu, PathPromptOptions,
     Pixels, Platform, PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper,
-    PlatformTextSystem, PlatformWindow, Point, Result, RunnableVariant, Task, WindowAppearance,
-    WindowParams, px,
+    PlatformTextSystem, PlatformWindow, Point, PriorityQueueCalloopReceiver, Result,
+    RunnableVariant, Task, WindowAppearance, WindowParams, px,
 };
 
 #[cfg(any(feature = "wayland", feature = "x11"))]
@@ -149,8 +149,8 @@ pub(crate) struct LinuxCommon {
 }
 
 impl LinuxCommon {
-    pub fn new(signal: LoopSignal) -> (Self, Channel<RunnableVariant>) {
-        let (main_sender, main_receiver) = calloop::channel::channel::<RunnableVariant>();
+    pub fn new(signal: LoopSignal) -> (Self, PriorityQueueCalloopReceiver<RunnableVariant>) {
+        let (main_sender, main_receiver) = PriorityQueueCalloopReceiver::new();
 
         #[cfg(any(feature = "wayland", feature = "x11"))]
         let text_system = Arc::new(crate::CosmicTextSystem::new());

crates/gpui/src/platform/linux/wayland/client.rs 🔗

@@ -77,10 +77,10 @@ use crate::{
     LinuxKeyboardLayout, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent,
     MouseExitEvent, MouseMoveEvent, MouseUpEvent, NavigationDirection, Pixels, PlatformDisplay,
     PlatformInput, PlatformKeyboardLayout, Point, ResultExt as _, SCROLL_LINES, ScrollDelta,
-    ScrollWheelEvent, Size, TouchPhase, WindowParams, point, px, size,
+    ScrollWheelEvent, Size, TouchPhase, WindowParams, point, profiler, px, size,
 };
 use crate::{
-    LinuxDispatcher, RunnableVariant, TaskTiming,
+    RunnableVariant, TaskTiming,
     platform::{PlatformWindow, blade::BladeContext},
 };
 use crate::{
@@ -503,7 +503,7 @@ impl WaylandClient {
                                         start,
                                         end: None,
                                     };
-                                    LinuxDispatcher::add_task_timing(timing);
+                                    profiler::add_task_timing(timing);
 
                                     runnable.run();
                                     timing
@@ -515,7 +515,7 @@ impl WaylandClient {
                                         start,
                                         end: None,
                                     };
-                                    LinuxDispatcher::add_task_timing(timing);
+                                    profiler::add_task_timing(timing);
 
                                     runnable.run();
                                     timing
@@ -524,7 +524,7 @@ impl WaylandClient {
 
                             let end = Instant::now();
                             timing.end = Some(end);
-                            LinuxDispatcher::add_task_timing(timing);
+                            profiler::add_task_timing(timing);
                         });
                     }
                 }

crates/gpui/src/platform/linux/x11/client.rs 🔗

@@ -1,4 +1,4 @@
-use crate::{Capslock, LinuxDispatcher, ResultExt as _, RunnableVariant, TaskTiming, xcb_flush};
+use crate::{Capslock, ResultExt as _, RunnableVariant, TaskTiming, profiler, xcb_flush};
 use anyhow::{Context as _, anyhow};
 use ashpd::WindowIdentifier;
 use calloop::{
@@ -322,7 +322,7 @@ impl X11Client {
                                         start,
                                         end: None,
                                     };
-                                    LinuxDispatcher::add_task_timing(timing);
+                                    profiler::add_task_timing(timing);
 
                                     runnable.run();
                                     timing
@@ -334,7 +334,7 @@ impl X11Client {
                                         start,
                                         end: None,
                                     };
-                                    LinuxDispatcher::add_task_timing(timing);
+                                    profiler::add_task_timing(timing);
 
                                     runnable.run();
                                     timing
@@ -343,7 +343,7 @@ impl X11Client {
 
                             let end = Instant::now();
                             timing.end = Some(end);
-                            LinuxDispatcher::add_task_timing(timing);
+                            profiler::add_task_timing(timing);
                         });
                     }
                 }

crates/gpui/src/platform/mac/dispatcher.rs 🔗

@@ -3,11 +3,22 @@
 #![allow(non_snake_case)]
 
 use crate::{
-    GLOBAL_THREAD_TIMINGS, PlatformDispatcher, RunnableMeta, RunnableVariant, THREAD_TIMINGS,
-    TaskLabel, TaskTiming, ThreadTaskTimings,
+    GLOBAL_THREAD_TIMINGS, PlatformDispatcher, Priority, RealtimePriority, RunnableMeta,
+    RunnableVariant, THREAD_TIMINGS, TaskLabel, TaskTiming, ThreadTaskTimings,
 };
 
+use anyhow::Context;
 use async_task::Runnable;
+use mach2::{
+    kern_return::KERN_SUCCESS,
+    mach_time::mach_timebase_info_data_t,
+    thread_policy::{
+        THREAD_EXTENDED_POLICY, THREAD_EXTENDED_POLICY_COUNT, THREAD_PRECEDENCE_POLICY,
+        THREAD_PRECEDENCE_POLICY_COUNT, THREAD_TIME_CONSTRAINT_POLICY,
+        THREAD_TIME_CONSTRAINT_POLICY_COUNT, thread_extended_policy_data_t,
+        thread_precedence_policy_data_t, thread_time_constraint_policy_data_t,
+    },
+};
 use objc::{
     class, msg_send,
     runtime::{BOOL, YES},
@@ -15,9 +26,11 @@ use objc::{
 };
 use std::{
     ffi::c_void,
+    mem::MaybeUninit,
     ptr::{NonNull, addr_of},
     time::{Duration, Instant},
 };
+use util::ResultExt;
 
 /// All items in the generated file are marked as pub, so we're gonna wrap it in a separate mod to prevent
 /// these pub items from leaking into public API.
@@ -56,7 +69,7 @@ impl PlatformDispatcher for MacDispatcher {
         is_main_thread == YES
     }
 
-    fn dispatch(&self, runnable: RunnableVariant, _: Option<TaskLabel>) {
+    fn dispatch(&self, runnable: RunnableVariant, _: Option<TaskLabel>, priority: Priority) {
         let (context, trampoline) = match runnable {
             RunnableVariant::Meta(runnable) => (
                 runnable.into_raw().as_ptr() as *mut c_void,
@@ -67,16 +80,24 @@ impl PlatformDispatcher for MacDispatcher {
                 Some(trampoline_compat as unsafe extern "C" fn(*mut c_void)),
             ),
         };
+
+        let queue_priority = match priority {
+            Priority::Realtime(_) => unreachable!(),
+            Priority::High => DISPATCH_QUEUE_PRIORITY_HIGH as isize,
+            Priority::Medium => DISPATCH_QUEUE_PRIORITY_DEFAULT as isize,
+            Priority::Low => DISPATCH_QUEUE_PRIORITY_LOW as isize,
+        };
+
         unsafe {
             dispatch_async_f(
-                dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH.try_into().unwrap(), 0),
+                dispatch_get_global_queue(queue_priority, 0),
                 context,
                 trampoline,
             );
         }
     }
 
-    fn dispatch_on_main_thread(&self, runnable: RunnableVariant) {
+    fn dispatch_on_main_thread(&self, runnable: RunnableVariant, _priority: Priority) {
         let (context, trampoline) = match runnable {
             RunnableVariant::Meta(runnable) => (
                 runnable.into_raw().as_ptr() as *mut c_void,
@@ -110,6 +131,120 @@ impl PlatformDispatcher for MacDispatcher {
             dispatch_after_f(when, queue, context, trampoline);
         }
     }
+
+    fn spawn_realtime(&self, priority: RealtimePriority, f: Box<dyn FnOnce() + Send>) {
+        std::thread::spawn(move || {
+            match priority {
+                RealtimePriority::Audio => set_audio_thread_priority(),
+                RealtimePriority::Other => set_high_thread_priority(),
+            }
+            .context(format!("for priority {:?}", priority))
+            .log_err();
+
+            f();
+        });
+    }
+}
+
+fn set_high_thread_priority() -> anyhow::Result<()> {
+    // SAFETY: always safe to call
+    let thread_id = unsafe { libc::pthread_self() };
+
+    // SAFETY: all sched_param members are valid when initialized to zero.
+    let mut sched_param = unsafe { MaybeUninit::<libc::sched_param>::zeroed().assume_init() };
+    sched_param.sched_priority = 45;
+
+    let result = unsafe { libc::pthread_setschedparam(thread_id, libc::SCHED_FIFO, &sched_param) };
+    if result != 0 {
+        anyhow::bail!("failed to set realtime thread priority")
+    }
+
+    Ok(())
+}
+
+fn set_audio_thread_priority() -> anyhow::Result<()> {
+    // https://chromium.googlesource.com/chromium/chromium/+/master/base/threading/platform_thread_mac.mm#93
+
+    // SAFETY: always safe to call
+    let thread_id = unsafe { libc::pthread_self() };
+
+    // SAFETY: thread_id is a valid thread id
+    let thread_id = unsafe { libc::pthread_mach_thread_np(thread_id) };
+
+    // Fixed priority thread
+    let mut policy = thread_extended_policy_data_t { timeshare: 0 };
+
+    // SAFETY: thread_id is a valid thread id
+    // SAFETY: thread_extended_policy_data_t is passed as THREAD_EXTENDED_POLICY
+    let result = unsafe {
+        mach2::thread_policy::thread_policy_set(
+            thread_id,
+            THREAD_EXTENDED_POLICY,
+            &mut policy as *mut _ as *mut _,
+            THREAD_EXTENDED_POLICY_COUNT,
+        )
+    };
+
+    if result != KERN_SUCCESS {
+        anyhow::bail!("failed to set thread extended policy");
+    }
+
+    // relatively high priority
+    let mut precedence = thread_precedence_policy_data_t { importance: 63 };
+
+    // SAFETY: thread_id is a valid thread id
+    // SAFETY: thread_precedence_policy_data_t is passed as THREAD_PRECEDENCE_POLICY
+    let result = unsafe {
+        mach2::thread_policy::thread_policy_set(
+            thread_id,
+            THREAD_PRECEDENCE_POLICY,
+            &mut precedence as *mut _ as *mut _,
+            THREAD_PRECEDENCE_POLICY_COUNT,
+        )
+    };
+
+    if result != KERN_SUCCESS {
+        anyhow::bail!("failed to set thread precedence policy");
+    }
+
+    const GUARANTEED_AUDIO_DUTY_CYCLE: f32 = 0.75;
+    const MAX_AUDIO_DUTY_CYCLE: f32 = 0.85;
+
+    // ~128 frames @ 44.1KHz
+    const TIME_QUANTUM: f32 = 2.9;
+
+    const AUDIO_TIME_NEEDED: f32 = GUARANTEED_AUDIO_DUTY_CYCLE * TIME_QUANTUM;
+    const MAX_TIME_ALLOWED: f32 = MAX_AUDIO_DUTY_CYCLE * TIME_QUANTUM;
+
+    let mut timebase_info = mach_timebase_info_data_t { numer: 0, denom: 0 };
+    // SAFETY: timebase_info is a valid pointer to a mach_timebase_info_data_t struct
+    unsafe { mach2::mach_time::mach_timebase_info(&mut timebase_info) };
+
+    let ms_to_abs_time = ((timebase_info.denom as f32) / (timebase_info.numer as f32)) * 1000000f32;
+
+    let mut time_constraints = thread_time_constraint_policy_data_t {
+        period: (TIME_QUANTUM * ms_to_abs_time) as u32,
+        computation: (AUDIO_TIME_NEEDED * ms_to_abs_time) as u32,
+        constraint: (MAX_TIME_ALLOWED * ms_to_abs_time) as u32,
+        preemptible: 0,
+    };
+
+    // SAFETY: thread_id is a valid thread id
+    // SAFETY: thread_time_constraint_policy_data_t is passed as THREAD_TIME_CONSTRAINT_POLICY
+    let result = unsafe {
+        mach2::thread_policy::thread_policy_set(
+            thread_id,
+            THREAD_TIME_CONSTRAINT_POLICY,
+            &mut time_constraints as *mut _ as *mut _,
+            THREAD_TIME_CONSTRAINT_POLICY_COUNT,
+        )
+    };
+
+    if result != KERN_SUCCESS {
+        anyhow::bail!("failed to set thread time constraint policy");
+    }
+
+    Ok(())
 }
 
 extern "C" fn trampoline(runnable: *mut c_void) {

crates/gpui/src/platform/mac/display.rs 🔗

@@ -1,9 +1,9 @@
-use crate::{Bounds, DisplayId, Pixels, PlatformDisplay, px, size};
+use crate::{Bounds, DisplayId, Pixels, PlatformDisplay, point, px, size};
 use anyhow::Result;
 use cocoa::{
     appkit::NSScreen,
     base::{id, nil},
-    foundation::{NSDictionary, NSString},
+    foundation::{NSArray, NSDictionary, NSString},
 };
 use core_foundation::uuid::{CFUUIDGetUUIDBytes, CFUUIDRef};
 use core_graphics::display::{CGDirectDisplayID, CGDisplayBounds, CGGetActiveDisplayList};
@@ -114,4 +114,53 @@ impl PlatformDisplay for MacDisplay {
             }
         }
     }
+
+    fn visible_bounds(&self) -> Bounds<Pixels> {
+        unsafe {
+            let dominated_screen = self.get_nsscreen();
+
+            if dominated_screen == nil {
+                return self.bounds();
+            }
+
+            let screen_frame = NSScreen::frame(dominated_screen);
+            let visible_frame = NSScreen::visibleFrame(dominated_screen);
+
+            // Convert from bottom-left origin (AppKit) to top-left origin
+            let origin_y =
+                screen_frame.size.height - visible_frame.origin.y - visible_frame.size.height
+                    + screen_frame.origin.y;
+
+            Bounds {
+                origin: point(
+                    px(visible_frame.origin.x as f32 - screen_frame.origin.x as f32),
+                    px(origin_y as f32),
+                ),
+                size: size(
+                    px(visible_frame.size.width as f32),
+                    px(visible_frame.size.height as f32),
+                ),
+            }
+        }
+    }
+}
+
+impl MacDisplay {
+    /// Find the NSScreen corresponding to this display
+    unsafe fn get_nsscreen(&self) -> id {
+        let screens = unsafe { NSScreen::screens(nil) };
+        let count = unsafe { NSArray::count(screens) };
+        let screen_number_key: id = unsafe { NSString::alloc(nil).init_str("NSScreenNumber") };
+
+        for i in 0..count {
+            let screen = unsafe { NSArray::objectAtIndex(screens, i) };
+            let device_description = unsafe { NSScreen::deviceDescription(screen) };
+            let screen_number = unsafe { device_description.objectForKey_(screen_number_key) };
+            let screen_id: CGDirectDisplayID = msg_send![screen_number, unsignedIntegerValue];
+            if screen_id == self.0 {
+                return screen;
+            }
+        }
+        nil
+    }
 }

crates/gpui/src/platform/test/dispatcher.rs 🔗

@@ -1,4 +1,4 @@
-use crate::{PlatformDispatcher, RunnableVariant, TaskLabel};
+use crate::{PlatformDispatcher, Priority, RunnableVariant, TaskLabel};
 use backtrace::Backtrace;
 use collections::{HashMap, HashSet, VecDeque};
 use parking::Unparker;
@@ -284,7 +284,7 @@ impl PlatformDispatcher for TestDispatcher {
         state.start_time + state.time
     }
 
-    fn dispatch(&self, runnable: RunnableVariant, label: Option<TaskLabel>) {
+    fn dispatch(&self, runnable: RunnableVariant, label: Option<TaskLabel>, _priority: Priority) {
         {
             let mut state = self.state.lock();
             if label.is_some_and(|label| state.deprioritized_task_labels.contains(&label)) {
@@ -296,7 +296,7 @@ impl PlatformDispatcher for TestDispatcher {
         self.unpark_all();
     }
 
-    fn dispatch_on_main_thread(&self, runnable: RunnableVariant) {
+    fn dispatch_on_main_thread(&self, runnable: RunnableVariant, _priority: Priority) {
         self.state
             .lock()
             .foreground
@@ -318,4 +318,10 @@ impl PlatformDispatcher for TestDispatcher {
     fn as_test(&self) -> Option<&TestDispatcher> {
         Some(self)
     }
+
+    fn spawn_realtime(&self, _priority: crate::RealtimePriority, f: Box<dyn FnOnce() + Send>) {
+        std::thread::spawn(move || {
+            f();
+        });
+    }
 }

crates/gpui/src/platform/windows/dispatcher.rs 🔗

@@ -4,24 +4,31 @@ use std::{
     time::{Duration, Instant},
 };
 
-use flume::Sender;
+use anyhow::Context;
 use util::ResultExt;
 use windows::{
-    System::Threading::{ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler},
+    System::Threading::{
+        ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler, WorkItemPriority,
+    },
     Win32::{
         Foundation::{LPARAM, WPARAM},
+        System::Threading::{
+            GetCurrentThread, HIGH_PRIORITY_CLASS, SetPriorityClass, SetThreadPriority,
+            THREAD_PRIORITY_HIGHEST, THREAD_PRIORITY_TIME_CRITICAL,
+        },
         UI::WindowsAndMessaging::PostMessageW,
     },
 };
 
 use crate::{
-    GLOBAL_THREAD_TIMINGS, HWND, PlatformDispatcher, RunnableVariant, SafeHwnd, THREAD_TIMINGS,
-    TaskLabel, TaskTiming, ThreadTaskTimings, WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD,
+    GLOBAL_THREAD_TIMINGS, HWND, PlatformDispatcher, Priority, PriorityQueueSender,
+    RealtimePriority, RunnableVariant, SafeHwnd, THREAD_TIMINGS, TaskLabel, TaskTiming,
+    ThreadTaskTimings, WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD, profiler,
 };
 
 pub(crate) struct WindowsDispatcher {
     pub(crate) wake_posted: AtomicBool,
-    main_sender: Sender<RunnableVariant>,
+    main_sender: PriorityQueueSender<RunnableVariant>,
     main_thread_id: ThreadId,
     pub(crate) platform_window_handle: SafeHwnd,
     validation_number: usize,
@@ -29,7 +36,7 @@ pub(crate) struct WindowsDispatcher {
 
 impl WindowsDispatcher {
     pub(crate) fn new(
-        main_sender: Sender<RunnableVariant>,
+        main_sender: PriorityQueueSender<RunnableVariant>,
         platform_window_handle: HWND,
         validation_number: usize,
     ) -> Self {
@@ -45,7 +52,7 @@ impl WindowsDispatcher {
         }
     }
 
-    fn dispatch_on_threadpool(&self, runnable: RunnableVariant) {
+    fn dispatch_on_threadpool(&self, priority: WorkItemPriority, runnable: RunnableVariant) {
         let handler = {
             let mut task_wrapper = Some(runnable);
             WorkItemHandler::new(move |_| {
@@ -53,7 +60,8 @@ impl WindowsDispatcher {
                 Ok(())
             })
         };
-        ThreadPool::RunAsync(&handler).log_err();
+
+        ThreadPool::RunWithPriorityAsync(&handler, priority).log_err();
     }
 
     fn dispatch_on_threadpool_after(&self, runnable: RunnableVariant, duration: Duration) {
@@ -79,7 +87,7 @@ impl WindowsDispatcher {
                     start,
                     end: None,
                 };
-                Self::add_task_timing(timing);
+                profiler::add_task_timing(timing);
 
                 runnable.run();
 
@@ -91,7 +99,7 @@ impl WindowsDispatcher {
                     start,
                     end: None,
                 };
-                Self::add_task_timing(timing);
+                profiler::add_task_timing(timing);
 
                 runnable.run();
 
@@ -102,23 +110,7 @@ impl WindowsDispatcher {
         let end = Instant::now();
         timing.end = Some(end);
 
-        Self::add_task_timing(timing);
-    }
-
-    pub(crate) fn add_task_timing(timing: TaskTiming) {
-        THREAD_TIMINGS.with(|timings| {
-            let mut timings = timings.lock();
-            let timings = &mut timings.timings;
-
-            if let Some(last_timing) = timings.iter_mut().rev().next() {
-                if last_timing.location == timing.location {
-                    last_timing.end = timing.end;
-                    return;
-                }
-            }
-
-            timings.push_back(timing);
-        });
+        profiler::add_task_timing(timing);
     }
 }
 
@@ -146,15 +138,22 @@ impl PlatformDispatcher for WindowsDispatcher {
         current().id() == self.main_thread_id
     }
 
-    fn dispatch(&self, runnable: RunnableVariant, label: Option<TaskLabel>) {
-        self.dispatch_on_threadpool(runnable);
+    fn dispatch(&self, runnable: RunnableVariant, label: Option<TaskLabel>, priority: Priority) {
+        let priority = match priority {
+            Priority::Realtime(_) => unreachable!(),
+            Priority::High => WorkItemPriority::High,
+            Priority::Medium => WorkItemPriority::Normal,
+            Priority::Low => WorkItemPriority::Low,
+        };
+        self.dispatch_on_threadpool(priority, runnable);
+
         if let Some(label) = label {
             log::debug!("TaskLabel: {label:?}");
         }
     }
 
-    fn dispatch_on_main_thread(&self, runnable: RunnableVariant) {
-        match self.main_sender.send(runnable) {
+    fn dispatch_on_main_thread(&self, runnable: RunnableVariant, priority: Priority) {
+        match self.main_sender.send(priority, runnable) {
             Ok(_) => {
                 if !self.wake_posted.swap(true, Ordering::AcqRel) {
                     unsafe {
@@ -185,4 +184,28 @@ impl PlatformDispatcher for WindowsDispatcher {
     fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) {
         self.dispatch_on_threadpool_after(runnable, duration);
     }
+
+    fn spawn_realtime(&self, priority: RealtimePriority, f: Box<dyn FnOnce() + Send>) {
+        std::thread::spawn(move || {
+            // SAFETY: always safe to call
+            let thread_handle = unsafe { GetCurrentThread() };
+
+            let thread_priority = match priority {
+                RealtimePriority::Audio => THREAD_PRIORITY_TIME_CRITICAL,
+                RealtimePriority::Other => THREAD_PRIORITY_HIGHEST,
+            };
+
+            // SAFETY: thread_handle is a valid handle to a thread
+            unsafe { SetPriorityClass(thread_handle, HIGH_PRIORITY_CLASS) }
+                .context("thread priority class")
+                .log_err();
+
+            // SAFETY: thread_handle is a valid handle to a thread
+            unsafe { SetThreadPriority(thread_handle, thread_priority) }
+                .context("thread priority")
+                .log_err();
+
+            f();
+        });
+    }
 }

crates/gpui/src/platform/windows/display.rs 🔗

@@ -23,6 +23,7 @@ pub(crate) struct WindowsDisplay {
     pub display_id: DisplayId,
     scale_factor: f32,
     bounds: Bounds<Pixels>,
+    visible_bounds: Bounds<Pixels>,
     physical_bounds: Bounds<DevicePixels>,
     uuid: Uuid,
 }
@@ -36,6 +37,7 @@ impl WindowsDisplay {
         let screen = available_monitors().into_iter().nth(display_id.0 as _)?;
         let info = get_monitor_info(screen).log_err()?;
         let monitor_size = info.monitorInfo.rcMonitor;
+        let work_area = info.monitorInfo.rcWork;
         let uuid = generate_uuid(&info.szDevice);
         let scale_factor = get_scale_factor_for_monitor(screen).log_err()?;
         let physical_size = size(
@@ -55,6 +57,14 @@ impl WindowsDisplay {
                 ),
                 size: physical_size.to_pixels(scale_factor),
             },
+            visible_bounds: Bounds {
+                origin: logical_point(work_area.left as f32, work_area.top as f32, scale_factor),
+                size: size(
+                    (work_area.right - work_area.left) as f32 / scale_factor,
+                    (work_area.bottom - work_area.top) as f32 / scale_factor,
+                )
+                .map(crate::px),
+            },
             physical_bounds: Bounds {
                 origin: point(monitor_size.left.into(), monitor_size.top.into()),
                 size: physical_size,
@@ -66,6 +76,7 @@ impl WindowsDisplay {
     pub fn new_with_handle(monitor: HMONITOR) -> anyhow::Result<Self> {
         let info = get_monitor_info(monitor)?;
         let monitor_size = info.monitorInfo.rcMonitor;
+        let work_area = info.monitorInfo.rcWork;
         let uuid = generate_uuid(&info.szDevice);
         let display_id = available_monitors()
             .iter()
@@ -89,6 +100,14 @@ impl WindowsDisplay {
                 ),
                 size: physical_size.to_pixels(scale_factor),
             },
+            visible_bounds: Bounds {
+                origin: logical_point(work_area.left as f32, work_area.top as f32, scale_factor),
+                size: size(
+                    (work_area.right - work_area.left) as f32 / scale_factor,
+                    (work_area.bottom - work_area.top) as f32 / scale_factor,
+                )
+                .map(crate::px),
+            },
             physical_bounds: Bounds {
                 origin: point(monitor_size.left.into(), monitor_size.top.into()),
                 size: physical_size,
@@ -100,6 +119,7 @@ impl WindowsDisplay {
     fn new_with_handle_and_id(handle: HMONITOR, display_id: DisplayId) -> anyhow::Result<Self> {
         let info = get_monitor_info(handle)?;
         let monitor_size = info.monitorInfo.rcMonitor;
+        let work_area = info.monitorInfo.rcWork;
         let uuid = generate_uuid(&info.szDevice);
         let scale_factor = get_scale_factor_for_monitor(handle)?;
         let physical_size = size(
@@ -119,6 +139,14 @@ impl WindowsDisplay {
                 ),
                 size: physical_size.to_pixels(scale_factor),
             },
+            visible_bounds: Bounds {
+                origin: logical_point(work_area.left as f32, work_area.top as f32, scale_factor),
+                size: size(
+                    (work_area.right - work_area.left) as f32 / scale_factor,
+                    (work_area.bottom - work_area.top) as f32 / scale_factor,
+                )
+                .map(crate::px),
+            },
             physical_bounds: Bounds {
                 origin: point(monitor_size.left.into(), monitor_size.top.into()),
                 size: physical_size,
@@ -193,6 +221,10 @@ impl PlatformDisplay for WindowsDisplay {
     fn bounds(&self) -> Bounds<Pixels> {
         self.bounds
     }
+
+    fn visible_bounds(&self) -> Bounds<Pixels> {
+        self.visible_bounds
+    }
 }
 
 fn available_monitors() -> SmallVec<[HMONITOR; 4]> {

crates/gpui/src/platform/windows/events.rs 🔗

@@ -243,7 +243,8 @@ impl WindowsWindowInner {
 
     fn handle_timer_msg(&self, handle: HWND, wparam: WPARAM) -> Option<isize> {
         if wparam.0 == SIZE_MOVE_LOOP_TIMER_ID {
-            for runnable in self.main_receiver.drain() {
+            let mut runnables = self.main_receiver.clone().try_iter();
+            while let Some(Ok(runnable)) = runnables.next() {
                 WindowsDispatcher::execute_runnable(runnable);
             }
             self.handle_paint_msg(handle)

crates/gpui/src/platform/windows/platform.rs 🔗

@@ -51,7 +51,7 @@ struct WindowsPlatformInner {
     raw_window_handles: std::sync::Weak<RwLock<SmallVec<[SafeHwnd; 4]>>>,
     // The below members will never change throughout the entire lifecycle of the app.
     validation_number: usize,
-    main_receiver: flume::Receiver<RunnableVariant>,
+    main_receiver: PriorityQueueReceiver<RunnableVariant>,
     dispatcher: Arc<WindowsDispatcher>,
 }
 
@@ -98,7 +98,7 @@ impl WindowsPlatform {
             OleInitialize(None).context("unable to initialize Windows OLE")?;
         }
         let directx_devices = DirectXDevices::new().context("Creating DirectX devices")?;
-        let (main_sender, main_receiver) = flume::unbounded::<RunnableVariant>();
+        let (main_sender, main_receiver) = PriorityQueueReceiver::new();
         let validation_number = if usize::BITS == 64 {
             rand::random::<u64>() as usize
         } else {
@@ -857,22 +857,24 @@ impl WindowsPlatformInner {
                     }
                     break 'tasks;
                 }
-                match self.main_receiver.try_recv() {
-                    Err(_) => break 'timeout_loop,
-                    Ok(runnable) => WindowsDispatcher::execute_runnable(runnable),
+                let mut main_receiver = self.main_receiver.clone();
+                match main_receiver.try_pop() {
+                    Ok(Some(runnable)) => WindowsDispatcher::execute_runnable(runnable),
+                    _ => break 'timeout_loop,
                 }
             }
 
             // Someone could enqueue a Runnable here. The flag is still true, so they will not PostMessage.
             // We need to check for those Runnables after we clear the flag.
             self.dispatcher.wake_posted.store(false, Ordering::Release);
-            match self.main_receiver.try_recv() {
-                Err(_) => break 'tasks,
-                Ok(runnable) => {
+            let mut main_receiver = self.main_receiver.clone();
+            match main_receiver.try_pop() {
+                Ok(Some(runnable)) => {
                     self.dispatcher.wake_posted.store(true, Ordering::Release);
 
                     WindowsDispatcher::execute_runnable(runnable);
                 }
+                _ => break 'tasks,
             }
         }
 
@@ -934,7 +936,7 @@ pub(crate) struct WindowCreationInfo {
     pub(crate) windows_version: WindowsVersion,
     pub(crate) drop_target_helper: IDropTargetHelper,
     pub(crate) validation_number: usize,
-    pub(crate) main_receiver: flume::Receiver<RunnableVariant>,
+    pub(crate) main_receiver: PriorityQueueReceiver<RunnableVariant>,
     pub(crate) platform_window_handle: HWND,
     pub(crate) disable_direct_composition: bool,
     pub(crate) directx_devices: DirectXDevices,
@@ -947,8 +949,8 @@ struct PlatformWindowCreateContext {
     inner: Option<Result<Rc<WindowsPlatformInner>>>,
     raw_window_handles: std::sync::Weak<RwLock<SmallVec<[SafeHwnd; 4]>>>,
     validation_number: usize,
-    main_sender: Option<flume::Sender<RunnableVariant>>,
-    main_receiver: Option<flume::Receiver<RunnableVariant>>,
+    main_sender: Option<PriorityQueueSender<RunnableVariant>>,
+    main_receiver: Option<PriorityQueueReceiver<RunnableVariant>>,
     directx_devices: Option<DirectXDevices>,
     dispatcher: Option<Arc<WindowsDispatcher>>,
 }
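
The drain loop above relies on the wake_posted handshake: senders only post a window message when they flip the flag from false to true, so the consumer must clear the flag and re-check the queue before going back to sleep. A simplified, self-contained sketch of that protocol, using plain std types rather than the gpui ones:

use std::sync::{
    mpsc,
    atomic::{AtomicBool, Ordering},
};

fn enqueue(wake_posted: &AtomicBool, tx: &mpsc::Sender<u32>, item: u32) -> bool {
    tx.send(item).unwrap();
    // Returns true when a wake would be posted (PostMessageW in the real code).
    !wake_posted.swap(true, Ordering::AcqRel)
}

fn drain(wake_posted: &AtomicBool, rx: &mpsc::Receiver<u32>) -> Vec<u32> {
    let mut drained = Vec::new();
    loop {
        while let Ok(item) = rx.try_recv() {
            drained.push(item);
        }
        // A sender could have enqueued while the flag was still true (and thus
        // skipped the wake), so clear the flag and check one more time.
        wake_posted.store(false, Ordering::Release);
        match rx.try_recv() {
            Ok(item) => {
                wake_posted.store(true, Ordering::Release);
                drained.push(item);
            }
            Err(_) => break,
        }
    }
    drained
}

fn main() {
    let wake_posted = AtomicBool::new(false);
    let (tx, rx) = mpsc::channel();
    assert!(enqueue(&wake_posted, &tx, 1)); // first enqueue posts a wake
    assert!(!enqueue(&wake_posted, &tx, 2)); // flag already set, no second wake
    assert_eq!(drain(&wake_posted, &rx), vec![1, 2]);
    assert!(!wake_posted.load(Ordering::Acquire));
}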

crates/gpui/src/platform/windows/window.rs 🔗

@@ -81,7 +81,7 @@ pub(crate) struct WindowsWindowInner {
     pub(crate) executor: ForegroundExecutor,
     pub(crate) windows_version: WindowsVersion,
     pub(crate) validation_number: usize,
-    pub(crate) main_receiver: flume::Receiver<RunnableVariant>,
+    pub(crate) main_receiver: PriorityQueueReceiver<RunnableVariant>,
     pub(crate) platform_window_handle: HWND,
 }
 
@@ -362,7 +362,7 @@ struct WindowCreateContext {
     windows_version: WindowsVersion,
     drop_target_helper: IDropTargetHelper,
     validation_number: usize,
-    main_receiver: flume::Receiver<RunnableVariant>,
+    main_receiver: PriorityQueueReceiver<RunnableVariant>,
     platform_window_handle: HWND,
     appearance: WindowAppearance,
     disable_direct_composition: bool,

crates/gpui/src/profiler.rs 🔗

@@ -216,3 +216,19 @@ impl Drop for ThreadTimings {
         thread_timings.swap_remove(index);
     }
 }
+
+pub(crate) fn add_task_timing(timing: TaskTiming) {
+    THREAD_TIMINGS.with(|timings| {
+        let mut timings = timings.lock();
+        let timings = &mut timings.timings;
+
+        if let Some(last_timing) = timings.iter_mut().rev().next() {
+            if last_timing.location == timing.location {
+                last_timing.end = timing.end;
+                return;
+            }
+        }
+
+        timings.push_back(timing);
+    });
+}
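
To illustrate the coalescing rule in add_task_timing: consecutive timings reported for the same location extend the previous entry rather than appending a new one. A simplified, self-contained sketch follows; the Location and Timing structs here are stand-ins, since TaskTiming's full definition is not part of this diff.

use std::collections::VecDeque;

// Stand-in types for illustration only; the real TaskTiming lives in gpui.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Location(&'static str);

#[derive(Clone, Copy, Debug)]
struct Timing {
    location: Location,
    end: Option<u64>,
}

// Same rule as profiler::add_task_timing: merge into the previous entry when
// the location repeats, otherwise append.
fn add_timing(timings: &mut VecDeque<Timing>, timing: Timing) {
    if let Some(last) = timings.back_mut() {
        if last.location == timing.location {
            last.end = timing.end;
            return;
        }
    }
    timings.push_back(timing);
}

fn main() {
    let mut timings = VecDeque::new();
    add_timing(&mut timings, Timing { location: Location("run loop"), end: None });
    add_timing(&mut timings, Timing { location: Location("run loop"), end: Some(10) });
    add_timing(&mut timings, Timing { location: Location("layout"), end: Some(20) });

    // The two "run loop" entries were coalesced into one.
    assert_eq!(timings.len(), 2);
    assert_eq!(timings[0].end, Some(10));
}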

crates/gpui/src/queue.rs 🔗

@@ -0,0 +1,329 @@
+use std::{
+    fmt,
+    iter::FusedIterator,
+    sync::{Arc, atomic::AtomicUsize},
+};
+
+use rand::{Rng, SeedableRng, rngs::SmallRng};
+
+use crate::Priority;
+
+struct PriorityQueues<T> {
+    high_priority: Vec<T>,
+    medium_priority: Vec<T>,
+    low_priority: Vec<T>,
+}
+
+impl<T> PriorityQueues<T> {
+    fn is_empty(&self) -> bool {
+        self.high_priority.is_empty()
+            && self.medium_priority.is_empty()
+            && self.low_priority.is_empty()
+    }
+}
+
+struct PriorityQueueState<T> {
+    queues: parking_lot::Mutex<PriorityQueues<T>>,
+    condvar: parking_lot::Condvar,
+    receiver_count: AtomicUsize,
+    sender_count: AtomicUsize,
+}
+
+impl<T> PriorityQueueState<T> {
+    fn send(&self, priority: Priority, item: T) -> Result<(), SendError<T>> {
+        if self
+            .receiver_count
+            .load(std::sync::atomic::Ordering::Relaxed)
+            == 0
+        {
+            return Err(SendError(item));
+        }
+
+        let mut queues = self.queues.lock();
+        match priority {
+            Priority::Realtime(_) => unreachable!(),
+            Priority::High => queues.high_priority.push(item),
+            Priority::Medium => queues.medium_priority.push(item),
+            Priority::Low => queues.low_priority.push(item),
+        };
+        self.condvar.notify_one();
+        Ok(())
+    }
+
+    fn recv<'a>(&'a self) -> Result<parking_lot::MutexGuard<'a, PriorityQueues<T>>, RecvError> {
+        let mut queues = self.queues.lock();
+
+        let sender_count = self.sender_count.load(std::sync::atomic::Ordering::Relaxed);
+        if queues.is_empty() && sender_count == 0 {
+            return Err(crate::queue::RecvError);
+        }
+
+        // parking_lot doesn't do spurious wakeups so an if is fine
+        if queues.is_empty() {
+            self.condvar.wait(&mut queues);
+        }
+
+        Ok(queues)
+    }
+
+    fn try_recv<'a>(
+        &'a self,
+    ) -> Result<Option<parking_lot::MutexGuard<'a, PriorityQueues<T>>>, RecvError> {
+        let mut queues = self.queues.lock();
+
+        let sender_count = self.sender_count.load(std::sync::atomic::Ordering::Relaxed);
+        if queues.is_empty() && sender_count == 0 {
+            return Err(crate::queue::RecvError);
+        }
+
+        if queues.is_empty() {
+            Ok(None)
+        } else {
+            Ok(Some(queues))
+        }
+    }
+}
+
+pub(crate) struct PriorityQueueSender<T> {
+    state: Arc<PriorityQueueState<T>>,
+}
+
+impl<T> PriorityQueueSender<T> {
+    fn new(state: Arc<PriorityQueueState<T>>) -> Self {
+        Self { state }
+    }
+
+    pub(crate) fn send(&self, priority: Priority, item: T) -> Result<(), SendError<T>> {
+        self.state.send(priority, item)?;
+        Ok(())
+    }
+}
+
+impl<T> Drop for PriorityQueueSender<T> {
+    fn drop(&mut self) {
+        self.state
+            .sender_count
+            .fetch_sub(1, std::sync::atomic::Ordering::AcqRel);
+    }
+}
+
+pub(crate) struct PriorityQueueReceiver<T> {
+    state: Arc<PriorityQueueState<T>>,
+    rand: SmallRng,
+    disconnected: bool,
+}
+
+impl<T> Clone for PriorityQueueReceiver<T> {
+    fn clone(&self) -> Self {
+        self.state
+            .receiver_count
+            .fetch_add(1, std::sync::atomic::Ordering::AcqRel);
+        Self {
+            state: Arc::clone(&self.state),
+            rand: SmallRng::seed_from_u64(0),
+            disconnected: self.disconnected,
+        }
+    }
+}
+
+pub(crate) struct SendError<T>(T);
+
+impl<T: fmt::Debug> fmt::Debug for SendError<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("SendError").field(&self.0).finish()
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct RecvError;
+
+#[allow(dead_code)]
+impl<T> PriorityQueueReceiver<T> {
+    pub(crate) fn new() -> (PriorityQueueSender<T>, Self) {
+        let state = PriorityQueueState {
+            queues: parking_lot::Mutex::new(PriorityQueues {
+                high_priority: Vec::new(),
+                medium_priority: Vec::new(),
+                low_priority: Vec::new(),
+            }),
+            condvar: parking_lot::Condvar::new(),
+            receiver_count: AtomicUsize::new(1),
+            sender_count: AtomicUsize::new(1),
+        };
+        let state = Arc::new(state);
+
+        let sender = PriorityQueueSender::new(Arc::clone(&state));
+
+        let receiver = PriorityQueueReceiver {
+            state,
+            rand: SmallRng::seed_from_u64(0),
+            disconnected: false,
+        };
+
+        (sender, receiver)
+    }
+
+    /// Tries to pop one element from the priority queue without blocking.
+    ///
+    /// This returns early if there are no elements in the queue.
+    ///
+    /// This method is best suited if you only intend to pop one element; for better performance
+    /// on large queues, see [`Self::try_iter`].
+    ///
+    /// # Errors
+    ///
+    /// If the sender was dropped
+    pub(crate) fn try_pop(&mut self) -> Result<Option<T>, RecvError> {
+        self.pop_inner(false)
+    }
+
+    /// Pops an element from the priority queue, blocking if necessary.
+    ///
+    /// This method is best suited if you only intend to pop one element; for better performance
+    /// on large queues, see [`Self::iter`].
+    ///
+    /// # Errors
+    ///
+    /// If the sender was dropped
+    pub(crate) fn pop(&mut self) -> Result<T, RecvError> {
+        self.pop_inner(true).map(|e| e.unwrap())
+    }
+
+    /// Returns an iterator over the elements of the queue.
+    /// This iterator ends when all elements have been consumed and does not wait for new ones.
+    pub(crate) fn try_iter(self) -> TryIter<T> {
+        TryIter {
+            receiver: self,
+            ended: false,
+        }
+    }
+
+    /// Returns an iterator over the elements of the queue.
+    /// This iterator waits for new elements if the queue is empty.
+    pub(crate) fn iter(self) -> Iter<T> {
+        Iter(self)
+    }
+
+    #[inline(always)]
+    // Algorithm: simulating a loaded die with biased coins, from
+    // https://www.keithschwarz.com/darts-dice-coins/
+    fn pop_inner(&mut self, block: bool) -> Result<Option<T>, RecvError> {
+        use Priority as P;
+
+        let mut queues = if !block {
+            let Some(queues) = self.state.try_recv()? else {
+                return Ok(None);
+            };
+            queues
+        } else {
+            self.state.recv()?
+        };
+
+        let high = P::High.probability() * !queues.high_priority.is_empty() as u32;
+        let medium = P::Medium.probability() * !queues.medium_priority.is_empty() as u32;
+        let low = P::Low.probability() * !queues.low_priority.is_empty() as u32;
+        let mut mass = high + medium + low; // total probability mass of the non-empty queues
+
+        if !queues.high_priority.is_empty() {
+            let flip = self.rand.random_ratio(P::High.probability(), mass);
+            if flip {
+                return Ok(queues.high_priority.pop());
+            }
+            mass -= P::High.probability();
+        }
+
+        if !queues.medium_priority.is_empty() {
+            let flip = self.rand.random_ratio(P::Medium.probability(), mass);
+            if flip {
+                return Ok(queues.medium_priority.pop());
+            }
+            mass -= P::Medium.probability();
+        }
+
+        if !queues.low_priority.is_empty() {
+            let flip = self.rand.random_ratio(P::Low.probability(), mass);
+            if flip {
+                return Ok(queues.low_priority.pop());
+            }
+        }
+
+        Ok(None)
+    }
+}
+
+impl<T> Drop for PriorityQueueReceiver<T> {
+    fn drop(&mut self) {
+        self.state
+            .receiver_count
+            .fetch_sub(1, std::sync::atomic::Ordering::AcqRel);
+    }
+}
+
+/// If `None` is returned, the sender has disconnected.
+pub(crate) struct Iter<T>(PriorityQueueReceiver<T>);
+impl<T> Iterator for Iter<T> {
+    type Item = T;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.0.pop_inner(true).ok().flatten()
+    }
+}
+impl<T> FusedIterator for Iter<T> {}
+
+/// If `None` is returned, there are no more elements in the queue.
+pub(crate) struct TryIter<T> {
+    receiver: PriorityQueueReceiver<T>,
+    ended: bool,
+}
+impl<T> Iterator for TryIter<T> {
+    type Item = Result<T, RecvError>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.ended {
+            return None;
+        }
+
+        let res = self.receiver.pop_inner(false);
+        self.ended = res.is_err();
+
+        res.transpose()
+    }
+}
+impl<T> FusedIterator for TryIter<T> {}
+
+#[cfg(test)]
+mod tests {
+    use collections::HashSet;
+
+    use super::*;
+
+    #[test]
+    fn all_tasks_get_yielded() {
+        let (tx, mut rx) = PriorityQueueReceiver::new();
+        tx.send(Priority::Medium, 20).unwrap();
+        tx.send(Priority::High, 30).unwrap();
+        tx.send(Priority::Low, 10).unwrap();
+        tx.send(Priority::Medium, 21).unwrap();
+        tx.send(Priority::High, 31).unwrap();
+
+        drop(tx);
+
+        assert_eq!(
+            rx.iter().collect::<HashSet<_>>(),
+            [30, 31, 20, 21, 10].into_iter().collect::<HashSet<_>>()
+        )
+    }
+
+    #[test]
+    fn new_high_prio_task_get_scheduled_quickly() {
+        let (tx, mut rx) = PriorityQueueReceiver::new();
+        for _ in 0..100 {
+            tx.send(Priority::Low, 1).unwrap();
+        }
+
+        assert_eq!(rx.pop().unwrap(), 1);
+        tx.send(Priority::High, 3).unwrap();
+        assert_eq!(rx.pop().unwrap(), 3);
+        assert_eq!(rx.pop().unwrap(), 1);
+    }
+}
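
pop_inner above walks the queues from high to low, flipping a coin biased by each queue's share of the remaining probability mass (the loaded-die-from-biased-coins construction it cites). Below is a self-contained sketch of that selection. The 6/3/1 weights are assumptions for illustration, since Priority::probability() is defined outside this diff, and the sketch uses the same rand 0.9 SmallRng API (with the small_rng feature) as the queue code.

use rand::{Rng, SeedableRng, rngs::SmallRng};

fn pick(rng: &mut SmallRng, non_empty: [bool; 3]) -> Option<usize> {
    let weights: [u32; 3] = [6, 3, 1]; // high, medium, low (assumed values)
    let mut mass: u32 = weights
        .iter()
        .zip(non_empty)
        .map(|(w, ne)| w * ne as u32)
        .sum();

    for (i, (&weight, ne)) in weights.iter().zip(non_empty).enumerate() {
        if !ne {
            continue;
        }
        // Coin biased by this queue's share of the remaining mass; the last
        // non-empty queue always wins because weight == mass by then.
        if rng.random_ratio(weight, mass) {
            return Some(i);
        }
        mass -= weight;
    }
    None
}

fn main() {
    let mut rng = SmallRng::seed_from_u64(0);
    let mut counts = [0usize; 3];
    for _ in 0..10_000 {
        if let Some(i) = pick(&mut rng, [true, true, true]) {
            counts[i] += 1;
        }
    }
    // Roughly 60% / 30% / 10% of the picks with the assumed weights.
    println!("high/medium/low picks: {counts:?}");
}

Sampling by remaining mass, rather than always draining the high-priority queue first, gives lower-priority tasks a chance to run even while higher-priority work keeps arriving.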

crates/gpui/src/styled.rs 🔗

@@ -1,8 +1,9 @@
 use crate::{
     self as gpui, AbsoluteLength, AlignContent, AlignItems, BorderStyle, CursorStyle,
-    DefiniteLength, Display, Fill, FlexDirection, FlexWrap, Font, FontStyle, FontWeight,
-    GridPlacement, Hsla, JustifyContent, Length, SharedString, StrikethroughStyle, StyleRefinement,
-    TextAlign, TextOverflow, TextStyleRefinement, UnderlineStyle, WhiteSpace, px, relative, rems,
+    DefiniteLength, Display, Fill, FlexDirection, FlexWrap, Font, FontFeatures, FontStyle,
+    FontWeight, GridPlacement, Hsla, JustifyContent, Length, SharedString, StrikethroughStyle,
+    StyleRefinement, TextAlign, TextOverflow, TextStyleRefinement, UnderlineStyle, WhiteSpace, px,
+    relative, rems,
 };
 pub use gpui_macros::{
     border_style_methods, box_shadow_style_methods, cursor_style_methods, margin_style_methods,
@@ -630,6 +631,14 @@ pub trait Styled: Sized {
         self
     }
 
+    /// Sets the font features of this element and its children.
+    fn font_features(mut self, features: FontFeatures) -> Self {
+        self.text_style()
+            .get_or_insert_with(Default::default)
+            .font_features = Some(features);
+        self
+    }
+
     /// Sets the font of this element and its children.
     fn font(mut self, font: Font) -> Self {
         let Font {

crates/gpui/src/window.rs 🔗

@@ -9,14 +9,15 @@ use crate::{
     KeyBinding, KeyContext, KeyDownEvent, KeyEvent, Keystroke, KeystrokeEvent, LayoutId,
     LineLayoutIndex, Modifiers, ModifiersChangedEvent, MonochromeSprite, MouseButton, MouseEvent,
     MouseMoveEvent, MouseUpEvent, Path, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput,
-    PlatformInputHandler, PlatformWindow, Point, PolychromeSprite, PromptButton, PromptLevel, Quad,
-    Render, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Replay, ResizeEdge,
-    SMOOTH_SVG_SCALE_FACTOR, SUBPIXEL_VARIANTS_X, SUBPIXEL_VARIANTS_Y, ScaledPixels, Scene, Shadow,
-    SharedString, Size, StrikethroughStyle, Style, SubscriberSet, Subscription, SystemWindowTab,
-    SystemWindowTabController, TabStopMap, TaffyLayoutEngine, Task, TextStyle, TextStyleRefinement,
-    TransformationMatrix, Underline, UnderlineStyle, WindowAppearance, WindowBackgroundAppearance,
-    WindowBounds, WindowControls, WindowDecorations, WindowOptions, WindowParams, WindowTextSystem,
-    point, prelude::*, px, rems, size, transparent_black,
+    PlatformInputHandler, PlatformWindow, Point, PolychromeSprite, Priority, PromptButton,
+    PromptLevel, Quad, Render, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams,
+    Replay, ResizeEdge, SMOOTH_SVG_SCALE_FACTOR, SUBPIXEL_VARIANTS_X, SUBPIXEL_VARIANTS_Y,
+    ScaledPixels, Scene, Shadow, SharedString, Size, StrikethroughStyle, Style, SubscriberSet,
+    Subscription, SystemWindowTab, SystemWindowTabController, TabStopMap, TaffyLayoutEngine, Task,
+    TextStyle, TextStyleRefinement, TransformationMatrix, Underline, UnderlineStyle,
+    WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControls, WindowDecorations,
+    WindowOptions, WindowParams, WindowTextSystem, point, prelude::*, px, rems, size,
+    transparent_black,
 };
 use anyhow::{Context as _, Result, anyhow};
 use collections::{FxHashMap, FxHashSet};
@@ -918,86 +919,69 @@ pub(crate) struct ElementStateBox {
     pub(crate) type_name: &'static str,
 }
 
-fn default_bounds(display_id: Option<DisplayId>, cx: &mut App) -> Bounds<Pixels> {
-    #[cfg(target_os = "macos")]
-    {
-        const CASCADE_OFFSET: f32 = 25.0;
-
-        let display = display_id
-            .map(|id| cx.find_display(id))
-            .unwrap_or_else(|| cx.primary_display());
-
-        let display_bounds = display
-            .as_ref()
-            .map(|d| d.bounds())
-            .unwrap_or_else(|| Bounds::new(point(px(0.), px(0.)), DEFAULT_WINDOW_SIZE));
-
-        // TODO, BUG: if you open a window with the currently active window
-        // on the stack, this will erroneously select the 'unwrap_or_else'
-        // code path
-        let (base_origin, base_size) = cx
-            .active_window()
-            .and_then(|w| {
-                w.update(cx, |_, window, _| {
-                    let bounds = window.bounds();
-                    (bounds.origin, bounds.size)
-                })
-                .ok()
-            })
-            .unwrap_or_else(|| {
-                let default_bounds = display
-                    .as_ref()
-                    .map(|d| d.default_bounds())
-                    .unwrap_or_else(|| Bounds::new(point(px(0.), px(0.)), DEFAULT_WINDOW_SIZE));
-                (default_bounds.origin, default_bounds.size)
-            });
-
-        let cascade_offset = point(px(CASCADE_OFFSET), px(CASCADE_OFFSET));
-        let proposed_origin = base_origin + cascade_offset;
-        let proposed_bounds = Bounds::new(proposed_origin, base_size);
-
-        let display_right = display_bounds.origin.x + display_bounds.size.width;
-        let display_bottom = display_bounds.origin.y + display_bounds.size.height;
-        let window_right = proposed_bounds.origin.x + proposed_bounds.size.width;
-        let window_bottom = proposed_bounds.origin.y + proposed_bounds.size.height;
-
-        let fits_horizontally = window_right <= display_right;
-        let fits_vertically = window_bottom <= display_bottom;
-
-        let final_origin = match (fits_horizontally, fits_vertically) {
-            (true, true) => proposed_origin,
-            (false, true) => point(display_bounds.origin.x, base_origin.y),
-            (true, false) => point(base_origin.x, display_bounds.origin.y),
-            (false, false) => display_bounds.origin,
-        };
-
-        Bounds::new(final_origin, base_size)
-    }
-
-    #[cfg(not(target_os = "macos"))]
-    {
-        const DEFAULT_WINDOW_OFFSET: Point<Pixels> = point(px(0.), px(35.));
-
-        // TODO, BUG: if you open a window with the currently active window
-        // on the stack, this will erroneously select the 'unwrap_or_else'
-        // code path
-        cx.active_window()
-            .and_then(|w| w.update(cx, |_, window, _| window.bounds()).ok())
-            .map(|mut bounds| {
-                bounds.origin += DEFAULT_WINDOW_OFFSET;
-                bounds
-            })
-            .unwrap_or_else(|| {
-                let display = display_id
-                    .map(|id| cx.find_display(id))
-                    .unwrap_or_else(|| cx.primary_display());
-
-                display
-                    .as_ref()
-                    .map(|display| display.default_bounds())
-                    .unwrap_or_else(|| Bounds::new(point(px(0.), px(0.)), DEFAULT_WINDOW_SIZE))
-            })
-    }
+fn default_bounds(display_id: Option<DisplayId>, cx: &mut App) -> WindowBounds {
+    // TODO, BUG: if you open a window while the currently active window is
+    // already being updated on the call stack, this will erroneously fall back to `None`
+    //
+    // TODO: these should be the initial window bounds, not accounting for maximized/fullscreen state
+    let active_window_bounds = cx
+        .active_window()
+        .and_then(|w| w.update(cx, |_, window, _| window.window_bounds()).ok());
+
+    const CASCADE_OFFSET: f32 = 25.0;
+
+    let display = display_id
+        .map(|id| cx.find_display(id))
+        .unwrap_or_else(|| cx.primary_display());
+
+    let default_placement = || Bounds::new(point(px(0.), px(0.)), DEFAULT_WINDOW_SIZE);
+
+    // Use visible_bounds to exclude taskbar/dock areas
+    let display_bounds = display
+        .as_ref()
+        .map(|d| d.visible_bounds())
+        .unwrap_or_else(default_placement);
+
+    let (
+        Bounds {
+            origin: base_origin,
+            size: base_size,
+        },
+        window_bounds_ctor,
+    ): (_, fn(Bounds<Pixels>) -> WindowBounds) = match active_window_bounds {
+        Some(bounds) => match bounds {
+            WindowBounds::Windowed(bounds) => (bounds, WindowBounds::Windowed),
+            WindowBounds::Maximized(bounds) => (bounds, WindowBounds::Maximized),
+            WindowBounds::Fullscreen(bounds) => (bounds, WindowBounds::Fullscreen),
+        },
+        None => (
+            display
+                .as_ref()
+                .map(|d| d.default_bounds())
+                .unwrap_or_else(default_placement),
+            WindowBounds::Windowed,
+        ),
+    };
+
+    let cascade_offset = point(px(CASCADE_OFFSET), px(CASCADE_OFFSET));
+    let proposed_origin = base_origin + cascade_offset;
+    let proposed_bounds = Bounds::new(proposed_origin, base_size);
+
+    let display_right = display_bounds.origin.x + display_bounds.size.width;
+    let display_bottom = display_bounds.origin.y + display_bounds.size.height;
+    let window_right = proposed_bounds.origin.x + proposed_bounds.size.width;
+    let window_bottom = proposed_bounds.origin.y + proposed_bounds.size.height;
+
+    let fits_horizontally = window_right <= display_right;
+    let fits_vertically = window_bottom <= display_bottom;
+
+    let final_origin = match (fits_horizontally, fits_vertically) {
+        (true, true) => proposed_origin,
+        (false, true) => point(display_bounds.origin.x, base_origin.y),
+        (true, false) => point(base_origin.x, display_bounds.origin.y),
+        (false, false) => display_bounds.origin,
+    };
+    window_bounds_ctor(Bounds::new(final_origin, base_size))
 }
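
As a worked example of the cascade check in `default_bounds` above (the numbers are illustrative and not taken from this diff): a window cascaded off the right edge of the display keeps its vertical offset but snaps back to the display's left edge horizontally.

fn main() {
    // Illustrative numbers: a 1920x1080 visible display at the origin and an
    // active window at (1800, 100) sized 200x600, cascaded by 25px.
    let (display_w, display_h) = (1920.0_f32, 1080.0_f32);
    let (base_x, base_y, win_w, win_h) = (1800.0_f32, 100.0, 200.0, 600.0);
    let (proposed_x, proposed_y) = (base_x + 25.0, base_y + 25.0);
    let fits_horizontally = proposed_x + win_w <= display_w; // 2025 <= 1920 -> false
    let fits_vertically = proposed_y + win_h <= display_h; // 725 <= 1080 -> true
    let final_origin = match (fits_horizontally, fits_vertically) {
        (true, true) => (proposed_x, proposed_y),
        (false, true) => (0.0, base_y), // snap x back to the display's left edge
        (true, false) => (base_x, 0.0), // snap y back to the display's top edge
        (false, false) => (0.0, 0.0),
    };
    assert_eq!(final_origin, (0.0, 100.0));
}
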
 
 impl Window {
@@ -1024,13 +1008,11 @@ impl Window {
             tabbing_identifier,
         } = options;
 
-        let bounds = window_bounds
-            .map(|bounds| bounds.get_bounds())
-            .unwrap_or_else(|| default_bounds(display_id, cx));
+        let window_bounds = window_bounds.unwrap_or_else(|| default_bounds(display_id, cx));
         let mut platform_window = cx.platform.open_window(
             handle,
             WindowParams {
-                bounds,
+                bounds: window_bounds.get_bounds(),
                 titlebar,
                 kind,
                 is_movable,
@@ -1071,12 +1053,10 @@ impl Window {
             .request_decorations(window_decorations.unwrap_or(WindowDecorations::Server));
         platform_window.set_background_appearance(window_background);
 
-        if let Some(ref window_open_state) = window_bounds {
-            match window_open_state {
-                WindowBounds::Fullscreen(_) => platform_window.toggle_fullscreen(),
-                WindowBounds::Maximized(_) => platform_window.zoom(),
-                WindowBounds::Windowed(_) => {}
-            }
+        match window_bounds {
+            WindowBounds::Fullscreen(_) => platform_window.toggle_fullscreen(),
+            WindowBounds::Maximized(_) => platform_window.zoom(),
+            WindowBounds::Windowed(_) => {}
         }
 
         platform_window.on_close(Box::new({
@@ -1518,7 +1498,8 @@ impl Window {
         style
     }
 
-    /// Check if the platform window is maximized
+    /// Check if the platform window is maximized.
+    ///
     /// On some platforms (namely Windows) this is different from the window bounds simply matching the size of the display.
     pub fn is_maximized(&self) -> bool {
         self.platform_window.is_maximized()
@@ -1745,6 +1726,27 @@ impl Window {
         })
     }
 
+    /// Spawn the future returned by the given closure on the application thread
+    /// pool, with the given priority. The closure is provided an `AsyncWindowContext`
+    /// bound to the current window for use within your future.
+    #[track_caller]
+    pub fn spawn_with_priority<AsyncFn, R>(
+        &self,
+        priority: Priority,
+        cx: &App,
+        f: AsyncFn,
+    ) -> Task<R>
+    where
+        R: 'static,
+        AsyncFn: AsyncFnOnce(&mut AsyncWindowContext) -> R + 'static,
+    {
+        let handle = self.handle;
+        cx.spawn_with_priority(priority, async move |app| {
+            let mut async_window_cx = AsyncWindowContext::new_context(app.clone(), handle);
+            f(&mut async_window_cx).await
+        })
+    }
+
     fn bounds_changed(&mut self, cx: &mut App) {
         self.scale_factor = self.platform_window.scale_factor();
         self.viewport_size = self.platform_window.content_size();
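
A minimal usage sketch for the new `spawn_with_priority` helper added above, assuming a `window: &Window`, a `cx: &App`, and a `Priority::Low` variant in scope (the variant name is an assumption, not taken from this diff):

// Hypothetical caller: run window-scoped async work at a lower scheduling priority.
let _task: Task<u32> = window.spawn_with_priority(Priority::Low, cx, async move |_cx| {
    // `_cx` is an AsyncWindowContext bound to this window; do the async work
    // here and hand the result back to whoever awaits the Task.
    42
});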

crates/image_viewer/src/image_info.rs 🔗

@@ -77,9 +77,7 @@ impl Render for ImageInfo {
             .to_string(),
         );
 
-        div().child(
-            Button::new("image-metadata", components.join(" • ")).label_size(LabelSize::Small),
-        )
+        div().child(Label::new(components.join(" • ")).size(LabelSize::Small))
     }
 }
 

crates/language/src/buffer.rs 🔗

@@ -22,8 +22,8 @@ pub use crate::{
     proto,
 };
 use anyhow::{Context as _, Result};
+use clock::Lamport;
 pub use clock::ReplicaId;
-use clock::{Global, Lamport};
 use collections::{HashMap, HashSet};
 use fs::MTime;
 use futures::channel::oneshot;
@@ -33,7 +33,7 @@ use gpui::{
 };
 
 use lsp::{LanguageServerId, NumberOrString};
-use parking_lot::{Mutex, RawMutex, lock_api::MutexGuard};
+use parking_lot::Mutex;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use settings::WorktreeId;
@@ -130,29 +130,37 @@ pub struct Buffer {
     has_unsaved_edits: Cell<(clock::Global, bool)>,
     change_bits: Vec<rc::Weak<Cell<bool>>>,
     _subscriptions: Vec<gpui::Subscription>,
-    tree_sitter_data: Arc<Mutex<TreeSitterData>>,
+    tree_sitter_data: Arc<TreeSitterData>,
 }
 
-#[derive(Debug, Clone)]
+#[derive(Debug)]
 pub struct TreeSitterData {
     chunks: RowChunks,
-    brackets_by_chunks: Vec<Option<Vec<BracketMatch<usize>>>>,
+    brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
 }
 
 const MAX_ROWS_IN_A_CHUNK: u32 = 50;
 
 impl TreeSitterData {
-    fn clear(&mut self) {
-        self.brackets_by_chunks = vec![None; self.chunks.len()];
+    fn clear(&mut self, snapshot: text::BufferSnapshot) {
+        self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
+        self.brackets_by_chunks.get_mut().clear();
+        self.brackets_by_chunks
+            .get_mut()
+            .resize(self.chunks.len(), None);
     }
 
     fn new(snapshot: text::BufferSnapshot) -> Self {
         let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
         Self {
-            brackets_by_chunks: vec![None; chunks.len()],
+            brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
             chunks,
         }
     }
+
+    fn version(&self) -> &clock::Global {
+        self.chunks.version()
+    }
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -176,7 +184,7 @@ pub struct BufferSnapshot {
     remote_selections: TreeMap<ReplicaId, SelectionSet>,
     language: Option<Arc<Language>>,
     non_text_state_update_count: usize,
-    tree_sitter_data: Arc<Mutex<TreeSitterData>>,
+    tree_sitter_data: Arc<TreeSitterData>,
 }
 
 /// The kind and amount of indentation in a particular line. For now,
@@ -1062,7 +1070,7 @@ impl Buffer {
         let tree_sitter_data = TreeSitterData::new(snapshot);
         Self {
             saved_mtime,
-            tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
+            tree_sitter_data: Arc::new(tree_sitter_data),
             saved_version: buffer.version(),
             preview_version: buffer.version(),
             reload_task: None,
@@ -1119,7 +1127,7 @@ impl Buffer {
                 file: None,
                 diagnostics: Default::default(),
                 remote_selections: Default::default(),
-                tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
+                tree_sitter_data: Arc::new(tree_sitter_data),
                 language,
                 non_text_state_update_count: 0,
             }
@@ -1141,7 +1149,7 @@ impl Buffer {
         BufferSnapshot {
             text,
             syntax,
-            tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
+            tree_sitter_data: Arc::new(tree_sitter_data),
             file: None,
             diagnostics: Default::default(),
             remote_selections: Default::default(),
@@ -1170,7 +1178,7 @@ impl Buffer {
         BufferSnapshot {
             text,
             syntax,
-            tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
+            tree_sitter_data: Arc::new(tree_sitter_data),
             file: None,
             diagnostics: Default::default(),
             remote_selections: Default::default(),
@@ -1187,10 +1195,16 @@ impl Buffer {
         syntax_map.interpolate(&text);
         let syntax = syntax_map.snapshot();
 
+        let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
+            Arc::new(TreeSitterData::new(text.clone()))
+        } else {
+            self.tree_sitter_data.clone()
+        };
+
         BufferSnapshot {
             text,
             syntax,
-            tree_sitter_data: self.tree_sitter_data.clone(),
+            tree_sitter_data,
             file: self.file.clone(),
             remote_selections: self.remote_selections.clone(),
             diagnostics: self.diagnostics.clone(),
@@ -1624,6 +1638,16 @@ impl Buffer {
         self.sync_parse_timeout = timeout;
     }
 
+    fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
+        match Arc::get_mut(&mut self.tree_sitter_data) {
+            Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
+            None => {
+                let tree_sitter_data = TreeSitterData::new(snapshot);
+                self.tree_sitter_data = Arc::new(tree_sitter_data)
+            }
+        }
+    }
+
     /// Called after an edit to synchronize the buffer's main parse tree with
     /// the buffer's new underlying state.
     ///
@@ -1648,6 +1672,9 @@ impl Buffer {
     /// for the same buffer, we only initiate a new parse if we are not already
     /// parsing in the background.
     pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
+        if self.text.version() != *self.tree_sitter_data.version() {
+            self.invalidate_tree_sitter_data(self.text.snapshot());
+        }
         if self.reparse.is_some() {
             return;
         }
@@ -1749,7 +1776,9 @@ impl Buffer {
         self.syntax_map.lock().did_parse(syntax_snapshot);
         self.request_autoindent(cx);
         self.parse_status.0.send(ParseStatus::Idle).unwrap();
-        self.tree_sitter_data.lock().clear();
+        if self.text.version() != *self.tree_sitter_data.version() {
+            self.invalidate_tree_sitter_data(self.text.snapshot());
+        }
         cx.emit(BufferEvent::Reparsed);
         cx.notify();
     }
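
`invalidate_tree_sitter_data` above leans on a standard `Arc` idiom: mutate the cached data in place when the buffer is its sole owner, otherwise swap in a fresh allocation so existing snapshots keep reading the data they were created with. A self-contained sketch of that idiom with illustrative types (not the real `TreeSitterData`):

use std::sync::Arc;

fn refresh(cache: &mut Arc<Vec<u64>>, rebuild: impl FnOnce() -> Vec<u64>) {
    match Arc::get_mut(cache) {
        // No snapshot holds a clone of the Arc: reuse the existing allocation.
        Some(data) => *data = rebuild(),
        // Snapshots still reference the old data: leave it intact for them and
        // point the owner at a newly built value instead.
        None => *cache = Arc::new(rebuild()),
    }
}

fn main() {
    let mut cache = Arc::new(vec![1, 2, 3]);
    let snapshot = cache.clone();
    refresh(&mut cache, || vec![4, 5, 6]); // a snapshot exists, so the `None` arm runs
    assert_eq!(*snapshot, vec![1, 2, 3]);
    assert_eq!(*cache, vec![4, 5, 6]);
}
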
@@ -4281,155 +4310,123 @@ impl BufferSnapshot {
     pub fn fetch_bracket_ranges(
         &self,
         range: Range<usize>,
-        known_chunks: Option<(&Global, &HashSet<Range<BufferRow>>)>,
+        known_chunks: Option<&HashSet<Range<BufferRow>>>,
     ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
-        let mut tree_sitter_data = self.latest_tree_sitter_data().clone();
-
-        let known_chunks = match known_chunks {
-            Some((known_version, known_chunks)) => {
-                if !tree_sitter_data
-                    .chunks
-                    .version()
-                    .changed_since(known_version)
-                {
-                    known_chunks.clone()
-                } else {
-                    HashSet::default()
-                }
-            }
-            None => HashSet::default(),
-        };
-
-        let mut new_bracket_matches = HashMap::default();
         let mut all_bracket_matches = HashMap::default();
 
-        for chunk in tree_sitter_data
+        for chunk in self
+            .tree_sitter_data
             .chunks
             .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
         {
-            if known_chunks.contains(&chunk.row_range()) {
+            if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
                 continue;
             }
-            let Some(chunk_range) = tree_sitter_data.chunks.chunk_range(chunk) else {
+            let Some(chunk_range) = self.tree_sitter_data.chunks.chunk_range(chunk) else {
                 continue;
             };
-            let chunk_range = chunk_range.to_offset(&tree_sitter_data.chunks.snapshot);
-
-            let bracket_matches = match tree_sitter_data.brackets_by_chunks[chunk.id].take() {
-                Some(cached_brackets) => cached_brackets,
-                None => {
-                    let mut all_brackets = Vec::new();
-                    let mut opens = Vec::new();
-                    let mut color_pairs = Vec::new();
-
-                    let mut matches =
-                        self.syntax
-                            .matches(chunk_range.clone(), &self.text, |grammar| {
-                                grammar.brackets_config.as_ref().map(|c| &c.query)
-                            });
-                    let configs = matches
-                        .grammars()
-                        .iter()
-                        .map(|grammar| grammar.brackets_config.as_ref().unwrap())
-                        .collect::<Vec<_>>();
-
-                    while let Some(mat) = matches.peek() {
-                        let mut open = None;
-                        let mut close = None;
-                        let syntax_layer_depth = mat.depth;
-                        let config = configs[mat.grammar_index];
-                        let pattern = &config.patterns[mat.pattern_index];
-                        for capture in mat.captures {
-                            if capture.index == config.open_capture_ix {
-                                open = Some(capture.node.byte_range());
-                            } else if capture.index == config.close_capture_ix {
-                                close = Some(capture.node.byte_range());
-                            }
-                        }
+            let chunk_range = chunk_range.to_offset(&self);
 
-                        matches.advance();
+            if let Some(cached_brackets) =
+                &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
+            {
+                all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
+                continue;
+            }
 
-                        let Some((open_range, close_range)) = open.zip(close) else {
-                            continue;
-                        };
+            let mut all_brackets = Vec::new();
+            let mut opens = Vec::new();
+            let mut color_pairs = Vec::new();
 
-                        let bracket_range = open_range.start..=close_range.end;
-                        if !bracket_range.overlaps(&chunk_range) {
-                            continue;
-                        }
+            let mut matches = self
+                .syntax
+                .matches(chunk_range.clone(), &self.text, |grammar| {
+                    grammar.brackets_config.as_ref().map(|c| &c.query)
+                });
+            let configs = matches
+                .grammars()
+                .iter()
+                .map(|grammar| grammar.brackets_config.as_ref().unwrap())
+                .collect::<Vec<_>>();
+
+            while let Some(mat) = matches.peek() {
+                let mut open = None;
+                let mut close = None;
+                let syntax_layer_depth = mat.depth;
+                let config = configs[mat.grammar_index];
+                let pattern = &config.patterns[mat.pattern_index];
+                for capture in mat.captures {
+                    if capture.index == config.open_capture_ix {
+                        open = Some(capture.node.byte_range());
+                    } else if capture.index == config.close_capture_ix {
+                        close = Some(capture.node.byte_range());
+                    }
+                }
 
-                        let index = all_brackets.len();
-                        all_brackets.push(BracketMatch {
-                            open_range: open_range.clone(),
-                            close_range: close_range.clone(),
-                            newline_only: pattern.newline_only,
-                            syntax_layer_depth,
-                            color_index: None,
-                        });
+                matches.advance();
 
-                        // Certain languages have "brackets" that are not brackets, e.g. tags. and such
-                        // bracket will match the entire tag with all text inside.
-                        // For now, avoid highlighting any pair that has more than single char in each bracket.
-                        // We need to  colorize `<Element/>` bracket pairs, so cannot make this check stricter.
-                        let should_color = !pattern.rainbow_exclude
-                            && (open_range.len() == 1 || close_range.len() == 1);
-                        if should_color {
-                            opens.push(open_range.clone());
-                            color_pairs.push((open_range, close_range, index));
-                        }
-                    }
+                let Some((open_range, close_range)) = open.zip(close) else {
+                    continue;
+                };
 
-                    opens.sort_by_key(|r| (r.start, r.end));
-                    opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
-                    color_pairs.sort_by_key(|(_, close, _)| close.end);
+                let bracket_range = open_range.start..=close_range.end;
+                if !bracket_range.overlaps(&chunk_range) {
+                    continue;
+                }
 
-                    let mut open_stack = Vec::new();
-                    let mut open_index = 0;
-                    for (open, close, index) in color_pairs {
-                        while open_index < opens.len() && opens[open_index].start < close.start {
-                            open_stack.push(opens[open_index].clone());
-                            open_index += 1;
-                        }
+                let index = all_brackets.len();
+                all_brackets.push(BracketMatch {
+                    open_range: open_range.clone(),
+                    close_range: close_range.clone(),
+                    newline_only: pattern.newline_only,
+                    syntax_layer_depth,
+                    color_index: None,
+                });
 
-                        if open_stack.last() == Some(&open) {
-                            let depth_index = open_stack.len() - 1;
-                            all_brackets[index].color_index = Some(depth_index);
-                            open_stack.pop();
-                        }
-                    }
+                // Certain languages have "brackets" that are not really brackets, e.g. tags, and such
+                // a bracket will match the entire tag with all of the text inside.
+                // For now, avoid highlighting any pair that has more than a single char in each bracket.
+                // We still need to colorize `<Element/>` bracket pairs, so this check cannot be made stricter.
+                let should_color =
+                    !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
+                if should_color {
+                    opens.push(open_range.clone());
+                    color_pairs.push((open_range, close_range, index));
+                }
+            }
 
-                    all_brackets.sort_by_key(|bracket_match| {
-                        (bracket_match.open_range.start, bracket_match.open_range.end)
-                    });
-                    new_bracket_matches.insert(chunk.id, all_brackets.clone());
-                    all_brackets
+            opens.sort_by_key(|r| (r.start, r.end));
+            opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
+            color_pairs.sort_by_key(|(_, close, _)| close.end);
+
+            let mut open_stack = Vec::new();
+            let mut open_index = 0;
+            for (open, close, index) in color_pairs {
+                while open_index < opens.len() && opens[open_index].start < close.start {
+                    open_stack.push(opens[open_index].clone());
+                    open_index += 1;
                 }
-            };
-            all_bracket_matches.insert(chunk.row_range(), bracket_matches);
-        }
 
-        let mut latest_tree_sitter_data = self.latest_tree_sitter_data();
-        if latest_tree_sitter_data.chunks.version() == &self.version {
-            for (chunk_id, new_matches) in new_bracket_matches {
-                let old_chunks = &mut latest_tree_sitter_data.brackets_by_chunks[chunk_id];
-                if old_chunks.is_none() {
-                    *old_chunks = Some(new_matches);
+                if open_stack.last() == Some(&open) {
+                    let depth_index = open_stack.len() - 1;
+                    all_brackets[index].color_index = Some(depth_index);
+                    open_stack.pop();
                 }
             }
-        }
 
-        all_bracket_matches
-    }
+            all_brackets.sort_by_key(|bracket_match| {
+                (bracket_match.open_range.start, bracket_match.open_range.end)
+            });
 
-    fn latest_tree_sitter_data(&self) -> MutexGuard<'_, RawMutex, TreeSitterData> {
-        let mut tree_sitter_data = self.tree_sitter_data.lock();
-        if self
-            .version
-            .changed_since(tree_sitter_data.chunks.version())
-        {
-            *tree_sitter_data = TreeSitterData::new(self.text.clone());
+            if let empty_slot @ None =
+                &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
+            {
+                *empty_slot = Some(all_brackets.clone());
+            }
+            all_bracket_matches.insert(chunk.row_range(), all_brackets);
         }
-        tree_sitter_data
+
+        all_bracket_matches
     }
 
     pub fn all_bracket_ranges(
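
The rewritten `fetch_bracket_ranges` above is essentially a read-through cache keyed by chunk: look up the per-chunk slot under the mutex, compute the brackets when the slot is empty, and store the result only if the slot is still unoccupied. A minimal sketch of that pattern using `std::sync::Mutex` and illustrative types (not the real `TreeSitterData`):

use std::sync::Mutex;

struct ChunkCache {
    // One optional, lazily computed result per chunk.
    slots: Mutex<Vec<Option<Vec<u32>>>>,
}

impl ChunkCache {
    fn fetch(&self, chunk_id: usize, compute: impl FnOnce() -> Vec<u32>) -> Vec<u32> {
        // Fast path: clone the cached value if a previous caller already filled it.
        if let Some(cached) = &self.slots.lock().unwrap()[chunk_id] {
            return cached.clone();
        }
        // Slow path: compute without holding the lock, then fill the slot only
        // if it is still empty (a concurrent caller may have beaten us to it).
        let computed = compute();
        if let slot @ None = &mut self.slots.lock().unwrap()[chunk_id] {
            *slot = Some(computed.clone());
        }
        computed
    }
}

fn main() {
    let cache = ChunkCache { slots: Mutex::new(vec![None; 4]) };
    assert_eq!(cache.fetch(2, || vec![7]), vec![7]); // computed and cached
    assert_eq!(cache.fetch(2, || unreachable!()), vec![7]); // served from the cache
}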

crates/language/src/buffer/row_chunk.rs 🔗

@@ -19,7 +19,7 @@ use crate::BufferRow;
 /// <https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#inlayHintParams>
 #[derive(Clone)]
 pub struct RowChunks {
-    pub(crate) snapshot: text::BufferSnapshot,
+    snapshot: text::BufferSnapshot,
     chunks: Arc<[RowChunk]>,
 }
 

crates/language/src/language.rs 🔗

@@ -535,7 +535,7 @@ pub trait LspInstaller {
         _version: &Self::BinaryVersion,
         _container_dir: &PathBuf,
         _delegate: &dyn LspAdapterDelegate,
-    ) -> impl Future<Output = Option<LanguageServerBinary>> {
+    ) -> impl Send + Future<Output = Option<LanguageServerBinary>> {
         async { None }
     }
 
@@ -544,7 +544,7 @@ pub trait LspInstaller {
         latest_version: Self::BinaryVersion,
         container_dir: PathBuf,
         delegate: &dyn LspAdapterDelegate,
-    ) -> impl Future<Output = Result<LanguageServerBinary>>;
+    ) -> impl Send + Future<Output = Result<LanguageServerBinary>>;
 
     fn cached_server_binary(
         &self,
@@ -575,6 +575,7 @@ pub trait DynLspInstaller {
 #[async_trait(?Send)]
 impl<LI, BinaryVersion> DynLspInstaller for LI
 where
+    BinaryVersion: Send + Sync,
     LI: LspInstaller<BinaryVersion = BinaryVersion> + LspAdapter,
 {
     async fn try_fetch_server_binary(
@@ -593,8 +594,13 @@ where
             .fetch_latest_server_version(delegate.as_ref(), pre_release, cx)
             .await?;
 
-        if let Some(binary) = self
-            .check_if_version_installed(&latest_version, &container_dir, delegate.as_ref())
+        if let Some(binary) = cx
+            .background_executor()
+            .await_on_background(self.check_if_version_installed(
+                &latest_version,
+                &container_dir,
+                delegate.as_ref(),
+            ))
             .await
         {
             log::debug!("language server {:?} is already installed", name.0);
@@ -603,8 +609,13 @@ where
         } else {
             log::debug!("downloading language server {:?}", name.0);
             delegate.update_status(name.clone(), BinaryStatus::Downloading);
-            let binary = self
-                .fetch_server_binary(latest_version, container_dir, delegate.as_ref())
+            let binary = cx
+                .background_executor()
+                .await_on_background(self.fetch_server_binary(
+                    latest_version,
+                    container_dir,
+                    delegate.as_ref(),
+                ))
                 .await;
 
             delegate.update_status(name.clone(), BinaryStatus::None);
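
The `+ Send` bounds added to the `LspInstaller` futures are what allow the install and fetch steps above to be awaited on the background executor: a future can only be polled from another thread if it is `Send`. A stand-in sketch of that constraint using a plain thread and `futures::executor::block_on` instead of the gpui executor (and requiring `'static` here, which the in-tree helper does not):

use std::future::Future;
use std::thread;

// Mirrors the new bounds: handing a future to another thread requires Send.
fn offload<F>(fut: F) -> thread::JoinHandle<F::Output>
where
    F: Future + Send + 'static,
    F::Output: Send + 'static,
{
    thread::spawn(move || futures::executor::block_on(fut))
}

fn main() {
    // Without the Send bound on the future (and therefore on the trait methods
    // producing it), this handoff would not compile.
    let path = offload(async { "/tmp/example-lsp".to_string() }).join().unwrap();
    assert_eq!(path, "/tmp/example-lsp");
}
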

crates/language_models/src/provider/open_ai.rs 🔗

@@ -278,6 +278,7 @@ impl LanguageModel for OpenAiLanguageModel {
             | Model::FiveMini
             | Model::FiveNano
             | Model::FivePointOne
+            | Model::FivePointTwo
             | Model::O1
             | Model::O3
             | Model::O4Mini => true,
@@ -675,8 +676,11 @@ pub fn count_open_ai_tokens(
             | Model::O4Mini
             | Model::Five
             | Model::FiveMini
-            | Model::FiveNano => tiktoken_rs::num_tokens_from_messages(model.id(), &messages), // GPT-5.1 doesn't have tiktoken support yet; fall back on gpt-4o tokenizer
-            Model::FivePointOne => tiktoken_rs::num_tokens_from_messages("gpt-5", &messages),
+            | Model::FiveNano => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
+            // GPT-5.1 and 5.2 don't have dedicated tiktoken support yet; fall back to the gpt-5 tokenizer
+            Model::FivePointOne | Model::FivePointTwo => {
+                tiktoken_rs::num_tokens_from_messages("gpt-5", &messages)
+            }
         }
         .map(|tokens| tokens as u64)
     })

crates/languages/src/javascript/highlights.scm 🔗

@@ -2,6 +2,40 @@
 
 (identifier) @variable
 
+(call_expression
+  function: (member_expression
+    object: (identifier) @type.builtin
+    (#any-of?
+      @type.builtin
+      "Promise"
+      "Array"
+      "Object"
+      "Map"
+      "Set"
+      "WeakMap"
+      "WeakSet"
+      "Date"
+      "Error"
+      "TypeError"
+      "RangeError"
+      "SyntaxError"
+      "ReferenceError"
+      "EvalError"
+      "URIError"
+      "RegExp"
+      "Function"
+      "Number"
+      "String"
+      "Boolean"
+      "Symbol"
+      "BigInt"
+      "Proxy"
+      "ArrayBuffer"
+      "DataView"
+    )
+  )
+)
+
 ; Properties
 
 (property_identifier) @property
@@ -18,6 +52,12 @@
   function: (member_expression
       property: [(property_identifier) (private_property_identifier)] @function.method))
 
+(new_expression
+  constructor: (identifier) @type)
+
+(nested_type_identifier
+  module: (identifier) @type)
+
 ; Function and method definitions
 
 (function_expression
@@ -47,10 +87,45 @@
   left: (identifier) @function
   right: [(function_expression) (arrow_function)])
 
+; Parameters
+
+(required_parameter
+  (identifier) @variable.parameter)
+
+(required_parameter
+  (_
+    ([
+      (identifier)
+      (shorthand_property_identifier_pattern)
+    ]) @variable.parameter))
+
+(optional_parameter
+  (identifier) @variable.parameter)
+
+(optional_parameter
+  (_
+    ([
+      (identifier)
+      (shorthand_property_identifier_pattern)
+    ]) @variable.parameter))
+
+(catch_clause
+  parameter: (identifier) @variable.parameter)
+
+(index_signature
+  name: (identifier) @variable.parameter)
+
+(arrow_function
+  parameter: (identifier) @variable.parameter)
+
 ; Special identifiers
+;
+(class_declaration
+  (type_identifier) @type.class)
+
+(extends_clause
+  value: (identifier) @type.class)
 
-((identifier) @type
- (#match? @type "^[A-Z]"))
 (type_identifier) @type
 (predefined_type) @type.builtin
 
@@ -251,6 +326,34 @@
 (jsx_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$"))
 (jsx_self_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$"))
 
+(jsx_opening_element
+  [
+    (identifier) @type
+    (member_expression
+      object: (identifier) @type
+      property: (property_identifier) @type
+    )
+  ]
+)
+(jsx_closing_element
+  [
+    (identifier) @type
+    (member_expression
+      object: (identifier) @type
+      property: (property_identifier) @type
+    )
+  ]
+)
+(jsx_self_closing_element
+  [
+    (identifier) @type
+    (member_expression
+      object: (identifier) @type
+      property: (property_identifier) @type
+    )
+  ]
+)
+
 (jsx_attribute (property_identifier) @attribute.jsx)
 (jsx_opening_element (["<" ">"]) @punctuation.bracket.jsx)
 (jsx_closing_element (["</" ">"]) @punctuation.bracket.jsx)

crates/languages/src/markdown/config.toml 🔗

@@ -24,5 +24,9 @@ rewrap_prefixes = [
 auto_indent_on_paste = false
 auto_indent_using_last_non_empty_line = false
 tab_size = 2
-decrease_indent_pattern = "^.*$"
+decrease_indent_patterns = [
+  { pattern = "^\\s*-", valid_after = ["list_item"] },
+  { pattern = "^\\s*\\d", valid_after = ["list_item"] },
+  { pattern = "^\\s*", valid_after = ["list_item"] },
+]
 prettier_parser_name = "markdown"

crates/languages/src/rust.rs 🔗

@@ -882,7 +882,7 @@ impl ContextProvider for RustContextProvider {
                     RUST_BIN_REQUIRED_FEATURES_FLAG_TASK_VARIABLE.template_value(),
                     RUST_BIN_REQUIRED_FEATURES_TASK_VARIABLE.template_value(),
                 ],
-                cwd: Some("$ZED_DIRNAME".to_owned()),
+                cwd: Some(RUST_MANIFEST_DIRNAME_TASK_VARIABLE.template_value()),
                 tags: vec!["rust-main".to_owned()],
                 ..TaskTemplate::default()
             },
@@ -904,14 +904,14 @@ impl ContextProvider for RustContextProvider {
                 label: "Run".into(),
                 command: "cargo".into(),
                 args: run_task_args,
-                cwd: Some("$ZED_DIRNAME".to_owned()),
+                cwd: Some(RUST_MANIFEST_DIRNAME_TASK_VARIABLE.template_value()),
                 ..TaskTemplate::default()
             },
             TaskTemplate {
                 label: "Clean".into(),
                 command: "cargo".into(),
                 args: vec!["clean".into()],
-                cwd: Some("$ZED_DIRNAME".to_owned()),
+                cwd: Some(RUST_MANIFEST_DIRNAME_TASK_VARIABLE.template_value()),
                 ..TaskTemplate::default()
             },
         ];

crates/languages/src/tsx/highlights.scm 🔗

@@ -2,6 +2,40 @@
 
 (identifier) @variable
 
+(call_expression
+  function: (member_expression
+    object: (identifier) @type.builtin
+    (#any-of?
+      @type.builtin
+      "Promise"
+      "Array"
+      "Object"
+      "Map"
+      "Set"
+      "WeakMap"
+      "WeakSet"
+      "Date"
+      "Error"
+      "TypeError"
+      "RangeError"
+      "SyntaxError"
+      "ReferenceError"
+      "EvalError"
+      "URIError"
+      "RegExp"
+      "Function"
+      "Number"
+      "String"
+      "Boolean"
+      "Symbol"
+      "BigInt"
+      "Proxy"
+      "ArrayBuffer"
+      "DataView"
+    )
+  )
+)
+
 ; Properties
 
 (property_identifier) @property
@@ -18,6 +52,12 @@
   function: (member_expression
     property: [(property_identifier) (private_property_identifier)] @function.method))
 
+(new_expression
+  constructor: (identifier) @type)
+
+(nested_type_identifier
+  module: (identifier) @type)
+
 ; Function and method definitions
 
 (function_expression
@@ -47,13 +87,68 @@
   left: (identifier) @function
   right: [(function_expression) (arrow_function)])
 
+; Parameters
+
+(required_parameter
+  (identifier) @variable.parameter)
+
+(required_parameter
+  (_
+    ([
+      (identifier)
+      (shorthand_property_identifier_pattern)
+    ]) @variable.parameter))
+
+(optional_parameter
+  (identifier) @variable.parameter)
+
+(optional_parameter
+  (_
+    ([
+      (identifier)
+      (shorthand_property_identifier_pattern)
+    ]) @variable.parameter))
+
+(catch_clause
+  parameter: (identifier) @variable.parameter)
+
+(index_signature
+  name: (identifier) @variable.parameter)
+
+(arrow_function
+  parameter: (identifier) @variable.parameter)
+
+(type_predicate
+  name: (identifier) @variable.parameter)
+
 ; Special identifiers
 
-((identifier) @type
- (#match? @type "^[A-Z]"))
+(type_annotation) @type
 (type_identifier) @type
 (predefined_type) @type.builtin
 
+(type_alias_declaration
+  (type_identifier) @type)
+
+(type_alias_declaration
+  value: (_
+    (type_identifier) @type))
+
+(interface_declaration
+  (type_identifier) @type)
+
+(class_declaration
+  (type_identifier) @type.class)
+
+(extends_clause
+  value: (identifier) @type.class)
+
+(extends_type_clause
+  type: (type_identifier) @type)
+
+(implements_clause
+  (type_identifier) @type)
+
 ([
   (identifier)
   (shorthand_property_identifier)
@@ -231,8 +326,42 @@
   "<" @punctuation.bracket
   ">" @punctuation.bracket)
 
+(type_parameters
+  "<" @punctuation.bracket
+  ">" @punctuation.bracket)
+
 (decorator "@" @punctuation.special)
 
+(union_type
+  ("|") @punctuation.special)
+
+(intersection_type
+  ("&") @punctuation.special)
+
+(type_annotation
+  (":") @punctuation.special)
+
+(index_signature
+  (":") @punctuation.special)
+
+(type_predicate_annotation
+  (":") @punctuation.special)
+
+(public_field_definition
+  ("?") @punctuation.special)
+
+(property_signature
+  ("?") @punctuation.special)
+
+(method_signature
+  ("?") @punctuation.special)
+
+(optional_parameter
+  ([
+    "?"
+    ":"
+  ]) @punctuation.special)
+
 ; Keywords
 
 [ "abstract"
@@ -257,6 +386,34 @@
 (jsx_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$"))
 (jsx_self_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$"))
 
+(jsx_opening_element
+  [
+    (identifier) @type
+    (member_expression
+      object: (identifier) @type
+      property: (property_identifier) @type
+    )
+  ]
+)
+(jsx_closing_element
+  [
+    (identifier) @type
+    (member_expression
+      object: (identifier) @type
+      property: (property_identifier) @type
+    )
+  ]
+)
+(jsx_self_closing_element
+  [
+    (identifier) @type
+    (member_expression
+      object: (identifier) @type
+      property: (property_identifier) @type
+    )
+  ]
+)
+
 (jsx_attribute (property_identifier) @attribute.jsx)
 (jsx_opening_element (["<" ">"]) @punctuation.bracket.jsx)
 (jsx_closing_element (["</" ">"]) @punctuation.bracket.jsx)

crates/languages/src/typescript/highlights.scm 🔗

@@ -2,13 +2,69 @@
 
 (identifier) @variable
 
+(call_expression
+  function: (member_expression
+    object: (identifier) @type.builtin
+    (#any-of?
+      @type.builtin
+      "Promise"
+      "Array"
+      "Object"
+      "Map"
+      "Set"
+      "WeakMap"
+      "WeakSet"
+      "Date"
+      "Error"
+      "TypeError"
+      "RangeError"
+      "SyntaxError"
+      "ReferenceError"
+      "EvalError"
+      "URIError"
+      "RegExp"
+      "Function"
+      "Number"
+      "String"
+      "Boolean"
+      "Symbol"
+      "BigInt"
+      "Proxy"
+      "ArrayBuffer"
+      "DataView"
+    )
+  )
+)
+
 ; Special identifiers
 
-((identifier) @type
- (#match? @type "^[A-Z]"))
+(type_annotation) @type
+
 (type_identifier) @type
 (predefined_type) @type.builtin
 
+(type_alias_declaration
+  (type_identifier) @type)
+
+(type_alias_declaration
+  value: (_
+    (type_identifier) @type))
+
+(interface_declaration
+  (type_identifier) @type)
+
+(class_declaration
+  (type_identifier) @type.class)
+
+(extends_clause
+  value: (identifier) @type.class)
+
+(extends_type_clause
+  type: (type_identifier) @type)
+
+(implements_clause
+  (type_identifier) @type)
+
 ;; Enables ts-pretty-errors
 ;; The Lsp returns "snippets" of typescript, which are not valid typescript in totality,
 ;; but should still be highlighted
@@ -83,6 +139,12 @@
   function: (member_expression
     property: [(property_identifier) (private_property_identifier)] @function.method))
 
+(new_expression
+  constructor: (identifier) @type)
+
+(nested_type_identifier
+  module: (identifier) @type)
+
 ; Function and method definitions
 
 (function_expression
@@ -114,6 +176,40 @@
 
 (arrow_function) @function
 
+; Parameters
+
+(required_parameter
+  (identifier) @variable.parameter)
+
+(required_parameter
+  (_
+    ([
+      (identifier)
+      (shorthand_property_identifier_pattern)
+    ]) @variable.parameter))
+
+(optional_parameter
+  (identifier) @variable.parameter)
+
+(optional_parameter
+  (_
+    ([
+      (identifier)
+      (shorthand_property_identifier_pattern)
+    ]) @variable.parameter))
+
+(catch_clause
+  parameter: (identifier) @variable.parameter)
+
+(index_signature
+  name: (identifier) @variable.parameter)
+
+(arrow_function
+  parameter: (identifier) @variable.parameter)
+
+(type_predicate
+  name: (identifier) @variable.parameter)
+
 ; Literals
 
 (this) @variable.special
@@ -244,8 +340,42 @@
   "<" @punctuation.bracket
   ">" @punctuation.bracket)
 
+(type_parameters
+  "<" @punctuation.bracket
+  ">" @punctuation.bracket)
+
 (decorator "@" @punctuation.special)
 
+(union_type
+  ("|") @punctuation.special)
+
+(intersection_type
+  ("&") @punctuation.special)
+
+(type_annotation
+  (":") @punctuation.special)
+
+(index_signature
+  (":") @punctuation.special)
+
+(type_predicate_annotation
+  (":") @punctuation.special)
+
+(public_field_definition
+  ("?") @punctuation.special)
+
+(property_signature
+  ("?") @punctuation.special)
+
+(method_signature
+  ("?") @punctuation.special)
+
+(optional_parameter
+  ([
+    "?"
+    ":"
+  ]) @punctuation.special)
+
 ; Keywords
 
 [

crates/lsp/src/lsp.rs 🔗

@@ -331,14 +331,13 @@ impl LanguageServer {
         };
         let root_uri = Uri::from_file_path(&working_dir)
             .map_err(|()| anyhow!("{working_dir:?} is not a valid URI"))?;
-
         log::info!(
-            "starting language server process. binary path: {:?}, working directory: {:?}, args: {:?}",
+            "starting language server process. binary path: \
+            {:?}, working directory: {:?}, args: {:?}",
             binary.path,
             working_dir,
             &binary.arguments
         );
-
         let mut command = util::command::new_smol_command(&binary.path);
         command
             .current_dir(working_dir)
@@ -348,6 +347,7 @@ impl LanguageServer {
             .stdout(Stdio::piped())
             .stderr(Stdio::piped())
             .kill_on_drop(true);
+
         let mut server = command
             .spawn()
             .with_context(|| format!("failed to spawn command {command:?}",))?;

crates/markdown_preview/src/markdown_preview.rs 🔗

@@ -11,9 +11,19 @@ actions!(
     markdown,
     [
         /// Scrolls up by one page in the markdown preview.
-        MovePageUp,
+        #[action(deprecated_aliases = ["markdown::MovePageUp"])]
+        ScrollPageUp,
         /// Scrolls down by one page in the markdown preview.
-        MovePageDown,
+        #[action(deprecated_aliases = ["markdown::MovePageDown"])]
+        ScrollPageDown,
+        /// Scrolls up by approximately one visual line.
+        ScrollUp,
+        /// Scrolls down by approximately one visual line.
+        ScrollDown,
+        /// Scrolls up by one markdown element in the markdown preview.
+        ScrollUpByItem,
+        /// Scrolls down by one markdown element in the markdown preview.
+        ScrollDownByItem,
         /// Opens a markdown preview for the current file.
         OpenPreview,
         /// Opens a markdown preview in a split pane.

crates/markdown_preview/src/markdown_preview_view.rs 🔗

@@ -1,3 +1,4 @@
+use std::cmp::min;
 use std::sync::Arc;
 use std::time::Duration;
 use std::{ops::Range, path::PathBuf};
@@ -20,11 +21,12 @@ use workspace::{Pane, Workspace};
 use crate::markdown_elements::ParsedMarkdownElement;
 use crate::markdown_renderer::CheckboxClickedEvent;
 use crate::{
-    MovePageDown, MovePageUp, OpenFollowingPreview, OpenPreview, OpenPreviewToTheSide,
+    OpenFollowingPreview, OpenPreview, OpenPreviewToTheSide, ScrollPageDown, ScrollPageUp,
     markdown_elements::ParsedMarkdown,
     markdown_parser::parse_markdown,
     markdown_renderer::{RenderContext, render_markdown_block},
 };
+use crate::{ScrollDown, ScrollDownByItem, ScrollUp, ScrollUpByItem};
 
 const REPARSE_DEBOUNCE: Duration = Duration::from_millis(200);
 
@@ -425,7 +427,7 @@ impl MarkdownPreviewView {
         !(current_block.is_list_item() && next_block.map(|b| b.is_list_item()).unwrap_or(false))
     }
 
-    fn scroll_page_up(&mut self, _: &MovePageUp, _window: &mut Window, cx: &mut Context<Self>) {
+    fn scroll_page_up(&mut self, _: &ScrollPageUp, _window: &mut Window, cx: &mut Context<Self>) {
         let viewport_height = self.list_state.viewport_bounds().size.height;
         if viewport_height.is_zero() {
             return;
@@ -435,7 +437,12 @@ impl MarkdownPreviewView {
         cx.notify();
     }
 
-    fn scroll_page_down(&mut self, _: &MovePageDown, _window: &mut Window, cx: &mut Context<Self>) {
+    fn scroll_page_down(
+        &mut self,
+        _: &ScrollPageDown,
+        _window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
         let viewport_height = self.list_state.viewport_bounds().size.height;
         if viewport_height.is_zero() {
             return;
@@ -444,6 +451,56 @@ impl MarkdownPreviewView {
         self.list_state.scroll_by(viewport_height);
         cx.notify();
     }
+
+    fn scroll_up(&mut self, _: &ScrollUp, window: &mut Window, cx: &mut Context<Self>) {
+        let scroll_top = self.list_state.logical_scroll_top();
+        if let Some(bounds) = self.list_state.bounds_for_item(scroll_top.item_ix) {
+            let item_height = bounds.size.height;
+            // Scroll no more than the rough equivalent of a large headline
+            let max_height = window.rem_size() * 2;
+            let scroll_height = min(item_height, max_height);
+            self.list_state.scroll_by(-scroll_height);
+        }
+        cx.notify();
+    }
+
+    fn scroll_down(&mut self, _: &ScrollDown, window: &mut Window, cx: &mut Context<Self>) {
+        let scroll_top = self.list_state.logical_scroll_top();
+        if let Some(bounds) = self.list_state.bounds_for_item(scroll_top.item_ix) {
+            let item_height = bounds.size.height;
+            // Scroll no more than the rough equivalent of a large headline
+            let max_height = window.rem_size() * 2;
+            let scroll_height = min(item_height, max_height);
+            self.list_state.scroll_by(scroll_height);
+        }
+        cx.notify();
+    }
+
+    fn scroll_up_by_item(
+        &mut self,
+        _: &ScrollUpByItem,
+        _window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let scroll_top = self.list_state.logical_scroll_top();
+        if let Some(bounds) = self.list_state.bounds_for_item(scroll_top.item_ix) {
+            self.list_state.scroll_by(-bounds.size.height);
+        }
+        cx.notify();
+    }
+
+    fn scroll_down_by_item(
+        &mut self,
+        _: &ScrollDownByItem,
+        _window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let scroll_top = self.list_state.logical_scroll_top();
+        if let Some(bounds) = self.list_state.bounds_for_item(scroll_top.item_ix) {
+            self.list_state.scroll_by(bounds.size.height);
+        }
+        cx.notify();
+    }
 }
 
 impl Focusable for MarkdownPreviewView {
@@ -496,6 +553,10 @@ impl Render for MarkdownPreviewView {
             .track_focus(&self.focus_handle(cx))
             .on_action(cx.listener(MarkdownPreviewView::scroll_page_up))
             .on_action(cx.listener(MarkdownPreviewView::scroll_page_down))
+            .on_action(cx.listener(MarkdownPreviewView::scroll_up))
+            .on_action(cx.listener(MarkdownPreviewView::scroll_down))
+            .on_action(cx.listener(MarkdownPreviewView::scroll_up_by_item))
+            .on_action(cx.listener(MarkdownPreviewView::scroll_down_by_item))
             .size_full()
             .bg(cx.theme().colors().editor_background)
             .p_4()

crates/markdown_preview/src/markdown_renderer.rs 🔗

@@ -75,8 +75,10 @@ impl RenderContext {
 
         let settings = ThemeSettings::get_global(cx);
         let buffer_font_family = settings.buffer_font.family.clone();
+        let buffer_font_features = settings.buffer_font.features.clone();
         let mut buffer_text_style = window.text_style();
         buffer_text_style.font_family = buffer_font_family.clone();
+        buffer_text_style.font_features = buffer_font_features;
         buffer_text_style.font_size = AbsoluteLength::from(settings.buffer_font_size(cx));
 
         RenderContext {
@@ -631,8 +633,14 @@ fn render_markdown_code_block(
         .tooltip(Tooltip::text("Copy code block"))
         .visible_on_hover("markdown-block");
 
+    let font = gpui::Font {
+        family: cx.buffer_font_family.clone(),
+        features: cx.buffer_text_style.font_features.clone(),
+        ..Default::default()
+    };
+
     cx.with_common_p(div())
-        .font_family(cx.buffer_font_family.clone())
+        .font(font)
         .px_3()
         .py_3()
         .bg(cx.code_block_background_color)

crates/multi_buffer/src/multi_buffer.rs 🔗

@@ -1202,6 +1202,7 @@ impl MultiBuffer {
     }
 
     /// Returns an up-to-date snapshot of the MultiBuffer.
+    #[ztracing::instrument(skip_all)]
     pub fn snapshot(&self, cx: &App) -> MultiBufferSnapshot {
         self.sync(cx);
         self.snapshot.borrow().clone()
@@ -1927,6 +1928,7 @@ impl MultiBuffer {
         cx.notify();
     }
 
+    #[ztracing::instrument(skip_all)]
     pub fn excerpts_for_buffer(
         &self,
         buffer_id: BufferId,
@@ -2887,6 +2889,7 @@ impl MultiBuffer {
         cx.notify();
     }
 
+    #[ztracing::instrument(skip_all)]
     fn sync(&self, cx: &App) {
         let changed = self.buffer_changed_since_sync.replace(false);
         if !changed {
@@ -5627,6 +5630,7 @@ impl MultiBufferSnapshot {
     /// excerpt
     ///
     /// Can optionally pass a range_filter to filter the ranges of brackets to consider
+    #[ztracing::instrument(skip_all)]
     pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
         &self,
         range: Range<T>,

crates/open_ai/src/open_ai.rs 🔗

@@ -87,6 +87,8 @@ pub enum Model {
     FiveNano,
     #[serde(rename = "gpt-5.1")]
     FivePointOne,
+    #[serde(rename = "gpt-5.2")]
+    FivePointTwo,
     #[serde(rename = "custom")]
     Custom {
         name: String,
@@ -123,6 +125,7 @@ impl Model {
             "gpt-5-mini" => Ok(Self::FiveMini),
             "gpt-5-nano" => Ok(Self::FiveNano),
             "gpt-5.1" => Ok(Self::FivePointOne),
+            "gpt-5.2" => Ok(Self::FivePointTwo),
             invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"),
         }
     }
@@ -145,6 +148,7 @@ impl Model {
             Self::FiveMini => "gpt-5-mini",
             Self::FiveNano => "gpt-5-nano",
             Self::FivePointOne => "gpt-5.1",
+            Self::FivePointTwo => "gpt-5.2",
             Self::Custom { name, .. } => name,
         }
     }
@@ -167,6 +171,7 @@ impl Model {
             Self::FiveMini => "gpt-5-mini",
             Self::FiveNano => "gpt-5-nano",
             Self::FivePointOne => "gpt-5.1",
+            Self::FivePointTwo => "gpt-5.2",
             Self::Custom {
                 name, display_name, ..
             } => display_name.as_ref().unwrap_or(name),
@@ -191,6 +196,7 @@ impl Model {
             Self::FiveMini => 272_000,
             Self::FiveNano => 272_000,
             Self::FivePointOne => 400_000,
+            Self::FivePointTwo => 400_000,
             Self::Custom { max_tokens, .. } => *max_tokens,
         }
     }
@@ -216,6 +222,7 @@ impl Model {
             Self::FiveMini => Some(128_000),
             Self::FiveNano => Some(128_000),
             Self::FivePointOne => Some(128_000),
+            Self::FivePointTwo => Some(128_000),
         }
     }
 
@@ -244,6 +251,7 @@ impl Model {
             | Self::Five
             | Self::FiveMini
             | Self::FivePointOne
+            | Self::FivePointTwo
             | Self::FiveNano => true,
             Self::O1 | Self::O3 | Self::O3Mini | Self::O4Mini | Model::Custom { .. } => false,
         }

crates/project/src/buffer_store.rs 🔗

@@ -24,7 +24,7 @@ use rpc::{
 
 use std::{io, sync::Arc, time::Instant};
 use text::{BufferId, ReplicaId};
-use util::{ResultExt as _, TryFutureExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
+use util::{ResultExt as _, TryFutureExt, debug_panic, maybe, rel_path::RelPath};
 use worktree::{File, PathChange, ProjectEntryId, Worktree, WorktreeId};
 
 /// A set of open buffers.
@@ -620,21 +620,7 @@ impl LocalBufferStore {
         let load_file = worktree.update(cx, |worktree, cx| worktree.load_file(path.as_ref(), cx));
         cx.spawn(async move |this, cx| {
             let path = path.clone();
-            let single_file_path = cx.update(|cx| {
-                if worktree.read(cx).is_single_file() {
-                    Some(worktree.read(cx).abs_path())
-                } else {
-                    None
-                }
-            })?;
-            let path_string = single_file_path
-                .as_ref()
-                .map(|path| path.to_string_lossy())
-                .unwrap_or_else(|| path.display(PathStyle::local()));
-            let buffer = match load_file
-                .await
-                .with_context(|| format!("Opening path \"{path_string}\""))
-            {
+            let buffer = match load_file.await {
                 Ok(loaded) => {
                     let reservation = cx.reserve_entity::<Buffer>()?;
                     let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());

crates/project/src/debugger/locators/cargo.rs 🔗

@@ -115,18 +115,17 @@ impl DapLocator for CargoLocator {
             .clone()
             .context("Couldn't get cwd from debug config which is needed for locators")?;
         let builder = ShellBuilder::new(&build_config.shell, cfg!(windows)).non_interactive();
-        let (program, args) = builder.build(
-            Some("cargo".into()),
-            &build_config
-                .args
-                .iter()
-                .cloned()
-                .take_while(|arg| arg != "--")
-                .chain(Some("--message-format=json".to_owned()))
-                .collect::<Vec<_>>(),
-        );
-        let mut child = util::command::new_smol_command(program)
-            .args(args)
+        let mut child = builder
+            .build_command(
+                Some("cargo".into()),
+                &build_config
+                    .args
+                    .iter()
+                    .cloned()
+                    .take_while(|arg| arg != "--")
+                    .chain(Some("--message-format=json".to_owned()))
+                    .collect::<Vec<_>>(),
+            )
             .envs(build_config.env.iter().map(|(k, v)| (k.clone(), v.clone())))
             .current_dir(cwd)
             .stdout(Stdio::piped())

crates/project/src/git_store.rs 🔗

@@ -1031,6 +1031,7 @@ impl GitStore {
             Some(version) => buffer.rope_for_version(version),
             None => buffer.as_rope().clone(),
         };
+        let line_ending = buffer.line_ending();
         let version = version.unwrap_or(buffer.version());
         let buffer_id = buffer.remote_id();
 
@@ -1042,7 +1043,7 @@ impl GitStore {
                 .map_err(|err| anyhow::anyhow!(err))?;
             match repository_state {
                 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
-                    .blame(repo_path.clone(), content)
+                    .blame(repo_path.clone(), content, line_ending)
                     .await
                     .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                     .map(Some),

crates/project/src/lsp_store.rs 🔗

@@ -201,7 +201,10 @@ pub enum LspFormatTarget {
     Ranges(BTreeMap<BufferId, Vec<Range<Anchor>>>),
 }
 
-pub type OpenLspBufferHandle = Entity<Entity<Buffer>>;
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct OpenLspBufferHandle(Entity<OpenLspBuffer>);
+
+struct OpenLspBuffer(Entity<Buffer>);
 
 impl FormatTrigger {
     fn from_proto(value: i32) -> FormatTrigger {
@@ -4207,7 +4210,7 @@ impl LspStore {
         cx: &mut Context<Self>,
     ) -> OpenLspBufferHandle {
         let buffer_id = buffer.read(cx).remote_id();
-        let handle = cx.new(|_| buffer.clone());
+        let handle = OpenLspBufferHandle(cx.new(|_| OpenLspBuffer(buffer.clone())));
         if let Some(local) = self.as_local_mut() {
             let refcount = local.registered_buffers.entry(buffer_id).or_insert(0);
             if !ignore_refcounts {
@@ -4229,7 +4232,7 @@ impl LspStore {
                 local.register_buffer_with_language_servers(buffer, only_register_servers, cx);
             }
             if !ignore_refcounts {
-                cx.observe_release(&handle, move |lsp_store, buffer, cx| {
+                cx.observe_release(&handle.0, move |lsp_store, buffer, cx| {
                     let refcount = {
                         let local = lsp_store.as_local_mut().unwrap();
                         let Some(refcount) = local.registered_buffers.get_mut(&buffer_id) else {
@@ -4246,8 +4249,8 @@ impl LspStore {
                         local.registered_buffers.remove(&buffer_id);
 
                         local.buffers_opened_in_servers.remove(&buffer_id);
-                        if let Some(file) = File::from_dyn(buffer.read(cx).file()).cloned() {
-                            local.unregister_old_buffer_from_language_servers(buffer, &file, cx);
+                        if let Some(file) = File::from_dyn(buffer.0.read(cx).file()).cloned() {
+                            local.unregister_old_buffer_from_language_servers(&buffer.0, &file, cx);
 
                             let buffer_abs_path = file.abs_path(cx);
                             for (_, buffer_pull_diagnostics_result_ids) in
@@ -6782,7 +6785,7 @@ impl LspStore {
             })
         } else {
             let servers = buffer.update(cx, |buffer, cx| {
-                self.language_servers_for_local_buffer(buffer, cx)
+                self.running_language_servers_for_local_buffer(buffer, cx)
                     .map(|(_, server)| server.clone())
                     .collect::<Vec<_>>()
             });
@@ -8122,7 +8125,7 @@ impl LspStore {
         })
     }
 
-    pub fn language_servers_for_local_buffer<'a>(
+    pub fn running_language_servers_for_local_buffer<'a>(
         &'a self,
         buffer: &Buffer,
         cx: &mut App,
@@ -8144,6 +8147,17 @@ impl LspStore {
             )
     }
 
+    pub fn language_servers_for_local_buffer(
+        &self,
+        buffer: &Buffer,
+        cx: &mut App,
+    ) -> Vec<LanguageServerId> {
+        let local = self.as_local();
+        local
+            .map(|local| local.language_server_ids_for_buffer(buffer, cx))
+            .unwrap_or_default()
+    }
+
     pub fn language_server_for_local_buffer<'a>(
         &'a self,
         buffer: &'a Buffer,

crates/project/src/project.rs 🔗

@@ -2622,6 +2622,12 @@ impl Project {
         !self.is_local()
     }
 
+    pub fn disable_worktree_scanner(&mut self, cx: &mut Context<Self>) {
+        self.worktree_store.update(cx, |worktree_store, _cx| {
+            worktree_store.disable_scanner();
+        });
+    }
+
     #[inline]
     pub fn create_buffer(
         &mut self,
@@ -5184,7 +5190,7 @@ impl Project {
     #[cfg(any(test, feature = "test-support"))]
     pub fn has_language_servers_for(&self, buffer: &Buffer, cx: &mut App) -> bool {
         self.lsp_store.update(cx, |this, cx| {
-            this.language_servers_for_local_buffer(buffer, cx)
+            this.running_language_servers_for_local_buffer(buffer, cx)
                 .next()
                 .is_some()
         })

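A hedged usage sketch of the new hook (hypothetical call site, e.g. a headless tool that only opens buffers on demand): disabling the scanner forwards to `WorktreeStore::disable_scanner`, so worktrees added afterwards skip the background scan and filesystem watching (see the `scanning_enabled` plumbing in the worktree changes further down).

    // Hedged sketch: turn off background scanning before any worktrees are added.
    project.update(cx, |project, cx| {
        project.disable_worktree_scanner(cx);
    });
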
crates/project/src/project_tests.rs 🔗

@@ -691,7 +691,7 @@ async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
     let servers = project.update(cx, |project, cx| {
         project.lsp_store.update(cx, |this, cx| {
             first_buffer.update(cx, |buffer, cx| {
-                this.language_servers_for_local_buffer(buffer, cx)
+                this.running_language_servers_for_local_buffer(buffer, cx)
                     .map(|(adapter, server)| (adapter.clone(), server.clone()))
                     .collect::<Vec<_>>()
             })
@@ -720,7 +720,7 @@ async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
     let servers = project.update(cx, |project, cx| {
         project.lsp_store.update(cx, |this, cx| {
             second_project_buffer.update(cx, |buffer, cx| {
-                this.language_servers_for_local_buffer(buffer, cx)
+                this.running_language_servers_for_local_buffer(buffer, cx)
                     .map(|(adapter, server)| (adapter.clone(), server.clone()))
                     .collect::<Vec<_>>()
             })
@@ -791,7 +791,7 @@ async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
     let servers = project.update(cx, |project, cx| {
         project.lsp_store.update(cx, |this, cx| {
             second_project_buffer.update(cx, |buffer, cx| {
-                this.language_servers_for_local_buffer(buffer, cx)
+                this.running_language_servers_for_local_buffer(buffer, cx)
                     .map(|(adapter, server)| (adapter.clone(), server.clone()))
                     .collect::<Vec<_>>()
             })

crates/project/src/worktree_store.rs 🔗

@@ -57,6 +57,7 @@ pub struct WorktreeStore {
     retain_worktrees: bool,
     worktrees: Vec<WorktreeHandle>,
     worktrees_reordered: bool,
+    scanning_enabled: bool,
     #[allow(clippy::type_complexity)]
     loading_worktrees:
         HashMap<Arc<SanitizedPath>, Shared<Task<Result<Entity<Worktree>, Arc<anyhow::Error>>>>>,
@@ -93,6 +94,7 @@ impl WorktreeStore {
             downstream_client: None,
             worktrees: Vec::new(),
             worktrees_reordered: false,
+            scanning_enabled: true,
             retain_worktrees,
             state: WorktreeStoreState::Local { fs },
         }
@@ -110,6 +112,7 @@ impl WorktreeStore {
             downstream_client: None,
             worktrees: Vec::new(),
             worktrees_reordered: false,
+            scanning_enabled: true,
             retain_worktrees,
             state: WorktreeStoreState::Remote {
                 upstream_client,
@@ -119,6 +122,10 @@ impl WorktreeStore {
         }
     }
 
+    pub fn disable_scanner(&mut self) {
+        self.scanning_enabled = false;
+    }
+
     /// Iterates through all worktrees, including ones that don't appear in the project panel
     pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Entity<Worktree>> {
         self.worktrees
@@ -576,6 +583,7 @@ impl WorktreeStore {
         cx: &mut Context<Self>,
     ) -> Task<Result<Entity<Worktree>, Arc<anyhow::Error>>> {
         let next_entry_id = self.next_entry_id.clone();
+        let scanning_enabled = self.scanning_enabled;
 
         cx.spawn(async move |this, cx| {
             let worktree = Worktree::local(
@@ -583,6 +591,7 @@ impl WorktreeStore {
                 visible,
                 fs,
                 next_entry_id,
+                scanning_enabled,
                 cx,
             )
             .await;

crates/remote/src/transport/ssh.rs 🔗

@@ -116,7 +116,7 @@ impl MasterProcess {
             .args(additional_args)
             .args(args);
 
-        master_process.arg(format!("ControlPath='{}'", socket_path.display()));
+        master_process.arg(format!("ControlPath={}", socket_path.display()));
 
         let process = master_process.arg(&url).spawn()?;
 

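The quote removal matters because the argument is handed to the spawned `ssh` process directly rather than through a shell, so the quotes would have become a literal part of the ControlPath value. A hedged, standalone illustration of that general rule (Unix-only, using `printf` purely for demonstration):

    use std::process::Command;

    fn main() {
        // Args passed via Command reach the child verbatim (no shell involved),
        // so quote characters are not stripped the way a shell would strip them.
        let out = Command::new("printf")
            .arg("%s")
            .arg("'quoted'")
            .output()
            .expect("failed to run printf");
        assert_eq!(out.stdout, b"'quoted'"); // the quotes are part of the argument
    }
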
crates/repl/src/repl.rs 🔗

@@ -12,7 +12,7 @@ mod session;
 use std::{sync::Arc, time::Duration};
 
 use async_dispatcher::{Dispatcher, Runnable, set_dispatcher};
-use gpui::{App, PlatformDispatcher, RunnableVariant};
+use gpui::{App, PlatformDispatcher, Priority, RunnableVariant};
 use project::Fs;
 pub use runtimelib::ExecutionState;
 
@@ -46,7 +46,7 @@ fn zed_dispatcher(cx: &mut App) -> impl Dispatcher {
     impl Dispatcher for ZedDispatcher {
         fn dispatch(&self, runnable: Runnable) {
             self.dispatcher
-                .dispatch(RunnableVariant::Compat(runnable), None);
+                .dispatch(RunnableVariant::Compat(runnable), None, Priority::default());
         }
 
         fn dispatch_after(&self, duration: Duration, runnable: Runnable) {

crates/search/Cargo.toml 🔗

@@ -43,6 +43,8 @@ util_macros.workspace = true
 workspace.workspace = true
 zed_actions.workspace = true
 itertools.workspace = true
+ztracing.workspace = true
+tracing.workspace = true
 
 [dev-dependencies]
 client = { workspace = true, features = ["test-support"] }
@@ -53,3 +55,7 @@ lsp.workspace = true
 pretty_assertions.workspace = true
 unindent.workspace = true
 workspace = { workspace = true, features = ["test-support"] }
+
+[package.metadata.cargo-machete]
+ignored = ["tracing"]
+

crates/terminal/Cargo.toml 🔗

@@ -28,6 +28,7 @@ gpui.workspace = true
 itertools.workspace = true
 libc.workspace = true
 log.workspace = true
+regex.workspace = true
 release_channel.workspace = true
 schemars.workspace = true
 serde.workspace = true
@@ -38,7 +39,6 @@ task.workspace = true
 theme.workspace = true
 thiserror.workspace = true
 util.workspace = true
-fancy-regex.workspace = true
 urlencoding.workspace = true
 
 [target.'cfg(windows)'.dependencies]

crates/terminal/src/terminal_hyperlinks.rs 🔗

@@ -8,8 +8,8 @@ use alacritty_terminal::{
         search::{Match, RegexIter, RegexSearch},
     },
 };
-use fancy_regex::Regex;
 use log::{info, warn};
+use regex::Regex;
 use std::{
     ops::{Index, Range},
     time::{Duration, Instant},
@@ -308,17 +308,6 @@ fn path_match<T>(
         let mut path_found = false;
 
         for captures in regex.captures_iter(&line) {
-            let captures = match captures {
-                Ok(captures) => captures,
-                Err(error) => {
-                    warn!("Error '{error}' searching for path hyperlinks in line: {line}");
-                    info!(
-                        "Skipping match from path hyperlinks with regex: {}",
-                        regex.as_str()
-                    );
-                    continue;
-                }
-            };
             path_found = true;
             let match_range = captures.get(0).unwrap().range();
             let (path_range, line_column) = if let Some(path) = captures.name("path") {
@@ -376,7 +365,7 @@ mod tests {
         term::{Config, cell::Flags, test::TermSize},
         vte::ansi::Handler,
     };
-    use fancy_regex::Regex;
+    use regex::Regex;
     use settings::{self, Settings, SettingsContent};
     use std::{cell::RefCell, ops::RangeInclusive, path::PathBuf, rc::Rc};
     use url::Url;
@@ -386,7 +375,7 @@ mod tests {
         let results: Vec<_> = Regex::new(re)
             .unwrap()
             .find_iter(hay)
-            .map(|m| m.unwrap().as_str())
+            .map(|m| m.as_str())
             .collect();
         assert_eq!(results, expected);
     }
@@ -578,8 +567,6 @@ mod tests {
             test_path!("/test/cool.rs(4,2)👉:", "What is this?");
 
             // path, line, column, and description
-            test_path!("/test/cool.rs:4:2👉:Error!");
-            test_path!("/test/cool.rs:4:2:👉Error!");
             test_path!("‹«/test/co👉ol.rs»:«4»:«2»›:Error!");
             test_path!("‹«/test/co👉ol.rs»(«4»,«2»)›:Error!");
 
@@ -590,6 +577,7 @@ mod tests {
 
             // Python
             test_path!("‹«awe👉some.py»›");
+            test_path!("‹«👉a»› ");
 
             test_path!("    ‹F👉ile \"«/awesome.py»\", line «42»›: Wat?");
             test_path!("    ‹File \"«/awe👉some.py»\", line «42»›");
@@ -602,18 +590,14 @@ mod tests {
             // path, line, column and description
             test_path!("‹«/👉test/cool.rs»:«4»:«2»›:例Desc例例例");
             test_path!("‹«/test/cool.rs»:«4»:«👉2»›:例Desc例例例");
-            test_path!("/test/cool.rs:4:2:例Desc例👉例例");
             test_path!("‹«/👉test/cool.rs»(«4»,«2»)›:例Desc例例例");
             test_path!("‹«/test/cool.rs»(«4»👉,«2»)›:例Desc例例例");
-            test_path!("/test/cool.rs(4,2):例Desc例👉例例");
 
             // path, line, column and description w/extra colons
             test_path!("‹«/👉test/cool.rs»:«4»:«2»›::例Desc例例例");
             test_path!("‹«/test/cool.rs»:«4»:«👉2»›::例Desc例例例");
-            test_path!("/test/cool.rs:4:2::例Desc例👉例例");
             test_path!("‹«/👉test/cool.rs»(«4»,«2»)›::例Desc例例例");
             test_path!("‹«/test/cool.rs»(«4»,«2»👉)›::例Desc例例例");
-            test_path!("/test/cool.rs(4,2)::例Desc例👉例例");
         }
 
         #[test]
@@ -658,8 +642,6 @@ mod tests {
             test_path!("‹«/test/co👉ol.rs»(«1»,«618»)›:");
             test_path!("‹«/test/co👉ol.rs»::«42»›");
             test_path!("‹«/test/co👉ol.rs»::«42»›:");
-            test_path!("‹«/test/co👉ol.rs:4:2»(«1»,«618»)›");
-            test_path!("‹«/test/co👉ol.rs:4:2»(«1»,«618»)›:");
             test_path!("‹«/test/co👉ol.rs»(«1»,«618»)›::");
         }
 
@@ -675,7 +657,7 @@ mod tests {
             test_path!("<‹«/test/co👉ol.rs»:«4»›>");
 
             test_path!("[\"‹«/test/co👉ol.rs»:«4»›\"]");
-            test_path!("'‹«(/test/co👉ol.rs:4)»›'");
+            test_path!("'(‹«/test/co👉ol.rs»:«4»›)'");
 
             test_path!("\"‹«/test/co👉ol.rs»:«4»:«2»›\"");
             test_path!("'‹«/test/co👉ol.rs»:«4»:«2»›'");
@@ -724,7 +706,7 @@ mod tests {
             test_path!("‹«/test/co👉ol.rs»:«4»›:,");
             test_path!("/test/cool.rs:4:👉,");
             test_path!("[\"‹«/test/co👉ol.rs»:«4»›\"]:,");
-            test_path!("'‹«(/test/co👉ol.rs:4),,»›'..");
+            test_path!("'(‹«/test/co👉ol.rs»:«4»›),,'...");
             test_path!("('‹«/test/co👉ol.rs»:«4»›'::: was here...)");
             test_path!("[Here's <‹«/test/co👉ol.rs»:«4»›>]::: ");
         }
@@ -849,9 +831,6 @@ mod tests {
                 test_path!(
                     "‹«test/c👉ontrollers/template_items_controller_test.rb»:«20»›:in 'block (2 levels) in <class:TemplateItemsControllerTest>'"
                 );
-                test_path!(
-                    "test/controllers/template_items_controller_test.rb:19:i👉n 'block in <class:TemplateItemsControllerTest>'"
-                );
             }
 
             #[test]

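A hedged, self-contained sketch of the API difference behind the `unwrap` removals above: `fancy_regex` yields fallible matches (backtracking can fail at match time), while the `regex` crate's matches are infallible once the pattern compiles, so the iterator items lose their `Result` wrapper after the switch.

    fn main() {
        let hay = "/test/cool.rs:4:2";

        // fancy_regex: every match arrives wrapped in a Result.
        let fancy = fancy_regex::Regex::new(r"\d+").unwrap();
        let with_fancy: Vec<&str> = fancy.find_iter(hay).map(|m| m.unwrap().as_str()).collect();

        // regex: matches are infallible once the pattern has compiled.
        let plain = regex::Regex::new(r"\d+").unwrap();
        let with_regex: Vec<&str> = plain.find_iter(hay).map(|m| m.as_str()).collect();

        assert_eq!(with_fancy, with_regex);
        assert_eq!(with_regex, ["4", "2"]);
    }
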
crates/terminal_view/src/terminal_panel.rs 🔗

@@ -550,7 +550,7 @@ impl TerminalPanel {
 
         let builder = ShellBuilder::new(&shell, is_windows);
         let command_label = builder.command_label(task.command.as_deref().unwrap_or(""));
-        let (command, args) = builder.build(task.command.clone(), &task.args);
+        let (command, args) = builder.build_no_quote(task.command.clone(), &task.args);
 
         let task = SpawnInTerminal {
             command_label,

crates/text/src/text.rs 🔗

@@ -2321,8 +2321,13 @@ impl BufferSnapshot {
         } else if anchor.is_max() {
             self.visible_text.len()
         } else {
-            debug_assert!(anchor.buffer_id == Some(self.remote_id));
-            debug_assert!(self.version.observed(anchor.timestamp));
+            debug_assert_eq!(anchor.buffer_id, Some(self.remote_id));
+            debug_assert!(
+                self.version.observed(anchor.timestamp),
+                "Anchor timestamp {:?} not observed by buffer {:?}",
+                anchor.timestamp,
+                self.version
+            );
             let anchor_key = InsertionFragmentKey {
                 timestamp: anchor.timestamp,
                 split_offset: anchor.offset,
@@ -3382,6 +3387,25 @@ impl LineEnding {
     }
 }
 
+pub fn chunks_with_line_ending(rope: &Rope, line_ending: LineEnding) -> impl Iterator<Item = &str> {
+    rope.chunks().flat_map(move |chunk| {
+        let mut newline = false;
+        let end_with_newline = chunk.ends_with('\n').then_some(line_ending.as_str());
+        chunk
+            .lines()
+            .flat_map(move |line| {
+                let ending = if newline {
+                    Some(line_ending.as_str())
+                } else {
+                    None
+                };
+                newline = true;
+                ending.into_iter().chain([line])
+            })
+            .chain(end_with_newline)
+    })
+}
+
 #[cfg(debug_assertions)]
 pub mod debug {
     use super::*;

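A hedged usage sketch for the new `chunks_with_line_ending` helper (assuming the crate's `LineEnding::Windows` variant and `Rope: From<&str>`): the iterator re-emits a rope's lines with the requested ending spliced between them, so a caller can materialize the converted text without per-line allocations.

    // Hedged sketch: render a rope with CRLF endings, chunk by chunk.
    let rope = Rope::from("one\ntwo\nthree\n");
    let crlf: String = chunks_with_line_ending(&rope, LineEnding::Windows).collect();
    assert_eq!(crlf, "one\r\ntwo\r\nthree\r\n");
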
crates/util/src/paths.rs 🔗

@@ -227,9 +227,16 @@ impl SanitizedPath {
         #[cfg(not(target_os = "windows"))]
         return unsafe { mem::transmute::<Arc<Path>, Arc<Self>>(path) };
 
-        // TODO: could avoid allocating here if dunce::simplified results in the same path
         #[cfg(target_os = "windows")]
-        return Self::new(&path).into();
+        {
+            let simplified = dunce::simplified(path.as_ref());
+            if simplified == path.as_ref() {
+                // safe because `Path` and `SanitizedPath` have the same repr and Drop impl
+                unsafe { mem::transmute::<Arc<Path>, Arc<Self>>(path) }
+            } else {
+                Self::unchecked_new(simplified).into()
+            }
+        }
     }
 
     pub fn new_arc<T: AsRef<Path> + ?Sized>(path: &T) -> Arc<Self> {

crates/util/src/shell.rs 🔗

@@ -702,7 +702,10 @@ impl ShellKind {
                     .map(|quoted| Cow::Owned(self.prepend_command_prefix(&quoted).into_owned()));
             }
         }
-        self.try_quote(arg)
+        self.try_quote(arg).map(|quoted| match quoted {
+            unquoted @ Cow::Borrowed(_) => unquoted,
+            Cow::Owned(quoted) => Cow::Owned(self.prepend_command_prefix(&quoted).into_owned()),
+        })
     }
 
     pub fn split(&self, input: &str) -> Option<Vec<String>> {
@@ -916,7 +919,7 @@ mod tests {
                 .try_quote_prefix_aware("'uname'")
                 .unwrap()
                 .into_owned(),
-            "\"'uname'\"".to_string()
+            "^\"'uname'\"".to_string()
         );
         assert_eq!(
             shell_kind.try_quote("^uname").unwrap().into_owned(),
@@ -949,7 +952,7 @@ mod tests {
                 .try_quote_prefix_aware("'uname a'")
                 .unwrap()
                 .into_owned(),
-            "\"'uname a'\"".to_string()
+            "^\"'uname a'\"".to_string()
         );
         assert_eq!(
             shell_kind.try_quote("^'uname a'").unwrap().into_owned(),

crates/util/src/shell_builder.rs 🔗

@@ -80,27 +80,23 @@ impl ShellBuilder {
         task_args: &[String],
     ) -> (String, Vec<String>) {
         if let Some(task_command) = task_command {
-            let task_command = self.kind.prepend_command_prefix(&task_command);
             let task_command = if !task_args.is_empty() {
                 match self.kind.try_quote_prefix_aware(&task_command) {
-                    Some(task_command) => task_command,
+                    Some(task_command) => task_command.into_owned(),
                     None => task_command,
                 }
             } else {
                 task_command
             };
-            let mut combined_command =
-                task_args
-                    .iter()
-                    .fold(task_command.into_owned(), |mut command, arg| {
-                        command.push(' ');
-                        let shell_variable = self.kind.to_shell_variable(arg);
-                        command.push_str(&match self.kind.try_quote(&shell_variable) {
-                            Some(shell_variable) => shell_variable,
-                            None => Cow::Owned(shell_variable),
-                        });
-                        command
-                    });
+            let mut combined_command = task_args.iter().fold(task_command, |mut command, arg| {
+                command.push(' ');
+                let shell_variable = self.kind.to_shell_variable(arg);
+                command.push_str(&match self.kind.try_quote(&shell_variable) {
+                    Some(shell_variable) => shell_variable,
+                    None => Cow::Owned(shell_variable),
+                });
+                command
+            });
             if self.redirect_stdin {
                 match self.kind {
                     ShellKind::Fish => {
@@ -134,6 +130,90 @@ impl ShellBuilder {
         (self.program, self.args)
     }
 
+    // This should not exist, but our task infra is broken beyond repair right now
+    #[doc(hidden)]
+    pub fn build_no_quote(
+        mut self,
+        task_command: Option<String>,
+        task_args: &[String],
+    ) -> (String, Vec<String>) {
+        if let Some(task_command) = task_command {
+            let mut combined_command = task_args.iter().fold(task_command, |mut command, arg| {
+                command.push(' ');
+                command.push_str(&self.kind.to_shell_variable(arg));
+                command
+            });
+            if self.redirect_stdin {
+                match self.kind {
+                    ShellKind::Fish => {
+                        combined_command.insert_str(0, "begin; ");
+                        combined_command.push_str("; end </dev/null");
+                    }
+                    ShellKind::Posix
+                    | ShellKind::Nushell
+                    | ShellKind::Csh
+                    | ShellKind::Tcsh
+                    | ShellKind::Rc
+                    | ShellKind::Xonsh
+                    | ShellKind::Elvish => {
+                        combined_command.insert(0, '(');
+                        combined_command.push_str(") </dev/null");
+                    }
+                    ShellKind::PowerShell | ShellKind::Pwsh => {
+                        combined_command.insert_str(0, "$null | & {");
+                        combined_command.push_str("}");
+                    }
+                    ShellKind::Cmd => {
+                        combined_command.push_str("< NUL");
+                    }
+                }
+            }
+
+            self.args
+                .extend(self.kind.args_for_shell(self.interactive, combined_command));
+        }
+
+        (self.program, self.args)
+    }
+
+    /// Builds a command with the given task command and arguments.
+    ///
+    /// Prefer this over manually constructing a command with the output of `Self::build`,
+    /// as this method handles `cmd` weirdness on Windows correctly.
+    pub fn build_command(
+        self,
+        mut task_command: Option<String>,
+        task_args: &[String],
+    ) -> smol::process::Command {
+        #[cfg(windows)]
+        let kind = self.kind;
+        if task_args.is_empty() {
+            task_command = task_command
+                .as_ref()
+                .map(|cmd| self.kind.try_quote_prefix_aware(&cmd).map(Cow::into_owned))
+                .unwrap_or(task_command);
+        }
+        let (program, args) = self.build(task_command, task_args);
+
+        let mut child = crate::command::new_smol_command(program);
+
+        #[cfg(windows)]
+        if kind == ShellKind::Cmd {
+            use smol::process::windows::CommandExt;
+
+            for arg in args {
+                child.raw_arg(arg);
+            }
+        } else {
+            child.args(args);
+        }
+
+        #[cfg(not(windows))]
+        child.args(args);
+
+        child
+    }
+
     pub fn kind(&self) -> ShellKind {
         self.kind
     }
@@ -166,7 +246,7 @@ mod test {
             vec![
                 "-i",
                 "-c",
-                "^echo '$env.hello' '$env.world' nothing '--($env.something)' '$' '${test'"
+                "echo '$env.hello' '$env.world' nothing '--($env.something)' '$' '${test'"
             ]
         );
     }
@@ -181,7 +261,7 @@ mod test {
             .build(Some("echo".into()), &["nothing".to_string()]);
 
         assert_eq!(program, "nu");
-        assert_eq!(args, vec!["-i", "-c", "(^echo nothing) </dev/null"]);
+        assert_eq!(args, vec!["-i", "-c", "(echo nothing) </dev/null"]);
     }
 
     #[test]
@@ -196,4 +276,23 @@ mod test {
         assert_eq!(program, "fish");
         assert_eq!(args, vec!["-i", "-c", "begin; echo test; end </dev/null"]);
     }
+
+    #[test]
+    fn does_not_quote_sole_command_only() {
+        let shell = Shell::Program("fish".to_owned());
+        let shell_builder = ShellBuilder::new(&shell, false);
+
+        let (program, args) = shell_builder.build(Some("echo".into()), &[]);
+
+        assert_eq!(program, "fish");
+        assert_eq!(args, vec!["-i", "-c", "echo"]);
+
+        let shell = Shell::Program("fish".to_owned());
+        let shell_builder = ShellBuilder::new(&shell, false);
+
+        let (program, args) = shell_builder.build(Some("echo oo".into()), &[]);
+
+        assert_eq!(program, "fish");
+        assert_eq!(args, vec!["-i", "-c", "echo oo"]);
+    }
 }

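A hedged usage sketch for the new `build_command` (hypothetical call site; `Shell::Program` and the two-argument `ShellBuilder::new` appear in the tests above): letting the builder produce the `smol` command keeps the cmd.exe raw-argument handling in one place instead of at every caller.

    // Hedged sketch: spawn a task command through the user's shell.
    let shell = Shell::Program("fish".to_owned());
    let builder = ShellBuilder::new(&shell, false);
    let mut command = builder.build_command(Some("echo".into()), &["hello".to_string()]);
    let status = smol::block_on(command.status()).expect("failed to run task shell");
    assert!(status.success());
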
crates/util/src/shell_env.rs 🔗

@@ -132,7 +132,7 @@ async fn spawn_and_read_fd(
 #[cfg(windows)]
 async fn capture_windows(
     shell_path: &Path,
-    _args: &[String],
+    args: &[String],
     directory: &Path,
 ) -> Result<collections::HashMap<String, String>> {
     use std::process::Stdio;
@@ -141,17 +141,17 @@ async fn capture_windows(
         std::env::current_exe().context("Failed to determine current zed executable path.")?;
 
     let shell_kind = ShellKind::new(shell_path, true);
-    if let ShellKind::Csh | ShellKind::Tcsh | ShellKind::Rc | ShellKind::Fish | ShellKind::Xonsh =
-        shell_kind
-    {
-        return Err(anyhow::anyhow!("unsupported shell kind"));
-    }
     let mut cmd = crate::command::new_smol_command(shell_path);
+    cmd.args(args);
     let cmd = match shell_kind {
-        ShellKind::Csh | ShellKind::Tcsh | ShellKind::Rc | ShellKind::Fish | ShellKind::Xonsh => {
-            unreachable!()
-        }
-        ShellKind::Posix => cmd.args([
+        ShellKind::Csh
+        | ShellKind::Tcsh
+        | ShellKind::Rc
+        | ShellKind::Fish
+        | ShellKind::Xonsh
+        | ShellKind::Posix => cmd.args([
+            "-l",
+            "-i",
             "-c",
             &format!(
                 "cd '{}'; '{}' --printenv",

crates/vim/Cargo.toml 🔗

@@ -63,6 +63,7 @@ indoc.workspace = true
 language = { workspace = true, features = ["test-support"] }
 project = { workspace = true, features = ["test-support"] }
 lsp = { workspace = true, features = ["test-support"] }
+markdown_preview.workspace = true
 parking_lot.workspace = true
 project_panel.workspace = true
 release_channel.workspace = true

crates/workspace/src/workspace.rs 🔗

@@ -2452,6 +2452,12 @@ impl Workspace {
             .0
             .split(' ')
             .flat_map(|k| Keystroke::parse(k).log_err())
+            .map(|k| {
+                cx.keyboard_mapper()
+                    .map_key_equivalent(k, true)
+                    .inner()
+                    .clone()
+            })
             .collect();
         let _ = self.send_keystrokes_impl(keystrokes, window, cx);
     }

crates/worktree/src/worktree.rs 🔗

@@ -14,7 +14,7 @@ use futures::{
         mpsc::{self, UnboundedSender},
         oneshot,
     },
-    select_biased,
+    select_biased, stream,
     task::Poll,
 };
 use fuzzy::CharBag;
@@ -22,7 +22,8 @@ use git::{
     COMMIT_MESSAGE, DOT_GIT, FSMONITOR_DAEMON, GITIGNORE, INDEX_LOCK, LFS_DIR, status::GitSummary,
 };
 use gpui::{
-    App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task,
+    App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Priority,
+    Task,
 };
 use ignore::IgnoreStack;
 use language::DiskState;
@@ -97,6 +98,7 @@ pub enum CreatedEntry {
     Excluded { abs_path: PathBuf },
 }
 
+#[derive(Debug)]
 pub struct LoadedFile {
     pub file: Arc<File>,
     pub text: String,
@@ -129,6 +131,7 @@ pub struct LocalWorktree {
     next_entry_id: Arc<AtomicUsize>,
     settings: WorktreeSettings,
     share_private_files: bool,
+    scanning_enabled: bool,
 }
 
 pub struct PathPrefixScanRequest {
@@ -356,6 +359,7 @@ impl Worktree {
         visible: bool,
         fs: Arc<dyn Fs>,
         next_entry_id: Arc<AtomicUsize>,
+        scanning_enabled: bool,
         cx: &mut AsyncApp,
     ) -> Result<Entity<Self>> {
         let abs_path = path.into();
@@ -459,6 +463,7 @@ impl Worktree {
                 fs_case_sensitive,
                 visible,
                 settings,
+                scanning_enabled,
             };
             worktree.start_background_scanner(scan_requests_rx, path_prefixes_to_scan_rx, cx);
             Worktree::Local(worktree)
@@ -1049,13 +1054,18 @@ impl LocalWorktree {
         let share_private_files = self.share_private_files;
         let next_entry_id = self.next_entry_id.clone();
         let fs = self.fs.clone();
+        let scanning_enabled = self.scanning_enabled;
         let settings = self.settings.clone();
         let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
         let background_scanner = cx.background_spawn({
             let abs_path = snapshot.abs_path.as_path().to_path_buf();
             let background = cx.background_executor().clone();
             async move {
-                let (events, watcher) = fs.watch(&abs_path, FS_WATCH_LATENCY).await;
+                let (events, watcher) = if scanning_enabled {
+                    fs.watch(&abs_path, FS_WATCH_LATENCY).await
+                } else {
+                    (Box::pin(stream::pending()) as _, Arc::new(NullWatcher) as _)
+                };
                 let fs_case_sensitive = fs.is_case_sensitive().await.unwrap_or_else(|e| {
                     log::error!("Failed to determine whether filesystem is case sensitive: {e:#}");
                     true
@@ -1080,6 +1090,7 @@ impl LocalWorktree {
                     }),
                     phase: BackgroundScannerPhase::InitialScan,
                     share_private_files,
+                    scanning_enabled,
                     settings,
                     watcher,
                 };
@@ -3617,6 +3628,7 @@ struct BackgroundScanner {
     watcher: Arc<dyn Watcher>,
     settings: WorktreeSettings,
     share_private_files: bool,
+    scanning_enabled: bool,
 }
 
 #[derive(Copy, Clone, PartialEq)]
@@ -3632,14 +3644,23 @@ impl BackgroundScanner {
         // the git repository in an ancestor directory. Find any gitignore files
         // in ancestor directories.
         let root_abs_path = self.state.lock().await.snapshot.abs_path.clone();
-        let (ignores, repo) = discover_ancestor_git_repo(self.fs.clone(), &root_abs_path).await;
-        self.state
-            .lock()
-            .await
-            .snapshot
-            .ignores_by_parent_abs_path
-            .extend(ignores);
-        let containing_git_repository = if let Some((ancestor_dot_git, work_directory)) = repo {
+
+        let repo = if self.scanning_enabled {
+            let (ignores, repo) = discover_ancestor_git_repo(self.fs.clone(), &root_abs_path).await;
+            self.state
+                .lock()
+                .await
+                .snapshot
+                .ignores_by_parent_abs_path
+                .extend(ignores);
+            repo
+        } else {
+            None
+        };
+
+        let containing_git_repository = if let Some((ancestor_dot_git, work_directory)) = repo
+            && self.scanning_enabled
+        {
             maybe!(async {
                 self.state
                     .lock()
@@ -3663,6 +3684,7 @@ impl BackgroundScanner {
 
         let mut global_gitignore_events = if let Some(global_gitignore_path) =
             &paths::global_gitignore_path()
+            && self.scanning_enabled
         {
             let is_file = self.fs.is_file(&global_gitignore_path).await;
             self.state.lock().await.snapshot.global_gitignore = if is_file {
@@ -3705,7 +3727,7 @@ impl BackgroundScanner {
                         .insert_entry(root_entry, self.fs.as_ref(), self.watcher.as_ref())
                         .await;
                 }
-                if root_entry.is_dir() {
+                if root_entry.is_dir() && self.scanning_enabled {
                     state
                         .enqueue_scan_dir(
                             root_abs_path.as_path().into(),
@@ -4123,7 +4145,7 @@ impl BackgroundScanner {
 
         let progress_update_count = AtomicUsize::new(0);
         self.executor
-            .scoped(|scope| {
+            .scoped_priority(Priority::Low, |scope| {
                 for _ in 0..self.executor.num_cpus() {
                     scope.spawn(async {
                         let mut last_progress_update_count = 0;
@@ -5641,3 +5663,15 @@ async fn discover_git_paths(dot_git_abs_path: &Arc<Path>, fs: &dyn Fs) -> (Arc<P
     };
     (repository_dir_abs_path, common_dir_abs_path)
 }
+
+struct NullWatcher;
+
+impl fs::Watcher for NullWatcher {
+    fn add(&self, _path: &Path) -> Result<()> {
+        Ok(())
+    }
+
+    fn remove(&self, _path: &Path) -> Result<()> {
+        Ok(())
+    }
+}

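A hedged illustration of the disabled-scanner path above (the item type is illustrative only): `stream::pending()` never yields, so the background scanner sees no filesystem events and only reacts to explicit scan requests, while `NullWatcher` silently accepts `add`/`remove` calls.

    use futures::{stream, FutureExt, StreamExt};

    // Hedged sketch: a pending stream never produces an event.
    let mut events = stream::pending::<Vec<std::path::PathBuf>>();
    assert!(events.next().now_or_never().is_none());
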
crates/worktree/src/worktree_tests.rs 🔗

@@ -44,6 +44,7 @@ async fn test_traversal(cx: &mut TestAppContext) {
         true,
         fs,
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -108,6 +109,7 @@ async fn test_circular_symlinks(cx: &mut TestAppContext) {
         true,
         fs.clone(),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -207,6 +209,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
         true,
         fs.clone(),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -357,6 +360,7 @@ async fn test_renaming_case_only(cx: &mut TestAppContext) {
         true,
         fs.clone(),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -434,6 +438,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
         true,
         fs.clone(),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -598,6 +603,7 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
         true,
         fs.clone(),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -698,6 +704,7 @@ async fn test_write_file(cx: &mut TestAppContext) {
         true,
         Arc::new(RealFs::new(None, cx.executor())),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -791,6 +798,7 @@ async fn test_file_scan_inclusions(cx: &mut TestAppContext) {
         true,
         Arc::new(RealFs::new(None, cx.executor())),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -856,6 +864,7 @@ async fn test_file_scan_exclusions_overrules_inclusions(cx: &mut TestAppContext)
         true,
         Arc::new(RealFs::new(None, cx.executor())),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -914,6 +923,7 @@ async fn test_file_scan_inclusions_reindexes_on_setting_change(cx: &mut TestAppC
         true,
         Arc::new(RealFs::new(None, cx.executor())),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -999,6 +1009,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
         true,
         Arc::new(RealFs::new(None, cx.executor())),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -1080,6 +1091,7 @@ async fn test_hidden_files(cx: &mut TestAppContext) {
         true,
         Arc::new(RealFs::new(None, cx.executor())),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -1190,6 +1202,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
         true,
         Arc::new(RealFs::new(None, cx.executor())),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -1301,6 +1314,7 @@ async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
         true,
         Arc::new(RealFs::new(None, cx.executor())),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -1339,6 +1353,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
         true,
         fs,
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -1407,6 +1422,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
         true,
         fs_fake,
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -1448,6 +1464,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
         true,
         fs_real,
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -1556,6 +1573,7 @@ async fn test_create_file_in_expanded_gitignored_dir(cx: &mut TestAppContext) {
         true,
         fs.clone(),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -1651,6 +1669,7 @@ async fn test_fs_event_for_gitignored_dir_does_not_lose_contents(cx: &mut TestAp
         true,
         fs.clone(),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -1728,6 +1747,7 @@ async fn test_random_worktree_operations_during_initial_scan(
         true,
         fs.clone(),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -1818,6 +1838,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
         true,
         fs.clone(),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -1890,6 +1911,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
             true,
             fs.clone(),
             Default::default(),
+            true,
             &mut cx.to_async(),
         )
         .await
@@ -2203,6 +2225,7 @@ async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
         true,
         fs.clone(),
         Default::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -2235,6 +2258,7 @@ async fn test_repository_above_root(executor: BackgroundExecutor, cx: &mut TestA
         true,
         fs.clone(),
         Arc::default(),
+        true,
         &mut cx.to_async(),
     )
     .await
@@ -2312,6 +2336,7 @@ async fn test_global_gitignore(executor: BackgroundExecutor, cx: &mut TestAppCon
         true,
         fs.clone(),
         Arc::default(),
+        true,
         &mut cx.to_async(),
     )
     .await

crates/zed/src/main.rs 🔗

@@ -166,8 +166,6 @@ fn fail_to_open_window(e: anyhow::Error, _cx: &mut App) {
 pub static STARTUP_TIME: OnceLock<Instant> = OnceLock::new();
 
 pub fn main() {
-    ztracing::init();
-
     STARTUP_TIME.get_or_init(|| Instant::now());
 
     #[cfg(unix)]
@@ -242,6 +240,7 @@ pub fn main() {
     }
 
     zlog::init();
+
     if stdout_is_a_pty() {
         zlog::init_output_stdout();
     } else {
@@ -251,6 +250,7 @@ pub fn main() {
             zlog::init_output_stdout();
         };
     }
+    ztracing::init();
 
     let version = option_env!("ZED_BUILD_ID");
     let app_commit_sha =

crates/ztracing/Cargo.toml 🔗

@@ -12,6 +12,7 @@ workspace = true
 tracy = ["tracing-tracy"]
 
 [dependencies]
+zlog.workspace = true
 tracing.workspace = true
 
 tracing-subscriber = "0.3.22"

crates/ztracing/src/lib.rs 🔗

@@ -1,10 +1,46 @@
+pub use tracing::Level;
+
 #[cfg(ztracing)]
-pub use tracing::instrument;
+pub use tracing::{
+    debug_span, error_span, event, info_span, instrument, span, trace_span, warn_span,
+};
 #[cfg(not(ztracing))]
 pub use ztracing_macro::instrument;
 
+#[cfg(not(ztracing))]
+pub use __consume_all_tokens as trace_span;
+#[cfg(not(ztracing))]
+pub use __consume_all_tokens as info_span;
+#[cfg(not(ztracing))]
+pub use __consume_all_tokens as debug_span;
+#[cfg(not(ztracing))]
+pub use __consume_all_tokens as warn_span;
+#[cfg(not(ztracing))]
+pub use __consume_all_tokens as error_span;
+#[cfg(not(ztracing))]
+pub use __consume_all_tokens as event;
+#[cfg(not(ztracing))]
+pub use __consume_all_tokens as span;
+
+#[cfg(not(ztracing))]
+#[macro_export]
+macro_rules! __consume_all_tokens {
+    ($($t:tt)*) => {
+        $crate::FakeSpan
+    };
+}
+
+pub struct FakeSpan;
+impl FakeSpan {
+    pub fn enter(&self) {}
+}
+
+// #[cfg(not(ztracing))]
+// pub use span;
+
 #[cfg(ztracing)]
 pub fn init() {
+    zlog::info!("Starting tracy subscriber, you can now connect the profiler");
     use tracing_subscriber::prelude::*;
     tracing::subscriber::set_global_default(
         tracing_subscriber::registry().with(tracing_tracy::TracyLayer::default()),

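A hedged call-site sketch (span name and field are hypothetical): the same code compiles whether or not the `ztracing` cfg is set, because the disabled build rewrites every span macro to the no-op `FakeSpan` through `__consume_all_tokens`.

    // With ztracing enabled this records a real tracing span; without it, the
    // macro expands to `FakeSpan` and `enter()` does nothing.
    let span = ztracing::info_span!("scan_directory", path = ?root_abs_path);
    let _guard = span.enter();
    // ... traced work ...
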
script/prettier 🔗

@@ -3,9 +3,9 @@ set -euxo pipefail
 
 PRETTIER_VERSION=3.5.0
 
-pnpm dlx "prettier@${PRETTIER_VERSION}" assets/settings/default.json --check || {
+pnpm dlx "prettier@${PRETTIER_VERSION}" assets/settings/default.json --parser=jsonc --check || {
     echo "To fix, run from the root of the Zed repo:"
-    echo "  pnpm dlx prettier@${PRETTIER_VERSION} assets/settings/default.json --write"
+    echo "  pnpm dlx prettier@${PRETTIER_VERSION} assets/settings/default.json --parser=jsonc --write"
     false
 }
 

tooling/xtask/src/tasks/workflows/after_release.rs 🔗

@@ -4,10 +4,18 @@ use crate::tasks::workflows::{
     release::{self, notify_on_failure},
     runners,
     steps::{CommonJobConditions, NamedJob, checkout_repo, dependant_job, named},
-    vars::{self, StepOutput},
+    vars::{self, StepOutput, WorkflowInput},
 };
 
+const TAG_NAME: &str = "${{ github.event.release.tag_name || inputs.tag_name }}";
+const IS_PRERELEASE: &str = "${{ github.event.release.prerelease || inputs.prerelease }}";
+const RELEASE_BODY: &str = "${{ github.event.release.body || inputs.body }}";
+
 pub fn after_release() -> Workflow {
+    let tag_name = WorkflowInput::string("tag_name", None);
+    let prerelease = WorkflowInput::bool("prerelease", None);
+    let body = WorkflowInput::string("body", Some(String::new()));
+
     let refresh_zed_dev = rebuild_releases_page();
     let post_to_discord = post_to_discord(&[&refresh_zed_dev]);
     let publish_winget = publish_winget();
@@ -20,7 +28,14 @@ pub fn after_release() -> Workflow {
     ]);
 
     named::workflow()
-        .on(Event::default().release(Release::default().types(vec![ReleaseType::Published])))
+        .on(Event::default()
+            .release(Release::default().types(vec![ReleaseType::Published]))
+            .workflow_dispatch(
+                WorkflowDispatch::default()
+                    .add_input(tag_name.name, tag_name.input())
+                    .add_input(prerelease.name, prerelease.input())
+                    .add_input(body.name, body.input()),
+            ))
         .add_job(refresh_zed_dev.name, refresh_zed_dev.job)
         .add_job(post_to_discord.name, post_to_discord.job)
         .add_job(publish_winget.name, publish_winget.job)
@@ -30,9 +45,9 @@ pub fn after_release() -> Workflow {
 
 fn rebuild_releases_page() -> NamedJob {
     fn refresh_cloud_releases() -> Step<Run> {
-        named::bash(
-            "curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag=${{ github.event.release.tag_name }}",
-        )
+        named::bash(format!(
+            "curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag={TAG_NAME}"
+        ))
     }
 
     fn redeploy_zed_dev() -> Step<Run> {
@@ -51,15 +66,16 @@ fn rebuild_releases_page() -> NamedJob {
 
 fn post_to_discord(deps: &[&NamedJob]) -> NamedJob {
     fn get_release_url() -> Step<Run> {
-        named::bash(indoc::indoc! {r#"
-            if [ "${{ github.event.release.prerelease }}" == "true" ]; then
-                URL="https://zed.dev/releases/preview"
-            else
-                URL="https://zed.dev/releases/stable"
-            fi
-
-            echo "URL=$URL" >> "$GITHUB_OUTPUT"
-        "#})
+        named::bash(format!(
+            r#"if [ "{IS_PRERELEASE}" == "true" ]; then
+    URL="https://zed.dev/releases/preview"
+else
+    URL="https://zed.dev/releases/stable"
+fi
+
+echo "URL=$URL" >> "$GITHUB_OUTPUT"
+"#
+        ))
         .id("get-release-url")
     }
 
@@ -72,11 +88,9 @@ fn post_to_discord(deps: &[&NamedJob]) -> NamedJob {
         .id("get-content")
         .add_with((
             "stringToTruncate",
-            indoc::indoc! {r#"
-                📣 Zed [${{ github.event.release.tag_name }}](<${{ steps.get-release-url.outputs.URL }}>) was just released!
-
-                ${{ github.event.release.body }}
-            "#},
+            format!(
+                "📣 Zed [{TAG_NAME}](<${{{{ steps.get-release-url.outputs.URL }}}}>)  was just released!\n\n{RELEASE_BODY}\n"
+            ),
         ))
         .add_with(("maxLength", 2000))
         .add_with(("truncationSymbol", "..."))
@@ -102,16 +116,17 @@ fn post_to_discord(deps: &[&NamedJob]) -> NamedJob {
 
 fn publish_winget() -> NamedJob {
     fn set_package_name() -> (Step<Run>, StepOutput) {
-        let step = named::pwsh(indoc::indoc! {r#"
-            if ("${{ github.event.release.prerelease }}" -eq "true") {
-                $PACKAGE_NAME = "ZedIndustries.Zed.Preview"
-            } else {
-                $PACKAGE_NAME = "ZedIndustries.Zed"
-            }
-
-            echo "PACKAGE_NAME=$PACKAGE_NAME" >> $env:GITHUB_OUTPUT
-        "#})
-        .id("set-package-name");
+        let script = format!(
+            r#"if ("{IS_PRERELEASE}" -eq "true") {{
+    $PACKAGE_NAME = "ZedIndustries.Zed.Preview"
+}} else {{
+    $PACKAGE_NAME = "ZedIndustries.Zed"
+}}
+
+echo "PACKAGE_NAME=$PACKAGE_NAME" >> $env:GITHUB_OUTPUT
+"#
+        );
+        let step = named::pwsh(&script).id("set-package-name");
 
         let output = StepOutput::new(&step, "PACKAGE_NAME");
         (step, output)
@@ -124,6 +139,7 @@ fn publish_winget() -> NamedJob {
             "19e706d4c9121098010096f9c495a70a7518b30f", // v2
         )
         .add_with(("identifier", package_name.to_string()))
+        .add_with(("release-tag", TAG_NAME))
         .add_with(("max-versions-to-keep", 5))
         .add_with(("token", vars::WINGET_TOKEN))
     }

typos.toml 🔗

@@ -52,6 +52,8 @@ extend-exclude = [
     "crates/project_panel/benches/linux_repo_snapshot.txt",
     # Some multibuffer test cases have word fragments that register as typos
     "crates/multi_buffer/src/multi_buffer_tests.rs",
+    # Macos apis
+    "crates/gpui/src/platform/mac/dispatcher.rs",
 ]
 
 [default]