Merge remote-tracking branch 'origin/main' into AI-112/delete-worktree-on-archive

Created by Richard Feldman

Change summary

Cargo.lock                                                     |   11 
assets/settings/default.json                                   |   15 
crates/acp_thread/src/acp_thread.rs                            |    2 
crates/acp_thread/src/diff.rs                                  |    2 
crates/acp_tools/src/acp_tools.rs                              |    9 
crates/action_log/src/action_log.rs                            |   10 
crates/agent/src/edit_agent.rs                                 |    4 
crates/agent/src/tool_permissions.rs                           |    4 
crates/agent/src/tools/streaming_edit_file_tool.rs             |    6 
crates/agent_settings/src/agent_settings.rs                    |    8 
crates/agent_ui/src/agent_diff.rs                              |   41 
crates/agent_ui/src/agent_panel.rs                             |  431 
crates/agent_ui/src/agent_ui.rs                                |    6 
crates/agent_ui/src/buffer_codegen.rs                          |   15 
crates/agent_ui/src/completion_provider.rs                     |   80 
crates/agent_ui/src/conversation_view.rs                       |    2 
crates/agent_ui/src/conversation_view/thread_view.rs           |   49 
crates/agent_ui/src/inline_assistant.rs                        |   78 
crates/agent_ui/src/mention_set.rs                             |   29 
crates/agent_ui/src/message_editor.rs                          |   92 
crates/agent_ui/src/terminal_codegen.rs                        |   11 
crates/agent_ui/src/terminal_inline_assistant.rs               |   13 
crates/agent_ui/src/thread_metadata_store.rs                   |  119 
crates/agent_ui/src/threads_archive_view.rs                    |  616 
crates/buffer_diff/src/buffer_diff.rs                          |   14 
crates/call/src/call_impl/room.rs                              |    6 
crates/client/src/client.rs                                    |    2 
crates/collab/migrations.sqlite/20221109000000_test_schema.sql |    3 
crates/collab/migrations/20251208000000_test_schema.sql        |    3 
crates/collab/src/db.rs                                        |    1 
crates/collab/src/db/queries/projects.rs                       |    4 
crates/collab/src/db/tables/project.rs                         |    1 
crates/collab/src/rpc.rs                                       |   25 
crates/collab/tests/integration/channel_buffer_tests.rs        |    2 
crates/collab/tests/integration/db_tests/db_tests.rs           |   39 
crates/collab/tests/integration/following_tests.rs             |   17 
crates/collab_ui/src/collab_panel.rs                           |    4 
crates/csv_preview/src/csv_preview.rs                          |   50 
crates/csv_preview/src/parser.rs                               |    5 
crates/csv_preview/src/renderer/render_table.rs                |   43 
crates/csv_preview/src/renderer/row_identifiers.rs             |    1 
crates/csv_preview/src/renderer/table_cell.rs                  |    1 
crates/debugger_ui/src/debugger_ui.rs                          |    2 
crates/debugger_ui/src/session/running/console.rs              |    5 
crates/dev_container/Cargo.toml                                |   13 
crates/dev_container/src/command_json.rs                       |   64 
crates/dev_container/src/devcontainer_api.rs                   |  586 
crates/dev_container/src/devcontainer_json.rs                  | 1358 +
crates/dev_container/src/devcontainer_manifest.rs              | 4566 ++++
crates/dev_container/src/docker.rs                             |  463 
crates/dev_container/src/features.rs                           |  254 
crates/dev_container/src/lib.rs                                |  413 
crates/dev_container/src/oci.rs                                |  470 
crates/diagnostics/src/buffer_diagnostics.rs                   |   49 
crates/diagnostics/src/diagnostic_renderer.rs                  |   28 
crates/diagnostics/src/diagnostics.rs                          |   50 
crates/edit_prediction/src/capture_example.rs                  |    2 
crates/edit_prediction/src/edit_prediction.rs                  |   14 
crates/edit_prediction/src/udiff.rs                            |    3 
crates/edit_prediction_ui/src/edit_prediction_context_view.rs  |   12 
crates/edit_prediction_ui/src/rate_prediction_modal.rs         |   40 
crates/editor/src/bracket_colorization.rs                      |  214 
crates/editor/src/code_completion_tests.rs                     |    4 
crates/editor/src/code_context_menus.rs                        |   18 
crates/editor/src/display_map.rs                               |  123 
crates/editor/src/display_map/block_map.rs                     |  151 
crates/editor/src/display_map/crease_map.rs                    |    2 
crates/editor/src/display_map/fold_map.rs                      |   22 
crates/editor/src/display_map/inlay_map.rs                     |   22 
crates/editor/src/document_colors.rs                           |  105 
crates/editor/src/document_symbols.rs                          |   84 
crates/editor/src/edit_prediction_tests.rs                     |    9 
crates/editor/src/editor.rs                                    |  556 
crates/editor/src/editor_tests.rs                              |  366 
crates/editor/src/element.rs                                   |  213 
crates/editor/src/folding_ranges.rs                            |    6 
crates/editor/src/git/blame.rs                                 |   19 
crates/editor/src/hover_links.rs                               |   40 
crates/editor/src/hover_popover.rs                             |   18 
crates/editor/src/inlays.rs                                    |    1 
crates/editor/src/inlays/inlay_hints.rs                        |   85 
crates/editor/src/items.rs                                     |  378 
crates/editor/src/jsx_tag_auto_close.rs                        |   11 
crates/editor/src/linked_editing_ranges.rs                     |   25 
crates/editor/src/lsp_ext.rs                                   |   28 
crates/editor/src/mouse_context_menu.rs                        |   21 
crates/editor/src/movement.rs                                  |   36 
crates/editor/src/runnables.rs                                 |   73 
crates/editor/src/rust_analyzer_ext.rs                         |   58 
crates/editor/src/scroll.rs                                    |    4 
crates/editor/src/scroll/actions.rs                            |    2 
crates/editor/src/selections_collection.rs                     |  109 
crates/editor/src/semantic_tokens.rs                           |   20 
crates/editor/src/signature_help.rs                            |    8 
crates/editor/src/split.rs                                     |  660 
crates/editor/src/split_editor_view.rs                         |   17 
crates/editor/src/tasks.rs                                     |  101 
crates/editor/src/test.rs                                      |    4 
crates/editor/src/test/editor_test_context.rs                  |  106 
crates/encoding_selector/src/active_buffer_encoding.rs         |    2 
crates/encoding_selector/src/encoding_selector.rs              |    4 
crates/git_graph/src/git_graph.rs                              |  165 
crates/git_ui/src/commit_view.rs                               |    7 
crates/git_ui/src/conflict_view.rs                             |  123 
crates/git_ui/src/git_panel.rs                                 |    7 
crates/git_ui/src/project_diff.rs                              |   70 
crates/git_ui/src/text_diff_view.rs                            |   36 
crates/go_to_line/src/cursor_position.rs                       |   31 
crates/go_to_line/src/go_to_line.rs                            |   12 
crates/gpui/src/elements/div.rs                                |  215 
crates/gpui/src/window.rs                                      |    9 
crates/inspector_ui/src/div_inspector.rs                       |    4 
crates/keymap_editor/src/action_completion_provider.rs         |    1 
crates/keymap_editor/src/keymap_editor.rs                      |   56 
crates/language/src/diagnostic_set.rs                          |   22 
crates/language/src/proto.rs                                   |    8 
crates/language/src/syntax_map.rs                              |   40 
crates/language_model/Cargo.toml                               |    1 
crates/language_model/src/language_model.rs                    |  193 
crates/language_model/src/provider.rs                          |   12 
crates/language_model/src/provider/anthropic.rs                |   80 
crates/language_model/src/provider/google.rs                   |    5 
crates/language_model/src/provider/open_ai.rs                  |   28 
crates/language_model/src/provider/open_router.rs              |   69 
crates/language_model/src/provider/x_ai.rs                     |    4 
crates/language_model/src/provider/zed.rs                      |    5 
crates/language_model/src/registry.rs                          |    2 
crates/language_models/src/provider/anthropic.rs               |    9 
crates/language_models/src/provider/anthropic/telemetry.rs     |    6 
crates/language_models/src/provider/cloud.rs                   |   37 
crates/language_models/src/provider/google.rs                  |   10 
crates/language_models/src/provider/open_ai.rs                 |    7 
crates/language_selector/src/active_buffer_language.rs         |    2 
crates/language_selector/src/language_selector.rs              |   23 
crates/language_tools/src/highlights_tree_view.rs              |  176 
crates/language_tools/src/lsp_button.rs                        |   17 
crates/language_tools/src/syntax_tree_view.rs                  |   19 
crates/languages/src/eslint.rs                                 |  421 
crates/languages/src/lib.rs                                    |    2 
crates/line_ending_selector/src/line_ending_indicator.rs       |    2 
crates/line_ending_selector/src/line_ending_selector.rs        |    2 
crates/markdown/src/html/html_rendering.rs                     |   11 
crates/markdown/src/markdown.rs                                |   61 
crates/markdown/src/mermaid.rs                                 |   11 
crates/markdown_preview/src/markdown_preview_view.rs           |   94 
crates/migrator/src/migrations.rs                              |    4 
crates/migrator/src/migrations/m_2026_03_30/settings.rs        |   29 
crates/migrator/src/migrations/m_2026_03_31/settings.rs        |   29 
crates/migrator/src/migrator.rs                                |  236 
crates/multi_buffer/src/anchor.rs                              |  516 
crates/multi_buffer/src/multi_buffer.rs                        |  650 
crates/multi_buffer/src/multi_buffer_tests.rs                  |  991 
crates/multi_buffer/src/path_key.rs                            |  849 
crates/multi_buffer/src/transaction.rs                         |   81 
crates/outline/src/outline.rs                                  |   42 
crates/outline_panel/src/outline_panel.rs                      |  566 
crates/project/src/git_store.rs                                |    9 
crates/project/src/lsp_store/semantic_tokens.rs                |    2 
crates/project/src/project.rs                                  |    6 
crates/project/tests/integration/project_tests.rs              |   33 
crates/proto/proto/buffer.proto                                |    7 
crates/proto/proto/call.proto                                  |   19 
crates/recent_projects/src/recent_projects.rs                  |    9 
crates/recent_projects/src/remote_connections.rs               |    1 
crates/recent_projects/src/remote_servers.rs                   |   23 
crates/remote/src/transport/docker.rs                          |   22 
crates/search/src/buffer_search.rs                             |   22 
crates/search/src/project_search.rs                            |   57 
crates/settings_content/src/agent.rs                           |   44 
crates/settings_content/src/settings_content.rs                |    5 
crates/settings_ui/src/page_data.rs                            |    4 
crates/settings_ui/src/settings_ui.rs                          |    1 
crates/sidebar/src/sidebar.rs                                  |    1 
crates/tasks_ui/src/tasks_ui.rs                                |   12 
crates/text/src/anchor.rs                                      |   48 
crates/text/src/patch.rs                                       |    3 
crates/text/src/text.rs                                        |   33 
crates/toolchain_selector/src/active_toolchain.rs              |    2 
crates/toolchain_selector/src/toolchain_selector.rs            |    4 
crates/ui/src/components/data_table.rs                         |  672 
crates/ui/src/components/data_table/tests.rs                   |    4 
crates/util/src/command.rs                                     |    8 
crates/util/src/command/darwin.rs                              |    8 
crates/vim/src/command.rs                                      |    4 
crates/vim/src/helix.rs                                        |    1 
crates/vim/src/motion.rs                                       |   40 
crates/vim/src/normal.rs                                       |    2 
crates/vim/src/normal/search.rs                                |   41 
crates/vim/src/object.rs                                       |  289 
crates/vim/src/state.rs                                        |   37 
crates/vim/src/test.rs                                         |   15 
crates/vim/src/vim.rs                                          |   10 
crates/workspace/src/notifications.rs                          |    9 
crates/workspace/src/pane_group.rs                             |   10 
crates/workspace/src/persistence.rs                            |   27 
crates/workspace/src/workspace.rs                              |    4 
crates/zed/Cargo.toml                                          |    2 
crates/zed/src/visual_test_runner.rs                           |    4 
crates/zed/src/zed/telemetry_log.rs                            |    9 
docs/src/ai/agent-settings.md                                  |    9 
flake.lock                                                     |   30 
nix/build.nix                                                  |    6 
nix/livekit-libwebrtc/package.nix                              |    6 
203 files changed, 15,541 insertions(+), 6,172 deletions(-)

Detailed changes

Cargo.lock 🔗

@@ -4729,6 +4729,9 @@ dependencies = [
 name = "dev_container"
 version = "0.1.0"
 dependencies = [
+ "async-tar",
+ "async-trait",
+ "env_logger 0.11.8",
  "fs",
  "futures 0.3.31",
  "gpui",
@@ -4736,16 +4739,17 @@ dependencies = [
  "http_client",
  "log",
  "menu",
- "node_runtime",
  "paths",
  "picker",
  "project",
  "serde",
  "serde_json",
+ "serde_json_lenient",
  "settings",
- "smol",
+ "shlex",
  "ui",
  "util",
+ "walkdir",
  "workspace",
  "worktree",
 ]
@@ -9330,7 +9334,6 @@ dependencies = [
  "schemars",
  "serde",
  "serde_json",
- "settings",
  "smol",
  "thiserror 2.0.17",
  "util",
@@ -21890,7 +21893,7 @@ dependencies = [
 
 [[package]]
 name = "zed"
-version = "0.231.0"
+version = "0.232.0"
 dependencies = [
  "acp_thread",
  "acp_tools",

assets/settings/default.json 🔗

@@ -1102,11 +1102,14 @@
     // "all_screens" - Show these notifications on all screens
     // "never" - Never show these notifications
     "notify_when_agent_waiting": "primary_screen",
-    // Whether to play a sound when the agent has either completed
+    // When to play a sound when the agent has either completed
     // its response, or needs user input.
-
-    // Default: false
-    "play_sound_when_agent_done": false,
+    // "never" - Never play the sound
+    // "when_hidden" - Only play the sound when the agent panel is not visible
+    // "always" - Always play the sound
+    //
+    // Default: never
+    "play_sound_when_agent_done": "never",
     // Whether to have edit cards in the agent panel expanded, showing a preview of the full diff.
     //
     // Default: true
@@ -1117,8 +1120,8 @@
     "expand_terminal_card": true,
     // How thinking blocks should be displayed by default in the agent panel.
     //
-    // Default: automatic
-    "thinking_display": "automatic",
+    // Default: auto
+    "thinking_display": "auto",
     // Whether clicking the stop button on a running terminal tool should also cancel the agent's generation.
     // Note that this only applies to the stop button, not to ctrl+c inside the terminal.
     //

crates/acp_thread/src/acp_thread.rs 🔗

@@ -2616,7 +2616,7 @@ impl AcpThread {
                     text_diff(old_text.as_str(), &content)
                         .into_iter()
                         .map(|(range, replacement)| {
-                            (snapshot.anchor_range_around(range), replacement)
+                            (snapshot.anchor_range_inside(range), replacement)
                         })
                         .collect::<Vec<_>>()
                 })

crates/acp_thread/src/diff.rs 🔗

@@ -191,7 +191,7 @@ impl Diff {
     }
 
     pub fn has_revealed_range(&self, cx: &App) -> bool {
-        self.multibuffer().read(cx).paths().next().is_some()
+        !self.multibuffer().read(cx).is_empty()
     }
 
     pub fn needs_update(&self, old_text: &str, new_text: &str, cx: &App) -> bool {

crates/acp_tools/src/acp_tools.rs 🔗

@@ -13,7 +13,7 @@ use gpui::{
     StyleRefinement, Subscription, Task, TextStyleRefinement, Window, actions, list, prelude::*,
 };
 use language::LanguageRegistry;
-use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle};
+use markdown::{CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownStyle};
 use project::{AgentId, Project};
 use settings::Settings;
 use theme_settings::ThemeSettings;
@@ -384,8 +384,11 @@ impl AcpTools {
                             )
                             .code_block_renderer(
                                 CodeBlockRenderer::Default {
-                                    copy_button: false,
-                                    copy_button_on_hover: expanded,
+                                    copy_button_visibility: if expanded {
+                                        CopyButtonVisibility::VisibleOnHover
+                                    } else {
+                                        CopyButtonVisibility::Hidden
+                                    },
                                     border: false,
                                 },
                             ),

crates/action_log/src/action_log.rs 🔗

@@ -738,6 +738,7 @@ impl ActionLog {
                 let task = if let Some(existing_file_content) = existing_file_content {
                     // Capture the agent's content before restoring existing file content
                     let agent_content = buffer.read(cx).text();
+                    let buffer_id = buffer.read(cx).remote_id();
 
                     buffer.update(cx, |buffer, cx| {
                         buffer.start_transaction();
@@ -750,7 +751,10 @@ impl ActionLog {
 
                     undo_info = Some(PerBufferUndo {
                         buffer: buffer.downgrade(),
-                        edits_to_restore: vec![(Anchor::MIN..Anchor::MAX, agent_content)],
+                        edits_to_restore: vec![(
+                            Anchor::min_for_buffer(buffer_id)..Anchor::max_for_buffer(buffer_id),
+                            agent_content,
+                        )],
                         status: UndoBufferStatus::Created {
                             had_existing_content: true,
                         },
@@ -990,8 +994,8 @@ impl ActionLog {
                 let mut valid_edits = Vec::new();
 
                 for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore {
-                    if anchor_range.start.buffer_id == Some(buffer.remote_id())
-                        && anchor_range.end.buffer_id == Some(buffer.remote_id())
+                    if anchor_range.start.buffer_id == buffer.remote_id()
+                        && anchor_range.end.buffer_id == buffer.remote_id()
                     {
                         valid_edits.push((anchor_range, text_to_restore));
                     }

crates/agent/src/edit_agent.rs 🔗

@@ -374,13 +374,13 @@ impl EditAgent {
                         buffer.edit(edits.iter().cloned(), None, cx);
                         let max_edit_end = buffer
                             .summaries_for_anchors::<Point, _>(
-                                edits.iter().map(|(range, _)| &range.end),
+                                edits.iter().map(|(range, _)| range.end),
                             )
                             .max()
                             .unwrap();
                         let min_edit_start = buffer
                             .summaries_for_anchors::<Point, _>(
-                                edits.iter().map(|(range, _)| &range.start),
+                                edits.iter().map(|(range, _)| range.start),
                             )
                             .min()
                             .unwrap();

crates/agent/src/tool_permissions.rs 🔗

@@ -563,7 +563,7 @@ mod tests {
     use crate::tools::{DeletePathTool, EditFileTool, FetchTool, TerminalTool};
     use agent_settings::{AgentProfileId, CompiledRegex, InvalidRegexPattern, ToolRules};
     use gpui::px;
-    use settings::{DockPosition, NotifyWhenAgentWaiting};
+    use settings::{DockPosition, NotifyWhenAgentWaiting, PlaySoundWhenAgentDone};
     use std::sync::Arc;
 
     fn test_agent_settings(tool_permissions: ToolPermissions) -> AgentSettings {
@@ -584,7 +584,7 @@ mod tests {
             default_profile: AgentProfileId::default(),
             profiles: Default::default(),
             notify_when_agent_waiting: NotifyWhenAgentWaiting::default(),
-            play_sound_when_agent_done: false,
+            play_sound_when_agent_done: PlaySoundWhenAgentDone::default(),
             single_file_review: false,
             model_parameters: vec![],
             enable_feedback: false,

crates/agent/src/tools/streaming_edit_file_tool.rs 🔗

@@ -760,7 +760,7 @@ impl EditSession {
                     {
                         if let Some(match_range) = matcher.push(chunk, None) {
                             let anchor_range = self.buffer.read_with(cx, |buffer, _cx| {
-                                buffer.anchor_range_between(match_range.clone())
+                                buffer.anchor_range_outside(match_range.clone())
                             });
                             self.diff
                                 .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx));
@@ -795,7 +795,7 @@ impl EditSession {
 
                     let anchor_range = self
                         .buffer
-                        .read_with(cx, |buffer, _cx| buffer.anchor_range_between(range.clone()));
+                        .read_with(cx, |buffer, _cx| buffer.anchor_range_outside(range.clone()));
                     self.diff
                         .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx));
 
@@ -953,7 +953,7 @@ fn apply_char_operations(
             }
             CharOperation::Delete { bytes } => {
                 let delete_end = *edit_cursor + bytes;
-                let anchor_range = snapshot.anchor_range_around(*edit_cursor..delete_end);
+                let anchor_range = snapshot.anchor_range_inside(*edit_cursor..delete_end);
                 agent_edit_buffer(&buffer, [(anchor_range, "")], action_log, cx);
                 *edit_cursor = delete_end;
             }

crates/agent_settings/src/agent_settings.rs 🔗

@@ -13,8 +13,8 @@ use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use settings::{
     DockPosition, DockSide, LanguageModelParameters, LanguageModelSelection, NewThreadLocation,
-    NotifyWhenAgentWaiting, RegisterSetting, Settings, SettingsContent, SettingsStore,
-    SidebarDockPosition, SidebarSide, ThinkingBlockDisplay, ToolPermissionMode,
+    NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, RegisterSetting, Settings, SettingsContent,
+    SettingsStore, SidebarDockPosition, SidebarSide, ThinkingBlockDisplay, ToolPermissionMode,
     update_settings_file,
 };
 
@@ -165,7 +165,7 @@ pub struct AgentSettings {
     pub profiles: IndexMap<AgentProfileId, AgentProfileSettings>,
 
     pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
-    pub play_sound_when_agent_done: bool,
+    pub play_sound_when_agent_done: PlaySoundWhenAgentDone,
     pub single_file_review: bool,
     pub model_parameters: Vec<LanguageModelParameters>,
     pub enable_feedback: bool,
@@ -618,7 +618,7 @@ impl Settings for AgentSettings {
                 .collect(),
 
             notify_when_agent_waiting: agent.notify_when_agent_waiting.unwrap(),
-            play_sound_when_agent_done: agent.play_sound_when_agent_done.unwrap(),
+            play_sound_when_agent_done: agent.play_sound_when_agent_done.unwrap_or_default(),
             single_file_review: agent.single_file_review.unwrap(),
             model_parameters: agent.model_parameters,
             enable_feedback: agent.enable_feedback.unwrap(),

crates/agent_ui/src/agent_diff.rs 🔗

@@ -138,11 +138,12 @@ impl AgentDiffPane {
             path_a.cmp(&path_b)
         });
 
-        let mut paths_to_delete = self
+        let mut buffers_to_delete = self
             .multibuffer
             .read(cx)
-            .paths()
-            .cloned()
+            .snapshot(cx)
+            .excerpts()
+            .map(|excerpt| excerpt.context.start.buffer_id)
             .collect::<HashSet<_>>();
 
         for (buffer, diff_handle) in sorted_buffers {
@@ -151,7 +152,7 @@ impl AgentDiffPane {
             }
 
             let path_key = PathKey::for_buffer(&buffer, cx);
-            paths_to_delete.remove(&path_key);
+            buffers_to_delete.remove(&buffer.read(cx).remote_id());
 
             let snapshot = buffer.read(cx).snapshot();
 
@@ -168,7 +169,7 @@ impl AgentDiffPane {
             let (was_empty, is_excerpt_newly_added) =
                 self.multibuffer.update(cx, |multibuffer, cx| {
                     let was_empty = multibuffer.is_empty();
-                    let (_, is_excerpt_newly_added) = multibuffer.set_excerpts_for_path(
+                    let is_excerpt_newly_added = multibuffer.update_excerpts_for_path(
                         path_key.clone(),
                         buffer.clone(),
                         diff_hunk_ranges,
@@ -183,13 +184,13 @@ impl AgentDiffPane {
                 if was_empty {
                     let first_hunk = editor
                         .diff_hunks_in_ranges(
-                            &[editor::Anchor::min()..editor::Anchor::max()],
+                            &[editor::Anchor::Min..editor::Anchor::Max],
                             &self.multibuffer.read(cx).read(cx),
                         )
                         .next();
 
                     if let Some(first_hunk) = first_hunk {
-                        let first_hunk_start = first_hunk.multi_buffer_range().start;
+                        let first_hunk_start = first_hunk.multi_buffer_range.start;
                         editor.change_selections(Default::default(), window, cx, |selections| {
                             selections.select_anchor_ranges([first_hunk_start..first_hunk_start]);
                         })
@@ -208,8 +209,8 @@ impl AgentDiffPane {
         }
 
         self.multibuffer.update(cx, |multibuffer, cx| {
-            for path in paths_to_delete {
-                multibuffer.remove_excerpts_for_path(path, cx);
+            for buffer_id in buffers_to_delete {
+                multibuffer.remove_excerpts_for_buffer(buffer_id, cx);
             }
         });
 
@@ -239,13 +240,13 @@ impl AgentDiffPane {
             self.editor.update(cx, |editor, cx| {
                 let first_hunk = editor
                     .diff_hunks_in_ranges(
-                        &[position..editor::Anchor::max()],
+                        &[position..editor::Anchor::Max],
                         &self.multibuffer.read(cx).read(cx),
                     )
                     .next();
 
                 if let Some(first_hunk) = first_hunk {
-                    let first_hunk_start = first_hunk.multi_buffer_range().start;
+                    let first_hunk_start = first_hunk.multi_buffer_range.start;
                     editor.change_selections(Default::default(), window, cx, |selections| {
                         selections.select_anchor_ranges([first_hunk_start..first_hunk_start]);
                     })
@@ -282,7 +283,7 @@ impl AgentDiffPane {
                 editor,
                 &snapshot,
                 &self.thread,
-                vec![editor::Anchor::min()..editor::Anchor::max()],
+                vec![editor::Anchor::Min..editor::Anchor::Max],
                 self.workspace.clone(),
                 window,
                 cx,
@@ -451,20 +452,20 @@ fn update_editor_selection(
         diff_hunks
             .last()
             .and_then(|last_kept_hunk| {
-                let last_kept_hunk_end = last_kept_hunk.multi_buffer_range().end;
+                let last_kept_hunk_end = last_kept_hunk.multi_buffer_range.end;
                 editor
                     .diff_hunks_in_ranges(
-                        &[last_kept_hunk_end..editor::Anchor::max()],
+                        &[last_kept_hunk_end..editor::Anchor::Max],
                         buffer_snapshot,
                     )
                     .nth(1)
             })
             .or_else(|| {
                 let first_kept_hunk = diff_hunks.first()?;
-                let first_kept_hunk_start = first_kept_hunk.multi_buffer_range().start;
+                let first_kept_hunk_start = first_kept_hunk.multi_buffer_range.start;
                 editor
                     .diff_hunks_in_ranges(
-                        &[editor::Anchor::min()..first_kept_hunk_start],
+                        &[editor::Anchor::Min..first_kept_hunk_start],
                         buffer_snapshot,
                     )
                     .next()
@@ -473,7 +474,7 @@ fn update_editor_selection(
 
     if let Some(target_hunk) = target_hunk {
         editor.change_selections(Default::default(), window, cx, |selections| {
-            let next_hunk_start = target_hunk.multi_buffer_range().start;
+            let next_hunk_start = target_hunk.multi_buffer_range.start;
             selections.select_anchor_ranges([next_hunk_start..next_hunk_start]);
         })
     }
@@ -1567,7 +1568,7 @@ impl AgentDiff {
                     editor.update(cx, |editor, cx| {
                         let snapshot = multibuffer.read(cx).snapshot(cx);
                         if let Some(first_hunk) = snapshot.diff_hunks().next() {
-                            let first_hunk_start = first_hunk.multi_buffer_range().start;
+                            let first_hunk_start = first_hunk.multi_buffer_range.start;
 
                             editor.change_selections(
                                 SelectionEffects::scroll(Autoscroll::center()),
@@ -1648,7 +1649,7 @@ impl AgentDiff {
                 editor,
                 &snapshot,
                 thread,
-                vec![editor::Anchor::min()..editor::Anchor::max()],
+                vec![editor::Anchor::Min..editor::Anchor::Max],
                 window,
                 cx,
             );
@@ -1669,7 +1670,7 @@ impl AgentDiff {
                 editor,
                 &snapshot,
                 thread,
-                vec![editor::Anchor::min()..editor::Anchor::max()],
+                vec![editor::Anchor::Min..editor::Anchor::Max],
                 workspace.clone(),
                 window,
                 cx,

crates/agent_ui/src/agent_panel.rs πŸ”—

@@ -66,7 +66,10 @@ use project::project_settings::ProjectSettings;
 use project::{Project, ProjectPath, Worktree};
 use prompt_store::{PromptStore, UserPromptId};
 use rules_library::{RulesLibrary, open_rules_library};
+use settings::TerminalDockPosition;
 use settings::{Settings, update_settings_file};
+use terminal::terminal_settings::TerminalSettings;
+use terminal_view::{TerminalView, terminal_panel::TerminalPanel};
 use theme_settings::ThemeSettings;
 use ui::{
     Button, Callout, CommonAnimationExt, ContextMenu, ContextMenuEntry, DocumentationSide,
@@ -86,6 +89,30 @@ use zed_actions::{
 
 const AGENT_PANEL_KEY: &str = "agent_panel";
 const RECENTLY_UPDATED_MENU_LIMIT: usize = 6;
+const LAST_USED_AGENT_KEY: &str = "agent_panel__last_used_external_agent";
+
+/// JSON-serialized wrapper for the globally most-recently-used agent,
+/// persisted in the key-value store under `LAST_USED_AGENT_KEY`.
+#[derive(Serialize, Deserialize)]
+struct LastUsedAgent {
+    agent: Agent,
+}
+
+/// Reads the most recently used agent across all workspaces. Used as a fallback
+/// when opening a workspace that has no per-workspace agent preference yet.
+///
+/// Returns `None` (after logging via `log_err`) when the key is absent, the
+/// read fails, or the stored JSON does not deserialize into a `LastUsedAgent`.
+fn read_global_last_used_agent(kvp: &KeyValueStore) -> Option<Agent> {
+    kvp.read_kvp(LAST_USED_AGENT_KEY)
+        .log_err()
+        .flatten()
+        .and_then(|json| serde_json::from_str::<LastUsedAgent>(&json).log_err())
+        .map(|entry| entry.agent)
+}
+
+/// Persists `agent` as the globally most-recently-used agent so other
+/// workspaces can inherit it (see `read_global_last_used_agent`).
+///
+/// Best-effort: serialization or KVP write failures are logged and otherwise
+/// ignored — callers never observe an error.
+async fn write_global_last_used_agent(kvp: KeyValueStore, agent: Agent) {
+    if let Some(json) = serde_json::to_string(&LastUsedAgent { agent }).log_err() {
+        kvp.write_kvp(LAST_USED_AGENT_KEY.to_string(), json)
+            .await
+            .log_err();
+    }
+}
 
 fn read_serialized_panel(
     workspace_id: workspace::WorkspaceId,
@@ -399,6 +426,48 @@ pub fn init(cx: &mut App) {
                 })
                 .register_action(
                     |workspace: &mut Workspace, _: &AddSelectionToThread, window, cx| {
+                        let active_editor = workspace
+                            .active_item(cx)
+                            .and_then(|item| item.act_as::<Editor>(cx));
+                        let has_editor_selection = active_editor.is_some_and(|editor| {
+                            editor.update(cx, |editor, cx| {
+                                editor.has_non_empty_selection(&editor.display_snapshot(cx))
+                            })
+                        });
+
+                        let has_terminal_selection = workspace
+                            .active_item(cx)
+                            .and_then(|item| item.act_as::<TerminalView>(cx))
+                            .is_some_and(|terminal_view| {
+                                terminal_view
+                                    .read(cx)
+                                    .terminal()
+                                    .read(cx)
+                                    .last_content
+                                    .selection_text
+                                    .as_ref()
+                                    .is_some_and(|text| !text.is_empty())
+                            });
+
+                        let has_terminal_panel_selection =
+                            workspace.panel::<TerminalPanel>(cx).is_some_and(|panel| {
+                                let position = match TerminalSettings::get_global(cx).dock {
+                                    TerminalDockPosition::Left => DockPosition::Left,
+                                    TerminalDockPosition::Bottom => DockPosition::Bottom,
+                                    TerminalDockPosition::Right => DockPosition::Right,
+                                };
+                                let dock_is_open =
+                                    workspace.dock_at_position(position).read(cx).is_open();
+                                dock_is_open && !panel.read(cx).terminal_selections(cx).is_empty()
+                            });
+
+                        if !has_editor_selection
+                            && !has_terminal_selection
+                            && !has_terminal_panel_selection
+                        {
+                            return;
+                        }
+
                         let Some(panel) = workspace.panel::<AgentPanel>(cx) else {
                             return;
                         };
@@ -665,13 +734,18 @@ impl AgentPanel {
                 .ok()
                 .flatten();
 
-            let serialized_panel = cx
+            let (serialized_panel, global_last_used_agent) = cx
                 .background_spawn(async move {
-                    kvp.and_then(|kvp| {
-                        workspace_id
-                            .and_then(|id| read_serialized_panel(id, &kvp))
-                            .or_else(|| read_legacy_serialized_panel(&kvp))
-                    })
+                    match kvp {
+                        Some(kvp) => {
+                            let panel = workspace_id
+                                .and_then(|id| read_serialized_panel(id, &kvp))
+                                .or_else(|| read_legacy_serialized_panel(&kvp));
+                            let global_agent = read_global_last_used_agent(&kvp);
+                            (panel, global_agent)
+                        }
+                        None => (None, None),
+                    }
                 })
                 .await;
 
@@ -710,10 +784,21 @@ impl AgentPanel {
                 let panel =
                     cx.new(|cx| Self::new(workspace, prompt_store, window, cx));
 
-                if let Some(serialized_panel) = &serialized_panel {
-                    panel.update(cx, |panel, cx| {
+                panel.update(cx, |panel, cx| {
+                    let is_via_collab = panel.project.read(cx).is_via_collab();
+
+                    // Only apply a non-native global fallback to local projects.
+                    // Collab workspaces only support NativeAgent, so inheriting a
+                    // custom agent would cause set_active β†’ new_agent_thread_inner
+                    // to bypass the collab guard in external_thread.
+                    let global_fallback = global_last_used_agent
+                        .filter(|agent| !is_via_collab || agent.is_native());
+
+                    if let Some(serialized_panel) = &serialized_panel {
                         if let Some(selected_agent) = serialized_panel.selected_agent.clone() {
                             panel.selected_agent = selected_agent;
+                        } else if let Some(agent) = global_fallback {
+                            panel.selected_agent = agent;
                         }
                         if let Some(start_thread_in) = serialized_panel.start_thread_in {
                             let is_worktree_flag_enabled =
@@ -734,9 +819,11 @@ impl AgentPanel {
                                 );
                             }
                         }
-                        cx.notify();
-                    });
-                }
+                    } else if let Some(agent) = global_fallback {
+                        panel.selected_agent = agent;
+                    }
+                    cx.notify();
+                });
 
                 if let Some(thread_info) = last_active_thread {
                     let agent = thread_info.agent_type.clone();
@@ -1069,85 +1156,30 @@ impl AgentPanel {
         let workspace = self.workspace.clone();
         let project = self.project.clone();
         let fs = self.fs.clone();
-        let is_via_collab = self.project.read(cx).is_via_collab();
-
-        const LAST_USED_EXTERNAL_AGENT_KEY: &str = "agent_panel__last_used_external_agent";
-
-        #[derive(Serialize, Deserialize)]
-        struct LastUsedExternalAgent {
-            agent: crate::Agent,
-        }
-
         let thread_store = self.thread_store.clone();
-        let kvp = KeyValueStore::global(cx);
-
-        if let Some(agent) = agent_choice {
-            cx.background_spawn({
-                let agent = agent.clone();
-                let kvp = kvp;
-                async move {
-                    if let Some(serialized) =
-                        serde_json::to_string(&LastUsedExternalAgent { agent }).log_err()
-                    {
-                        kvp.write_kvp(LAST_USED_EXTERNAL_AGENT_KEY.to_string(), serialized)
-                            .await
-                            .log_err();
-                    }
-                }
-            })
-            .detach();
 
-            let server = agent.server(fs, thread_store);
-            self.create_agent_thread(
-                server,
-                resume_session_id,
-                work_dirs,
-                title,
-                initial_content,
-                workspace,
-                project,
-                agent,
-                focus,
-                window,
-                cx,
-            );
-        } else {
-            cx.spawn_in(window, async move |this, cx| {
-                let ext_agent = if is_via_collab {
-                    Agent::NativeAgent
-                } else {
-                    cx.background_spawn(async move { kvp.read_kvp(LAST_USED_EXTERNAL_AGENT_KEY) })
-                        .await
-                        .log_err()
-                        .flatten()
-                        .and_then(|value| {
-                            serde_json::from_str::<LastUsedExternalAgent>(&value).log_err()
-                        })
-                        .map(|agent| agent.agent)
-                        .unwrap_or(Agent::NativeAgent)
-                };
-
-                let server = ext_agent.server(fs, thread_store);
-                this.update_in(cx, |agent_panel, window, cx| {
-                    agent_panel.create_agent_thread(
-                        server,
-                        resume_session_id,
-                        work_dirs,
-                        title,
-                        initial_content,
-                        workspace,
-                        project,
-                        ext_agent,
-                        focus,
-                        window,
-                        cx,
-                    );
-                })?;
+        let agent = agent_choice.unwrap_or_else(|| {
+            if self.project.read(cx).is_via_collab() {
+                Agent::NativeAgent
+            } else {
+                self.selected_agent.clone()
+            }
+        });
 
-                anyhow::Ok(())
-            })
-            .detach_and_log_err(cx);
-        }
+        let server = agent.server(fs, thread_store);
+        self.create_agent_thread(
+            server,
+            resume_session_id,
+            work_dirs,
+            title,
+            initial_content,
+            workspace,
+            project,
+            agent,
+            focus,
+            window,
+            cx,
+        );
     }
 
     fn deploy_rules_library(
@@ -2102,15 +2134,25 @@ impl AgentPanel {
         initial_content: Option<AgentInitialContent>,
         workspace: WeakEntity<Workspace>,
         project: Entity<Project>,
-        ext_agent: Agent,
+        agent: Agent,
         focus: bool,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        if self.selected_agent != ext_agent {
-            self.selected_agent = ext_agent.clone();
+        if self.selected_agent != agent {
+            self.selected_agent = agent.clone();
             self.serialize(cx);
         }
+
+        cx.background_spawn({
+            let kvp = KeyValueStore::global(cx);
+            let agent = agent.clone();
+            async move {
+                write_global_last_used_agent(kvp, agent).await;
+            }
+        })
+        .detach();
+
         let thread_store = server
             .clone()
             .downcast::<agent::NativeAgentServer>()
@@ -2123,7 +2165,7 @@ impl AgentPanel {
             crate::ConversationView::new(
                 server,
                 connection_store,
-                ext_agent,
+                agent,
                 resume_session_id,
                 work_dirs,
                 title,
@@ -5611,4 +5653,211 @@ mod tests {
             "Thread A work_dirs should revert to only /project_a after removing /project_b"
         );
     }
+
+    /// A brand-new workspace with no serialized per-workspace panel state
+    /// should fall back to the agent recorded in the global KVP.
+    #[gpui::test]
+    async fn test_new_workspace_inherits_global_last_used_agent(cx: &mut TestAppContext) {
+        init_test(cx);
+        cx.update(|cx| {
+            cx.update_flags(true, vec!["agent-v2".to_string()]);
+            agent::ThreadStore::init_global(cx);
+            language_model::LanguageModelRegistry::test(cx);
+            // Use an isolated DB so parallel tests can't overwrite our global key.
+            cx.set_global(db::AppDatabase::test_new());
+        });
+
+        let custom_agent = Agent::Custom {
+            id: "my-preferred-agent".into(),
+        };
+
+        // Write a known agent to the global KVP to simulate a user who has
+        // previously used this agent in another workspace.
+        let kvp = cx.update(|cx| KeyValueStore::global(cx));
+        write_global_last_used_agent(kvp, custom_agent.clone()).await;
+
+        let fs = FakeFs::new(cx.executor());
+        let project = Project::test(fs.clone(), [], cx).await;
+
+        let multi_workspace =
+            cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+        let workspace = multi_workspace
+            .read_with(cx, |multi_workspace, _cx| {
+                multi_workspace.workspace().clone()
+            })
+            .unwrap();
+
+        workspace.update(cx, |workspace, _cx| {
+            workspace.set_random_database_id();
+        });
+
+        let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx);
+
+        // Load the panel via `load()`, which reads the global fallback
+        // asynchronously when no per-workspace state exists.
+        let async_cx = cx.update(|window, cx| window.to_async(cx));
+        let panel = AgentPanel::load(workspace.downgrade(), async_cx)
+            .await
+            .expect("panel load should succeed");
+        cx.run_until_parked();
+
+        panel.read_with(cx, |panel, _cx| {
+            assert_eq!(
+                panel.selected_agent, custom_agent,
+                "new workspace should inherit the global last-used agent"
+            );
+        });
+    }
+
+    /// Two workspaces in the same window serialize and restore their own
+    /// `selected_agent` independently — neither inherits the other's choice.
+    #[gpui::test]
+    async fn test_workspaces_maintain_independent_agent_selection(cx: &mut TestAppContext) {
+        init_test(cx);
+        cx.update(|cx| {
+            cx.update_flags(true, vec!["agent-v2".to_string()]);
+            agent::ThreadStore::init_global(cx);
+            language_model::LanguageModelRegistry::test(cx);
+        });
+
+        let fs = FakeFs::new(cx.executor());
+        let project_a = Project::test(fs.clone(), [], cx).await;
+        let project_b = Project::test(fs, [], cx).await;
+
+        let multi_workspace =
+            cx.add_window(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
+
+        let workspace_a = multi_workspace
+            .read_with(cx, |multi_workspace, _cx| {
+                multi_workspace.workspace().clone()
+            })
+            .unwrap();
+
+        let workspace_b = multi_workspace
+            .update(cx, |multi_workspace, window, cx| {
+                multi_workspace.test_add_workspace(project_b.clone(), window, cx)
+            })
+            .unwrap();
+
+        // Distinct database ids so each workspace serializes to its own row.
+        workspace_a.update(cx, |workspace, _cx| {
+            workspace.set_random_database_id();
+        });
+        workspace_b.update(cx, |workspace, _cx| {
+            workspace.set_random_database_id();
+        });
+
+        let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx);
+
+        let agent_a = Agent::Custom {
+            id: "agent-alpha".into(),
+        };
+        let agent_b = Agent::Custom {
+            id: "agent-beta".into(),
+        };
+
+        // Set up workspace A with agent_a
+        let panel_a = workspace_a.update_in(cx, |workspace, window, cx| {
+            cx.new(|cx| AgentPanel::new(workspace, None, window, cx))
+        });
+        panel_a.update(cx, |panel, _cx| {
+            panel.selected_agent = agent_a.clone();
+        });
+
+        // Set up workspace B with agent_b
+        let panel_b = workspace_b.update_in(cx, |workspace, window, cx| {
+            cx.new(|cx| AgentPanel::new(workspace, None, window, cx))
+        });
+        panel_b.update(cx, |panel, _cx| {
+            panel.selected_agent = agent_b.clone();
+        });
+
+        // Serialize both panels
+        panel_a.update(cx, |panel, cx| panel.serialize(cx));
+        panel_b.update(cx, |panel, cx| panel.serialize(cx));
+        cx.run_until_parked();
+
+        // Load fresh panels from serialized state and verify independence
+        let async_cx = cx.update(|window, cx| window.to_async(cx));
+        let loaded_a = AgentPanel::load(workspace_a.downgrade(), async_cx)
+            .await
+            .expect("panel A load should succeed");
+        cx.run_until_parked();
+
+        let async_cx = cx.update(|window, cx| window.to_async(cx));
+        let loaded_b = AgentPanel::load(workspace_b.downgrade(), async_cx)
+            .await
+            .expect("panel B load should succeed");
+        cx.run_until_parked();
+
+        loaded_a.read_with(cx, |panel, _cx| {
+            assert_eq!(
+                panel.selected_agent, agent_a,
+                "workspace A should restore agent-alpha, not agent-beta"
+            );
+        });
+
+        loaded_b.read_with(cx, |panel, _cx| {
+            assert_eq!(
+                panel.selected_agent, agent_b,
+                "workspace B should restore agent-beta, not agent-alpha"
+            );
+        });
+    }
+
+    /// `new_thread` (via `external_thread(None, ...)`) resolves the agent from
+    /// the panel's `selected_agent` and keeps that selection after creating a
+    /// thread.
+    #[gpui::test]
+    async fn test_new_thread_uses_workspace_selected_agent(cx: &mut TestAppContext) {
+        init_test(cx);
+        cx.update(|cx| {
+            cx.update_flags(true, vec!["agent-v2".to_string()]);
+            agent::ThreadStore::init_global(cx);
+            language_model::LanguageModelRegistry::test(cx);
+        });
+
+        let fs = FakeFs::new(cx.executor());
+        let project = Project::test(fs.clone(), [], cx).await;
+
+        let multi_workspace =
+            cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+        let workspace = multi_workspace
+            .read_with(cx, |multi_workspace, _cx| {
+                multi_workspace.workspace().clone()
+            })
+            .unwrap();
+
+        workspace.update(cx, |workspace, _cx| {
+            workspace.set_random_database_id();
+        });
+
+        let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx);
+
+        let custom_agent = Agent::Custom {
+            id: "my-custom-agent".into(),
+        };
+
+        let panel = workspace.update_in(cx, |workspace, window, cx| {
+            let panel = cx.new(|cx| AgentPanel::new(workspace, None, window, cx));
+            workspace.add_panel(panel.clone(), window, cx);
+            panel
+        });
+
+        // Set selected_agent to a custom agent
+        panel.update(cx, |panel, _cx| {
+            panel.selected_agent = custom_agent.clone();
+        });
+
+        // Call new_thread, which internally calls external_thread(None, ...)
+        // This resolves the agent from self.selected_agent
+        panel.update_in(cx, |panel, window, cx| {
+            panel.new_thread(&NewThread, window, cx);
+        });
+
+        panel.read_with(cx, |panel, _cx| {
+            assert_eq!(
+                panel.selected_agent, custom_agent,
+                "selected_agent should remain the custom agent after new_thread"
+            );
+            assert!(
+                panel.active_conversation_view().is_some(),
+                "a thread should have been created"
+            );
+        });
+    }
 }

crates/agent_ui/src/agent_ui.rs πŸ”—

@@ -674,7 +674,9 @@ mod tests {
     use feature_flags::FeatureFlagAppExt;
     use gpui::{BorrowAppContext, TestAppContext, px};
     use project::DisableAiSettings;
-    use settings::{DockPosition, NotifyWhenAgentWaiting, Settings, SettingsStore};
+    use settings::{
+        DockPosition, NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, Settings, SettingsStore,
+    };
 
     #[gpui::test]
     fn test_agent_command_palette_visibility(cx: &mut TestAppContext) {
@@ -705,7 +707,7 @@ mod tests {
             default_profile: AgentProfileId::default(),
             profiles: Default::default(),
             notify_when_agent_waiting: NotifyWhenAgentWaiting::default(),
-            play_sound_when_agent_done: false,
+            play_sound_when_agent_done: PlaySoundWhenAgentDone::Never,
             single_file_review: false,
             model_parameters: vec![],
             enable_feedback: false,

crates/agent_ui/src/buffer_codegen.rs πŸ”—

@@ -18,6 +18,9 @@ use language_model::{
     LanguageModelRequestTool, LanguageModelTextStream, LanguageModelToolChoice,
     LanguageModelToolUse, Role, TokenUsage,
 };
+use language_models::provider::anthropic::telemetry::{
+    AnthropicCompletionType, AnthropicEventData, AnthropicEventReporter, AnthropicEventType,
+};
 use multi_buffer::MultiBufferRow;
 use parking_lot::Mutex;
 use prompt_store::PromptBuilder;
@@ -300,7 +303,7 @@ impl CodegenAlternative {
         let snapshot = buffer.read(cx).snapshot(cx);
 
         let (old_buffer, _, _) = snapshot
-            .range_to_buffer_ranges(range.start..=range.end)
+            .range_to_buffer_ranges(range.start..range.end)
             .pop()
             .unwrap();
         let old_buffer = cx.new(|cx| {
@@ -637,7 +640,7 @@ impl CodegenAlternative {
         stream: impl 'static + Future<Output = Result<LanguageModelTextStream>>,
         cx: &mut Context<Self>,
     ) -> Task<()> {
-        let anthropic_reporter = language_model::AnthropicEventReporter::new(&model, cx);
+        let anthropic_reporter = AnthropicEventReporter::new(&model, cx);
         let session_id = self.session_id;
         let model_telemetry_id = model.telemetry_id();
         let model_provider_id = model.provider_id().to_string();
@@ -681,7 +684,7 @@ impl CodegenAlternative {
         let language_name = {
             let multibuffer = self.buffer.read(cx);
             let snapshot = multibuffer.snapshot(cx);
-            let ranges = snapshot.range_to_buffer_ranges(self.range.start..=self.range.end);
+            let ranges = snapshot.range_to_buffer_ranges(self.range.start..self.range.end);
             ranges
                 .first()
                 .and_then(|(buffer, _, _)| buffer.language())
@@ -830,9 +833,9 @@ impl CodegenAlternative {
                             error_message = error_message.as_deref(),
                         );
 
-                        anthropic_reporter.report(language_model::AnthropicEventData {
-                            completion_type: language_model::AnthropicCompletionType::Editor,
-                            event: language_model::AnthropicEventType::Response,
+                        anthropic_reporter.report(AnthropicEventData {
+                            completion_type: AnthropicCompletionType::Editor,
+                            event: AnthropicEventType::Response,
                             language_name: language_name.map(|n| n.to_string()),
                             message_id,
                         });

crates/agent_ui/src/completion_provider.rs πŸ”—

@@ -9,9 +9,7 @@ use crate::ThreadHistory;
 use acp_thread::MentionUri;
 use agent_client_protocol as acp;
 use anyhow::Result;
-use editor::{
-    CompletionProvider, Editor, ExcerptId, code_context_menus::COMPLETION_MENU_MAX_WIDTH,
-};
+use editor::{CompletionProvider, Editor, code_context_menus::COMPLETION_MENU_MAX_WIDTH};
 use futures::FutureExt as _;
 use fuzzy::{PathMatch, StringMatch, StringMatchCandidate};
 use gpui::{App, BackgroundExecutor, Entity, SharedString, Task, WeakEntity};
@@ -28,7 +26,7 @@ use prompt_store::{PromptStore, UserPromptId};
 use rope::Point;
 use settings::{Settings, TerminalDockPosition};
 use terminal::terminal_settings::TerminalSettings;
-use terminal_view::terminal_panel::TerminalPanel;
+use terminal_view::{TerminalView, terminal_panel::TerminalPanel};
 use text::{Anchor, ToOffset as _, ToPoint as _};
 use ui::IconName;
 use ui::prelude::*;
@@ -562,8 +560,7 @@ impl<T: PromptCompletionProviderDelegate> PromptCompletionProvider<T> {
                     .collect();
 
                 // Collect terminal selections from all terminal views if the terminal panel is visible
-                let terminal_selections: Vec<String> =
-                    terminal_selections_if_panel_open(workspace, cx);
+                let terminal_selections: Vec<String> = terminal_selections(workspace, cx);
 
                 const EDITOR_PLACEHOLDER: &str = "selection ";
                 const TERMINAL_PLACEHOLDER: &str = "terminal ";
@@ -622,7 +619,7 @@ impl<T: PromptCompletionProviderDelegate> PromptCompletionProvider<T> {
                                 for (terminal_text, terminal_range) in terminal_ranges {
                                     let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
                                     let Some(start) =
-                                        snapshot.as_singleton_anchor(source_range.start)
+                                        snapshot.anchor_in_excerpt(source_range.start)
                                     else {
                                         return;
                                     };
@@ -1198,7 +1195,7 @@ impl<T: PromptCompletionProviderDelegate> PromptCompletionProvider<T> {
                 })
             });
 
-        let has_terminal_selection = !terminal_selections_if_panel_open(workspace, cx).is_empty();
+        let has_terminal_selection = !terminal_selections(workspace, cx).is_empty();
 
         if has_editor_selection || has_terminal_selection {
             entries.push(PromptContextEntry::Action(
@@ -1236,7 +1233,6 @@ impl<T: PromptCompletionProviderDelegate> PromptCompletionProvider<T> {
 impl<T: PromptCompletionProviderDelegate> CompletionProvider for PromptCompletionProvider<T> {
     fn completions(
         &self,
-        _excerpt_id: ExcerptId,
         buffer: &Entity<Buffer>,
         buffer_position: Anchor,
         _trigger: CompletionContext,
@@ -2169,28 +2165,45 @@ fn build_code_label_for_path(
     label.build()
 }
 
-/// Returns terminal selections from all terminal views if the terminal panel is open.
-fn terminal_selections_if_panel_open(workspace: &Entity<Workspace>, cx: &App) -> Vec<String> {
-    let Some(panel) = workspace.read(cx).panel::<TerminalPanel>(cx) else {
-        return Vec::new();
-    };
+fn terminal_selections(workspace: &Entity<Workspace>, cx: &App) -> Vec<String> {
+    let mut selections = Vec::new();
 
-    // Check if the dock containing this panel is open
-    let position = match TerminalSettings::get_global(cx).dock {
-        TerminalDockPosition::Left => DockPosition::Left,
-        TerminalDockPosition::Bottom => DockPosition::Bottom,
-        TerminalDockPosition::Right => DockPosition::Right,
-    };
-    let dock_is_open = workspace
-        .read(cx)
-        .dock_at_position(position)
+    // Check if the active item is a terminal (in a panel or not)
+    if let Some(terminal_view) = workspace
         .read(cx)
-        .is_open();
-    if !dock_is_open {
-        return Vec::new();
+        .active_item(cx)
+        .and_then(|item| item.act_as::<TerminalView>(cx))
+    {
+        if let Some(text) = terminal_view
+            .read(cx)
+            .terminal()
+            .read(cx)
+            .last_content
+            .selection_text
+            .clone()
+            .filter(|text| !text.is_empty())
+        {
+            selections.push(text);
+        }
+    }
+
+    if let Some(panel) = workspace.read(cx).panel::<TerminalPanel>(cx) {
+        let position = match TerminalSettings::get_global(cx).dock {
+            TerminalDockPosition::Left => DockPosition::Left,
+            TerminalDockPosition::Bottom => DockPosition::Bottom,
+            TerminalDockPosition::Right => DockPosition::Right,
+        };
+        let dock_is_open = workspace
+            .read(cx)
+            .dock_at_position(position)
+            .read(cx)
+            .is_open();
+        if dock_is_open {
+            selections.extend(panel.read(cx).terminal_selections(cx));
+        }
     }
 
-    panel.read(cx).terminal_selections(cx)
+    selections
 }
 
 fn selection_ranges(
@@ -2213,17 +2226,8 @@ fn selection_ranges(
 
         selections
             .into_iter()
-            .map(|s| {
-                let (start, end) = if s.is_empty() {
-                    let row = multi_buffer::MultiBufferRow(s.start.row);
-                    let line_start = text::Point::new(s.start.row, 0);
-                    let line_end = text::Point::new(s.start.row, snapshot.line_len(row));
-                    (line_start, line_end)
-                } else {
-                    (s.start, s.end)
-                };
-                snapshot.anchor_after(start)..snapshot.anchor_before(end)
-            })
+            .filter(|s| !s.is_empty())
+            .map(|s| snapshot.anchor_after(s.start)..snapshot.anchor_before(s.end))
             .flat_map(|range| {
                 let (start_buffer, start) = buffer.text_anchor_for_position(range.start, cx)?;
                 let (end_buffer, end) = buffer.text_anchor_for_position(range.end, cx)?;

crates/agent_ui/src/conversation_view.rs πŸ”—

@@ -2340,7 +2340,7 @@ impl ConversationView {
                     .is_some_and(|workspace| AgentPanel::is_visible(&workspace, cx))
             };
         #[cfg(feature = "audio")]
-        if settings.play_sound_when_agent_done && !_visible {
+        if settings.play_sound_when_agent_done.should_play(_visible) {
             Audio::play_sound(Sound::AgentDone, cx);
         }
     }

crates/agent_ui/src/conversation_view/thread_view.rs πŸ”—

@@ -5152,9 +5152,12 @@ impl ThreadView {
     }
 
     pub(crate) fn auto_expand_streaming_thought(&mut self, cx: &mut Context<Self>) {
-        // Only auto-expand thinking blocks in Automatic mode.
-        // AlwaysExpanded shows them open by default; AlwaysCollapsed keeps them closed.
-        if AgentSettings::get_global(cx).thinking_display != ThinkingBlockDisplay::Automatic {
+        let thinking_display = AgentSettings::get_global(cx).thinking_display;
+
+        if !matches!(
+            thinking_display,
+            ThinkingBlockDisplay::Auto | ThinkingBlockDisplay::Preview
+        ) {
             return;
         }
 
@@ -5183,6 +5186,13 @@ impl ThreadView {
                 cx.notify();
             }
         } else if self.auto_expanded_thinking_block.is_some() {
+            if thinking_display == ThinkingBlockDisplay::Auto {
+                if let Some(key) = self.auto_expanded_thinking_block {
+                    if !self.user_toggled_thinking_blocks.contains(&key) {
+                        self.expanded_thinking_blocks.remove(&key);
+                    }
+                }
+            }
             self.auto_expanded_thinking_block = None;
             cx.notify();
         }
@@ -5196,7 +5206,19 @@ impl ThreadView {
         let thinking_display = AgentSettings::get_global(cx).thinking_display;
 
         match thinking_display {
-            ThinkingBlockDisplay::Automatic => {
+            ThinkingBlockDisplay::Auto => {
+                let is_open = self.expanded_thinking_blocks.contains(&key)
+                    || self.user_toggled_thinking_blocks.contains(&key);
+
+                if is_open {
+                    self.expanded_thinking_blocks.remove(&key);
+                    self.user_toggled_thinking_blocks.remove(&key);
+                } else {
+                    self.expanded_thinking_blocks.insert(key);
+                    self.user_toggled_thinking_blocks.insert(key);
+                }
+            }
+            ThinkingBlockDisplay::Preview => {
                 let is_user_expanded = self.user_toggled_thinking_blocks.contains(&key);
                 let is_in_expanded_set = self.expanded_thinking_blocks.contains(&key);
 
@@ -5249,7 +5271,11 @@ impl ThreadView {
         let is_in_expanded_set = self.expanded_thinking_blocks.contains(&key);
 
         let (is_open, is_constrained) = match thinking_display {
-            ThinkingBlockDisplay::Automatic => {
+            ThinkingBlockDisplay::Auto => {
+                let is_open = is_user_toggled || is_in_expanded_set;
+                (is_open, false)
+            }
+            ThinkingBlockDisplay::Preview => {
                 let is_open = is_user_toggled || is_in_expanded_set;
                 let is_constrained = is_in_expanded_set && !is_user_toggled;
                 (is_open, is_constrained)
@@ -7103,17 +7129,10 @@ impl ThreadView {
                 };
 
                 active_editor.update_in(cx, |editor, window, cx| {
-                    let singleton = editor
-                        .buffer()
-                        .read(cx)
-                        .read(cx)
-                        .as_singleton()
-                        .map(|(a, b, _)| (a, b));
-                    if let Some((excerpt_id, buffer_id)) = singleton
-                        && let Some(agent_buffer) = agent_location.buffer.upgrade()
-                        && agent_buffer.read(cx).remote_id() == buffer_id
+                    let snapshot = editor.buffer().read(cx).snapshot(cx);
+                    if snapshot.as_singleton().is_some()
+                        && let Some(anchor) = snapshot.anchor_in_excerpt(agent_location.position)
                     {
-                        let anchor = editor::Anchor::in_buffer(excerpt_id, agent_location.position);
                         editor.change_selections(Default::default(), window, cx, |selections| {
                             selections.select_anchor_ranges([anchor..anchor]);
                         })

crates/agent_ui/src/inline_assistant.rs πŸ”—

@@ -1,5 +1,6 @@
-use language_model::AnthropicEventData;
-use language_model::report_anthropic_event;
+use language_models::provider::anthropic::telemetry::{
+    AnthropicCompletionType, AnthropicEventData, AnthropicEventType, report_anthropic_event,
+};
 use std::cmp;
 use std::mem;
 use std::ops::Range;
@@ -26,8 +27,8 @@ use editor::RowExt;
 use editor::SelectionEffects;
 use editor::scroll::ScrollOffset;
 use editor::{
-    Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, HighlightKey,
-    MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint,
+    Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, HighlightKey, MultiBuffer,
+    MultiBufferSnapshot, ToOffset as _, ToPoint,
     actions::SelectAll,
     display_map::{
         BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, EditorMargins,
@@ -442,15 +443,17 @@ impl InlineAssistant {
         let newest_selection = newest_selection.unwrap();
 
         let mut codegen_ranges = Vec::new();
-        for (buffer, buffer_range, excerpt_id) in
-            snapshot.ranges_to_buffer_ranges(selections.iter().map(|selection| {
-                snapshot.anchor_before(selection.start)..snapshot.anchor_after(selection.end)
-            }))
+        for (buffer, buffer_range, _) in selections
+            .iter()
+            .flat_map(|selection| snapshot.range_to_buffer_ranges(selection.start..selection.end))
         {
-            let anchor_range = Anchor::range_in_buffer(
-                excerpt_id,
-                buffer.anchor_before(buffer_range.start)..buffer.anchor_after(buffer_range.end),
-            );
+            let (Some(start), Some(end)) = (
+                snapshot.anchor_in_buffer(buffer.anchor_before(buffer_range.start)),
+                snapshot.anchor_in_buffer(buffer.anchor_after(buffer_range.end)),
+            ) else {
+                continue;
+            };
+            let anchor_range = start..end;
 
             codegen_ranges.push(anchor_range);
 
@@ -467,8 +470,8 @@ impl InlineAssistant {
                 report_anthropic_event(
                     &model.model,
                     AnthropicEventData {
-                        completion_type: language_model::AnthropicCompletionType::Editor,
-                        event: language_model::AnthropicEventType::Invoked,
+                        completion_type: AnthropicCompletionType::Editor,
+                        event: AnthropicEventType::Invoked,
                         language_name: buffer.language().map(|language| language.name().to_proto()),
                         message_id: None,
                     },
@@ -981,8 +984,7 @@ impl InlineAssistant {
         match event {
             EditorEvent::Edited { transaction_id } => {
                 let buffer = editor.read(cx).buffer().read(cx);
-                let edited_ranges =
-                    buffer.edited_ranges_for_transaction::<MultiBufferOffset>(*transaction_id, cx);
+                let edited_ranges = buffer.edited_ranges_for_transaction(*transaction_id, cx);
                 let snapshot = buffer.snapshot(cx);
 
                 for assist_id in editor_assists.assist_ids.clone() {
@@ -1088,7 +1090,7 @@ impl InlineAssistant {
                     let multibuffer = editor.read(cx).buffer().read(cx);
                     let snapshot = multibuffer.snapshot(cx);
                     let ranges =
-                        snapshot.range_to_buffer_ranges(assist.range.start..=assist.range.end);
+                        snapshot.range_to_buffer_ranges(assist.range.start..assist.range.end);
                     ranges
                         .first()
                         .and_then(|(buffer, _, _)| buffer.language())
@@ -1105,13 +1107,13 @@ impl InlineAssistant {
                     (
                         "rejected",
                         "Assistant Response Rejected",
-                        language_model::AnthropicEventType::Reject,
+                        AnthropicEventType::Reject,
                     )
                 } else {
                     (
                         "accepted",
                         "Assistant Response Accepted",
-                        language_model::AnthropicEventType::Accept,
+                        AnthropicEventType::Accept,
                     )
                 };
 
@@ -1128,8 +1130,8 @@ impl InlineAssistant {
 
                 report_anthropic_event(
                     &model.model,
-                    language_model::AnthropicEventData {
-                        completion_type: language_model::AnthropicCompletionType::Editor,
+                    AnthropicEventData {
+                        completion_type: AnthropicCompletionType::Editor,
                         event: anthropic_event_type,
                         language_name,
                         message_id,
@@ -1495,10 +1497,10 @@ impl InlineAssistant {
 
             let mut new_blocks = Vec::new();
             for (new_row, old_row_range) in deleted_row_ranges {
-                let (_, start, _) = old_snapshot
+                let (_, start) = old_snapshot
                     .point_to_buffer_point(Point::new(*old_row_range.start(), 0))
                     .unwrap();
-                let (_, end, _) = old_snapshot
+                let (_, end) = old_snapshot
                     .point_to_buffer_point(Point::new(
                         *old_row_range.end(),
                         old_snapshot.line_len(MultiBufferRow(*old_row_range.end())),
@@ -1529,7 +1531,7 @@ impl InlineAssistant {
                     editor.set_read_only(true);
                     editor.set_show_edit_predictions(Some(false), window, cx);
                     editor.highlight_rows::<DeletedLines>(
-                        Anchor::min()..Anchor::max(),
+                        Anchor::Min..Anchor::Max,
                         cx.theme().status().deleted_background,
                         Default::default(),
                         cx,
@@ -1937,9 +1939,8 @@ impl CodeActionProvider for AssistantCodeActionProvider {
 
     fn apply_code_action(
         &self,
-        buffer: Entity<Buffer>,
+        _buffer: Entity<Buffer>,
         action: CodeAction,
-        excerpt_id: ExcerptId,
         _push_to_history: bool,
         window: &mut Window,
         cx: &mut App,
@@ -1969,31 +1970,8 @@ impl CodeActionProvider for AssistantCodeActionProvider {
             let range = editor
                 .update(cx, |editor, cx| {
                     editor.buffer().update(cx, |multibuffer, cx| {
-                        let buffer = buffer.read(cx);
-                        let multibuffer_snapshot = multibuffer.read(cx);
-
-                        let old_context_range =
-                            multibuffer_snapshot.context_range_for_excerpt(excerpt_id)?;
-                        let mut new_context_range = old_context_range.clone();
-                        if action
-                            .range
-                            .start
-                            .cmp(&old_context_range.start, buffer)
-                            .is_lt()
-                        {
-                            new_context_range.start = action.range.start;
-                        }
-                        if action.range.end.cmp(&old_context_range.end, buffer).is_gt() {
-                            new_context_range.end = action.range.end;
-                        }
-                        drop(multibuffer_snapshot);
-
-                        if new_context_range != old_context_range {
-                            multibuffer.resize_excerpt(excerpt_id, new_context_range, cx);
-                        }
-
                         let multibuffer_snapshot = multibuffer.read(cx);
-                        multibuffer_snapshot.anchor_range_in_excerpt(excerpt_id, action.range)
+                        multibuffer_snapshot.buffer_anchor_range_to_anchor_range(action.range)
                     })
                 })
                 .context("invalid range")?;

crates/agent_ui/src/mention_set.rs πŸ”—

@@ -6,7 +6,7 @@ use agent_servers::{AgentServer, AgentServerDelegate};
 use anyhow::{Context as _, Result, anyhow};
 use collections::{HashMap, HashSet};
 use editor::{
-    Anchor, Editor, EditorSnapshot, ExcerptId, FoldPlaceholder, ToOffset,
+    Anchor, Editor, EditorSnapshot, FoldPlaceholder, ToOffset,
     display_map::{Crease, CreaseId, CreaseMetadata, FoldId},
     scroll::Autoscroll,
 };
@@ -204,10 +204,9 @@ impl MentionSet {
         };
 
         let snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx));
-        let Some(start_anchor) = snapshot.buffer_snapshot().as_singleton_anchor(start) else {
+        let Some(start_anchor) = snapshot.buffer_snapshot().anchor_in_excerpt(start) else {
             return Task::ready(());
         };
-        let excerpt_id = start_anchor.excerpt_id;
         let end_anchor = snapshot.buffer_snapshot().anchor_before(
             start_anchor.to_offset(&snapshot.buffer_snapshot()) + content_len + 1usize,
         );
@@ -234,7 +233,6 @@ impl MentionSet {
                 })
                 .shared();
             insert_crease_for_mention(
-                excerpt_id,
                 start,
                 content_len,
                 mention_uri.name().into(),
@@ -249,7 +247,6 @@ impl MentionSet {
             )
         } else {
             insert_crease_for_mention(
-                excerpt_id,
                 start,
                 content_len,
                 crease_text,
@@ -468,7 +465,7 @@ impl MentionSet {
         };
 
         let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
-        let Some(start) = snapshot.as_singleton_anchor(source_range.start) else {
+        let Some(start) = snapshot.anchor_in_excerpt(source_range.start) else {
             return;
         };
 
@@ -745,19 +742,17 @@ pub(crate) async fn insert_images_as_context(
     let replacement_text = MentionUri::PastedImage.as_link().to_string();
 
     for (image, name) in images {
-        let Some((excerpt_id, text_anchor, multibuffer_anchor)) = editor
+        let Some((text_anchor, multibuffer_anchor)) = editor
             .update_in(cx, |editor, window, cx| {
                 let snapshot = editor.snapshot(window, cx);
-                let (excerpt_id, _, buffer_snapshot) =
-                    snapshot.buffer_snapshot().as_singleton().unwrap();
-
-                let cursor_anchor = editor.selections.newest_anchor().start.text_anchor;
-                let text_anchor = cursor_anchor.bias_left(&buffer_snapshot);
-                let multibuffer_anchor = snapshot
+                let (cursor_anchor, buffer_snapshot) = snapshot
                     .buffer_snapshot()
-                    .anchor_in_excerpt(excerpt_id, text_anchor);
+                    .anchor_to_buffer_anchor(editor.selections.newest_anchor().start)
+                    .unwrap();
+                let text_anchor = cursor_anchor.bias_left(buffer_snapshot);
+                let multibuffer_anchor = snapshot.buffer_snapshot().anchor_in_excerpt(text_anchor);
                 editor.insert(&format!("{replacement_text} "), window, cx);
-                (excerpt_id, text_anchor, multibuffer_anchor)
+                (text_anchor, multibuffer_anchor)
             })
             .ok()
         else {
@@ -775,7 +770,6 @@ pub(crate) async fn insert_images_as_context(
         let image = Arc::new(image);
         let Ok(Some((crease_id, tx))) = cx.update(|window, cx| {
             insert_crease_for_mention(
-                excerpt_id,
                 text_anchor,
                 content_len,
                 name.clone(),
@@ -909,7 +903,6 @@ pub(crate) fn paste_images_as_context(
 }
 
 pub(crate) fn insert_crease_for_mention(
-    excerpt_id: ExcerptId,
     anchor: text::Anchor,
     content_len: usize,
     crease_label: SharedString,
@@ -927,7 +920,7 @@ pub(crate) fn insert_crease_for_mention(
     let crease_id = editor.update(cx, |editor, cx| {
         let snapshot = editor.buffer().read(cx).snapshot(cx);
 
-        let start = snapshot.anchor_in_excerpt(excerpt_id, anchor)?;
+        let start = snapshot.anchor_in_excerpt(anchor)?;
 
         let start = start.bias_right(&snapshot);
         let end = snapshot.anchor_before(start.to_offset(&snapshot) + content_len);

crates/agent_ui/src/message_editor.rs πŸ”—

@@ -203,12 +203,10 @@ fn insert_mention_for_project_path(
         MentionInsertPosition::AtCursor => editor.update(cx, |editor, cx| {
             let buffer = editor.buffer().read(cx);
             let snapshot = buffer.snapshot(cx);
-            let (_, _, buffer_snapshot) = snapshot.as_singleton()?;
-            let text_anchor = editor
-                .selections
-                .newest_anchor()
-                .start
-                .text_anchor
+            let buffer_snapshot = snapshot.as_singleton()?;
+            let text_anchor = snapshot
+                .anchor_to_buffer_anchor(editor.selections.newest_anchor().start)?
+                .0
                 .bias_left(&buffer_snapshot);
 
             editor.insert(&mention_text, window, cx);
@@ -224,7 +222,7 @@ fn insert_mention_for_project_path(
             editor.update(cx, |editor, cx| {
                 editor.edit(
                     [(
-                        multi_buffer::Anchor::max()..multi_buffer::Anchor::max(),
+                        multi_buffer::Anchor::Max..multi_buffer::Anchor::Max,
                         new_text,
                     )],
                     cx,
@@ -603,7 +601,7 @@ impl MessageEditor {
             COMMAND_HINT_INLAY_ID,
             hint_pos,
             &InlayHint {
-                position: hint_pos.text_anchor,
+                position: snapshot.anchor_to_buffer_anchor(hint_pos)?.0,
                 label: InlayHintLabel::String(hint),
                 kind: Some(InlayHintKind::Parameter),
                 padding_left: false,
@@ -640,12 +638,11 @@ impl MessageEditor {
 
         let start = self.editor.update(cx, |editor, cx| {
             editor.set_text(content, window, cx);
-            editor
-                .buffer()
-                .read(cx)
-                .snapshot(cx)
-                .anchor_before(Point::zero())
-                .text_anchor
+            let snapshot = editor.buffer().read(cx).snapshot(cx);
+            snapshot
+                .anchor_to_buffer_anchor(snapshot.anchor_before(Point::zero()))
+                .unwrap()
+                .0
         });
 
         let supports_images = self.session_capabilities.read().supports_images();
@@ -999,13 +996,10 @@ impl MessageEditor {
 
         if should_insert_creases && let Some(selections) = editor_clipboard_selections {
             cx.stop_propagation();
-            let insertion_target = self
-                .editor
-                .read(cx)
-                .selections
-                .newest_anchor()
-                .start
-                .text_anchor;
+            let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx);
+            let (insertion_target, _) = snapshot
+                .anchor_to_buffer_anchor(self.editor.read(cx).selections.newest_anchor().start)
+                .unwrap();
 
             let project = workspace.read(cx).project().clone();
             for selection in selections {
@@ -1021,21 +1015,19 @@ impl MessageEditor {
                     };
 
                     let mention_text = mention_uri.as_link().to_string();
-                    let (excerpt_id, text_anchor, content_len) =
-                        self.editor.update(cx, |editor, cx| {
-                            let buffer = editor.buffer().read(cx);
-                            let snapshot = buffer.snapshot(cx);
-                            let (excerpt_id, _, buffer_snapshot) = snapshot.as_singleton().unwrap();
-                            let text_anchor = insertion_target.bias_left(&buffer_snapshot);
+                    let (text_anchor, content_len) = self.editor.update(cx, |editor, cx| {
+                        let buffer = editor.buffer().read(cx);
+                        let snapshot = buffer.snapshot(cx);
+                        let buffer_snapshot = snapshot.as_singleton().unwrap();
+                        let text_anchor = insertion_target.bias_left(&buffer_snapshot);
 
-                            editor.insert(&mention_text, window, cx);
-                            editor.insert(" ", window, cx);
+                        editor.insert(&mention_text, window, cx);
+                        editor.insert(" ", window, cx);
 
-                            (excerpt_id, text_anchor, mention_text.len())
-                        });
+                        (text_anchor, mention_text.len())
+                    });
 
                     let Some((crease_id, tx)) = insert_crease_for_mention(
-                        excerpt_id,
                         text_anchor,
                         content_len,
                         crease_text.into(),
@@ -1145,8 +1137,7 @@ impl MessageEditor {
 
                     for (anchor, content_len, mention_uri) in all_mentions {
                         let Some((crease_id, tx)) = insert_crease_for_mention(
-                            anchor.excerpt_id,
-                            anchor.text_anchor,
+                            snapshot.anchor_to_buffer_anchor(anchor).unwrap().0,
                             content_len,
                             mention_uri.name().into(),
                             mention_uri.icon_path(cx),
@@ -1339,25 +1330,23 @@ impl MessageEditor {
                     };
                     let mention_text = mention_uri.as_link().to_string();
 
-                    let (excerpt_id, text_anchor, content_len) = editor.update(cx, |editor, cx| {
+                    let (text_anchor, content_len) = editor.update(cx, |editor, cx| {
                         let buffer = editor.buffer().read(cx);
                         let snapshot = buffer.snapshot(cx);
-                        let (excerpt_id, _, buffer_snapshot) = snapshot.as_singleton().unwrap();
-                        let text_anchor = editor
-                            .selections
-                            .newest_anchor()
-                            .start
-                            .text_anchor
+                        let buffer_snapshot = snapshot.as_singleton().unwrap();
+                        let text_anchor = snapshot
+                            .anchor_to_buffer_anchor(editor.selections.newest_anchor().start)
+                            .unwrap()
+                            .0
                             .bias_left(&buffer_snapshot);
 
                         editor.insert(&mention_text, window, cx);
                         editor.insert(" ", window, cx);
 
-                        (excerpt_id, text_anchor, mention_text.len())
+                        (text_anchor, mention_text.len())
                     });
 
                     let Some((crease_id, tx)) = insert_crease_for_mention(
-                        excerpt_id,
                         text_anchor,
                         content_len,
                         mention_uri.name().into(),
@@ -1700,8 +1689,7 @@ impl MessageEditor {
             let adjusted_start = insertion_start + range.start;
             let anchor = snapshot.anchor_before(MultiBufferOffset(adjusted_start));
             let Some((crease_id, tx)) = insert_crease_for_mention(
-                anchor.excerpt_id,
-                anchor.text_anchor,
+                snapshot.anchor_to_buffer_anchor(anchor).unwrap().0,
                 range.end - range.start,
                 mention_uri.name().into(),
                 mention_uri.icon_path(cx),
@@ -2077,23 +2065,13 @@ mod tests {
 
         cx.run_until_parked();
 
-        let excerpt_id = editor.update(cx, |editor, cx| {
-            editor
-                .buffer()
-                .read(cx)
-                .excerpt_ids()
-                .into_iter()
-                .next()
-                .unwrap()
-        });
         let completions = editor.update_in(cx, |editor, window, cx| {
             editor.set_text("Hello @file ", window, cx);
             let buffer = editor.buffer().read(cx).as_singleton().unwrap();
             let completion_provider = editor.completion_provider().unwrap();
             completion_provider.completions(
-                excerpt_id,
                 &buffer,
-                text::Anchor::MAX,
+                text::Anchor::max_for_buffer(buffer.read(cx).remote_id()),
                 CompletionContext {
                     trigger_kind: CompletionTriggerKind::TRIGGER_CHARACTER,
                     trigger_character: Some("@".into()),
@@ -2114,7 +2092,7 @@ mod tests {
         editor.update_in(cx, |editor, window, cx| {
             let snapshot = editor.buffer().read(cx).snapshot(cx);
             let range = snapshot
-                .anchor_range_in_excerpt(excerpt_id, completion.replace_range)
+                .buffer_anchor_range_to_anchor_range(completion.replace_range)
                 .unwrap();
             editor.edit([(range, completion.new_text)], cx);
             (completion.confirm.unwrap())(CompletionIntent::Complete, window, cx);

crates/agent_ui/src/terminal_codegen.rs πŸ”—

@@ -2,6 +2,9 @@ use crate::inline_prompt_editor::CodegenStatus;
 use futures::{SinkExt, StreamExt, channel::mpsc};
 use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Task};
 use language_model::{ConfiguredModel, LanguageModelRegistry, LanguageModelRequest};
+use language_models::provider::anthropic::telemetry::{
+    AnthropicCompletionType, AnthropicEventData, AnthropicEventReporter, AnthropicEventType,
+};
 use std::time::Instant;
 use terminal::Terminal;
 use uuid::Uuid;
@@ -40,7 +43,7 @@ impl TerminalCodegen {
             return;
         };
 
-        let anthropic_reporter = language_model::AnthropicEventReporter::new(&model, cx);
+        let anthropic_reporter = AnthropicEventReporter::new(&model, cx);
         let session_id = self.session_id;
         let model_telemetry_id = model.telemetry_id();
         let model_provider_id = model.provider_id().to_string();
@@ -94,9 +97,9 @@ impl TerminalCodegen {
                             error_message = error_message,
                         );
 
-                        anthropic_reporter.report(language_model::AnthropicEventData {
-                            completion_type: language_model::AnthropicCompletionType::Terminal,
-                            event: language_model::AnthropicEventType::Response,
+                        anthropic_reporter.report(AnthropicEventData {
+                            completion_type: AnthropicCompletionType::Terminal,
+                            event: AnthropicEventType::Response,
                             language_name: None,
                             message_id,
                         });

crates/agent_ui/src/terminal_inline_assistant.rs πŸ”—

@@ -17,7 +17,10 @@ use gpui::{App, Entity, Focusable, Global, Subscription, Task, UpdateGlobal, Wea
 use language::Buffer;
 use language_model::{
     CompletionIntent, ConfiguredModel, LanguageModelRegistry, LanguageModelRequest,
-    LanguageModelRequestMessage, Role, report_anthropic_event,
+    LanguageModelRequestMessage, Role,
+};
+use language_models::provider::anthropic::telemetry::{
+    AnthropicCompletionType, AnthropicEventData, AnthropicEventType, report_anthropic_event,
 };
 use project::Project;
 use prompt_store::{PromptBuilder, PromptStore};
@@ -312,13 +315,13 @@ impl TerminalInlineAssistant {
                     (
                         "rejected",
                         "Assistant Response Rejected",
-                        language_model::AnthropicEventType::Reject,
+                        AnthropicEventType::Reject,
                     )
                 } else {
                     (
                         "accepted",
                         "Assistant Response Accepted",
-                        language_model::AnthropicEventType::Accept,
+                        AnthropicEventType::Accept,
                     )
                 };
 
@@ -335,8 +338,8 @@ impl TerminalInlineAssistant {
 
                 report_anthropic_event(
                     &model,
-                    language_model::AnthropicEventData {
-                        completion_type: language_model::AnthropicCompletionType::Terminal,
+                    AnthropicEventData {
+                        completion_type: AnthropicCompletionType::Terminal,
                         event: anthropic_event_type,
                         language_name: None,
                         message_id,

crates/agent_ui/src/thread_metadata_store.rs πŸ”—

@@ -58,7 +58,7 @@ fn migrate_thread_metadata(cx: &mut App) {
                 .read(cx)
                 .entries()
                 .filter_map(|entry| {
-                    if existing_entries.contains(&entry.id.0) || entry.folder_paths.is_empty() {
+                    if existing_entries.contains(&entry.id.0) {
                         return None;
                     }
 
@@ -84,6 +84,9 @@ fn migrate_thread_metadata(cx: &mut App) {
         if is_first_migration {
             let mut per_project: HashMap<PathList, Vec<&mut ThreadMetadata>> = HashMap::default();
             for entry in &mut to_migrate {
+                if entry.folder_paths.is_empty() {
+                    continue;
+                }
                 per_project
                     .entry(entry.folder_paths.clone())
                     .or_default()
@@ -330,6 +333,25 @@ impl ThreadMetadataStore {
             .log_err();
     }
 
+    pub fn update_working_directories(
+        &mut self,
+        session_id: &acp::SessionId,
+        work_dirs: PathList,
+        cx: &mut Context<Self>,
+    ) {
+        if !cx.has_flag::<AgentV2FeatureFlag>() {
+            return;
+        }
+
+        if let Some(thread) = self.threads.get(session_id) {
+            self.save_internal(ThreadMetadata {
+                folder_paths: work_dirs,
+                ..thread.clone()
+            });
+            cx.notify();
+        }
+    }
+
     pub fn archive(&mut self, session_id: &acp::SessionId, cx: &mut Context<Self>) {
         self.update_archived(session_id, true, cx);
     }
@@ -561,7 +583,13 @@ impl ThreadMetadataStore {
                     PathList::new(&paths)
                 };
 
-                let archived = existing_thread.map(|t| t.archived).unwrap_or(false);
+                // Threads without a folder path (e.g. started in an empty
+                // window) are archived by default so they don't get lost,
+                // because they won't show up in the sidebar. Users can reload
+                // them from the archive.
+                let archived = existing_thread
+                    .map(|t| t.archived)
+                    .unwrap_or(folder_paths.is_empty());
 
                 let metadata = ThreadMetadata {
                     session_id,
@@ -1173,7 +1201,7 @@ mod tests {
             store.read(cx).entries().cloned().collect::<Vec<_>>()
         });
 
-        assert_eq!(list.len(), 3);
+        assert_eq!(list.len(), 4);
         assert!(
             list.iter()
                 .all(|metadata| metadata.agent_id.as_ref() == agent::ZED_AGENT_ID.as_ref())
@@ -1192,17 +1220,12 @@ mod tests {
             .collect::<Vec<_>>();
         assert!(migrated_session_ids.contains(&"a-session-1"));
         assert!(migrated_session_ids.contains(&"b-session-0"));
-        assert!(!migrated_session_ids.contains(&"projectless"));
+        assert!(migrated_session_ids.contains(&"projectless"));
 
         let migrated_entries = list
             .iter()
             .filter(|metadata| metadata.session_id.0.as_ref() != "a-session-0")
             .collect::<Vec<_>>();
-        assert!(
-            migrated_entries
-                .iter()
-                .all(|metadata| !metadata.folder_paths.is_empty())
-        );
         assert!(migrated_entries.iter().all(|metadata| metadata.archived));
     }
 
@@ -1448,6 +1471,84 @@ mod tests {
         assert_eq!(metadata_ids, vec![session_id]);
     }
 
+    #[gpui::test]
+    async fn test_threads_without_project_association_are_archived_by_default(
+        cx: &mut TestAppContext,
+    ) {
+        init_test(cx);
+
+        let fs = FakeFs::new(cx.executor());
+        let project_without_worktree = Project::test(fs.clone(), None::<&Path>, cx).await;
+        let project_with_worktree = Project::test(fs, [Path::new("/project-a")], cx).await;
+        let connection = Rc::new(StubAgentConnection::new());
+
+        let thread_without_worktree = cx
+            .update(|cx| {
+                connection.clone().new_session(
+                    project_without_worktree.clone(),
+                    PathList::default(),
+                    cx,
+                )
+            })
+            .await
+            .unwrap();
+        let session_without_worktree =
+            cx.read(|cx| thread_without_worktree.read(cx).session_id().clone());
+
+        cx.update(|cx| {
+            thread_without_worktree.update(cx, |thread, cx| {
+                thread.set_title("No Project Thread".into(), cx).detach();
+            });
+        });
+        cx.run_until_parked();
+
+        let thread_with_worktree = cx
+            .update(|cx| {
+                connection.clone().new_session(
+                    project_with_worktree.clone(),
+                    PathList::default(),
+                    cx,
+                )
+            })
+            .await
+            .unwrap();
+        let session_with_worktree =
+            cx.read(|cx| thread_with_worktree.read(cx).session_id().clone());
+
+        cx.update(|cx| {
+            thread_with_worktree.update(cx, |thread, cx| {
+                thread.set_title("Project Thread".into(), cx).detach();
+            });
+        });
+        cx.run_until_parked();
+
+        cx.update(|cx| {
+            let store = ThreadMetadataStore::global(cx);
+            let store = store.read(cx);
+
+            let without_worktree = store
+                .entry(&session_without_worktree)
+                .expect("missing metadata for thread without project association");
+            assert!(without_worktree.folder_paths.is_empty());
+            assert!(
+                without_worktree.archived,
+                "expected thread without project association to be archived"
+            );
+
+            let with_worktree = store
+                .entry(&session_with_worktree)
+                .expect("missing metadata for thread with project association");
+            assert_eq!(
+                with_worktree.folder_paths,
+                PathList::new(&[Path::new("/project-a")])
+            );
+            assert!(
+                !with_worktree.archived,
+                "expected thread with project association to remain unarchived"
+            );
+        });
+    }
+
     #[gpui::test]
     async fn test_subagent_threads_excluded_from_sidebar_metadata(cx: &mut TestAppContext) {
         init_test(cx);

crates/agent_ui/src/threads_archive_view.rs πŸ”—

@@ -1,3 +1,6 @@
+use std::collections::HashSet;
+use std::sync::Arc;
+
 use crate::agent_connection_store::AgentConnectionStore;
 
 use crate::thread_metadata_store::{ThreadMetadata, ThreadMetadataStore};
@@ -9,18 +12,31 @@ use agent_settings::AgentSettings;
 use chrono::{DateTime, Datelike as _, Local, NaiveDate, TimeDelta, Utc};
 use editor::Editor;
 use fs::Fs;
+use fuzzy::{StringMatch, StringMatchCandidate};
 use gpui::{
-    AnyElement, App, Context, Entity, EventEmitter, FocusHandle, Focusable, ListState, Render,
-    SharedString, Subscription, Task, WeakEntity, Window, list, prelude::*, px,
+    AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
+    ListState, Render, SharedString, Subscription, Task, WeakEntity, Window, list, prelude::*, px,
 };
 use itertools::Itertools as _;
 use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious};
+use picker::{
+    Picker, PickerDelegate,
+    highlighted_match_with_paths::{HighlightedMatch, HighlightedMatchWithPaths},
+};
 use project::{AgentId, AgentServerStore};
 use settings::Settings as _;
 use theme::ActiveTheme;
 use ui::ThreadItem;
 use ui::{
-    Divider, KeyBinding, Tooltip, WithScrollbar, prelude::*, utils::platform_title_bar_height,
+    Divider, KeyBinding, ListItem, ListItemSpacing, ListSubHeader, Tooltip, WithScrollbar,
+    prelude::*, utils::platform_title_bar_height,
+};
+use ui_input::ErasedEditor;
+use util::ResultExt;
+use util::paths::PathExt;
+use workspace::{
+    ModalView, PathList, SerializedWorkspaceLocation, Workspace, WorkspaceDb, WorkspaceId,
+    resolve_worktree_workspaces,
 };
 
 use zed_actions::agents_sidebar::FocusSidebarFilter;
@@ -110,12 +126,14 @@ pub struct ThreadsArchiveView {
     filter_editor: Entity<Editor>,
     _subscriptions: Vec<gpui::Subscription>,
     _refresh_history_task: Task<()>,
+    workspace: WeakEntity<Workspace>,
     agent_connection_store: WeakEntity<AgentConnectionStore>,
     agent_server_store: WeakEntity<AgentServerStore>,
 }
 
 impl ThreadsArchiveView {
     pub fn new(
+        workspace: WeakEntity<Workspace>,
         agent_connection_store: WeakEntity<AgentConnectionStore>,
         agent_server_store: WeakEntity<AgentServerStore>,
         window: &mut Window,
@@ -176,6 +194,7 @@ impl ThreadsArchiveView {
                 thread_metadata_store_subscription,
             ],
             _refresh_history_task: Task::ready(()),
+            workspace,
             agent_connection_store,
             agent_server_store,
         };
@@ -254,7 +273,14 @@ impl ThreadsArchiveView {
 
         self.list_state.reset(items.len());
         self.items = items;
-        self.hovered_index = None;
+
+        if !preserve {
+            self.hovered_index = None;
+        } else if let Some(ix) = self.hovered_index {
+            if ix >= self.items.len() || !self.is_selectable_item(ix) {
+                self.hovered_index = None;
+            }
+        }
 
         if let Some(scroll_top) = saved_scroll {
             self.list_state.scroll_to(scroll_top);
@@ -288,11 +314,57 @@ impl ThreadsArchiveView {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
+        if thread.folder_paths.is_empty() {
+            self.show_project_picker_for_thread(thread, window, cx);
+            return;
+        }
+
         self.selection = None;
         self.reset_filter_editor_text(window, cx);
         cx.emit(ThreadsArchiveViewEvent::Unarchive { thread });
     }
 
+    fn show_project_picker_for_thread(
+        &mut self,
+        thread: ThreadMetadata,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) {
+        let Some(workspace) = self.workspace.upgrade() else {
+            return;
+        };
+
+        let archive_view = cx.weak_entity();
+        let fs = workspace.read(cx).app_state().fs.clone();
+        let current_workspace_id = workspace.read(cx).database_id();
+        let sibling_workspace_ids: HashSet<WorkspaceId> = workspace
+            .read(cx)
+            .multi_workspace()
+            .and_then(|mw| mw.upgrade())
+            .map(|mw| {
+                mw.read(cx)
+                    .workspaces()
+                    .iter()
+                    .filter_map(|ws| ws.read(cx).database_id())
+                    .collect()
+            })
+            .unwrap_or_default();
+
+        workspace.update(cx, |workspace, cx| {
+            workspace.toggle_modal(window, cx, |window, cx| {
+                ProjectPickerModal::new(
+                    thread,
+                    fs,
+                    archive_view,
+                    current_workspace_id,
+                    sibling_workspace_ids,
+                    window,
+                    cx,
+                )
+            });
+        });
+    }
+
     fn is_selectable_item(&self, ix: usize) -> bool {
         matches!(self.items.get(ix), Some(ArchiveListItem::Entry { .. }))
     }
@@ -380,10 +452,6 @@ impl ThreadsArchiveView {
             return;
         };
 
-        if thread.folder_paths.is_empty() {
-            return;
-        }
-
         self.unarchive_thread(thread.clone(), window, cx);
     }
 
@@ -471,6 +539,7 @@ impl ThreadsArchiveView {
                                 let agent = thread.agent_id.clone();
                                 let session_id = thread.session_id.clone();
                                 cx.listener(move |this, _, _, cx| {
+                                    this.preserve_selection_on_next_update = true;
                                     this.delete_thread(session_id.clone(), agent.clone(), cx);
                                     cx.stop_propagation();
                                 })
@@ -683,3 +752,534 @@ impl Render for ThreadsArchiveView {
             .child(content)
     }
 }
+
+struct ProjectPickerModal {
+    picker: Entity<Picker<ProjectPickerDelegate>>,
+    _subscription: Subscription,
+}
+
+impl ProjectPickerModal {
+    fn new(
+        thread: ThreadMetadata,
+        fs: Arc<dyn Fs>,
+        archive_view: WeakEntity<ThreadsArchiveView>,
+        current_workspace_id: Option<WorkspaceId>,
+        sibling_workspace_ids: HashSet<WorkspaceId>,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) -> Self {
+        let delegate = ProjectPickerDelegate {
+            thread,
+            archive_view,
+            workspaces: Vec::new(),
+            filtered_entries: Vec::new(),
+            selected_index: 0,
+            current_workspace_id,
+            sibling_workspace_ids,
+            focus_handle: cx.focus_handle(),
+        };
+
+        let picker = cx.new(|cx| {
+            Picker::list(delegate, window, cx)
+                .list_measure_all()
+                .modal(false)
+        });
+
+        let picker_focus_handle = picker.focus_handle(cx);
+        picker.update(cx, |picker, _| {
+            picker.delegate.focus_handle = picker_focus_handle;
+        });
+
+        let _subscription =
+            cx.subscribe(&picker, |_this: &mut Self, _, _event: &DismissEvent, cx| {
+                cx.emit(DismissEvent);
+            });
+
+        let db = WorkspaceDb::global(cx);
+        cx.spawn_in(window, async move |this, cx| {
+            let workspaces = db
+                .recent_workspaces_on_disk(fs.as_ref())
+                .await
+                .log_err()
+                .unwrap_or_default();
+            let workspaces = resolve_worktree_workspaces(workspaces, fs.as_ref()).await;
+            this.update_in(cx, move |this, window, cx| {
+                this.picker.update(cx, move |picker, cx| {
+                    picker.delegate.workspaces = workspaces;
+                    picker.update_matches(picker.query(cx), window, cx)
+                })
+            })
+            .ok();
+        })
+        .detach();
+
+        picker.focus_handle(cx).focus(window, cx);
+
+        Self {
+            picker,
+            _subscription,
+        }
+    }
+}
+
+impl EventEmitter<DismissEvent> for ProjectPickerModal {}
+
+impl Focusable for ProjectPickerModal {
+    fn focus_handle(&self, cx: &App) -> FocusHandle {
+        self.picker.focus_handle(cx)
+    }
+}
+
+impl ModalView for ProjectPickerModal {}
+
+impl Render for ProjectPickerModal {
+    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+        v_flex()
+            .key_context("ProjectPickerModal")
+            .elevation_3(cx)
+            .w(rems(34.))
+            .on_action(cx.listener(|this, _: &workspace::Open, window, cx| {
+                this.picker.update(cx, |picker, cx| {
+                    picker.delegate.open_local_folder(window, cx)
+                })
+            }))
+            .child(self.picker.clone())
+    }
+}
+
+enum ProjectPickerEntry {
+    Header(SharedString),
+    Workspace(StringMatch),
+}
+
+struct ProjectPickerDelegate {
+    thread: ThreadMetadata,
+    archive_view: WeakEntity<ThreadsArchiveView>,
+    current_workspace_id: Option<WorkspaceId>,
+    sibling_workspace_ids: HashSet<WorkspaceId>,
+    workspaces: Vec<(
+        WorkspaceId,
+        SerializedWorkspaceLocation,
+        PathList,
+        DateTime<Utc>,
+    )>,
+    filtered_entries: Vec<ProjectPickerEntry>,
+    selected_index: usize,
+    focus_handle: FocusHandle,
+}
+
+impl ProjectPickerDelegate {
+    fn update_working_directories_and_unarchive(
+        &mut self,
+        paths: PathList,
+        window: &mut Window,
+        cx: &mut Context<Picker<Self>>,
+    ) {
+        self.thread.folder_paths = paths.clone();
+        ThreadMetadataStore::global(cx).update(cx, |store, cx| {
+            store.update_working_directories(&self.thread.session_id, paths, cx);
+        });
+
+        self.archive_view
+            .update(cx, |view, cx| {
+                view.selection = None;
+                view.reset_filter_editor_text(window, cx);
+                cx.emit(ThreadsArchiveViewEvent::Unarchive {
+                    thread: self.thread.clone(),
+                });
+            })
+            .log_err();
+    }
+
+    fn is_current_workspace(&self, workspace_id: WorkspaceId) -> bool {
+        self.current_workspace_id == Some(workspace_id)
+    }
+
+    fn is_sibling_workspace(&self, workspace_id: WorkspaceId) -> bool {
+        self.sibling_workspace_ids.contains(&workspace_id)
+            && !self.is_current_workspace(workspace_id)
+    }
+
+    fn selected_match(&self) -> Option<&StringMatch> {
+        match self.filtered_entries.get(self.selected_index)? {
+            ProjectPickerEntry::Workspace(hit) => Some(hit),
+            ProjectPickerEntry::Header(_) => None,
+        }
+    }
+
+    fn open_local_folder(&mut self, window: &mut Window, cx: &mut Context<Picker<Self>>) {
+        let paths_receiver = cx.prompt_for_paths(gpui::PathPromptOptions {
+            files: false,
+            directories: true,
+            multiple: false,
+            prompt: None,
+        });
+        cx.spawn_in(window, async move |this, cx| {
+            let Ok(Ok(Some(paths))) = paths_receiver.await else {
+                return;
+            };
+            if paths.is_empty() {
+                return;
+            }
+
+            let work_dirs = PathList::new(&paths);
+
+            this.update_in(cx, |this, window, cx| {
+                this.delegate
+                    .update_working_directories_and_unarchive(work_dirs, window, cx);
+                cx.emit(DismissEvent);
+            })
+            .log_err();
+        })
+        .detach();
+    }
+}
+
+impl EventEmitter<DismissEvent> for ProjectPickerDelegate {}
+
+impl PickerDelegate for ProjectPickerDelegate {
+    type ListItem = AnyElement;
+
+    fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
+        format!("Associate the \"{}\" thread with...", self.thread.title).into()
+    }
+
+    fn render_editor(
+        &self,
+        editor: &Arc<dyn ErasedEditor>,
+        window: &mut Window,
+        cx: &mut Context<Picker<Self>>,
+    ) -> Div {
+        h_flex()
+            .flex_none()
+            .h_9()
+            .px_2p5()
+            .justify_between()
+            .border_b_1()
+            .border_color(cx.theme().colors().border_variant)
+            .child(editor.render(window, cx))
+    }
+
+    fn match_count(&self) -> usize {
+        self.filtered_entries.len()
+    }
+
+    fn selected_index(&self) -> usize {
+        self.selected_index
+    }
+
+    fn set_selected_index(
+        &mut self,
+        ix: usize,
+        _window: &mut Window,
+        _cx: &mut Context<Picker<Self>>,
+    ) {
+        self.selected_index = ix;
+    }
+
+    fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context<Picker<Self>>) -> bool {
+        matches!(
+            self.filtered_entries.get(ix),
+            Some(ProjectPickerEntry::Workspace(_))
+        )
+    }
+
+    fn update_matches(
+        &mut self,
+        query: String,
+        _window: &mut Window,
+        cx: &mut Context<Picker<Self>>,
+    ) -> Task<()> {
+        let query = query.trim_start();
+        let smart_case = query.chars().any(|c| c.is_uppercase());
+        let is_empty_query = query.is_empty();
+
+        let sibling_candidates: Vec<_> = self
+            .workspaces
+            .iter()
+            .enumerate()
+            .filter(|(_, (id, _, _, _))| self.is_sibling_workspace(*id))
+            .map(|(id, (_, _, paths, _))| {
+                let combined_string = paths
+                    .ordered_paths()
+                    .map(|path| path.compact().to_string_lossy().into_owned())
+                    .collect::<Vec<_>>()
+                    .join("");
+                StringMatchCandidate::new(id, &combined_string)
+            })
+            .collect();
+
+        let mut sibling_matches = smol::block_on(fuzzy::match_strings(
+            &sibling_candidates,
+            query,
+            smart_case,
+            true,
+            100,
+            &Default::default(),
+            cx.background_executor().clone(),
+        ));
+
+        sibling_matches.sort_unstable_by(|a, b| {
+            b.score
+                .partial_cmp(&a.score)
+                .unwrap_or(std::cmp::Ordering::Equal)
+                .then_with(|| a.candidate_id.cmp(&b.candidate_id))
+        });
+
+        let recent_candidates: Vec<_> = self
+            .workspaces
+            .iter()
+            .enumerate()
+            .filter(|(_, (id, _, _, _))| {
+                !self.is_current_workspace(*id) && !self.is_sibling_workspace(*id)
+            })
+            .map(|(id, (_, _, paths, _))| {
+                let combined_string = paths
+                    .ordered_paths()
+                    .map(|path| path.compact().to_string_lossy().into_owned())
+                    .collect::<Vec<_>>()
+                    .join("");
+                StringMatchCandidate::new(id, &combined_string)
+            })
+            .collect();
+
+        let mut recent_matches = smol::block_on(fuzzy::match_strings(
+            &recent_candidates,
+            query,
+            smart_case,
+            true,
+            100,
+            &Default::default(),
+            cx.background_executor().clone(),
+        ));
+
+        recent_matches.sort_unstable_by(|a, b| {
+            b.score
+                .partial_cmp(&a.score)
+                .unwrap_or(std::cmp::Ordering::Equal)
+                .then_with(|| a.candidate_id.cmp(&b.candidate_id))
+        });
+
+        let mut entries = Vec::new();
+
+        let has_siblings_to_show = if is_empty_query {
+            !sibling_candidates.is_empty()
+        } else {
+            !sibling_matches.is_empty()
+        };
+
+        if has_siblings_to_show {
+            entries.push(ProjectPickerEntry::Header("This Window".into()));
+
+            if is_empty_query {
+                for (id, (workspace_id, _, _, _)) in self.workspaces.iter().enumerate() {
+                    if self.is_sibling_workspace(*workspace_id) {
+                        entries.push(ProjectPickerEntry::Workspace(StringMatch {
+                            candidate_id: id,
+                            score: 0.0,
+                            positions: Vec::new(),
+                            string: String::new(),
+                        }));
+                    }
+                }
+            } else {
+                for m in sibling_matches {
+                    entries.push(ProjectPickerEntry::Workspace(m));
+                }
+            }
+        }
+
+        let has_recent_to_show = if is_empty_query {
+            !recent_candidates.is_empty()
+        } else {
+            !recent_matches.is_empty()
+        };
+
+        if has_recent_to_show {
+            entries.push(ProjectPickerEntry::Header("Recent Projects".into()));
+
+            if is_empty_query {
+                for (id, (workspace_id, _, _, _)) in self.workspaces.iter().enumerate() {
+                    if !self.is_current_workspace(*workspace_id)
+                        && !self.is_sibling_workspace(*workspace_id)
+                    {
+                        entries.push(ProjectPickerEntry::Workspace(StringMatch {
+                            candidate_id: id,
+                            score: 0.0,
+                            positions: Vec::new(),
+                            string: String::new(),
+                        }));
+                    }
+                }
+            } else {
+                for m in recent_matches {
+                    entries.push(ProjectPickerEntry::Workspace(m));
+                }
+            }
+        }
+
+        self.filtered_entries = entries;
+
+        self.selected_index = self
+            .filtered_entries
+            .iter()
+            .position(|e| matches!(e, ProjectPickerEntry::Workspace(_)))
+            .unwrap_or(0);
+
+        Task::ready(())
+    }
+
+    fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
+        let candidate_id = match self.filtered_entries.get(self.selected_index) {
+            Some(ProjectPickerEntry::Workspace(hit)) => hit.candidate_id,
+            _ => return,
+        };
+        let Some((_workspace_id, _location, paths, _)) = self.workspaces.get(candidate_id) else {
+            return;
+        };
+
+        self.update_working_directories_and_unarchive(paths.clone(), window, cx);
+        cx.emit(DismissEvent);
+    }
+
+    fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context<Picker<Self>>) {}
+
+    fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
+        let text = if self.workspaces.is_empty() {
+            "No recent projects found"
+        } else {
+            "No matches"
+        };
+        Some(text.into())
+    }
+
+    fn render_match(
+        &self,
+        ix: usize,
+        selected: bool,
+        window: &mut Window,
+        cx: &mut Context<Picker<Self>>,
+    ) -> Option<Self::ListItem> {
+        match self.filtered_entries.get(ix)? {
+            ProjectPickerEntry::Header(title) => Some(
+                v_flex()
+                    .w_full()
+                    .gap_1()
+                    .when(ix > 0, |this| this.mt_1().child(Divider::horizontal()))
+                    .child(ListSubHeader::new(title.clone()).inset(true))
+                    .into_any_element(),
+            ),
+            ProjectPickerEntry::Workspace(hit) => {
+                let (_, location, paths, _) = self.workspaces.get(hit.candidate_id)?;
+
+                let ordered_paths: Vec<_> = paths
+                    .ordered_paths()
+                    .map(|p| p.compact().to_string_lossy().to_string())
+                    .collect();
+
+                let tooltip_path: SharedString = ordered_paths.join("\n").into();
+
+                let mut path_start_offset = 0;
+                let match_labels: Vec<_> = paths
+                    .ordered_paths()
+                    .map(|p| p.compact())
+                    .map(|path| {
+                        let path_string = path.to_string_lossy();
+                        let path_text = path_string.to_string();
+                        let path_byte_len = path_text.len();
+
+                        let path_positions: Vec<usize> = hit
+                            .positions
+                            .iter()
+                            .copied()
+                            .skip_while(|pos| *pos < path_start_offset)
+                            .take_while(|pos| *pos < path_start_offset + path_byte_len)
+                            .map(|pos| pos - path_start_offset)
+                            .collect();
+
+                        let file_name_match = path.file_name().map(|file_name| {
+                            let file_name_text = file_name.to_string_lossy().into_owned();
+                            let file_name_start = path_byte_len - file_name_text.len();
+                            let highlight_positions: Vec<usize> = path_positions
+                                .iter()
+                                .copied()
+                                .skip_while(|pos| *pos < file_name_start)
+                                .take_while(|pos| *pos < file_name_start + file_name_text.len())
+                                .map(|pos| pos - file_name_start)
+                                .collect();
+                            HighlightedMatch {
+                                text: file_name_text,
+                                highlight_positions,
+                                color: Color::Default,
+                            }
+                        });
+
+                        path_start_offset += path_byte_len;
+                        file_name_match
+                    })
+                    .collect();
+
+                let highlighted_match = HighlightedMatchWithPaths {
+                    prefix: match location {
+                        SerializedWorkspaceLocation::Remote(options) => {
+                            Some(SharedString::from(options.display_name()))
+                        }
+                        _ => None,
+                    },
+                    match_label: HighlightedMatch::join(match_labels.into_iter().flatten(), ", "),
+                    paths: Vec::new(),
+                };
+
+                Some(
+                    ListItem::new(ix)
+                        .toggle_state(selected)
+                        .inset(true)
+                        .spacing(ListItemSpacing::Sparse)
+                        .child(
+                            h_flex()
+                                .gap_3()
+                                .flex_grow()
+                                .child(highlighted_match.render(window, cx)),
+                        )
+                        .tooltip(Tooltip::text(tooltip_path))
+                        .into_any_element(),
+                )
+            }
+        }
+    }
+
+    fn render_footer(&self, _: &mut Window, cx: &mut Context<Picker<Self>>) -> Option<AnyElement> {
+        let has_selection = self.selected_match().is_some();
+        let focus_handle = self.focus_handle.clone();
+
+        Some(
+            h_flex()
+                .flex_1()
+                .p_1p5()
+                .gap_1()
+                .justify_end()
+                .border_t_1()
+                .border_color(cx.theme().colors().border_variant)
+                .child(
+                    Button::new("open_local_folder", "Choose from Local Folders")
+                        .key_binding(KeyBinding::for_action_in(
+                            &workspace::Open::default(),
+                            &focus_handle,
+                            cx,
+                        ))
+                        .on_click(cx.listener(|this, _, window, cx| {
+                            this.delegate.open_local_folder(window, cx);
+                        })),
+                )
+                .child(
+                    Button::new("select_project", "Select")
+                        .disabled(!has_selection)
+                        .key_binding(KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx))
+                        .on_click(cx.listener(move |picker, _, window, cx| {
+                            picker.delegate.confirm(false, window, cx);
+                        })),
+                )
+                .into_any(),
+        )
+    }
+}

crates/buffer_diff/src/buffer_diff.rs πŸ”—

@@ -171,9 +171,9 @@ impl sum_tree::Item for PendingHunk {
 impl sum_tree::Summary for DiffHunkSummary {
     type Context<'a> = &'a text::BufferSnapshot;
 
-    fn zero(_cx: Self::Context<'_>) -> Self {
+    fn zero(buffer: &text::BufferSnapshot) -> Self {
         DiffHunkSummary {
-            buffer_range: Anchor::MIN..Anchor::MIN,
+            buffer_range: Anchor::min_min_range_for_buffer(buffer.remote_id()),
             diff_base_byte_range: 0..0,
             added_rows: 0,
             removed_rows: 0,
@@ -248,6 +248,10 @@ impl BufferDiffSnapshot {
         buffer_diff.update(cx, |buffer_diff, cx| buffer_diff.snapshot(cx))
     }
 
+    pub fn buffer_id(&self) -> BufferId {
+        self.inner.buffer_snapshot.remote_id()
+    }
+
     pub fn is_empty(&self) -> bool {
         self.inner.hunks.is_empty()
     }
@@ -953,7 +957,7 @@ impl BufferDiffInner<language::BufferSnapshot> {
             .flat_map(move |hunk| {
                 [
                     (
-                        &hunk.buffer_range.start,
+                        hunk.buffer_range.start,
                         (
                             hunk.buffer_range.start,
                             hunk.diff_base_byte_range.start,
@@ -961,7 +965,7 @@ impl BufferDiffInner<language::BufferSnapshot> {
                         ),
                     ),
                     (
-                        &hunk.buffer_range.end,
+                        hunk.buffer_range.end,
                         (hunk.buffer_range.end, hunk.diff_base_byte_range.end, hunk),
                     ),
                 ]
@@ -1653,7 +1657,7 @@ impl BufferDiff {
     ) {
         let hunks = self
             .snapshot(cx)
-            .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer)
+            .hunks_intersecting_range(Anchor::min_max_range_for_buffer(buffer.remote_id()), buffer)
             .collect::<Vec<_>>();
         let Some(secondary) = self.secondary_diff.clone() else {
             return;

crates/call/src/call_impl/room.rs πŸ”—

@@ -21,7 +21,7 @@ use language::LanguageRegistry;
 use livekit::{LocalTrackPublication, ParticipantIdentity, RoomEvent};
 use livekit_client::{self as livekit, AudioStream, TrackSid};
 use postage::{sink::Sink, stream::Stream, watch};
-use project::Project;
+use project::{CURRENT_PROJECT_FEATURES, Project};
 use settings::Settings as _;
 use std::sync::atomic::AtomicU64;
 use std::{future::Future, mem, rc::Rc, sync::Arc, time::Duration, time::Instant};
@@ -1237,6 +1237,10 @@ impl Room {
             worktrees: project.read(cx).worktree_metadata_protos(cx),
             is_ssh_project: project.read(cx).is_via_remote_server(),
             windows_paths: Some(project.read(cx).path_style(cx) == PathStyle::Windows),
+            features: CURRENT_PROJECT_FEATURES
+                .iter()
+                .map(|s| s.to_string())
+                .collect(),
         });
 
         cx.spawn(async move |this, cx| {

crates/client/src/client.rs πŸ”—

@@ -2141,11 +2141,13 @@ mod tests {
             project_id: 1,
             committer_name: None,
             committer_email: None,
+            features: Vec::new(),
         });
         server.send(proto::JoinProject {
             project_id: 2,
             committer_name: None,
             committer_email: None,
+            features: Vec::new(),
         });
         done_rx1.recv().await.unwrap();
         done_rx2.recv().await.unwrap();

crates/collab/migrations.sqlite/20221109000000_test_schema.sql πŸ”—

@@ -48,7 +48,8 @@ CREATE TABLE "projects" (
     "host_connection_id" INTEGER,
     "host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
     "unregistered" BOOLEAN NOT NULL DEFAULT FALSE,
-    "windows_paths" BOOLEAN NOT NULL DEFAULT FALSE
+    "windows_paths" BOOLEAN NOT NULL DEFAULT FALSE,
+    "features" TEXT NOT NULL DEFAULT ''
 );
 
 CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id");

crates/collab/migrations/20251208000000_test_schema.sql πŸ”—

@@ -332,7 +332,8 @@ CREATE TABLE public.projects (
     room_id integer,
     host_connection_id integer,
     host_connection_server_id integer,
-    windows_paths boolean DEFAULT false
+    windows_paths boolean DEFAULT false,
+    features text NOT NULL DEFAULT ''
 );
 
 CREATE SEQUENCE public.projects_id_seq

crates/collab/src/db.rs πŸ”—

@@ -589,6 +589,7 @@ pub struct Project {
     pub repositories: Vec<proto::UpdateRepository>,
     pub language_servers: Vec<LanguageServer>,
     pub path_style: PathStyle,
+    pub features: Vec<String>,
 }
 
 pub struct ProjectCollaborator {

crates/collab/src/db/queries/projects.rs πŸ”—

@@ -34,6 +34,7 @@ impl Database {
         worktrees: &[proto::WorktreeMetadata],
         is_ssh_project: bool,
         windows_paths: bool,
+        features: &[String],
     ) -> Result<TransactionGuard<(ProjectId, proto::Room)>> {
         self.room_transaction(room_id, |tx| async move {
             let participant = room_participant::Entity::find()
@@ -71,6 +72,7 @@ impl Database {
                 ))),
                 id: ActiveValue::NotSet,
                 windows_paths: ActiveValue::set(windows_paths),
+                features: ActiveValue::set(serde_json::to_string(features).unwrap()),
             }
             .insert(&*tx)
             .await?;
@@ -948,6 +950,7 @@ impl Database {
         } else {
             PathStyle::Posix
         };
+        let features: Vec<String> = serde_json::from_str(&project.features).unwrap_or_default();
 
         let project = Project {
             id: project.id,
@@ -977,6 +980,7 @@ impl Database {
                 })
                 .collect(),
             path_style,
+            features,
         };
         Ok((project, replica_id as ReplicaId))
     }

crates/collab/src/db/tables/project.rs πŸ”—

@@ -13,6 +13,7 @@ pub struct Model {
     pub host_connection_id: Option<i32>,
     pub host_connection_server_id: Option<ServerId>,
     pub windows_paths: bool,
+    pub features: String,
 }
 
 impl Model {

crates/collab/src/rpc.rs πŸ”—

@@ -1775,6 +1775,7 @@ async fn share_project(
             &request.worktrees,
             request.is_ssh_project,
             request.windows_paths.unwrap_or(false),
+            &request.features,
         )
         .await?;
     response.send(proto::ShareProjectResponse {
@@ -1840,6 +1841,28 @@ async fn join_project(
     tracing::info!(%project_id, "join project");
 
     let db = session.db().await;
+    let project_model = db.get_project(project_id).await?;
+    let host_features: Vec<String> =
+        serde_json::from_str(&project_model.features).unwrap_or_default();
+    let guest_features: HashSet<_> = request.features.iter().collect();
+    let host_features_set: HashSet<_> = host_features.iter().collect();
+    if guest_features != host_features_set {
+        let host_connection_id = project_model.host_connection()?;
+        let mut pool = session.connection_pool().await;
+        let host_version = pool
+            .connection(host_connection_id)
+            .map(|c| c.zed_version.to_string());
+        let guest_version = pool
+            .connection(session.connection_id)
+            .map(|c| c.zed_version.to_string());
+        drop(pool);
+        Err(anyhow!(
+            "The host (v{}) and guest (v{}) are using incompatible versions of Zed. The peer with the older version must update to collaborate.",
+            host_version.as_deref().unwrap_or("unknown"),
+            guest_version.as_deref().unwrap_or("unknown"),
+        ))?;
+    }
+
     let (project, replica_id) = &mut *db
         .join_project(
             project_id,
@@ -1850,6 +1873,7 @@ async fn join_project(
         )
         .await?;
     drop(db);
+
     tracing::info!(%project_id, "join remote project");
     let collaborators = project
         .collaborators
@@ -1909,6 +1933,7 @@ async fn join_project(
         language_server_capabilities,
         role: project.role.into(),
         windows_paths: project.path_style == PathStyle::Windows,
+        features: project.features.clone(),
     })?;
 
     for (worktree_id, worktree) in mem::take(&mut project.worktrees) {

crates/collab/tests/integration/channel_buffer_tests.rs πŸ”—

@@ -313,7 +313,7 @@ fn assert_remote_selections(
     let snapshot = editor.snapshot(window, cx);
     let hub = editor.collaboration_hub().unwrap();
     let collaborators = hub.collaborators(cx);
-    let range = Anchor::min()..Anchor::max();
+    let range = Anchor::Min..Anchor::Max;
     let remote_selections = snapshot
         .remote_selections_in_range(&range, hub, cx)
         .map(|s| {

crates/collab/tests/integration/db_tests/db_tests.rs πŸ”—

@@ -350,20 +350,41 @@ async fn test_project_count(db: &Arc<Database>) {
         .unwrap();
     assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
 
-    db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, false)
-        .await
-        .unwrap();
+    db.share_project(
+        room_id,
+        ConnectionId { owner_id, id: 1 },
+        &[],
+        false,
+        false,
+        &[],
+    )
+    .await
+    .unwrap();
     assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
 
-    db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, false)
-        .await
-        .unwrap();
+    db.share_project(
+        room_id,
+        ConnectionId { owner_id, id: 1 },
+        &[],
+        false,
+        false,
+        &[],
+    )
+    .await
+    .unwrap();
     assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
 
     // Projects shared by admins aren't counted.
-    db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false, false)
-        .await
-        .unwrap();
+    db.share_project(
+        room_id,
+        ConnectionId { owner_id, id: 0 },
+        &[],
+        false,
+        false,
+        &[],
+    )
+    .await
+    .unwrap();
     assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
 
     db.leave_room(ConnectionId { owner_id, id: 1 })

crates/collab/tests/integration/following_tests.rs πŸ”—

@@ -2184,6 +2184,7 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut
         );
         mb
     });
+    let multibuffer_snapshot = multibuffer.update(cx_a, |mb, cx| mb.snapshot(cx));
     let snapshot = buffer.update(cx_a, |buffer, _| buffer.snapshot());
     let editor: Entity<Editor> = cx_a.new_window_entity(|window, cx| {
         Editor::for_multibuffer(
@@ -2205,7 +2206,13 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut
         editor
             .selections
             .disjoint_anchor_ranges()
-            .map(|range| range.start.text_anchor.to_point(&snapshot))
+            .map(|range| {
+                multibuffer_snapshot
+                    .anchor_to_buffer_anchor(range.start)
+                    .unwrap()
+                    .0
+                    .to_point(&snapshot)
+            })
             .collect::<Vec<_>>()
     });
     multibuffer.update(cx_a, |multibuffer, cx| {
@@ -2232,7 +2239,13 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut
         editor
             .selections
             .disjoint_anchor_ranges()
-            .map(|range| range.start.text_anchor.to_point(&snapshot))
+            .map(|range| {
+                multibuffer_snapshot
+                    .anchor_to_buffer_anchor(range.start)
+                    .unwrap()
+                    .0
+                    .to_point(&snapshot)
+            })
             .collect::<Vec<_>>()
     });
     assert_eq!(positions, new_positions);

crates/collab_ui/src/collab_panel.rs πŸ”—

@@ -1166,7 +1166,7 @@ impl CollabPanel {
                                 "Failed to join project",
                                 window,
                                 cx,
-                                |_, _, _| None,
+                                |error, _, _| Some(format!("{error:#}")),
                             );
                     })
                     .ok();
@@ -1729,7 +1729,7 @@ impl CollabPanel {
                                 "Failed to join project",
                                 window,
                                 cx,
-                                |_, _, _| None,
+                                |error, _, _| Some(format!("{error:#}")),
                             );
                     }
                 }

crates/csv_preview/src/csv_preview.rs πŸ”—

@@ -9,7 +9,10 @@ use std::{
 };
 
 use crate::table_data_engine::TableDataEngine;
-use ui::{SharedString, TableColumnWidths, TableInteractionState, prelude::*};
+use ui::{
+    AbsoluteLength, DefiniteLength, RedistributableColumnsState, SharedString,
+    TableInteractionState, TableResizeBehavior, prelude::*,
+};
 use workspace::{Item, SplitDirection, Workspace};
 
 use crate::{parser::EditorState, settings::CsvPreviewSettings, types::TableLikeContent};
@@ -52,6 +55,32 @@ pub fn init(cx: &mut App) {
 }
 
 impl CsvPreviewView {
+    pub(crate) fn sync_column_widths(&self, cx: &mut Context<Self>) {
+        // plus 1 for the rows column
+        let cols = self.engine.contents.headers.cols() + 1;
+        let remaining_col_number = cols.saturating_sub(1);
+        let fraction = if remaining_col_number > 0 {
+            1. / remaining_col_number as f32
+        } else {
+            1.
+        };
+        let mut widths = vec![DefiniteLength::Fraction(fraction); cols];
+        let line_number_width = self.calculate_row_identifier_column_width();
+        widths[0] = DefiniteLength::Absolute(AbsoluteLength::Pixels(line_number_width.into()));
+
+        let mut resize_behaviors = vec![TableResizeBehavior::Resizable; cols];
+        resize_behaviors[0] = TableResizeBehavior::None;
+
+        self.column_widths.widths.update(cx, |state, _cx| {
+            if state.cols() != cols
+                || state.initial_widths().as_slice() != widths.as_slice()
+                || state.resize_behavior().as_slice() != resize_behaviors.as_slice()
+            {
+                *state = RedistributableColumnsState::new(cols, widths, resize_behaviors);
+            }
+        });
+    }
+
     pub fn register(workspace: &mut Workspace) {
         workspace.register_action_renderer(|div, _, _, cx| {
             div.when(cx.has_flag::<TabularDataPreviewFeatureFlag>(), |div| {
@@ -132,9 +161,7 @@ impl CsvPreviewView {
                 editor,
                 |this: &mut CsvPreviewView, _editor, event: &EditorEvent, cx| {
                     match event {
-                        EditorEvent::Edited { .. }
-                        | EditorEvent::DirtyChanged
-                        | EditorEvent::ExcerptsEdited { .. } => {
+                        EditorEvent::Edited { .. } | EditorEvent::DirtyChanged => {
                             this.parse_csv_from_active_editor(true, cx);
                         }
                         _ => {}
@@ -286,18 +313,19 @@ impl PerformanceMetrics {
 
 /// Holds state of column widths for a table component in CSV preview.
 pub(crate) struct ColumnWidths {
-    pub widths: Entity<TableColumnWidths>,
+    pub widths: Entity<RedistributableColumnsState>,
 }
 
 impl ColumnWidths {
     pub(crate) fn new(cx: &mut Context<CsvPreviewView>, cols: usize) -> Self {
         Self {
-            widths: cx.new(|cx| TableColumnWidths::new(cols, cx)),
+            widths: cx.new(|_cx| {
+                RedistributableColumnsState::new(
+                    cols,
+                    vec![ui::DefiniteLength::Fraction(1.0 / cols as f32); cols],
+                    vec![ui::TableResizeBehavior::Resizable; cols],
+                )
+            }),
         }
     }
-    /// Replace the current `TableColumnWidths` entity with a new one for the given column count.
-    pub(crate) fn replace(&self, cx: &mut Context<CsvPreviewView>, cols: usize) {
-        self.widths
-            .update(cx, |entity, cx| *entity = TableColumnWidths::new(cols, cx));
-    }
 }

crates/csv_preview/src/parser.rs πŸ”—

@@ -80,11 +80,8 @@ impl CsvPreviewView {
                     .insert("Parsing", (parse_duration, Instant::now()));
 
                 log::debug!("Parsed {} rows", parsed_csv.rows.len());
-                // Update table width so it can be rendered properly
-                let cols = parsed_csv.headers.cols();
-                view.column_widths.replace(cx, cols + 1); // Add 1 for the line number column
-
                 view.engine.contents = parsed_csv;
+                view.sync_column_widths(cx);
                 view.last_parse_end_time = Some(parse_end_time);
 
                 view.apply_filter_sort();

crates/csv_preview/src/renderer/render_table.rs πŸ”—

@@ -1,11 +1,9 @@
 use crate::types::TableCell;
 use gpui::{AnyElement, Entity};
 use std::ops::Range;
-use ui::Table;
-use ui::TableColumnWidths;
-use ui::TableResizeBehavior;
-use ui::UncheckedTableRow;
-use ui::{DefiniteLength, div, prelude::*};
+use ui::{
+    ColumnWidthConfig, RedistributableColumnsState, Table, UncheckedTableRow, div, prelude::*,
+};
 
 use crate::{
     CsvPreviewView,
@@ -15,44 +13,22 @@ use crate::{
 
 impl CsvPreviewView {
     /// Creates a new table.
-    /// Column number is derived from the `TableColumnWidths` entity.
+    /// Column number is derived from the `RedistributableColumnsState` entity.
     pub(crate) fn create_table(
         &self,
-        current_widths: &Entity<TableColumnWidths>,
+        current_widths: &Entity<RedistributableColumnsState>,
         cx: &mut Context<Self>,
     ) -> AnyElement {
-        let cols = current_widths.read(cx).cols();
-        let remaining_col_number = cols - 1;
-        let fraction = if remaining_col_number > 0 {
-            1. / remaining_col_number as f32
-        } else {
-            1. // only column with line numbers is present. Put 100%, but it will be overwritten anyways :D
-        };
-        let mut widths = vec![DefiniteLength::Fraction(fraction); cols];
-        let line_number_width = self.calculate_row_identifier_column_width();
-        widths[0] = DefiniteLength::Absolute(AbsoluteLength::Pixels(line_number_width.into()));
-
-        let mut resize_behaviors = vec![TableResizeBehavior::Resizable; cols];
-        resize_behaviors[0] = TableResizeBehavior::None;
-
-        self.create_table_inner(
-            self.engine.contents.rows.len(),
-            widths,
-            resize_behaviors,
-            current_widths,
-            cx,
-        )
+        self.create_table_inner(self.engine.contents.rows.len(), current_widths, cx)
     }
 
     fn create_table_inner(
         &self,
         row_count: usize,
-        widths: UncheckedTableRow<DefiniteLength>,
-        resize_behaviors: UncheckedTableRow<TableResizeBehavior>,
-        current_widths: &Entity<TableColumnWidths>,
+        current_widths: &Entity<RedistributableColumnsState>,
         cx: &mut Context<Self>,
     ) -> AnyElement {
-        let cols = widths.len();
+        let cols = current_widths.read(cx).cols();
         // Create headers array with interactive elements
         let mut headers = Vec::with_capacity(cols);
 
@@ -78,8 +54,7 @@ impl CsvPreviewView {
         Table::new(cols)
             .interactable(&self.table_interaction_state)
             .striped()
-            .column_widths(widths)
-            .resizable_columns(resize_behaviors, current_widths, cx)
+            .width_config(ColumnWidthConfig::redistributable(current_widths.clone()))
             .header(headers)
             .disable_base_style()
             .map(|table| {

crates/csv_preview/src/renderer/table_cell.rs πŸ”—

@@ -53,7 +53,6 @@ fn create_table_cell(
         .px_1()
         .bg(cx.theme().colors().editor_background)
         .border_b_1()
-        .border_r_1()
         .border_color(cx.theme().colors().border_variant)
         .map(|div| match vertical_alignment {
             VerticalAlignment::Top => div.items_start(),

crates/debugger_ui/src/debugger_ui.rs πŸ”—

@@ -299,7 +299,7 @@ pub fn init(cx: &mut App) {
                                     return;
                                 }
                                 maybe!({
-                                    let (buffer, position, _) = editor
+                                    let (buffer, position) = editor
                                         .update(cx, |editor, cx| {
                                             let cursor_point: language::Point = editor
                                                 .selections

crates/debugger_ui/src/session/running/console.rs πŸ”—

@@ -7,8 +7,8 @@ use anyhow::Result;
 use collections::HashMap;
 use dap::{CompletionItem, CompletionItemType, OutputEvent};
 use editor::{
-    Bias, CompletionProvider, Editor, EditorElement, EditorMode, EditorStyle, ExcerptId,
-    HighlightKey, MultiBufferOffset, SizingBehavior,
+    Bias, CompletionProvider, Editor, EditorElement, EditorMode, EditorStyle, HighlightKey,
+    MultiBufferOffset, SizingBehavior,
 };
 use fuzzy::StringMatchCandidate;
 use gpui::{
@@ -528,7 +528,6 @@ struct ConsoleQueryBarCompletionProvider(WeakEntity<Console>);
 impl CompletionProvider for ConsoleQueryBarCompletionProvider {
     fn completions(
         &self,
-        _excerpt_id: ExcerptId,
         buffer: &Entity<Buffer>,
         buffer_position: language::Anchor,
         _trigger: editor::CompletionContext,

crates/dev_container/Cargo.toml πŸ”—

@@ -5,21 +5,26 @@ publish.workspace = true
 edition.workspace = true
 
 [dependencies]
+async-tar.workspace = true
+async-trait.workspace = true
 serde.workspace = true
 serde_json.workspace = true
+serde_json_lenient.workspace = true
+shlex.workspace = true
 http_client.workspace = true
 http.workspace = true
 gpui.workspace = true
+fs.workspace = true
 futures.workspace = true
 log.workspace = true
-node_runtime.workspace = true
 menu.workspace = true
 paths.workspace = true
 picker.workspace = true
+project.workspace = true
 settings.workspace = true
-smol.workspace = true
 ui.workspace = true
 util.workspace = true
+walkdir.workspace = true
 worktree.workspace = true
 workspace.workspace = true
 
@@ -32,6 +37,8 @@ settings = { workspace = true, features = ["test-support"] }
 
 workspace = { workspace = true, features = ["test-support"] }
 worktree = { workspace = true, features = ["test-support"] }
+util = { workspace = true, features = ["test-support"] }
+env_logger.workspace = true
 
 [lints]
-workspace = true
+workspace = true

crates/dev_container/src/command_json.rs πŸ”—

@@ -0,0 +1,64 @@
+use std::process::Output;
+
+use async_trait::async_trait;
+use serde::Deserialize;
+use util::command::Command;
+
+use crate::devcontainer_api::DevContainerError;
+
+pub(crate) struct DefaultCommandRunner;
+
+impl DefaultCommandRunner {
+    pub(crate) fn new() -> Self {
+        Self
+    }
+}
+
+#[async_trait]
+impl CommandRunner for DefaultCommandRunner {
+    async fn run_command(&self, command: &mut Command) -> Result<Output, std::io::Error> {
+        command.output().await
+    }
+}
+
+#[async_trait]
+pub(crate) trait CommandRunner: Send + Sync {
+    async fn run_command(&self, command: &mut Command) -> Result<Output, std::io::Error>;
+}
+
+pub(crate) async fn evaluate_json_command<T>(
+    mut command: Command,
+) -> Result<Option<T>, DevContainerError>
+where
+    T: for<'de> Deserialize<'de>,
+{
+    let output = command.output().await.map_err(|e| {
+        log::error!("Error running command {:?}: {e}", command);
+        DevContainerError::CommandFailed(command.get_program().display().to_string())
+    })?;
+
+    deserialize_json_output(output).map_err(|e| {
+        log::error!("Error running command {:?}: {e}", command);
+        DevContainerError::CommandFailed(command.get_program().display().to_string())
+    })
+}
+
+pub(crate) fn deserialize_json_output<T>(output: Output) -> Result<Option<T>, String>
+where
+    T: for<'de> Deserialize<'de>,
+{
+    if output.status.success() {
+        let raw = String::from_utf8_lossy(&output.stdout);
+        if raw.is_empty() || raw.trim() == "[]" || raw.trim() == "{}" {
+            return Ok(None);
+        }
+        let value = serde_json_lenient::from_str(&raw)
+            .map_err(|e| format!("Error deserializing from raw json: {e}"));
+        value
+    } else {
+        let std_err = String::from_utf8_lossy(&output.stderr);
+        Err(format!(
+            "Sent non-successful output; cannot deserialize. StdErr: {std_err}"
+        ))
+    }
+}

crates/dev_container/src/devcontainer_api.rs πŸ”—

@@ -2,18 +2,26 @@ use std::{
     collections::{HashMap, HashSet},
     fmt::Display,
     path::{Path, PathBuf},
+    sync::Arc,
 };
 
-use node_runtime::NodeRuntime;
+use futures::TryFutureExt;
+use gpui::{AsyncWindowContext, Entity};
+use project::Worktree;
 use serde::Deserialize;
-use settings::DevContainerConnection;
-use smol::fs;
-use util::command::Command;
+use settings::{DevContainerConnection, infer_json_indent_size, replace_value_in_json_text};
 use util::rel_path::RelPath;
+use walkdir::WalkDir;
 use workspace::Workspace;
 use worktree::Snapshot;
 
-use crate::{DevContainerContext, DevContainerFeature, DevContainerTemplate};
+use crate::{
+    DevContainerContext, DevContainerFeature, DevContainerTemplate,
+    devcontainer_json::DevContainer,
+    devcontainer_manifest::{read_devcontainer_configuration, spawn_dev_container},
+    devcontainer_templates_repository, get_latest_oci_manifest, get_oci_token, ghcr_registry,
+    oci::download_oci_tarball,
+};
 
 /// Represents a discovered devcontainer configuration
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -42,63 +50,33 @@ impl DevContainerConfig {
 
 #[derive(Debug, Deserialize)]
 #[serde(rename_all = "camelCase")]
-struct DevContainerUp {
-    _outcome: String,
-    container_id: String,
-    remote_user: String,
-    remote_workspace_folder: String,
+pub(crate) struct DevContainerUp {
+    pub(crate) container_id: String,
+    pub(crate) remote_user: String,
+    pub(crate) remote_workspace_folder: String,
+    #[serde(default)]
+    pub(crate) extension_ids: Vec<String>,
+    #[serde(default)]
+    pub(crate) remote_env: HashMap<String, String>,
 }
 
-#[derive(Debug, Deserialize)]
-#[serde(rename_all = "camelCase")]
+#[derive(Debug)]
 pub(crate) struct DevContainerApply {
-    pub(crate) files: Vec<String>,
-}
-
-#[derive(Debug, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub(crate) struct DevContainerConfiguration {
-    name: Option<String>,
-}
-
-#[derive(Debug, Deserialize)]
-pub(crate) struct DevContainerConfigurationOutput {
-    configuration: DevContainerConfiguration,
-}
-
-pub(crate) struct DevContainerCli {
-    pub path: PathBuf,
-    node_runtime_path: Option<PathBuf>,
-}
-
-impl DevContainerCli {
-    fn command(&self, use_podman: bool) -> Command {
-        let mut command = if let Some(node_runtime_path) = &self.node_runtime_path {
-            let mut command =
-                util::command::new_command(node_runtime_path.as_os_str().display().to_string());
-            command.arg(self.path.display().to_string());
-            command
-        } else {
-            util::command::new_command(self.path.display().to_string())
-        };
-
-        if use_podman {
-            command.arg("--docker-path");
-            command.arg("podman");
-        }
-        command
-    }
+    pub(crate) project_files: Vec<Arc<RelPath>>,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum DevContainerError {
+    CommandFailed(String),
     DockerNotAvailable,
-    DevContainerCliNotAvailable,
+    ContainerNotValid(String),
     DevContainerTemplateApplyFailed(String),
+    DevContainerScriptsFailed,
     DevContainerUpFailed(String),
     DevContainerNotFound,
     DevContainerParseFailed,
-    NodeRuntimeNotAvailable,
+    FilesystemError,
+    ResourceFetchFailed,
     NotInValidProject,
 }
 
@@ -110,8 +88,11 @@ impl Display for DevContainerError {
             match self {
                 DevContainerError::DockerNotAvailable =>
                     "docker CLI not found on $PATH".to_string(),
-                DevContainerError::DevContainerCliNotAvailable =>
-                    "devcontainer CLI not found on path".to_string(),
+                DevContainerError::ContainerNotValid(id) => format!(
+                    "docker image {id} did not have expected configuration for a dev container"
+                ),
+                DevContainerError::DevContainerScriptsFailed =>
+                    "lifecycle scripts could not execute for dev container".to_string(),
                 DevContainerError::DevContainerUpFailed(_) => {
                     "DevContainer creation failed".to_string()
                 }
@@ -122,14 +103,32 @@ impl Display for DevContainerError {
                     "No valid dev container definition found in project".to_string(),
                 DevContainerError::DevContainerParseFailed =>
                     "Failed to parse file .devcontainer/devcontainer.json".to_string(),
-                DevContainerError::NodeRuntimeNotAvailable =>
-                    "Cannot find a valid node runtime".to_string(),
                 DevContainerError::NotInValidProject => "Not within a valid project".to_string(),
+                DevContainerError::CommandFailed(program) =>
+                    format!("Failure running external program {program}"),
+                DevContainerError::FilesystemError =>
+                    "Error downloading resources locally".to_string(),
+                DevContainerError::ResourceFetchFailed =>
+                    "Failed to fetch resources from template or feature repository".to_string(),
             }
         )
     }
 }
 
+pub(crate) async fn read_default_devcontainer_configuration(
+    cx: &DevContainerContext,
+    environment: HashMap<String, String>,
+) -> Result<DevContainer, DevContainerError> {
+    let default_config = DevContainerConfig::default_config();
+
+    read_devcontainer_configuration(default_config, cx, environment)
+        .await
+        .map_err(|e| {
+            log::error!("Default configuration not found: {:?}", e);
+            DevContainerError::DevContainerNotFound
+        })
+}
+
 /// Finds all available devcontainer configurations in the project.
 ///
 /// See [`find_configs_in_snapshot`] for the locations that are scanned.
@@ -241,27 +240,35 @@ pub fn find_configs_in_snapshot(snapshot: &Snapshot) -> Vec<DevContainerConfig>
 pub async fn start_dev_container_with_config(
     context: DevContainerContext,
     config: Option<DevContainerConfig>,
+    environment: HashMap<String, String>,
 ) -> Result<(DevContainerConnection, String), DevContainerError> {
     check_for_docker(context.use_podman).await?;
-    let cli = ensure_devcontainer_cli(&context.node_runtime).await?;
-    let config_path = config.map(|c| context.project_directory.join(&c.config_path));
 
-    match devcontainer_up(&context, &cli, config_path.as_deref()).await {
+    let Some(actual_config) = config.clone() else {
+        return Err(DevContainerError::NotInValidProject);
+    };
+
+    match spawn_dev_container(
+        &context,
+        environment.clone(),
+        actual_config.clone(),
+        context.project_directory.clone().as_ref(),
+    )
+    .await
+    {
         Ok(DevContainerUp {
             container_id,
             remote_workspace_folder,
             remote_user,
+            extension_ids,
+            remote_env,
             ..
         }) => {
             let project_name =
-                match read_devcontainer_configuration(&context, &cli, config_path.as_deref()).await
-                {
-                    Ok(DevContainerConfigurationOutput {
-                        configuration:
-                            DevContainerConfiguration {
-                                name: Some(project_name),
-                            },
-                    }) => project_name,
+                match read_devcontainer_configuration(actual_config, &context, environment).await {
+                    Ok(DevContainer {
+                        name: Some(name), ..
+                    }) => name,
                     _ => get_backup_project_name(&remote_workspace_folder, &container_id),
                 };
 
@@ -270,31 +277,19 @@ pub async fn start_dev_container_with_config(
                 container_id,
                 use_podman: context.use_podman,
                 remote_user,
+                extension_ids,
+                remote_env: remote_env.into_iter().collect(),
             };
 
             Ok((connection, remote_workspace_folder))
         }
         Err(err) => {
-            let message = format!("Failed with nested error: {}", err);
+            let message = format!("Failed with nested error: {:?}", err);
             Err(DevContainerError::DevContainerUpFailed(message))
         }
     }
 }
 
-#[cfg(not(target_os = "windows"))]
-fn dev_container_cli() -> String {
-    "devcontainer".to_string()
-}
-
-#[cfg(target_os = "windows")]
-fn dev_container_cli() -> String {
-    "devcontainer.cmd".to_string()
-}
-
-fn dev_container_script() -> String {
-    "devcontainer.js".to_string()
-}
-
 async fn check_for_docker(use_podman: bool) -> Result<(), DevContainerError> {
     let mut command = if use_podman {
         util::command::new_command("podman")
@@ -312,261 +307,157 @@ async fn check_for_docker(use_podman: bool) -> Result<(), DevContainerError> {
     }
 }
 
-pub(crate) async fn ensure_devcontainer_cli(
-    node_runtime: &NodeRuntime,
-) -> Result<DevContainerCli, DevContainerError> {
-    let mut command = util::command::new_command(&dev_container_cli());
-    command.arg("--version");
-
-    if let Err(e) = command.output().await {
-        log::error!(
-            "Unable to find devcontainer CLI in $PATH. Checking for a zed installed version. Error: {:?}",
-            e
-        );
-
-        let Ok(node_runtime_path) = node_runtime.binary_path().await else {
-            return Err(DevContainerError::NodeRuntimeNotAvailable);
+pub(crate) async fn apply_devcontainer_template(
+    worktree: Entity<Worktree>,
+    template: &DevContainerTemplate,
+    template_options: &HashMap<String, String>,
+    features_selected: &HashSet<DevContainerFeature>,
+    context: &DevContainerContext,
+    cx: &mut AsyncWindowContext,
+) -> Result<DevContainerApply, DevContainerError> {
+    let token = get_oci_token(
+        ghcr_registry(),
+        devcontainer_templates_repository(),
+        &context.http_client,
+    )
+    .map_err(|e| {
+        log::error!("Failed to get OCI auth token: {e}");
+        DevContainerError::ResourceFetchFailed
+    })
+    .await?;
+    let manifest = get_latest_oci_manifest(
+        &token.token,
+        ghcr_registry(),
+        devcontainer_templates_repository(),
+        &context.http_client,
+        Some(&template.id),
+    )
+    .map_err(|e| {
+        log::error!("Failed to fetch template from OCI repository: {e}");
+        DevContainerError::ResourceFetchFailed
+    })
+    .await?;
+
+    let layer = &manifest.layers.get(0).ok_or_else(|| {
+        log::error!("Given manifest has no layers to query for blob. Aborting");
+        DevContainerError::ResourceFetchFailed
+    })?;
+
+    let timestamp = std::time::SystemTime::now()
+        .duration_since(std::time::UNIX_EPOCH)
+        .map(|d| d.as_millis())
+        .unwrap_or(0);
+    let extract_dir = std::env::temp_dir()
+        .join(&template.id)
+        .join(format!("extracted-{timestamp}"));
+
+    context.fs.create_dir(&extract_dir).await.map_err(|e| {
+        log::error!("Could not create temporary directory: {e}");
+        DevContainerError::FilesystemError
+    })?;
+
+    download_oci_tarball(
+        &token.token,
+        ghcr_registry(),
+        devcontainer_templates_repository(),
+        &layer.digest,
+        "application/vnd.oci.image.manifest.v1+json",
+        &extract_dir,
+        &context.http_client,
+        &context.fs,
+        Some(&template.id),
+    )
+    .map_err(|e| {
+        log::error!("Error downloading tarball: {:?}", e);
+        DevContainerError::ResourceFetchFailed
+    })
+    .await?;
+
+    let downloaded_devcontainer_folder = &extract_dir.join(".devcontainer/");
+    let mut project_files = Vec::new();
+    for entry in WalkDir::new(downloaded_devcontainer_folder) {
+        let Ok(entry) = entry else {
+            continue;
         };
-
-        let datadir_cli_path = paths::devcontainer_dir()
-            .join("node_modules")
-            .join("@devcontainers")
-            .join("cli")
-            .join(&dev_container_script());
-
-        log::debug!(
-            "devcontainer not found in path, using local location: ${}",
-            datadir_cli_path.display()
-        );
-
-        let mut command =
-            util::command::new_command(node_runtime_path.as_os_str().display().to_string());
-        command.arg(datadir_cli_path.display().to_string());
-        command.arg("--version");
-
-        match command.output().await {
-            Err(e) => log::error!(
-                "Unable to find devcontainer CLI in Data dir. Will try to install. Error: {:?}",
-                e
-            ),
-            Ok(output) => {
-                if output.status.success() {
-                    log::info!("Found devcontainer CLI in Data dir");
-                    return Ok(DevContainerCli {
-                        path: datadir_cli_path.clone(),
-                        node_runtime_path: Some(node_runtime_path.clone()),
-                    });
-                } else {
-                    log::error!(
-                        "Could not run devcontainer CLI from data_dir. Will try once more to install. Output: {:?}",
-                        output
-                    );
-                }
-            }
+        if !entry.file_type().is_file() {
+            continue;
         }
-
-        if let Err(e) = fs::create_dir_all(paths::devcontainer_dir()).await {
-            log::error!("Unable to create devcontainer directory. Error: {:?}", e);
-            return Err(DevContainerError::DevContainerCliNotAvailable);
+        let relative_path = entry.path().strip_prefix(&extract_dir).map_err(|e| {
+            log::error!("Can't create relative path: {e}");
+            DevContainerError::FilesystemError
+        })?;
+        let rel_path = RelPath::unix(relative_path)
+            .map_err(|e| {
+                log::error!("Can't create relative path: {e}");
+                DevContainerError::FilesystemError
+            })?
+            .into_arc();
+        let content = context.fs.load(entry.path()).await.map_err(|e| {
+            log::error!("Unable to read file: {e}");
+            DevContainerError::FilesystemError
+        })?;
+
+        let mut content = expand_template_options(content, template_options);
+        if let Some("devcontainer.json") = &rel_path.file_name() {
+            content = insert_features_into_devcontainer_json(&content, features_selected)
         }
-
-        if let Err(e) = node_runtime
-            .npm_install_packages(
-                &paths::devcontainer_dir(),
-                &[("@devcontainers/cli", "latest")],
-            )
-            .await
-        {
-            log::error!(
-                "Unable to install devcontainer CLI to data directory. Error: {:?}",
-                e
-            );
-            return Err(DevContainerError::DevContainerCliNotAvailable);
-        };
-
-        let mut command =
-            util::command::new_command(node_runtime_path.as_os_str().display().to_string());
-        command.arg(datadir_cli_path.display().to_string());
-        command.arg("--version");
-        if let Err(e) = command.output().await {
-            log::error!(
-                "Unable to find devcontainer cli after NPM install. Error: {:?}",
-                e
-            );
-            Err(DevContainerError::DevContainerCliNotAvailable)
-        } else {
-            Ok(DevContainerCli {
-                path: datadir_cli_path,
-                node_runtime_path: Some(node_runtime_path),
+        worktree
+            .update(cx, |worktree, cx| {
+                worktree.create_entry(rel_path.clone(), false, Some(content.into_bytes()), cx)
             })
-        }
-    } else {
-        log::info!("Found devcontainer cli on $PATH, using it");
-        Ok(DevContainerCli {
-            path: PathBuf::from(&dev_container_cli()),
-            node_runtime_path: None,
-        })
-    }
-}
-
-async fn devcontainer_up(
-    context: &DevContainerContext,
-    cli: &DevContainerCli,
-    config_path: Option<&Path>,
-) -> Result<DevContainerUp, DevContainerError> {
-    let mut command = cli.command(context.use_podman);
-    command.arg("up");
-    command.arg("--workspace-folder");
-    command.arg(context.project_directory.display().to_string());
-
-    if let Some(config) = config_path {
-        command.arg("--config");
-        command.arg(config.display().to_string());
+            .await
+            .map_err(|e| {
+                log::error!("Unable to create entry in worktree: {e}");
+                DevContainerError::NotInValidProject
+            })?;
+        project_files.push(rel_path);
     }
 
-    log::info!("Running full devcontainer up command: {:?}", command);
-
-    match command.output().await {
-        Ok(output) => {
-            if output.status.success() {
-                let raw = String::from_utf8_lossy(&output.stdout);
-                parse_json_from_cli(&raw)
-            } else {
-                let message = format!(
-                    "Non-success status running devcontainer up for workspace: out: {}, err: {}",
-                    String::from_utf8_lossy(&output.stdout),
-                    String::from_utf8_lossy(&output.stderr)
-                );
-
-                log::error!("{}", &message);
-                Err(DevContainerError::DevContainerUpFailed(message))
-            }
-        }
-        Err(e) => {
-            let message = format!("Error running devcontainer up: {:?}", e);
-            log::error!("{}", &message);
-            Err(DevContainerError::DevContainerUpFailed(message))
-        }
-    }
+    Ok(DevContainerApply { project_files })
 }
 
-pub(crate) async fn read_devcontainer_configuration(
-    context: &DevContainerContext,
-    cli: &DevContainerCli,
-    config_path: Option<&Path>,
-) -> Result<DevContainerConfigurationOutput, DevContainerError> {
-    let mut command = cli.command(context.use_podman);
-    command.arg("read-configuration");
-    command.arg("--workspace-folder");
-    command.arg(context.project_directory.display().to_string());
-
-    if let Some(config) = config_path {
-        command.arg("--config");
-        command.arg(config.display().to_string());
-    }
-
-    match command.output().await {
-        Ok(output) => {
-            if output.status.success() {
-                let raw = String::from_utf8_lossy(&output.stdout);
-                parse_json_from_cli(&raw)
-            } else {
-                let message = format!(
-                    "Non-success status running devcontainer read-configuration for workspace: out: {:?}, err: {:?}",
-                    String::from_utf8_lossy(&output.stdout),
-                    String::from_utf8_lossy(&output.stderr)
-                );
-                log::error!("{}", &message);
-                Err(DevContainerError::DevContainerNotFound)
-            }
-        }
-        Err(e) => {
-            let message = format!("Error running devcontainer read-configuration: {:?}", e);
-            log::error!("{}", &message);
-            Err(DevContainerError::DevContainerNotFound)
-        }
+fn insert_features_into_devcontainer_json(
+    content: &str,
+    features: &HashSet<DevContainerFeature>,
+) -> String {
+    if features.is_empty() {
+        return content.to_string();
     }
-}
-
-pub(crate) async fn apply_dev_container_template(
-    template: &DevContainerTemplate,
-    template_options: &HashMap<String, String>,
-    features_selected: &HashSet<DevContainerFeature>,
-    context: &DevContainerContext,
-    cli: &DevContainerCli,
-) -> Result<DevContainerApply, DevContainerError> {
-    let mut command = cli.command(context.use_podman);
-
-    let Ok(serialized_options) = serde_json::to_string(template_options) else {
-        log::error!("Unable to serialize options for {:?}", template_options);
-        return Err(DevContainerError::DevContainerParseFailed);
-    };
 
-    command.arg("templates");
-    command.arg("apply");
-    command.arg("--workspace-folder");
-    command.arg(context.project_directory.display().to_string());
-    command.arg("--template-id");
-    command.arg(format!(
-        "{}/{}",
-        template
-            .source_repository
-            .as_ref()
-            .unwrap_or(&String::from("")),
-        template.id
-    ));
-    command.arg("--template-args");
-    command.arg(serialized_options);
-    command.arg("--features");
-    command.arg(template_features_to_json(features_selected));
-
-    log::debug!("Running full devcontainer apply command: {:?}", command);
+    let features_value: serde_json::Value = features
+        .iter()
+        .map(|f| {
+            let key = format!(
+                "{}/{}:{}",
+                f.source_repository.as_deref().unwrap_or(""),
+                f.id,
+                f.major_version()
+            );
+            (key, serde_json::Value::Object(Default::default()))
+        })
+        .collect::<serde_json::Map<String, serde_json::Value>>()
+        .into();
+
+    let tab_size = infer_json_indent_size(content);
+    let (range, replacement) = replace_value_in_json_text(
+        content,
+        &["features"],
+        tab_size,
+        Some(&features_value),
+        None,
+    );
 
-    match command.output().await {
-        Ok(output) => {
-            if output.status.success() {
-                let raw = String::from_utf8_lossy(&output.stdout);
-                parse_json_from_cli(&raw)
-            } else {
-                let message = format!(
-                    "Non-success status running devcontainer templates apply for workspace: out: {:?}, err: {:?}",
-                    String::from_utf8_lossy(&output.stdout),
-                    String::from_utf8_lossy(&output.stderr)
-                );
+    let mut result = content.to_string();
+    result.replace_range(range, &replacement);
+    result
+}
 
-                log::error!("{}", &message);
-                Err(DevContainerError::DevContainerTemplateApplyFailed(message))
-            }
-        }
-        Err(e) => {
-            let message = format!("Error running devcontainer templates apply: {:?}", e);
-            log::error!("{}", &message);
-            Err(DevContainerError::DevContainerTemplateApplyFailed(message))
-        }
+fn expand_template_options(content: String, template_options: &HashMap<String, String>) -> String {
+    let mut replaced_content = content;
+    for (key, val) in template_options {
+        replaced_content = replaced_content.replace(&format!("${{templateOption:{key}}}"), val)
     }
-}
-// Try to parse directly first (newer versions output pure JSON)
-// If that fails, look for JSON start (older versions have plaintext prefix)
-fn parse_json_from_cli<T: serde::de::DeserializeOwned>(raw: &str) -> Result<T, DevContainerError> {
-    serde_json::from_str::<T>(&raw)
-        .or_else(|e| {
-            log::error!("Error parsing json: {} - will try to find json object in larger plaintext", e);
-            let json_start = raw
-                .find(|c| c == '{')
-                .ok_or_else(|| {
-                    log::error!("No JSON found in devcontainer up output");
-                    DevContainerError::DevContainerParseFailed
-                })?;
-
-            serde_json::from_str(&raw[json_start..]).map_err(|e| {
-                log::error!(
-                    "Unable to parse JSON from devcontainer up output (starting at position {}), error: {:?}",
-                    json_start,
-                    e
-                );
-                DevContainerError::DevContainerParseFailed
-            })
-        })
+    replaced_content
 }
 
 fn get_backup_project_name(remote_workspace_folder: &str, container_id: &str) -> String {
@@ -577,36 +468,11 @@ fn get_backup_project_name(remote_workspace_folder: &str, container_id: &str) ->
         .unwrap_or_else(|| container_id.to_string())
 }
 
-fn template_features_to_json(features_selected: &HashSet<DevContainerFeature>) -> String {
-    let features_map = features_selected
-        .iter()
-        .map(|feature| {
-            let mut map = HashMap::new();
-            map.insert(
-                "id",
-                format!(
-                    "{}/{}:{}",
-                    feature
-                        .source_repository
-                        .as_ref()
-                        .unwrap_or(&String::from("")),
-                    feature.id,
-                    feature.major_version()
-                ),
-            );
-            map
-        })
-        .collect::<Vec<HashMap<&str, String>>>();
-    serde_json::to_string(&features_map).unwrap()
-}
-
 #[cfg(test)]
 mod tests {
     use std::path::PathBuf;
 
-    use crate::devcontainer_api::{
-        DevContainerConfig, DevContainerUp, find_configs_in_snapshot, parse_json_from_cli,
-    };
+    use crate::devcontainer_api::{DevContainerConfig, find_configs_in_snapshot};
     use fs::FakeFs;
     use gpui::TestAppContext;
     use project::Project;
@@ -621,30 +487,6 @@ mod tests {
         });
     }
 
-    #[test]
-    fn should_parse_from_devcontainer_json() {
-        let json = r#"{"outcome":"success","containerId":"826abcac45afd412abff083ab30793daff2f3c8ce2c831df728baf39933cb37a","remoteUser":"vscode","remoteWorkspaceFolder":"/workspaces/zed"}"#;
-        let up: DevContainerUp = parse_json_from_cli(json).unwrap();
-        assert_eq!(up._outcome, "success");
-        assert_eq!(
-            up.container_id,
-            "826abcac45afd412abff083ab30793daff2f3c8ce2c831df728baf39933cb37a"
-        );
-        assert_eq!(up.remote_user, "vscode");
-        assert_eq!(up.remote_workspace_folder, "/workspaces/zed");
-
-        let json_in_plaintext = r#"[2026-01-22T16:19:08.802Z] @devcontainers/cli 0.80.1. Node.js v22.21.1. darwin 24.6.0 arm64.
-            {"outcome":"success","containerId":"826abcac45afd412abff083ab30793daff2f3c8ce2c831df728baf39933cb37a","remoteUser":"vscode","remoteWorkspaceFolder":"/workspaces/zed"}"#;
-        let up: DevContainerUp = parse_json_from_cli(json_in_plaintext).unwrap();
-        assert_eq!(up._outcome, "success");
-        assert_eq!(
-            up.container_id,
-            "826abcac45afd412abff083ab30793daff2f3c8ce2c831df728baf39933cb37a"
-        );
-        assert_eq!(up.remote_user, "vscode");
-        assert_eq!(up.remote_workspace_folder, "/workspaces/zed");
-    }
-
     #[gpui::test]
     async fn test_find_configs_root_devcontainer_json(cx: &mut TestAppContext) {
         init_test(cx);

crates/dev_container/src/devcontainer_json.rs πŸ”—

@@ -0,0 +1,1358 @@
+use std::{collections::HashMap, fmt::Display, path::Path, sync::Arc};
+
+use crate::{command_json::CommandRunner, devcontainer_api::DevContainerError};
+use serde::{Deserialize, Deserializer, Serialize};
+use serde_json_lenient::Value;
+use util::command::Command;
+
+#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, Clone)]
+#[serde(untagged)]
+pub(crate) enum ForwardPort {
+    Number(u16),
+    String(String),
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub(crate) enum PortAttributeProtocol {
+    Https,
+    Http,
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub(crate) enum OnAutoForward {
+    Notify,
+    OpenBrowser,
+    OpenBrowserOnce,
+    OpenPreview,
+    Silent,
+    Ignore,
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct PortAttributes {
+    label: String,
+    on_auto_forward: OnAutoForward,
+    elevate_if_needed: bool,
+    require_local_port: bool,
+    protocol: PortAttributeProtocol,
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub(crate) enum UserEnvProbe {
+    None,
+    InteractiveShell,
+    LoginShell,
+    LoginInteractiveShell,
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub(crate) enum ShutdownAction {
+    None,
+    StopContainer,
+    StopCompose,
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct MountDefinition {
+    pub(crate) source: String,
+    pub(crate) target: String,
+    #[serde(rename = "type")]
+    pub(crate) mount_type: Option<String>,
+}
+
+impl Display for MountDefinition {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "type={},source={},target={},consistency=cached",
+            self.mount_type.clone().unwrap_or_else(|| {
+                if self.source.starts_with('/') {
+                    "bind".to_string()
+                } else {
+                    "volume".to_string()
+                }
+            }),
+            self.source,
+            self.target
+        )
+    }
+}
+
+/// Represents the value associated with a feature ID in the `features` map of devcontainer.json.
+///
+/// Per the spec, the value can be:
+/// - A boolean (`true` to enable with defaults)
+/// - A string (shorthand for `{"version": "<value>"}`)
+/// - An object mapping option names to string or boolean values
+///
+/// See: https://containers.dev/implementors/features/#devcontainerjson-properties
+#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, Clone)]
+#[serde(untagged)]
+pub(crate) enum FeatureOptions {
+    Bool(bool),
+    String(String),
+    Options(HashMap<String, FeatureOptionValue>),
+}
+
+#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, Clone)]
+#[serde(untagged)]
+pub(crate) enum FeatureOptionValue {
+    Bool(bool),
+    String(String),
+}
+impl std::fmt::Display for FeatureOptionValue {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            FeatureOptionValue::Bool(b) => write!(f, "{}", b),
+            FeatureOptionValue::String(s) => write!(f, "{}", s),
+        }
+    }
+}
+
+#[derive(Clone, Debug, Serialize, Eq, PartialEq, Default)]
+pub(crate) struct ZedCustomizationsWrapper {
+    pub(crate) zed: ZedCustomization,
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq, Default)]
+pub(crate) struct ZedCustomization {
+    #[serde(default)]
+    pub(crate) extensions: Vec<String>,
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct ContainerBuild {
+    pub(crate) dockerfile: String,
+    context: Option<String>,
+    pub(crate) args: Option<HashMap<String, String>>,
+    options: Option<Vec<String>>,
+    target: Option<String>,
+    #[serde(default, deserialize_with = "deserialize_string_or_array")]
+    cache_from: Option<Vec<String>>,
+}
+
+#[derive(Clone, Debug, Serialize, Eq, PartialEq)]
+struct LifecycleScriptInternal {
+    command: Option<String>,
+    args: Vec<String>,
+}
+
+impl LifecycleScriptInternal {
+    fn from_args(args: Vec<String>) -> Self {
+        let command = args.get(0).map(|a| a.to_string());
+        let remaining = args.iter().skip(1).map(|a| a.to_string()).collect();
+        Self {
+            command,
+            args: remaining,
+        }
+    }
+}
+
+#[derive(Clone, Debug, Serialize, Eq, PartialEq)]
+pub struct LifecycleScript {
+    scripts: HashMap<String, LifecycleScriptInternal>,
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct HostRequirements {
+    cpus: Option<u16>,
+    memory: Option<String>,
+    storage: Option<String>,
+}
+
+#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub(crate) enum LifecycleCommand {
+    InitializeCommand,
+    OnCreateCommand,
+    UpdateContentCommand,
+    PostCreateCommand,
+    PostStartCommand,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub(crate) enum DevContainerBuildType {
+    Image,
+    Dockerfile,
+    DockerCompose,
+    None,
+}
+#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq, Default)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct DevContainer {
+    pub(crate) image: Option<String>,
+    pub(crate) name: Option<String>,
+    pub(crate) remote_user: Option<String>,
+    pub(crate) forward_ports: Option<Vec<ForwardPort>>,
+    pub(crate) ports_attributes: Option<HashMap<String, PortAttributes>>,
+    pub(crate) other_ports_attributes: Option<PortAttributes>,
+    pub(crate) container_env: Option<HashMap<String, String>>,
+    pub(crate) remote_env: Option<HashMap<String, String>>,
+    pub(crate) container_user: Option<String>,
+    #[serde(rename = "updateRemoteUserUID")]
+    pub(crate) update_remote_user_uid: Option<bool>,
+    user_env_probe: Option<UserEnvProbe>,
+    override_command: Option<bool>,
+    shutdown_action: Option<ShutdownAction>,
+    init: Option<bool>,
+    pub(crate) privileged: Option<bool>,
+    cap_add: Option<Vec<String>>,
+    security_opt: Option<Vec<String>>,
+    #[serde(default, deserialize_with = "deserialize_mount_definitions")]
+    pub(crate) mounts: Option<Vec<MountDefinition>>,
+    pub(crate) features: Option<HashMap<String, FeatureOptions>>,
+    pub(crate) override_feature_install_order: Option<Vec<String>>,
+    pub(crate) customizations: Option<ZedCustomizationsWrapper>,
+    pub(crate) build: Option<ContainerBuild>,
+    #[serde(default, deserialize_with = "deserialize_string_or_int")]
+    pub(crate) app_port: Option<String>,
+    #[serde(default, deserialize_with = "deserialize_mount_definition")]
+    pub(crate) workspace_mount: Option<MountDefinition>,
+    pub(crate) workspace_folder: Option<String>,
+    run_args: Option<Vec<String>>,
+    #[serde(default, deserialize_with = "deserialize_string_or_array")]
+    pub(crate) docker_compose_file: Option<Vec<String>>,
+    pub(crate) service: Option<String>,
+    run_services: Option<Vec<String>>,
+    pub(crate) initialize_command: Option<LifecycleScript>,
+    pub(crate) on_create_command: Option<LifecycleScript>,
+    pub(crate) update_content_command: Option<LifecycleScript>,
+    pub(crate) post_create_command: Option<LifecycleScript>,
+    pub(crate) post_start_command: Option<LifecycleScript>,
+    pub(crate) post_attach_command: Option<LifecycleScript>,
+    wait_for: Option<LifecycleCommand>,
+    host_requirements: Option<HostRequirements>,
+}
+
+pub(crate) fn deserialize_devcontainer_json(json: &str) -> Result<DevContainer, DevContainerError> {
+    match serde_json_lenient::from_str(json) {
+        Ok(devcontainer) => Ok(devcontainer),
+        Err(e) => {
+            log::error!("Unable to deserialize devcontainer from json: {e}");
+            Err(DevContainerError::DevContainerParseFailed)
+        }
+    }
+}
+
+impl DevContainer {
+    pub(crate) fn build_type(&self) -> DevContainerBuildType {
+        if self.image.is_some() {
+            return DevContainerBuildType::Image;
+        } else if self.docker_compose_file.is_some() {
+            return DevContainerBuildType::DockerCompose;
+        } else if self.build.is_some() {
+            return DevContainerBuildType::Dockerfile;
+        }
+        return DevContainerBuildType::None;
+    }
+
+    pub(crate) fn has_features(&self) -> bool {
+        self.features
+            .as_ref()
+            .map(|features| !features.is_empty())
+            .unwrap_or(false)
+    }
+}
+
+// Custom deserializer that parses the entire customizations object as a
+// serde_json_lenient::Value first, then extracts the "zed" portion.
+// This avoids a bug in serde_json_lenient's `ignore_value` codepath which
+// does not handle trailing commas in skipped values.
+impl<'de> Deserialize<'de> for ZedCustomizationsWrapper {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        let value = Value::deserialize(deserializer)?;
+        let zed = value
+            .get("zed")
+            .map(|zed_value| serde_json_lenient::from_value::<ZedCustomization>(zed_value.clone()))
+            .transpose()
+            .map_err(serde::de::Error::custom)?
+            .unwrap_or_default();
+        Ok(ZedCustomizationsWrapper { zed })
+    }
+}
+
+impl LifecycleScript {
+    fn from_map(args: HashMap<String, Vec<String>>) -> Self {
+        Self {
+            scripts: args
+                .into_iter()
+                .map(|(k, v)| (k, LifecycleScriptInternal::from_args(v)))
+                .collect(),
+        }
+    }
+    fn from_str(args: &str) -> Self {
+        let script: Vec<String> = args.split(" ").map(|a| a.to_string()).collect();
+
+        Self::from_args(script)
+    }
+    fn from_args(args: Vec<String>) -> Self {
+        Self::from_map(HashMap::from([("default".to_string(), args)]))
+    }
+    pub fn script_commands(&self) -> HashMap<String, Command> {
+        self.scripts
+            .iter()
+            .filter_map(|(k, v)| {
+                if let Some(inner_command) = &v.command {
+                    let mut command = Command::new(inner_command);
+                    command.args(&v.args);
+                    Some((k.clone(), command))
+                } else {
+                    log::warn!(
+                        "Lifecycle script command {k}, value {:?} has no program to run. Skipping",
+                        v
+                    );
+                    None
+                }
+            })
+            .collect()
+    }
+
+    pub async fn run(
+        &self,
+        command_runnder: &Arc<dyn CommandRunner>,
+        working_directory: &Path,
+    ) -> Result<(), DevContainerError> {
+        for (command_name, mut command) in self.script_commands() {
+            log::debug!("Running script {command_name}");
+
+            command.current_dir(working_directory);
+
+            let output = command_runnder
+                .run_command(&mut command)
+                .await
+                .map_err(|e| {
+                    log::error!("Error running command {command_name}: {e}");
+                    DevContainerError::CommandFailed(command_name.clone())
+                })?;
+            if !output.status.success() {
+                let std_err = String::from_utf8_lossy(&output.stderr);
+                log::error!(
+                    "Command {command_name} produced a non-successful output. StdErr: {std_err}"
+                );
+            }
+            let std_out = String::from_utf8_lossy(&output.stdout);
+            log::debug!("Command {command_name} output:\n {std_out}");
+        }
+        Ok(())
+    }
+}
+
+impl<'de> Deserialize<'de> for LifecycleScript {
+    /// Deserializes a devcontainer lifecycle script, which may appear as:
+    /// a single command string, an array of command arguments, or a map of
+    /// named commands whose values are strings or string arrays.
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        use serde::de::{self, Visitor};
+        use std::fmt;
+
+        struct LifecycleScriptVisitor;
+
+        impl<'de> Visitor<'de> for LifecycleScriptVisitor {
+            type Value = LifecycleScript;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                formatter.write_str("a string, an array of strings, or a map of arrays")
+            }
+
+            // `"onCreateCommand": "echo hi"` — a single shell command string.
+            fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(LifecycleScript::from_str(value))
+            }
+
+            // `"postCreateCommand": ["echo", "hi"]` — pre-split argv form.
+            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
+            where
+                A: de::SeqAccess<'de>,
+            {
+                let mut array = Vec::new();
+                while let Some(elem) = seq.next_element()? {
+                    array.push(elem);
+                }
+                Ok(LifecycleScript::from_args(array))
+            }
+
+            // `{"name": "cmd" | ["cmd", "arg"]}` — named parallel commands.
+            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
+            where
+                A: de::MapAccess<'de>,
+            {
+                let mut result = HashMap::new();
+                while let Some(key) = map.next_key::<String>()? {
+                    let value: Value = map.next_value()?;
+                    let script_args = match value {
+                        Value::String(s) => {
+                            // NOTE(review): splitting on a single space produces empty
+                            // args for runs of spaces and does not respect shell
+                            // quoting — confirm this matches `from_str`'s behavior
+                            // (split_whitespace may be safer).
+                            s.split(" ").map(|s| s.to_string()).collect::<Vec<String>>()
+                        }
+                        Value::Array(arr) => {
+                            // Non-string array elements are silently dropped.
+                            let strings: Vec<String> = arr
+                                .into_iter()
+                                .filter_map(|v| v.as_str().map(|s| s.to_string()))
+                                .collect();
+                            strings
+                        }
+                        // Values of any other JSON type skip this entry entirely
+                        // (best-effort parse rather than a hard error).
+                        _ => continue,
+                    };
+                    result.insert(key, script_args);
+                }
+                Ok(LifecycleScript::from_map(result))
+            }
+        }
+
+        // deserialize_any lets the visitor dispatch on the actual JSON shape.
+        deserializer.deserialize_any(LifecycleScriptVisitor)
+    }
+}
+
+/// Deserializes a single mount that may be written either as a structured
+/// object or as the Docker CLI `key=value,...` shorthand string
+/// (e.g. `source=/app,target=/workspaces/app,type=bind`).
+fn deserialize_mount_definition<'de, D>(
+    deserializer: D,
+) -> Result<Option<MountDefinition>, D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    use serde::Deserialize;
+    use serde::de::Error;
+
+    #[derive(Deserialize)]
+    #[serde(untagged)]
+    enum MountItem {
+        Object(MountDefinition),
+        String(String),
+    }
+
+    let mount = match MountItem::deserialize(deserializer)? {
+        MountItem::Object(parsed) => parsed,
+        MountItem::String(spec) => {
+            // Scan the comma-separated `key=value` pairs. Segments without an
+            // '=' and unrecognized keys (e.g. `consistency`) are skipped.
+            let (mut source, mut target, mut mount_type) = (None, None, None);
+            for segment in spec.split(',') {
+                let Some((key, value)) = segment.trim().split_once('=') else {
+                    continue;
+                };
+                let value = value.trim().to_string();
+                match key.trim() {
+                    "source" => source = Some(value),
+                    "target" => target = Some(value),
+                    "type" => mount_type = Some(value),
+                    _ => {}
+                }
+            }
+
+            // `source` and `target` are mandatory; `type` is optional.
+            MountDefinition {
+                source: source.ok_or_else(|| {
+                    D::Error::custom(format!("mount string missing 'source': {}", spec))
+                })?,
+                target: target.ok_or_else(|| {
+                    D::Error::custom(format!("mount string missing 'target': {}", spec))
+                })?,
+                mount_type,
+            }
+        }
+    };
+
+    Ok(Some(mount))
+}
+
+/// Deserializes a list of mounts where each element may be either a
+/// structured object or the Docker CLI `key=value,...` shorthand string
+/// (e.g. `source=/app,target=/workspaces/app,type=bind`).
+fn deserialize_mount_definitions<'de, D>(
+    deserializer: D,
+) -> Result<Option<Vec<MountDefinition>>, D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    use serde::Deserialize;
+    use serde::de::Error;
+
+    #[derive(Deserialize)]
+    #[serde(untagged)]
+    enum MountItem {
+        Object(MountDefinition),
+        String(String),
+    }
+
+    /// Parses the `key=value,...` shorthand. Segments without an '=' and
+    /// unrecognized keys (e.g. `consistency`) are skipped; `source` and
+    /// `target` are mandatory. On failure returns the full error message.
+    fn parse_mount_string(s: &str) -> Result<MountDefinition, String> {
+        let mut source = None;
+        let mut target = None;
+        let mut mount_type = None;
+
+        for part in s.split(',') {
+            if let Some((key, value)) = part.trim().split_once('=') {
+                match key.trim() {
+                    "source" => source = Some(value.trim().to_string()),
+                    "target" => target = Some(value.trim().to_string()),
+                    "type" => mount_type = Some(value.trim().to_string()),
+                    _ => {} // Ignore unknown keys
+                }
+            }
+        }
+
+        Ok(MountDefinition {
+            source: source.ok_or_else(|| format!("mount string missing 'source': {}", s))?,
+            target: target.ok_or_else(|| format!("mount string missing 'target': {}", s))?,
+            mount_type,
+        })
+    }
+
+    let items = Vec::<MountItem>::deserialize(deserializer)?;
+    // Every item yields exactly one mount, so reserve up front.
+    let mut mounts = Vec::with_capacity(items.len());
+    for item in items {
+        mounts.push(match item {
+            MountItem::Object(mount) => mount,
+            MountItem::String(s) => parse_mount_string(&s).map_err(D::Error::custom)?,
+        });
+    }
+
+    Ok(Some(mounts))
+}
+
+/// Deserializes a value that may be written as either a JSON string or an
+/// unsigned integer (e.g. `"appPort": 8081` or `"appPort": "8081"`),
+/// normalizing it to a string.
+fn deserialize_string_or_int<'de, D>(deserializer: D) -> Result<Option<String>, D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    use serde::Deserialize;
+
+    #[derive(Deserialize)]
+    #[serde(untagged)]
+    enum StringOrInt {
+        String(String),
+        Int(u32),
+    }
+
+    let text = match StringOrInt::deserialize(deserializer)? {
+        StringOrInt::String(value) => value,
+        StringOrInt::Int(number) => number.to_string(),
+    };
+    Ok(Some(text))
+}
+
+/// Deserializes a value that may be written as either a single string or an
+/// array of strings, normalizing it to a vector (a lone string becomes a
+/// one-element vector).
+fn deserialize_string_or_array<'de, D>(deserializer: D) -> Result<Option<Vec<String>>, D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    use serde::Deserialize;
+
+    #[derive(Deserialize)]
+    #[serde(untagged)]
+    enum StringOrArray {
+        String(String),
+        Array(Vec<String>),
+    }
+
+    let values = match StringOrArray::deserialize(deserializer)? {
+        StringOrArray::String(single) => vec![single],
+        StringOrArray::Array(many) => many,
+    };
+    Ok(Some(values))
+}
+
+#[cfg(test)]
+mod test {
+    use std::collections::HashMap;
+
+    use crate::{
+        devcontainer_api::DevContainerError,
+        devcontainer_json::{
+            ContainerBuild, DevContainer, DevContainerBuildType, FeatureOptions, ForwardPort,
+            HostRequirements, LifecycleCommand, LifecycleScript, MountDefinition, OnAutoForward,
+            PortAttributeProtocol, PortAttributes, ShutdownAction, UserEnvProbe, ZedCustomization,
+            ZedCustomizationsWrapper, deserialize_devcontainer_json,
+        },
+    };
+
+    // Tool-specific customization sections other than "zed" (here "vscode"
+    // and "codespaces") must be ignored rather than causing a parse error,
+    // while the "zed" section is still extracted.
+    #[test]
+    fn should_deserialize_customizations_with_unknown_keys() {
+        let json_with_other_customizations = r#"
+            {
+                "image": "mcr.microsoft.com/devcontainers/base:ubuntu",
+                "customizations": {
+                  "vscode": {
+                    "extensions": [
+                      "dbaeumer.vscode-eslint",
+                      "GitHub.vscode-pull-request-github",
+                    ],
+                  },
+                  "zed": {
+                    "extensions": ["vue", "ruby"],
+                  },
+                  "codespaces": {
+                    "repositories": {
+                      "devcontainers/features": {
+                        "permissions": {
+                          "contents": "write",
+                          "workflows": "write",
+                        },
+                      },
+                    },
+                  },
+                },
+            }
+        "#;
+
+        let result = deserialize_devcontainer_json(json_with_other_customizations);
+
+        assert!(
+            result.is_ok(),
+            "Should ignore unknown customization keys, but got: {:?}",
+            result.err()
+        );
+        let devcontainer = result.expect("ok");
+        // Only the "zed" section survives into the parsed structure.
+        assert_eq!(
+            devcontainer.customizations,
+            Some(ZedCustomizationsWrapper {
+                zed: ZedCustomization {
+                    extensions: vec!["vue".to_string(), "ruby".to_string()]
+                }
+            })
+        );
+    }
+
+    // A "customizations" object with no "zed" entry must still parse,
+    // yielding a default ZedCustomization with an empty extensions list.
+    #[test]
+    fn should_deserialize_customizations_without_zed_key() {
+        let json_without_zed = r#"
+            {
+                "image": "mcr.microsoft.com/devcontainers/base:ubuntu",
+                "customizations": {
+                    "vscode": {
+                        "extensions": ["dbaeumer.vscode-eslint"]
+                    }
+                }
+            }
+        "#;
+
+        let result = deserialize_devcontainer_json(json_without_zed);
+
+        assert!(
+            result.is_ok(),
+            "Should handle missing zed key in customizations, but got: {:?}",
+            result.err()
+        );
+        let devcontainer = result.expect("ok");
+        assert_eq!(
+            devcontainer.customizations,
+            Some(ZedCustomizationsWrapper {
+                zed: ZedCustomization { extensions: vec![] }
+            })
+        );
+    }
+
+    // End-to-end parse of an image-based devcontainer.json covering every
+    // supported field, plus a negative case for a wrongly-typed "image".
+    #[test]
+    fn should_deserialize_simple_devcontainer_json() {
+        // A numeric "image" must be rejected, not coerced to a string.
+        let given_bad_json = "{ \"image\": 123 }";
+
+        let result = deserialize_devcontainer_json(given_bad_json);
+
+        assert!(result.is_err());
+        assert_eq!(
+            result.expect_err("err"),
+            DevContainerError::DevContainerParseFailed
+        );
+
+        // Fully-populated fixture; JSONC comments and trailing commas are
+        // expected to be tolerated by the lenient parser.
+        let given_image_container_json = r#"
+            // These are some external comments. serde_lenient should handle them
+            {
+                // These are some internal comments
+                "image": "mcr.microsoft.com/devcontainers/base:ubuntu",
+                "name": "myDevContainer",
+                "remoteUser": "root",
+                "forwardPorts": [
+                    "db:5432",
+                    3000
+                ],
+                "portsAttributes": {
+                    "3000": {
+                        "label": "This Port",
+                        "onAutoForward": "notify",
+                        "elevateIfNeeded": false,
+                        "requireLocalPort": true,
+                        "protocol": "https"
+                    },
+                    "db:5432": {
+                        "label": "This Port too",
+                        "onAutoForward": "silent",
+                        "elevateIfNeeded": true,
+                        "requireLocalPort": false,
+                        "protocol": "http"
+                    }
+                },
+                "otherPortsAttributes": {
+                    "label": "Other Ports",
+                    "onAutoForward": "openBrowser",
+                    "elevateIfNeeded": true,
+                    "requireLocalPort": true,
+                    "protocol": "https"
+                },
+                "updateRemoteUserUID": true,
+                "remoteEnv": {
+                    "MYVAR1": "myvarvalue",
+                    "MYVAR2": "myvarothervalue"
+                },
+                "initializeCommand": ["echo", "initialize_command"],
+                "onCreateCommand": "echo on_create_command",
+                "updateContentCommand": {
+                    "first": "echo update_content_command",
+                    "second": ["echo", "update_content_command"]
+                },
+                "postCreateCommand": ["echo", "post_create_command"],
+                "postStartCommand": "echo post_start_command",
+                "postAttachCommand": {
+                    "something": "echo post_attach_command",
+                    "something1": "echo something else",
+                },
+                "waitFor": "postStartCommand",
+                "userEnvProbe": "loginShell",
+                "features": {
+              		"ghcr.io/devcontainers/features/aws-cli:1": {},
+              		"ghcr.io/devcontainers/features/anaconda:1": {}
+               	},
+                "overrideFeatureInstallOrder": [
+                    "ghcr.io/devcontainers/features/anaconda:1",
+                    "ghcr.io/devcontainers/features/aws-cli:1"
+                ],
+                "hostRequirements": {
+                    "cpus": 2,
+                    "memory": "8gb",
+                    "storage": "32gb",
+                    // Note that we're not parsing this currently
+                    "gpu": true,
+                },
+                "appPort": 8081,
+                "containerEnv": {
+                    "MYVAR3": "myvar3",
+                    "MYVAR4": "myvar4"
+                },
+                "containerUser": "myUser",
+                "mounts": [
+                    {
+                        "source": "/localfolder/app",
+                        "target": "/workspaces/app",
+                        "type": "volume"
+                    }
+                ],
+                "runArgs": [
+                    "-c",
+                    "some_command"
+                ],
+                "shutdownAction": "stopContainer",
+                "overrideCommand": true,
+                "workspaceFolder": "/workspaces",
+                "workspaceMount": "source=/app,target=/workspaces/app,type=bind,consistency=cached",
+                "customizations": {
+                    "vscode": {
+                        // Just confirm that this can be included and ignored
+                    },
+                    "zed": {
+                        "extensions": [
+                            "html"
+                        ]
+                    }
+                }
+            }
+            "#;
+
+        let result = deserialize_devcontainer_json(given_image_container_json);
+
+        assert!(result.is_ok());
+        let devcontainer = result.expect("ok");
+        assert_eq!(
+            devcontainer,
+            DevContainer {
+                image: Some(String::from("mcr.microsoft.com/devcontainers/base:ubuntu")),
+                name: Some(String::from("myDevContainer")),
+                remote_user: Some(String::from("root")),
+                forward_ports: Some(vec![
+                    ForwardPort::String("db:5432".to_string()),
+                    ForwardPort::Number(3000),
+                ]),
+                ports_attributes: Some(HashMap::from([
+                    (
+                        "3000".to_string(),
+                        PortAttributes {
+                            label: "This Port".to_string(),
+                            on_auto_forward: OnAutoForward::Notify,
+                            elevate_if_needed: false,
+                            require_local_port: true,
+                            protocol: PortAttributeProtocol::Https
+                        }
+                    ),
+                    (
+                        "db:5432".to_string(),
+                        PortAttributes {
+                            label: "This Port too".to_string(),
+                            on_auto_forward: OnAutoForward::Silent,
+                            elevate_if_needed: true,
+                            require_local_port: false,
+                            protocol: PortAttributeProtocol::Http
+                        }
+                    )
+                ])),
+                other_ports_attributes: Some(PortAttributes {
+                    label: "Other Ports".to_string(),
+                    on_auto_forward: OnAutoForward::OpenBrowser,
+                    elevate_if_needed: true,
+                    require_local_port: true,
+                    protocol: PortAttributeProtocol::Https
+                }),
+                update_remote_user_uid: Some(true),
+                remote_env: Some(HashMap::from([
+                    ("MYVAR1".to_string(), "myvarvalue".to_string()),
+                    ("MYVAR2".to_string(), "myvarothervalue".to_string())
+                ])),
+                initialize_command: Some(LifecycleScript::from_args(vec![
+                    "echo".to_string(),
+                    "initialize_command".to_string()
+                ])),
+                on_create_command: Some(LifecycleScript::from_str("echo on_create_command")),
+                update_content_command: Some(LifecycleScript::from_map(HashMap::from([
+                    (
+                        "first".to_string(),
+                        vec!["echo".to_string(), "update_content_command".to_string()]
+                    ),
+                    (
+                        "second".to_string(),
+                        vec!["echo".to_string(), "update_content_command".to_string()]
+                    )
+                ]))),
+                post_create_command: Some(LifecycleScript::from_str("echo post_create_command")),
+                post_start_command: Some(LifecycleScript::from_args(vec![
+                    "echo".to_string(),
+                    "post_start_command".to_string()
+                ])),
+                post_attach_command: Some(LifecycleScript::from_map(HashMap::from([
+                    (
+                        "something".to_string(),
+                        vec!["echo".to_string(), "post_attach_command".to_string()]
+                    ),
+                    (
+                        "something1".to_string(),
+                        // "echo something else" is split on spaces into args.
+                        vec![
+                            "echo".to_string(),
+                            "something".to_string(),
+                            "else".to_string()
+                        ]
+                    )
+                ]))),
+                wait_for: Some(LifecycleCommand::PostStartCommand),
+                user_env_probe: Some(UserEnvProbe::LoginShell),
+                features: Some(HashMap::from([
+                    (
+                        "ghcr.io/devcontainers/features/aws-cli:1".to_string(),
+                        FeatureOptions::Options(HashMap::new())
+                    ),
+                    (
+                        "ghcr.io/devcontainers/features/anaconda:1".to_string(),
+                        FeatureOptions::Options(HashMap::new())
+                    )
+                ])),
+                override_feature_install_order: Some(vec![
+                    "ghcr.io/devcontainers/features/anaconda:1".to_string(),
+                    "ghcr.io/devcontainers/features/aws-cli:1".to_string()
+                ]),
+                host_requirements: Some(HostRequirements {
+                    cpus: Some(2),
+                    memory: Some("8gb".to_string()),
+                    storage: Some("32gb".to_string()),
+                }),
+                // Numeric appPort 8081 is normalized to a string.
+                app_port: Some("8081".to_string()),
+                container_env: Some(HashMap::from([
+                    ("MYVAR3".to_string(), "myvar3".to_string()),
+                    ("MYVAR4".to_string(), "myvar4".to_string())
+                ])),
+                container_user: Some("myUser".to_string()),
+                mounts: Some(vec![MountDefinition {
+                    source: "/localfolder/app".to_string(),
+                    target: "/workspaces/app".to_string(),
+                    mount_type: Some("volume".to_string()),
+                }]),
+                run_args: Some(vec!["-c".to_string(), "some_command".to_string()]),
+                shutdown_action: Some(ShutdownAction::StopContainer),
+                override_command: Some(true),
+                workspace_folder: Some("/workspaces".to_string()),
+                // Parsed from the "key=value,..." shorthand; the unknown
+                // `consistency` key is dropped.
+                workspace_mount: Some(MountDefinition {
+                    source: "/app".to_string(),
+                    target: "/workspaces/app".to_string(),
+                    mount_type: Some("bind".to_string())
+                }),
+                customizations: Some(ZedCustomizationsWrapper {
+                    zed: ZedCustomization {
+                        extensions: vec!["html".to_string()]
+                    }
+                }),
+                ..Default::default()
+            }
+        );
+
+        // An "image" key (and no build/compose keys) classifies as Image.
+        assert_eq!(devcontainer.build_type(), DevContainerBuildType::Image);
+    }
+
+    // End-to-end parse of a Docker-Compose-based devcontainer.json
+    // ("dockerComposeFile"/"service" instead of "image"), verifying the
+    // compose-specific fields and the DockerCompose build-type classification.
+    #[test]
+    fn should_deserialize_docker_compose_devcontainer_json() {
+        let given_docker_compose_json = r#"
+            // These are some external comments. serde_lenient should handle them
+            {
+                // These are some internal comments
+                "name": "myDevContainer",
+                "remoteUser": "root",
+                "forwardPorts": [
+                    "db:5432",
+                    3000
+                ],
+                "portsAttributes": {
+                    "3000": {
+                        "label": "This Port",
+                        "onAutoForward": "notify",
+                        "elevateIfNeeded": false,
+                        "requireLocalPort": true,
+                        "protocol": "https"
+                    },
+                    "db:5432": {
+                        "label": "This Port too",
+                        "onAutoForward": "silent",
+                        "elevateIfNeeded": true,
+                        "requireLocalPort": false,
+                        "protocol": "http"
+                    }
+                },
+                "otherPortsAttributes": {
+                    "label": "Other Ports",
+                    "onAutoForward": "openBrowser",
+                    "elevateIfNeeded": true,
+                    "requireLocalPort": true,
+                    "protocol": "https"
+                },
+                "updateRemoteUserUID": true,
+                "remoteEnv": {
+                    "MYVAR1": "myvarvalue",
+                    "MYVAR2": "myvarothervalue"
+                },
+                "initializeCommand": ["echo", "initialize_command"],
+                "onCreateCommand": "echo on_create_command",
+                "updateContentCommand": {
+                    "first": "echo update_content_command",
+                    "second": ["echo", "update_content_command"]
+                },
+                "postCreateCommand": ["echo", "post_create_command"],
+                "postStartCommand": "echo post_start_command",
+                "postAttachCommand": {
+                    "something": "echo post_attach_command",
+                    "something1": "echo something else",
+                },
+                "waitFor": "postStartCommand",
+                "userEnvProbe": "loginShell",
+                "features": {
+              		"ghcr.io/devcontainers/features/aws-cli:1": {},
+              		"ghcr.io/devcontainers/features/anaconda:1": {}
+               	},
+                "overrideFeatureInstallOrder": [
+                    "ghcr.io/devcontainers/features/anaconda:1",
+                    "ghcr.io/devcontainers/features/aws-cli:1"
+                ],
+                "hostRequirements": {
+                    "cpus": 2,
+                    "memory": "8gb",
+                    "storage": "32gb",
+                    // Note that we're not parsing this currently
+                    "gpu": true,
+                },
+                "dockerComposeFile": "docker-compose.yml",
+                "service": "myService",
+                "runServices": [
+                    "myService",
+                    "mySupportingService"
+                ],
+                "workspaceFolder": "/workspaces/thing",
+                "shutdownAction": "stopCompose",
+                "overrideCommand": true
+            }
+            "#;
+        let result = deserialize_devcontainer_json(given_docker_compose_json);
+
+        assert!(result.is_ok());
+        let devcontainer = result.expect("ok");
+        assert_eq!(
+            devcontainer,
+            DevContainer {
+                name: Some(String::from("myDevContainer")),
+                remote_user: Some(String::from("root")),
+                forward_ports: Some(vec![
+                    ForwardPort::String("db:5432".to_string()),
+                    ForwardPort::Number(3000),
+                ]),
+                ports_attributes: Some(HashMap::from([
+                    (
+                        "3000".to_string(),
+                        PortAttributes {
+                            label: "This Port".to_string(),
+                            on_auto_forward: OnAutoForward::Notify,
+                            elevate_if_needed: false,
+                            require_local_port: true,
+                            protocol: PortAttributeProtocol::Https
+                        }
+                    ),
+                    (
+                        "db:5432".to_string(),
+                        PortAttributes {
+                            label: "This Port too".to_string(),
+                            on_auto_forward: OnAutoForward::Silent,
+                            elevate_if_needed: true,
+                            require_local_port: false,
+                            protocol: PortAttributeProtocol::Http
+                        }
+                    )
+                ])),
+                other_ports_attributes: Some(PortAttributes {
+                    label: "Other Ports".to_string(),
+                    on_auto_forward: OnAutoForward::OpenBrowser,
+                    elevate_if_needed: true,
+                    require_local_port: true,
+                    protocol: PortAttributeProtocol::Https
+                }),
+                update_remote_user_uid: Some(true),
+                remote_env: Some(HashMap::from([
+                    ("MYVAR1".to_string(), "myvarvalue".to_string()),
+                    ("MYVAR2".to_string(), "myvarothervalue".to_string())
+                ])),
+                initialize_command: Some(LifecycleScript::from_args(vec![
+                    "echo".to_string(),
+                    "initialize_command".to_string()
+                ])),
+                on_create_command: Some(LifecycleScript::from_str("echo on_create_command")),
+                update_content_command: Some(LifecycleScript::from_map(HashMap::from([
+                    (
+                        "first".to_string(),
+                        vec!["echo".to_string(), "update_content_command".to_string()]
+                    ),
+                    (
+                        "second".to_string(),
+                        vec!["echo".to_string(), "update_content_command".to_string()]
+                    )
+                ]))),
+                post_create_command: Some(LifecycleScript::from_str("echo post_create_command")),
+                post_start_command: Some(LifecycleScript::from_args(vec![
+                    "echo".to_string(),
+                    "post_start_command".to_string()
+                ])),
+                post_attach_command: Some(LifecycleScript::from_map(HashMap::from([
+                    (
+                        "something".to_string(),
+                        vec!["echo".to_string(), "post_attach_command".to_string()]
+                    ),
+                    (
+                        "something1".to_string(),
+                        // "echo something else" is split on spaces into args.
+                        vec![
+                            "echo".to_string(),
+                            "something".to_string(),
+                            "else".to_string()
+                        ]
+                    )
+                ]))),
+                wait_for: Some(LifecycleCommand::PostStartCommand),
+                user_env_probe: Some(UserEnvProbe::LoginShell),
+                features: Some(HashMap::from([
+                    (
+                        "ghcr.io/devcontainers/features/aws-cli:1".to_string(),
+                        FeatureOptions::Options(HashMap::new())
+                    ),
+                    (
+                        "ghcr.io/devcontainers/features/anaconda:1".to_string(),
+                        FeatureOptions::Options(HashMap::new())
+                    )
+                ])),
+                override_feature_install_order: Some(vec![
+                    "ghcr.io/devcontainers/features/anaconda:1".to_string(),
+                    "ghcr.io/devcontainers/features/aws-cli:1".to_string()
+                ]),
+                host_requirements: Some(HostRequirements {
+                    cpus: Some(2),
+                    memory: Some("8gb".to_string()),
+                    storage: Some("32gb".to_string()),
+                }),
+                // A single compose-file string is normalized to a one-element list.
+                docker_compose_file: Some(vec!["docker-compose.yml".to_string()]),
+                service: Some("myService".to_string()),
+                run_services: Some(vec![
+                    "myService".to_string(),
+                    "mySupportingService".to_string(),
+                ]),
+                workspace_folder: Some("/workspaces/thing".to_string()),
+                shutdown_action: Some(ShutdownAction::StopCompose),
+                override_command: Some(true),
+                ..Default::default()
+            }
+        );
+
+        // The presence of "dockerComposeFile" classifies this as DockerCompose.
+        assert_eq!(
+            devcontainer.build_type(),
+            DevContainerBuildType::DockerCompose
+        );
+    }
+
+    #[test]
+    fn should_deserialize_dockerfile_devcontainer_json() {
+        let given_dockerfile_container_json = r#"
+            // These are some external comments. serde_lenient should handle them
+            {
+                // These are some internal comments
+                "name": "myDevContainer",
+                "remoteUser": "root",
+                "forwardPorts": [
+                    "db:5432",
+                    3000
+                ],
+                "portsAttributes": {
+                    "3000": {
+                        "label": "This Port",
+                        "onAutoForward": "notify",
+                        "elevateIfNeeded": false,
+                        "requireLocalPort": true,
+                        "protocol": "https"
+                    },
+                    "db:5432": {
+                        "label": "This Port too",
+                        "onAutoForward": "silent",
+                        "elevateIfNeeded": true,
+                        "requireLocalPort": false,
+                        "protocol": "http"
+                    }
+                },
+                "otherPortsAttributes": {
+                    "label": "Other Ports",
+                    "onAutoForward": "openBrowser",
+                    "elevateIfNeeded": true,
+                    "requireLocalPort": true,
+                    "protocol": "https"
+                },
+                "updateRemoteUserUID": true,
+                "remoteEnv": {
+                    "MYVAR1": "myvarvalue",
+                    "MYVAR2": "myvarothervalue"
+                },
+                "initializeCommand": ["echo", "initialize_command"],
+                "onCreateCommand": "echo on_create_command",
+                "updateContentCommand": {
+                    "first": "echo update_content_command",
+                    "second": ["echo", "update_content_command"]
+                },
+                "postCreateCommand": ["echo", "post_create_command"],
+                "postStartCommand": "echo post_start_command",
+                "postAttachCommand": {
+                    "something": "echo post_attach_command",
+                    "something1": "echo something else",
+                },
+                "waitFor": "postStartCommand",
+                "userEnvProbe": "loginShell",
+                "features": {
+              		"ghcr.io/devcontainers/features/aws-cli:1": {},
+              		"ghcr.io/devcontainers/features/anaconda:1": {}
+               	},
+                "overrideFeatureInstallOrder": [
+                    "ghcr.io/devcontainers/features/anaconda:1",
+                    "ghcr.io/devcontainers/features/aws-cli:1"
+                ],
+                "hostRequirements": {
+                    "cpus": 2,
+                    "memory": "8gb",
+                    "storage": "32gb",
+                    // Note that we're not parsing this currently
+                    "gpu": true,
+                },
+                "appPort": 8081,
+                "containerEnv": {
+                    "MYVAR3": "myvar3",
+                    "MYVAR4": "myvar4"
+                },
+                "containerUser": "myUser",
+                "mounts": [
+                    {
+                        "source": "/localfolder/app",
+                        "target": "/workspaces/app",
+                        "type": "volume"
+                    },
+                    "source=dev-containers-cli-bashhistory,target=/home/node/commandhistory",
+                ],
+                "runArgs": [
+                    "-c",
+                    "some_command"
+                ],
+                "shutdownAction": "stopContainer",
+                "overrideCommand": true,
+                "workspaceFolder": "/workspaces",
+                "workspaceMount": "source=/folder,target=/workspace,type=bind,consistency=cached",
+                "build": {
+                   	"dockerfile": "DockerFile",
+                   	"context": "..",
+                   	"args": {
+                   	    "MYARG": "MYVALUE"
+                   	},
+                   	"options": [
+                   	    "--some-option",
+                   	    "--mount"
+                   	],
+                   	"target": "development",
+                   	"cacheFrom": "some_image"
+                }
+            }
+            "#;
+
+        let result = deserialize_devcontainer_json(given_dockerfile_container_json);
+
+        assert!(result.is_ok());
+        let devcontainer = result.expect("ok");
+        assert_eq!(
+            devcontainer,
+            DevContainer {
+                name: Some(String::from("myDevContainer")),
+                remote_user: Some(String::from("root")),
+                forward_ports: Some(vec![
+                    ForwardPort::String("db:5432".to_string()),
+                    ForwardPort::Number(3000),
+                ]),
+                ports_attributes: Some(HashMap::from([
+                    (
+                        "3000".to_string(),
+                        PortAttributes {
+                            label: "This Port".to_string(),
+                            on_auto_forward: OnAutoForward::Notify,
+                            elevate_if_needed: false,
+                            require_local_port: true,
+                            protocol: PortAttributeProtocol::Https
+                        }
+                    ),
+                    (
+                        "db:5432".to_string(),
+                        PortAttributes {
+                            label: "This Port too".to_string(),
+                            on_auto_forward: OnAutoForward::Silent,
+                            elevate_if_needed: true,
+                            require_local_port: false,
+                            protocol: PortAttributeProtocol::Http
+                        }
+                    )
+                ])),
+                other_ports_attributes: Some(PortAttributes {
+                    label: "Other Ports".to_string(),
+                    on_auto_forward: OnAutoForward::OpenBrowser,
+                    elevate_if_needed: true,
+                    require_local_port: true,
+                    protocol: PortAttributeProtocol::Https
+                }),
+                update_remote_user_uid: Some(true),
+                remote_env: Some(HashMap::from([
+                    ("MYVAR1".to_string(), "myvarvalue".to_string()),
+                    ("MYVAR2".to_string(), "myvarothervalue".to_string())
+                ])),
+                initialize_command: Some(LifecycleScript::from_args(vec![
+                    "echo".to_string(),
+                    "initialize_command".to_string()
+                ])),
+                on_create_command: Some(LifecycleScript::from_str("echo on_create_command")),
+                update_content_command: Some(LifecycleScript::from_map(HashMap::from([
+                    (
+                        "first".to_string(),
+                        vec!["echo".to_string(), "update_content_command".to_string()]
+                    ),
+                    (
+                        "second".to_string(),
+                        vec!["echo".to_string(), "update_content_command".to_string()]
+                    )
+                ]))),
+                post_create_command: Some(LifecycleScript::from_str("echo post_create_command")),
+                post_start_command: Some(LifecycleScript::from_args(vec![
+                    "echo".to_string(),
+                    "post_start_command".to_string()
+                ])),
+                post_attach_command: Some(LifecycleScript::from_map(HashMap::from([
+                    (
+                        "something".to_string(),
+                        vec!["echo".to_string(), "post_attach_command".to_string()]
+                    ),
+                    (
+                        "something1".to_string(),
+                        vec![
+                            "echo".to_string(),
+                            "something".to_string(),
+                            "else".to_string()
+                        ]
+                    )
+                ]))),
+                wait_for: Some(LifecycleCommand::PostStartCommand),
+                user_env_probe: Some(UserEnvProbe::LoginShell),
+                features: Some(HashMap::from([
+                    (
+                        "ghcr.io/devcontainers/features/aws-cli:1".to_string(),
+                        FeatureOptions::Options(HashMap::new())
+                    ),
+                    (
+                        "ghcr.io/devcontainers/features/anaconda:1".to_string(),
+                        FeatureOptions::Options(HashMap::new())
+                    )
+                ])),
+                override_feature_install_order: Some(vec![
+                    "ghcr.io/devcontainers/features/anaconda:1".to_string(),
+                    "ghcr.io/devcontainers/features/aws-cli:1".to_string()
+                ]),
+                host_requirements: Some(HostRequirements {
+                    cpus: Some(2),
+                    memory: Some("8gb".to_string()),
+                    storage: Some("32gb".to_string()),
+                }),
+                app_port: Some("8081".to_string()),
+                container_env: Some(HashMap::from([
+                    ("MYVAR3".to_string(), "myvar3".to_string()),
+                    ("MYVAR4".to_string(), "myvar4".to_string())
+                ])),
+                container_user: Some("myUser".to_string()),
+                mounts: Some(vec![
+                    MountDefinition {
+                        source: "/localfolder/app".to_string(),
+                        target: "/workspaces/app".to_string(),
+                        mount_type: Some("volume".to_string()),
+                    },
+                    MountDefinition {
+                        source: "dev-containers-cli-bashhistory".to_string(),
+                        target: "/home/node/commandhistory".to_string(),
+                        mount_type: None,
+                    }
+                ]),
+                run_args: Some(vec!["-c".to_string(), "some_command".to_string()]),
+                shutdown_action: Some(ShutdownAction::StopContainer),
+                override_command: Some(true),
+                workspace_folder: Some("/workspaces".to_string()),
+                workspace_mount: Some(MountDefinition {
+                    source: "/folder".to_string(),
+                    target: "/workspace".to_string(),
+                    mount_type: Some("bind".to_string())
+                }),
+                build: Some(ContainerBuild {
+                    dockerfile: "DockerFile".to_string(),
+                    context: Some("..".to_string()),
+                    args: Some(HashMap::from([(
+                        "MYARG".to_string(),
+                        "MYVALUE".to_string()
+                    )])),
+                    options: Some(vec!["--some-option".to_string(), "--mount".to_string()]),
+                    target: Some("development".to_string()),
+                    cache_from: Some(vec!["some_image".to_string()]),
+                }),
+                ..Default::default()
+            }
+        );
+
+        assert_eq!(devcontainer.build_type(), DevContainerBuildType::Dockerfile);
+    }
+}

crates/dev_container/src/devcontainer_manifest.rs πŸ”—

@@ -0,0 +1,6571 @@
+use std::{
+    collections::HashMap,
+    fmt::Debug,
+    hash::{DefaultHasher, Hash, Hasher},
+    path::{Path, PathBuf},
+    sync::Arc,
+};
+
+use fs::Fs;
+use http_client::HttpClient;
+use util::{ResultExt, command::Command};
+
+use crate::{
+    DevContainerConfig, DevContainerContext,
+    command_json::{CommandRunner, DefaultCommandRunner},
+    devcontainer_api::{DevContainerError, DevContainerUp},
+    devcontainer_json::{
+        DevContainer, DevContainerBuildType, FeatureOptions, ForwardPort, MountDefinition,
+        deserialize_devcontainer_json,
+    },
+    docker::{
+        Docker, DockerClient, DockerComposeConfig, DockerComposeService, DockerComposeServiceBuild,
+        DockerComposeVolume, DockerInspect, DockerPs, get_remote_dir_from_config,
+    },
+    features::{DevContainerFeatureJson, FeatureManifest, parse_oci_feature_ref},
+    get_oci_token,
+    oci::{TokenResponse, download_oci_tarball, get_oci_manifest},
+    safe_id_lower,
+};
+
+/// Tracks how far the devcontainer config has been processed: freshly
+/// deserialized from JSON, or with non-remote `${...}` variable
+/// substitution already applied (see `parse_nonremote_vars`).
+enum ConfigStatus {
+    Deserialized(DevContainer),
+    VariableParsed(DevContainer),
+}
+
+/// The docker-compose files referenced by a devcontainer config together
+/// with their parsed, merged configuration.
+#[derive(Debug, Clone, Eq, PartialEq, Default)]
+pub(crate) struct DockerComposeResources {
+    files: Vec<PathBuf>,
+    config: DockerComposeConfig,
+}
+
+/// Aggregates everything needed to build and launch a dev container for one
+/// local project: I/O handles, the parsed config (plus its raw text for
+/// re-expansion), and resources resolved during the build.
+struct DevContainerManifest {
+    http_client: Arc<dyn HttpClient>,
+    fs: Arc<dyn Fs>,
+    docker_client: Arc<dyn DockerClient>,
+    command_runner: Arc<dyn CommandRunner>,
+    // Raw devcontainer.json text, kept so variables can be re-expanded.
+    raw_config: String,
+    config: ConfigStatus,
+    // Host environment captured at construction; feeds `${localEnv:...}`.
+    local_environment: HashMap<String, String>,
+    local_project_directory: PathBuf,
+    // Directory containing the devcontainer config file.
+    config_directory: PathBuf,
+    file_name: String,
+    // Populated by `download_feature_and_dockerfile_resources`.
+    root_image: Option<DockerInspect>,
+    features_build_info: Option<FeaturesBuildInfo>,
+    features: Vec<FeatureManifest>,
+}
+// NOTE(review): presumably the fallback parent dir for the project inside
+// the container; its use is not visible in this part of the file — confirm.
+const DEFAULT_REMOTE_PROJECT_DIR: &str = "/workspaces/";
+impl DevContainerManifest {
+    /// Loads and deserializes the devcontainer config found at
+    /// `local_project_path` joined with `local_config.config_path`, keeping
+    /// the raw text alongside the parsed form so variables can be expanded
+    /// later.
+    ///
+    /// Errors with `DevContainerParseFailed` when the file cannot be read,
+    /// fails to deserialize, or has a missing/non-UTF-8 file name, and with
+    /// `NotInValidProject` when the config path has no parent directory.
+    async fn new(
+        context: &DevContainerContext,
+        environment: HashMap<String, String>,
+        docker_client: Arc<dyn DockerClient>,
+        command_runner: Arc<dyn CommandRunner>,
+        local_config: DevContainerConfig,
+        local_project_path: &Path,
+    ) -> Result<Self, DevContainerError> {
+        let config_path = local_project_path.join(local_config.config_path.clone());
+        log::debug!("parsing devcontainer json found in {:?}", &config_path);
+        let devcontainer_contents = context.fs.load(&config_path).await.map_err(|e| {
+            log::error!("Unable to read devcontainer contents: {e}");
+            DevContainerError::DevContainerParseFailed
+        })?;
+
+        let devcontainer = deserialize_devcontainer_json(&devcontainer_contents)?;
+
+        let devcontainer_directory = config_path.parent().ok_or_else(|| {
+            log::error!("Dev container file should be in a directory");
+            DevContainerError::NotInValidProject
+        })?;
+        let file_name = config_path
+            .file_name()
+            .and_then(|f| f.to_str())
+            .ok_or_else(|| {
+                log::error!("Dev container file has no file name, or is invalid unicode");
+                DevContainerError::DevContainerParseFailed
+            })?;
+
+        Ok(Self {
+            fs: context.fs.clone(),
+            http_client: context.http_client.clone(),
+            docker_client,
+            command_runner,
+            raw_config: devcontainer_contents,
+            config: ConfigStatus::Deserialized(devcontainer),
+            local_project_directory: local_project_path.to_path_buf(),
+            local_environment: environment,
+            config_directory: devcontainer_directory.to_path_buf(),
+            file_name: file_name.to_string(),
+            root_image: None,
+            features_build_info: None,
+            features: Vec::new(),
+        })
+    }
+
+    /// Derives a 16-hex-digit id for this project/config pair by hashing the
+    /// identifying labels, sorted by key so the result is independent of
+    /// label order.
+    ///
+    /// NOTE(review): `DefaultHasher`'s algorithm is not guaranteed stable
+    /// across Rust releases, so this id may change after a toolchain
+    /// upgrade — confirm that is acceptable wherever the id is persisted.
+    fn devcontainer_id(&self) -> String {
+        let mut labels = self.identifying_labels();
+        labels.sort_by_key(|(key, _)| *key);
+
+        let mut hasher = DefaultHasher::new();
+        for (key, value) in &labels {
+            key.hash(&mut hasher);
+            value.hash(&mut hasher);
+        }
+
+        format!("{:016x}", hasher.finish())
+    }
+
+    /// The label key/value pairs that uniquely identify a container built
+    /// from this local project folder and config file (the same pairs
+    /// hashed by `devcontainer_id`).
+    fn identifying_labels(&self) -> Vec<(&str, String)> {
+        let labels = vec![
+            (
+                "devcontainer.local_folder",
+                (self.local_project_directory.display()).to_string(),
+            ),
+            (
+                "devcontainer.config_file",
+                (self.config_file().display()).to_string(),
+            ),
+        ];
+        labels
+    }
+
+    /// Expands the devcontainer variables that can be resolved without a
+    /// running container: `${devcontainerId}`, the workspace-folder and
+    /// basename variables, and `${localEnv:...}` lookups against the
+    /// captured host environment. Remote-only variables such as
+    /// `${containerEnv:...}` are left untouched.
+    fn parse_nonremote_vars_for_content(&self, content: &str) -> Result<String, DevContainerError> {
+        let mut replaced_content = content
+            .replace("${devcontainerId}", &self.devcontainer_id())
+            .replace(
+                "${containerWorkspaceFolderBasename}",
+                &self.remote_workspace_base_name().unwrap_or_default(),
+            )
+            .replace(
+                "${localWorkspaceFolderBasename}",
+                &self.local_workspace_base_name()?,
+            )
+            .replace(
+                "${containerWorkspaceFolder}",
+                &self
+                    .remote_workspace_folder()
+                    .map(|path| path.display().to_string())
+                    .unwrap_or_default()
+                    // Backslashes become forward slashes, presumably so
+                    // Windows host paths stay valid in the container.
+                    .replace('\\', "/"),
+            )
+            .replace(
+                "${localWorkspaceFolder}",
+                &self.local_workspace_folder().replace('\\', "/"),
+            );
+        for (k, v) in &self.local_environment {
+            let find = format!("${{localEnv:{k}}}");
+            replaced_content = replaced_content.replace(&find, &v.replace('\\', "/"));
+        }
+
+        Ok(replaced_content)
+    }
+
+    /// Runs non-remote variable expansion over the raw config text,
+    /// re-deserializes the result, and advances the config state to
+    /// `VariableParsed`.
+    fn parse_nonremote_vars(&mut self) -> Result<(), DevContainerError> {
+        let replaced_content = self.parse_nonremote_vars_for_content(&self.raw_config)?;
+        let parsed_config = deserialize_devcontainer_json(&replaced_content)?;
+
+        self.config = ConfigStatus::VariableParsed(parsed_config);
+
+        Ok(())
+    }
+
+    /// Computes the effective remote environment: starts from the
+    /// container's own env (minus HOME), then overlays the config's
+    /// `remoteEnv` with `${containerEnv:...}` references substituted.
+    ///
+    /// Substitution round-trips `remoteEnv` through its JSON string form so
+    /// replacements apply uniformly across all keys and values.
+    fn runtime_remote_env(
+        &self,
+        container_env: &HashMap<String, String>,
+    ) -> Result<HashMap<String, String>, DevContainerError> {
+        let mut merged_remote_env = container_env.clone();
+        // HOME is user-specific, and we will often not run as the image user
+        merged_remote_env.remove("HOME");
+        if let Some(remote_env) = self.dev_container().remote_env.clone() {
+            let mut raw = serde_json_lenient::to_string(&remote_env).map_err(|e| {
+                log::error!(
+                    "Unexpected error serializing dev container remote_env: {e} - {:?}",
+                    remote_env
+                );
+                DevContainerError::DevContainerParseFailed
+            })?;
+            for (k, v) in container_env {
+                raw = raw.replace(&format!("${{containerEnv:{k}}}"), v);
+            }
+            let reserialized: HashMap<String, String> = serde_json_lenient::from_str(&raw)
+                .map_err(|e| {
+                    log::error!(
+                        "Unexpected error reserializing dev container remote env: {e} - {:?}",
+                        &raw
+                    );
+                    DevContainerError::DevContainerParseFailed
+                })?;
+            // remoteEnv entries win over container env on key collisions.
+            for (k, v) in reserialized {
+                merged_remote_env.insert(k, v);
+            }
+        }
+        Ok(merged_remote_env)
+    }
+
+    /// Full path of the devcontainer config file (directory + file name).
+    fn config_file(&self) -> PathBuf {
+        self.config_directory.join(&self.file_name)
+    }
+
+    /// The parsed config, regardless of whether variable expansion has run.
+    fn dev_container(&self) -> &DevContainer {
+        match &self.config {
+            ConfigStatus::Deserialized(dev_container) => dev_container,
+            ConfigStatus::VariableParsed(dev_container) => dev_container,
+        }
+    }
+
+    /// Resolves the on-disk path of this config's Dockerfile, relative to
+    /// the config directory: `build.dockerfile` for Dockerfile builds, or
+    /// the primary compose service's build section for docker-compose
+    /// builds. Returns `None` for image-only configs, compose services with
+    /// no build/dockerfile, or when the compose manifest cannot be loaded
+    /// (those errors are deliberately swallowed into `None`).
+    async fn dockerfile_location(&self) -> Option<PathBuf> {
+        let dev_container = self.dev_container();
+        match dev_container.build_type() {
+            DevContainerBuildType::Image => None,
+            DevContainerBuildType::Dockerfile => dev_container
+                .build
+                .as_ref()
+                .map(|build| self.config_directory.join(&build.dockerfile)),
+            DevContainerBuildType::DockerCompose => {
+                let Ok(docker_compose_manifest) = self.docker_compose_manifest().await else {
+                    return None;
+                };
+                let Ok((_, main_service)) = find_primary_service(&docker_compose_manifest, self)
+                else {
+                    return None;
+                };
+                main_service
+                    .build
+                    .and_then(|b| b.dockerfile)
+                    .map(|dockerfile| self.config_directory.join(dockerfile))
+            }
+            DevContainerBuildType::None => None,
+        }
+    }
+
+    /// Builds a deterministic image tag for the features build image: a
+    /// short prefix from the sanitized container name (or "zed-dc" when the
+    /// config is unnamed), a hash of the extended Dockerfile path, and a
+    /// "-features" suffix.
+    fn generate_features_image_tag(&self, dockerfile_build_path: String) -> String {
+        let mut hasher = DefaultHasher::new();
+        let prefix = match &self.dev_container().name {
+            Some(name) => &safe_id_lower(name),
+            None => "zed-dc",
+        };
+        // `get(..6)` yields None if byte 6 is not a char boundary; fall back
+        // to the whole prefix rather than panicking on a slice.
+        let prefix = prefix.get(..6).unwrap_or(prefix);
+
+        dockerfile_build_path.hash(&mut hasher);
+
+        let hash = hasher.finish();
+        format!("{}-{:x}-features", prefix, hash)
+    }
+
+    /// Gets the base image from the devcontainer with the following precedence:
+    /// - The devcontainer image if an image is specified
+    /// - The image sourced in the Dockerfile if a Dockerfile is specified
+    /// - The image sourced in the docker-compose main service dockerfile, if one is specified
+    /// - The image sourced in the docker-compose main service, if one is specified
+    ///
+    /// If no such image is available, return `Err(DevContainerParseFailed)`;
+    /// Dockerfile/compose read failures map to the same error.
+    async fn get_base_image_from_config(&self) -> Result<String, DevContainerError> {
+        if let Some(image) = &self.dev_container().image {
+            return Ok(image.to_string());
+        }
+        if let Some(dockerfile) = self.dev_container().build.as_ref().map(|b| &b.dockerfile) {
+            let dockerfile_contents = self
+                .fs
+                .load(&self.config_directory.join(dockerfile))
+                .await
+                .map_err(|e| {
+                    log::error!("Error reading dockerfile: {e}");
+                    DevContainerError::DevContainerParseFailed
+                })?;
+            return image_from_dockerfile(self, dockerfile_contents);
+        }
+        if self.dev_container().docker_compose_file.is_some() {
+            let docker_compose_manifest = self.docker_compose_manifest().await?;
+            let (_, main_service) = find_primary_service(&docker_compose_manifest, &self)?;
+
+            // The service's dockerfile takes precedence over its `image`.
+            if let Some(dockerfile) = main_service
+                .build
+                .as_ref()
+                .and_then(|b| b.dockerfile.as_ref())
+            {
+                let dockerfile_contents = self
+                    .fs
+                    .load(&self.config_directory.join(dockerfile))
+                    .await
+                    .map_err(|e| {
+                        log::error!("Error reading dockerfile: {e}");
+                        DevContainerError::DevContainerParseFailed
+                    })?;
+                return image_from_dockerfile(self, dockerfile_contents);
+            }
+            if let Some(image) = &main_service.image {
+                return Ok(image.to_string());
+            }
+
+            log::error!("No valid base image found in docker-compose configuration");
+            return Err(DevContainerError::DevContainerParseFailed);
+        }
+        log::error!("No valid base image found in dev container configuration");
+        Err(DevContainerError::DevContainerParseFailed)
+    }
+
+    /// Downloads every enabled OCI feature payload and generates the
+    /// extended Dockerfile used to layer features onto the base image.
+    /// On success populates `self.root_image`, `self.features_build_info`,
+    /// and `self.features`; returns early with `Ok` for an image-only config
+    /// with no features (nothing to download).
+    ///
+    /// Must be called after `parse_nonremote_vars`; fails with
+    /// `DevContainerParseFailed` if variables have not been expanded yet.
+    async fn download_feature_and_dockerfile_resources(&mut self) -> Result<(), DevContainerError> {
+        let dev_container = match &self.config {
+            ConfigStatus::Deserialized(_) => {
+                log::error!(
+                    "Dev container has not yet been parsed for variable expansion. Cannot yet download resources"
+                );
+                return Err(DevContainerError::DevContainerParseFailed);
+            }
+            ConfigStatus::VariableParsed(dev_container) => dev_container,
+        };
+        let root_image_tag = self.get_base_image_from_config().await?;
+        let root_image = self.docker_client.inspect(&root_image_tag).await?;
+
+        if dev_container.build_type() == DevContainerBuildType::Image
+            && !dev_container.has_features()
+        {
+            log::debug!("No resources to download. Proceeding with just the image");
+            return Ok(());
+        }
+
+        // --- Phase 1: prepare temp workspace and the builtin env file ---
+
+        let temp_base = std::env::temp_dir().join("devcontainer-zed");
+        // Timestamped dir avoids collisions between successive builds.
+        let timestamp = std::time::SystemTime::now()
+            .duration_since(std::time::UNIX_EPOCH)
+            .map(|d| d.as_millis())
+            .unwrap_or(0);
+
+        let features_content_dir = temp_base.join(format!("container-features-{}", timestamp));
+        let empty_context_dir = temp_base.join("empty-folder");
+
+        self.fs
+            .create_dir(&features_content_dir)
+            .await
+            .map_err(|e| {
+                log::error!("Failed to create features content dir: {e}");
+                DevContainerError::FilesystemError
+            })?;
+
+        self.fs.create_dir(&empty_context_dir).await.map_err(|e| {
+            log::error!("Failed to create empty context dir: {e}");
+            DevContainerError::FilesystemError
+        })?;
+
+        let dockerfile_path = features_content_dir.join("Dockerfile.extended");
+        let image_tag =
+            self.generate_features_image_tag(dockerfile_path.clone().display().to_string());
+
+        let build_info = FeaturesBuildInfo {
+            dockerfile_path,
+            features_content_dir,
+            empty_context_dir,
+            build_image: dev_container.image.clone(),
+            image_tag,
+        };
+
+        let features = match &dev_container.features {
+            Some(features) => features,
+            None => &HashMap::new(),
+        };
+
+        let container_user = get_container_user_from_config(&root_image, self)?;
+        let remote_user = get_remote_user_from_config(&root_image, self)?;
+
+        let builtin_env_content = format!(
+            "_CONTAINER_USER={}\n_REMOTE_USER={}\n",
+            container_user, remote_user
+        );
+
+        let builtin_env_path = build_info
+            .features_content_dir
+            .join("devcontainer-features.builtin.env");
+
+        self.fs
+            .write(&builtin_env_path, &builtin_env_content.as_bytes())
+            .await
+            .map_err(|e| {
+                log::error!("Failed to write builtin env file: {e}");
+                DevContainerError::FilesystemError
+            })?;
+
+        let ordered_features =
+            resolve_feature_order(features, &dev_container.override_feature_install_order);
+
+        // --- Phase 2: download and inflate each enabled feature, in
+        // resolved install order ---
+
+        for (index, (feature_ref, options)) in ordered_features.iter().enumerate() {
+            if matches!(options, FeatureOptions::Bool(false)) {
+                log::debug!(
+                    "Feature '{}' is disabled (set to false), skipping",
+                    feature_ref
+                );
+                continue;
+            }
+
+            let feature_id = extract_feature_id(feature_ref);
+            // Index suffix keeps directories unique even if the same
+            // feature id appears more than once.
+            let consecutive_id = format!("{}_{}", feature_id, index);
+            let feature_dir = build_info.features_content_dir.join(&consecutive_id);
+
+            self.fs.create_dir(&feature_dir).await.map_err(|e| {
+                log::error!(
+                    "Failed to create feature directory for {}: {e}",
+                    feature_ref
+                );
+                DevContainerError::FilesystemError
+            })?;
+
+            let oci_ref = parse_oci_feature_ref(feature_ref).ok_or_else(|| {
+                log::error!(
+                    "Feature '{}' is not a supported OCI feature reference",
+                    feature_ref
+                );
+                DevContainerError::DevContainerParseFailed
+            })?;
+            let TokenResponse { token } =
+                get_oci_token(&oci_ref.registry, &oci_ref.path, &self.http_client)
+                    .await
+                    .map_err(|e| {
+                        log::error!("Failed to get OCI token for feature '{}': {e}", feature_ref);
+                        DevContainerError::ResourceFetchFailed
+                    })?;
+            let manifest = get_oci_manifest(
+                &oci_ref.registry,
+                &oci_ref.path,
+                &token,
+                &self.http_client,
+                &oci_ref.version,
+                None,
+            )
+            .await
+            .map_err(|e| {
+                log::error!(
+                    "Failed to fetch OCI manifest for feature '{}': {e}",
+                    feature_ref
+                );
+                DevContainerError::ResourceFetchFailed
+            })?;
+            // Feature payload is expected to be the first (only) layer.
+            let digest = &manifest
+                .layers
+                .first()
+                .ok_or_else(|| {
+                    log::error!(
+                        "OCI manifest for feature '{}' contains no layers",
+                        feature_ref
+                    );
+                    DevContainerError::ResourceFetchFailed
+                })?
+                .digest;
+            download_oci_tarball(
+                &token,
+                &oci_ref.registry,
+                &oci_ref.path,
+                digest,
+                "application/vnd.devcontainers.layer.v1+tar",
+                &feature_dir,
+                &self.http_client,
+                &self.fs,
+                None,
+            )
+            .await?;
+
+            let feature_json_path = &feature_dir.join("devcontainer-feature.json");
+            if !self.fs.is_file(feature_json_path).await {
+                let message = format!(
+                    "No devcontainer-feature.json found in {:?}, no defaults to apply",
+                    feature_json_path
+                );
+                log::error!("{}", &message);
+                return Err(DevContainerError::ResourceFetchFailed);
+            }
+
+            let contents = self.fs.load(&feature_json_path).await.map_err(|e| {
+                log::error!("error reading devcontainer-feature.json: {:?}", e);
+                DevContainerError::FilesystemError
+            })?;
+
+            // Feature metadata also gets non-remote variable expansion.
+            let contents_parsed = self.parse_nonremote_vars_for_content(&contents)?;
+
+            let feature_json: DevContainerFeatureJson =
+                serde_json_lenient::from_str(&contents_parsed).map_err(|e| {
+                    log::error!("Failed to parse devcontainer-feature.json: {e}");
+                    DevContainerError::ResourceFetchFailed
+                })?;
+
+            let feature_manifest = FeatureManifest::new(consecutive_id, feature_dir, feature_json);
+
+            log::debug!("Downloaded OCI feature content for '{}'", feature_ref);
+
+            let env_content = feature_manifest
+                .write_feature_env(&self.fs, options)
+                .await?;
+
+            let wrapper_content = generate_install_wrapper(feature_ref, feature_id, &env_content)?;
+
+            self.fs
+                .write(
+                    &feature_manifest
+                        .file_path()
+                        .join("devcontainer-features-install.sh"),
+                    &wrapper_content.as_bytes(),
+                )
+                .await
+                .map_err(|e| {
+                    log::error!("Failed to write install wrapper for {}: {e}", feature_ref);
+                    DevContainerError::FilesystemError
+                })?;
+
+            self.features.push(feature_manifest);
+        }
+
+        // --- Phase 3: Generate extended Dockerfile from the inflated manifests ---
+
+        let is_compose = dev_container.build_type() == DevContainerBuildType::DockerCompose;
+        let use_buildkit = self.docker_client.supports_compose_buildkit() || !is_compose;
+
+        let dockerfile_base_content = if let Some(location) = &self.dockerfile_location().await {
+            self.fs.load(location).await.log_err()
+        } else {
+            None
+        };
+
+        let dockerfile_content = self.generate_dockerfile_extended(
+            &container_user,
+            &remote_user,
+            dockerfile_base_content,
+            use_buildkit,
+        );
+
+        self.fs
+            .write(&build_info.dockerfile_path, &dockerfile_content.as_bytes())
+            .await
+            .map_err(|e| {
+                log::error!("Failed to write Dockerfile.extended: {e}");
+                DevContainerError::FilesystemError
+            })?;
+
+        log::debug!(
+            "Features build resources written to {:?}",
+            build_info.features_content_dir
+        );
+
+        self.root_image = Some(root_image);
+        self.features_build_info = Some(build_info);
+
+        Ok(())
+    }
+
+    /// Renders the text of `Dockerfile.extended`: the (alias-normalized)
+    /// base Dockerfile, a stage that normalizes feature content, a target
+    /// stage that copies feature payloads in, one layer per feature, and —
+    /// when remote-user uid updating is disabled — ENV layers for feature
+    /// and config container env.
+    ///
+    /// `use_buildkit` switches between BuildKit-style local-context COPY
+    /// sources and a classic intermediate image stage.
+    fn generate_dockerfile_extended(
+        &self,
+        container_user: &str,
+        remote_user: &str,
+        dockerfile_content: Option<String>,
+        use_buildkit: bool,
+    ) -> String {
+        // Uid updating is never attempted on Windows hosts.
+        #[cfg(not(target_os = "windows"))]
+        let update_remote_user_uid = self.dev_container().update_remote_user_uid.unwrap_or(true);
+        #[cfg(target_os = "windows")]
+        let update_remote_user_uid = false;
+        let feature_layers: String = self
+            .features
+            .iter()
+            .map(|manifest| {
+                manifest.generate_dockerfile_feature_layer(
+                    use_buildkit,
+                    FEATURES_CONTAINER_TEMP_DEST_FOLDER,
+                )
+            })
+            .collect();
+
+        // Shell snippets that resolve each user's passwd entry (used below
+        // to extract home directories into the builtin env file).
+        let container_home_cmd = get_ent_passwd_shell_command(container_user);
+        let remote_home_cmd = get_ent_passwd_shell_command(remote_user);
+
+        // Ensure the base content's final stage has an alias we can FROM.
+        let dockerfile_content = dockerfile_content
+            .map(|content| {
+                if dockerfile_alias(&content).is_some() {
+                    content
+                } else {
+                    dockerfile_inject_alias(&content, "dev_container_auto_added_stage_label")
+                }
+            })
+            .unwrap_or("".to_string());
+
+        let dest = FEATURES_CONTAINER_TEMP_DEST_FOLDER;
+
+        let feature_content_source_stage = if use_buildkit {
+            "".to_string()
+        } else {
+            "\nFROM dev_container_feature_content_temp as dev_containers_feature_content_source\n"
+                .to_string()
+        };
+
+        let builtin_env_source_path = if use_buildkit {
+            "./devcontainer-features.builtin.env"
+        } else {
+            "/tmp/build-features/devcontainer-features.builtin.env"
+        };
+
+        // Doubled braces (e.g. `${{PATH...}}`) are format! escapes for
+        // literal `${...}` in the emitted Dockerfile/sed text.
+        let mut extended_dockerfile = format!(
+            r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder
+
+{dockerfile_content}
+{feature_content_source_stage}
+FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
+USER root
+COPY --from=dev_containers_feature_content_source {builtin_env_source_path} /tmp/build-features/
+RUN chmod -R 0755 /tmp/build-features/
+
+FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_target_stage
+
+USER root
+
+RUN mkdir -p {dest}
+COPY --from=dev_containers_feature_content_normalize /tmp/build-features/ {dest}
+
+RUN \
+echo "_CONTAINER_USER_HOME=$({container_home_cmd} | cut -d: -f6)" >> {dest}/devcontainer-features.builtin.env && \
+echo "_REMOTE_USER_HOME=$({remote_home_cmd} | cut -d: -f6)" >> {dest}/devcontainer-features.builtin.env
+
+{feature_layers}
+
+ARG _DEV_CONTAINERS_IMAGE_USER=root
+USER $_DEV_CONTAINERS_IMAGE_USER
+"#
+        );
+
+        // If we're not adding a uid update layer, then we should add env vars to this layer instead
+        if !update_remote_user_uid {
+            extended_dockerfile = format!(
+                r#"{extended_dockerfile}
+# Ensure that /etc/profile does not clobber the existing path
+RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
+"#
+            );
+
+            for feature in &self.features {
+                let container_env_layer = feature.generate_dockerfile_env();
+                extended_dockerfile = format!("{extended_dockerfile}\n{container_env_layer}");
+            }
+
+            if let Some(env) = &self.dev_container().container_env {
+                for (key, value) in env {
+                    extended_dockerfile = format!("{extended_dockerfile}ENV {key}={value}\n");
+                }
+            }
+        }
+
+        extended_dockerfile
+    }
+
+    /// Merge container resources declared in the devcontainer config with
+    /// those contributed by features (mounts, privileged flag, entrypoints)
+    /// into a single `DockerBuildResources` for the given base image.
+    ///
+    /// # Errors
+    /// Returns `DevContainerError::DevContainerParseFailed` if the config has
+    /// not yet gone through variable expansion.
+    fn build_merged_resources(
+        &self,
+        base_image: DockerInspect,
+    ) -> Result<DockerBuildResources, DevContainerError> {
+        let dev_container = match &self.config {
+            ConfigStatus::Deserialized(_) => {
+                log::error!(
+                    "Dev container has not yet been parsed for variable expansion. Cannot yet merge resources"
+                );
+                return Err(DevContainerError::DevContainerParseFailed);
+            }
+            ConfigStatus::VariableParsed(dev_container) => dev_container,
+        };
+        // Config-level mounts first, then mounts contributed by each feature.
+        let mut mounts = dev_container.mounts.clone().unwrap_or(Vec::new());
+
+        let mut feature_mounts = self.features.iter().flat_map(|f| f.mounts()).collect();
+
+        mounts.append(&mut feature_mounts);
+
+        // Privileged if either the config or any feature requests it.
+        let privileged = dev_container.privileged.unwrap_or(false)
+            || self.features.iter().any(|f| f.privileged());
+
+        // Entrypoint script: announce startup, trap SIGTERM (15) for a clean
+        // exit, run each feature's entrypoint, exec the container command,
+        // and fall back to a sleep loop so the container stays alive.
+        let mut entrypoint_script_lines = vec![
+            "echo Container started".to_string(),
+            "trap \"exit 0\" 15".to_string(),
+        ];
+
+        for entrypoint in self.features.iter().filter_map(|f| f.entrypoint()) {
+            entrypoint_script_lines.push(entrypoint.clone());
+        }
+        entrypoint_script_lines.append(&mut vec![
+            "exec \"$@\"".to_string(),
+            "while sleep 1 & wait $!; do :; done".to_string(),
+        ]);
+
+        Ok(DockerBuildResources {
+            image: base_image,
+            additional_mounts: mounts,
+            privileged,
+            entrypoint_script: entrypoint_script_lines.join("\n").trim().to_string(),
+        })
+    }
+
+    /// Build everything needed to start the container, dispatching on the
+    /// config's build type:
+    /// - `Image` / `Dockerfile`: build the (feature-extended) docker image,
+    ///   apply the remote-user UID update, then merge resources.
+    /// - `DockerCompose`: build and extend the compose files instead.
+    ///
+    /// # Errors
+    /// `DevContainerParseFailed` if the config has not had variables parsed
+    /// or its build type is `None`; otherwise build errors are propagated.
+    async fn build_resources(&self) -> Result<DevContainerBuildResources, DevContainerError> {
+        if let ConfigStatus::Deserialized(_) = &self.config {
+            log::error!(
+                "Dev container has not yet been parsed for variable expansion. Cannot yet build resources"
+            );
+            return Err(DevContainerError::DevContainerParseFailed);
+        }
+        let dev_container = self.dev_container();
+        match dev_container.build_type() {
+            DevContainerBuildType::Image | DevContainerBuildType::Dockerfile => {
+                let built_docker_image = self.build_docker_image().await?;
+                // No override tag: the UID-updated image gets a "-uid" suffix.
+                let built_docker_image = self
+                    .update_remote_user_uid(built_docker_image, None)
+                    .await?;
+
+                let resources = self.build_merged_resources(built_docker_image)?;
+                Ok(DevContainerBuildResources::Docker(resources))
+            }
+            DevContainerBuildType::DockerCompose => {
+                log::debug!("Using docker compose. Building extended compose files");
+                let docker_compose_resources = self.build_and_extend_compose_files().await?;
+
+                return Ok(DevContainerBuildResources::DockerCompose(
+                    docker_compose_resources,
+                ));
+            }
+            DevContainerBuildType::None => {
+                return Err(DevContainerError::DevContainerParseFailed);
+            }
+        }
+    }
+
+    /// Start the dev container from the prepared build resources and gather
+    /// the runtime information callers need: container id, remote user,
+    /// remote workspace folder, extension ids, and the remote environment.
+    ///
+    /// # Errors
+    /// `DevContainerParseFailed` if variables have not been parsed; otherwise
+    /// propagates errors from starting the container or resolving its config.
+    async fn run_dev_container(
+        &self,
+        build_resources: DevContainerBuildResources,
+    ) -> Result<DevContainerUp, DevContainerError> {
+        let ConfigStatus::VariableParsed(_) = &self.config else {
+            log::error!(
+                "Variables have not been parsed; cannot proceed with running the dev container"
+            );
+            return Err(DevContainerError::DevContainerParseFailed);
+        };
+        // Start via compose or plain docker depending on how we built.
+        let running_container = match build_resources {
+            DevContainerBuildResources::DockerCompose(resources) => {
+                self.run_docker_compose(resources).await?
+            }
+            DevContainerBuildResources::Docker(resources) => {
+                self.run_docker_image(resources).await?
+            }
+        };
+
+        let remote_user = get_remote_user_from_config(&running_container, self)?;
+        let remote_workspace_folder = get_remote_dir_from_config(
+            &running_container,
+            (&self.local_project_directory.display()).to_string(),
+        )?;
+
+        // Remote env is derived from the running container's env map.
+        let remote_env = self.runtime_remote_env(&running_container.config.env_as_map()?)?;
+
+        Ok(DevContainerUp {
+            container_id: running_container.id,
+            remote_user,
+            remote_workspace_folder,
+            extension_ids: self.extension_ids(),
+            remote_env,
+        })
+    }
+
+    /// Resolve the configured docker-compose file paths (relative to the
+    /// config directory) and load the merged compose config from docker.
+    ///
+    /// # Errors
+    /// `DevContainerParseFailed` if variables are unparsed, no compose files
+    /// are configured, or the compose config cannot be deserialized.
+    async fn docker_compose_manifest(&self) -> Result<DockerComposeResources, DevContainerError> {
+        let dev_container = match &self.config {
+            ConfigStatus::Deserialized(_) => {
+                log::error!(
+                    "Dev container has not yet been parsed for variable expansion. Cannot yet get docker compose files"
+                );
+                return Err(DevContainerError::DevContainerParseFailed);
+            }
+            ConfigStatus::VariableParsed(dev_container) => dev_container,
+        };
+        let Some(docker_compose_files) = dev_container.docker_compose_file.clone() else {
+            return Err(DevContainerError::DevContainerParseFailed);
+        };
+        // Compose file entries are relative to the devcontainer config dir.
+        let docker_compose_full_paths = docker_compose_files
+            .iter()
+            .map(|relative| self.config_directory.join(relative))
+            .collect::<Vec<PathBuf>>();
+
+        let Some(config) = self
+            .docker_client
+            .get_docker_compose_config(&docker_compose_full_paths)
+            .await?
+        else {
+            log::error!("Output could not deserialize into DockerComposeConfig");
+            return Err(DevContainerError::DevContainerParseFailed);
+        };
+        Ok(DockerComposeResources {
+            files: docker_compose_full_paths,
+            config,
+        })
+    }
+
+    /// Build the feature-extended image for the primary compose service and
+    /// return the compose file list extended with generated override files
+    /// (a build override and a runtime override).
+    ///
+    /// Two build strategies exist: with compose BuildKit support the feature
+    /// content is passed as an additional build context; without it a
+    /// feature-content image is built up front and referenced from the
+    /// extended Dockerfile.
+    ///
+    /// # Errors
+    /// `DevContainerParseFailed` for unparsed config, missing features build
+    /// info, or a service with neither `image` nor `build`; filesystem and
+    /// docker errors are propagated.
+    async fn build_and_extend_compose_files(
+        &self,
+    ) -> Result<DockerComposeResources, DevContainerError> {
+        let dev_container = match &self.config {
+            ConfigStatus::Deserialized(_) => {
+                log::error!(
+                    "Dev container has not yet been parsed for variable expansion. Cannot yet build from compose files"
+                );
+                return Err(DevContainerError::DevContainerParseFailed);
+            }
+            ConfigStatus::VariableParsed(dev_container) => dev_container,
+        };
+
+        let Some(features_build_info) = &self.features_build_info else {
+            log::error!(
+                "Cannot build and extend compose files: features build info is not yet constructed"
+            );
+            return Err(DevContainerError::DevContainerParseFailed);
+        };
+        let mut docker_compose_resources = self.docker_compose_manifest().await?;
+        let supports_buildkit = self.docker_client.supports_compose_buildkit();
+
+        let (main_service_name, main_service) =
+            find_primary_service(&docker_compose_resources, self)?;
+        // NOTE(review): `.map(|b| b.dockerfile.as_ref()).is_some()` is true
+        // whenever `build` itself is `Some`, regardless of `dockerfile` —
+        // the inner lookup has no effect. Confirm whether
+        // `.and_then(|b| b.dockerfile.as_ref())` was intended.
+        let built_service_image = if main_service
+            .build
+            .as_ref()
+            .map(|b| b.dockerfile.as_ref())
+            .is_some()
+        {
+            // Legacy (non-BuildKit) path needs the feature-content image
+            // pre-built so the extended Dockerfile can COPY from it.
+            if !supports_buildkit {
+                self.build_feature_content_image().await?;
+            }
+
+            let dockerfile_path = &features_build_info.dockerfile_path;
+
+            let build_args = if !supports_buildkit {
+                HashMap::from([
+                    (
+                        "_DEV_CONTAINERS_BASE_IMAGE".to_string(),
+                        "dev_container_auto_added_stage_label".to_string(),
+                    ),
+                    ("_DEV_CONTAINERS_IMAGE_USER".to_string(), "root".to_string()),
+                ])
+            } else {
+                HashMap::from([
+                    ("BUILDKIT_INLINE_CACHE".to_string(), "1".to_string()),
+                    (
+                        "_DEV_CONTAINERS_BASE_IMAGE".to_string(),
+                        "dev_container_auto_added_stage_label".to_string(),
+                    ),
+                    ("_DEV_CONTAINERS_IMAGE_USER".to_string(), "root".to_string()),
+                ])
+            };
+
+            // With BuildKit the feature content dir is wired in as a named
+            // additional build context instead of a pre-built image.
+            let additional_contexts = if !supports_buildkit {
+                None
+            } else {
+                Some(HashMap::from([(
+                    "dev_containers_feature_content_source".to_string(),
+                    features_build_info
+                        .features_content_dir
+                        .display()
+                        .to_string(),
+                )]))
+            };
+
+            // Compose override that retargets the main service's build at the
+            // generated feature-extension Dockerfile and tags the result.
+            let build_override = DockerComposeConfig {
+                name: None,
+                services: HashMap::from([(
+                    main_service_name.clone(),
+                    DockerComposeService {
+                        image: Some(features_build_info.image_tag.clone()),
+                        entrypoint: None,
+                        cap_add: None,
+                        security_opt: None,
+                        labels: None,
+                        build: Some(DockerComposeServiceBuild {
+                            context: Some(
+                                features_build_info.empty_context_dir.display().to_string(),
+                            ),
+                            dockerfile: Some(dockerfile_path.display().to_string()),
+                            args: Some(build_args),
+                            additional_contexts,
+                        }),
+                        volumes: Vec::new(),
+                        ..Default::default()
+                    },
+                )]),
+                volumes: HashMap::new(),
+            };
+
+            // NOTE(review): fixed path under the system temp dir — assumes the
+            // directory exists and concurrent builds don't collide; confirm.
+            let temp_base = std::env::temp_dir().join("devcontainer-zed");
+            let config_location = temp_base.join("docker_compose_build.json");
+
+            let config_json = serde_json_lenient::to_string(&build_override).map_err(|e| {
+                log::error!("Error serializing docker compose runtime override: {e}");
+                DevContainerError::DevContainerParseFailed
+            })?;
+
+            self.fs
+                .write(&config_location, config_json.as_bytes())
+                .await
+                .map_err(|e| {
+                    log::error!("Error writing the runtime override file: {e}");
+                    DevContainerError::FilesystemError
+                })?;
+
+            docker_compose_resources.files.push(config_location);
+
+            self.docker_client
+                .docker_compose_build(&docker_compose_resources.files, &self.project_name())
+                .await?;
+            self.docker_client
+                .inspect(&features_build_info.image_tag)
+                .await?
+        } else if let Some(image) = &main_service.image {
+            // Service specifies an image rather than a build section.
+            if dev_container
+                .features
+                .as_ref()
+                .is_none_or(|features| features.is_empty())
+            {
+                // No features: the referenced image can be used as-is.
+                self.docker_client.inspect(image).await?
+            } else {
+                // NOTE(review): this branch duplicates the build-override
+                // construction above almost verbatim (only the base-image
+                // build arg differs); candidate for extraction into a helper.
+                if !supports_buildkit {
+                    self.build_feature_content_image().await?;
+                }
+
+                let dockerfile_path = &features_build_info.dockerfile_path;
+
+                let build_args = if !supports_buildkit {
+                    HashMap::from([
+                        ("_DEV_CONTAINERS_BASE_IMAGE".to_string(), image.clone()),
+                        ("_DEV_CONTAINERS_IMAGE_USER".to_string(), "root".to_string()),
+                    ])
+                } else {
+                    HashMap::from([
+                        ("BUILDKIT_INLINE_CACHE".to_string(), "1".to_string()),
+                        ("_DEV_CONTAINERS_BASE_IMAGE".to_string(), image.clone()),
+                        ("_DEV_CONTAINERS_IMAGE_USER".to_string(), "root".to_string()),
+                    ])
+                };
+
+                let additional_contexts = if !supports_buildkit {
+                    None
+                } else {
+                    Some(HashMap::from([(
+                        "dev_containers_feature_content_source".to_string(),
+                        features_build_info
+                            .features_content_dir
+                            .display()
+                            .to_string(),
+                    )]))
+                };
+
+                let build_override = DockerComposeConfig {
+                    name: None,
+                    services: HashMap::from([(
+                        main_service_name.clone(),
+                        DockerComposeService {
+                            image: Some(features_build_info.image_tag.clone()),
+                            entrypoint: None,
+                            cap_add: None,
+                            security_opt: None,
+                            labels: None,
+                            build: Some(DockerComposeServiceBuild {
+                                context: Some(
+                                    features_build_info.empty_context_dir.display().to_string(),
+                                ),
+                                dockerfile: Some(dockerfile_path.display().to_string()),
+                                args: Some(build_args),
+                                additional_contexts,
+                            }),
+                            volumes: Vec::new(),
+                            ..Default::default()
+                        },
+                    )]),
+                    volumes: HashMap::new(),
+                };
+
+                let temp_base = std::env::temp_dir().join("devcontainer-zed");
+                let config_location = temp_base.join("docker_compose_build.json");
+
+                let config_json = serde_json_lenient::to_string(&build_override).map_err(|e| {
+                    log::error!("Error serializing docker compose runtime override: {e}");
+                    DevContainerError::DevContainerParseFailed
+                })?;
+
+                self.fs
+                    .write(&config_location, config_json.as_bytes())
+                    .await
+                    .map_err(|e| {
+                        log::error!("Error writing the runtime override file: {e}");
+                        DevContainerError::FilesystemError
+                    })?;
+
+                docker_compose_resources.files.push(config_location);
+
+                self.docker_client
+                    .docker_compose_build(&docker_compose_resources.files, &self.project_name())
+                    .await?;
+
+                self.docker_client
+                    .inspect(&features_build_info.image_tag)
+                    .await?
+            }
+        } else {
+            log::error!("Docker compose must have either image or dockerfile defined");
+            return Err(DevContainerError::DevContainerParseFailed);
+        };
+
+        // Reuse the feature image tag so the UID-updated image replaces it.
+        let built_service_image = self
+            .update_remote_user_uid(built_service_image, Some(&features_build_info.image_tag))
+            .await?;
+
+        let resources = self.build_merged_resources(built_service_image)?;
+
+        // If the main service joins another service's network
+        // ("service:<name>"), port forwards must be declared on that service.
+        let network_mode = main_service.network_mode.as_ref();
+        let network_mode_service = network_mode.and_then(|mode| mode.strip_prefix("service:"));
+        let runtime_override_file = self
+            .write_runtime_override_file(&main_service_name, network_mode_service, resources)
+            .await?;
+
+        docker_compose_resources.files.push(runtime_override_file);
+
+        Ok(docker_compose_resources)
+    }
+
+    /// Serialize the runtime override compose config (built by
+    /// `build_runtime_override`) and write it to the devcontainer temp
+    /// directory, returning the written file's path.
+    ///
+    /// NOTE(review): uses a fixed path under the system temp dir — assumes
+    /// the directory exists and concurrent builds don't collide; confirm.
+    ///
+    /// # Errors
+    /// `DevContainerParseFailed` on serialization failure; `FilesystemError`
+    /// if the file cannot be written.
+    async fn write_runtime_override_file(
+        &self,
+        main_service_name: &str,
+        network_mode_service: Option<&str>,
+        resources: DockerBuildResources,
+    ) -> Result<PathBuf, DevContainerError> {
+        let config =
+            self.build_runtime_override(main_service_name, network_mode_service, resources)?;
+        let temp_base = std::env::temp_dir().join("devcontainer-zed");
+        let config_location = temp_base.join("docker_compose_runtime.json");
+
+        let config_json = serde_json_lenient::to_string(&config).map_err(|e| {
+            log::error!("Error serializing docker compose runtime override: {e}");
+            DevContainerError::DevContainerParseFailed
+        })?;
+
+        self.fs
+            .write(&config_location, config_json.as_bytes())
+            .await
+            .map_err(|e| {
+                log::error!("Error writing the runtime override file: {e}");
+                DevContainerError::FilesystemError
+            })?;
+
+        Ok(config_location)
+    }
+
+    /// Build the runtime override compose config for the primary service:
+    /// a `/bin/sh -c` entrypoint wrapper around the merged entrypoint script,
+    /// SYS_PTRACE + seccomp=unconfined, labels (serialized
+    /// `devcontainer.metadata` plus identifying labels), the merged mounts,
+    /// the privileged flag, and all forwarded ports. Ports are attached to
+    /// the network-providing service instead when the main service uses
+    /// `network_mode: service:<name>`.
+    ///
+    /// # Errors
+    /// `ContainerNotValid` if the image's metadata label cannot be
+    /// serialized.
+    fn build_runtime_override(
+        &self,
+        main_service_name: &str,
+        network_mode_service: Option<&str>,
+        resources: DockerBuildResources,
+    ) -> Result<DockerComposeConfig, DevContainerError> {
+        let mut runtime_labels = vec![];
+
+        // Propagate the image's devcontainer metadata onto the container.
+        if let Some(metadata) = &resources.image.config.labels.metadata {
+            let serialized_metadata = serde_json_lenient::to_string(metadata).map_err(|e| {
+                log::error!("Error serializing docker image metadata: {e}");
+                DevContainerError::ContainerNotValid(resources.image.id.clone())
+            })?;
+
+            runtime_labels.push(format!(
+                "{}={}",
+                "devcontainer.metadata", serialized_metadata
+            ));
+        }
+
+        for (k, v) in self.identifying_labels() {
+            runtime_labels.push(format!("{}={}", k, v));
+        }
+
+        // Volume-type mounts must also be declared at the compose top level.
+        let config_volumes: HashMap<String, DockerComposeVolume> = resources
+            .additional_mounts
+            .iter()
+            .filter_map(|mount| {
+                if let Some(mount_type) = &mount.mount_type
+                    && mount_type.to_lowercase() == "volume"
+                {
+                    Some((
+                        mount.source.clone(),
+                        DockerComposeVolume {
+                            name: mount.source.clone(),
+                        },
+                    ))
+                } else {
+                    None
+                }
+            })
+            .collect();
+
+        let volumes: Vec<MountDefinition> = resources
+            .additional_mounts
+            .iter()
+            .map(|v| MountDefinition {
+                source: v.source.clone(),
+                target: v.target.clone(),
+                mount_type: v.mount_type.clone(),
+            })
+            .collect();
+
+        let mut main_service = DockerComposeService {
+            // Run the merged entrypoint script through the shell; the
+            // trailing "-" becomes $0 so runtime args map onto "$@".
+            entrypoint: Some(vec![
+                "/bin/sh".to_string(),
+                "-c".to_string(),
+                resources.entrypoint_script,
+                "-".to_string(),
+            ]),
+            cap_add: Some(vec!["SYS_PTRACE".to_string()]),
+            security_opt: Some(vec!["seccomp=unconfined".to_string()]),
+            labels: Some(runtime_labels),
+            volumes,
+            privileged: Some(resources.privileged),
+            ..Default::default()
+        };
+        // let mut extra_service_port_declarations: Vec<(String, DockerComposeService)> = Vec::new();
+        let mut service_declarations: HashMap<String, DockerComposeService> = HashMap::new();
+        if let Some(forward_ports) = &self.dev_container().forward_ports {
+            // Ports for the main service: bare numbers, plain "port" strings,
+            // and "service:port" strings naming the main service.
+            let main_service_ports: Vec<String> = forward_ports
+                .iter()
+                .filter_map(|f| match f {
+                    ForwardPort::Number(port) => Some(port.to_string()),
+                    ForwardPort::String(port) => {
+                        let parts: Vec<&str> = port.split(":").collect();
+                        if parts.len() <= 1 {
+                            Some(port.to_string())
+                        } else if parts.len() == 2 {
+                            if parts[0] == main_service_name {
+                                Some(parts[1].to_string())
+                            } else {
+                                None
+                            }
+                        } else {
+                            None
+                        }
+                    }
+                })
+                .collect();
+            for port in main_service_ports {
+                // If the main service uses a different service's network bridge, append to that service's ports instead
+                if let Some(network_service_name) = network_mode_service {
+                    if let Some(service) = service_declarations.get_mut(network_service_name) {
+                        service.ports.push(format!("{port}:{port}"));
+                    } else {
+                        service_declarations.insert(
+                            network_service_name.to_string(),
+                            DockerComposeService {
+                                ports: vec![format!("{port}:{port}")],
+                                ..Default::default()
+                            },
+                        );
+                    }
+                } else {
+                    main_service.ports.push(format!("{port}:{port}"));
+                }
+            }
+            // "service:port" entries naming some other service get their own
+            // (or a merged) service declaration.
+            let other_service_ports: Vec<(&str, &str)> = forward_ports
+                .iter()
+                .filter_map(|f| match f {
+                    ForwardPort::Number(_) => None,
+                    ForwardPort::String(port) => {
+                        let parts: Vec<&str> = port.split(":").collect();
+                        if parts.len() != 2 {
+                            None
+                        } else {
+                            if parts[0] == main_service_name {
+                                None
+                            } else {
+                                Some((parts[0], parts[1]))
+                            }
+                        }
+                    }
+                })
+                .collect();
+            for (service_name, port) in other_service_ports {
+                if let Some(service) = service_declarations.get_mut(service_name) {
+                    service.ports.push(format!("{port}:{port}"));
+                } else {
+                    service_declarations.insert(
+                        service_name.to_string(),
+                        DockerComposeService {
+                            ports: vec![format!("{port}:{port}")],
+                            ..Default::default()
+                        },
+                    );
+                }
+            }
+        }
+        // appPort forwards follow the same network_mode redirection rule.
+        if let Some(port) = &self.dev_container().app_port {
+            if let Some(network_service_name) = network_mode_service {
+                if let Some(service) = service_declarations.get_mut(network_service_name) {
+                    service.ports.push(format!("{port}:{port}"));
+                } else {
+                    service_declarations.insert(
+                        network_service_name.to_string(),
+                        DockerComposeService {
+                            ports: vec![format!("{port}:{port}")],
+                            ..Default::default()
+                        },
+                    );
+                }
+            } else {
+                main_service.ports.push(format!("{port}:{port}"));
+            }
+        }
+
+        service_declarations.insert(main_service_name.to_string(), main_service);
+        let new_docker_compose_config = DockerComposeConfig {
+            name: None,
+            services: service_declarations,
+            volumes: config_volumes,
+        };
+
+        Ok(new_docker_compose_config)
+    }
+
+    /// Build (or reuse) the docker image for `Image` / `Dockerfile` configs.
+    /// An `Image` config with no features short-circuits to the inspected
+    /// base image; otherwise a feature-extended build is run and the newly
+    /// tagged image is inspected and returned.
+    ///
+    /// # Errors
+    /// `DevContainerParseFailed` for unparsed config, a compose/None build
+    /// type, or missing features build info; `CommandFailed` when the build
+    /// command errors or exits non-zero.
+    async fn build_docker_image(&self) -> Result<DockerInspect, DevContainerError> {
+        let dev_container = match &self.config {
+            ConfigStatus::Deserialized(_) => {
+                log::error!(
+                    "Dev container has not yet been parsed for variable expansion. Cannot yet build image"
+                );
+                return Err(DevContainerError::DevContainerParseFailed);
+            }
+            ConfigStatus::VariableParsed(dev_container) => dev_container,
+        };
+
+        match dev_container.build_type() {
+            DevContainerBuildType::Image => {
+                let Some(image_tag) = &dev_container.image else {
+                    return Err(DevContainerError::DevContainerParseFailed);
+                };
+                let base_image = self.docker_client.inspect(image_tag).await?;
+                if dev_container
+                    .features
+                    .as_ref()
+                    .is_none_or(|features| features.is_empty())
+                {
+                    log::debug!("No features to add. Using base image");
+                    return Ok(base_image);
+                }
+            }
+            DevContainerBuildType::Dockerfile => {}
+            DevContainerBuildType::DockerCompose | DevContainerBuildType::None => {
+                return Err(DevContainerError::DevContainerParseFailed);
+            }
+        };
+
+        // Feature build (or plain Dockerfile build) via the prepared command.
+        let mut command = self.create_docker_build()?;
+
+        let output = self
+            .command_runner
+            .run_command(&mut command)
+            .await
+            .map_err(|e| {
+                log::error!("Error building docker image: {e}");
+                DevContainerError::CommandFailed(command.get_program().display().to_string())
+            })?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            log::error!("docker buildx build failed: {stderr}");
+            return Err(DevContainerError::CommandFailed(
+                command.get_program().display().to_string(),
+            ));
+        }
+
+        // After a successful build, inspect the newly tagged image to get its metadata
+        let Some(features_build_info) = &self.features_build_info else {
+            log::error!("Features build info expected, but not created");
+            return Err(DevContainerError::DevContainerParseFailed);
+        };
+        let image = self
+            .docker_client
+            .inspect(&features_build_info.image_tag)
+            .await?;
+
+        Ok(image)
+    }
+
+    /// Windows stub: there is no host UID/GID to map into the container, so
+    /// the image is returned unchanged.
+    #[cfg(target_os = "windows")]
+    async fn update_remote_user_uid(
+        &self,
+        image: DockerInspect,
+        _override_tag: Option<&str>,
+    ) -> Result<DockerInspect, DevContainerError> {
+        Ok(image)
+    }
+    /// Rebuild the image so the configured remote user's UID/GID match the
+    /// host user's, per the devcontainers `updateRemoteUserUID` setting
+    /// (treated as enabled unless explicitly set to `false`).
+    ///
+    /// Returns the image unchanged when features build info is missing, the
+    /// setting is disabled, or the remote user is `root` / a purely numeric
+    /// id. The result is tagged `override_tag` when given, otherwise
+    /// `"<image_tag>-uid"`.
+    ///
+    /// # Errors
+    /// `CommandFailed` if `id -u` / `id -g` or the docker build fail;
+    /// `FilesystemError` if the generated Dockerfile cannot be written.
+    #[cfg(not(target_os = "windows"))]
+    async fn update_remote_user_uid(
+        &self,
+        image: DockerInspect,
+        override_tag: Option<&str>,
+    ) -> Result<DockerInspect, DevContainerError> {
+        let dev_container = self.dev_container();
+
+        let Some(features_build_info) = &self.features_build_info else {
+            return Ok(image);
+        };
+
+        // updateRemoteUserUID defaults to true per the devcontainers spec
+        if dev_container.update_remote_user_uid == Some(false) {
+            return Ok(image);
+        }
+
+        // root and numeric users are left alone — nothing sensible to remap.
+        let remote_user = get_remote_user_from_config(&image, self)?;
+        if remote_user == "root" || remote_user.chars().all(|c| c.is_ascii_digit()) {
+            return Ok(image);
+        }
+
+        // Preserve the image's configured USER so it can be restored after
+        // the root-level passwd/group edits.
+        let image_user = image
+            .config
+            .image_user
+            .as_deref()
+            .unwrap_or("root")
+            .to_string();
+
+        let host_uid = Command::new("id")
+            .arg("-u")
+            .output()
+            .await
+            .map_err(|e| {
+                log::error!("Failed to get host UID: {e}");
+                DevContainerError::CommandFailed("id -u".to_string())
+            })
+            .and_then(|output| {
+                String::from_utf8_lossy(&output.stdout)
+                    .trim()
+                    .parse::<u32>()
+                    .map_err(|e| {
+                        log::error!("Failed to parse host UID: {e}");
+                        DevContainerError::CommandFailed("id -u".to_string())
+                    })
+            })?;
+
+        let host_gid = Command::new("id")
+            .arg("-g")
+            .output()
+            .await
+            .map_err(|e| {
+                log::error!("Failed to get host GID: {e}");
+                DevContainerError::CommandFailed("id -g".to_string())
+            })
+            .and_then(|output| {
+                String::from_utf8_lossy(&output.stdout)
+                    .trim()
+                    .parse::<u32>()
+                    .map_err(|e| {
+                        log::error!("Failed to parse host GID: {e}");
+                        DevContainerError::CommandFailed("id -g".to_string())
+                    })
+            })?;
+
+        let dockerfile_content = self.generate_update_uid_dockerfile();
+
+        let dockerfile_path = features_build_info
+            .features_content_dir
+            .join("updateUID.Dockerfile");
+        self.fs
+            .write(&dockerfile_path, dockerfile_content.as_bytes())
+            .await
+            .map_err(|e| {
+                log::error!("Failed to write updateUID Dockerfile: {e}");
+                DevContainerError::FilesystemError
+            })?;
+
+        let updated_image_tag = override_tag
+            .map(|t| t.to_string())
+            .unwrap_or_else(|| format!("{}-uid", features_build_info.image_tag));
+
+        let mut command = Command::new(self.docker_client.docker_cli());
+        command.args(["build"]);
+        command.args(["-f", &dockerfile_path.display().to_string()]);
+        command.args(["-t", &updated_image_tag]);
+        command.args([
+            "--build-arg",
+            &format!("BASE_IMAGE={}", features_build_info.image_tag),
+        ]);
+        command.args(["--build-arg", &format!("REMOTE_USER={}", remote_user)]);
+        command.args(["--build-arg", &format!("NEW_UID={}", host_uid)]);
+        command.args(["--build-arg", &format!("NEW_GID={}", host_gid)]);
+        command.args(["--build-arg", &format!("IMAGE_USER={}", image_user)]);
+        command.arg(features_build_info.empty_context_dir.display().to_string());
+
+        let output = self
+            .command_runner
+            .run_command(&mut command)
+            .await
+            .map_err(|e| {
+                log::error!("Error building UID update image: {e}");
+                DevContainerError::CommandFailed(command.get_program().display().to_string())
+            })?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            log::error!("UID update build failed: {stderr}");
+            return Err(DevContainerError::CommandFailed(
+                command.get_program().display().to_string(),
+            ));
+        }
+
+        self.docker_client.inspect(&updated_image_tag).await
+    }
+
+    /// Generate the Dockerfile used by `update_remote_user_uid`: it rewrites
+    /// `/etc/passwd` and `/etc/group` so REMOTE_USER gets NEW_UID:NEW_GID
+    /// (reassigning a conflicting group id to a free one when needed), chowns
+    /// the user's home folder, and restores IMAGE_USER. Feature ENV layers
+    /// and top-level `containerEnv` entries are appended when present.
+    #[cfg(not(target_os = "windows"))]
+    fn generate_update_uid_dockerfile(&self) -> String {
+        let mut dockerfile = r#"ARG BASE_IMAGE
+FROM $BASE_IMAGE
+
+USER root
+
+ARG REMOTE_USER
+ARG NEW_UID
+ARG NEW_GID
+SHELL ["/bin/sh", "-c"]
+RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \
+	eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \
+	eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \
+	if [ -z "$OLD_UID" ]; then \
+		echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \
+	elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \
+		echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \
+	elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \
+		echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \
+	else \
+		if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \
+			FREE_GID=65532; \
+			while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \
+			echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \
+			sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \
+		fi; \
+		echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \
+		sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \
+		if [ "$OLD_GID" != "$NEW_GID" ]; then \
+			sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \
+		fi; \
+		chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \
+	fi;
+
+ARG IMAGE_USER
+USER $IMAGE_USER
+
+# Ensure that /etc/profile does not clobber the existing path
+RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
+"#.to_string();
+        // Append each feature's ENV layer, then top-level containerEnv vars.
+        // NOTE(review): the ENV lines below assume the preceding content ends
+        // with a newline — confirm `generate_dockerfile_env` emits one.
+        for feature in &self.features {
+            let container_env_layer = feature.generate_dockerfile_env();
+            dockerfile = format!("{dockerfile}\n{container_env_layer}");
+        }
+
+        if let Some(env) = &self.dev_container().container_env {
+            for (key, value) in env {
+                dockerfile = format!("{dockerfile}ENV {key}={value}\n");
+            }
+        }
+        dockerfile
+    }
+
+    async fn build_feature_content_image(&self) -> Result<(), DevContainerError> {
+        let Some(features_build_info) = &self.features_build_info else {
+            log::error!("Features build info not available for building feature content image");
+            return Err(DevContainerError::DevContainerParseFailed);
+        };
+        let features_content_dir = &features_build_info.features_content_dir;
+
+        let dockerfile_content = "FROM scratch\nCOPY . /tmp/build-features/\n";
+        let dockerfile_path = features_content_dir.join("Dockerfile.feature-content");
+
+        self.fs
+            .write(&dockerfile_path, dockerfile_content.as_bytes())
+            .await
+            .map_err(|e| {
+                log::error!("Failed to write feature content Dockerfile: {e}");
+                DevContainerError::FilesystemError
+            })?;
+
+        let mut command = Command::new(self.docker_client.docker_cli());
+        command.args([
+            "build",
+            "-t",
+            "dev_container_feature_content_temp",
+            "-f",
+            &dockerfile_path.display().to_string(),
+            &features_content_dir.display().to_string(),
+        ]);
+
+        let output = self
+            .command_runner
+            .run_command(&mut command)
+            .await
+            .map_err(|e| {
+                log::error!("Error building feature content image: {e}");
+                DevContainerError::CommandFailed(self.docker_client.docker_cli())
+            })?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            log::error!("Feature content image build failed: {stderr}");
+            return Err(DevContainerError::CommandFailed(
+                self.docker_client.docker_cli(),
+            ));
+        }
+
+        Ok(())
+    }
+
+    fn create_docker_build(&self) -> Result<Command, DevContainerError> {
+        let dev_container = match &self.config {
+            ConfigStatus::Deserialized(_) => {
+                log::error!(
+                    "Dev container has not yet been parsed for variable expansion. Cannot yet proceed with docker build"
+                );
+                return Err(DevContainerError::DevContainerParseFailed);
+            }
+            ConfigStatus::VariableParsed(dev_container) => dev_container,
+        };
+
+        let Some(features_build_info) = &self.features_build_info else {
+            log::error!(
+                "Cannot create docker build command; features build info has not been constructed"
+            );
+            return Err(DevContainerError::DevContainerParseFailed);
+        };
+        let mut command = Command::new(self.docker_client.docker_cli());
+
+        command.args(["buildx", "build"]);
+
+        // --load is short for --output=docker, loading the built image into the local docker images
+        command.arg("--load");
+
+        // BuildKit build context: provides the features content directory as a named context
+        // that the Dockerfile.extended can COPY from via `--from=dev_containers_feature_content_source`
+        command.args([
+            "--build-context",
+            &format!(
+                "dev_containers_feature_content_source={}",
+                features_build_info.features_content_dir.display()
+            ),
+        ]);
+
+        // Build args matching the CLI reference implementation's `getFeaturesBuildOptions`
+        if let Some(build_image) = &features_build_info.build_image {
+            command.args([
+                "--build-arg",
+                &format!("_DEV_CONTAINERS_BASE_IMAGE={}", build_image),
+            ]);
+        } else {
+            command.args([
+                "--build-arg",
+                "_DEV_CONTAINERS_BASE_IMAGE=dev_container_auto_added_stage_label",
+            ]);
+        }
+
+        command.args([
+            "--build-arg",
+            &format!(
+                "_DEV_CONTAINERS_IMAGE_USER={}",
+                self.root_image
+                    .as_ref()
+                    .and_then(|docker_image| docker_image.config.image_user.as_ref())
+                    .unwrap_or(&"root".to_string())
+            ),
+        ]);
+
+        command.args([
+            "--build-arg",
+            "_DEV_CONTAINERS_FEATURE_CONTENT_SOURCE=dev_container_feature_content_temp",
+        ]);
+
+        if let Some(args) = dev_container.build.as_ref().and_then(|b| b.args.as_ref()) {
+            for (key, value) in args {
+                command.args(["--build-arg", &format!("{}={}", key, value)]);
+            }
+        }
+
+        command.args(["--target", "dev_containers_target_stage"]);
+
+        command.args([
+            "-f",
+            &features_build_info.dockerfile_path.display().to_string(),
+        ]);
+
+        command.args(["-t", &features_build_info.image_tag]);
+
+        if dev_container.build_type() == DevContainerBuildType::Dockerfile {
+            command.arg(self.config_directory.display().to_string());
+        } else {
+            // Use an empty folder as the build context to avoid pulling in unneeded files.
+            // The actual feature content is supplied via the BuildKit build context above.
+            command.arg(features_build_info.empty_context_dir.display().to_string());
+        }
+
+        Ok(command)
+    }
+
+    async fn run_docker_compose(
+        &self,
+        resources: DockerComposeResources,
+    ) -> Result<DockerInspect, DevContainerError> {
+        let mut command = Command::new(self.docker_client.docker_cli());
+        command.args(&["compose", "--project-name", &self.project_name()]);
+        for docker_compose_file in resources.files {
+            command.args(&["-f", &docker_compose_file.display().to_string()]);
+        }
+        command.args(&["up", "-d"]);
+
+        let output = self
+            .command_runner
+            .run_command(&mut command)
+            .await
+            .map_err(|e| {
+                log::error!("Error running docker compose up: {e}");
+                DevContainerError::CommandFailed(command.get_program().display().to_string())
+            })?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            log::error!("Non-success status from docker compose up: {}", stderr);
+            return Err(DevContainerError::CommandFailed(
+                command.get_program().display().to_string(),
+            ));
+        }
+
+        if let Some(docker_ps) = self.check_for_existing_container().await? {
+            log::debug!("Found newly created dev container");
+            return self.docker_client.inspect(&docker_ps.id).await;
+        }
+
+        log::error!("Could not find existing container after docker compose up");
+
+        Err(DevContainerError::DevContainerParseFailed)
+    }
+
+    async fn run_docker_image(
+        &self,
+        build_resources: DockerBuildResources,
+    ) -> Result<DockerInspect, DevContainerError> {
+        let mut docker_run_command = self.create_docker_run_command(build_resources)?;
+
+        let output = self
+            .command_runner
+            .run_command(&mut docker_run_command)
+            .await
+            .map_err(|e| {
+                log::error!("Error running docker run: {e}");
+                DevContainerError::CommandFailed(
+                    docker_run_command.get_program().display().to_string(),
+                )
+            })?;
+
+        if !output.status.success() {
+            let std_err = String::from_utf8_lossy(&output.stderr);
+            log::error!("Non-success status from docker run. StdErr: {std_err}");
+            return Err(DevContainerError::CommandFailed(
+                docker_run_command.get_program().display().to_string(),
+            ));
+        }
+
+        log::debug!("Checking for container that was started");
+        let Some(docker_ps) = self.check_for_existing_container().await? else {
+            log::error!("Could not locate container just created");
+            return Err(DevContainerError::DevContainerParseFailed);
+        };
+        self.docker_client.inspect(&docker_ps.id).await
+    }
+
    /// The host-side project directory as a display string.
    fn local_workspace_folder(&self) -> String {
        self.local_project_directory.display().to_string()
    }
+    fn local_workspace_base_name(&self) -> Result<String, DevContainerError> {
+        self.local_project_directory
+            .file_name()
+            .map(|f| f.display().to_string())
+            .ok_or(DevContainerError::DevContainerParseFailed)
+    }
+
+    fn remote_workspace_folder(&self) -> Result<PathBuf, DevContainerError> {
+        self.dev_container()
+            .workspace_folder
+            .as_ref()
+            .map(|folder| PathBuf::from(folder))
+            .or(Some(
+                PathBuf::from(DEFAULT_REMOTE_PROJECT_DIR).join(self.local_workspace_base_name()?),
+            ))
+            .ok_or(DevContainerError::DevContainerParseFailed)
+    }
+    fn remote_workspace_base_name(&self) -> Result<String, DevContainerError> {
+        self.remote_workspace_folder().and_then(|f| {
+            f.file_name()
+                .map(|file_name| file_name.display().to_string())
+                .ok_or(DevContainerError::DevContainerParseFailed)
+        })
+    }
+
+    fn remote_workspace_mount(&self) -> Result<MountDefinition, DevContainerError> {
+        if let Some(mount) = &self.dev_container().workspace_mount {
+            return Ok(mount.clone());
+        }
+        let Some(project_directory_name) = self.local_project_directory.file_name() else {
+            return Err(DevContainerError::DevContainerParseFailed);
+        };
+
+        Ok(MountDefinition {
+            source: self.local_workspace_folder(),
+            target: format!("/workspaces/{}", project_directory_name.display()),
+            mount_type: None,
+        })
+    }
+
    /// Assembles the `docker run` command that starts the dev container:
    /// detached, with the workspace and any additional mounts, identifying
    /// labels, forwarded ports, and the entrypoint script handed to
    /// `/bin/sh -c`.
    fn create_docker_run_command(
        &self,
        build_resources: DockerBuildResources,
    ) -> Result<Command, DevContainerError> {
        let remote_workspace_mount = self.remote_workspace_mount()?;

        let docker_cli = self.docker_client.docker_cli();
        let mut command = Command::new(&docker_cli);

        command.arg("run");

        if build_resources.privileged {
            command.arg("--privileged");
        }

        // Podman-only flags; presumably so bind mounts stay accessible under
        // SELinux and rootless user namespaces — TODO confirm.
        if &docker_cli == "podman" {
            command.args(&["--security-opt", "label=disable", "--userns=keep-id"]);
        }

        // Detached, without proxying signals to the container process.
        command.arg("--sig-proxy=false");
        command.arg("-d");
        command.arg("--mount");
        command.arg(remote_workspace_mount.to_string());

        for mount in &build_resources.additional_mounts {
            command.arg("--mount");
            command.arg(mount.to_string());
        }

        // Labels later used to find this container again
        // (see check_for_existing_container).
        for (key, val) in self.identifying_labels() {
            command.arg("-l");
            command.arg(format!("{}={}", key, val));
        }

        // Propagate the image's devcontainer metadata onto the container as a
        // JSON-serialized label.
        if let Some(metadata) = &build_resources.image.config.labels.metadata {
            let serialized_metadata = serde_json_lenient::to_string(metadata).map_err(|e| {
                log::error!("Problem serializing image metadata: {e}");
                DevContainerError::ContainerNotValid(build_resources.image.id.clone())
            })?;
            command.arg("-l");
            command.arg(format!(
                "{}={}",
                "devcontainer.metadata", serialized_metadata
            ));
        }

        // Publish numeric forwardPorts 1:1; non-numeric variants (e.g.
        // "host:container" strings) are not handled here.
        if let Some(forward_ports) = &self.dev_container().forward_ports {
            for port in forward_ports {
                if let ForwardPort::Number(port_number) = port {
                    command.arg("-p");
                    command.arg(format!("{port_number}:{port_number}"));
                }
            }
        }
        if let Some(app_port) = &self.dev_container().app_port {
            command.arg("-p");
            command.arg(format!("{app_port}:{app_port}"));
        }

        // Override the image entrypoint: run the generated entrypoint script
        // through /bin/sh -c, with "-" as the script's positional $0.
        command.arg("--entrypoint");
        command.arg("/bin/sh");
        command.arg(&build_resources.image.id);
        command.arg("-c");

        command.arg(build_resources.entrypoint_script);
        command.arg("-");

        Ok(command)
    }
+
+    fn extension_ids(&self) -> Vec<String> {
+        self.dev_container()
+            .customizations
+            .as_ref()
+            .map(|c| c.zed.extensions.clone())
+            .unwrap_or_default()
+    }
+
+    async fn build_and_run(&mut self) -> Result<DevContainerUp, DevContainerError> {
+        self.run_initialize_commands().await?;
+
+        self.download_feature_and_dockerfile_resources().await?;
+
+        let build_resources = self.build_resources().await?;
+
+        let devcontainer_up = self.run_dev_container(build_resources).await?;
+
+        self.run_remote_scripts(&devcontainer_up, true).await?;
+
+        Ok(devcontainer_up)
+    }
+
    /// Runs the configured lifecycle scripts inside the container via
    /// `docker exec`.
    ///
    /// When `new_container` is true the create-time hooks run first, in
    /// order: `onCreateCommand` and `updateContentCommand` as root, then
    /// `postCreateCommand` and `postStartCommand` as the remote user.
    /// `postAttachCommand` runs on every call, new container or not.
    async fn run_remote_scripts(
        &self,
        devcontainer_up: &DevContainerUp,
        new_container: bool,
    ) -> Result<(), DevContainerError> {
        let ConfigStatus::VariableParsed(config) = &self.config else {
            log::error!("Config not yet parsed, cannot proceed with remote scripts");
            return Err(DevContainerError::DevContainerScriptsFailed);
        };
        let remote_folder = self.remote_workspace_folder()?.display().to_string();

        if new_container {
            // onCreate and updateContent run as root.
            if let Some(on_create_command) = &config.on_create_command {
                for (command_name, command) in on_create_command.script_commands() {
                    log::debug!("Running on create command {command_name}");
                    self.docker_client
                        .run_docker_exec(
                            &devcontainer_up.container_id,
                            &remote_folder,
                            "root",
                            &devcontainer_up.remote_env,
                            command,
                        )
                        .await?;
                }
            }
            if let Some(update_content_command) = &config.update_content_command {
                for (command_name, command) in update_content_command.script_commands() {
                    log::debug!("Running update content command {command_name}");
                    self.docker_client
                        .run_docker_exec(
                            &devcontainer_up.container_id,
                            &remote_folder,
                            "root",
                            &devcontainer_up.remote_env,
                            command,
                        )
                        .await?;
                }
            }

            // postCreate and postStart run as the resolved remote user.
            if let Some(post_create_command) = &config.post_create_command {
                for (command_name, command) in post_create_command.script_commands() {
                    log::debug!("Running post create command {command_name}");
                    self.docker_client
                        .run_docker_exec(
                            &devcontainer_up.container_id,
                            &remote_folder,
                            &devcontainer_up.remote_user,
                            &devcontainer_up.remote_env,
                            command,
                        )
                        .await?;
                }
            }
            if let Some(post_start_command) = &config.post_start_command {
                for (command_name, command) in post_start_command.script_commands() {
                    log::debug!("Running post start command {command_name}");
                    self.docker_client
                        .run_docker_exec(
                            &devcontainer_up.container_id,
                            &remote_folder,
                            &devcontainer_up.remote_user,
                            &devcontainer_up.remote_env,
                            command,
                        )
                        .await?;
                }
            }
        }
        // postAttach runs for both new and reused containers.
        if let Some(post_attach_command) = &config.post_attach_command {
            for (command_name, command) in post_attach_command.script_commands() {
                log::debug!("Running post attach command {command_name}");
                self.docker_client
                    .run_docker_exec(
                        &devcontainer_up.container_id,
                        &remote_folder,
                        &devcontainer_up.remote_user,
                        &devcontainer_up.remote_env,
                        command,
                    )
                    .await?;
            }
        }

        Ok(())
    }
+
+    async fn run_initialize_commands(&self) -> Result<(), DevContainerError> {
+        let ConfigStatus::VariableParsed(config) = &self.config else {
+            log::error!("Config not yet parsed, cannot proceed with initializeCommand");
+            return Err(DevContainerError::DevContainerParseFailed);
+        };
+
+        if let Some(initialize_command) = &config.initialize_command {
+            log::debug!("Running initialize command");
+            initialize_command
+                .run(&self.command_runner, &self.local_project_directory)
+                .await
+        } else {
+            log::warn!("No initialize command found");
+            Ok(())
+        }
+    }
+
+    async fn check_for_existing_devcontainer(
+        &self,
+    ) -> Result<Option<DevContainerUp>, DevContainerError> {
+        if let Some(docker_ps) = self.check_for_existing_container().await? {
+            log::debug!("Dev container already found. Proceeding with it");
+
+            let docker_inspect = self.docker_client.inspect(&docker_ps.id).await?;
+
+            if !docker_inspect.is_running() {
+                log::debug!("Container not running. Will attempt to start, and then proceed");
+                self.docker_client.start_container(&docker_ps.id).await?;
+            }
+
+            let remote_user = get_remote_user_from_config(&docker_inspect, self)?;
+
+            let remote_folder = get_remote_dir_from_config(
+                &docker_inspect,
+                (&self.local_project_directory.display()).to_string(),
+            )?;
+
+            let remote_env = self.runtime_remote_env(&docker_inspect.config.env_as_map()?)?;
+
+            let dev_container_up = DevContainerUp {
+                container_id: docker_ps.id,
+                remote_user: remote_user,
+                remote_workspace_folder: remote_folder,
+                extension_ids: self.extension_ids(),
+                remote_env,
+            };
+
+            self.run_remote_scripts(&dev_container_up, false).await?;
+
+            Ok(Some(dev_container_up))
+        } else {
+            log::debug!("Existing container not found.");
+
+            Ok(None)
+        }
+    }
+
+    async fn check_for_existing_container(&self) -> Result<Option<DockerPs>, DevContainerError> {
+        self.docker_client
+            .find_process_by_filters(
+                self.identifying_labels()
+                    .iter()
+                    .map(|(k, v)| format!("label={k}={v}"))
+                    .collect(),
+            )
+            .await
+    }
+
+    fn project_name(&self) -> String {
+        if let Some(name) = &self.dev_container().name {
+            safe_id_lower(name)
+        } else {
+            let alternate_name = &self
+                .local_workspace_base_name()
+                .unwrap_or(self.local_workspace_folder());
+            safe_id_lower(alternate_name)
+        }
+    }
+}
+
/// Holds all the information needed to construct a `docker buildx build` command
/// that extends a base image with dev container features.
///
/// This mirrors the `ImageBuildOptions` interface in the CLI reference implementation
/// (cli/src/spec-node/containerFeatures.ts).
#[derive(Debug, Eq, PartialEq)]
pub(crate) struct FeaturesBuildInfo {
    /// Path to the generated Dockerfile.extended
    pub dockerfile_path: PathBuf,
    /// Path to the features content directory (used as a BuildKit build context)
    pub features_content_dir: PathBuf,
    /// Path to an empty directory used as the Docker build context, kept
    /// empty so no local files leak into the build context
    pub empty_context_dir: PathBuf,
    /// The base image name (e.g. "mcr.microsoft.com/devcontainers/rust:2-1-bookworm");
    /// `None` when the base stage is referenced by label instead
    pub build_image: Option<String>,
    /// The tag to apply to the built image (e.g. "vsc-myproject-features")
    pub image_tag: String,
}
+
+pub(crate) async fn read_devcontainer_configuration(
+    config: DevContainerConfig,
+    context: &DevContainerContext,
+    environment: HashMap<String, String>,
+) -> Result<DevContainer, DevContainerError> {
+    let docker = if context.use_podman {
+        Docker::new("podman")
+    } else {
+        Docker::new("docker")
+    };
+    let mut dev_container = DevContainerManifest::new(
+        context,
+        environment,
+        Arc::new(docker),
+        Arc::new(DefaultCommandRunner::new()),
+        config,
+        &context.project_directory.as_ref(),
+    )
+    .await?;
+    dev_container.parse_nonremote_vars()?;
+    Ok(dev_container.dev_container().clone())
+}
+
+pub(crate) async fn spawn_dev_container(
+    context: &DevContainerContext,
+    environment: HashMap<String, String>,
+    config: DevContainerConfig,
+    local_project_path: &Path,
+) -> Result<DevContainerUp, DevContainerError> {
+    let docker = if context.use_podman {
+        Docker::new("podman")
+    } else {
+        Docker::new("docker")
+    };
+    let mut devcontainer_manifest = DevContainerManifest::new(
+        context,
+        environment,
+        Arc::new(docker),
+        Arc::new(DefaultCommandRunner::new()),
+        config,
+        local_project_path,
+    )
+    .await?;
+
+    devcontainer_manifest.parse_nonremote_vars()?;
+
+    log::debug!("Checking for existing container");
+    if let Some(devcontainer) = devcontainer_manifest
+        .check_for_existing_devcontainer()
+        .await?
+    {
+        Ok(devcontainer)
+    } else {
+        log::debug!("Existing container not found. Building");
+
+        devcontainer_manifest.build_and_run().await
+    }
+}
+
/// Everything needed to `docker run` a built image.
#[derive(Debug)]
struct DockerBuildResources {
    /// Inspect data for the image to run (id, labels, metadata).
    image: DockerInspect,
    /// Extra mounts beyond the workspace mount.
    additional_mounts: Vec<MountDefinition>,
    /// Whether to pass `--privileged`.
    privileged: bool,
    /// Shell script handed to `/bin/sh -c` as the container entrypoint.
    entrypoint_script: String,
}
+
/// Discriminates how the container is brought up: via docker compose, or via
/// a plain `docker run` of a (possibly just-built) image.
#[derive(Debug)]
enum DevContainerBuildResources {
    /// The config names compose file(s) and a service.
    DockerCompose(DockerComposeResources),
    /// A single image to run directly.
    Docker(DockerBuildResources),
}
+
+fn find_primary_service(
+    docker_compose: &DockerComposeResources,
+    devcontainer: &DevContainerManifest,
+) -> Result<(String, DockerComposeService), DevContainerError> {
+    let Some(service_name) = &devcontainer.dev_container().service else {
+        return Err(DevContainerError::DevContainerParseFailed);
+    };
+
+    match docker_compose.config.services.get(service_name) {
+        Some(service) => Ok((service_name.clone(), service.clone())),
+        None => Err(DevContainerError::DevContainerParseFailed),
+    }
+}
+
/// Destination folder inside the container where feature content is staged during build.
/// Mirrors the CLI's `FEATURES_CONTAINER_TEMP_DEST_FOLDER`.
/// NOTE(review): the staging image in `build_feature_content_image` copies to
/// `/tmp/build-features/` instead — confirm the two paths are intentionally
/// different.
const FEATURES_CONTAINER_TEMP_DEST_FOLDER: &str = "/tmp/dev-container-features";
+
/// Escapes regex special characters in a string.
///
/// Every character from `.*+?^${}()|[]\` is prefixed with a backslash; all
/// other characters pass through unchanged.
fn escape_regex_chars(input: &str) -> String {
    input.chars().fold(
        // Worst case every character needs escaping, so reserve double.
        String::with_capacity(input.len() * 2),
        |mut escaped, c| {
            if ".*+?^${}()|[]\\".contains(c) {
                escaped.push('\\');
            }
            escaped.push(c);
            escaped
        },
    )
}
+
/// Extracts the short feature ID from a full feature reference string.
///
/// Examples:
/// - `ghcr.io/devcontainers/features/aws-cli:1` β†’ `aws-cli`
/// - `ghcr.io/user/repo/go` β†’ `go`
/// - `ghcr.io/devcontainers/features/rust@sha256:abc` β†’ `rust`
/// - `./myFeature` β†’ `myFeature`
fn extract_feature_id(feature_ref: &str) -> &str {
    // Strip a digest (`@...`) or a version tag (`:1`). A colon only counts
    // as a version separator when it appears after the final slash, so
    // colons earlier in the reference are left alone.
    let without_version = match feature_ref.rfind('@') {
        Some(at) => &feature_ref[..at],
        None => match (feature_ref.rfind('/'), feature_ref.rfind(':')) {
            (Some(slash), Some(colon)) if colon > slash => &feature_ref[..colon],
            _ => feature_ref,
        },
    };
    // Keep only the final path segment.
    match without_version.rsplit('/').next() {
        Some(segment) => segment,
        None => without_version,
    }
}
+
+/// Generates a shell command that looks up a user's passwd entry.
+///
+/// Mirrors the CLI's `getEntPasswdShellCommand` in `commonUtils.ts`.
+/// Tries `getent passwd` first, then falls back to grepping `/etc/passwd`.
+fn get_ent_passwd_shell_command(user: &str) -> String {
+    let escaped_for_shell = user.replace('\\', "\\\\").replace('\'', "\\'");
+    let escaped_for_regex = escape_regex_chars(user).replace('\'', "\\'");
+    format!(
+        " (command -v getent >/dev/null 2>&1 && getent passwd '{shell}' || grep -E '^{re}|^[^:]*:[^:]*:{re}:' /etc/passwd || true)",
+        shell = escaped_for_shell,
+        re = escaped_for_regex,
+    )
+}
+
+/// Determines feature installation order, respecting `overrideFeatureInstallOrder`.
+///
+/// Features listed in the override come first (in the specified order), followed
+/// by any remaining features sorted lexicographically by their full reference ID.
+fn resolve_feature_order<'a>(
+    features: &'a HashMap<String, FeatureOptions>,
+    override_order: &Option<Vec<String>>,
+) -> Vec<(&'a String, &'a FeatureOptions)> {
+    if let Some(order) = override_order {
+        let mut ordered: Vec<(&'a String, &'a FeatureOptions)> = Vec::new();
+        for ordered_id in order {
+            if let Some((key, options)) = features.get_key_value(ordered_id) {
+                ordered.push((key, options));
+            }
+        }
+        let mut remaining: Vec<_> = features
+            .iter()
+            .filter(|(id, _)| !order.iter().any(|o| o == *id))
+            .collect();
+        remaining.sort_by_key(|(id, _)| id.as_str());
+        ordered.extend(remaining);
+        ordered
+    } else {
+        let mut entries: Vec<_> = features.iter().collect();
+        entries.sort_by_key(|(id, _)| id.as_str());
+        entries
+    }
+}
+
/// Generates the `devcontainer-features-install.sh` wrapper script for one feature.
///
/// Mirrors the CLI's `getFeatureInstallWrapperScript` in
/// `containerFeaturesConfiguration.ts`.
///
/// `env_variables` is the feature's option assignments, one per line; they
/// are echoed (indented) in the script's banner.
fn generate_install_wrapper(
    feature_ref: &str,
    feature_id: &str,
    env_variables: &str,
) -> Result<String, DevContainerError> {
    // Shell-quote everything interpolated into the script so feature names
    // and refs cannot break out of the generated sh.
    let escaped_id = shlex::try_quote(feature_ref).map_err(|e| {
        log::error!("Error escaping feature ref {feature_ref}: {e}");
        DevContainerError::DevContainerParseFailed
    })?;
    let escaped_name = shlex::try_quote(feature_id).map_err(|e| {
        log::error!("Error escaping feature {feature_id}: {e}");
        DevContainerError::DevContainerParseFailed
    })?;
    // Indent each non-empty option line for the banner.
    let options_indented: String = env_variables
        .lines()
        .filter(|l| !l.is_empty())
        .map(|l| format!("    {}", l))
        .collect::<Vec<_>>()
        .join("\n");
    let escaped_options = shlex::try_quote(&options_indented).map_err(|e| {
        log::error!("Error escaping options {options_indented}: {e}");
        DevContainerError::DevContainerParseFailed
    })?;

    let script = format!(
        r#"#!/bin/sh
set -e

on_exit () {{
    [ $? -eq 0 ] && exit
    echo 'ERROR: Feature "{escaped_name}" ({escaped_id}) failed to install!'
}}

trap on_exit EXIT

echo ===========================================================================
echo 'Feature       : {escaped_name}'
echo 'Id            : {escaped_id}'
echo 'Options       :'
echo {escaped_options}
echo ===========================================================================

set -a
. ../devcontainer-features.builtin.env
. ./devcontainer-features.env
set +a

chmod +x ./install.sh
./install.sh
"#
    );

    Ok(script)
}
+
// Dockerfile actions need to be moved to their own file
/// Returns the build-stage alias from a Dockerfile's first `FROM` line
/// (e.g. `FROM ubuntu AS base` β†’ `Some("base")`), or `None` when that line
/// has no `AS <alias>` suffix (or there is no `FROM` line at all).
fn dockerfile_alias(dockerfile_content: &str) -> Option<String> {
    let from_line = dockerfile_content
        .lines()
        .find(|line| line.starts_with("FROM"))?;
    let words: Vec<&str> = from_line.split(' ').collect();
    // An alias is present when the penultimate word is `AS` (case-insensitive).
    if words.len() > 2 && words[words.len() - 2].eq_ignore_ascii_case("as") {
        Some(words[words.len() - 1].to_string())
    } else {
        None
    }
}
+
+fn dockerfile_inject_alias(dockerfile_content: &str, alias: &str) -> String {
+    if dockerfile_alias(dockerfile_content).is_some() {
+        dockerfile_content.to_string()
+    } else {
+        dockerfile_content
+            .lines()
+            .map(|line| {
+                if line.starts_with("FROM") {
+                    format!("{} AS {}", line, alias)
+                } else {
+                    line.to_string()
+                }
+            })
+            .collect::<Vec<String>>()
+            .join("\n")
+    }
+}
+
+fn image_from_dockerfile(
+    devcontainer: &DevContainerManifest,
+    dockerfile_contents: String,
+) -> Result<String, DevContainerError> {
+    let mut raw_contents = dockerfile_contents
+        .lines()
+        .find(|line| line.starts_with("FROM"))
+        .and_then(|from_line| {
+            from_line
+                .split(' ')
+                .collect::<Vec<&str>>()
+                .get(1)
+                .map(|s| s.to_string())
+        })
+        .ok_or_else(|| {
+            log::error!("Could not find an image definition in dockerfile");
+            DevContainerError::DevContainerParseFailed
+        })?;
+
+    for (k, v) in devcontainer
+        .dev_container()
+        .build
+        .as_ref()
+        .and_then(|b| b.args.as_ref())
+        .unwrap_or(&HashMap::new())
+    {
+        raw_contents = raw_contents.replace(&format!("${{{}}}", k), v);
+    }
+    Ok(raw_contents)
+}
+
+// Container user things
+// This should come from spec - see the docs
+fn get_remote_user_from_config(
+    docker_config: &DockerInspect,
+    devcontainer: &DevContainerManifest,
+) -> Result<String, DevContainerError> {
+    if let DevContainer {
+        remote_user: Some(user),
+        ..
+    } = &devcontainer.dev_container()
+    {
+        return Ok(user.clone());
+    }
+    let Some(metadata) = &docker_config.config.labels.metadata else {
+        log::error!("Could not locate metadata");
+        return Err(DevContainerError::ContainerNotValid(
+            docker_config.id.clone(),
+        ));
+    };
+    for metadatum in metadata {
+        if let Some(remote_user) = metadatum.get("remoteUser") {
+            if let Some(remote_user_str) = remote_user.as_str() {
+                return Ok(remote_user_str.to_string());
+            }
+        }
+    }
+    log::error!("Could not locate the remote user");
+    Err(DevContainerError::ContainerNotValid(
+        docker_config.id.clone(),
+    ))
+}
+
+// This should come from spec - see the docs
+fn get_container_user_from_config(
+    docker_config: &DockerInspect,
+    devcontainer: &DevContainerManifest,
+) -> Result<String, DevContainerError> {
+    if let Some(user) = &devcontainer.dev_container().container_user {
+        return Ok(user.to_string());
+    }
+    if let Some(metadata) = &docker_config.config.labels.metadata {
+        for metadatum in metadata {
+            if let Some(container_user) = metadatum.get("containerUser") {
+                if let Some(container_user_str) = container_user.as_str() {
+                    return Ok(container_user_str.to_string());
+                }
+            }
+        }
+    }
+    if let Some(image_user) = &docker_config.config.image_user {
+        return Ok(image_user.to_string());
+    }
+
+    Err(DevContainerError::DevContainerParseFailed)
+}
+
+#[cfg(test)]
+mod test {
+    use std::{
+        collections::HashMap,
+        ffi::OsStr,
+        path::PathBuf,
+        process::{ExitStatus, Output},
+        sync::{Arc, Mutex},
+    };
+
+    use async_trait::async_trait;
+    use fs::{FakeFs, Fs};
+    use gpui::{AppContext, TestAppContext};
+    use http_client::{AsyncBody, FakeHttpClient, HttpClient};
+    use project::{
+        ProjectEnvironment,
+        worktree_store::{WorktreeIdCounter, WorktreeStore},
+    };
+    use serde_json_lenient::Value;
+    use util::{command::Command, paths::SanitizedPath};
+
+    use crate::{
+        DevContainerConfig, DevContainerContext,
+        command_json::CommandRunner,
+        devcontainer_api::DevContainerError,
+        devcontainer_json::MountDefinition,
+        devcontainer_manifest::{
+            ConfigStatus, DevContainerManifest, DockerBuildResources, DockerComposeResources,
+            DockerInspect, extract_feature_id, find_primary_service, get_remote_user_from_config,
+        },
+        docker::{
+            DockerClient, DockerComposeConfig, DockerComposeService, DockerComposeServiceBuild,
+            DockerComposeVolume, DockerConfigLabels, DockerInspectConfig, DockerInspectMount,
+            DockerPs,
+        },
+        oci::TokenResponse,
+    };
    /// Absolute path used as the fake project root throughout this module.
    const TEST_PROJECT_PATH: &str = "/path/to/local/project";
+
+    async fn build_tarball(content: Vec<(&str, &str)>) -> Vec<u8> {
+        let buffer = futures::io::Cursor::new(Vec::new());
+        let mut builder = async_tar::Builder::new(buffer);
+        for (file_name, content) in content {
+            if content.is_empty() {
+                let mut header = async_tar::Header::new_gnu();
+                header.set_size(0);
+                header.set_mode(0o755);
+                header.set_entry_type(async_tar::EntryType::Directory);
+                header.set_cksum();
+                builder
+                    .append_data(&mut header, file_name, &[] as &[u8])
+                    .await
+                    .unwrap();
+            } else {
+                let data = content.as_bytes();
+                let mut header = async_tar::Header::new_gnu();
+                header.set_size(data.len() as u64);
+                header.set_mode(0o755);
+                header.set_entry_type(async_tar::EntryType::Regular);
+                header.set_cksum();
+                builder
+                    .append_data(&mut header, file_name, data)
+                    .await
+                    .unwrap();
+            }
+        }
+        let buffer = builder.into_inner().await.unwrap();
+        buffer.into_inner()
+    }
+
+    fn test_project_filename() -> String {
+        PathBuf::from(TEST_PROJECT_PATH)
+            .file_name()
+            .expect("is valid")
+            .display()
+            .to_string()
+    }
+
+    async fn init_devcontainer_config(
+        fs: &Arc<FakeFs>,
+        devcontainer_contents: &str,
+    ) -> DevContainerConfig {
+        fs.insert_tree(
+            format!("{TEST_PROJECT_PATH}/.devcontainer"),
+            serde_json::json!({"devcontainer.json": devcontainer_contents}),
+        )
+        .await;
+
+        DevContainerConfig::default_config()
+    }
+
    /// Handles to the fake dependencies backing a test manifest, returned
    /// alongside it so tests can inspect or drive the fakes afterwards.
    struct TestDependencies {
        fs: Arc<FakeFs>,
        // Kept alive for the duration of the test even though no test
        // reads it directly (hence the underscore prefix).
        _http_client: Arc<dyn HttpClient>,
        docker: Arc<FakeDocker>,
        command_runner: Arc<TestCommandRunner>,
    }
+
+    async fn init_default_devcontainer_manifest(
+        cx: &mut TestAppContext,
+        devcontainer_contents: &str,
+    ) -> Result<(TestDependencies, DevContainerManifest), DevContainerError> {
+        let fs = FakeFs::new(cx.executor());
+        let http_client = fake_http_client();
+        let command_runner = Arc::new(TestCommandRunner::new());
+        let docker = Arc::new(FakeDocker::new());
+        let environment = HashMap::new();
+
+        init_devcontainer_manifest(
+            cx,
+            fs,
+            http_client,
+            docker,
+            command_runner,
+            environment,
+            devcontainer_contents,
+        )
+        .await
+    }
+
    /// Builds a `DevContainerManifest` against fully fake dependencies.
    ///
    /// Writes `devcontainer_contents` into the fake filesystem, assembles
    /// a `DevContainerContext` rooted at `TEST_PROJECT_PATH`, and returns
    /// both the manifest and a `TestDependencies` bundle holding clones of
    /// the fakes so callers can inspect them after construction.
    async fn init_devcontainer_manifest(
        cx: &mut TestAppContext,
        fs: Arc<FakeFs>,
        http_client: Arc<dyn HttpClient>,
        docker_client: Arc<FakeDocker>,
        command_runner: Arc<TestCommandRunner>,
        environment: HashMap<String, String>,
        devcontainer_contents: &str,
    ) -> Result<(TestDependencies, DevContainerManifest), DevContainerError> {
        let local_config = init_devcontainer_config(&fs, devcontainer_contents).await;
        let project_path = SanitizedPath::new_arc(&PathBuf::from(TEST_PROJECT_PATH));
        // A local (non-remote) worktree store with no worktrees; the
        // project environment is built on top of it.
        let worktree_store =
            cx.new(|_cx| WorktreeStore::local(false, fs.clone(), WorktreeIdCounter::default()));
        let project_environment =
            cx.new(|cx| ProjectEnvironment::new(None, worktree_store.downgrade(), None, false, cx));

        let context = DevContainerContext {
            project_directory: SanitizedPath::cast_arc(project_path),
            use_podman: false,
            fs: fs.clone(),
            http_client: http_client.clone(),
            environment: project_environment.downgrade(),
        };

        // Clone the fakes into the bundle before handing them to the
        // manifest, which takes ownership of its copies.
        let test_dependencies = TestDependencies {
            fs: fs.clone(),
            _http_client: http_client.clone(),
            docker: docker_client.clone(),
            command_runner: command_runner.clone(),
        };
        let manifest = DevContainerManifest::new(
            &context,
            environment,
            docker_client,
            command_runner,
            local_config,
            &PathBuf::from(TEST_PROJECT_PATH),
        )
        .await?;

        Ok((test_dependencies, manifest))
    }
+
    // The `remoteUser` declared in devcontainer.json must take precedence
    // over any user found in the container's metadata label.
    #[gpui::test]
    async fn should_get_remote_user_from_devcontainer_if_available(cx: &mut TestAppContext) {
        let (_, devcontainer_manifest) = init_default_devcontainer_manifest(
            cx,
            r#"
// These are some external comments. serde_lenient should handle them
{
    // These are some internal comments
    "image": "image",
    "remoteUser": "root",
}
            "#,
        )
        .await
        .unwrap();

        // Metadata names a *different* user ("vsCode") so the assertion
        // below can detect which source won.
        let mut metadata = HashMap::new();
        metadata.insert(
            "remoteUser".to_string(),
            serde_json_lenient::Value::String("vsCode".to_string()),
        );
        let given_docker_config = DockerInspect {
            id: "docker_id".to_string(),
            config: DockerInspectConfig {
                labels: DockerConfigLabels {
                    metadata: Some(vec![metadata]),
                },
                image_user: None,
                env: Vec::new(),
            },
            mounts: None,
            state: None,
        };

        let remote_user =
            get_remote_user_from_config(&given_docker_config, &devcontainer_manifest).unwrap();

        // devcontainer.json wins over metadata.
        assert_eq!(remote_user, "root".to_string())
    }
+
+    #[gpui::test]
+    async fn should_get_remote_user_from_docker_config(cx: &mut TestAppContext) {
+        let (_, devcontainer_manifest) =
+            init_default_devcontainer_manifest(cx, "{}").await.unwrap();
+        let mut metadata = HashMap::new();
+        metadata.insert(
+            "remoteUser".to_string(),
+            serde_json_lenient::Value::String("vsCode".to_string()),
+        );
+        let given_docker_config = DockerInspect {
+            id: "docker_id".to_string(),
+            config: DockerInspectConfig {
+                labels: DockerConfigLabels {
+                    metadata: Some(vec![metadata]),
+                },
+                image_user: None,
+                env: Vec::new(),
+            },
+            mounts: None,
+            state: None,
+        };
+
+        let remote_user = get_remote_user_from_config(&given_docker_config, &devcontainer_manifest);
+
+        assert!(remote_user.is_ok());
+        let remote_user = remote_user.expect("ok");
+        assert_eq!(&remote_user, "vsCode")
+    }
+
+    #[test]
+    fn should_extract_feature_id_from_references() {
+        assert_eq!(
+            extract_feature_id("ghcr.io/devcontainers/features/aws-cli:1"),
+            "aws-cli"
+        );
+        assert_eq!(
+            extract_feature_id("ghcr.io/devcontainers/features/go"),
+            "go"
+        );
+        assert_eq!(extract_feature_id("ghcr.io/user/repo/node:18.0.0"), "node");
+        assert_eq!(extract_feature_id("./myFeature"), "myFeature");
+        assert_eq!(
+            extract_feature_id("ghcr.io/devcontainers/features/rust@sha256:abc123"),
+            "rust"
+        );
+    }
+
+    #[gpui::test]
+    async fn should_create_correct_docker_run_command(cx: &mut TestAppContext) {
+        let mut metadata = HashMap::new();
+        metadata.insert(
+            "remoteUser".to_string(),
+            serde_json_lenient::Value::String("vsCode".to_string()),
+        );
+
+        let (_, devcontainer_manifest) =
+            init_default_devcontainer_manifest(cx, "{}").await.unwrap();
+        let build_resources = DockerBuildResources {
+            image: DockerInspect {
+                id: "mcr.microsoft.com/devcontainers/base:ubuntu".to_string(),
+                config: DockerInspectConfig {
+                    labels: DockerConfigLabels { metadata: None },
+                    image_user: None,
+                    env: Vec::new(),
+                },
+                mounts: None,
+                state: None,
+            },
+            additional_mounts: vec![],
+            privileged: false,
+            entrypoint_script: "echo Container started\n    trap \"exit 0\" 15\n    exec \"$@\"\n    while sleep 1 & wait $!; do :; done".to_string(),
+        };
+        let docker_run_command = devcontainer_manifest.create_docker_run_command(build_resources);
+
+        assert!(docker_run_command.is_ok());
+        let docker_run_command = docker_run_command.expect("ok");
+
+        assert_eq!(docker_run_command.get_program(), "docker");
+        let expected_config_file_label = PathBuf::from(TEST_PROJECT_PATH)
+            .join(".devcontainer")
+            .join("devcontainer.json");
+        let expected_config_file_label = expected_config_file_label.display();
+        assert_eq!(
+            docker_run_command.get_args().collect::<Vec<&OsStr>>(),
+            vec![
+                OsStr::new("run"),
+                OsStr::new("--sig-proxy=false"),
+                OsStr::new("-d"),
+                OsStr::new("--mount"),
+                OsStr::new(
+                    "type=bind,source=/path/to/local/project,target=/workspaces/project,consistency=cached"
+                ),
+                OsStr::new("-l"),
+                OsStr::new("devcontainer.local_folder=/path/to/local/project"),
+                OsStr::new("-l"),
+                OsStr::new(&format!(
+                    "devcontainer.config_file={expected_config_file_label}"
+                )),
+                OsStr::new("--entrypoint"),
+                OsStr::new("/bin/sh"),
+                OsStr::new("mcr.microsoft.com/devcontainers/base:ubuntu"),
+                OsStr::new("-c"),
+                OsStr::new(
+                    "
+    echo Container started
+    trap \"exit 0\" 15
+    exec \"$@\"
+    while sleep 1 & wait $!; do :; done
+                        "
+                    .trim()
+                ),
+                OsStr::new("-"),
+            ]
+        )
+    }
+
    // `find_primary_service` must fail when no service is declared, fail
    // when the declared service is missing from the compose config, and
    // succeed when the declared service exists.
    #[gpui::test]
    async fn should_find_primary_service_in_docker_compose(cx: &mut TestAppContext) {
        // State where service not defined in dev container
        let (_, given_dev_container) = init_default_devcontainer_manifest(cx, "{}").await.unwrap();
        let given_docker_compose_config = DockerComposeResources {
            config: DockerComposeConfig {
                name: Some("devcontainers".to_string()),
                services: HashMap::new(),
                ..Default::default()
            },
            ..Default::default()
        };

        let bad_result = find_primary_service(&given_docker_compose_config, &given_dev_container);

        assert!(bad_result.is_err());

        // State where service defined in devcontainer, not found in DockerCompose config
        let (_, given_dev_container) =
            init_default_devcontainer_manifest(cx, r#"{"service": "not_found_service"}"#)
                .await
                .unwrap();
        let given_docker_compose_config = DockerComposeResources {
            config: DockerComposeConfig {
                name: Some("devcontainers".to_string()),
                services: HashMap::new(),
                ..Default::default()
            },
            ..Default::default()
        };

        let bad_result = find_primary_service(&given_docker_compose_config, &given_dev_container);

        assert!(bad_result.is_err());
        // State where service defined in devcontainer and in DockerCompose config

        let (_, given_dev_container) =
            init_default_devcontainer_manifest(cx, r#"{"service": "found_service"}"#)
                .await
                .unwrap();
        let given_docker_compose_config = DockerComposeResources {
            config: DockerComposeConfig {
                name: Some("devcontainers".to_string()),
                services: HashMap::from([(
                    "found_service".to_string(),
                    DockerComposeService {
                        ..Default::default()
                    },
                )]),
                ..Default::default()
            },
            ..Default::default()
        };

        let (service_name, _) =
            find_primary_service(&given_docker_compose_config, &given_dev_container).unwrap();

        assert_eq!(service_name, "found_service".to_string());
    }
+
    // End-to-end check of non-remote variable substitution when the
    // devcontainer uses the default workspace mount (no explicit
    // `workspaceMount`/`workspaceFolder`), so the container workspace
    // defaults to `/workspaces/<project basename>`.
    #[gpui::test]
    async fn test_nonremote_variable_replacement_with_default_mount(cx: &mut TestAppContext) {
        let fs = FakeFs::new(cx.executor());
        let given_devcontainer_contents = r#"
// These are some external comments. serde_lenient should handle them
{
    // These are some internal comments
    "image": "mcr.microsoft.com/devcontainers/base:ubuntu",
    "name": "myDevContainer-${devcontainerId}",
    "remoteUser": "root",
    "remoteEnv": {
        "DEVCONTAINER_ID": "${devcontainerId}",
        "MYVAR2": "myvarothervalue",
        "REMOTE_WORKSPACE_FOLDER_BASENAME": "${containerWorkspaceFolderBasename}",
        "LOCAL_WORKSPACE_FOLDER_BASENAME": "${localWorkspaceFolderBasename}",
        "REMOTE_WORKSPACE_FOLDER": "${containerWorkspaceFolder}",
        "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}",
        "LOCAL_ENV_VAR_1": "${localEnv:local_env_1}",
        "LOCAL_ENV_VAR_2": "${localEnv:my_other_env}"

    }
}
                    "#;
        // Two local environment variables are supplied so the
        // `${localEnv:...}` substitutions below have values to resolve.
        let (_, mut devcontainer_manifest) = init_devcontainer_manifest(
            cx,
            fs,
            fake_http_client(),
            Arc::new(FakeDocker::new()),
            Arc::new(TestCommandRunner::new()),
            HashMap::from([
                ("local_env_1".to_string(), "local_env_value1".to_string()),
                ("my_other_env".to_string(), "THISVALUEHERE".to_string()),
            ]),
            given_devcontainer_contents,
        )
        .await
        .unwrap();

        devcontainer_manifest.parse_nonremote_vars().unwrap();

        let ConfigStatus::VariableParsed(variable_replaced_devcontainer) =
            &devcontainer_manifest.config
        else {
            panic!("Config not parsed");
        };

        // ${devcontainerId}
        let devcontainer_id = devcontainer_manifest.devcontainer_id();
        assert_eq!(
            variable_replaced_devcontainer.name,
            Some(format!("myDevContainer-{devcontainer_id}"))
        );
        assert_eq!(
            variable_replaced_devcontainer
                .remote_env
                .as_ref()
                .and_then(|env| env.get("DEVCONTAINER_ID")),
            Some(&devcontainer_id)
        );

        // ${containerWorkspaceFolderBasename}
        assert_eq!(
            variable_replaced_devcontainer
                .remote_env
                .as_ref()
                .and_then(|env| env.get("REMOTE_WORKSPACE_FOLDER_BASENAME")),
            Some(&test_project_filename())
        );

        // ${localWorkspaceFolderBasename}
        assert_eq!(
            variable_replaced_devcontainer
                .remote_env
                .as_ref()
                .and_then(|env| env.get("LOCAL_WORKSPACE_FOLDER_BASENAME")),
            Some(&test_project_filename())
        );

        // ${containerWorkspaceFolder}
        assert_eq!(
            variable_replaced_devcontainer
                .remote_env
                .as_ref()
                .and_then(|env| env.get("REMOTE_WORKSPACE_FOLDER")),
            Some(&format!("/workspaces/{}", test_project_filename()))
        );

        // ${localWorkspaceFolder}
        assert_eq!(
            variable_replaced_devcontainer
                .remote_env
                .as_ref()
                .and_then(|env| env.get("LOCAL_WORKSPACE_FOLDER")),
            Some(&TEST_PROJECT_PATH.to_string())
        );

        // ${localEnv:VARIABLE_NAME}
        assert_eq!(
            variable_replaced_devcontainer
                .remote_env
                .as_ref()
                .and_then(|env| env.get("LOCAL_ENV_VAR_1")),
            Some(&"local_env_value1".to_string())
        );
        assert_eq!(
            variable_replaced_devcontainer
                .remote_env
                .as_ref()
                .and_then(|env| env.get("LOCAL_ENV_VAR_2")),
            Some(&"THISVALUEHERE".to_string())
        );
    }
+
    // As above, but with an explicit `workspaceMount`/`workspaceFolder`,
    // so `${containerWorkspaceFolder*}` must resolve to the custom
    // in-container path rather than the `/workspaces/<basename>` default.
    #[gpui::test]
    async fn test_nonremote_variable_replacement_with_explicit_mount(cx: &mut TestAppContext) {
        let given_devcontainer_contents = r#"
                // These are some external comments. serde_lenient should handle them
                {
                    // These are some internal comments
                    "image": "mcr.microsoft.com/devcontainers/base:ubuntu",
                    "name": "myDevContainer-${devcontainerId}",
                    "remoteUser": "root",
                    "remoteEnv": {
                        "DEVCONTAINER_ID": "${devcontainerId}",
                        "MYVAR2": "myvarothervalue",
                        "REMOTE_WORKSPACE_FOLDER_BASENAME": "${containerWorkspaceFolderBasename}",
                        "LOCAL_WORKSPACE_FOLDER_BASENAME": "${localWorkspaceFolderBasename}",
                        "REMOTE_WORKSPACE_FOLDER": "${containerWorkspaceFolder}",
                        "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}"

                    },
                    "workspaceMount": "source=/local/folder,target=/workspace/subfolder,type=bind,consistency=cached",
                    "workspaceFolder": "/workspace/customfolder"
                }
            "#;

        let (_, mut devcontainer_manifest) =
            init_default_devcontainer_manifest(cx, given_devcontainer_contents)
                .await
                .unwrap();

        devcontainer_manifest.parse_nonremote_vars().unwrap();

        let ConfigStatus::VariableParsed(variable_replaced_devcontainer) =
            &devcontainer_manifest.config
        else {
            panic!("Config not parsed");
        };

        // ${devcontainerId}
        let devcontainer_id = devcontainer_manifest.devcontainer_id();
        assert_eq!(
            variable_replaced_devcontainer.name,
            Some(format!("myDevContainer-{devcontainer_id}"))
        );
        assert_eq!(
            variable_replaced_devcontainer
                .remote_env
                .as_ref()
                .and_then(|env| env.get("DEVCONTAINER_ID")),
            Some(&devcontainer_id)
        );

        // ${containerWorkspaceFolderBasename} — basename of the explicit
        // `workspaceFolder`, not of the project directory.
        assert_eq!(
            variable_replaced_devcontainer
                .remote_env
                .as_ref()
                .and_then(|env| env.get("REMOTE_WORKSPACE_FOLDER_BASENAME")),
            Some(&"customfolder".to_string())
        );

        // ${localWorkspaceFolderBasename} — still the local project dir.
        assert_eq!(
            variable_replaced_devcontainer
                .remote_env
                .as_ref()
                .and_then(|env| env.get("LOCAL_WORKSPACE_FOLDER_BASENAME")),
            Some(&"project".to_string())
        );

        // ${containerWorkspaceFolder}
        assert_eq!(
            variable_replaced_devcontainer
                .remote_env
                .as_ref()
                .and_then(|env| env.get("REMOTE_WORKSPACE_FOLDER")),
            Some(&"/workspace/customfolder".to_string())
        );

        // ${localWorkspaceFolder}
        assert_eq!(
            variable_replaced_devcontainer
                .remote_env
                .as_ref()
                .and_then(|env| env.get("LOCAL_WORKSPACE_FOLDER")),
            Some(&TEST_PROJECT_PATH.to_string())
        );
    }
+
+    // updateRemoteUserUID is treated as false in Windows, so this test will fail
+    // It is covered by test_spawns_devcontainer_with_dockerfile_and_no_update_uid
+    #[cfg(not(target_os = "windows"))]
+    #[gpui::test]
+    async fn test_spawns_devcontainer_with_dockerfile_and_features(cx: &mut TestAppContext) {
+        cx.executor().allow_parking();
+        env_logger::try_init().ok();
+        let given_devcontainer_contents = r#"
+            /*---------------------------------------------------------------------------------------------
+             *  Copyright (c) Microsoft Corporation. All rights reserved.
+             *  Licensed under the MIT License. See License.txt in the project root for license information.
+             *--------------------------------------------------------------------------------------------*/
+            {
+              "name": "cli-${devcontainerId}",
+              // "image": "mcr.microsoft.com/devcontainers/typescript-node:16-bullseye",
+              "build": {
+                "dockerfile": "Dockerfile",
+                "args": {
+                  "VARIANT": "18-bookworm",
+                  "FOO": "bar",
+                },
+              },
+              "workspaceMount": "source=${localWorkspaceFolder},target=${containerWorkspaceFolder},type=bind,consistency=cached",
+              "workspaceFolder": "/workspace2",
+              "mounts": [
+                // Keep command history across instances
+                "source=dev-containers-cli-bashhistory,target=/home/node/commandhistory",
+              ],
+
+              "forwardPorts": [
+                8082,
+                8083,
+              ],
+              "appPort": "8084",
+
+              "containerEnv": {
+                "VARIABLE_VALUE": "value",
+              },
+
+              "initializeCommand": "touch IAM.md",
+
+              "onCreateCommand": "echo 'onCreateCommand' >> ON_CREATE_COMMAND.md",
+
+              "updateContentCommand": "echo 'updateContentCommand' >> UPDATE_CONTENT_COMMAND.md",
+
+              "postCreateCommand": {
+                "yarn": "yarn install",
+                "debug": "echo 'postStartCommand' >> POST_START_COMMAND.md",
+              },
+
+              "postStartCommand": "echo 'postStartCommand' >> POST_START_COMMAND.md",
+
+              "postAttachCommand": "echo 'postAttachCommand' >> POST_ATTACH_COMMAND.md",
+
+              "remoteUser": "node",
+
+              "remoteEnv": {
+                "PATH": "${containerEnv:PATH}:/some/other/path",
+                "OTHER_ENV": "other_env_value"
+              },
+
+              "features": {
+                "ghcr.io/devcontainers/features/docker-in-docker:2": {
+                  "moby": false,
+                },
+                "ghcr.io/devcontainers/features/go:1": {},
+              },
+
+              "customizations": {
+                "vscode": {
+                  "extensions": [
+                    "dbaeumer.vscode-eslint",
+                    "GitHub.vscode-pull-request-github",
+                  ],
+                },
+                "zed": {
+                  "extensions": ["vue", "ruby"],
+                },
+                "codespaces": {
+                  "repositories": {
+                    "devcontainers/features": {
+                      "permissions": {
+                        "contents": "write",
+                        "workflows": "write",
+                      },
+                    },
+                  },
+                },
+              },
+            }
+            "#;
+
+        let (test_dependencies, mut devcontainer_manifest) =
+            init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+                .await
+                .unwrap();
+
+        test_dependencies
+            .fs
+            .atomic_write(
+                PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"),
+                r#"
+#  Copyright (c) Microsoft Corporation. All rights reserved.
+#  Licensed under the MIT License. See License.txt in the project root for license information.
+ARG VARIANT="16-bullseye"
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT}
+
+RUN mkdir -p /workspaces && chown node:node /workspaces
+
+ARG USERNAME=node
+USER $USERNAME
+
+# Save command line history
+RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/home/$USERNAME/.bashrc" \
+&& echo "export PROMPT_COMMAND='history -a'" >> "/home/$USERNAME/.bashrc" \
+&& mkdir -p /home/$USERNAME/commandhistory \
+&& touch /home/$USERNAME/commandhistory/.bash_history \
+&& chown -R $USERNAME /home/$USERNAME/commandhistory
+                    "#.trim().to_string(),
+            )
+            .await
+            .unwrap();
+
+        devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+        let devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap();
+
+        assert_eq!(
+            devcontainer_up.extension_ids,
+            vec!["vue".to_string(), "ruby".to_string()]
+        );
+
+        let files = test_dependencies.fs.files();
+        let feature_dockerfile = files
+            .iter()
+            .find(|f| {
+                f.file_name()
+                    .is_some_and(|s| s.display().to_string() == "Dockerfile.extended")
+            })
+            .expect("to be found");
+        let feature_dockerfile = test_dependencies.fs.load(feature_dockerfile).await.unwrap();
+        assert_eq!(
+            &feature_dockerfile,
+            r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder
+
+#  Copyright (c) Microsoft Corporation. All rights reserved.
+#  Licensed under the MIT License. See License.txt in the project root for license information.
+ARG VARIANT="16-bullseye"
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label
+
+RUN mkdir -p /workspaces && chown node:node /workspaces
+
+ARG USERNAME=node
+USER $USERNAME
+
+# Save command line history
+RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/home/$USERNAME/.bashrc" \
+&& echo "export PROMPT_COMMAND='history -a'" >> "/home/$USERNAME/.bashrc" \
+&& mkdir -p /home/$USERNAME/commandhistory \
+&& touch /home/$USERNAME/commandhistory/.bash_history \
+&& chown -R $USERNAME /home/$USERNAME/commandhistory
+
+FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
+USER root
+COPY --from=dev_containers_feature_content_source ./devcontainer-features.builtin.env /tmp/build-features/
+RUN chmod -R 0755 /tmp/build-features/
+
+FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_target_stage
+
+USER root
+
+RUN mkdir -p /tmp/dev-container-features
+COPY --from=dev_containers_feature_content_normalize /tmp/build-features/ /tmp/dev-container-features
+
+RUN \
+echo "_CONTAINER_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'root' || grep -E '^root|^[^:]*:[^:]*:root:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env && \
+echo "_REMOTE_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'node' || grep -E '^node|^[^:]*:[^:]*:node:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env
+
+
+RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./docker-in-docker_0,target=/tmp/build-features-src/docker-in-docker_0 \
+cp -ar /tmp/build-features-src/docker-in-docker_0 /tmp/dev-container-features \
+&& chmod -R 0755 /tmp/dev-container-features/docker-in-docker_0 \
+&& cd /tmp/dev-container-features/docker-in-docker_0 \
+&& chmod +x ./devcontainer-features-install.sh \
+&& ./devcontainer-features-install.sh \
+&& rm -rf /tmp/dev-container-features/docker-in-docker_0
+
+RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./go_1,target=/tmp/build-features-src/go_1 \
+cp -ar /tmp/build-features-src/go_1 /tmp/dev-container-features \
+&& chmod -R 0755 /tmp/dev-container-features/go_1 \
+&& cd /tmp/dev-container-features/go_1 \
+&& chmod +x ./devcontainer-features-install.sh \
+&& ./devcontainer-features-install.sh \
+&& rm -rf /tmp/dev-container-features/go_1
+
+
+ARG _DEV_CONTAINERS_IMAGE_USER=root
+USER $_DEV_CONTAINERS_IMAGE_USER
+"#
+        );
+
+        let uid_dockerfile = files
+            .iter()
+            .find(|f| {
+                f.file_name()
+                    .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile")
+            })
+            .expect("to be found");
+        let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap();
+
+        assert_eq!(
+            &uid_dockerfile,
+            r#"ARG BASE_IMAGE
+FROM $BASE_IMAGE
+
+USER root
+
+ARG REMOTE_USER
+ARG NEW_UID
+ARG NEW_GID
+SHELL ["/bin/sh", "-c"]
+RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \
+	eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \
+	eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \
+	if [ -z "$OLD_UID" ]; then \
+		echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \
+	elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \
+		echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \
+	elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \
+		echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \
+	else \
+		if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \
+			FREE_GID=65532; \
+			while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \
+			echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \
+			sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \
+		fi; \
+		echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \
+		sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \
+		if [ "$OLD_GID" != "$NEW_GID" ]; then \
+			sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \
+		fi; \
+		chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \
+	fi;
+
+ARG IMAGE_USER
+USER $IMAGE_USER
+
+# Ensure that /etc/profile does not clobber the existing path
+RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
+
+ENV DOCKER_BUILDKIT=1
+
+ENV GOPATH=/go
+ENV GOROOT=/usr/local/go
+ENV PATH=/usr/local/go/bin:/go/bin:${PATH}
+ENV VARIABLE_VALUE=value
+"#
+        );
+
+        let golang_install_wrapper = files
+            .iter()
+            .find(|f| {
+                f.file_name()
+                    .is_some_and(|s| s.display().to_string() == "devcontainer-features-install.sh")
+                    && f.to_str().is_some_and(|s| s.contains("/go_"))
+            })
+            .expect("to be found");
+        let golang_install_wrapper = test_dependencies
+            .fs
+            .load(golang_install_wrapper)
+            .await
+            .unwrap();
+        assert_eq!(
+            &golang_install_wrapper,
+            r#"#!/bin/sh
+set -e
+
+on_exit () {
+    [ $? -eq 0 ] && exit
+    echo 'ERROR: Feature "go" (ghcr.io/devcontainers/features/go:1) failed to install!'
+}
+
+trap on_exit EXIT
+
+echo ===========================================================================
+echo 'Feature       : go'
+echo 'Id            : ghcr.io/devcontainers/features/go:1'
+echo 'Options       :'
+echo '    GOLANGCILINTVERSION=latest
+    VERSION=latest'
+echo ===========================================================================
+
+set -a
+. ../devcontainer-features.builtin.env
+. ./devcontainer-features.env
+set +a
+
+chmod +x ./install.sh
+./install.sh
+"#
+        );
+
+        let docker_commands = test_dependencies
+            .command_runner
+            .commands_by_program("docker");
+
+        let docker_run_command = docker_commands
+            .iter()
+            .find(|c| c.args.get(0).is_some_and(|a| a == "run"))
+            .expect("found");
+
+        assert_eq!(
+            docker_run_command.args,
+            vec![
+                "run".to_string(),
+                "--privileged".to_string(),
+                "--sig-proxy=false".to_string(),
+                "-d".to_string(),
+                "--mount".to_string(),
+                "type=bind,source=/path/to/local/project,target=/workspace2,consistency=cached".to_string(),
+                "--mount".to_string(),
+                "type=volume,source=dev-containers-cli-bashhistory,target=/home/node/commandhistory,consistency=cached".to_string(),
+                "--mount".to_string(),
+                "type=volume,source=dind-var-lib-docker-42dad4b4ca7b8ced,target=/var/lib/docker,consistency=cached".to_string(),
+                "-l".to_string(),
+                "devcontainer.local_folder=/path/to/local/project".to_string(),
+                "-l".to_string(),
+                "devcontainer.config_file=/path/to/local/project/.devcontainer/devcontainer.json".to_string(),
+                "-l".to_string(),
+                "devcontainer.metadata=[{\"remoteUser\":\"node\"}]".to_string(),
+                "-p".to_string(),
+                "8082:8082".to_string(),
+                "-p".to_string(),
+                "8083:8083".to_string(),
+                "-p".to_string(),
+                "8084:8084".to_string(),
+                "--entrypoint".to_string(),
+                "/bin/sh".to_string(),
+                "sha256:610e6cfca95280188b021774f8cf69dd6f49bdb6eebc34c5ee2010f4d51cc105".to_string(),
+                "-c".to_string(),
+                "echo Container started\ntrap \"exit 0\" 15\n/usr/local/share/docker-init.sh\nexec \"$@\"\nwhile sleep 1 & wait $!; do :; done".to_string(),
+                "-".to_string()
+            ]
+        );
+
+        let docker_exec_commands = test_dependencies
+            .docker
+            .exec_commands_recorded
+            .lock()
+            .unwrap();
+
+        assert!(docker_exec_commands.iter().all(|exec| {
+            exec.env
+                == HashMap::from([
+                    ("OTHER_ENV".to_string(), "other_env_value".to_string()),
+                    (
+                        "PATH".to_string(),
+                        "/initial/path:/some/other/path".to_string(),
+                    ),
+                ])
+        }))
+    }
+
+    // updateRemoteUserUID is treated as false in Windows, so this test will fail
+    // It is covered by test_spawns_devcontainer_with_docker_compose_and_no_update_uid
+    #[cfg(not(target_os = "windows"))]
+    #[gpui::test]
+    async fn test_spawns_devcontainer_with_docker_compose(cx: &mut TestAppContext) {
+        cx.executor().allow_parking();
+        env_logger::try_init().ok();
+        // devcontainer.json fixture: docker-compose based project with two features
+        // (aws-cli, docker-in-docker), ports forwarded on the "db" service, and an
+        // "appPort". The JSONC comments and trailing commas are intentional — the
+        // parser must accept them.
+        let given_devcontainer_contents = r#"
+            // For format details, see https://aka.ms/devcontainer.json. For config options, see the
+            // README at: https://github.com/devcontainers/templates/tree/main/src/rust-postgres
+            {
+              "features": {
+                "ghcr.io/devcontainers/features/aws-cli:1": {},
+                "ghcr.io/devcontainers/features/docker-in-docker:2": {},
+              },
+              "name": "Rust and PostgreSQL",
+              "dockerComposeFile": "docker-compose.yml",
+              "service": "app",
+              "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
+
+              // Features to add to the dev container. More info: https://containers.dev/features.
+              // "features": {},
+
+              // Use 'forwardPorts' to make a list of ports inside the container available locally.
+              "forwardPorts": [
+                8083,
+                "db:5432",
+                "db:1234",
+              ],
+              "appPort": "8084",
+
+              // Use 'postCreateCommand' to run commands after the container is created.
+              // "postCreateCommand": "rustc --version",
+
+              // Configure tool-specific properties.
+              // "customizations": {},
+
+              // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
+              // "remoteUser": "root"
+            }
+            "#;
+        let (test_dependencies, mut devcontainer_manifest) =
+            init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+                .await
+                .unwrap();
+
+        // The compose file referenced above: "app" is built from a local Dockerfile
+        // and shares the network namespace of the "db" (postgres) service via
+        // network_mode: service:db.
+        test_dependencies
+            .fs
+            .atomic_write(
+                PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/docker-compose.yml"),
+                r#"
+version: '3.8'
+
+volumes:
+    postgres-data:
+
+services:
+    app:
+        build:
+            context: .
+            dockerfile: Dockerfile
+        env_file:
+            # Ensure that the variables in .env match the same variables in devcontainer.json
+            - .env
+
+        volumes:
+            - ../..:/workspaces:cached
+
+        # Overrides default command so things don't shut down after the process ends.
+        command: sleep infinity
+
+        # Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function.
+        network_mode: service:db
+
+        # Use "forwardPorts" in **devcontainer.json** to forward an app port locally.
+        # (Adding the "ports" property to this file will not forward from a Codespace.)
+
+    db:
+        image: postgres:14.1
+        restart: unless-stopped
+        volumes:
+            - postgres-data:/var/lib/postgresql/data
+        env_file:
+            # Ensure that the variables in .env match the same variables in devcontainer.json
+            - .env
+
+        # Add "forwardPorts": ["5432"] to **devcontainer.json** to forward PostgreSQL locally.
+        # (Adding the "ports" property to this file will not forward from a Codespace.)
+                    "#.trim().to_string(),
+            )
+            .await
+            .unwrap();
+
+        // Base-image Dockerfile for the "app" service; its contents must reappear
+        // verbatim inside the generated Dockerfile.extended asserted below.
+        test_dependencies.fs.atomic_write(
+            PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"),
+            r#"
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm
+
+# Include lld linker to improve build times either by using environment variable
+# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml).
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+    && apt-get -y install clang lld \
+    && apt-get autoremove -y && apt-get clean -y
+            "#.trim().to_string()).await.unwrap();
+
+        devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+        // Drive the full build-and-run pipeline against the fake fs/docker harness.
+        let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap();
+
+        // Golden-output check #1: the generated feature-install Dockerfile
+        // ("Dockerfile.extended") must match byte-for-byte, including the
+        // aws-cli_0 and docker-in-docker_1 feature install stanzas.
+        let files = test_dependencies.fs.files();
+        let feature_dockerfile = files
+            .iter()
+            .find(|f| {
+                f.file_name()
+                    .is_some_and(|s| s.display().to_string() == "Dockerfile.extended")
+            })
+            .expect("to be found");
+        let feature_dockerfile = test_dependencies.fs.load(feature_dockerfile).await.unwrap();
+        assert_eq!(
+            &feature_dockerfile,
+            r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder
+
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
+
+# Include lld linker to improve build times either by using environment variable
+# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml).
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+    && apt-get -y install clang lld \
+    && apt-get autoremove -y && apt-get clean -y
+
+FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
+USER root
+COPY --from=dev_containers_feature_content_source ./devcontainer-features.builtin.env /tmp/build-features/
+RUN chmod -R 0755 /tmp/build-features/
+
+FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_target_stage
+
+USER root
+
+RUN mkdir -p /tmp/dev-container-features
+COPY --from=dev_containers_feature_content_normalize /tmp/build-features/ /tmp/dev-container-features
+
+RUN \
+echo "_CONTAINER_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'root' || grep -E '^root|^[^:]*:[^:]*:root:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env && \
+echo "_REMOTE_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'vscode' || grep -E '^vscode|^[^:]*:[^:]*:vscode:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env
+
+
+RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./aws-cli_0,target=/tmp/build-features-src/aws-cli_0 \
+cp -ar /tmp/build-features-src/aws-cli_0 /tmp/dev-container-features \
+&& chmod -R 0755 /tmp/dev-container-features/aws-cli_0 \
+&& cd /tmp/dev-container-features/aws-cli_0 \
+&& chmod +x ./devcontainer-features-install.sh \
+&& ./devcontainer-features-install.sh \
+&& rm -rf /tmp/dev-container-features/aws-cli_0
+
+RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./docker-in-docker_1,target=/tmp/build-features-src/docker-in-docker_1 \
+cp -ar /tmp/build-features-src/docker-in-docker_1 /tmp/dev-container-features \
+&& chmod -R 0755 /tmp/dev-container-features/docker-in-docker_1 \
+&& cd /tmp/dev-container-features/docker-in-docker_1 \
+&& chmod +x ./devcontainer-features-install.sh \
+&& ./devcontainer-features-install.sh \
+&& rm -rf /tmp/dev-container-features/docker-in-docker_1
+
+
+ARG _DEV_CONTAINERS_IMAGE_USER=root
+USER $_DEV_CONTAINERS_IMAGE_USER
+"#
+        );
+
+        // Golden-output check #2: the UID-remap Dockerfile ("updateUID.Dockerfile").
+        // It is generated here because this test runs only on non-Windows hosts
+        // (see the cfg gate above), where updateRemoteUserUID is honored.
+        let uid_dockerfile = files
+            .iter()
+            .find(|f| {
+                f.file_name()
+                    .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile")
+            })
+            .expect("to be found");
+        let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap();
+
+        assert_eq!(
+            &uid_dockerfile,
+            r#"ARG BASE_IMAGE
+FROM $BASE_IMAGE
+
+USER root
+
+ARG REMOTE_USER
+ARG NEW_UID
+ARG NEW_GID
+SHELL ["/bin/sh", "-c"]
+RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \
+	eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \
+	eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \
+	if [ -z "$OLD_UID" ]; then \
+		echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \
+	elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \
+		echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \
+	elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \
+		echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \
+	else \
+		if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \
+			FREE_GID=65532; \
+			while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \
+			echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \
+			sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \
+		fi; \
+		echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \
+		sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \
+		if [ "$OLD_GID" != "$NEW_GID" ]; then \
+			sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \
+		fi; \
+		chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \
+	fi;
+
+ARG IMAGE_USER
+USER $IMAGE_USER
+
+# Ensure that /etc/profile does not clobber the existing path
+RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
+
+
+ENV DOCKER_BUILDKIT=1
+"#
+        );
+
+        // Golden-output check #3: the generated compose override
+        // ("docker_compose_runtime.json"), compared structurally via deserialization
+        // rather than textually.
+        let runtime_override = files
+            .iter()
+            .find(|f| {
+                f.file_name()
+                    .is_some_and(|s| s.display().to_string() == "docker_compose_runtime.json")
+            })
+            .expect("to be found");
+        let runtime_override = test_dependencies.fs.load(runtime_override).await.unwrap();
+
+        // Expected override: "app" gets the keep-alive entrypoint, labels, privileged
+        // mode, and the docker-in-docker volume; the forwarded/app ports appear on
+        // "db" — presumably because "app" uses network_mode: service:db in the
+        // fixture, so port publishing must happen on the network owner (TODO confirm
+        // against the implementation).
+        let expected_runtime_override = DockerComposeConfig {
+            name: None,
+            services: HashMap::from([
+                (
+                    "app".to_string(),
+                    DockerComposeService {
+                        entrypoint: Some(vec![
+                            "/bin/sh".to_string(),
+                            "-c".to_string(),
+                            "echo Container started\ntrap \"exit 0\" 15\n/usr/local/share/docker-init.sh\nexec \"$@\"\nwhile sleep 1 & wait $!; do :; done".to_string(),
+                            "-".to_string(),
+                        ]),
+                        cap_add: Some(vec!["SYS_PTRACE".to_string()]),
+                        security_opt: Some(vec!["seccomp=unconfined".to_string()]),
+                        privileged: Some(true),
+                        labels: Some(vec![
+                            "devcontainer.metadata=[{\"remoteUser\":\"vscode\"}]".to_string(),
+                            "devcontainer.local_folder=/path/to/local/project".to_string(),
+                            "devcontainer.config_file=/path/to/local/project/.devcontainer/devcontainer.json".to_string()
+                        ]),
+                        volumes: vec![
+                            MountDefinition {
+                                source: "dind-var-lib-docker-42dad4b4ca7b8ced".to_string(),
+                                target: "/var/lib/docker".to_string(),
+                                mount_type: Some("volume".to_string())
+                            }
+                        ],
+                        ..Default::default()
+                    },
+                ),
+                (
+                    "db".to_string(),
+                    DockerComposeService {
+                        ports: vec![
+                            "8083:8083".to_string(),
+                            "5432:5432".to_string(),
+                            "1234:1234".to_string(),
+                            "8084:8084".to_string()
+                        ],
+                        ..Default::default()
+                    },
+                ),
+            ]),
+            volumes: HashMap::from([(
+                "dind-var-lib-docker-42dad4b4ca7b8ced".to_string(),
+                DockerComposeVolume {
+                    name: "dind-var-lib-docker-42dad4b4ca7b8ced".to_string(),
+                },
+            )]),
+        };
+
+        assert_eq!(
+            serde_json_lenient::from_str::<DockerComposeConfig>(&runtime_override).unwrap(),
+            expected_runtime_override
+        )
+    }
+
+    // Same docker-compose scenario but with "updateRemoteUserUID": false in the
+    // devcontainer.json. Only the generated Dockerfile.extended is asserted here
+    // (no updateUID.Dockerfile check). Not cfg-gated, so this also provides
+    // coverage on Windows, where updateRemoteUserUID is treated as false.
+    #[gpui::test]
+    async fn test_spawns_devcontainer_with_docker_compose_and_no_update_uid(
+        cx: &mut TestAppContext,
+    ) {
+        cx.executor().allow_parking();
+        env_logger::try_init().ok();
+        // devcontainer.json fixture: two features, forwarded ports on "db", an
+        // "appPort", and — the point of this test — "updateRemoteUserUID": false.
+        let given_devcontainer_contents = r#"
+        // For format details, see https://aka.ms/devcontainer.json. For config options, see the
+        // README at: https://github.com/devcontainers/templates/tree/main/src/rust-postgres
+        {
+          "features": {
+            "ghcr.io/devcontainers/features/aws-cli:1": {},
+            "ghcr.io/devcontainers/features/docker-in-docker:2": {},
+          },
+          "name": "Rust and PostgreSQL",
+          "dockerComposeFile": "docker-compose.yml",
+          "service": "app",
+          "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
+
+          // Features to add to the dev container. More info: https://containers.dev/features.
+          // "features": {},
+
+          // Use 'forwardPorts' to make a list of ports inside the container available locally.
+          "forwardPorts": [
+            8083,
+            "db:5432",
+            "db:1234",
+          ],
+          "updateRemoteUserUID": false,
+          "appPort": "8084",
+
+          // Use 'postCreateCommand' to run commands after the container is created.
+          // "postCreateCommand": "rustc --version",
+
+          // Configure tool-specific properties.
+          // "customizations": {},
+
+          // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
+          // "remoteUser": "root"
+        }
+        "#;
+        let (test_dependencies, mut devcontainer_manifest) =
+            init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+                .await
+                .unwrap();
+
+        // The compose file referenced above: "app" built from a local Dockerfile,
+        // sharing the "db" service's network via network_mode: service:db.
+        test_dependencies
+        .fs
+        .atomic_write(
+            PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/docker-compose.yml"),
+            r#"
+version: '3.8'
+
+volumes:
+postgres-data:
+
+services:
+app:
+    build:
+        context: .
+        dockerfile: Dockerfile
+    env_file:
+        # Ensure that the variables in .env match the same variables in devcontainer.json
+        - .env
+
+    volumes:
+        - ../..:/workspaces:cached
+
+    # Overrides default command so things don't shut down after the process ends.
+    command: sleep infinity
+
+    # Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function.
+    network_mode: service:db
+
+    # Use "forwardPorts" in **devcontainer.json** to forward an app port locally.
+    # (Adding the "ports" property to this file will not forward from a Codespace.)
+
+db:
+    image: postgres:14.1
+    restart: unless-stopped
+    volumes:
+        - postgres-data:/var/lib/postgresql/data
+    env_file:
+        # Ensure that the variables in .env match the same variables in devcontainer.json
+        - .env
+
+    # Add "forwardPorts": ["5432"] to **devcontainer.json** to forward PostgreSQL locally.
+    # (Adding the "ports" property to this file will not forward from a Codespace.)
+                "#.trim().to_string(),
+        )
+        .await
+        .unwrap();
+
+        // Base-image Dockerfile for the "app" service; it must reappear verbatim
+        // inside the generated Dockerfile.extended asserted below.
+        test_dependencies.fs.atomic_write(
+        PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"),
+        r#"
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm
+
+# Include lld linker to improve build times either by using environment variable
+# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml).
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+&& apt-get -y install clang lld \
+&& apt-get autoremove -y && apt-get clean -y
+        "#.trim().to_string()).await.unwrap();
+
+        devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+        // Drive the full build-and-run pipeline against the fake fs/docker harness.
+        let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap();
+
+        // Golden-output check: with the UID update disabled, the /etc/profile PATH
+        // fixup and the DOCKER_BUILDKIT env end up appended to Dockerfile.extended
+        // itself (compare with the golden updateUID.Dockerfile in the cfg-gated
+        // variant of this test).
+        let files = test_dependencies.fs.files();
+        let feature_dockerfile = files
+            .iter()
+            .find(|f| {
+                f.file_name()
+                    .is_some_and(|s| s.display().to_string() == "Dockerfile.extended")
+            })
+            .expect("to be found");
+        let feature_dockerfile = test_dependencies.fs.load(feature_dockerfile).await.unwrap();
+        assert_eq!(
+            &feature_dockerfile,
+            r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder
+
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
+
+# Include lld linker to improve build times either by using environment variable
+# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml).
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+&& apt-get -y install clang lld \
+&& apt-get autoremove -y && apt-get clean -y
+
+FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
+USER root
+COPY --from=dev_containers_feature_content_source ./devcontainer-features.builtin.env /tmp/build-features/
+RUN chmod -R 0755 /tmp/build-features/
+
+FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_target_stage
+
+USER root
+
+RUN mkdir -p /tmp/dev-container-features
+COPY --from=dev_containers_feature_content_normalize /tmp/build-features/ /tmp/dev-container-features
+
+RUN \
+echo "_CONTAINER_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'root' || grep -E '^root|^[^:]*:[^:]*:root:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env && \
+echo "_REMOTE_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'vscode' || grep -E '^vscode|^[^:]*:[^:]*:vscode:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env
+
+
+RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./aws-cli_0,target=/tmp/build-features-src/aws-cli_0 \
+cp -ar /tmp/build-features-src/aws-cli_0 /tmp/dev-container-features \
+&& chmod -R 0755 /tmp/dev-container-features/aws-cli_0 \
+&& cd /tmp/dev-container-features/aws-cli_0 \
+&& chmod +x ./devcontainer-features-install.sh \
+&& ./devcontainer-features-install.sh \
+&& rm -rf /tmp/dev-container-features/aws-cli_0
+
+RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./docker-in-docker_1,target=/tmp/build-features-src/docker-in-docker_1 \
+cp -ar /tmp/build-features-src/docker-in-docker_1 /tmp/dev-container-features \
+&& chmod -R 0755 /tmp/dev-container-features/docker-in-docker_1 \
+&& cd /tmp/dev-container-features/docker-in-docker_1 \
+&& chmod +x ./devcontainer-features-install.sh \
+&& ./devcontainer-features-install.sh \
+&& rm -rf /tmp/dev-container-features/docker-in-docker_1
+
+
+ARG _DEV_CONTAINERS_IMAGE_USER=root
+USER $_DEV_CONTAINERS_IMAGE_USER
+
+# Ensure that /etc/profile does not clobber the existing path
+RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
+
+
+ENV DOCKER_BUILDKIT=1
+"#
+        );
+    }
+
+    #[cfg(not(target_os = "windows"))]
+    #[gpui::test]
+    async fn test_spawns_devcontainer_with_docker_compose_and_podman(cx: &mut TestAppContext) {
+        cx.executor().allow_parking();
+        env_logger::try_init().ok();
+        let given_devcontainer_contents = r#"
+        // For format details, see https://aka.ms/devcontainer.json. For config options, see the
+        // README at: https://github.com/devcontainers/templates/tree/main/src/rust-postgres
+        {
+          "features": {
+            "ghcr.io/devcontainers/features/aws-cli:1": {},
+            "ghcr.io/devcontainers/features/docker-in-docker:2": {},
+          },
+          "name": "Rust and PostgreSQL",
+          "dockerComposeFile": "docker-compose.yml",
+          "service": "app",
+          "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
+
+          // Features to add to the dev container. More info: https://containers.dev/features.
+          // "features": {},
+
+          // Use 'forwardPorts' to make a list of ports inside the container available locally.
+          // "forwardPorts": [5432],
+
+          // Use 'postCreateCommand' to run commands after the container is created.
+          // "postCreateCommand": "rustc --version",
+
+          // Configure tool-specific properties.
+          // "customizations": {},
+
+          // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
+          // "remoteUser": "root"
+        }
+        "#;
+        let mut fake_docker = FakeDocker::new();
+        fake_docker.set_podman(true);
+        let (test_dependencies, mut devcontainer_manifest) = init_devcontainer_manifest(
+            cx,
+            FakeFs::new(cx.executor()),
+            fake_http_client(),
+            Arc::new(fake_docker),
+            Arc::new(TestCommandRunner::new()),
+            HashMap::new(),
+            given_devcontainer_contents,
+        )
+        .await
+        .unwrap();
+
+        test_dependencies
+        .fs
+        .atomic_write(
+            PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/docker-compose.yml"),
+            r#"
+version: '3.8'
+
+volumes:
+postgres-data:
+
+services:
+app:
+build:
+    context: .
+    dockerfile: Dockerfile
+env_file:
+    # Ensure that the variables in .env match the same variables in devcontainer.json
+    - .env
+
+volumes:
+    - ../..:/workspaces:cached
+
+# Overrides default command so things don't shut down after the process ends.
+command: sleep infinity
+
+# Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function.
+network_mode: service:db
+
+# Use "forwardPorts" in **devcontainer.json** to forward an app port locally.
+# (Adding the "ports" property to this file will not forward from a Codespace.)
+
+db:
+image: postgres:14.1
+restart: unless-stopped
+volumes:
+    - postgres-data:/var/lib/postgresql/data
+env_file:
+    # Ensure that the variables in .env match the same variables in devcontainer.json
+    - .env
+
+# Add "forwardPorts": ["5432"] to **devcontainer.json** to forward PostgreSQL locally.
+# (Adding the "ports" property to this file will not forward from a Codespace.)
+                "#.trim().to_string(),
+        )
+        .await
+        .unwrap();
+
+        test_dependencies.fs.atomic_write(
+        PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"),
+        r#"
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm
+
+# Include lld linker to improve build times either by using environment variable
+# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml).
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+&& apt-get -y install clang lld \
+&& apt-get autoremove -y && apt-get clean -y
+        "#.trim().to_string()).await.unwrap();
+
+        devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+        let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap();
+
+        let files = test_dependencies.fs.files();
+
+        let feature_dockerfile = files
+            .iter()
+            .find(|f| {
+                f.file_name()
+                    .is_some_and(|s| s.display().to_string() == "Dockerfile.extended")
+            })
+            .expect("to be found");
+        let feature_dockerfile = test_dependencies.fs.load(feature_dockerfile).await.unwrap();
+        assert_eq!(
+            &feature_dockerfile,
+            r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder
+
+FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label
+
+# Include lld linker to improve build times either by using environment variable
+# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml).
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+&& apt-get -y install clang lld \
+&& apt-get autoremove -y && apt-get clean -y
+
+FROM dev_container_feature_content_temp as dev_containers_feature_content_source
+
+FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
+USER root
+COPY --from=dev_containers_feature_content_source /tmp/build-features/devcontainer-features.builtin.env /tmp/build-features/
+RUN chmod -R 0755 /tmp/build-features/
+
+FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_target_stage
+
+USER root
+
+RUN mkdir -p /tmp/dev-container-features
+COPY --from=dev_containers_feature_content_normalize /tmp/build-features/ /tmp/dev-container-features
+
+RUN \
+echo "_CONTAINER_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'root' || grep -E '^root|^[^:]*:[^:]*:root:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env && \
+echo "_REMOTE_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'vscode' || grep -E '^vscode|^[^:]*:[^:]*:vscode:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env
+
+
+COPY --chown=root:root --from=dev_containers_feature_content_source /tmp/build-features/aws-cli_0 /tmp/dev-container-features/aws-cli_0
+RUN chmod -R 0755 /tmp/dev-container-features/aws-cli_0 \
+&& cd /tmp/dev-container-features/aws-cli_0 \
+&& chmod +x ./devcontainer-features-install.sh \
+&& ./devcontainer-features-install.sh
+
+COPY --chown=root:root --from=dev_containers_feature_content_source /tmp/build-features/docker-in-docker_1 /tmp/dev-container-features/docker-in-docker_1
+RUN chmod -R 0755 /tmp/dev-container-features/docker-in-docker_1 \
+&& cd /tmp/dev-container-features/docker-in-docker_1 \
+&& chmod +x ./devcontainer-features-install.sh \
+&& ./devcontainer-features-install.sh
+
+
+ARG _DEV_CONTAINERS_IMAGE_USER=root
+USER $_DEV_CONTAINERS_IMAGE_USER
+"#
+        );
+
+        let uid_dockerfile = files
+            .iter()
+            .find(|f| {
+                f.file_name()
+                    .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile")
+            })
+            .expect("to be found");
+        let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap();
+
+        assert_eq!(
+            &uid_dockerfile,
+            r#"ARG BASE_IMAGE
+FROM $BASE_IMAGE
+
+USER root
+
+ARG REMOTE_USER
+ARG NEW_UID
+ARG NEW_GID
+SHELL ["/bin/sh", "-c"]
+RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \
+	eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \
+	eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \
+	if [ -z "$OLD_UID" ]; then \
+		echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \
+	elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \
+		echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \
+	elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \
+		echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \
+	else \
+		if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \
+			FREE_GID=65532; \
+			while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \
+			echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \
+			sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \
+		fi; \
+		echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \
+		sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \
+		if [ "$OLD_GID" != "$NEW_GID" ]; then \
+			sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \
+		fi; \
+		chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \
+	fi;
+
+ARG IMAGE_USER
+USER $IMAGE_USER
+
+# Ensure that /etc/profile does not clobber the existing path
+RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
+
+
+ENV DOCKER_BUILDKIT=1
+"#
+        );
+    }
+
+    // End-to-end fixture test: a devcontainer.json with "updateRemoteUserUID": false
+    // and a local Dockerfile must produce a Dockerfile.extended that has no
+    // UID-update stage, installs features via BuildKit bind mounts, and merges
+    // "remoteEnv" into every recorded docker-exec environment.
+    #[gpui::test]
+    async fn test_spawns_devcontainer_with_dockerfile_and_no_update_uid(cx: &mut TestAppContext) {
+        cx.executor().allow_parking();
+        env_logger::try_init().ok();
+        // JSONC on purpose: comments and trailing commas exercise the lenient parser.
+        let given_devcontainer_contents = r#"
+            /*---------------------------------------------------------------------------------------------
+             *  Copyright (c) Microsoft Corporation. All rights reserved.
+             *  Licensed under the MIT License. See License.txt in the project root for license information.
+             *--------------------------------------------------------------------------------------------*/
+            {
+              "name": "cli-${devcontainerId}",
+              // "image": "mcr.microsoft.com/devcontainers/typescript-node:16-bullseye",
+              "build": {
+                "dockerfile": "Dockerfile",
+                "args": {
+                  "VARIANT": "18-bookworm",
+                  "FOO": "bar",
+                },
+              },
+              "workspaceMount": "source=${localWorkspaceFolder},target=${containerWorkspaceFolder},type=bind,consistency=cached",
+              "workspaceFolder": "/workspace2",
+              "mounts": [
+                // Keep command history across instances
+                "source=dev-containers-cli-bashhistory,target=/home/node/commandhistory",
+              ],
+
+              "forwardPorts": [
+                8082,
+                8083,
+              ],
+              "appPort": "8084",
+              "updateRemoteUserUID": false,
+
+              "containerEnv": {
+                "VARIABLE_VALUE": "value",
+              },
+
+              "initializeCommand": "touch IAM.md",
+
+              "onCreateCommand": "echo 'onCreateCommand' >> ON_CREATE_COMMAND.md",
+
+              "updateContentCommand": "echo 'updateContentCommand' >> UPDATE_CONTENT_COMMAND.md",
+
+              "postCreateCommand": {
+                "yarn": "yarn install",
+                "debug": "echo 'postStartCommand' >> POST_START_COMMAND.md",
+              },
+
+              "postStartCommand": "echo 'postStartCommand' >> POST_START_COMMAND.md",
+
+              "postAttachCommand": "echo 'postAttachCommand' >> POST_ATTACH_COMMAND.md",
+
+              "remoteUser": "node",
+
+              "remoteEnv": {
+                "PATH": "${containerEnv:PATH}:/some/other/path",
+                "OTHER_ENV": "other_env_value"
+              },
+
+              "features": {
+                "ghcr.io/devcontainers/features/docker-in-docker:2": {
+                  "moby": false,
+                },
+                "ghcr.io/devcontainers/features/go:1": {},
+              },
+
+              "customizations": {
+                "vscode": {
+                  "extensions": [
+                    "dbaeumer.vscode-eslint",
+                    "GitHub.vscode-pull-request-github",
+                  ],
+                },
+                "zed": {
+                  "extensions": ["vue", "ruby"],
+                },
+                "codespaces": {
+                  "repositories": {
+                    "devcontainers/features": {
+                      "permissions": {
+                        "contents": "write",
+                        "workflows": "write",
+                      },
+                    },
+                  },
+                },
+              },
+            }
+            "#;
+
+        let (test_dependencies, mut devcontainer_manifest) =
+            init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+                .await
+                .unwrap();
+
+        // Write the Dockerfile referenced by the config's "build.dockerfile" entry.
+        test_dependencies
+            .fs
+            .atomic_write(
+                PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"),
+                r#"
+#  Copyright (c) Microsoft Corporation. All rights reserved.
+#  Licensed under the MIT License. See License.txt in the project root for license information.
+ARG VARIANT="16-bullseye"
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT}
+
+RUN mkdir -p /workspaces && chown node:node /workspaces
+
+ARG USERNAME=node
+USER $USERNAME
+
+# Save command line history
+RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/home/$USERNAME/.bashrc" \
+&& echo "export PROMPT_COMMAND='history -a'" >> "/home/$USERNAME/.bashrc" \
+&& mkdir -p /home/$USERNAME/commandhistory \
+&& touch /home/$USERNAME/commandhistory/.bash_history \
+&& chown -R $USERNAME /home/$USERNAME/commandhistory
+                    "#.trim().to_string(),
+            )
+            .await
+            .unwrap();
+
+        devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+        let devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap();
+
+        // Only the "zed" customization's extension list should surface.
+        assert_eq!(
+            devcontainer_up.extension_ids,
+            vec!["vue".to_string(), "ruby".to_string()]
+        );
+
+        let files = test_dependencies.fs.files();
+        let feature_dockerfile = files
+            .iter()
+            .find(|f| {
+                f.file_name()
+                    .is_some_and(|s| s.display().to_string() == "Dockerfile.extended")
+            })
+            .expect("to be found");
+        let feature_dockerfile = test_dependencies.fs.load(feature_dockerfile).await.unwrap();
+        // Expected: BuildKit `RUN --mount` feature installs and, because
+        // "updateRemoteUserUID" is false, no UID-update stage.
+        assert_eq!(
+            &feature_dockerfile,
+            r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder
+
+#  Copyright (c) Microsoft Corporation. All rights reserved.
+#  Licensed under the MIT License. See License.txt in the project root for license information.
+ARG VARIANT="16-bullseye"
+FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label
+
+RUN mkdir -p /workspaces && chown node:node /workspaces
+
+ARG USERNAME=node
+USER $USERNAME
+
+# Save command line history
+RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/home/$USERNAME/.bashrc" \
+&& echo "export PROMPT_COMMAND='history -a'" >> "/home/$USERNAME/.bashrc" \
+&& mkdir -p /home/$USERNAME/commandhistory \
+&& touch /home/$USERNAME/commandhistory/.bash_history \
+&& chown -R $USERNAME /home/$USERNAME/commandhistory
+
+FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize
+USER root
+COPY --from=dev_containers_feature_content_source ./devcontainer-features.builtin.env /tmp/build-features/
+RUN chmod -R 0755 /tmp/build-features/
+
+FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_target_stage
+
+USER root
+
+RUN mkdir -p /tmp/dev-container-features
+COPY --from=dev_containers_feature_content_normalize /tmp/build-features/ /tmp/dev-container-features
+
+RUN \
+echo "_CONTAINER_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'root' || grep -E '^root|^[^:]*:[^:]*:root:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env && \
+echo "_REMOTE_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'node' || grep -E '^node|^[^:]*:[^:]*:node:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env
+
+
+RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./docker-in-docker_0,target=/tmp/build-features-src/docker-in-docker_0 \
+cp -ar /tmp/build-features-src/docker-in-docker_0 /tmp/dev-container-features \
+&& chmod -R 0755 /tmp/dev-container-features/docker-in-docker_0 \
+&& cd /tmp/dev-container-features/docker-in-docker_0 \
+&& chmod +x ./devcontainer-features-install.sh \
+&& ./devcontainer-features-install.sh \
+&& rm -rf /tmp/dev-container-features/docker-in-docker_0
+
+RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./go_1,target=/tmp/build-features-src/go_1 \
+cp -ar /tmp/build-features-src/go_1 /tmp/dev-container-features \
+&& chmod -R 0755 /tmp/dev-container-features/go_1 \
+&& cd /tmp/dev-container-features/go_1 \
+&& chmod +x ./devcontainer-features-install.sh \
+&& ./devcontainer-features-install.sh \
+&& rm -rf /tmp/dev-container-features/go_1
+
+
+ARG _DEV_CONTAINERS_IMAGE_USER=root
+USER $_DEV_CONTAINERS_IMAGE_USER
+
+# Ensure that /etc/profile does not clobber the existing path
+RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
+
+ENV DOCKER_BUILDKIT=1
+
+ENV GOPATH=/go
+ENV GOROOT=/usr/local/go
+ENV PATH=/usr/local/go/bin:/go/bin:${PATH}
+ENV VARIABLE_VALUE=value
+"#
+        );
+
+        // The per-feature install wrapper generated for the go feature.
+        let golang_install_wrapper = files
+            .iter()
+            .find(|f| {
+                f.file_name()
+                    .is_some_and(|s| s.display().to_string() == "devcontainer-features-install.sh")
+                    && f.to_str().is_some_and(|s| s.contains("go_"))
+            })
+            .expect("to be found");
+        let golang_install_wrapper = test_dependencies
+            .fs
+            .load(golang_install_wrapper)
+            .await
+            .unwrap();
+        assert_eq!(
+            &golang_install_wrapper,
+            r#"#!/bin/sh
+set -e
+
+on_exit () {
+    [ $? -eq 0 ] && exit
+    echo 'ERROR: Feature "go" (ghcr.io/devcontainers/features/go:1) failed to install!'
+}
+
+trap on_exit EXIT
+
+echo ===========================================================================
+echo 'Feature       : go'
+echo 'Id            : ghcr.io/devcontainers/features/go:1'
+echo 'Options       :'
+echo '    GOLANGCILINTVERSION=latest
+    VERSION=latest'
+echo ===========================================================================
+
+set -a
+. ../devcontainer-features.builtin.env
+. ./devcontainer-features.env
+set +a
+
+chmod +x ./install.sh
+./install.sh
+"#
+        );
+
+        let docker_commands = test_dependencies
+            .command_runner
+            .commands_by_program("docker");
+
+        let docker_run_command = docker_commands
+            .iter()
+            .find(|c| c.args.get(0).is_some_and(|a| a == "run"));
+
+        assert!(docker_run_command.is_some());
+
+        let docker_exec_commands = test_dependencies
+            .docker
+            .exec_commands_recorded
+            .lock()
+            .unwrap();
+
+        // remoteEnv must be merged over the container's PATH (the FakeDocker
+        // "cli_" inspect fixture reports PATH=/initial/path).
+        // NOTE(review): `all` is vacuously true when no execs were recorded —
+        // consider also asserting the recording is non-empty.
+        assert!(docker_exec_commands.iter().all(|exec| {
+            exec.env
+                == HashMap::from([
+                    ("OTHER_ENV".to_string(), "other_env_value".to_string()),
+                    (
+                        "PATH".to_string(),
+                        "/initial/path:/some/other/path".to_string(),
+                    ),
+                ])
+        }))
+    }
+
+    /// One call captured by `FakeDocker::run_docker_exec`. Underscore-prefixed
+    /// fields are recorded but not currently asserted on by any test.
+    pub(crate) struct RecordedExecCommand {
+        pub(crate) _container_id: String,
+        pub(crate) _remote_folder: String,
+        pub(crate) _user: String,
+        // Environment that would have been passed to the exec'd command.
+        pub(crate) env: HashMap<String, String>,
+        pub(crate) _inner_command: Command,
+    }
+
+    /// Test double for `DockerClient`: serves canned `inspect` fixtures and
+    /// records `run_docker_exec` calls instead of executing anything.
+    pub(crate) struct FakeDocker {
+        exec_commands_recorded: Mutex<Vec<RecordedExecCommand>>,
+        // When true, the fake reports podman CLI behavior.
+        podman: bool,
+    }
+
+    impl FakeDocker {
+        /// Builds a fake docker client in its default (non-podman) mode with
+        /// an empty exec-command log.
+        pub(crate) fn new() -> Self {
+            let exec_commands_recorded = Mutex::new(Vec::new());
+            Self {
+                exec_commands_recorded,
+                podman: false,
+            }
+        }
+
+        /// Flips the fake between docker and podman behavior for tests.
+        #[cfg(not(target_os = "windows"))]
+        fn set_podman(&mut self, podman: bool) {
+            self.podman = podman;
+        }
+    }
+
+    // Canned `docker inspect` fixtures keyed by image/container id, plus
+    // recording stubs for the remaining `DockerClient` operations.
+    #[async_trait]
+    impl DockerClient for FakeDocker {
+        async fn inspect(&self, id: &String) -> Result<DockerInspect, DevContainerError> {
+            // Base typescript-node image: metadata label selects remote user "node".
+            if id == "mcr.microsoft.com/devcontainers/typescript-node:1-18-bookworm" {
+                return Ok(DockerInspect {
+                    id: "sha256:610e6cfca95280188b021774f8cf69dd6f49bdb6eebc34c5ee2010f4d51cc104"
+                        .to_string(),
+                    config: DockerInspectConfig {
+                        labels: DockerConfigLabels {
+                            metadata: Some(vec![HashMap::from([(
+                                "remoteUser".to_string(),
+                                Value::String("node".to_string()),
+                            )])]),
+                        },
+                        env: Vec::new(),
+                        image_user: Some("root".to_string()),
+                    },
+                    mounts: None,
+                    state: None,
+                });
+            }
+            // Base rust image: metadata label selects remote user "vscode".
+            if id == "mcr.microsoft.com/devcontainers/rust:2-1-bookworm" {
+                return Ok(DockerInspect {
+                    id: "sha256:39ad1c7264794d60e3bc449d9d8877a8e486d19ad8fba80f5369def6a2408392"
+                        .to_string(),
+                    config: DockerInspectConfig {
+                        labels: DockerConfigLabels {
+                            metadata: Some(vec![HashMap::from([(
+                                "remoteUser".to_string(),
+                                Value::String("vscode".to_string()),
+                            )])]),
+                        },
+                        image_user: Some("root".to_string()),
+                        env: Vec::new(),
+                    },
+                    mounts: None,
+                    state: None,
+                });
+            }
+            // Built "cli_*" container: carries PATH=/initial/path, which the
+            // remoteEnv merge tests assert against.
+            if id.starts_with("cli_") {
+                return Ok(DockerInspect {
+                    id: "sha256:610e6cfca95280188b021774f8cf69dd6f49bdb6eebc34c5ee2010f4d51cc105"
+                        .to_string(),
+                    config: DockerInspectConfig {
+                        labels: DockerConfigLabels {
+                            metadata: Some(vec![HashMap::from([(
+                                "remoteUser".to_string(),
+                                Value::String("node".to_string()),
+                            )])]),
+                        },
+                        image_user: Some("root".to_string()),
+                        env: vec!["PATH=/initial/path".to_string()],
+                    },
+                    mounts: None,
+                    state: None,
+                });
+            }
+            // Container discovered via `find_process_by_filters`: the only
+            // fixture that reports a workspace bind mount.
+            if id == "found_docker_ps" {
+                return Ok(DockerInspect {
+                    id: "sha256:610e6cfca95280188b021774f8cf69dd6f49bdb6eebc34c5ee2010f4d51cc105"
+                        .to_string(),
+                    config: DockerInspectConfig {
+                        labels: DockerConfigLabels {
+                            metadata: Some(vec![HashMap::from([(
+                                "remoteUser".to_string(),
+                                Value::String("node".to_string()),
+                            )])]),
+                        },
+                        image_user: Some("root".to_string()),
+                        env: vec!["PATH=/initial/path".to_string()],
+                    },
+                    mounts: Some(vec![DockerInspectMount {
+                        source: "/path/to/local/project".to_string(),
+                        destination: "/workspaces/project".to_string(),
+                    }]),
+                    state: None,
+                });
+            }
+            // Compose-based project container ("rust_a-*") -- presumably named
+            // after the compose project; TODO confirm against compose tests.
+            if id.starts_with("rust_a-") {
+                return Ok(DockerInspect {
+                    id: "sha256:9da65c34ab809e763b13d238fd7a0f129fcabd533627d340f293308cb63620a0"
+                        .to_string(),
+                    config: DockerInspectConfig {
+                        labels: DockerConfigLabels {
+                            metadata: Some(vec![HashMap::from([(
+                                "remoteUser".to_string(),
+                                Value::String("vscode".to_string()),
+                            )])]),
+                        },
+                        image_user: Some("root".to_string()),
+                        env: Vec::new(),
+                    },
+                    mounts: None,
+                    state: None,
+                });
+            }
+
+            // Any other id is treated as unknown.
+            Err(DevContainerError::DockerNotAvailable)
+        }
+        // Returns a two-service (app + db) compose fixture, but only for the
+        // one known compose file path; anything else errors.
+        async fn get_docker_compose_config(
+            &self,
+            config_files: &Vec<PathBuf>,
+        ) -> Result<Option<DockerComposeConfig>, DevContainerError> {
+            if config_files.len() == 1
+                && config_files.get(0)
+                    == Some(&PathBuf::from(
+                        "/path/to/local/project/.devcontainer/docker-compose.yml",
+                    ))
+            {
+                return Ok(Some(DockerComposeConfig {
+                    name: None,
+                    services: HashMap::from([
+                        (
+                            "app".to_string(),
+                            DockerComposeService {
+                                build: Some(DockerComposeServiceBuild {
+                                    context: Some(".".to_string()),
+                                    dockerfile: Some("Dockerfile".to_string()),
+                                    args: None,
+                                    additional_contexts: None,
+                                }),
+                                volumes: vec![MountDefinition {
+                                    source: "../..".to_string(),
+                                    target: "/workspaces".to_string(),
+                                    mount_type: Some("bind".to_string()),
+                                }],
+                                network_mode: Some("service:db".to_string()),
+                                ..Default::default()
+                            },
+                        ),
+                        (
+                            "db".to_string(),
+                            DockerComposeService {
+                                image: Some("postgres:14.1".to_string()),
+                                volumes: vec![MountDefinition {
+                                    source: "postgres-data".to_string(),
+                                    target: "/var/lib/postgresql/data".to_string(),
+                                    mount_type: Some("volume".to_string()),
+                                }],
+                                env_file: Some(vec![".env".to_string()]),
+                                ..Default::default()
+                            },
+                        ),
+                    ]),
+                    volumes: HashMap::from([(
+                        "postgres-data".to_string(),
+                        DockerComposeVolume::default(),
+                    )]),
+                }));
+            }
+            Err(DevContainerError::DockerNotAvailable)
+        }
+        // Compose builds are a no-op success in tests.
+        async fn docker_compose_build(
+            &self,
+            _config_files: &Vec<PathBuf>,
+            _project_name: &str,
+        ) -> Result<(), DevContainerError> {
+            Ok(())
+        }
+        // Records the exec instead of running it; tests read the log back via
+        // `exec_commands_recorded`.
+        async fn run_docker_exec(
+            &self,
+            container_id: &str,
+            remote_folder: &str,
+            user: &str,
+            env: &HashMap<String, String>,
+            inner_command: Command,
+        ) -> Result<(), DevContainerError> {
+            let mut record = self
+                .exec_commands_recorded
+                .lock()
+                .expect("should be available");
+            record.push(RecordedExecCommand {
+                _container_id: container_id.to_string(),
+                _remote_folder: remote_folder.to_string(),
+                _user: user.to_string(),
+                env: env.clone(),
+                _inner_command: inner_command,
+            });
+            Ok(())
+        }
+        // No test currently starts a stopped container, so this always errors.
+        async fn start_container(&self, _id: &str) -> Result<(), DevContainerError> {
+            Err(DevContainerError::DockerNotAvailable)
+        }
+        // Always "finds" the container the `found_docker_ps` inspect fixture serves.
+        async fn find_process_by_filters(
+            &self,
+            _filters: Vec<String>,
+        ) -> Result<Option<DockerPs>, DevContainerError> {
+            Ok(Some(DockerPs {
+                id: "found_docker_ps".to_string(),
+            }))
+        }
+        // Mirrors the real client: podman has no compose BuildKit support here.
+        fn supports_compose_buildkit(&self) -> bool {
+            !self.podman
+        }
+        fn docker_cli(&self) -> String {
+            if self.podman {
+                "podman".to_string()
+            } else {
+                "docker".to_string()
+            }
+        }
+    }
+
+    /// A recorded external command invocation: program name plus argument list.
+    #[derive(Debug, Clone)]
+    pub(crate) struct TestCommand {
+        pub(crate) program: String,
+        pub(crate) args: Vec<String>,
+    }
+
+    /// `CommandRunner` test double that logs commands instead of spawning them.
+    pub(crate) struct TestCommandRunner {
+        commands_recorded: Mutex<Vec<TestCommand>>,
+    }
+
+    impl TestCommandRunner {
+        /// Creates a runner with an empty command log.
+        fn new() -> Self {
+            Self {
+                commands_recorded: Mutex::new(Vec::new()),
+            }
+        }
+
+        /// Returns every recorded invocation of `program`, in recording order.
+        fn commands_by_program(&self, program: &str) -> Vec<TestCommand> {
+            let record = self.commands_recorded.lock().expect("poisoned");
+            record
+                .iter()
+                .filter(|r| r.program == program)
+                // `.cloned()` replaces the clippy-flagged `.map(|r| r.clone())`.
+                .cloned()
+                .collect()
+        }
+    }
+
+    #[async_trait]
+    impl CommandRunner for TestCommandRunner {
+        /// Records the program and arguments of `command` without executing
+        /// anything, then reports an empty, successful `Output`.
+        async fn run_command(&self, command: &mut Command) -> Result<Output, std::io::Error> {
+            let program = command.get_program().display().to_string();
+            let args = command
+                .get_args()
+                .map(|arg| arg.display().to_string())
+                .collect();
+
+            self.commands_recorded
+                .lock()
+                .expect("poisoned")
+                .push(TestCommand { program, args });
+
+            Ok(Output {
+                status: ExitStatus::default(),
+                stdout: Vec::new(),
+                stderr: Vec::new(),
+            })
+        }
+    }
+
+    fn fake_http_client() -> Arc<dyn HttpClient> {
+        FakeHttpClient::create(|request| async move {
+            let (parts, _body) = request.into_parts();
+            if parts.uri.path() == "/token" {
+                let token_response = TokenResponse {
+                    token: "token".to_string(),
+                };
+                return Ok(http::Response::builder()
+                    .status(200)
+                    .body(http_client::AsyncBody::from(
+                        serde_json_lenient::to_string(&token_response).unwrap(),
+                    ))
+                    .unwrap());
+            }
+
+            // OCI specific things
+            if parts.uri.path() == "/v2/devcontainers/features/docker-in-docker/manifests/2" {
+                let response = r#"
+                    {
+                        "schemaVersion": 2,
+                        "mediaType": "application/vnd.oci.image.manifest.v1+json",
+                        "config": {
+                            "mediaType": "application/vnd.devcontainers",
+                            "digest": "sha256:44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a",
+                            "size": 2
+                        },
+                        "layers": [
+                            {
+                                "mediaType": "application/vnd.devcontainers.layer.v1+tar",
+                                "digest": "sha256:bc7ab0d8d8339416e1491419ab9ffe931458d0130110f4b18351b0fa184e67d5",
+                                "size": 59392,
+                                "annotations": {
+                                    "org.opencontainers.image.title": "devcontainer-feature-docker-in-docker.tgz"
+                                }
+                            }
+                        ],
+                        "annotations": {

crates/dev_container/src/docker.rs πŸ”—

@@ -0,0 +1,898 @@
+use std::{collections::HashMap, path::PathBuf};
+
+use async_trait::async_trait;
+use serde::{Deserialize, Deserializer, Serialize};
+use util::command::Command;
+
+use crate::{
+    command_json::evaluate_json_command, devcontainer_api::DevContainerError,
+    devcontainer_json::MountDefinition,
+};
+
+/// One row of `docker ps --format '{{ json . }}'` output; only the container
+/// id is consumed by callers.
+#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "PascalCase")]
+pub(crate) struct DockerPs {
+    // Docker emits "ID", which PascalCase renaming alone would miss.
+    #[serde(alias = "ID")]
+    pub(crate) id: String,
+}
+
+/// The `State` object of a `docker inspect` result; only the running flag is modeled.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "PascalCase")]
+pub(crate) struct DockerState {
+    pub(crate) running: bool,
+}
+
+/// Top-level shape of a `docker inspect` result for a container or image.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "PascalCase")]
+pub(crate) struct DockerInspect {
+    pub(crate) id: String,
+    pub(crate) config: DockerInspectConfig,
+    // Optional: absent for images. -- assumed; confirm against docker output.
+    pub(crate) mounts: Option<Vec<DockerInspectMount>>,
+    // Optional: `is_running` treats a missing state as "not running".
+    pub(crate) state: Option<DockerState>,
+}
+
+/// The `Config.Labels` object from `docker inspect`; only the
+/// `devcontainer.metadata` label is modeled, parsed through the custom
+/// `deserialize_metadata` helper.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)]
+pub(crate) struct DockerConfigLabels {
+    #[serde(
+        rename = "devcontainer.metadata",
+        deserialize_with = "deserialize_metadata"
+    )]
+    pub(crate) metadata: Option<Vec<HashMap<String, serde_json_lenient::Value>>>,
+}
+
+/// The `Config` object of a `docker inspect` result.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "PascalCase")]
+pub(crate) struct DockerInspectConfig {
+    pub(crate) labels: DockerConfigLabels,
+    // The user the image runs as ("User" in docker's JSON).
+    #[serde(rename = "User")]
+    pub(crate) image_user: Option<String>,
+    // Raw "KEY=VALUE" entries; see `env_as_map` for the parsed form.
+    #[serde(default)]
+    pub(crate) env: Vec<String>,
+}
+
+impl DockerInspectConfig {
+    /// Parses the raw `KEY=VALUE` strings from docker's `Config.Env` into a map.
+    ///
+    /// Splits on the *first* '=' only, so values that themselves contain '='
+    /// (common for PATH-like or option-bearing variables such as
+    /// `JAVA_TOOL_OPTIONS=-Da=b`) are preserved intact. The previous
+    /// `split("=")`/`len() != 2` check wrongly rejected such valid entries.
+    ///
+    /// Returns `DevContainerError::DevContainerParseFailed` for an entry with
+    /// no '=' separator at all.
+    pub(crate) fn env_as_map(&self) -> Result<HashMap<String, String>, DevContainerError> {
+        let mut map = HashMap::new();
+        for env_var in &self.env {
+            let Some((key, value)) = env_var.split_once('=') else {
+                log::error!("Unable to parse {env_var} into an environment key-value");
+                return Err(DevContainerError::DevContainerParseFailed);
+            };
+            map.insert(key.to_string(), value.to_string());
+        }
+        Ok(map)
+    }
+}
+
+/// One entry of a container's `Mounts` array (host source, container destination).
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "PascalCase")]
+pub(crate) struct DockerInspectMount {
+    pub(crate) source: String,
+    pub(crate) destination: String,
+}
+
+/// The `build` section of a compose service, as emitted by
+/// `docker compose config --format json`.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)]
+pub(crate) struct DockerComposeServiceBuild {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) context: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) dockerfile: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) args: Option<HashMap<String, String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) additional_contexts: Option<HashMap<String, String>>,
+}
+
+/// One service from a resolved docker-compose configuration. Only the fields
+/// this crate consumes are modeled; serde ignores unknown keys by default.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)]
+pub(crate) struct DockerComposeService {
+    pub(crate) image: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) entrypoint: Option<Vec<String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) cap_add: Option<Vec<String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) security_opt: Option<Vec<String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) labels: Option<Vec<String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) build: Option<DockerComposeServiceBuild>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) privileged: Option<bool>,
+    // NOTE(review): no #[serde(default)] here, so a service JSON without a
+    // "volumes" key would fail to deserialize — confirm compose always emits it.
+    pub(crate) volumes: Vec<MountDefinition>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) env_file: Option<Vec<String>>,
+    #[serde(default, skip_serializing_if = "Vec::is_empty")]
+    pub(crate) ports: Vec<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) network_mode: Option<String>,
+}
+
+/// A named volume declared in a compose file.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)]
+pub(crate) struct DockerComposeVolume {
+    pub(crate) name: String,
+}
+
+/// The resolved output of `docker compose config --format json`.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)]
+pub(crate) struct DockerComposeConfig {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) name: Option<String>,
+    pub(crate) services: HashMap<String, DockerComposeService>,
+    pub(crate) volumes: HashMap<String, DockerComposeVolume>,
+}
+
+/// Real `DockerClient` backed by a docker-compatible CLI ("docker" or "podman").
+pub(crate) struct Docker {
+    docker_cli: String,
+}
+
+impl DockerInspect {
+    /// True when `docker inspect` reported `State.Running == true`; a missing
+    /// `State` (e.g. when inspecting an image) counts as not running.
+    pub(crate) fn is_running(&self) -> bool {
+        // `is_some_and` over `map_or(false, …)`: same semantics, and matches
+        // the idiom already used elsewhere in this crate.
+        self.state.as_ref().is_some_and(|s| s.running)
+    }
+}
+
+impl Docker {
+    /// Wraps the given CLI binary name ("docker" or "podman").
+    pub(crate) fn new(docker_cli: &str) -> Self {
+        Self {
+            docker_cli: docker_cli.to_string(),
+        }
+    }
+
+    fn is_podman(&self) -> bool {
+        self.docker_cli == "podman"
+    }
+
+    /// Runs `<cli> pull <image>`; maps both spawn failures and non-zero exits
+    /// to `ResourceFetchFailed`, logging stderr in the latter case.
+    async fn pull_image(&self, image: &String) -> Result<(), DevContainerError> {
+        let mut command = Command::new(&self.docker_cli);
+        command.args(&["pull", image]);
+
+        let output = command.output().await.map_err(|e| {
+            log::error!("Error pulling image: {e}");
+            DevContainerError::ResourceFetchFailed
+        })?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            log::error!("Non-success result from docker pull: {stderr}");
+            return Err(DevContainerError::ResourceFetchFailed);
+        }
+        Ok(())
+    }
+
+    /// Builds `<cli> ps -a [--filter <f>]… --format={{ json . }}`: lists all
+    /// containers (including stopped ones) as JSON lines.
+    fn create_docker_query_containers(&self, filters: Vec<String>) -> Command {
+        let mut command = Command::new(&self.docker_cli);
+        command.args(&["ps", "-a"]);
+
+        for filter in filters {
+            command.arg("--filter");
+            command.arg(filter);
+        }
+        command.arg("--format={{ json . }}");
+        command
+    }
+
+    /// Builds `<cli> inspect --format={{json . }} <id>`.
+    fn create_docker_inspect(&self, id: &str) -> Command {
+        let mut command = Command::new(&self.docker_cli);
+        command.args(&["inspect", "--format={{json . }}", id]);
+        command
+    }
+
+    /// Builds `<cli> compose -f <file>… config --format json` for resolving
+    /// the merged compose configuration.
+    fn create_docker_compose_config_command(&self, config_files: &Vec<PathBuf>) -> Command {
+        let mut command = Command::new(&self.docker_cli);
+        command.arg("compose");
+        for file_path in config_files {
+            command.args(&["-f", &file_path.display().to_string()]);
+        }
+        command.args(&["config", "--format", "json"]);
+        command
+    }
+}
+
+#[async_trait]
+impl DockerClient for Docker {
+    /// Inspects an image or container by reference and deserializes the
+    /// `docker inspect` JSON output.
+    async fn inspect(&self, id: &String) -> Result<DockerInspect, DevContainerError> {
+        // Best-effort pull: the id may be a local-only image or a reference
+        // to a running container, so a pull failure is deliberately ignored.
+        self.pull_image(id).await.ok();
+
+        let command = self.create_docker_inspect(id);
+
+        let Some(docker_inspect): Option<DockerInspect> = evaluate_json_command(command).await?
+        else {
+            log::error!("Docker inspect produced no deserializable output");
+            return Err(DevContainerError::CommandFailed(self.docker_cli.clone()));
+        };
+        Ok(docker_inspect)
+    }
+
+    /// Runs `compose config --format json` over the given files and returns
+    /// the parsed result, or `None` when compose produced no output.
+    async fn get_docker_compose_config(
+        &self,
+        config_files: &Vec<PathBuf>,
+    ) -> Result<Option<DockerComposeConfig>, DevContainerError> {
+        let command = self.create_docker_compose_config_command(config_files);
+        evaluate_json_command(command).await
+    }
+
+    /// Runs `compose build` for `project_name` over the given compose files.
+    /// BuildKit is enabled unless the CLI is podman.
+    async fn docker_compose_build(
+        &self,
+        config_files: &Vec<PathBuf>,
+        project_name: &str,
+    ) -> Result<(), DevContainerError> {
+        let mut command = Command::new(&self.docker_cli);
+        if !self.is_podman() {
+            command.env("DOCKER_BUILDKIT", "1");
+        }
+        command.args(&["compose", "--project-name", project_name]);
+        for docker_compose_file in config_files {
+            command.args(&["-f", &docker_compose_file.display().to_string()]);
+        }
+        command.arg("build");
+
+        let output = command.output().await.map_err(|e| {
+            // This runs `compose build`, not `compose up` — keep logs accurate.
+            log::error!("Error running docker compose build: {e}");
+            DevContainerError::CommandFailed(command.get_program().display().to_string())
+        })?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            log::error!("Non-success status from docker compose build: {stderr}");
+            return Err(DevContainerError::CommandFailed(
+                command.get_program().display().to_string(),
+            ));
+        }
+
+        Ok(())
+    }
+
+    /// Executes `inner_command` inside `container_id` via `docker exec`,
+    /// running it under `sh -c` in `remote_folder` as `user` with `env` set.
+    ///
+    /// NOTE(review): the inner program and its args are joined with spaces
+    /// and no shell quoting before being handed to `sh -c`, so arguments
+    /// containing whitespace or shell metacharacters will be mis-tokenized —
+    /// confirm callers only pass safe arguments. A non-zero exit status is
+    /// logged but still returns `Ok(())` (best-effort semantics).
+    async fn run_docker_exec(
+        &self,
+        container_id: &str,
+        remote_folder: &str,
+        user: &str,
+        env: &HashMap<String, String>,
+        inner_command: Command,
+    ) -> Result<(), DevContainerError> {
+        let mut command = Command::new(&self.docker_cli);
+
+        command.args(&["exec", "-w", remote_folder, "-u", user]);
+
+        for (k, v) in env.iter() {
+            command.arg("-e");
+            let env_declaration = format!("{}={}", k, v);
+            command.arg(&env_declaration);
+        }
+
+        command.arg(container_id);
+
+        command.arg("sh");
+
+        // Flatten the inner command into a single `sh -c` script string.
+        let mut inner_program_script: Vec<String> =
+            vec![inner_command.get_program().display().to_string()];
+        let mut args: Vec<String> = inner_command
+            .get_args()
+            .map(|arg| arg.display().to_string())
+            .collect();
+        inner_program_script.append(&mut args);
+        command.args(&["-c", &inner_program_script.join(" ")]);
+
+        let output = command.output().await.map_err(|e| {
+            log::error!("Error running command {e} in container exec");
+            DevContainerError::ContainerNotValid(container_id.to_string())
+        })?;
+        if !output.status.success() {
+            let std_err = String::from_utf8_lossy(&output.stderr);
+            log::error!("Command produced a non-successful output. StdErr: {std_err}");
+        }
+        let std_out = String::from_utf8_lossy(&output.stdout);
+        log::debug!("Command output:\n {std_out}");
+
+        Ok(())
+    }
+
+    /// Starts an existing (stopped) container via `<cli> start`.
+    async fn start_container(&self, id: &str) -> Result<(), DevContainerError> {
+        let mut command = Command::new(&self.docker_cli);
+
+        command.args(&["start", id]);
+
+        let output = command.output().await.map_err(|e| {
+            log::error!("Error running docker start: {e}");
+            DevContainerError::CommandFailed(command.get_program().display().to_string())
+        })?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            log::error!("Non-success status from docker start: {stderr}");
+            return Err(DevContainerError::CommandFailed(
+                command.get_program().display().to_string(),
+            ));
+        }
+
+        Ok(())
+    }
+
+    /// Lists containers matching the given `--filter` expressions, returning
+    /// the first match (if any) parsed from the JSON-per-line output.
+    async fn find_process_by_filters(
+        &self,
+        filters: Vec<String>,
+    ) -> Result<Option<DockerPs>, DevContainerError> {
+        let command = self.create_docker_query_containers(filters);
+        evaluate_json_command(command).await
+    }
+
+    fn docker_cli(&self) -> String {
+        self.docker_cli.clone()
+    }
+
+    // Podman's compose does not support BuildKit.
+    fn supports_compose_buildkit(&self) -> bool {
+        !self.is_podman()
+    }
+}
+
+/// Abstraction over the Docker/Podman CLI so container operations can be
+/// mocked in tests.
+#[async_trait]
+pub(crate) trait DockerClient {
+    /// Inspects an image or container and returns the parsed inspect JSON.
+    async fn inspect(&self, id: &String) -> Result<DockerInspect, DevContainerError>;
+    /// Runs `compose config --format json`; `None` when nothing was emitted.
+    async fn get_docker_compose_config(
+        &self,
+        config_files: &Vec<PathBuf>,
+    ) -> Result<Option<DockerComposeConfig>, DevContainerError>;
+    /// Runs `compose build` for the given project over the compose files.
+    async fn docker_compose_build(
+        &self,
+        config_files: &Vec<PathBuf>,
+        project_name: &str,
+    ) -> Result<(), DevContainerError>;
+    /// Executes `inner_command` inside a container via `exec` + `sh -c`.
+    async fn run_docker_exec(
+        &self,
+        container_id: &str,
+        remote_folder: &str,
+        user: &str,
+        env: &HashMap<String, String>,
+        inner_command: Command,
+    ) -> Result<(), DevContainerError>;
+    /// Starts an existing (stopped) container.
+    async fn start_container(&self, id: &str) -> Result<(), DevContainerError>;
+    /// Finds the first container matching the given `--filter` expressions.
+    async fn find_process_by_filters(
+        &self,
+        filters: Vec<String>,
+    ) -> Result<Option<DockerPs>, DevContainerError>;
+    /// Whether `compose build` may enable BuildKit (false for podman).
+    fn supports_compose_buildkit(&self) -> bool;
+    /// This operates as an escape hatch for more custom uses of the docker API.
+    /// See DevContainerManifest::create_docker_build as an example
+    fn docker_cli(&self) -> String;
+}
+
+/// Serde helper for fields that arrive as an embedded JSON *string* holding
+/// an array of objects; an absent field deserializes to `None`.
+fn deserialize_metadata<'de, D>(
+    deserializer: D,
+) -> Result<Option<Vec<HashMap<String, serde_json_lenient::Value>>>, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let raw: Option<String> = Option::deserialize(deserializer)?;
+    // Parse the embedded JSON when present; `transpose` turns
+    // `Option<Result<_>>` into the `Result<Option<_>>` serde expects.
+    raw.map(|json_string| {
+        serde_json_lenient::from_str(&json_string).map_err(|e| {
+            log::error!("Error deserializing metadata: {e}");
+            serde::de::Error::custom(e)
+        })
+    })
+    .transpose()
+}
+
+/// Resolves the path of `local_dir` inside the container by matching it
+/// against the container's bind mounts.
+///
+/// Returns `ContainerNotValid` when the container has no mounts or none of
+/// them contain `local_dir`.
+pub(crate) fn get_remote_dir_from_config(
+    config: &DockerInspect,
+    local_dir: String,
+) -> Result<String, DevContainerError> {
+    let local_path = PathBuf::from(&local_dir);
+
+    let Some(mounts) = &config.mounts else {
+        log::error!("No mounts defined for container");
+        return Err(DevContainerError::ContainerNotValid(config.id.clone()));
+    };
+
+    for mount in mounts {
+        // Sometimes docker will mount the local filesystem on host_mnt for system isolation
+        let mount_source = PathBuf::from(&mount.source.trim_start_matches("/host_mnt"));
+        if let Ok(relative_path_to_project) = local_path.strip_prefix(&mount_source) {
+            // When the mount source *is* the project directory, the relative
+            // path is empty; return the destination as-is rather than a path
+            // with a trailing slash ("dest/"). Previously this case fell
+            // through the format! below, making the equality branch under it
+            // unreachable for exact matches.
+            if relative_path_to_project.as_os_str().is_empty() {
+                return Ok(mount.destination.clone());
+            }
+            let remote_dir = format!(
+                "{}/{}",
+                &mount.destination,
+                relative_path_to_project.display()
+            );
+            return Ok(remote_dir);
+        }
+        if mount.source == local_dir {
+            return Ok(mount.destination.clone());
+        }
+    }
+    log::error!("No mounts to local folder");
+    Err(DevContainerError::ContainerNotValid(config.id.clone()))
+}
+
+#[cfg(test)]
+mod test {
+    use std::{
+        collections::HashMap,
+        ffi::OsStr,
+        process::{ExitStatus, Output},
+    };
+
+    use crate::{
+        command_json::deserialize_json_output,
+        devcontainer_json::MountDefinition,
+        docker::{
+            Docker, DockerComposeConfig, DockerComposeService, DockerComposeVolume, DockerInspect,
+            DockerPs, get_remote_dir_from_config,
+        },
+    };
+
+    #[test]
+    fn should_create_docker_inspect_command() {
+        let docker = Docker::new("docker");
+        let given_id = "given_docker_id";
+
+        let command = docker.create_docker_inspect(given_id);
+
+        assert_eq!(
+            command.get_args().collect::<Vec<&OsStr>>(),
+            vec![
+                OsStr::new("inspect"),
+                OsStr::new("--format={{json . }}"),
+                OsStr::new(given_id)
+            ]
+        )
+    }
+
+    #[test]
+    fn should_deserialize_docker_ps_with_filters() {
+        // First, deserializes empty
+        let empty_output = Output {
+            status: ExitStatus::default(),
+            stderr: vec![],
+            stdout: String::from("").into_bytes(),
+        };
+
+        let result: Option<DockerPs> = deserialize_json_output(empty_output).unwrap();
+
+        assert!(result.is_none());
+
+        let full_output = Output {
+                status: ExitStatus::default(),
+                stderr: vec![],
+                stdout: String::from(r#"
+    {
+        "Command": "\"/bin/sh -c 'echo Co…\"",
+        "CreatedAt": "2026-02-04 15:44:21 -0800 PST",
+        "ID": "abdb6ab59573",
+        "Image": "mcr.microsoft.com/devcontainers/base:ubuntu",

crates/dev_container/src/features.rs πŸ”—

@@ -0,0 +1,254 @@
+use std::{collections::HashMap, path::PathBuf, sync::Arc};
+
+use fs::Fs;
+use serde::Deserialize;
+use serde_json_lenient::Value;
+
+use crate::{
+    devcontainer_api::DevContainerError,
+    devcontainer_json::{FeatureOptions, MountDefinition},
+    safe_id_upper,
+};
+
+/// Parsed components of an OCI feature reference such as
+/// `ghcr.io/devcontainers/features/aws-cli:1`.
+///
+/// Mirrors the CLI's `OCIRef` in `containerCollectionsOCI.ts`.
+#[derive(Debug, Clone)]
+pub(crate) struct OciFeatureRef {
+    /// Registry hostname, e.g. `ghcr.io`
+    pub registry: String,
+    /// Full repository path within the registry, e.g. `devcontainers/features/aws-cli`
+    pub path: String,
+    /// Version tag, digest (e.g. `sha256:…`), or `latest`
+    pub version: String,
+}
+
+/// Minimal representation of a `devcontainer-feature.json` file, used to
+/// extract option default values after the feature tarball is downloaded.
+///
+/// See: https://containers.dev/implementors/features/#devcontainer-featurejson-properties
+#[derive(Debug, Deserialize, Eq, PartialEq, Default)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct DevContainerFeatureJson {
+    // Parsed but unused; kept so unknown-field behavior matches the schema.
+    #[serde(rename = "id")]
+    pub(crate) _id: Option<String>,
+    #[serde(default)]
+    pub(crate) options: HashMap<String, FeatureOptionDefinition>,
+    pub(crate) mounts: Option<Vec<MountDefinition>>,
+    pub(crate) privileged: Option<bool>,
+    pub(crate) entrypoint: Option<String>,
+    // Maps from the JSON key `containerEnv` via `rename_all = "camelCase"`.
+    pub(crate) container_env: Option<HashMap<String, String>>,
+}
+
+/// A single option definition inside `devcontainer-feature.json`.
+/// We only need the `default` field to populate env variables.
+#[derive(Debug, Deserialize, Eq, PartialEq)]
+pub(crate) struct FeatureOptionDefinition {
+    pub(crate) default: Option<Value>,
+}
+
+impl FeatureOptionDefinition {
+    /// Renders the default value as the plain string written into the env
+    /// file. The explicit `String` arm is load-bearing: letting strings fall
+    /// through to the catch-all would use the JSON `Display` impl and add
+    /// surrounding quotes. The `Bool`/`Number` arms spell out the intent for
+    /// the common scalar cases.
+    fn serialize_default(&self) -> Option<String> {
+        self.default.as_ref().map(|some_value| match some_value {
+            Value::Bool(b) => b.to_string(),
+            Value::String(s) => s.to_string(),
+            Value::Number(n) => n.to_string(),
+            other => other.to_string(),
+        })
+    }
+}
+
+/// A downloaded dev-container feature: its sequential id (used to name
+/// Dockerfile layers), the directory its tarball was extracted to, and the
+/// parsed `devcontainer-feature.json`.
+#[derive(Debug, Eq, PartialEq, Default)]
+pub(crate) struct FeatureManifest {
+    consecutive_id: String,
+    file_path: PathBuf,
+    feature_json: DevContainerFeatureJson,
+}
+
+impl FeatureManifest {
+    /// Ties together the feature's sequential id, on-disk extraction path,
+    /// and parsed feature JSON.
+    pub(crate) fn new(
+        consecutive_id: String,
+        file_path: PathBuf,
+        feature_json: DevContainerFeatureJson,
+    ) -> Self {
+        Self {
+            consecutive_id,
+            file_path,
+            feature_json,
+        }
+    }
+    /// The feature's `containerEnv` map; empty when unspecified.
+    pub(crate) fn container_env(&self) -> HashMap<String, String> {
+        self.feature_json.container_env.clone().unwrap_or_default()
+    }
+
+    /// Emits the Dockerfile fragment that installs this feature under `dest`.
+    ///
+    /// With BuildKit the feature content is bind-mounted from the
+    /// `dev_containers_feature_content_source` stage and removed after the
+    /// install script runs; without BuildKit it is COPY'd into the image and
+    /// left in place.
+    pub(crate) fn generate_dockerfile_feature_layer(
+        &self,
+        use_buildkit: bool,
+        dest: &str,
+    ) -> String {
+        let id = &self.consecutive_id;
+        if use_buildkit {
+            format!(
+                r#"
+RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./{id},target=/tmp/build-features-src/{id} \
+cp -ar /tmp/build-features-src/{id} {dest} \
+&& chmod -R 0755 {dest}/{id} \
+&& cd {dest}/{id} \
+&& chmod +x ./devcontainer-features-install.sh \
+&& ./devcontainer-features-install.sh \
+&& rm -rf {dest}/{id}
+"#,
+            )
+        } else {
+            let source = format!("/tmp/build-features/{id}");
+            let full_dest = format!("{dest}/{id}");
+            format!(
+                r#"
+COPY --chown=root:root --from=dev_containers_feature_content_source {source} {full_dest}
+RUN chmod -R 0755 {full_dest} \
+&& cd {full_dest} \
+&& chmod +x ./devcontainer-features-install.sh \
+&& ./devcontainer-features-install.sh
+"#
+            )
+        }
+    }
+
+    /// Emits one `ENV key=value` Dockerfile line per `containerEnv` entry,
+    /// sorted by key so the generated Dockerfile is deterministic.
+    pub(crate) fn generate_dockerfile_env(&self) -> String {
+        let mut layer = "".to_string();
+        let env = self.container_env();
+        let mut env: Vec<(&String, &String)> = env.iter().collect();
+        env.sort();
+
+        for (key, value) in env {
+            layer = format!("{layer}ENV {key}={value}\n")
+        }
+        layer
+    }
+
+    /// Merges user options from devcontainer.json with default options defined in this feature manifest
+    pub(crate) fn generate_merged_env(&self, options: &FeatureOptions) -> HashMap<String, String> {
+        // Start from the feature's declared defaults, keyed by the env-safe
+        // uppercase form of each option id.
+        let mut merged: HashMap<String, String> = self
+            .feature_json
+            .options
+            .iter()
+            .filter_map(|(k, v)| {
+                v.serialize_default()
+                    .map(|v_some| (safe_id_upper(k), v_some))
+            })
+            .collect();
+
+        match options {
+            // A bare boolean means "enable with defaults": nothing to merge.
+            FeatureOptions::Bool(_) => {}
+            // A bare string is shorthand for the VERSION option.
+            FeatureOptions::String(version) => {
+                merged.insert("VERSION".to_string(), version.clone());
+            }
+            // Explicit options override the declared defaults.
+            FeatureOptions::Options(map) => {
+                for (key, value) in map {
+                    merged.insert(safe_id_upper(key), value.to_string());
+                }
+            }
+        }
+        merged
+    }
+
+    /// Writes the merged option env to `devcontainer-features.env` next to
+    /// the feature content and returns the file's contents. Entries are
+    /// sorted for deterministic output.
+    pub(crate) async fn write_feature_env(
+        &self,
+        fs: &Arc<dyn Fs>,
+        options: &FeatureOptions,
+    ) -> Result<String, DevContainerError> {
+        let merged_env = self.generate_merged_env(options);
+
+        let mut env_vars: Vec<(&String, &String)> = merged_env.iter().collect();
+        env_vars.sort();
+
+        let env_file_content = env_vars
+            .iter()
+            .fold("".to_string(), |acc, (k, v)| format!("{acc}{}={}\n", k, v));
+
+        fs.write(
+            &self.file_path.join("devcontainer-features.env"),
+            env_file_content.as_bytes(),
+        )
+        .await
+        .map_err(|e| {
+            log::error!("error writing devcontainer feature environment: {e}");
+            DevContainerError::FilesystemError
+        })?;
+
+        Ok(env_file_content)
+    }
+
+    /// Mounts requested by the feature; empty when unspecified.
+    pub(crate) fn mounts(&self) -> Vec<MountDefinition> {
+        if let Some(mounts) = &self.feature_json.mounts {
+            mounts.clone()
+        } else {
+            vec![]
+        }
+    }
+
+    /// Whether the feature requires a privileged container (default false).
+    pub(crate) fn privileged(&self) -> bool {
+        self.feature_json.privileged.unwrap_or(false)
+    }
+
+    /// Optional entrypoint the feature wants run at container start.
+    pub(crate) fn entrypoint(&self) -> Option<String> {
+        self.feature_json.entrypoint.clone()
+    }
+
+    /// Directory the feature tarball was extracted to.
+    pub(crate) fn file_path(&self) -> PathBuf {
+        self.file_path.clone()
+    }
+}
+
+/// Parses an OCI feature reference string into its components.
+///
+/// Handles formats like:
+/// - `ghcr.io/devcontainers/features/aws-cli:1`
+/// - `ghcr.io/user/repo/go`  (implicitly `:latest`)
+/// - `ghcr.io/devcontainers/features/rust@sha256:abc123`
+///
+/// Returns `None` for local paths (`./…`) and direct tarball URIs (`https://…`).
+pub(crate) fn parse_oci_feature_ref(input: &str) -> Option<OciFeatureRef> {
+    if input.starts_with('.')
+        || input.starts_with('/')
+        || input.starts_with("https://")
+        || input.starts_with("http://")
+    {
+        return None;
+    }
+
+    // NOTE(review): the whole reference — including the tag/digest — is
+    // lowercased; OCI tags may be case-sensitive, so confirm upstream refs
+    // are always lowercase.
+    let input_lower = input.to_lowercase();
+
+    let (resource, version) = if let Some(at_idx) = input_lower.rfind('@') {
+        // Digest-based: ghcr.io/foo/bar@sha256:abc
+        (
+            input_lower[..at_idx].to_string(),
+            input_lower[at_idx + 1..].to_string(),
+        )
+    } else {
+        // Tag-based: the version colon must appear after the last slash so a
+        // registry port (e.g. `host:5000/ns/name`) is not mistaken for a tag.
+        let last_slash = input_lower.rfind('/');
+        let last_colon = input_lower.rfind(':');
+        match (last_slash, last_colon) {
+            (Some(slash), Some(colon)) if colon > slash => (
+                input_lower[..colon].to_string(),
+                input_lower[colon + 1..].to_string(),
+            ),
+            _ => (input_lower, "latest".to_string()),
+        }
+    };
+
+    // A valid feature ref needs at least registry/namespace/name.
+    let parts: Vec<&str> = resource.split('/').collect();
+    if parts.len() < 3 {
+        return None;
+    }
+
+    let registry = parts[0].to_string();
+    let path = parts[1..].join("/");
+
+    Some(OciFeatureRef {
+        registry,
+        path,
+        version,
+    })
+}

crates/dev_container/src/lib.rs πŸ”—

@@ -1,11 +1,14 @@
 use std::path::Path;
 
+use fs::Fs;
 use gpui::AppContext;
 use gpui::Entity;
 use gpui::Task;
+use gpui::WeakEntity;
 use http_client::anyhow;
 use picker::Picker;
 use picker::PickerDelegate;
+use project::ProjectEnvironment;
 use settings::RegisterSetting;
 use settings::Settings;
 use std::collections::HashMap;
@@ -25,8 +28,9 @@ use ui::Tooltip;
 use ui::h_flex;
 use ui::rems_from_px;
 use ui::v_flex;
+use util::shell::Shell;
 
-use gpui::{Action, DismissEvent, EventEmitter, FocusHandle, Focusable, RenderOnce, WeakEntity};
+use gpui::{Action, DismissEvent, EventEmitter, FocusHandle, Focusable, RenderOnce};
 use serde::Deserialize;
 use ui::{
     AnyElement, App, Color, CommonAnimationExt, Context, Headline, HeadlineSize, Icon, IconName,
@@ -37,40 +41,94 @@ use util::ResultExt;
 use util::rel_path::RelPath;
 use workspace::{ModalView, Workspace, with_active_or_new_workspace};
 
-use futures::AsyncReadExt;
-use http::Request;
-use http_client::{AsyncBody, HttpClient};
+use http_client::HttpClient;
 
+mod command_json;
 mod devcontainer_api;
+mod devcontainer_json;
+mod devcontainer_manifest;
+mod docker;
+mod features;
+mod oci;
 
-use devcontainer_api::ensure_devcontainer_cli;
-use devcontainer_api::read_devcontainer_configuration;
+use devcontainer_api::read_default_devcontainer_configuration;
 
 use crate::devcontainer_api::DevContainerError;
-use crate::devcontainer_api::apply_dev_container_template;
+use crate::devcontainer_api::apply_devcontainer_template;
+use crate::oci::get_deserializable_oci_blob;
+use crate::oci::get_latest_oci_manifest;
+use crate::oci::get_oci_token;
 
 pub use devcontainer_api::{
     DevContainerConfig, find_configs_in_snapshot, find_devcontainer_configs,
     start_dev_container_with_config,
 };
 
+/// Converts a string to a safe environment variable name.
+///
+/// Mirrors the CLI's `getSafeId` in `containerFeatures.ts`:
+/// replaces non-alphanumeric/underscore characters with `_` and collapses a
+/// leading sequence of digits/underscores into a single `_`. `safe_id_upper`
+/// additionally uppercases the result; `safe_id_lower` lowercases it.
+pub(crate) fn safe_id_lower(input: &str) -> String {
+    get_safe_id(input).to_lowercase()
+}
+pub(crate) fn safe_id_upper(input: &str) -> String {
+    get_safe_id(input).to_uppercase()
+}
+fn get_safe_id(input: &str) -> String {
+    let replaced: String = input
+        .chars()
+        .map(|c| {
+            if c.is_alphanumeric() || c == '_' {
+                c
+            } else {
+                '_'
+            }
+        })
+        .collect();
+    // Collapse leading digits/underscores into a single `_` so the result is
+    // a valid identifier start.
+    let without_leading = replaced.trim_start_matches(|c: char| c.is_ascii_digit() || c == '_');
+    if without_leading.len() < replaced.len() {
+        format!("_{}", without_leading)
+    } else {
+        replaced
+    }
+}
+
 pub struct DevContainerContext {
     pub project_directory: Arc<Path>,
     pub use_podman: bool,
-    pub node_runtime: node_runtime::NodeRuntime,
+    pub fs: Arc<dyn Fs>,
+    pub http_client: Arc<dyn HttpClient>,
+    pub environment: WeakEntity<ProjectEnvironment>,
 }
 
 impl DevContainerContext {
     pub fn from_workspace(workspace: &Workspace, cx: &App) -> Option<Self> {
         let project_directory = workspace.project().read(cx).active_project_directory(cx)?;
         let use_podman = DevContainerSettings::get_global(cx).use_podman;
-        let node_runtime = workspace.app_state().node_runtime.clone();
+        let http_client = cx.http_client().clone();
+        let fs = workspace.app_state().fs.clone();
+        let environment = workspace.project().read(cx).environment().downgrade();
         Some(Self {
             project_directory,
             use_podman,
-            node_runtime,
+            fs,
+            http_client,
+            environment,
         })
     }
+
+    pub async fn environment(&self, cx: &mut impl AppContext) -> HashMap<String, String> {
+        let Ok(task) = self.environment.update(cx, |this, cx| {
+            this.local_directory_environment(&Shell::System, self.project_directory.clone(), cx)
+        }) else {
+            return HashMap::default();
+        };
+        task.await
+            .map(|env| env.into_iter().collect::<std::collections::HashMap<_, _>>())
+            .unwrap_or_default()
+    }
 }
 
 #[derive(RegisterSetting)]
@@ -1043,7 +1101,7 @@ impl StatefulModal for DevContainerModal {
                     let Ok(client) = cx.update(|_, cx| cx.http_client()) else {
                         return;
                     };
-                    match get_templates(client).await {
+                    match get_ghcr_templates(client).await {
                         Ok(templates) => {
                             let message =
                                 DevContainerMessage::TemplatesRetrieved(templates.templates);
@@ -1209,7 +1267,7 @@ impl StatefulModal for DevContainerModal {
                     let Ok(client) = cx.update(|_, cx| cx.http_client()) else {
                         return;
                     };
-                    let Some(features) = get_features(client).await.log_err() else {
+                    let Some(features) = get_ghcr_features(client).await.log_err() else {
                         return;
                     };
                     let message = DevContainerMessage::FeaturesRetrieved(features.features);
@@ -1328,17 +1386,7 @@ trait StatefulModal: ModalView + EventEmitter<DismissEvent> + Render {
     }
 }
 
-#[derive(Debug, Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct GithubTokenResponse {
-    token: String,
-}
-
-fn ghcr_url() -> &'static str {
-    "https://ghcr.io"
-}
-
-fn ghcr_domain() -> &'static str {
+fn ghcr_registry() -> &'static str {
     "ghcr.io"
 }
 
@@ -1350,11 +1398,6 @@ fn devcontainer_features_repository() -> &'static str {
     "devcontainers/features"
 }
 
-#[derive(Debug, Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct ManifestLayer {
-    digest: String,
-}
 #[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
 struct TemplateOptions {
@@ -1409,12 +1452,6 @@ impl TemplateOptions {
     }
 }
 
-#[derive(Debug, Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct DockerManifestsResponse {
-    layers: Vec<ManifestLayer>,
-}
-
 #[derive(Debug, Deserialize, Clone, PartialEq, Eq, Hash)]
 #[serde(rename_all = "camelCase")]
 struct DevContainerFeature {
@@ -1480,23 +1517,11 @@ fn dispatch_apply_templates(
             return;
         };
 
-        let Ok(cli) = ensure_devcontainer_cli(&context.node_runtime).await else {
-            this.update_in(cx, |this, window, cx| {
-                this.accept_message(
-                    DevContainerMessage::FailedToWriteTemplate(
-                        DevContainerError::DevContainerCliNotAvailable,
-                    ),
-                    window,
-                    cx,
-                );
-            })
-            .log_err();
-            return;
-        };
+        let environment = context.environment(cx).await;
 
         {
             if check_for_existing
-                && read_devcontainer_configuration(&context, &cli, None)
+                && read_default_devcontainer_configuration(&context, environment)
                     .await
                     .is_ok()
             {
@@ -1511,12 +1536,17 @@ fn dispatch_apply_templates(
                 return;
             }
 
-            let files = match apply_dev_container_template(
+            let worktree = workspace.read_with(cx, |workspace, cx| {
+                workspace.project().read(cx).worktree_for_id(tree_id, cx)
+            });
+
+            let files = match apply_devcontainer_template(
+                worktree.unwrap(),
                 &template_entry.template,
                 &template_entry.options_selected,
                 &template_entry.features_selected,
                 &context,
-                &cli,
+                cx,
             )
             .await
             {
@@ -1524,7 +1554,9 @@ fn dispatch_apply_templates(
                 Err(e) => {
                     this.update_in(cx, |this, window, cx| {
                         this.accept_message(
-                            DevContainerMessage::FailedToWriteTemplate(e),
+                            DevContainerMessage::FailedToWriteTemplate(
+                                DevContainerError::DevContainerTemplateApplyFailed(e.to_string()),
+                            ),
                             window,
                             cx,
                         );
@@ -1534,10 +1566,9 @@ fn dispatch_apply_templates(
                 }
             };
 
-            if files
-                .files
-                .contains(&"./.devcontainer/devcontainer.json".to_string())
-            {
+            if files.project_files.contains(&Arc::from(
+                RelPath::unix(".devcontainer/devcontainer.json").unwrap(),
+            )) {
                 let Some(workspace_task) = workspace
                     .update_in(cx, |workspace, window, cx| {
                         let Ok(path) = RelPath::unix(".devcontainer/devcontainer.json") else {
@@ -1563,250 +1594,90 @@ fn dispatch_apply_templates(
     .detach();
 }
 
-async fn get_templates(
+async fn get_ghcr_templates(
     client: Arc<dyn HttpClient>,
 ) -> Result<DevContainerTemplatesResponse, String> {
-    let token = get_ghcr_token(&client).await?;
-    let manifest = get_latest_manifest(&token.token, &client).await?;
-
-    let mut template_response =
-        get_devcontainer_templates(&token.token, &manifest.layers[0].digest, &client).await?;
+    let token = get_oci_token(
+        ghcr_registry(),
+        devcontainer_templates_repository(),
+        &client,
+    )
+    .await?;
+    let manifest = get_latest_oci_manifest(
+        &token.token,
+        ghcr_registry(),
+        devcontainer_templates_repository(),
+        &client,
+        None,
+    )
+    .await?;
+
+    let mut template_response: DevContainerTemplatesResponse = get_deserializable_oci_blob(
+        &token.token,
+        ghcr_registry(),
+        devcontainer_templates_repository(),
+        &manifest.layers[0].digest,
+        &client,
+    )
+    .await?;
 
     for template in &mut template_response.templates {
         template.source_repository = Some(format!(
             "{}/{}",
-            ghcr_domain(),
+            ghcr_registry(),
             devcontainer_templates_repository()
         ));
     }
     Ok(template_response)
 }
 
-async fn get_features(client: Arc<dyn HttpClient>) -> Result<DevContainerFeaturesResponse, String> {
-    let token = get_ghcr_token(&client).await?;
-    let manifest = get_latest_feature_manifest(&token.token, &client).await?;
+async fn get_ghcr_features(
+    client: Arc<dyn HttpClient>,
+) -> Result<DevContainerFeaturesResponse, String> {
+    // GHCR pull tokens are repository-scoped; request the token for the
+    // *features* repository that the manifest and blob fetches below target,
+    // not the templates repository.
+    let token = get_oci_token(
+        ghcr_registry(),
+        devcontainer_features_repository(),
+        &client,
+    )
+    .await?;
 
-    let mut features_response =
-        get_devcontainer_features(&token.token, &manifest.layers[0].digest, &client).await?;
+    let manifest = get_latest_oci_manifest(
+        &token.token,
+        ghcr_registry(),
+        devcontainer_features_repository(),
+        &client,
+        None,
+    )
+    .await?;
+
+    let mut features_response: DevContainerFeaturesResponse = get_deserializable_oci_blob(
+        &token.token,
+        ghcr_registry(),
+        devcontainer_features_repository(),
+        &manifest.layers[0].digest,
+        &client,
+    )
+    .await?;
 
     for feature in &mut features_response.features {
         feature.source_repository = Some(format!(
             "{}/{}",
-            ghcr_domain(),
+            ghcr_registry(),
             devcontainer_features_repository()
         ));
     }
     Ok(features_response)
 }
 
-async fn get_ghcr_token(client: &Arc<dyn HttpClient>) -> Result<GithubTokenResponse, String> {
-    let url = format!(
-        "{}/token?service=ghcr.io&scope=repository:{}:pull",
-        ghcr_url(),
-        devcontainer_templates_repository()
-    );
-    get_deserialized_response("", &url, client).await
-}
-
-async fn get_latest_feature_manifest(
-    token: &str,
-    client: &Arc<dyn HttpClient>,
-) -> Result<DockerManifestsResponse, String> {
-    let url = format!(
-        "{}/v2/{}/manifests/latest",
-        ghcr_url(),
-        devcontainer_features_repository()
-    );
-    get_deserialized_response(token, &url, client).await
-}
-
-async fn get_latest_manifest(
-    token: &str,
-    client: &Arc<dyn HttpClient>,
-) -> Result<DockerManifestsResponse, String> {
-    let url = format!(
-        "{}/v2/{}/manifests/latest",
-        ghcr_url(),
-        devcontainer_templates_repository()
-    );
-    get_deserialized_response(token, &url, client).await
-}
-
-async fn get_devcontainer_features(
-    token: &str,
-    blob_digest: &str,
-    client: &Arc<dyn HttpClient>,
-) -> Result<DevContainerFeaturesResponse, String> {
-    let url = format!(
-        "{}/v2/{}/blobs/{}",
-        ghcr_url(),
-        devcontainer_features_repository(),
-        blob_digest
-    );
-    get_deserialized_response(token, &url, client).await
-}
-
-async fn get_devcontainer_templates(
-    token: &str,
-    blob_digest: &str,
-    client: &Arc<dyn HttpClient>,
-) -> Result<DevContainerTemplatesResponse, String> {
-    let url = format!(
-        "{}/v2/{}/blobs/{}",
-        ghcr_url(),
-        devcontainer_templates_repository(),
-        blob_digest
-    );
-    get_deserialized_response(token, &url, client).await
-}
-
-async fn get_deserialized_response<T>(
-    token: &str,
-    url: &str,
-    client: &Arc<dyn HttpClient>,
-) -> Result<T, String>
-where
-    T: for<'de> Deserialize<'de>,
-{
-    let request = match Request::get(url)
-        .header("Authorization", format!("Bearer {}", token))
-        .header("Accept", "application/vnd.oci.image.manifest.v1+json")
-        .body(AsyncBody::default())
-    {
-        Ok(request) => request,
-        Err(e) => return Err(format!("Failed to create request: {}", e)),
-    };
-    let response = match client.send(request).await {
-        Ok(response) => response,
-        Err(e) => {
-            return Err(format!("Failed to send request: {}", e));
-        }
-    };
-
-    let mut output = String::new();
-
-    if let Err(e) = response.into_body().read_to_string(&mut output).await {
-        return Err(format!("Failed to read response body: {}", e));
-    };
-
-    match serde_json::from_str(&output) {
-        Ok(response) => Ok(response),
-        Err(e) => Err(format!("Failed to deserialize response: {}", e)),
-    }
-}
-
 #[cfg(test)]
 mod tests {
-    use gpui::TestAppContext;
     use http_client::{FakeHttpClient, anyhow};
 
     use crate::{
-        GithubTokenResponse, devcontainer_templates_repository, get_deserialized_response,
-        get_devcontainer_templates, get_ghcr_token, get_latest_manifest,
+        DevContainerTemplatesResponse, devcontainer_templates_repository,
+        get_deserializable_oci_blob, ghcr_registry,
     };
 
-    #[gpui::test]
-    async fn test_get_deserialized_response(_cx: &mut TestAppContext) {
-        let client = FakeHttpClient::create(|_request| async move {
-            Ok(http_client::Response::builder()
-                .status(200)
-                .body("{ \"token\": \"thisisatoken\" }".into())
-                .unwrap())
-        });
-
-        let response =
-            get_deserialized_response::<GithubTokenResponse>("", "https://ghcr.io/token", &client)
-                .await;
-        assert!(response.is_ok());
-        assert_eq!(response.unwrap().token, "thisisatoken".to_string())
-    }
-
-    #[gpui::test]
-    async fn test_get_ghcr_token() {
-        let client = FakeHttpClient::create(|request| async move {
-            let host = request.uri().host();
-            if host.is_none() || host.unwrap() != "ghcr.io" {
-                return Err(anyhow!("Unexpected host: {}", host.unwrap_or_default()));
-            }
-            let path = request.uri().path();
-            if path != "/token" {
-                return Err(anyhow!("Unexpected path: {}", path));
-            }
-            let query = request.uri().query();
-            if query.is_none()
-                || query.unwrap()
-                    != format!(
-                        "service=ghcr.io&scope=repository:{}:pull",
-                        devcontainer_templates_repository()
-                    )
-            {
-                return Err(anyhow!("Unexpected query: {}", query.unwrap_or_default()));
-            }
-            Ok(http_client::Response::builder()
-                .status(200)
-                .body("{ \"token\": \"thisisatoken\" }".into())
-                .unwrap())
-        });
-
-        let response = get_ghcr_token(&client).await;
-        assert!(response.is_ok());
-        assert_eq!(response.unwrap().token, "thisisatoken".to_string());
-    }
-
-    #[gpui::test]
-    async fn test_get_latest_manifests() {
-        let client = FakeHttpClient::create(|request| async move {
-            let host = request.uri().host();
-            if host.is_none() || host.unwrap() != "ghcr.io" {
-                return Err(anyhow!("Unexpected host: {}", host.unwrap_or_default()));
-            }
-            let path = request.uri().path();
-            if path
-                != format!(
-                    "/v2/{}/manifests/latest",
-                    devcontainer_templates_repository()
-                )
-            {
-                return Err(anyhow!("Unexpected path: {}", path));
-            }
-            Ok(http_client::Response::builder()
-                .status(200)
-                .body("{
-                    \"schemaVersion\": 2,
-                    \"mediaType\": \"application/vnd.oci.image.manifest.v1+json\",
-                    \"config\": {
-                        \"mediaType\": \"application/vnd.devcontainers\",
-                        \"digest\": \"sha256:44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a\",
-                        \"size\": 2
-                    },
-                    \"layers\": [
-                        {
-                            \"mediaType\": \"application/vnd.devcontainers.collection.layer.v1+json\",
-                            \"digest\": \"sha256:035e9c9fd9bd61f6d3965fa4bf11f3ddfd2490a8cf324f152c13cc3724d67d09\",
-                            \"size\": 65235,
-                            \"annotations\": {
-                                \"org.opencontainers.image.title\": \"devcontainer-collection.json\"
-                            }
-                        }
-                    ],
-                    \"annotations\": {
-                        \"com.github.package.type\": \"devcontainer_collection\"
-                    }
-                }".into())
-                .unwrap())
-        });
-
-        let response = get_latest_manifest("", &client).await;
-        assert!(response.is_ok());
-        let response = response.unwrap();
-
-        assert_eq!(response.layers.len(), 1);
-        assert_eq!(
-            response.layers[0].digest,
-            "sha256:035e9c9fd9bd61f6d3965fa4bf11f3ddfd2490a8cf324f152c13cc3724d67d09"
-        );
-    }
-
     #[gpui::test]
     async fn test_get_devcontainer_templates() {
         let client = FakeHttpClient::create(|request| async move {
@@ -1872,8 +1743,10 @@ mod tests {
                 }".into())
                 .unwrap())
         });
-        let response = get_devcontainer_templates(
+        let response: Result<DevContainerTemplatesResponse, String> = get_deserializable_oci_blob(
             "",
+            ghcr_registry(),
+            devcontainer_templates_repository(),
             "sha256:035e9c9fd9bd61f6d3965fa4bf11f3ddfd2490a8cf324f152c13cc3724d67d09",
             &client,
         )

crates/dev_container/src/oci.rs πŸ”—

@@ -0,0 +1,470 @@
+use std::{path::PathBuf, pin::Pin, sync::Arc};
+
+use fs::Fs;
+use futures::{AsyncRead, AsyncReadExt, io::BufReader};
+use http::Request;
+use http_client::{AsyncBody, HttpClient};
+use serde::{Deserialize, Serialize};
+
+use crate::devcontainer_api::DevContainerError;
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct TokenResponse {
+    pub(crate) token: String,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct DockerManifestsResponse {
+    pub(crate) layers: Vec<ManifestLayer>,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct ManifestLayer {
+    pub(crate) digest: String,
+}
+
+/// Gets a bearer token for pulling from a container registry repository.
+///
+/// This uses the registry's `/token` endpoint directly, which works for
+/// `ghcr.io` and other registries that follow the same convention.  For
+/// registries that require a full `WWW-Authenticate` negotiation flow this
+/// would need to be extended.
+pub(crate) async fn get_oci_token(
+    registry: &str,
+    repository_path: &str,
+    client: &Arc<dyn HttpClient>,
+) -> Result<TokenResponse, String> {
+    let url = format!(
+        "https://{registry}/token?service={registry}&scope=repository:{repository_path}:pull",
+    );
+    log::debug!("Fetching OCI token from: {}", url);
+    get_deserialized_response("", &url, client)
+        .await
+        .map_err(|e| {
+            log::error!("OCI token request failed for {}: {e}", url);
+            e
+        })
+}
+
+pub(crate) async fn get_latest_oci_manifest(
+    token: &str,
+    registry: &str,
+    repository_path: &str,
+    client: &Arc<dyn HttpClient>,
+    id: Option<&str>,
+) -> Result<DockerManifestsResponse, String> {
+    get_oci_manifest(registry, repository_path, token, client, "latest", id).await
+}
+
+pub(crate) async fn get_oci_manifest(
+    registry: &str,
+    repository_path: &str,
+    token: &str,
+    client: &Arc<dyn HttpClient>,
+    version: &str,
+    id: Option<&str>,
+) -> Result<DockerManifestsResponse, String> {
+    let url = match id {
+        Some(id) => format!("https://{registry}/v2/{repository_path}/{id}/manifests/{version}"),
+        None => format!("https://{registry}/v2/{repository_path}/manifests/{version}"),
+    };
+
+    get_deserialized_response(token, &url, client).await
+}
+
+pub(crate) async fn get_deserializable_oci_blob<T>(
+    token: &str,
+    registry: &str,
+    repository_path: &str,
+    blob_digest: &str,
+    client: &Arc<dyn HttpClient>,
+) -> Result<T, String>
+where
+    T: for<'a> Deserialize<'a>,
+{
+    let url = format!("https://{registry}/v2/{repository_path}/blobs/{blob_digest}");
+    get_deserialized_response(token, &url, client).await
+}
+
+pub(crate) async fn download_oci_tarball(
+    token: &str,
+    registry: &str,
+    repository_path: &str,
+    blob_digest: &str,
+    accept_header: &str,
+    dest_dir: &PathBuf,
+    client: &Arc<dyn HttpClient>,
+    fs: &Arc<dyn Fs>,
+    id: Option<&str>,
+) -> Result<(), DevContainerError> {
+    let url = match id {
+        Some(id) => format!("https://{registry}/v2/{repository_path}/{id}/blobs/{blob_digest}"),
+        None => format!("https://{registry}/v2/{repository_path}/blobs/{blob_digest}"),
+    };
+
+    let request = Request::get(&url)
+        .header("Authorization", format!("Bearer {}", token))
+        .header("Accept", accept_header)
+        .body(AsyncBody::default())
+        .map_err(|e| {
+            log::error!("Failed to create blob request: {e}");
+            DevContainerError::ResourceFetchFailed
+        })?;
+
+    let mut response = client.send(request).await.map_err(|e| {
+        log::error!("Failed to download feature blob: {e}");
+        DevContainerError::ResourceFetchFailed
+    })?;
+    let status = response.status();
+
+    let body = BufReader::new(response.body_mut());
+
+    if !status.is_success() {
+        let body_text = String::from_utf8_lossy(body.buffer());
+        log::error!(
+            "Feature blob download returned HTTP {}: {}",
+            status.as_u16(),
+            body_text,
+        );
+        return Err(DevContainerError::ResourceFetchFailed);
+    }
+
+    futures::pin_mut!(body);
+    let body: Pin<&mut (dyn AsyncRead + Send)> = body;
+    let archive = async_tar::Archive::new(body);
+    fs.extract_tar_file(dest_dir, archive).await.map_err(|e| {
+        log::error!("Failed to extract feature tarball: {e}");
+        DevContainerError::FilesystemError
+    })?;
+
+    Ok(())
+}
+
+pub(crate) async fn get_deserialized_response<T>(
+    token: &str,
+    url: &str,
+    client: &Arc<dyn HttpClient>,
+) -> Result<T, String>
+where
+    T: for<'de> Deserialize<'de>,
+{
+    let request = match Request::get(url)
+        .header("Authorization", format!("Bearer {}", token))
+        .header("Accept", "application/vnd.oci.image.manifest.v1+json")
+        .body(AsyncBody::default())
+    {
+        Ok(request) => request,
+        Err(e) => return Err(format!("Failed to create request: {}", e)),
+    };
+    let response = match client.send(request).await {
+        Ok(response) => response,
+        Err(e) => {
+            return Err(format!("Failed to send request to {}: {}", url, e));
+        }
+    };
+
+    let status = response.status();
+    let mut output = String::new();
+
+    if let Err(e) = response.into_body().read_to_string(&mut output).await {
+        return Err(format!("Failed to read response body from {}: {}", url, e));
+    };
+
+    if !status.is_success() {
+        return Err(format!(
+            "OCI request to {} returned HTTP {}: {}",
+            url,
+            status.as_u16(),
+            &output[..output.len().min(500)],
+        ));
+    }
+
+    match serde_json_lenient::from_str(&output) {
+        Ok(response) => Ok(response),
+        Err(e) => Err(format!(
+            "Failed to deserialize response from {}: {} (body: {})",
+            url,
+            e,
+            &output[..output.len().min(500)],
+        )),
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use std::{path::PathBuf, sync::Arc};
+
+    use fs::{FakeFs, Fs};
+    use gpui::TestAppContext;
+    use http_client::{FakeHttpClient, anyhow};
+    use serde::Deserialize;
+
+    use crate::oci::{
+        TokenResponse, download_oci_tarball, get_deserializable_oci_blob,
+        get_deserialized_response, get_latest_oci_manifest, get_oci_token,
+    };
+
+    async fn build_test_tarball() -> Vec<u8> {
+        let devcontainer_json = concat!(
+            "// For format details, see https://aka.ms/devcontainer.json. For config options, see the\n",
+            "// README at: https://github.com/devcontainers/templates/tree/main/src/alpine\n",
+            "{\n",
+            "\t\"name\": \"Alpine\",\n",
+            "\t// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile\n",
+            "\t\"image\": \"mcr.microsoft.com/devcontainers/base:alpine-${templateOption:imageVariant}\"\n",
+            "}\n",
+        );
+
+        let dependabot_yml = concat!(
+            "version: 2\n",
+            "updates:\n",
+            " - package-ecosystem: \"devcontainers\"\n",
+            "   directory: \"/\"\n",
+            "   schedule:\n",
+            "     interval: weekly\n",
+        );
+
+        let buffer = futures::io::Cursor::new(Vec::new());
+        let mut builder = async_tar::Builder::new(buffer);
+
+        let files: &[(&str, &[u8], u32)] = &[
+            (
+                ".devcontainer/devcontainer.json",
+                devcontainer_json.as_bytes(),
+                0o644,
+            ),
+            (".github/dependabot.yml", dependabot_yml.as_bytes(), 0o644),
+            ("NOTES.md", b"Some notes", 0o644),
+            ("README.md", b"# Alpine\n", 0o644),
+        ];
+
+        for (path, data, mode) in files {
+            let mut header = async_tar::Header::new_gnu();
+            header.set_size(data.len() as u64);
+            header.set_mode(*mode);
+            header.set_entry_type(async_tar::EntryType::Regular);
+            header.set_cksum();
+            builder.append_data(&mut header, path, *data).await.unwrap();
+        }
+
+        let buffer = builder.into_inner().await.unwrap();
+        buffer.into_inner()
+    }
+    fn test_oci_registry() -> &'static str {
+        "ghcr.io"
+    }
+    fn test_oci_repository() -> &'static str {
+        "repository"
+    }
+
+    #[gpui::test]
+    async fn test_get_deserialized_response(_cx: &mut TestAppContext) {
+        let client = FakeHttpClient::create(|_request| async move {
+            Ok(http_client::Response::builder()
+                .status(200)
+                .body("{ \"token\": \"thisisatoken\" }".into())
+                .unwrap())
+        });
+
+        let response =
+            get_deserialized_response::<TokenResponse>("", "https://ghcr.io/token", &client).await;
+        assert!(response.is_ok());
+        assert_eq!(response.unwrap().token, "thisisatoken".to_string())
+    }
+
+    #[gpui::test]
+    async fn test_get_oci_token() {
+        let client = FakeHttpClient::create(|request| async move {
+            let host = request.uri().host();
+            if host.is_none() || host.unwrap() != test_oci_registry() {
+                return Err(anyhow!("Unexpected host: {}", host.unwrap_or_default()));
+            }
+            let path = request.uri().path();
+            if path != "/token" {
+                return Err(anyhow!("Unexpected path: {}", path));
+            }
+            let query = request.uri().query();
+            if query.is_none()
+                || query.unwrap()
+                    != format!(
+                        "service=ghcr.io&scope=repository:{}:pull",
+                        test_oci_repository()
+                    )
+            {
+                return Err(anyhow!("Unexpected query: {}", query.unwrap_or_default()));
+            }
+            Ok(http_client::Response::builder()
+                .status(200)
+                .body("{ \"token\": \"thisisatoken\" }".into())
+                .unwrap())
+        });
+
+        let response = get_oci_token(test_oci_registry(), test_oci_repository(), &client).await;
+
+        assert!(response.is_ok());
+        assert_eq!(response.unwrap().token, "thisisatoken".to_string());
+    }
+
+    #[gpui::test]
+    async fn test_get_latest_manifests() {
+        let client = FakeHttpClient::create(|request| async move {
+            let host = request.uri().host();
+            if host.is_none() || host.unwrap() != test_oci_registry() {
+                return Err(anyhow!("Unexpected host: {}", host.unwrap_or_default()));
+            }
+            let path = request.uri().path();
+            if path != format!("/v2/{}/manifests/latest", test_oci_repository()) {
+                return Err(anyhow!("Unexpected path: {}", path));
+            }
+            Ok(http_client::Response::builder()
+                .status(200)
+                .body("{
+                    \"schemaVersion\": 2,
+                    \"mediaType\": \"application/vnd.oci.image.manifest.v1+json\",
+                    \"config\": {
+                        \"mediaType\": \"application/vnd.devcontainers\",
+                        \"digest\": \"sha256:44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a\",
+                        \"size\": 2
+                    },
+                    \"layers\": [
+                        {
+                            \"mediaType\": \"application/vnd.devcontainers.collection.layer.v1+json\",
+                            \"digest\": \"sha256:035e9c9fd9bd61f6d3965fa4bf11f3ddfd2490a8cf324f152c13cc3724d67d09\",
+                            \"size\": 65235,
+                            \"annotations\": {
+                                \"org.opencontainers.image.title\": \"devcontainer-collection.json\"
+                            }
+                        }
+                    ],
+                    \"annotations\": {
+                        \"com.github.package.type\": \"devcontainer_collection\"
+                    }
+                }".into())
+                .unwrap())
+        });
+
+        let response = get_latest_oci_manifest(
+            "",
+            test_oci_registry(),
+            test_oci_repository(),
+            &client,
+            None,
+        )
+        .await;
+        assert!(response.is_ok());
+        let response = response.unwrap();
+
+        assert_eq!(response.layers.len(), 1);
+        assert_eq!(
+            response.layers[0].digest,
+            "sha256:035e9c9fd9bd61f6d3965fa4bf11f3ddfd2490a8cf324f152c13cc3724d67d09"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_get_oci_blob() {
+        #[derive(Debug, Deserialize)]
+        struct DeserializableTestStruct {
+            foo: String,
+        }
+
+        let client = FakeHttpClient::create(|request| async move {
+            let host = request.uri().host();
+            if host.is_none() || host.unwrap() != test_oci_registry() {
+                return Err(anyhow!("Unexpected host: {}", host.unwrap_or_default()));
+            }
+            let path = request.uri().path();
+            if path != format!("/v2/{}/blobs/blobdigest", test_oci_repository()) {
+                return Err(anyhow!("Unexpected path: {}", path));
+            }
+            Ok(http_client::Response::builder()
+                .status(200)
+                .body(
+                    r#"
+                    {
+                        "foo": "bar"
+                    }
+                    "#
+                    .into(),
+                )
+                .unwrap())
+        });
+
+        let response: Result<DeserializableTestStruct, String> = get_deserializable_oci_blob(
+            "",
+            test_oci_registry(),
+            test_oci_repository(),
+            "blobdigest",
+            &client,
+        )
+        .await;
+        assert!(response.is_ok());
+        let response = response.unwrap();
+
+        assert_eq!(response.foo, "bar".to_string());
+    }
+
+    #[gpui::test]
+    async fn test_download_oci_tarball(cx: &mut TestAppContext) {
+        cx.executor().allow_parking();
+        let fs: Arc<dyn Fs> = FakeFs::new(cx.executor());
+
+        let destination_dir = PathBuf::from("/tmp/extracted");
+        fs.create_dir(&destination_dir).await.unwrap();
+
+        let tarball_bytes = build_test_tarball().await;
+        let tarball = std::sync::Arc::new(tarball_bytes);
+
+        let client = FakeHttpClient::create(move |request| {
+            let tarball = tarball.clone();
+            async move {
+                let host = request.uri().host();
+                if host.is_none() || host.unwrap() != test_oci_registry() {
+                    return Err(anyhow!("Unexpected host: {}", host.unwrap_or_default()));
+                }
+                let path = request.uri().path();
+                if path != format!("/v2/{}/blobs/blobdigest", test_oci_repository()) {
+                    return Err(anyhow!("Unexpected path: {}", path));
+                }
+                Ok(http_client::Response::builder()
+                    .status(200)
+                    .body(tarball.to_vec().into())
+                    .unwrap())
+            }
+        });
+
+        let response = download_oci_tarball(
+            "",
+            test_oci_registry(),
+            test_oci_repository(),
+            "blobdigest",
+            "header",
+            &destination_dir,
+            &client,
+            &fs,
+            None,
+        )
+        .await;
+        assert!(response.is_ok());
+
+        let expected_devcontainer_json = concat!(
+            "// For format details, see https://aka.ms/devcontainer.json. For config options, see the\n",
+            "// README at: https://github.com/devcontainers/templates/tree/main/src/alpine\n",
+            "{\n",
+            "\t\"name\": \"Alpine\",\n",
+            "\t// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile\n",
+            "\t\"image\": \"mcr.microsoft.com/devcontainers/base:alpine-${templateOption:imageVariant}\"\n",
+            "}\n",
+        );
+
+        assert_eq!(
+            fs.load(&destination_dir.join(".devcontainer/devcontainer.json"))
+                .await
+                .unwrap(),
+            expected_devcontainer_json
+        )
+    }
+}

crates/diagnostics/src/buffer_diagnostics.rs πŸ”—

@@ -24,6 +24,7 @@ use settings::Settings;
 use std::{
     any::{Any, TypeId},
     cmp::{self, Ordering},
+    ops::Range,
     sync::Arc,
 };
 use text::{Anchor, BufferSnapshot, OffsetRangeExt};
@@ -480,25 +481,35 @@ impl BufferDiagnosticsEditor {
                     })
                 });
 
-                let (anchor_ranges, _) =
-                    buffer_diagnostics_editor
-                        .multibuffer
-                        .update(cx, |multibuffer, cx| {
-                            let excerpt_ranges = excerpt_ranges
-                                .into_iter()
-                                .map(|range| ExcerptRange {
-                                    context: range.context.to_point(&buffer_snapshot),
-                                    primary: range.primary.to_point(&buffer_snapshot),
-                                })
-                                .collect();
-                            multibuffer.set_excerpt_ranges_for_path(
-                                PathKey::for_buffer(&buffer, cx),
-                                buffer.clone(),
-                                &buffer_snapshot,
-                                excerpt_ranges,
-                                cx,
-                            )
-                        });
+                let excerpt_ranges: Vec<_> = excerpt_ranges
+                    .into_iter()
+                    .map(|range| ExcerptRange {
+                        context: range.context.to_point(&buffer_snapshot),
+                        primary: range.primary.to_point(&buffer_snapshot),
+                    })
+                    .collect();
+                buffer_diagnostics_editor
+                    .multibuffer
+                    .update(cx, |multibuffer, cx| {
+                        multibuffer.set_excerpt_ranges_for_path(
+                            PathKey::for_buffer(&buffer, cx),
+                            buffer.clone(),
+                            &buffer_snapshot,
+                            excerpt_ranges.clone(),
+                            cx,
+                        )
+                    });
+                let multibuffer_snapshot =
+                    buffer_diagnostics_editor.multibuffer.read(cx).snapshot(cx);
+                let anchor_ranges: Vec<Range<editor::Anchor>> = excerpt_ranges
+                    .into_iter()
+                    .filter_map(|range| {
+                        let text_range = buffer_snapshot.anchor_range_inside(range.primary);
+                        let start = multibuffer_snapshot.anchor_in_buffer(text_range.start)?;
+                        let end = multibuffer_snapshot.anchor_in_buffer(text_range.end)?;
+                        Some(start..end)
+                    })
+                    .collect();
 
                 if was_empty {
                     if let Some(anchor_range) = anchor_ranges.first() {

crates/diagnostics/src/diagnostic_renderer.rs πŸ”—

@@ -8,9 +8,9 @@ use editor::{
 use gpui::{AppContext, Entity, Focusable, WeakEntity};
 use language::{BufferId, Diagnostic, DiagnosticEntryRef, LanguageRegistry};
 use lsp::DiagnosticSeverity;
-use markdown::{Markdown, MarkdownElement};
+use markdown::{CopyButtonVisibility, Markdown, MarkdownElement};
 use settings::Settings;
-use text::{AnchorRangeExt, Point};
+use text::Point;
 use theme_settings::ThemeSettings;
 use ui::{CopyButton, prelude::*};
 use util::maybe;
@@ -239,8 +239,7 @@ impl DiagnosticBlock {
                         diagnostics_markdown_style(bcx.window, cx),
                     )
                     .code_block_renderer(markdown::CodeBlockRenderer::Default {
-                        copy_button: false,
-                        copy_button_on_hover: false,
+                        copy_button_visibility: CopyButtonVisibility::Hidden,
                         border: false,
                     })
                     .on_url_click({
@@ -290,23 +289,12 @@ impl DiagnosticBlock {
                 .nth(ix)
             {
                 let multibuffer = editor.buffer().read(cx);
-                let Some(snapshot) = multibuffer
-                    .buffer(buffer_id)
-                    .map(|entity| entity.read(cx).snapshot())
-                else {
+                if let Some(anchor_range) = multibuffer
+                    .snapshot(cx)
+                    .buffer_anchor_range_to_anchor_range(diagnostic.range)
+                {
+                    Self::jump_to(editor, anchor_range, window, cx);
                     return;
-                };
-
-                for (excerpt_id, _, range) in multibuffer.excerpts_for_buffer(buffer_id, cx) {
-                    if range.context.overlaps(&diagnostic.range, &snapshot) {
-                        Self::jump_to(
-                            editor,
-                            Anchor::range_in_buffer(excerpt_id, diagnostic.range),
-                            window,
-                            cx,
-                        );
-                        return;
-                    }
                 }
             }
         } else if let Some(diagnostic) = editor

crates/diagnostics/src/diagnostics.rs πŸ”—

@@ -12,7 +12,7 @@ use buffer_diagnostics::BufferDiagnosticsEditor;
 use collections::{BTreeSet, HashMap, HashSet};
 use diagnostic_renderer::DiagnosticBlock;
 use editor::{
-    Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey,
+    Anchor, Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey,
     display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId},
     multibuffer_context_lines,
 };
@@ -301,17 +301,21 @@ impl ProjectDiagnosticsEditor {
         let snapshot = self
             .editor
             .update(cx, |editor, cx| editor.display_snapshot(cx));
-        let buffer = self.multibuffer.read(cx);
-        let buffer_ids = buffer.all_buffer_ids();
         let selected_buffers = self.editor.update(cx, |editor, _| {
             editor
                 .selections
                 .all_anchors(&snapshot)
                 .iter()
-                .filter_map(|anchor| anchor.start.text_anchor.buffer_id)
+                .filter_map(|anchor| {
+                    Some(snapshot.anchor_to_buffer_anchor(anchor.start)?.0.buffer_id)
+                })
                 .collect::<HashSet<_>>()
         });
-        for buffer_id in buffer_ids {
+        for buffer_id in snapshot
+            .excerpts()
+            .map(|excerpt| excerpt.context.start.buffer_id)
+            .dedup()
+        {
             if retain_selections && selected_buffers.contains(&buffer_id) {
                 continue;
             }
@@ -329,7 +333,7 @@ impl ProjectDiagnosticsEditor {
                 continue;
             }
             self.multibuffer.update(cx, |b, cx| {
-                b.remove_excerpts_for_path(PathKey::for_buffer(&buffer, cx), cx);
+                b.remove_excerpts(PathKey::for_buffer(&buffer, cx), cx);
             });
         }
     }
@@ -581,9 +585,8 @@ impl ProjectDiagnosticsEditor {
                     match retain_excerpts {
                         RetainExcerpts::Dirty if !is_dirty => Vec::new(),
                         RetainExcerpts::All | RetainExcerpts::Dirty => multi_buffer
-                            .excerpts_for_buffer(buffer_id, cx)
-                            .into_iter()
-                            .map(|(_, _, range)| range)
+                            .snapshot(cx)
+                            .excerpts_for_buffer(buffer_id)
                             .sorted_by(|a, b| cmp_excerpts(&buffer_snapshot, a, b))
                             .collect(),
                     }
@@ -621,22 +624,33 @@ impl ProjectDiagnosticsEditor {
                         });
                     })
                 }
-                let (anchor_ranges, _) = this.multibuffer.update(cx, |multi_buffer, cx| {
-                    let excerpt_ranges = excerpt_ranges
-                        .into_iter()
-                        .map(|range| ExcerptRange {
-                            context: range.context.to_point(&buffer_snapshot),
-                            primary: range.primary.to_point(&buffer_snapshot),
-                        })
-                        .collect();
+                let excerpt_ranges: Vec<_> = excerpt_ranges
+                    .into_iter()
+                    .map(|range| ExcerptRange {
+                        context: range.context.to_point(&buffer_snapshot),
+                        primary: range.primary.to_point(&buffer_snapshot),
+                    })
+                    .collect();
+                // TODO(cole): maybe should use the nonshrinking API?
+                this.multibuffer.update(cx, |multi_buffer, cx| {
                     multi_buffer.set_excerpt_ranges_for_path(
                         PathKey::for_buffer(&buffer, cx),
                         buffer.clone(),
                         &buffer_snapshot,
-                        excerpt_ranges,
+                        excerpt_ranges.clone(),
                         cx,
                     )
                 });
+                let multibuffer_snapshot = this.multibuffer.read(cx).snapshot(cx);
+                let anchor_ranges: Vec<Range<Anchor>> = excerpt_ranges
+                    .into_iter()
+                    .filter_map(|range| {
+                        let text_range = buffer_snapshot.anchor_range_inside(range.primary);
+                        let start = multibuffer_snapshot.anchor_in_buffer(text_range.start)?;
+                        let end = multibuffer_snapshot.anchor_in_buffer(text_range.end)?;
+                        Some(start..end)
+                    })
+                    .collect();
                 #[cfg(test)]
                 let cloned_blocks = result_blocks.clone();
 

crates/edit_prediction/src/edit_prediction.rs πŸ”—

@@ -1676,7 +1676,7 @@ impl EditPredictionStore {
             buffer.pending_predictions.push(PendingSettledPrediction {
                 request_id: request_id,
                 editable_anchor_range: edited_buffer_snapshot
-                    .anchor_range_around(editable_offset_range),
+                    .anchor_range_inside(editable_offset_range),
                 example,
                 e2e_latency,
                 enqueued_at: now,
@@ -2351,7 +2351,10 @@ impl EditPredictionStore {
         cx: &mut AsyncApp,
     ) -> Result<Option<(Entity<Buffer>, language::Anchor)>> {
         let collaborator_cursor_rows: Vec<u32> = active_buffer_snapshot
-            .selections_in_range(Anchor::MIN..Anchor::MAX, false)
+            .selections_in_range(
+                Anchor::min_max_range_for_buffer(active_buffer_snapshot.remote_id()),
+                false,
+            )
             .flat_map(|(_, _, _, selections)| {
                 selections.map(|s| s.head().to_point(active_buffer_snapshot).row)
             })
@@ -2427,7 +2430,10 @@ impl EditPredictionStore {
                     candidate_buffer.read_with(cx, |buffer, _cx| {
                         let snapshot = buffer.snapshot();
                         let has_collaborators = snapshot
-                            .selections_in_range(Anchor::MIN..Anchor::MAX, false)
+                            .selections_in_range(
+                                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
+                                false,
+                            )
                             .next()
                             .is_some();
                         let position = buffer
@@ -2761,7 +2767,7 @@ fn collaborator_edit_overlaps_locality_region(
         (position..position).to_point(snapshot),
         COLLABORATOR_EDIT_LOCALITY_CONTEXT_TOKENS,
     );
-    let locality_anchor_range = snapshot.anchor_range_around(locality_point_range);
+    let locality_anchor_range = snapshot.anchor_range_inside(locality_point_range);
 
     edit_range.overlaps(&locality_anchor_range, snapshot)
 }

crates/edit_prediction/src/udiff.rs πŸ”—

@@ -54,7 +54,6 @@ pub async fn apply_diff(
 
     let mut included_files: HashMap<String, Entity<Buffer>> = HashMap::default();
 
-    let ranges = [Anchor::MIN..Anchor::MAX];
     let mut diff = DiffParser::new(diff_str);
     let mut current_file = None;
     let mut edits: Vec<(std::ops::Range<Anchor>, Arc<str>)> = vec![];
@@ -115,7 +114,7 @@ pub async fn apply_diff(
                     edits.extend(resolve_hunk_edits_in_buffer(
                         hunk,
                         buffer,
-                        ranges.as_slice(),
+                        &[Anchor::min_max_range_for_buffer(buffer.remote_id())],
                         status,
                     )?);
                     anyhow::Ok(())

crates/edit_prediction_ui/src/edit_prediction_context_view.rs πŸ”—

@@ -201,10 +201,14 @@ impl EditPredictionContextView {
                 multibuffer.clear(cx);
 
                 for (path, buffer, ranges, orders, _) in paths {
-                    let (anchor_ranges, _) =
-                        multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx);
-                    for (anchor_range, order) in anchor_ranges.into_iter().zip(orders) {
-                        excerpt_anchors_with_orders.push((anchor_range.start, order));
+                    multibuffer.set_excerpts_for_path(path, buffer.clone(), ranges.clone(), 0, cx);
+                    let snapshot = multibuffer.snapshot(cx);
+                    let buffer_snapshot = buffer.read(cx).snapshot();
+                    for (range, order) in ranges.into_iter().zip(orders) {
+                        let text_anchor = buffer_snapshot.anchor_range_inside(range);
+                        if let Some(start) = snapshot.anchor_in_buffer(text_anchor.start) {
+                            excerpt_anchors_with_orders.push((start, order));
+                        }
                     }
                 }
             });

crates/edit_prediction_ui/src/rate_prediction_modal.rs πŸ”—

@@ -357,35 +357,26 @@ impl RatePredictionsModal {
                 });
 
                 editor.disable_header_for_buffer(new_buffer_id, cx);
-                let excerpt_id = editor.buffer().update(cx, |multibuffer, cx| {
+                editor.buffer().update(cx, |multibuffer, cx| {
                     multibuffer.clear(cx);
-                    multibuffer.set_excerpts_for_buffer(new_buffer, [start..end], 0, cx);
+                    multibuffer.set_excerpts_for_buffer(new_buffer.clone(), [start..end], 0, cx);
                     multibuffer.add_diff(diff, cx);
-                    multibuffer.excerpt_ids().into_iter().next()
                 });
 
-                if let Some((excerpt_id, cursor_position)) =
-                    excerpt_id.zip(prediction.cursor_position.as_ref())
-                {
+                if let Some(cursor_position) = prediction.cursor_position.as_ref() {
                     let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx);
-                    if let Some(buffer_snapshot) =
-                        multibuffer_snapshot.buffer_for_excerpt(excerpt_id)
-                    {
-                        let cursor_offset = prediction
-                            .edit_preview
-                            .anchor_to_offset_in_result(cursor_position.anchor)
-                            + cursor_position.offset;
-                        let cursor_anchor = buffer_snapshot.anchor_after(cursor_offset);
-
-                        if let Some(anchor) =
-                            multibuffer_snapshot.anchor_in_excerpt(excerpt_id, cursor_anchor)
-                        {
-                            editor.splice_inlays(
-                                &[InlayId::EditPrediction(0)],
-                                vec![Inlay::edit_prediction(0, anchor, "▏")],
-                                cx,
-                            );
-                        }
+                    let cursor_offset = prediction
+                        .edit_preview
+                        .anchor_to_offset_in_result(cursor_position.anchor)
+                        + cursor_position.offset;
+                    let cursor_anchor = new_buffer.read(cx).snapshot().anchor_after(cursor_offset);
+
+                    if let Some(anchor) = multibuffer_snapshot.anchor_in_excerpt(cursor_anchor) {
+                        editor.splice_inlays(
+                            &[InlayId::EditPrediction(0)],
+                            vec![Inlay::edit_prediction(0, anchor, "▏")],
+                            cx,
+                        );
                     }
                 }
             });
@@ -991,7 +982,6 @@ impl FeedbackCompletionProvider {
 impl editor::CompletionProvider for FeedbackCompletionProvider {
     fn completions(
         &self,
-        _excerpt_id: editor::ExcerptId,
         buffer: &Entity<Buffer>,
         buffer_position: language::Anchor,
         _trigger: editor::CompletionContext,

crates/editor/src/bracket_colorization.rs πŸ”—

@@ -7,9 +7,9 @@ use std::ops::Range;
 use crate::{Editor, HighlightKey};
 use collections::{HashMap, HashSet};
 use gpui::{AppContext as _, Context, HighlightStyle};
-use itertools::Itertools;
 use language::{BufferRow, BufferSnapshot, language_settings::LanguageSettings};
-use multi_buffer::{Anchor, ExcerptId};
+use multi_buffer::{Anchor, BufferOffset, ExcerptRange, MultiBufferSnapshot};
+use text::OffsetRangeExt as _;
 use ui::{ActiveTheme, utils::ensure_minimum_contrast};
 
 impl Editor {
@@ -25,55 +25,49 @@ impl Editor {
         let accents_count = cx.theme().accents().0.len();
         let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx);
 
-        let visible_excerpts = self.visible_excerpts(false, cx);
-        let excerpt_data: Vec<(ExcerptId, BufferSnapshot, Range<usize>)> = visible_excerpts
+        let visible_excerpts = self.visible_buffer_ranges(cx);
+        let excerpt_data: Vec<(
+            BufferSnapshot,
+            Range<BufferOffset>,
+            ExcerptRange<text::Anchor>,
+        )> = visible_excerpts
             .into_iter()
-            .filter_map(|(excerpt_id, (buffer, _, buffer_range))| {
-                let buffer = buffer.read(cx);
-                let buffer_snapshot = buffer.snapshot();
-                if LanguageSettings::for_buffer(&buffer, cx).colorize_brackets {
-                    Some((excerpt_id, buffer_snapshot, buffer_range))
-                } else {
-                    None
-                }
+            .filter(|(buffer_snapshot, _, _)| {
+                let Some(buffer) = self.buffer().read(cx).buffer(buffer_snapshot.remote_id())
+                else {
+                    return false;
+                };
+                LanguageSettings::for_buffer(buffer.read(cx), cx).colorize_brackets
             })
             .collect();
 
         let mut fetched_tree_sitter_chunks = excerpt_data
             .iter()
-            .filter_map(|(excerpt_id, ..)| {
+            .filter_map(|(_, _, excerpt_range)| {
+                let key = excerpt_range.context.clone();
                 Some((
-                    *excerpt_id,
-                    self.bracket_fetched_tree_sitter_chunks
-                        .get(excerpt_id)
-                        .cloned()?,
+                    key.clone(),
+                    self.bracket_fetched_tree_sitter_chunks.get(&key).cloned()?,
                 ))
             })
-            .collect::<HashMap<ExcerptId, HashSet<Range<BufferRow>>>>();
+            .collect::<HashMap<Range<text::Anchor>, HashSet<Range<BufferRow>>>>();
 
         let bracket_matches_by_accent = cx.background_spawn(async move {
-            let anchors_in_multi_buffer = |current_excerpt: ExcerptId,
-                                           text_anchors: [text::Anchor; 4]|
-             -> Option<[Option<_>; 4]> {
-                multi_buffer_snapshot
-                    .anchors_in_excerpt(current_excerpt, text_anchors)?
-                    .collect_array()
-            };
-
             let bracket_matches_by_accent: HashMap<usize, Vec<Range<Anchor>>> =
                 excerpt_data.into_iter().fold(
                     HashMap::default(),
-                    |mut acc, (excerpt_id, buffer_snapshot, buffer_range)| {
-                        let fetched_chunks =
-                            fetched_tree_sitter_chunks.entry(excerpt_id).or_default();
+                    |mut acc, (buffer_snapshot, buffer_range, excerpt_range)| {
+                        let fetched_chunks = fetched_tree_sitter_chunks
+                            .entry(excerpt_range.context.clone())
+                            .or_default();
 
                         let brackets_by_accent = compute_bracket_ranges(
+                            &multi_buffer_snapshot,
                             &buffer_snapshot,
                             buffer_range,
+                            excerpt_range,
                             fetched_chunks,
-                            excerpt_id,
                             accents_count,
-                            &anchors_in_multi_buffer,
                         );
 
                         for (accent_number, new_ranges) in brackets_by_accent {
@@ -144,15 +138,20 @@ impl Editor {
 }
 
 fn compute_bracket_ranges(
+    multi_buffer_snapshot: &MultiBufferSnapshot,
     buffer_snapshot: &BufferSnapshot,
-    buffer_range: Range<usize>,
+    buffer_range: Range<BufferOffset>,
+    excerpt_range: ExcerptRange<text::Anchor>,
     fetched_chunks: &mut HashSet<Range<BufferRow>>,
-    excerpt_id: ExcerptId,
     accents_count: usize,
-    anchors_in_multi_buffer: &impl Fn(ExcerptId, [text::Anchor; 4]) -> Option<[Option<Anchor>; 4]>,
 ) -> Vec<(usize, Vec<Range<Anchor>>)> {
+    let context = excerpt_range.context.to_offset(buffer_snapshot);
+
     buffer_snapshot
-        .fetch_bracket_ranges(buffer_range.start..buffer_range.end, Some(fetched_chunks))
+        .fetch_bracket_ranges(
+            buffer_range.start.0..buffer_range.end.0,
+            Some(fetched_chunks),
+        )
         .into_iter()
         .flat_map(|(chunk_range, pairs)| {
             if fetched_chunks.insert(chunk_range) {
@@ -164,37 +163,25 @@ fn compute_bracket_ranges(
         .filter_map(|pair| {
             let color_index = pair.color_index?;
 
-            let buffer_open_range = buffer_snapshot.anchor_range_around(pair.open_range);
-            let buffer_close_range = buffer_snapshot.anchor_range_around(pair.close_range);
-            let [
-                buffer_open_range_start,
-                buffer_open_range_end,
-                buffer_close_range_start,
-                buffer_close_range_end,
-            ] = anchors_in_multi_buffer(
-                excerpt_id,
-                [
-                    buffer_open_range.start,
-                    buffer_open_range.end,
-                    buffer_close_range.start,
-                    buffer_close_range.end,
-                ],
-            )?;
-            let multi_buffer_open_range = buffer_open_range_start.zip(buffer_open_range_end);
-            let multi_buffer_close_range = buffer_close_range_start.zip(buffer_close_range_end);
+            let mut ranges = Vec::new();
 
-            let mut ranges = Vec::with_capacity(2);
-            if let Some((open_start, open_end)) = multi_buffer_open_range {
-                ranges.push(open_start..open_end);
-            }
-            if let Some((close_start, close_end)) = multi_buffer_close_range {
-                ranges.push(close_start..close_end);
-            }
-            if ranges.is_empty() {
-                None
-            } else {
-                Some((color_index % accents_count, ranges))
-            }
+            if context.start <= pair.open_range.start && pair.open_range.end <= context.end {
+                let anchors = buffer_snapshot.anchor_range_inside(pair.open_range);
+                ranges.push(
+                    multi_buffer_snapshot.anchor_in_buffer(anchors.start)?
+                        ..multi_buffer_snapshot.anchor_in_buffer(anchors.end)?,
+                );
+            };
+
+            if context.start <= pair.close_range.start && pair.close_range.end <= context.end {
+                let anchors = buffer_snapshot.anchor_range_inside(pair.close_range);
+                ranges.push(
+                    multi_buffer_snapshot.anchor_in_buffer(anchors.start)?
+                        ..multi_buffer_snapshot.anchor_in_buffer(anchors.end)?,
+                );
+            };
+
+            Some((color_index % accents_count, ranges))
         })
         .collect()
 }
@@ -1197,7 +1184,7 @@ mod foo Β«1{
                 );
             }
 
-            let buffer_snapshot = snapshot.buffer().as_singleton().unwrap().2;
+            let buffer_snapshot = snapshot.buffer().as_singleton().unwrap();
             for bracket_match in buffer_snapshot
                 .fetch_bracket_ranges(
                     snapshot
@@ -1464,6 +1451,101 @@ mod foo Β«1{
         );
     }
 
+    #[gpui::test]
+    async fn test_multi_buffer_close_excerpts(cx: &mut gpui::TestAppContext) {
+        let comment_lines = 5;
+
+        init_test(cx, |language_settings| {
+            language_settings.defaults.colorize_brackets = Some(true);
+        });
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            path!("/a"),
+            json!({
+                "lib.rs": separate_with_comment_lines(
+                    indoc! {r#"
+    fn process_data_1() {
+        let map: Option<Vec<()>> = None;
+    }
+    "#},
+                    indoc! {r#"
+    fn process_data_2() {
+        let other_map: Option<Vec<()>> = None;
+    }
+    "#},
+                    comment_lines,
+                )
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
+        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+        language_registry.add(rust_lang());
+
+        let buffer_1 = project
+            .update(cx, |project, cx| {
+                project.open_local_buffer(path!("/a/lib.rs"), cx)
+            })
+            .await
+            .unwrap();
+
+        let second_excerpt_start = buffer_1.read_with(cx, |buffer, _| {
+            let text = buffer.text();
+            text.lines()
+                .enumerate()
+                .find(|(_, line)| line.contains("process_data_2"))
+                .map(|(row, _)| row as u32)
+                .unwrap()
+        });
+
+        let multi_buffer = cx.new(|cx| {
+            let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite);
+            multi_buffer.set_excerpts_for_path(
+                PathKey::sorted(0),
+                buffer_1.clone(),
+                [
+                    Point::new(0, 0)..Point::new(3, 0),
+                    Point::new(second_excerpt_start, 0)..Point::new(second_excerpt_start + 3, 0),
+                ],
+                0,
+                cx,
+            );
+            multi_buffer
+        });
+
+        let editor = cx.add_window(|window, cx| {
+            Editor::for_multibuffer(multi_buffer, Some(project.clone()), window, cx)
+        });
+        cx.executor().advance_clock(Duration::from_millis(100));
+        cx.executor().run_until_parked();
+
+        let editor_snapshot = editor
+            .update(cx, |editor, window, cx| editor.snapshot(window, cx))
+            .unwrap();
+        assert_eq!(
+            concat!(
+                "\n",
+                "\n",
+                "fn process_data_1\u{00ab}1()1\u{00bb} \u{00ab}1{\n",
+                "    let map: Option\u{00ab}2<Vec\u{00ab}3<\u{00ab}4()4\u{00bb}>3\u{00bb}>2\u{00bb} = None;\n",
+                "}1\u{00bb}\n",
+                "\n",
+                "\n",
+                "fn process_data_2\u{00ab}1()1\u{00bb} \u{00ab}1{\n",
+                "    let other_map: Option\u{00ab}2<Vec\u{00ab}3<\u{00ab}4()4\u{00bb}>3\u{00bb}>2\u{00bb} = None;\n",
+                "}1\u{00bb}\n",
+                "\n",
+                "1 hsla(207.80, 16.20%, 69.19%, 1.00)\n",
+                "2 hsla(29.00, 54.00%, 65.88%, 1.00)\n",
+                "3 hsla(286.00, 51.00%, 75.25%, 1.00)\n",
+                "4 hsla(187.00, 47.00%, 59.22%, 1.00)\n",
+            ),
+            &editor_bracket_colors_markup(&editor_snapshot),
+            "Two close excerpts from the same buffer (within same tree-sitter chunk) should both have bracket colors"
+        );
+    }
+
     #[gpui::test]
     // reproduction of #47846
     async fn test_bracket_colorization_with_folds(cx: &mut gpui::TestAppContext) {

crates/editor/src/code_completion_tests.rs πŸ”—

@@ -7,7 +7,7 @@ use project::{Completion, CompletionSource};
 use settings::SnippetSortOrder;
 use std::sync::Arc;
 use std::sync::atomic::AtomicBool;
-use text::Anchor;
+use text::{Anchor, BufferId};
 
 #[gpui::test]
 async fn test_sort_kind(cx: &mut TestAppContext) {
@@ -393,7 +393,7 @@ impl CompletionBuilder {
         kind: Option<CompletionItemKind>,
     ) -> Completion {
         Completion {
-            replace_range: Anchor::MIN..Anchor::MAX,
+            replace_range: Anchor::min_max_range_for_buffer(BufferId::new(1).unwrap()),
             new_text: label.to_string(),
             label: CodeLabel::plain(label.to_string(), filter_text),
             documentation: None,

crates/editor/src/code_context_menus.rs πŸ”—

@@ -9,8 +9,8 @@ use itertools::Itertools;
 use language::CodeLabel;
 use language::{Buffer, LanguageName, LanguageRegistry};
 use lsp::CompletionItemTag;
-use markdown::{Markdown, MarkdownElement};
-use multi_buffer::{Anchor, ExcerptId};
+use markdown::{CopyButtonVisibility, Markdown, MarkdownElement};
+use multi_buffer::Anchor;
 use ordered_float::OrderedFloat;
 use project::lsp_store::CompletionDocumentation;
 use project::{CodeAction, Completion, TaskSourceKind};
@@ -357,7 +357,8 @@ impl CompletionsMenu {
         id: CompletionId,
         sort_completions: bool,
         choices: &Vec<String>,
-        selection: Range<Anchor>,
+        initial_position: Anchor,
+        selection: Range<text::Anchor>,
         buffer: Entity<Buffer>,
         scroll_handle: Option<UniformListScrollHandle>,
         snippet_sort_order: SnippetSortOrder,
@@ -365,7 +366,7 @@ impl CompletionsMenu {
         let completions = choices
             .iter()
             .map(|choice| Completion {
-                replace_range: selection.start.text_anchor..selection.end.text_anchor,
+                replace_range: selection.clone(),
                 new_text: choice.to_string(),
                 label: CodeLabel::plain(choice.to_string(), None),
                 match_start: None,
@@ -400,7 +401,7 @@ impl CompletionsMenu {
             id,
             source: CompletionsMenuSource::SnippetChoices,
             sort_completions,
-            initial_position: selection.start,
+            initial_position,
             initial_query: None,
             is_incomplete: false,
             buffer,
@@ -1118,8 +1119,7 @@ impl CompletionsMenu {
         div().child(
             MarkdownElement::new(markdown, hover_markdown_style(window, cx))
                 .code_block_renderer(markdown::CodeBlockRenderer::Default {
-                    copy_button: false,
-                    copy_button_on_hover: false,
+                    copy_button_visibility: CopyButtonVisibility::Hidden,
                     border: false,
                 })
                 .on_url_click(open_markdown_url),
@@ -1381,7 +1381,6 @@ impl CompletionsMenu {
 
 #[derive(Clone)]
 pub struct AvailableCodeAction {
-    pub excerpt_id: ExcerptId,
     pub action: CodeAction,
     pub provider: Rc<dyn CodeActionProvider>,
 }
@@ -1434,7 +1433,6 @@ impl CodeActionContents {
             })
             .chain(self.actions.iter().flat_map(|actions| {
                 actions.iter().map(|available| CodeActionsItem::CodeAction {
-                    excerpt_id: available.excerpt_id,
                     action: available.action.clone(),
                     provider: available.provider.clone(),
                 })
@@ -1458,7 +1456,6 @@ impl CodeActionContents {
         if let Some(actions) = &self.actions {
             if let Some(available) = actions.get(index) {
                 return Some(CodeActionsItem::CodeAction {
-                    excerpt_id: available.excerpt_id,
                     action: available.action.clone(),
                     provider: available.provider.clone(),
                 });
@@ -1478,7 +1475,6 @@ impl CodeActionContents {
 pub enum CodeActionsItem {
     Task(TaskSourceKind, ResolvedTask),
     CodeAction {
-        excerpt_id: ExcerptId,
         action: CodeAction,
         provider: Rc<dyn CodeActionProvider>,
     },

crates/editor/src/display_map.rs πŸ”—

@@ -103,7 +103,7 @@ use language::{
 };
 
 use multi_buffer::{
-    Anchor, AnchorRangeExt, ExcerptId, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16,
+    Anchor, AnchorRangeExt, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16,
     MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, ToPoint,
 };
 use project::project_settings::DiagnosticSeverity;
@@ -125,7 +125,7 @@ use std::{
     fmt::Debug,
     iter,
     num::NonZeroU32,
-    ops::{self, Add, Bound, Range, Sub},
+    ops::{self, Add, Range, Sub},
     sync::Arc,
 };
 
@@ -195,10 +195,9 @@ pub struct CompanionExcerptPatch {
 }
 
 pub type ConvertMultiBufferRows = fn(
-    &HashMap<ExcerptId, ExcerptId>,
     &MultiBufferSnapshot,
     &MultiBufferSnapshot,
-    (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+    Range<MultiBufferPoint>,
 ) -> Vec<CompanionExcerptPatch>;
 
 /// Decides how text in a [`MultiBuffer`] should be displayed in a buffer, handling inlay hints,
@@ -240,8 +239,6 @@ pub(crate) struct Companion {
     rhs_display_map_id: EntityId,
     rhs_buffer_to_lhs_buffer: HashMap<BufferId, BufferId>,
     lhs_buffer_to_rhs_buffer: HashMap<BufferId, BufferId>,
-    rhs_excerpt_to_lhs_excerpt: HashMap<ExcerptId, ExcerptId>,
-    lhs_excerpt_to_rhs_excerpt: HashMap<ExcerptId, ExcerptId>,
     rhs_rows_to_lhs_rows: ConvertMultiBufferRows,
     lhs_rows_to_rhs_rows: ConvertMultiBufferRows,
     rhs_custom_block_to_balancing_block: RefCell<HashMap<CustomBlockId, CustomBlockId>>,
@@ -258,8 +255,6 @@ impl Companion {
             rhs_display_map_id,
             rhs_buffer_to_lhs_buffer: Default::default(),
             lhs_buffer_to_rhs_buffer: Default::default(),
-            rhs_excerpt_to_lhs_excerpt: Default::default(),
-            lhs_excerpt_to_rhs_excerpt: Default::default(),
             rhs_rows_to_lhs_rows,
             lhs_rows_to_rhs_rows,
             rhs_custom_block_to_balancing_block: Default::default(),
@@ -287,14 +282,14 @@ impl Companion {
         display_map_id: EntityId,
         companion_snapshot: &MultiBufferSnapshot,
         our_snapshot: &MultiBufferSnapshot,
-        bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+        bounds: Range<MultiBufferPoint>,
     ) -> Vec<CompanionExcerptPatch> {
-        let (excerpt_map, convert_fn) = if self.is_rhs(display_map_id) {
-            (&self.rhs_excerpt_to_lhs_excerpt, self.rhs_rows_to_lhs_rows)
+        let convert_fn = if self.is_rhs(display_map_id) {
+            self.rhs_rows_to_lhs_rows
         } else {
-            (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows)
+            self.lhs_rows_to_rhs_rows
         };
-        convert_fn(excerpt_map, companion_snapshot, our_snapshot, bounds)
+        convert_fn(companion_snapshot, our_snapshot, bounds)
     }
 
     pub(crate) fn convert_point_from_companion(
@@ -304,20 +299,15 @@ impl Companion {
         companion_snapshot: &MultiBufferSnapshot,
         point: MultiBufferPoint,
     ) -> Range<MultiBufferPoint> {
-        let (excerpt_map, convert_fn) = if self.is_rhs(display_map_id) {
-            (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows)
+        let convert_fn = if self.is_rhs(display_map_id) {
+            self.lhs_rows_to_rhs_rows
         } else {
-            (&self.rhs_excerpt_to_lhs_excerpt, self.rhs_rows_to_lhs_rows)
+            self.rhs_rows_to_lhs_rows
         };
 
-        let excerpt = convert_fn(
-            excerpt_map,
-            our_snapshot,
-            companion_snapshot,
-            (Bound::Included(point), Bound::Included(point)),
-        )
-        .into_iter()
-        .next();
+        let excerpt = convert_fn(our_snapshot, companion_snapshot, point..point)
+            .into_iter()
+            .next();
 
         let Some(excerpt) = excerpt else {
             return Point::zero()..our_snapshot.max_point();
@@ -332,20 +322,15 @@ impl Companion {
         companion_snapshot: &MultiBufferSnapshot,
         point: MultiBufferPoint,
     ) -> Range<MultiBufferPoint> {
-        let (excerpt_map, convert_fn) = if self.is_rhs(display_map_id) {
-            (&self.rhs_excerpt_to_lhs_excerpt, self.rhs_rows_to_lhs_rows)
+        let convert_fn = if self.is_rhs(display_map_id) {
+            self.rhs_rows_to_lhs_rows
         } else {
-            (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows)
+            self.lhs_rows_to_rhs_rows
         };
 
-        let excerpt = convert_fn(
-            excerpt_map,
-            companion_snapshot,
-            our_snapshot,
-            (Bound::Included(point), Bound::Included(point)),
-        )
-        .into_iter()
-        .next();
+        let excerpt = convert_fn(companion_snapshot, our_snapshot, point..point)
+            .into_iter()
+            .next();
 
         let Some(excerpt) = excerpt else {
             return Point::zero()..companion_snapshot.max_point();
@@ -353,30 +338,6 @@ impl Companion {
         excerpt.patch.edit_for_old_position(point).new
     }
 
-    pub(crate) fn companion_excerpt_to_excerpt(
-        &self,
-        display_map_id: EntityId,
-    ) -> &HashMap<ExcerptId, ExcerptId> {
-        if self.is_rhs(display_map_id) {
-            &self.lhs_excerpt_to_rhs_excerpt
-        } else {
-            &self.rhs_excerpt_to_lhs_excerpt
-        }
-    }
-
-    #[cfg(test)]
-    pub(crate) fn excerpt_mappings(
-        &self,
-    ) -> (
-        &HashMap<ExcerptId, ExcerptId>,
-        &HashMap<ExcerptId, ExcerptId>,
-    ) {
-        (
-            &self.lhs_excerpt_to_rhs_excerpt,
-            &self.rhs_excerpt_to_lhs_excerpt,
-        )
-    }
-
     fn buffer_to_companion_buffer(&self, display_map_id: EntityId) -> &HashMap<BufferId, BufferId> {
         if self.is_rhs(display_map_id) {
             &self.rhs_buffer_to_lhs_buffer
@@ -385,24 +346,6 @@ impl Companion {
         }
     }
 
-    pub(crate) fn add_excerpt_mapping(&mut self, lhs_id: ExcerptId, rhs_id: ExcerptId) {
-        self.lhs_excerpt_to_rhs_excerpt.insert(lhs_id, rhs_id);
-        self.rhs_excerpt_to_lhs_excerpt.insert(rhs_id, lhs_id);
-    }
-
-    pub(crate) fn remove_excerpt_mappings(
-        &mut self,
-        lhs_ids: impl IntoIterator<Item = ExcerptId>,
-        rhs_ids: impl IntoIterator<Item = ExcerptId>,
-    ) {
-        for id in lhs_ids {
-            self.lhs_excerpt_to_rhs_excerpt.remove(&id);
-        }
-        for id in rhs_ids {
-            self.rhs_excerpt_to_lhs_excerpt.remove(&id);
-        }
-    }
-
     pub(crate) fn lhs_to_rhs_buffer(&self, lhs_buffer_id: BufferId) -> Option<BufferId> {
         self.lhs_buffer_to_rhs_buffer.get(&lhs_buffer_id).copied()
     }
@@ -540,8 +483,7 @@ impl DisplayMap {
                 .wrap_map
                 .update(cx, |wrap_map, cx| wrap_map.sync(snapshot, edits, cx));
 
-            let (snapshot, edits) =
-                writer.unfold_intersecting([Anchor::min()..Anchor::max()], true);
+            let (snapshot, edits) = writer.unfold_intersecting([Anchor::Min..Anchor::Max], true);
             let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
             let (snapshot, _edits) = self
                 .wrap_map
@@ -632,18 +574,6 @@ impl DisplayMap {
         self.companion.as_ref().map(|(_, c)| c)
     }
 
-    pub(crate) fn companion_excerpt_to_my_excerpt(
-        &self,
-        their_id: ExcerptId,
-        cx: &App,
-    ) -> Option<ExcerptId> {
-        let (_, companion) = self.companion.as_ref()?;
-        let c = companion.read(cx);
-        c.companion_excerpt_to_excerpt(self.entity_id)
-            .get(&their_id)
-            .copied()
-    }
-
     fn sync_through_wrap(&mut self, cx: &mut App) -> (WrapSnapshot, WrapPatch) {
         let tab_size = Self::tab_size(&self.buffer, cx);
         let buffer_snapshot = self.buffer.read(cx).snapshot(cx);
@@ -1054,17 +984,10 @@ impl DisplayMap {
             return;
         }
 
-        let excerpt_ids = snapshot
-            .excerpts()
-            .filter(|(_, buf, _)| buf.remote_id() == buffer_id)
-            .map(|(id, _, _)| id)
-            .collect::<Vec<_>>();
-
         let base_placeholder = self.fold_placeholder.clone();
         let creases = ranges.into_iter().filter_map(|folding_range| {
-            let mb_range = excerpt_ids.iter().find_map(|&id| {
-                snapshot.anchor_range_in_excerpt(id, folding_range.range.clone())
-            })?;
+            let mb_range =
+                snapshot.buffer_anchor_range_to_anchor_range(folding_range.range.clone())?;
             let placeholder = if let Some(collapsed_text) = folding_range.collapsed_text {
                 FoldPlaceholder {
                     render: Arc::new({

crates/editor/src/display_map/block_map.rs πŸ”—

@@ -11,8 +11,8 @@ use collections::{Bound, HashMap, HashSet};
 use gpui::{AnyElement, App, EntityId, Pixels, Window};
 use language::{Patch, Point};
 use multi_buffer::{
-    Anchor, ExcerptId, ExcerptInfo, MultiBuffer, MultiBufferOffset, MultiBufferPoint,
-    MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, ToPoint as _,
+    Anchor, ExcerptBoundaryInfo, MultiBuffer, MultiBufferOffset, MultiBufferPoint, MultiBufferRow,
+    MultiBufferSnapshot, RowInfo, ToOffset, ToPoint as _,
 };
 use parking_lot::Mutex;
 use std::{
@@ -298,10 +298,10 @@ pub struct BlockContext<'a, 'b> {
     pub indent_guide_padding: Pixels,
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum BlockId {
-    ExcerptBoundary(ExcerptId),
-    FoldedBuffer(ExcerptId),
+    ExcerptBoundary(Anchor),
+    FoldedBuffer(BufferId),
     Custom(CustomBlockId),
     Spacer(SpacerId),
 }
@@ -310,10 +310,8 @@ impl From<BlockId> for ElementId {
     fn from(value: BlockId) -> Self {
         match value {
             BlockId::Custom(CustomBlockId(id)) => ("Block", id).into(),
-            BlockId::ExcerptBoundary(excerpt_id) => {
-                ("ExcerptBoundary", EntityId::from(excerpt_id)).into()
-            }
-            BlockId::FoldedBuffer(id) => ("FoldedBuffer", EntityId::from(id)).into(),
+            BlockId::ExcerptBoundary(anchor) => anchor.opaque_id().unwrap().into(),
+            BlockId::FoldedBuffer(id) => ("FoldedBuffer", EntityId::from(id.to_proto())).into(),
             BlockId::Spacer(SpacerId(id)) => ("Spacer", id).into(),
         }
     }
@@ -323,7 +321,7 @@ impl std::fmt::Display for BlockId {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             Self::Custom(id) => write!(f, "Block({id:?})"),
-            Self::ExcerptBoundary(id) => write!(f, "ExcerptHeader({id:?})"),
+            Self::ExcerptBoundary(id) => write!(f, "ExcerptBoundary({id:?})"),
             Self::FoldedBuffer(id) => write!(f, "FoldedBuffer({id:?})"),
             Self::Spacer(id) => write!(f, "Spacer({id:?})"),
         }
@@ -340,15 +338,15 @@ struct Transform {
 pub enum Block {
     Custom(Arc<CustomBlock>),
     FoldedBuffer {
-        first_excerpt: ExcerptInfo,
+        first_excerpt: ExcerptBoundaryInfo,
         height: u32,
     },
     ExcerptBoundary {
-        excerpt: ExcerptInfo,
+        excerpt: ExcerptBoundaryInfo,
         height: u32,
     },
     BufferHeader {
-        excerpt: ExcerptInfo,
+        excerpt: ExcerptBoundaryInfo,
         height: u32,
     },
     Spacer {
@@ -365,12 +363,14 @@ impl Block {
             Block::ExcerptBoundary {
                 excerpt: next_excerpt,
                 ..
-            } => BlockId::ExcerptBoundary(next_excerpt.id),
-            Block::FoldedBuffer { first_excerpt, .. } => BlockId::FoldedBuffer(first_excerpt.id),
+            } => BlockId::ExcerptBoundary(next_excerpt.start_anchor),
+            Block::FoldedBuffer { first_excerpt, .. } => {
+                BlockId::FoldedBuffer(first_excerpt.buffer_id())
+            }
             Block::BufferHeader {
                 excerpt: next_excerpt,
                 ..
-            } => BlockId::ExcerptBoundary(next_excerpt.id),
+            } => BlockId::ExcerptBoundary(next_excerpt.start_anchor),
             Block::Spacer { id, .. } => BlockId::Spacer(*id),
         }
     }
@@ -1174,10 +1174,10 @@ impl BlockMap {
                 let wrap_row = wrap_row_for(Point::new(excerpt_boundary.row.0, 0), Bias::Left);
 
                 let new_buffer_id = match (&excerpt_boundary.prev, &excerpt_boundary.next) {
-                    (None, next) => Some(next.buffer_id),
+                    (None, next) => Some(next.buffer_id()),
                     (Some(prev), next) => {
-                        if prev.buffer_id != next.buffer_id {
-                            Some(next.buffer_id)
+                        if prev.buffer_id() != next.buffer_id() {
+                            Some(next.buffer_id())
                         } else {
                             None
                         }
@@ -1195,7 +1195,7 @@ impl BlockMap {
                         let mut last_excerpt_end_row = first_excerpt.end_row;
 
                         while let Some(next_boundary) = boundaries.peek() {
-                            if next_boundary.next.buffer_id == new_buffer_id {
+                            if next_boundary.next.buffer_id() == new_buffer_id {
                                 last_excerpt_end_row = next_boundary.next.end_row;
                             } else {
                                 break;
@@ -1254,12 +1254,24 @@ impl BlockMap {
         let our_buffer = wrap_snapshot.buffer_snapshot();
         let companion_buffer = companion_snapshot.buffer_snapshot();
 
-        let patches = companion.convert_rows_to_companion(
+        let range = match bounds {
+            (Bound::Included(start), Bound::Excluded(end)) => start..end,
+            (Bound::Included(start), Bound::Unbounded) => start..wrap_snapshot.buffer().max_point(),
+            _ => unreachable!(),
+        };
+        let mut patches = companion.convert_rows_to_companion(
             display_map_id,
             companion_buffer,
             our_buffer,
-            bounds,
+            range,
         );
+        if let Some(patch) = patches.last()
+            && let Bound::Excluded(end) = bounds.1
+            && end == wrap_snapshot.buffer().max_point()
+            && patch.source_excerpt_range.is_empty()
+        {
+            patches.pop();
+        }
 
         let mut our_inlay_point_cursor = wrap_snapshot.inlay_point_cursor();
         let mut our_fold_point_cursor = wrap_snapshot.fold_point_cursor();
@@ -1391,18 +1403,15 @@ impl BlockMap {
                 }
             }
 
-            // Main loop: process one hunk/group at a time, possibly inserting spacers before and after.
             while let Some(source_point) = source_points.next() {
                 let mut current_boundary = source_point;
                 let current_range = excerpt.patch.edit_for_old_position(current_boundary).new;
 
-                // This can only occur at the end of an excerpt.
                 if current_boundary.column > 0 {
                     debug_assert_eq!(current_boundary, excerpt.source_excerpt_range.end);
                     break;
                 }
 
-                // Align the two sides at the start of this group.
                 let (delta_at_start, mut spacer_at_start) = determine_spacer(
                     &mut our_wrapper,
                     &mut companion_wrapper,
@@ -1434,7 +1443,6 @@ impl BlockMap {
                     source_points.next();
                 }
 
-                // This can only occur at the end of an excerpt.
                 if current_boundary.column > 0 {
                     debug_assert_eq!(current_boundary, excerpt.source_excerpt_range.end);
                     break;
@@ -1538,7 +1546,8 @@ impl BlockMap {
                         | Block::BufferHeader {
                             excerpt: excerpt_b, ..
                         },
-                    ) => Some(excerpt_a.id).cmp(&Some(excerpt_b.id)),
+                    ) => Some(excerpt_a.start_text_anchor().opaque_id())
+                        .cmp(&Some(excerpt_b.start_text_anchor().opaque_id())),
                     (
                         Block::ExcerptBoundary { .. } | Block::BufferHeader { .. },
                         Block::Spacer { .. } | Block::Custom(_),
@@ -2042,7 +2051,7 @@ impl BlockMapWriter<'_> {
             } else {
                 self.block_map.folded_buffers.remove(&buffer_id);
             }
-            ranges.extend(multi_buffer.excerpt_ranges_for_buffer(buffer_id, cx));
+            ranges.extend(multi_buffer.range_for_buffer(buffer_id, cx));
             if let Some(companion) = &self.companion
                 && companion.inverse.is_some()
             {
@@ -2268,14 +2277,16 @@ impl BlockSnapshot {
                 let custom_block = self.custom_blocks_by_id.get(&custom_block_id)?;
                 return Some(Block::Custom(custom_block.clone()));
             }
-            BlockId::ExcerptBoundary(next_excerpt_id) => {
-                let excerpt_range = buffer.range_for_excerpt(next_excerpt_id)?;
-                self.wrap_snapshot
-                    .make_wrap_point(excerpt_range.start, Bias::Left)
+            BlockId::ExcerptBoundary(start_anchor) => {
+                let start_point = start_anchor.to_point(&buffer);
+                self.wrap_snapshot.make_wrap_point(start_point, Bias::Left)
             }
-            BlockId::FoldedBuffer(excerpt_id) => self
-                .wrap_snapshot
-                .make_wrap_point(buffer.range_for_excerpt(excerpt_id)?.start, Bias::Left),
+            BlockId::FoldedBuffer(buffer_id) => self.wrap_snapshot.make_wrap_point(
+                buffer
+                    .anchor_in_excerpt(buffer.excerpts_for_buffer(buffer_id).next()?.context.start)?
+                    .to_point(buffer),
+                Bias::Left,
+            ),
             BlockId::Spacer(_) => return None,
         };
         let wrap_row = wrap_point.row();
@@ -2571,7 +2582,7 @@ impl BlockChunks<'_> {
 }
 
 pub struct StickyHeaderExcerpt<'a> {
-    pub excerpt: &'a ExcerptInfo,
+    pub excerpt: &'a ExcerptBoundaryInfo,
 }
 
 impl<'a> Iterator for BlockChunks<'a> {
@@ -3096,7 +3107,13 @@ mod tests {
             );
             multi_buffer
         });
-        let excerpt_ids = multi_buffer.read_with(cx, |mb, _| mb.excerpt_ids());
+        let excerpt_start_anchors = multi_buffer.read_with(cx, |mb, _| {
+            let snapshot = mb.snapshot(cx);
+            snapshot
+                .excerpts()
+                .map(|e| snapshot.anchor_in_excerpt(e.context.start).unwrap())
+                .collect::<Vec<_>>()
+        });
 
         let font = test_font();
         let font_size = px(14.);
@@ -3129,9 +3146,9 @@ mod tests {
         assert_eq!(
             blocks,
             vec![
-                (0..1, BlockId::ExcerptBoundary(excerpt_ids[0])), // path, header
-                (3..4, BlockId::ExcerptBoundary(excerpt_ids[1])), // path, header
-                (6..7, BlockId::ExcerptBoundary(excerpt_ids[2])), // path, header
+                (0..1, BlockId::ExcerptBoundary(excerpt_start_anchors[0])), // path, header
+                (3..4, BlockId::ExcerptBoundary(excerpt_start_anchors[1])), // path, header
+                (6..7, BlockId::ExcerptBoundary(excerpt_start_anchors[2])), // path, header
             ]
         );
     }
@@ -3447,13 +3464,13 @@ mod tests {
                 ],
                 cx,
             );
-            assert_eq!(multibuffer.read(cx).excerpt_ids().len(), 6);
+            assert_eq!(multibuffer.read(cx).snapshot(cx).excerpts().count(), 6);
             multibuffer
         });
         let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
         let buffer_ids = buffer_snapshot
             .excerpts()
-            .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
+            .map(|excerpt| excerpt.context.start.buffer_id)
             .dedup()
             .collect::<Vec<_>>();
         assert_eq!(buffer_ids.len(), 3);
@@ -3800,7 +3817,7 @@ mod tests {
         let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
         let buffer_ids = buffer_snapshot
             .excerpts()
-            .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
+            .map(|excerpt| excerpt.context.start.buffer_id)
             .dedup()
             .collect::<Vec<_>>();
         assert_eq!(buffer_ids.len(), 1);
@@ -4008,17 +4025,16 @@ mod tests {
                         wrap_map.sync(tab_snapshot, tab_edits, cx)
                     });
                     let mut block_map = block_map.write(wraps_snapshot, wrap_edits, None);
-                    let (unfolded_buffers, folded_buffers) = buffer.read_with(cx, |buffer, _| {
-                        let folded_buffers: Vec<_> =
-                            block_map.block_map.folded_buffers.iter().cloned().collect();
-                        let mut unfolded_buffers = buffer.excerpt_buffer_ids();
-                        unfolded_buffers.dedup();
-                        log::debug!("All buffers {unfolded_buffers:?}");
-                        log::debug!("Folded buffers {folded_buffers:?}");
-                        unfolded_buffers.retain(|buffer_id| {
-                            !block_map.block_map.folded_buffers.contains(buffer_id)
-                        });
-                        (unfolded_buffers, folded_buffers)
+                    let folded_buffers: Vec<_> =
+                        block_map.block_map.folded_buffers.iter().cloned().collect();
+                    let mut unfolded_buffers = buffer_snapshot
+                        .buffer_ids_for_range(Anchor::Min..Anchor::Max)
+                        .collect::<Vec<_>>();
+                    unfolded_buffers.dedup();
+                    log::debug!("All buffers {unfolded_buffers:?}");
+                    log::debug!("Folded buffers {folded_buffers:?}");
+                    unfolded_buffers.retain(|buffer_id| {
+                        !block_map.block_map.folded_buffers.contains(buffer_id)
                     });
                     let mut folded_count = folded_buffers.len();
                     let mut unfolded_count = unfolded_buffers.len();
@@ -4039,12 +4055,14 @@ mod tests {
                             log::info!("Folding {buffer_to_fold:?}");
                             let related_excerpts = buffer_snapshot
                                 .excerpts()
-                                .filter_map(|(excerpt_id, buffer, range)| {
-                                    if buffer.remote_id() == buffer_to_fold {
+                                .filter_map(|excerpt| {
+                                    if excerpt.context.start.buffer_id == buffer_to_fold {
                                         Some((
-                                            excerpt_id,
-                                            buffer
-                                                .text_for_range(range.context)
+                                            excerpt.context.start,
+                                            buffer_snapshot
+                                                .buffer_for_id(buffer_to_fold)
+                                                .unwrap()
+                                                .text_for_range(excerpt.context)
                                                 .collect::<String>(),
                                         ))
                                     } else {
@@ -4518,7 +4536,7 @@ mod tests {
         let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
         let buffer_ids = buffer_snapshot
             .excerpts()
-            .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
+            .map(|excerpt| excerpt.context.start.buffer_id)
             .dedup()
             .collect::<Vec<_>>();
         assert_eq!(buffer_ids.len(), 1);
@@ -4563,7 +4581,7 @@ mod tests {
         let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
         let buffer_ids = buffer_snapshot
             .excerpts()
-            .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
+            .map(|excerpt| excerpt.context.start.buffer_id)
             .dedup()
             .collect::<Vec<_>>();
         assert_eq!(buffer_ids.len(), 1);
@@ -4635,11 +4653,6 @@ mod tests {
         let subscription =
             rhs_multibuffer.update(cx, |rhs_multibuffer, _| rhs_multibuffer.subscribe());
 
-        let lhs_excerpt_id =
-            lhs_multibuffer.read_with(cx, |mb, cx| mb.snapshot(cx).excerpts().next().unwrap().0);
-        let rhs_excerpt_id =
-            rhs_multibuffer.read_with(cx, |mb, cx| mb.snapshot(cx).excerpts().next().unwrap().0);
-
         let lhs_buffer_snapshot = cx.update(|cx| lhs_multibuffer.read(cx).snapshot(cx));
         let (mut _lhs_inlay_map, lhs_inlay_snapshot) = InlayMap::new(lhs_buffer_snapshot);
         let (mut _lhs_fold_map, lhs_fold_snapshot) = FoldMap::new(lhs_inlay_snapshot);
@@ -4661,13 +4674,11 @@ mod tests {
         let rhs_entity_id = rhs_multibuffer.entity_id();
 
         let companion = cx.new(|_| {
-            let mut c = Companion::new(
+            Companion::new(
                 rhs_entity_id,
                 convert_rhs_rows_to_lhs,
                 convert_lhs_rows_to_rhs,
-            );
-            c.add_excerpt_mapping(lhs_excerpt_id, rhs_excerpt_id);
-            c
+            )
         });
 
         let rhs_edits = Patch::new(vec![text::Edit {

crates/editor/src/display_map/fold_map.rs πŸ”—

@@ -185,16 +185,18 @@ impl FoldMapWriter<'_> {
                 continue;
             }
 
+            let fold_range = buffer.anchor_after(range.start)..buffer.anchor_before(range.end);
             // For now, ignore any ranges that span an excerpt boundary.
-            let fold_range =
-                FoldRange(buffer.anchor_after(range.start)..buffer.anchor_before(range.end));
-            if fold_range.0.start.excerpt_id != fold_range.0.end.excerpt_id {
+            if buffer
+                .anchor_range_to_buffer_anchor_range(fold_range.clone())
+                .is_none()
+            {
                 continue;
             }
 
             folds.push(Fold {
                 id: FoldId(post_inc(&mut self.0.next_fold_id.0)),
-                range: fold_range,
+                range: FoldRange(fold_range),
                 placeholder: fold_text,
             });
 
@@ -510,7 +512,7 @@ impl FoldMap {
                     .snapshot
                     .folds
                     .cursor::<FoldRange>(&inlay_snapshot.buffer);
-                folds_cursor.seek(&FoldRange(anchor..Anchor::max()), Bias::Left);
+                folds_cursor.seek(&FoldRange(anchor..Anchor::Max), Bias::Left);
 
                 let mut folds = iter::from_fn({
                     let inlay_snapshot = &inlay_snapshot;
@@ -1226,7 +1228,7 @@ impl DerefMut for FoldRange {
 
 impl Default for FoldRange {
     fn default() -> Self {
-        Self(Anchor::min()..Anchor::max())
+        Self(Anchor::Min..Anchor::Max)
     }
 }
 
@@ -1262,10 +1264,10 @@ pub struct FoldSummary {
 impl Default for FoldSummary {
     fn default() -> Self {
         Self {
-            start: Anchor::min(),
-            end: Anchor::max(),
-            min_start: Anchor::max(),
-            max_end: Anchor::min(),
+            start: Anchor::Min,
+            end: Anchor::Max,
+            min_start: Anchor::Max,
+            max_end: Anchor::Min,
             count: 0,
         }
     }

crates/editor/src/display_map/inlay_map.rs πŸ”—

@@ -1342,7 +1342,7 @@ mod tests {
     use settings::SettingsStore;
     use std::{cmp::Reverse, env, sync::Arc};
     use sum_tree::TreeMap;
-    use text::{Patch, Rope};
+    use text::{BufferId, Patch, Rope};
     use util::RandomCharIter;
     use util::post_inc;
 
@@ -1351,10 +1351,10 @@ mod tests {
         assert_eq!(
             Inlay::hint(
                 InlayId::Hint(0),
-                Anchor::min(),
+                Anchor::Min,
                 &InlayHint {
                     label: InlayHintLabel::String("a".to_string()),
-                    position: text::Anchor::MIN,
+                    position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()),
                     padding_left: false,
                     padding_right: false,
                     tooltip: None,
@@ -1371,10 +1371,10 @@ mod tests {
         assert_eq!(
             Inlay::hint(
                 InlayId::Hint(0),
-                Anchor::min(),
+                Anchor::Min,
                 &InlayHint {
                     label: InlayHintLabel::String("a".to_string()),
-                    position: text::Anchor::MIN,
+                    position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()),
                     padding_left: true,
                     padding_right: true,
                     tooltip: None,
@@ -1391,10 +1391,10 @@ mod tests {
         assert_eq!(
             Inlay::hint(
                 InlayId::Hint(0),
-                Anchor::min(),
+                Anchor::Min,
                 &InlayHint {
                     label: InlayHintLabel::String(" a ".to_string()),
-                    position: text::Anchor::MIN,
+                    position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()),
                     padding_left: false,
                     padding_right: false,
                     tooltip: None,
@@ -1411,10 +1411,10 @@ mod tests {
         assert_eq!(
             Inlay::hint(
                 InlayId::Hint(0),
-                Anchor::min(),
+                Anchor::Min,
                 &InlayHint {
                     label: InlayHintLabel::String(" a ".to_string()),
-                    position: text::Anchor::MIN,
+                    position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()),
                     padding_left: true,
                     padding_right: true,
                     tooltip: None,
@@ -1434,10 +1434,10 @@ mod tests {
         assert_eq!(
             Inlay::hint(
                 InlayId::Hint(0),
-                Anchor::min(),
+                Anchor::Min,
                 &InlayHint {
                     label: InlayHintLabel::String("🎨".to_string()),
-                    position: text::Anchor::MIN,
+                    position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()),
                     padding_left: true,
                     padding_right: true,
                     tooltip: None,

crates/editor/src/document_colors.rs πŸ”—

@@ -8,7 +8,7 @@ use language::point_from_lsp;
 use multi_buffer::Anchor;
 use project::{DocumentColor, InlayId};
 use settings::Settings as _;
-use text::{Bias, BufferId, OffsetRangeExt as _};
+use text::{Bias, BufferId};
 use ui::{App, Context, Window};
 use util::post_inc;
 
@@ -160,9 +160,9 @@ impl Editor {
         }
 
         let buffers_to_query = self
-            .visible_excerpts(true, cx)
-            .into_values()
-            .map(|(buffer, ..)| buffer)
+            .visible_buffers(cx)
+            .into_iter()
+            .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
             .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
             .filter(|editor_buffer| {
                 let editor_buffer_id = editor_buffer.read(cx).remote_id();
@@ -184,9 +184,9 @@ impl Editor {
                         buffers_to_query
                             .into_iter()
                             .filter_map(|buffer| {
-                                let buffer_id = buffer.read(cx).remote_id();
+                                let buffer_snapshot = buffer.read(cx).snapshot();
                                 let colors_task = lsp_store.document_colors(buffer, cx)?;
-                                Some(async move { (buffer_id, colors_task.await) })
+                                Some(async move { (buffer_snapshot, colors_task.await) })
                             })
                             .collect::<Vec<_>>()
                     })
@@ -200,40 +200,21 @@ impl Editor {
             if all_colors.is_empty() {
                 return;
             }
-            let Ok((multi_buffer_snapshot, editor_excerpts)) = editor.update(cx, |editor, cx| {
-                let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
-                let editor_excerpts = multi_buffer_snapshot.excerpts().fold(
-                    HashMap::default(),
-                    |mut acc, (excerpt_id, buffer_snapshot, excerpt_range)| {
-                        let excerpt_data = acc
-                            .entry(buffer_snapshot.remote_id())
-                            .or_insert_with(Vec::new);
-                        let excerpt_point_range =
-                            excerpt_range.context.to_point_utf16(buffer_snapshot);
-                        excerpt_data.push((
-                            excerpt_id,
-                            buffer_snapshot.clone(),
-                            excerpt_point_range,
-                        ));
-                        acc
-                    },
-                );
-                (multi_buffer_snapshot, editor_excerpts)
-            }) else {
+            let Some(multi_buffer_snapshot) = editor
+                .update(cx, |editor, cx| editor.buffer.read(cx).snapshot(cx))
+                .ok()
+            else {
                 return;
             };
 
             let mut new_editor_colors: HashMap<BufferId, Vec<(Range<Anchor>, DocumentColor)>> =
                 HashMap::default();
-            for (buffer_id, colors) in all_colors {
-                let Some(excerpts) = editor_excerpts.get(&buffer_id) else {
-                    continue;
-                };
+            for (buffer_snapshot, colors) in all_colors {
                 match colors {
                     Ok(colors) => {
                         if colors.colors.is_empty() {
                             new_editor_colors
-                                .entry(buffer_id)
+                                .entry(buffer_snapshot.remote_id())
                                 .or_insert_with(Vec::new)
                                 .clear();
                         } else {
@@ -241,41 +222,33 @@ impl Editor {
                                 let color_start = point_from_lsp(color.lsp_range.start);
                                 let color_end = point_from_lsp(color.lsp_range.end);
 
-                                for (excerpt_id, buffer_snapshot, excerpt_range) in excerpts {
-                                    if !excerpt_range.contains(&color_start.0)
-                                        || !excerpt_range.contains(&color_end.0)
-                                    {
-                                        continue;
-                                    }
-                                    let start = buffer_snapshot.anchor_before(
-                                        buffer_snapshot.clip_point_utf16(color_start, Bias::Left),
-                                    );
-                                    let end = buffer_snapshot.anchor_after(
-                                        buffer_snapshot.clip_point_utf16(color_end, Bias::Right),
-                                    );
-                                    let Some(range) = multi_buffer_snapshot
-                                        .anchor_range_in_excerpt(*excerpt_id, start..end)
-                                    else {
-                                        continue;
-                                    };
-
-                                    let new_buffer_colors =
-                                        new_editor_colors.entry(buffer_id).or_insert_with(Vec::new);
-
-                                    let (Ok(i) | Err(i)) =
-                                        new_buffer_colors.binary_search_by(|(probe, _)| {
-                                            probe
-                                                .start
-                                                .cmp(&range.start, &multi_buffer_snapshot)
-                                                .then_with(|| {
-                                                    probe
-                                                        .end
-                                                        .cmp(&range.end, &multi_buffer_snapshot)
-                                                })
-                                        });
-                                    new_buffer_colors.insert(i, (range, color));
-                                    break;
-                                }
+                                let Some(range) = multi_buffer_snapshot
+                                    .buffer_anchor_range_to_anchor_range(
+                                        buffer_snapshot.anchor_range_outside(
+                                            buffer_snapshot
+                                                .clip_point_utf16(color_start, Bias::Left)
+                                                ..buffer_snapshot
+                                                    .clip_point_utf16(color_end, Bias::Right),
+                                        ),
+                                    )
+                                else {
+                                    continue;
+                                };
+
+                                let new_buffer_colors = new_editor_colors
+                                    .entry(buffer_snapshot.remote_id())
+                                    .or_insert_with(Vec::new);
+
+                                let (Ok(i) | Err(i)) =
+                                    new_buffer_colors.binary_search_by(|(probe, _)| {
+                                        probe
+                                            .start
+                                            .cmp(&range.start, &multi_buffer_snapshot)
+                                            .then_with(|| {
+                                                probe.end.cmp(&range.end, &multi_buffer_snapshot)
+                                            })
+                                    });
+                                new_buffer_colors.insert(i, (range, color));
                             }
                         }
                     }

crates/editor/src/document_symbols.rs πŸ”—

@@ -62,10 +62,10 @@ impl Editor {
         multi_buffer_snapshot: &MultiBufferSnapshot,
         cx: &Context<Self>,
     ) -> bool {
-        let Some(excerpt) = multi_buffer_snapshot.excerpt_containing(cursor..cursor) else {
+        let Some((anchor, _)) = multi_buffer_snapshot.anchor_to_buffer_anchor(cursor) else {
             return false;
         };
-        let Some(buffer) = self.buffer.read(cx).buffer(excerpt.buffer_id()) else {
+        let Some(buffer) = self.buffer.read(cx).buffer(anchor.buffer_id) else {
             return false;
         };
         lsp_symbols_enabled(buffer.read(cx), cx)
@@ -77,19 +77,12 @@ impl Editor {
         &self,
         cursor: Anchor,
         multi_buffer_snapshot: &MultiBufferSnapshot,
-        cx: &Context<Self>,
+        _cx: &Context<Self>,
     ) -> Option<(BufferId, Vec<OutlineItem<Anchor>>)> {
-        let excerpt = multi_buffer_snapshot.excerpt_containing(cursor..cursor)?;
-        let excerpt_id = excerpt.id();
-        let buffer_id = excerpt.buffer_id();
-        if Some(buffer_id) != cursor.text_anchor.buffer_id {
-            return None;
-        }
-        let buffer = self.buffer.read(cx).buffer(buffer_id)?;
-        let buffer_snapshot = buffer.read(cx).snapshot();
-        let cursor_text_anchor = cursor.text_anchor;
-
-        let all_items = self.lsp_document_symbols.get(&buffer_id)?;
+        let (cursor_text_anchor, buffer) = multi_buffer_snapshot.anchor_to_buffer_anchor(cursor)?;
+        let all_items = self
+            .lsp_document_symbols
+            .get(&cursor_text_anchor.buffer_id)?;
         if all_items.is_empty() {
             return None;
         }
@@ -97,34 +90,36 @@ impl Editor {
         let mut symbols = all_items
             .iter()
             .filter(|item| {
-                item.range
-                    .start
-                    .cmp(&cursor_text_anchor, &buffer_snapshot)
-                    .is_le()
-                    && item
-                        .range
-                        .end
-                        .cmp(&cursor_text_anchor, &buffer_snapshot)
-                        .is_ge()
+                item.range.start.cmp(&cursor_text_anchor, buffer).is_le()
+                    && item.range.end.cmp(&cursor_text_anchor, buffer).is_ge()
             })
-            .map(|item| OutlineItem {
-                depth: item.depth,
-                range: Anchor::range_in_buffer(excerpt_id, item.range.clone()),
-                source_range_for_text: Anchor::range_in_buffer(
-                    excerpt_id,
-                    item.source_range_for_text.clone(),
-                ),
-                text: item.text.clone(),
-                highlight_ranges: item.highlight_ranges.clone(),
-                name_ranges: item.name_ranges.clone(),
-                body_range: item
-                    .body_range
-                    .as_ref()
-                    .map(|r| Anchor::range_in_buffer(excerpt_id, r.clone())),
-                annotation_range: item
-                    .annotation_range
-                    .as_ref()
-                    .map(|r| Anchor::range_in_buffer(excerpt_id, r.clone())),
+            .filter_map(|item| {
+                let range_start = multi_buffer_snapshot.anchor_in_buffer(item.range.start)?;
+                let range_end = multi_buffer_snapshot.anchor_in_buffer(item.range.end)?;
+                let source_range_for_text_start =
+                    multi_buffer_snapshot.anchor_in_buffer(item.source_range_for_text.start)?;
+                let source_range_for_text_end =
+                    multi_buffer_snapshot.anchor_in_buffer(item.source_range_for_text.end)?;
+                Some(OutlineItem {
+                    depth: item.depth,
+                    range: range_start..range_end,
+                    source_range_for_text: source_range_for_text_start..source_range_for_text_end,
+                    text: item.text.clone(),
+                    highlight_ranges: item.highlight_ranges.clone(),
+                    name_ranges: item.name_ranges.clone(),
+                    body_range: item.body_range.as_ref().and_then(|r| {
+                        Some(
+                            multi_buffer_snapshot.anchor_in_buffer(r.start)?
+                                ..multi_buffer_snapshot.anchor_in_buffer(r.end)?,
+                        )
+                    }),
+                    annotation_range: item.annotation_range.as_ref().and_then(|r| {
+                        Some(
+                            multi_buffer_snapshot.anchor_in_buffer(r.start)?
+                                ..multi_buffer_snapshot.anchor_in_buffer(r.end)?,
+                        )
+                    }),
+                })
             })
             .collect::<Vec<_>>();
 
@@ -135,7 +130,7 @@ impl Editor {
             retain
         });
 
-        Some((buffer_id, symbols))
+        Some((buffer.remote_id(), symbols))
     }
 
     /// Fetches document symbols from the LSP for buffers that have the setting
@@ -155,9 +150,10 @@ impl Editor {
         };
 
         let buffers_to_query = self
-            .visible_excerpts(true, cx)
+            .visible_buffers(cx)
             .into_iter()
-            .filter_map(|(_, (buffer, _, _))| {
+            .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
+            .filter_map(|buffer| {
                 let id = buffer.read(cx).remote_id();
                 if for_buffer.is_none_or(|target| target == id)
                     && lsp_symbols_enabled(buffer.read(cx), cx)

crates/editor/src/edit_prediction_tests.rs πŸ”—

@@ -7,7 +7,7 @@ use gpui::{
 use indoc::indoc;
 use language::EditPredictionsMode;
 use language::{Buffer, CodeLabel};
-use multi_buffer::{Anchor, ExcerptId, MultiBufferSnapshot, ToPoint};
+use multi_buffer::{Anchor, MultiBufferSnapshot, ToPoint};
 use project::{Completion, CompletionResponse, CompletionSource};
 use std::{
     ops::Range,
@@ -1242,15 +1242,14 @@ struct FakeCompletionMenuProvider;
 impl CompletionProvider for FakeCompletionMenuProvider {
     fn completions(
         &self,
-        _excerpt_id: ExcerptId,
-        _buffer: &Entity<Buffer>,
+        buffer: &Entity<Buffer>,
         _buffer_position: text::Anchor,
         _trigger: CompletionContext,
         _window: &mut Window,
-        _cx: &mut Context<crate::Editor>,
+        cx: &mut Context<crate::Editor>,
     ) -> Task<anyhow::Result<Vec<CompletionResponse>>> {
         let completion = Completion {
-            replace_range: text::Anchor::MIN..text::Anchor::MAX,
+            replace_range: text::Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id()),
             new_text: "fake_completion".to_string(),
             label: CodeLabel::plain("fake_completion".to_string(), None),
             documentation: None,

crates/editor/src/editor.rs πŸ”—

@@ -76,7 +76,7 @@ pub use linked_editing_ranges::LinkedEdits;
 pub use lsp::CompletionContext;
 pub use lsp_ext::lsp_tasks;
 pub use multi_buffer::{
-    Anchor, AnchorRangeExt, BufferOffset, ExcerptId, ExcerptRange, MBTextSummary, MultiBuffer,
+    Anchor, AnchorRangeExt, BufferOffset, ExcerptRange, MBTextSummary, MultiBuffer,
     MultiBufferOffset, MultiBufferOffsetUtf16, MultiBufferSnapshot, PathKey, RowInfo, ToOffset,
     ToPoint,
 };
@@ -150,7 +150,8 @@ use markdown::Markdown;
 use mouse_context_menu::MouseContextMenu;
 use movement::TextLayoutDetails;
 use multi_buffer::{
-    ExcerptInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow,
+    ExcerptBoundaryInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint,
+    MultiBufferRow,
 };
 use parking_lot::Mutex;
 use persistence::EditorDb;
@@ -640,6 +641,7 @@ pub(crate) enum EditDisplayMode {
 
 enum EditPrediction {
     Edit {
+        // TODO could be a language::Anchor?
         edits: Vec<(Range<Anchor>, Arc<str>)>,
         /// Predicted cursor position as (anchor, offset_from_anchor).
         /// The anchor is in multibuffer coordinates; after applying edits,
@@ -887,7 +889,8 @@ pub trait Addon: 'static {
 
     fn render_buffer_header_controls(
         &self,
-        _: &ExcerptInfo,
+        _: &ExcerptBoundaryInfo,
+        _: &language::BufferSnapshot,
         _: &Window,
         _: &App,
     ) -> Option<AnyElement> {
@@ -1340,7 +1343,7 @@ pub struct Editor {
     suppress_selection_callback: bool,
     applicable_language_settings: HashMap<Option<LanguageName>, LanguageSettings>,
     accent_data: Option<AccentData>,
-    bracket_fetched_tree_sitter_chunks: HashMap<ExcerptId, HashSet<Range<BufferRow>>>,
+    bracket_fetched_tree_sitter_chunks: HashMap<Range<text::Anchor>, HashSet<Range<BufferRow>>>,
     semantic_token_state: SemanticTokenState,
     pub(crate) refresh_matching_bracket_highlights_task: Task<()>,
     refresh_document_symbols_task: Shared<Task<()>>,
@@ -1763,15 +1766,13 @@ impl ClipboardSelection {
             project.absolute_path(&project_path, cx)
         });
 
-        let line_range = file_path.as_ref().and_then(|_| {
-            let (_, start_point, start_excerpt_id) = buffer.point_to_buffer_point(range.start)?;
-            let (_, end_point, end_excerpt_id) = buffer.point_to_buffer_point(range.end)?;
-            if start_excerpt_id == end_excerpt_id {
-                Some(start_point.row..=end_point.row)
-            } else {
-                None
-            }
-        });
+        let line_range = if file_path.is_some() {
+            buffer
+                .range_to_buffer_range(range)
+                .map(|(_, buffer_range)| buffer_range.start.row..=buffer_range.end.row)
+        } else {
+            None
+        };
 
         Self {
             len,
@@ -1852,9 +1853,8 @@ pub enum JumpData {
         line_offset_from_top: u32,
     },
     MultiBufferPoint {
-        excerpt_id: ExcerptId,
+        anchor: language::Anchor,
         position: Point,
-        anchor: text::Anchor,
         line_offset_from_top: u32,
     },
 }
@@ -1990,17 +1990,21 @@ impl Editor {
         if !self.mode.is_full() {
             return;
         }
-        let multi_buffer = display_snapshot.buffer_snapshot();
+        let multi_buffer = display_snapshot.buffer_snapshot().clone();
         let scroll_anchor = self
             .scroll_manager
             .native_anchor(display_snapshot, cx)
             .anchor;
-        let Some((excerpt_id, _, buffer)) = multi_buffer.as_singleton() else {
+        let Some(buffer_snapshot) = multi_buffer.as_singleton() else {
             return;
         };
-        let buffer = buffer.clone();
 
-        let buffer_visible_start = scroll_anchor.text_anchor.to_point(&buffer);
+        let buffer = buffer_snapshot.clone();
+        let Some((buffer_visible_start, _)) = multi_buffer.anchor_to_buffer_anchor(scroll_anchor)
+        else {
+            return;
+        };
+        let buffer_visible_start = buffer_visible_start.to_point(&buffer);
         let max_row = buffer.max_point().row;
         let start_row = buffer_visible_start.row.min(max_row);
         let end_row = (buffer_visible_start.row + 10).min(max_row);
@@ -2014,22 +2018,24 @@ impl Editor {
                     Some(syntax.as_ref()),
                 )
                 .into_iter()
-                .map(|outline_item| OutlineItem {
-                    depth: outline_item.depth,
-                    range: Anchor::range_in_buffer(excerpt_id, outline_item.range),
-                    source_range_for_text: Anchor::range_in_buffer(
-                        excerpt_id,
-                        outline_item.source_range_for_text,
-                    ),
-                    text: outline_item.text,
-                    highlight_ranges: outline_item.highlight_ranges,
-                    name_ranges: outline_item.name_ranges,
-                    body_range: outline_item
-                        .body_range
-                        .map(|range| Anchor::range_in_buffer(excerpt_id, range)),
-                    annotation_range: outline_item
-                        .annotation_range
-                        .map(|range| Anchor::range_in_buffer(excerpt_id, range)),
+                .filter_map(|outline_item| {
+                    Some(OutlineItem {
+                        depth: outline_item.depth,
+                        range: multi_buffer
+                            .buffer_anchor_range_to_anchor_range(outline_item.range)?,
+                        source_range_for_text: multi_buffer.buffer_anchor_range_to_anchor_range(
+                            outline_item.source_range_for_text,
+                        )?,
+                        text: outline_item.text,
+                        highlight_ranges: outline_item.highlight_ranges,
+                        name_ranges: outline_item.name_ranges,
+                        body_range: outline_item.body_range.and_then(|range| {
+                            multi_buffer.buffer_anchor_range_to_anchor_range(range)
+                        }),
+                        annotation_range: outline_item.annotation_range.and_then(|range| {
+                            multi_buffer.buffer_anchor_range_to_anchor_range(range)
+                        }),
+                    })
                 })
                 .collect()
         });
@@ -3024,7 +3030,10 @@ impl Editor {
     fn edit_prediction_cursor_popover_prefers_preview(
         &self,
         completion: &EditPredictionState,
+        cx: &App,
     ) -> bool {
+        let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx);
+
         match &completion.completion {
             EditPrediction::Edit {
                 edits, snapshot, ..
@@ -3033,8 +3042,13 @@ impl Editor {
                 let mut end_row: Option<u32> = None;
 
                 for (range, text) in edits {
-                    let edit_start_row = range.start.text_anchor.to_point(snapshot).row;
-                    let old_end_row = range.end.text_anchor.to_point(snapshot).row;
+                    let Some((_, range)) =
+                        multibuffer_snapshot.anchor_range_to_buffer_anchor_range(range.clone())
+                    else {
+                        continue;
+                    };
+                    let edit_start_row = range.start.to_point(snapshot).row;
+                    let old_end_row = range.end.to_point(snapshot).row;
                     let inserted_newline_count = text
                         .as_ref()
                         .chars()
@@ -3083,7 +3097,7 @@ impl Editor {
                 .active_edit_prediction
                 .as_ref()
                 .filter(|completion| {
-                    self.edit_prediction_cursor_popover_prefers_preview(completion)
+                    self.edit_prediction_cursor_popover_prefers_preview(completion, cx)
                 })
                 .map_or(EditPredictionKeybindAction::Accept, |_| {
                     EditPredictionKeybindAction::Preview
@@ -3320,13 +3334,12 @@ impl Editor {
         self.buffer.read(cx).read(cx).file_at(point).cloned()
     }
 
-    pub fn active_excerpt(
-        &self,
-        cx: &App,
-    ) -> Option<(ExcerptId, Entity<Buffer>, Range<text::Anchor>)> {
-        self.buffer
-            .read(cx)
-            .excerpt_containing(self.selections.newest_anchor().head(), cx)
+    pub fn active_buffer(&self, cx: &App) -> Option<Entity<Buffer>> {
+        let multibuffer = self.buffer.read(cx);
+        let snapshot = multibuffer.snapshot(cx);
+        let (anchor, _) =
+            snapshot.anchor_to_buffer_anchor(self.selections.newest_anchor().head())?;
+        multibuffer.buffer(anchor.buffer_id)
     }
 
     pub fn mode(&self) -> &EditorMode {
@@ -3695,8 +3708,8 @@ impl Editor {
         }
 
         if local {
-            if let Some(buffer_id) = new_cursor_position.text_anchor.buffer_id {
-                self.register_buffer(buffer_id, cx);
+            if let Some((anchor, _)) = buffer.anchor_to_buffer_anchor(new_cursor_position) {
+                self.register_buffer(anchor.buffer_id, cx);
             }
 
             let mut context_menu = self.context_menu.borrow_mut();
@@ -3778,12 +3791,13 @@ impl Editor {
         if selections.len() == 1 {
             cx.emit(SearchEvent::ActiveMatchChanged)
         }
-        if local && let Some((_, _, buffer_snapshot)) = buffer.as_singleton() {
+        if local && let Some(buffer_snapshot) = buffer.as_singleton() {
             let inmemory_selections = selections
                 .iter()
                 .map(|s| {
-                    text::ToPoint::to_point(&s.range().start.text_anchor, buffer_snapshot)
-                        ..text::ToPoint::to_point(&s.range().end.text_anchor, buffer_snapshot)
+                    let start = s.range().start.text_anchor_in(buffer_snapshot);
+                    let end = s.range().end.text_anchor_in(buffer_snapshot);
+                    (start..end).to_point(buffer_snapshot)
                 })
                 .collect();
             self.update_restoration_data(cx, |data| {
@@ -3829,7 +3843,6 @@ impl Editor {
 
     fn folds_did_change(&mut self, cx: &mut Context<Self>) {
         use text::ToOffset as _;
-        use text::ToPoint as _;
 
         if self.mode.is_minimap()
             || WorkspaceSettings::get(None, cx).restore_on_startup
@@ -3838,21 +3851,18 @@ impl Editor {
             return;
         }
 
-        if !self.buffer().read(cx).is_singleton() {
-            return;
-        }
-
         let display_snapshot = self
             .display_map
             .update(cx, |display_map, cx| display_map.snapshot(cx));
-        let Some((.., snapshot)) = display_snapshot.buffer_snapshot().as_singleton() else {
+        let Some(buffer_snapshot) = display_snapshot.buffer_snapshot().as_singleton() else {
             return;
         };
         let inmemory_folds = display_snapshot
             .folds_in_range(MultiBufferOffset(0)..display_snapshot.buffer_snapshot().len())
             .map(|fold| {
-                fold.range.start.text_anchor.to_point(&snapshot)
-                    ..fold.range.end.text_anchor.to_point(&snapshot)
+                let start = fold.range.start.text_anchor_in(buffer_snapshot);
+                let end = fold.range.end.text_anchor_in(buffer_snapshot);
+                (start..end).to_point(buffer_snapshot)
             })
             .collect();
         self.update_restoration_data(cx, |data| {
@@ -3876,8 +3886,16 @@ impl Editor {
         let db_folds = display_snapshot
             .folds_in_range(MultiBufferOffset(0)..display_snapshot.buffer_snapshot().len())
             .map(|fold| {
-                let start = fold.range.start.text_anchor.to_offset(&snapshot);
-                let end = fold.range.end.text_anchor.to_offset(&snapshot);
+                let start = fold
+                    .range
+                    .start
+                    .text_anchor_in(buffer_snapshot)
+                    .to_offset(buffer_snapshot);
+                let end = fold
+                    .range
+                    .end
+                    .text_anchor_in(buffer_snapshot)
+                    .to_offset(buffer_snapshot);
 
                 // Extract fingerprints - content at fold boundaries for validation on restore
                 // Both fingerprints must be INSIDE the fold to avoid capturing surrounding
@@ -3886,12 +3904,14 @@ impl Editor {
                 // end_fp: last min(32, fold_len) bytes of fold content
                 // Clip to character boundaries to handle multibyte UTF-8 characters.
                 let fold_len = end - start;
-                let start_fp_end = snapshot
+                let start_fp_end = buffer_snapshot
                     .clip_offset(start + std::cmp::min(FINGERPRINT_LEN, fold_len), Bias::Left);
-                let start_fp: String = snapshot.text_for_range(start..start_fp_end).collect();
-                let end_fp_start = snapshot
+                let start_fp: String = buffer_snapshot
+                    .text_for_range(start..start_fp_end)
+                    .collect();
+                let end_fp_start = buffer_snapshot
                     .clip_offset(end.saturating_sub(FINGERPRINT_LEN).max(start), Bias::Right);
-                let end_fp: String = snapshot.text_for_range(end_fp_start..end).collect();
+                let end_fp: String = buffer_snapshot.text_for_range(end_fp_start..end).collect();
 
                 (start, end, start_fp, end_fp)
             })
@@ -4654,30 +4674,31 @@ impl Editor {
 
     fn linked_editing_ranges_for(
         &self,
-        selection: Range<text::Anchor>,
+        query_range: Range<text::Anchor>,
         cx: &App,
     ) -> Option<HashMap<Entity<Buffer>, Vec<Range<text::Anchor>>>> {
+        use text::ToOffset as TO;
+
         if self.linked_edit_ranges.is_empty() {
             return None;
         }
-        let ((base_range, linked_ranges), buffer_snapshot, buffer) =
-            selection.end.buffer_id.and_then(|end_buffer_id| {
-                if selection.start.buffer_id != Some(end_buffer_id) {
-                    return None;
-                }
-                let buffer = self.buffer.read(cx).buffer(end_buffer_id)?;
-                let snapshot = buffer.read(cx).snapshot();
-                self.linked_edit_ranges
-                    .get(end_buffer_id, selection.start..selection.end, &snapshot)
-                    .map(|ranges| (ranges, snapshot, buffer))
-            })?;
-        use text::ToOffset as TO;
+        if query_range.start.buffer_id != query_range.end.buffer_id {
+            return None;
+        };
+        let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx);
+        let buffer = self.buffer.read(cx).buffer(query_range.end.buffer_id)?;
+        let buffer_snapshot = buffer.read(cx).snapshot();
+        let (base_range, linked_ranges) = self.linked_edit_ranges.get(
+            buffer_snapshot.remote_id(),
+            query_range.clone(),
+            &buffer_snapshot,
+        )?;
         // find offset from the start of current range to current cursor position
         let start_byte_offset = TO::to_offset(&base_range.start, &buffer_snapshot);
 
-        let start_offset = TO::to_offset(&selection.start, &buffer_snapshot);
+        let start_offset = TO::to_offset(&query_range.start, &buffer_snapshot);
         let start_difference = start_offset - start_byte_offset;
-        let end_offset = TO::to_offset(&selection.end, &buffer_snapshot);
+        let end_offset = TO::to_offset(&query_range.end, &buffer_snapshot);
         let end_difference = end_offset - start_byte_offset;
 
         // Current range has associated linked ranges.
@@ -4690,13 +4711,22 @@ impl Editor {
                 continue;
             }
             if self.selections.disjoint_anchor_ranges().any(|s| {
-                if s.start.text_anchor.buffer_id != selection.start.buffer_id
-                    || s.end.text_anchor.buffer_id != selection.end.buffer_id
+                let Some((selection_start, _)) =
+                    multibuffer_snapshot.anchor_to_buffer_anchor(s.start)
+                else {
+                    return false;
+                };
+                let Some((selection_end, _)) = multibuffer_snapshot.anchor_to_buffer_anchor(s.end)
+                else {
+                    return false;
+                };
+                if selection_start.buffer_id != query_range.start.buffer_id
+                    || selection_end.buffer_id != query_range.end.buffer_id
                 {
                     return false;
                 }
-                TO::to_offset(&s.start.text_anchor, &buffer_snapshot) <= end_offset
-                    && TO::to_offset(&s.end.text_anchor, &buffer_snapshot) >= start_offset
+                TO::to_offset(&selection_start, &buffer_snapshot) <= end_offset
+                    && TO::to_offset(&selection_end, &buffer_snapshot) >= start_offset
             }) {
                 continue;
             }
@@ -5015,21 +5045,26 @@ impl Editor {
 
             if !self.linked_edit_ranges.is_empty() {
                 let start_anchor = snapshot.anchor_before(selection.start);
+                let classifier = snapshot
+                    .char_classifier_at(start_anchor)
+                    .scope_context(Some(CharScopeContext::LinkedEdit));
 
-                let is_word_char = text.chars().next().is_none_or(|char| {
-                    let classifier = snapshot
-                        .char_classifier_at(start_anchor.to_offset(&snapshot))
-                        .scope_context(Some(CharScopeContext::LinkedEdit));
-                    classifier.is_word(char)
-                });
-                let is_dot = text.as_ref() == ".";
-                let should_apply_linked_edit = is_word_char || is_dot;
+                if let Some((_, anchor_range)) =
+                    snapshot.anchor_range_to_buffer_anchor_range(start_anchor..anchor)
+                {
+                    let is_word_char = text
+                        .chars()
+                        .next()
+                        .is_none_or(|char| classifier.is_word(char));
 
-                if should_apply_linked_edit {
-                    let anchor_range = start_anchor.text_anchor..anchor.text_anchor;
-                    linked_edits.push(&self, anchor_range, text.clone(), cx);
-                } else {
-                    clear_linked_edit_ranges = true;
+                    let is_dot = text.as_ref() == ".";
+                    let should_apply_linked_edit = is_word_char || is_dot;
+
+                    if should_apply_linked_edit {
+                        linked_edits.push(&self, anchor_range, text.clone(), cx);
+                    } else {
+                        clear_linked_edit_ranges = true;
+                    }
                 }
             }
 
@@ -5522,7 +5557,7 @@ impl Editor {
             let row = cursor.row;
 
             let point = Point::new(row, 0);
-            let Some((buffer_handle, buffer_point, _)) =
+            let Some((buffer_handle, buffer_point)) =
                 self.buffer.read(cx).point_to_buffer_point(point, cx)
             else {
                 continue;
@@ -5662,12 +5697,16 @@ impl Editor {
     /// Collects linked edits for the current selections, pairing each linked
     /// range with `text`.
     pub fn linked_edits_for_selections(&self, text: Arc<str>, cx: &App) -> LinkedEdits {
+        let multibuffer_snapshot = self.buffer().read(cx).snapshot(cx);
         let mut linked_edits = LinkedEdits::new();
         if !self.linked_edit_ranges.is_empty() {
             for selection in self.selections.disjoint_anchors() {
-                let start = selection.start.text_anchor;
-                let end = selection.end.text_anchor;
-                linked_edits.push(self, start..end, text.clone(), cx);
+                let Some((_, range)) =
+                    multibuffer_snapshot.anchor_range_to_buffer_anchor_range(selection.range())
+                else {
+                    continue;
+                };
+                linked_edits.push(self, range, text.clone(), cx);
             }
         }
         linked_edits
@@ -5898,53 +5937,54 @@ impl Editor {
         }
     }
 
-    pub fn visible_excerpts(
-        &self,
-        lsp_related_only: bool,
-        cx: &mut Context<Editor>,
-    ) -> HashMap<ExcerptId, (Entity<Buffer>, clock::Global, Range<usize>)> {
-        let project = self.project().cloned();
-        let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx));
+    pub fn is_lsp_relevant(&self, file: Option<&Arc<dyn language::File>>, cx: &App) -> bool {
+        let Some(project) = self.project() else {
+            return false;
+        };
+        let Some(buffer_file) = project::File::from_dyn(file) else {
+            return false;
+        };
+        let Some(entry_id) = buffer_file.project_entry_id() else {
+            return false;
+        };
+        let project = project.read(cx);
+        let Some(buffer_worktree) = project.worktree_for_id(buffer_file.worktree_id(cx), cx) else {
+            return false;
+        };
+        let Some(worktree_entry) = buffer_worktree.read(cx).entry_for_id(entry_id) else {
+            return false;
+        };
+        !worktree_entry.is_ignored
+    }
+
+    pub fn visible_buffers(&self, cx: &mut Context<Editor>) -> Vec<Entity<Buffer>> {
+        let display_snapshot = self.display_snapshot(cx);
+        let visible_range = self.multi_buffer_visible_range(&display_snapshot, cx);
         let multi_buffer = self.buffer().read(cx);
-        let multi_buffer_snapshot = multi_buffer.snapshot(cx);
-        multi_buffer_snapshot
-            .range_to_buffer_ranges(
-                self.multi_buffer_visible_range(&display_snapshot, cx)
-                    .to_inclusive(),
-            )
+        display_snapshot
+            .buffer_snapshot()
+            .range_to_buffer_ranges(visible_range)
             .into_iter()
             .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty())
-            .filter_map(|(buffer, excerpt_visible_range, excerpt_id)| {
-                if !lsp_related_only {
-                    return Some((
-                        excerpt_id,
-                        (
-                            multi_buffer.buffer(buffer.remote_id()).unwrap(),
-                            buffer.version().clone(),
-                            excerpt_visible_range.start.0..excerpt_visible_range.end.0,
-                        ),
-                    ));
-                }
+            .filter_map(|(buffer_snapshot, _, _)| multi_buffer.buffer(buffer_snapshot.remote_id()))
+            .collect()
+    }
 
-                let project = project.as_ref()?.read(cx);
-                let buffer_file = project::File::from_dyn(buffer.file())?;
-                let buffer_worktree = project.worktree_for_id(buffer_file.worktree_id(cx), cx)?;
-                let worktree_entry = buffer_worktree
-                    .read(cx)
-                    .entry_for_id(buffer_file.project_entry_id()?)?;
-                if worktree_entry.is_ignored {
-                    None
-                } else {
-                    Some((
-                        excerpt_id,
-                        (
-                            multi_buffer.buffer(buffer.remote_id()).unwrap(),
-                            buffer.version().clone(),
-                            excerpt_visible_range.start.0..excerpt_visible_range.end.0,
-                        ),
-                    ))
-                }
-            })
+    pub fn visible_buffer_ranges(
+        &self,
+        cx: &mut Context<Editor>,
+    ) -> Vec<(
+        BufferSnapshot,
+        Range<BufferOffset>,
+        ExcerptRange<text::Anchor>,
+    )> {
+        let display_snapshot = self.display_snapshot(cx);
+        let visible_range = self.multi_buffer_visible_range(&display_snapshot, cx);
+        display_snapshot
+            .buffer_snapshot()
+            .range_to_buffer_ranges(visible_range)
+            .into_iter()
+            .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty())
             .collect()
     }
 
@@ -6069,17 +6109,19 @@ impl Editor {
             .newest_anchor()
             .start
             .bias_right(&multibuffer_snapshot);
-        if position.diff_base_anchor.is_some() {
+
+        if position.diff_base_anchor().is_some() {
             return;
         }
-        let buffer_position = multibuffer_snapshot.anchor_before(position);
-        let Some(buffer) = buffer_position
-            .text_anchor
-            .buffer_id
-            .and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id))
+        let multibuffer_position = multibuffer_snapshot.anchor_before(position);
+        let Some((buffer_position, _)) =
+            multibuffer_snapshot.anchor_to_buffer_anchor(multibuffer_position)
         else {
             return;
         };
+        let Some(buffer) = self.buffer.read(cx).buffer(buffer_position.buffer_id) else {
+            return;
+        };
         let buffer_snapshot = buffer.read(cx).snapshot();
 
         let menu_is_open = matches!(
@@ -6088,9 +6130,9 @@ impl Editor {
         );
 
         let language = buffer_snapshot
-            .language_at(buffer_position.text_anchor)
+            .language_at(buffer_position)
             .map(|language| language.name());
-        let language_settings = multibuffer_snapshot.language_settings_at(buffer_position, cx);
+        let language_settings = multibuffer_snapshot.language_settings_at(multibuffer_position, cx);
         let completion_settings = language_settings.completions.clone();
 
         let show_completions_on_input = self
@@ -6101,7 +6143,7 @@ impl Editor {
         }
 
         let query: Option<Arc<String>> =
-            Self::completion_query(&multibuffer_snapshot, buffer_position)
+            Self::completion_query(&multibuffer_snapshot, multibuffer_position)
                 .map(|query| query.into());
 
         drop(multibuffer_snapshot);
@@ -6143,7 +6185,7 @@ impl Editor {
             if filter_completions {
                 menu.filter(
                     query.clone().unwrap_or_default(),
-                    buffer_position.text_anchor,
+                    buffer_position,
                     &buffer,
                     provider.clone(),
                     window,
@@ -6177,12 +6219,6 @@ impl Editor {
             }
         };
 
-        let Anchor {
-            excerpt_id: buffer_excerpt_id,
-            text_anchor: buffer_position,
-            ..
-        } = buffer_position;
-
         let (word_replace_range, word_to_exclude) = if let (word_range, Some(CharKind::Word)) =
             buffer_snapshot.surrounding_word(buffer_position, None)
         {
@@ -6225,7 +6261,7 @@ impl Editor {
             trigger.as_ref().is_none_or(|trigger| {
                 provider.is_completion_trigger(
                     &buffer,
-                    position.text_anchor,
+                    buffer_position,
                     trigger,
                     trigger_in_words,
                     cx,
@@ -6246,14 +6282,7 @@ impl Editor {
                 trigger_character,
             };
 
-            provider.completions(
-                buffer_excerpt_id,
-                &buffer,
-                buffer_position,
-                completion_context,
-                window,
-                cx,
-            )
+            provider.completions(&buffer, buffer_position, completion_context, window, cx)
         } else {
             Task::ready(Ok(Vec::new()))
         };
@@ -6593,42 +6622,42 @@ impl Editor {
         cx.stop_propagation();
 
         let buffer_handle = completions_menu.buffer.clone();
+        let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx);
+        let (initial_position, _) =
+            multibuffer_snapshot.anchor_to_buffer_anchor(completions_menu.initial_position)?;
 
         let CompletionEdit {
             new_text,
             snippet,
             replace_range,
-        } = process_completion_for_edit(
-            &completion,
-            intent,
-            &buffer_handle,
-            &completions_menu.initial_position.text_anchor,
-            cx,
-        );
+        } = process_completion_for_edit(&completion, intent, &buffer_handle, &initial_position, cx);
 
-        let buffer = buffer_handle.read(cx);
-        let snapshot = self.buffer.read(cx).snapshot(cx);
-        let newest_anchor = self.selections.newest_anchor();
-        let replace_range_multibuffer = {
-            let mut excerpt = snapshot.excerpt_containing(newest_anchor.range()).unwrap();
-            excerpt.map_range_from_buffer(replace_range.clone())
+        let buffer = buffer_handle.read(cx).snapshot();
+        let newest_selection = self.selections.newest_anchor();
+
+        let Some(replace_range_multibuffer) =
+            multibuffer_snapshot.buffer_anchor_range_to_anchor_range(replace_range.clone())
+        else {
+            return None;
         };
-        if snapshot.buffer_id_for_anchor(newest_anchor.head()) != Some(buffer.remote_id()) {
+
+        let Some((buffer_snapshot, newest_range_buffer)) =
+            multibuffer_snapshot.anchor_range_to_buffer_anchor_range(newest_selection.range())
+        else {
             return None;
-        }
+        };
 
         let old_text = buffer
             .text_for_range(replace_range.clone())
             .collect::<String>();
-        let lookbehind = newest_anchor
+        let lookbehind = newest_range_buffer
             .start
-            .text_anchor
-            .to_offset(buffer)
-            .saturating_sub(replace_range.start.0);
+            .to_offset(buffer_snapshot)
+            .saturating_sub(replace_range.start.to_offset(&buffer_snapshot));
         let lookahead = replace_range
             .end
-            .0
-            .saturating_sub(newest_anchor.end.text_anchor.to_offset(buffer));
+            .to_offset(&buffer_snapshot)
+            .saturating_sub(newest_range_buffer.end.to_offset(&buffer));
         let prefix = &old_text[..old_text.len().saturating_sub(lookahead)];
         let suffix = &old_text[lookbehind.min(old_text.len())..];
 
@@ -6641,34 +6670,40 @@ impl Editor {
 
         let text: Arc<str> = new_text.clone().into();
         for selection in &selections {
-            let range = if selection.id == newest_anchor.id {
+            let range = if selection.id == newest_selection.id {
                 replace_range_multibuffer.clone()
             } else {
                 let mut range = selection.range();
 
                 // if prefix is present, don't duplicate it
-                if snapshot.contains_str_at(range.start.saturating_sub_usize(lookbehind), prefix) {
+                if multibuffer_snapshot
+                    .contains_str_at(range.start.saturating_sub_usize(lookbehind), prefix)
+                {
                     range.start = range.start.saturating_sub_usize(lookbehind);
 
                     // if suffix is also present, mimic the newest cursor and replace it
-                    if selection.id != newest_anchor.id
-                        && snapshot.contains_str_at(range.end, suffix)
+                    if selection.id != newest_selection.id
+                        && multibuffer_snapshot.contains_str_at(range.end, suffix)
                     {
                         range.end += lookahead;
                     }
                 }
-                range
+                range.to_anchors(&multibuffer_snapshot)
             };
 
             ranges.push(range.clone());
 
-            let start_anchor = snapshot.anchor_before(range.start);
-            let end_anchor = snapshot.anchor_after(range.end);
-            let anchor_range = start_anchor.text_anchor..end_anchor.text_anchor;
-            all_commit_ranges.push(anchor_range.clone());
+            let start_anchor = multibuffer_snapshot.anchor_before(range.start);
+            let end_anchor = multibuffer_snapshot.anchor_after(range.end);
 
-            if !self.linked_edit_ranges.is_empty() {
-                linked_edits.push(&self, anchor_range, text.clone(), cx);
+            if let Some((buffer_snapshot_2, anchor_range)) =
+                multibuffer_snapshot.anchor_range_to_buffer_anchor_range(start_anchor..end_anchor)
+                && buffer_snapshot_2.remote_id() == buffer_snapshot.remote_id()
+            {
+                all_commit_ranges.push(anchor_range.clone());
+                if !self.linked_edit_ranges.is_empty() {
+                    linked_edits.push(&self, anchor_range, text.clone(), cx);
+                }
             }
         }
 
@@ -6687,8 +6722,12 @@ impl Editor {
         let tx_id = self.transact(window, cx, |editor, window, cx| {
             if let Some(mut snippet) = snippet {
                 snippet.text = new_text.to_string();
+                let offset_ranges = ranges
+                    .iter()
+                    .map(|range| range.to_offset(&multibuffer_snapshot))
+                    .collect::<Vec<_>>();
                 editor
-                    .insert_snippet(&ranges, snippet, window, cx)
+                    .insert_snippet(&offset_ranges, snippet, window, cx)
                     .log_err();
             } else {
                 editor.buffer.update(cx, |multi_buffer, cx| {
@@ -6703,7 +6742,10 @@ impl Editor {
             linked_edits.apply(cx);
             editor.refresh_edit_prediction(true, false, window, cx);
         });
-        self.invalidate_autoclose_regions(&self.selections.disjoint_anchors_arc(), &snapshot);
+        self.invalidate_autoclose_regions(
+            &self.selections.disjoint_anchors_arc(),
+            &multibuffer_snapshot,
+        );
 
         let show_new_completions_on_confirm = completion
             .confirm
@@ -6739,7 +6781,7 @@ impl Editor {
             if available_commands.contains(&lsp_command.command) {
                 Some(CodeAction {
                     server_id: *server_id,
-                    range: language::Anchor::MIN..language::Anchor::MIN,
+                    range: language::Anchor::min_min_range_for_buffer(buffer.remote_id()),
                     lsp_action: LspAction::Command(lsp_command.clone()),
                     resolved: false,
                 })
@@ -7069,13 +7111,9 @@ impl Editor {
                     Some(Task::ready(Ok(())))
                 })
             }
-            CodeActionsItem::CodeAction {
-                excerpt_id,
-                action,
-                provider,
-            } => {
+            CodeActionsItem::CodeAction { action, provider } => {
                 let apply_code_action =
-                    provider.apply_code_action(buffer, action, excerpt_id, true, window, cx);
+                    provider.apply_code_action(buffer, action, true, window, cx);
                 let workspace = workspace.downgrade();
                 Some(cx.spawn_in(window, async move |editor, cx| {
                     let project_transaction = apply_code_action.await?;
@@ -7175,17 +7213,19 @@ impl Editor {
         // avoid opening a new editor to display them.
 
         if let [(buffer, transaction)] = &*entries {
-            let excerpt = editor.update(cx, |editor, cx| {
-                editor
-                    .buffer()
-                    .read(cx)
-                    .excerpt_containing(editor.selections.newest_anchor().head(), cx)
+            let cursor_excerpt = editor.update(cx, |editor, cx| {
+                let snapshot = editor.buffer().read(cx).snapshot(cx);
+                let head = editor.selections.newest_anchor().head();
+                let (buffer_snapshot, excerpt_range) = snapshot.excerpt_containing(head..head)?;
+                if buffer_snapshot.remote_id() != buffer.read(cx).remote_id() {
+                    return None;
+                }
+                Some(excerpt_range)
             })?;
-            if let Some((_, excerpted_buffer, excerpt_range)) = excerpt
-                && excerpted_buffer == *buffer
-            {
+
+            if let Some(excerpt_range) = cursor_excerpt {
                 let all_edits_within_excerpt = buffer.read_with(cx, |buffer, _| {
-                    let excerpt_range = excerpt_range.to_offset(buffer);
+                    let excerpt_range = excerpt_range.context.to_offset(buffer);
                     buffer
                         .edited_ranges_for_transaction::<usize>(transaction)
                         .all(|range| {
@@ -7207,15 +7247,21 @@ impl Editor {
                     .read(cx)
                     .edited_ranges_for_transaction::<Point>(transaction)
                     .collect::<Vec<_>>();
-                let (ranges, _) = multibuffer.set_excerpts_for_path(
+                multibuffer.set_excerpts_for_path(
                     PathKey::for_buffer(buffer_handle, cx),
                     buffer_handle.clone(),
-                    edited_ranges,
+                    edited_ranges.clone(),
                     multibuffer_context_lines(cx),
                     cx,
                 );
-
-                ranges_to_highlight.extend(ranges);
+                let snapshot = multibuffer.snapshot(cx);
+                let buffer_snapshot = buffer_handle.read(cx).snapshot();
+                ranges_to_highlight.extend(edited_ranges.into_iter().filter_map(|range| {
+                    let text_range = buffer_snapshot.anchor_range_inside(range);
+                    let start = snapshot.anchor_in_buffer(text_range.start)?;
+                    let end = snapshot.anchor_in_buffer(text_range.end)?;
+                    Some(start..end)
+                }));
             }
             multibuffer.push_transaction(entries.iter().map(|(b, t)| (b, t)), cx);
             multibuffer
@@ -7339,10 +7385,10 @@ impl Editor {
                 .timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT)
                 .await;
 
-            let (start_buffer, start, _, end, newest_selection) = this
+            let (start_buffer, start, _, end, _newest_selection) = this
                 .update(cx, |this, cx| {
                     let newest_selection = this.selections.newest_anchor().clone();
-                    if newest_selection.head().diff_base_anchor.is_some() {
+                    if newest_selection.head().diff_base_anchor().is_some() {
                         return None;
                     }
                     let display_snapshot = this.display_snapshot(cx);
@@ -7378,7 +7424,6 @@ impl Editor {
                 if let Some(provider_actions) = provider_actions.log_err() {
                     actions.extend(provider_actions.into_iter().map(|action| {
                         AvailableCodeAction {
-                            excerpt_id: newest_selection.start.excerpt_id,
                             action,
                             provider: provider.clone(),
                         }
@@ -7426,8 +7471,7 @@ impl Editor {
             .selections
             .newest::<Point>(&snapshot.display_snapshot)
             .head();
-        let Some((buffer, point, _)) = snapshot.buffer_snapshot().point_to_buffer_point(cursor)
-        else {
+        let Some((buffer, point)) = snapshot.buffer_snapshot().point_to_buffer_point(cursor) else {
             return;
         };
 
@@ -7612,27 +7656,13 @@ impl Editor {
                         return;
                     }
 
-                    let cursor_buffer_snapshot = cursor_buffer.read(cx);
                     let mut write_ranges = Vec::new();
                     let mut read_ranges = Vec::new();
+                    let multibuffer_snapshot = buffer.snapshot(cx);
                     for highlight in highlights {
-                        let buffer_id = cursor_buffer.read(cx).remote_id();
-                        for (excerpt_id, _, excerpt_range) in
-                            buffer.excerpts_for_buffer(buffer_id, cx)
+                        for range in
+                            multibuffer_snapshot.buffer_range_to_excerpt_ranges(highlight.range)
                         {
-                            let start = highlight
-                                .range
-                                .start
-                                .max(&excerpt_range.context.start, cursor_buffer_snapshot);
-                            let end = highlight
-                                .range
-                                .end
-                                .min(&excerpt_range.context.end, cursor_buffer_snapshot);
-                            if start.cmp(&end, cursor_buffer_snapshot).is_ge() {
-                                continue;
-                            }
-
-                            let range = Anchor::range_in_buffer(excerpt_id, *start..*end);
                             if highlight.kind == lsp::DocumentHighlightKind::WRITE {
                                 write_ranges.push(range);
                             } else {
@@ -7713,7 +7743,7 @@ impl Editor {
             let match_task = cx.background_spawn(async move {
                 let buffer_ranges = multi_buffer_snapshot
                     .range_to_buffer_ranges(
-                        multi_buffer_range_to_query.start..=multi_buffer_range_to_query.end,
+                        multi_buffer_range_to_query.start..multi_buffer_range_to_query.end,
                     )
                     .into_iter()
                     .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty());
@@ -7731,11 +7761,11 @@ impl Editor {
                     return Vec::default();
                 };
                 let query_range = query_range.to_anchors(&multi_buffer_snapshot);
-                for (buffer_snapshot, search_range, excerpt_id) in buffer_ranges {
+                for (buffer_snapshot, search_range, _) in buffer_ranges {
                     match_ranges.extend(
                         regex
                             .search(
-                                buffer_snapshot,
+                                &buffer_snapshot,
                                 Some(search_range.start.0..search_range.end.0),
                             )
                             .await
@@ -7745,9 +7775,14 @@ impl Editor {
                                     .anchor_after(search_range.start + match_range.start);
                                 let match_end = buffer_snapshot
                                     .anchor_before(search_range.start + match_range.end);
-                                let match_anchor_range =
-                                    Anchor::range_in_buffer(excerpt_id, match_start..match_end);
-                                (match_anchor_range != query_range).then_some(match_anchor_range)
+                                {
+                                    let range = multi_buffer_snapshot
+                                        .anchor_in_buffer(match_start)?
+                                        ..multi_buffer_snapshot.anchor_in_buffer(match_end)?;
+                                    Some(range).filter(|match_anchor_range| {
+                                        match_anchor_range != &query_range
+                                    })
+                                }
                             }),
                     );
                 }
@@ -8434,13 +8469,15 @@ impl Editor {
             return;
         };
 
-        let Some((_, buffer, _)) = self
-            .buffer
-            .read(cx)
-            .excerpt_containing(self.selections.newest_anchor().head(), cx)
+        let buffer_snapshot = self.buffer.read(cx).snapshot(cx);
+        let Some((position, _)) =
+            buffer_snapshot.anchor_to_buffer_anchor(self.selections.newest_anchor().head())
         else {
             return;
         };
+        let Some(buffer) = self.buffer.read(cx).buffer(position.buffer_id) else {
+            return;
+        };
 
         let extension = buffer
             .read(cx)
@@ -8687,17 +8724,16 @@ impl Editor {
         }
 
         let selection = self.selections.newest_anchor();
-        let cursor = selection.head();
         let multibuffer = self.buffer.read(cx).snapshot(cx);
+        let cursor = selection.head();
+        let (cursor_text_anchor, _) = multibuffer.anchor_to_buffer_anchor(cursor)?;
+        let buffer = self.buffer.read(cx).buffer(cursor_text_anchor.buffer_id)?;
 
         // Check project-level disable_ai setting for the current buffer
-        if let Some((buffer, _)) = self.buffer.read(cx).text_anchor_for_position(cursor, cx) {
-            if DisableAiSettings::is_ai_disabled_for_buffer(Some(&buffer), cx) {
-                return None;
-            }
+        if DisableAiSettings::is_ai_disabled_for_buffer(Some(&buffer), cx) {
+            return None;
         }
         let offset_selection = selection.map(|endpoint| endpoint.to_offset(&multibuffer));
-        let excerpt_id = cursor.excerpt_id;
 
         let show_in_menu = self.show_edit_predictions_in_menu();
         let completions_menu_has_precedence = !show_in_menu

crates/editor/src/editor_tests.rs πŸ”—

@@ -59,7 +59,6 @@ use std::{
     sync::atomic::{self, AtomicUsize},
 };
 use test::build_editor_with_project;
-use text::ToPoint as _;
 use unindent::Unindent;
 use util::{
     assert_set_eq, path,
@@ -1030,12 +1029,13 @@ async fn test_navigation_history(cx: &mut TestAppContext) {
                 original_scroll_position
             );
 
+            let other_buffer =
+                cx.new(|cx| MultiBuffer::singleton(cx.new(|cx| Buffer::local("test", cx)), cx));
+
             // Ensure we don't panic when navigation data contains invalid anchors *and* points.
-            let mut invalid_anchor = editor
-                .scroll_manager
-                .native_anchor(&editor.display_snapshot(cx), cx)
-                .anchor;
-            invalid_anchor.text_anchor.buffer_id = BufferId::new(999).ok();
+            let invalid_anchor = other_buffer.update(cx, |buffer, cx| {
+                buffer.snapshot(cx).anchor_after(MultiBufferOffset(3))
+            });
             let invalid_point = Point::new(9999, 0);
             editor.navigate(
                 Arc::new(NavigationData {
@@ -13836,7 +13836,7 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) {
             0,
             cx,
         );
-        assert_eq!(multi_buffer.excerpt_ids().len(), 9);
+        assert_eq!(multi_buffer.read(cx).excerpts().count(), 9);
         multi_buffer
     });
     let multi_buffer_editor = cx.new_window_entity(|window, cx| {
@@ -18946,157 +18946,6 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
     });
 }
 
-#[gpui::test]
-fn test_refresh_selections(cx: &mut TestAppContext) {
-    init_test(cx, |_| {});
-
-    let buffer = cx.new(|cx| Buffer::local(sample_text(5, 4, 'a'), cx));
-    let multibuffer = cx.new(|cx| {
-        let mut multibuffer = MultiBuffer::new(ReadWrite);
-        multibuffer.set_excerpts_for_path(
-            PathKey::sorted(0),
-            buffer.clone(),
-            [
-                Point::new(0, 0)..Point::new(1, 4),
-                Point::new(3, 0)..Point::new(4, 4),
-            ],
-            0,
-            cx,
-        );
-        multibuffer
-    });
-
-    let editor = cx.add_window(|window, cx| {
-        let mut editor = build_editor(multibuffer.clone(), window, cx);
-        let snapshot = editor.snapshot(window, cx);
-        editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
-            s.select_ranges([Point::new(1, 3)..Point::new(1, 3)])
-        });
-        editor.begin_selection(
-            Point::new(2, 1).to_display_point(&snapshot),
-            true,
-            1,
-            window,
-            cx,
-        );
-        assert_eq!(
-            editor.selections.ranges(&editor.display_snapshot(cx)),
-            [
-                Point::new(1, 3)..Point::new(1, 3),
-                Point::new(2, 1)..Point::new(2, 1),
-            ]
-        );
-        editor
-    });
-
-    // Refreshing selections is a no-op when excerpts haven't changed.
-    _ = editor.update(cx, |editor, window, cx| {
-        editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh());
-        assert_eq!(
-            editor.selections.ranges(&editor.display_snapshot(cx)),
-            [
-                Point::new(1, 3)..Point::new(1, 3),
-                Point::new(2, 1)..Point::new(2, 1),
-            ]
-        );
-    });
-
-    multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.set_excerpts_for_path(
-            PathKey::sorted(0),
-            buffer.clone(),
-            [Point::new(3, 0)..Point::new(4, 4)],
-            0,
-            cx,
-        );
-    });
-    _ = editor.update(cx, |editor, window, cx| {
-        // Removing an excerpt causes the first selection to become degenerate.
-        assert_eq!(
-            editor.selections.ranges(&editor.display_snapshot(cx)),
-            [
-                Point::new(0, 0)..Point::new(0, 0),
-                Point::new(0, 1)..Point::new(0, 1)
-            ]
-        );
-
-        // Refreshing selections will relocate the first selection to the original buffer
-        // location.
-        editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh());
-        assert_eq!(
-            editor.selections.ranges(&editor.display_snapshot(cx)),
-            [
-                Point::new(0, 0)..Point::new(0, 0),
-                Point::new(0, 1)..Point::new(0, 1),
-            ]
-        );
-        assert!(editor.selections.pending_anchor().is_some());
-    });
-}
-
-#[gpui::test]
-fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) {
-    init_test(cx, |_| {});
-
-    let buffer = cx.new(|cx| Buffer::local(sample_text(5, 4, 'a'), cx));
-    let multibuffer = cx.new(|cx| {
-        let mut multibuffer = MultiBuffer::new(ReadWrite);
-        multibuffer.set_excerpts_for_path(
-            PathKey::sorted(0),
-            buffer.clone(),
-            [
-                Point::new(0, 0)..Point::new(1, 4),
-                Point::new(3, 0)..Point::new(4, 4),
-            ],
-            0,
-            cx,
-        );
-        assert_eq!(multibuffer.read(cx).text(), "aaaa\nbbbb\ndddd\neeee");
-        multibuffer
-    });
-
-    let editor = cx.add_window(|window, cx| {
-        let mut editor = build_editor(multibuffer.clone(), window, cx);
-        let snapshot = editor.snapshot(window, cx);
-        editor.begin_selection(
-            Point::new(1, 3).to_display_point(&snapshot),
-            false,
-            1,
-            window,
-            cx,
-        );
-        assert_eq!(
-            editor.selections.ranges(&editor.display_snapshot(cx)),
-            [Point::new(1, 3)..Point::new(1, 3)]
-        );
-        editor
-    });
-
-    multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.set_excerpts_for_path(
-            PathKey::sorted(0),
-            buffer.clone(),
-            [Point::new(3, 0)..Point::new(4, 4)],
-            0,
-            cx,
-        );
-    });
-    _ = editor.update(cx, |editor, window, cx| {
-        assert_eq!(
-            editor.selections.ranges(&editor.display_snapshot(cx)),
-            [Point::new(0, 0)..Point::new(0, 0)]
-        );
-
-        // Ensure we don't panic when selections are refreshed and that the pending selection is finalized.
-        editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh());
-        assert_eq!(
-            editor.selections.ranges(&editor.display_snapshot(cx)),
-            [Point::new(0, 0)..Point::new(0, 0)]
-        );
-        assert!(editor.selections.pending_anchor().is_some());
-    });
-}
-
 #[gpui::test]
 async fn test_extra_newline_insertion(cx: &mut TestAppContext) {
     init_test(cx, |_| {});
@@ -19738,8 +19587,8 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) {
 
     let (buffer_1, buffer_2) = project.update(cx, |project, cx| {
         (
-            project.create_local_buffer("abc\ndef\nghi\njkl\n", None, false, cx),
-            project.create_local_buffer("mno\npqr\nstu\nvwx\n", None, false, cx),
+            project.create_local_buffer("abc\ndef\nghi\njkl\nmno\npqr\nstu\nvwx\nyza\nbcd\nefg\nhij\nklm\nnop\nqrs\ntuv\nwxy\nzab\ncde\nfgh\n", None, false, cx),
+            project.create_local_buffer("aaa\nbbb\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj\nkkk\nlll\nmmm\nnnn\nooo\nppp\nqqq\nrrr\nsss\nttt\n", None, false, cx),
         )
     });
 
@@ -19814,7 +19663,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) {
     // Remove some excerpts.
     leader.update(cx, |leader, cx| {
         leader.buffer.update(cx, |multibuffer, cx| {
-            multibuffer.remove_excerpts_for_path(
+            multibuffer.remove_excerpts(
                 PathKey::with_sort_prefix(1, rel_path("b.txt").into_arc()),
                 cx,
             );
@@ -23318,7 +23167,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut TestAppContext) {
             0,
             cx,
         );
-        assert_eq!(multibuffer.excerpt_ids().len(), 9);
+        assert_eq!(multibuffer.read(cx).excerpts().count(), 9);
         multibuffer
     });
 
@@ -23422,7 +23271,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut TestAppContext) {
             0,
             cx,
         );
-        assert_eq!(multibuffer.excerpt_ids().len(), 3);
+        assert_eq!(multibuffer.read(cx).excerpts().count(), 3);
         multibuffer
     });
 
@@ -24191,9 +24040,13 @@ async fn setup_indent_guides_editor(
 
     let buffer_id = cx.update_editor(|editor, window, cx| {
         editor.set_text(text, window, cx);
-        let buffer_ids = editor.buffer().read(cx).excerpt_buffer_ids();
-
-        buffer_ids[0]
+        editor
+            .buffer()
+            .read(cx)
+            .as_singleton()
+            .unwrap()
+            .read(cx)
+            .remote_id()
     });
 
     (buffer_id, cx)
@@ -24902,7 +24755,7 @@ async fn test_indent_guide_with_expanded_diff_hunks(cx: &mut TestAppContext) {
         editor
             .snapshot(window, cx)
             .buffer_snapshot()
-            .indent_guides_in_range(Anchor::min()..Anchor::max(), false, cx)
+            .indent_guides_in_range(Anchor::Min..Anchor::Max, false, cx)
             .map(|guide| (guide.start_row..=guide.end_row, guide.depth))
             .collect::<Vec<_>>()
     });
@@ -24957,12 +24810,19 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp
     let hunk_ranges = cx.update_editor(|editor, window, cx| {
         let snapshot = editor.snapshot(window, cx);
         let hunks = editor
-            .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot())
+            .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot())
             .collect::<Vec<_>>();
-        let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0];
+        let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx);
         hunks
             .into_iter()
-            .map(|hunk| Anchor::range_in_buffer(excerpt_id, hunk.buffer_range))
+            .map(|hunk| {
+                multibuffer_snapshot
+                    .anchor_in_excerpt(hunk.buffer_range.start)
+                    .unwrap()
+                    ..multibuffer_snapshot
+                        .anchor_in_excerpt(hunk.buffer_range.end)
+                        .unwrap()
+            })
             .collect::<Vec<_>>()
     });
     assert_eq!(hunk_ranges.len(), 2);
@@ -25047,12 +24907,19 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp
     let hunk_ranges = cx.update_editor(|editor, window, cx| {
         let snapshot = editor.snapshot(window, cx);
         let hunks = editor
-            .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot())
+            .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot())
             .collect::<Vec<_>>();
-        let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0];
+        let multibuffer_snapshot = snapshot.buffer_snapshot();
         hunks
             .into_iter()
-            .map(|hunk| Anchor::range_in_buffer(excerpt_id, hunk.buffer_range))
+            .map(|hunk| {
+                multibuffer_snapshot
+                    .anchor_in_excerpt(hunk.buffer_range.start)
+                    .unwrap()
+                    ..multibuffer_snapshot
+                        .anchor_in_excerpt(hunk.buffer_range.end)
+                        .unwrap()
+            })
             .collect::<Vec<_>>()
     });
     assert_eq!(hunk_ranges.len(), 2);
@@ -25112,12 +24979,19 @@ async fn test_toggle_deletion_hunk_at_start_of_file(
     let hunk_ranges = cx.update_editor(|editor, window, cx| {
         let snapshot = editor.snapshot(window, cx);
         let hunks = editor
-            .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot())
+            .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot())
             .collect::<Vec<_>>();
-        let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0];
+        let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx);
         hunks
             .into_iter()
-            .map(|hunk| Anchor::range_in_buffer(excerpt_id, hunk.buffer_range))
+            .map(|hunk| {
+                multibuffer_snapshot
+                    .anchor_in_excerpt(hunk.buffer_range.start)
+                    .unwrap()
+                    ..multibuffer_snapshot
+                        .anchor_in_excerpt(hunk.buffer_range.end)
+                        .unwrap()
+            })
             .collect::<Vec<_>>()
     });
     assert_eq!(hunk_ranges.len(), 1);
@@ -25217,12 +25091,17 @@ async fn test_expand_first_line_diff_hunk_keeps_deleted_lines_visible(
     // Expanding a diff hunk at the first line inserts deleted lines above the first buffer line.
     cx.update_editor(|editor, window, cx| {
         let snapshot = editor.snapshot(window, cx);
-        let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0];
+        let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx);
         let hunks = editor
-            .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot())
+            .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot())
             .collect::<Vec<_>>();
         assert_eq!(hunks.len(), 1);
-        let hunk_range = Anchor::range_in_buffer(excerpt_id, hunks[0].buffer_range.clone());
+        let hunk_range = multibuffer_snapshot
+            .anchor_in_excerpt(hunks[0].buffer_range.start)
+            .unwrap()
+            ..multibuffer_snapshot
+                .anchor_in_excerpt(hunks[0].buffer_range.end)
+                .unwrap();
         editor.toggle_single_diff_hunk(hunk_range, cx)
     });
     executor.run_until_parked();
@@ -25279,7 +25158,7 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) {
             multibuffer.set_excerpts_for_path(
                 PathKey::with_sort_prefix(0, buffer.read(cx).file().unwrap().path().clone()),
                 buffer.clone(),
-                vec![text::Anchor::MIN.to_point(&snapshot)..text::Anchor::MAX.to_point(&snapshot)],
+                vec![Point::zero()..snapshot.max_point()],
                 2,
                 cx,
             );
@@ -25365,7 +25244,7 @@ async fn test_partially_staged_hunk(cx: &mut TestAppContext) {
     cx.update_editor(|editor, window, cx| {
         let snapshot = editor.snapshot(window, cx);
         let hunks = editor
-            .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot())
+            .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot())
             .collect::<Vec<_>>();
         assert_eq!(hunks.len(), 1);
         assert_eq!(
@@ -26450,7 +26329,7 @@ async fn test_folded_buffers_cleared_on_excerpts_removed(cx: &mut TestAppContext
     // `multi_buffer::Event::ExcerptsRemoved` event is emitted, which should be
     // picked up by the editor and update the display map accordingly.
     multi_buffer.update(cx, |multi_buffer, cx| {
-        multi_buffer.remove_excerpts_for_path(PathKey::sorted(0), cx)
+        multi_buffer.remove_excerpts(PathKey::sorted(0), cx)
     });
     assert!(!editor.update(cx, |editor, cx| editor.has_any_buffer_folded(cx)));
 }
@@ -26702,7 +26581,12 @@ async fn test_multi_buffer_navigation_with_folded_buffers(cx: &mut TestAppContex
         );
         let mut editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx);
 
-        let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids();
+        let buffer_ids = multi_buffer
+            .read(cx)
+            .snapshot(cx)
+            .excerpts()
+            .map(|excerpt| excerpt.context.start.buffer_id)
+            .collect::<Vec<_>>();
         // fold all but the second buffer, so that we test navigating between two
         // adjacent folded buffers, as well as folded buffers at the start and
         // end the multibuffer
@@ -27038,7 +26922,12 @@ async fn assert_highlighted_edits(
     let text_anchor_edits = edits
         .clone()
         .into_iter()
-        .map(|(range, edit)| (range.start.text_anchor..range.end.text_anchor, edit.into()))
+        .map(|(range, edit)| {
+            (
+                range.start.expect_text_anchor()..range.end.expect_text_anchor(),
+                edit.into(),
+            )
+        })
         .collect::<Vec<_>>();
 
     let edit_preview = window
@@ -27055,10 +26944,11 @@ async fn assert_highlighted_edits(
 
     cx.update(|_window, cx| {
         let highlighted_edits = edit_prediction_edit_text(
-            snapshot.as_singleton().unwrap().2,
+            snapshot.as_singleton().unwrap(),
             &edits,
             &edit_preview,
             include_deletions,
+            &snapshot,
             cx,
         );
         assertion_fn(highlighted_edits, cx)
@@ -31479,12 +31369,8 @@ async fn test_paste_url_from_other_app_creates_markdown_link_selectively_in_mult
                 Point::new(1, 21)..Point::new(1, 25),
             ])
         });
-        let first_buffer_id = multi_buffer
-            .read(cx)
-            .excerpt_buffer_ids()
-            .into_iter()
-            .next()
-            .unwrap();
+        let snapshot = multi_buffer.read(cx).snapshot(cx);
+        let first_buffer_id = snapshot.all_buffer_ids().next().unwrap();
         let first_buffer = multi_buffer.read(cx).buffer(first_buffer_id).unwrap();
         first_buffer.update(cx, |buffer, cx| {
             buffer.set_language(Some(markdown_language.clone()), cx);
@@ -32309,6 +32195,91 @@ async fn test_scroll_by_clicking_sticky_header(cx: &mut TestAppContext) {
     assert_eq!(selections, vec![empty_range(4, 5)]);
 }
 
+#[gpui::test]
+async fn test_clicking_sticky_header_sets_character_select_mode(cx: &mut TestAppContext) {
+    init_test(cx, |_| {});
+    cx.update(|cx| {
+        SettingsStore::update_global(cx, |store, cx| {
+            store.update_user_settings(cx, |settings| {
+                settings.editor.sticky_scroll = Some(settings::StickyScrollContent {
+                    enabled: Some(true),
+                })
+            });
+        });
+    });
+    let mut cx = EditorTestContext::new(cx).await;
+
+    let line_height = cx.update_editor(|editor, window, cx| {
+        editor
+            .style(cx)
+            .text
+            .line_height_in_pixels(window.rem_size())
+    });
+
+    let buffer = indoc! {"
+            fn foo() {
+                let abc = 123;
+            }
+            Λ‡struct Bar;
+        "};
+    cx.set_state(&buffer);
+
+    cx.update_editor(|editor, _, cx| {
+        editor
+            .buffer()
+            .read(cx)
+            .as_singleton()
+            .unwrap()
+            .update(cx, |buffer, cx| {
+                buffer.set_language(Some(rust_lang()), cx);
+            })
+    });
+
+    let text_origin_x = cx.update_editor(|editor, _, _| {
+        editor
+            .last_position_map
+            .as_ref()
+            .unwrap()
+            .text_hitbox
+            .bounds
+            .origin
+            .x
+    });
+
+    cx.update_editor(|editor, window, cx| {
+        // Double click on `struct` to select it
+        editor.begin_selection(DisplayPoint::new(DisplayRow(3), 1), false, 2, window, cx);
+        editor.end_selection(window, cx);
+
+        // Scroll down one row to make `fn foo() {` a sticky header
+        editor.scroll(gpui::Point { x: 0., y: 1. }, None, window, cx);
+    });
+    cx.run_until_parked();
+
+    // Click at the start of the `fn foo() {` sticky header
+    cx.simulate_click(
+        gpui::Point {
+            x: text_origin_x,
+            y: 0.5 * line_height,
+        },
+        Modifiers::none(),
+    );
+    cx.run_until_parked();
+
+    // Shift-click at the end of `fn foo() {` to select the whole row
+    cx.update_editor(|editor, window, cx| {
+        editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 1, window, cx);
+        editor.end_selection(window, cx);
+    });
+    cx.run_until_parked();
+
+    let selections = cx.update_editor(|editor, _, cx| display_ranges(editor, cx));
+    assert_eq!(
+        selections,
+        vec![DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 10)]
+    );
+}
+
 #[gpui::test]
 async fn test_next_prev_reference(cx: &mut TestAppContext) {
     const CYCLE_POSITIONS: &[&'static str] = &[
@@ -32445,7 +32416,12 @@ async fn test_multibuffer_selections_with_folding(cx: &mut TestAppContext) {
     });
 
     let mut cx = EditorTestContext::for_editor_in(editor.clone(), cx).await;
-    let buffer_ids = cx.multibuffer(|mb, _| mb.excerpt_buffer_ids());
+    let buffer_ids = cx.multibuffer(|mb, cx| {
+        mb.snapshot(cx)
+            .excerpts()
+            .map(|excerpt| excerpt.context.start.buffer_id)
+            .collect::<Vec<_>>()
+    });
 
     cx.assert_excerpts_with_selections(indoc! {"
         [EXCERPT]
@@ -33685,7 +33661,7 @@ async fn test_diff_review_button_shown_when_ai_enabled(cx: &mut TestAppContext)
 }
 
 /// Helper function to create a DiffHunkKey for testing.
-/// Uses Anchor::min() as a placeholder anchor since these tests don't need
+/// Uses Anchor::Min as a placeholder anchor since these tests don't need
 /// real buffer positioning.
 fn test_hunk_key(file_path: &str) -> DiffHunkKey {
     DiffHunkKey {
@@ -33694,7 +33670,7 @@ fn test_hunk_key(file_path: &str) -> DiffHunkKey {
         } else {
             Arc::from(util::rel_path::RelPath::unix(file_path).unwrap())
         },
-        hunk_start_anchor: Anchor::min(),
+        hunk_start_anchor: Anchor::Min,
     }
 }
 
@@ -33717,7 +33693,7 @@ fn add_test_comment(
     comment: &str,
     cx: &mut Context<Editor>,
 ) -> usize {
-    editor.add_review_comment(key, comment.to_string(), Anchor::min()..Anchor::max(), cx)
+    editor.add_review_comment(key, comment.to_string(), Anchor::Min..Anchor::Max, cx)
 }
 
 #[gpui::test]

crates/editor/src/element.rs πŸ”—

@@ -54,7 +54,7 @@ use itertools::Itertools;
 use language::{HighlightedText, IndentGuideSettings, language_settings::ShowWhitespaceSetting};
 use markdown::Markdown;
 use multi_buffer::{
-    Anchor, ExcerptId, ExcerptInfo, ExpandExcerptDirection, ExpandInfo, MultiBufferPoint,
+    Anchor, ExcerptBoundaryInfo, ExpandExcerptDirection, ExpandInfo, MultiBufferPoint,
     MultiBufferRow, RowInfo,
 };
 
@@ -1289,7 +1289,9 @@ impl EditorElement {
             cx.notify();
         }
 
-        if let Some((bounds, buffer_id, blame_entry)) = &position_map.inline_blame_bounds {
+        if text_hovered
+            && let Some((bounds, buffer_id, blame_entry)) = &position_map.inline_blame_bounds
+        {
             let mouse_over_inline_blame = bounds.contains(&event.position);
             let mouse_over_popover = editor
                 .inline_blame_popover
@@ -1388,13 +1390,13 @@ impl EditorElement {
                 .snapshot
                 .display_point_to_anchor(valid_point, Bias::Left);
 
-            if let Some((buffer_snapshot, file)) = position_map
+            if let Some((buffer_anchor, buffer_snapshot)) = position_map
                 .snapshot
                 .buffer_snapshot()
-                .buffer_for_excerpt(buffer_anchor.excerpt_id)
-                .and_then(|buffer| buffer.file().map(|file| (buffer, file)))
+                .anchor_to_buffer_anchor(buffer_anchor)
+                && let Some(file) = buffer_snapshot.file()
             {
-                let as_point = text::ToPoint::to_point(&buffer_anchor.text_anchor, buffer_snapshot);
+                let as_point = text::ToPoint::to_point(&buffer_anchor, buffer_snapshot);
 
                 let is_visible = editor
                     .gutter_breakpoint_indicator
@@ -1750,7 +1752,7 @@ impl EditorElement {
         // Remote cursors
         if let Some(collaboration_hub) = &editor.collaboration_hub {
             for remote_selection in snapshot.remote_selections_in_range(
-                &(Anchor::min()..Anchor::max()),
+                &(Anchor::Min..Anchor::Max),
                 collaboration_hub.deref(),
                 cx,
             ) {
@@ -2587,12 +2589,6 @@ impl EditorElement {
         const INLINE_SLOT_CHAR_LIMIT: u32 = 4;
         const MAX_ALTERNATE_DISTANCE: u32 = 8;
 
-        let excerpt_id = snapshot
-            .display_snapshot
-            .buffer_snapshot()
-            .excerpt_containing(buffer_point..buffer_point)
-            .map(|excerpt| excerpt.id());
-
         let is_valid_row = |row_candidate: u32| -> bool {
             // move to other row if folded row
             if snapshot.is_line_folded(MultiBufferRow(row_candidate)) {
@@ -2608,13 +2604,18 @@ impl EditorElement {
                     row: row_candidate,
                     column: 0,
                 };
-                let candidate_excerpt_id = snapshot
+                // move to other row if different excerpt
+                let range = if candidate_point < buffer_point {
+                    candidate_point..buffer_point
+                } else {
+                    buffer_point..candidate_point
+                };
+                if snapshot
                     .display_snapshot
                     .buffer_snapshot()
-                    .excerpt_containing(candidate_point..candidate_point)
-                    .map(|excerpt| excerpt.id());
-                // move to other row if different excerpt
-                if excerpt_id != candidate_excerpt_id {
+                    .excerpt_containing(range)
+                    .is_none()
+                {
                     return false;
                 }
             }
@@ -2794,7 +2795,7 @@ impl EditorElement {
             .newest::<language::Point>(&editor_snapshot.display_snapshot)
             .head();
 
-        let Some((buffer, buffer_point, _)) = editor_snapshot
+        let Some((buffer, buffer_point)) = editor_snapshot
             .buffer_snapshot()
             .point_to_buffer_point(cursor_point)
         else {
@@ -3387,8 +3388,8 @@ impl EditorElement {
             .enumerate()
             .map(|(ix, row_info)| {
                 let ExpandInfo {
-                    excerpt_id,
                     direction,
+                    start_anchor,
                 } = row_info.expand_info?;
 
                 let icon_name = match direction {
@@ -3417,7 +3418,7 @@ impl EditorElement {
                     .width(width)
                     .on_click(move |_, window, cx| {
                         editor.update(cx, |editor, cx| {
-                            editor.expand_excerpt(excerpt_id, direction, window, cx);
+                            editor.expand_excerpt(start_anchor, direction, window, cx);
                         });
                     })
                     .tooltip(Tooltip::for_action_title(
@@ -3884,7 +3885,7 @@ impl EditorElement {
         selected_buffer_ids: &Vec<BufferId>,
         latest_selection_anchors: &HashMap<BufferId, Anchor>,
         is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
-        sticky_header_excerpt_id: Option<ExcerptId>,
+        sticky_header_excerpt_id: Option<BufferId>,
         indent_guides: &Option<Vec<IndentGuideLayout>>,
         block_resize_offset: &mut i32,
         window: &mut Window,
@@ -3972,7 +3973,7 @@ impl EditorElement {
                 let mut result = v_flex().id(block_id).w_full().pr(editor_margins.right);
 
                 if self.should_show_buffer_headers() {
-                    let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id);
+                    let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id());
                     let jump_data = header_jump_data(
                         snapshot,
                         block_row_start,
@@ -4027,8 +4028,8 @@ impl EditorElement {
                         latest_selection_anchors,
                     );
 
-                    if sticky_header_excerpt_id != Some(excerpt.id) {
-                        let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
+                    if sticky_header_excerpt_id != Some(excerpt.buffer_id()) {
+                        let selected = selected_buffer_ids.contains(&excerpt.buffer_id());
 
                         result = result.child(div().pr(editor_margins.right).child(
                             self.render_buffer_header(
@@ -4188,7 +4189,7 @@ impl EditorElement {
 
     fn render_buffer_header(
         &self,
-        for_excerpt: &ExcerptInfo,
+        for_excerpt: &ExcerptBoundaryInfo,
         is_folded: bool,
         is_selected: bool,
         is_sticky: bool,
@@ -4225,7 +4226,7 @@ impl EditorElement {
         selected_buffer_ids: &Vec<BufferId>,
         latest_selection_anchors: &HashMap<BufferId, Anchor>,
         is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
-        sticky_header_excerpt_id: Option<ExcerptId>,
+        sticky_header_excerpt_id: Option<BufferId>,
         indent_guides: &Option<Vec<IndentGuideLayout>>,
         window: &mut Window,
         cx: &mut App,
@@ -4518,7 +4519,7 @@ impl EditorElement {
 
         let editor_bg_color = cx.theme().colors().editor_background;
 
-        let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
+        let selected = selected_buffer_ids.contains(&excerpt.buffer_id());
 
         let available_width = hitbox.bounds.size.width - right_margin;
 
@@ -6732,7 +6733,13 @@ impl EditorElement {
                             SelectionEffects::scroll(Autoscroll::top_relative(line_index)),
                             window,
                             cx,
-                            |selections| selections.select_ranges([anchor..anchor]),
+                            |selections| {
+                                selections.clear_disjoint();
+                                selections.set_pending_anchor_range(
+                                    anchor..anchor,
+                                    crate::SelectMode::Character,
+                                );
+                            },
                         );
                         cx.stop_propagation();
                     });
@@ -7886,23 +7893,26 @@ impl EditorElement {
                 return;
             }
             let buffer_snapshot = &display_snapshot.buffer_snapshot();
-            for (buffer, buffer_range, excerpt_id) in
-                buffer_snapshot.range_to_buffer_ranges(anchor_range.start..=anchor_range.end)
+            for (excerpt_buffer_snapshot, buffer_range, _) in
+                buffer_snapshot.range_to_buffer_ranges(anchor_range.start..anchor_range.end)
             {
-                let buffer_range =
-                    buffer.anchor_after(buffer_range.start)..buffer.anchor_before(buffer_range.end);
+                let buffer_range = excerpt_buffer_snapshot.anchor_after(buffer_range.start)
+                    ..excerpt_buffer_snapshot.anchor_before(buffer_range.end);
                 selections.extend(debug_ranges.ranges.iter().flat_map(|debug_range| {
-                    let player_color = theme
-                        .players()
-                        .color_for_participant(debug_range.occurrence_index as u32 + 1);
-                    debug_range.ranges.iter().filter_map(move |range| {
-                        if range.start.buffer_id != Some(buffer.remote_id()) {
+                    debug_range.ranges.iter().filter_map(|range| {
+                        let player_color = theme
+                            .players()
+                            .color_for_participant(debug_range.occurrence_index as u32 + 1);
+                        if range.start.buffer_id != excerpt_buffer_snapshot.remote_id() {
                             return None;
                         }
-                        let clipped_start = range.start.max(&buffer_range.start, buffer);
-                        let clipped_end = range.end.min(&buffer_range.end, buffer);
+                        let clipped_start = range
+                            .start
+                            .max(&buffer_range.start, &excerpt_buffer_snapshot);
+                        let clipped_end =
+                            range.end.min(&buffer_range.end, &excerpt_buffer_snapshot);
                         let range = buffer_snapshot
-                            .anchor_range_in_excerpt(excerpt_id, *clipped_start..*clipped_end)?;
+                            .buffer_anchor_range_to_anchor_range(*clipped_start..*clipped_end)?;
                         let start = range.start.to_display_point(display_snapshot);
                         let end = range.end.to_display_point(display_snapshot);
                         let selection_layout = SelectionLayout {
@@ -8142,49 +8152,23 @@ pub(crate) fn header_jump_data(
     editor_snapshot: &EditorSnapshot,
     block_row_start: DisplayRow,
     height: u32,
-    first_excerpt: &ExcerptInfo,
+    first_excerpt: &ExcerptBoundaryInfo,
     latest_selection_anchors: &HashMap<BufferId, Anchor>,
 ) -> JumpData {
-    let jump_target = if let Some(anchor) = latest_selection_anchors.get(&first_excerpt.buffer_id)
-        && let Some(range) = editor_snapshot.context_range_for_excerpt(anchor.excerpt_id)
-        && let Some(buffer) = editor_snapshot
-            .buffer_snapshot()
-            .buffer_for_excerpt(anchor.excerpt_id)
+    let multibuffer_snapshot = editor_snapshot.buffer_snapshot();
+    let buffer = first_excerpt.buffer(multibuffer_snapshot);
+    let (jump_anchor, jump_buffer) = if let Some(anchor) =
+        latest_selection_anchors.get(&first_excerpt.buffer_id())
+        && let Some((jump_anchor, selection_buffer)) =
+            multibuffer_snapshot.anchor_to_buffer_anchor(*anchor)
     {
-        JumpTargetInExcerptInput {
-            id: anchor.excerpt_id,
-            buffer,
-            excerpt_start_anchor: range.start,
-            jump_anchor: anchor.text_anchor,
-        }
+        (jump_anchor, selection_buffer)
     } else {
-        JumpTargetInExcerptInput {
-            id: first_excerpt.id,
-            buffer: &first_excerpt.buffer,
-            excerpt_start_anchor: first_excerpt.range.context.start,
-            jump_anchor: first_excerpt.range.primary.start,
-        }
+        (first_excerpt.range.primary.start, buffer)
     };
-    header_jump_data_inner(editor_snapshot, block_row_start, height, &jump_target)
-}
-
-struct JumpTargetInExcerptInput<'a> {
-    id: ExcerptId,
-    buffer: &'a language::BufferSnapshot,
-    excerpt_start_anchor: text::Anchor,
-    jump_anchor: text::Anchor,
-}
-
-fn header_jump_data_inner(
-    snapshot: &EditorSnapshot,
-    block_row_start: DisplayRow,
-    height: u32,
-    for_excerpt: &JumpTargetInExcerptInput,
-) -> JumpData {
-    let buffer = &for_excerpt.buffer;
-    let jump_position = language::ToPoint::to_point(&for_excerpt.jump_anchor, buffer);
-    let excerpt_start = for_excerpt.excerpt_start_anchor;
-    let rows_from_excerpt_start = if for_excerpt.jump_anchor == excerpt_start {
+    let excerpt_start = first_excerpt.range.context.start;
+    let jump_position = language::ToPoint::to_point(&jump_anchor, jump_buffer);
+    let rows_from_excerpt_start = if jump_anchor == excerpt_start {
         0
     } else {
         let excerpt_start_point = language::ToPoint::to_point(&excerpt_start, buffer);
@@ -8193,15 +8177,14 @@ fn header_jump_data_inner(
 
     let line_offset_from_top = (block_row_start.0 + height + rows_from_excerpt_start)
         .saturating_sub(
-            snapshot
+            editor_snapshot
                 .scroll_anchor
-                .scroll_position(&snapshot.display_snapshot)
+                .scroll_position(&editor_snapshot.display_snapshot)
                 .y as u32,
         );
 
     JumpData::MultiBufferPoint {
-        excerpt_id: for_excerpt.id,
-        anchor: for_excerpt.jump_anchor,
+        anchor: jump_anchor,
         position: jump_position,
         line_offset_from_top,
     }
@@ -8209,7 +8192,7 @@ fn header_jump_data_inner(
 
 pub(crate) fn render_buffer_header(
     editor: &Entity<Editor>,
-    for_excerpt: &ExcerptInfo,
+    for_excerpt: &ExcerptBoundaryInfo,
     is_folded: bool,
     is_selected: bool,
     is_sticky: bool,
@@ -8221,6 +8204,8 @@ pub(crate) fn render_buffer_header(
     let multi_buffer = editor_read.buffer.read(cx);
     let is_read_only = editor_read.read_only(cx);
     let editor_handle: &dyn ItemHandle = editor;
+    let multibuffer_snapshot = multi_buffer.snapshot(cx);
+    let buffer = for_excerpt.buffer(&multibuffer_snapshot);
 
     let breadcrumbs = if is_selected {
         editor_read.breadcrumbs_inner(cx)
@@ -8228,31 +8213,30 @@ pub(crate) fn render_buffer_header(
         None
     };
 
+    let buffer_id = for_excerpt.buffer_id();
     let file_status = multi_buffer
         .all_diff_hunks_expanded()
-        .then(|| editor_read.status_for_buffer_id(for_excerpt.buffer_id, cx))
+        .then(|| editor_read.status_for_buffer_id(buffer_id, cx))
         .flatten();
-    let indicator = multi_buffer
-        .buffer(for_excerpt.buffer_id)
-        .and_then(|buffer| {
-            let buffer = buffer.read(cx);
-            let indicator_color = match (buffer.has_conflict(), buffer.is_dirty()) {
-                (true, _) => Some(Color::Warning),
-                (_, true) => Some(Color::Accent),
-                (false, false) => None,
-            };
-            indicator_color.map(|indicator_color| Indicator::dot().color(indicator_color))
-        });
+    let indicator = multi_buffer.buffer(buffer_id).and_then(|buffer| {
+        let buffer = buffer.read(cx);
+        let indicator_color = match (buffer.has_conflict(), buffer.is_dirty()) {
+            (true, _) => Some(Color::Warning),
+            (_, true) => Some(Color::Accent),
+            (false, false) => None,
+        };
+        indicator_color.map(|indicator_color| Indicator::dot().color(indicator_color))
+    });
 
     let include_root = editor_read
         .project
         .as_ref()
         .map(|project| project.read(cx).visible_worktrees(cx).count() > 1)
         .unwrap_or_default();
-    let file = for_excerpt.buffer.file();
+    let file = buffer.file();
     let can_open_excerpts = file.is_none_or(|file| file.can_open());
     let path_style = file.map(|file| file.path_style(cx));
-    let relative_path = for_excerpt.buffer.resolve_file_path(include_root, cx);
+    let relative_path = buffer.resolve_file_path(include_root, cx);
     let (parent_path, filename) = if let Some(path) = &relative_path {
         if let Some(path_style) = path_style {
             let (dir, file_name) = path_style.split(path);
@@ -8267,7 +8251,7 @@ pub(crate) fn render_buffer_header(
     let colors = cx.theme().colors();
 
     let header = div()
-        .id(("buffer-header", for_excerpt.buffer_id.to_proto()))
+        .id(("buffer-header", buffer_id.to_proto()))
         .p(BUFFER_HEADER_PADDING)
         .w_full()
         .h(FILE_HEADER_HEIGHT as f32 * window.line_height())
@@ -8295,7 +8279,7 @@ pub(crate) fn render_buffer_header(
                 .hover(|style| style.bg(colors.element_hover))
                 .map(|header| {
                     let editor = editor.clone();
-                    let buffer_id = for_excerpt.buffer_id;
+                    let buffer_id = for_excerpt.buffer_id();
                     let toggle_chevron_icon =
                         FileIcons::get_chevron_icon(!is_folded, cx).map(Icon::from_path);
                     let button_size = rems_from_px(28.);
@@ -8359,7 +8343,7 @@ pub(crate) fn render_buffer_header(
                         .addons
                         .values()
                         .filter_map(|addon| {
-                            addon.render_buffer_header_controls(for_excerpt, window, cx)
+                            addon.render_buffer_header_controls(for_excerpt, buffer, window, cx)
                         })
                         .take(1),
                 )
@@ -8452,7 +8436,7 @@ pub(crate) fn render_buffer_header(
                                                 ),
                                         )
                                     })
-                                    .when(!for_excerpt.buffer.capability.editable(), |el| {
+                                    .when(!buffer.capability.editable(), |el| {
                                         el.child(Icon::new(IconName::FileLock).color(Color::Muted))
                                     })
                                     .when_some(breadcrumbs, |then, breadcrumbs| {
@@ -8503,7 +8487,7 @@ pub(crate) fn render_buffer_header(
                         })
                         .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation())
                         .on_click(window.listener_for(editor, {
-                            let buffer_id = for_excerpt.buffer_id;
+                            let buffer_id = for_excerpt.buffer_id();
                             move |editor, e: &ClickEvent, window, cx| {
                                 if e.modifiers().alt {
                                     editor.open_excerpts_common(
@@ -8525,7 +8509,7 @@ pub(crate) fn render_buffer_header(
                 ),
         );
 
-    let file = for_excerpt.buffer.file().cloned();
+    let file = buffer.file().cloned();
     let editor = editor.clone();
 
     right_click_menu("buffer-header-context-menu")
@@ -9847,14 +9831,14 @@ impl Element for EditorElement {
                     };
 
                     let start_anchor = if start_row == Default::default() {
-                        Anchor::min()
+                        Anchor::Min
                     } else {
                         snapshot.buffer_snapshot().anchor_before(
                             DisplayPoint::new(start_row, 0).to_offset(&snapshot, Bias::Left),
                         )
                     };
                     let end_anchor = if end_row > max_row {
-                        Anchor::max()
+                        Anchor::Max
                     } else {
                         snapshot.buffer_snapshot().anchor_before(
                             DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right),
@@ -9880,7 +9864,7 @@ impl Element for EditorElement {
                                 editor.update(cx, |editor, cx| {
                                     let snapshot = editor.snapshot(window, cx);
                                     let start_anchor = if start_row == Default::default() {
-                                        Anchor::min()
+                                        Anchor::Min
                                     } else {
                                         snapshot.buffer_snapshot().anchor_before(
                                             DisplayPoint::new(start_row, 0)
@@ -9888,7 +9872,7 @@ impl Element for EditorElement {
                                         )
                                     };
                                     let end_anchor = if end_row > max_row {
-                                        Anchor::max()
+                                        Anchor::Max
                                     } else {
                                         snapshot.buffer_snapshot().anchor_before(
                                             DisplayPoint::new(end_row, 0)
@@ -10044,9 +10028,11 @@ impl Element for EditorElement {
                                     HashMap::default();
                                 for selection in all_anchor_selections.iter() {
                                     let head = selection.head();
-                                    if let Some(buffer_id) = head.text_anchor.buffer_id {
+                                    if let Some((text_anchor, _)) =
+                                        snapshot.buffer_snapshot().anchor_to_buffer_anchor(head)
+                                    {
                                         anchors_by_buffer
-                                            .entry(buffer_id)
+                                            .entry(text_anchor.buffer_id)
                                             .and_modify(|(latest_id, latest_anchor)| {
                                                 if selection.id > *latest_id {
                                                     *latest_id = selection.id;
@@ -10314,8 +10300,9 @@ impl Element for EditorElement {
                     } else {
                         None
                     };
-                    let sticky_header_excerpt_id =
-                        sticky_header_excerpt.as_ref().map(|top| top.excerpt.id);
+                    let sticky_header_excerpt_id = sticky_header_excerpt
+                        .as_ref()
+                        .map(|top| top.excerpt.buffer_id());
 
                     let buffer = snapshot.buffer_snapshot();
                     let start_buffer_row = MultiBufferRow(start_anchor.to_point(&buffer).row);
@@ -12960,7 +12947,7 @@ mod tests {
                 editor.insert_blocks(
                     [BlockProperties {
                         style: BlockStyle::Fixed,
-                        placement: BlockPlacement::Above(Anchor::min()),
+                        placement: BlockPlacement::Above(Anchor::Min),
                         height: Some(3),
                         render: Arc::new(|cx| div().h(3. * cx.window.line_height()).into_any()),
                         priority: 0,

crates/editor/src/folding_ranges.rs πŸ”—

@@ -21,9 +21,9 @@ impl Editor {
         };
 
         let buffers_to_query = self
-            .visible_excerpts(true, cx)
-            .into_values()
-            .map(|(buffer, ..)| buffer)
+            .visible_buffers(cx)
+            .into_iter()
+            .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
             .chain(for_buffer.and_then(|id| self.buffer.read(cx).buffer(id)))
             .filter(|buffer| {
                 let id = buffer.read(cx).remote_id();

crates/editor/src/git/blame.rs πŸ”—

@@ -204,8 +204,8 @@ impl GitBlame {
                         git_blame.generate(cx);
                     }
                 }
-                multi_buffer::Event::ExcerptsAdded { .. }
-                | multi_buffer::Event::ExcerptsEdited { .. } => git_blame.regenerate_on_edit(cx),
+                multi_buffer::Event::BufferRangesUpdated { .. }
+                | multi_buffer::Event::BuffersEdited { .. } => git_blame.regenerate_on_edit(cx),
                 _ => {}
             },
         );
@@ -346,11 +346,10 @@ impl GitBlame {
         let Some(multi_buffer) = self.multi_buffer.upgrade() else {
             return;
         };
-        multi_buffer
-            .read(cx)
-            .excerpt_buffer_ids()
-            .into_iter()
-            .for_each(|id| self.sync(cx, id));
+        let snapshot = multi_buffer.read(cx).snapshot(cx);
+        for id in snapshot.all_buffer_ids() {
+            self.sync(cx, id)
+        }
     }
 
     fn sync(&mut self, cx: &mut App, buffer_id: BufferId) {
@@ -497,10 +496,10 @@ impl GitBlame {
         }
         let buffers_to_blame = self
             .multi_buffer
-            .update(cx, |multi_buffer, _| {
-                multi_buffer
+            .update(cx, |multi_buffer, cx| {
+                let snapshot = multi_buffer.snapshot(cx);
+                snapshot
                     .all_buffer_ids()
-                    .into_iter()
                     .filter_map(|id| Some(multi_buffer.buffer(id)?.downgrade()))
                     .collect::<Vec<_>>()
             })

crates/editor/src/hover_links.rs πŸ”—

@@ -237,7 +237,8 @@ impl Editor {
                 let Some(mb_anchor) = self
                     .buffer()
                     .read(cx)
-                    .buffer_anchor_to_anchor(&buffer, anchor, cx)
+                    .snapshot(cx)
+                    .anchor_in_excerpt(anchor)
                 else {
                     return Task::ready(Ok(Navigated::No));
                 };
@@ -324,16 +325,13 @@ pub fn show_link_definition(
         return;
     }
 
-    let trigger_anchor = trigger_point.anchor();
-    let anchor = snapshot.buffer_snapshot().anchor_before(*trigger_anchor);
-    let Some(buffer) = editor.buffer().read(cx).buffer_for_anchor(anchor, cx) else {
+    let anchor = trigger_point.anchor().bias_left(snapshot.buffer_snapshot());
+    let Some((anchor, _)) = snapshot.buffer_snapshot().anchor_to_buffer_anchor(anchor) else {
+        return;
+    };
+    let Some(buffer) = editor.buffer.read(cx).buffer(anchor.buffer_id) else {
         return;
     };
-    let Anchor {
-        excerpt_id,
-        text_anchor,
-        ..
-    } = anchor;
     let same_kind = hovered_link_state.preferred_kind == preferred_kind
         || hovered_link_state
             .links
@@ -363,39 +361,39 @@ pub fn show_link_definition(
         async move {
             let result = match &trigger_point {
                 TriggerPoint::Text(_) => {
-                    if let Some((url_range, url)) = find_url(&buffer, text_anchor, cx.clone()) {
+                    if let Some((url_range, url)) = find_url(&buffer, anchor, cx.clone()) {
                         this.read_with(cx, |_, _| {
                             let range = maybe!({
                                 let range =
-                                    snapshot.anchor_range_in_excerpt(excerpt_id, url_range)?;
+                                    snapshot.buffer_anchor_range_to_anchor_range(url_range)?;
                                 Some(RangeInEditor::Text(range))
                             });
                             (range, vec![HoverLink::Url(url)])
                         })
                         .ok()
                     } else if let Some((filename_range, filename)) =
-                        find_file(&buffer, project.clone(), text_anchor, cx).await
+                        find_file(&buffer, project.clone(), anchor, cx).await
                     {
                         let range = maybe!({
                             let range =
-                                snapshot.anchor_range_in_excerpt(excerpt_id, filename_range)?;
+                                snapshot.buffer_anchor_range_to_anchor_range(filename_range)?;
                             Some(RangeInEditor::Text(range))
                         });
 
                         Some((range, vec![HoverLink::File(filename)]))
                     } else if let Some(provider) = provider {
                         let task = cx.update(|_, cx| {
-                            provider.definitions(&buffer, text_anchor, preferred_kind, cx)
+                            provider.definitions(&buffer, anchor, preferred_kind, cx)
                         })?;
                         if let Some(task) = task {
                             task.await.ok().flatten().map(|definition_result| {
                                 (
                                     definition_result.iter().find_map(|link| {
                                         link.origin.as_ref().and_then(|origin| {
-                                            let range = snapshot.anchor_range_in_excerpt(
-                                                excerpt_id,
-                                                origin.range.clone(),
-                                            )?;
+                                            let range = snapshot
+                                                .buffer_anchor_range_to_anchor_range(
+                                                    origin.range.clone(),
+                                                )?;
                                             Some(RangeInEditor::Text(range))
                                         })
                                     }),
@@ -1602,7 +1600,11 @@ mod tests {
             cx.set_state(input);
 
             let (position, snapshot) = cx.editor(|editor, _, cx| {
-                let positions = editor.selections.newest_anchor().head().text_anchor;
+                let positions = editor
+                    .selections
+                    .newest_anchor()
+                    .head()
+                    .expect_text_anchor();
                 let snapshot = editor
                     .buffer()
                     .clone()

crates/editor/src/hover_popover.rs πŸ”—

@@ -17,7 +17,7 @@ use gpui::{
 use itertools::Itertools;
 use language::{DiagnosticEntry, Language, LanguageRegistry};
 use lsp::DiagnosticSeverity;
-use markdown::{Markdown, MarkdownElement, MarkdownStyle};
+use markdown::{CopyButtonVisibility, Markdown, MarkdownElement, MarkdownStyle};
 use multi_buffer::{MultiBufferOffset, ToOffset, ToPoint};
 use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart};
 use settings::Settings;
@@ -275,12 +275,12 @@ fn show_hover(
 
     let snapshot = editor.snapshot(window, cx);
 
-    let (buffer, buffer_position) = editor
+    let (buffer_position, _) = editor
         .buffer
         .read(cx)
-        .text_anchor_for_position(anchor, cx)?;
-
-    let (excerpt_id, _, _) = editor.buffer().read(cx).excerpt_containing(anchor, cx)?;
+        .snapshot(cx)
+        .anchor_to_buffer_anchor(anchor)?;
+    let buffer = editor.buffer.read(cx).buffer(buffer_position.buffer_id)?;
 
     let language_registry = editor
         .project()
@@ -515,7 +515,7 @@ fn show_hover(
                     .and_then(|range| {
                         let range = snapshot
                             .buffer_snapshot()
-                            .anchor_range_in_excerpt(excerpt_id, range)?;
+                            .buffer_anchor_range_to_anchor_range(range)?;
                         Some(range)
                     })
                     .or_else(|| {
@@ -1040,8 +1040,7 @@ impl InfoPopover {
                         .child(
                             MarkdownElement::new(markdown, hover_markdown_style(window, cx))
                                 .code_block_renderer(markdown::CodeBlockRenderer::Default {
-                                    copy_button: false,
-                                    copy_button_on_hover: false,
+                                    copy_button_visibility: CopyButtonVisibility::Hidden,
                                     border: false,
                                 })
                                 .on_url_click(open_markdown_url)
@@ -1155,8 +1154,7 @@ impl DiagnosticPopover {
                                     diagnostics_markdown_style(window, cx),
                                 )
                                 .code_block_renderer(markdown::CodeBlockRenderer::Default {
-                                    copy_button: false,
-                                    copy_button_on_hover: false,
+                                    copy_button_visibility: CopyButtonVisibility::Hidden,
                                     border: false,
                                 })
                                 .on_url_click(

crates/editor/src/inlays.rs πŸ”—

@@ -45,6 +45,7 @@ impl InlaySplice {
 #[derive(Debug, Clone)]
 pub struct Inlay {
     pub id: InlayId,
+    // TODO this could be an ExcerptAnchor
     pub position: Anchor,
     pub content: InlayContent,
 }

crates/editor/src/inlays/inlay_hints.rs πŸ”—

@@ -14,7 +14,7 @@ use language::{
     language_settings::{InlayHintKind, InlayHintSettings},
 };
 use lsp::LanguageServerId;
-use multi_buffer::{Anchor, ExcerptId, MultiBufferSnapshot};
+use multi_buffer::{Anchor, MultiBufferSnapshot};
 use project::{
     HoverBlock, HoverBlockKind, InlayHintLabel, InlayHintLabelPartTooltip, InlayHintTooltip,
     InvalidationStrategy, ResolveState,
@@ -110,14 +110,15 @@ impl LspInlayHintData {
         &mut self,
         buffer_ids: &HashSet<BufferId>,
         current_hints: impl IntoIterator<Item = Inlay>,
+        snapshot: &MultiBufferSnapshot,
     ) {
         for buffer_id in buffer_ids {
             self.hint_refresh_tasks.remove(buffer_id);
             self.hint_chunk_fetching.remove(buffer_id);
         }
         for hint in current_hints {
-            if let Some(buffer_id) = hint.position.text_anchor.buffer_id {
-                if buffer_ids.contains(&buffer_id) {
+            if let Some((text_anchor, _)) = snapshot.anchor_to_buffer_anchor(hint.position) {
+                if buffer_ids.contains(&text_anchor.buffer_id) {
                     self.added_hints.remove(&hint.id);
                 }
             }
@@ -237,7 +238,7 @@ pub enum InlayHintRefreshReason {
         server_id: LanguageServerId,
         request_id: Option<usize>,
     },
-    ExcerptsRemoved(Vec<ExcerptId>),
+    BuffersRemoved(Vec<BufferId>),
 }
 
 impl Editor {
@@ -303,7 +304,7 @@ impl Editor {
         let debounce = match &reason {
             InlayHintRefreshReason::SettingsChange(_)
             | InlayHintRefreshReason::Toggle(_)
-            | InlayHintRefreshReason::ExcerptsRemoved(_)
+            | InlayHintRefreshReason::BuffersRemoved(_)
             | InlayHintRefreshReason::ModifiersChanged(_) => None,
             _may_need_lsp_call => self.inlay_hints.as_ref().and_then(|inlay_hints| {
                 if invalidate_cache.should_invalidate() {
@@ -314,7 +315,8 @@ impl Editor {
             }),
         };
 
-        let mut visible_excerpts = self.visible_excerpts(true, cx);
+        let mut visible_excerpts = self.visible_buffer_ranges(cx);
+        visible_excerpts.retain(|(snapshot, _, _)| self.is_lsp_relevant(snapshot.file(), cx));
 
         let mut invalidate_hints_for_buffers = HashSet::default();
         let ignore_previous_fetches = match reason {
@@ -324,7 +326,7 @@ impl Editor {
             | InlayHintRefreshReason::ServerRemoved => true,
             InlayHintRefreshReason::NewLinesShown
             | InlayHintRefreshReason::RefreshRequested { .. }
-            | InlayHintRefreshReason::ExcerptsRemoved(_) => false,
+            | InlayHintRefreshReason::BuffersRemoved(_) => false,
             InlayHintRefreshReason::BufferEdited(buffer_id) => {
                 let Some(affected_language) = self
                     .buffer()
@@ -351,8 +353,8 @@ impl Editor {
                 );
 
                 semantics_provider.invalidate_inlay_hints(&invalidate_hints_for_buffers, cx);
-                visible_excerpts.retain(|_, (visible_buffer, _, _)| {
-                    visible_buffer.read(cx).language() == Some(&affected_language)
+                visible_excerpts.retain(|(buffer_snapshot, _, _)| {
+                    buffer_snapshot.language() == Some(&affected_language)
                 });
                 false
             }
@@ -371,6 +373,7 @@ impl Editor {
                 inlay_hints.clear_for_buffers(
                     &invalidate_hints_for_buffers,
                     Self::visible_inlay_hints(self.display_map.read(cx)),
+                    &multi_buffer.read(cx).snapshot(cx),
                 );
             }
         }
@@ -379,14 +382,18 @@ impl Editor {
             .extend(invalidate_hints_for_buffers);
 
         let mut buffers_to_query = HashMap::default();
-        for (_, (buffer, buffer_version, visible_range)) in visible_excerpts {
-            let buffer_id = buffer.read(cx).remote_id();
+        for (buffer_snapshot, visible_range, _) in visible_excerpts {
+            let buffer_id = buffer_snapshot.remote_id();
 
             if !self.registered_buffers.contains_key(&buffer_id) {
                 continue;
             }
 
-            let buffer_snapshot = buffer.read(cx).snapshot();
+            let Some(buffer) = multi_buffer.read(cx).buffer(buffer_id) else {
+                continue;
+            };
+
+            let buffer_version = buffer_snapshot.version().clone();
             let buffer_anchor_range = buffer_snapshot.anchor_before(visible_range.start)
                 ..buffer_snapshot.anchor_after(visible_range.end);
 
@@ -514,13 +521,14 @@ impl Editor {
                     }
                 }
             }
-            InlayHintRefreshReason::ExcerptsRemoved(excerpts_removed) => {
+            InlayHintRefreshReason::BuffersRemoved(buffers_removed) => {
                 let to_remove = self
                     .display_map
                     .read(cx)
                     .current_inlays()
                     .filter_map(|inlay| {
-                        if excerpts_removed.contains(&inlay.position.excerpt_id) {
+                        let anchor = inlay.position.raw_text_anchor()?;
+                        if buffers_removed.contains(&anchor.buffer_id) {
                             Some(inlay.id)
                         } else {
                             None
@@ -610,13 +618,11 @@ impl Editor {
                 })
                 .max_by_key(|hint| hint.id)
             {
-                if let Some(ResolvedHint::Resolved(cached_hint)) = hovered_hint
-                    .position
-                    .text_anchor
-                    .buffer_id
-                    .and_then(|buffer_id| {
+                if let Some(ResolvedHint::Resolved(cached_hint)) = buffer_snapshot
+                    .anchor_to_buffer_anchor(hovered_hint.position)
+                    .and_then(|(anchor, _)| {
                         lsp_store.update(cx, |lsp_store, cx| {
-                            lsp_store.resolved_hint(buffer_id, hovered_hint.id, cx)
+                            lsp_store.resolved_hint(anchor.buffer_id, hovered_hint.id, cx)
                         })
                     })
                 {
@@ -787,15 +793,19 @@ impl Editor {
         new_hints: Vec<(Range<BufferRow>, anyhow::Result<CacheInlayHints>)>,
         cx: &mut Context<Self>,
     ) {
+        let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx);
         let visible_inlay_hint_ids = Self::visible_inlay_hints(self.display_map.read(cx))
-            .filter(|inlay| inlay.position.text_anchor.buffer_id == Some(buffer_id))
+            .filter(|inlay| {
+                multi_buffer_snapshot
+                    .anchor_to_buffer_anchor(inlay.position)
+                    .map(|(anchor, _)| anchor.buffer_id)
+                    == Some(buffer_id)
+            })
             .map(|inlay| inlay.id)
             .collect::<Vec<_>>();
         let Some(inlay_hints) = &mut self.inlay_hints else {
             return;
         };
-
-        let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx);
         let Some(buffer_snapshot) = self
             .buffer
             .read(cx)
@@ -910,12 +920,10 @@ impl Editor {
             hints_to_remove.extend(
                 Self::visible_inlay_hints(self.display_map.read(cx))
                     .filter(|inlay| {
-                        inlay
-                            .position
-                            .text_anchor
-                            .buffer_id
-                            .is_none_or(|buffer_id| {
-                                invalidate_hints_for_buffers.contains(&buffer_id)
+                        multi_buffer_snapshot
+                            .anchor_to_buffer_anchor(inlay.position)
+                            .is_none_or(|(anchor, _)| {
+                                invalidate_hints_for_buffers.contains(&anchor.buffer_id)
                             })
                     })
                     .map(|inlay| inlay.id),
@@ -2285,17 +2293,15 @@ pub mod tests {
         cx: &mut gpui::TestAppContext,
     ) -> Range<Point> {
         let ranges = editor
-            .update(cx, |editor, _window, cx| editor.visible_excerpts(true, cx))
+            .update(cx, |editor, _window, cx| editor.visible_buffer_ranges(cx))
             .unwrap();
         assert_eq!(
             ranges.len(),
             1,
             "Single buffer should produce a single excerpt with visible range"
         );
-        let (_, (excerpt_buffer, _, excerpt_visible_range)) = ranges.into_iter().next().unwrap();
-        excerpt_buffer.read_with(cx, |buffer, _| {
-            excerpt_visible_range.to_point(&buffer.snapshot())
-        })
+        let (buffer_snapshot, visible_range, _) = ranges.into_iter().next().unwrap();
+        visible_range.to_point(&buffer_snapshot)
     }
 
     #[gpui::test]
@@ -2968,7 +2974,7 @@ let c = 3;"#
             .await
             .unwrap();
         let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
-        let (buffer_1_excerpts, buffer_2_excerpts) = multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.update(cx, |multibuffer, cx| {
             multibuffer.set_excerpts_for_path(
                 PathKey::sorted(0),
                 buffer_1.clone(),
@@ -2983,15 +2989,8 @@ let c = 3;"#
                 0,
                 cx,
             );
-            let excerpt_ids = multibuffer.excerpt_ids();
-            let buffer_1_excerpts = vec![excerpt_ids[0]];
-            let buffer_2_excerpts = vec![excerpt_ids[1]];
-            (buffer_1_excerpts, buffer_2_excerpts)
         });
 
-        assert!(!buffer_1_excerpts.is_empty());
-        assert!(!buffer_2_excerpts.is_empty());
-
         cx.executor().run_until_parked();
         let editor = cx.add_window(|window, cx| {
             Editor::for_multibuffer(multibuffer, Some(project.clone()), window, cx)
@@ -3092,7 +3091,7 @@ let c = 3;"#
         editor
             .update(cx, |editor, _, cx| {
                 editor.buffer().update(cx, |multibuffer, cx| {
-                    multibuffer.remove_excerpts_for_path(PathKey::sorted(1), cx);
+                    multibuffer.remove_excerpts(PathKey::sorted(1), cx);
                 })
             })
             .unwrap();

crates/editor/src/items.rs πŸ”—

@@ -1,7 +1,7 @@
 use crate::{
     ActiveDebugLine, Anchor, Autoscroll, BufferSerialization, Capability, Editor, EditorEvent,
-    EditorSettings, ExcerptId, ExcerptRange, FormatTarget, MultiBuffer, MultiBufferSnapshot,
-    NavigationData, ReportEditorEvent, SelectionEffects, ToPoint as _,
+    EditorSettings, ExcerptRange, FormatTarget, MultiBuffer, MultiBufferSnapshot, NavigationData,
+    ReportEditorEvent, SelectionEffects, ToPoint as _,
     display_map::HighlightKey,
     editor_settings::SeedQuerySetting,
     persistence::{EditorDb, SerializedEditor},
@@ -22,7 +22,7 @@ use language::{
     SelectionGoal, proto::serialize_anchor as serialize_text_anchor,
 };
 use lsp::DiagnosticSeverity;
-use multi_buffer::MultiBufferOffset;
+use multi_buffer::{MultiBufferOffset, PathKey};
 use project::{
     File, Project, ProjectItem as _, ProjectPath, lsp_store::FormatTrigger,
     project_settings::ProjectSettings, search::SearchQuery,
@@ -33,14 +33,13 @@ use std::{
     any::{Any, TypeId},
     borrow::Cow,
     cmp::{self, Ordering},
-    iter,
     ops::Range,
     path::{Path, PathBuf},
     sync::Arc,
 };
 use text::{BufferId, BufferSnapshot, Selection};
 use ui::{IconDecorationKind, prelude::*};
-use util::{ResultExt, TryFutureExt, paths::PathExt};
+use util::{ResultExt, TryFutureExt, paths::PathExt, rel_path::RelPath};
 use workspace::item::{Dedup, ItemSettings, SerializableItem, TabContentParams};
 use workspace::{
     CollaboratorId, ItemId, ItemNavHistory, ToolbarItemLocation, ViewId, Workspace, WorkspaceId,
@@ -83,10 +82,11 @@ impl FollowableItem for Editor {
         };
 
         let buffer_ids = state
-            .excerpts
+            .path_excerpts
             .iter()
             .map(|excerpt| excerpt.buffer_id)
             .collect::<HashSet<_>>();
+
         let buffers = project.update(cx, |project, cx| {
             buffer_ids
                 .iter()
@@ -106,38 +106,32 @@ impl FollowableItem for Editor {
                         multibuffer = MultiBuffer::singleton(buffers.pop().unwrap(), cx)
                     } else {
                         multibuffer = MultiBuffer::new(project.read(cx).capability());
-                        let mut sorted_excerpts = state.excerpts.clone();
-                        sorted_excerpts.sort_by_key(|e| e.id);
-                        let sorted_excerpts = sorted_excerpts.into_iter().peekable();
-
-                        for excerpt in sorted_excerpts {
-                            let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else {
+                        for path_with_ranges in state.path_excerpts {
+                            let Some(path_key) =
+                                path_with_ranges.path_key.and_then(deserialize_path_key)
+                            else {
                                 continue;
                             };
-
-                            let mut insert_position = ExcerptId::min();
-                            for e in &state.excerpts {
-                                if e.id == excerpt.id {
-                                    break;
-                                }
-                                if e.id < excerpt.id {
-                                    insert_position = ExcerptId::from_proto(e.id);
-                                }
-                            }
-
-                            let buffer =
-                                buffers.iter().find(|b| b.read(cx).remote_id() == buffer_id);
-
-                            let Some(excerpt) = deserialize_excerpt_range(excerpt) else {
+                            let Some(buffer_id) = BufferId::new(path_with_ranges.buffer_id).ok()
+                            else {
                                 continue;
                             };
-
-                            let Some(buffer) = buffer else { continue };
-
-                            multibuffer.insert_excerpts_with_ids_after(
-                                insert_position,
+                            let Some(buffer) =
+                                buffers.iter().find(|b| b.read(cx).remote_id() == buffer_id)
+                            else {
+                                continue;
+                            };
+                            let buffer_snapshot = buffer.read(cx).snapshot();
+                            let ranges = path_with_ranges
+                                .ranges
+                                .into_iter()
+                                .filter_map(deserialize_excerpt_range)
+                                .collect::<Vec<_>>();
+                            multibuffer.update_path_excerpts(
+                                path_key,
                                 buffer.clone(),
-                                [excerpt],
+                                &buffer_snapshot,
+                                &ranges,
                                 cx,
                             );
                         }
@@ -158,6 +152,7 @@ impl FollowableItem for Editor {
                 })
             })?;
 
+            editor.update(cx, |editor, cx| editor.text(cx));
             update_editor_from_message(
                 editor.downgrade(),
                 project,
@@ -215,38 +210,43 @@ impl FollowableItem for Editor {
         let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx));
         let scroll_anchor = self.scroll_manager.native_anchor(&display_snapshot, cx);
         let buffer = self.buffer.read(cx);
-        let excerpts = buffer
-            .read(cx)
-            .excerpts()
-            .map(|(id, buffer, range)| proto::Excerpt {
-                id: id.to_proto(),
-                buffer_id: buffer.remote_id().into(),
-                context_start: Some(serialize_text_anchor(&range.context.start)),
-                context_end: Some(serialize_text_anchor(&range.context.end)),
-                primary_start: Some(serialize_text_anchor(&range.primary.start)),
-                primary_end: Some(serialize_text_anchor(&range.primary.end)),
-            })
-            .collect();
         let snapshot = buffer.snapshot(cx);
+        let mut path_excerpts: Vec<proto::PathExcerpts> = Vec::new();
+        for excerpt in snapshot.excerpts() {
+            if let Some(prev_entry) = path_excerpts.last_mut()
+                && prev_entry.buffer_id == excerpt.context.start.buffer_id.to_proto()
+            {
+                prev_entry.ranges.push(serialize_excerpt_range(excerpt));
+            } else if let Some(path_key) = snapshot.path_for_buffer(excerpt.context.start.buffer_id)
+            {
+                path_excerpts.push(proto::PathExcerpts {
+                    path_key: Some(serialize_path_key(path_key)),
+                    buffer_id: excerpt.context.start.buffer_id.to_proto(),
+                    ranges: vec![serialize_excerpt_range(excerpt)],
+                });
+            }
+        }
 
         Some(proto::view::Variant::Editor(proto::view::Editor {
             singleton: buffer.is_singleton(),
             title: buffer.explicit_title().map(ToOwned::to_owned),
-            excerpts,
-            scroll_top_anchor: Some(serialize_anchor(&scroll_anchor.anchor, &snapshot)),
+            excerpts: Vec::new(),
+            scroll_top_anchor: Some(serialize_anchor(&scroll_anchor.anchor)),
             scroll_x: scroll_anchor.offset.x,
             scroll_y: scroll_anchor.offset.y,
             selections: self
                 .selections
                 .disjoint_anchors_arc()
                 .iter()
-                .map(|s| serialize_selection(s, &snapshot))
+                .map(serialize_selection)
                 .collect(),
             pending_selection: self
                 .selections
                 .pending_anchor()
                 .as_ref()
-                .map(|s| serialize_selection(s, &snapshot)),
+                .copied()
+                .map(serialize_selection),
+            path_excerpts,
         }))
     }
 
@@ -277,56 +277,52 @@ impl FollowableItem for Editor {
 
         match update {
             proto::update_view::Variant::Editor(update) => match event {
-                EditorEvent::ExcerptsAdded {
+                EditorEvent::BufferRangesUpdated {
                     buffer,
-                    predecessor,
-                    excerpts,
+                    path_key,
+                    ranges,
                 } => {
-                    let buffer_id = buffer.read(cx).remote_id();
-                    let mut excerpts = excerpts.iter();
-                    if let Some((id, range)) = excerpts.next() {
-                        update.inserted_excerpts.push(proto::ExcerptInsertion {
-                            previous_excerpt_id: Some(predecessor.to_proto()),
-                            excerpt: serialize_excerpt(buffer_id, id, range),
-                        });
-                        update.inserted_excerpts.extend(excerpts.map(|(id, range)| {
-                            proto::ExcerptInsertion {
-                                previous_excerpt_id: None,
-                                excerpt: serialize_excerpt(buffer_id, id, range),
-                            }
-                        }))
-                    }
+                    let buffer_id = buffer.read(cx).remote_id().to_proto();
+                    let path_key = serialize_path_key(path_key);
+                    let ranges = ranges
+                        .iter()
+                        .cloned()
+                        .map(serialize_excerpt_range)
+                        .collect::<Vec<_>>();
+                    update.updated_paths.push(proto::PathExcerpts {
+                        path_key: Some(path_key),
+                        buffer_id,
+                        ranges,
+                    });
                     true
                 }
-                EditorEvent::ExcerptsRemoved { ids, .. } => {
+                EditorEvent::BuffersRemoved { removed_buffer_ids } => {
                     update
-                        .deleted_excerpts
-                        .extend(ids.iter().copied().map(ExcerptId::to_proto));
+                        .deleted_buffers
+                        .extend(removed_buffer_ids.iter().copied().map(BufferId::to_proto));
                     true
                 }
                 EditorEvent::ScrollPositionChanged { autoscroll, .. } if !autoscroll => {
                     let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx));
-                    let snapshot = self.buffer.read(cx).snapshot(cx);
                     let scroll_anchor = self.scroll_manager.native_anchor(&display_snapshot, cx);
-                    update.scroll_top_anchor =
-                        Some(serialize_anchor(&scroll_anchor.anchor, &snapshot));
+                    update.scroll_top_anchor = Some(serialize_anchor(&scroll_anchor.anchor));
                     update.scroll_x = scroll_anchor.offset.x;
                     update.scroll_y = scroll_anchor.offset.y;
                     true
                 }
                 EditorEvent::SelectionsChanged { .. } => {
-                    let snapshot = self.buffer.read(cx).snapshot(cx);
                     update.selections = self
                         .selections
                         .disjoint_anchors_arc()
                         .iter()
-                        .map(|s| serialize_selection(s, &snapshot))
+                        .map(serialize_selection)
                         .collect();
                     update.pending_selection = self
                         .selections
                         .pending_anchor()
                         .as_ref()
-                        .map(|s| serialize_selection(s, &snapshot));
+                        .copied()
+                        .map(serialize_selection);
                     true
                 }
                 _ => false,
@@ -370,7 +366,7 @@ impl FollowableItem for Editor {
     ) {
         let buffer = self.buffer.read(cx);
         let buffer = buffer.read(cx);
-        let Some(position) = buffer.as_singleton_anchor(location) else {
+        let Some(position) = buffer.anchor_in_excerpt(location) else {
             return;
         };
         let selection = Selection {
@@ -394,9 +390,9 @@ async fn update_editor_from_message(
 ) -> Result<()> {
     // Open all of the buffers of which excerpts were added to the editor.
     let inserted_excerpt_buffer_ids = message
-        .inserted_excerpts
+        .updated_paths
         .iter()
-        .filter_map(|insertion| Some(insertion.excerpt.as_ref()?.buffer_id))
+        .map(|insertion| insertion.buffer_id)
         .collect::<HashSet<_>>();
     let inserted_excerpt_buffers = project.update(cx, |project, cx| {
         inserted_excerpt_buffer_ids
@@ -407,66 +403,53 @@ async fn update_editor_from_message(
     let _inserted_excerpt_buffers = try_join_all(inserted_excerpt_buffers).await?;
 
     // Update the editor's excerpts.
-    this.update(cx, |editor, cx| {
+    let buffer_snapshot = this.update(cx, |editor, cx| {
         editor.buffer.update(cx, |multibuffer, cx| {
-            let mut removed_excerpt_ids = message
-                .deleted_excerpts
-                .into_iter()
-                .map(ExcerptId::from_proto)
-                .collect::<Vec<_>>();
-            removed_excerpt_ids.sort_by({
-                let multibuffer = multibuffer.read(cx);
-                move |a, b| a.cmp(b, &multibuffer)
-            });
-
-            let mut insertions = message.inserted_excerpts.into_iter().peekable();
-            while let Some(insertion) = insertions.next() {
-                let Some(excerpt) = insertion.excerpt else {
+            for path_with_excerpts in message.updated_paths {
+                let Some(path_key) = path_with_excerpts.path_key.and_then(deserialize_path_key)
+                else {
                     continue;
                 };
-                let Some(previous_excerpt_id) = insertion.previous_excerpt_id else {
-                    continue;
-                };
-                let buffer_id = BufferId::new(excerpt.buffer_id)?;
-                let Some(buffer) = project.read(cx).buffer_for_id(buffer_id, cx) else {
+                let ranges = path_with_excerpts
+                    .ranges
+                    .into_iter()
+                    .filter_map(deserialize_excerpt_range)
+                    .collect::<Vec<_>>();
+                let Some(buffer) = BufferId::new(path_with_excerpts.buffer_id)
+                    .ok()
+                    .and_then(|buffer_id| project.read(cx).buffer_for_id(buffer_id, cx))
+                else {
                     continue;
                 };
 
-                let adjacent_excerpts = iter::from_fn(|| {
-                    let insertion = insertions.peek()?;
-                    if insertion.previous_excerpt_id.is_none()
-                        && insertion.excerpt.as_ref()?.buffer_id == u64::from(buffer_id)
-                    {
-                        insertions.next()?.excerpt
-                    } else {
-                        None
-                    }
-                });
+                let buffer_snapshot = buffer.read(cx).snapshot();
+                multibuffer.update_path_excerpts(path_key, buffer, &buffer_snapshot, &ranges, cx);
+            }
 
-                multibuffer.insert_excerpts_with_ids_after(
-                    ExcerptId::from_proto(previous_excerpt_id),
-                    buffer,
-                    [excerpt]
-                        .into_iter()
-                        .chain(adjacent_excerpts)
-                        .filter_map(deserialize_excerpt_range),
-                    cx,
-                );
+            for buffer_id in message
+                .deleted_buffers
+                .into_iter()
+                .filter_map(|buffer_id| BufferId::new(buffer_id).ok())
+            {
+                multibuffer.remove_excerpts_for_buffer(buffer_id, cx);
             }
 
-            multibuffer.remove_excerpts(removed_excerpt_ids, cx);
-            anyhow::Ok(())
+            multibuffer.snapshot(cx)
         })
-    })??;
+    })?;
 
     // Deserialize the editor state.
     let selections = message
         .selections
         .into_iter()
-        .filter_map(deserialize_selection)
+        .filter_map(|selection| deserialize_selection(selection, &buffer_snapshot))
         .collect::<Vec<_>>();
-    let pending_selection = message.pending_selection.and_then(deserialize_selection);
-    let scroll_top_anchor = message.scroll_top_anchor.and_then(deserialize_anchor);
+    let pending_selection = message
+        .pending_selection
+        .and_then(|selection| deserialize_selection(selection, &buffer_snapshot));
+    let scroll_top_anchor = message
+        .scroll_top_anchor
+        .and_then(|selection| deserialize_anchor(selection, &buffer_snapshot));
 
     // Wait until the buffer has received all of the operations referenced by
     // the editor's new state.
@@ -503,79 +486,103 @@ async fn update_editor_from_message(
     Ok(())
 }
 
-fn serialize_excerpt(
-    buffer_id: BufferId,
-    id: &ExcerptId,
-    range: &ExcerptRange<language::Anchor>,
-) -> Option<proto::Excerpt> {
-    Some(proto::Excerpt {
-        id: id.to_proto(),
-        buffer_id: buffer_id.into(),
-        context_start: Some(serialize_text_anchor(&range.context.start)),
-        context_end: Some(serialize_text_anchor(&range.context.end)),
-        primary_start: Some(serialize_text_anchor(&range.primary.start)),
-        primary_end: Some(serialize_text_anchor(&range.primary.end)),
-    })
-}
-
-fn serialize_selection(
-    selection: &Selection<Anchor>,
-    buffer: &MultiBufferSnapshot,
-) -> proto::Selection {
+fn serialize_selection(selection: &Selection<Anchor>) -> proto::Selection {
     proto::Selection {
         id: selection.id as u64,
-        start: Some(serialize_anchor(&selection.start, buffer)),
-        end: Some(serialize_anchor(&selection.end, buffer)),
+        start: Some(serialize_anchor(&selection.start)),
+        end: Some(serialize_anchor(&selection.end)),
         reversed: selection.reversed,
     }
 }
 
-fn serialize_anchor(anchor: &Anchor, buffer: &MultiBufferSnapshot) -> proto::EditorAnchor {
-    proto::EditorAnchor {
-        excerpt_id: buffer.latest_excerpt_id(anchor.excerpt_id).to_proto(),
-        anchor: Some(serialize_text_anchor(&anchor.text_anchor)),
+fn serialize_anchor(anchor: &Anchor) -> proto::EditorAnchor {
+    match anchor {
+        Anchor::Min => proto::EditorAnchor {
+            excerpt_id: None,
+            anchor: Some(proto::Anchor {
+                replica_id: 0,
+                timestamp: 0,
+                offset: 0,
+                bias: proto::Bias::Left as i32,
+                buffer_id: None,
+            }),
+        },
+        Anchor::Excerpt(_) => proto::EditorAnchor {
+            excerpt_id: None,
+            anchor: anchor.raw_text_anchor().map(|a| serialize_text_anchor(&a)),
+        },
+        Anchor::Max => proto::EditorAnchor {
+            excerpt_id: None,
+            anchor: Some(proto::Anchor {
+                replica_id: u32::MAX,
+                timestamp: u32::MAX,
+                offset: u64::MAX,
+                bias: proto::Bias::Right as i32,
+                buffer_id: None,
+            }),
+        },
+    }
+}
+
+fn serialize_excerpt_range(range: ExcerptRange<language::Anchor>) -> proto::ExcerptRange {
+    let context_start = language::proto::serialize_anchor(&range.context.start);
+    let context_end = language::proto::serialize_anchor(&range.context.end);
+    let primary_start = language::proto::serialize_anchor(&range.primary.start);
+    let primary_end = language::proto::serialize_anchor(&range.primary.end);
+    proto::ExcerptRange {
+        context_start: Some(context_start),
+        context_end: Some(context_end),
+        primary_start: Some(primary_start),
+        primary_end: Some(primary_end),
     }
 }
 
 fn deserialize_excerpt_range(
-    excerpt: proto::Excerpt,
-) -> Option<(ExcerptId, ExcerptRange<language::Anchor>)> {
+    excerpt_range: proto::ExcerptRange,
+) -> Option<ExcerptRange<language::Anchor>> {
     let context = {
-        let start = language::proto::deserialize_anchor(excerpt.context_start?)?;
-        let end = language::proto::deserialize_anchor(excerpt.context_end?)?;
+        let start = language::proto::deserialize_anchor(excerpt_range.context_start?)?;
+        let end = language::proto::deserialize_anchor(excerpt_range.context_end?)?;
         start..end
     };
-    let primary = excerpt
+    let primary = excerpt_range
         .primary_start
-        .zip(excerpt.primary_end)
+        .zip(excerpt_range.primary_end)
         .and_then(|(start, end)| {
             let start = language::proto::deserialize_anchor(start)?;
             let end = language::proto::deserialize_anchor(end)?;
             Some(start..end)
         })
         .unwrap_or_else(|| context.clone());
-    Some((
-        ExcerptId::from_proto(excerpt.id),
-        ExcerptRange { context, primary },
-    ))
+    Some(ExcerptRange { context, primary })
 }
 
-fn deserialize_selection(selection: proto::Selection) -> Option<Selection<Anchor>> {
+fn deserialize_selection(
+    selection: proto::Selection,
+    buffer: &MultiBufferSnapshot,
+) -> Option<Selection<Anchor>> {
     Some(Selection {
         id: selection.id as usize,
-        start: deserialize_anchor(selection.start?)?,
-        end: deserialize_anchor(selection.end?)?,
+        start: deserialize_anchor(selection.start?, buffer)?,
+        end: deserialize_anchor(selection.end?, buffer)?,
         reversed: selection.reversed,
         goal: SelectionGoal::None,
     })
 }
 
-fn deserialize_anchor(anchor: proto::EditorAnchor) -> Option<Anchor> {
-    let excerpt_id = ExcerptId::from_proto(anchor.excerpt_id);
-    Some(Anchor::in_buffer(
-        excerpt_id,
-        language::proto::deserialize_anchor(anchor.anchor?)?,
-    ))
+fn deserialize_anchor(anchor: proto::EditorAnchor, buffer: &MultiBufferSnapshot) -> Option<Anchor> {
+    let anchor = anchor.anchor?;
+    if let Some(buffer_id) = anchor.buffer_id
+        && BufferId::new(buffer_id).is_ok()
+    {
+        let text_anchor = language::proto::deserialize_anchor(anchor)?;
+        buffer.anchor_in_buffer(text_anchor)
+    } else {
+        match proto::Bias::from_i32(anchor.bias)? {
+            proto::Bias::Left => Some(Anchor::Min),
+            proto::Bias::Right => Some(Anchor::Max),
+        }
+    }
 }
 
 impl Item for Editor {
@@ -1071,7 +1078,7 @@ impl Item for Editor {
                 f(ItemEvent::UpdateBreadcrumbs);
             }
 
-            EditorEvent::ExcerptsAdded { .. } | EditorEvent::ExcerptsRemoved { .. } => {
+            EditorEvent::BufferRangesUpdated { .. } | EditorEvent::BuffersRemoved { .. } => {
                 f(ItemEvent::Edit);
             }
 
@@ -1434,9 +1441,9 @@ impl ProjectItem for Editor {
         cx: &mut Context<Self>,
     ) -> Self {
         let mut editor = Self::for_buffer(buffer.clone(), Some(project), window, cx);
+        let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx);
 
-        if let Some((excerpt_id, _, snapshot)) =
-            editor.buffer().read(cx).snapshot(cx).as_singleton()
+        if let Some(buffer_snapshot) = editor.buffer().read(cx).snapshot(cx).as_singleton()
             && WorkspaceSettings::get(None, cx).restore_on_file_reopen
             && let Some(restoration_data) = Self::project_item_kind()
                 .and_then(|kind| pane.as_ref()?.project_item_restoration_data.get(&kind))
@@ -1448,7 +1455,7 @@ impl ProjectItem for Editor {
         {
             if !restoration_data.folds.is_empty() {
                 editor.fold_ranges(
-                    clip_ranges(&restoration_data.folds, snapshot),
+                    clip_ranges(&restoration_data.folds, buffer_snapshot),
                     false,
                     window,
                     cx,
@@ -1456,12 +1463,11 @@ impl ProjectItem for Editor {
             }
             if !restoration_data.selections.is_empty() {
                 editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
-                    s.select_ranges(clip_ranges(&restoration_data.selections, snapshot));
+                    s.select_ranges(clip_ranges(&restoration_data.selections, buffer_snapshot));
                 });
             }
             let (top_row, offset) = restoration_data.scroll_position;
-            let anchor =
-                Anchor::in_buffer(excerpt_id, snapshot.anchor_before(Point::new(top_row, 0)));
+            let anchor = multibuffer_snapshot.anchor_before(Point::new(top_row, 0));
             editor.set_scroll_anchor(ScrollAnchor { anchor, offset }, window, cx);
         }
 
@@ -1838,7 +1844,7 @@ impl SearchableItem for Editor {
             };
 
             for range in search_within_ranges {
-                for (search_buffer, search_range, excerpt_id, deleted_hunk_anchor) in
+                for (search_buffer, search_range, deleted_hunk_anchor) in
                     buffer.range_to_buffer_ranges_with_deleted_hunks(range)
                 {
                     ranges.extend(
@@ -1849,20 +1855,22 @@ impl SearchableItem for Editor {
                             )
                             .await
                             .into_iter()
-                            .map(|match_range| {
+                            .filter_map(|match_range| {
                                 if let Some(deleted_hunk_anchor) = deleted_hunk_anchor {
                                     let start = search_buffer
                                         .anchor_after(search_range.start + match_range.start);
                                     let end = search_buffer
                                         .anchor_before(search_range.start + match_range.end);
-                                    deleted_hunk_anchor.with_diff_base_anchor(start)
-                                        ..deleted_hunk_anchor.with_diff_base_anchor(end)
+                                    Some(
+                                        deleted_hunk_anchor.with_diff_base_anchor(start)
+                                            ..deleted_hunk_anchor.with_diff_base_anchor(end),
+                                    )
                                 } else {
                                     let start = search_buffer
                                         .anchor_after(search_range.start + match_range.start);
                                     let end = search_buffer
                                         .anchor_before(search_range.start + match_range.end);
-                                    Anchor::range_in_buffer(excerpt_id, start..end)
+                                    buffer.buffer_anchor_range_to_anchor_range(start..end)
                                 }
                             }),
                     );
@@ -2050,6 +2058,20 @@ fn restore_serialized_buffer_contents(
     }
 }
 
+fn serialize_path_key(path_key: &PathKey) -> proto::PathKey {
+    proto::PathKey {
+        sort_prefix: path_key.sort_prefix,
+        path: path_key.path.to_proto(),
+    }
+}
+
+fn deserialize_path_key(path_key: proto::PathKey) -> Option<PathKey> {
+    Some(PathKey {
+        sort_prefix: path_key.sort_prefix,
+        path: RelPath::from_proto(&path_key.path).ok()?,
+    })
+}
+
 #[cfg(test)]
 mod tests {
     use crate::editor_tests::init_test;

crates/editor/src/jsx_tag_auto_close.rs πŸ”—

@@ -352,11 +352,12 @@ pub(crate) fn construct_initial_buffer_versions_map<
     }
 
     for (edit_range, _) in edits {
-        let edit_range_buffer = editor
-            .buffer()
-            .read(cx)
-            .excerpt_containing(edit_range.end, cx)
-            .map(|e| e.1);
+        let multibuffer = editor.buffer.read(cx);
+        let snapshot = multibuffer.snapshot(cx);
+        let anchor = snapshot.anchor_before(edit_range.end);
+        let edit_range_buffer = snapshot
+            .anchor_to_buffer_anchor(anchor)
+            .and_then(|(text_anchor, _)| multibuffer.buffer(text_anchor.buffer_id));
         if let Some(buffer) = edit_range_buffer {
             let (buffer_id, buffer_version) =
                 buffer.read_with(cx, |buffer, _| (buffer.remote_id(), buffer.version.clone()));

crates/editor/src/linked_editing_ranges.rs πŸ”—

@@ -2,7 +2,6 @@ use collections::HashMap;
 use gpui::{AppContext, Context, Entity, Window};
 use itertools::Itertools;
 use language::Buffer;
-use multi_buffer::MultiBufferOffset;
 use std::{ops::Range, sync::Arc, time::Duration};
 use text::{Anchor, AnchorRangeExt, Bias, BufferId, ToOffset, ToPoint};
 use util::ResultExt;
@@ -62,27 +61,15 @@ pub(super) fn refresh_linked_ranges(
         editor
             .update(cx, |editor, cx| {
                 let display_snapshot = editor.display_snapshot(cx);
-                let selections = editor
-                    .selections
-                    .all::<MultiBufferOffset>(&display_snapshot);
+                let selections = editor.selections.all_anchors(&display_snapshot);
                 let snapshot = display_snapshot.buffer_snapshot();
                 let buffer = editor.buffer.read(cx);
-                for selection in selections {
-                    let cursor_position = selection.head();
-                    let start_position = snapshot.anchor_before(cursor_position);
-                    let end_position = snapshot.anchor_after(selection.tail());
-                    if start_position.text_anchor.buffer_id != end_position.text_anchor.buffer_id
-                        || end_position.text_anchor.buffer_id.is_none()
+                for selection in selections.iter() {
+                    if let Some((_, range)) =
+                        snapshot.anchor_range_to_buffer_anchor_range(selection.range())
+                        && let Some(buffer) = buffer.buffer(range.start.buffer_id)
                     {
-                        // Throw away selections spanning multiple buffers.
-                        continue;
-                    }
-                    if let Some(buffer) = buffer.buffer_for_anchor(end_position, cx) {
-                        applicable_selections.push((
-                            buffer,
-                            start_position.text_anchor,
-                            end_position.text_anchor,
-                        ));
+                        applicable_selections.push((buffer, range.start, range.end));
                     }
                 }
             })

crates/editor/src/lsp_ext.rs πŸ”—

@@ -9,7 +9,6 @@ use language::Buffer;
 use language::Language;
 use lsp::LanguageServerId;
 use lsp::LanguageServerName;
-use multi_buffer::Anchor;
 use project::LanguageServerToQuery;
 use project::LocationLink;
 use project::Project;
@@ -27,7 +26,12 @@ pub(crate) fn find_specific_language_server_in_selection<F>(
     cx: &mut App,
     filter_language: F,
     language_server_name: LanguageServerName,
-) -> Option<(Anchor, Arc<Language>, LanguageServerId, Entity<Buffer>)>
+) -> Option<(
+    text::Anchor,
+    Arc<Language>,
+    LanguageServerId,
+    Entity<Buffer>,
+)>
 where
     F: Fn(&Language) -> bool,
 {
@@ -40,19 +44,15 @@ where
         .iter()
         .find_map(|selection| {
             let multi_buffer = multi_buffer.read(cx);
-            let (position, buffer) = multi_buffer
-                .buffer_for_anchor(selection.head(), cx)
-                .map(|buffer| (selection.head(), buffer))
-                .or_else(|| {
-                    multi_buffer
-                        .buffer_for_anchor(selection.tail(), cx)
-                        .map(|buffer| (selection.tail(), buffer))
-                })?;
+            let multi_buffer_snapshot = multi_buffer.snapshot(cx);
+            let (position, buffer) = multi_buffer_snapshot
+                .anchor_to_buffer_anchor(selection.head())
+                .and_then(|(anchor, _)| Some((anchor, multi_buffer.buffer(anchor.buffer_id)?)))?;
             if !seen_buffer_ids.insert(buffer.read(cx).remote_id()) {
                 return None;
             }
 
-            let language = buffer.read(cx).language_at(position.text_anchor)?;
+            let language = buffer.read(cx).language_at(position)?;
             if filter_language(&language) {
                 let server_id = buffer.update(cx, |buffer, cx| {
                     project
@@ -108,7 +108,7 @@ pub fn lsp_tasks(
             let buffers = buffer_ids
                 .iter()
                 .filter(|&&buffer_id| match for_position {
-                    Some(for_position) => for_position.buffer_id == Some(buffer_id),
+                    Some(for_position) => for_position.buffer_id == buffer_id,
                     None => true,
                 })
                 .filter_map(|&buffer_id| project.read(cx).buffer_for_id(buffer_id, cx))
@@ -194,7 +194,7 @@ mod tests {
     use language::{FakeLspAdapter, Language};
     use languages::rust_lang;
     use lsp::{LanguageServerId, LanguageServerName};
-    use multi_buffer::{Anchor, MultiBuffer};
+    use multi_buffer::MultiBuffer;
     use project::{FakeFs, Project};
     use util::path;
 
@@ -236,7 +236,7 @@ mod tests {
         let filter = |language: &Language| language.name().as_ref() == "Rust";
 
         let assert_result = |result: Option<(
-            Anchor,
+            text::Anchor,
             Arc<Language>,
             LanguageServerId,
             Entity<language::Buffer>,

crates/editor/src/mouse_context_menu.rs πŸ”—

@@ -205,16 +205,17 @@ pub fn deploy_context_menu(
             .all::<PointUtf16>(&display_map)
             .into_iter()
             .any(|s| !s.is_empty());
-        let has_git_repo = buffer
-            .buffer_id_for_anchor(anchor)
-            .is_some_and(|buffer_id| {
-                project
-                    .read(cx)
-                    .git_store()
-                    .read(cx)
-                    .repository_and_path_for_buffer_id(buffer_id, cx)
-                    .is_some()
-            });
+        let has_git_repo =
+            buffer
+                .anchor_to_buffer_anchor(anchor)
+                .is_some_and(|(buffer_anchor, _)| {
+                    project
+                        .read(cx)
+                        .git_store()
+                        .read(cx)
+                        .repository_and_path_for_buffer_id(buffer_anchor.buffer_id, cx)
+                        .is_some()
+                });
 
         let evaluate_selection = window.is_action_available(&EvaluateSelectedText, cx);
         let run_to_cursor = window.is_action_available(&RunToCursor, cx);

crates/editor/src/movement.rs πŸ”—

@@ -588,22 +588,30 @@ pub fn start_of_excerpt(
     direction: Direction,
 ) -> DisplayPoint {
     let point = map.display_point_to_point(display_point, Bias::Left);
-    let Some(excerpt) = map.buffer_snapshot().excerpt_containing(point..point) else {
+    let Some((_, excerpt_range)) = map.buffer_snapshot().excerpt_containing(point..point) else {
         return display_point;
     };
     match direction {
         Direction::Prev => {
-            let mut start = excerpt.start_anchor().to_display_point(map);
+            let Some(start_anchor) = map.anchor_in_excerpt(excerpt_range.context.start) else {
+                return display_point;
+            };
+            let mut start = start_anchor.to_display_point(map);
             if start >= display_point && start.row() > DisplayRow(0) {
-                let Some(excerpt) = map.buffer_snapshot().excerpt_before(excerpt.id()) else {
+                let Some(excerpt) = map.buffer_snapshot().excerpt_before(start_anchor) else {
                     return display_point;
                 };
-                start = excerpt.start_anchor().to_display_point(map);
+                if let Some(start_anchor) = map.anchor_in_excerpt(excerpt.context.start) {
+                    start = start_anchor.to_display_point(map);
+                }
             }
             start
         }
         Direction::Next => {
-            let mut end = excerpt.end_anchor().to_display_point(map);
+            let Some(end_anchor) = map.anchor_in_excerpt(excerpt_range.context.end) else {
+                return display_point;
+            };
+            let mut end = end_anchor.to_display_point(map);
             *end.row_mut() += 1;
             map.clip_point(end, Bias::Right)
         }
@@ -616,12 +624,15 @@ pub fn end_of_excerpt(
     direction: Direction,
 ) -> DisplayPoint {
     let point = map.display_point_to_point(display_point, Bias::Left);
-    let Some(excerpt) = map.buffer_snapshot().excerpt_containing(point..point) else {
+    let Some((_, excerpt_range)) = map.buffer_snapshot().excerpt_containing(point..point) else {
         return display_point;
     };
     match direction {
         Direction::Prev => {
-            let mut start = excerpt.start_anchor().to_display_point(map);
+            let Some(start_anchor) = map.anchor_in_excerpt(excerpt_range.context.start) else {
+                return display_point;
+            };
+            let mut start = start_anchor.to_display_point(map);
             if start.row() > DisplayRow(0) {
                 *start.row_mut() -= 1;
             }
@@ -630,18 +641,23 @@ pub fn end_of_excerpt(
             start
         }
         Direction::Next => {
-            let mut end = excerpt.end_anchor().to_display_point(map);
+            let Some(end_anchor) = map.anchor_in_excerpt(excerpt_range.context.end) else {
+                return display_point;
+            };
+            let mut end = end_anchor.to_display_point(map);
             *end.column_mut() = 0;
             if end <= display_point {
                 *end.row_mut() += 1;
                 let point_end = map.display_point_to_point(end, Bias::Right);
-                let Some(excerpt) = map
+                let Some((_, excerpt_range)) = map
                     .buffer_snapshot()
                     .excerpt_containing(point_end..point_end)
                 else {
                     return display_point;
                 };
-                end = excerpt.end_anchor().to_display_point(map);
+                if let Some(end_anchor) = map.anchor_in_excerpt(excerpt_range.context.end) {
+                    end = end_anchor.to_display_point(map);
+                }
                 *end.column_mut() = 0;
             }
             end

crates/editor/src/runnables.rs πŸ”—

@@ -8,9 +8,7 @@ use gpui::{
 };
 use language::{Buffer, BufferRow, Runnable};
 use lsp::LanguageServerName;
-use multi_buffer::{
-    Anchor, BufferOffset, MultiBufferOffset, MultiBufferRow, MultiBufferSnapshot, ToPoint as _,
-};
+use multi_buffer::{Anchor, BufferOffset, MultiBufferRow, MultiBufferSnapshot, ToPoint as _};
 use project::{
     Location, Project, TaskSourceKind,
     debugger::breakpoint_store::{Breakpoint, BreakpointSessionState},
@@ -165,7 +163,7 @@ impl Editor {
                     .update(cx, |editor, cx| {
                         let multi_buffer = editor.buffer().read(cx);
                         if multi_buffer.is_singleton() {
-                            Some((multi_buffer.snapshot(cx), Anchor::min()..Anchor::max()))
+                            Some((multi_buffer.snapshot(cx), Anchor::Min..Anchor::Max))
                         } else {
                             let display_snapshot =
                                 editor.display_map.update(cx, |map, cx| map.snapshot(cx));
@@ -209,16 +207,8 @@ impl Editor {
                     .fold(HashMap::default(), |mut acc, (kind, location, task)| {
                         let buffer = location.target.buffer;
                         let buffer_snapshot = buffer.read(cx).snapshot();
-                        let offset = multi_buffer_snapshot.excerpts().find_map(
-                            |(excerpt_id, snapshot, _)| {
-                                if snapshot.remote_id() == buffer_snapshot.remote_id() {
-                                    multi_buffer_snapshot
-                                        .anchor_in_excerpt(excerpt_id, location.target.range.start)
-                                } else {
-                                    None
-                                }
-                            },
-                        );
+                        let offset =
+                            multi_buffer_snapshot.anchor_in_excerpt(location.target.range.start);
                         if let Some(offset) = offset {
                             let task_buffer_range =
                                 location.target.range.to_point(&buffer_snapshot);
@@ -369,20 +359,23 @@ impl Editor {
             (selection, buffer, snapshot)
         };
         let selection_range = selection.range();
-        let start = editor_snapshot
+        let Some((_, range)) = editor_snapshot
             .display_snapshot
             .buffer_snapshot()
-            .anchor_after(selection_range.start)
-            .text_anchor;
-        let end = editor_snapshot
-            .display_snapshot
-            .buffer_snapshot()
-            .anchor_after(selection_range.end)
-            .text_anchor;
-        let location = Location {
-            buffer,
-            range: start..end,
+            .anchor_range_to_buffer_anchor_range(
+                editor_snapshot
+                    .display_snapshot
+                    .buffer_snapshot()
+                    .anchor_after(selection_range.start)
+                    ..editor_snapshot
+                        .display_snapshot
+                        .buffer_snapshot()
+                        .anchor_before(selection_range.end),
+            )
+        else {
+            return Task::ready(None);
         };
+        let location = Location { buffer, range };
         let captured_variables = {
             let mut variables = TaskVariables::default();
             let buffer = location.buffer.read(cx);
@@ -430,9 +423,9 @@ impl Editor {
             return HashMap::default();
         }
         let buffers = if visible_only {
-            self.visible_excerpts(true, cx)
-                .into_values()
-                .map(|(buffer, _, _)| buffer)
+            self.visible_buffers(cx)
+                .into_iter()
+                .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
                 .collect()
         } else {
             self.buffer().read(cx).all_buffers()
@@ -482,19 +475,15 @@ impl Editor {
         cx: &mut Context<Self>,
     ) -> Option<(Entity<Buffer>, u32, Arc<RunnableTasks>)> {
         let snapshot = self.buffer.read(cx).snapshot(cx);
-        let offset = self
-            .selections
-            .newest::<MultiBufferOffset>(&self.display_snapshot(cx))
-            .head();
-        let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
-        let offset = excerpt.map_offset_to_buffer(offset);
-        let buffer_id = excerpt.buffer().remote_id();
+        let anchor = self.selections.newest_anchor().head();
+        let (anchor, buffer_snapshot) = snapshot.anchor_to_buffer_anchor(anchor)?;
+        let offset = anchor.to_offset(buffer_snapshot);
 
-        let layer = excerpt.buffer().syntax_layer_at(offset)?;
+        let layer = buffer_snapshot.syntax_layer_at(offset)?;
         let mut cursor = layer.node().walk();
 
-        while cursor.goto_first_child_for_byte(offset.0).is_some() {
-            if cursor.node().end_byte() == offset.0 {
+        while cursor.goto_first_child_for_byte(offset).is_some() {
+            if cursor.node().end_byte() == offset {
                 cursor.goto_next_sibling();
             }
         }
@@ -503,18 +492,18 @@ impl Editor {
         loop {
             let node = cursor.node();
             let node_range = node.byte_range();
-            let symbol_start_row = excerpt.buffer().offset_to_point(node.start_byte()).row;
+            let symbol_start_row = buffer_snapshot.offset_to_point(node.start_byte()).row;
 
             // Check if this node contains our offset
-            if node_range.start <= offset.0 && node_range.end >= offset.0 {
+            if node_range.start <= offset && node_range.end >= offset {
                 // If it contains offset, check for task
                 if let Some(tasks) = self
                     .runnables
                     .runnables
-                    .get(&buffer_id)
+                    .get(&buffer_snapshot.remote_id())
                     .and_then(|(_, tasks)| tasks.get(&symbol_start_row))
                 {
-                    let buffer = self.buffer.read(cx).buffer(buffer_id)?;
+                    let buffer = self.buffer.read(cx).buffer(buffer_snapshot.remote_id())?;
                     return Some((buffer, symbol_start_row, Arc::new(tasks.to_owned())));
                 }
             }

crates/editor/src/rust_analyzer_ext.rs πŸ”—

@@ -88,7 +88,7 @@ pub fn go_to_parent_module(
             let request = proto::LspExtGoToParentModule {
                 project_id,
                 buffer_id: buffer_id.to_proto(),
-                position: Some(serialize_anchor(&trigger_anchor.text_anchor)),
+                position: Some(serialize_anchor(&trigger_anchor)),
             };
             let response = client
                 .request(request)
@@ -106,7 +106,7 @@ pub fn go_to_parent_module(
             .context("go to parent module via collab")?
         } else {
             let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
-            let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot);
+            let position = trigger_anchor.to_point_utf16(&buffer_snapshot);
             project
                 .update(cx, |project, cx| {
                     project.request_lsp(
@@ -168,7 +168,7 @@ pub fn expand_macro_recursively(
             let request = proto::LspExtExpandMacro {
                 project_id,
                 buffer_id: buffer_id.to_proto(),
-                position: Some(serialize_anchor(&trigger_anchor.text_anchor)),
+                position: Some(serialize_anchor(&trigger_anchor)),
             };
             let response = client
                 .request(request)
@@ -180,7 +180,7 @@ pub fn expand_macro_recursively(
             }
         } else {
             let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
-            let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot);
+            let position = trigger_anchor.to_point_utf16(&buffer_snapshot);
             project
                 .update(cx, |project, cx| {
                     project.request_lsp(
@@ -195,10 +195,7 @@ pub fn expand_macro_recursively(
         };
 
         if macro_expansion.is_empty() {
-            log::info!(
-                "Empty macro expansion for position {:?}",
-                trigger_anchor.text_anchor
-            );
+            log::info!("Empty macro expansion for position {:?}", trigger_anchor);
             return Ok(());
         }
 
@@ -260,7 +257,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu
             let request = proto::LspExtOpenDocs {
                 project_id,
                 buffer_id: buffer_id.to_proto(),
-                position: Some(serialize_anchor(&trigger_anchor.text_anchor)),
+                position: Some(serialize_anchor(&trigger_anchor)),
             };
             let response = client
                 .request(request)
@@ -272,7 +269,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu
             }
         } else {
             let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
-            let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot);
+            let position = trigger_anchor.to_point_utf16(&buffer_snapshot);
             project
                 .update(cx, |project, cx| {
                     project.request_lsp(
@@ -287,10 +284,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu
         };
 
         if docs_urls.is_empty() {
-            log::debug!(
-                "Empty docs urls for position {:?}",
-                trigger_anchor.text_anchor
-            );
+            log::debug!("Empty docs urls for position {:?}", trigger_anchor);
             return Ok(());
         }
 
@@ -322,16 +316,18 @@ fn cancel_flycheck_action(
     let Some(project) = &editor.project else {
         return;
     };
+    let multibuffer_snapshot = editor
+        .buffer
+        .read_with(cx, |buffer, cx| buffer.snapshot(cx));
     let buffer_id = editor
         .selections
         .disjoint_anchors_arc()
         .iter()
         .find_map(|selection| {
-            let buffer_id = selection
-                .start
-                .text_anchor
-                .buffer_id
-                .or(selection.end.text_anchor.buffer_id)?;
+            let buffer_id = multibuffer_snapshot
+                .anchor_to_buffer_anchor(selection.start)?
+                .0
+                .buffer_id;
             let project = project.read(cx);
             let entry_id = project
                 .buffer_for_id(buffer_id, cx)?
@@ -351,16 +347,18 @@ fn run_flycheck_action(
     let Some(project) = &editor.project else {
         return;
     };
+    let multibuffer_snapshot = editor
+        .buffer
+        .read_with(cx, |buffer, cx| buffer.snapshot(cx));
     let buffer_id = editor
         .selections
         .disjoint_anchors_arc()
         .iter()
         .find_map(|selection| {
-            let buffer_id = selection
-                .start
-                .text_anchor
-                .buffer_id
-                .or(selection.end.text_anchor.buffer_id)?;
+            let buffer_id = multibuffer_snapshot
+                .anchor_to_buffer_anchor(selection.head())?
+                .0
+                .buffer_id;
             let project = project.read(cx);
             let entry_id = project
                 .buffer_for_id(buffer_id, cx)?
@@ -380,16 +378,18 @@ fn clear_flycheck_action(
     let Some(project) = &editor.project else {
         return;
     };
+    let multibuffer_snapshot = editor
+        .buffer
+        .read_with(cx, |buffer, cx| buffer.snapshot(cx));
     let buffer_id = editor
         .selections
         .disjoint_anchors_arc()
         .iter()
         .find_map(|selection| {
-            let buffer_id = selection
-                .start
-                .text_anchor
-                .buffer_id
-                .or(selection.end.text_anchor.buffer_id)?;
+            let buffer_id = multibuffer_snapshot
+                .anchor_to_buffer_anchor(selection.head())?
+                .0
+                .buffer_id;
             let project = project.read(cx);
             let entry_id = project
                 .buffer_for_id(buffer_id, cx)?

crates/editor/src/scroll.rs πŸ”—

@@ -44,13 +44,13 @@ impl ScrollAnchor {
     pub(super) fn new() -> Self {
         Self {
             offset: gpui::Point::default(),
-            anchor: Anchor::min(),
+            anchor: Anchor::Min,
         }
     }
 
     pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> gpui::Point<ScrollOffset> {
         self.offset.apply_along(Axis::Vertical, |offset| {
-            if self.anchor == Anchor::min() {
+            if self.anchor == Anchor::Min {
                 0.
             } else {
                 let scroll_top = self.anchor.to_display_point(snapshot).row().as_f64();

crates/editor/src/scroll/actions.rs πŸ”—

@@ -78,7 +78,7 @@ impl Editor {
         let selection_head = self.selections.newest_display(&display_snapshot).head();
 
         let sticky_headers_len = if EditorSettings::get_global(cx).sticky_scroll.enabled
-            && let Some((_, _, buffer_snapshot)) = display_snapshot.buffer_snapshot().as_singleton()
+            && let Some(buffer_snapshot) = display_snapshot.buffer_snapshot().as_singleton()
         {
             let select_head_point =
                 rope::Point::new(selection_head.to_point(&display_snapshot).row, 0);

crates/editor/src/selections_collection.rs πŸ”—

@@ -4,7 +4,6 @@ use std::{
     sync::Arc,
 };
 
-use collections::HashMap;
 use gpui::Pixels;
 use itertools::Itertools as _;
 use language::{Bias, Point, PointUtf16, Selection, SelectionGoal};
@@ -12,7 +11,7 @@ use multi_buffer::{MultiBufferDimension, MultiBufferOffset};
 use util::post_inc;
 
 use crate::{
-    Anchor, DisplayPoint, DisplayRow, ExcerptId, MultiBufferSnapshot, SelectMode, ToOffset,
+    Anchor, DisplayPoint, DisplayRow, MultiBufferSnapshot, SelectMode, ToOffset,
     display_map::{DisplaySnapshot, ToDisplayPoint},
     movement::TextLayoutDetails,
 };
@@ -45,8 +44,8 @@ impl SelectionsCollection {
             pending: Some(PendingSelection {
                 selection: Selection {
                     id: 0,
-                    start: Anchor::min(),
-                    end: Anchor::min(),
+                    start: Anchor::Min,
+                    end: Anchor::Min,
                     reversed: false,
                     goal: SelectionGoal::None,
                 },
@@ -547,13 +546,11 @@ impl SelectionsCollection {
                 );
                 assert!(
                     snapshot.can_resolve(&selection.start),
-                    "disjoint selection start is not resolvable for the given snapshot:\n{selection:?}, {excerpt:?}",
-                    excerpt = snapshot.buffer_for_excerpt(selection.start.excerpt_id).map(|snapshot| snapshot.remote_id()),
+                    "disjoint selection start is not resolvable for the given snapshot:\n{selection:?}",
                 );
                 assert!(
                     snapshot.can_resolve(&selection.end),
-                    "disjoint selection end is not resolvable for the given snapshot: {selection:?}, {excerpt:?}",
-                    excerpt = snapshot.buffer_for_excerpt(selection.end.excerpt_id).map(|snapshot| snapshot.remote_id()),
+                    "disjoint selection end is not resolvable for the given snapshot:\n{selection:?}",
                 );
             });
             assert!(
@@ -572,17 +569,11 @@ impl SelectionsCollection {
                 );
                 assert!(
                     snapshot.can_resolve(&selection.start),
-                    "pending selection start is not resolvable for the given snapshot: {pending:?}, {excerpt:?}",
-                    excerpt = snapshot
-                        .buffer_for_excerpt(selection.start.excerpt_id)
-                        .map(|snapshot| snapshot.remote_id()),
+                    "pending selection start is not resolvable for the given snapshot: {pending:?}",
                 );
                 assert!(
                     snapshot.can_resolve(&selection.end),
-                    "pending selection end is not resolvable for the given snapshot: {pending:?}, {excerpt:?}",
-                    excerpt = snapshot
-                        .buffer_for_excerpt(selection.end.excerpt_id)
-                        .map(|snapshot| snapshot.remote_id()),
+                    "pending selection end is not resolvable for the given snapshot: {pending:?}",
                 );
             }
         }
@@ -665,10 +656,10 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> {
             self.disjoint
                 .iter()
                 .filter(|selection| {
-                    if let Some(selection_buffer_id) =
-                        self.snapshot.buffer_id_for_anchor(selection.start)
+                    if let Some((selection_buffer_anchor, _)) =
+                        self.snapshot.anchor_to_buffer_anchor(selection.start)
                     {
-                        let should_remove = selection_buffer_id == buffer_id;
+                        let should_remove = selection_buffer_anchor.buffer_id == buffer_id;
                         changed |= should_remove;
                         !should_remove
                     } else {
@@ -683,10 +674,8 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> {
             let buffer_snapshot = self.snapshot.buffer_snapshot();
             let anchor = buffer_snapshot
                 .excerpts()
-                .find(|(_, buffer, _)| buffer.remote_id() == buffer_id)
-                .and_then(|(excerpt_id, _, range)| {
-                    buffer_snapshot.anchor_in_excerpt(excerpt_id, range.context.start)
-                })
+                .find(|excerpt| excerpt.context.start.buffer_id == buffer_id)
+                .and_then(|excerpt| buffer_snapshot.anchor_in_excerpt(excerpt.context.start))
                 .unwrap_or_else(|| self.snapshot.anchor_before(MultiBufferOffset(0)));
             self.collection.disjoint = Arc::from([Selection {
                 id: post_inc(&mut self.collection.next_selection_id),
@@ -1077,80 +1066,6 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> {
         self.selections_changed = true;
         self.pending.as_mut().map(|pending| &mut pending.selection)
     }
-
-    /// Compute new ranges for any selections that were located in excerpts that have
-    /// since been removed.
-    ///
-    /// Returns a `HashMap` indicating which selections whose former head position
-    /// was no longer present. The keys of the map are selection ids. The values are
-    /// the id of the new excerpt where the head of the selection has been moved.
-    pub fn refresh(&mut self) -> HashMap<usize, ExcerptId> {
-        let mut pending = self.collection.pending.take();
-        let mut selections_with_lost_position = HashMap::default();
-
-        let anchors_with_status = {
-            let disjoint_anchors = self
-                .disjoint
-                .iter()
-                .flat_map(|selection| [&selection.start, &selection.end]);
-            self.snapshot.refresh_anchors(disjoint_anchors)
-        };
-        let adjusted_disjoint: Vec<_> = anchors_with_status
-            .chunks(2)
-            .map(|selection_anchors| {
-                let (anchor_ix, start, kept_start) = selection_anchors[0];
-                let (_, end, kept_end) = selection_anchors[1];
-                let selection = &self.disjoint[anchor_ix / 2];
-                let kept_head = if selection.reversed {
-                    kept_start
-                } else {
-                    kept_end
-                };
-                if !kept_head {
-                    selections_with_lost_position.insert(selection.id, selection.head().excerpt_id);
-                }
-
-                Selection {
-                    id: selection.id,
-                    start,
-                    end,
-                    reversed: selection.reversed,
-                    goal: selection.goal,
-                }
-            })
-            .collect();
-
-        if !adjusted_disjoint.is_empty() {
-            let map = self.display_snapshot();
-            let resolved_selections =
-                resolve_selections_wrapping_blocks(adjusted_disjoint.iter(), &map).collect();
-            self.select::<MultiBufferOffset>(resolved_selections);
-        }
-
-        if let Some(pending) = pending.as_mut() {
-            let anchors = self
-                .snapshot
-                .refresh_anchors([&pending.selection.start, &pending.selection.end]);
-            let (_, start, kept_start) = anchors[0];
-            let (_, end, kept_end) = anchors[1];
-            let kept_head = if pending.selection.reversed {
-                kept_start
-            } else {
-                kept_end
-            };
-            if !kept_head {
-                selections_with_lost_position
-                    .insert(pending.selection.id, pending.selection.head().excerpt_id);
-            }
-
-            pending.selection.start = start;
-            pending.selection.end = end;
-        }
-        self.collection.pending = pending;
-        self.selections_changed = true;
-
-        selections_with_lost_position
-    }
 }
 
 impl Deref for MutableSelectionsCollection<'_, '_> {

crates/editor/src/semantic_tokens.rs πŸ”—

@@ -148,9 +148,9 @@ impl Editor {
         };
 
         let buffers_to_query = self
-            .visible_excerpts(true, cx)
-            .into_values()
-            .map(|(buffer, ..)| buffer)
+            .visible_buffers(cx)
+            .into_iter()
+            .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
             .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
             .filter_map(|editor_buffer| {
                 let editor_buffer_id = editor_buffer.read(cx).remote_id();
@@ -1214,11 +1214,19 @@ mod tests {
         );
 
         // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
-        let toml_excerpt_id =
-            editor.read_with(cx, |editor, cx| editor.buffer().read(cx).excerpt_ids()[0]);
+        let toml_anchor = editor.read_with(cx, |editor, cx| {
+            editor
+                .buffer()
+                .read(cx)
+                .snapshot(cx)
+                .anchor_in_excerpt(text::Anchor::min_for_buffer(
+                    toml_buffer.read(cx).remote_id(),
+                ))
+                .unwrap()
+        });
         editor.update_in(cx, |editor, _, cx| {
             editor.buffer().update(cx, |buffer, cx| {
-                buffer.expand_excerpts([toml_excerpt_id], 2, ExpandExcerptDirection::Down, cx);
+                buffer.expand_excerpts([toml_anchor], 2, ExpandExcerptDirection::Down, cx);
             });
         });
 

crates/editor/src/signature_help.rs πŸ”—

@@ -7,7 +7,7 @@ use gpui::{
 };
 use language::BufferSnapshot;
 
-use markdown::{Markdown, MarkdownElement};
+use markdown::{CopyButtonVisibility, Markdown, MarkdownElement};
 use multi_buffer::{Anchor, MultiBufferOffset, ToOffset};
 use settings::Settings;
 use std::ops::Range;
@@ -408,9 +408,8 @@ impl SignatureHelpPopover {
                                         hover_markdown_style(window, cx),
                                     )
                                     .code_block_renderer(markdown::CodeBlockRenderer::Default {
-                                        copy_button: false,
+                                        copy_button_visibility: CopyButtonVisibility::Hidden,
                                         border: false,
-                                        copy_button_on_hover: false,
                                     })
                                     .on_url_click(open_markdown_url),
                                 )
@@ -421,9 +420,8 @@ impl SignatureHelpPopover {
                             .child(
                                 MarkdownElement::new(description, hover_markdown_style(window, cx))
                                     .code_block_renderer(markdown::CodeBlockRenderer::Default {
-                                        copy_button: false,
+                                        copy_button_visibility: CopyButtonVisibility::Hidden,
                                         border: false,
-                                        copy_button_on_hover: false,
                                     })
                                     .on_url_click(open_markdown_url),
                             )

crates/editor/src/split.rs πŸ”—

@@ -1,5 +1,5 @@
 use std::{
-    ops::{Bound, Range, RangeInclusive},
+    ops::{Range, RangeInclusive},
     sync::Arc,
 };
 
@@ -13,7 +13,7 @@ use gpui::{
 use itertools::Itertools;
 use language::{Buffer, Capability, HighlightedText};
 use multi_buffer::{
-    Anchor, BufferOffset, ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer,
+    Anchor, AnchorRangeExt as _, BufferOffset, ExcerptRange, ExpandExcerptDirection, MultiBuffer,
     MultiBufferDiffHunk, MultiBufferPoint, MultiBufferSnapshot, PathKey,
 };
 use project::Project;
@@ -44,13 +44,11 @@ use crate::{
 use zed_actions::assistant::InlineAssist;
 
 pub(crate) fn convert_lhs_rows_to_rhs(
-    lhs_excerpt_to_rhs_excerpt: &HashMap<ExcerptId, ExcerptId>,
     rhs_snapshot: &MultiBufferSnapshot,
     lhs_snapshot: &MultiBufferSnapshot,
-    lhs_bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+    lhs_bounds: Range<MultiBufferPoint>,
 ) -> Vec<CompanionExcerptPatch> {
     patches_for_range(
-        lhs_excerpt_to_rhs_excerpt,
         lhs_snapshot,
         rhs_snapshot,
         lhs_bounds,
@@ -59,13 +57,11 @@ pub(crate) fn convert_lhs_rows_to_rhs(
 }
 
 pub(crate) fn convert_rhs_rows_to_lhs(
-    rhs_excerpt_to_lhs_excerpt: &HashMap<ExcerptId, ExcerptId>,
     lhs_snapshot: &MultiBufferSnapshot,
     rhs_snapshot: &MultiBufferSnapshot,
-    rhs_bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+    rhs_bounds: Range<MultiBufferPoint>,
 ) -> Vec<CompanionExcerptPatch> {
     patches_for_range(
-        rhs_excerpt_to_lhs_excerpt,
         rhs_snapshot,
         lhs_snapshot,
         rhs_bounds,
@@ -73,6 +69,21 @@ pub(crate) fn convert_rhs_rows_to_lhs(
     )
 }
 
+fn rhs_range_to_base_text_range(
+    rhs_range: &Range<Point>,
+    diff_snapshot: &BufferDiffSnapshot,
+    rhs_buffer_snapshot: &text::BufferSnapshot,
+) -> Range<Point> {
+    let start = diff_snapshot
+        .buffer_point_to_base_text_range(Point::new(rhs_range.start.row, 0), rhs_buffer_snapshot)
+        .start;
+    let end = diff_snapshot
+        .buffer_point_to_base_text_range(Point::new(rhs_range.end.row, 0), rhs_buffer_snapshot)
+        .end;
+    let end_column = diff_snapshot.base_text().line_len(end.row);
+    Point::new(start.row, 0)..Point::new(end.row, end_column)
+}
+
 fn translate_lhs_selections_to_rhs(
     selections_by_buffer: &HashMap<BufferId, (Vec<Range<BufferOffset>>, Option<u32>)>,
     splittable: &SplittableEditor,
@@ -168,22 +179,18 @@ fn translate_lhs_hunks_to_rhs(
 }
 
 fn patches_for_range<F>(
-    excerpt_map: &HashMap<ExcerptId, ExcerptId>,
     source_snapshot: &MultiBufferSnapshot,
     target_snapshot: &MultiBufferSnapshot,
-    source_bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+    source_bounds: Range<MultiBufferPoint>,
     translate_fn: F,
 ) -> Vec<CompanionExcerptPatch>
 where
     F: Fn(&BufferDiffSnapshot, RangeInclusive<Point>, &text::BufferSnapshot) -> Patch<Point>,
 {
-    struct PendingExcerpt<'a> {
-        source_excerpt_id: ExcerptId,
-        target_excerpt_id: ExcerptId,
-        source_buffer: &'a text::BufferSnapshot,
-        target_buffer: &'a text::BufferSnapshot,
+    struct PendingExcerpt {
+        source_buffer_snapshot: language::BufferSnapshot,
+        source_excerpt_range: ExcerptRange<text::Anchor>,
         buffer_point_range: Range<Point>,
-        source_context_range: Range<Point>,
     }
 
     let mut result = Vec::new();
@@ -201,41 +208,55 @@ where
         };
 
         let diff = source_snapshot
-            .diff_for_buffer_id(first.source_buffer.remote_id())
+            .diff_for_buffer_id(first.source_buffer_snapshot.remote_id())
             .expect("buffer with no diff when creating patches");
-        let rhs_buffer = if first.source_buffer.remote_id() == diff.base_text().remote_id() {
-            first.target_buffer
+        let source_is_lhs =
+            first.source_buffer_snapshot.remote_id() == diff.base_text().remote_id();
+        let target_buffer_id = if source_is_lhs {
+            diff.buffer_id()
         } else {
-            first.source_buffer
+            diff.base_text().remote_id()
+        };
+        let target_buffer = target_snapshot
+            .buffer_for_id(target_buffer_id)
+            .expect("missing corresponding buffer");
+        let rhs_buffer = if source_is_lhs {
+            target_buffer
+        } else {
+            &first.source_buffer_snapshot
         };
 
         let patch = translate_fn(diff, union_start..=union_end, rhs_buffer);
 
         for excerpt in pending.drain(..) {
+            let target_position = patch.old_to_new(excerpt.buffer_point_range.start);
+            let target_position = target_buffer.anchor_before(target_position);
+            let Some(target_position) = target_snapshot.anchor_in_excerpt(target_position) else {
+                continue;
+            };
+            let Some((target_buffer_snapshot, target_excerpt_range)) =
+                target_snapshot.excerpt_containing(target_position..target_position)
+            else {
+                continue;
+            };
+
             result.push(patch_for_excerpt(
                 source_snapshot,
                 target_snapshot,
-                excerpt.source_excerpt_id,
-                excerpt.target_excerpt_id,
-                excerpt.target_buffer,
-                excerpt.source_context_range,
+                &excerpt.source_buffer_snapshot,
+                target_buffer_snapshot,
+                excerpt.source_excerpt_range,
+                target_excerpt_range,
                 &patch,
                 excerpt.buffer_point_range,
             ));
         }
     };
 
-    for (source_buffer, buffer_offset_range, source_excerpt_id, source_context_range) in
-        source_snapshot.range_to_buffer_ranges_with_context(source_bounds)
+    for (buffer_snapshot, source_range, source_excerpt_range) in
+        source_snapshot.range_to_buffer_ranges(source_bounds)
     {
-        let Some(target_excerpt_id) = excerpt_map.get(&source_excerpt_id).copied() else {
-            continue;
-        };
-        let Some(target_buffer) = target_snapshot.buffer_for_excerpt(target_excerpt_id) else {
-            continue;
-        };
-
-        let buffer_id = source_buffer.remote_id();
+        let buffer_id = buffer_snapshot.remote_id();
 
         if current_buffer_id != Some(buffer_id) {
             if let (Some(start), Some(end)) = (union_context_start.take(), union_context_end.take())
@@ -245,8 +266,8 @@ where
             current_buffer_id = Some(buffer_id);
         }
 
-        let buffer_point_range = buffer_offset_range.to_point(source_buffer);
-        let source_context_range = source_context_range.to_point(source_buffer);
+        let buffer_point_range = source_range.to_point(&buffer_snapshot);
+        let source_context_range = source_excerpt_range.context.to_point(&buffer_snapshot);
 
         union_context_start = Some(union_context_start.map_or(source_context_range.start, |s| {
             s.min(source_context_range.start)
@@ -256,12 +277,9 @@ where
         }));
 
         pending_excerpts.push(PendingExcerpt {
-            source_excerpt_id,
-            target_excerpt_id,
-            source_buffer,
-            target_buffer,
+            source_buffer_snapshot: buffer_snapshot,
+            source_excerpt_range,
             buffer_point_range,
-            source_context_range,
         });
     }
 
@@ -275,55 +293,60 @@ where
 fn patch_for_excerpt(
     source_snapshot: &MultiBufferSnapshot,
     target_snapshot: &MultiBufferSnapshot,
-    source_excerpt_id: ExcerptId,
-    target_excerpt_id: ExcerptId,
-    target_buffer: &text::BufferSnapshot,
-    source_context_range: Range<Point>,
+    source_buffer_snapshot: &language::BufferSnapshot,
+    target_buffer_snapshot: &language::BufferSnapshot,
+    source_excerpt_range: ExcerptRange<text::Anchor>,
+    target_excerpt_range: ExcerptRange<text::Anchor>,
     patch: &Patch<Point>,
     source_edited_range: Range<Point>,
 ) -> CompanionExcerptPatch {
-    let source_multibuffer_range = source_snapshot
-        .range_for_excerpt(source_excerpt_id)
-        .expect("no excerpt for source id when creating patch");
-    let source_excerpt_start_in_multibuffer = source_multibuffer_range.start;
-    let source_excerpt_start_in_buffer = source_context_range.start;
-    let source_excerpt_end_in_buffer = source_context_range.end;
-    let target_multibuffer_range = target_snapshot
-        .range_for_excerpt(target_excerpt_id)
-        .expect("no excerpt for target id when creating patch");
-    let target_excerpt_start_in_multibuffer = target_multibuffer_range.start;
-    let target_context_range = target_snapshot
-        .context_range_for_excerpt(target_excerpt_id)
-        .expect("no range for target id when creating patch");
-    let target_excerpt_start_in_buffer = target_context_range.start.to_point(&target_buffer);
-    let target_excerpt_end_in_buffer = target_context_range.end.to_point(&target_buffer);
+    let source_buffer_range = source_excerpt_range
+        .context
+        .to_point(source_buffer_snapshot);
+    let source_multibuffer_range = (source_snapshot
+        .anchor_in_buffer(source_excerpt_range.context.start)
+        .expect("buffer should exist in multibuffer")
+        ..source_snapshot
+            .anchor_in_buffer(source_excerpt_range.context.end)
+            .expect("buffer should exist in multibuffer"))
+        .to_point(source_snapshot);
+    let target_buffer_range = target_excerpt_range
+        .context
+        .to_point(target_buffer_snapshot);
+    let target_multibuffer_range = (target_snapshot
+        .anchor_in_buffer(target_excerpt_range.context.start)
+        .expect("buffer should exist in multibuffer")
+        ..target_snapshot
+            .anchor_in_buffer(target_excerpt_range.context.end)
+            .expect("buffer should exist in multibuffer"))
+        .to_point(target_snapshot);
 
     let edits = patch
         .edits()
         .iter()
-        .skip_while(|edit| edit.old.end < source_excerpt_start_in_buffer)
-        .take_while(|edit| edit.old.start <= source_excerpt_end_in_buffer)
+        .skip_while(|edit| edit.old.end < source_buffer_range.start)
+        .take_while(|edit| edit.old.start <= source_buffer_range.end)
         .map(|edit| {
-            let clamped_source_start = edit.old.start.max(source_excerpt_start_in_buffer);
-            let clamped_source_end = edit.old.end.min(source_excerpt_end_in_buffer);
-            let source_multibuffer_start = source_excerpt_start_in_multibuffer
-                + (clamped_source_start - source_excerpt_start_in_buffer);
-            let source_multibuffer_end = source_excerpt_start_in_multibuffer
-                + (clamped_source_end - source_excerpt_start_in_buffer);
+            let clamped_source_start = edit.old.start.max(source_buffer_range.start);
+            let clamped_source_end = edit.old.end.min(source_buffer_range.end);
+            let source_multibuffer_start =
+                source_multibuffer_range.start + (clamped_source_start - source_buffer_range.start);
+            let source_multibuffer_end =
+                source_multibuffer_range.start + (clamped_source_end - source_buffer_range.start);
             let clamped_target_start = edit
                 .new
                 .start
-                .max(target_excerpt_start_in_buffer)
-                .min(target_excerpt_end_in_buffer);
+                .max(target_buffer_range.start)
+                .min(target_buffer_range.end);
             let clamped_target_end = edit
                 .new
                 .end
-                .max(target_excerpt_start_in_buffer)
-                .min(target_excerpt_end_in_buffer);
-            let target_multibuffer_start = target_excerpt_start_in_multibuffer
-                + (clamped_target_start - target_excerpt_start_in_buffer);
-            let target_multibuffer_end = target_excerpt_start_in_multibuffer
-                + (clamped_target_end - target_excerpt_start_in_buffer);
+                .max(target_buffer_range.start)
+                .min(target_buffer_range.end);
+            let target_multibuffer_start =
+                target_multibuffer_range.start + (clamped_target_start - target_buffer_range.start);
+            let target_multibuffer_end =
+                target_multibuffer_range.start + (clamped_target_end - target_buffer_range.start);
             text::Edit {
                 old: source_multibuffer_start..source_multibuffer_end,
                 new: target_multibuffer_start..target_multibuffer_end,
@@ -331,8 +354,8 @@ fn patch_for_excerpt(
         });
 
     let edits = [text::Edit {
-        old: source_excerpt_start_in_multibuffer..source_excerpt_start_in_multibuffer,
-        new: target_excerpt_start_in_multibuffer..target_excerpt_start_in_multibuffer,
+        old: source_multibuffer_range.start..source_multibuffer_range.start,
+        new: target_multibuffer_range.start..target_multibuffer_range.start,
     }]
     .into_iter()
     .chain(edits);
@@ -349,21 +372,20 @@ fn patch_for_excerpt(
         merged_edits.push(edit);
     }
 
-    let edited_range = source_excerpt_start_in_multibuffer
-        + (source_edited_range.start - source_excerpt_start_in_buffer)
-        ..source_excerpt_start_in_multibuffer
-            + (source_edited_range.end - source_excerpt_start_in_buffer);
+    let edited_range = source_multibuffer_range.start
+        + (source_edited_range.start - source_buffer_range.start)
+        ..source_multibuffer_range.start + (source_edited_range.end - source_buffer_range.start);
 
-    let source_excerpt_end = source_excerpt_start_in_multibuffer
-        + (source_excerpt_end_in_buffer - source_excerpt_start_in_buffer);
-    let target_excerpt_end = target_excerpt_start_in_multibuffer
-        + (target_excerpt_end_in_buffer - target_excerpt_start_in_buffer);
+    let source_excerpt_end =
+        source_multibuffer_range.start + (source_buffer_range.end - source_buffer_range.start);
+    let target_excerpt_end =
+        target_multibuffer_range.start + (target_buffer_range.end - target_buffer_range.start);
 
     CompanionExcerptPatch {
         patch: Patch::new(merged_edits),
         edited_range,
-        source_excerpt_range: source_excerpt_start_in_multibuffer..source_excerpt_end,
-        target_excerpt_range: target_excerpt_start_in_multibuffer..target_excerpt_end,
+        source_excerpt_range: source_multibuffer_range.start..source_excerpt_end,
+        target_excerpt_range: target_multibuffer_range.start..target_excerpt_end,
     }
 }
 
@@ -390,6 +412,7 @@ pub struct SplittableEditor {
 struct LhsEditor {
     multibuffer: Entity<MultiBuffer>,
     editor: Entity<Editor>,
+    companion: Entity<Companion>,
     was_last_focused: bool,
     _subscriptions: Vec<Subscription>,
 }
@@ -470,11 +493,16 @@ impl SplittableEditor {
                 &rhs_editor,
                 |this, _, event: &EditorEvent, cx| match event {
                     EditorEvent::ExpandExcerptsRequested {
-                        excerpt_ids,
+                        excerpt_anchors,
                         lines,
                         direction,
                     } => {
-                        this.expand_excerpts(excerpt_ids.iter().copied(), *lines, *direction, cx);
+                        this.expand_excerpts(
+                            excerpt_anchors.iter().copied(),
+                            *lines,
+                            *direction,
+                            cx,
+                        );
                     }
                     _ => cx.emit(event.clone()),
                 },
@@ -563,19 +591,31 @@ impl SplittableEditor {
             window,
             |this, _, event: &EditorEvent, window, cx| match event {
                 EditorEvent::ExpandExcerptsRequested {
-                    excerpt_ids,
+                    excerpt_anchors,
                     lines,
                     direction,
                 } => {
-                    if this.lhs.is_some() {
-                        let rhs_display_map = this.rhs_editor.read(cx).display_map.read(cx);
-                        let rhs_ids: Vec<_> = excerpt_ids
+                    if let Some(lhs) = &this.lhs {
+                        let rhs_snapshot = this.rhs_multibuffer.read(cx).snapshot(cx);
+                        let lhs_snapshot = lhs.multibuffer.read(cx).snapshot(cx);
+                        let rhs_anchors = excerpt_anchors
                             .iter()
-                            .filter_map(|id| {
-                                rhs_display_map.companion_excerpt_to_my_excerpt(*id, cx)
+                            .filter_map(|anchor| {
+                                let (anchor, lhs_buffer) =
+                                    lhs_snapshot.anchor_to_buffer_anchor(*anchor)?;
+                                let rhs_buffer_id =
+                                    lhs.companion.read(cx).lhs_to_rhs_buffer(anchor.buffer_id)?;
+                                let rhs_buffer = rhs_snapshot.buffer_for_id(rhs_buffer_id)?;
+                                let diff = this.rhs_multibuffer.read(cx).diff_for(rhs_buffer_id)?;
+                                let diff_snapshot = diff.read(cx).snapshot(cx);
+                                let rhs_point = diff_snapshot.base_text_point_to_buffer_point(
+                                    anchor.to_point(&lhs_buffer),
+                                    &rhs_buffer,
+                                );
+                                rhs_snapshot.anchor_in_excerpt(rhs_buffer.anchor_before(rhs_point))
                             })
-                            .collect();
-                        this.expand_excerpts(rhs_ids.into_iter(), *lines, *direction, cx);
+                            .collect::<Vec<_>>();
+                        this.expand_excerpts(rhs_anchors.into_iter(), *lines, *direction, cx);
                     }
                 }
                 EditorEvent::StageOrUnstageRequested { stage, hunks } => {
@@ -654,15 +694,23 @@ impl SplittableEditor {
             }),
         );
 
+        let rhs_display_map = self.rhs_editor.read(cx).display_map.clone();
+        let lhs_display_map = lhs_editor.read(cx).display_map.clone();
+        let rhs_display_map_id = rhs_display_map.entity_id();
+        let companion = cx.new(|_| {
+            Companion::new(
+                rhs_display_map_id,
+                convert_rhs_rows_to_lhs,
+                convert_lhs_rows_to_rhs,
+            )
+        });
         let lhs = LhsEditor {
             editor: lhs_editor,
             multibuffer: lhs_multibuffer,
             was_last_focused: false,
+            companion: companion.clone(),
             _subscriptions: subscriptions,
         };
-        let rhs_display_map = self.rhs_editor.read(cx).display_map.clone();
-        let lhs_display_map = lhs.editor.read(cx).display_map.clone();
-        let rhs_display_map_id = rhs_display_map.entity_id();
 
         self.rhs_editor.update(cx, |editor, cx| {
             editor.set_delegate_expand_excerpts(true);
@@ -672,35 +720,21 @@ impl SplittableEditor {
             })
         });
 
-        let path_diffs: Vec<_> = {
+        let all_paths: Vec<_> = {
             let rhs_multibuffer = self.rhs_multibuffer.read(cx);
-            rhs_multibuffer
-                .paths()
-                .filter_map(|path| {
-                    let excerpt_id = rhs_multibuffer.excerpts_for_path(path).next()?;
-                    let snapshot = rhs_multibuffer.snapshot(cx);
-                    let buffer = snapshot.buffer_for_excerpt(excerpt_id)?;
+            let rhs_multibuffer_snapshot = rhs_multibuffer.snapshot(cx);
+            rhs_multibuffer_snapshot
+                .buffers_with_paths()
+                .filter_map(|(buffer, path)| {
                     let diff = rhs_multibuffer.diff_for(buffer.remote_id())?;
                     Some((path.clone(), diff))
                 })
                 .collect()
         };
 
-        let companion = cx.new(|_| {
-            Companion::new(
-                rhs_display_map_id,
-                convert_rhs_rows_to_lhs,
-                convert_lhs_rows_to_rhs,
-            )
-        });
-
         self.lhs = Some(lhs);
 
-        let paths_for_sync: Vec<_> = path_diffs
-            .into_iter()
-            .map(|(path, diff)| (path, vec![], diff))
-            .collect();
-        self.sync_lhs_for_paths(paths_for_sync, &companion, cx);
+        self.sync_lhs_for_paths(all_paths, &companion, cx);
 
         rhs_display_map.update(cx, |dm, cx| {
             dm.set_companion(Some((lhs_display_map, companion.clone())), cx);
@@ -1004,7 +1038,7 @@ impl SplittableEditor {
         cx.notify();
     }
 
-    pub fn set_excerpts_for_path(
+    pub fn update_excerpts_for_path(
         &mut self,
         path: PathKey,
         buffer: Entity<Buffer>,
@@ -1012,122 +1046,94 @@ impl SplittableEditor {
         context_line_count: u32,
         diff: Entity<BufferDiff>,
         cx: &mut Context<Self>,
-    ) -> (Vec<Range<Anchor>>, bool) {
+    ) -> bool {
+        let has_ranges = ranges.clone().into_iter().next().is_some();
         let Some(companion) = self.companion(cx) else {
             return self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
-                let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path(
+                let added_a_new_excerpt = rhs_multibuffer.update_excerpts_for_path(
                     path,
                     buffer.clone(),
                     ranges,
                     context_line_count,
                     cx,
                 );
-                if !anchors.is_empty()
+                if has_ranges
                     && rhs_multibuffer
                         .diff_for(buffer.read(cx).remote_id())
                         .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id())
                 {
                     rhs_multibuffer.add_diff(diff, cx);
                 }
-                (anchors, added_a_new_excerpt)
+                added_a_new_excerpt
             });
         };
 
-        let old_rhs_ids: Vec<ExcerptId> = self
-            .rhs_multibuffer
-            .read(cx)
-            .excerpts_for_path(&path)
-            .collect();
-
         let result = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
-            let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path(
+            let added_a_new_excerpt = rhs_multibuffer.update_excerpts_for_path(
                 path.clone(),
                 buffer.clone(),
                 ranges,
                 context_line_count,
                 cx,
             );
-            if !anchors.is_empty()
+            if has_ranges
                 && rhs_multibuffer
                     .diff_for(buffer.read(cx).remote_id())
                     .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id())
             {
                 rhs_multibuffer.add_diff(diff.clone(), cx);
             }
-            (anchors, added_a_new_excerpt)
+            added_a_new_excerpt
         });
 
-        self.sync_lhs_for_paths(vec![(path, old_rhs_ids, diff)], &companion, cx);
+        self.sync_lhs_for_paths(vec![(path, diff)], &companion, cx);
         result
     }
 
     fn expand_excerpts(
         &mut self,
-        excerpt_ids: impl Iterator<Item = ExcerptId> + Clone,
+        excerpt_anchors: impl Iterator<Item = Anchor> + Clone,
         lines: u32,
         direction: ExpandExcerptDirection,
         cx: &mut Context<Self>,
     ) {
         let Some(companion) = self.companion(cx) else {
             self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
-                rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx);
+                rhs_multibuffer.expand_excerpts(excerpt_anchors, lines, direction, cx);
             });
             return;
         };
 
-        let paths_with_old_ids: Vec<_> = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
+        let paths: Vec<_> = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
             let snapshot = rhs_multibuffer.snapshot(cx);
-            let paths = excerpt_ids
+            let paths = excerpt_anchors
                 .clone()
-                .filter_map(|excerpt_id| {
-                    let path = rhs_multibuffer.path_for_excerpt(excerpt_id)?;
-                    let buffer = snapshot.buffer_for_excerpt(excerpt_id)?;
-                    let diff = rhs_multibuffer.diff_for(buffer.remote_id())?;
-                    Some((path, diff))
+                .filter_map(|anchor| {
+                    let (anchor, _) = snapshot.anchor_to_buffer_anchor(anchor)?;
+                    let path = snapshot.path_for_buffer(anchor.buffer_id)?;
+                    let diff = rhs_multibuffer.diff_for(anchor.buffer_id)?;
+                    Some((path.clone(), diff))
                 })
                 .collect::<HashMap<_, _>>()
                 .into_iter()
-                .map(|(path, diff)| {
-                    let old_ids = rhs_multibuffer.excerpts_for_path(&path).collect();
-                    (path, old_ids, diff)
-                })
                 .collect();
-            rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx);
+            rhs_multibuffer.expand_excerpts(excerpt_anchors, lines, direction, cx);
             paths
         });
 
-        self.sync_lhs_for_paths(paths_with_old_ids, &companion, cx);
+        self.sync_lhs_for_paths(paths, &companion, cx);
     }
 
     pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context<Self>) {
-        let Some(lhs) = &self.lhs else {
-            self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
-                rhs_multibuffer.remove_excerpts_for_path(path, cx);
-            });
-            return;
-        };
-
-        let rhs_excerpt_ids: Vec<ExcerptId> = self
-            .rhs_multibuffer
-            .read(cx)
-            .excerpts_for_path(&path)
-            .collect();
-        let lhs_excerpt_ids: Vec<ExcerptId> =
-            lhs.multibuffer.read(cx).excerpts_for_path(&path).collect();
+        self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
+            rhs_multibuffer.remove_excerpts(path.clone(), cx);
+        });
 
-        let rhs_display_map = self.rhs_editor.read(cx).display_map.clone();
-        if let Some(companion) = rhs_display_map.read(cx).companion().cloned() {
-            companion.update(cx, |c, _| {
-                c.remove_excerpt_mappings(lhs_excerpt_ids, rhs_excerpt_ids);
+        if let Some(lhs) = &self.lhs {
+            lhs.multibuffer.update(cx, |lhs_multibuffer, cx| {
+                lhs_multibuffer.remove_excerpts(path, cx);
             });
         }
-
-        self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
-            rhs_multibuffer.remove_excerpts_for_path(path.clone(), cx);
-        });
-        lhs.multibuffer.update(cx, |lhs_multibuffer, cx| {
-            lhs_multibuffer.remove_excerpts_for_path(path, cx);
-        });
     }
 
     fn search_token(&self) -> SearchToken {
@@ -1151,122 +1157,95 @@ impl SplittableEditor {
 
     fn sync_lhs_for_paths(
         &self,
-        paths_with_old_rhs_ids: Vec<(PathKey, Vec<ExcerptId>, Entity<BufferDiff>)>,
+        paths: Vec<(PathKey, Entity<BufferDiff>)>,
         companion: &Entity<Companion>,
         cx: &mut Context<Self>,
     ) {
         let Some(lhs) = &self.lhs else { return };
 
         self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
-            for (path, old_rhs_ids, diff) in paths_with_old_rhs_ids {
-                let old_lhs_ids: Vec<ExcerptId> =
-                    lhs.multibuffer.read(cx).excerpts_for_path(&path).collect();
-
-                companion.update(cx, |c, _| {
-                    c.remove_excerpt_mappings(old_lhs_ids, old_rhs_ids);
-                });
-
-                let rhs_excerpt_ids: Vec<ExcerptId> =
-                    rhs_multibuffer.excerpts_for_path(&path).collect();
-                let Some(excerpt_id) = rhs_excerpt_ids.first().copied() else {
+            for (path, diff) in paths {
+                let main_buffer_id = diff.read(cx).buffer_id;
+                let Some(main_buffer) = rhs_multibuffer.buffer(diff.read(cx).buffer_id) else {
                     lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| {
-                        lhs_multibuffer.remove_excerpts_for_path(path, lhs_cx);
+                        lhs_multibuffer.remove_excerpts(path, lhs_cx);
                     });
                     continue;
                 };
-                let Some(main_buffer_snapshot) = rhs_multibuffer
-                    .snapshot(cx)
-                    .buffer_for_excerpt(excerpt_id)
-                    .cloned()
-                else {
-                    continue;
-                };
-                let Some(main_buffer) = rhs_multibuffer.buffer(main_buffer_snapshot.remote_id())
-                else {
-                    continue;
-                };
+                let main_buffer_snapshot = main_buffer.read(cx).snapshot();
 
                 let base_text_buffer = diff.read(cx).base_text_buffer().clone();
                 let diff_snapshot = diff.read(cx).snapshot(cx);
                 let base_text_buffer_snapshot = base_text_buffer.read(cx).snapshot();
 
-                let lhs_ranges: Vec<ExcerptRange<Point>> = rhs_multibuffer
-                    .excerpts_for_buffer(main_buffer_snapshot.remote_id(), cx)
+                let mut paired_ranges: Vec<(Range<Point>, ExcerptRange<text::Anchor>)> = Vec::new();
+
+                let mut have_excerpt = false;
+                let mut did_merge = false;
+                let rhs_multibuffer_snapshot = rhs_multibuffer.snapshot(cx);
+                for info in rhs_multibuffer_snapshot.excerpts_for_buffer(main_buffer_id) {
+                    have_excerpt = true;
+                    let rhs_context = info.context.to_point(&main_buffer_snapshot);
+                    let lhs_context = rhs_range_to_base_text_range(
+                        &rhs_context,
+                        &diff_snapshot,
+                        &main_buffer_snapshot,
+                    );
+
+                    if let Some((prev_lhs_context, prev_rhs_range)) = paired_ranges.last_mut()
+                        && prev_lhs_context.end >= lhs_context.start
+                    {
+                        did_merge = true;
+                        prev_lhs_context.end = lhs_context.end;
+                        prev_rhs_range.context.end = info.context.end;
+                        continue;
+                    }
+
+                    paired_ranges.push((lhs_context, info));
+                }
+
+                let (lhs_ranges, rhs_ranges): (Vec<_>, Vec<_>) = paired_ranges.into_iter().unzip();
+                let lhs_ranges = lhs_ranges
                     .into_iter()
-                    .filter(|(id, _, _)| rhs_excerpt_ids.contains(id))
-                    .map(|(_, _, excerpt_range)| {
-                        let to_base_text = |range: Range<Point>| {
-                            let start = diff_snapshot
-                                .buffer_point_to_base_text_range(
-                                    Point::new(range.start.row, 0),
-                                    &main_buffer_snapshot,
-                                )
-                                .start;
-                            let end = diff_snapshot
-                                .buffer_point_to_base_text_range(
-                                    Point::new(range.end.row, 0),
-                                    &main_buffer_snapshot,
-                                )
-                                .end;
-                            let end_column = diff_snapshot.base_text().line_len(end.row);
-                            Point::new(start.row, 0)..Point::new(end.row, end_column)
-                        };
-                        let primary = excerpt_range.primary.to_point(&main_buffer_snapshot);
-                        let context = excerpt_range.context.to_point(&main_buffer_snapshot);
-                        ExcerptRange {
-                            primary: to_base_text(primary),
-                            context: to_base_text(context),
-                        }
+                    .map(|range| {
+                        ExcerptRange::new(base_text_buffer_snapshot.anchor_range_outside(range))
                     })
-                    .collect();
+                    .collect::<Vec<_>>();
 
-                let groups = lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| {
-                    let lhs_result = lhs_multibuffer.update_path_excerpts(
-                        path,
+                lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| {
+                    lhs_multibuffer.update_path_excerpts(
+                        path.clone(),
                         base_text_buffer,
                         &base_text_buffer_snapshot,
-                        lhs_ranges,
+                        &lhs_ranges,
                         lhs_cx,
                     );
-                    if !lhs_result.excerpt_ids.is_empty()
+                    if have_excerpt
                         && lhs_multibuffer
                             .diff_for(base_text_buffer_snapshot.remote_id())
                             .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id())
                     {
-                        lhs_multibuffer.add_inverted_diff(diff.clone(), main_buffer, lhs_cx);
-                    }
-
-                    let mut groups = Vec::new();
-                    for (lhs_id, chunk) in &lhs_result
-                        .excerpt_ids
-                        .iter()
-                        .copied()
-                        .zip(rhs_excerpt_ids)
-                        .chunk_by(|(lhs_id, _)| *lhs_id)
-                    {
-                        groups.push((lhs_id, chunk.map(|(_, rhs_id)| rhs_id).collect::<Vec<_>>()));
+                        lhs_multibuffer.add_inverted_diff(
+                            diff.clone(),
+                            main_buffer.clone(),
+                            lhs_cx,
+                        );
                     }
-                    groups
                 });
 
-                let pairs = groups
-                    .into_iter()
-                    .map(|(lhs_id, rhs_group)| {
-                        let rhs_id = if rhs_group.len() == 1 {
-                            rhs_group[0]
-                        } else {
-                            rhs_multibuffer.merge_excerpts(&rhs_group, cx)
-                        };
-                        (lhs_id, rhs_id)
-                    })
-                    .collect::<Vec<_>>();
+                if did_merge {
+                    rhs_multibuffer.update_path_excerpts(
+                        path,
+                        main_buffer,
+                        &main_buffer_snapshot,
+                        &rhs_ranges,
+                        cx,
+                    );
+                }
 
                 let lhs_buffer_id = diff.read(cx).base_text(cx).remote_id();
                 let rhs_buffer_id = diff.read(cx).buffer_id;
                 companion.update(cx, |c, _| {
-                    for (lhs_id, rhs_id) in pairs {
-                        c.add_excerpt_mapping(lhs_id, rhs_id);
-                    }
                     c.add_buffer_mapping(lhs_buffer_id, rhs_buffer_id);
                 });
             }
@@ -1312,7 +1291,7 @@ impl SplittableEditor {
         use crate::display_map::DisplayRow;
 
         self.debug_print(cx);
-        self.check_excerpt_mapping_invariants(cx);
+        self.check_excerpt_invariants(quiesced, cx);
 
         let lhs = self.lhs.as_ref().unwrap();
 
@@ -1362,15 +1341,21 @@ impl SplittableEditor {
 
                 let (lhs_point, rhs_point) =
                     if lhs_hunk.row_range.is_empty() || rhs_hunk.row_range.is_empty() {
+                        use multi_buffer::ToPoint as _;
+
                         let lhs_end = Point::new(lhs_hunk.row_range.end.0, 0);
                         let rhs_end = Point::new(rhs_hunk.row_range.end.0, 0);
 
-                        let lhs_exceeds = lhs_snapshot
-                            .range_for_excerpt(lhs_hunk.excerpt_id)
-                            .map_or(false, |range| lhs_end >= range.end);
-                        let rhs_exceeds = rhs_snapshot
-                            .range_for_excerpt(rhs_hunk.excerpt_id)
-                            .map_or(false, |range| rhs_end >= range.end);
+                        let lhs_excerpt_end = lhs_snapshot
+                            .anchor_in_excerpt(lhs_hunk.excerpt_range.context.end)
+                            .unwrap()
+                            .to_point(&lhs_snapshot);
+                        let lhs_exceeds = lhs_end >= lhs_excerpt_end;
+                        let rhs_excerpt_end = rhs_snapshot
+                            .anchor_in_excerpt(rhs_hunk.excerpt_range.context.end)
+                            .unwrap()
+                            .to_point(&rhs_snapshot);
+                        let rhs_exceeds = rhs_end >= rhs_excerpt_end;
                         if lhs_exceeds != rhs_exceeds {
                             continue;
                         }
@@ -1664,109 +1649,53 @@ impl SplittableEditor {
         eprintln!();
     }
 
-    fn check_excerpt_mapping_invariants(&self, cx: &gpui::App) {
-        use multi_buffer::{ExcerptId, PathKey};
-
+    fn check_excerpt_invariants(&self, quiesced: bool, cx: &gpui::App) {
         let lhs = self.lhs.as_ref().expect("should have lhs editor");
 
-        let rhs_excerpt_ids = self.rhs_multibuffer.read(cx).excerpt_ids();
-        let lhs_excerpt_ids = lhs.multibuffer.read(cx).excerpt_ids();
-        assert_eq!(
-            rhs_excerpt_ids.len(),
-            lhs_excerpt_ids.len(),
-            "excerpt count mismatch: rhs has {}, lhs has {}",
-            rhs_excerpt_ids.len(),
-            lhs_excerpt_ids.len(),
-        );
-
-        let rhs_display_map = self.rhs_editor.read(cx).display_map.clone();
-        let companion = rhs_display_map
-            .read(cx)
-            .companion()
-            .cloned()
-            .expect("should have companion");
-        let (lhs_to_rhs, rhs_to_lhs) = {
-            let c = companion.read(cx);
-            let (l, r) = c.excerpt_mappings();
-            (l.clone(), r.clone())
-        };
-
-        assert_eq!(
-            lhs_to_rhs.len(),
-            rhs_to_lhs.len(),
-            "mapping size mismatch: lhs_to_rhs has {}, rhs_to_lhs has {}",
-            lhs_to_rhs.len(),
-            rhs_to_lhs.len(),
-        );
+        let rhs_snapshot = self.rhs_multibuffer.read(cx).snapshot(cx);
+        let rhs_excerpts = rhs_snapshot.excerpts().collect::<Vec<_>>();
+        let lhs_snapshot = lhs.multibuffer.read(cx).snapshot(cx);
+        let lhs_excerpts = lhs_snapshot.excerpts().collect::<Vec<_>>();
+        assert_eq!(lhs_excerpts.len(), rhs_excerpts.len());
 
-        for (&lhs_id, &rhs_id) in &lhs_to_rhs {
-            let reverse = rhs_to_lhs.get(&rhs_id);
-            assert_eq!(
-                reverse,
-                Some(&lhs_id),
-                "lhs_to_rhs maps {lhs_id:?} -> {rhs_id:?}, but rhs_to_lhs maps {rhs_id:?} -> {reverse:?}",
-            );
-        }
-        for (&rhs_id, &lhs_id) in &rhs_to_lhs {
-            let reverse = lhs_to_rhs.get(&lhs_id);
+        for (lhs_excerpt, rhs_excerpt) in lhs_excerpts.into_iter().zip(rhs_excerpts) {
             assert_eq!(
-                reverse,
-                Some(&rhs_id),
-                "rhs_to_lhs maps {rhs_id:?} -> {lhs_id:?}, but lhs_to_rhs maps {lhs_id:?} -> {reverse:?}",
+                lhs_snapshot
+                    .path_for_buffer(lhs_excerpt.context.start.buffer_id)
+                    .unwrap(),
+                rhs_snapshot
+                    .path_for_buffer(rhs_excerpt.context.start.buffer_id)
+                    .unwrap(),
+                "corresponding excerpts should have the same path"
             );
-        }
-
-        assert_eq!(
-            lhs_to_rhs.len(),
-            rhs_excerpt_ids.len(),
-            "mapping covers {} excerpts but rhs has {}",
-            lhs_to_rhs.len(),
-            rhs_excerpt_ids.len(),
-        );
-
-        let rhs_mapped_order: Vec<ExcerptId> = rhs_excerpt_ids
-            .iter()
-            .map(|rhs_id| {
-                *rhs_to_lhs.get(rhs_id).unwrap_or_else(|| {
-                    panic!("rhs excerpt {rhs_id:?} has no mapping in rhs_to_lhs")
-                })
-            })
-            .collect();
-        assert_eq!(
-            rhs_mapped_order, lhs_excerpt_ids,
-            "excerpt ordering mismatch: mapping rhs order through rhs_to_lhs doesn't match lhs order",
-        );
-
-        let rhs_paths: Vec<PathKey> = self.rhs_multibuffer.read(cx).paths().cloned().collect();
-        let lhs_paths: Vec<PathKey> = lhs.multibuffer.read(cx).paths().cloned().collect();
-        assert_eq!(
-            rhs_paths, lhs_paths,
-            "path set mismatch between rhs and lhs"
-        );
-
-        for path in &rhs_paths {
-            let rhs_path_excerpts: Vec<ExcerptId> = self
+            let diff = self
                 .rhs_multibuffer
                 .read(cx)
-                .excerpts_for_path(path)
-                .collect();
-            let lhs_path_excerpts: Vec<ExcerptId> =
-                lhs.multibuffer.read(cx).excerpts_for_path(path).collect();
+                .diff_for(rhs_excerpt.context.start.buffer_id)
+                .expect("missing diff");
             assert_eq!(
-                rhs_path_excerpts.len(),
-                lhs_path_excerpts.len(),
-                "excerpt count mismatch for path {path:?}: rhs has {}, lhs has {}",
-                rhs_path_excerpts.len(),
-                lhs_path_excerpts.len(),
-            );
-            let rhs_path_mapped: Vec<ExcerptId> = rhs_path_excerpts
-                .iter()
-                .map(|rhs_id| *rhs_to_lhs.get(rhs_id).unwrap())
-                .collect();
-            assert_eq!(
-                rhs_path_mapped, lhs_path_excerpts,
-                "per-path excerpt ordering mismatch for {path:?}",
+                lhs_excerpt.context.start.buffer_id,
+                diff.read(cx).base_text(cx).remote_id(),
+                "corresponding lhs excerpt should show diff base text"
             );
+
+            if quiesced {
+                let diff_snapshot = diff.read(cx).snapshot(cx);
+                let lhs_buffer_snapshot = lhs_snapshot
+                    .buffer_for_id(lhs_excerpt.context.start.buffer_id)
+                    .unwrap();
+                let rhs_buffer_snapshot = rhs_snapshot
+                    .buffer_for_id(rhs_excerpt.context.start.buffer_id)
+                    .unwrap();
+                let lhs_range = lhs_excerpt.context.to_point(&lhs_buffer_snapshot);
+                let rhs_range = rhs_excerpt.context.to_point(&rhs_buffer_snapshot);
+                let expected_lhs_range =
+                    rhs_range_to_base_text_range(&rhs_range, &diff_snapshot, &rhs_buffer_snapshot);
+                assert_eq!(
+                    lhs_range, expected_lhs_range,
+                    "corresponding lhs excerpt should have a matching range"
+                )
+            }
         }
     }
 }
@@ -2316,7 +2245,7 @@ mod tests {
                 let context_lines = rng.random_range(0..2);
                 editor.update(cx, |editor, cx| {
                     let path = PathKey::for_buffer(&buffer, cx);
-                    editor.set_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx);
+                    editor.update_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx);
                 });
                 editor.update(cx, |editor, cx| {
                     editor.check_invariants(true, cx);
@@ -2351,7 +2280,14 @@ mod tests {
                     let context_lines = rng.random_range(0..2);
                     editor.update(cx, |editor, cx| {
                         let path = PathKey::for_buffer(&buffer, cx);
-                        editor.set_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx);
+                        editor.update_excerpts_for_path(
+                            path,
+                            buffer,
+                            ranges,
+                            context_lines,
+                            diff,
+                            cx,
+                        );
                     });
                 }
                 15..=29 => {

crates/editor/src/split_editor_view.rs πŸ”—

@@ -7,7 +7,7 @@ use gpui::{
     ParentElement, Pixels, StatefulInteractiveElement, Styled, TextStyleRefinement, Window, div,
     linear_color_stop, linear_gradient, point, px, size,
 };
-use multi_buffer::{Anchor, ExcerptId};
+use multi_buffer::{Anchor, ExcerptBoundaryInfo};
 use settings::Settings;
 use smallvec::smallvec;
 use text::BufferId;
@@ -429,7 +429,7 @@ impl SplitBufferHeadersElement {
 
         let sticky_header_excerpt_id = snapshot
             .sticky_header_excerpt(scroll_position.y)
-            .map(|e| e.excerpt.id);
+            .map(|e| e.excerpt);
 
         let non_sticky_headers = self.build_non_sticky_headers(
             &snapshot,
@@ -476,9 +476,10 @@ impl SplitBufferHeadersElement {
         let mut anchors_by_buffer: HashMap<BufferId, (usize, Anchor)> = HashMap::default();
         for selection in all_anchor_selections.iter() {
             let head = selection.head();
-            if let Some(buffer_id) = head.text_anchor.buffer_id {
+            if let Some((text_anchor, _)) = snapshot.buffer_snapshot().anchor_to_buffer_anchor(head)
+            {
                 anchors_by_buffer
-                    .entry(buffer_id)
+                    .entry(text_anchor.buffer_id)
                     .and_modify(|(latest_id, latest_anchor)| {
                         if selection.id > *latest_id {
                             *latest_id = selection.id;
@@ -520,7 +521,7 @@ impl SplitBufferHeadersElement {
         );
 
         let editor_bg_color = cx.theme().colors().editor_background;
-        let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
+        let selected = selected_buffer_ids.contains(&excerpt.buffer_id());
 
         let mut header = v_flex()
             .id("sticky-buffer-header")
@@ -594,7 +595,7 @@ impl SplitBufferHeadersElement {
         end_row: DisplayRow,
         selected_buffer_ids: &HashSet<BufferId>,
         latest_selection_anchors: &HashMap<BufferId, Anchor>,
-        sticky_header_excerpt_id: Option<ExcerptId>,
+        sticky_header: Option<&ExcerptBoundaryInfo>,
         window: &mut Window,
         cx: &mut App,
     ) -> Vec<BufferHeaderLayout> {
@@ -603,7 +604,7 @@ impl SplitBufferHeadersElement {
         for (block_row, block) in snapshot.blocks_in_range(start_row..end_row) {
             let (excerpt, is_folded) = match block {
                 Block::BufferHeader { excerpt, .. } => {
-                    if sticky_header_excerpt_id == Some(excerpt.id) {
+                    if sticky_header == Some(excerpt) {
                         continue;
                     }
                     (excerpt, false)
@@ -613,7 +614,7 @@ impl SplitBufferHeadersElement {
                 Block::ExcerptBoundary { .. } | Block::Custom(_) | Block::Spacer { .. } => continue,
             };
 
-            let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
+            let selected = selected_buffer_ids.contains(&excerpt.buffer_id());
             let jump_data = header_jump_data(
                 snapshot,
                 block_row,

crates/editor/src/tasks.rs πŸ”—

@@ -0,0 +1,101 @@
+use crate::Editor;
+
+use collections::HashMap;
+use gpui::{App, Task, Window};
+use lsp::LanguageServerName;
+use project::{Location, project_settings::ProjectSettings};
+use settings::Settings as _;
+use task::{TaskContext, TaskVariables, VariableName};
+use text::{BufferId, ToOffset, ToPoint};
+
+impl Editor {
+    pub fn task_context(&self, window: &mut Window, cx: &mut App) -> Task<Option<TaskContext>> {
+        let Some(project) = self.project.clone() else {
+            return Task::ready(None);
+        };
+        let display_snapshot = self.display_snapshot(cx);
+        let selection = self.selections.newest_adjusted(&display_snapshot);
+        let start = display_snapshot
+            .buffer_snapshot()
+            .anchor_after(selection.start);
+        let end = display_snapshot
+            .buffer_snapshot()
+            .anchor_after(selection.end);
+        let Some((buffer_snapshot, range)) = display_snapshot
+            .buffer_snapshot()
+            .anchor_range_to_buffer_anchor_range(start..end)
+        else {
+            return Task::ready(None);
+        };
+        let Some(buffer) = self.buffer.read(cx).buffer(buffer_snapshot.remote_id()) else {
+            return Task::ready(None);
+        };
+        let location = Location { buffer, range };
+        let captured_variables = {
+            let mut variables = TaskVariables::default();
+            let buffer = location.buffer.read(cx);
+            let buffer_id = buffer.remote_id();
+            let snapshot = buffer.snapshot();
+            let starting_point = location.range.start.to_point(&snapshot);
+            let starting_offset = starting_point.to_offset(&snapshot);
+            for (_, tasks) in self
+                .tasks
+                .range((buffer_id, 0)..(buffer_id, starting_point.row + 1))
+            {
+                if !tasks
+                    .context_range
+                    .contains(&crate::BufferOffset(starting_offset))
+                {
+                    continue;
+                }
+                for (capture_name, value) in tasks.extra_variables.iter() {
+                    variables.insert(
+                        VariableName::Custom(capture_name.to_owned().into()),
+                        value.clone(),
+                    );
+                }
+            }
+            variables
+        };
+
+        project.update(cx, |project, cx| {
+            project.task_store().update(cx, |task_store, cx| {
+                task_store.task_context_for_location(captured_variables, location, cx)
+            })
+        })
+    }
+
+    pub fn lsp_task_sources(&self, cx: &App) -> HashMap<LanguageServerName, Vec<BufferId>> {
+        let lsp_settings = &ProjectSettings::get_global(cx).lsp;
+
+        self.buffer()
+            .read(cx)
+            .all_buffers()
+            .into_iter()
+            .filter_map(|buffer| {
+                let lsp_tasks_source = buffer
+                    .read(cx)
+                    .language()?
+                    .context_provider()?
+                    .lsp_task_source()?;
+                if lsp_settings
+                    .get(&lsp_tasks_source)
+                    .is_none_or(|s| s.enable_lsp_tasks)
+                {
+                    let buffer_id = buffer.read(cx).remote_id();
+                    Some((lsp_tasks_source, buffer_id))
+                } else {
+                    None
+                }
+            })
+            .fold(
+                HashMap::default(),
+                |mut acc, (lsp_task_source, buffer_id)| {
+                    acc.entry(lsp_task_source)
+                        .or_insert_with(Vec::new)
+                        .push(buffer_id);
+                    acc
+                },
+            )
+    }
+}

crates/editor/src/test.rs πŸ”—

@@ -245,7 +245,7 @@ pub fn editor_content_with_blocks_and_size(
                     format!(
                         "Β§ {}",
                         first_excerpt
-                            .buffer
+                            .buffer(snapshot.buffer_snapshot())
                             .file()
                             .map(|file| file.file_name(cx))
                             .unwrap_or("<no file>")
@@ -274,7 +274,7 @@ pub fn editor_content_with_blocks_and_size(
                     format!(
                         "Β§ {}",
                         excerpt
-                            .buffer
+                            .buffer(snapshot.buffer_snapshot())
                             .file()
                             .map(|file| file.file_name(cx))
                             .unwrap_or("<no file>")

crates/editor/src/test/editor_test_context.rs πŸ”—

@@ -1,5 +1,5 @@
 use crate::{
-    AnchorRangeExt, DisplayPoint, Editor, ExcerptId, MultiBuffer, MultiBufferSnapshot, RowExt,
+    DisplayPoint, Editor, MultiBuffer, MultiBufferSnapshot, RowExt,
     display_map::{HighlightKey, ToDisplayPoint},
 };
 use buffer_diff::DiffHunkStatusKind;
@@ -13,7 +13,9 @@ use gpui::{
 };
 use itertools::Itertools;
 use language::{Buffer, BufferSnapshot, LanguageRegistry};
-use multi_buffer::{Anchor, ExcerptRange, MultiBufferOffset, MultiBufferRow, PathKey};
+use multi_buffer::{
+    Anchor, AnchorRangeExt, ExcerptRange, MultiBufferOffset, MultiBufferRow, PathKey,
+};
 use parking_lot::RwLock;
 use project::{FakeFs, Project};
 use std::{
@@ -464,7 +466,21 @@ impl EditorTestContext {
             let selections = editor.selections.disjoint_anchors_arc();
             let excerpts = multibuffer_snapshot
                 .excerpts()
-                .map(|(e_id, snapshot, range)| (e_id, snapshot.clone(), range))
+                .map(|info| {
+                    (
+                        multibuffer_snapshot
+                            .buffer_for_id(info.context.start.buffer_id)
+                            .cloned()
+                            .unwrap(),
+                        multibuffer_snapshot
+                            .anchor_in_excerpt(info.context.start)
+                            .unwrap()
+                            ..multibuffer_snapshot
+                                .anchor_in_excerpt(info.context.end)
+                                .unwrap(),
+                        info,
+                    )
+                })
                 .collect::<Vec<_>>();
 
             (multibuffer_snapshot, selections, excerpts)
@@ -478,14 +494,23 @@ impl EditorTestContext {
             fmt_additional_notes(),
         );
 
-        for (ix, (excerpt_id, snapshot, range)) in excerpts.into_iter().enumerate() {
+        for (ix, (snapshot, multibuffer_range, excerpt_range)) in excerpts.into_iter().enumerate() {
             let is_folded = self
                 .update_editor(|editor, _, cx| editor.is_buffer_folded(snapshot.remote_id(), cx));
             let (expected_text, expected_selections) =
                 marked_text_ranges(expected_excerpts[ix], true);
             if expected_text == "[FOLDED]\n" {
                 assert!(is_folded, "excerpt {} should be folded", ix);
-                let is_selected = selections.iter().any(|s| s.head().excerpt_id == excerpt_id);
+                let is_selected = selections.iter().any(|s| {
+                    multibuffer_range
+                        .start
+                        .cmp(&s.head(), &multibuffer_snapshot)
+                        .is_le()
+                        && multibuffer_range
+                            .end
+                            .cmp(&s.head(), &multibuffer_snapshot)
+                            .is_ge()
+                });
                 if !expected_selections.is_empty() {
                     assert!(
                         is_selected,
@@ -510,7 +535,7 @@ impl EditorTestContext {
             );
             assert_eq!(
                 multibuffer_snapshot
-                    .text_for_range(Anchor::range_in_buffer(excerpt_id, range.context.clone()))
+                    .text_for_range(multibuffer_range.clone())
                     .collect::<String>(),
                 expected_text,
                 "{}",
@@ -519,13 +544,24 @@ impl EditorTestContext {
 
             let selections = selections
                 .iter()
-                .filter(|s| s.head().excerpt_id == excerpt_id)
-                .map(|s| {
-                    let head = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot)
-                        - text::ToOffset::to_offset(&range.context.start, &snapshot);
-                    let tail = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot)
-                        - text::ToOffset::to_offset(&range.context.start, &snapshot);
-                    tail..head
+                .filter(|s| {
+                    multibuffer_range
+                        .start
+                        .cmp(&s.head(), &multibuffer_snapshot)
+                        .is_le()
+                        && multibuffer_range
+                            .end
+                            .cmp(&s.head(), &multibuffer_snapshot)
+                            .is_ge()
+                })
+                .filter_map(|s| {
+                    let (head_anchor, buffer_snapshot) =
+                        multibuffer_snapshot.anchor_to_buffer_anchor(s.head())?;
+                    let head = text::ToOffset::to_offset(&head_anchor, buffer_snapshot)
+                        - text::ToOffset::to_offset(&excerpt_range.context.start, buffer_snapshot);
+                    let tail = text::ToOffset::to_offset(&head_anchor, buffer_snapshot)
+                        - text::ToOffset::to_offset(&excerpt_range.context.start, buffer_snapshot);
+                    Some(tail..head)
                 })
                 .collect::<Vec<_>>();
             // todo: selections that cross excerpt boundaries..
@@ -546,9 +582,12 @@ impl EditorTestContext {
             let selections = editor.selections.disjoint_anchors_arc().to_vec();
             let excerpts = multibuffer_snapshot
                 .excerpts()
-                .map(|(e_id, snapshot, range)| {
-                    let is_folded = editor.is_buffer_folded(snapshot.remote_id(), cx);
-                    (e_id, snapshot.clone(), range, is_folded)
+                .map(|info| {
+                    let buffer_snapshot = multibuffer_snapshot
+                        .buffer_for_id(info.context.start.buffer_id)
+                        .unwrap();
+                    let is_folded = editor.is_buffer_folded(buffer_snapshot.remote_id(), cx);
+                    (buffer_snapshot.clone(), info, is_folded)
                 })
                 .collect::<Vec<_>>();
 
@@ -673,7 +712,7 @@ impl EditorTestContext {
 struct FormatMultiBufferAsMarkedText {
     multibuffer_snapshot: MultiBufferSnapshot,
     selections: Vec<Selection<Anchor>>,
-    excerpts: Vec<(ExcerptId, BufferSnapshot, ExcerptRange<text::Anchor>, bool)>,
+    excerpts: Vec<(BufferSnapshot, ExcerptRange<text::Anchor>, bool)>,
 }
 
 impl std::fmt::Display for FormatMultiBufferAsMarkedText {
@@ -684,25 +723,40 @@ impl std::fmt::Display for FormatMultiBufferAsMarkedText {
             excerpts,
         } = self;
 
-        for (excerpt_id, snapshot, range, is_folded) in excerpts.into_iter() {
+        for (_snapshot, range, is_folded) in excerpts.into_iter() {
             write!(f, "[EXCERPT]\n")?;
             if *is_folded {
                 write!(f, "[FOLDED]\n")?;
             }
 
+            let multibuffer_range = multibuffer_snapshot
+                .buffer_anchor_range_to_anchor_range(range.context.clone())
+                .unwrap();
+
             let mut text = multibuffer_snapshot
-                .text_for_range(Anchor::range_in_buffer(*excerpt_id, range.context.clone()))
+                .text_for_range(multibuffer_range.clone())
                 .collect::<String>();
 
             let selections = selections
                 .iter()
-                .filter(|&s| s.head().excerpt_id == *excerpt_id)
-                .map(|s| {
-                    let head = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot)
-                        - text::ToOffset::to_offset(&range.context.start, &snapshot);
-                    let tail = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot)
-                        - text::ToOffset::to_offset(&range.context.start, &snapshot);
-                    tail..head
+                .filter(|&s| {
+                    multibuffer_range
+                        .start
+                        .cmp(&s.head(), multibuffer_snapshot)
+                        .is_le()
+                        && multibuffer_range
+                            .end
+                            .cmp(&s.head(), multibuffer_snapshot)
+                            .is_ge()
+                })
+                .filter_map(|s| {
+                    let (head_anchor, buffer_snapshot) =
+                        multibuffer_snapshot.anchor_to_buffer_anchor(s.head())?;
+                    let head = text::ToOffset::to_offset(&head_anchor, buffer_snapshot)
+                        - text::ToOffset::to_offset(&range.context.start, buffer_snapshot);
+                    let tail = text::ToOffset::to_offset(&head_anchor, buffer_snapshot)
+                        - text::ToOffset::to_offset(&range.context.start, buffer_snapshot);
+                    Some(tail..head)
                 })
                 .rev()
                 .collect::<Vec<_>>();

crates/encoding_selector/src/active_buffer_encoding.rs πŸ”—

@@ -47,7 +47,7 @@ impl ActiveBufferEncoding {
         self.is_shared = project.is_shared();
         self.is_via_remote_server = project.is_via_remote_server();
 
-        if let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) {
+        if let Some(buffer) = editor.read(cx).active_buffer(cx) {
             let buffer = buffer.read(cx);
             self.active_encoding = Some(buffer.encoding());
             self.has_bom = buffer.has_bom();

crates/encoding_selector/src/encoding_selector.rs πŸ”—

@@ -47,11 +47,11 @@ impl EncodingSelector {
         window: &mut Window,
         cx: &mut Context<Workspace>,
     ) -> Option<()> {
-        let (_, buffer, _) = workspace
+        let buffer = workspace
             .active_item(cx)?
             .act_as::<Editor>(cx)?
             .read(cx)
-            .active_excerpt(cx)?;
+            .active_buffer(cx)?;
 
         let buffer_handle = buffer.read(cx);
         let project = workspace.project().read(cx);

crates/git_graph/src/git_graph.rs πŸ”—

@@ -41,9 +41,9 @@ use theme::AccentColors;
 use theme_settings::ThemeSettings;
 use time::{OffsetDateTime, UtcOffset, format_description::BorrowedFormatItem};
 use ui::{
-    ButtonLike, Chip, CommonAnimationExt as _, ContextMenu, DiffStat, Divider, HighlightedLabel,
-    ScrollableHandle, Table, TableColumnWidths, TableInteractionState, TableResizeBehavior,
-    Tooltip, WithScrollbar, prelude::*,
+    ButtonLike, Chip, ColumnWidthConfig, CommonAnimationExt as _, ContextMenu, DiffStat, Divider,
+    HighlightedLabel, RedistributableColumnsState, ScrollableHandle, Table, TableInteractionState,
+    TableResizeBehavior, Tooltip, WithScrollbar, prelude::*,
 };
 use workspace::{
     Workspace,
@@ -901,7 +901,7 @@ pub struct GitGraph {
     context_menu: Option<(Entity<ContextMenu>, Point<Pixels>, Subscription)>,
     row_height: Pixels,
     table_interaction_state: Entity<TableInteractionState>,
-    table_column_widths: Entity<TableColumnWidths>,
+    table_column_widths: Entity<RedistributableColumnsState>,
     horizontal_scroll_offset: Pixels,
     graph_viewport_width: Pixels,
     selected_entry_idx: Option<usize>,
@@ -972,7 +972,23 @@ impl GitGraph {
         });
 
         let table_interaction_state = cx.new(|cx| TableInteractionState::new(cx));
-        let table_column_widths = cx.new(|cx| TableColumnWidths::new(4, cx));
+        let table_column_widths = cx.new(|_cx| {
+            RedistributableColumnsState::new(
+                4,
+                vec![
+                    DefiniteLength::Fraction(0.72),
+                    DefiniteLength::Fraction(0.12),
+                    DefiniteLength::Fraction(0.10),
+                    DefiniteLength::Fraction(0.06),
+                ],
+                vec![
+                    TableResizeBehavior::Resizable,
+                    TableResizeBehavior::Resizable,
+                    TableResizeBehavior::Resizable,
+                    TableResizeBehavior::Resizable,
+                ],
+            )
+        });
         let mut row_height = Self::row_height(cx);
 
         cx.observe_global_in::<settings::SettingsStore>(window, move |this, _window, cx| {
@@ -2172,6 +2188,8 @@ impl GitGraph {
                         builder.move_to(point(line_x, from_y));
 
                         let segments = &line.segments[start_segment_idx..];
+                        let desired_curve_height = row_height / 3.0;
+                        let desired_curve_width = LANE_WIDTH / 3.0;
 
                         for (segment_idx, segment) in segments.iter().enumerate() {
                             let is_last = segment_idx + 1 == segments.len();
@@ -2225,66 +2243,69 @@ impl GitGraph {
                                             if is_last {
                                                 to_column -= column_shift;
                                             }
-                                            builder.move_to(point(current_column, current_row));
 
-                                            if (to_column - current_column).abs() > LANE_WIDTH {
-                                                // Multi-lane checkout: straight down, small
-                                                // curve turn, then straight horizontal.
-                                                if (to_row - current_row).abs() > row_height {
-                                                    let vertical_end =
-                                                        point(current_column, to_row - row_height);
-                                                    builder.line_to(vertical_end);
-                                                    builder.move_to(vertical_end);
-                                                }
-
-                                                let lane_shift = if going_right {
-                                                    LANE_WIDTH
-                                                } else {
-                                                    -LANE_WIDTH
-                                                };
-                                                let curve_end =
-                                                    point(current_column + lane_shift, to_row);
-                                                let curve_control = point(current_column, to_row);
-                                                builder.curve_to(curve_end, curve_control);
-                                                builder.move_to(curve_end);
-
-                                                builder.line_to(point(to_column, to_row));
+                                            let available_curve_width =
+                                                (to_column - current_column).abs();
+                                            let available_curve_height =
+                                                (to_row - current_row).abs();
+                                            let curve_width =
+                                                desired_curve_width.min(available_curve_width);
+                                            let curve_height =
+                                                desired_curve_height.min(available_curve_height);
+                                            let signed_curve_width = if going_right {
+                                                curve_width
                                             } else {
-                                                if (to_row - current_row).abs() > row_height {
-                                                    let start_curve =
-                                                        point(current_column, to_row - row_height);
-                                                    builder.line_to(start_curve);
-                                                    builder.move_to(start_curve);
-                                                }
-                                                let control = point(current_column, to_row);
-                                                builder.curve_to(point(to_column, to_row), control);
-                                            }
+                                                -curve_width
+                                            };
+                                            let curve_start =
+                                                point(current_column, to_row - curve_height);
+                                            let curve_end =
+                                                point(current_column + signed_curve_width, to_row);
+                                            let curve_control = point(current_column, to_row);
+
+                                            builder.move_to(point(current_column, current_row));
+                                            builder.line_to(curve_start);
+                                            builder.move_to(curve_start);
+                                            builder.curve_to(curve_end, curve_control);
+                                            builder.move_to(curve_end);
+                                            builder.line_to(point(to_column, to_row));
                                         }
                                         CurveKind::Merge => {
                                             if is_last {
                                                 to_row -= COMMIT_CIRCLE_RADIUS;
                                             }
-                                            builder.move_to(point(
+
+                                            let merge_start = point(
                                                 current_column + column_shift,
                                                 current_row - COMMIT_CIRCLE_RADIUS,
-                                            ));
-
-                                            if (to_column - current_column).abs() > LANE_WIDTH {
-                                                let column_shift = if going_right {
-                                                    LANE_WIDTH
-                                                } else {
-                                                    -LANE_WIDTH
-                                                };
-                                                let start_curve = point(
-                                                    current_column + column_shift,
-                                                    current_row - COMMIT_CIRCLE_RADIUS,
-                                                );
-                                                builder.line_to(start_curve);
-                                                builder.move_to(start_curve);
-                                            }
-
-                                            let control = point(to_column, current_row);
-                                            builder.curve_to(point(to_column, to_row), control);
+                                            );
+                                            let available_curve_width =
+                                                (to_column - merge_start.x).abs();
+                                            let available_curve_height =
+                                                (to_row - merge_start.y).abs();
+                                            let curve_width =
+                                                desired_curve_width.min(available_curve_width);
+                                            let curve_height =
+                                                desired_curve_height.min(available_curve_height);
+                                            let signed_curve_width = if going_right {
+                                                curve_width
+                                            } else {
+                                                -curve_width
+                                            };
+                                            let curve_start = point(
+                                                to_column - signed_curve_width,
+                                                merge_start.y,
+                                            );
+                                            let curve_end =
+                                                point(to_column, merge_start.y + curve_height);
+                                            let curve_control = point(to_column, merge_start.y);
+
+                                            builder.move_to(merge_start);
+                                            builder.line_to(curve_start);
+                                            builder.move_to(curve_start);
+                                            builder.curve_to(curve_end, curve_control);
+                                            builder.move_to(curve_end);
+                                            builder.line_to(point(to_column, to_row));
                                         }
                                     }
                                     current_row = to_row;
@@ -2459,11 +2480,6 @@ impl Render for GitGraph {
             self.search_state.state = QueryState::Empty;
             self.search(query, cx);
         }
-        let description_width_fraction = 0.72;
-        let date_width_fraction = 0.12;
-        let author_width_fraction = 0.10;
-        let commit_width_fraction = 0.06;
-
         let (commit_count, is_loading) = match self.graph_data.max_commit_count {
             AllCommitCount::Loaded(count) => (count, true),
             AllCommitCount::NotLoaded => {
@@ -2523,7 +2539,10 @@ impl Render for GitGraph {
                         .flex_col()
                         .child(
                             div()
-                                .p_2()
+                                .flex()
+                                .items_center()
+                                .px_1()
+                                .py_0p5()
                                 .border_b_1()
                                 .whitespace_nowrap()
                                 .border_color(cx.theme().colors().border)
@@ -2565,25 +2584,9 @@ impl Render for GitGraph {
                                 Label::new("Author").color(Color::Muted).into_any_element(),
                                 Label::new("Commit").color(Color::Muted).into_any_element(),
                             ])
-                            .column_widths(
-                                [
-                                    DefiniteLength::Fraction(description_width_fraction),
-                                    DefiniteLength::Fraction(date_width_fraction),
-                                    DefiniteLength::Fraction(author_width_fraction),
-                                    DefiniteLength::Fraction(commit_width_fraction),
-                                ]
-                                .to_vec(),
-                            )
-                            .resizable_columns(
-                                vec![
-                                    TableResizeBehavior::Resizable,
-                                    TableResizeBehavior::Resizable,
-                                    TableResizeBehavior::Resizable,
-                                    TableResizeBehavior::Resizable,
-                                ],
-                                &self.table_column_widths,
-                                cx,
-                            )
+                            .width_config(ColumnWidthConfig::redistributable(
+                                self.table_column_widths.clone(),
+                            ))
                             .map_row(move |(index, row), window, cx| {
                                 let is_selected = selected_entry_idx == Some(index);
                                 let is_hovered = hovered_entry_idx == Some(index);

crates/git_ui/src/commit_view.rs πŸ”—

@@ -212,7 +212,7 @@ impl CommitView {
 
             editor.insert_blocks(
                 [BlockProperties {
-                    placement: BlockPlacement::Above(editor::Anchor::min()),
+                    placement: BlockPlacement::Above(editor::Anchor::Min),
                     height: Some(1),
                     style: BlockStyle::Sticky,
                     render: Arc::new(|_| gpui::Empty.into_any_element()),
@@ -223,7 +223,10 @@ impl CommitView {
                     editor
                         .buffer()
                         .read(cx)
-                        .buffer_anchor_to_anchor(&message_buffer, Anchor::MAX, cx)
+                        .snapshot(cx)
+                        .anchor_in_buffer(Anchor::max_for_buffer(
+                            message_buffer.read(cx).remote_id(),
+                        ))
                         .map(|anchor| BlockProperties {
                             placement: BlockPlacement::Below(anchor),
                             height: Some(1),

crates/git_ui/src/conflict_view.rs πŸ”—

@@ -2,7 +2,7 @@ use agent_settings::AgentSettings;
 use collections::{HashMap, HashSet};
 use editor::{
     ConflictsOurs, ConflictsOursMarker, ConflictsOuter, ConflictsTheirs, ConflictsTheirsMarker,
-    Editor, EditorEvent, ExcerptId, MultiBuffer, RowHighlightOptions,
+    Editor, EditorEvent, MultiBuffer, RowHighlightOptions,
     display_map::{BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId},
 };
 use gpui::{
@@ -67,62 +67,22 @@ pub fn register_editor(editor: &mut Editor, buffer: Entity<MultiBuffer>, cx: &mu
 
     let buffers = buffer.read(cx).all_buffers();
     for buffer in buffers {
-        buffer_added(editor, buffer, cx);
+        buffer_ranges_updated(editor, buffer, cx);
     }
 
     cx.subscribe(&cx.entity(), |editor, _, event, cx| match event {
-        EditorEvent::ExcerptsAdded { buffer, .. } => buffer_added(editor, buffer.clone(), cx),
-        EditorEvent::ExcerptsExpanded { ids } => {
-            let multibuffer = editor.buffer().read(cx).snapshot(cx);
-            for excerpt_id in ids {
-                let Some(buffer) = multibuffer.buffer_for_excerpt(*excerpt_id) else {
-                    continue;
-                };
-                let addon = editor.addon::<ConflictAddon>().unwrap();
-                let Some(conflict_set) = addon.conflict_set(buffer.remote_id()).clone() else {
-                    return;
-                };
-                excerpt_for_buffer_updated(editor, conflict_set, cx);
-            }
+        EditorEvent::BufferRangesUpdated { buffer, .. } => {
+            buffer_ranges_updated(editor, buffer.clone(), cx)
+        }
+        EditorEvent::BuffersRemoved { removed_buffer_ids } => {
+            buffers_removed(editor, removed_buffer_ids, cx)
         }
-        EditorEvent::ExcerptsRemoved {
-            removed_buffer_ids, ..
-        } => buffers_removed(editor, removed_buffer_ids, cx),
         _ => {}
     })
     .detach();
 }
 
-fn excerpt_for_buffer_updated(
-    editor: &mut Editor,
-    conflict_set: Entity<ConflictSet>,
-    cx: &mut Context<Editor>,
-) {
-    let conflicts_len = conflict_set.read(cx).snapshot().conflicts.len();
-    let buffer_id = conflict_set.read(cx).snapshot().buffer_id;
-    let Some(buffer_conflicts) = editor
-        .addon_mut::<ConflictAddon>()
-        .unwrap()
-        .buffers
-        .get(&buffer_id)
-    else {
-        return;
-    };
-    let addon_conflicts_len = buffer_conflicts.block_ids.len();
-    conflicts_updated(
-        editor,
-        conflict_set,
-        &ConflictSetUpdate {
-            buffer_range: None,
-            old_range: 0..addon_conflicts_len,
-            new_range: 0..conflicts_len,
-        },
-        cx,
-    );
-}
-
-#[ztracing::instrument(skip_all)]
-fn buffer_added(editor: &mut Editor, buffer: Entity<Buffer>, cx: &mut Context<Editor>) {
+fn buffer_ranges_updated(editor: &mut Editor, buffer: Entity<Buffer>, cx: &mut Context<Editor>) {
     let Some(project) = editor.project() else {
         return;
     };
@@ -188,14 +148,6 @@ fn conflicts_updated(
     let conflict_set = conflict_set.read(cx).snapshot();
     let multibuffer = editor.buffer().read(cx);
     let snapshot = multibuffer.snapshot(cx);
-    let excerpts = multibuffer.excerpts_for_buffer(buffer_id, cx);
-    let Some(buffer_snapshot) = excerpts
-        .first()
-        .and_then(|(excerpt_id, _, _)| snapshot.buffer_for_excerpt(*excerpt_id))
-    else {
-        return;
-    };
-
     let old_range = maybe!({
         let conflict_addon = editor.addon_mut::<ConflictAddon>().unwrap();
         let buffer_conflicts = conflict_addon.buffers.get(&buffer_id)?;
@@ -230,23 +182,7 @@ fn conflicts_updated(
         let mut removed_highlighted_ranges = Vec::new();
         let mut removed_block_ids = HashSet::default();
         for (conflict_range, block_id) in old_conflicts {
-            let Some((excerpt_id, _, _)) = excerpts.iter().find(|(_, _, range)| {
-                let precedes_start = range
-                    .context
-                    .start
-                    .cmp(&conflict_range.start, buffer_snapshot)
-                    .is_le();
-                let follows_end = range
-                    .context
-                    .end
-                    .cmp(&conflict_range.start, buffer_snapshot)
-                    .is_ge();
-                precedes_start && follows_end
-            }) else {
-                continue;
-            };
-            let excerpt_id = *excerpt_id;
-            let Some(range) = snapshot.anchor_range_in_excerpt(excerpt_id, conflict_range) else {
+            let Some(range) = snapshot.buffer_anchor_range_to_anchor_range(conflict_range) else {
                 continue;
             };
             removed_highlighted_ranges.push(range.clone());
@@ -272,26 +208,9 @@ fn conflicts_updated(
     let new_conflicts = &conflict_set.conflicts[event.new_range.clone()];
     let mut blocks = Vec::new();
     for conflict in new_conflicts {
-        let Some((excerpt_id, _, _)) = excerpts.iter().find(|(_, _, range)| {
-            let precedes_start = range
-                .context
-                .start
-                .cmp(&conflict.range.start, buffer_snapshot)
-                .is_le();
-            let follows_end = range
-                .context
-                .end
-                .cmp(&conflict.range.start, buffer_snapshot)
-                .is_ge();
-            precedes_start && follows_end
-        }) else {
-            continue;
-        };
-        let excerpt_id = *excerpt_id;
-
-        update_conflict_highlighting(editor, conflict, &snapshot, excerpt_id, cx);
+        update_conflict_highlighting(editor, conflict, &snapshot, cx);
 
-        let Some(anchor) = snapshot.anchor_in_excerpt(excerpt_id, conflict.range.start) else {
+        let Some(anchor) = snapshot.anchor_in_excerpt(conflict.range.start) else {
             continue;
         };
 
@@ -302,7 +221,7 @@ fn conflicts_updated(
             style: BlockStyle::Sticky,
             render: Arc::new({
                 let conflict = conflict.clone();
-                move |cx| render_conflict_buttons(&conflict, excerpt_id, editor_handle.clone(), cx)
+                move |cx| render_conflict_buttons(&conflict, editor_handle.clone(), cx)
             }),
             priority: 0,
         })
@@ -328,14 +247,13 @@ fn update_conflict_highlighting(
     editor: &mut Editor,
     conflict: &ConflictRegion,
     buffer: &editor::MultiBufferSnapshot,
-    excerpt_id: editor::ExcerptId,
     cx: &mut Context<Editor>,
 ) -> Option<()> {
     log::debug!("update conflict highlighting for {conflict:?}");
 
-    let outer = buffer.anchor_range_in_excerpt(excerpt_id, conflict.range.clone())?;
-    let ours = buffer.anchor_range_in_excerpt(excerpt_id, conflict.ours.clone())?;
-    let theirs = buffer.anchor_range_in_excerpt(excerpt_id, conflict.theirs.clone())?;
+    let outer = buffer.buffer_anchor_range_to_anchor_range(conflict.range.clone())?;
+    let ours = buffer.buffer_anchor_range_to_anchor_range(conflict.ours.clone())?;
+    let theirs = buffer.buffer_anchor_range_to_anchor_range(conflict.theirs.clone())?;
 
     let ours_background = cx.theme().colors().version_control_conflict_marker_ours;
     let theirs_background = cx.theme().colors().version_control_conflict_marker_theirs;
@@ -373,7 +291,6 @@ fn update_conflict_highlighting(
 
 fn render_conflict_buttons(
     conflict: &ConflictRegion,
-    excerpt_id: ExcerptId,
     editor: WeakEntity<Editor>,
     cx: &mut BlockContext,
 ) -> AnyElement {
@@ -395,7 +312,6 @@ fn render_conflict_buttons(
                     move |_, window, cx| {
                         resolve_conflict(
                             editor.clone(),
-                            excerpt_id,
                             conflict.clone(),
                             vec![ours.clone()],
                             window,
@@ -415,7 +331,6 @@ fn render_conflict_buttons(
                     move |_, window, cx| {
                         resolve_conflict(
                             editor.clone(),
-                            excerpt_id,
                             conflict.clone(),
                             vec![theirs.clone()],
                             window,
@@ -436,7 +351,6 @@ fn render_conflict_buttons(
                     move |_, window, cx| {
                         resolve_conflict(
                             editor.clone(),
-                            excerpt_id,
                             conflict.clone(),
                             vec![ours.clone(), theirs.clone()],
                             window,
@@ -461,7 +375,7 @@ fn render_conflict_buttons(
                             let content = editor
                                 .update(cx, |editor, cx| {
                                     let multibuffer = editor.buffer().read(cx);
-                                    let buffer_id = conflict.ours.end.buffer_id?;
+                                    let buffer_id = conflict.ours.end.buffer_id;
                                     let buffer = multibuffer.buffer(buffer_id)?;
                                     let buffer_read = buffer.read(cx);
                                     let snapshot = buffer_read.snapshot();
@@ -589,7 +503,6 @@ pub(crate) fn register_conflict_notification(
 
 pub(crate) fn resolve_conflict(
     editor: WeakEntity<Editor>,
-    excerpt_id: ExcerptId,
     resolved_conflict: ConflictRegion,
     ranges: Vec<Range<Anchor>>,
     window: &mut Window,
@@ -601,7 +514,7 @@ pub(crate) fn resolve_conflict(
                 let workspace = editor.workspace()?;
                 let project = editor.project()?.clone();
                 let multibuffer = editor.buffer().clone();
-                let buffer_id = resolved_conflict.ours.end.buffer_id?;
+                let buffer_id = resolved_conflict.ours.end.buffer_id;
                 let buffer = multibuffer.read(cx).buffer(buffer_id)?;
                 resolved_conflict.resolve(buffer.clone(), &ranges, cx);
                 let conflict_addon = editor.addon_mut::<ConflictAddon>().unwrap();
@@ -620,7 +533,7 @@ pub(crate) fn resolve_conflict(
                     .ok()?;
                 let &(_, block_id) = &state.block_ids[ix];
                 let range =
-                    snapshot.anchor_range_in_excerpt(excerpt_id, resolved_conflict.range)?;
+                    snapshot.buffer_anchor_range_to_anchor_range(resolved_conflict.range)?;
 
                 editor.remove_gutter_highlights::<ConflictsOuter>(vec![range.clone()], cx);
 

crates/git_ui/src/git_panel.rs πŸ”—

@@ -49,7 +49,7 @@ use language_model::{
     LanguageModelRequestMessage, Role,
 };
 use menu;
-use multi_buffer::ExcerptInfo;
+use multi_buffer::ExcerptBoundaryInfo;
 use notifications::status_toast::{StatusToast, ToastIcon};
 use panel::{PanelHeader, panel_button, panel_filled_button, panel_icon_button};
 use project::{
@@ -5760,11 +5760,12 @@ impl editor::Addon for GitPanelAddon {
 
     fn render_buffer_header_controls(
         &self,
-        excerpt_info: &ExcerptInfo,
+        _excerpt_info: &ExcerptBoundaryInfo,
+        buffer: &language::BufferSnapshot,
         window: &Window,
         cx: &App,
     ) -> Option<AnyElement> {
-        let file = excerpt_info.buffer.file()?;
+        let file = buffer.file()?;
         let git_panel = self.workspace.upgrade()?.read(cx).panel::<GitPanel>(cx)?;
 
         git_panel

crates/git_ui/src/project_diff.rs πŸ”—

@@ -501,9 +501,11 @@ impl ProjectDiff {
 
     pub fn active_path(&self, cx: &App) -> Option<ProjectPath> {
         let editor = self.editor.read(cx).focused_editor().read(cx);
+        let multibuffer = editor.buffer().read(cx);
         let position = editor.selections.newest_anchor().head();
-        let multi_buffer = editor.buffer().read(cx);
-        let (_, buffer, _) = multi_buffer.excerpt_containing(position, cx)?;
+        let snapshot = multibuffer.snapshot(cx);
+        let (text_anchor, _) = snapshot.anchor_to_buffer_anchor(position)?;
+        let buffer = multibuffer.buffer(text_anchor.buffer_id)?;
 
         let file = buffer.read(cx).file()?;
         Some(ProjectPath {
@@ -516,9 +518,7 @@ impl ProjectDiff {
         self.editor.update(cx, |editor, cx| {
             editor.rhs_editor().update(cx, |editor, cx| {
                 editor.change_selections(Default::default(), window, cx, |s| {
-                    s.select_ranges(vec![
-                        multi_buffer::Anchor::min()..multi_buffer::Anchor::min(),
-                    ]);
+                    s.select_ranges(vec![multi_buffer::Anchor::Min..multi_buffer::Anchor::Min]);
                 });
             });
         });
@@ -569,17 +569,17 @@ impl ProjectDiff {
             .collect::<Vec<_>>();
         if !ranges.iter().any(|range| range.start != range.end) {
             selection = false;
-            if let Some((excerpt_id, _, range)) = self
-                .editor
-                .read(cx)
-                .rhs_editor()
-                .read(cx)
-                .active_excerpt(cx)
+            let anchor = editor.selections.newest_anchor().head();
+            if let Some((_, excerpt_range)) = snapshot.excerpt_containing(anchor..anchor)
+                && let Some(range) = snapshot
+                    .anchor_in_buffer(excerpt_range.context.start)
+                    .zip(snapshot.anchor_in_buffer(excerpt_range.context.end))
+                    .map(|(start, end)| start..end)
             {
-                ranges = vec![multi_buffer::Anchor::range_in_buffer(excerpt_id, range)];
+                ranges = vec![range];
             } else {
                 ranges = Vec::default();
-            }
+            };
         }
         let mut has_staged_hunks = false;
         let mut has_unstaged_hunks = false;
@@ -715,7 +715,7 @@ impl ProjectDiff {
 
         let (was_empty, is_excerpt_newly_added) = self.editor.update(cx, |editor, cx| {
             let was_empty = editor.rhs_editor().read(cx).buffer().read(cx).is_empty();
-            let (_, is_newly_added) = editor.set_excerpts_for_path(
+            let is_newly_added = editor.update_excerpts_for_path(
                 path_key.clone(),
                 buffer,
                 excerpt_ranges,
@@ -735,7 +735,7 @@ impl ProjectDiff {
                         cx,
                         |selections| {
                             selections.select_ranges([
-                                multi_buffer::Anchor::min()..multi_buffer::Anchor::min()
+                                multi_buffer::Anchor::Min..multi_buffer::Anchor::Min
                             ])
                         },
                     );
@@ -785,8 +785,9 @@ impl ProjectDiff {
             let mut previous_paths = this
                 .multibuffer
                 .read(cx)
-                .paths()
-                .cloned()
+                .snapshot(cx)
+                .buffers_with_paths()
+                .map(|(_, path_key)| path_key.clone())
                 .collect::<HashSet<_>>();
 
             if let Some(repo) = repo {
@@ -877,10 +878,23 @@ impl ProjectDiff {
 
     #[cfg(any(test, feature = "test-support"))]
     pub fn excerpt_paths(&self, cx: &App) -> Vec<std::sync::Arc<util::rel_path::RelPath>> {
-        self.multibuffer
+        let snapshot = self
+            .editor()
+            .read(cx)
+            .rhs_editor()
+            .read(cx)
+            .buffer()
             .read(cx)
-            .paths()
-            .map(|key| key.path.clone())
+            .snapshot(cx);
+        snapshot
+            .excerpts()
+            .map(|excerpt| {
+                snapshot
+                    .path_for_buffer(excerpt.context.start.buffer_id)
+                    .unwrap()
+                    .path
+                    .clone()
+            })
             .collect()
     }
 }
@@ -1937,7 +1951,7 @@ mod tests {
                 let snapshot = buffer_editor.snapshot(window, cx);
                 let snapshot = &snapshot.buffer_snapshot();
                 let prev_buffer_hunks = buffer_editor
-                    .diff_hunks_in_ranges(&[editor::Anchor::min()..editor::Anchor::max()], snapshot)
+                    .diff_hunks_in_ranges(&[editor::Anchor::Min..editor::Anchor::Max], snapshot)
                     .collect::<Vec<_>>();
                 buffer_editor.git_restore(&Default::default(), window, cx);
                 prev_buffer_hunks
@@ -1950,7 +1964,7 @@ mod tests {
                 let snapshot = buffer_editor.snapshot(window, cx);
                 let snapshot = &snapshot.buffer_snapshot();
                 buffer_editor
-                    .diff_hunks_in_ranges(&[editor::Anchor::min()..editor::Anchor::max()], snapshot)
+                    .diff_hunks_in_ranges(&[editor::Anchor::Min..editor::Anchor::Max], snapshot)
                     .collect::<Vec<_>>()
             });
         assert_eq!(new_buffer_hunks.as_slice(), &[]);
@@ -2209,9 +2223,14 @@ mod tests {
 
         cx.update(|window, cx| {
             let editor = diff.read(cx).editor.read(cx).rhs_editor().clone();
-            let excerpt_ids = editor.read(cx).buffer().read(cx).excerpt_ids();
-            assert_eq!(excerpt_ids.len(), 1);
-            let excerpt_id = excerpt_ids[0];
+            let excerpts = editor
+                .read(cx)
+                .buffer()
+                .read(cx)
+                .snapshot(cx)
+                .excerpts()
+                .collect::<Vec<_>>();
+            assert_eq!(excerpts.len(), 1);
             let buffer = editor
                 .read(cx)
                 .buffer()
@@ -2239,7 +2258,6 @@ mod tests {
 
             resolve_conflict(
                 editor.downgrade(),
-                excerpt_id,
                 snapshot.conflicts[0].clone(),
                 vec![ours_range],
                 window,

crates/git_ui/src/text_diff_view.rs πŸ”—

@@ -11,7 +11,7 @@ use gpui::{
     AnyElement, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, FocusHandle,
     Focusable, IntoElement, Render, Task, Window,
 };
-use language::{self, Buffer, Point};
+use language::{self, Buffer, OffsetRangeExt, Point};
 use project::Project;
 use settings::Settings;
 use std::{
@@ -52,36 +52,26 @@ impl TextDiffView {
 
         let selection_data = source_editor.update(cx, |editor, cx| {
             let multibuffer = editor.buffer();
-            let selections = editor.selections.all::<Point>(&editor.display_snapshot(cx));
-            let first_selection = selections.first()?;
-
-            let (source_buffer, buffer_start, start_excerpt) = multibuffer
-                .read(cx)
-                .point_to_buffer_point(first_selection.start, cx)?;
-            let buffer_end = multibuffer
-                .read(cx)
-                .point_to_buffer_point(first_selection.end, cx)
-                .and_then(|(buf, pt, end_excerpt)| {
-                    (buf.read(cx).remote_id() == source_buffer.read(cx).remote_id()
-                        && end_excerpt == start_excerpt)
-                        .then_some(pt)
-                })
-                .unwrap_or(buffer_start);
+            let multibuffer_snapshot = multibuffer.read(cx).snapshot(cx);
+            let first_selection = editor.selections.newest_anchor();
 
-            let buffer_snapshot = source_buffer.read(cx);
-            let max_point = buffer_snapshot.max_point();
+            let (source_buffer, buffer_range) = multibuffer_snapshot
+                .anchor_range_to_buffer_anchor_range(first_selection.range())?;
+            let max_point = source_buffer.max_point();
+            let buffer_range = buffer_range.to_point(source_buffer);
+            let source_buffer = multibuffer.read(cx).buffer(source_buffer.remote_id())?;
 
-            if first_selection.is_empty() {
+            if buffer_range.is_empty() {
                 let full_range = Point::new(0, 0)..max_point;
                 return Some((source_buffer, full_range));
             }
 
-            let expanded_start = Point::new(buffer_start.row, 0);
-            let expanded_end = if buffer_end.column > 0 {
-                let next_row = buffer_end.row + 1;
+            let expanded_start = Point::new(buffer_range.start.row, 0);
+            let expanded_end = if buffer_range.end.column > 0 {
+                let next_row = buffer_range.end.row + 1;
                 cmp::min(max_point, Point::new(next_row, 0))
             } else {
-                buffer_end
+                buffer_range.end
             };
             Some((source_buffer, expanded_start..expanded_end))
         });

crates/go_to_line/src/cursor_position.rs πŸ”—

@@ -42,23 +42,22 @@ impl UserCaretPosition {
         snapshot: &MultiBufferSnapshot,
     ) -> Self {
         let selection_end = selection.head();
-        let (line, character) = if let Some((buffer_snapshot, point, _)) =
-            snapshot.point_to_buffer_point(selection_end)
-        {
-            let line_start = Point::new(point.row, 0);
+        let (line, character) =
+            if let Some((buffer_snapshot, point)) = snapshot.point_to_buffer_point(selection_end) {
+                let line_start = Point::new(point.row, 0);
 
-            let chars_to_last_position = buffer_snapshot
-                .text_summary_for_range::<text::TextSummary, _>(line_start..point)
-                .chars as u32;
-            (line_start.row, chars_to_last_position)
-        } else {
-            let line_start = Point::new(selection_end.row, 0);
+                let chars_to_last_position = buffer_snapshot
+                    .text_summary_for_range::<text::TextSummary, _>(line_start..point)
+                    .chars as u32;
+                (line_start.row, chars_to_last_position)
+            } else {
+                let line_start = Point::new(selection_end.row, 0);
 
-            let chars_to_last_position = snapshot
-                .text_summary_for_range::<MBTextSummary, _>(line_start..selection_end)
-                .chars as u32;
-            (selection_end.row, chars_to_last_position)
-        };
+                let chars_to_last_position = snapshot
+                    .text_summary_for_range::<MBTextSummary, _>(line_start..selection_end)
+                    .chars as u32;
+                (selection_end.row, chars_to_last_position)
+            };
 
         Self {
             line: NonZeroU32::new(line + 1).expect("added 1"),
@@ -232,7 +231,7 @@ impl Render for CursorPosition {
                                 if let Some(editor) = workspace
                                     .active_item(cx)
                                     .and_then(|item| item.act_as::<Editor>(cx))
-                                    && let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx)
+                                    && let Some(buffer) = editor.read(cx).active_buffer(cx)
                                 {
                                     workspace.toggle_modal(window, cx, |window, cx| {
                                         crate::GoToLine::new(editor, buffer, window, cx)

crates/go_to_line/src/go_to_line.rs πŸ”—

@@ -63,7 +63,7 @@ impl GoToLine {
                     return;
                 };
                 let editor = editor_handle.read(cx);
-                let Some((_, buffer, _)) = editor.active_excerpt(cx) else {
+                let Some(buffer) = editor.active_buffer(cx) else {
                     return;
                 };
                 workspace.update(cx, |workspace, cx| {
@@ -93,11 +93,9 @@ impl GoToLine {
             let last_line = editor
                 .buffer()
                 .read(cx)
-                .excerpts_for_buffer(snapshot.remote_id(), cx)
-                .into_iter()
-                .map(move |(_, _, range)| {
-                    text::ToPoint::to_point(&range.context.end, &snapshot).row
-                })
+                .snapshot(cx)
+                .excerpts_for_buffer(snapshot.remote_id())
+                .map(move |range| text::ToPoint::to_point(&range.context.end, &snapshot).row)
                 .max()
                 .unwrap_or(0);
 
@@ -230,7 +228,7 @@ impl GoToLine {
         let character = query_char.unwrap_or(0).saturating_sub(1);
 
         let target_multi_buffer_row = MultiBufferRow(row);
-        let (buffer_snapshot, target_in_buffer, _) = snapshot.point_to_buffer_point(Point::new(
+        let (buffer_snapshot, target_in_buffer) = snapshot.point_to_buffer_point(Point::new(
             target_multi_buffer_row.min(snapshot.max_row()).0,
             0,
         ))?;

crates/gpui/src/elements/div.rs πŸ”—

@@ -3067,21 +3067,29 @@ fn handle_tooltip_mouse_move(
         }
         Action::ScheduleShow => {
             let delayed_show_task = window.spawn(cx, {
-                let active_tooltip = active_tooltip.clone();
+                let weak_active_tooltip = Rc::downgrade(active_tooltip);
                 let build_tooltip = build_tooltip.clone();
                 let check_is_hovered_during_prepaint = check_is_hovered_during_prepaint.clone();
                 async move |cx| {
                     cx.background_executor().timer(TOOLTIP_SHOW_DELAY).await;
+                    let Some(active_tooltip) = weak_active_tooltip.upgrade() else {
+                        return;
+                    };
                     cx.update(|window, cx| {
                         let new_tooltip =
                             build_tooltip(window, cx).map(|(view, tooltip_is_hoverable)| {
-                                let active_tooltip = active_tooltip.clone();
+                                let weak_active_tooltip = Rc::downgrade(&active_tooltip);
                                 ActiveTooltip::Visible {
                                     tooltip: AnyTooltip {
                                         view,
                                         mouse_position: window.mouse_position(),
                                         check_visible_and_update: Rc::new(
                                             move |tooltip_bounds, window, cx| {
+                                                let Some(active_tooltip) =
+                                                    weak_active_tooltip.upgrade()
+                                                else {
+                                                    return false;
+                                                };
                                                 handle_tooltip_check_visible_and_update(
                                                     &active_tooltip,
                                                     tooltip_is_hoverable,
@@ -3160,11 +3168,14 @@ fn handle_tooltip_check_visible_and_update(
         Action::Hide => clear_active_tooltip(active_tooltip, window),
         Action::ScheduleHide(tooltip) => {
             let delayed_hide_task = window.spawn(cx, {
-                let active_tooltip = active_tooltip.clone();
+                let weak_active_tooltip = Rc::downgrade(active_tooltip);
                 async move |cx| {
                     cx.background_executor()
                         .timer(HOVERABLE_TOOLTIP_HIDE_DELAY)
                         .await;
+                    let Some(active_tooltip) = weak_active_tooltip.upgrade() else {
+                        return;
+                    };
                     if active_tooltip.borrow_mut().take().is_some() {
                         cx.update(|window, _cx| window.refresh()).ok();
                     }
@@ -3577,6 +3588,112 @@ impl ScrollHandle {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use crate::{AppContext as _, Context, InputEvent, MouseMoveEvent, TestAppContext};
+    use std::rc::Weak;
+
+    struct TestTooltipView;
+
+    impl Render for TestTooltipView {
+        fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
+            div().w(px(20.)).h(px(20.)).child("tooltip")
+        }
+    }
+
+    type CapturedActiveTooltip = Rc<RefCell<Option<Weak<RefCell<Option<ActiveTooltip>>>>>>;
+
+    struct TooltipCaptureElement {
+        child: AnyElement,
+        captured_active_tooltip: CapturedActiveTooltip,
+    }
+
+    impl IntoElement for TooltipCaptureElement {
+        type Element = Self;
+
+        fn into_element(self) -> Self::Element {
+            self
+        }
+    }
+
+    impl Element for TooltipCaptureElement {
+        type RequestLayoutState = ();
+        type PrepaintState = ();
+
+        fn id(&self) -> Option<ElementId> {
+            None
+        }
+
+        fn source_location(&self) -> Option<&'static core::panic::Location<'static>> {
+            None
+        }
+
+        fn request_layout(
+            &mut self,
+            _id: Option<&GlobalElementId>,
+            _inspector_id: Option<&InspectorElementId>,
+            window: &mut Window,
+            cx: &mut App,
+        ) -> (LayoutId, Self::RequestLayoutState) {
+            (self.child.request_layout(window, cx), ())
+        }
+
+        fn prepaint(
+            &mut self,
+            _id: Option<&GlobalElementId>,
+            _inspector_id: Option<&InspectorElementId>,
+            _bounds: Bounds<Pixels>,
+            _request_layout: &mut Self::RequestLayoutState,
+            window: &mut Window,
+            cx: &mut App,
+        ) -> Self::PrepaintState {
+            self.child.prepaint(window, cx);
+        }
+
+        fn paint(
+            &mut self,
+            _id: Option<&GlobalElementId>,
+            _inspector_id: Option<&InspectorElementId>,
+            _bounds: Bounds<Pixels>,
+            _request_layout: &mut Self::RequestLayoutState,
+            _prepaint: &mut Self::PrepaintState,
+            window: &mut Window,
+            cx: &mut App,
+        ) {
+            self.child.paint(window, cx);
+            window.with_global_id("target".into(), |global_id, window| {
+                window.with_element_state::<InteractiveElementState, _>(
+                    global_id,
+                    |state, _window| {
+                        let state = state.unwrap();
+                        *self.captured_active_tooltip.borrow_mut() =
+                            state.active_tooltip.as_ref().map(Rc::downgrade);
+                        ((), state)
+                    },
+                )
+            });
+        }
+    }
+
+    struct TooltipOwner {
+        captured_active_tooltip: CapturedActiveTooltip,
+    }
+
+    impl Render for TooltipOwner {
+        fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
+            TooltipCaptureElement {
+                child: div()
+                    .size_full()
+                    .child(
+                        div()
+                            .id("target")
+                            .w(px(50.))
+                            .h(px(50.))
+                            .tooltip(|_, cx| cx.new(|_| TestTooltipView).into()),
+                    )
+                    .into_any_element(),
+                captured_active_tooltip: self.captured_active_tooltip.clone(),
+            }
+        }
+    }
 
     #[test]
     fn scroll_handle_aligns_wide_children_to_left_edge() {
@@ -3615,4 +3732,96 @@ mod tests {
 
         assert_eq!(handle.offset().y, px(-25.));
     }
+
+    fn setup_tooltip_owner_test() -> (
+        TestAppContext,
+        crate::AnyWindowHandle,
+        CapturedActiveTooltip,
+    ) {
+        let mut test_app = TestAppContext::single();
+        let captured_active_tooltip: CapturedActiveTooltip = Rc::new(RefCell::new(None));
+        let window = test_app.add_window({
+            let captured_active_tooltip = captured_active_tooltip.clone();
+            move |_, _| TooltipOwner {
+                captured_active_tooltip,
+            }
+        });
+        let any_window = window.into();
+
+        test_app
+            .update_window(any_window, |_, window, cx| {
+                window.draw(cx).clear();
+            })
+            .unwrap();
+
+        test_app
+            .update_window(any_window, |_, window, cx| {
+                window.dispatch_event(
+                    MouseMoveEvent {
+                        position: point(px(10.), px(10.)),
+                        modifiers: Default::default(),
+                        pressed_button: None,
+                    }
+                    .to_platform_input(),
+                    cx,
+                );
+            })
+            .unwrap();
+
+        test_app
+            .update_window(any_window, |_, window, cx| {
+                window.draw(cx).clear();
+            })
+            .unwrap();
+
+        (test_app, any_window, captured_active_tooltip)
+    }
+
+    #[test]
+    fn tooltip_waiting_for_show_is_released_when_its_owner_disappears() {
+        let (mut test_app, any_window, captured_active_tooltip) = setup_tooltip_owner_test();
+
+        let weak_active_tooltip = captured_active_tooltip.borrow().clone().unwrap();
+        let active_tooltip = weak_active_tooltip.upgrade().unwrap();
+        assert!(matches!(
+            active_tooltip.borrow().as_ref(),
+            Some(ActiveTooltip::WaitingForShow { .. })
+        ));
+
+        test_app
+            .update_window(any_window, |_, window, _| {
+                window.remove_window();
+            })
+            .unwrap();
+        test_app.run_until_parked();
+        drop(active_tooltip);
+
+        assert!(weak_active_tooltip.upgrade().is_none());
+    }
+
+    #[test]
+    fn tooltip_is_released_when_its_owner_disappears() {
+        let (mut test_app, any_window, captured_active_tooltip) = setup_tooltip_owner_test();
+
+        let weak_active_tooltip = captured_active_tooltip.borrow().clone().unwrap();
+        let active_tooltip = weak_active_tooltip.upgrade().unwrap();
+
+        test_app.dispatcher.advance_clock(TOOLTIP_SHOW_DELAY);
+        test_app.run_until_parked();
+
+        assert!(matches!(
+            active_tooltip.borrow().as_ref(),
+            Some(ActiveTooltip::Visible { .. })
+        ));
+
+        test_app
+            .update_window(any_window, |_, window, _| {
+                window.remove_window();
+            })
+            .unwrap();
+        test_app.run_until_parked();
+        drop(active_tooltip);
+
+        assert!(weak_active_tooltip.upgrade().is_none());
+    }
 }

crates/gpui/src/window.rs πŸ”—

@@ -5496,6 +5496,8 @@ pub enum ElementId {
     CodeLocation(core::panic::Location<'static>),
     /// A labeled child of an element.
     NamedChild(Arc<ElementId>, SharedString),
+    /// A byte array ID (used for text-anchors)
+    OpaqueId([u8; 20]),
 }
 
 impl ElementId {
@@ -5517,6 +5519,7 @@ impl Display for ElementId {
             ElementId::Path(path) => write!(f, "{}", path.display())?,
             ElementId::CodeLocation(location) => write!(f, "{}", location)?,
             ElementId::NamedChild(id, name) => write!(f, "{}-{}", id, name)?,
+            ElementId::OpaqueId(opaque_id) => write!(f, "{:x?}", opaque_id)?,
         }
 
         Ok(())
@@ -5631,6 +5634,12 @@ impl From<&'static core::panic::Location<'static>> for ElementId {
     }
 }
 
+impl From<[u8; 20]> for ElementId {
+    fn from(opaque_id: [u8; 20]) -> Self {
+        ElementId::OpaqueId(opaque_id)
+    }
+}
+
 /// A rectangle to be rendered in the window at the given position and size.
 /// Passed as an argument [`Window::paint_quad`].
 #[derive(Clone)]

crates/inspector_ui/src/div_inspector.rs πŸ”—

@@ -1,7 +1,6 @@
 use anyhow::{Result, anyhow};
 use editor::{
-    Bias, CompletionProvider, Editor, EditorEvent, EditorMode, ExcerptId, MinimapVisibility,
-    MultiBuffer,
+    Bias, CompletionProvider, Editor, EditorEvent, EditorMode, MinimapVisibility, MultiBuffer,
 };
 use fuzzy::StringMatch;
 use gpui::{
@@ -641,7 +640,6 @@ struct RustStyleCompletionProvider {
 impl CompletionProvider for RustStyleCompletionProvider {
     fn completions(
         &self,
-        _excerpt_id: ExcerptId,
         buffer: &Entity<Buffer>,
         position: Anchor,
         _: editor::CompletionContext,

crates/keymap_editor/src/action_completion_provider.rs πŸ”—

@@ -26,7 +26,6 @@ impl ActionCompletionProvider {
 impl CompletionProvider for ActionCompletionProvider {
     fn completions(
         &self,
-        _excerpt_id: editor::ExcerptId,
         buffer: &Entity<language::Buffer>,
         buffer_position: language::Anchor,
         _trigger: editor::CompletionContext,

crates/keymap_editor/src/keymap_editor.rs πŸ”—

@@ -31,10 +31,10 @@ use settings::{
     BaseKeymap, KeybindSource, KeymapFile, Settings as _, SettingsAssets, infer_json_indent_size,
 };
 use ui::{
-    ActiveTheme as _, App, Banner, BorrowAppContext, ContextMenu, IconButtonShape, IconPosition,
-    Indicator, Modal, ModalFooter, ModalHeader, ParentElement as _, PopoverMenu, Render, Section,
-    SharedString, Styled as _, Table, TableColumnWidths, TableInteractionState,
-    TableResizeBehavior, Tooltip, Window, prelude::*,
+    ActiveTheme as _, App, Banner, BorrowAppContext, ColumnWidthConfig, ContextMenu,
+    IconButtonShape, IconPosition, Indicator, Modal, ModalFooter, ModalHeader, ParentElement as _,
+    PopoverMenu, RedistributableColumnsState, Render, Section, SharedString, Styled as _, Table,
+    TableInteractionState, TableResizeBehavior, Tooltip, Window, prelude::*,
 };
 use ui_input::InputField;
 use util::ResultExt;
@@ -450,7 +450,7 @@ struct KeymapEditor {
     context_menu: Option<(Entity<ContextMenu>, Point<Pixels>, Subscription)>,
     previous_edit: Option<PreviousEdit>,
     humanized_action_names: HumanizedActionNameCache,
-    current_widths: Entity<TableColumnWidths>,
+    current_widths: Entity<RedistributableColumnsState>,
     show_hover_menus: bool,
     actions_with_schemas: HashSet<&'static str>,
     /// In order for the JSON LSP to run in the actions arguments editor, we
@@ -623,7 +623,27 @@ impl KeymapEditor {
             actions_with_schemas: HashSet::default(),
             action_args_temp_dir: None,
             action_args_temp_dir_worktree: None,
-            current_widths: cx.new(|cx| TableColumnWidths::new(COLS, cx)),
+            current_widths: cx.new(|_cx| {
+                RedistributableColumnsState::new(
+                    COLS,
+                    vec![
+                        DefiniteLength::Absolute(AbsoluteLength::Pixels(px(36.))),
+                        DefiniteLength::Fraction(0.25),
+                        DefiniteLength::Fraction(0.20),
+                        DefiniteLength::Fraction(0.14),
+                        DefiniteLength::Fraction(0.45),
+                        DefiniteLength::Fraction(0.08),
+                    ],
+                    vec![
+                        TableResizeBehavior::None,
+                        TableResizeBehavior::Resizable,
+                        TableResizeBehavior::Resizable,
+                        TableResizeBehavior::Resizable,
+                        TableResizeBehavior::Resizable,
+                        TableResizeBehavior::Resizable,
+                    ],
+                )
+            }),
         };
 
         this.on_keymap_changed(window, cx);
@@ -2095,26 +2115,9 @@ impl Render for KeymapEditor {
                         let this = cx.entity();
                         move |window, cx| this.read(cx).render_no_matches_hint(window, cx)
                     })
-                    .column_widths(vec![
-                        DefiniteLength::Absolute(AbsoluteLength::Pixels(px(36.))),
-                        DefiniteLength::Fraction(0.25),
-                        DefiniteLength::Fraction(0.20),
-                        DefiniteLength::Fraction(0.14),
-                        DefiniteLength::Fraction(0.45),
-                        DefiniteLength::Fraction(0.08),
-                    ])
-                    .resizable_columns(
-                        vec![
-                            TableResizeBehavior::None,
-                            TableResizeBehavior::Resizable,
-                            TableResizeBehavior::Resizable,
-                            TableResizeBehavior::Resizable,
-                            TableResizeBehavior::Resizable,
-                            TableResizeBehavior::Resizable, // this column doesn't matter
-                        ],
-                        &self.current_widths,
-                        cx,
-                    )
+                    .width_config(ColumnWidthConfig::redistributable(
+                        self.current_widths.clone(),
+                    ))
                     .header(vec!["", "Action", "Arguments", "Keystrokes", "Context", "Source"])
                     .uniform_list(
                         "keymap-editor-table",
@@ -3477,7 +3480,6 @@ struct KeyContextCompletionProvider {
 impl CompletionProvider for KeyContextCompletionProvider {
     fn completions(
         &self,
-        _excerpt_id: editor::ExcerptId,
         buffer: &Entity<language::Buffer>,
         buffer_position: language::Anchor,
         _trigger: editor::CompletionContext,

crates/language/src/diagnostic_set.rs πŸ”—

@@ -326,23 +326,17 @@ impl DiagnosticEntry<Anchor> {
     }
 }
 
-impl Default for Summary {
-    fn default() -> Self {
-        Self {
-            start: Anchor::MIN,
-            end: Anchor::MAX,
-            min_start: Anchor::MAX,
-            max_end: Anchor::MIN,
-            count: 0,
-        }
-    }
-}
-
 impl sum_tree::Summary for Summary {
     type Context<'a> = &'a text::BufferSnapshot;
 
-    fn zero(_cx: Self::Context<'_>) -> Self {
-        Default::default()
+    fn zero(buffer: &text::BufferSnapshot) -> Self {
+        Self {
+            start: Anchor::min_for_buffer(buffer.remote_id()),
+            end: Anchor::max_for_buffer(buffer.remote_id()),
+            min_start: Anchor::max_for_buffer(buffer.remote_id()),
+            max_end: Anchor::min_for_buffer(buffer.remote_id()),
+            count: 0,
+        }
     }
 
     fn add_summary(&mut self, other: &Self, buffer: Self::Context<'_>) {

crates/language/src/proto.rs πŸ”—

@@ -174,11 +174,11 @@ pub fn serialize_selection(selection: &Selection<Anchor>) -> proto::Selection {
         id: selection.id as u64,
         start: Some(proto::EditorAnchor {
             anchor: Some(serialize_anchor(&selection.start)),
-            excerpt_id: 0,
+            excerpt_id: None,
         }),
         end: Some(proto::EditorAnchor {
             anchor: Some(serialize_anchor(&selection.end)),
-            excerpt_id: 0,
+            excerpt_id: None,
         }),
         reversed: selection.reversed,
     }
@@ -260,7 +260,7 @@ pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
             Bias::Left => proto::Bias::Left as i32,
             Bias::Right => proto::Bias::Right as i32,
         },
-        buffer_id: anchor.buffer_id.map(Into::into),
+        buffer_id: Some(anchor.buffer_id.into()),
     }
 }
 
@@ -498,7 +498,7 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
         timestamp,
         anchor.offset as u32,
         bias,
-        buffer_id,
+        buffer_id?,
     ))
 }
 

crates/language/src/syntax_map.rs πŸ”—

@@ -18,7 +18,7 @@ use std::{
 };
 use streaming_iterator::StreamingIterator;
 use sum_tree::{Bias, Dimensions, SeekTarget, SumTree};
-use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint};
+use text::{Anchor, BufferId, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint};
 use tree_sitter::{
     Node, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatch, QueryMatches,
     QueryPredicateArg,
@@ -56,7 +56,15 @@ impl Drop for SyntaxSnapshot {
         // This does allocate a new Arc, but it's cheap and avoids blocking the main thread without needing to use an `Option` or `MaybeUninit`.
         let _ = DROP_TX.send(std::mem::replace(
             &mut self.layers,
-            SumTree::from_summary(Default::default()),
+            SumTree::from_summary(SyntaxLayerSummary {
+                min_depth: Default::default(),
+                max_depth: Default::default(),
+                // Deliberately bogus anchors, doesn't matter in this context
+                range: Anchor::min_min_range_for_buffer(BufferId::new(1).unwrap()),
+                last_layer_range: Anchor::min_min_range_for_buffer(BufferId::new(1).unwrap()),
+                last_layer_language: Default::default(),
+                contains_unknown_injections: Default::default(),
+            }),
         ));
     }
 }
@@ -588,7 +596,7 @@ impl SyntaxSnapshot {
 
                 let bounded_position = SyntaxLayerPositionBeforeChange {
                     position: position.clone(),
-                    change: changed_regions.start_position(),
+                    change: changed_regions.start_position(text.remote_id()),
                 };
                 if bounded_position.cmp(cursor.start(), text).is_gt() {
                     let slice = cursor.slice(&bounded_position, Bias::Left);
@@ -1946,11 +1954,11 @@ impl ChangedRegion {
 }
 
 impl ChangeRegionSet {
-    fn start_position(&self) -> ChangeStartPosition {
+    fn start_position(&self, buffer_id: BufferId) -> ChangeStartPosition {
         self.0.first().map_or(
             ChangeStartPosition {
                 depth: usize::MAX,
-                position: Anchor::MAX,
+                position: Anchor::max_for_buffer(buffer_id),
             },
             |region| ChangeStartPosition {
                 depth: region.depth,
@@ -1999,32 +2007,28 @@ impl ChangeRegionSet {
     }
 }
 
-impl Default for SyntaxLayerSummary {
-    fn default() -> Self {
+impl sum_tree::Summary for SyntaxLayerSummary {
+    type Context<'a> = &'a BufferSnapshot;
+
+    fn zero(buffer: &BufferSnapshot) -> Self {
         Self {
             max_depth: 0,
             min_depth: 0,
-            range: Anchor::MAX..Anchor::MIN,
-            last_layer_range: Anchor::MIN..Anchor::MAX,
+            range: Anchor::max_for_buffer(buffer.remote_id())
+                ..Anchor::min_for_buffer(buffer.remote_id()),
+            last_layer_range: Anchor::min_for_buffer(buffer.remote_id())
+                ..Anchor::max_for_buffer(buffer.remote_id()),
             last_layer_language: None,
             contains_unknown_injections: false,
         }
     }
-}
-
-impl sum_tree::Summary for SyntaxLayerSummary {
-    type Context<'a> = &'a BufferSnapshot;
-
-    fn zero(_cx: &BufferSnapshot) -> Self {
-        Default::default()
-    }
 
     fn add_summary(&mut self, other: &Self, buffer: Self::Context<'_>) {
         if other.max_depth > self.max_depth {
             self.max_depth = other.max_depth;
             self.range = other.range.clone();
         } else {
-            if self.range == (Anchor::MAX..Anchor::MAX) {
+            if self.range.start.is_max() && self.range.end.is_max() {
                 self.range.start = other.range.start;
             }
             if other.range.end.cmp(&self.range.end, buffer).is_gt() {

crates/language_model/Cargo.toml πŸ”—

@@ -37,7 +37,6 @@ parking_lot.workspace = true
 schemars.workspace = true
 serde.workspace = true
 serde_json.workspace = true
-settings.workspace = true
 smol.workspace = true
 thiserror.workspace = true
 util.workspace = true

crates/language_model/src/language_model.rs πŸ”—

@@ -1,16 +1,15 @@
 mod api_key;
 mod model;
+mod provider;
 mod rate_limiter;
 mod registry;
 mod request;
 mod role;
-mod telemetry;
 pub mod tool_schema;
 
 #[cfg(any(test, feature = "test-support"))]
 pub mod fake_provider;
 
-use anthropic::{AnthropicError, parse_prompt_too_long};
 use anyhow::{Result, anyhow};
 use client::Client;
 use client::UserStore;
@@ -20,10 +19,8 @@ use futures::{StreamExt, future::BoxFuture, stream::BoxStream};
 use gpui::{AnyView, App, AsyncApp, Entity, SharedString, Task, Window};
 use http_client::{StatusCode, http};
 use icons::IconName;
-use open_router::OpenRouterError;
 use parking_lot::Mutex;
 use serde::{Deserialize, Serialize};
-pub use settings::LanguageModelCacheConfiguration;
 use std::ops::{Add, Sub};
 use std::str::FromStr;
 use std::sync::Arc;
@@ -38,30 +35,10 @@ pub use crate::rate_limiter::*;
 pub use crate::registry::*;
 pub use crate::request::*;
 pub use crate::role::*;
-pub use crate::telemetry::*;
 pub use crate::tool_schema::LanguageModelToolSchemaFormat;
+pub use provider::*;
 pub use zed_env_vars::{EnvVar, env_var};
 
-pub const ANTHROPIC_PROVIDER_ID: LanguageModelProviderId =
-    LanguageModelProviderId::new("anthropic");
-pub const ANTHROPIC_PROVIDER_NAME: LanguageModelProviderName =
-    LanguageModelProviderName::new("Anthropic");
-
-pub const GOOGLE_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("google");
-pub const GOOGLE_PROVIDER_NAME: LanguageModelProviderName =
-    LanguageModelProviderName::new("Google AI");
-
-pub const OPEN_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openai");
-pub const OPEN_AI_PROVIDER_NAME: LanguageModelProviderName =
-    LanguageModelProviderName::new("OpenAI");
-
-pub const X_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("x_ai");
-pub const X_AI_PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("xAI");
-
-pub const ZED_CLOUD_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("zed.dev");
-pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName =
-    LanguageModelProviderName::new("Zed");
-
 pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
     init_settings(cx);
     RefreshLlmTokenListener::register(client, user_store, cx);
@@ -71,6 +48,13 @@ pub fn init_settings(cx: &mut App) {
     registry::init(cx);
 }
 
+#[derive(Clone, Debug)]
+pub struct LanguageModelCacheConfiguration {
+    pub max_cache_anchors: usize,
+    pub should_speculate: bool,
+    pub min_total_token: u64,
+}
+
 /// A completion event from a language model.
 #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub enum LanguageModelCompletionEvent {
@@ -310,165 +294,6 @@ impl LanguageModelCompletionError {
     }
 }
 
-impl From<AnthropicError> for LanguageModelCompletionError {
-    fn from(error: AnthropicError) -> Self {
-        let provider = ANTHROPIC_PROVIDER_NAME;
-        match error {
-            AnthropicError::SerializeRequest(error) => Self::SerializeRequest { provider, error },
-            AnthropicError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error },
-            AnthropicError::HttpSend(error) => Self::HttpSend { provider, error },
-            AnthropicError::DeserializeResponse(error) => {
-                Self::DeserializeResponse { provider, error }
-            }
-            AnthropicError::ReadResponse(error) => Self::ApiReadResponseError { provider, error },
-            AnthropicError::HttpResponseError {
-                status_code,
-                message,
-            } => Self::HttpResponseError {
-                provider,
-                status_code,
-                message,
-            },
-            AnthropicError::RateLimit { retry_after } => Self::RateLimitExceeded {
-                provider,
-                retry_after: Some(retry_after),
-            },
-            AnthropicError::ServerOverloaded { retry_after } => Self::ServerOverloaded {
-                provider,
-                retry_after,
-            },
-            AnthropicError::ApiError(api_error) => api_error.into(),
-        }
-    }
-}
-
-impl From<anthropic::ApiError> for LanguageModelCompletionError {
-    fn from(error: anthropic::ApiError) -> Self {
-        use anthropic::ApiErrorCode::*;
-        let provider = ANTHROPIC_PROVIDER_NAME;
-        match error.code() {
-            Some(code) => match code {
-                InvalidRequestError => Self::BadRequestFormat {
-                    provider,
-                    message: error.message,
-                },
-                AuthenticationError => Self::AuthenticationError {
-                    provider,
-                    message: error.message,
-                },
-                PermissionError => Self::PermissionError {
-                    provider,
-                    message: error.message,
-                },
-                NotFoundError => Self::ApiEndpointNotFound { provider },
-                RequestTooLarge => Self::PromptTooLarge {
-                    tokens: parse_prompt_too_long(&error.message),
-                },
-                RateLimitError => Self::RateLimitExceeded {
-                    provider,
-                    retry_after: None,
-                },
-                ApiError => Self::ApiInternalServerError {
-                    provider,
-                    message: error.message,
-                },
-                OverloadedError => Self::ServerOverloaded {
-                    provider,
-                    retry_after: None,
-                },
-            },
-            None => Self::Other(error.into()),
-        }
-    }
-}
-
-impl From<open_ai::RequestError> for LanguageModelCompletionError {
-    fn from(error: open_ai::RequestError) -> Self {
-        match error {
-            open_ai::RequestError::HttpResponseError {
-                provider,
-                status_code,
-                body,
-                headers,
-            } => {
-                let retry_after = headers
-                    .get(http::header::RETRY_AFTER)
-                    .and_then(|val| val.to_str().ok()?.parse::<u64>().ok())
-                    .map(Duration::from_secs);
-
-                Self::from_http_status(provider.into(), status_code, body, retry_after)
-            }
-            open_ai::RequestError::Other(e) => Self::Other(e),
-        }
-    }
-}
-
-impl From<OpenRouterError> for LanguageModelCompletionError {
-    fn from(error: OpenRouterError) -> Self {
-        let provider = LanguageModelProviderName::new("OpenRouter");
-        match error {
-            OpenRouterError::SerializeRequest(error) => Self::SerializeRequest { provider, error },
-            OpenRouterError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error },
-            OpenRouterError::HttpSend(error) => Self::HttpSend { provider, error },
-            OpenRouterError::DeserializeResponse(error) => {
-                Self::DeserializeResponse { provider, error }
-            }
-            OpenRouterError::ReadResponse(error) => Self::ApiReadResponseError { provider, error },
-            OpenRouterError::RateLimit { retry_after } => Self::RateLimitExceeded {
-                provider,
-                retry_after: Some(retry_after),
-            },
-            OpenRouterError::ServerOverloaded { retry_after } => Self::ServerOverloaded {
-                provider,
-                retry_after,
-            },
-            OpenRouterError::ApiError(api_error) => api_error.into(),
-        }
-    }
-}
-
-impl From<open_router::ApiError> for LanguageModelCompletionError {
-    fn from(error: open_router::ApiError) -> Self {
-        use open_router::ApiErrorCode::*;
-        let provider = LanguageModelProviderName::new("OpenRouter");
-        match error.code {
-            InvalidRequestError => Self::BadRequestFormat {
-                provider,
-                message: error.message,
-            },
-            AuthenticationError => Self::AuthenticationError {
-                provider,
-                message: error.message,
-            },
-            PaymentRequiredError => Self::AuthenticationError {
-                provider,
-                message: format!("Payment required: {}", error.message),
-            },
-            PermissionError => Self::PermissionError {
-                provider,
-                message: error.message,
-            },
-            RequestTimedOut => Self::HttpResponseError {
-                provider,
-                status_code: StatusCode::REQUEST_TIMEOUT,
-                message: error.message,
-            },
-            RateLimitError => Self::RateLimitExceeded {
-                provider,
-                retry_after: None,
-            },
-            ApiError => Self::ApiInternalServerError {
-                provider,
-                message: error.message,
-            },
-            OverloadedError => Self::ServerOverloaded {
-                provider,
-                retry_after: None,
-            },
-        }
-    }
-}
-
 #[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)]
 #[serde(rename_all = "snake_case")]
 pub enum StopReason {

crates/language_model/src/provider.rs πŸ”—

@@ -0,0 +1,12 @@
+pub mod anthropic;
+pub mod google;
+pub mod open_ai;
+pub mod open_router;
+pub mod x_ai;
+pub mod zed;
+
+pub use anthropic::*;
+pub use google::*;
+pub use open_ai::*;
+pub use x_ai::*;
+pub use zed::*;

crates/language_model/src/provider/anthropic.rs πŸ”—

@@ -0,0 +1,80 @@
+use crate::{LanguageModelCompletionError, LanguageModelProviderId, LanguageModelProviderName};
+use anthropic::AnthropicError;
+pub use anthropic::parse_prompt_too_long;
+
+pub const ANTHROPIC_PROVIDER_ID: LanguageModelProviderId =
+    LanguageModelProviderId::new("anthropic");
+pub const ANTHROPIC_PROVIDER_NAME: LanguageModelProviderName =
+    LanguageModelProviderName::new("Anthropic");
+
+impl From<AnthropicError> for LanguageModelCompletionError {
+    fn from(error: AnthropicError) -> Self {
+        let provider = ANTHROPIC_PROVIDER_NAME;
+        match error {
+            AnthropicError::SerializeRequest(error) => Self::SerializeRequest { provider, error },
+            AnthropicError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error },
+            AnthropicError::HttpSend(error) => Self::HttpSend { provider, error },
+            AnthropicError::DeserializeResponse(error) => {
+                Self::DeserializeResponse { provider, error }
+            }
+            AnthropicError::ReadResponse(error) => Self::ApiReadResponseError { provider, error },
+            AnthropicError::HttpResponseError {
+                status_code,
+                message,
+            } => Self::HttpResponseError {
+                provider,
+                status_code,
+                message,
+            },
+            AnthropicError::RateLimit { retry_after } => Self::RateLimitExceeded {
+                provider,
+                retry_after: Some(retry_after),
+            },
+            AnthropicError::ServerOverloaded { retry_after } => Self::ServerOverloaded {
+                provider,
+                retry_after,
+            },
+            AnthropicError::ApiError(api_error) => api_error.into(),
+        }
+    }
+}
+
+impl From<anthropic::ApiError> for LanguageModelCompletionError {
+    fn from(error: anthropic::ApiError) -> Self {
+        use anthropic::ApiErrorCode::*;
+        let provider = ANTHROPIC_PROVIDER_NAME;
+        match error.code() {
+            Some(code) => match code {
+                InvalidRequestError => Self::BadRequestFormat {
+                    provider,
+                    message: error.message,
+                },
+                AuthenticationError => Self::AuthenticationError {
+                    provider,
+                    message: error.message,
+                },
+                PermissionError => Self::PermissionError {
+                    provider,
+                    message: error.message,
+                },
+                NotFoundError => Self::ApiEndpointNotFound { provider },
+                RequestTooLarge => Self::PromptTooLarge {
+                    tokens: parse_prompt_too_long(&error.message),
+                },
+                RateLimitError => Self::RateLimitExceeded {
+                    provider,
+                    retry_after: None,
+                },
+                ApiError => Self::ApiInternalServerError {
+                    provider,
+                    message: error.message,
+                },
+                OverloadedError => Self::ServerOverloaded {
+                    provider,
+                    retry_after: None,
+                },
+            },
+            None => Self::Other(error.into()),
+        }
+    }
+}

crates/language_model/src/provider/google.rs πŸ”—

@@ -0,0 +1,5 @@
+use crate::{LanguageModelProviderId, LanguageModelProviderName};
+
+pub const GOOGLE_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("google");
+pub const GOOGLE_PROVIDER_NAME: LanguageModelProviderName =
+    LanguageModelProviderName::new("Google AI");

crates/language_model/src/provider/open_ai.rs πŸ”—

@@ -0,0 +1,28 @@
+use crate::{LanguageModelCompletionError, LanguageModelProviderId, LanguageModelProviderName};
+use http_client::http;
+use std::time::Duration;
+
+pub const OPEN_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openai");
+pub const OPEN_AI_PROVIDER_NAME: LanguageModelProviderName =
+    LanguageModelProviderName::new("OpenAI");
+
+impl From<open_ai::RequestError> for LanguageModelCompletionError {
+    fn from(error: open_ai::RequestError) -> Self {
+        match error {
+            open_ai::RequestError::HttpResponseError {
+                provider,
+                status_code,
+                body,
+                headers,
+            } => {
+                let retry_after = headers
+                    .get(http::header::RETRY_AFTER)
+                    .and_then(|val| val.to_str().ok()?.parse::<u64>().ok())
+                    .map(Duration::from_secs);
+
+                Self::from_http_status(provider.into(), status_code, body, retry_after)
+            }
+            open_ai::RequestError::Other(e) => Self::Other(e),
+        }
+    }
+}

crates/language_model/src/provider/open_router.rs πŸ”—

@@ -0,0 +1,69 @@
+use crate::{LanguageModelCompletionError, LanguageModelProviderName};
+use http_client::StatusCode;
+use open_router::OpenRouterError;
+
+impl From<OpenRouterError> for LanguageModelCompletionError {
+    fn from(error: OpenRouterError) -> Self {
+        let provider = LanguageModelProviderName::new("OpenRouter");
+        match error {
+            OpenRouterError::SerializeRequest(error) => Self::SerializeRequest { provider, error },
+            OpenRouterError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error },
+            OpenRouterError::HttpSend(error) => Self::HttpSend { provider, error },
+            OpenRouterError::DeserializeResponse(error) => {
+                Self::DeserializeResponse { provider, error }
+            }
+            OpenRouterError::ReadResponse(error) => Self::ApiReadResponseError { provider, error },
+            OpenRouterError::RateLimit { retry_after } => Self::RateLimitExceeded {
+                provider,
+                retry_after: Some(retry_after),
+            },
+            OpenRouterError::ServerOverloaded { retry_after } => Self::ServerOverloaded {
+                provider,
+                retry_after,
+            },
+            OpenRouterError::ApiError(api_error) => api_error.into(),
+        }
+    }
+}
+
+impl From<open_router::ApiError> for LanguageModelCompletionError {
+    fn from(error: open_router::ApiError) -> Self {
+        use open_router::ApiErrorCode::*;
+        let provider = LanguageModelProviderName::new("OpenRouter");
+        match error.code {
+            InvalidRequestError => Self::BadRequestFormat {
+                provider,
+                message: error.message,
+            },
+            AuthenticationError => Self::AuthenticationError {
+                provider,
+                message: error.message,
+            },
+            PaymentRequiredError => Self::AuthenticationError {
+                provider,
+                message: format!("Payment required: {}", error.message),
+            },
+            PermissionError => Self::PermissionError {
+                provider,
+                message: error.message,
+            },
+            RequestTimedOut => Self::HttpResponseError {
+                provider,
+                status_code: StatusCode::REQUEST_TIMEOUT,
+                message: error.message,
+            },
+            RateLimitError => Self::RateLimitExceeded {
+                provider,
+                retry_after: None,
+            },
+            ApiError => Self::ApiInternalServerError {
+                provider,
+                message: error.message,
+            },
+            OverloadedError => Self::ServerOverloaded {
+                provider,
+                retry_after: None,
+            },
+        }
+    }
+}

crates/language_model/src/provider/x_ai.rs πŸ”—

@@ -0,0 +1,4 @@
+use crate::{LanguageModelProviderId, LanguageModelProviderName};
+
+pub const X_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("x_ai");
+pub const X_AI_PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("xAI");

crates/language_model/src/provider/zed.rs πŸ”—

@@ -0,0 +1,5 @@
+use crate::{LanguageModelProviderId, LanguageModelProviderName};
+
+pub const ZED_CLOUD_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("zed.dev");
+pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName =
+    LanguageModelProviderName::new("Zed");

crates/language_model/src/registry.rs πŸ”—

@@ -101,7 +101,7 @@ impl ConfiguredModel {
     }
 
     pub fn is_provided_by_zed(&self) -> bool {
-        self.provider.id() == crate::ZED_CLOUD_PROVIDER_ID
+        self.provider.id() == crate::provider::ZED_CLOUD_PROVIDER_ID
     }
 }
 

crates/language_models/src/provider/anthropic.rs πŸ”—

@@ -1,3 +1,5 @@
+pub mod telemetry;
+
 use anthropic::{
     ANTHROPIC_API_URL, AnthropicError, AnthropicModelMode, ContentDelta, CountTokensRequest, Event,
     ResponseContent, ToolResultContent, ToolResultPart, Usage,
@@ -8,7 +10,8 @@ use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream
 use gpui::{AnyView, App, AsyncApp, Context, Entity, Task};
 use http_client::HttpClient;
 use language_model::{
-    ApiKeyState, AuthenticateError, ConfigurationViewTargetAgent, EnvVar, IconOrSvg, LanguageModel,
+    ANTHROPIC_PROVIDER_ID, ANTHROPIC_PROVIDER_NAME, ApiKeyState, AuthenticateError,
+    ConfigurationViewTargetAgent, EnvVar, IconOrSvg, LanguageModel,
     LanguageModelCacheConfiguration, LanguageModelCompletionError, LanguageModelCompletionEvent,
     LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
     LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
@@ -28,8 +31,8 @@ use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
 
 pub use settings::AnthropicAvailableModel as AvailableModel;
 
-const PROVIDER_ID: LanguageModelProviderId = language_model::ANTHROPIC_PROVIDER_ID;
-const PROVIDER_NAME: LanguageModelProviderName = language_model::ANTHROPIC_PROVIDER_NAME;
+const PROVIDER_ID: LanguageModelProviderId = ANTHROPIC_PROVIDER_ID;
+const PROVIDER_NAME: LanguageModelProviderName = ANTHROPIC_PROVIDER_NAME;
 
 #[derive(Default, Clone, Debug, PartialEq)]
 pub struct AnthropicSettings {

crates/language_model/src/telemetry.rs β†’ crates/language_models/src/provider/anthropic/telemetry.rs πŸ”—

@@ -1,8 +1,8 @@
-use crate::ANTHROPIC_PROVIDER_ID;
 use anthropic::ANTHROPIC_API_URL;
 use anyhow::{Context as _, anyhow};
 use gpui::BackgroundExecutor;
 use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
+use language_model::{ANTHROPIC_PROVIDER_ID, LanguageModel};
 use std::env;
 use std::sync::Arc;
 use util::ResultExt;
@@ -52,7 +52,7 @@ impl AnthropicEventType {
 }
 
 pub fn report_anthropic_event(
-    model: &Arc<dyn crate::LanguageModel>,
+    model: &Arc<dyn LanguageModel>,
     event: AnthropicEventData,
     cx: &gpui::App,
 ) {
@@ -69,7 +69,7 @@ pub struct AnthropicEventReporter {
 }
 
 impl AnthropicEventReporter {
-    pub fn new(model: &Arc<dyn crate::LanguageModel>, cx: &gpui::App) -> Self {
+    pub fn new(model: &Arc<dyn LanguageModel>, cx: &gpui::App) -> Self {
         Self {
             http_client: cx.http_client(),
             executor: cx.background_executor().clone(),

crates/language_models/src/provider/cloud.rs πŸ”—

@@ -19,12 +19,15 @@ use gpui::{AnyElement, AnyView, App, AsyncApp, Context, Entity, Subscription, Ta
 use http_client::http::{HeaderMap, HeaderValue};
 use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Response, StatusCode};
 use language_model::{
-    AuthenticateError, IconOrSvg, LanguageModel, LanguageModelCacheConfiguration,
+    ANTHROPIC_PROVIDER_ID, ANTHROPIC_PROVIDER_NAME, AuthenticateError, GOOGLE_PROVIDER_ID,
+    GOOGLE_PROVIDER_NAME, IconOrSvg, LanguageModel, LanguageModelCacheConfiguration,
     LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelEffortLevel,
     LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
     LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
     LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken, NeedsLlmTokenRefresh,
-    PaymentRequiredError, RateLimiter, RefreshLlmTokenListener,
+    OPEN_AI_PROVIDER_ID, OPEN_AI_PROVIDER_NAME, PaymentRequiredError, RateLimiter,
+    RefreshLlmTokenListener, X_AI_PROVIDER_ID, X_AI_PROVIDER_NAME, ZED_CLOUD_PROVIDER_ID,
+    ZED_CLOUD_PROVIDER_NAME,
 };
 use release_channel::AppVersion;
 use schemars::JsonSchema;
@@ -53,8 +56,8 @@ use crate::provider::open_ai::{
 };
 use crate::provider::x_ai::count_xai_tokens;
 
-const PROVIDER_ID: LanguageModelProviderId = language_model::ZED_CLOUD_PROVIDER_ID;
-const PROVIDER_NAME: LanguageModelProviderName = language_model::ZED_CLOUD_PROVIDER_NAME;
+const PROVIDER_ID: LanguageModelProviderId = ZED_CLOUD_PROVIDER_ID;
+const PROVIDER_NAME: LanguageModelProviderName = ZED_CLOUD_PROVIDER_NAME;
 
 #[derive(Default, Clone, Debug, PartialEq)]
 pub struct ZedDotDevSettings {
@@ -568,20 +571,20 @@ impl LanguageModel for CloudLanguageModel {
     fn upstream_provider_id(&self) -> LanguageModelProviderId {
         use cloud_llm_client::LanguageModelProvider::*;
         match self.model.provider {
-            Anthropic => language_model::ANTHROPIC_PROVIDER_ID,
-            OpenAi => language_model::OPEN_AI_PROVIDER_ID,
-            Google => language_model::GOOGLE_PROVIDER_ID,
-            XAi => language_model::X_AI_PROVIDER_ID,
+            Anthropic => ANTHROPIC_PROVIDER_ID,
+            OpenAi => OPEN_AI_PROVIDER_ID,
+            Google => GOOGLE_PROVIDER_ID,
+            XAi => X_AI_PROVIDER_ID,
         }
     }
 
     fn upstream_provider_name(&self) -> LanguageModelProviderName {
         use cloud_llm_client::LanguageModelProvider::*;
         match self.model.provider {
-            Anthropic => language_model::ANTHROPIC_PROVIDER_NAME,
-            OpenAi => language_model::OPEN_AI_PROVIDER_NAME,
-            Google => language_model::GOOGLE_PROVIDER_NAME,
-            XAi => language_model::X_AI_PROVIDER_NAME,
+            Anthropic => ANTHROPIC_PROVIDER_NAME,
+            OpenAi => OPEN_AI_PROVIDER_NAME,
+            Google => GOOGLE_PROVIDER_NAME,
+            XAi => X_AI_PROVIDER_NAME,
         }
     }
 
@@ -1047,12 +1050,10 @@ where
 
 fn provider_name(provider: &cloud_llm_client::LanguageModelProvider) -> LanguageModelProviderName {
     match provider {
-        cloud_llm_client::LanguageModelProvider::Anthropic => {
-            language_model::ANTHROPIC_PROVIDER_NAME
-        }
-        cloud_llm_client::LanguageModelProvider::OpenAi => language_model::OPEN_AI_PROVIDER_NAME,
-        cloud_llm_client::LanguageModelProvider::Google => language_model::GOOGLE_PROVIDER_NAME,
-        cloud_llm_client::LanguageModelProvider::XAi => language_model::X_AI_PROVIDER_NAME,
+        cloud_llm_client::LanguageModelProvider::Anthropic => ANTHROPIC_PROVIDER_NAME,
+        cloud_llm_client::LanguageModelProvider::OpenAi => OPEN_AI_PROVIDER_NAME,
+        cloud_llm_client::LanguageModelProvider::Google => GOOGLE_PROVIDER_NAME,
+        cloud_llm_client::LanguageModelProvider::XAi => X_AI_PROVIDER_NAME,
     }
 }
 

crates/language_models/src/provider/google.rs πŸ”—

@@ -13,9 +13,9 @@ use language_model::{
     LanguageModelToolUse, LanguageModelToolUseId, MessageContent, StopReason,
 };
 use language_model::{
-    IconOrSvg, LanguageModel, LanguageModelId, LanguageModelName, LanguageModelProvider,
-    LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
-    LanguageModelRequest, RateLimiter, Role,
+    GOOGLE_PROVIDER_ID, GOOGLE_PROVIDER_NAME, IconOrSvg, LanguageModel, LanguageModelId,
+    LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
+    LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role,
 };
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
@@ -33,8 +33,8 @@ use util::ResultExt;
 
 use language_model::ApiKeyState;
 
-const PROVIDER_ID: LanguageModelProviderId = language_model::GOOGLE_PROVIDER_ID;
-const PROVIDER_NAME: LanguageModelProviderName = language_model::GOOGLE_PROVIDER_NAME;
+const PROVIDER_ID: LanguageModelProviderId = GOOGLE_PROVIDER_ID;
+const PROVIDER_NAME: LanguageModelProviderName = GOOGLE_PROVIDER_NAME;
 
 #[derive(Default, Clone, Debug, PartialEq)]
 pub struct GoogleSettings {

crates/language_models/src/provider/open_ai.rs πŸ”—

@@ -10,7 +10,8 @@ use language_model::{
     LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
     LanguageModelProviderState, LanguageModelRequest, LanguageModelRequestMessage,
     LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse,
-    LanguageModelToolUseId, MessageContent, RateLimiter, Role, StopReason, TokenUsage, env_var,
+    LanguageModelToolUseId, MessageContent, OPEN_AI_PROVIDER_ID, OPEN_AI_PROVIDER_NAME,
+    RateLimiter, Role, StopReason, TokenUsage, env_var,
 };
 use menu;
 use open_ai::responses::{
@@ -35,8 +36,8 @@ use util::ResultExt;
 
 use crate::provider::util::{fix_streamed_json, parse_tool_arguments};
 
-const PROVIDER_ID: LanguageModelProviderId = language_model::OPEN_AI_PROVIDER_ID;
-const PROVIDER_NAME: LanguageModelProviderName = language_model::OPEN_AI_PROVIDER_NAME;
+const PROVIDER_ID: LanguageModelProviderId = OPEN_AI_PROVIDER_ID;
+const PROVIDER_NAME: LanguageModelProviderName = OPEN_AI_PROVIDER_NAME;
 
 const API_KEY_ENV_VAR_NAME: &str = "OPENAI_API_KEY";
 static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);

crates/language_selector/src/active_buffer_language.rs πŸ”—

@@ -29,7 +29,7 @@ impl ActiveBufferLanguage {
         self.active_language = Some(None);
 
         let editor = editor.read(cx);
-        if let Some((_, buffer, _)) = editor.active_excerpt(cx)
+        if let Some(buffer) = editor.active_buffer(cx)
             && let Some(language) = buffer.read(cx).language()
         {
             self.active_language = Some(Some(language.name()));

crates/language_selector/src/language_selector.rs πŸ”—

@@ -51,11 +51,11 @@ impl LanguageSelector {
         cx: &mut Context<Workspace>,
     ) -> Option<()> {
         let registry = workspace.app_state().languages.clone();
-        let (_, buffer, _) = workspace
+        let buffer = workspace
             .active_item(cx)?
             .act_as::<Editor>(cx)?
             .read(cx)
-            .active_excerpt(cx)?;
+            .active_buffer(cx)?;
         let project = workspace.project().clone();
 
         workspace.toggle_modal(window, cx, move |window, cx| {
@@ -414,10 +414,10 @@ mod tests {
     ) -> Entity<Editor> {
         let editor = open_new_buffer_editor(workspace, project, cx).await;
         // Ensure the buffer has no language after the editor is created
-        let (_, buffer, _) = editor.read_with(cx, |editor, cx| {
+        let buffer = editor.read_with(cx, |editor, cx| {
             editor
-                .active_excerpt(cx)
-                .expect("editor should have an active excerpt")
+                .active_buffer(cx)
+                .expect("editor should have an active buffer")
         });
         buffer.update(cx, |buffer, cx| {
             buffer.set_language(None, cx);
@@ -454,8 +454,8 @@ mod tests {
             .await
             .expect("language should exist in registry");
         editor.update(cx, move |editor, cx| {
-            let (_, buffer, _) = editor
-                .active_excerpt(cx)
+            let buffer = editor
+                .active_buffer(cx)
                 .expect("editor should have an active excerpt");
             buffer.update(cx, |buffer, cx| {
                 buffer.set_language(Some(language), cx);
@@ -578,6 +578,15 @@ mod tests {
 
         assert_selected_language_for_editor(&workspace, &rust_editor, Some("Rust"), cx);
         assert_selected_language_for_editor(&workspace, &typescript_editor, Some("TypeScript"), cx);
+        // Ensure the empty editor's buffer has no language before asserting
+        let buffer = empty_editor.read_with(cx, |editor, cx| {
+            editor
+                .active_buffer(cx)
+                .expect("editor should have an active excerpt")
+        });
+        buffer.update(cx, |buffer, cx| {
+            buffer.set_language(None, cx);
+        });
         assert_selected_language_for_editor(&workspace, &empty_editor, None, cx);
     }
 

crates/language_tools/src/highlights_tree_view.rs πŸ”—

@@ -1,5 +1,5 @@
 use editor::{
-    Anchor, Editor, ExcerptId, HighlightKey, MultiBufferSnapshot, SelectionEffects, ToPoint,
+    Anchor, Editor, HighlightKey, MultiBufferSnapshot, SelectionEffects, ToPoint,
     scroll::Autoscroll,
 };
 use gpui::{
@@ -8,8 +8,7 @@ use gpui::{
     MouseDownEvent, MouseMoveEvent, ParentElement, Render, ScrollStrategy, SharedString, Styled,
     Task, UniformListScrollHandle, WeakEntity, Window, actions, div, rems, uniform_list,
 };
-use language::ToOffset;
-
+use language::{BufferId, Point, ToOffset};
 use menu::{SelectNext, SelectPrevious};
 use std::{mem, ops::Range};
 use theme::ActiveTheme;
@@ -114,12 +113,12 @@ impl HighlightCategory {
 
 #[derive(Debug, Clone)]
 struct HighlightEntry {
-    excerpt_id: ExcerptId,
     range: Range<Anchor>,
+    buffer_id: BufferId,
+    buffer_point_range: Range<Point>,
     range_display: SharedString,
     style: HighlightStyle,
     category: HighlightCategory,
-    sort_key: (ExcerptId, u32, u32, u32, u32),
 }
 
 /// An item in the display list: either a separator between excerpts or a highlight entry.
@@ -319,20 +318,18 @@ impl HighlightsTreeView {
         display_map.update(cx, |display_map, cx| {
             for (key, text_highlights) in display_map.all_text_highlights() {
                 for range in &text_highlights.1 {
-                    let excerpt_id = range.start.excerpt_id;
-                    let (range_display, sort_key) = format_anchor_range(
-                        range,
-                        excerpt_id,
-                        &multi_buffer_snapshot,
-                        is_singleton,
-                    );
+                    let Some((range_display, buffer_id, buffer_point_range)) =
+                        format_anchor_range(range, &multi_buffer_snapshot)
+                    else {
+                        continue;
+                    };
                     entries.push(HighlightEntry {
-                        excerpt_id,
                         range: range.clone(),
+                        buffer_id,
                         range_display,
                         style: text_highlights.0,
                         category: HighlightCategory::Text(*key),
-                        sort_key,
+                        buffer_point_range,
                     });
                 }
             }
@@ -345,13 +342,11 @@ impl HighlightsTreeView {
                         .and_then(|buf| buf.read(cx).language().map(|l| l.name()));
                     for token in tokens.iter() {
                         let range = token.range.start..token.range.end;
-                        let excerpt_id = range.start.excerpt_id;
-                        let (range_display, sort_key) = format_anchor_range(
-                            &range,
-                            excerpt_id,
-                            &multi_buffer_snapshot,
-                            is_singleton,
-                        );
+                        let Some((range_display, entry_buffer_id, buffer_point_range)) =
+                            format_anchor_range(&range, &multi_buffer_snapshot)
+                        else {
+                            continue;
+                        };
                         let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                             token.server_id,
                             language_name.as_ref(),
@@ -388,8 +383,8 @@ impl HighlightsTreeView {
                                 });
 
                         entries.push(HighlightEntry {
-                            excerpt_id,
                             range,
+                            buffer_id: entry_buffer_id,
                             range_display,
                             style: interner[token.style],
                             category: HighlightCategory::SemanticToken {
@@ -399,7 +394,7 @@ impl HighlightsTreeView {
                                     .map(SharedString::from),
                                 theme_key,
                             },
-                            sort_key,
+                            buffer_point_range,
                         });
                     }
                 }
@@ -407,7 +402,13 @@ impl HighlightsTreeView {
         });
 
         let syntax_theme = cx.theme().syntax().clone();
-        for (excerpt_id, buffer_snapshot, excerpt_range) in multi_buffer_snapshot.excerpts() {
+        for excerpt_range in multi_buffer_snapshot.excerpts() {
+            let Some(buffer_snapshot) =
+                multi_buffer_snapshot.buffer_for_id(excerpt_range.context.start.buffer_id)
+            else {
+                continue;
+            };
+
             let start_offset = excerpt_range.context.start.to_offset(buffer_snapshot);
             let end_offset = excerpt_range.context.end.to_offset(buffer_snapshot);
             let range = start_offset..end_offset;
@@ -438,8 +439,8 @@ impl HighlightsTreeView {
                 let start_anchor = buffer_snapshot.anchor_before(capture.node.start_byte());
                 let end_anchor = buffer_snapshot.anchor_after(capture.node.end_byte());
 
-                let start = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, start_anchor);
-                let end = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, end_anchor);
+                let start = multi_buffer_snapshot.anchor_in_excerpt(start_anchor);
+                let end = multi_buffer_snapshot.anchor_in_excerpt(end_anchor);
 
                 let (start, end) = match (start, end) {
                     (Some(s), Some(e)) => (s, e),
@@ -447,29 +448,38 @@ impl HighlightsTreeView {
                 };
 
                 let range = start..end;
-                let (range_display, sort_key) =
-                    format_anchor_range(&range, excerpt_id, &multi_buffer_snapshot, is_singleton);
+                let Some((range_display, buffer_id, buffer_point_range)) =
+                    format_anchor_range(&range, &multi_buffer_snapshot)
+                else {
+                    continue;
+                };
 
                 entries.push(HighlightEntry {
-                    excerpt_id,
                     range,
+                    buffer_id,
                     range_display,
                     style,
                     category: HighlightCategory::SyntaxToken {
                         capture_name,
                         theme_key,
                     },
-                    sort_key,
+                    buffer_point_range,
                 });
             }
         }
 
         entries.sort_by(|a, b| {
-            a.sort_key
-                .cmp(&b.sort_key)
+            a.buffer_id
+                .cmp(&b.buffer_id)
+                .then_with(|| a.buffer_point_range.start.cmp(&b.buffer_point_range.start))
+                .then_with(|| a.buffer_point_range.end.cmp(&b.buffer_point_range.end))
                 .then_with(|| a.category.cmp(&b.category))
         });
-        entries.dedup_by(|a, b| a.sort_key == b.sort_key && a.category == b.category);
+        entries.dedup_by(|a, b| {
+            a.buffer_id == b.buffer_id
+                && a.buffer_point_range == b.buffer_point_range
+                && a.category == b.category
+        });
 
         self.cached_entries = entries;
         self.rebuild_display_items(&multi_buffer_snapshot, cx);
@@ -485,7 +495,7 @@ impl HighlightsTreeView {
     fn rebuild_display_items(&mut self, snapshot: &MultiBufferSnapshot, cx: &App) {
         self.display_items.clear();
 
-        let mut last_excerpt_id: Option<ExcerptId> = None;
+        let mut last_range_end: Option<Anchor> = None;
 
         for (entry_ix, entry) in self.cached_entries.iter().enumerate() {
             if !self.should_show_entry(entry) {
@@ -493,11 +503,14 @@ impl HighlightsTreeView {
             }
 
             if !self.is_singleton {
-                let excerpt_changed =
-                    last_excerpt_id.is_none_or(|last_id| last_id != entry.excerpt_id);
+                let excerpt_changed = last_range_end.is_none_or(|anchor| {
+                    snapshot
+                        .excerpt_containing(anchor..entry.range.start)
+                        .is_none()
+                });
                 if excerpt_changed {
-                    last_excerpt_id = Some(entry.excerpt_id);
-                    let label = excerpt_label_for(entry.excerpt_id, snapshot, cx);
+                    last_range_end = Some(entry.range.end);
+                    let label = excerpt_label_for(entry, snapshot, cx);
                     self.display_items
                         .push(DisplayItem::ExcerptSeparator { label });
                 }
@@ -516,10 +529,6 @@ impl HighlightsTreeView {
     }
 
     fn scroll_to_cursor_position(&mut self, cursor: &Anchor, snapshot: &MultiBufferSnapshot) {
-        let cursor_point = cursor.to_point(snapshot);
-        let cursor_key = (cursor_point.row, cursor_point.column);
-        let cursor_excerpt = cursor.excerpt_id;
-
         let best = self
             .display_items
             .iter()
@@ -532,17 +541,18 @@ impl HighlightsTreeView {
                 _ => None,
             })
             .filter(|(_, _, entry)| {
-                let (excerpt_id, start_row, start_col, end_row, end_col) = entry.sort_key;
-                if !self.is_singleton && excerpt_id != cursor_excerpt {
-                    return false;
-                }
-                let start = (start_row, start_col);
-                let end = (end_row, end_col);
-                cursor_key >= start && cursor_key <= end
+                entry.range.start.cmp(&cursor, snapshot).is_le()
+                    && cursor.cmp(&entry.range.end, snapshot).is_lt()
             })
             .min_by_key(|(_, _, entry)| {
-                let (_, start_row, start_col, end_row, end_col) = entry.sort_key;
-                (end_row - start_row, end_col.saturating_sub(start_col))
+                (
+                    entry.buffer_point_range.end.row - entry.buffer_point_range.start.row,
+                    entry
+                        .buffer_point_range
+                        .end
+                        .column
+                        .saturating_sub(entry.buffer_point_range.start.column),
+                )
             })
             .map(|(display_ix, entry_ix, _)| (display_ix, entry_ix));
 
@@ -1076,12 +1086,13 @@ impl ToolbarItemView for HighlightsTreeToolbarItemView {
 }
 
 fn excerpt_label_for(
-    excerpt_id: ExcerptId,
+    entry: &HighlightEntry,
     snapshot: &MultiBufferSnapshot,
     cx: &App,
 ) -> SharedString {
-    let buffer = snapshot.buffer_for_excerpt(excerpt_id);
-    let path_label = buffer
+    let path_label = snapshot
+        .anchor_to_buffer_anchor(entry.range.start)
+        .and_then(|(anchor, _)| snapshot.buffer_for_id(anchor.buffer_id))
         .and_then(|buf| buf.file())
         .map(|file| {
             let full_path = file.full_path(cx);
@@ -1093,50 +1104,21 @@ fn excerpt_label_for(
 
 fn format_anchor_range(
     range: &Range<Anchor>,
-    excerpt_id: ExcerptId,
     snapshot: &MultiBufferSnapshot,
-    is_singleton: bool,
-) -> (SharedString, (ExcerptId, u32, u32, u32, u32)) {
-    if is_singleton {
-        let start = range.start.to_point(snapshot);
-        let end = range.end.to_point(snapshot);
-        let display = SharedString::from(format!(
-            "[{}:{} - {}:{}]",
-            start.row + 1,
-            start.column + 1,
-            end.row + 1,
-            end.column + 1,
-        ));
-        let sort_key = (excerpt_id, start.row, start.column, end.row, end.column);
-        (display, sort_key)
-    } else {
-        let buffer = snapshot.buffer_for_excerpt(excerpt_id);
-        if let Some(buffer) = buffer {
-            let start = language::ToPoint::to_point(&range.start.text_anchor, buffer);
-            let end = language::ToPoint::to_point(&range.end.text_anchor, buffer);
-            let display = SharedString::from(format!(
-                "[{}:{} - {}:{}]",
-                start.row + 1,
-                start.column + 1,
-                end.row + 1,
-                end.column + 1,
-            ));
-            let sort_key = (excerpt_id, start.row, start.column, end.row, end.column);
-            (display, sort_key)
-        } else {
-            let start = range.start.to_point(snapshot);
-            let end = range.end.to_point(snapshot);
-            let display = SharedString::from(format!(
-                "[{}:{} - {}:{}]",
-                start.row + 1,
-                start.column + 1,
-                end.row + 1,
-                end.column + 1,
-            ));
-            let sort_key = (excerpt_id, start.row, start.column, end.row, end.column);
-            (display, sort_key)
-        }
-    }
+) -> Option<(SharedString, BufferId, Range<Point>)> {
+    let start = range.start.to_point(snapshot);
+    let end = range.end.to_point(snapshot);
+    let ((start_buffer, start), (_, end)) = snapshot
+        .point_to_buffer_point(start)
+        .zip(snapshot.point_to_buffer_point(end))?;
+    let display = SharedString::from(format!(
+        "[{}:{} - {}:{}]",
+        start.row + 1,
+        start.column + 1,
+        end.row + 1,
+        end.column + 1,
+    ));
+    Some((display, start_buffer.remote_id(), start..end))
 }
 
 fn render_style_preview(style: HighlightStyle, selected: bool, cx: &App) -> Div {

crates/language_tools/src/lsp_button.rs πŸ”—

@@ -1179,13 +1179,20 @@ impl StatusItemView for LspButton {
                         .and_then(|active_editor| active_editor.editor.upgrade())
                         .as_ref()
                 {
-                    let editor_buffers =
-                        HashSet::from_iter(editor.read(cx).buffer().read(cx).excerpt_buffer_ids());
+                    let editor_buffers = HashSet::from_iter(
+                        editor
+                            .read(cx)
+                            .buffer()
+                            .read(cx)
+                            .snapshot(cx)
+                            .excerpts()
+                            .map(|excerpt| excerpt.context.start.buffer_id),
+                    );
                     let _editor_subscription = cx.subscribe_in(
                         &editor,
                         window,
                         |lsp_button, _, e: &EditorEvent, window, cx| match e {
-                            EditorEvent::ExcerptsAdded { buffer, .. } => {
+                            EditorEvent::BufferRangesUpdated { buffer, .. } => {
                                 let updated = lsp_button.server_state.update(cx, |state, cx| {
                                     if let Some(active_editor) = state.active_editor.as_mut() {
                                         let buffer_id = buffer.read(cx).remote_id();
@@ -1198,9 +1205,7 @@ impl StatusItemView for LspButton {
                                     lsp_button.refresh_lsp_menu(false, window, cx);
                                 }
                             }
-                            EditorEvent::ExcerptsRemoved {
-                                removed_buffer_ids, ..
-                            } => {
+                            EditorEvent::BuffersRemoved { removed_buffer_ids } => {
                                 let removed = lsp_button.server_state.update(cx, |state, _| {
                                     let mut removed = false;
                                     if let Some(active_editor) = state.active_editor.as_mut() {

crates/language_tools/src/syntax_tree_view.rs πŸ”—

@@ -1,7 +1,6 @@
 use command_palette_hooks::CommandPaletteFilter;
 use editor::{
-    Anchor, Editor, ExcerptId, HighlightKey, MultiBufferOffset, SelectionEffects,
-    scroll::Autoscroll,
+    Anchor, Editor, HighlightKey, MultiBufferOffset, SelectionEffects, scroll::Autoscroll,
 };
 use gpui::{
     App, AppContext as _, Context, Div, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
@@ -125,7 +124,6 @@ impl EditorState {
 #[derive(Clone)]
 struct BufferState {
     buffer: Entity<Buffer>,
-    excerpt_id: ExcerptId,
     active_layer: Option<OwnedSyntaxLayer>,
 }
 
@@ -253,18 +251,18 @@ impl SyntaxTreeView {
         let snapshot = editor_state
             .editor
             .update(cx, |editor, cx| editor.snapshot(window, cx));
-        let (buffer, range, excerpt_id) = editor_state.editor.update(cx, |editor, cx| {
+        let (buffer, range) = editor_state.editor.update(cx, |editor, cx| {
             let selection_range = editor
                 .selections
                 .last::<MultiBufferOffset>(&editor.display_snapshot(cx))
                 .range();
             let multi_buffer = editor.buffer().read(cx);
-            let (buffer, range, excerpt_id) = snapshot
+            let (buffer, range, _) = snapshot
                 .buffer_snapshot()
-                .range_to_buffer_ranges(selection_range.start..=selection_range.end)
+                .range_to_buffer_ranges(selection_range.start..selection_range.end)
                 .pop()?;
             let buffer = multi_buffer.buffer(buffer.remote_id()).unwrap();
-            Some((buffer, range, excerpt_id))
+            Some((buffer, range))
         })?;
 
         // If the cursor has moved into a different excerpt, retrieve a new syntax layer
@@ -273,16 +271,14 @@ impl SyntaxTreeView {
             .active_buffer
             .get_or_insert_with(|| BufferState {
                 buffer: buffer.clone(),
-                excerpt_id,
                 active_layer: None,
             });
         let mut prev_layer = None;
         if did_reparse {
             prev_layer = buffer_state.active_layer.take();
         }
-        if buffer_state.buffer != buffer || buffer_state.excerpt_id != excerpt_id {
+        if buffer_state.buffer != buffer {
             buffer_state.buffer = buffer.clone();
-            buffer_state.excerpt_id = excerpt_id;
             buffer_state.active_layer = None;
         }
 
@@ -360,8 +356,7 @@ impl SyntaxTreeView {
         // Build a multibuffer anchor range.
         let multibuffer = editor_state.editor.read(cx).buffer();
         let multibuffer = multibuffer.read(cx).snapshot(cx);
-        let excerpt_id = buffer_state.excerpt_id;
-        let range = multibuffer.anchor_range_in_excerpt(excerpt_id, range)?;
+        let range = multibuffer.buffer_anchor_range_to_anchor_range(range)?;
         let key = cx.entity_id().as_u64() as usize;
 
         // Update the editor with the anchor range.

crates/languages/src/eslint.rs 🔗

@@ -7,8 +7,10 @@ use http_client::{
 };
 use language::{LspAdapter, LspAdapterDelegate, LspInstaller, Toolchain};
 use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerName, Uri};
-use node_runtime::NodeRuntime;
+use node_runtime::{NodeRuntime, read_package_installed_version};
+use project::Fs;
 use project::lsp_store::language_server_settings_for;
+use semver::Version;
 use serde::{Deserialize, Serialize};
 use serde_json::{Value, json};
 use settings::SettingsLocation;
@@ -31,11 +33,12 @@ fn eslint_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
 
 pub struct EsLintLspAdapter {
     node: NodeRuntime,
+    fs: Arc<dyn Fs>,
 }
 
 impl EsLintLspAdapter {
-    const CURRENT_VERSION: &'static str = "2.4.4";
-    const CURRENT_VERSION_TAG_NAME: &'static str = "release/2.4.4";
+    const CURRENT_VERSION: &'static str = "3.0.24";
+    const CURRENT_VERSION_TAG_NAME: &'static str = "release/3.0.24";
 
     #[cfg(not(windows))]
     const GITHUB_ASSET_KIND: AssetKind = AssetKind::TarGz;
@@ -45,7 +48,10 @@ impl EsLintLspAdapter {
     const SERVER_PATH: &'static str = "vscode-eslint/server/out/eslintServer.js";
     const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("eslint");
 
-    const FLAT_CONFIG_FILE_NAMES: &'static [&'static str] = &[
+    const FLAT_CONFIG_FILE_NAMES_V8_21: &'static [&'static str] = &["eslint.config.js"];
+    const FLAT_CONFIG_FILE_NAMES_V8_57: &'static [&'static str] =
+        &["eslint.config.js", "eslint.config.mjs", "eslint.config.cjs"];
+    const FLAT_CONFIG_FILE_NAMES_V10: &'static [&'static str] = &[
         "eslint.config.js",
         "eslint.config.mjs",
         "eslint.config.cjs",
@@ -53,9 +59,17 @@ impl EsLintLspAdapter {
         "eslint.config.cts",
         "eslint.config.mts",
     ];
+    const LEGACY_CONFIG_FILE_NAMES: &'static [&'static str] = &[
+        ".eslintrc",
+        ".eslintrc.js",
+        ".eslintrc.cjs",
+        ".eslintrc.yaml",
+        ".eslintrc.yml",
+        ".eslintrc.json",
+    ];
 
-    pub fn new(node: NodeRuntime) -> Self {
-        EsLintLspAdapter { node }
+    pub fn new(node: NodeRuntime, fs: Arc<dyn Fs>) -> Self {
+        EsLintLspAdapter { node, fs }
     }
 
     fn build_destination_path(container_dir: &Path) -> PathBuf {
@@ -73,7 +87,7 @@ impl LspInstaller for EsLintLspAdapter {
         _: &mut AsyncApp,
     ) -> Result<GitHubLspBinaryVersion> {
         let url = build_asset_url(
-            "zed-industries/vscode-eslint",
+            "microsoft/vscode-eslint",
             Self::CURRENT_VERSION_TAG_NAME,
             Self::GITHUB_ASSET_KIND,
         )?;
@@ -148,6 +162,7 @@ impl LspInstaller for EsLintLspAdapter {
     ) -> Option<LanguageServerBinary> {
         let server_path =
             Self::build_destination_path(&container_dir).join(EsLintLspAdapter::SERVER_PATH);
+        fs::metadata(&server_path).await.ok()?;
         Some(LanguageServerBinary {
             path: self.node.binary_path().await.ok()?,
             env: None,
@@ -156,6 +171,42 @@ impl LspInstaller for EsLintLspAdapter {
     }
 }
 
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum EslintConfigKind {
+    Flat,
+    Legacy,
+}
+
+#[derive(Debug, Default, Clone, PartialEq, Eq)]
+struct EslintSettingsOverrides {
+    use_flat_config: Option<bool>,
+    experimental_use_flat_config: Option<bool>,
+}
+
+impl EslintSettingsOverrides {
+    fn apply_to(self, workspace_configuration: &mut Value) {
+        if let Some(use_flat_config) = self.use_flat_config
+            && let Some(workspace_configuration) = workspace_configuration.as_object_mut()
+        {
+            workspace_configuration.insert("useFlatConfig".to_string(), json!(use_flat_config));
+        }
+
+        if let Some(experimental_use_flat_config) = self.experimental_use_flat_config
+            && let Some(workspace_configuration) = workspace_configuration.as_object_mut()
+        {
+            let experimental = workspace_configuration
+                .entry("experimental")
+                .or_insert_with(|| json!({}));
+            if let Some(experimental) = experimental.as_object_mut() {
+                experimental.insert(
+                    "useFlatConfig".to_string(),
+                    json!(experimental_use_flat_config),
+                );
+            }
+        }
+    }
+}
+
 #[async_trait(?Send)]
 impl LspAdapter for EsLintLspAdapter {
     fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
@@ -173,9 +224,26 @@ impl LspAdapter for EsLintLspAdapter {
         cx: &mut AsyncApp,
     ) -> Result<Value> {
         let worktree_root = delegate.worktree_root_path();
-        let use_flat_config = Self::FLAT_CONFIG_FILE_NAMES
-            .iter()
-            .any(|file| worktree_root.join(file).is_file());
+        let requested_file_path = requested_uri
+            .as_ref()
+            .filter(|uri| uri.scheme() == "file")
+            .and_then(|uri| uri.to_file_path().ok())
+            .filter(|path| path.starts_with(worktree_root));
+        let eslint_version = find_eslint_version(
+            delegate.as_ref(),
+            worktree_root,
+            requested_file_path.as_deref(),
+        )
+        .await?;
+        let config_kind = find_eslint_config_kind(
+            worktree_root,
+            requested_file_path.as_deref(),
+            eslint_version.as_ref(),
+            self.fs.as_ref(),
+        )
+        .await;
+        let eslint_settings_overrides =
+            eslint_settings_overrides_for(eslint_version.as_ref(), config_kind);
 
         let mut default_workspace_configuration = json!({
             "validate": "on",
@@ -205,26 +273,13 @@ impl LspAdapter for EsLintLspAdapter {
                 "showDocumentation": {
                     "enable": true
                 }
-            },
-            "experimental": {
-                "useFlatConfig": use_flat_config,
             }
         });
+        eslint_settings_overrides.apply_to(&mut default_workspace_configuration);
 
-        let file_path = requested_uri
+        let file_path = requested_file_path
             .as_ref()
-            .and_then(|uri| {
-                (uri.scheme() == "file")
-                    .then(|| uri.to_file_path().ok())
-                    .flatten()
-            })
-            .and_then(|abs_path| {
-                abs_path
-                    .strip_prefix(&worktree_root)
-                    .ok()
-                    .map(ToOwned::to_owned)
-            });
-        let file_path = file_path
+            .and_then(|abs_path| abs_path.strip_prefix(worktree_root).ok())
             .and_then(|p| RelPath::unix(&p).ok().map(ToOwned::to_owned))
             .unwrap_or_else(|| RelPath::empty().to_owned());
         let override_options = cx.update(|cx| {
@@ -271,6 +326,109 @@ impl LspAdapter for EsLintLspAdapter {
     }
 }
 
+fn ancestor_directories<'a>(
+    worktree_root: &'a Path,
+    requested_file: Option<&'a Path>,
+) -> impl Iterator<Item = &'a Path> + 'a {
+    let start = requested_file
+        .filter(|file| file.starts_with(worktree_root))
+        .and_then(Path::parent)
+        .unwrap_or(worktree_root);
+
+    start
+        .ancestors()
+        .take_while(move |dir| dir.starts_with(worktree_root))
+}
+
+fn flat_config_file_names(version: Option<&Version>) -> &'static [&'static str] {
+    match version {
+        Some(version) if version.major >= 10 => EsLintLspAdapter::FLAT_CONFIG_FILE_NAMES_V10,
+        Some(version) if version.major == 9 => EsLintLspAdapter::FLAT_CONFIG_FILE_NAMES_V8_57,
+        Some(version) if version.major == 8 && version.minor >= 57 => {
+            EsLintLspAdapter::FLAT_CONFIG_FILE_NAMES_V8_57
+        }
+        Some(version) if version.major == 8 && version.minor >= 21 => {
+            EsLintLspAdapter::FLAT_CONFIG_FILE_NAMES_V8_21
+        }
+        _ => &[],
+    }
+}
+
+async fn find_eslint_config_kind(
+    worktree_root: &Path,
+    requested_file: Option<&Path>,
+    version: Option<&Version>,
+    fs: &dyn Fs,
+) -> Option<EslintConfigKind> {
+    let flat_config_file_names = flat_config_file_names(version);
+
+    for directory in ancestor_directories(worktree_root, requested_file) {
+        for file_name in flat_config_file_names {
+            if fs.is_file(&directory.join(file_name)).await {
+                return Some(EslintConfigKind::Flat);
+            }
+        }
+
+        for file_name in EsLintLspAdapter::LEGACY_CONFIG_FILE_NAMES {
+            if fs.is_file(&directory.join(file_name)).await {
+                return Some(EslintConfigKind::Legacy);
+            }
+        }
+    }
+
+    None
+}
+
+fn eslint_settings_overrides_for(
+    version: Option<&Version>,
+    config_kind: Option<EslintConfigKind>,
+) -> EslintSettingsOverrides {
+    // vscode-eslint 3.x already discovers config files and chooses a working
+    // directory from the active file on its own. Zed only overrides settings
+    // for the two cases where leaving everything unset is known to be wrong:
+    //
+    // - ESLint 8.21-8.56 flat config still needs experimental.useFlatConfig.
+    // - ESLint 9.x legacy config needs useFlatConfig = false.
+    //
+    // All other cases should defer to the server's own defaults and discovery.
+    let Some(version) = version else {
+        return EslintSettingsOverrides::default();
+    };
+
+    match config_kind {
+        Some(EslintConfigKind::Flat) if version.major == 8 && (21..57).contains(&version.minor) => {
+            EslintSettingsOverrides {
+                use_flat_config: None,
+                experimental_use_flat_config: Some(true),
+            }
+        }
+        Some(EslintConfigKind::Legacy) if version.major == 9 => EslintSettingsOverrides {
+            use_flat_config: Some(false),
+            experimental_use_flat_config: None,
+        },
+        _ => EslintSettingsOverrides::default(),
+    }
+}
+
+async fn find_eslint_version(
+    delegate: &dyn LspAdapterDelegate,
+    worktree_root: &Path,
+    requested_file: Option<&Path>,
+) -> Result<Option<Version>> {
+    for directory in ancestor_directories(worktree_root, requested_file) {
+        if let Some(version) =
+            read_package_installed_version(directory.join("node_modules"), "eslint").await?
+        {
+            return Ok(Some(version));
+        }
+    }
+
+    Ok(delegate
+        .npm_package_installed_version("eslint")
+        .await?
+        .map(|(_, version)| version))
+}
+
 /// On Windows, converts Unix-style separators (/) to Windows-style (\).
 /// On Unix, returns the path unchanged
 fn normalize_path_separators(path: &str) -> String {
@@ -623,6 +781,217 @@ mod tests {
         }
     }
 
+    mod eslint_settings {
+        use super::*;
+        use ::fs::FakeFs;
+        use gpui::TestAppContext;
+
+        #[test]
+        fn test_ancestor_directories_for_package_local_file() {
+            let worktree_root = PathBuf::from(unix_path_to_platform("/workspace"));
+            let requested_file = PathBuf::from(unix_path_to_platform(
+                "/workspace/packages/web/src/index.js",
+            ));
+
+            let directories: Vec<&Path> =
+                ancestor_directories(&worktree_root, Some(&requested_file)).collect();
+
+            assert_eq!(
+                directories,
+                vec![
+                    Path::new(&unix_path_to_platform("/workspace/packages/web/src")),
+                    Path::new(&unix_path_to_platform("/workspace/packages/web")),
+                    Path::new(&unix_path_to_platform("/workspace/packages")),
+                    Path::new(&unix_path_to_platform("/workspace")),
+                ]
+            );
+        }
+
+        #[test]
+        fn test_eslint_8_flat_root_repo_uses_experimental_flag() {
+            let version = Version::parse("8.56.0").expect("valid ESLint version");
+            let settings =
+                eslint_settings_overrides_for(Some(&version), Some(EslintConfigKind::Flat));
+
+            assert_eq!(
+                settings,
+                EslintSettingsOverrides {
+                    use_flat_config: None,
+                    experimental_use_flat_config: Some(true),
+                }
+            );
+        }
+
+        #[test]
+        fn test_eslint_8_57_flat_repo_uses_no_override() {
+            let version = Version::parse("8.57.0").expect("valid ESLint version");
+            let settings =
+                eslint_settings_overrides_for(Some(&version), Some(EslintConfigKind::Flat));
+
+            assert_eq!(settings, EslintSettingsOverrides::default());
+        }
+
+        #[test]
+        fn test_eslint_9_legacy_repo_uses_use_flat_config_false() {
+            let version = Version::parse("9.0.0").expect("valid ESLint version");
+            let settings =
+                eslint_settings_overrides_for(Some(&version), Some(EslintConfigKind::Legacy));
+
+            assert_eq!(
+                settings,
+                EslintSettingsOverrides {
+                    use_flat_config: Some(false),
+                    experimental_use_flat_config: None,
+                }
+            );
+        }
+
+        #[test]
+        fn test_eslint_10_repo_uses_no_override() {
+            let version = Version::parse("10.0.0").expect("valid ESLint version");
+            let settings =
+                eslint_settings_overrides_for(Some(&version), Some(EslintConfigKind::Flat));
+
+            assert_eq!(settings, EslintSettingsOverrides::default());
+        }
+
+        #[gpui::test]
+        async fn test_eslint_8_56_does_not_treat_cjs_as_flat_config(cx: &mut TestAppContext) {
+            let fs = FakeFs::new(cx.executor());
+            fs.insert_tree(
+                unix_path_to_platform("/workspace"),
+                json!({ "eslint.config.cjs": "" }),
+            )
+            .await;
+            let worktree_root = PathBuf::from(unix_path_to_platform("/workspace"));
+            let requested_file = PathBuf::from(unix_path_to_platform("/workspace/src/index.js"));
+            let version = Version::parse("8.56.0").expect("valid ESLint version");
+
+            let config_kind = find_eslint_config_kind(
+                &worktree_root,
+                Some(&requested_file),
+                Some(&version),
+                fs.as_ref(),
+            )
+            .await;
+
+            assert_eq!(config_kind, None);
+        }
+
+        #[gpui::test]
+        async fn test_eslint_8_57_treats_cjs_as_flat_config(cx: &mut TestAppContext) {
+            let fs = FakeFs::new(cx.executor());
+            fs.insert_tree(
+                unix_path_to_platform("/workspace"),
+                json!({ "eslint.config.cjs": "" }),
+            )
+            .await;
+            let worktree_root = PathBuf::from(unix_path_to_platform("/workspace"));
+            let requested_file = PathBuf::from(unix_path_to_platform("/workspace/src/index.js"));
+            let version = Version::parse("8.57.0").expect("valid ESLint version");
+
+            let config_kind = find_eslint_config_kind(
+                &worktree_root,
+                Some(&requested_file),
+                Some(&version),
+                fs.as_ref(),
+            )
+            .await;
+
+            assert_eq!(config_kind, Some(EslintConfigKind::Flat));
+        }
+
+        #[gpui::test]
+        async fn test_eslint_10_treats_typescript_config_as_flat_config(cx: &mut TestAppContext) {
+            let fs = FakeFs::new(cx.executor());
+            fs.insert_tree(
+                unix_path_to_platform("/workspace"),
+                json!({ "eslint.config.ts": "" }),
+            )
+            .await;
+            let worktree_root = PathBuf::from(unix_path_to_platform("/workspace"));
+            let requested_file = PathBuf::from(unix_path_to_platform("/workspace/src/index.js"));
+            let version = Version::parse("10.0.0").expect("valid ESLint version");
+
+            let config_kind = find_eslint_config_kind(
+                &worktree_root,
+                Some(&requested_file),
+                Some(&version),
+                fs.as_ref(),
+            )
+            .await;
+
+            assert_eq!(config_kind, Some(EslintConfigKind::Flat));
+        }
+
+        #[gpui::test]
+        async fn test_package_local_flat_config_is_preferred_for_monorepo_file(
+            cx: &mut TestAppContext,
+        ) {
+            let fs = FakeFs::new(cx.executor());
+            fs.insert_tree(
+                unix_path_to_platform("/workspace"),
+                json!({
+                    "eslint.config.js": "",
+                    "packages": {
+                        "web": {
+                            "eslint.config.js": ""
+                        }
+                    }
+                }),
+            )
+            .await;
+            let worktree_root = PathBuf::from(unix_path_to_platform("/workspace"));
+            let requested_file = PathBuf::from(unix_path_to_platform(
+                "/workspace/packages/web/src/index.js",
+            ));
+            let version = Version::parse("8.56.0").expect("valid ESLint version");
+
+            let config_kind = find_eslint_config_kind(
+                &worktree_root,
+                Some(&requested_file),
+                Some(&version),
+                fs.as_ref(),
+            )
+            .await;
+
+            assert_eq!(config_kind, Some(EslintConfigKind::Flat));
+        }
+
+        #[gpui::test]
+        async fn test_package_local_legacy_config_is_detected_for_eslint_9(
+            cx: &mut TestAppContext,
+        ) {
+            let fs = FakeFs::new(cx.executor());
+            fs.insert_tree(
+                unix_path_to_platform("/workspace"),
+                json!({
+                    "packages": {
+                        "web": {
+                            ".eslintrc.cjs": ""
+                        }
+                    }
+                }),
+            )
+            .await;
+            let worktree_root = PathBuf::from(unix_path_to_platform("/workspace"));
+            let requested_file = PathBuf::from(unix_path_to_platform(
+                "/workspace/packages/web/src/index.js",
+            ));
+            let version = Version::parse("9.0.0").expect("valid ESLint version");
+
+            let config_kind = find_eslint_config_kind(
+                &worktree_root,
+                Some(&requested_file),
+                Some(&version),
+                fs.as_ref(),
+            )
+            .await;
+
+            assert_eq!(config_kind, Some(EslintConfigKind::Legacy));
+        }
+    }
+
     #[cfg(windows)]
     mod windows_style_paths {
         use super::*;

crates/languages/src/lib.rs 🔗

@@ -59,7 +59,7 @@ pub fn init(languages: Arc<LanguageRegistry>, fs: Arc<dyn Fs>, node: NodeRuntime
 
     let c_lsp_adapter = Arc::new(c::CLspAdapter);
     let css_lsp_adapter = Arc::new(css::CssLspAdapter::new(node.clone()));
-    let eslint_adapter = Arc::new(eslint::EsLintLspAdapter::new(node.clone()));
+    let eslint_adapter = Arc::new(eslint::EsLintLspAdapter::new(node.clone(), fs.clone()));
     let go_context_provider = Arc::new(go::GoContextProvider);
     let go_lsp_adapter = Arc::new(go::GoLspAdapter);
     let json_context_provider = Arc::new(JsonTaskProvider);

crates/line_ending_selector/src/line_ending_indicator.rs 🔗

@@ -18,7 +18,7 @@ impl LineEndingIndicator {
         self.line_ending = None;
         self.active_editor = None;
 
-        if let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) {
+        if let Some(buffer) = editor.read(cx).active_buffer(cx) {
             let line_ending = buffer.read(cx).line_ending();
             self.line_ending = Some(line_ending);
             self.active_editor = Some(editor.downgrade());

crates/line_ending_selector/src/line_ending_selector.rs 🔗

@@ -40,7 +40,7 @@ impl LineEndingSelector {
     fn toggle(editor: &WeakEntity<Editor>, window: &mut Window, cx: &mut App) {
         let Some((workspace, buffer)) = editor
             .update(cx, |editor, cx| {
-                Some((editor.workspace()?, editor.active_excerpt(cx)?.1))
+                Some((editor.workspace()?, editor.active_buffer(cx)?))
             })
             .ok()
             .flatten()

crates/markdown/src/html/html_rendering.rs 🔗

@@ -497,7 +497,10 @@ mod tests {
     use gpui::{TestAppContext, size};
     use ui::prelude::*;
 
-    use crate::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownOptions, MarkdownStyle};
+    use crate::{
+        CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownOptions,
+        MarkdownStyle,
+    };
 
     fn ensure_theme_initialized(cx: &mut TestAppContext) {
         cx.update(|cx| {
@@ -530,8 +533,7 @@ mod tests {
             |_window, _cx| {
                 MarkdownElement::new(markdown, MarkdownStyle::default()).code_block_renderer(
                     CodeBlockRenderer::Default {
-                        copy_button: false,
-                        copy_button_on_hover: false,
+                        copy_button_visibility: CopyButtonVisibility::Hidden,
                         border: false,
                     },
                 )
@@ -591,8 +593,7 @@ mod tests {
             |_window, _cx| {
                 MarkdownElement::new(markdown, MarkdownStyle::default()).code_block_renderer(
                     CodeBlockRenderer::Default {
-                        copy_button: false,
-                        copy_button_on_hover: false,
+                        copy_button_visibility: CopyButtonVisibility::Hidden,
                         border: false,
                     },
                 )

crates/markdown/src/markdown.rs 🔗

@@ -270,10 +270,16 @@ pub struct MarkdownOptions {
     pub render_mermaid_diagrams: bool,
 }
 
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum CopyButtonVisibility {
+    Hidden,
+    AlwaysVisible,
+    VisibleOnHover,
+}
+
 pub enum CodeBlockRenderer {
     Default {
-        copy_button: bool,
-        copy_button_on_hover: bool,
+        copy_button_visibility: CopyButtonVisibility,
         border: bool,
     },
     Custom {
@@ -826,8 +832,7 @@ impl MarkdownElement {
             markdown,
             style,
             code_block_renderer: CodeBlockRenderer::Default {
-                copy_button: true,
-                copy_button_on_hover: false,
+                copy_button_visibility: CopyButtonVisibility::VisibleOnHover,
                 border: false,
             },
             on_url_click: None,
@@ -1686,38 +1691,10 @@ impl Element for MarkdownElement {
                         builder.pop_text_style();
 
                         if let CodeBlockRenderer::Default {
-                            copy_button: true, ..
-                        } = &self.code_block_renderer
-                        {
-                            builder.modify_current_div(|el| {
-                                let content_range = parser::extract_code_block_content_range(
-                                    &parsed_markdown.source()[range.clone()],
-                                );
-                                let content_range = content_range.start + range.start
-                                    ..content_range.end + range.start;
-
-                                let code = parsed_markdown.source()[content_range].to_string();
-                                let codeblock = render_copy_code_block_button(
-                                    range.end,
-                                    code,
-                                    self.markdown.clone(),
-                                );
-                                el.child(
-                                    h_flex()
-                                        .w_4()
-                                        .absolute()
-                                        .top_1p5()
-                                        .right_1p5()
-                                        .justify_end()
-                                        .child(codeblock),
-                                )
-                            });
-                        }
-
-                        if let CodeBlockRenderer::Default {
-                            copy_button_on_hover: true,
+                            copy_button_visibility,
                             ..
                         } = &self.code_block_renderer
+                            && *copy_button_visibility != CopyButtonVisibility::Hidden
                         {
                             builder.modify_current_div(|el| {
                                 let content_range = parser::extract_code_block_content_range(
@@ -1736,10 +1713,17 @@ impl Element for MarkdownElement {
                                     h_flex()
                                         .w_4()
                                         .absolute()
-                                        .top_0()
-                                        .right_0()
                                         .justify_end()
-                                        .visible_on_hover("code_block")
+                                        .when_else(
+                                            *copy_button_visibility
+                                                == CopyButtonVisibility::VisibleOnHover,
+                                            |this| {
+                                                this.top_0()
+                                                    .right_0()
+                                                    .visible_on_hover("code_block")
+                                            },
+                                            |this| this.top_1p5().right_1p5(),
+                                        )
                                         .child(codeblock),
                                 )
                             });
@@ -2772,8 +2756,7 @@ mod tests {
             |_window, _cx| {
                 MarkdownElement::new(markdown, MarkdownStyle::default()).code_block_renderer(
                     CodeBlockRenderer::Default {
-                        copy_button: false,
-                        copy_button_on_hover: false,
+                        copy_button_visibility: CopyButtonVisibility::Hidden,
                         border: false,
                     },
                 )

crates/markdown/src/mermaid.rs 🔗

@@ -266,7 +266,10 @@ mod tests {
         CachedMermaidDiagram, MermaidDiagramCache, MermaidState,
         ParsedMarkdownMermaidDiagramContents, extract_mermaid_diagrams, parse_mermaid_info,
     };
-    use crate::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownOptions, MarkdownStyle};
+    use crate::{
+        CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownOptions,
+        MarkdownStyle,
+    };
     use collections::HashMap;
     use gpui::{Context, IntoElement, Render, RenderImage, TestAppContext, Window, size};
     use std::sync::Arc;
@@ -309,8 +312,7 @@ mod tests {
             |_window, _cx| {
                 MarkdownElement::new(markdown, MarkdownStyle::default()).code_block_renderer(
                     CodeBlockRenderer::Default {
-                        copy_button: false,
-                        copy_button_on_hover: false,
+                        copy_button_visibility: CopyButtonVisibility::Hidden,
                         border: false,
                     },
                 )
@@ -581,8 +583,7 @@ mod tests {
             |_window, _cx| {
                 MarkdownElement::new(markdown.clone(), MarkdownStyle::default())
                     .code_block_renderer(CodeBlockRenderer::Default {
-                        copy_button: false,
-                        copy_button_on_hover: false,
+                        copy_button_visibility: CopyButtonVisibility::Hidden,
                         border: false,
                     })
             },

crates/markdown_preview/src/markdown_preview_view.rs 🔗

@@ -13,7 +13,8 @@ use gpui::{
 };
 use language::LanguageRegistry;
 use markdown::{
-    CodeBlockRenderer, Markdown, MarkdownElement, MarkdownFont, MarkdownOptions, MarkdownStyle,
+    CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownFont,
+    MarkdownOptions, MarkdownStyle,
 };
 use settings::Settings;
 use theme_settings::ThemeSettings;
@@ -294,7 +295,7 @@ impl MarkdownPreviewView {
                     EditorEvent::Edited { .. }
                     | EditorEvent::BufferEdited { .. }
                     | EditorEvent::DirtyChanged
-                    | EditorEvent::ExcerptsEdited { .. } => {
+                    | EditorEvent::BuffersEdited { .. } => {
                         this.update_markdown_from_active_editor(true, false, window, cx);
                     }
                     EditorEvent::SelectionsChanged { .. } => {
@@ -580,20 +581,33 @@ impl MarkdownPreviewView {
             .as_ref()
             .map(|state| state.editor.clone());
 
+        let mut workspace_directory = None;
+        if let Some(workspace_entity) = self.workspace.upgrade() {
+            let project = workspace_entity.read(cx).project();
+            if let Some(tree) = project.read(cx).worktrees(cx).next() {
+                workspace_directory = Some(tree.read(cx).abs_path().to_path_buf());
+            }
+        }
+
         let mut markdown_element = MarkdownElement::new(
             self.markdown.clone(),
             MarkdownStyle::themed(MarkdownFont::Editor, window, cx),
         )
         .code_block_renderer(CodeBlockRenderer::Default {
-            copy_button: false,
-            copy_button_on_hover: true,
+            copy_button_visibility: CopyButtonVisibility::VisibleOnHover,
             border: false,
         })
         .scroll_handle(self.scroll_handle.clone())
         .show_root_block_markers()
         .image_resolver({
             let base_directory = self.base_directory.clone();
-            move |dest_url| resolve_preview_image(dest_url, base_directory.as_deref())
+            move |dest_url| {
+                resolve_preview_image(
+                    dest_url,
+                    base_directory.as_deref(),
+                    workspace_directory.as_deref(),
+                )
+            }
         })
         .on_url_click(move |url, window, cx| {
             open_preview_url(url, base_directory.clone(), &workspace, window, cx);
@@ -687,7 +701,11 @@ fn resolve_preview_path(url: &str, base_directory: Option<&Path>) -> Option<Path
     }
 }
 
-fn resolve_preview_image(dest_url: &str, base_directory: Option<&Path>) -> Option<ImageSource> {
+fn resolve_preview_image(
+    dest_url: &str,
+    base_directory: Option<&Path>,
+    workspace_directory: Option<&Path>,
+) -> Option<ImageSource> {
     if dest_url.starts_with("data:") {
         return None;
     }
@@ -702,6 +720,19 @@ fn resolve_preview_image(dest_url: &str, base_directory: Option<&Path>) -> Optio
         .map(|decoded| decoded.into_owned())
         .unwrap_or_else(|_| dest_url.to_string());
 
+    let decoded_path = Path::new(&decoded);
+
+    if let Ok(relative_path) = decoded_path.strip_prefix("/") {
+        if let Some(root) = workspace_directory {
+            let absolute_path = root.join(relative_path);
+            if absolute_path.exists() {
+                return Some(ImageSource::Resource(Resource::Path(Arc::from(
+                    absolute_path.as_path(),
+                ))));
+            }
+        }
+    }
+
     let path = if Path::new(&decoded).is_absolute() {
         PathBuf::from(decoded)
     } else {
@@ -778,6 +809,9 @@ impl Render for MarkdownPreviewView {
 
 #[cfg(test)]
 mod tests {
+    use crate::markdown_preview_view::ImageSource;
+    use crate::markdown_preview_view::Resource;
+    use crate::markdown_preview_view::resolve_preview_image;
     use anyhow::Result;
     use std::fs;
     use tempfile::TempDir;
@@ -819,6 +853,54 @@ mod tests {
         Ok(())
     }
 
+    #[test]
+    fn resolves_workspace_absolute_preview_images() -> Result<()> {
+        let temp_dir = TempDir::new()?;
+        let workspace_directory = temp_dir.path();
+
+        let base_directory = workspace_directory.join("docs");
+        fs::create_dir_all(&base_directory)?;
+
+        let image_file = workspace_directory.join("test_image.png");
+        fs::write(&image_file, "mock data")?;
+
+        let resolved_success = resolve_preview_image(
+            "/test_image.png",
+            Some(&base_directory),
+            Some(workspace_directory),
+        );
+
+        match resolved_success {
+            Some(ImageSource::Resource(Resource::Path(p))) => {
+                assert_eq!(p.as_ref(), image_file.as_path());
+            }
+            _ => panic!("Expected successful resolution to be a Resource::Path"),
+        }
+
+        let resolved_missing = resolve_preview_image(
+            "/missing_image.png",
+            Some(&base_directory),
+            Some(workspace_directory),
+        );
+
+        let expected_missing_path = if std::path::Path::new("/missing_image.png").is_absolute() {
+            std::path::PathBuf::from("/missing_image.png")
+        } else {
+            // join is to retain windows path prefix C:/
+            #[expect(clippy::join_absolute_paths)]
+            base_directory.join("/missing_image.png")
+        };
+
+        match resolved_missing {
+            Some(ImageSource::Resource(Resource::Path(p))) => {
+                assert_eq!(p.as_ref(), expected_missing_path.as_path());
+            }
+            _ => panic!("Expected missing file to fallback to a Resource::Path"),
+        }
+
+        Ok(())
+    }
+
     #[test]
     fn does_not_treat_web_links_as_preview_paths() {
         assert_eq!(resolve_preview_path("https://zed.dev", None), None);

crates/migrator/src/migrations.rs πŸ”—

@@ -317,8 +317,8 @@ pub(crate) mod m_2026_03_23 {
     pub(crate) use keymap::KEYMAP_PATTERNS;
 }
 
-pub(crate) mod m_2026_03_31 {
+pub(crate) mod m_2026_03_30 {
     mod settings;
 
-    pub(crate) use settings::remove_text_thread_settings;
+    pub(crate) use settings::make_play_sound_when_agent_done_an_enum;
 }

crates/migrator/src/migrations/m_2026_03_30/settings.rs πŸ”—

@@ -0,0 +1,29 @@
+use anyhow::Result;
+use serde_json::Value;
+
+use crate::migrations::migrate_settings;
+
+pub fn make_play_sound_when_agent_done_an_enum(value: &mut Value) -> Result<()> {
+    migrate_settings(value, &mut migrate_one)
+}
+
+fn migrate_one(obj: &mut serde_json::Map<String, Value>) -> Result<()> {
+    let Some(play_sound) = obj
+        .get_mut("agent")
+        .and_then(|agent| agent.as_object_mut())
+        .and_then(|agent| agent.get_mut("play_sound_when_agent_done"))
+    else {
+        return Ok(());
+    };
+
+    *play_sound = match play_sound {
+        Value::Bool(true) => Value::String("always".to_string()),
+        Value::Bool(false) => Value::String("never".to_string()),
+        Value::String(s) if s == "never" || s == "when_hidden" || s == "always" => return Ok(()),
+        _ => {
+            anyhow::bail!("Expected play_sound_when_agent_done to be a boolean or valid enum value")
+        }
+    };
+
+    Ok(())
+}

crates/migrator/src/migrations/m_2026_03_31/settings.rs πŸ”—

@@ -1,29 +0,0 @@
-use anyhow::Result;
-use serde_json::Value;
-
-use crate::migrations::migrate_settings;
-
-pub fn remove_text_thread_settings(value: &mut Value) -> Result<()> {
-    migrate_settings(value, &mut migrate_one)
-}
-
-fn migrate_one(obj: &mut serde_json::Map<String, Value>) -> Result<()> {
-    // Remove `agent.default_view`
-    if let Some(agent) = obj.get_mut("agent") {
-        if let Some(agent_obj) = agent.as_object_mut() {
-            agent_obj.remove("default_view");
-        }
-    }
-
-    // Remove `edit_predictions.enabled_in_text_threads`
-    if let Some(edit_predictions) = obj.get_mut("edit_predictions") {
-        if let Some(edit_predictions_obj) = edit_predictions.as_object_mut() {
-            edit_predictions_obj.remove("enabled_in_text_threads");
-        }
-    }
-
-    // Remove top-level `slash_commands`
-    obj.remove("slash_commands");
-
-    Ok(())
-}

crates/migrator/src/migrator.rs πŸ”—

@@ -247,7 +247,7 @@ pub fn migrate_settings(text: &str) -> Result<Option<String>> {
             migrations::m_2026_03_16::SETTINGS_PATTERNS,
             &SETTINGS_QUERY_2026_03_16,
         ),
-        MigrationType::Json(migrations::m_2026_03_31::remove_text_thread_settings),
+        MigrationType::Json(migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum),
     ];
     run_migrations(text, migrations)
 }
@@ -941,7 +941,8 @@ mod tests {
                     "foo": "bar"
                 },
                 "edit_predictions": {
-                    }
+                    "enabled_in_text_threads": false,
+                }
             }"#,
             ),
         );
@@ -2400,6 +2401,132 @@ mod tests {
         );
     }
 
+    #[test]
+    fn test_make_play_sound_when_agent_done_an_enum() {
+        assert_migrate_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum,
+            )],
+            &r#"{ }"#.unindent(),
+            None,
+        );
+
+        assert_migrate_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum,
+            )],
+            &r#"{
+                "agent": {
+                    "play_sound_when_agent_done": true
+                }
+            }"#
+            .unindent(),
+            Some(
+                &r#"{
+                    "agent": {
+                        "play_sound_when_agent_done": "always"
+                    }
+                }"#
+                .unindent(),
+            ),
+        );
+
+        assert_migrate_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum,
+            )],
+            &r#"{
+                "agent": {
+                    "play_sound_when_agent_done": false
+                }
+            }"#
+            .unindent(),
+            Some(
+                &r#"{
+                    "agent": {
+                        "play_sound_when_agent_done": "never"
+                    }
+                }"#
+                .unindent(),
+            ),
+        );
+
+        assert_migrate_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum,
+            )],
+            &r#"{
+                "agent": {
+                    "play_sound_when_agent_done": "when_hidden"
+                }
+            }"#
+            .unindent(),
+            None,
+        );
+
+        // Platform key: settings nested inside "macos" should be migrated
+        assert_migrate_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum,
+            )],
+            &r#"
+            {
+                "macos": {
+                    "agent": {
+                        "play_sound_when_agent_done": true
+                    }
+                }
+            }
+            "#
+            .unindent(),
+            Some(
+                &r#"
+                {
+                    "macos": {
+                        "agent": {
+                            "play_sound_when_agent_done": "always"
+                        }
+                    }
+                }
+                "#
+                .unindent(),
+            ),
+        );
+
+        // Profile: settings nested inside profiles should be migrated
+        assert_migrate_with_migrations(
+            &[MigrationType::Json(
+                migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum,
+            )],
+            &r#"
+            {
+                "profiles": {
+                    "work": {
+                        "agent": {
+                            "play_sound_when_agent_done": false
+                        }
+                    }
+                }
+            }
+            "#
+            .unindent(),
+            Some(
+                &r#"
+                {
+                    "profiles": {
+                        "work": {
+                            "agent": {
+                                "play_sound_when_agent_done": "never"
+                            }
+                        }
+                    }
+                }
+                "#
+                .unindent(),
+            ),
+        );
+    }
+
     #[test]
     fn test_remove_context_server_source() {
         assert_migrate_settings(
@@ -4480,109 +4607,4 @@ mod tests {
             ),
         );
     }
-
-    #[test]
-    fn test_remove_text_thread_settings() {
-        assert_migrate_with_migrations(
-            &[MigrationType::Json(
-                migrations::m_2026_03_31::remove_text_thread_settings,
-            )],
-            r#"{
-    "agent": {
-        "default_model": {
-            "provider": "anthropic",
-            "model": "claude-sonnet"
-        },
-        "default_view": "text_thread"
-    },
-    "edit_predictions": {
-        "mode": "eager",
-        "enabled_in_text_threads": true
-    },
-    "slash_commands": {
-        "cargo_workspace": {
-            "enabled": true
-        }
-    }
-}"#,
-            Some(
-                r#"{
-    "agent": {
-        "default_model": {
-            "provider": "anthropic",
-            "model": "claude-sonnet"
-        }
-    },
-    "edit_predictions": {
-        "mode": "eager"
-    }
-}"#,
-            ),
-        );
-    }
-
-    #[test]
-    fn test_remove_text_thread_settings_only_default_view() {
-        assert_migrate_with_migrations(
-            &[MigrationType::Json(
-                migrations::m_2026_03_31::remove_text_thread_settings,
-            )],
-            r#"{
-    "agent": {
-        "default_model": "claude-sonnet",
-        "default_view": "thread"
-    }
-}"#,
-            Some(
-                r#"{
-    "agent": {
-        "default_model": "claude-sonnet"
-    }
-}"#,
-            ),
-        );
-    }
-
-    #[test]
-    fn test_remove_text_thread_settings_only_slash_commands() {
-        assert_migrate_with_migrations(
-            &[MigrationType::Json(
-                migrations::m_2026_03_31::remove_text_thread_settings,
-            )],
-            r#"{
-    "slash_commands": {
-        "cargo_workspace": {
-            "enabled": true
-        }
-    },
-    "vim_mode": true
-}"#,
-            Some(
-                r#"{
-    "vim_mode": true
-}"#,
-            ),
-        );
-    }
-
-    #[test]
-    fn test_remove_text_thread_settings_none_present() {
-        assert_migrate_with_migrations(
-            &[MigrationType::Json(
-                migrations::m_2026_03_31::remove_text_thread_settings,
-            )],
-            r#"{
-    "agent": {
-        "default_model": {
-            "provider": "anthropic",
-            "model": "claude-sonnet"
-        }
-    },
-    "edit_predictions": {
-        "mode": "eager"
-    }
-}"#,
-            None,
-        );
-    }
 }

crates/multi_buffer/src/anchor.rs πŸ”—

@@ -1,192 +1,331 @@
-use crate::{MultiBufferDimension, MultiBufferOffset, MultiBufferOffsetUtf16};
+use crate::{
+    ExcerptSummary, MultiBufferDimension, MultiBufferOffset, MultiBufferOffsetUtf16, PathKey,
+    PathKeyIndex, find_diff_state,
+};
 
-use super::{ExcerptId, MultiBufferSnapshot, ToOffset, ToPoint};
-use language::Point;
+use super::{MultiBufferSnapshot, ToOffset, ToPoint};
+use language::{BufferSnapshot, Point};
 use std::{
     cmp::Ordering,
     ops::{Add, AddAssign, Range, Sub},
 };
 use sum_tree::Bias;
+use text::BufferId;
+
+/// A multibuffer anchor derived from an anchor into a specific excerpted buffer.
+#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
+pub struct ExcerptAnchor {
+    pub(crate) text_anchor: text::Anchor,
+    pub(crate) path: PathKeyIndex,
+    pub(crate) diff_base_anchor: Option<text::Anchor>,
+}
 
 /// A stable reference to a position within a [`MultiBuffer`](super::MultiBuffer).
 ///
 /// Unlike simple offsets, anchors remain valid as the text is edited, automatically
 /// adjusting to reflect insertions and deletions around them.
 #[derive(Clone, Copy, Eq, PartialEq, Hash)]
-pub struct Anchor {
-    /// Identifies which excerpt within the multi-buffer this anchor belongs to.
-    /// A multi-buffer can contain multiple excerpts from different buffers.
-    pub excerpt_id: ExcerptId,
-    /// The position within the excerpt's underlying buffer. This is a stable
-    /// reference that remains valid as the buffer text is edited.
-    pub text_anchor: text::Anchor,
-    /// When present, indicates this anchor points into deleted text within an
-    /// expanded diff hunk. The anchor references a position in the diff base
-    /// (original) text rather than the current buffer text. This is used when
-    /// displaying inline diffs where deleted lines are shown.
-    pub diff_base_anchor: Option<text::Anchor>,
+pub enum Anchor {
+    /// An anchor that always resolves to the start of the multibuffer.
+    Min,
+    /// An anchor that's attached to a specific excerpted buffer.
+    Excerpt(ExcerptAnchor),
+    /// An anchor that always resolves to the end of the multibuffer.
+    Max,
 }
 
-impl std::fmt::Debug for Anchor {
+pub(crate) enum AnchorSeekTarget {
+    Excerpt {
+        path_key: PathKey,
+        anchor: ExcerptAnchor,
+        // None when the buffer no longer exists in the multibuffer
+        snapshot: Option<BufferSnapshot>,
+    },
+    Empty,
+}
+
+impl std::fmt::Debug for AnchorSeekTarget {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        if self.is_min() {
-            return write!(f, "Anchor::min({:?})", self.text_anchor.buffer_id);
+        match self {
+            Self::Excerpt {
+                path_key,
+                anchor,
+                snapshot: _,
+            } => f
+                .debug_struct("Excerpt")
+                .field("path_key", path_key)
+                .field("anchor", anchor)
+                .finish(),
+            Self::Empty => write!(f, "Empty"),
         }
-        if self.is_max() {
-            return write!(f, "Anchor::max({:?})", self.text_anchor.buffer_id);
+    }
+}
+
+impl std::fmt::Debug for Anchor {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Anchor::Min => write!(f, "Anchor::Min"),
+            Anchor::Max => write!(f, "Anchor::Max"),
+            Anchor::Excerpt(excerpt_anchor) => write!(f, "{excerpt_anchor:?}"),
         }
+    }
+}
 
-        f.debug_struct("Anchor")
-            .field("excerpt_id", &self.excerpt_id)
-            .field("text_anchor", &self.text_anchor)
-            .field("diff_base_anchor", &self.diff_base_anchor)
-            .finish()
+impl From<ExcerptAnchor> for Anchor {
+    fn from(anchor: ExcerptAnchor) -> Self {
+        Anchor::Excerpt(anchor)
     }
 }
 
-impl Anchor {
-    pub fn with_diff_base_anchor(self, diff_base_anchor: text::Anchor) -> Self {
-        Self {
-            diff_base_anchor: Some(diff_base_anchor),
-            ..self
+impl ExcerptAnchor {
+    pub(crate) fn buffer_id(&self) -> BufferId {
+        self.text_anchor.buffer_id
+    }
+
+    pub(crate) fn text_anchor(&self) -> text::Anchor {
+        self.text_anchor
+    }
+
+    pub(crate) fn with_diff_base_anchor(mut self, diff_base_anchor: text::Anchor) -> Self {
+        self.diff_base_anchor = Some(diff_base_anchor);
+        self
+    }
+
+    pub(crate) fn cmp(&self, other: &Self, snapshot: &MultiBufferSnapshot) -> Ordering {
+        let Some(self_path_key) = snapshot.path_keys_by_index.get(&self.path) else {
+            panic!("anchor's path was never added to multibuffer")
+        };
+        let Some(other_path_key) = snapshot.path_keys_by_index.get(&other.path) else {
+            panic!("anchor's path was never added to multibuffer")
+        };
+
+        if self_path_key.cmp(other_path_key) != Ordering::Equal {
+            return self_path_key.cmp(other_path_key);
+        }
+
+        // in the case that you removed the buffer containing self,
+        // and added the buffer containing other with the same path key
+        // (ordering is arbitrary but consistent)
+        if self.text_anchor.buffer_id != other.text_anchor.buffer_id {
+            return self.text_anchor.buffer_id.cmp(&other.text_anchor.buffer_id);
+        }
+
+        let Some(buffer) = snapshot.buffer_for_path(&self_path_key) else {
+            return Ordering::Equal;
+        };
+        // Comparing two anchors into buffer A that formerly existed at path P,
+        // when path P has since been reused for a different buffer B
+        if buffer.remote_id() != self.text_anchor.buffer_id {
+            return Ordering::Equal;
+        };
+        assert_eq!(self.text_anchor.buffer_id, buffer.remote_id());
+        let text_cmp = self.text_anchor().cmp(&other.text_anchor(), buffer);
+        if text_cmp != Ordering::Equal {
+            return text_cmp;
+        }
+
+        if (self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some())
+            && let Some(base_text) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id)
+                .map(|diff| diff.base_text())
+        {
+            let self_anchor = self.diff_base_anchor.filter(|a| a.is_valid(base_text));
+            let other_anchor = other.diff_base_anchor.filter(|a| a.is_valid(base_text));
+            return match (self_anchor, other_anchor) {
+                (Some(a), Some(b)) => a.cmp(&b, base_text),
+                (Some(_), None) => match other.text_anchor().bias {
+                    Bias::Left => Ordering::Greater,
+                    Bias::Right => Ordering::Less,
+                },
+                (None, Some(_)) => match self.text_anchor().bias {
+                    Bias::Left => Ordering::Less,
+                    Bias::Right => Ordering::Greater,
+                },
+                (None, None) => Ordering::Equal,
+            };
         }
+
+        Ordering::Equal
     }
 
-    pub fn in_buffer(excerpt_id: ExcerptId, text_anchor: text::Anchor) -> Self {
-        Self {
-            excerpt_id,
-            text_anchor,
-            diff_base_anchor: None,
+    fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Self {
+        if self.text_anchor.bias == Bias::Left {
+            return *self;
+        }
+        let Some(buffer) = snapshot.buffer_for_id(self.text_anchor.buffer_id) else {
+            return *self;
+        };
+        let text_anchor = self.text_anchor().bias_left(&buffer);
+        let ret = Self::in_buffer(self.path, text_anchor);
+        if let Some(diff_base_anchor) = self.diff_base_anchor {
+            if let Some(diff) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id)
+                && diff_base_anchor.is_valid(&diff.base_text())
+            {
+                ret.with_diff_base_anchor(diff_base_anchor.bias_left(diff.base_text()))
+            } else {
+                ret.with_diff_base_anchor(diff_base_anchor)
+            }
+        } else {
+            ret
         }
     }
 
-    pub fn range_in_buffer(excerpt_id: ExcerptId, range: Range<text::Anchor>) -> Range<Self> {
-        Self::in_buffer(excerpt_id, range.start)..Self::in_buffer(excerpt_id, range.end)
+    fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Self {
+        if self.text_anchor.bias == Bias::Right {
+            return *self;
+        }
+        let Some(buffer) = snapshot.buffer_for_id(self.text_anchor.buffer_id) else {
+            return *self;
+        };
+        let text_anchor = self.text_anchor().bias_right(&buffer);
+        let ret = Self::in_buffer(self.path, text_anchor);
+        if let Some(diff_base_anchor) = self.diff_base_anchor {
+            if let Some(diff) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id)
+                && diff_base_anchor.is_valid(&diff.base_text())
+            {
+                ret.with_diff_base_anchor(diff_base_anchor.bias_right(diff.base_text()))
+            } else {
+                ret.with_diff_base_anchor(diff_base_anchor)
+            }
+        } else {
+            ret
+        }
     }
 
-    pub fn min() -> Self {
-        Self {
-            excerpt_id: ExcerptId::min(),
-            text_anchor: text::Anchor::MIN,
+    #[track_caller]
+    pub(crate) fn in_buffer(path: PathKeyIndex, text_anchor: text::Anchor) -> Self {
+        ExcerptAnchor {
+            path,
             diff_base_anchor: None,
+            text_anchor,
         }
     }
 
-    pub fn max() -> Self {
-        Self {
-            excerpt_id: ExcerptId::max(),
-            text_anchor: text::Anchor::MAX,
-            diff_base_anchor: None,
+    fn is_valid(&self, snapshot: &MultiBufferSnapshot) -> bool {
+        let Some(target) = self.try_seek_target(snapshot) else {
+            return false;
+        };
+        let Some(buffer_snapshot) = snapshot.buffer_for_id(self.buffer_id()) else {
+            return false;
+        };
+        // Early check to avoid invalid comparisons when seeking
+        if !buffer_snapshot.can_resolve(&self.text_anchor) {
+            return false;
         }
+        let mut cursor = snapshot.excerpts.cursor::<ExcerptSummary>(());
+        cursor.seek(&target, Bias::Left);
+        let Some(excerpt) = cursor.item() else {
+            return false;
+        };
+        let is_valid = self.text_anchor == excerpt.range.context.start
+            || self.text_anchor == excerpt.range.context.end
+            || self.text_anchor.is_valid(&buffer_snapshot);
+        is_valid
+            && excerpt
+                .range
+                .context
+                .start
+                .cmp(&self.text_anchor(), buffer_snapshot)
+                .is_le()
+            && excerpt
+                .range
+                .context
+                .end
+                .cmp(&self.text_anchor(), buffer_snapshot)
+                .is_ge()
+    }
+
+    pub(crate) fn seek_target(&self, snapshot: &MultiBufferSnapshot) -> AnchorSeekTarget {
+        self.try_seek_target(snapshot)
+            .expect("anchor is from different multi-buffer")
+    }
+
+    pub(crate) fn try_seek_target(
+        &self,
+        snapshot: &MultiBufferSnapshot,
+    ) -> Option<AnchorSeekTarget> {
+        let path_key = snapshot.try_path_for_anchor(*self)?;
+        let buffer = snapshot.buffer_for_path(&path_key).cloned();
+        Some(AnchorSeekTarget::Excerpt {
+            path_key,
+            anchor: *self,
+            snapshot: buffer,
+        })
+    }
+}
+
+impl ToOffset for ExcerptAnchor {
+    fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffset {
+        Anchor::from(*self).to_offset(snapshot)
+    }
+
+    fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffsetUtf16 {
+        Anchor::from(*self).to_offset_utf16(snapshot)
+    }
+}
+
+impl ToPoint for ExcerptAnchor {
+    fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point {
+        Anchor::from(*self).to_point(snapshot)
     }
 
+    fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> rope::PointUtf16 {
+        Anchor::from(*self).to_point_utf16(snapshot)
+    }
+}
+
+impl Anchor {
     pub fn is_min(&self) -> bool {
-        self.excerpt_id == ExcerptId::min()
-            && self.text_anchor.is_min()
-            && self.diff_base_anchor.is_none()
+        matches!(self, Self::Min)
     }
 
     pub fn is_max(&self) -> bool {
-        self.excerpt_id == ExcerptId::max()
-            && self.text_anchor.is_max()
-            && self.diff_base_anchor.is_none()
+        matches!(self, Self::Max)
     }
 
-    pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering {
-        if self == other {
-            return Ordering::Equal;
-        }
+    pub(crate) fn in_buffer(path: PathKeyIndex, text_anchor: text::Anchor) -> Self {
+        Self::Excerpt(ExcerptAnchor::in_buffer(path, text_anchor))
+    }
 
-        let self_excerpt_id = snapshot.latest_excerpt_id(self.excerpt_id);
-        let other_excerpt_id = snapshot.latest_excerpt_id(other.excerpt_id);
+    pub(crate) fn range_in_buffer(path: PathKeyIndex, range: Range<text::Anchor>) -> Range<Self> {
+        Self::in_buffer(path, range.start)..Self::in_buffer(path, range.end)
+    }
 
-        let excerpt_id_cmp = self_excerpt_id.cmp(&other_excerpt_id, snapshot);
-        if excerpt_id_cmp.is_ne() {
-            return excerpt_id_cmp;
-        }
-        if self_excerpt_id == ExcerptId::max()
-            && self.text_anchor.is_max()
-            && self.text_anchor.is_max()
-            && self.diff_base_anchor.is_none()
-            && other.diff_base_anchor.is_none()
-        {
-            return Ordering::Equal;
-        }
-        if let Some(excerpt) = snapshot.excerpt(self_excerpt_id) {
-            let text_cmp = self.text_anchor.cmp(&other.text_anchor, &excerpt.buffer);
-            if text_cmp.is_ne() {
-                return text_cmp;
-            }
-            if (self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some())
-                && let Some(base_text) = snapshot
-                    .diff_state(excerpt.buffer_id)
-                    .map(|diff| diff.base_text())
-            {
-                let self_anchor = self.diff_base_anchor.filter(|a| a.is_valid(base_text));
-                let other_anchor = other.diff_base_anchor.filter(|a| a.is_valid(base_text));
-                return match (self_anchor, other_anchor) {
-                    (Some(a), Some(b)) => a.cmp(&b, base_text),
-                    (Some(_), None) => match other.text_anchor.bias {
-                        Bias::Left => Ordering::Greater,
-                        Bias::Right => Ordering::Less,
-                    },
-                    (None, Some(_)) => match self.text_anchor.bias {
-                        Bias::Left => Ordering::Less,
-                        Bias::Right => Ordering::Greater,
-                    },
-                    (None, None) => Ordering::Equal,
-                };
+    pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering {
+        match (self, other) {
+            (Anchor::Min, Anchor::Min) => return Ordering::Equal,
+            (Anchor::Max, Anchor::Max) => return Ordering::Equal,
+            (Anchor::Min, _) => return Ordering::Less,
+            (Anchor::Max, _) => return Ordering::Greater,
+            (_, Anchor::Max) => return Ordering::Less,
+            (_, Anchor::Min) => return Ordering::Greater,
+            (Anchor::Excerpt(self_excerpt_anchor), Anchor::Excerpt(other_excerpt_anchor)) => {
+                self_excerpt_anchor.cmp(other_excerpt_anchor, snapshot)
             }
         }
-        Ordering::Equal
     }
 
     pub fn bias(&self) -> Bias {
-        self.text_anchor.bias
+        match self {
+            Anchor::Min => Bias::Left,
+            Anchor::Max => Bias::Right,
+            Anchor::Excerpt(anchor) => anchor.text_anchor.bias,
+        }
     }
 
     pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
-        if self.text_anchor.bias != Bias::Left
-            && let Some(excerpt) = snapshot.excerpt(self.excerpt_id)
-        {
-            return Self {
-                excerpt_id: excerpt.id,
-                text_anchor: self.text_anchor.bias_left(&excerpt.buffer),
-                diff_base_anchor: self.diff_base_anchor.map(|a| {
-                    if let Some(base_text) = snapshot
-                        .diff_state(excerpt.buffer_id)
-                        .map(|diff| diff.base_text())
-                        && a.is_valid(&base_text)
-                    {
-                        return a.bias_left(base_text);
-                    }
-                    a
-                }),
-            };
+        match self {
+            Anchor::Min => *self,
+            Anchor::Max => snapshot.anchor_before(snapshot.max_point()),
+            Anchor::Excerpt(anchor) => Anchor::Excerpt(anchor.bias_left(snapshot)),
         }
-        *self
     }
 
     pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
-        if self.text_anchor.bias != Bias::Right
-            && let Some(excerpt) = snapshot.excerpt(self.excerpt_id)
-        {
-            return Self {
-                excerpt_id: excerpt.id,
-                text_anchor: self.text_anchor.bias_right(&excerpt.buffer),
-                diff_base_anchor: self.diff_base_anchor.map(|a| {
-                    if let Some(base_text) = snapshot
-                        .diff_state(excerpt.buffer_id)
-                        .map(|diff| diff.base_text())
-                        && a.is_valid(&base_text)
-                    {
-                        return a.bias_right(base_text);
-                    }
-                    a
-                }),
-            };
+        match self {
+            Anchor::Max => *self,
+            Anchor::Min => snapshot.anchor_after(Point::zero()),
+            Anchor::Excerpt(anchor) => Anchor::Excerpt(anchor.bias_right(snapshot)),
         }
-        *self
     }
 
     pub fn summary<D>(&self, snapshot: &MultiBufferSnapshot) -> D
@@ -203,16 +342,111 @@ impl Anchor {
     }
 
     pub fn is_valid(&self, snapshot: &MultiBufferSnapshot) -> bool {
-        if self.is_min() || self.is_max() {
-            true
-        } else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
-            (self.text_anchor == excerpt.range.context.start
-                || self.text_anchor == excerpt.range.context.end
-                || self.text_anchor.is_valid(&excerpt.buffer))
-                && excerpt.contains(self)
-        } else {
-            false
+        match self {
+            Anchor::Min | Anchor::Max => true,
+            Anchor::Excerpt(excerpt_anchor) => excerpt_anchor.is_valid(snapshot),
+        }
+    }
+
+    fn to_excerpt_anchor(&self, snapshot: &MultiBufferSnapshot) -> Option<ExcerptAnchor> {
+        match self {
+            Anchor::Min => {
+                let excerpt = snapshot.excerpts.first()?;
+
+                Some(ExcerptAnchor {
+                    text_anchor: excerpt.range.context.start,
+                    path: excerpt.path_key_index,
+                    diff_base_anchor: None,
+                })
+            }
+            Anchor::Excerpt(excerpt_anchor) => Some(*excerpt_anchor),
+            Anchor::Max => {
+                let excerpt = snapshot.excerpts.last()?;
+
+                Some(ExcerptAnchor {
+                    text_anchor: excerpt.range.context.end,
+                    path: excerpt.path_key_index,
+                    diff_base_anchor: None,
+                })
+            }
+        }
+    }
+
+    pub(crate) fn seek_target(&self, snapshot: &MultiBufferSnapshot) -> AnchorSeekTarget {
+        let Some(excerpt_anchor) = self.to_excerpt_anchor(snapshot) else {
+            return AnchorSeekTarget::Empty;
+        };
+
+        excerpt_anchor.seek_target(snapshot)
+    }
+
+    pub(crate) fn excerpt_anchor(&self) -> Option<ExcerptAnchor> {
+        match self {
+            Anchor::Min | Anchor::Max => None,
+            Anchor::Excerpt(excerpt_anchor) => Some(*excerpt_anchor),
+        }
+    }
+
+    pub(crate) fn text_anchor(&self) -> Option<text::Anchor> {
+        match self {
+            Anchor::Min | Anchor::Max => None,
+            Anchor::Excerpt(excerpt_anchor) => Some(excerpt_anchor.text_anchor()),
+        }
+    }
+
+    pub fn opaque_id(&self) -> Option<[u8; 20]> {
+        self.text_anchor().map(|a| a.opaque_id())
+    }
+
+    /// Note: anchor_to_buffer_anchor is probably what you want
+    pub fn raw_text_anchor(&self) -> Option<text::Anchor> {
+        match self {
+            Anchor::Min | Anchor::Max => None,
+            Anchor::Excerpt(excerpt_anchor) => Some(excerpt_anchor.text_anchor),
+        }
+    }
+
+    pub(crate) fn try_seek_target(
+        &self,
+        snapshot: &MultiBufferSnapshot,
+    ) -> Option<AnchorSeekTarget> {
+        let Some(excerpt_anchor) = self.to_excerpt_anchor(snapshot) else {
+            return Some(AnchorSeekTarget::Empty);
+        };
+        excerpt_anchor.try_seek_target(snapshot)
+    }
+
+    /// Returns the text anchor for this anchor.
+    /// Panics if the anchor is from a different buffer.
+    pub fn text_anchor_in(&self, buffer: &BufferSnapshot) -> text::Anchor {
+        match self {
+            Anchor::Min => text::Anchor::min_for_buffer(buffer.remote_id()),
+            Anchor::Excerpt(excerpt_anchor) => {
+                let text_anchor = excerpt_anchor.text_anchor;
+                assert_eq!(text_anchor.buffer_id, buffer.remote_id());
+                text_anchor
+            }
+            Anchor::Max => text::Anchor::max_for_buffer(buffer.remote_id()),
+        }
+    }
+
+    pub fn diff_base_anchor(&self) -> Option<text::Anchor> {
+        self.excerpt_anchor()?.diff_base_anchor
+    }
+
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn expect_text_anchor(&self) -> text::Anchor {
+        self.excerpt_anchor().unwrap().text_anchor
+    }
+
+    pub fn with_diff_base_anchor(mut self, diff_base_anchor: text::Anchor) -> Self {
+        match &mut self {
+            Anchor::Min | Anchor::Max => {}
+            Anchor::Excerpt(excerpt_anchor) => {
+                excerpt_anchor.diff_base_anchor = Some(diff_base_anchor);
+            }
         }
+        self
     }
 }
 

crates/multi_buffer/src/multi_buffer.rs πŸ”—

@@ -8,6 +8,7 @@ use self::transaction::History;
 
 pub use anchor::{Anchor, AnchorRangeExt};
 
+use anchor::{AnchorSeekTarget, ExcerptAnchor};
 use anyhow::{Result, anyhow};
 use buffer_diff::{
     BufferDiff, BufferDiffEvent, BufferDiffSnapshot, DiffChanged, DiffHunkSecondaryStatus,
@@ -15,14 +16,14 @@ use buffer_diff::{
 };
 use clock::ReplicaId;
 use collections::{BTreeMap, Bound, HashMap, HashSet};
-use gpui::{App, Context, Entity, EntityId, EventEmitter};
+use gpui::{App, Context, Entity, EventEmitter};
 use itertools::Itertools;
 use language::{
-    AutoindentMode, BracketMatch, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability,
-    CharClassifier, CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, File,
-    IndentGuideSettings, IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline,
-    OutlineItem, Point, PointUtf16, Selection, TextDimension, TextObject, ToOffset as _,
-    ToPoint as _, TransactionId, TreeSitterOptions, Unclipped,
+    AutoindentMode, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability, CharClassifier,
+    CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, File, IndentGuideSettings,
+    IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, OutlineItem, Point,
+    PointUtf16, Selection, TextDimension, TextObject, ToOffset as _, ToPoint as _, TransactionId,
+    TreeSitterOptions, Unclipped,
     language_settings::{AllLanguageSettings, LanguageSettings},
 };
 
@@ -37,7 +38,8 @@ use std::{
     any::type_name,
     borrow::Cow,
     cell::{Cell, OnceCell, Ref, RefCell},
-    cmp, fmt,
+    cmp::{self, Ordering},
+    fmt,
     future::Future,
     io,
     iter::{self, FromIterator},
@@ -51,15 +53,13 @@ use std::{
 use sum_tree::{Bias, Cursor, Dimension, Dimensions, SumTree, TreeMap};
 use text::{
     BufferId, Edit, LineIndent, TextSummary,
-    locator::Locator,
     subscription::{Subscription, Topic},
 };
 use theme::SyntaxTheme;
 use unicode_segmentation::UnicodeSegmentation;
-use util::post_inc;
 use ztracing::instrument;
 
-pub use self::path_key::{PathExcerptInsertResult, PathKey};
+pub use self::path_key::PathKey;
 
 pub static EXCERPT_CONTEXT_LINES: OnceLock<fn(&App) -> u32> = OnceLock::new();
 
@@ -67,9 +67,6 @@ pub fn excerpt_context_lines(cx: &App) -> u32 {
     EXCERPT_CONTEXT_LINES.get().map(|f| f(cx)).unwrap_or(2)
 }
 
-#[derive(Debug, Default, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct ExcerptId(u32);
-
 /// One or more [`Buffers`](Buffer) being edited in a single view.
 ///
 /// See <https://zed.dev/features#multi-buffers>
@@ -79,10 +76,6 @@ pub struct MultiBuffer {
     snapshot: RefCell<MultiBufferSnapshot>,
     /// Contains the state of the buffers being edited
     buffers: BTreeMap<BufferId, BufferState>,
-    /// Mapping from path keys to their excerpts.
-    excerpts_by_path: BTreeMap<PathKey, Vec<ExcerptId>>,
-    /// Mapping from excerpt IDs to their path key.
-    paths_by_excerpt: HashMap<ExcerptId, PathKey>,
     /// Mapping from buffer IDs to their diff states
     diffs: HashMap<BufferId, DiffState>,
     subscriptions: Topic<MultiBufferOffset>,
@@ -98,24 +91,20 @@ pub struct MultiBuffer {
     buffer_changed_since_sync: Rc<Cell<bool>>,
 }
 
+#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+struct PathKeyIndex(u64);
+
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub enum Event {
-    ExcerptsAdded {
+    BufferRangesUpdated {
         buffer: Entity<Buffer>,
-        predecessor: ExcerptId,
-        excerpts: Vec<(ExcerptId, ExcerptRange<language::Anchor>)>,
+        path_key: PathKey,
+        ranges: Vec<ExcerptRange<text::Anchor>>,
     },
-    ExcerptsRemoved {
-        ids: Vec<ExcerptId>,
-        /// Contains only buffer IDs for which all excerpts have been removed.
-        /// Buffers that still have remaining excerpts are never included.
+    BuffersRemoved {
         removed_buffer_ids: Vec<BufferId>,
     },
-    ExcerptsExpanded {
-        ids: Vec<ExcerptId>,
-    },
-    ExcerptsEdited {
-        excerpt_ids: Vec<ExcerptId>,
+    BuffersEdited {
         buffer_ids: Vec<BufferId>,
     },
     DiffHunksToggled,
@@ -145,14 +134,14 @@ pub struct MultiBufferDiffHunk {
     pub buffer_id: BufferId,
     /// The range of the underlying buffer that this hunk corresponds to.
     pub buffer_range: Range<text::Anchor>,
-    /// The excerpt that contains the diff hunk.
-    pub excerpt_id: ExcerptId,
     /// The range within the buffer's diff base that this hunk corresponds to.
     pub diff_base_byte_range: Range<BufferOffset>,
     /// The status of this hunk (added/modified/deleted and secondary status).
     pub status: DiffHunkStatus,
     /// The word diffs for this hunk.
     pub word_diffs: Vec<Range<MultiBufferOffset>>,
+    pub excerpt_range: ExcerptRange<text::Anchor>,
+    pub multi_buffer_range: Range<Anchor>,
 }
 
 impl MultiBufferDiffHunk {
@@ -165,17 +154,12 @@ impl MultiBufferDiffHunk {
             && self.buffer_range.start.is_min()
             && self.buffer_range.end.is_max()
     }
-
-    pub fn multi_buffer_range(&self) -> Range<Anchor> {
-        let start = Anchor::in_buffer(self.excerpt_id, self.buffer_range.start);
-        let end = Anchor::in_buffer(self.excerpt_id, self.buffer_range.end);
-        start..end
-    }
 }
 
 pub type MultiBufferPoint = Point;
+/// ExcerptOffset is an offset into the non-deleted text of the multibuffer
 type ExcerptOffset = ExcerptDimension<MultiBufferOffset>;
-type ExcerptPoint = ExcerptDimension<Point>;
+// NOTE(review): stray duplicate of the ExcerptOffset doc above; left as a plain
+// comment so it does not attach (as `///` would) to the following struct — confirm
+// whether it was meant to document a replacement for the removed `ExcerptPoint` alias.
 
 #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq, Hash, serde::Deserialize)]
 #[serde(transparent)]
@@ -518,10 +502,6 @@ pub trait ToPoint: 'static + fmt::Debug {
 
 struct BufferState {
     buffer: Entity<Buffer>,
-    last_version: RefCell<clock::Global>,
-    last_non_text_state_update_count: Cell<usize>,
-    // Note, any changes to this field value require updating snapshot.buffer_locators as well
-    excerpts: Vec<Locator>,
     _subscriptions: [gpui::Subscription; 2],
 }
 
@@ -694,15 +674,31 @@ impl DiffState {
     }
 }
 
+#[derive(Clone)]
+struct BufferStateSnapshot {
+    path_key: PathKey,
+    path_key_index: PathKeyIndex,
+    buffer_snapshot: BufferSnapshot,
+}
+
+impl fmt::Debug for BufferStateSnapshot {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("BufferStateSnapshot")
+            .field("path_key", &self.path_key)
+            .field("buffer_id", &self.buffer_snapshot.remote_id())
+            .finish()
+    }
+}
+
 /// The contents of a [`MultiBuffer`] at a single point in time.
 #[derive(Clone, Default)]
 pub struct MultiBufferSnapshot {
     excerpts: SumTree<Excerpt>,
-    buffer_locators: TreeMap<BufferId, Arc<[Locator]>>,
+    buffers: TreeMap<BufferId, BufferStateSnapshot>,
+    path_keys_by_index: TreeMap<PathKeyIndex, PathKey>,
+    indices_by_path_key: TreeMap<PathKey, PathKeyIndex>,
     diffs: SumTree<DiffStateSnapshot>,
     diff_transforms: SumTree<DiffTransform>,
-    excerpt_ids: SumTree<ExcerptIdMapping>,
-    replaced_excerpts: Arc<HashMap<ExcerptId, ExcerptId>>,
     non_text_state_update_count: usize,
     edit_count: usize,
     is_dirty: bool,
@@ -717,24 +713,12 @@ pub struct MultiBufferSnapshot {
     show_headers: bool,
 }
 
-// follower: None
-// - BufferContent(Some)
-// - BufferContent(None)
-// - DeletedHunk
-//
-// follower: Some
-// - BufferContent(Some)
-// - BufferContent(None)
-
 #[derive(Debug, Clone)]
 enum DiffTransform {
-    // RealText
     BufferContent {
         summary: MBTextSummary,
-        // modified_hunk_info
         inserted_hunk_info: Option<DiffTransformHunkInfo>,
     },
-    // ExpandedHunkText
     DeletedHunk {
         summary: TextSummary,
         buffer_id: BufferId,
@@ -746,52 +730,71 @@ enum DiffTransform {
 
 #[derive(Clone, Copy, Debug)]
 struct DiffTransformHunkInfo {
-    excerpt_id: ExcerptId,
+    buffer_id: BufferId,
     hunk_start_anchor: text::Anchor,
     hunk_secondary_status: DiffHunkSecondaryStatus,
     is_logically_deleted: bool,
+    excerpt_end: ExcerptAnchor,
 }
 
 impl Eq for DiffTransformHunkInfo {}
 
 impl PartialEq for DiffTransformHunkInfo {
     fn eq(&self, other: &DiffTransformHunkInfo) -> bool {
-        self.excerpt_id == other.excerpt_id && self.hunk_start_anchor == other.hunk_start_anchor
+        self.buffer_id == other.buffer_id && self.hunk_start_anchor == other.hunk_start_anchor
     }
 }
 
 impl std::hash::Hash for DiffTransformHunkInfo {
     fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
-        self.excerpt_id.hash(state);
+        self.buffer_id.hash(state);
         self.hunk_start_anchor.hash(state);
     }
 }
 
 #[derive(Clone)]
-pub struct ExcerptInfo {
-    pub id: ExcerptId,
-    pub buffer: Arc<BufferSnapshot>,
-    pub buffer_id: BufferId,
+pub struct ExcerptBoundaryInfo {
+    pub start_anchor: Anchor,
     pub range: ExcerptRange<text::Anchor>,
     pub end_row: MultiBufferRow,
 }
 
-impl std::fmt::Debug for ExcerptInfo {
+impl ExcerptBoundaryInfo {
+    pub fn start_text_anchor(&self) -> text::Anchor {
+        self.range.context.start
+    }
+    pub fn buffer_id(&self) -> BufferId {
+        self.start_text_anchor().buffer_id
+    }
+    pub fn buffer<'a>(&self, snapshot: &'a MultiBufferSnapshot) -> &'a BufferSnapshot {
+        snapshot
+            .buffer_for_id(self.buffer_id())
+            .expect("buffer snapshot not found for excerpt boundary")
+    }
+}
+
+impl std::fmt::Debug for ExcerptBoundaryInfo {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct(type_name::<Self>())
-            .field("id", &self.id)
-            .field("buffer_id", &self.buffer_id)
-            .field("path", &self.buffer.file().map(|f| f.path()))
+            .field("buffer_id", &self.buffer_id())
             .field("range", &self.range)
             .finish()
     }
 }
 
+impl PartialEq for ExcerptBoundaryInfo {
+    fn eq(&self, other: &Self) -> bool {
+        self.start_anchor == other.start_anchor && self.range == other.range
+    }
+}
+
+impl Eq for ExcerptBoundaryInfo {}
+
 /// A boundary between `Excerpt`s in a [`MultiBuffer`]
 #[derive(Debug)]
 pub struct ExcerptBoundary {
-    pub prev: Option<ExcerptInfo>,
-    pub next: ExcerptInfo,
+    pub prev: Option<ExcerptBoundaryInfo>,
+    pub next: ExcerptBoundaryInfo,
     /// The row in the `MultiBuffer` where the boundary is located
     pub row: MultiBufferRow,
 }
@@ -800,7 +803,7 @@ impl ExcerptBoundary {
     pub fn starts_new_buffer(&self) -> bool {
         match (self.prev.as_ref(), &self.next) {
             (None, _) => true,
-            (Some(prev), next) => prev.buffer_id != next.buffer_id,
+            (Some(prev), next) => prev.buffer_id() != next.buffer_id(),
         }
     }
 }
@@ -808,7 +811,7 @@ impl ExcerptBoundary {
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub struct ExpandInfo {
     pub direction: ExpandExcerptDirection,
-    pub excerpt_id: ExcerptId,
+    pub start_anchor: Anchor,
 }
 
 #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
@@ -822,45 +825,20 @@ pub struct RowInfo {
 }
 
 /// A slice into a [`Buffer`] that is being edited in a [`MultiBuffer`].
-#[derive(Clone)]
-struct Excerpt {
-    /// The unique identifier for this excerpt
-    id: ExcerptId,
+#[derive(Clone, Debug)]
+pub(crate) struct Excerpt {
     /// The location of the excerpt in the [`MultiBuffer`]
-    locator: Locator,
-    /// The buffer being excerpted
-    buffer_id: BufferId,
-    /// A snapshot of the buffer being excerpted
-    buffer: Arc<BufferSnapshot>,
+    pub(crate) path_key: PathKey,
+    pub(crate) path_key_index: PathKeyIndex,
+    pub(crate) buffer_id: BufferId,
     /// The range of the buffer to be shown in the excerpt
-    range: ExcerptRange<text::Anchor>,
+    pub(crate) range: ExcerptRange<text::Anchor>,
+
     /// The last row in the excerpted slice of the buffer
-    max_buffer_row: BufferRow,
+    pub(crate) max_buffer_row: BufferRow,
     /// A summary of the text in the excerpt
-    text_summary: TextSummary,
-    has_trailing_newline: bool,
-}
-
-/// A public view into an `Excerpt` in a [`MultiBuffer`].
-///
-/// Contains methods for getting the [`Buffer`] of the excerpt,
-/// as well as mapping offsets to/from buffer and multibuffer coordinates.
-#[derive(Clone)]
-pub struct MultiBufferExcerpt<'a> {
-    excerpt: &'a Excerpt,
-    diff_transforms:
-        sum_tree::Cursor<'a, 'static, DiffTransform, DiffTransforms<MultiBufferOffset>>,
-    /// The offset in the multibuffer considering diff transforms.
-    offset: MultiBufferOffset,
-    /// The offset in the multibuffer without diff transforms.
-    excerpt_offset: ExcerptOffset,
-    buffer_offset: BufferOffset,
-}
-
-#[derive(Clone, Debug)]
-struct ExcerptIdMapping {
-    id: ExcerptId,
-    locator: Locator,
+    pub(crate) text_summary: TextSummary,
+    pub(crate) has_trailing_newline: bool,
 }
 
 /// A range of text from a single [`Buffer`], to be shown as an `Excerpt`.
@@ -883,16 +861,37 @@ impl<T: Clone> ExcerptRange<T> {
     }
 }
 
-#[derive(Clone, Debug, Default)]
+impl ExcerptRange<text::Anchor> {
+    pub fn contains(&self, t: &text::Anchor, snapshot: &BufferSnapshot) -> bool {
+        self.context.start.cmp(t, snapshot).is_le() && self.context.end.cmp(t, snapshot).is_ge()
+    }
+}
+
+#[derive(Clone, Debug)]
 pub struct ExcerptSummary {
-    excerpt_id: ExcerptId,
-    /// The location of the last [`Excerpt`] being summarized
-    excerpt_locator: Locator,
+    path_key: PathKey,
+    max_anchor: Option<text::Anchor>,
     widest_line_number: u32,
     text: MBTextSummary,
     count: usize,
 }
 
+impl ExcerptSummary {
+    pub fn min() -> Self {
+        ExcerptSummary {
+            path_key: PathKey::min(),
+            max_anchor: None,
+            widest_line_number: 0,
+            text: MBTextSummary::default(),
+            count: 0,
+        }
+    }
+
+    fn len(&self) -> ExcerptOffset {
+        ExcerptDimension(self.text.len)
+    }
+}
+
 #[derive(Debug, Clone)]
 pub struct DiffTransformSummary {
     input: MBTextSummary,
@@ -1068,13 +1067,13 @@ pub struct MultiBufferChunks<'a> {
     excerpts: Cursor<'a, 'static, Excerpt, ExcerptOffset>,
     diff_transforms:
         Cursor<'a, 'static, DiffTransform, Dimensions<MultiBufferOffset, ExcerptOffset>>,
-    diffs: &'a SumTree<DiffStateSnapshot>,
     diff_base_chunks: Option<(BufferId, BufferChunks<'a>)>,
     buffer_chunk: Option<Chunk<'a>>,
     range: Range<MultiBufferOffset>,
     excerpt_offset_range: Range<ExcerptOffset>,
     excerpt_chunks: Option<ExcerptChunks<'a>>,
     language_aware: bool,
+    snapshot: &'a MultiBufferSnapshot,
 }
 
 pub struct ReversedMultiBufferChunks<'a> {
@@ -1128,8 +1127,8 @@ impl<'a, MBD: MultiBufferDimension> Dimension<'a, DiffTransformSummary> for Diff
 struct MultiBufferCursor<'a, MBD, BD> {
     excerpts: Cursor<'a, 'static, Excerpt, ExcerptDimension<MBD>>,
     diff_transforms: Cursor<'a, 'static, DiffTransform, DiffTransforms<MBD>>,
-    diffs: &'a SumTree<DiffStateSnapshot>,
     cached_region: OnceCell<Option<MultiBufferRegion<'a, MBD, BD>>>,
+    snapshot: &'a MultiBufferSnapshot,
 }
 
 #[derive(Clone)]
@@ -1144,8 +1143,8 @@ struct MultiBufferRegion<'a, MBD, BD> {
 }
 
 struct ExcerptChunks<'a> {
-    excerpt_id: ExcerptId,
     content_chunks: BufferChunks<'a>,
+    end: ExcerptAnchor,
     has_footer: bool,
 }
 
@@ -1155,7 +1154,6 @@ struct BufferEdit {
     new_text: Arc<str>,
     is_insertion: bool,
     original_indent_column: Option<u32>,
-    excerpt_id: ExcerptId,
 }
 
 #[derive(Clone, Copy, Debug, PartialEq)]
@@ -1258,8 +1256,6 @@ impl MultiBuffer {
             singleton: false,
             capability,
             title: None,
-            excerpts_by_path: Default::default(),
-            paths_by_excerpt: Default::default(),
             buffer_changed_since_sync: Default::default(),
             history: History::default(),
         }
@@ -1276,11 +1272,6 @@ impl MultiBuffer {
                 *buffer_id,
                 BufferState {
                     buffer: buffer_state.buffer.clone(),
-                    last_version: buffer_state.last_version.clone(),
-                    last_non_text_state_update_count: buffer_state
-                        .last_non_text_state_update_count
-                        .clone(),
-                    excerpts: buffer_state.excerpts.clone(),
                     _subscriptions: [
                         new_cx.observe(&buffer_state.buffer, |_, _, cx| cx.notify()),
                         new_cx.subscribe(&buffer_state.buffer, Self::on_buffer_event),
@@ -1295,8 +1286,6 @@ impl MultiBuffer {
         Self {
             snapshot: RefCell::new(self.snapshot.borrow().clone()),
             buffers,
-            excerpts_by_path: Default::default(),
-            paths_by_excerpt: Default::default(),
             diffs: diff_bases,
             subscriptions: Default::default(),
             singleton: self.singleton,
@@ -1451,7 +1440,7 @@ impl MultiBuffer {
                 _ => Default::default(),
             };
 
-            let (buffer_edits, edited_excerpt_ids) = MultiBuffer::convert_edits_to_buffer_edits(
+            let buffer_edits = MultiBuffer::convert_edits_to_buffer_edits(
                 edits,
                 this.snapshot.get_mut(),
                 &original_indent_columns,
@@ -1472,14 +1461,12 @@ impl MultiBuffer {
                         mut new_text,
                         mut is_insertion,
                         original_indent_column,
-                        excerpt_id,
                     }) = edits.next()
                     {
                         while let Some(BufferEdit {
                             range: next_range,
                             is_insertion: next_is_insertion,
                             new_text: next_new_text,
-                            excerpt_id: next_excerpt_id,
                             ..
                         }) = edits.peek()
                         {
@@ -1492,9 +1479,7 @@ impl MultiBuffer {
                             if should_coalesce {
                                 range.end = cmp::max(next_range.end, range.end);
                                 is_insertion |= *next_is_insertion;
-                                if excerpt_id == *next_excerpt_id {
-                                    new_text = format!("{new_text}{next_new_text}").into();
-                                }
+                                new_text = format!("{new_text}{next_new_text}").into();
                                 edits.next();
                             } else {
                                 break;
@@ -1542,10 +1527,7 @@ impl MultiBuffer {
                 })
             }
 
-            cx.emit(Event::ExcerptsEdited {
-                excerpt_ids: edited_excerpt_ids,
-                buffer_ids,
-            });
+            cx.emit(Event::BuffersEdited { buffer_ids });
         }
     }
 
@@ -1553,9 +1535,8 @@ impl MultiBuffer {
         edits: Vec<(Range<MultiBufferOffset>, Arc<str>)>,
         snapshot: &MultiBufferSnapshot,
         original_indent_columns: &[Option<u32>],
-    ) -> (HashMap<BufferId, Vec<BufferEdit>>, Vec<ExcerptId>) {
+    ) -> HashMap<BufferId, Vec<BufferEdit>> {
         let mut buffer_edits: HashMap<BufferId, Vec<BufferEdit>> = Default::default();
-        let mut edited_excerpt_ids = Vec::new();
         let mut cursor = snapshot.cursor::<MultiBufferOffset, BufferOffset>();
         for (ix, (range, new_text)) in edits.into_iter().enumerate() {
             let original_indent_column = original_indent_columns.get(ix).copied().flatten();
@@ -1600,11 +1581,10 @@ impl MultiBuffer {
             let buffer_end =
                 (end_region.buffer_range.start + end_overshoot).min(end_region.buffer_range.end);
 
-            if start_region.excerpt.id == end_region.excerpt.id {
+            if start_region.excerpt == end_region.excerpt {
                 if start_region.buffer.capability == Capability::ReadWrite
                     && start_region.is_main_buffer
                 {
-                    edited_excerpt_ids.push(start_region.excerpt.id);
                     buffer_edits
                         .entry(start_region.buffer.remote_id())
                         .or_default()
@@ -1613,7 +1593,6 @@ impl MultiBuffer {
                             new_text,
                             is_insertion: true,
                             original_indent_column,
-                            excerpt_id: start_region.excerpt.id,
                         });
                 }
             } else {
@@ -1622,7 +1601,6 @@ impl MultiBuffer {
                 if start_region.buffer.capability == Capability::ReadWrite
                     && start_region.is_main_buffer
                 {
-                    edited_excerpt_ids.push(start_region.excerpt.id);
                     buffer_edits
                         .entry(start_region.buffer.remote_id())
                         .or_default()
@@ -1631,14 +1609,11 @@ impl MultiBuffer {
                             new_text: new_text.clone(),
                             is_insertion: true,
                             original_indent_column,
-                            excerpt_id: start_region.excerpt.id,
                         });
                 }
-                let excerpt_id = end_region.excerpt.id;
                 if end_region.buffer.capability == Capability::ReadWrite
                     && end_region.is_main_buffer
                 {
-                    edited_excerpt_ids.push(excerpt_id);
                     buffer_edits
                         .entry(end_region.buffer.remote_id())
                         .or_default()
@@ -1647,18 +1622,17 @@ impl MultiBuffer {
                             new_text: new_text.clone(),
                             is_insertion: false,
                             original_indent_column,
-                            excerpt_id,
                         });
                 }
+                let end_region_excerpt = end_region.excerpt.clone();
 
                 cursor.seek(&range.start);
                 cursor.next_excerpt();
                 while let Some(region) = cursor.region() {
-                    if region.excerpt.id == excerpt_id {
+                    if region.excerpt == &end_region_excerpt {
                         break;
                     }
                     if region.buffer.capability == Capability::ReadWrite && region.is_main_buffer {
-                        edited_excerpt_ids.push(region.excerpt.id);
                         buffer_edits
                             .entry(region.buffer.remote_id())
                             .or_default()
@@ -1667,14 +1641,13 @@ impl MultiBuffer {
                                 new_text: new_text.clone(),
                                 is_insertion: false,
                                 original_indent_column,
-                                excerpt_id: region.excerpt.id,
                             });
                     }
                     cursor.next_excerpt();
                 }
             }
         }
-        (buffer_edits, edited_excerpt_ids)
+        buffer_edits
     }
 
     pub fn autoindent_ranges<I, S>(&mut self, ranges: I, cx: &mut Context<Self>)
@@ -1706,7 +1679,7 @@ impl MultiBuffer {
             edits: Vec<(Range<MultiBufferOffset>, Arc<str>)>,
             cx: &mut Context<MultiBuffer>,
         ) {
-            let (buffer_edits, edited_excerpt_ids) =
+            let buffer_edits =
                 MultiBuffer::convert_edits_to_buffer_edits(edits, this.snapshot.get_mut(), &[]);
 
             let mut buffer_ids = Vec::new();
@@ -1730,10 +1703,7 @@ impl MultiBuffer {
                 })
             }
 
-            cx.emit(Event::ExcerptsEdited {
-                excerpt_ids: edited_excerpt_ids,
-                buffer_ids,
-            });
+            cx.emit(Event::BuffersEdited { buffer_ids });
         }
     }
 
@@ -1746,26 +1716,39 @@ impl MultiBuffer {
     ) {
         let mut selections_by_buffer: HashMap<BufferId, Vec<Selection<text::Anchor>>> =
             Default::default();
-        let snapshot = self.read(cx);
-        let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>(());
+        let snapshot = self.snapshot(cx);
+        let mut cursor = snapshot.excerpts.cursor::<ExcerptSummary>(());
         for selection in selections {
-            let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id);
-            let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id);
+            let start = selection.start.seek_target(&snapshot);
 
-            cursor.seek(&Some(start_locator), Bias::Left);
-            while let Some(excerpt) = cursor.item()
-                && excerpt.locator <= *end_locator
-            {
-                let mut start = excerpt.range.context.start;
-                let mut end = excerpt.range.context.end;
-                if excerpt.id == selection.start.excerpt_id {
-                    start = selection.start.text_anchor;
-                }
-                if excerpt.id == selection.end.excerpt_id {
-                    end = selection.end.text_anchor;
+            cursor.seek(&start, Bias::Left);
+            while let Some(excerpt) = cursor.item() {
+                let excerpt_start =
+                    Anchor::in_buffer(excerpt.path_key_index, excerpt.range.context.start);
+                if excerpt_start.cmp(&selection.end, &snapshot).is_gt() {
+                    break;
                 }
+                let buffer = excerpt.buffer_snapshot(&snapshot);
+                let start = *text::Anchor::max(
+                    &excerpt.range.context.start,
+                    &selection
+                        .start
+                        .excerpt_anchor()
+                        .map(|excerpt_anchor| excerpt_anchor.text_anchor())
+                        .unwrap_or(text::Anchor::min_for_buffer(excerpt.buffer_id)),
+                    buffer,
+                );
+                let end = *text::Anchor::min(
+                    &excerpt.range.context.end,
+                    &selection
+                        .end
+                        .excerpt_anchor()
+                        .map(|excerpt_anchor| excerpt_anchor.text_anchor())
+                        .unwrap_or(text::Anchor::max_for_buffer(excerpt.buffer_id)),
+                    buffer,
+                );
                 selections_by_buffer
-                    .entry(excerpt.buffer_id)
+                    .entry(buffer.remote_id())
                     .or_default()
                     .push(Selection {
                         id: selection.id,
@@ -1787,25 +1770,9 @@ impl MultiBuffer {
             }
         }
 
-        for (buffer_id, mut selections) in selections_by_buffer {
+        for (buffer_id, selections) in selections_by_buffer {
             self.buffers[&buffer_id].buffer.update(cx, |buffer, cx| {
-                selections.sort_unstable_by(|a, b| a.start.cmp(&b.start, buffer));
-                let mut selections = selections.into_iter().peekable();
-                let merged_selections = Arc::from_iter(iter::from_fn(|| {
-                    let mut selection = selections.next()?;
-                    while let Some(next_selection) = selections.peek() {
-                        if selection.end.cmp(&next_selection.start, buffer).is_ge() {
-                            let next_selection = selections.next().unwrap();
-                            if next_selection.end.cmp(&selection.end, buffer).is_ge() {
-                                selection.end = next_selection.end;
-                            }
-                        } else {
-                            break;
-                        }
-                    }
-                    Some(selection)
-                }));
-                buffer.set_active_selections(merged_selections, line_mode, cursor_shape, cx);
+                buffer.set_active_selections(selections.into(), line_mode, cursor_shape, cx);
             });
         }
     }
@@ -1821,200 +1788,31 @@ impl MultiBuffer {
     #[instrument(skip_all)]
     fn merge_excerpt_ranges<'a>(
         expanded_ranges: impl IntoIterator<Item = &'a ExcerptRange<Point>> + 'a,
-    ) -> (Vec<ExcerptRange<Point>>, Vec<usize>) {
+    ) -> Vec<ExcerptRange<Point>> {
+        let mut sorted: Vec<_> = expanded_ranges.into_iter().collect();
+        sorted.sort_by_key(|range| range.context.start);
         let mut merged_ranges: Vec<ExcerptRange<Point>> = Vec::new();
-        let mut counts: Vec<usize> = Vec::new();
-        for range in expanded_ranges {
+        for range in sorted {
             if let Some(last_range) = merged_ranges.last_mut() {
-                assert!(
-                    last_range.context.start <= range.context.start,
-                    "ranges must be sorted: {last_range:?} <= {range:?}"
-                );
                 if last_range.context.end >= range.context.start
                     || last_range.context.end.row + 1 == range.context.start.row
                 {
                     last_range.context.end = range.context.end.max(last_range.context.end);
-                    *counts.last_mut().unwrap() += 1;
                     continue;
                 }
             }
             merged_ranges.push(range.clone());
-            counts.push(1);
-        }
-        (merged_ranges, counts)
-    }
-
-    pub fn insert_excerpts_after<O>(
-        &mut self,
-        prev_excerpt_id: ExcerptId,
-        buffer: Entity<Buffer>,
-        ranges: impl IntoIterator<Item = ExcerptRange<O>>,
-        cx: &mut Context<Self>,
-    ) -> Vec<ExcerptId>
-    where
-        O: text::ToOffset,
-    {
-        let mut ids = Vec::new();
-        let mut next_excerpt_id =
-            if let Some(last_entry) = self.snapshot.borrow().excerpt_ids.last() {
-                last_entry.id.0 + 1
-            } else {
-                1
-            };
-        self.insert_excerpts_with_ids_after(
-            prev_excerpt_id,
-            buffer,
-            ranges.into_iter().map(|range| {
-                let id = ExcerptId(post_inc(&mut next_excerpt_id));
-                ids.push(id);
-                (id, range)
-            }),
-            cx,
-        );
-        ids
-    }
-
-    pub fn insert_excerpts_with_ids_after<O>(
-        &mut self,
-        prev_excerpt_id: ExcerptId,
-        buffer: Entity<Buffer>,
-        ranges: impl IntoIterator<Item = (ExcerptId, ExcerptRange<O>)>,
-        cx: &mut Context<Self>,
-    ) where
-        O: text::ToOffset,
-    {
-        assert_eq!(self.history.transaction_depth(), 0);
-        let mut ranges = ranges.into_iter().peekable();
-        if ranges.peek().is_none() {
-            return Default::default();
-        }
-
-        self.sync_mut(cx);
-
-        let buffer_snapshot = buffer.read(cx).snapshot();
-        let buffer_id = buffer_snapshot.remote_id();
-
-        let buffer_state = self.buffers.entry(buffer_id).or_insert_with(|| {
-            self.buffer_changed_since_sync.replace(true);
-            buffer.update(cx, |buffer, _| {
-                buffer.record_changes(Rc::downgrade(&self.buffer_changed_since_sync));
-            });
-            BufferState {
-                last_version: RefCell::new(buffer_snapshot.version().clone()),
-                last_non_text_state_update_count: Cell::new(
-                    buffer_snapshot.non_text_state_update_count(),
-                ),
-                excerpts: Default::default(),
-                _subscriptions: [
-                    cx.observe(&buffer, |_, _, cx| cx.notify()),
-                    cx.subscribe(&buffer, Self::on_buffer_event),
-                ],
-                buffer: buffer.clone(),
-            }
-        });
-
-        let mut snapshot = self.snapshot.get_mut();
-
-        let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone();
-        let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids);
-        let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>(());
-        let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right);
-        prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone();
-
-        let edit_start = ExcerptDimension(new_excerpts.summary().text.len);
-        new_excerpts.update_last(
-            |excerpt| {
-                excerpt.has_trailing_newline = true;
-            },
-            (),
-        );
-
-        let next_locator = if let Some(excerpt) = cursor.item() {
-            excerpt.locator.clone()
-        } else {
-            Locator::max()
-        };
-
-        let mut excerpts = Vec::new();
-        let buffer_snapshot = Arc::new(buffer_snapshot);
-        while let Some((id, range)) = ranges.next() {
-            let locator = Locator::between(&prev_locator, &next_locator);
-            if let Err(ix) = buffer_state.excerpts.binary_search(&locator) {
-                buffer_state.excerpts.insert(ix, locator.clone());
-            }
-            let range = ExcerptRange {
-                context: buffer_snapshot.anchor_before(&range.context.start)
-                    ..buffer_snapshot.anchor_after(&range.context.end),
-                primary: buffer_snapshot.anchor_before(&range.primary.start)
-                    ..buffer_snapshot.anchor_after(&range.primary.end),
-            };
-            excerpts.push((id, range.clone()));
-            let excerpt = Excerpt::new(
-                id,
-                locator.clone(),
-                buffer_id,
-                buffer_snapshot.clone(),
-                range,
-                ranges.peek().is_some() || cursor.item().is_some(),
-            );
-            new_excerpts.push(excerpt, ());
-            prev_locator = locator.clone();
-
-            if let Some(last_mapping_entry) = new_excerpt_ids.last() {
-                assert!(id > last_mapping_entry.id, "excerpt ids must be increasing");
-            }
-            new_excerpt_ids.push(ExcerptIdMapping { id, locator }, ());
-        }
-        snapshot
-            .buffer_locators
-            .insert(buffer_id, buffer_state.excerpts.iter().cloned().collect());
-
-        let edit_end = ExcerptDimension(new_excerpts.summary().text.len);
-
-        let suffix = cursor.suffix();
-        let changed_trailing_excerpt = suffix.is_empty();
-        new_excerpts.append(suffix, ());
-        drop(cursor);
-        snapshot.excerpts = new_excerpts;
-        snapshot.excerpt_ids = new_excerpt_ids;
-        if changed_trailing_excerpt {
-            snapshot.trailing_excerpt_update_count += 1;
         }
-
-        let edits = Self::sync_diff_transforms(
-            &mut snapshot,
-            vec![Edit {
-                old: edit_start..edit_start,
-                new: edit_start..edit_end,
-            }],
-            DiffChangeKind::BufferEdited,
-        );
-        if !edits.is_empty() {
-            self.subscriptions.publish(edits);
-        }
-
-        cx.emit(Event::Edited {
-            edited_buffer: None,
-            is_local: true,
-        });
-        cx.emit(Event::ExcerptsAdded {
-            buffer,
-            predecessor: prev_excerpt_id,
-            excerpts,
-        });
-        cx.notify();
+        merged_ranges
     }
 
     pub fn clear(&mut self, cx: &mut Context<Self>) {
         self.sync_mut(cx);
-        let ids = self.excerpt_ids();
         let removed_buffer_ids = std::mem::take(&mut self.buffers).into_keys().collect();
-        self.excerpts_by_path.clear();
-        self.paths_by_excerpt.clear();
+        self.diffs.clear();
         let MultiBufferSnapshot {
             excerpts,
-            buffer_locators,
-            diffs: _,
+            diffs,
             diff_transforms: _,
             non_text_state_update_count: _,
             edit_count: _,
@@ -2023,27 +1821,25 @@ impl MultiBuffer {
             has_conflict,
             has_inverted_diff,
             singleton: _,
-            excerpt_ids: _,
-            replaced_excerpts,
             trailing_excerpt_update_count,
             all_diff_hunks_expanded: _,
             show_deleted_hunks: _,
             use_extended_diff_range: _,
             show_headers: _,
+            path_keys_by_index: _,
+            indices_by_path_key: _,
+            buffers,
         } = self.snapshot.get_mut();
-        buffer_locators.clear();
         let start = ExcerptDimension(MultiBufferOffset::ZERO);
         let prev_len = ExcerptDimension(excerpts.summary().text.len);
         *excerpts = Default::default();
+        *buffers = Default::default();
+        *diffs = Default::default();
         *trailing_excerpt_update_count += 1;
         *is_dirty = false;
         *has_deleted_file = false;
         *has_conflict = false;
         *has_inverted_diff = false;
-        match Arc::get_mut(replaced_excerpts) {
-            Some(replaced_excerpts) => replaced_excerpts.clear(),
-            None => *replaced_excerpts = Default::default(),
-        }
 
         let edits = Self::sync_diff_transforms(
             self.snapshot.get_mut(),
@@ -2060,118 +1856,16 @@ impl MultiBuffer {
             edited_buffer: None,
             is_local: true,
         });
-        cx.emit(Event::ExcerptsRemoved {
-            ids,
-            removed_buffer_ids,
-        });
+        cx.emit(Event::BuffersRemoved { removed_buffer_ids });
         cx.notify();
     }
 
-    #[ztracing::instrument(skip_all)]
-    pub fn excerpts_for_buffer(
-        &self,
-        buffer_id: BufferId,
-        cx: &App,
-    ) -> Vec<(ExcerptId, Arc<BufferSnapshot>, ExcerptRange<text::Anchor>)> {
-        let mut excerpts = Vec::new();
-        let snapshot = self.read(cx);
-        let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>(());
-        if let Some(locators) = snapshot.buffer_locators.get(&buffer_id) {
-            for locator in &**locators {
-                cursor.seek_forward(&Some(locator), Bias::Left);
-                if let Some(excerpt) = cursor.item()
-                    && excerpt.locator == *locator
-                {
-                    excerpts.push((excerpt.id, excerpt.buffer.clone(), excerpt.range.clone()));
-                }
-            }
-        }
-
-        excerpts
-    }
-
-    pub fn excerpt_ranges_for_buffer(&self, buffer_id: BufferId, cx: &App) -> Vec<Range<Point>> {
-        let snapshot = self.read(cx);
-        let mut excerpts = snapshot
-            .excerpts
-            .cursor::<Dimensions<Option<&Locator>, ExcerptPoint>>(());
-        let mut diff_transforms = snapshot
-            .diff_transforms
-            .cursor::<Dimensions<ExcerptPoint, OutputDimension<Point>>>(());
-        diff_transforms.next();
-        let locators = snapshot
-            .buffer_locators
-            .get(&buffer_id)
-            .into_iter()
-            .flat_map(|v| &**v);
-        let mut result = Vec::new();
-        for locator in locators {
-            excerpts.seek_forward(&Some(locator), Bias::Left);
-            if let Some(excerpt) = excerpts.item()
-                && excerpt.locator == *locator
-            {
-                let excerpt_start = excerpts.start().1;
-                let excerpt_end = excerpt_start + excerpt.text_summary.lines;
-
-                diff_transforms.seek_forward(&excerpt_start, Bias::Left);
-                let overshoot = excerpt_start - diff_transforms.start().0;
-                let start = diff_transforms.start().1 + overshoot;
-
-                diff_transforms.seek_forward(&excerpt_end, Bias::Right);
-                let overshoot = excerpt_end - diff_transforms.start().0;
-                let end = diff_transforms.start().1 + overshoot;
-
-                result.push(start.0..end.0)
-            }
-        }
-        result
-    }
-
-    pub fn excerpt_buffer_ids(&self) -> Vec<BufferId> {
-        self.snapshot
-            .borrow()
-            .excerpts
-            .iter()
-            .map(|entry| entry.buffer_id)
-            .collect()
-    }
-
-    pub fn excerpt_ids(&self) -> Vec<ExcerptId> {
-        let snapshot = self.snapshot.borrow();
-        let mut ids = Vec::with_capacity(snapshot.excerpts.summary().count);
-        ids.extend(snapshot.excerpts.iter().map(|entry| entry.id));
-        ids
-    }
-
-    pub fn excerpt_containing(
-        &self,
-        position: impl ToOffset,
-        cx: &App,
-    ) -> Option<(ExcerptId, Entity<Buffer>, Range<text::Anchor>)> {
+    pub fn range_for_buffer(&self, buffer_id: BufferId, cx: &App) -> Option<Range<Point>> {
         let snapshot = self.read(cx);
-        let offset = position.to_offset(&snapshot);
-
-        let mut cursor = snapshot.cursor::<MultiBufferOffset, BufferOffset>();
-        cursor.seek(&offset);
-        cursor
-            .excerpt()
-            .or_else(|| snapshot.excerpts.last())
-            .map(|excerpt| {
-                (
-                    excerpt.id,
-                    self.buffers.get(&excerpt.buffer_id).unwrap().buffer.clone(),
-                    excerpt.range.context.clone(),
-                )
-            })
-    }
-
-    pub fn buffer_for_anchor(&self, anchor: Anchor, cx: &App) -> Option<Entity<Buffer>> {
-        if let Some(buffer_id) = anchor.text_anchor.buffer_id {
-            self.buffer(buffer_id)
-        } else {
-            let (_, buffer, _) = self.excerpt_containing(anchor, cx)?;
-            Some(buffer)
-        }
+        let path_key = snapshot.path_key_index_for_buffer(buffer_id)?;
+        let start = Anchor::in_buffer(path_key, text::Anchor::min_for_buffer(buffer_id));
+        let end = Anchor::in_buffer(path_key, text::Anchor::max_for_buffer(buffer_id));
+        Some((start..end).to_point(&snapshot))
     }
 
     // If point is at the end of the buffer, the last excerpt is returned

crates/multi_buffer/src/multi_buffer_tests.rs πŸ”—

@@ -77,22 +77,19 @@ fn test_buffer_point_to_anchor_at_end_of_singleton_buffer(cx: &mut App) {
     let buffer = cx.new(|cx| Buffer::local("abc", cx));
     let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx));
 
-    let excerpt_id = multibuffer
+    let anchor = multibuffer
         .read(cx)
-        .excerpt_ids()
-        .into_iter()
-        .next()
+        .buffer_point_to_anchor(&buffer, Point::new(0, 3), cx)
         .unwrap();
-    let anchor = multibuffer
+    let (anchor, _) = multibuffer
         .read(cx)
-        .buffer_point_to_anchor(&buffer, Point::new(0, 3), cx);
+        .snapshot(cx)
+        .anchor_to_buffer_anchor(anchor)
+        .unwrap();
 
     assert_eq!(
         anchor,
-        Some(Anchor::in_buffer(
-            excerpt_id,
-            buffer.read(cx).snapshot().anchor_after(Point::new(0, 3)),
-        ))
+        buffer.read(cx).snapshot().anchor_after(Point::new(0, 3)),
     );
 }
 
@@ -346,7 +343,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
     );
 
     let snapshot = multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.remove_excerpts_for_path(PathKey::sorted(1), cx);
+        multibuffer.remove_excerpts(PathKey::sorted(1), cx);
         multibuffer.snapshot(cx)
     });
 
@@ -373,7 +370,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
                     boundary.row,
                     boundary
                         .next
-                        .buffer
+                        .buffer(snapshot)
                         .text_for_range(boundary.next.range.context)
                         .collect::<String>(),
                     starts_new_buffer,
@@ -440,7 +437,7 @@ async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
 
     multibuffer.update(cx, |multibuffer, cx| {
         multibuffer.add_diff(diff, cx);
-        multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+        multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
     });
 
     assert_new_snapshot(
@@ -480,7 +477,7 @@ async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
     );
 
     multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+        multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
     });
 
     assert_new_snapshot(
@@ -521,7 +518,7 @@ async fn test_diff_hunks_in_range_query_starting_at_added_row(cx: &mut TestAppCo
 
     multibuffer.update(cx, |multibuffer, cx| {
         multibuffer.add_diff(diff, cx);
-        multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+        multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
     });
 
     assert_new_snapshot(
@@ -766,12 +763,27 @@ fn test_excerpt_events(cx: &mut App) {
         cx.subscribe(
             &leader_multibuffer,
             move |follower, _, event, cx| match event.clone() {
-                Event::ExcerptsAdded {
+                Event::BufferRangesUpdated {
                     buffer,
-                    predecessor,
-                    excerpts,
-                } => follower.insert_excerpts_with_ids_after(predecessor, buffer, excerpts, cx),
-                Event::ExcerptsRemoved { ids, .. } => follower.remove_excerpts(ids, cx),
+                    path_key,
+                    ranges,
+                } => {
+                    let buffer_snapshot = buffer.read(cx).snapshot();
+                    follower.set_merged_excerpt_ranges_for_path(
+                        path_key,
+                        buffer,
+                        &buffer_snapshot,
+                        ranges,
+                        cx,
+                    );
+                }
+                Event::BuffersRemoved {
+                    removed_buffer_ids, ..
+                } => {
+                    for id in removed_buffer_ids {
+                        follower.remove_excerpts_for_buffer(id, cx);
+                    }
+                }
                 Event::Edited { .. } => {
                     *follower_edit_event_count.write() += 1;
                 }
@@ -885,9 +897,14 @@ fn test_expand_excerpts(cx: &mut App) {
     drop(snapshot);
 
     multibuffer.update(cx, |multibuffer, cx| {
-        let line_zero = multibuffer.snapshot(cx).anchor_before(Point::new(0, 0));
+        let multibuffer_snapshot = multibuffer.snapshot(cx);
+        let line_zero = multibuffer_snapshot.anchor_before(Point::new(0, 0));
         multibuffer.expand_excerpts(
-            multibuffer.excerpt_ids(),
+            multibuffer.snapshot(cx).excerpts().map(|excerpt| {
+                multibuffer_snapshot
+                    .anchor_in_excerpt(excerpt.context.start)
+                    .unwrap()
+            }),
             1,
             ExpandExcerptDirection::UpAndDown,
             cx,
@@ -1184,16 +1201,10 @@ fn test_multibuffer_anchors(cx: &mut App) {
             .to_offset(&old_snapshot),
         MultiBufferOffset(0)
     );
-    assert_eq!(Anchor::min().to_offset(&old_snapshot), MultiBufferOffset(0));
-    assert_eq!(Anchor::min().to_offset(&old_snapshot), MultiBufferOffset(0));
-    assert_eq!(
-        Anchor::max().to_offset(&old_snapshot),
-        MultiBufferOffset(10)
-    );
-    assert_eq!(
-        Anchor::max().to_offset(&old_snapshot),
-        MultiBufferOffset(10)
-    );
+    assert_eq!(Anchor::Min.to_offset(&old_snapshot), MultiBufferOffset(0));
+    assert_eq!(Anchor::Min.to_offset(&old_snapshot), MultiBufferOffset(0));
+    assert_eq!(Anchor::Max.to_offset(&old_snapshot), MultiBufferOffset(10));
+    assert_eq!(Anchor::Max.to_offset(&old_snapshot), MultiBufferOffset(10));
 
     buffer_1.update(cx, |buffer, cx| {
         buffer.edit([(0..0, "W")], None, cx);
@@ -1270,153 +1281,6 @@ fn test_multibuffer_anchors(cx: &mut App) {
     );
 }
 
-#[gpui::test]
-fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut App) {
-    let buffer_1 = cx.new(|cx| Buffer::local("abcd", cx));
-    let buffer_2 = cx.new(|cx| Buffer::local("ABCDEFGHIJKLMNOP", cx));
-    let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
-
-    // Create an insertion id in buffer 1 that doesn't exist in buffer 2.
-    // Add an excerpt from buffer 1 that spans this new insertion.
-    buffer_1.update(cx, |buffer, cx| buffer.edit([(4..4, "123")], None, cx));
-    let excerpt_id_1 = multibuffer.update(cx, |multibuffer, cx| {
-        let buffer_1_snapshot = buffer_1.read(cx).snapshot();
-        multibuffer.set_excerpt_ranges_for_path(
-            PathKey::sorted(0),
-            buffer_1,
-            &buffer_1_snapshot,
-            vec![ExcerptRange::new((0..7).to_point(&buffer_1_snapshot))],
-            cx,
-        );
-        multibuffer.excerpt_ids().into_iter().next().unwrap()
-    });
-
-    let snapshot_1 = multibuffer.read(cx).snapshot(cx);
-    assert_eq!(snapshot_1.text(), "abcd123");
-
-    // Replace the buffer 1 excerpt with new excerpts from buffer 2.
-    let (excerpt_id_2, _excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.remove_excerpts_for_path(PathKey::sorted(0), cx);
-        let snapshot_2 = buffer_2.read(cx).snapshot();
-        multibuffer.set_excerpt_ranges_for_path(
-            PathKey::sorted(1),
-            buffer_2.clone(),
-            &buffer_2.read(cx).snapshot(),
-            vec![
-                ExcerptRange::new((0..4).to_point(&snapshot_2)),
-                ExcerptRange::new((6..10).to_point(&snapshot_2)),
-                ExcerptRange::new((12..16).to_point(&snapshot_2)),
-            ],
-            cx,
-        );
-        let mut ids = multibuffer
-            .excerpts_for_buffer(buffer_2.read(cx).remote_id(), cx)
-            .into_iter()
-            .map(|(id, _, _)| id);
-        (ids.next().unwrap(), ids.next().unwrap())
-    });
-    let snapshot_2 = multibuffer.read(cx).snapshot(cx);
-    assert_eq!(snapshot_2.text(), "ABCD\nGHIJ\nMNOP");
-
-    // The old excerpt id doesn't get reused.
-    assert_ne!(excerpt_id_2, excerpt_id_1);
-
-    // Resolve some anchors from the previous snapshot in the new snapshot.
-    // The current excerpts are from a different buffer, so we don't attempt to
-    // resolve the old text anchor in the new buffer.
-    assert_eq!(
-        snapshot_2.summary_for_anchor::<MultiBufferOffset>(
-            &snapshot_1.anchor_before(MultiBufferOffset(2))
-        ),
-        MultiBufferOffset(0)
-    );
-    assert_eq!(
-        snapshot_2.summaries_for_anchors::<MultiBufferOffset, _>(&[
-            snapshot_1.anchor_before(MultiBufferOffset(2)),
-            snapshot_1.anchor_after(MultiBufferOffset(3))
-        ]),
-        vec![MultiBufferOffset(0), MultiBufferOffset(0)]
-    );
-
-    // Refresh anchors from the old snapshot. The return value indicates that both
-    // anchors lost their original excerpt.
-    let refresh = snapshot_2.refresh_anchors(&[
-        snapshot_1.anchor_before(MultiBufferOffset(2)),
-        snapshot_1.anchor_after(MultiBufferOffset(3)),
-    ]);
-    assert_eq!(
-        refresh,
-        &[
-            (0, snapshot_2.anchor_before(MultiBufferOffset(0)), false),
-            (1, snapshot_2.anchor_after(MultiBufferOffset(0)), false),
-        ]
-    );
-
-    // Replace the middle excerpt with a smaller excerpt in buffer 2,
-    // that intersects the old excerpt.
-    multibuffer.update(cx, |multibuffer, cx| {
-        let snapshot_2 = buffer_2.read(cx).snapshot();
-        multibuffer.set_excerpt_ranges_for_path(
-            PathKey::sorted(1),
-            buffer_2.clone(),
-            &buffer_2.read(cx).snapshot(),
-            vec![
-                ExcerptRange::new((0..4).to_point(&snapshot_2)),
-                ExcerptRange::new((12..16).to_point(&snapshot_2)),
-            ],
-            cx,
-        );
-        multibuffer.set_excerpt_ranges_for_path(
-            PathKey::sorted(1),
-            buffer_2.clone(),
-            &buffer_2.read(cx).snapshot(),
-            vec![
-                ExcerptRange::new((0..4).to_point(&snapshot_2)),
-                ExcerptRange::new((5..8).to_point(&snapshot_2)),
-                ExcerptRange::new((12..16).to_point(&snapshot_2)),
-            ],
-            cx,
-        );
-    });
-
-    let snapshot_3 = multibuffer.read(cx).snapshot(cx);
-    assert_eq!(snapshot_3.text(), "ABCD\nFGH\nMNOP");
-
-    // Resolve some anchors from the previous snapshot in the new snapshot.
-    // The third anchor can't be resolved, since its excerpt has been removed,
-    // so it resolves to the same position as its predecessor.
-    let anchors = [
-        snapshot_2.anchor_before(MultiBufferOffset(0)),
-        snapshot_2.anchor_after(MultiBufferOffset(2)),
-        snapshot_2.anchor_after(MultiBufferOffset(6)),
-        snapshot_2.anchor_after(MultiBufferOffset(14)),
-    ];
-    assert_eq!(
-        snapshot_3.summaries_for_anchors::<MultiBufferOffset, _>(&anchors),
-        &[
-            MultiBufferOffset(0),
-            MultiBufferOffset(2),
-            MultiBufferOffset(9),
-            MultiBufferOffset(13)
-        ]
-    );
-
-    let new_anchors = snapshot_3.refresh_anchors(&anchors);
-    assert_eq!(
-        new_anchors.iter().map(|a| (a.0, a.2)).collect::<Vec<_>>(),
-        &[(0, true), (1, true), (2, true), (3, true)]
-    );
-    assert_eq!(
-        snapshot_3.summaries_for_anchors::<MultiBufferOffset, _>(new_anchors.iter().map(|a| &a.1)),
-        &[
-            MultiBufferOffset(0),
-            MultiBufferOffset(2),
-            MultiBufferOffset(7),
-            MultiBufferOffset(13)
-        ]
-    );
-}
-
 #[gpui::test]
 async fn test_basic_diff_hunks(cx: &mut TestAppContext) {
     let text = indoc!(
@@ -1467,7 +1331,7 @@ async fn test_basic_diff_hunks(cx: &mut TestAppContext) {
     );
 
     multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+        multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
     });
 
     assert_new_snapshot(
@@ -1513,7 +1377,7 @@ async fn test_basic_diff_hunks(cx: &mut TestAppContext) {
     assert_line_indents(&snapshot);
 
     multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx)
+        multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx)
     });
     assert_new_snapshot(
         &multibuffer,
@@ -1700,7 +1564,7 @@ async fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) {
     });
 
     multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+        multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
     });
 
     assert_new_snapshot(
@@ -1751,7 +1615,7 @@ async fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) {
 
     // Now collapse all diff hunks
     multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+        multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
     });
 
     assert_new_snapshot(
@@ -2097,6 +1961,203 @@ fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) {
     });
 }
 
+#[gpui::test]
+fn test_update_excerpt_ranges_for_path(cx: &mut TestAppContext) {
+    let buffer = cx.new(|cx| {
+        Buffer::local(
+            indoc! {
+            "row 0
+            row 1
+            row 2
+            row 3
+            row 4
+            row 5
+            row 6
+            row 7
+            row 8
+            row 9
+            row 10
+            row 11
+            row 12
+            row 13
+            row 14
+            "},
+            cx,
+        )
+    });
+    let path = PathKey::with_sort_prefix(0, rel_path("test.rs").into_arc());
+
+    let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.set_excerpts_for_path(
+            path.clone(),
+            buffer.clone(),
+            vec![Point::row_range(2..4), Point::row_range(8..10)],
+            0,
+            cx,
+        );
+    });
+    assert_excerpts_match(
+        &multibuffer,
+        cx,
+        indoc! {"-----
+            row 2
+            row 3
+            row 4
+            -----
+            row 8
+            row 9
+            row 10
+            "},
+    );
+
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.update_excerpts_for_path(
+            path.clone(),
+            buffer.clone(),
+            vec![Point::row_range(12..13)],
+            0,
+            cx,
+        );
+    });
+    assert_excerpts_match(
+        &multibuffer,
+        cx,
+        indoc! {"-----
+            row 12
+            row 13
+            "},
+    );
+
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.set_excerpts_for_path(
+            path.clone(),
+            buffer.clone(),
+            vec![Point::row_range(2..4)],
+            0,
+            cx,
+        );
+    });
+    assert_excerpts_match(
+        &multibuffer,
+        cx,
+        indoc! {"-----
+            row 2
+            row 3
+            row 4
+            "},
+    );
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.update_excerpts_for_path(
+            path.clone(),
+            buffer.clone(),
+            vec![Point::row_range(3..5)],
+            0,
+            cx,
+        );
+    });
+    assert_excerpts_match(
+        &multibuffer,
+        cx,
+        indoc! {"-----
+            row 2
+            row 3
+            row 4
+            row 5
+            "},
+    );
+
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.set_excerpts_for_path(
+            path.clone(),
+            buffer.clone(),
+            vec![
+                Point::row_range(0..1),
+                Point::row_range(6..8),
+                Point::row_range(12..13),
+            ],
+            0,
+            cx,
+        );
+    });
+    assert_excerpts_match(
+        &multibuffer,
+        cx,
+        indoc! {"-----
+            row 0
+            row 1
+            -----
+            row 6
+            row 7
+            row 8
+            -----
+            row 12
+            row 13
+            "},
+    );
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.update_excerpts_for_path(
+            path.clone(),
+            buffer.clone(),
+            vec![Point::row_range(7..9)],
+            0,
+            cx,
+        );
+    });
+    assert_excerpts_match(
+        &multibuffer,
+        cx,
+        indoc! {"-----
+            row 6
+            row 7
+            row 8
+            row 9
+            "},
+    );
+
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.set_excerpts_for_path(
+            path.clone(),
+            buffer.clone(),
+            vec![Point::row_range(2..3), Point::row_range(6..7)],
+            0,
+            cx,
+        );
+    });
+    assert_excerpts_match(
+        &multibuffer,
+        cx,
+        indoc! {"-----
+            row 2
+            row 3
+            -----
+            row 6
+            row 7
+            "},
+    );
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.update_excerpts_for_path(
+            path.clone(),
+            buffer.clone(),
+            vec![Point::row_range(3..6)],
+            0,
+            cx,
+        );
+    });
+    assert_excerpts_match(
+        &multibuffer,
+        cx,
+        indoc! {"-----
+            row 2
+            row 3
+            row 4
+            row 5
+            row 6
+            row 7
+            "},
+    );
+}
+
 #[gpui::test]
 fn test_set_excerpts_for_buffer_rename(cx: &mut TestAppContext) {
     let buf1 = cx.new(|cx| {
@@ -2178,6 +2239,405 @@ fn test_set_excerpts_for_buffer_rename(cx: &mut TestAppContext) {
     );
 }
 
+#[gpui::test]
+fn test_set_excerpts_for_path_replaces_previous_buffer(cx: &mut TestAppContext) {
+    let buffer_a = cx.new(|cx| {
+        Buffer::local(
+            indoc! {
+            "alpha
+            beta
+            gamma
+            delta
+            epsilon
+            ",
+            },
+            cx,
+        )
+    });
+    let buffer_b = cx.new(|cx| {
+        Buffer::local(
+            indoc! {
+            "one
+            two
+            three
+            four
+            ",
+            },
+            cx,
+        )
+    });
+    let path: PathKey = PathKey::with_sort_prefix(0, rel_path("shared/path").into_arc());
+
+    let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
+    let removed_buffer_ids: Arc<RwLock<Vec<BufferId>>> = Default::default();
+    multibuffer.update(cx, |_, cx| {
+        let removed_buffer_ids = removed_buffer_ids.clone();
+        cx.subscribe(&multibuffer, move |_, _, event, _| {
+            if let Event::BuffersRemoved {
+                removed_buffer_ids: ids,
+            } = event
+            {
+                removed_buffer_ids.write().extend(ids.iter().copied());
+            }
+        })
+        .detach();
+    });
+
+    let ranges_a = vec![Point::row_range(0..1), Point::row_range(3..4)];
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.set_excerpts_for_path(path.clone(), buffer_a.clone(), ranges_a.clone(), 0, cx);
+    });
+    let (anchor_a1, anchor_a2) = multibuffer.read_with(cx, |multibuffer, cx| {
+        let snapshot = multibuffer.snapshot(cx);
+        let buffer_snapshot = buffer_a.read(cx).snapshot();
+        let mut anchors = ranges_a.into_iter().filter_map(|range| {
+            let text_range = buffer_snapshot.anchor_range_inside(range);
+            let start = snapshot.anchor_in_buffer(text_range.start)?;
+            let end = snapshot.anchor_in_buffer(text_range.end)?;
+            Some(start..end)
+        });
+        (
+            anchors.next().expect("should have first anchor"),
+            anchors.next().expect("should have second anchor"),
+        )
+    });
+
+    assert_excerpts_match(
+        &multibuffer,
+        cx,
+        indoc! {
+        "-----
+        alpha
+        beta
+        -----
+        delta
+        epsilon
+        "
+        },
+    );
+
+    let buffer_a_id = buffer_a.read_with(cx, |buffer, _| buffer.remote_id());
+    multibuffer.read_with(cx, |multibuffer, cx| {
+        let snapshot = multibuffer.snapshot(cx);
+        assert!(
+            snapshot
+                .excerpts()
+                .any(|excerpt| excerpt.context.start.buffer_id == buffer_a_id),
+        );
+    });
+
+    let ranges_b = vec![Point::row_range(1..2)];
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.set_excerpts_for_path(path.clone(), buffer_b.clone(), ranges_b.clone(), 1, cx);
+    });
+    let anchor_b = multibuffer.read_with(cx, |multibuffer, cx| {
+        let snapshot = multibuffer.snapshot(cx);
+        let buffer_snapshot = buffer_b.read(cx).snapshot();
+        ranges_b
+            .into_iter()
+            .filter_map(|range| {
+                let text_range = buffer_snapshot.anchor_range_inside(range);
+                let start = snapshot.anchor_in_buffer(text_range.start)?;
+                let end = snapshot.anchor_in_buffer(text_range.end)?;
+                Some(start..end)
+            })
+            .next()
+            .expect("should have an anchor")
+    });
+
+    let buffer_b_id = buffer_b.read_with(cx, |buffer, _| buffer.remote_id());
+    multibuffer.read_with(cx, |multibuffer, cx| {
+        let snapshot = multibuffer.snapshot(cx);
+        assert!(
+            !snapshot
+                .excerpts()
+                .any(|excerpt| excerpt.context.start.buffer_id == buffer_a_id),
+        );
+        assert!(
+            snapshot
+                .excerpts()
+                .any(|excerpt| excerpt.context.start.buffer_id == buffer_b_id),
+        );
+        assert!(
+            multibuffer.buffer(buffer_a_id).is_none(),
+            "old buffer should be fully removed from the multibuffer"
+        );
+        assert!(
+            multibuffer.buffer(buffer_b_id).is_some(),
+            "new buffer should be present in the multibuffer"
+        );
+    });
+    assert!(
+        removed_buffer_ids.read().contains(&buffer_a_id),
+        "BuffersRemoved event should have been emitted for the old buffer"
+    );
+
+    assert_excerpts_match(
+        &multibuffer,
+        cx,
+        indoc! {
+        "-----
+        one
+        two
+        three
+        four
+        "
+        },
+    );
+
+    multibuffer.read_with(cx, |multibuffer, cx| {
+        let snapshot = multibuffer.snapshot(cx);
+        anchor_a1.start.cmp(&anchor_b.start, &snapshot);
+        anchor_a1.end.cmp(&anchor_b.end, &snapshot);
+        anchor_a1.start.cmp(&anchor_a2.start, &snapshot);
+        anchor_a1.end.cmp(&anchor_a2.end, &snapshot);
+    });
+}
+
+#[gpui::test]
+fn test_stale_anchor_after_buffer_removal_and_path_reuse(cx: &mut TestAppContext) {
+    let buffer_a = cx.new(|cx| Buffer::local("aaa\nbbb\nccc\n", cx));
+    let buffer_b = cx.new(|cx| Buffer::local("xxx\nyyy\nzzz\n", cx));
+    let buffer_other = cx.new(|cx| Buffer::local("111\n222\n333\n", cx));
+    let path = PathKey::with_sort_prefix(0, rel_path("the/path").into_arc());
+    let other_path = PathKey::with_sort_prefix(1, rel_path("other/path").into_arc());
+
+    let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
+
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.set_excerpts_for_path(
+            path.clone(),
+            buffer_a.clone(),
+            [Point::new(0, 0)..Point::new(2, 3)],
+            0,
+            cx,
+        );
+        multibuffer.set_excerpts_for_path(
+            other_path.clone(),
+            buffer_other.clone(),
+            [Point::new(0, 0)..Point::new(2, 3)],
+            0,
+            cx,
+        );
+    });
+
+    buffer_a.update(cx, |buffer, cx| {
+        buffer.edit(
+            [(Point::new(1, 0)..Point::new(1, 0), "INSERTED ")],
+            None,
+            cx,
+        );
+    });
+
+    let stale_anchor = multibuffer.read_with(cx, |multibuffer, cx| {
+        let snapshot = multibuffer.snapshot(cx);
+        snapshot.anchor_before(Point::new(1, 5))
+    });
+
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.remove_excerpts(path.clone(), cx);
+    });
+
+    multibuffer.read_with(cx, |multibuffer, cx| {
+        let snapshot = multibuffer.snapshot(cx);
+        let offset = stale_anchor.to_offset(&snapshot);
+        assert!(
+            offset.0 <= snapshot.len().0,
+            "stale anchor resolved to offset {offset:?} but multibuffer len is {:?}",
+            snapshot.len()
+        );
+    });
+
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.set_excerpts_for_path(
+            path.clone(),
+            buffer_b.clone(),
+            [Point::new(0, 0)..Point::new(2, 3)],
+            0,
+            cx,
+        );
+    });
+
+    multibuffer.read_with(cx, |multibuffer, cx| {
+        let snapshot = multibuffer.snapshot(cx);
+        let offset = stale_anchor.to_offset(&snapshot);
+        assert!(
+            offset.0 <= snapshot.len().0,
+            "stale anchor resolved to offset {offset:?} but multibuffer len is {:?}",
+            snapshot.len()
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_map_excerpt_ranges(cx: &mut TestAppContext) {
+    let base_text = indoc!(
+        "
+        {
+          (aaa)
+          (bbb)
+          (ccc)
+        }
+        xxx
+        yyy
+        zzz
+        [
+          (ddd)
+          (EEE)
+        ]
+        "
+    );
+    let text = indoc!(
+        "
+        {
+          (aaa)
+          (CCC)
+        }
+        xxx
+        yyy
+        zzz
+        [
+          (ddd)
+          (EEE)
+        ]
+        "
+    );
+
+    let buffer = cx.new(|cx| Buffer::local(text, cx));
+    let diff = cx
+        .new(|cx| BufferDiff::new_with_base_text(base_text, &buffer.read(cx).text_snapshot(), cx));
+    cx.run_until_parked();
+
+    let multibuffer = cx.new(|cx| {
+        let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
+        multibuffer.set_excerpts_for_path(
+            PathKey::sorted(0),
+            buffer.clone(),
+            [
+                Point::new(0, 0)..Point::new(3, 1),
+                Point::new(7, 0)..Point::new(10, 1),
+            ],
+            0,
+            cx,
+        );
+        multibuffer.add_diff(diff.clone(), cx);
+        multibuffer
+    });
+
+    multibuffer.update(cx, |multibuffer, cx| {
+        multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
+    });
+    cx.run_until_parked();
+
+    let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
+
+    let actual_diff = format_diff(
+        &snapshot.text(),
+        &snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>(),
+        &Default::default(),
+        None,
+    );
+    pretty_assertions::assert_eq!(
+        actual_diff,
+        indoc!(
+            "
+              {
+                (aaa)
+            -   (bbb)
+            -   (ccc)
+            +   (CCC)
+              } [\u{2193}]
+              [ [\u{2191}]
+                (ddd)
+                (EEE)
+              ] [\u{2193}]"
+        )
+    );
+
+    assert_eq!(
+        snapshot.map_excerpt_ranges(
+            snapshot.point_to_offset(Point::new(1, 3))..snapshot.point_to_offset(Point::new(1, 3)),
+            |buffer, excerpt_range, input_range| {
+                assert_eq!(
+                    buffer.offset_to_point(input_range.start.0)
+                        ..buffer.offset_to_point(input_range.end.0),
+                    Point::new(1, 3)..Point::new(1, 3),
+                );
+                assert_eq!(
+                    buffer.offset_to_point(excerpt_range.context.start.0)
+                        ..buffer.offset_to_point(excerpt_range.context.end.0),
+                    Point::new(0, 0)..Point::new(3, 1),
+                );
+                vec![
+                    (input_range.start..BufferOffset(input_range.start.0 + 3), ()),
+                    (excerpt_range.context, ()),
+                    (
+                        BufferOffset(text::ToOffset::to_offset(&Point::new(2, 2), buffer))
+                            ..BufferOffset(text::ToOffset::to_offset(&Point::new(2, 7), buffer)),
+                        (),
+                    ),
+                    (
+                        BufferOffset(text::ToOffset::to_offset(&Point::new(0, 0), buffer))
+                            ..BufferOffset(text::ToOffset::to_offset(&Point::new(2, 0), buffer)),
+                        (),
+                    ),
+                ]
+            },
+        ),
+        Some(vec![
+            (
+                snapshot.point_to_offset(Point::new(1, 3))
+                    ..snapshot.point_to_offset(Point::new(1, 6)),
+                (),
+            ),
+            (
+                snapshot.point_to_offset(Point::zero())..snapshot.point_to_offset(Point::new(5, 1)),
+                ()
+            ),
+            (
+                snapshot.point_to_offset(Point::new(4, 2))
+                    ..snapshot.point_to_offset(Point::new(4, 7)),
+                (),
+            ),
+            (
+                snapshot.point_to_offset(Point::zero())..snapshot.point_to_offset(Point::new(4, 0)),
+                ()
+            ),
+        ]),
+    );
+
+    assert_eq!(
+        snapshot.map_excerpt_ranges(
+            snapshot.point_to_offset(Point::new(5, 0))..snapshot.point_to_offset(Point::new(7, 0)),
+            |_, _, range| vec![(range, ())],
+        ),
+        None,
+    );
+
+    assert_eq!(
+        snapshot.map_excerpt_ranges(
+            snapshot.point_to_offset(Point::new(7, 3))..snapshot.point_to_offset(Point::new(7, 6)),
+            |buffer, excerpt_range, input_range| {
+                assert_eq!(
+                    buffer.offset_to_point(input_range.start.0)
+                        ..buffer.offset_to_point(input_range.end.0),
+                    Point::new(8, 3)..Point::new(8, 6),
+                );
+                assert_eq!(
+                    buffer.offset_to_point(excerpt_range.context.start.0)
+                        ..buffer.offset_to_point(excerpt_range.context.end.0),
+                    Point::new(7, 0)..Point::new(10, 1),
+                );
+                vec![(input_range, ())]
+            },
+        ),
+        Some(vec![(
+            snapshot.point_to_offset(Point::new(7, 3))..snapshot.point_to_offset(Point::new(7, 6)),
+            (),
+        )]),
+    );
+}
+
 #[gpui::test]
 async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
     let base_text_1 = indoc!(
@@ -2273,7 +2733,7 @@ async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
     );
 
     multibuffer.update(cx, |multibuffer, cx| {
-        multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+        multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
     });
 
     assert_new_snapshot(
@@ -2423,101 +2883,136 @@ struct ReferenceMultibuffer {
     excerpts: Vec<ReferenceExcerpt>,
     diffs: HashMap<BufferId, Entity<BufferDiff>>,
     inverted_diffs: HashMap<BufferId, (Entity<BufferDiff>, Entity<language::Buffer>)>,
+    expanded_diff_hunks_by_buffer: HashMap<BufferId, Vec<text::Anchor>>,
 }
 
-#[derive(Debug)]
+#[derive(Clone, Debug)]
 struct ReferenceExcerpt {
-    id: ExcerptId,
+    path_key: PathKey,
+    path_key_index: PathKeyIndex,
     buffer: Entity<Buffer>,
     range: Range<text::Anchor>,
-    expanded_diff_hunks: Vec<text::Anchor>,
 }
 
-#[derive(Debug)]
+#[derive(Clone, Debug)]
 struct ReferenceRegion {
     buffer_id: Option<BufferId>,
     range: Range<usize>,
     buffer_range: Option<Range<Point>>,
     status: Option<DiffHunkStatus>,
-    excerpt_id: Option<ExcerptId>,
+    excerpt_range: Option<Range<text::Anchor>>,
+    excerpt_path_key_index: Option<PathKeyIndex>,
 }
 
 impl ReferenceMultibuffer {
-    fn expand_excerpts(&mut self, excerpts: &HashSet<ExcerptId>, line_count: u32, cx: &App) {
-        if line_count == 0 {
+    fn expand_excerpts(
+        &mut self,
+        excerpts: &HashSet<ExcerptRange<text::Anchor>>,
+        line_count: u32,
+        cx: &mut App,
+    ) {
+        use text::AnchorRangeExt as _;
+
+        if line_count == 0 || excerpts.is_empty() {
             return;
         }
 
-        for id in excerpts {
-            let excerpt = self.excerpts.iter_mut().find(|e| e.id == *id).unwrap();
-            let snapshot = excerpt.buffer.read(cx).snapshot();
-            let mut point_range = excerpt.range.to_point(&snapshot);
-            point_range.start = Point::new(point_range.start.row.saturating_sub(line_count), 0);
-            point_range.end =
-                snapshot.clip_point(Point::new(point_range.end.row + line_count, 0), Bias::Left);
-            point_range.end.column = snapshot.line_len(point_range.end.row);
-            excerpt.range =
-                snapshot.anchor_before(point_range.start)..snapshot.anchor_after(point_range.end);
+        let mut excerpts_by_buffer: HashMap<BufferId, Vec<ExcerptRange<text::Anchor>>> =
+            HashMap::default();
+        for excerpt in excerpts {
+            excerpts_by_buffer
+                .entry(excerpt.context.start.buffer_id)
+                .or_default()
+                .push(excerpt.clone())
         }
-    }
 
-    fn remove_excerpt(&mut self, id: ExcerptId, cx: &App) {
-        let ix = self
-            .excerpts
-            .iter()
-            .position(|excerpt| excerpt.id == id)
-            .unwrap();
-        let excerpt = self.excerpts.remove(ix);
-        let buffer = excerpt.buffer.read(cx);
-        let buffer_id = buffer.remote_id();
-        log::info!(
-            "Removing excerpt {}: {:?}",
-            ix,
-            buffer
-                .text_for_range(excerpt.range.to_offset(buffer))
-                .collect::<String>(),
-        );
-        if !self
-            .excerpts
-            .iter()
-            .any(|excerpt| excerpt.buffer.read(cx).remote_id() == buffer_id)
-        {
-            self.diffs.remove(&buffer_id);
-            self.inverted_diffs.remove(&buffer_id);
+        for (buffer_id, excerpts_to_expand) in excerpts_by_buffer {
+            let mut buffer = None;
+            let mut buffer_snapshot = None;
+            let mut path = None;
+            let mut path_key_index = None;
+            let mut new_ranges =
+                self.excerpts
+                    .iter()
+                    .filter(|excerpt| excerpt.range.start.buffer_id == buffer_id)
+                    .map(|excerpt| {
+                        let snapshot = excerpt.buffer.read(cx).snapshot();
+                        let mut range = excerpt.range.to_point(&snapshot);
+                        if excerpts_to_expand.iter().any(|info| {
+                            excerpt.range.contains_anchor(info.context.start, &snapshot)
+                        }) {
+                            range.start = Point::new(range.start.row.saturating_sub(line_count), 0);
+                            range.end = snapshot
+                                .clip_point(Point::new(range.end.row + line_count, 0), Bias::Left);
+                            range.end.column = snapshot.line_len(range.end.row);
+                        }
+                        buffer = Some(excerpt.buffer.clone());
+                        buffer_snapshot = Some(snapshot);
+                        path = Some(excerpt.path_key.clone());
+                        path_key_index = Some(excerpt.path_key_index);
+                        ExcerptRange::new(range)
+                    })
+                    .collect::<Vec<_>>();
+
+            new_ranges.sort_by(|l, r| l.context.start.cmp(&r.context.start));
+
+            self.set_excerpts(
+                path.unwrap(),
+                path_key_index.unwrap(),
+                buffer.unwrap(),
+                &buffer_snapshot.unwrap(),
+                new_ranges,
+                cx,
+            );
         }
     }
 
-    fn insert_excerpt_after(
+    fn set_excerpts(
         &mut self,
-        prev_id: ExcerptId,
-        new_excerpt_id: ExcerptId,
-        (buffer_handle, anchor_range): (Entity<Buffer>, Range<text::Anchor>),
+        path_key: PathKey,
+        path_key_index: PathKeyIndex,
+        buffer: Entity<Buffer>,
+        buffer_snapshot: &BufferSnapshot,
+        ranges: Vec<ExcerptRange<Point>>,
+        cx: &mut App,
     ) {
-        let excerpt_ix = if prev_id == ExcerptId::max() {
-            self.excerpts.len()
-        } else {
-            self.excerpts
-                .iter()
-                .position(|excerpt| excerpt.id == prev_id)
-                .unwrap()
-                + 1
-        };
-        self.excerpts.insert(
-            excerpt_ix,
-            ReferenceExcerpt {
-                id: new_excerpt_id,
-                buffer: buffer_handle,
-                range: anchor_range,
-                expanded_diff_hunks: Vec::new(),
-            },
+        self.excerpts.retain(|excerpt| {
+            excerpt.path_key != path_key && excerpt.buffer.entity_id() != buffer.entity_id()
+        });
+
+        let ranges = MultiBuffer::merge_excerpt_ranges(&ranges);
+
+        let (Ok(ix) | Err(ix)) = self
+            .excerpts
+            .binary_search_by(|probe| probe.path_key.cmp(&path_key));
+        self.excerpts.splice(
+            ix..ix,
+            ranges.into_iter().map(|range| ReferenceExcerpt {
+                path_key: path_key.clone(),
+                path_key_index,
+                buffer: buffer.clone(),
+                range: buffer_snapshot.anchor_before(range.context.start)
+                    ..buffer_snapshot.anchor_after(range.context.end),
+            }),
         );
+        self.update_expanded_diff_hunks_for_buffer(buffer_snapshot.remote_id(), cx);
     }
 
-    fn expand_diff_hunks(&mut self, excerpt_id: ExcerptId, range: Range<text::Anchor>, cx: &App) {
+    fn expand_diff_hunks(&mut self, path_key: PathKey, range: Range<text::Anchor>, cx: &App) {
         let excerpt = self
             .excerpts
             .iter_mut()
-            .find(|e| e.id == excerpt_id)
+            .find(|e| {
+                e.path_key == path_key
+                    && e.range
+                        .start
+                        .cmp(&range.start, &e.buffer.read(cx).snapshot())
+                        .is_le()
+                    && e.range
+                        .end
+                        .cmp(&range.end, &e.buffer.read(cx).snapshot())
+                        .is_ge()
+            })
             .unwrap();
         let buffer = excerpt.buffer.read(cx).snapshot();
         let buffer_id = buffer.remote_id();

crates/multi_buffer/src/path_key.rs πŸ”—

@@ -1,24 +1,20 @@
-use std::{mem, ops::Range, sync::Arc};
+use std::{ops::Range, rc::Rc, sync::Arc};
 
-use collections::HashSet;
 use gpui::{App, AppContext, Context, Entity};
 use itertools::Itertools;
 use language::{Buffer, BufferSnapshot};
 use rope::Point;
-use text::{Bias, OffsetRangeExt, locator::Locator};
-use util::{post_inc, rel_path::RelPath};
+use sum_tree::{Dimensions, SumTree};
+use text::{Bias, BufferId, Edit, OffsetRangeExt, Patch};
+use util::rel_path::RelPath;
 use ztracing::instrument;
 
 use crate::{
-    Anchor, ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer, build_excerpt_ranges,
+    Anchor, BufferState, BufferStateSnapshot, DiffChangeKind, Event, Excerpt, ExcerptOffset,
+    ExcerptRange, ExcerptSummary, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset,
+    PathKeyIndex, build_excerpt_ranges, remove_diff_state,
 };
 
-#[derive(Debug, Clone)]
-pub struct PathExcerptInsertResult {
-    pub excerpt_ids: Vec<ExcerptId>,
-    pub added_new_excerpt: bool,
-}
-
 #[derive(PartialEq, Eq, Ord, PartialOrd, Clone, Hash, Debug)]
 pub struct PathKey {
     // Used by the derived PartialOrd & Ord
@@ -27,6 +23,13 @@ pub struct PathKey {
 }
 
 impl PathKey {
+    pub fn min() -> Self {
+        Self {
+            sort_prefix: None,
+            path: RelPath::empty().into_arc(),
+        }
+    }
+
     pub fn sorted(sort_prefix: u64) -> Self {
         Self {
             sort_prefix: Some(sort_prefix),
@@ -55,41 +58,17 @@ impl PathKey {
 }
 
 impl MultiBuffer {
-    pub fn paths(&self) -> impl Iterator<Item = &PathKey> + '_ {
-        self.excerpts_by_path.keys()
-    }
-
-    pub fn excerpts_for_path(&self, path: &PathKey) -> impl '_ + Iterator<Item = ExcerptId> {
-        self.excerpts_by_path
-            .get(path)
-            .map(|excerpts| excerpts.as_slice())
-            .unwrap_or_default()
-            .iter()
-            .copied()
-    }
-
-    pub fn path_for_excerpt(&self, excerpt: ExcerptId) -> Option<PathKey> {
-        self.paths_by_excerpt.get(&excerpt).cloned()
-    }
-
-    pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context<Self>) {
-        if let Some(to_remove) = self.excerpts_by_path.remove(&path) {
-            self.remove_excerpts(to_remove, cx)
-        }
-    }
-
     pub fn buffer_for_path(&self, path: &PathKey, cx: &App) -> Option<Entity<Buffer>> {
-        let excerpt_id = self.excerpts_by_path.get(path)?.first()?;
-        let snapshot = self.read(cx);
-        let excerpt = snapshot.excerpt(*excerpt_id)?;
-        self.buffer(excerpt.buffer_id)
+        let snapshot = self.snapshot(cx);
+        let excerpt = snapshot.excerpts_for_path(path).next()?;
+        self.buffer(excerpt.context.start.buffer_id)
     }
 
     pub fn location_for_path(&self, path: &PathKey, cx: &App) -> Option<Anchor> {
-        let excerpt_id = self.excerpts_by_path.get(path)?.first()?;
-        let snapshot = self.read(cx);
-        let excerpt = snapshot.excerpt(*excerpt_id)?;
-        Some(Anchor::in_buffer(excerpt.id, excerpt.range.context.start))
+        let snapshot = self.snapshot(cx);
+        let excerpt = snapshot.excerpts_for_path(path).next()?;
+        let path_key_index = snapshot.path_key_index_for_buffer(excerpt.context.start.buffer_id)?;
+        Some(Anchor::in_buffer(path_key_index, excerpt.context.start))
     }
 
     pub fn set_excerpts_for_buffer(
@@ -98,12 +77,14 @@ impl MultiBuffer {
         ranges: impl IntoIterator<Item = Range<Point>>,
         context_line_count: u32,
         cx: &mut Context<Self>,
-    ) -> (Vec<Range<Anchor>>, bool) {
+    ) -> bool {
         let path = PathKey::for_buffer(&buffer, cx);
         self.set_excerpts_for_path(path, buffer, ranges, context_line_count, cx)
     }
 
     /// Sets excerpts, returns `true` if at least one new excerpt was added.
+    ///
+    /// Any existing excerpts for this buffer or this path will be replaced by the provided ranges.
     #[instrument(skip_all)]
     pub fn set_excerpts_for_path(
         &mut self,
@@ -112,20 +93,83 @@ impl MultiBuffer {
         ranges: impl IntoIterator<Item = Range<Point>>,
         context_line_count: u32,
         cx: &mut Context<Self>,
-    ) -> (Vec<Range<Anchor>>, bool) {
+    ) -> bool {
         let buffer_snapshot = buffer.read(cx).snapshot();
+        let ranges: Vec<_> = ranges.into_iter().collect();
         let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot);
 
-        let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges);
-        self.set_merged_excerpt_ranges_for_path(
-            path,
-            buffer,
-            excerpt_ranges,
+        let merged = Self::merge_excerpt_ranges(&excerpt_ranges);
+        let (inserted, _path_key_index) =
+            self.set_merged_excerpt_ranges_for_path(path, buffer, &buffer_snapshot, merged, cx);
+        inserted
+    }
+
+    /// Like [`Self::set_excerpts_for_path`], but expands the provided ranges to cover any overlapping existing excerpts
+    /// for the same buffer and path.
+    ///
+    /// Existing excerpts that do not overlap any of the provided ranges are discarded.
+    pub fn update_excerpts_for_path(
+        &mut self,
+        path: PathKey,
+        buffer: Entity<Buffer>,
+        ranges: impl IntoIterator<Item = Range<Point>>,
+        context_line_count: u32,
+        cx: &mut Context<Self>,
+    ) -> bool {
+        let buffer_snapshot = buffer.read(cx).snapshot();
+        let ranges: Vec<_> = ranges.into_iter().collect();
+        let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot);
+        let merged = self.merge_new_with_existing_excerpt_ranges(
+            &path,
             &buffer_snapshot,
-            new,
-            counts,
+            excerpt_ranges,
             cx,
-        )
+        );
+
+        let (inserted, _path_key_index) =
+            self.set_merged_excerpt_ranges_for_path(path, buffer, &buffer_snapshot, merged, cx);
+        inserted
+    }
+
+    pub fn merge_new_with_existing_excerpt_ranges(
+        &self,
+        path: &PathKey,
+        buffer_snapshot: &BufferSnapshot,
+        mut excerpt_ranges: Vec<ExcerptRange<Point>>,
+        cx: &App,
+    ) -> Vec<ExcerptRange<Point>> {
+        let multibuffer_snapshot = self.snapshot(cx);
+
+        if multibuffer_snapshot.path_for_buffer(buffer_snapshot.remote_id()) == Some(path) {
+            excerpt_ranges.sort_by_key(|range| range.context.start);
+            let mut combined_ranges = Vec::new();
+            let mut new_ranges = excerpt_ranges.into_iter().peekable();
+            for existing_range in
+                multibuffer_snapshot.excerpts_for_buffer(buffer_snapshot.remote_id())
+            {
+                let existing_range = ExcerptRange {
+                    context: existing_range.context.to_point(buffer_snapshot),
+                    primary: existing_range.primary.to_point(buffer_snapshot),
+                };
+                while let Some(new_range) = new_ranges.peek()
+                    && new_range.context.end < existing_range.context.start
+                {
+                    combined_ranges.push(new_range.clone());
+                    new_ranges.next();
+                }
+
+                if let Some(new_range) = new_ranges.peek()
+                    && new_range.context.start <= existing_range.context.end
+                {
+                    combined_ranges.push(existing_range)
+                }
+            }
+            combined_ranges.extend(new_ranges);
+            excerpt_ranges = combined_ranges;
+        }
+
+        excerpt_ranges.sort_by_key(|range| range.context.start);
+        Self::merge_excerpt_ranges(&excerpt_ranges)
     }
 
     pub fn set_excerpt_ranges_for_path(
@@ -135,17 +179,11 @@ impl MultiBuffer {
         buffer_snapshot: &BufferSnapshot,
         excerpt_ranges: Vec<ExcerptRange<Point>>,
         cx: &mut Context<Self>,
-    ) -> (Vec<Range<Anchor>>, bool) {
-        let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges);
-        self.set_merged_excerpt_ranges_for_path(
-            path,
-            buffer,
-            excerpt_ranges,
-            buffer_snapshot,
-            new,
-            counts,
-            cx,
-        )
+    ) -> bool {
+        let merged = Self::merge_excerpt_ranges(&excerpt_ranges);
+        let (inserted, _path_key_index) =
+            self.set_merged_excerpt_ranges_for_path(path, buffer, buffer_snapshot, merged, cx);
+        inserted
     }
 
     pub fn set_anchored_excerpts_for_path(
@@ -161,350 +199,505 @@ impl MultiBuffer {
         let mut app = cx.to_async();
         async move {
             let snapshot = buffer_snapshot.clone();
-            let (excerpt_ranges, new, counts) = app
+            let (ranges, merged_excerpt_ranges) = app
                 .background_spawn(async move {
-                    let ranges = ranges.into_iter().map(|range| range.to_point(&snapshot));
+                    let point_ranges = ranges.iter().map(|range| range.to_point(&snapshot));
                     let excerpt_ranges =
-                        build_excerpt_ranges(ranges, context_line_count, &snapshot);
-                    let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges);
-                    (excerpt_ranges, new, counts)
+                        build_excerpt_ranges(point_ranges, context_line_count, &snapshot);
+                    let merged = Self::merge_excerpt_ranges(&excerpt_ranges);
+                    (ranges, merged)
                 })
                 .await;
 
             multi_buffer
                 .update(&mut app, move |multi_buffer, cx| {
-                    let (ranges, _) = multi_buffer.set_merged_excerpt_ranges_for_path(
+                    let (_, path_key_index) = multi_buffer.set_merged_excerpt_ranges_for_path(
                         path_key,
                         buffer,
-                        excerpt_ranges,
                         &buffer_snapshot,
-                        new,
-                        counts,
+                        merged_excerpt_ranges,
                         cx,
                     );
                     ranges
+                        .into_iter()
+                        .map(|range| Anchor::range_in_buffer(path_key_index, range))
+                        .collect()
                 })
                 .ok()
                 .unwrap_or_default()
         }
     }
 
-    pub(super) fn expand_excerpts_with_paths(
+    pub fn expand_excerpts(
         &mut self,
-        ids: impl IntoIterator<Item = ExcerptId>,
+        anchors: impl IntoIterator<Item = Anchor>,
         line_count: u32,
         direction: ExpandExcerptDirection,
         cx: &mut Context<Self>,
     ) {
-        let mut sorted_ids: Vec<ExcerptId> = ids.into_iter().collect();
-        sorted_ids.sort_by(|a, b| {
-            let path_a = self.paths_by_excerpt.get(a);
-            let path_b = self.paths_by_excerpt.get(b);
-            path_a.cmp(&path_b)
-        });
-        let grouped = sorted_ids
-            .into_iter()
-            .chunk_by(|id| self.paths_by_excerpt.get(id).cloned())
+        if line_count == 0 {
+            return;
+        }
+
+        let snapshot = self.snapshot(cx);
+        let mut sorted_anchors = anchors
             .into_iter()
-            .filter_map(|(k, v)| Some((k?, v.into_iter().collect::<Vec<_>>())))
+            .filter_map(|anchor| anchor.excerpt_anchor())
             .collect::<Vec<_>>();
-        let snapshot = self.snapshot(cx);
-
-        for (path, ids) in grouped.into_iter() {
-            let Some(excerpt_ids) = self.excerpts_by_path.get(&path) else {
+        if sorted_anchors.is_empty() {
+            return;
+        }
+        sorted_anchors.sort_by(|a, b| a.cmp(b, &snapshot));
+        let buffers = sorted_anchors.into_iter().chunk_by(|anchor| anchor.path);
+        let mut cursor = snapshot.excerpts.cursor::<ExcerptSummary>(());
+
+        for (path_index, excerpt_anchors) in &buffers {
+            let path = snapshot
+                .path_keys_by_index
+                .get(&path_index)
+                .expect("anchor from wrong multibuffer");
+
+            let mut excerpt_anchors = excerpt_anchors.peekable();
+            let mut ranges = Vec::new();
+
+            cursor.seek_forward(path, Bias::Left);
+            let Some((buffer, buffer_snapshot)) = cursor
+                .item()
+                .map(|excerpt| (excerpt.buffer(&self), excerpt.buffer_snapshot(&snapshot)))
+            else {
                 continue;
             };
 
-            let ids_to_expand = HashSet::from_iter(ids);
-            let mut excerpt_id_ = None;
-            let expanded_ranges = excerpt_ids.iter().filter_map(|excerpt_id| {
-                let excerpt = snapshot.excerpt(*excerpt_id)?;
-                let excerpt_id = excerpt.id;
-                if excerpt_id_.is_none() {
-                    excerpt_id_ = Some(excerpt_id);
+            while let Some(excerpt) = cursor.item()
+                && &excerpt.path_key == path
+            {
+                let mut range = ExcerptRange {
+                    context: excerpt.range.context.to_point(buffer_snapshot),
+                    primary: excerpt.range.primary.to_point(buffer_snapshot),
+                };
+
+                let mut needs_expand = false;
+                while excerpt_anchors.peek().is_some_and(|anchor| {
+                    excerpt
+                        .range
+                        .contains(&anchor.text_anchor(), buffer_snapshot)
+                }) {
+                    needs_expand = true;
+                    excerpt_anchors.next();
                 }
 
-                let mut context = excerpt.range.context.to_point(&excerpt.buffer);
-                if ids_to_expand.contains(&excerpt_id) {
+                if needs_expand {
                     match direction {
                         ExpandExcerptDirection::Up => {
-                            context.start.row = context.start.row.saturating_sub(line_count);
-                            context.start.column = 0;
+                            range.context.start.row =
+                                range.context.start.row.saturating_sub(line_count);
+                            range.context.start.column = 0;
                         }
                         ExpandExcerptDirection::Down => {
-                            context.end.row =
-                                (context.end.row + line_count).min(excerpt.buffer.max_point().row);
-                            context.end.column = excerpt.buffer.line_len(context.end.row);
+                            range.context.end.row = (range.context.end.row + line_count)
+                                .min(excerpt.buffer_snapshot(&snapshot).max_point().row);
+                            range.context.end.column = excerpt
+                                .buffer_snapshot(&snapshot)
+                                .line_len(range.context.end.row);
                         }
                         ExpandExcerptDirection::UpAndDown => {
-                            context.start.row = context.start.row.saturating_sub(line_count);
-                            context.start.column = 0;
-                            context.end.row =
-                                (context.end.row + line_count).min(excerpt.buffer.max_point().row);
-                            context.end.column = excerpt.buffer.line_len(context.end.row);
+                            range.context.start.row =
+                                range.context.start.row.saturating_sub(line_count);
+                            range.context.start.column = 0;
+                            range.context.end.row = (range.context.end.row + line_count)
+                                .min(excerpt.buffer_snapshot(&snapshot).max_point().row);
+                            range.context.end.column = excerpt
+                                .buffer_snapshot(&snapshot)
+                                .line_len(range.context.end.row);
                         }
                     }
                 }
 
-                Some(ExcerptRange {
-                    context,
-                    primary: excerpt.range.primary.to_point(&excerpt.buffer),
-                })
-            });
-            let mut merged_ranges: Vec<ExcerptRange<Point>> = Vec::new();
-            for range in expanded_ranges {
-                if let Some(last_range) = merged_ranges.last_mut()
-                    && last_range.context.end >= range.context.start
-                {
-                    last_range.context.end = range.context.end;
-                    continue;
-                }
-                merged_ranges.push(range)
+                ranges.push(range);
+                cursor.next();
             }
-            let Some(excerpt_id) = excerpt_id_ else {
-                continue;
-            };
-            let Some(buffer_id) = &snapshot.buffer_id_for_excerpt(excerpt_id) else {
-                continue;
-            };
 
-            let Some(buffer) = self.buffers.get(buffer_id).map(|b| b.buffer.clone()) else {
-                continue;
-            };
+            ranges.sort_by(|l, r| l.context.start.cmp(&r.context.start));
 
-            let buffer_snapshot = buffer.read(cx).snapshot();
-            self.update_path_excerpts(path.clone(), buffer, &buffer_snapshot, merged_ranges, cx);
+            self.set_excerpt_ranges_for_path(path.clone(), buffer, buffer_snapshot, ranges, cx);
         }
     }
 
     /// Sets excerpts, returns `true` if at least one new excerpt was added.
-    fn set_merged_excerpt_ranges_for_path(
+    pub(crate) fn set_merged_excerpt_ranges_for_path<T>(
         &mut self,
         path: PathKey,
         buffer: Entity<Buffer>,
-        ranges: Vec<ExcerptRange<Point>>,
         buffer_snapshot: &BufferSnapshot,
-        new: Vec<ExcerptRange<Point>>,
-        counts: Vec<usize>,
+        new: Vec<ExcerptRange<T>>,
         cx: &mut Context<Self>,
-    ) -> (Vec<Range<Anchor>>, bool) {
-        let insert_result = self.update_path_excerpts(path, buffer, buffer_snapshot, new, cx);
-
-        let mut result = Vec::new();
-        let mut ranges = ranges.into_iter();
-        for (excerpt_id, range_count) in insert_result
-            .excerpt_ids
+    ) -> (bool, PathKeyIndex)
+    where
+        T: language::ToOffset,
+    {
+        let anchor_ranges = new
             .into_iter()
-            .zip(counts.into_iter())
-        {
-            for range in ranges.by_ref().take(range_count) {
-                let range = Anchor::range_in_buffer(
-                    excerpt_id,
-                    buffer_snapshot.anchor_before(&range.primary.start)
-                        ..buffer_snapshot.anchor_after(&range.primary.end),
-                );
-                result.push(range)
-            }
+            .map(|r| ExcerptRange {
+                context: buffer_snapshot.anchor_before(r.context.start)
+                    ..buffer_snapshot.anchor_after(r.context.end),
+                primary: buffer_snapshot.anchor_before(r.primary.start)
+                    ..buffer_snapshot.anchor_after(r.primary.end),
+            })
+            .collect::<Vec<_>>();
+        let inserted =
+            self.update_path_excerpts(path.clone(), buffer, buffer_snapshot, &anchor_ranges, cx);
+        let path_key_index = self.get_or_create_path_key_index(&path);
+        (inserted, path_key_index)
+    }
+
+    pub(crate) fn get_or_create_path_key_index(&mut self, path_key: &PathKey) -> PathKeyIndex {
+        let mut snapshot = self.snapshot.borrow_mut();
+
+        if let Some(&existing) = snapshot.indices_by_path_key.get(path_key) {
+            return existing;
         }
-        (result, insert_result.added_new_excerpt)
+
+        let index = snapshot
+            .path_keys_by_index
+            .last()
+            .map(|(index, _)| PathKeyIndex(index.0 + 1))
+            .unwrap_or(PathKeyIndex(0));
+        snapshot.path_keys_by_index.insert(index, path_key.clone());
+        snapshot.indices_by_path_key.insert(path_key.clone(), index);
+        index
     }
 
     pub fn update_path_excerpts(
         &mut self,
-        path: PathKey,
+        path_key: PathKey,
         buffer: Entity<Buffer>,
         buffer_snapshot: &BufferSnapshot,
-        new: Vec<ExcerptRange<Point>>,
+        to_insert: &Vec<ExcerptRange<text::Anchor>>,
         cx: &mut Context<Self>,
-    ) -> PathExcerptInsertResult {
-        let mut insert_after = self
-            .excerpts_by_path
-            .range(..path.clone())
-            .next_back()
-            .and_then(|(_, value)| value.last().copied())
-            .unwrap_or(ExcerptId::min());
-
-        let existing = self
-            .excerpts_by_path
-            .get(&path)
-            .cloned()
-            .unwrap_or_default();
-        let mut new_iter = new.into_iter().peekable();
-        let mut existing_iter = existing.into_iter().peekable();
-
-        let mut excerpt_ids = Vec::new();
-        let mut to_remove = Vec::new();
-        let mut to_insert: Vec<(ExcerptId, ExcerptRange<Point>)> = Vec::new();
-        let mut added_a_new_excerpt = false;
-        let snapshot = self.snapshot(cx);
+    ) -> bool {
+        let path_key_index = self.get_or_create_path_key_index(&path_key);
+        if let Some(old_path_key) = self
+            .snapshot(cx)
+            .path_for_buffer(buffer_snapshot.remote_id())
+            && old_path_key != &path_key
+        {
+            self.remove_excerpts(old_path_key.clone(), cx);
+        }
 
-        let mut next_excerpt_id =
-            if let Some(last_entry) = self.snapshot.get_mut().excerpt_ids.last() {
-                last_entry.id.0 + 1
-            } else {
-                1
-            };
+        if to_insert.len() == 0 {
+            self.remove_excerpts(path_key.clone(), cx);
 
-        let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id));
+            return false;
+        }
+        assert_eq!(self.history.transaction_depth(), 0);
+        self.sync_mut(cx);
 
-        let mut excerpts_cursor = snapshot.excerpts.cursor::<Option<&Locator>>(());
-        excerpts_cursor.next();
+        let buffer_id = buffer_snapshot.remote_id();
 
-        loop {
-            let existing = if let Some(&existing_id) = existing_iter.peek() {
-                let locator = snapshot.excerpt_locator_for_id(existing_id);
-                excerpts_cursor.seek_forward(&Some(locator), Bias::Left);
-                if let Some(excerpt) = excerpts_cursor.item() {
-                    if excerpt.buffer_id != buffer_snapshot.remote_id() {
-                        to_remove.push(existing_id);
-                        existing_iter.next();
-                        continue;
-                    }
-                    Some((existing_id, excerpt.range.context.to_point(buffer_snapshot)))
-                } else {
-                    None
-                }
-            } else {
-                None
+        let mut snapshot = self.snapshot.get_mut();
+        let mut cursor = snapshot
+            .excerpts
+            .cursor::<Dimensions<PathKey, ExcerptOffset>>(());
+        let mut new_excerpts = SumTree::new(());
+
+        let new_ranges = to_insert.clone();
+        let mut to_insert = to_insert.iter().peekable();
+        let mut patch = Patch::empty();
+        let mut added_new_excerpt = false;
+
+        new_excerpts.append(cursor.slice(&path_key, Bias::Left), ());
+
+        // handle the case where the path key used to be associated
+        // with a different buffer by removing its excerpts.
+        if let Some(excerpt) = cursor.item()
+            && &excerpt.path_key == &path_key
+            && excerpt.buffer_id != buffer_id
+        {
+            let old_buffer_id = excerpt.buffer_id;
+            self.buffers.remove(&old_buffer_id);
+            snapshot.buffers.remove(&old_buffer_id);
+            remove_diff_state(&mut snapshot.diffs, old_buffer_id);
+            self.diffs.remove(&old_buffer_id);
+            let before = cursor.position.1;
+            cursor.seek_forward(&path_key, Bias::Right);
+            let after = cursor.position.1;
+            patch.push(Edit {
+                old: before..after,
+                new: new_excerpts.summary().len()..new_excerpts.summary().len(),
+            });
+            cx.emit(Event::BuffersRemoved {
+                removed_buffer_ids: vec![old_buffer_id],
+            });
+        }
+
+        while let Some(excerpt) = cursor.item()
+            && excerpt.path_key == path_key
+        {
+            assert_eq!(excerpt.buffer_id, buffer_id);
+            let Some(next_excerpt) = to_insert.peek() else {
+                break;
             };
+            if &excerpt.range == *next_excerpt {
+                let before = new_excerpts.summary().len();
+                new_excerpts.update_last(
+                    |prev_excerpt| {
+                        if !prev_excerpt.has_trailing_newline {
+                            prev_excerpt.has_trailing_newline = true;
+                            patch.push(Edit {
+                                old: cursor.position.1..cursor.position.1,
+                                new: before..before + MultiBufferOffset(1),
+                            });
+                        }
+                    },
+                    (),
+                );
+                new_excerpts.push(excerpt.clone(), ());
+                to_insert.next();
+                cursor.next();
+                continue;
+            }
 
-            let new = new_iter.peek();
-            // Try to merge the next new range or existing excerpt into the last
-            // queued insert.
-            if let Some((last_id, last)) = to_insert.last_mut() {
-                // Next new range overlaps the last queued insert: absorb it by
-                // extending the insert's end.
-                if let Some(new) = new
-                    && last.context.end >= new.context.start
-                {
-                    last.context.end = last.context.end.max(new.context.end);
-                    excerpt_ids.push(*last_id);
-                    new_iter.next();
-                    continue;
-                }
-                // Next existing excerpt overlaps the last queued insert: absorb
-                // it by extending the insert's end, and record the existing
-                // excerpt as replaced so anchors in it resolve to the new one.
-                if let Some((existing_id, existing_range)) = &existing
-                    && last.context.end >= existing_range.start
-                {
-                    last.context.end = last.context.end.max(existing_range.end);
-                    to_remove.push(*existing_id);
-                    Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts)
-                        .insert(*existing_id, *last_id);
-                    existing_iter.next();
-                    continue;
-                }
+            if excerpt
+                .range
+                .context
+                .start
+                .cmp(&next_excerpt.context.start, &buffer_snapshot)
+                .is_le()
+            {
+                // remove old excerpt
+                let before = cursor.position.1;
+                cursor.next();
+                let after = cursor.position.1;
+                patch.push(Edit {
+                    old: before..after,
+                    new: new_excerpts.summary().len()..new_excerpts.summary().len(),
+                });
+            } else {
+                // insert new excerpt
+                let next_excerpt = to_insert.next().unwrap();
+                added_new_excerpt = true;
+                let before = new_excerpts.summary().len();
+                new_excerpts.update_last(
+                    |prev_excerpt| {
+                        prev_excerpt.has_trailing_newline = true;
+                    },
+                    (),
+                );
+                new_excerpts.push(
+                    Excerpt::new(
+                        path_key.clone(),
+                        path_key_index,
+                        &buffer_snapshot,
+                        next_excerpt.clone(),
+                        false,
+                    ),
+                    (),
+                );
+                let after = new_excerpts.summary().len();
+                patch.push_maybe_empty(Edit {
+                    old: cursor.position.1..cursor.position.1,
+                    new: before..after,
+                });
             }
+        }
 
-            match (new, existing) {
-                (None, None) => break,
+        // remove any further trailing excerpts
+        let mut before = cursor.position.1;
+        cursor.seek_forward(&path_key, Bias::Right);
+        let after = cursor.position.1;
+        // if we removed the previous last excerpt, remove the trailing newline from the new last excerpt
+        if cursor.item().is_none() && to_insert.peek().is_none() {
+            new_excerpts.update_last(
+                |excerpt| {
+                    if excerpt.has_trailing_newline {
+                        before.0.0 = before
+                            .0
+                            .0
+                            .checked_sub(1)
+                            .expect("should have preceding excerpt");
+                        excerpt.has_trailing_newline = false;
+                    }
+                },
+                (),
+            );
+        }
+        patch.push(Edit {
+            old: before..after,
+            new: new_excerpts.summary().len()..new_excerpts.summary().len(),
+        });
 
-                // No more new ranges; remove the remaining existing excerpt.
-                (None, Some((existing_id, _))) => {
-                    existing_iter.next();
-                    to_remove.push(existing_id);
-                }
+        while let Some(next_excerpt) = to_insert.next() {
+            added_new_excerpt = true;
+            let before = new_excerpts.summary().len();
+            new_excerpts.update_last(
+                |prev_excerpt| {
+                    prev_excerpt.has_trailing_newline = true;
+                },
+                (),
+            );
+            new_excerpts.push(
+                Excerpt::new(
+                    path_key.clone(),
+                    path_key_index,
+                    &buffer_snapshot,
+                    next_excerpt.clone(),
+                    false,
+                ),
+                (),
+            );
+            let after = new_excerpts.summary().len();
+            patch.push_maybe_empty(Edit {
+                old: cursor.position.1..cursor.position.1,
+                new: before..after,
+            });
+        }
 
-                // No more existing excerpts; queue the new range for insertion.
-                (Some(_), None) => {
-                    added_a_new_excerpt = true;
-                    let new_id = next_excerpt_id();
-                    excerpt_ids.push(new_id);
-                    to_insert.push((new_id, new_iter.next().unwrap()));
-                }
+        let suffix_start = cursor.position.1;
+        let suffix = cursor.suffix();
+        let changed_trailing_excerpt = suffix.is_empty();
+        if !suffix.is_empty() {
+            let before = new_excerpts.summary().len();
+            new_excerpts.update_last(
+                |prev_excerpt| {
+                    if !prev_excerpt.has_trailing_newline {
+                        prev_excerpt.has_trailing_newline = true;
+                        patch.push(Edit {
+                            old: suffix_start..suffix_start,
+                            new: before..before + MultiBufferOffset(1),
+                        });
+                    }
+                },
+                (),
+            );
+        }
+        new_excerpts.append(suffix, ());
+        drop(cursor);
+
+        snapshot.excerpts = new_excerpts;
+        snapshot.buffers.insert(
+            buffer_id,
+            BufferStateSnapshot {
+                path_key: path_key.clone(),
+                path_key_index,
+                buffer_snapshot: buffer_snapshot.clone(),
+            },
+        );
+
+        self.buffers.entry(buffer_id).or_insert_with(|| {
+            self.buffer_changed_since_sync.replace(true);
+            buffer.update(cx, |buffer, _| {
+                buffer.record_changes(Rc::downgrade(&self.buffer_changed_since_sync));
+            });
+            BufferState {
+                _subscriptions: [
+                    cx.observe(&buffer, |_, _, cx| cx.notify()),
+                    cx.subscribe(&buffer, Self::on_buffer_event),
+                ],
+                buffer: buffer.clone(),
+            }
+        });
 
-                // Existing excerpt ends before the new range starts, so it
-                // has no corresponding new range and must be removed. Flush
-                // pending inserts and advance `insert_after` past it so that
-                // future inserts receive locators *after* this excerpt's
-                // locator, preserving forward ordering.
-                (Some(new), Some((_, existing_range)))
-                    if existing_range.end < new.context.start =>
-                {
-                    self.insert_excerpts_with_ids_after(
-                        insert_after,
-                        buffer.clone(),
-                        mem::take(&mut to_insert),
-                        cx,
-                    );
-                    insert_after = existing_iter.next().unwrap();
-                    to_remove.push(insert_after);
-                }
-                // New range ends before the existing excerpt starts, so the
-                // new range has no corresponding existing excerpt. Queue it
-                // for insertion at the current `insert_after` position
-                // (before the existing excerpt), which is the correct
-                // spatial ordering.
-                (Some(new), Some((_, existing_range)))
-                    if existing_range.start > new.context.end =>
-                {
-                    let new_id = next_excerpt_id();
-                    excerpt_ids.push(new_id);
-                    to_insert.push((new_id, new_iter.next().unwrap()));
-                }
-                // Exact match: keep the existing excerpt in place, flush
-                // any pending inserts before it, and use it as the new
-                // `insert_after` anchor.
-                (Some(new), Some((_, existing_range)))
-                    if existing_range.start == new.context.start
-                        && existing_range.end == new.context.end =>
-                {
-                    self.insert_excerpts_with_ids_after(
-                        insert_after,
-                        buffer.clone(),
-                        mem::take(&mut to_insert),
-                        cx,
-                    );
-                    insert_after = existing_iter.next().unwrap();
-                    excerpt_ids.push(insert_after);
-                    new_iter.next();
-                }
+        if changed_trailing_excerpt {
+            snapshot.trailing_excerpt_update_count += 1;
+        }
 
-                // Partial overlap: replace the existing excerpt with a new
-                // one whose range is the union of both, and record the
-                // replacement so that anchors in the old excerpt resolve to
-                // the new one.
-                (Some(_), Some((_, existing_range))) => {
-                    let existing_id = existing_iter.next().unwrap();
-                    let new_id = next_excerpt_id();
-                    Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts)
-                        .insert(existing_id, new_id);
-                    to_remove.push(existing_id);
-                    let mut range = new_iter.next().unwrap();
-                    range.context.start = range.context.start.min(existing_range.start);
-                    range.context.end = range.context.end.max(existing_range.end);
-                    excerpt_ids.push(new_id);
-                    to_insert.push((new_id, range));
-                }
-            };
+        let edits = Self::sync_diff_transforms(
+            &mut snapshot,
+            patch.into_inner(),
+            DiffChangeKind::BufferEdited,
+        );
+        if !edits.is_empty() {
+            self.subscriptions.publish(edits);
         }
 
-        self.insert_excerpts_with_ids_after(insert_after, buffer, to_insert, cx);
-        // todo(lw): There is a logic bug somewhere that causes the to_remove vector to be not ordered correctly
-        to_remove.sort_by_cached_key(|&id| snapshot.excerpt_locator_for_id(id));
-        self.remove_excerpts(to_remove, cx);
+        cx.emit(Event::Edited {
+            edited_buffer: None,
+            is_local: true,
+        });
+        cx.emit(Event::BufferRangesUpdated {
+            buffer,
+            path_key: path_key.clone(),
+            ranges: new_ranges,
+        });
+        cx.notify();
 
-        if excerpt_ids.is_empty() {
-            self.excerpts_by_path.remove(&path);
-        } else {
-            let snapshot = &*self.snapshot.get_mut();
-            let excerpt_ids = excerpt_ids
-                .iter()
-                .dedup()
-                .cloned()
-                // todo(lw): There is a logic bug somewhere that causes excerpt_ids to not necessarily be in order by locator
-                .sorted_by_cached_key(|&id| snapshot.excerpt_locator_for_id(id))
-                .collect();
-            for &excerpt_id in &excerpt_ids {
-                self.paths_by_excerpt.insert(excerpt_id, path.clone());
-            }
-            self.excerpts_by_path.insert(path, excerpt_ids);
+        added_new_excerpt
+    }
+
+    pub fn remove_excerpts_for_buffer(&mut self, buffer: BufferId, cx: &mut Context<Self>) {
+        let snapshot = self.sync_mut(cx);
+        let Some(path) = snapshot.path_for_buffer(buffer).cloned() else {
+            return;
+        };
+        self.remove_excerpts(path, cx);
+    }
+
+    pub fn remove_excerpts(&mut self, path: PathKey, cx: &mut Context<Self>) {
+        assert_eq!(self.history.transaction_depth(), 0);
+        self.sync_mut(cx);
+
+        let mut snapshot = self.snapshot.get_mut();
+        let mut cursor = snapshot
+            .excerpts
+            .cursor::<Dimensions<PathKey, ExcerptOffset>>(());
+        let mut new_excerpts = SumTree::new(());
+        new_excerpts.append(cursor.slice(&path, Bias::Left), ());
+        let mut edit_start = cursor.position.1;
+        let mut buffer_id = None;
+        if let Some(excerpt) = cursor.item()
+            && excerpt.path_key == path
+        {
+            buffer_id = Some(excerpt.buffer_id);
         }
+        cursor.seek(&path, Bias::Right);
+        let edit_end = cursor.position.1;
+        let suffix = cursor.suffix();
+        let changed_trailing_excerpt = suffix.is_empty();
+        new_excerpts.append(suffix, ());
+
+        if let Some(buffer_id) = buffer_id {
+            snapshot.buffers.remove(&buffer_id);
+            remove_diff_state(&mut snapshot.diffs, buffer_id);
+            self.buffers.remove(&buffer_id);
+            self.diffs.remove(&buffer_id);
+            cx.emit(Event::BuffersRemoved {
+                removed_buffer_ids: vec![buffer_id],
+            })
+        }
+        drop(cursor);
+        if changed_trailing_excerpt {
+            snapshot.trailing_excerpt_update_count += 1;
+            new_excerpts.update_last(
+                |excerpt| {
+                    if excerpt.has_trailing_newline {
+                        excerpt.has_trailing_newline = false;
+                        edit_start.0.0 = edit_start
+                            .0
+                            .0
+                            .checked_sub(1)
+                            .expect("should have at least one excerpt");
+                    }
+                },
+                (),
+            )
+        }
+
+        let edit = Edit {
+            old: edit_start..edit_end,
+            new: edit_start..edit_start,
+        };
+        snapshot.excerpts = new_excerpts;
 
-        PathExcerptInsertResult {
-            excerpt_ids,
-            added_new_excerpt: added_a_new_excerpt,
+        let edits =
+            Self::sync_diff_transforms(&mut snapshot, vec![edit], DiffChangeKind::BufferEdited);
+        if !edits.is_empty() {
+            self.subscriptions.publish(edits);
         }
+
+        cx.emit(Event::Edited {
+            edited_buffer: None,
+            is_local: true,
+        });
+        cx.notify();
     }
 }

crates/multi_buffer/src/transaction.rs πŸ”—

@@ -2,15 +2,15 @@ use gpui::{App, Context, Entity};
 use language::{self, Buffer, TransactionId};
 use std::{
     collections::HashMap,
-    ops::{AddAssign, Range, Sub},

+    ops::Range,

     time::{Duration, Instant},
 };
 use sum_tree::Bias;
 use text::BufferId;
 
-use crate::{BufferState, MultiBufferDimension};

+use crate::{Anchor, BufferState, MultiBufferOffset};

 
-use super::{Event, ExcerptSummary, MultiBuffer};

+use super::{Event, MultiBuffer};

 
 #[derive(Clone)]
 pub(super) struct History {
@@ -314,71 +314,50 @@ impl MultiBuffer {
         }
     }
 
-    pub fn edited_ranges_for_transaction<D>(

+    pub fn edited_ranges_for_transaction(

         &self,
         transaction_id: TransactionId,
         cx: &App,
-    ) -> Vec<Range<D>>

-    where

-        D: MultiBufferDimension

-            + Ord

-            + Sub<D, Output = D::TextDimension>

-            + AddAssign<D::TextDimension>,

-        D::TextDimension: PartialOrd + Sub<D::TextDimension, Output = D::TextDimension>,

-    {

+    ) -> Vec<Range<MultiBufferOffset>> {

         let Some(transaction) = self.history.transaction(transaction_id) else {
             return Vec::new();
         };
 
-        let mut ranges = Vec::new();

         let snapshot = self.read(cx);
-        let mut cursor = snapshot.excerpts.cursor::<ExcerptSummary>(());

+        let mut buffer_anchors = Vec::new();

 
         for (buffer_id, buffer_transaction) in &transaction.buffer_transactions {
-            let Some(buffer_state) = self.buffers.get(buffer_id) else {

+            let Some(buffer) = self.buffer(*buffer_id) else {

                 continue;
             };
+            let Some(excerpt) = snapshot.first_excerpt_for_buffer(*buffer_id) else {

+                continue;

+            };

+            let buffer_snapshot = buffer.read(cx).snapshot();

 
-            let buffer = buffer_state.buffer.read(cx);

-            for range in

-                buffer.edited_ranges_for_transaction_id::<D::TextDimension>(*buffer_transaction)

+            for range in buffer

+                .read(cx)

+                .edited_ranges_for_transaction_id::<usize>(*buffer_transaction)

             {
-                for excerpt_id in &buffer_state.excerpts {

-                    cursor.seek(excerpt_id, Bias::Left);

-                    if let Some(excerpt) = cursor.item()

-                        && excerpt.locator == *excerpt_id

-                    {

-                        let excerpt_buffer_start = excerpt

-                            .range

-                            .context

-                            .start

-                            .summary::<D::TextDimension>(buffer);

-                        let excerpt_buffer_end = excerpt

-                            .range

-                            .context

-                            .end

-                            .summary::<D::TextDimension>(buffer);

-                        let excerpt_range = excerpt_buffer_start..excerpt_buffer_end;

-                        if excerpt_range.contains(&range.start)

-                            && excerpt_range.contains(&range.end)

-                        {

-                            let excerpt_start = D::from_summary(&cursor.start().text);

-

-                            let mut start = excerpt_start;

-                            start += range.start - excerpt_buffer_start;

-                            let mut end = excerpt_start;

-                            end += range.end - excerpt_buffer_start;

-

-                            ranges.push(start..end);

-                            break;

-                        }

-                    }

-                }

+                buffer_anchors.push(Anchor::in_buffer(

+                    excerpt.path_key_index,

+                    buffer_snapshot.anchor_at(range.start, Bias::Left),

+                ));

+                buffer_anchors.push(Anchor::in_buffer(

+                    excerpt.path_key_index,

+                    buffer_snapshot.anchor_at(range.end, Bias::Right),

+                ));

             }
         }
+        buffer_anchors.sort_unstable_by(|a, b| a.cmp(b, &snapshot));

 
-        ranges.sort_by_key(|range| range.start);

-        ranges

+        snapshot

+            .summaries_for_anchors(buffer_anchors.iter())

+            .as_chunks::<2>()

+            .0

+            .iter()

+            .map(|&[s, e]| s..e)

+            .collect::<Vec<_>>()

     }
 
     pub fn merge_transactions(

crates/outline/src/outline.rs πŸ”—

@@ -79,29 +79,37 @@ fn outline_for_editor(
     cx: &mut App,
 ) -> Option<Task<Vec<OutlineItem<Anchor>>>> {
     let multibuffer = editor.read(cx).buffer().read(cx).snapshot(cx);
-    let (excerpt_id, _, buffer_snapshot) = multibuffer.as_singleton()?;
+    let buffer_snapshot = multibuffer.as_singleton()?;
     let buffer_id = buffer_snapshot.remote_id();
     let task = editor.update(cx, |editor, cx| editor.buffer_outline_items(buffer_id, cx));
 
     Some(cx.background_executor().spawn(async move {
         task.await
             .into_iter()
-            .map(|item| OutlineItem {
-                depth: item.depth,
-                range: Anchor::range_in_buffer(excerpt_id, item.range),
-                source_range_for_text: Anchor::range_in_buffer(
-                    excerpt_id,
-                    item.source_range_for_text,
-                ),
-                text: item.text,
-                highlight_ranges: item.highlight_ranges,
-                name_ranges: item.name_ranges,
-                body_range: item
-                    .body_range
-                    .map(|r| Anchor::range_in_buffer(excerpt_id, r)),
-                annotation_range: item
-                    .annotation_range
-                    .map(|r| Anchor::range_in_buffer(excerpt_id, r)),
+            .filter_map(|item| {
+                Some(OutlineItem {
+                    depth: item.depth,
+                    range: multibuffer.anchor_in_buffer(item.range.start)?
+                        ..multibuffer.anchor_in_buffer(item.range.end)?,
+                    source_range_for_text: multibuffer
+                        .anchor_in_buffer(item.source_range_for_text.start)?
+                        ..multibuffer.anchor_in_buffer(item.source_range_for_text.end)?,
+                    text: item.text,
+                    highlight_ranges: item.highlight_ranges,
+                    name_ranges: item.name_ranges,
+                    body_range: item.body_range.and_then(|r| {
+                        Some(
+                            multibuffer.anchor_in_buffer(r.start)?
+                                ..multibuffer.anchor_in_buffer(r.end)?,
+                        )
+                    }),
+                    annotation_range: item.annotation_range.and_then(|r| {
+                        Some(
+                            multibuffer.anchor_in_buffer(r.start)?
+                                ..multibuffer.anchor_in_buffer(r.end)?,
+                        )
+                    }),
+                })
             })
             .collect()
     }))

crates/outline_panel/src/outline_panel.rs πŸ”—

@@ -1,11 +1,11 @@
 mod outline_panel_settings;
 
 use anyhow::Context as _;
-use collections::{BTreeSet, HashMap, HashSet, hash_map};
+use collections::{BTreeSet, HashMap, HashSet};
 use db::kvp::KeyValueStore;
 use editor::{
-    AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, ExcerptId, ExcerptRange,
-    MultiBufferSnapshot, RangeToAnchorExt, SelectionEffects,
+    AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, ExcerptRange, MultiBufferSnapshot,
+    RangeToAnchorExt, SelectionEffects,
     display_map::ToDisplayPoint,
     items::{entry_git_aware_label_color, entry_label_color},
     scroll::{Autoscroll, ScrollAnchor},
@@ -129,12 +129,12 @@ pub struct OutlinePanel {
     selected_entry: SelectedEntry,
     active_item: Option<ActiveItem>,
     _subscriptions: Vec<Subscription>,
-    new_entries_for_fs_update: HashSet<ExcerptId>,
+    new_entries_for_fs_update: HashSet<BufferId>,
     fs_entries_update_task: Task<()>,
     cached_entries_update_task: Task<()>,
     reveal_selection_task: Task<anyhow::Result<()>>,
     outline_fetch_tasks: HashMap<BufferId, Task<()>>,
-    excerpts: HashMap<BufferId, HashMap<ExcerptId, Excerpt>>,
+    buffers: HashMap<BufferId, BufferOutlines>,
     cached_entries: Vec<CachedEntry>,
     filter_editor: Entity<Editor>,
     mode: ItemsDisplayMode,
@@ -334,42 +334,41 @@ enum CollapsedEntry {
     Dir(WorktreeId, ProjectEntryId),
     File(WorktreeId, BufferId),
     ExternalFile(BufferId),
-    Excerpt(BufferId, ExcerptId),
-    Outline(BufferId, ExcerptId, Range<Anchor>),
+    Excerpt(ExcerptRange<Anchor>),
+    Outline(Range<Anchor>),
 }
 
-#[derive(Debug)]
-struct Excerpt {
-    range: ExcerptRange<language::Anchor>,
-    outlines: ExcerptOutlines,
+struct BufferOutlines {
+    excerpts: Vec<ExcerptRange<Anchor>>,
+    outlines: OutlineState,
 }
 
-impl Excerpt {
+impl BufferOutlines {
     fn invalidate_outlines(&mut self) {
-        if let ExcerptOutlines::Outlines(valid_outlines) = &mut self.outlines {
-            self.outlines = ExcerptOutlines::Invalidated(std::mem::take(valid_outlines));
+        if let OutlineState::Outlines(valid_outlines) = &mut self.outlines {
+            self.outlines = OutlineState::Invalidated(std::mem::take(valid_outlines));
         }
     }
 
     fn iter_outlines(&self) -> impl Iterator<Item = &Outline> {
         match &self.outlines {
-            ExcerptOutlines::Outlines(outlines) => outlines.iter(),
-            ExcerptOutlines::Invalidated(outlines) => outlines.iter(),
-            ExcerptOutlines::NotFetched => [].iter(),
+            OutlineState::Outlines(outlines) => outlines.iter(),
+            OutlineState::Invalidated(outlines) => outlines.iter(),
+            OutlineState::NotFetched => [].iter(),
         }
     }
 
     fn should_fetch_outlines(&self) -> bool {
         match &self.outlines {
-            ExcerptOutlines::Outlines(_) => false,
-            ExcerptOutlines::Invalidated(_) => true,
-            ExcerptOutlines::NotFetched => true,
+            OutlineState::Outlines(_) => false,
+            OutlineState::Invalidated(_) => true,
+            OutlineState::NotFetched => true,
         }
     }
 }
 
 #[derive(Debug)]
-enum ExcerptOutlines {
+enum OutlineState {
     Outlines(Vec<Outline>),
     Invalidated(Vec<Outline>),
     NotFetched,
@@ -536,54 +535,24 @@ impl SearchData {
     }
 }
 
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
-struct OutlineEntryExcerpt {
-    id: ExcerptId,
-    buffer_id: BufferId,
-    range: ExcerptRange<language::Anchor>,
-}
-
-#[derive(Clone, Debug, Eq)]
-struct OutlineEntryOutline {
-    buffer_id: BufferId,
-    excerpt_id: ExcerptId,
-    outline: Outline,
-}
-
-impl PartialEq for OutlineEntryOutline {
-    fn eq(&self, other: &Self) -> bool {
-        self.buffer_id == other.buffer_id
-            && self.excerpt_id == other.excerpt_id
-            && self.outline.depth == other.outline.depth
-            && self.outline.range == other.outline.range
-            && self.outline.text == other.outline.text
-    }
-}
-
-impl Hash for OutlineEntryOutline {
-    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
-        (
-            self.buffer_id,
-            self.excerpt_id,
-            self.outline.depth,
-            &self.outline.range,
-            &self.outline.text,
-        )
-            .hash(state);
-    }
-}
-
 #[derive(Clone, Debug, PartialEq, Eq)]
 enum OutlineEntry {
-    Excerpt(OutlineEntryExcerpt),
-    Outline(OutlineEntryOutline),
+    Excerpt(ExcerptRange<Anchor>),
+    Outline(Outline),
 }
 
 impl OutlineEntry {
-    fn ids(&self) -> (BufferId, ExcerptId) {
+    fn buffer_id(&self) -> BufferId {
         match self {
-            OutlineEntry::Excerpt(excerpt) => (excerpt.buffer_id, excerpt.id),
-            OutlineEntry::Outline(outline) => (outline.buffer_id, outline.excerpt_id),
+            OutlineEntry::Excerpt(excerpt) => excerpt.context.start.buffer_id,
+            OutlineEntry::Outline(outline) => outline.range.start.buffer_id,
+        }
+    }
+
+    fn range(&self) -> Range<Anchor> {
+        match self {
+            OutlineEntry::Excerpt(excerpt) => excerpt.context.clone(),
+            OutlineEntry::Outline(outline) => outline.range.clone(),
         }
     }
 }
@@ -593,7 +562,7 @@ struct FsEntryFile {
     worktree_id: WorktreeId,
     entry: GitEntry,
     buffer_id: BufferId,
-    excerpts: Vec<ExcerptId>,
+    excerpts: Vec<ExcerptRange<language::Anchor>>,
 }
 
 impl PartialEq for FsEntryFile {
@@ -631,7 +600,7 @@ impl Hash for FsEntryDirectory {
 #[derive(Debug, Clone, Eq)]
 struct FsEntryExternalFile {
     buffer_id: BufferId,
-    excerpts: Vec<ExcerptId>,
+    excerpts: Vec<ExcerptRange<language::Anchor>>,
 }
 
 impl PartialEq for FsEntryExternalFile {
@@ -787,10 +756,8 @@ impl OutlinePanel {
                     if &current_theme != new_theme {
                         outline_panel_settings = *new_settings;
                         current_theme = new_theme.clone();
-                        for excerpts in outline_panel.excerpts.values_mut() {
-                            for excerpt in excerpts.values_mut() {
-                                excerpt.invalidate_outlines();
-                            }
+                        for buffer in outline_panel.buffers.values_mut() {
+                            buffer.invalidate_outlines();
                         }
                         outlines_invalidated = true;
                         let update_cached_items = outline_panel.update_non_fs_items(window, cx);
@@ -809,30 +776,23 @@ impl OutlinePanel {
 
                             let new_depth = new_settings.expand_outlines_with_depth;
 
-                            for (buffer_id, excerpts) in &outline_panel.excerpts {
-                                for (excerpt_id, excerpt) in excerpts {
-                                    if let ExcerptOutlines::Outlines(outlines) = &excerpt.outlines {
-                                        for outline in outlines {
-                                            if outline_panel
-                                                .outline_children_cache
-                                                .get(buffer_id)
-                                                .and_then(|children_map| {
-                                                    let key =
-                                                        (outline.range.clone(), outline.depth);
-                                                    children_map.get(&key)
-                                                })
-                                                .copied()
-                                                .unwrap_or(false)
-                                                && (new_depth == 0 || outline.depth >= new_depth)
-                                            {
-                                                outline_panel.collapsed_entries.insert(
-                                                    CollapsedEntry::Outline(
-                                                        *buffer_id,
-                                                        *excerpt_id,
-                                                        outline.range.clone(),
-                                                    ),
-                                                );
-                                            }
+                            for (buffer_id, buffer) in &outline_panel.buffers {
+                                if let OutlineState::Outlines(outlines) = &buffer.outlines {
+                                    for outline in outlines {
+                                        if outline_panel
+                                            .outline_children_cache
+                                            .get(buffer_id)
+                                            .and_then(|children_map| {
+                                                let key = (outline.range.clone(), outline.depth);
+                                                children_map.get(&key)
+                                            })
+                                            .copied()
+                                            .unwrap_or(false)
+                                            && (new_depth == 0 || outline.depth >= new_depth)
+                                        {
+                                            outline_panel.collapsed_entries.insert(
+                                                CollapsedEntry::Outline(outline.range.clone()),
+                                            );
                                         }
                                     }
                                 }
@@ -852,7 +812,7 @@ impl OutlinePanel {
 
                     if !outlines_invalidated {
                         let new_document_symbols = outline_panel
-                            .excerpts
+                            .buffers
                             .keys()
                             .filter_map(|buffer_id| {
                                 let buffer = outline_panel
@@ -867,10 +827,8 @@ impl OutlinePanel {
                             .collect();
                         if new_document_symbols != document_symbols_by_buffer {
                             document_symbols_by_buffer = new_document_symbols;
-                            for excerpts in outline_panel.excerpts.values_mut() {
-                                for excerpt in excerpts.values_mut() {
-                                    excerpt.invalidate_outlines();
-                                }
+                            for buffer in outline_panel.buffers.values_mut() {
+                                buffer.invalidate_outlines();
                             }
                             let update_cached_items = outline_panel.update_non_fs_items(window, cx);
                             if update_cached_items {
@@ -914,7 +872,7 @@ impl OutlinePanel {
                 cached_entries_update_task: Task::ready(()),
                 reveal_selection_task: Task::ready(Ok(())),
                 outline_fetch_tasks: HashMap::default(),
-                excerpts: HashMap::default(),
+                buffers: HashMap::default(),
                 cached_entries: Vec::new(),
                 _subscriptions: vec![
                     settings_subscription,
@@ -1110,16 +1068,13 @@ impl OutlinePanel {
             PanelEntry::Fs(FsEntry::ExternalFile(file)) => {
                 change_selection = false;
                 scroll_to_buffer = Some(file.buffer_id);
-                multi_buffer_snapshot.excerpts().find_map(
-                    |(excerpt_id, buffer_snapshot, excerpt_range)| {
-                        if buffer_snapshot.remote_id() == file.buffer_id {
-                            multi_buffer_snapshot
-                                .anchor_in_excerpt(excerpt_id, excerpt_range.context.start)
-                        } else {
-                            None
-                        }
-                    },
-                )
+                multi_buffer_snapshot.excerpts().find_map(|excerpt_range| {
+                    if excerpt_range.context.start.buffer_id == file.buffer_id {
+                        multi_buffer_snapshot.anchor_in_excerpt(excerpt_range.context.start)
+                    } else {
+                        None
+                    }
+                })
             }
 
             PanelEntry::Fs(FsEntry::File(file)) => {
@@ -1132,26 +1087,20 @@ impl OutlinePanel {
                             .and_then(|path| project.get_open_buffer(&path, cx))
                     })
                     .map(|buffer| {
-                        active_multi_buffer
-                            .read(cx)
-                            .excerpts_for_buffer(buffer.read(cx).remote_id(), cx)
+                        multi_buffer_snapshot.excerpts_for_buffer(buffer.read(cx).remote_id())
                     })
-                    .and_then(|excerpts| {
-                        let (excerpt_id, _, excerpt_range) = excerpts.first()?;
-                        multi_buffer_snapshot
-                            .anchor_in_excerpt(*excerpt_id, excerpt_range.context.start)
+                    .and_then(|mut excerpts| {
+                        let excerpt_range = excerpts.next()?;
+                        multi_buffer_snapshot.anchor_in_excerpt(excerpt_range.context.start)
                     })
             }
             PanelEntry::Outline(OutlineEntry::Outline(outline)) => multi_buffer_snapshot
-                .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.start)
-                .or_else(|| {
-                    multi_buffer_snapshot
-                        .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.end)
-                }),
+                .anchor_in_excerpt(outline.range.start)
+                .or_else(|| multi_buffer_snapshot.anchor_in_excerpt(outline.range.end)),
             PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
                 change_selection = false;
                 change_focus = false;
-                multi_buffer_snapshot.anchor_in_excerpt(excerpt.id, excerpt.range.context.start)
+                multi_buffer_snapshot.anchor_in_excerpt(excerpt.context.start)
             }
             PanelEntry::Search(search_entry) => Some(search_entry.match_range.start),
         };
@@ -1359,12 +1308,12 @@ impl OutlinePanel {
                 PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
                     previous_entries.find(|entry| match entry {
                         PanelEntry::Fs(FsEntry::File(file)) => {
-                            file.buffer_id == excerpt.buffer_id
-                                && file.excerpts.contains(&excerpt.id)
+                            file.buffer_id == excerpt.context.start.buffer_id
+                                && file.excerpts.contains(&excerpt)
                         }
                         PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => {
-                            external_file.buffer_id == excerpt.buffer_id
-                                && external_file.excerpts.contains(&excerpt.id)
+                            external_file.buffer_id == excerpt.context.start.buffer_id
+                                && external_file.excerpts.contains(&excerpt)
                         }
                         _ => false,
                     })
@@ -1372,8 +1321,16 @@ impl OutlinePanel {
                 PanelEntry::Outline(OutlineEntry::Outline(outline)) => {
                     previous_entries.find(|entry| {
                         if let PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) = entry {
-                            outline.buffer_id == excerpt.buffer_id
-                                && outline.excerpt_id == excerpt.id
+                            if outline.range.start.buffer_id != excerpt.context.start.buffer_id {
+                                return false;
+                            }
+                            let Some(buffer_snapshot) =
+                                self.buffer_snapshot_for_id(outline.range.start.buffer_id, cx)
+                            else {
+                                return false;
+                            };
+                            excerpt.contains(&outline.range.start, &buffer_snapshot)
+                                || excerpt.contains(&outline.range.end, &buffer_snapshot)
                         } else {
                             false
                         }
@@ -1584,13 +1541,11 @@ impl OutlinePanel {
                 Some(CollapsedEntry::ExternalFile(external_file.buffer_id))
             }
             PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
-                Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id))
+                Some(CollapsedEntry::Excerpt(excerpt.clone()))
+            }
+            PanelEntry::Outline(OutlineEntry::Outline(outline)) => {
+                Some(CollapsedEntry::Outline(outline.range.clone()))
             }
-            PanelEntry::Outline(OutlineEntry::Outline(outline)) => Some(CollapsedEntry::Outline(
-                outline.buffer_id,
-                outline.excerpt_id,
-                outline.outline.range.clone(),
-            )),
             PanelEntry::Search(_) => return,
         };
         let Some(collapsed_entry) = entry_to_expand else {
@@ -1691,14 +1646,10 @@ impl OutlinePanel {
             }
             PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self
                 .collapsed_entries
-                .insert(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)),
-            PanelEntry::Outline(OutlineEntry::Outline(outline)) => {
-                self.collapsed_entries.insert(CollapsedEntry::Outline(
-                    outline.buffer_id,
-                    outline.excerpt_id,
-                    outline.outline.range.clone(),
-                ))
-            }
+                .insert(CollapsedEntry::Excerpt(excerpt.clone())),
+            PanelEntry::Outline(OutlineEntry::Outline(outline)) => self
+                .collapsed_entries
+                .insert(CollapsedEntry::Outline(outline.range.clone())),
             PanelEntry::Search(_) => false,
         };
 
@@ -1753,31 +1704,26 @@ impl OutlinePanel {
             }
         }
 
-        for (&buffer_id, excerpts) in &self.excerpts {
-            for (&excerpt_id, excerpt) in excerpts {
-                match &excerpt.outlines {
-                    ExcerptOutlines::Outlines(outlines) => {
-                        for outline in outlines {
-                            to_uncollapse.insert(CollapsedEntry::Outline(
-                                buffer_id,
-                                excerpt_id,
-                                outline.range.clone(),
-                            ));
-                        }
+        for (_buffer_id, buffer) in &self.buffers {
+            match &buffer.outlines {
+                OutlineState::Outlines(outlines) => {
+                    for outline in outlines {
+                        to_uncollapse.insert(CollapsedEntry::Outline(outline.range.clone()));
                     }
-                    ExcerptOutlines::Invalidated(outlines) => {
-                        for outline in outlines {
-                            to_uncollapse.insert(CollapsedEntry::Outline(
-                                buffer_id,
-                                excerpt_id,
-                                outline.range.clone(),
-                            ));
-                        }
+                }
+                OutlineState::Invalidated(outlines) => {
+                    for outline in outlines {
+                        to_uncollapse.insert(CollapsedEntry::Outline(outline.range.clone()));
                     }
-                    ExcerptOutlines::NotFetched => {}
                 }
-                to_uncollapse.insert(CollapsedEntry::Excerpt(buffer_id, excerpt_id));
+                OutlineState::NotFetched => {}
             }
+            to_uncollapse.extend(
+                buffer
+                    .excerpts
+                    .iter()
+                    .map(|excerpt| CollapsedEntry::Excerpt(excerpt.clone())),
+            );
         }
 
         for cached in &self.cached_entries {
@@ -1844,14 +1790,10 @@ impl OutlinePanel {
                         ..
                     }) => Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)),
                     PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
-                        Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id))
+                        Some(CollapsedEntry::Excerpt(excerpt.clone()))
                     }
                     PanelEntry::Outline(OutlineEntry::Outline(outline)) => {
-                        Some(CollapsedEntry::Outline(
-                            outline.buffer_id,
-                            outline.excerpt_id,
-                            outline.outline.range.clone(),
-                        ))
+                        Some(CollapsedEntry::Outline(outline.range.clone()))
                     }
                     PanelEntry::Search(_) => None,
                 },
@@ -1939,17 +1881,13 @@ impl OutlinePanel {
                 }
             }
             PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
-                let collapsed_entry = CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id);
+                let collapsed_entry = CollapsedEntry::Excerpt(excerpt.clone());
                 if !self.collapsed_entries.remove(&collapsed_entry) {
                     self.collapsed_entries.insert(collapsed_entry);
                 }
             }
             PanelEntry::Outline(OutlineEntry::Outline(outline)) => {
-                let collapsed_entry = CollapsedEntry::Outline(
-                    outline.buffer_id,
-                    outline.excerpt_id,
-                    outline.outline.range.clone(),
-                );
+                let collapsed_entry = CollapsedEntry::Outline(outline.range.clone());
                 if !self.collapsed_entries.remove(&collapsed_entry) {
                     self.collapsed_entries.insert(collapsed_entry);
                 }
@@ -2103,6 +2041,8 @@ impl OutlinePanel {
         let project = self.project.clone();
         self.reveal_selection_task = cx.spawn_in(window, async move |outline_panel, cx| {
             cx.background_executor().timer(UPDATE_DEBOUNCE).await;
+            let multibuffer_snapshot =
+                editor.read_with(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx));
             let entry_with_selection =
                 outline_panel.update_in(cx, |outline_panel, window, cx| {
                     outline_panel.location_for_editor_selection(&editor, window, cx)
@@ -2132,14 +2072,28 @@ impl OutlinePanel {
                         })
                 }),
                 PanelEntry::Outline(outline_entry) => {
-                    let (buffer_id, excerpt_id) = outline_entry.ids();
+                    let buffer_id = outline_entry.buffer_id();
+                    let outline_range = outline_entry.range();
                     outline_panel.update(cx, |outline_panel, cx| {
                         outline_panel
                             .collapsed_entries
                             .remove(&CollapsedEntry::ExternalFile(buffer_id));
-                        outline_panel
-                            .collapsed_entries
-                            .remove(&CollapsedEntry::Excerpt(buffer_id, excerpt_id));
+                        if let Some(buffer_snapshot) =
+                            outline_panel.buffer_snapshot_for_id(buffer_id, cx)
+                        {
+                            outline_panel.collapsed_entries.retain(|entry| match entry {
+                                CollapsedEntry::Excerpt(excerpt_range) => {
+                                    let intersects = excerpt_range.context.start.buffer_id
+                                        == buffer_id
+                                        && (excerpt_range
+                                            .contains(&outline_range.start, &buffer_snapshot)
+                                            || excerpt_range
+                                                .contains(&outline_range.end, &buffer_snapshot));
+                                    !intersects
+                                }
+                                _ => true,
+                            });
+                        }
                         let project = outline_panel.project.read(cx);
                         let entry_id = project
                             .buffer_for_id(buffer_id, cx)
@@ -2160,11 +2114,9 @@ impl OutlinePanel {
                     })?
                 }
                 PanelEntry::Fs(FsEntry::ExternalFile(..)) => None,
-                PanelEntry::Search(SearchEntry { match_range, .. }) => match_range
-                    .start
-                    .text_anchor
-                    .buffer_id
-                    .or(match_range.end.text_anchor.buffer_id)
+                PanelEntry::Search(SearchEntry { match_range, .. }) => multibuffer_snapshot
+                    .anchor_to_buffer_anchor(match_range.start)
+                    .map(|(anchor, _)| anchor.buffer_id)
                     .map(|buffer_id| {
                         outline_panel.update(cx, |outline_panel, cx| {
                             outline_panel
@@ -2246,30 +2198,30 @@ impl OutlinePanel {
 
     fn render_excerpt(
         &self,
-        excerpt: &OutlineEntryExcerpt,
+        excerpt: &ExcerptRange<Anchor>,
         depth: usize,
         window: &mut Window,
         cx: &mut Context<OutlinePanel>,
     ) -> Option<Stateful<Div>> {
-        let item_id = ElementId::from(excerpt.id.to_proto() as usize);
+        let item_id = ElementId::from(format!("{excerpt:?}"));
         let is_active = match self.selected_entry() {
             Some(PanelEntry::Outline(OutlineEntry::Excerpt(selected_excerpt))) => {
-                selected_excerpt.buffer_id == excerpt.buffer_id && selected_excerpt.id == excerpt.id
+                selected_excerpt == excerpt
             }
             _ => false,
         };
         let has_outlines = self
-            .excerpts
-            .get(&excerpt.buffer_id)
-            .and_then(|excerpts| match &excerpts.get(&excerpt.id)?.outlines {
-                ExcerptOutlines::Outlines(outlines) => Some(outlines),
-                ExcerptOutlines::Invalidated(outlines) => Some(outlines),
-                ExcerptOutlines::NotFetched => None,
+            .buffers
+            .get(&excerpt.context.start.buffer_id)
+            .and_then(|buffer| match &buffer.outlines {
+                OutlineState::Outlines(outlines) => Some(outlines),
+                OutlineState::Invalidated(outlines) => Some(outlines),
+                OutlineState::NotFetched => None,
             })
             .is_some_and(|outlines| !outlines.is_empty());
         let is_expanded = !self
             .collapsed_entries
-            .contains(&CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id));
+            .contains(&CollapsedEntry::Excerpt(excerpt.clone()));
         let color = entry_label_color(is_active);
         let icon = if has_outlines {
             FileIcons::get_chevron_icon(is_expanded, cx)
@@ -2279,7 +2231,7 @@ impl OutlinePanel {
         }
         .unwrap_or_else(empty_icon);
 
-        let label = self.excerpt_label(excerpt.buffer_id, &excerpt.range, cx)?;
+        let label = self.excerpt_label(&excerpt, cx)?;
         let label_element = Label::new(label)
             .single_line()
             .color(color)
@@ -2297,13 +2249,8 @@ impl OutlinePanel {
         ))
     }
 
-    fn excerpt_label(
-        &self,
-        buffer_id: BufferId,
-        range: &ExcerptRange<language::Anchor>,
-        cx: &App,
-    ) -> Option<String> {
-        let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx)?;
+    fn excerpt_label(&self, range: &ExcerptRange<language::Anchor>, cx: &App) -> Option<String> {
+        let buffer_snapshot = self.buffer_snapshot_for_id(range.context.start.buffer_id, cx)?;
         let excerpt_range = range.context.to_point(&buffer_snapshot);
         Some(format!(
             "Lines {}- {}",
@@ -2314,19 +2261,19 @@ impl OutlinePanel {
 
     fn render_outline(
         &self,
-        outline: &OutlineEntryOutline,
+        outline: &Outline,
         depth: usize,
         string_match: Option<&StringMatch>,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Stateful<Div> {
         let item_id = ElementId::from(SharedString::from(format!(
-            "{:?}|{:?}{:?}|{:?}",
-            outline.buffer_id, outline.excerpt_id, outline.outline.range, &outline.outline.text,
+            "{:?}|{:?}",
+            outline.range, &outline.text,
         )));
 
         let label_element = outline::render_item(
-            &outline.outline,
+            &outline,
             string_match
                 .map(|string_match| string_match.ranges().collect::<Vec<_>>())
                 .unwrap_or_default(),
@@ -2335,26 +2282,22 @@ impl OutlinePanel {
         .into_any_element();
 
         let is_active = match self.selected_entry() {
-            Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => {
-                outline == selected && outline.outline == selected.outline
-            }
+            Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => outline == selected,
             _ => false,
         };
 
         let has_children = self
             .outline_children_cache
-            .get(&outline.buffer_id)
+            .get(&outline.range.start.buffer_id)
             .and_then(|children_map| {
-                let key = (outline.outline.range.clone(), outline.outline.depth);
+                let key = (outline.range.clone(), outline.depth);
                 children_map.get(&key)
             })
             .copied()
             .unwrap_or(false);
-        let is_expanded = !self.collapsed_entries.contains(&CollapsedEntry::Outline(
-            outline.buffer_id,
-            outline.excerpt_id,
-            outline.outline.range.clone(),
-        ));
+        let is_expanded = !self
+            .collapsed_entries
+            .contains(&CollapsedEntry::Outline(outline.range.clone()));
 
         let icon = if has_children {
             FileIcons::get_chevron_icon(is_expanded, cx)
@@ -2784,7 +2727,7 @@ impl OutlinePanel {
             let mut new_collapsed_entries = HashSet::default();
             let mut new_unfolded_dirs = HashMap::default();
             let mut root_entries = HashSet::default();
-            let mut new_excerpts = HashMap::<BufferId, HashMap<ExcerptId, Excerpt>>::default();
+            let mut new_buffers = HashMap::<BufferId, BufferOutlines>::default();
             let Ok(buffer_excerpts) = outline_panel.update(cx, |outline_panel, cx| {
                 let git_store = outline_panel.project.read(cx).git_store().clone();
                 new_collapsed_entries = outline_panel.collapsed_entries.clone();
@@ -2793,13 +2736,18 @@ impl OutlinePanel {
 
                 multi_buffer_snapshot.excerpts().fold(
                     HashMap::default(),
-                    |mut buffer_excerpts, (excerpt_id, buffer_snapshot, excerpt_range)| {
+                    |mut buffer_excerpts, excerpt_range| {
+                        let Some(buffer_snapshot) = multi_buffer_snapshot
+                            .buffer_for_id(excerpt_range.context.start.buffer_id)
+                        else {
+                            return buffer_excerpts;
+                        };
                         let buffer_id = buffer_snapshot.remote_id();
                         let file = File::from_dyn(buffer_snapshot.file());
                         let entry_id = file.and_then(|file| file.project_entry_id());
                         let worktree = file.map(|file| file.worktree.read(cx).snapshot());
-                        let is_new = new_entries.contains(&excerpt_id)
-                            || !outline_panel.excerpts.contains_key(&buffer_id);
+                        let is_new = new_entries.contains(&buffer_id)
+                            || !outline_panel.buffers.contains_key(&buffer_id);
                         let is_folded = active_editor.read(cx).is_buffer_folded(buffer_id, cx);
                         let status = git_store
                             .read(cx)
@@ -2813,29 +2761,28 @@ impl OutlinePanel {
                                 (is_new, is_folded, Vec::new(), entry_id, worktree, status)
                             })
                             .2
-                            .push(excerpt_id);
+                            .push(excerpt_range.clone());
 
-                        let outlines = match outline_panel
-                            .excerpts
-                            .get(&buffer_id)
-                            .and_then(|excerpts| excerpts.get(&excerpt_id))
-                        {
-                            Some(old_excerpt) => match &old_excerpt.outlines {
-                                ExcerptOutlines::Outlines(outlines) => {
-                                    ExcerptOutlines::Outlines(outlines.clone())
+                        new_buffers
+                            .entry(buffer_id)
+                            .or_insert_with(|| {
+                                let outlines = match outline_panel.buffers.get(&buffer_id) {
+                                    Some(old_buffer) => match &old_buffer.outlines {
+                                        OutlineState::Outlines(outlines) => {
+                                            OutlineState::Outlines(outlines.clone())
+                                        }
+                                        OutlineState::Invalidated(_) => OutlineState::NotFetched,
+                                        OutlineState::NotFetched => OutlineState::NotFetched,
+                                    },
+                                    None => OutlineState::NotFetched,
+                                };
+                                BufferOutlines {
+                                    outlines,
+                                    excerpts: Vec::new(),
                                 }
-                                ExcerptOutlines::Invalidated(_) => ExcerptOutlines::NotFetched,
-                                ExcerptOutlines::NotFetched => ExcerptOutlines::NotFetched,
-                            },
-                            None => ExcerptOutlines::NotFetched,
-                        };
-                        new_excerpts.entry(buffer_id).or_default().insert(
-                            excerpt_id,
-                            Excerpt {
-                                range: excerpt_range,
-                                outlines,
-                            },
-                        );
+                            })
+                            .excerpts
+                            .push(excerpt_range);
                         buffer_excerpts
                     },
                 )
@@ -2856,7 +2803,7 @@ impl OutlinePanel {
                         BTreeMap::<WorktreeId, HashMap<ProjectEntryId, GitEntry>>::default();
                     let mut worktree_excerpts = HashMap::<
                         WorktreeId,
-                        HashMap<ProjectEntryId, (BufferId, Vec<ExcerptId>)>,
+                        HashMap<ProjectEntryId, (BufferId, Vec<ExcerptRange<Anchor>>)>,
                     >::default();
                     let mut external_excerpts = HashMap::default();
 
@@ -3134,7 +3081,7 @@ impl OutlinePanel {
             outline_panel
                 .update_in(cx, |outline_panel, window, cx| {
                     outline_panel.new_entries_for_fs_update.clear();
-                    outline_panel.excerpts = new_excerpts;
+                    outline_panel.buffers = new_buffers;
                     outline_panel.collapsed_entries = new_collapsed_entries;
                     outline_panel.unfolded_dirs = new_unfolded_dirs;
                     outline_panel.fs_entries = new_fs_entries;
@@ -3144,7 +3091,7 @@ impl OutlinePanel {
 
                     // Only update cached entries if we don't have outlines to fetch
                     // If we do have outlines to fetch, let fetch_outdated_outlines handle the update
-                    if outline_panel.excerpt_fetch_ranges(cx).is_empty() {
+                    if outline_panel.buffers_to_fetch().is_empty() {
                         outline_panel.update_cached_entries(debounce, window, cx);
                     }
 
@@ -3192,8 +3139,15 @@ impl OutlinePanel {
             item_handle: new_active_item.downgrade_item(),
             active_editor: new_active_editor.downgrade(),
         });
-        self.new_entries_for_fs_update
-            .extend(new_active_editor.read(cx).buffer().read(cx).excerpt_ids());
+        self.new_entries_for_fs_update.extend(
+            new_active_editor
+                .read(cx)
+                .buffer()
+                .read(cx)
+                .snapshot(cx)
+                .excerpts()
+                .map(|excerpt| excerpt.context.start.buffer_id),
+        );
         self.selected_entry.invalidate();
         self.update_fs_entries(new_active_editor, None, window, cx);
     }
@@ -3211,7 +3165,7 @@ impl OutlinePanel {
         self.fs_entries.clear();
         self.fs_entries_depth.clear();
         self.fs_children_count.clear();
-        self.excerpts.clear();
+        self.buffers.clear();
         self.cached_entries = Vec::new();
         self.selected_entry = SelectedEntry::None;
         self.pinned = false;
@@ -3225,23 +3179,14 @@ impl OutlinePanel {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) -> Option<PanelEntry> {
-        let selection = editor.update(cx, |editor, cx| {
-            editor
-                .selections
-                .newest::<language::Point>(&editor.display_snapshot(cx))
-                .head()
-        });
         let editor_snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx));
         let multi_buffer = editor.read(cx).buffer();
         let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx);
-        let (excerpt_id, buffer, _) = editor
-            .read(cx)
-            .buffer()
-            .read(cx)
-            .excerpt_containing(selection, cx)?;
-        let buffer_id = buffer.read(cx).remote_id();
+        let anchor = editor.update(cx, |editor, _| editor.selections.newest_anchor().head());
+        let selection_display_point = anchor.to_display_point(&editor_snapshot);
+        let (anchor, _) = multi_buffer_snapshot.anchor_to_buffer_anchor(anchor)?;
 
-        if editor.read(cx).is_buffer_folded(buffer_id, cx) {
+        if editor.read(cx).is_buffer_folded(anchor.buffer_id, cx) {
             return self
                 .fs_entries
                 .iter()
@@ -3254,14 +3199,12 @@ impl OutlinePanel {
                     | FsEntry::ExternalFile(FsEntryExternalFile {
                         buffer_id: other_buffer_id,
                         ..
-                    }) => buffer_id == *other_buffer_id,
+                    }) => anchor.buffer_id == *other_buffer_id,
                 })
                 .cloned()
                 .map(PanelEntry::Fs);
         }
 
-        let selection_display_point = selection.to_display_point(&editor_snapshot);
-
         match &self.mode {
             ItemsDisplayMode::Search(search_state) => search_state
                 .matches
@@ -3298,32 +3241,31 @@ impl OutlinePanel {
                     })
                 }),
             ItemsDisplayMode::Outline => self.outline_location(
-                buffer_id,
-                excerpt_id,
+                anchor,
                 multi_buffer_snapshot,
                 editor_snapshot,
                 selection_display_point,
+                cx,
             ),
         }
     }
 
     fn outline_location(
         &self,
-        buffer_id: BufferId,
-        excerpt_id: ExcerptId,
+        selection_anchor: Anchor,
         multi_buffer_snapshot: editor::MultiBufferSnapshot,
         editor_snapshot: editor::EditorSnapshot,
         selection_display_point: DisplayPoint,
+        cx: &App,
     ) -> Option<PanelEntry> {
         let excerpt_outlines = self
-            .excerpts
-            .get(&buffer_id)
-            .and_then(|excerpts| excerpts.get(&excerpt_id))
+            .buffers
+            .get(&selection_anchor.buffer_id)
             .into_iter()
-            .flat_map(|excerpt| excerpt.iter_outlines())
+            .flat_map(|buffer| buffer.iter_outlines())
             .flat_map(|outline| {
                 let range = multi_buffer_snapshot
-                    .anchor_range_in_excerpt(excerpt_id, outline.range.clone())?;
+                    .buffer_anchor_range_to_anchor_range(outline.range.clone())?;
                 Some((
                     range.start.to_display_point(&editor_snapshot)
                         ..range.end.to_display_point(&editor_snapshot),
@@ -3411,16 +3353,16 @@ impl OutlinePanel {
             .cloned();
 
         let closest_container = match outline_item {
-            Some(outline) => PanelEntry::Outline(OutlineEntry::Outline(OutlineEntryOutline {
-                buffer_id,
-                excerpt_id,
-                outline,
-            })),
+            Some(outline) => PanelEntry::Outline(OutlineEntry::Outline(outline)),
             None => {
                 self.cached_entries.iter().rev().find_map(|cached_entry| {
                     match &cached_entry.entry {
                         PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
-                            if excerpt.buffer_id == buffer_id && excerpt.id == excerpt_id {
+                            if excerpt.context.start.buffer_id == selection_anchor.buffer_id
+                                && let Some(buffer_snapshot) =
+                                    self.buffer_snapshot_for_id(excerpt.context.start.buffer_id, cx)
+                                && excerpt.contains(&selection_anchor, &buffer_snapshot)
+                            {
                                 Some(cached_entry.entry.clone())
                             } else {
                                 None
@@ -3430,6 +3372,7 @@ impl OutlinePanel {
                             FsEntry::ExternalFile(FsEntryExternalFile {
                                 buffer_id: file_buffer_id,
                                 excerpts: file_excerpts,
+                                ..
                             })
                             | FsEntry::File(FsEntryFile {
                                 buffer_id: file_buffer_id,
@@ -3437,7 +3380,13 @@ impl OutlinePanel {
                                 ..
                             }),
                         ) => {
-                            if file_buffer_id == &buffer_id && file_excerpts.contains(&excerpt_id) {
+                            if *file_buffer_id == selection_anchor.buffer_id
+                                && let Some(buffer_snapshot) =
+                                    self.buffer_snapshot_for_id(*file_buffer_id, cx)
+                                && file_excerpts.iter().any(|excerpt| {
+                                    excerpt.contains(&selection_anchor, &buffer_snapshot)
+                                })
+                            {
                                 Some(cached_entry.entry.clone())
                             } else {
                                 None
@@ -3452,18 +3401,17 @@ impl OutlinePanel {
     }
 
     fn fetch_outdated_outlines(&mut self, window: &mut Window, cx: &mut Context<Self>) {
-        let excerpt_fetch_ranges = self.excerpt_fetch_ranges(cx);
-        if excerpt_fetch_ranges.is_empty() {
+        let buffers_to_fetch = self.buffers_to_fetch();
+        if buffers_to_fetch.is_empty() {
             return;
         }
 
         let first_update = Arc::new(AtomicBool::new(true));
-        for (buffer_id, (_buffer_snapshot, excerpt_ranges)) in excerpt_fetch_ranges {
+        for buffer_id in buffers_to_fetch {
             let outline_task = self.active_editor().map(|editor| {
                 editor.update(cx, |editor, cx| editor.buffer_outline_items(buffer_id, cx))
             });
 
-            let excerpt_ids = excerpt_ranges.keys().copied().collect::<Vec<_>>();
             let first_update = first_update.clone();
 
             self.outline_fetch_tasks.insert(
@@ -3498,40 +3446,26 @@ impl OutlinePanel {
                                     Some(UPDATE_DEBOUNCE)
                                 };
 
-                            for excerpt_id in &excerpt_ids {
-                                if let Some(excerpt) = outline_panel
-                                    .excerpts
-                                    .entry(buffer_id)
-                                    .or_default()
-                                    .get_mut(excerpt_id)
+                            if let Some(buffer) = outline_panel.buffers.get_mut(&buffer_id) {
+                                buffer.outlines = OutlineState::Outlines(fetched_outlines.clone());
+
+                                if let Some(default_depth) = pending_default_depth
+                                    && let OutlineState::Outlines(outlines) = &buffer.outlines
                                 {
-                                    excerpt.outlines =
-                                        ExcerptOutlines::Outlines(fetched_outlines.clone());
-
-                                    if let Some(default_depth) = pending_default_depth
-                                        && let ExcerptOutlines::Outlines(outlines) =
-                                            &excerpt.outlines
-                                    {
-                                        outlines
-                                            .iter()
-                                            .filter(|outline| {
-                                                (default_depth == 0
-                                                    || outline.depth >= default_depth)
-                                                    && outlines_with_children.contains(&(
-                                                        outline.range.clone(),
-                                                        outline.depth,
-                                                    ))
-                                            })
-                                            .for_each(|outline| {
-                                                outline_panel.collapsed_entries.insert(
-                                                    CollapsedEntry::Outline(
-                                                        buffer_id,
-                                                        *excerpt_id,
-                                                        outline.range.clone(),
-                                                    ),
-                                                );
-                                            });
-                                    }
+                                    outlines
+                                        .iter()
+                                        .filter(|outline| {
+                                            (default_depth == 0 || outline.depth >= default_depth)
+                                                && outlines_with_children.contains(&(
+                                                    outline.range.clone(),
+                                                    outline.depth,
+                                                ))
+                                        })
+                                        .for_each(|outline| {
+                                            outline_panel.collapsed_entries.insert(
+                                                CollapsedEntry::Outline(outline.range.clone()),
+                                            );
+                                        });
                                 }
                             }
 

crates/project/src/git_store.rs πŸ”—

@@ -4706,12 +4706,11 @@ impl Repository {
                                 .commit_oid_to_index
                                 .insert(commit_data.sha, graph_data.commit_data.len());
                             graph_data.commit_data.push(commit_data);
-
-                            cx.emit(RepositoryEvent::GraphEvent(
-                                graph_data_key.clone(),
-                                GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
-                            ));
                         }
+                        cx.emit(RepositoryEvent::GraphEvent(
+                            graph_data_key.clone(),
+                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
+                        ));
                     });
 
                 match &graph_data {

crates/project/src/lsp_store/semantic_tokens.rs πŸ”—

@@ -585,7 +585,7 @@ async fn raw_to_buffer_semantic_tokens(
                     }
 
                     Some(BufferSemanticToken {
-                        range: buffer_snapshot.anchor_range_around(start..end),
+                        range: buffer_snapshot.anchor_range_inside(start..end),
                         token_type: token.token_type,
                         token_modifiers: token.token_modifiers,
                     })

crates/project/src/project.rs πŸ”—

@@ -1032,6 +1032,8 @@ impl DirectoryLister {
     }
 }
 
+pub const CURRENT_PROJECT_FEATURES: &[&str] = &["new-style-anchors"];
+
 #[cfg(feature = "test-support")]
 pub const DEFAULT_COMPLETION_CONTEXT: CompletionContext = CompletionContext {
     trigger_kind: lsp::CompletionTriggerKind::INVOKED,
@@ -1644,6 +1646,10 @@ impl Project {
                 project_id: remote_id,
                 committer_email: committer.email,
                 committer_name: committer.name,
+                features: CURRENT_PROJECT_FEATURES
+                    .iter()
+                    .map(|s| s.to_string())
+                    .collect(),
             })
             .await?;
         Self::from_join_project_response(

crates/project/tests/integration/project_tests.rs πŸ”—

@@ -1771,7 +1771,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
             DiagnosticSet::from_sorted_entries(
                 vec![DiagnosticEntry {
                     diagnostic: Default::default(),
-                    range: Anchor::MIN..Anchor::MAX,
+                    range: Anchor::min_max_range_for_buffer(buffer.remote_id()),
                 }],
                 &buffer.snapshot(),
             ),
@@ -8525,9 +8525,10 @@ async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
     unstaged_diff.update(cx, |unstaged_diff, cx| {
         let snapshot = buffer.read(cx).snapshot();
         assert_hunks(
-            unstaged_diff
-                .snapshot(cx)
-                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
+            unstaged_diff.snapshot(cx).hunks_intersecting_range(
+                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
+                &snapshot,
+            ),
             &snapshot,
             &unstaged_diff.base_text(cx).text(),
             &[(
@@ -8616,8 +8617,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
     diff_1.update(cx, |diff, cx| {
         let snapshot = buffer_1.read(cx).snapshot();
         assert_hunks(
-            diff.snapshot(cx)
-                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
+            diff.snapshot(cx).hunks_intersecting_range(
+                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
+                &snapshot,
+            ),
             &snapshot,
             &diff.base_text_string(cx).unwrap(),
             &[
@@ -8658,8 +8661,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
     diff_1.update(cx, |diff, cx| {
         let snapshot = buffer_1.read(cx).snapshot();
         assert_hunks(
-            diff.snapshot(cx)
-                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
+            diff.snapshot(cx).hunks_intersecting_range(
+                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
+                &snapshot,
+            ),
             &snapshot,
             &diff.base_text(cx).text(),
             &[(
@@ -8688,8 +8693,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
     diff_2.update(cx, |diff, cx| {
         let snapshot = buffer_2.read(cx).snapshot();
         assert_hunks(
-            diff.snapshot(cx)
-                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
+            diff.snapshot(cx).hunks_intersecting_range(
+                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
+                &snapshot,
+            ),
             &snapshot,
             &diff.base_text_string(cx).unwrap(),
             &[(
@@ -8710,8 +8717,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
     diff_2.update(cx, |diff, cx| {
         let snapshot = buffer_2.read(cx).snapshot();
         assert_hunks(
-            diff.snapshot(cx)
-                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
+            diff.snapshot(cx).hunks_intersecting_range(
+                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
+                &snapshot,
+            ),
             &snapshot,
             &diff.base_text_string(cx).unwrap(),
             &[(

crates/proto/proto/buffer.proto πŸ”—

@@ -212,10 +212,15 @@ message Selection {
 }
 
 message EditorAnchor {
-  uint64 excerpt_id = 1;
+  optional uint64 excerpt_id = 1;
   Anchor anchor = 2;
 }
 
+message PathKey {
+  optional uint64 sort_prefix = 1;
+  string path = 2;
+}
+
 enum CursorShape {
   CursorBar = 0;
   CursorBlock = 1;

crates/proto/proto/call.proto πŸ”—

@@ -174,6 +174,7 @@ message ShareProject {
   reserved 3;
   bool is_ssh_project = 4;
   optional bool windows_paths = 5;
+  repeated string features = 6;
 }
 
 message ShareProjectResponse {
@@ -193,6 +194,7 @@ message JoinProject {
   uint64 project_id = 1;
   optional string committer_email = 2;
   optional string committer_name = 3;
+  repeated string features = 4;
 }
 
 message JoinProjectResponse {
@@ -204,6 +206,7 @@ message JoinProjectResponse {
   repeated string language_server_capabilities = 8;
   ChannelRole role = 6;
   bool windows_paths = 9;
+  repeated string features = 10;
   reserved 7;
 }
 
@@ -359,6 +362,8 @@ message UpdateView {
     reserved 7;
     double scroll_x = 8;
     double scroll_y = 9;
+    repeated PathExcerpts updated_paths = 10;
+    repeated uint64 deleted_buffers = 11;
   }
 }
 
@@ -385,6 +390,7 @@ message View {
     reserved 8;
     double scroll_x = 9;
     double scroll_y = 10;
+    repeated PathExcerpts path_excerpts = 11;
   }
 
   message ChannelView {
@@ -407,6 +413,19 @@ message Excerpt {
   Anchor primary_end = 6;
 }
 
+message ExcerptRange {
+  Anchor context_start = 1;
+  Anchor context_end = 2;
+  Anchor primary_start = 3;
+  Anchor primary_end = 4;
+}
+
+message PathExcerpts {
+  PathKey path_key = 1;
+  uint64 buffer_id = 2;
+  repeated ExcerptRange ranges = 3;
+}
+
 message Contact {
   uint64 user_id = 1;
   bool online = 2;

crates/recent_projects/src/recent_projects.rs πŸ”—

@@ -2068,9 +2068,16 @@ mod tests {
             )
             .await;
 
+        // Open a file path (not a directory) so that the worktree root is a
+        // file. This means `active_project_directory` returns `None`, which
+        // causes `DevContainerContext::from_workspace` to return `None`,
+        // preventing `open_dev_container` from spawning real I/O (docker
+        // commands, shell environment loading) that is incompatible with the
+        // test scheduler. The modal is still created and the re-entrancy
+        // guard that this test validates is still exercised.
         cx.update(|cx| {
             open_paths(
-                &[PathBuf::from(path!("/project"))],
+                &[PathBuf::from(path!("/project/src/main.rs"))],
                 app_state,
                 workspace::OpenOptions::default(),
                 cx,

crates/recent_projects/src/remote_servers.rs πŸ”—

@@ -11,6 +11,7 @@ use dev_container::{
 };
 use editor::Editor;
 
+use extension_host::ExtensionStore;
 use futures::{FutureExt, channel::oneshot, future::Shared};
 use gpui::{
     Action, AnyElement, App, ClickEvent, ClipboardItem, Context, DismissEvent, Entity,
@@ -41,6 +42,7 @@ use std::{
         atomic::{self, AtomicUsize},
     },
 };
+
 use ui::{
     CommonAnimationExt, IconButtonShape, KeyBinding, List, ListItem, ListSeparator, Modal,
     ModalFooter, ModalHeader, Navigable, NavigableEntry, Section, Tooltip, WithScrollbar,
@@ -1854,10 +1856,13 @@ impl RemoteServerProjects {
     ) {
         let replace_window = window.window_handle().downcast::<MultiWorkspace>();
         let app_state = Arc::downgrade(&app_state);
+
         cx.spawn_in(window, async move |entity, cx| {
-            let (connection, starting_dir) =
-                match start_dev_container_with_config(context, config).await {
-                    Ok((c, s)) => (Connection::DevContainer(c), s),
+            let environment = context.environment(cx).await;
+
+            let (dev_container_connection, starting_dir) =
+                match start_dev_container_with_config(context, config, environment).await {
+                    Ok((c, s)) => (c, s),
                     Err(e) => {
                         log::error!("Failed to start dev container: {:?}", e);
                         cx.prompt(
@@ -1881,6 +1886,16 @@ impl RemoteServerProjects {
                         return;
                     }
                 };
+            cx.update(|_, cx| {
+                ExtensionStore::global(cx).update(cx, |this, cx| {
+                    for extension in &dev_container_connection.extension_ids {
+                        log::info!("Installing extension {extension} from devcontainer");
+                        this.install_latest_extension(Arc::from(extension.clone()), cx);
+                    }
+                })
+            })
+            .log_err();
+
             entity
                 .update(cx, |_, cx| {
                     cx.emit(DismissEvent);
@@ -1891,7 +1906,7 @@ impl RemoteServerProjects {
                 return;
             };
             let result = open_remote_project(
-                connection.into(),
+                Connection::DevContainer(dev_container_connection).into(),
                 vec![starting_dir].into_iter().map(PathBuf::from).collect(),
                 app_state,
                 OpenOptions {

crates/remote/src/transport/docker.rs 🔗

@@ -6,6 +6,7 @@ use collections::HashMap;
 use parking_lot::Mutex;
 use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
 use semver::Version as SemanticVersion;
+use std::collections::BTreeMap;
 use std::time::Instant;
 use std::{
     path::{Path, PathBuf},
@@ -36,6 +37,7 @@ pub struct DockerConnectionOptions {
     pub remote_user: String,
     pub upload_binary_over_docker_exec: bool,
     pub use_podman: bool,
+    pub remote_env: BTreeMap<String, String>,
 }
 
 pub(crate) struct DockerExecConnection {
@@ -499,10 +501,14 @@ impl DockerExecConnection {
         args.push("-u".to_string());
         args.push(self.connection_options.remote_user.clone());
 
+        for (k, v) in self.connection_options.remote_env.iter() {
+            args.push("-e".to_string());
+            args.push(format!("{k}={v}"));
+        }
+
         for (k, v) in env.iter() {
             args.push("-e".to_string());
-            let env_declaration = format!("{}={}", k, v);
-            args.push(env_declaration);
+            args.push(format!("{k}={v}"));
         }
 
         args.push(self.connection_options.container_id.clone());
@@ -632,6 +638,11 @@ impl RemoteConnection for DockerExecConnection {
         };
 
         let mut docker_args = vec!["exec".to_string()];
+
+        for (k, v) in self.connection_options.remote_env.iter() {
+            docker_args.push("-e".to_string());
+            docker_args.push(format!("{k}={v}"));
+        }
         for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] {
             if let Some(value) = std::env::var(env_var).ok() {
                 docker_args.push("-e".to_string());
@@ -768,9 +779,14 @@ impl RemoteConnection for DockerExecConnection {
             docker_args.push(parsed_working_dir);
         }
 
+        for (k, v) in self.connection_options.remote_env.iter() {
+            docker_args.push("-e".to_string());
+            docker_args.push(format!("{k}={v}"));
+        }
+
         for (k, v) in env.iter() {
             docker_args.push("-e".to_string());
-            docker_args.push(format!("{}={}", k, v));
+            docker_args.push(format!("{k}={v}"));
         }
 
         match interactive {

crates/search/src/buffer_search.rs 🔗

@@ -3550,7 +3550,16 @@ mod tests {
 
         // Manually unfold one buffer (simulating a chevron click)
         let first_buffer_id = editor.read_with(cx, |editor, cx| {
-            editor.buffer().read(cx).excerpt_buffer_ids()[0]
+            editor
+                .buffer()
+                .read(cx)
+                .snapshot(cx)
+                .excerpts()
+                .nth(0)
+                .unwrap()
+                .context
+                .start
+                .buffer_id
         });
         editor.update_in(cx, |editor, _window, cx| {
             editor.unfold_buffer(first_buffer_id, cx);
@@ -3564,7 +3573,16 @@ mod tests {
 
         // Manually unfold the second buffer too
         let second_buffer_id = editor.read_with(cx, |editor, cx| {
-            editor.buffer().read(cx).excerpt_buffer_ids()[1]
+            editor
+                .buffer()
+                .read(cx)
+                .snapshot(cx)
+                .excerpts()
+                .nth(1)
+                .unwrap()
+                .context
+                .start
+                .buffer_id
         });
         editor.update_in(cx, |editor, _window, cx| {
             editor.unfold_buffer(second_buffer_id, cx);

crates/search/src/project_search.rs 🔗

@@ -11,8 +11,8 @@ use crate::{
 use anyhow::Context as _;
 use collections::HashMap;
 use editor::{
-    Anchor, Editor, EditorEvent, EditorSettings, ExcerptId, MAX_TAB_TITLE_LEN, MultiBuffer,
-    PathKey, SelectionEffects,
+    Anchor, Editor, EditorEvent, EditorSettings, MAX_TAB_TITLE_LEN, MultiBuffer, PathKey,
+    SelectionEffects,
     actions::{Backtab, FoldAll, SelectAll, Tab, UnfoldAll},
     items::active_match_index,
     multibuffer_context_lines,
@@ -342,41 +342,32 @@ impl ProjectSearch {
     }
 
     fn remove_deleted_buffers(&mut self, cx: &mut Context<Self>) {
-        let (deleted_paths, removed_excerpt_ids) = {
-            let excerpts = self.excerpts.read(cx);
-            let deleted_paths: Vec<PathKey> = excerpts
-                .paths()
-                .filter(|path| {
-                    excerpts.buffer_for_path(path, cx).is_some_and(|buffer| {
-                        buffer
-                            .read(cx)
-                            .file()
-                            .is_some_and(|file| file.disk_state().is_deleted())
-                    })
-                })
-                .cloned()
-                .collect();
-
-            let removed_excerpt_ids: collections::HashSet<ExcerptId> = deleted_paths
-                .iter()
-                .flat_map(|path| excerpts.excerpts_for_path(path))
-                .collect();
-
-            (deleted_paths, removed_excerpt_ids)
-        };
+        let deleted_buffer_ids = self
+            .excerpts
+            .read(cx)
+            .all_buffers_iter()
+            .filter(|buffer| {
+                buffer
+                    .read(cx)
+                    .file()
+                    .is_some_and(|file| file.disk_state().is_deleted())
+            })
+            .map(|buffer| buffer.read(cx).remote_id())
+            .collect::<Vec<_>>();
 
-        if deleted_paths.is_empty() {
+        if deleted_buffer_ids.is_empty() {
             return;
         }
 
-        self.excerpts.update(cx, |excerpts, cx| {
-            for path in deleted_paths {
-                excerpts.remove_excerpts_for_path(path, cx);
+        let snapshot = self.excerpts.update(cx, |excerpts, cx| {
+            for buffer_id in deleted_buffer_ids {
+                excerpts.remove_excerpts_for_buffer(buffer_id, cx);
             }
+            excerpts.snapshot(cx)
         });
 
         self.match_ranges
-            .retain(|range| !removed_excerpt_ids.contains(&range.start.excerpt_id));
+            .retain(|range| snapshot.anchor_to_buffer_anchor(range.start).is_some());
 
         cx.notify();
     }
@@ -2990,7 +2981,13 @@ pub mod tests {
                     .read(cx)
                     .buffer()
                     .read(cx)
-                    .excerpt_buffer_ids()[0]
+                    .snapshot(cx)
+                    .excerpts()
+                    .next()
+                    .unwrap()
+                    .context
+                    .start
+                    .buffer_id
             })
             .expect("should read buffer ids");
 

crates/settings_content/src/agent.rs 🔗

@@ -81,11 +81,14 @@ pub enum SidebarSide {
 )]
 #[serde(rename_all = "snake_case")]
 pub enum ThinkingBlockDisplay {
+    /// Thinking blocks fully expand during streaming, then auto-collapse
+    /// when the model finishes thinking. Users can re-expand after collapse.
+    #[default]
+    Auto,
     /// Thinking blocks auto-expand with a height constraint during streaming,
     /// then remain in their constrained state when complete. Users can click
     /// to fully expand or collapse.
-    #[default]
-    Automatic,
+    Preview,
     /// Thinking blocks are always fully expanded by default (no height constraint).
     AlwaysExpanded,
     /// Thinking blocks are always collapsed by default.
@@ -156,10 +159,10 @@ pub struct AgentSettingsContent {
     ///
     /// Default: "primary_screen"
     pub notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
-    /// Whether to play a sound when the agent has either completed its response, or needs user input.
+    /// When to play a sound when the agent has either completed its response, or needs user input.
     ///
-    /// Default: false
-    pub play_sound_when_agent_done: Option<bool>,
+    /// Default: never
+    pub play_sound_when_agent_done: Option<PlaySoundWhenAgentDone>,
     /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane.
     ///
     /// Default: true
@@ -344,6 +347,37 @@ pub enum NotifyWhenAgentWaiting {
     Never,
 }
 
+#[derive(
+    Copy,
+    Clone,
+    Default,
+    Debug,
+    Serialize,
+    Deserialize,
+    JsonSchema,
+    MergeFrom,
+    PartialEq,
+    strum::VariantArray,
+    strum::VariantNames,
+)]
+#[serde(rename_all = "snake_case")]
+pub enum PlaySoundWhenAgentDone {
+    #[default]
+    Never,
+    WhenHidden,
+    Always,
+}
+
+impl PlaySoundWhenAgentDone {
+    pub fn should_play(&self, visible: bool) -> bool {
+        match self {
+            PlaySoundWhenAgentDone::Never => false,
+            PlaySoundWhenAgentDone::WhenHidden => !visible,
+            PlaySoundWhenAgentDone::Always => true,
+        }
+    }
+}
+
 #[with_fallible_options]
 #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq)]
 pub struct LanguageModelSelection {

crates/settings_content/src/settings_content.rs 🔗

@@ -65,7 +65,8 @@ macro_rules! settings_overrides {
         }
     }
 }
-use std::collections::BTreeSet;
+use std::collections::{BTreeMap, BTreeSet};
+use std::hash::Hash;
 use std::sync::Arc;
 pub use util::serde::default_true;
 
@@ -1023,6 +1024,8 @@ pub struct DevContainerConnection {
     pub remote_user: String,
     pub container_id: String,
     pub use_podman: bool,
+    pub extension_ids: Vec<String>,
+    pub remote_env: BTreeMap<String, String>,
 }
 
 #[with_fallible_options]

crates/settings_ui/src/page_data.rs 🔗

@@ -7278,7 +7278,7 @@ fn ai_page(cx: &App) -> SettingsPage {
             }),
             SettingsPageItem::SettingItem(SettingItem {
                 title: "Play Sound When Agent Done",
-                description: "Whether to play a sound when the agent has either completed its response, or needs user input.",
+                description: "When to play a sound when the agent has either completed its response, or needs user input.",
                 field: Box::new(SettingField {
                     json_path: Some("agent.play_sound_when_agent_done"),
                     pick: |settings_content| {
@@ -7340,7 +7340,7 @@ fn ai_page(cx: &App) -> SettingsPage {
             }),
             SettingsPageItem::SettingItem(SettingItem {
                 title: "Thinking Display",
-                description: "How thinking blocks should be displayed by default. 'Automatic' auto-expands with a height constraint during streaming. 'Always Expanded' shows full content. 'Always Collapsed' keeps them collapsed.",
+                description: "How thinking blocks should be displayed by default. 'Auto' fully expands during streaming, then auto-collapses when done. 'Preview' auto-expands with a height constraint during streaming. 'Always Expanded' shows full content. 'Always Collapsed' keeps them collapsed.",
                 field: Box::new(SettingField {
                     json_path: Some("agent.thinking_display"),
                     pick: |settings_content| {

crates/settings_ui/src/settings_ui.rs 🔗

@@ -523,6 +523,7 @@ fn init_renderers(cx: &mut App) {
         .add_basic_renderer::<settings::VimInsertModeCursorShape>(render_dropdown)
         .add_basic_renderer::<settings::SteppingGranularity>(render_dropdown)
         .add_basic_renderer::<settings::NotifyWhenAgentWaiting>(render_dropdown)
+        .add_basic_renderer::<settings::PlaySoundWhenAgentDone>(render_dropdown)
         .add_basic_renderer::<settings::NewThreadLocation>(render_dropdown)
         .add_basic_renderer::<settings::ThinkingBlockDisplay>(render_dropdown)
         .add_basic_renderer::<settings::ImageFileSizeUnit>(render_dropdown)

crates/sidebar/src/sidebar.rs 🔗

@@ -4504,6 +4504,7 @@ impl Sidebar {
 
         let archive_view = cx.new(|cx| {
             ThreadsArchiveView::new(
+                active_workspace.downgrade(),
                 agent_connection_store.clone(),
                 agent_server_store.clone(),
                 window,

crates/tasks_ui/src/tasks_ui.rs 🔗

@@ -321,13 +321,11 @@ pub fn task_contexts(
         })
         .unwrap_or_default();
 
-    let latest_selection = active_editor.as_ref().map(|active_editor| {
-        active_editor
-            .read(cx)
-            .selections
-            .newest_anchor()
-            .head()
-            .text_anchor
+    let latest_selection = active_editor.as_ref().and_then(|active_editor| {
+        let snapshot = active_editor.read(cx).buffer().read(cx).snapshot(cx);
+        snapshot
+            .anchor_to_buffer_anchor(active_editor.read(cx).selections.newest_anchor().head())
+            .map(|(anchor, _)| anchor)
     });
 
     let mut worktree_abs_paths = workspace

crates/text/src/anchor.rs 🔗

@@ -24,7 +24,7 @@ pub struct Anchor {
     /// Whether this anchor stays attached to the character *before* or *after*
     /// the offset.
     pub bias: Bias,
-    pub buffer_id: Option<BufferId>,
+    pub buffer_id: BufferId,
 }
 
 impl Debug for Anchor {
@@ -46,28 +46,7 @@ impl Debug for Anchor {
 }
 
 impl Anchor {
-    pub const MIN: Self = Self {
-        timestamp_replica_id: clock::Lamport::MIN.replica_id,
-        timestamp_value: clock::Lamport::MIN.value,
-        offset: u32::MIN,
-        bias: Bias::Left,
-        buffer_id: None,
-    };
-
-    pub const MAX: Self = Self {
-        timestamp_replica_id: clock::Lamport::MAX.replica_id,
-        timestamp_value: clock::Lamport::MAX.value,
-        offset: u32::MAX,
-        bias: Bias::Right,
-        buffer_id: None,
-    };
-
-    pub fn new(
-        timestamp: clock::Lamport,
-        offset: u32,
-        bias: Bias,
-        buffer_id: Option<BufferId>,
-    ) -> Self {
+    pub fn new(timestamp: clock::Lamport, offset: u32, bias: Bias, buffer_id: BufferId) -> Self {
         Self {
             timestamp_replica_id: timestamp.replica_id,
             timestamp_value: timestamp.value,
@@ -83,7 +62,7 @@ impl Anchor {
             timestamp_value: clock::Lamport::MIN.value,
             offset: u32::MIN,
             bias: Bias::Left,
-            buffer_id: Some(buffer_id),
+            buffer_id,
         }
     }
 
@@ -93,7 +72,7 @@ impl Anchor {
             timestamp_value: clock::Lamport::MAX.value,
             offset: u32::MAX,
             bias: Bias::Right,
-            buffer_id: Some(buffer_id),
+            buffer_id,
         }
     }
 
@@ -171,7 +150,7 @@ impl Anchor {
     pub fn is_valid(&self, buffer: &BufferSnapshot) -> bool {
         if self.is_min() || self.is_max() {
             true
-        } else if self.buffer_id.is_none_or(|id| id != buffer.remote_id) {
+        } else if self.buffer_id != buffer.remote_id {
             false
         } else {
             let Some(fragment_id) = buffer.try_fragment_id_for_anchor(self) else {
@@ -207,6 +186,18 @@ impl Anchor {
             value: self.timestamp_value,
         }
     }
+
+    pub fn opaque_id(&self) -> [u8; 20] {
+        let mut bytes = [0u8; 20];
+        let buffer_id: u64 = self.buffer_id.into();
+        bytes[0..8].copy_from_slice(&buffer_id.to_le_bytes());
+        bytes[8..12].copy_from_slice(&self.offset.to_le_bytes());
+        bytes[12..16].copy_from_slice(&self.timestamp_value.to_le_bytes());
+        let replica_id = self.timestamp_replica_id.as_u16();
+        bytes[16..18].copy_from_slice(&replica_id.to_le_bytes());
+        bytes[18] = self.bias as u8;
+        bytes
+    }
 }
 
 pub trait OffsetRangeExt {
@@ -237,6 +228,7 @@ where
 pub trait AnchorRangeExt {
     fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Ordering;
     fn overlaps(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> bool;
+    fn contains_anchor(&self, b: Anchor, buffer: &BufferSnapshot) -> bool;
 }
 
 impl AnchorRangeExt for Range<Anchor> {
@@ -250,4 +242,8 @@ impl AnchorRangeExt for Range<Anchor> {
     fn overlaps(&self, other: &Range<Anchor>, buffer: &BufferSnapshot) -> bool {
         self.start.cmp(&other.end, buffer).is_lt() && other.start.cmp(&self.end, buffer).is_lt()
     }
+
+    fn contains_anchor(&self, other: Anchor, buffer: &BufferSnapshot) -> bool {
+        self.start.cmp(&other, buffer).is_le() && self.end.cmp(&other, buffer).is_ge()
+    }
 }

crates/text/src/patch.rs 🔗

@@ -56,7 +56,10 @@ where
         if edit.is_empty() {
             return;
         }
+        self.push_maybe_empty(edit);
+    }
 
+    pub fn push_maybe_empty(&mut self, edit: Edit<T>) {
         if let Some(last) = self.0.last_mut() {
             if last.old.end >= edit.old.start {
                 last.old.end = edit.old.end;

crates/text/src/text.rs 🔗

@@ -2377,7 +2377,7 @@ impl BufferSnapshot {
     pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
     where
         D: 'a + TextDimension,
-        A: 'a + IntoIterator<Item = &'a Anchor>,
+        A: 'a + IntoIterator<Item = Anchor>,
     {
         let anchors = anchors.into_iter();
         self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
@@ -2390,7 +2390,7 @@ impl BufferSnapshot {
     ) -> impl 'a + Iterator<Item = (D, T)>
     where
         D: 'a + TextDimension,
-        A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
+        A: 'a + IntoIterator<Item = (Anchor, T)>,
     {
         let anchors = anchors.into_iter();
         let mut fragment_cursor = self
@@ -2406,7 +2406,7 @@ impl BufferSnapshot {
                 return (D::from_text_summary(&self.visible_text.summary()), payload);
             }
 
-            let Some(insertion) = self.try_find_fragment(anchor) else {
+            let Some(insertion) = self.try_find_fragment(&anchor) else {
                 panic!(
                     "invalid insertion for buffer {}@{:?} with anchor {:?}",
                     self.remote_id(),
@@ -2457,7 +2457,7 @@ impl BufferSnapshot {
         } else if anchor.is_max() {
             self.visible_text.len()
         } else {
-            debug_assert_eq!(anchor.buffer_id, Some(self.remote_id));
+            debug_assert_eq!(anchor.buffer_id, self.remote_id);
             debug_assert!(
                 self.version.observed(anchor.timestamp()),
                 "Anchor timestamp {:?} not observed by buffer {:?}",
@@ -2489,7 +2489,7 @@ impl BufferSnapshot {
 
     #[cold]
     fn panic_bad_anchor(&self, anchor: &Anchor) -> ! {
-        if anchor.buffer_id.is_some_and(|id| id != self.remote_id) {
+        if anchor.buffer_id != self.remote_id {
             panic!(
                 "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}",
                 self.remote_id, self.version
@@ -2553,12 +2553,12 @@ impl BufferSnapshot {
     }
 
     /// Returns an anchor range for the given input position range that is anchored to the text in the range.
-    pub fn anchor_range_around<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
+    pub fn anchor_range_inside<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
         self.anchor_after(position.start)..self.anchor_before(position.end)
     }
 
     /// Returns an anchor range for the given input position range that is anchored to the text before and after.
-    pub fn anchor_range_between<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
+    pub fn anchor_range_outside<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
         self.anchor_before(position.start)..self.anchor_after(position.end)
     }
 
@@ -2608,7 +2608,7 @@ impl BufferSnapshot {
                 fragment.timestamp,
                 fragment.insertion_offset + overshoot as u32,
                 bias,
-                Some(self.remote_id),
+                self.remote_id,
             )
         }
     }
@@ -2616,8 +2616,7 @@ impl BufferSnapshot {
     pub fn can_resolve(&self, anchor: &Anchor) -> bool {
         anchor.is_min()
             || anchor.is_max()
-            || (Some(self.remote_id) == anchor.buffer_id
-                && self.version.observed(anchor.timestamp()))
+            || (self.remote_id == anchor.buffer_id && self.version.observed(anchor.timestamp()))
     }
 
     pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
@@ -2643,7 +2642,10 @@ impl BufferSnapshot {
     where
         D: TextDimension + Ord,
     {
-        self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
+        self.edits_since_in_range(
+            since,
+            Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id),
+        )
     }
 
     pub fn anchored_edits_since<'a, D>(
@@ -2653,7 +2655,10 @@ impl BufferSnapshot {
     where
         D: TextDimension + Ord,
     {
-        self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
+        self.anchored_edits_since_in_range(
+            since,
+            Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id),
+        )
     }
 
     pub fn edits_since_in_range<'a, D>(
@@ -2916,13 +2921,13 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
                 fragment.timestamp,
                 fragment.insertion_offset,
                 Bias::Right,
-                Some(self.buffer_id),
+                self.buffer_id,
             );
             let end_anchor = Anchor::new(
                 fragment.timestamp,
                 fragment.insertion_offset + fragment.len,
                 Bias::Left,
-                Some(self.buffer_id),
+                self.buffer_id,
             );
 
             if !fragment.was_visible(self.since, self.undos) && fragment.visible {

crates/toolchain_selector/src/active_toolchain.rs 🔗

@@ -117,7 +117,7 @@ impl ActiveToolchain {
         cx: &mut Context<Self>,
     ) {
         let editor = editor.read(cx);
-        if let Some((_, buffer, _)) = editor.active_excerpt(cx)
+        if let Some(buffer) = editor.active_buffer(cx)
             && let Some(worktree_id) = buffer.read(cx).file().map(|file| file.worktree_id(cx))
         {
             let subscription = cx.subscribe_in(

crates/toolchain_selector/src/toolchain_selector.rs 🔗

@@ -584,11 +584,11 @@ impl ToolchainSelector {
         window: &mut Window,
         cx: &mut Context<Workspace>,
     ) -> Option<()> {
-        let (_, buffer, _) = workspace
+        let buffer = workspace
             .active_item(cx)?
             .act_as::<Editor>(cx)?
             .read(cx)
-            .active_excerpt(cx)?;
+            .active_buffer(cx)?;
         let project = workspace.project().clone();
 
         let language_name = buffer.read(cx).language()?.name();

crates/ui/src/components/data_table.rs 🔗

@@ -1,14 +1,15 @@
 use std::{ops::Range, rc::Rc};
 
 use gpui::{
-    AbsoluteLength, AppContext, Context, DefiniteLength, DragMoveEvent, Entity, EntityId,
-    FocusHandle, Length, ListHorizontalSizingBehavior, ListSizingBehavior, ListState, Point,
-    Stateful, UniformListScrollHandle, WeakEntity, list, transparent_black, uniform_list,
+    AbsoluteLength, AppContext as _, DefiniteLength, DragMoveEvent, Entity, EntityId, FocusHandle,
+    Length, ListHorizontalSizingBehavior, ListSizingBehavior, ListState, Point, Stateful,
+    UniformListScrollHandle, WeakEntity, list, transparent_black, uniform_list,
 };
+use itertools::intersperse_with;
 
 use crate::{
     ActiveTheme as _, AnyElement, App, Button, ButtonCommon as _, ButtonStyle, Color, Component,
-    ComponentScope, Div, ElementId, FixedWidth as _, FluentBuilder as _, Indicator,
+    ComponentScope, Context, Div, ElementId, FixedWidth as _, FluentBuilder as _, Indicator,
     InteractiveElement, IntoElement, ParentElement, Pixels, RegisterComponent, RenderOnce,
     ScrollAxes, ScrollableHandle, Scrollbars, SharedString, StatefulInteractiveElement, Styled,
     StyledExt as _, StyledTypography, Window, WithScrollbar, div, example_group_with_title, h_flex,
@@ -16,20 +17,20 @@ use crate::{
     table_row::{IntoTableRow as _, TableRow},
     v_flex,
 };
-use itertools::intersperse_with;
 
 pub mod table_row;
 #[cfg(test)]
 mod tests;
 
 const RESIZE_COLUMN_WIDTH: f32 = 8.0;
+const RESIZE_DIVIDER_WIDTH: f32 = 1.0;
 
 /// Represents an unchecked table row, which is a vector of elements.
 /// Will be converted into `TableRow<T>` internally
 pub type UncheckedTableRow<T> = Vec<T>;
 
 #[derive(Debug)]
-struct DraggedColumn(usize);
+pub(crate) struct DraggedColumn(pub(crate) usize);
 
 struct UniformListData {
     render_list_of_rows_fn:
@@ -110,106 +111,103 @@ impl TableInteractionState {
             view.update(cx, |view, cx| f(view, e, window, cx)).ok();
         }
     }
+}
 
-    /// Renders invisible resize handles overlaid on top of table content.
-    ///
-    /// - Spacer: invisible element that matches the width of table column content
-    /// - Divider: contains the actual resize handle that users can drag to resize columns
-    ///
-    /// Structure: [spacer] [divider] [spacer] [divider] [spacer]
-    ///
-    /// Business logic:
-    /// 1. Creates spacers matching each column width
-    /// 2. Intersperses (inserts) resize handles between spacers (interactive only for resizable columns)
-    /// 3. Each handle supports hover highlighting, double-click to reset, and drag to resize
-    /// 4. Returns an absolute-positioned overlay that sits on top of table content
-    fn render_resize_handles(
-        &self,
-        column_widths: &TableRow<Length>,
-        resizable_columns: &TableRow<TableResizeBehavior>,
-        initial_sizes: &TableRow<DefiniteLength>,
-        columns: Option<Entity<TableColumnWidths>>,
-        window: &mut Window,
-        cx: &mut App,
-    ) -> AnyElement {
-        let spacers = column_widths
-            .as_slice()
-            .iter()
-            .map(|width| base_cell_style(Some(*width)).into_any_element());
-
-        let mut column_ix = 0;
-        let resizable_columns_shared = Rc::new(resizable_columns.clone());
-        let initial_sizes_shared = Rc::new(initial_sizes.clone());
-        let mut resizable_columns_iter = resizable_columns.as_slice().iter();
-
-        // Insert dividers between spacers (column content)
-        let dividers = intersperse_with(spacers, || {
-            let resizable_columns = Rc::clone(&resizable_columns_shared);
-            let initial_sizes = Rc::clone(&initial_sizes_shared);
-            window.with_id(column_ix, |window| {
-                let mut resize_divider = div()
-                    // This is required because this is evaluated at a different time than the use_state call above
-                    .id(column_ix)
-                    .relative()
-                    .top_0()
-                    .w_px()
-                    .h_full()
-                    .bg(cx.theme().colors().border.opacity(0.8));
-
-                let mut resize_handle = div()
-                    .id("column-resize-handle")
-                    .absolute()
-                    .left_neg_0p5()
-                    .w(px(RESIZE_COLUMN_WIDTH))
-                    .h_full();
-
-                if resizable_columns_iter
-                    .next()
-                    .is_some_and(TableResizeBehavior::is_resizable)
-                {
-                    let hovered = window.use_state(cx, |_window, _cx| false);
-
-                    resize_divider = resize_divider.when(*hovered.read(cx), |div| {
-                        div.bg(cx.theme().colors().border_focused)
-                    });
-
-                    resize_handle = resize_handle
-                        .on_hover(move |&was_hovered, _, cx| hovered.write(cx, was_hovered))
-                        .cursor_col_resize()
-                        .when_some(columns.clone(), |this, columns| {
-                            this.on_click(move |event, window, cx| {
-                                if event.click_count() >= 2 {
-                                    columns.update(cx, |columns, _| {
-                                        columns.on_double_click(
-                                            column_ix,
-                                            &initial_sizes,
-                                            &resizable_columns,
-                                            window,
-                                        );
-                                    })
-                                }
+/// Renders invisible resize handles overlaid on top of table content.
+///
+/// - Spacer: invisible element that matches the width of table column content
+/// - Divider: contains the actual resize handle that users can drag to resize columns
+///
+/// Structure: [spacer] [divider] [spacer] [divider] [spacer]
+///
+/// Business logic:
+/// 1. Creates spacers matching each column width
+/// 2. Intersperses (inserts) resize handles between spacers (interactive only for resizable columns)
+/// 3. Each handle supports hover highlighting, double-click to reset, and drag to resize
+/// 4. Returns an absolute-positioned overlay that sits on top of table content
+fn render_resize_handles(
+    column_widths: &TableRow<Length>,
+    resizable_columns: &TableRow<TableResizeBehavior>,
+    initial_sizes: &TableRow<DefiniteLength>,
+    columns: Option<Entity<RedistributableColumnsState>>,
+    window: &mut Window,
+    cx: &mut App,
+) -> AnyElement {
+    let spacers = column_widths
+        .as_slice()
+        .iter()
+        .map(|width| base_cell_style(Some(*width)).into_any_element());
+
+    let mut column_ix = 0;
+    let resizable_columns_shared = Rc::new(resizable_columns.clone());
+    let initial_sizes_shared = Rc::new(initial_sizes.clone());
+    let mut resizable_columns_iter = resizable_columns.as_slice().iter();
+
+    let dividers = intersperse_with(spacers, || {
+        let resizable_columns = Rc::clone(&resizable_columns_shared);
+        let initial_sizes = Rc::clone(&initial_sizes_shared);
+        window.with_id(column_ix, |window| {
+            let mut resize_divider = div()
+                .id(column_ix)
+                .relative()
+                .top_0()
+                .w(px(RESIZE_DIVIDER_WIDTH))
+                .h_full()
+                .bg(cx.theme().colors().border.opacity(0.8));
+
+            let mut resize_handle = div()
+                .id("column-resize-handle")
+                .absolute()
+                .left_neg_0p5()
+                .w(px(RESIZE_COLUMN_WIDTH))
+                .h_full();
+
+            if resizable_columns_iter
+                .next()
+                .is_some_and(TableResizeBehavior::is_resizable)
+            {
+                let hovered = window.use_state(cx, |_window, _cx| false);
+
+                resize_divider = resize_divider.when(*hovered.read(cx), |div| {
+                    div.bg(cx.theme().colors().border_focused)
+                });
+
+                resize_handle = resize_handle
+                    .on_hover(move |&was_hovered, _, cx| hovered.write(cx, was_hovered))
+                    .cursor_col_resize()
+                    .when_some(columns.clone(), |this, columns| {
+                        this.on_click(move |event, window, cx| {
+                            if event.click_count() >= 2 {
+                                columns.update(cx, |columns, _| {
+                                    columns.on_double_click(
+                                        column_ix,
+                                        &initial_sizes,
+                                        &resizable_columns,
+                                        window,
+                                    );
+                                })
+                            }
 
-                                cx.stop_propagation();
-                            })
+                            cx.stop_propagation();
                         })
-                        .on_drag(DraggedColumn(column_ix), |_, _offset, _window, cx| {
-                            cx.new(|_cx| gpui::Empty)
-                        })
-                }
+                    })
+                    .on_drag(DraggedColumn(column_ix), |_, _offset, _window, cx| {
+                        cx.new(|_cx| gpui::Empty)
+                    })
+            }
 
-                column_ix += 1;
-                resize_divider.child(resize_handle).into_any_element()
-            })
-        });
+            column_ix += 1;
+            resize_divider.child(resize_handle).into_any_element()
+        })
+    });
 
-        h_flex()
-            .id("resize-handles")
-            .absolute()
-            .inset_0()
-            .w_full()
-            .children(dividers)
-            .into_any_element()
-    }
+    h_flex()
+        .id("resize-handles")
+        .absolute()
+        .inset_0()
+        .w_full()
+        .children(dividers)
+        .into_any_element()
 }
 
 #[derive(Debug, Copy, Clone, PartialEq)]
@@ -233,25 +231,181 @@ impl TableResizeBehavior {
     }
 }
 
-pub struct TableColumnWidths {
-    widths: TableRow<DefiniteLength>,
-    visible_widths: TableRow<DefiniteLength>,
-    cached_bounds_width: Pixels,
-    initialized: bool,
+pub enum ColumnWidthConfig {
+    /// Static column widths (no resize handles).
+    Static {
+        widths: StaticColumnWidths,
+        /// Controls widths of the whole table.
+        table_width: Option<DefiniteLength>,
+    },
+    /// Redistributable columns — dragging redistributes the fixed available space
+    /// among columns without changing the overall table width.
+    Redistributable {
+        columns_state: Entity<RedistributableColumnsState>,
+        table_width: Option<DefiniteLength>,
+    },
+}
+
+pub enum StaticColumnWidths {
+    /// All columns share space equally (flex-1 / Length::Auto).
+    Auto,
+    /// Each column has a specific width.
+    Explicit(TableRow<DefiniteLength>),
 }
 
-impl TableColumnWidths {
-    pub fn new(cols: usize, _: &mut App) -> Self {
+impl ColumnWidthConfig {
+    /// Auto-width columns, auto-size table.
+    pub fn auto() -> Self {
+        ColumnWidthConfig::Static {
+            widths: StaticColumnWidths::Auto,
+            table_width: None,
+        }
+    }
+
+    /// Redistributable columns with no fixed table width.
+    pub fn redistributable(columns_state: Entity<RedistributableColumnsState>) -> Self {
+        ColumnWidthConfig::Redistributable {
+            columns_state,
+            table_width: None,
+        }
+    }
+
+    /// Auto-width columns, fixed table width.
+    pub fn auto_with_table_width(width: impl Into<DefiniteLength>) -> Self {
+        ColumnWidthConfig::Static {
+            widths: StaticColumnWidths::Auto,
+            table_width: Some(width.into()),
+        }
+    }
+
+    /// Column widths for rendering.
+    pub fn widths_to_render(&self, cx: &App) -> Option<TableRow<Length>> {
+        match self {
+            ColumnWidthConfig::Static {
+                widths: StaticColumnWidths::Auto,
+                ..
+            } => None,
+            ColumnWidthConfig::Static {
+                widths: StaticColumnWidths::Explicit(widths),
+                ..
+            } => Some(widths.map_cloned(Length::Definite)),
+            ColumnWidthConfig::Redistributable {
+                columns_state: entity,
+                ..
+            } => {
+                let state = entity.read(cx);
+                Some(state.preview_widths.map_cloned(Length::Definite))
+            }
+        }
+    }
+
+    /// Table-level width.
+    pub fn table_width(&self) -> Option<Length> {
+        match self {
+            ColumnWidthConfig::Static { table_width, .. }
+            | ColumnWidthConfig::Redistributable { table_width, .. } => {
+                table_width.map(Length::Definite)
+            }
+        }
+    }
+
+    /// ListHorizontalSizingBehavior for uniform_list.
+    pub fn list_horizontal_sizing(&self) -> ListHorizontalSizingBehavior {
+        match self.table_width() {
+            Some(_) => ListHorizontalSizingBehavior::Unconstrained,
+            None => ListHorizontalSizingBehavior::FitList,
+        }
+    }
+
+    /// Render resize handles overlay if applicable.
+    pub fn render_resize_handles(&self, window: &mut Window, cx: &mut App) -> Option<AnyElement> {
+        match self {
+            ColumnWidthConfig::Redistributable {
+                columns_state: entity,
+                ..
+            } => {
+                let (column_widths, resize_behavior, initial_widths) = {
+                    let state = entity.read(cx);
+                    (
+                        state.preview_widths.map_cloned(Length::Definite),
+                        state.resize_behavior.clone(),
+                        state.initial_widths.clone(),
+                    )
+                };
+                Some(render_resize_handles(
+                    &column_widths,
+                    &resize_behavior,
+                    &initial_widths,
+                    Some(entity.clone()),
+                    window,
+                    cx,
+                ))
+            }
+            _ => None,
+        }
+    }
+
+    /// Returns info needed for header double-click-to-reset, if applicable.
+    pub fn header_resize_info(&self, cx: &App) -> Option<HeaderResizeInfo> {
+        match self {
+            ColumnWidthConfig::Redistributable { columns_state, .. } => {
+                let state = columns_state.read(cx);
+                Some(HeaderResizeInfo {
+                    columns_state: columns_state.downgrade(),
+                    resize_behavior: state.resize_behavior.clone(),
+                    initial_widths: state.initial_widths.clone(),
+                })
+            }
+            _ => None,
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct HeaderResizeInfo {
+    pub columns_state: WeakEntity<RedistributableColumnsState>,
+    pub resize_behavior: TableRow<TableResizeBehavior>,
+    pub initial_widths: TableRow<DefiniteLength>,
+}
+
+pub struct RedistributableColumnsState {
+    pub(crate) initial_widths: TableRow<DefiniteLength>,
+    pub(crate) committed_widths: TableRow<DefiniteLength>,
+    pub(crate) preview_widths: TableRow<DefiniteLength>,
+    pub(crate) resize_behavior: TableRow<TableResizeBehavior>,
+    pub(crate) cached_table_width: Pixels,
+}
+
+impl RedistributableColumnsState {
+    pub fn new(
+        cols: usize,
+        initial_widths: UncheckedTableRow<impl Into<DefiniteLength>>,
+        resize_behavior: UncheckedTableRow<TableResizeBehavior>,
+    ) -> Self {
+        let widths: TableRow<DefiniteLength> = initial_widths
+            .into_iter()
+            .map(Into::into)
+            .collect::<Vec<_>>()
+            .into_table_row(cols);
         Self {
-            widths: vec![DefiniteLength::default(); cols].into_table_row(cols),
-            visible_widths: vec![DefiniteLength::default(); cols].into_table_row(cols),
-            cached_bounds_width: Default::default(),
-            initialized: false,
+            initial_widths: widths.clone(),
+            committed_widths: widths.clone(),
+            preview_widths: widths,
+            resize_behavior: resize_behavior.into_table_row(cols),
+            cached_table_width: Default::default(),
         }
     }
 
     pub fn cols(&self) -> usize {
-        self.widths.cols()
+        self.committed_widths.cols()
+    }
+
+    pub fn initial_widths(&self) -> &TableRow<DefiniteLength> {
+        &self.initial_widths
+    }
+
+    pub fn resize_behavior(&self) -> &TableRow<TableResizeBehavior> {
+        &self.resize_behavior
     }
 
     fn get_fraction(length: &DefiniteLength, bounds_width: Pixels, rem_size: Pixels) -> f32 {
@@ -264,19 +418,19 @@ impl TableColumnWidths {
         }
     }
 
-    fn on_double_click(
+    pub(crate) fn on_double_click(
         &mut self,
         double_click_position: usize,
         initial_sizes: &TableRow<DefiniteLength>,
         resize_behavior: &TableRow<TableResizeBehavior>,
         window: &mut Window,
     ) {
-        let bounds_width = self.cached_bounds_width;
+        let bounds_width = self.cached_table_width;
         let rem_size = window.rem_size();
         let initial_sizes =
             initial_sizes.map_ref(|length| Self::get_fraction(length, bounds_width, rem_size));
         let widths = self
-            .widths
+            .committed_widths
             .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size));
 
         let updated_widths = Self::reset_to_initial_size(
@@ -285,53 +439,16 @@ impl TableColumnWidths {
             initial_sizes,
             resize_behavior,
         );
-        self.widths = updated_widths.map(DefiniteLength::Fraction);
-        self.visible_widths = self.widths.clone(); // previously was copy
+        self.committed_widths = updated_widths.map(DefiniteLength::Fraction);
+        self.preview_widths = self.committed_widths.clone();
     }
 
-    fn reset_to_initial_size(
+    pub(crate) fn reset_to_initial_size(
         col_idx: usize,
         mut widths: TableRow<f32>,
         initial_sizes: TableRow<f32>,
         resize_behavior: &TableRow<TableResizeBehavior>,
     ) -> TableRow<f32> {
-        // RESET:
-        // Part 1:
-        // Figure out if we should shrink/grow the selected column
-        // Get diff which represents the change in column we want to make initial size delta curr_size = diff
-        //
-        // Part 2: We need to decide which side column we should move and where
-        //
-        // If we want to grow our column we should check the left/right columns diff to see what side
-        // has a greater delta than their initial size. Likewise, if we shrink our column we should check
-        // the left/right column diffs to see what side has the smallest delta.
-        //
-        // Part 3: resize
-        //
-        // col_idx represents the column handle to the right of an active column
-        //
-        // If growing and right has the greater delta {
-        //    shift col_idx to the right
-        // } else if growing and left has the greater delta {
-        //  shift col_idx - 1 to the left
-        // } else if shrinking and the right has the greater delta {
-        //  shift
-        // } {
-        //
-        // }
-        // }
-        //
-        // if we need to shrink, then if the right
-        //
-
-        // DRAGGING
-        // we get diff which represents the change in the _drag handle_ position
-        // -diff => dragging left ->
-        //      grow the column to the right of the handle as much as we can shrink columns to the left of the handle
-        // +diff => dragging right -> growing handles column
-        //      grow the column to the left of the handle as much as we can shrink columns to the right of the handle
-        //
-
         let diff = initial_sizes[col_idx] - widths[col_idx];
 
         let left_diff =
@@ -376,10 +493,9 @@ impl TableColumnWidths {
         widths
     }
 
-    fn on_drag_move(
+    pub(crate) fn on_drag_move(
         &mut self,
         drag_event: &DragMoveEvent<DraggedColumn>,
-        resize_behavior: &TableRow<TableResizeBehavior>,
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
@@ -391,43 +507,42 @@ impl TableColumnWidths {
         let bounds_width = bounds.right() - bounds.left();
         let col_idx = drag_event.drag(cx).0;
 
-        let column_handle_width = Self::get_fraction(
-            &DefiniteLength::Absolute(AbsoluteLength::Pixels(px(RESIZE_COLUMN_WIDTH))),
+        let divider_width = Self::get_fraction(
+            &DefiniteLength::Absolute(AbsoluteLength::Pixels(px(RESIZE_DIVIDER_WIDTH))),
             bounds_width,
             rem_size,
         );
 
         let mut widths = self
-            .widths
+            .committed_widths
             .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size));
 
         for length in widths[0..=col_idx].iter() {
-            col_position += length + column_handle_width;
+            col_position += length + divider_width;
         }
 
         let mut total_length_ratio = col_position;
         for length in widths[col_idx + 1..].iter() {
             total_length_ratio += length;
         }
-        let cols = resize_behavior.cols();
-        total_length_ratio += (cols - 1 - col_idx) as f32 * column_handle_width;
+        let cols = self.resize_behavior.cols();
+        total_length_ratio += (cols - 1 - col_idx) as f32 * divider_width;
 
         let drag_fraction = (drag_position.x - bounds.left()) / bounds_width;
         let drag_fraction = drag_fraction * total_length_ratio;
-        let diff = drag_fraction - col_position - column_handle_width / 2.0;
+        let diff = drag_fraction - col_position - divider_width / 2.0;
 
-        Self::drag_column_handle(diff, col_idx, &mut widths, resize_behavior);
+        Self::drag_column_handle(diff, col_idx, &mut widths, &self.resize_behavior);
 
-        self.visible_widths = widths.map(DefiniteLength::Fraction);
+        self.preview_widths = widths.map(DefiniteLength::Fraction);
     }
 
-    fn drag_column_handle(
+    pub(crate) fn drag_column_handle(
         diff: f32,
         col_idx: usize,
         widths: &mut TableRow<f32>,
         resize_behavior: &TableRow<TableResizeBehavior>,
     ) {
-        // if diff > 0.0 then go right
         if diff > 0.0 {
             Self::propagate_resize_diff(diff, col_idx, widths, resize_behavior, 1);
         } else {
@@ -435,7 +550,7 @@ impl TableColumnWidths {
         }
     }
 
-    fn propagate_resize_diff(
+    pub(crate) fn propagate_resize_diff(
         diff: f32,
         col_idx: usize,
         widths: &mut TableRow<f32>,
@@ -493,44 +608,16 @@ impl TableColumnWidths {
     }
 }
 
-pub struct TableWidths {
-    initial: TableRow<DefiniteLength>,
-    current: Option<Entity<TableColumnWidths>>,
-    resizable: TableRow<TableResizeBehavior>,
-}
-
-impl TableWidths {
-    pub fn new(widths: TableRow<impl Into<DefiniteLength>>) -> Self {
-        let widths = widths.map(Into::into);
-
-        let expected_length = widths.cols();
-        TableWidths {
-            initial: widths,
-            current: None,
-            resizable: vec![TableResizeBehavior::None; expected_length]
-                .into_table_row(expected_length),
-        }
-    }
-
-    fn lengths(&self, cx: &App) -> TableRow<Length> {
-        self.current
-            .as_ref()
-            .map(|entity| entity.read(cx).visible_widths.map_cloned(Length::Definite))
-            .unwrap_or_else(|| self.initial.map_cloned(Length::Definite))
-    }
-}
-
 /// A table component
 #[derive(RegisterComponent, IntoElement)]
 pub struct Table {
     striped: bool,
     show_row_borders: bool,
     show_row_hover: bool,
-    width: Option<Length>,
     headers: Option<TableRow<AnyElement>>,
     rows: TableContents,
     interaction_state: Option<WeakEntity<TableInteractionState>>,
-    col_widths: Option<TableWidths>,
+    column_width_config: ColumnWidthConfig,
     map_row: Option<Rc<dyn Fn((usize, Stateful<Div>), &mut Window, &mut App) -> AnyElement>>,
     use_ui_font: bool,
     empty_table_callback: Option<Rc<dyn Fn(&mut Window, &mut App) -> AnyElement>>,
@@ -547,15 +634,14 @@ impl Table {
             striped: false,
             show_row_borders: true,
             show_row_hover: true,
-            width: None,
             headers: None,
             rows: TableContents::Vec(Vec::new()),
             interaction_state: None,
             map_row: None,
             use_ui_font: true,
             empty_table_callback: None,
-            col_widths: None,
             disable_base_cell_style: false,
+            column_width_config: ColumnWidthConfig::auto(),
         }
     }
 
@@ -626,10 +712,18 @@ impl Table {
         self
     }
 
-    /// Sets the width of the table.
-    /// Will enable horizontal scrolling if [`Self::interactable`] is also called.
-    pub fn width(mut self, width: impl Into<Length>) -> Self {
-        self.width = Some(width.into());
+    /// Sets a fixed table width with auto column widths.
+    ///
+    /// This is a shorthand for `.width_config(ColumnWidthConfig::auto_with_table_width(width))`.
+    /// For resizable columns or explicit column widths, use [`Table::width_config`] directly.
+    pub fn width(mut self, width: impl Into<DefiniteLength>) -> Self {
+        self.column_width_config = ColumnWidthConfig::auto_with_table_width(width);
+        self
+    }
+
+    /// Sets the column width configuration for the table.
+    pub fn width_config(mut self, config: ColumnWidthConfig) -> Self {
+        self.column_width_config = config;
         self
     }
 
@@ -637,10 +731,8 @@ impl Table {
     ///
     /// Vertical scrolling will be enabled by default if the table is taller than its container.
     ///
-    /// Horizontal scrolling will only be enabled if [`Self::width`] is also called, otherwise
-    /// the list will always shrink the table columns to fit their contents I.e. If [`Self::uniform_list`]
-    /// is used without a width and with [`Self::interactable`], the [`ListHorizontalSizingBehavior`] will
-    /// be set to [`ListHorizontalSizingBehavior::FitList`].
+    /// Horizontal scrolling will only be enabled if a table width is set via [`ColumnWidthConfig`],
+    /// otherwise the list will always shrink the table columns to fit their contents.
     pub fn interactable(mut self, interaction_state: &Entity<TableInteractionState>) -> Self {
         self.interaction_state = Some(interaction_state.downgrade());
         self
@@ -666,36 +758,6 @@ impl Table {
         self
     }
 
-    pub fn column_widths(mut self, widths: UncheckedTableRow<impl Into<DefiniteLength>>) -> Self {
-        if self.col_widths.is_none() {
-            self.col_widths = Some(TableWidths::new(widths.into_table_row(self.cols)));
-        }
-        self
-    }
-
-    pub fn resizable_columns(
-        mut self,
-        resizable: UncheckedTableRow<TableResizeBehavior>,
-        column_widths: &Entity<TableColumnWidths>,
-        cx: &mut App,
-    ) -> Self {
-        if let Some(table_widths) = self.col_widths.as_mut() {
-            table_widths.resizable = resizable.into_table_row(self.cols);
-            let column_widths = table_widths
-                .current
-                .get_or_insert_with(|| column_widths.clone());
-
-            column_widths.update(cx, |widths, _| {
-                if !widths.initialized {
-                    widths.initialized = true;
-                    widths.widths = table_widths.initial.clone();
-                    widths.visible_widths = widths.widths.clone();
-                }
-            })
-        }
-        self
-    }
-
     pub fn no_ui_font(mut self) -> Self {
         self.use_ui_font = false;
         self
@@ -812,11 +874,7 @@ pub fn render_table_row(
 pub fn render_table_header(
     headers: TableRow<impl IntoElement>,
     table_context: TableRenderContext,
-    columns_widths: Option<(
-        WeakEntity<TableColumnWidths>,
-        TableRow<TableResizeBehavior>,
-        TableRow<DefiniteLength>,
-    )>,
+    resize_info: Option<HeaderResizeInfo>,
     entity_id: Option<EntityId>,
     cx: &mut App,
 ) -> impl IntoElement {
@@ -837,9 +895,7 @@ pub fn render_table_header(
         .flex()
         .flex_row()
         .items_center()
-        .justify_between()
         .w_full()
-        .p_2()
         .border_b_1()
         .border_color(cx.theme().colors().border)
         .children(
@@ -850,34 +906,33 @@ pub fn render_table_header(
                 .zip(column_widths.into_vec())
                 .map(|((header_idx, h), width)| {
                     base_cell_style_text(width, table_context.use_ui_font, cx)
+                        .px_1()
+                        .py_0p5()
                         .child(h)
                         .id(ElementId::NamedInteger(
                             shared_element_id.clone(),
                             header_idx as u64,
                         ))
-                        .when_some(
-                            columns_widths.as_ref().cloned(),
-                            |this, (column_widths, resizables, initial_sizes)| {
-                                if resizables[header_idx].is_resizable() {
-                                    this.on_click(move |event, window, cx| {
-                                        if event.click_count() > 1 {
-                                            column_widths
-                                                .update(cx, |column, _| {
-                                                    column.on_double_click(
-                                                        header_idx,
-                                                        &initial_sizes,
-                                                        &resizables,
-                                                        window,
-                                                    );
-                                                })
-                                                .ok();
-                                        }
-                                    })
-                                } else {
-                                    this
-                                }
-                            },
-                        )
+                        .when_some(resize_info.as_ref().cloned(), |this, info| {
+                            if info.resize_behavior[header_idx].is_resizable() {
+                                this.on_click(move |event, window, cx| {
+                                    if event.click_count() > 1 {
+                                        info.columns_state
+                                            .update(cx, |column, _| {
+                                                column.on_double_click(
+                                                    header_idx,
+                                                    &info.initial_widths,
+                                                    &info.resize_behavior,
+                                                    window,
+                                                );
+                                            })
+                                            .ok();
+                                    }
+                                })
+                            } else {
+                                this
+                            }
+                        })
                 }),
         )
 }
@@ -901,7 +956,7 @@ impl TableRenderContext {
             show_row_borders: table.show_row_borders,
             show_row_hover: table.show_row_hover,
             total_row_count: table.rows.len(),
-            column_widths: table.col_widths.as_ref().map(|widths| widths.lengths(cx)),
+            column_widths: table.column_width_config.widths_to_render(cx),
             map_row: table.map_row.clone(),
             use_ui_font: table.use_ui_font,
             disable_base_cell_style: table.disable_base_cell_style,
@@ -913,48 +968,52 @@ impl RenderOnce for Table {
     fn render(mut self, window: &mut Window, cx: &mut App) -> impl IntoElement {
         let table_context = TableRenderContext::new(&self, cx);
         let interaction_state = self.interaction_state.and_then(|state| state.upgrade());
-        let current_widths = self
-            .col_widths
-            .as_ref()
-            .and_then(|widths| Some((widths.current.as_ref()?, widths.resizable.clone())))
-            .map(|(curr, resize_behavior)| (curr.downgrade(), resize_behavior));
 
-        let current_widths_with_initial_sizes = self
-            .col_widths
+        let header_resize_info = interaction_state
             .as_ref()
-            .and_then(|widths| {
-                Some((
-                    widths.current.as_ref()?,
-                    widths.resizable.clone(),
-                    widths.initial.clone(),
-                ))
-            })
-            .map(|(curr, resize_behavior, initial)| (curr.downgrade(), resize_behavior, initial));
+            .and_then(|_| self.column_width_config.header_resize_info(cx));
 
-        let width = self.width;
+        let table_width = self.column_width_config.table_width();
+        let horizontal_sizing = self.column_width_config.list_horizontal_sizing();
         let no_rows_rendered = self.rows.is_empty();
 
+        // Extract redistributable entity for drag/drop/prepaint handlers
+        let redistributable_entity =
+            interaction_state
+                .as_ref()
+                .and_then(|_| match &self.column_width_config {
+                    ColumnWidthConfig::Redistributable {
+                        columns_state: entity,
+                        ..
+                    } => Some(entity.downgrade()),
+                    _ => None,
+                });
+
+        let resize_handles = interaction_state
+            .as_ref()
+            .and_then(|_| self.column_width_config.render_resize_handles(window, cx));
+
         let table = div()
-            .when_some(width, |this, width| this.w(width))
+            .when_some(table_width, |this, width| this.w(width))
             .h_full()
             .v_flex()
             .when_some(self.headers.take(), |this, headers| {
                 this.child(render_table_header(
                     headers,
                     table_context.clone(),
-                    current_widths_with_initial_sizes,
+                    header_resize_info,
                     interaction_state.as_ref().map(Entity::entity_id),
                     cx,
                 ))
             })
-            .when_some(current_widths, {
-                |this, (widths, resize_behavior)| {
+            .when_some(redistributable_entity, {
+                |this, widths| {
                     this.on_drag_move::<DraggedColumn>({
                         let widths = widths.clone();
                         move |e, window, cx| {
                             widths
                                 .update(cx, |widths, cx| {
-                                    widths.on_drag_move(e, &resize_behavior, window, cx);
+                                    widths.on_drag_move(e, window, cx);
                                 })
                                 .ok();
                         }
@@ -965,7 +1024,7 @@ impl RenderOnce for Table {
                             widths
                                 .update(cx, |widths, _| {
                                     // This works because all children x axis bounds are the same
-                                    widths.cached_bounds_width =
+                                    widths.cached_table_width =
                                         bounds[0].right() - bounds[0].left();
                                 })
                                 .ok();
@@ -974,10 +1033,9 @@ impl RenderOnce for Table {
                     .on_drop::<DraggedColumn>(move |_, _, cx| {
                         widths
                             .update(cx, |widths, _| {
-                                widths.widths = widths.visible_widths.clone();
+                                widths.committed_widths = widths.preview_widths.clone();
                             })
                             .ok();
-                        // Finish the resize operation
                     })
                 }
             })
@@ -1029,11 +1087,7 @@ impl RenderOnce for Table {
                             .size_full()
                             .flex_grow()
                             .with_sizing_behavior(ListSizingBehavior::Auto)
-                            .with_horizontal_sizing_behavior(if width.is_some() {
-                                ListHorizontalSizingBehavior::Unconstrained
-                            } else {
-                                ListHorizontalSizingBehavior::FitList
-                            })
+                            .with_horizontal_sizing_behavior(horizontal_sizing)
                             .when_some(
                                 interaction_state.as_ref(),
                                 |this, state| {
@@ -1063,25 +1117,7 @@ impl RenderOnce for Table {
                             .with_sizing_behavior(ListSizingBehavior::Auto),
                         ),
                     })
-                    .when_some(
-                        self.col_widths.as_ref().zip(interaction_state.as_ref()),
-                        |parent, (table_widths, state)| {
-                            parent.child(state.update(cx, |state, cx| {
-                                let resizable_columns = &table_widths.resizable;
-                                let column_widths = table_widths.lengths(cx);
-                                let columns = table_widths.current.clone();
-                                let initial_sizes = &table_widths.initial;
-                                state.render_resize_handles(
-                                    &column_widths,
-                                    resizable_columns,
-                                    initial_sizes,
-                                    columns,
-                                    window,
-                                    cx,
-                                )
-                            }))
-                        },
-                    );
+                    .when_some(resize_handles, |parent, handles| parent.child(handles));
 
                 if let Some(state) = interaction_state.as_ref() {
                     let scrollbars = state

crates/ui/src/components/data_table/tests.rs 🔗

@@ -82,7 +82,7 @@ mod reset_column_size {
         let cols = initial_sizes.len();
         let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols);
         let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols);
-        let result = TableColumnWidths::reset_to_initial_size(
+        let result = RedistributableColumnsState::reset_to_initial_size(
             column_index,
             TableRow::from_vec(widths, cols),
             TableRow::from_vec(initial_sizes, cols),
@@ -259,7 +259,7 @@ mod drag_handle {
         let distance = distance as f32 / total_1;
 
         let mut widths_table_row = TableRow::from_vec(widths, cols);
-        TableColumnWidths::drag_column_handle(
+        RedistributableColumnsState::drag_column_handle(
             distance,
             column_index,
             &mut widths_table_row,

crates/util/src/command.rs 🔗

@@ -68,6 +68,10 @@ impl Command {
         self
     }
 
+    pub fn get_args(&self) -> impl Iterator<Item = &OsStr> {
+        self.0.get_args()
+    }
+
     pub fn env(&mut self, key: impl AsRef<OsStr>, val: impl AsRef<OsStr>) -> &mut Self {
         self.0.env(key, val);
         self
@@ -129,4 +133,8 @@ impl Command {
     pub async fn status(&mut self) -> std::io::Result<std::process::ExitStatus> {
         self.0.status().await
     }
+
+    pub fn get_program(&self) -> &OsStr {
+        self.0.get_program()
+    }
 }

crates/util/src/command/darwin.rs 🔗

@@ -104,6 +104,10 @@ impl Command {
         self
     }
 
+    pub fn get_args(&self) -> impl Iterator<Item = &OsStr> {
+        self.args.iter().map(|s| s.as_os_str())
+    }
+
     pub fn env(&mut self, key: impl AsRef<OsStr>, val: impl AsRef<OsStr>) -> &mut Self {
         self.envs
             .insert(key.as_ref().to_owned(), Some(val.as_ref().to_owned()));
@@ -217,6 +221,10 @@ impl Command {
         let mut child = self.spawn()?;
         child.status().await
     }
+
+    pub fn get_program(&self) -> &OsStr {
+        self.program.as_os_str()
+    }
 }
 
 #[derive(Debug)]

crates/vim/src/command.rs 🔗

@@ -1348,7 +1348,7 @@ impl Position {
         let snapshot = editor.snapshot(window, cx);
         let target = match self {
             Position::Line { row, offset } => {
-                if let Some(anchor) = editor.active_excerpt(cx).and_then(|(_, buffer, _)| {
+                if let Some(anchor) = editor.active_buffer(cx).and_then(|buffer| {
                     editor.buffer().read(cx).buffer_point_to_anchor(
                         &buffer,
                         Point::new(row.saturating_sub(1), 0),
@@ -2336,7 +2336,7 @@ impl Vim {
             match c {
                 '%' => {
                     self.update_editor(cx, |_, editor, cx| {
-                        if let Some((_, buffer, _)) = editor.active_excerpt(cx)
+                        if let Some(buffer) = editor.active_buffer(cx)
                             && let Some(file) = buffer.read(cx).file()
                             && let Some(local) = file.as_local()
                         {

crates/vim/src/helix.rs 🔗

@@ -648,6 +648,7 @@ impl Vim {
                     self.search = SearchState {
                         direction: searchable::Direction::Next,
                         count: 1,
+                        cmd_f_search: false,
                         prior_selections,
                         prior_operator: self.operator_stack.last().cloned(),
                         prior_mode: self.mode,

crates/vim/src/motion.rs πŸ”—

@@ -1,5 +1,6 @@
 use editor::{
     Anchor, Bias, BufferOffset, DisplayPoint, Editor, MultiBufferOffset, RowExt, ToOffset,
+    ToPoint as _,
     display_map::{DisplayRow, DisplaySnapshot, FoldPoint, ToDisplayPoint},
     movement::{
         self, FindRange, TextLayoutDetails, find_boundary, find_preceding_boundary_display_point,
@@ -11,6 +12,7 @@ use multi_buffer::MultiBufferRow;
 use schemars::JsonSchema;
 use serde::Deserialize;
 use std::{f64, ops::Range};
+
 use workspace::searchable::Direction;
 
 use crate::{
@@ -2340,39 +2342,19 @@ fn start_of_next_sentence(
 
 fn go_to_line(map: &DisplaySnapshot, display_point: DisplayPoint, line: usize) -> DisplayPoint {
     let point = map.display_point_to_point(display_point, Bias::Left);
-    let Some(mut excerpt) = map.buffer_snapshot().excerpt_containing(point..point) else {
+    let snapshot = map.buffer_snapshot();
+    let Some((buffer_snapshot, _)) = snapshot.point_to_buffer_point(point) else {
+        return display_point;
+    };
+
+    let Some(anchor) = snapshot.anchor_in_excerpt(buffer_snapshot.anchor_after(
+        buffer_snapshot.clip_point(Point::new((line - 1) as u32, point.column), Bias::Left),
+    )) else {
         return display_point;
     };
-    let offset = excerpt.buffer().point_to_offset(
-        excerpt
-            .buffer()
-            .clip_point(Point::new((line - 1) as u32, point.column), Bias::Left),
-    );
-    let buffer_range = excerpt.buffer_range();
-    if offset >= buffer_range.start.0 && offset <= buffer_range.end.0 {
-        let point = map
-            .buffer_snapshot()
-            .offset_to_point(excerpt.map_offset_from_buffer(BufferOffset(offset)));
-        return map.clip_point(map.point_to_display_point(point, Bias::Left), Bias::Left);
-    }
-    for (excerpt, buffer, range) in map.buffer_snapshot().excerpts() {
-        let excerpt_range = language::ToOffset::to_offset(&range.context.start, buffer)
-            ..language::ToOffset::to_offset(&range.context.end, buffer);
-        if offset >= excerpt_range.start && offset <= excerpt_range.end {
-            let text_anchor = buffer.anchor_after(offset);
-            let anchor = Anchor::in_buffer(excerpt, text_anchor);
-            return anchor.to_display_point(map);
-        } else if offset <= excerpt_range.start {
-            let anchor = Anchor::in_buffer(excerpt, range.context.start);
-            return anchor.to_display_point(map);
-        }
-    }
 
     map.clip_point(
-        map.point_to_display_point(
-            map.buffer_snapshot().clip_point(point, Bias::Left),
-            Bias::Left,
-        ),
+        map.point_to_display_point(anchor.to_point(snapshot), Bias::Left),
         Bias::Left,
     )
 }

crates/vim/src/normal.rs πŸ”—

@@ -932,7 +932,7 @@ impl Vim {
         Vim::take_forced_motion(cx);
         self.update_editor(cx, |vim, editor, cx| {
             let selection = editor.selections.newest_anchor();
-            let Some((buffer, point, _)) = editor
+            let Some((buffer, point)) = editor
                 .buffer()
                 .read(cx)
                 .point_to_buffer_point(selection.head(), cx)

crates/vim/src/normal/search.rs πŸ”—

@@ -284,6 +284,7 @@ impl Vim {
         self.search = SearchState {
             direction,
             count,
+            cmd_f_search: false,
             prior_selections,
             prior_operator: self.operator_stack.last().cloned(),
             prior_mode,
@@ -298,6 +299,7 @@ impl Vim {
         let current_mode = self.mode;
         self.search = Default::default();
         self.search.prior_mode = current_mode;
+        self.search.cmd_f_search = true;
         cx.propagate();
     }
 
@@ -957,6 +959,45 @@ mod test {
         cx.assert_editor_state("«oneˇ» one one one");
     }
 
+    #[gpui::test]
+    async fn test_non_vim_search_in_vim_mode(cx: &mut gpui::TestAppContext) {
+        let mut cx = VimTestContext::new(cx, true).await;
+        cx.cx.set_state("Λ‡one one one one");
+        cx.run_until_parked();
+        cx.simulate_keystrokes("cmd-f");
+        cx.run_until_parked();
+
+        cx.assert_state("«oneˇ» one one one", Mode::Visual);
+        cx.simulate_keystrokes("enter");
+        cx.run_until_parked();
+        cx.assert_state("one «oneˇ» one one", Mode::Visual);
+        cx.simulate_keystrokes("shift-enter");
+        cx.run_until_parked();
+        cx.assert_state("«oneˇ» one one one", Mode::Visual);
+
+        cx.simulate_keystrokes("escape");
+        cx.run_until_parked();
+        cx.assert_state("«oneˇ» one one one", Mode::Visual);
+    }
+
+    #[gpui::test]
+    async fn test_non_vim_search_in_vim_insert_mode(cx: &mut gpui::TestAppContext) {
+        let mut cx = VimTestContext::new(cx, true).await;
+        cx.set_state("Λ‡one one one one", Mode::Insert);
+        cx.run_until_parked();
+        cx.simulate_keystrokes("cmd-f");
+        cx.run_until_parked();
+
+        cx.assert_state("«oneˇ» one one one", Mode::Insert);
+        cx.simulate_keystrokes("enter");
+        cx.run_until_parked();
+        cx.assert_state("one «oneˇ» one one", Mode::Insert);
+
+        cx.simulate_keystrokes("escape");
+        cx.run_until_parked();
+        cx.assert_state("one «oneˇ» one one", Mode::Insert);
+    }
+
     #[gpui::test]
     async fn test_visual_star_hash(cx: &mut gpui::TestAppContext) {
         let mut cx = NeovimBackedTestContext::new(cx).await;

crates/vim/src/object.rs πŸ”—

@@ -203,33 +203,24 @@ fn find_mini_delimiters(
     is_valid_delimiter: &DelimiterPredicate,
 ) -> Option<Range<DisplayPoint>> {
     let point = map.clip_at_line_end(display_point).to_point(map);
-    let offset = point.to_offset(&map.buffer_snapshot());
+    let offset = map.buffer_snapshot().point_to_offset(point);
 
     let line_range = get_line_range(map, point);
     let visible_line_range = get_visible_line_range(&line_range);
 
     let snapshot = &map.buffer_snapshot();
-    let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
-    let buffer = excerpt.buffer();
-    let buffer_offset = excerpt.map_offset_to_buffer(offset);
 
-    let bracket_filter = |open: Range<usize>, close: Range<usize>| {
-        is_valid_delimiter(buffer, open.start, close.start)
-    };
-
-    // Try to find delimiters in visible range first
     let ranges = map
         .buffer_snapshot()
         .bracket_ranges(visible_line_range)
         .map(|ranges| {
             ranges.filter_map(|(open, close)| {
-                // Convert the ranges from multibuffer space to buffer space as
-                // that is what `is_valid_delimiter` expects, otherwise it might
-                // panic as the values might be out of bounds.
-                let buffer_open = excerpt.map_range_to_buffer(open.clone());
-                let buffer_close = excerpt.map_range_to_buffer(close.clone());
+                let (buffer, buffer_open) =
+                    snapshot.range_to_buffer_range::<MultiBufferOffset>(open.clone())?;
+                let (_, buffer_close) =
+                    snapshot.range_to_buffer_range::<MultiBufferOffset>(close.clone())?;
 
-                if is_valid_delimiter(buffer, buffer_open.start.0, buffer_close.start.0) {
+                if is_valid_delimiter(buffer, buffer_open.start, buffer_close.start) {
                     Some((open, close))
                 } else {
                     None
@@ -247,18 +238,31 @@ fn find_mini_delimiters(
         );
     }
 
-    // Fall back to innermost enclosing brackets
-    let (open_bracket, close_bracket) = buffer
-        .innermost_enclosing_bracket_ranges(buffer_offset..buffer_offset, Some(&bracket_filter))?;
+    let results = snapshot.map_excerpt_ranges(offset..offset, |buffer, _, input_range| {
+        let buffer_offset = input_range.start.0;
+        let bracket_filter = |open: Range<usize>, close: Range<usize>| {
+            is_valid_delimiter(buffer, open.start, close.start)
+        };
+        let Some((open, close)) = buffer.innermost_enclosing_bracket_ranges(
+            buffer_offset..buffer_offset,
+            Some(&bracket_filter),
+        ) else {
+            return vec![];
+        };
+        vec![
+            (BufferOffset(open.start)..BufferOffset(open.end), ()),
+            (BufferOffset(close.start)..BufferOffset(close.end), ()),
+        ]
+    })?;
+
+    if results.len() < 2 {
+        return None;
+    }
 
     Some(
         DelimiterRange {
-            open: excerpt.map_range_from_buffer(
-                BufferOffset(open_bracket.start)..BufferOffset(open_bracket.end),
-            ),
-            close: excerpt.map_range_from_buffer(
-                BufferOffset(close_bracket.start)..BufferOffset(close_bracket.end),
-            ),
+            open: results[0].0.clone(),
+            close: results[1].0.clone(),
         }
         .to_display_range(map, around),
     )
@@ -935,61 +939,64 @@ pub fn surrounding_html_tag(
     }
 
     let snapshot = &map.buffer_snapshot();
-    let offset = head.to_offset(map, Bias::Left);
-    let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
-    let buffer = excerpt.buffer();
-    let offset = excerpt.map_offset_to_buffer(offset);
-
-    // Find the most closest to current offset
-    let mut cursor = buffer.syntax_layer_at(offset)?.node().walk();
-    let mut last_child_node = cursor.node();
-    while cursor.goto_first_child_for_byte(offset.0).is_some() {
-        last_child_node = cursor.node();
-    }
-
-    let mut last_child_node = Some(last_child_node);
-    while let Some(cur_node) = last_child_node {
-        if cur_node.child_count() >= 2 {
-            let first_child = cur_node.child(0);
-            let last_child = cur_node.child(cur_node.child_count() as u32 - 1);
-            if let (Some(first_child), Some(last_child)) = (first_child, last_child) {
-                let open_tag = open_tag(buffer.chars_for_range(first_child.byte_range()));
-                let close_tag = close_tag(buffer.chars_for_range(last_child.byte_range()));
-                // It needs to be handled differently according to the selection length
-                let is_valid = if range.end.to_offset(map, Bias::Left)
-                    - range.start.to_offset(map, Bias::Left)
-                    <= 1
-                {
-                    offset.0 <= last_child.end_byte()
-                } else {
-                    excerpt
-                        .map_offset_to_buffer(range.start.to_offset(map, Bias::Left))
-                        .0
-                        >= first_child.start_byte()
-                        && excerpt
-                            .map_offset_to_buffer(range.end.to_offset(map, Bias::Left))
-                            .0
-                            <= last_child.start_byte() + 1
-                };
-                if open_tag.is_some() && open_tag == close_tag && is_valid {
-                    let range = if around {
-                        first_child.byte_range().start..last_child.byte_range().end
-                    } else {
-                        first_child.byte_range().end..last_child.byte_range().start
-                    };
-                    let range = BufferOffset(range.start)..BufferOffset(range.end);
-                    if excerpt.contains_buffer_range(range.clone()) {
-                        let result = excerpt.map_range_from_buffer(range);
-                        return Some(
-                            result.start.to_display_point(map)..result.end.to_display_point(map),
-                        );
+    let head_offset = head.to_offset(map, Bias::Left);
+    let range_start = range.start.to_offset(map, Bias::Left);
+    let range_end = range.end.to_offset(map, Bias::Left);
+    let head_is_start = head_offset <= range_start;
+
+    let results = snapshot.map_excerpt_ranges(
+        range_start..range_end,
+        |buffer, _excerpt_range, input_buffer_range| {
+            let buffer_offset = if head_is_start {
+                input_buffer_range.start
+            } else {
+                input_buffer_range.end
+            };
+
+            let Some(layer) = buffer.syntax_layer_at(buffer_offset) else {
+                return Vec::new();
+            };
+            let mut cursor = layer.node().walk();
+            let mut last_child_node = cursor.node();
+            while cursor.goto_first_child_for_byte(buffer_offset.0).is_some() {
+                last_child_node = cursor.node();
+            }
+
+            let mut last_child_node = Some(last_child_node);
+            while let Some(cur_node) = last_child_node {
+                if cur_node.child_count() >= 2 {
+                    let first_child = cur_node.child(0);
+                    let last_child = cur_node.child(cur_node.child_count() as u32 - 1);
+                    if let (Some(first_child), Some(last_child)) = (first_child, last_child) {
+                        let open_tag = open_tag(buffer.chars_for_range(first_child.byte_range()));
+                        let close_tag = close_tag(buffer.chars_for_range(last_child.byte_range()));
+                        let is_valid = if range_end.saturating_sub(range_start) <= 1 {
+                            buffer_offset.0 <= last_child.end_byte()
+                        } else {
+                            input_buffer_range.start.0 >= first_child.start_byte()
+                                && input_buffer_range.end.0 <= last_child.start_byte() + 1
+                        };
+                        if open_tag.is_some() && open_tag == close_tag && is_valid {
+                            let buffer_range = if around {
+                                first_child.byte_range().start..last_child.byte_range().end
+                            } else {
+                                first_child.byte_range().end..last_child.byte_range().start
+                            };
+                            return vec![(
+                                BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end),
+                                (),
+                            )];
+                        }
                     }
                 }
+                last_child_node = cur_node.parent();
             }
-        }
-        last_child_node = cur_node.parent();
-    }
-    None
+            Vec::new()
+        },
+    )?;
+
+    let (result, ()) = results.into_iter().next()?;
+    Some(result.start.to_display_point(map)..result.end.to_display_point(map))
 }
 
 /// Returns a range that surrounds the word and following whitespace
@@ -1163,44 +1170,55 @@ fn text_object(
     let snapshot = &map.buffer_snapshot();
     let offset = relative_to.to_offset(map, Bias::Left);
 
-    let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
-    let buffer = excerpt.buffer();
-    let offset = excerpt.map_offset_to_buffer(offset);
-
-    let mut matches: Vec<Range<usize>> = buffer
-        .text_object_ranges(offset..offset, TreeSitterOptions::default())
-        .filter_map(|(r, m)| if m == target { Some(r) } else { None })
-        .collect();
-    matches.sort_by_key(|r| r.end - r.start);
-    if let Some(buffer_range) = matches.first() {
-        let buffer_range = BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end);
-        let range = excerpt.map_range_from_buffer(buffer_range);
-        return Some(range.start.to_display_point(map)..range.end.to_display_point(map));
-    }
-
-    let around = target.around()?;
-    let mut matches: Vec<Range<usize>> = buffer
-        .text_object_ranges(offset..offset, TreeSitterOptions::default())
-        .filter_map(|(r, m)| if m == around { Some(r) } else { None })
-        .collect();
-    matches.sort_by_key(|r| r.end - r.start);
-    let around_range = matches.first()?;
-
-    let mut matches: Vec<Range<usize>> = buffer
-        .text_object_ranges(around_range.clone(), TreeSitterOptions::default())
-        .filter_map(|(r, m)| if m == target { Some(r) } else { None })
-        .collect();
-    matches.sort_by_key(|r| r.start);
-    if let Some(buffer_range) = matches.first()
-        && !buffer_range.is_empty()
-    {
-        let buffer_range = BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end);
-        let range = excerpt.map_range_from_buffer(buffer_range);
-        return Some(range.start.to_display_point(map)..range.end.to_display_point(map));
-    }
-    let around_range = BufferOffset(around_range.start)..BufferOffset(around_range.end);
-    let buffer_range = excerpt.map_range_from_buffer(around_range);
-    return Some(buffer_range.start.to_display_point(map)..buffer_range.end.to_display_point(map));
+    let results =
+        snapshot.map_excerpt_ranges(offset..offset, |buffer, _excerpt_range, buffer_range| {
+            let buffer_offset = buffer_range.start;
+
+            let mut matches: Vec<Range<usize>> = buffer
+                .text_object_ranges(buffer_offset..buffer_offset, TreeSitterOptions::default())
+                .filter_map(|(r, m)| if m == target { Some(r) } else { None })
+                .collect();
+            matches.sort_by_key(|r| r.end - r.start);
+            if let Some(buffer_range) = matches.first() {
+                return vec![(
+                    BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end),
+                    (),
+                )];
+            }
+
+            let Some(around) = target.around() else {
+                return vec![];
+            };
+            let mut matches: Vec<Range<usize>> = buffer
+                .text_object_ranges(buffer_offset..buffer_offset, TreeSitterOptions::default())
+                .filter_map(|(r, m)| if m == around { Some(r) } else { None })
+                .collect();
+            matches.sort_by_key(|r| r.end - r.start);
+            let Some(around_range) = matches.first() else {
+                return vec![];
+            };
+
+            let mut matches: Vec<Range<usize>> = buffer
+                .text_object_ranges(around_range.clone(), TreeSitterOptions::default())
+                .filter_map(|(r, m)| if m == target { Some(r) } else { None })
+                .collect();
+            matches.sort_by_key(|r| r.start);
+            if let Some(buffer_range) = matches.first()
+                && !buffer_range.is_empty()
+            {
+                return vec![(
+                    BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end),
+                    (),
+                )];
+            }
+            vec![(
+                BufferOffset(around_range.start)..BufferOffset(around_range.end),
+                (),
+            )]
+        })?;
+
+    let (range, ()) = results.into_iter().next()?;
+    Some(range.start.to_display_point(map)..range.end.to_display_point(map))
 }
 
 fn argument(
@@ -1211,16 +1229,11 @@ fn argument(
     let snapshot = &map.buffer_snapshot();
     let offset = relative_to.to_offset(map, Bias::Left);
 
-    // The `argument` vim text object uses the syntax tree, so we operate at the buffer level and map back to the display level
-    let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
-    let buffer = excerpt.buffer();
-
     fn comma_delimited_range_at(
         buffer: &BufferSnapshot,
         mut offset: BufferOffset,
         include_comma: bool,
     ) -> Option<Range<BufferOffset>> {
-        // Seek to the first non-whitespace character
         offset += buffer
             .chars_at(offset)
             .take_while(|c| c.is_whitespace())
@@ -1228,25 +1241,20 @@ fn argument(
             .sum::<usize>();
 
         let bracket_filter = |open: Range<usize>, close: Range<usize>| {
-            // Filter out empty ranges
             if open.end == close.start {
                 return false;
             }
 
-            // If the cursor is outside the brackets, ignore them
             if open.start == offset.0 || close.end == offset.0 {
                 return false;
             }
 
-            // TODO: Is there any better way to filter out string brackets?
-            // Used to filter out string brackets
             matches!(
                 buffer.chars_at(open.start).next(),
                 Some('(' | '[' | '{' | '<' | '|')
             )
         };
 
-        // Find the brackets containing the cursor
         let (open_bracket, close_bracket) =
             buffer.innermost_enclosing_bracket_ranges(offset..offset, Some(&bracket_filter))?;
 
@@ -1256,7 +1264,6 @@ fn argument(
         let node = layer.node();
         let mut cursor = node.walk();
 
-        // Loop until we find the smallest node whose parent covers the bracket range. This node is the argument in the parent argument list
         let mut parent_covers_bracket_range = false;
         loop {
             let node = cursor.node();
@@ -1268,20 +1275,17 @@ fn argument(
             }
             parent_covers_bracket_range = covers_bracket_range;
 
-            // Unable to find a child node with a parent that covers the bracket range, so no argument to select
             cursor.goto_first_child_for_byte(offset.0)?;
         }
 
         let mut argument_node = cursor.node();
 
-        // If the child node is the open bracket, move to the next sibling.
         if argument_node.byte_range() == open_bracket {
             if !cursor.goto_next_sibling() {
                 return Some(inner_bracket_range);
             }
             argument_node = cursor.node();
         }
-        // While the child node is the close bracket or a comma, move to the previous sibling
         while argument_node.byte_range() == close_bracket || argument_node.kind() == "," {
             if !cursor.goto_previous_sibling() {
                 return Some(inner_bracket_range);
@@ -1292,14 +1296,11 @@ fn argument(
             }
         }
 
-        // The start and end of the argument range, defaulting to the start and end of the argument node
         let mut start = argument_node.start_byte();
         let mut end = argument_node.end_byte();
 
         let mut needs_surrounding_comma = include_comma;
 
-        // Seek backwards to find the start of the argument - either the previous comma or the opening bracket.
-        // We do this because multiple nodes can represent a single argument, such as with rust `vec![a.b.c, d.e.f]`
         while cursor.goto_previous_sibling() {
             let prev = cursor.node();
 
@@ -1317,7 +1318,6 @@ fn argument(
             }
         }
 
-        // Do the same for the end of the argument, extending to next comma or the end of the argument list
         while cursor.goto_next_sibling() {
             let next = cursor.node();
 
@@ -1326,7 +1326,6 @@ fn argument(
                 break;
             } else if next.kind() == "," {
                 if needs_surrounding_comma {
-                    // Select up to the beginning of the next argument if there is one, otherwise to the end of the comma
                     if let Some(next_arg) = next.next_sibling() {
                         end = next_arg.start_byte();
                     } else {
@@ -1342,14 +1341,17 @@ fn argument(
         Some(BufferOffset(start)..BufferOffset(end))
     }
 
-    let result = comma_delimited_range_at(buffer, excerpt.map_offset_to_buffer(offset), around)?;
+    let results =
+        snapshot.map_excerpt_ranges(offset..offset, |buffer, _excerpt_range, buffer_range| {
+            let buffer_offset = buffer_range.start;
+            match comma_delimited_range_at(buffer, buffer_offset, around) {
+                Some(result) => vec![(result, ())],
+                None => vec![],
+            }
+        })?;
 
-    if excerpt.contains_buffer_range(result.clone()) {
-        let result = excerpt.map_range_from_buffer(result);
-        Some(result.start.to_display_point(map)..result.end.to_display_point(map))
-    } else {
-        None
-    }
+    let (range, ()) = results.into_iter().next()?;
+    Some(range.start.to_display_point(map)..range.end.to_display_point(map))
 }
 
 fn indent(
@@ -3369,7 +3371,12 @@ mod test {
             // but, since this is being set manually, the language isn't
             // automatically set.
             let editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx);
-            let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids();
+            let buffer_ids = multi_buffer
+                .read(cx)
+                .snapshot(cx)
+                .excerpts()
+                .map(|excerpt| excerpt.context.start.buffer_id)
+                .collect::<Vec<_>>();
             if let Some(buffer) = multi_buffer.read(cx).buffer(buffer_ids[1]) {
                 buffer.update(cx, |buffer, cx| {
                     buffer.set_language(Some(language::rust_lang()), cx);

crates/vim/src/state.rs πŸ”—

@@ -426,7 +426,7 @@ impl MarksState {
                             name.clone(),
                             buffer
                                 .read(cx)
-                                .summaries_for_anchors::<Point, _>(anchors)
+                                .summaries_for_anchors::<Point, _>(anchors.iter().copied())
                                 .collect(),
                         )
                     })
@@ -492,7 +492,14 @@ impl MarksState {
         {
             let buffer_marks = old_marks
                 .into_iter()
-                .map(|(k, v)| (k, v.into_iter().map(|anchor| anchor.text_anchor).collect()))
+                .map(|(k, v)| {
+                    (
+                        k,
+                        v.into_iter()
+                            .filter_map(|anchor| anchor.raw_text_anchor())
+                            .collect(),
+                    )
+                })
                 .collect();
             self.buffer_marks
                 .insert(buffer.read(cx).remote_id(), buffer_marks);
@@ -569,6 +576,7 @@ impl MarksState {
         anchors: Vec<Anchor>,
         cx: &mut Context<Self>,
     ) {
+        let multibuffer_snapshot = multibuffer.read(cx).snapshot(cx);
         let buffer = multibuffer.read(cx).as_singleton();
         let abs_path = buffer.as_ref().and_then(|b| self.path_for_buffer(b, cx));
 
@@ -602,7 +610,7 @@ impl MarksState {
             name.clone(),
             anchors
                 .into_iter()
-                .map(|anchor| anchor.text_anchor)
+                .filter_map(|anchor| Some(multibuffer_snapshot.anchor_to_buffer_anchor(anchor)?.0))
                 .collect(),
         );
         if !self.watched_buffers.contains_key(&buffer_id) {
@@ -629,12 +637,13 @@ impl MarksState {
                 return Some(Mark::Local(anchors.get(name)?.clone()));
             }
 
-            let (excerpt_id, buffer_id, _) = multi_buffer.read(cx).read(cx).as_singleton()?;
-            if let Some(anchors) = self.buffer_marks.get(&buffer_id) {
+            let multibuffer_snapshot = multi_buffer.read(cx).snapshot(cx);
+            let buffer_snapshot = multibuffer_snapshot.as_singleton()?;
+            if let Some(anchors) = self.buffer_marks.get(&buffer_snapshot.remote_id()) {
                 let text_anchors = anchors.get(name)?;
                 let anchors = text_anchors
                     .iter()
-                    .map(|anchor| Anchor::in_buffer(excerpt_id, *anchor))
+                    .filter_map(|anchor| multibuffer_snapshot.anchor_in_excerpt(*anchor))
                     .collect();
                 return Some(Mark::Local(anchors));
             }
@@ -895,14 +904,13 @@ impl VimGlobals {
                 }
             }
             '%' => editor.and_then(|editor| {
-                let selection = editor
-                    .selections
-                    .newest::<Point>(&editor.display_snapshot(cx));
-                if let Some((_, buffer, _)) = editor
-                    .buffer()
-                    .read(cx)
-                    .excerpt_containing(selection.head(), cx)
-                {
+                let multibuffer = editor.buffer().read(cx);
+                let snapshot = multibuffer.snapshot(cx);
+                let selection = editor.selections.newest_anchor();
+                let buffer = snapshot
+                    .anchor_to_buffer_anchor(selection.head())
+                    .and_then(|(text_anchor, _)| multibuffer.buffer(text_anchor.buffer_id));
+                if let Some(buffer) = buffer {
                     buffer
                         .read(cx)
                         .file()
@@ -1022,6 +1030,7 @@ impl Clone for ReplayableAction {
 pub struct SearchState {
     pub direction: Direction,
     pub count: usize,
+    pub cmd_f_search: bool,
 
     pub prior_selections: Vec<Range<Anchor>>,
     pub prior_operator: Option<Operator>,

crates/vim/src/test.rs πŸ”—

@@ -2117,7 +2117,12 @@ async fn test_folded_multibuffer_excerpts(cx: &mut gpui::TestAppContext) {
         );
         let mut editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx);
 
-        let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids();
+        let buffer_ids = multi_buffer
+            .read(cx)
+            .snapshot(cx)
+            .excerpts()
+            .map(|excerpt| excerpt.context.start.buffer_id)
+            .collect::<Vec<_>>();
         // fold all but the second buffer, so that we test navigating between two
         // adjacent folded buffers, as well as folded buffers at the start and
         // end the multibuffer
@@ -2262,7 +2267,13 @@ async fn test_folded_multibuffer_excerpts(cx: &mut gpui::TestAppContext) {
         "
     });
     cx.update_editor(|editor, _, cx| {
-        let buffer_ids = editor.buffer().read(cx).excerpt_buffer_ids();
+        let buffer_ids = editor
+            .buffer()
+            .read(cx)
+            .snapshot(cx)
+            .excerpts()
+            .map(|excerpt| excerpt.context.start.buffer_id)
+            .collect::<Vec<_>>();
         editor.fold_buffer(buffer_ids[1], cx);
     });
 

crates/vim/src/vim.rs πŸ”—

@@ -432,8 +432,12 @@ pub fn init(cx: &mut App) {
                 .and_then(|item| item.act_as::<Editor>(cx))
                 .and_then(|editor| editor.read(cx).addon::<VimAddon>().cloned());
             let Some(vim) = vim else { return };
-            vim.entity.update(cx, |_, cx| {
-                cx.defer_in(window, |vim, window, cx| vim.search_submit(window, cx))
+            vim.entity.update(cx, |vim, cx| {
+                if !vim.search.cmd_f_search {
+                    cx.defer_in(window, |vim, window, cx| vim.search_submit(window, cx))
+                } else {
+                    cx.propagate()
+                }
             })
         });
         workspace.register_action(|_, _: &GoToTab, window, cx| {
@@ -2086,7 +2090,7 @@ impl Vim {
         VimEditorSettingsState {
             cursor_shape: self.cursor_shape(cx),
             clip_at_line_ends: self.clip_at_line_ends(),
-            collapse_matches: !HelixModeSetting::get_global(cx).0,
+            collapse_matches: !HelixModeSetting::get_global(cx).0 && !self.search.cmd_f_search,
             input_enabled: self.editor_input_enabled(),
             expects_character_input: self.expects_character_input(),
             autoindent: self.should_autoindent(),

crates/workspace/src/notifications.rs πŸ”—

@@ -5,7 +5,7 @@ use gpui::{
     DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, PromptLevel, Render, ScrollHandle,
     Task, TextStyleRefinement, UnderlineStyle, WeakEntity, svg,
 };
-use markdown::{Markdown, MarkdownElement, MarkdownStyle};
+use markdown::{CopyButtonVisibility, Markdown, MarkdownElement, MarkdownStyle};
 use parking_lot::Mutex;
 use project::project_settings::ProjectSettings;
 use settings::Settings;
@@ -401,8 +401,7 @@ impl Render for LanguageServerPrompt {
                         MarkdownElement::new(self.markdown.clone(), markdown_style(window, cx))
                             .text_size(TextSize::Small.rems(cx))
                             .code_block_renderer(markdown::CodeBlockRenderer::Default {
-                                copy_button: false,
-                                copy_button_on_hover: false,
+                                copy_button_visibility: CopyButtonVisibility::Hidden,
                                 border: false,
                             })
                             .on_url_click(|link, _, cx| cx.open_url(&link)),
@@ -1227,10 +1226,8 @@ where
                     let mut display = format!("{err:#}");
                     if !display.ends_with('\n') {
                         display.push('.');
-                        display.push(' ')
                     }
-                    let detail =
-                        f(err, window, cx).unwrap_or_else(|| format!("{display}Please try again."));
+                    let detail = f(err, window, cx).unwrap_or(display);
                     window.prompt(PromptLevel::Critical, &msg, Some(&detail), &["Ok"], cx)
                 }) {
                     prompt.await.ok();

crates/workspace/src/pane_group.rs 🔗

@@ -1,6 +1,7 @@
 use crate::{
     AnyActiveCall, AppState, CollaboratorId, FollowerState, Pane, ParticipantLocation, Workspace,
     WorkspaceSettings,
+    notifications::DetachAndPromptErr,
     pane_group::element::pane_axis,
     workspace_settings::{PaneSplitDirectionHorizontal, PaneSplitDirectionVertical},
 };
@@ -438,14 +439,19 @@ impl PaneLeaderDecorator for PaneRenderContext<'_> {
                                 let app_state = self.app_state.clone();
                                 this.cursor_pointer().on_mouse_down(
                                     MouseButton::Left,
-                                    move |_, _, cx| {
+                                    move |_, window, cx| {
                                         crate::join_in_room_project(
                                             leader_project_id,
                                             leader_user_id,
                                             app_state.clone(),
                                             cx,
                                         )
-                                        .detach_and_log_err(cx);
+                                        .detach_and_prompt_err(
+                                            "Failed to join project",
+                                            window,
+                                            cx,
+                                            |error, _, _| Some(format!("{error:#}")),
+                                        );
                                     },
                                 )
                             },

crates/workspace/src/persistence.rs 🔗

@@ -971,6 +971,9 @@ impl Domain for WorkspaceDb {
         sql!(
             ALTER TABLE remote_connections ADD COLUMN use_podman BOOLEAN;
         ),
+        sql!(
+            ALTER TABLE remote_connections ADD COLUMN remote_env TEXT;
+        ),
     ];
 
     // Allow recovering from bad migration that was initially shipped to nightly
@@ -1500,6 +1503,7 @@ impl WorkspaceDb {
         let mut name = None;
         let mut container_id = None;
         let mut use_podman = None;
+        let mut remote_env = None;
         match options {
             RemoteConnectionOptions::Ssh(options) => {
                 kind = RemoteConnectionKind::Ssh;
@@ -1518,6 +1522,7 @@ impl WorkspaceDb {
                 name = Some(options.name);
                 use_podman = Some(options.use_podman);
                 user = Some(options.remote_user);
+                remote_env = serde_json::to_string(&options.remote_env).ok();
             }
             #[cfg(any(test, feature = "test-support"))]
             RemoteConnectionOptions::Mock(options) => {
@@ -1536,6 +1541,7 @@ impl WorkspaceDb {
             name,
             container_id,
             use_podman,
+            remote_env,
         )
     }
 
@@ -1549,6 +1555,7 @@ impl WorkspaceDb {
         name: Option<String>,
         container_id: Option<String>,
         use_podman: Option<bool>,
+        remote_env: Option<String>,
     ) -> Result<RemoteConnectionId> {
         if let Some(id) = this.select_row_bound(sql!(
             SELECT id
@@ -1582,8 +1589,9 @@ impl WorkspaceDb {
                     distro,
                     name,
                     container_id,
-                    use_podman
-                ) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)
+                    use_podman,
+                    remote_env
+                    ) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9)
                 RETURNING id
             ))?((
                 kind.serialize(),
@@ -1594,6 +1602,7 @@ impl WorkspaceDb {
                 name,
                 container_id,
                 use_podman,
+                remote_env,
             ))?
             .context("failed to insert remote project")?;
             Ok(RemoteConnectionId(id))
@@ -1695,13 +1704,13 @@ impl WorkspaceDb {
     fn remote_connections(&self) -> Result<HashMap<RemoteConnectionId, RemoteConnectionOptions>> {
         Ok(self.select(sql!(
             SELECT
-                id, kind, host, port, user, distro, container_id, name, use_podman
+                id, kind, host, port, user, distro, container_id, name, use_podman, remote_env
             FROM
                 remote_connections
         ))?()?
         .into_iter()
         .filter_map(
-            |(id, kind, host, port, user, distro, container_id, name, use_podman)| {
+            |(id, kind, host, port, user, distro, container_id, name, use_podman, remote_env)| {
                 Some((
                     RemoteConnectionId(id),
                     Self::remote_connection_from_row(
@@ -1713,6 +1722,7 @@ impl WorkspaceDb {
                         container_id,
                         name,
                         use_podman,
+                        remote_env,
                     )?,
                 ))
             },
@@ -1724,9 +1734,9 @@ impl WorkspaceDb {
         &self,
         id: RemoteConnectionId,
     ) -> Result<RemoteConnectionOptions> {
-        let (kind, host, port, user, distro, container_id, name, use_podman) =
+        let (kind, host, port, user, distro, container_id, name, use_podman, remote_env) =
             self.select_row_bound(sql!(
-                SELECT kind, host, port, user, distro, container_id, name, use_podman
+                SELECT kind, host, port, user, distro, container_id, name, use_podman, remote_env
                 FROM remote_connections
                 WHERE id = ?
             ))?(id.0)?
@@ -1740,6 +1750,7 @@ impl WorkspaceDb {
             container_id,
             name,
             use_podman,
+            remote_env,
         )
         .context("invalid remote_connection row")
     }
@@ -1753,6 +1764,7 @@ impl WorkspaceDb {
         container_id: Option<String>,
         name: Option<String>,
         use_podman: Option<bool>,
+        remote_env: Option<String>,
     ) -> Option<RemoteConnectionOptions> {
         match RemoteConnectionKind::deserialize(&kind)? {
             RemoteConnectionKind::Wsl => Some(RemoteConnectionOptions::Wsl(WslConnectionOptions {
@@ -1766,12 +1778,15 @@ impl WorkspaceDb {
                 ..Default::default()
             })),
             RemoteConnectionKind::Docker => {
+                let remote_env: BTreeMap<String, String> =
+                    serde_json::from_str(&remote_env?).ok()?;
                 Some(RemoteConnectionOptions::Docker(DockerConnectionOptions {
                     container_id: container_id?,
                     name: name?,
                     remote_user: user?,
                     upload_binary_over_docker_exec: false,
                     use_podman: use_podman?,
+                    remote_env,
                 }))
             }
         }

crates/workspace/src/workspace.rs 🔗

@@ -5528,7 +5528,9 @@ impl Workspace {
             if let Some(project_id) = other_project_id {
                 let app_state = self.app_state.clone();
                 crate::join_in_room_project(project_id, remote_participant.user.id, app_state, cx)
-                    .detach_and_log_err(cx);
+                    .detach_and_prompt_err("Failed to join project", window, cx, |error, _, _| {
+                        Some(format!("{error:#}"))
+                    });
             }
         }
 

crates/zed/Cargo.toml 🔗

@@ -2,7 +2,7 @@
 description = "The fast, collaborative code editor."
 edition.workspace = true
 name = "zed"
-version = "0.231.0"
+version = "0.232.0"
 publish.workspace = true
 license = "GPL-3.0-or-later"
 authors = ["Zed Team <hi@zed.dev>"]

crates/zed/src/visual_test_runner.rs 🔗

@@ -109,7 +109,7 @@ use {
     image::RgbaImage,
     project::{AgentId, Project},
     project_panel::ProjectPanel,
-    settings::{NotifyWhenAgentWaiting, Settings as _},
+    settings::{NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, Settings as _},
     settings_ui::SettingsWindow,
     std::{
         any::Any,
@@ -231,7 +231,7 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()>
         agent_settings::AgentSettings::override_global(
             agent_settings::AgentSettings {
                 notify_when_agent_waiting: NotifyWhenAgentWaiting::Never,
-                play_sound_when_agent_done: false,
+                play_sound_when_agent_done: PlaySoundWhenAgentDone::Never,
                 ..agent_settings::AgentSettings::get_global(cx).clone()
             },
             cx,

crates/zed/src/zed/telemetry_log.rs 🔗

@@ -12,7 +12,7 @@ use gpui::{
     StyleRefinement, Task, TextStyleRefinement, Window, list, prelude::*,
 };
 use language::LanguageRegistry;
-use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle};
+use markdown::{CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownStyle};
 use project::Project;
 use settings::Settings;
 use telemetry_events::{Event, EventWrapper};
@@ -424,8 +424,11 @@ impl TelemetryLogView {
                             },
                         )
                         .code_block_renderer(CodeBlockRenderer::Default {
-                            copy_button: false,
-                            copy_button_on_hover: expanded,
+                            copy_button_visibility: if expanded {
+                                CopyButtonVisibility::VisibleOnHover
+                            } else {
+                                CopyButtonVisibility::Hidden
+                            },
                             border: false,
                         }),
                     ),

docs/src/ai/agent-settings.md 🔗

@@ -292,13 +292,16 @@ The default value is `false`.
 
 ### Sound Notification
 
-Control whether to hear a notification sound when the agent is done generating changes or needs your input.
-The default value is `false`.
+Control whether to hear a notification sound when the agent is done generating changes or needs your input. The default value is `never`.
+
+- `"never"` (default) β€” Never play the sound.
+- `"when_hidden"` β€” Only play the sound when the agent panel is not visible.
+- `"always"` β€” Always play the sound on completion.
 
 ```json [settings]
 {
   "agent": {
-    "play_sound_when_agent_done": true
+    "play_sound_when_agent_done": "never"
   }
 }
 ```

flake.lock 🔗

@@ -2,11 +2,11 @@
   "nodes": {
     "crane": {
       "locked": {
-        "lastModified": 1774313767,
-        "narHash": "sha256-hy0XTQND6avzGEUFrJtYBBpFa/POiiaGBr2vpU6Y9tY=",
+        "lastModified": 1769737823,
+        "narHash": "sha256-DrBaNpZ+sJ4stXm+0nBX7zqZT9t9P22zbk6m5YhQxS4=",
         "owner": "ipetkov",
         "repo": "crane",
-        "rev": "3d9df76e29656c679c744968b17fbaf28f0e923d",
+        "rev": "b2f45c3830aa96b7456a4c4bc327d04d7a43e1ba",
         "type": "github"
       },
       "original": {
@@ -20,11 +20,11 @@
         "nixpkgs-lib": "nixpkgs-lib"
       },
       "locked": {
-        "lastModified": 1772408722,
-        "narHash": "sha256-rHuJtdcOjK7rAHpHphUb1iCvgkU3GpfvicLMwwnfMT0=",
+        "lastModified": 1769996383,
+        "narHash": "sha256-AnYjnFWgS49RlqX7LrC4uA+sCCDBj0Ry/WOJ5XWAsa0=",
         "owner": "hercules-ci",
         "repo": "flake-parts",
-        "rev": "f20dc5d9b8027381c474144ecabc9034d6a839a3",
+        "rev": "57928607ea566b5db3ad13af0e57e921e6b12381",
         "type": "github"
       },
       "original": {
@@ -35,11 +35,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1774709303,
-        "narHash": "sha256-D3Q07BbIA2KnTcSXIqqu9P586uWxN74zNoCH3h2ESHg=",
+        "lastModified": 1769789167,
+        "narHash": "sha256-kKB3bqYJU5nzYeIROI82Ef9VtTbu4uA3YydSk/Bioa8=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "8110df5ad7abf5d4c0f6fb0f8f978390e77f9685",
+        "rev": "62c8382960464ceb98ea593cb8321a2cf8f9e3e5",
         "type": "github"
       },
       "original": {
@@ -51,11 +51,11 @@
     },
     "nixpkgs-lib": {
       "locked": {
-        "lastModified": 1772328832,
-        "narHash": "sha256-e+/T/pmEkLP6BHhYjx6GmwP5ivonQQn0bJdH9YrRB+Q=",
+        "lastModified": 1769909678,
+        "narHash": "sha256-cBEymOf4/o3FD5AZnzC3J9hLbiZ+QDT/KDuyHXVJOpM=",
         "owner": "nix-community",
         "repo": "nixpkgs.lib",
-        "rev": "c185c7a5e5dd8f9add5b2f8ebeff00888b070742",
+        "rev": "72716169fe93074c333e8d0173151350670b824c",
         "type": "github"
       },
       "original": {
@@ -79,11 +79,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1774840424,
-        "narHash": "sha256-3Oi4mBKzOCFQYLUyEjyc0s5cnlNj1MzmhpVKoLptpe8=",
+        "lastModified": 1775013181,
+        "narHash": "sha256-zPrt6oNM1r/RO5bWYaZ3hthfG9vzkr6kQdoqDd5x4Qw=",
         "owner": "oxalica",
         "repo": "rust-overlay",
-        "rev": "d9f52b51548e76ab8b6e7d647763047ebdec835c",
+        "rev": "e8046c1d9ccadd497c2344d8fa49dab62f22f7be",
         "type": "github"
       },
       "original": {

nix/build.nix 🔗

@@ -38,6 +38,8 @@
   libxfixes,
   libxkbcommon,
   libxrandr,
+  libx11,
+  libxcb,
   nodejs_22,
   openssl,
   perl,
@@ -181,8 +183,8 @@ let
         wayland
         gpu-lib
         libglvnd
-        xorg.libX11
-        xorg.libxcb
+        libx11
+        libxcb
         libdrm
         libgbm
         libva

nix/livekit-libwebrtc/package.nix 🔗

@@ -37,6 +37,8 @@
   libxfixes,
   libxrandr,
   libxtst,
+  libx11,
+  libxi,
   pipewire,
   xorg,
 }:
@@ -224,8 +226,8 @@ stdenv.mkDerivation {
     libxrandr
     libxtst
     pipewire
-    xorg.libX11
-    xorg.libXi
+    libx11
+    libxi
   ]);
 
   preConfigure = ''