diff --git a/Cargo.lock b/Cargo.lock index 9dd3c00221b171cbe821d38e966f1d54f55d9f99..9fc4824b84e426a58e424b3aed997d535fca64b5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4729,6 +4729,9 @@ dependencies = [ name = "dev_container" version = "0.1.0" dependencies = [ + "async-tar", + "async-trait", + "env_logger 0.11.8", "fs", "futures 0.3.31", "gpui", @@ -4736,16 +4739,17 @@ dependencies = [ "http_client", "log", "menu", - "node_runtime", "paths", "picker", "project", "serde", "serde_json", + "serde_json_lenient", "settings", - "smol", + "shlex", "ui", "util", + "walkdir", "workspace", "worktree", ] @@ -9330,7 +9334,6 @@ dependencies = [ "schemars", "serde", "serde_json", - "settings", "smol", "thiserror 2.0.17", "util", @@ -21890,7 +21893,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.231.0" +version = "0.232.0" dependencies = [ "acp_thread", "acp_tools", diff --git a/assets/settings/default.json b/assets/settings/default.json index 57bad245474b9469a0a9b9d5674c692059f039af..d8286685b502fea9d531d4f631f06c979c985be0 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1102,11 +1102,14 @@ // "all_screens" - Show these notifications on all screens // "never" - Never show these notifications "notify_when_agent_waiting": "primary_screen", - // Whether to play a sound when the agent has either completed + // When to play a sound when the agent has either completed // its response, or needs user input. - - // Default: false - "play_sound_when_agent_done": false, + // "never" - Never play the sound + // "when_hidden" - Only play the sound when the agent panel is not visible + // "always" - Always play the sound + // + // Default: never + "play_sound_when_agent_done": "never", // Whether to have edit cards in the agent panel expanded, showing a preview of the full diff. // // Default: true @@ -1117,8 +1120,8 @@ "expand_terminal_card": true, // How thinking blocks should be displayed by default in the agent panel. 
// - // Default: automatic - "thinking_display": "automatic", + // Default: auto + "thinking_display": "auto", // Whether clicking the stop button on a running terminal tool should also cancel the agent's generation. // Note that this only applies to the stop button, not to ctrl+c inside the terminal. // diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 937592b8a94df00ca1c7565d43893b99693f8892..0bcb8254c8b8123eef3faaa913bb360de8dcc76d 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -2616,7 +2616,7 @@ impl AcpThread { text_diff(old_text.as_str(), &content) .into_iter() .map(|(range, replacement)| { - (snapshot.anchor_range_around(range), replacement) + (snapshot.anchor_range_inside(range), replacement) }) .collect::>() }) diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs index 08b1b9bdf24d1ff9980164c1af8b3e60bd2f3339..a6d3b86db7c980bb5e4e5a8cacee95abeaabc3f1 100644 --- a/crates/acp_thread/src/diff.rs +++ b/crates/acp_thread/src/diff.rs @@ -191,7 +191,7 @@ impl Diff { } pub fn has_revealed_range(&self, cx: &App) -> bool { - self.multibuffer().read(cx).paths().next().is_some() + !self.multibuffer().read(cx).is_empty() } pub fn needs_update(&self, old_text: &str, new_text: &str, cx: &App) -> bool { diff --git a/crates/acp_tools/src/acp_tools.rs b/crates/acp_tools/src/acp_tools.rs index 52a9d03f893d0b82bf6395b4c96bc9ebe14d3afe..ae8a39c8df4f73ae8be6b748694dbde5d2a0c102 100644 --- a/crates/acp_tools/src/acp_tools.rs +++ b/crates/acp_tools/src/acp_tools.rs @@ -13,7 +13,7 @@ use gpui::{ StyleRefinement, Subscription, Task, TextStyleRefinement, Window, actions, list, prelude::*, }; use language::LanguageRegistry; -use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle}; +use markdown::{CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownStyle}; use project::{AgentId, Project}; use settings::Settings; use 
theme_settings::ThemeSettings; @@ -384,8 +384,11 @@ impl AcpTools { ) .code_block_renderer( CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: expanded, + copy_button_visibility: if expanded { + CopyButtonVisibility::VisibleOnHover + } else { + CopyButtonVisibility::Hidden + }, border: false, }, ), diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs index 3faf767c7020763eadc7db6c93af42f650a07434..1f17d38f7d2a2770350026f2f145a53723ef7481 100644 --- a/crates/action_log/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -738,6 +738,7 @@ impl ActionLog { let task = if let Some(existing_file_content) = existing_file_content { // Capture the agent's content before restoring existing file content let agent_content = buffer.read(cx).text(); + let buffer_id = buffer.read(cx).remote_id(); buffer.update(cx, |buffer, cx| { buffer.start_transaction(); @@ -750,7 +751,10 @@ impl ActionLog { undo_info = Some(PerBufferUndo { buffer: buffer.downgrade(), - edits_to_restore: vec![(Anchor::MIN..Anchor::MAX, agent_content)], + edits_to_restore: vec![( + Anchor::min_for_buffer(buffer_id)..Anchor::max_for_buffer(buffer_id), + agent_content, + )], status: UndoBufferStatus::Created { had_existing_content: true, }, @@ -990,8 +994,8 @@ impl ActionLog { let mut valid_edits = Vec::new(); for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore { - if anchor_range.start.buffer_id == Some(buffer.remote_id()) - && anchor_range.end.buffer_id == Some(buffer.remote_id()) + if anchor_range.start.buffer_id == buffer.remote_id() + && anchor_range.end.buffer_id == buffer.remote_id() { valid_edits.push((anchor_range, text_to_restore)); } diff --git a/crates/agent/src/edit_agent.rs b/crates/agent/src/edit_agent.rs index 6e6cf9735a922695bf089bdcc78798fb086ad364..f0dae2a7b39dcad0fea280a2354f2f3c5c61600b 100644 --- a/crates/agent/src/edit_agent.rs +++ b/crates/agent/src/edit_agent.rs @@ -374,13 +374,13 @@ impl EditAgent { 
buffer.edit(edits.iter().cloned(), None, cx); let max_edit_end = buffer .summaries_for_anchors::( - edits.iter().map(|(range, _)| &range.end), + edits.iter().map(|(range, _)| range.end), ) .max() .unwrap(); let min_edit_start = buffer .summaries_for_anchors::( - edits.iter().map(|(range, _)| &range.start), + edits.iter().map(|(range, _)| range.start), ) .min() .unwrap(); diff --git a/crates/agent/src/tool_permissions.rs b/crates/agent/src/tool_permissions.rs index e74b6e4c5ce34383ad7ea702f1ba3a0cfd028455..c67942e5cd3769f814fad62f7311bf7967f3317a 100644 --- a/crates/agent/src/tool_permissions.rs +++ b/crates/agent/src/tool_permissions.rs @@ -563,7 +563,7 @@ mod tests { use crate::tools::{DeletePathTool, EditFileTool, FetchTool, TerminalTool}; use agent_settings::{AgentProfileId, CompiledRegex, InvalidRegexPattern, ToolRules}; use gpui::px; - use settings::{DockPosition, NotifyWhenAgentWaiting}; + use settings::{DockPosition, NotifyWhenAgentWaiting, PlaySoundWhenAgentDone}; use std::sync::Arc; fn test_agent_settings(tool_permissions: ToolPermissions) -> AgentSettings { @@ -584,7 +584,7 @@ mod tests { default_profile: AgentProfileId::default(), profiles: Default::default(), notify_when_agent_waiting: NotifyWhenAgentWaiting::default(), - play_sound_when_agent_done: false, + play_sound_when_agent_done: PlaySoundWhenAgentDone::default(), single_file_review: false, model_parameters: vec![], enable_feedback: false, diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index 88ec1e67787ad6efbeaa46b83b9034a24b10d3db..c0c29bfc43d9c58ac011b3170edf81210ba8ee66 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -760,7 +760,7 @@ impl EditSession { { if let Some(match_range) = matcher.push(chunk, None) { let anchor_range = self.buffer.read_with(cx, |buffer, _cx| { - buffer.anchor_range_between(match_range.clone()) + 
buffer.anchor_range_outside(match_range.clone()) }); self.diff .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); @@ -795,7 +795,7 @@ impl EditSession { let anchor_range = self .buffer - .read_with(cx, |buffer, _cx| buffer.anchor_range_between(range.clone())); + .read_with(cx, |buffer, _cx| buffer.anchor_range_outside(range.clone())); self.diff .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); @@ -953,7 +953,7 @@ fn apply_char_operations( } CharOperation::Delete { bytes } => { let delete_end = *edit_cursor + bytes; - let anchor_range = snapshot.anchor_range_around(*edit_cursor..delete_end); + let anchor_range = snapshot.anchor_range_inside(*edit_cursor..delete_end); agent_edit_buffer(&buffer, [(anchor_range, "")], action_log, cx); *edit_cursor = delete_end; } diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index 2ef65fe33641cdeca1a77642251523275511e81f..f0730d39eee17cbd544e5ba8574b30f03963c524 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -13,8 +13,8 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{ DockPosition, DockSide, LanguageModelParameters, LanguageModelSelection, NewThreadLocation, - NotifyWhenAgentWaiting, RegisterSetting, Settings, SettingsContent, SettingsStore, - SidebarDockPosition, SidebarSide, ThinkingBlockDisplay, ToolPermissionMode, + NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, RegisterSetting, Settings, SettingsContent, + SettingsStore, SidebarDockPosition, SidebarSide, ThinkingBlockDisplay, ToolPermissionMode, update_settings_file, }; @@ -165,7 +165,7 @@ pub struct AgentSettings { pub profiles: IndexMap, pub notify_when_agent_waiting: NotifyWhenAgentWaiting, - pub play_sound_when_agent_done: bool, + pub play_sound_when_agent_done: PlaySoundWhenAgentDone, pub single_file_review: bool, pub model_parameters: Vec, pub enable_feedback: bool, @@ -618,7 +618,7 @@ impl Settings for 
AgentSettings { .collect(), notify_when_agent_waiting: agent.notify_when_agent_waiting.unwrap(), - play_sound_when_agent_done: agent.play_sound_when_agent_done.unwrap(), + play_sound_when_agent_done: agent.play_sound_when_agent_done.unwrap_or_default(), single_file_review: agent.single_file_review.unwrap(), model_parameters: agent.model_parameters, enable_feedback: agent.enable_feedback.unwrap(), diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index 2e709c0be3297e270119c048c7b8e25e7958ee69..d5cf63f6cdde9a85a54daaa29f8fc2c6833bdd77 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -138,11 +138,12 @@ impl AgentDiffPane { path_a.cmp(&path_b) }); - let mut paths_to_delete = self + let mut buffers_to_delete = self .multibuffer .read(cx) - .paths() - .cloned() + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) .collect::>(); for (buffer, diff_handle) in sorted_buffers { @@ -151,7 +152,7 @@ impl AgentDiffPane { } let path_key = PathKey::for_buffer(&buffer, cx); - paths_to_delete.remove(&path_key); + buffers_to_delete.remove(&buffer.read(cx).remote_id()); let snapshot = buffer.read(cx).snapshot(); @@ -168,7 +169,7 @@ impl AgentDiffPane { let (was_empty, is_excerpt_newly_added) = self.multibuffer.update(cx, |multibuffer, cx| { let was_empty = multibuffer.is_empty(); - let (_, is_excerpt_newly_added) = multibuffer.set_excerpts_for_path( + let is_excerpt_newly_added = multibuffer.update_excerpts_for_path( path_key.clone(), buffer.clone(), diff_hunk_ranges, @@ -183,13 +184,13 @@ impl AgentDiffPane { if was_empty { let first_hunk = editor .diff_hunks_in_ranges( - &[editor::Anchor::min()..editor::Anchor::max()], + &[editor::Anchor::Min..editor::Anchor::Max], &self.multibuffer.read(cx).read(cx), ) .next(); if let Some(first_hunk) = first_hunk { - let first_hunk_start = first_hunk.multi_buffer_range().start; + let first_hunk_start = first_hunk.multi_buffer_range.start; 
editor.change_selections(Default::default(), window, cx, |selections| { selections.select_anchor_ranges([first_hunk_start..first_hunk_start]); }) @@ -208,8 +209,8 @@ impl AgentDiffPane { } self.multibuffer.update(cx, |multibuffer, cx| { - for path in paths_to_delete { - multibuffer.remove_excerpts_for_path(path, cx); + for buffer_id in buffers_to_delete { + multibuffer.remove_excerpts_for_buffer(buffer_id, cx); } }); @@ -239,13 +240,13 @@ impl AgentDiffPane { self.editor.update(cx, |editor, cx| { let first_hunk = editor .diff_hunks_in_ranges( - &[position..editor::Anchor::max()], + &[position..editor::Anchor::Max], &self.multibuffer.read(cx).read(cx), ) .next(); if let Some(first_hunk) = first_hunk { - let first_hunk_start = first_hunk.multi_buffer_range().start; + let first_hunk_start = first_hunk.multi_buffer_range.start; editor.change_selections(Default::default(), window, cx, |selections| { selections.select_anchor_ranges([first_hunk_start..first_hunk_start]); }) @@ -282,7 +283,7 @@ impl AgentDiffPane { editor, &snapshot, &self.thread, - vec![editor::Anchor::min()..editor::Anchor::max()], + vec![editor::Anchor::Min..editor::Anchor::Max], self.workspace.clone(), window, cx, @@ -451,20 +452,20 @@ fn update_editor_selection( diff_hunks .last() .and_then(|last_kept_hunk| { - let last_kept_hunk_end = last_kept_hunk.multi_buffer_range().end; + let last_kept_hunk_end = last_kept_hunk.multi_buffer_range.end; editor .diff_hunks_in_ranges( - &[last_kept_hunk_end..editor::Anchor::max()], + &[last_kept_hunk_end..editor::Anchor::Max], buffer_snapshot, ) .nth(1) }) .or_else(|| { let first_kept_hunk = diff_hunks.first()?; - let first_kept_hunk_start = first_kept_hunk.multi_buffer_range().start; + let first_kept_hunk_start = first_kept_hunk.multi_buffer_range.start; editor .diff_hunks_in_ranges( - &[editor::Anchor::min()..first_kept_hunk_start], + &[editor::Anchor::Min..first_kept_hunk_start], buffer_snapshot, ) .next() @@ -473,7 +474,7 @@ fn update_editor_selection( if let 
Some(target_hunk) = target_hunk { editor.change_selections(Default::default(), window, cx, |selections| { - let next_hunk_start = target_hunk.multi_buffer_range().start; + let next_hunk_start = target_hunk.multi_buffer_range.start; selections.select_anchor_ranges([next_hunk_start..next_hunk_start]); }) } @@ -1567,7 +1568,7 @@ impl AgentDiff { editor.update(cx, |editor, cx| { let snapshot = multibuffer.read(cx).snapshot(cx); if let Some(first_hunk) = snapshot.diff_hunks().next() { - let first_hunk_start = first_hunk.multi_buffer_range().start; + let first_hunk_start = first_hunk.multi_buffer_range.start; editor.change_selections( SelectionEffects::scroll(Autoscroll::center()), @@ -1648,7 +1649,7 @@ impl AgentDiff { editor, &snapshot, thread, - vec![editor::Anchor::min()..editor::Anchor::max()], + vec![editor::Anchor::Min..editor::Anchor::Max], window, cx, ); @@ -1669,7 +1670,7 @@ impl AgentDiff { editor, &snapshot, thread, - vec![editor::Anchor::min()..editor::Anchor::max()], + vec![editor::Anchor::Min..editor::Anchor::Max], workspace.clone(), window, cx, diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index a32f92942682fc0c5efbbcd35a9848c90b761184..0ed0aeb78bf8889136a479ed2dac5caba633db55 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -66,7 +66,10 @@ use project::project_settings::ProjectSettings; use project::{Project, ProjectPath, Worktree}; use prompt_store::{PromptStore, UserPromptId}; use rules_library::{RulesLibrary, open_rules_library}; +use settings::TerminalDockPosition; use settings::{Settings, update_settings_file}; +use terminal::terminal_settings::TerminalSettings; +use terminal_view::{TerminalView, terminal_panel::TerminalPanel}; use theme_settings::ThemeSettings; use ui::{ Button, Callout, CommonAnimationExt, ContextMenu, ContextMenuEntry, DocumentationSide, @@ -86,6 +89,30 @@ use zed_actions::{ const AGENT_PANEL_KEY: &str = "agent_panel"; const 
RECENTLY_UPDATED_MENU_LIMIT: usize = 6; +const LAST_USED_AGENT_KEY: &str = "agent_panel__last_used_external_agent"; + +#[derive(Serialize, Deserialize)] +struct LastUsedAgent { + agent: Agent, +} + +/// Reads the most recently used agent across all workspaces. Used as a fallback +/// when opening a workspace that has no per-workspace agent preference yet. +fn read_global_last_used_agent(kvp: &KeyValueStore) -> Option { + kvp.read_kvp(LAST_USED_AGENT_KEY) + .log_err() + .flatten() + .and_then(|json| serde_json::from_str::(&json).log_err()) + .map(|entry| entry.agent) +} + +async fn write_global_last_used_agent(kvp: KeyValueStore, agent: Agent) { + if let Some(json) = serde_json::to_string(&LastUsedAgent { agent }).log_err() { + kvp.write_kvp(LAST_USED_AGENT_KEY.to_string(), json) + .await + .log_err(); + } +} fn read_serialized_panel( workspace_id: workspace::WorkspaceId, @@ -399,6 +426,48 @@ pub fn init(cx: &mut App) { }) .register_action( |workspace: &mut Workspace, _: &AddSelectionToThread, window, cx| { + let active_editor = workspace + .active_item(cx) + .and_then(|item| item.act_as::(cx)); + let has_editor_selection = active_editor.is_some_and(|editor| { + editor.update(cx, |editor, cx| { + editor.has_non_empty_selection(&editor.display_snapshot(cx)) + }) + }); + + let has_terminal_selection = workspace + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + .is_some_and(|terminal_view| { + terminal_view + .read(cx) + .terminal() + .read(cx) + .last_content + .selection_text + .as_ref() + .is_some_and(|text| !text.is_empty()) + }); + + let has_terminal_panel_selection = + workspace.panel::(cx).is_some_and(|panel| { + let position = match TerminalSettings::get_global(cx).dock { + TerminalDockPosition::Left => DockPosition::Left, + TerminalDockPosition::Bottom => DockPosition::Bottom, + TerminalDockPosition::Right => DockPosition::Right, + }; + let dock_is_open = + workspace.dock_at_position(position).read(cx).is_open(); + dock_is_open && 
!panel.read(cx).terminal_selections(cx).is_empty() + }); + + if !has_editor_selection + && !has_terminal_selection + && !has_terminal_panel_selection + { + return; + } + let Some(panel) = workspace.panel::(cx) else { return; }; @@ -665,13 +734,18 @@ impl AgentPanel { .ok() .flatten(); - let serialized_panel = cx + let (serialized_panel, global_last_used_agent) = cx .background_spawn(async move { - kvp.and_then(|kvp| { - workspace_id - .and_then(|id| read_serialized_panel(id, &kvp)) - .or_else(|| read_legacy_serialized_panel(&kvp)) - }) + match kvp { + Some(kvp) => { + let panel = workspace_id + .and_then(|id| read_serialized_panel(id, &kvp)) + .or_else(|| read_legacy_serialized_panel(&kvp)); + let global_agent = read_global_last_used_agent(&kvp); + (panel, global_agent) + } + None => (None, None), + } }) .await; @@ -710,10 +784,21 @@ impl AgentPanel { let panel = cx.new(|cx| Self::new(workspace, prompt_store, window, cx)); - if let Some(serialized_panel) = &serialized_panel { - panel.update(cx, |panel, cx| { + panel.update(cx, |panel, cx| { + let is_via_collab = panel.project.read(cx).is_via_collab(); + + // Only apply a non-native global fallback to local projects. + // Collab workspaces only support NativeAgent, so inheriting a + // custom agent would cause set_active → new_agent_thread_inner + // to bypass the collab guard in external_thread. 
+ let global_fallback = global_last_used_agent + .filter(|agent| !is_via_collab || agent.is_native()); + + if let Some(serialized_panel) = &serialized_panel { if let Some(selected_agent) = serialized_panel.selected_agent.clone() { panel.selected_agent = selected_agent; + } else if let Some(agent) = global_fallback { + panel.selected_agent = agent; } if let Some(start_thread_in) = serialized_panel.start_thread_in { let is_worktree_flag_enabled = @@ -734,9 +819,11 @@ impl AgentPanel { ); } } - cx.notify(); - }); - } + } else if let Some(agent) = global_fallback { + panel.selected_agent = agent; + } + cx.notify(); + }); if let Some(thread_info) = last_active_thread { let agent = thread_info.agent_type.clone(); @@ -1069,85 +1156,30 @@ impl AgentPanel { let workspace = self.workspace.clone(); let project = self.project.clone(); let fs = self.fs.clone(); - let is_via_collab = self.project.read(cx).is_via_collab(); - - const LAST_USED_EXTERNAL_AGENT_KEY: &str = "agent_panel__last_used_external_agent"; - - #[derive(Serialize, Deserialize)] - struct LastUsedExternalAgent { - agent: crate::Agent, - } - let thread_store = self.thread_store.clone(); - let kvp = KeyValueStore::global(cx); - - if let Some(agent) = agent_choice { - cx.background_spawn({ - let agent = agent.clone(); - let kvp = kvp; - async move { - if let Some(serialized) = - serde_json::to_string(&LastUsedExternalAgent { agent }).log_err() - { - kvp.write_kvp(LAST_USED_EXTERNAL_AGENT_KEY.to_string(), serialized) - .await - .log_err(); - } - } - }) - .detach(); - let server = agent.server(fs, thread_store); - self.create_agent_thread( - server, - resume_session_id, - work_dirs, - title, - initial_content, - workspace, - project, - agent, - focus, - window, - cx, - ); - } else { - cx.spawn_in(window, async move |this, cx| { - let ext_agent = if is_via_collab { - Agent::NativeAgent - } else { - cx.background_spawn(async move { kvp.read_kvp(LAST_USED_EXTERNAL_AGENT_KEY) }) - .await - .log_err() - .flatten() - 
.and_then(|value| { - serde_json::from_str::(&value).log_err() - }) - .map(|agent| agent.agent) - .unwrap_or(Agent::NativeAgent) - }; - - let server = ext_agent.server(fs, thread_store); - this.update_in(cx, |agent_panel, window, cx| { - agent_panel.create_agent_thread( - server, - resume_session_id, - work_dirs, - title, - initial_content, - workspace, - project, - ext_agent, - focus, - window, - cx, - ); - })?; + let agent = agent_choice.unwrap_or_else(|| { + if self.project.read(cx).is_via_collab() { + Agent::NativeAgent + } else { + self.selected_agent.clone() + } + }); - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - } + let server = agent.server(fs, thread_store); + self.create_agent_thread( + server, + resume_session_id, + work_dirs, + title, + initial_content, + workspace, + project, + agent, + focus, + window, + cx, + ); } fn deploy_rules_library( @@ -2102,15 +2134,25 @@ impl AgentPanel { initial_content: Option, workspace: WeakEntity, project: Entity, - ext_agent: Agent, + agent: Agent, focus: bool, window: &mut Window, cx: &mut Context, ) { - if self.selected_agent != ext_agent { - self.selected_agent = ext_agent.clone(); + if self.selected_agent != agent { + self.selected_agent = agent.clone(); self.serialize(cx); } + + cx.background_spawn({ + let kvp = KeyValueStore::global(cx); + let agent = agent.clone(); + async move { + write_global_last_used_agent(kvp, agent).await; + } + }) + .detach(); + let thread_store = server .clone() .downcast::() @@ -2123,7 +2165,7 @@ impl AgentPanel { crate::ConversationView::new( server, connection_store, - ext_agent, + agent, resume_session_id, work_dirs, title, @@ -5611,4 +5653,211 @@ mod tests { "Thread A work_dirs should revert to only /project_a after removing /project_b" ); } + + #[gpui::test] + async fn test_new_workspace_inherits_global_last_used_agent(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + 
language_model::LanguageModelRegistry::test(cx); + // Use an isolated DB so parallel tests can't overwrite our global key. + cx.set_global(db::AppDatabase::test_new()); + }); + + let custom_agent = Agent::Custom { + id: "my-preferred-agent".into(), + }; + + // Write a known agent to the global KVP to simulate a user who has + // previously used this agent in another workspace. + let kvp = cx.update(|cx| KeyValueStore::global(cx)); + write_global_last_used_agent(kvp, custom_agent.clone()).await; + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [], cx).await; + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace + .read_with(cx, |multi_workspace, _cx| { + multi_workspace.workspace().clone() + }) + .unwrap(); + + workspace.update(cx, |workspace, _cx| { + workspace.set_random_database_id(); + }); + + let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx); + + // Load the panel via `load()`, which reads the global fallback + // asynchronously when no per-workspace state exists. 
+ let async_cx = cx.update(|window, cx| window.to_async(cx)); + let panel = AgentPanel::load(workspace.downgrade(), async_cx) + .await + .expect("panel load should succeed"); + cx.run_until_parked(); + + panel.read_with(cx, |panel, _cx| { + assert_eq!( + panel.selected_agent, custom_agent, + "new workspace should inherit the global last-used agent" + ); + }); + } + + #[gpui::test] + async fn test_workspaces_maintain_independent_agent_selection(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + }); + + let fs = FakeFs::new(cx.executor()); + let project_a = Project::test(fs.clone(), [], cx).await; + let project_b = Project::test(fs, [], cx).await; + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + + let workspace_a = multi_workspace + .read_with(cx, |multi_workspace, _cx| { + multi_workspace.workspace().clone() + }) + .unwrap(); + + let workspace_b = multi_workspace + .update(cx, |multi_workspace, window, cx| { + multi_workspace.test_add_workspace(project_b.clone(), window, cx) + }) + .unwrap(); + + workspace_a.update(cx, |workspace, _cx| { + workspace.set_random_database_id(); + }); + workspace_b.update(cx, |workspace, _cx| { + workspace.set_random_database_id(); + }); + + let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx); + + let agent_a = Agent::Custom { + id: "agent-alpha".into(), + }; + let agent_b = Agent::Custom { + id: "agent-beta".into(), + }; + + // Set up workspace A with agent_a + let panel_a = workspace_a.update_in(cx, |workspace, window, cx| { + cx.new(|cx| AgentPanel::new(workspace, None, window, cx)) + }); + panel_a.update(cx, |panel, _cx| { + panel.selected_agent = agent_a.clone(); + }); + + // Set up workspace B with agent_b + let panel_b = workspace_b.update_in(cx, |workspace, window, cx| { + cx.new(|cx| 
AgentPanel::new(workspace, None, window, cx)) + }); + panel_b.update(cx, |panel, _cx| { + panel.selected_agent = agent_b.clone(); + }); + + // Serialize both panels + panel_a.update(cx, |panel, cx| panel.serialize(cx)); + panel_b.update(cx, |panel, cx| panel.serialize(cx)); + cx.run_until_parked(); + + // Load fresh panels from serialized state and verify independence + let async_cx = cx.update(|window, cx| window.to_async(cx)); + let loaded_a = AgentPanel::load(workspace_a.downgrade(), async_cx) + .await + .expect("panel A load should succeed"); + cx.run_until_parked(); + + let async_cx = cx.update(|window, cx| window.to_async(cx)); + let loaded_b = AgentPanel::load(workspace_b.downgrade(), async_cx) + .await + .expect("panel B load should succeed"); + cx.run_until_parked(); + + loaded_a.read_with(cx, |panel, _cx| { + assert_eq!( + panel.selected_agent, agent_a, + "workspace A should restore agent-alpha, not agent-beta" + ); + }); + + loaded_b.read_with(cx, |panel, _cx| { + assert_eq!( + panel.selected_agent, agent_b, + "workspace B should restore agent-beta, not agent-alpha" + ); + }); + } + + #[gpui::test] + async fn test_new_thread_uses_workspace_selected_agent(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + }); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [], cx).await; + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace + .read_with(cx, |multi_workspace, _cx| { + multi_workspace.workspace().clone() + }) + .unwrap(); + + workspace.update(cx, |workspace, _cx| { + workspace.set_random_database_id(); + }); + + let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx); + + let custom_agent = Agent::Custom { + id: "my-custom-agent".into(), + }; + + let panel = 
workspace.update_in(cx, |workspace, window, cx| { + let panel = cx.new(|cx| AgentPanel::new(workspace, None, window, cx)); + workspace.add_panel(panel.clone(), window, cx); + panel + }); + + // Set selected_agent to a custom agent + panel.update(cx, |panel, _cx| { + panel.selected_agent = custom_agent.clone(); + }); + + // Call new_thread, which internally calls external_thread(None, ...) + // This resolves the agent from self.selected_agent + panel.update_in(cx, |panel, window, cx| { + panel.new_thread(&NewThread, window, cx); + }); + + panel.read_with(cx, |panel, _cx| { + assert_eq!( + panel.selected_agent, custom_agent, + "selected_agent should remain the custom agent after new_thread" + ); + assert!( + panel.active_conversation_view().is_some(), + "a thread should have been created" + ); + }); + } } diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 98715056ccec43fb91cc4dc9307cf41d84719fc0..185a54825d3af18f16f2eb30188ea866c099bf32 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -674,7 +674,9 @@ mod tests { use feature_flags::FeatureFlagAppExt; use gpui::{BorrowAppContext, TestAppContext, px}; use project::DisableAiSettings; - use settings::{DockPosition, NotifyWhenAgentWaiting, Settings, SettingsStore}; + use settings::{ + DockPosition, NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, Settings, SettingsStore, + }; #[gpui::test] fn test_agent_command_palette_visibility(cx: &mut TestAppContext) { @@ -705,7 +707,7 @@ mod tests { default_profile: AgentProfileId::default(), profiles: Default::default(), notify_when_agent_waiting: NotifyWhenAgentWaiting::default(), - play_sound_when_agent_done: false, + play_sound_when_agent_done: PlaySoundWhenAgentDone::Never, single_file_review: false, model_parameters: vec![], enable_feedback: false, diff --git a/crates/agent_ui/src/buffer_codegen.rs b/crates/agent_ui/src/buffer_codegen.rs index 
9857dd8a4752e567b6c22ee0fb5932c79a15d82a..d5288c564d7211a986fa6347e2b74782c58d9c75 100644 --- a/crates/agent_ui/src/buffer_codegen.rs +++ b/crates/agent_ui/src/buffer_codegen.rs @@ -18,6 +18,9 @@ use language_model::{ LanguageModelRequestTool, LanguageModelTextStream, LanguageModelToolChoice, LanguageModelToolUse, Role, TokenUsage, }; +use language_models::provider::anthropic::telemetry::{ + AnthropicCompletionType, AnthropicEventData, AnthropicEventReporter, AnthropicEventType, +}; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; use prompt_store::PromptBuilder; @@ -300,7 +303,7 @@ impl CodegenAlternative { let snapshot = buffer.read(cx).snapshot(cx); let (old_buffer, _, _) = snapshot - .range_to_buffer_ranges(range.start..=range.end) + .range_to_buffer_ranges(range.start..range.end) .pop() .unwrap(); let old_buffer = cx.new(|cx| { @@ -637,7 +640,7 @@ impl CodegenAlternative { stream: impl 'static + Future>, cx: &mut Context, ) -> Task<()> { - let anthropic_reporter = language_model::AnthropicEventReporter::new(&model, cx); + let anthropic_reporter = AnthropicEventReporter::new(&model, cx); let session_id = self.session_id; let model_telemetry_id = model.telemetry_id(); let model_provider_id = model.provider_id().to_string(); @@ -681,7 +684,7 @@ impl CodegenAlternative { let language_name = { let multibuffer = self.buffer.read(cx); let snapshot = multibuffer.snapshot(cx); - let ranges = snapshot.range_to_buffer_ranges(self.range.start..=self.range.end); + let ranges = snapshot.range_to_buffer_ranges(self.range.start..self.range.end); ranges .first() .and_then(|(buffer, _, _)| buffer.language()) @@ -830,9 +833,9 @@ impl CodegenAlternative { error_message = error_message.as_deref(), ); - anthropic_reporter.report(language_model::AnthropicEventData { - completion_type: language_model::AnthropicCompletionType::Editor, - event: language_model::AnthropicEventType::Response, + anthropic_reporter.report(AnthropicEventData { + completion_type: 
AnthropicCompletionType::Editor, + event: AnthropicEventType::Response, language_name: language_name.map(|n| n.to_string()), message_id, }); diff --git a/crates/agent_ui/src/completion_provider.rs b/crates/agent_ui/src/completion_provider.rs index b6be6502b152847822a79bc8c486195345c0a195..a72b352375ef9b219729172f0d19854287e0e7fc 100644 --- a/crates/agent_ui/src/completion_provider.rs +++ b/crates/agent_ui/src/completion_provider.rs @@ -9,9 +9,7 @@ use crate::ThreadHistory; use acp_thread::MentionUri; use agent_client_protocol as acp; use anyhow::Result; -use editor::{ - CompletionProvider, Editor, ExcerptId, code_context_menus::COMPLETION_MENU_MAX_WIDTH, -}; +use editor::{CompletionProvider, Editor, code_context_menus::COMPLETION_MENU_MAX_WIDTH}; use futures::FutureExt as _; use fuzzy::{PathMatch, StringMatch, StringMatchCandidate}; use gpui::{App, BackgroundExecutor, Entity, SharedString, Task, WeakEntity}; @@ -28,7 +26,7 @@ use prompt_store::{PromptStore, UserPromptId}; use rope::Point; use settings::{Settings, TerminalDockPosition}; use terminal::terminal_settings::TerminalSettings; -use terminal_view::terminal_panel::TerminalPanel; +use terminal_view::{TerminalView, terminal_panel::TerminalPanel}; use text::{Anchor, ToOffset as _, ToPoint as _}; use ui::IconName; use ui::prelude::*; @@ -562,8 +560,7 @@ impl PromptCompletionProvider { .collect(); // Collect terminal selections from all terminal views if the terminal panel is visible - let terminal_selections: Vec = - terminal_selections_if_panel_open(workspace, cx); + let terminal_selections: Vec = terminal_selections(workspace, cx); const EDITOR_PLACEHOLDER: &str = "selection "; const TERMINAL_PLACEHOLDER: &str = "terminal "; @@ -622,7 +619,7 @@ impl PromptCompletionProvider { for (terminal_text, terminal_range) in terminal_ranges { let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx); let Some(start) = - snapshot.as_singleton_anchor(source_range.start) + snapshot.anchor_in_excerpt(source_range.start) 
else { return; }; @@ -1198,7 +1195,7 @@ impl PromptCompletionProvider { }) }); - let has_terminal_selection = !terminal_selections_if_panel_open(workspace, cx).is_empty(); + let has_terminal_selection = !terminal_selections(workspace, cx).is_empty(); if has_editor_selection || has_terminal_selection { entries.push(PromptContextEntry::Action( @@ -1236,7 +1233,6 @@ impl PromptCompletionProvider { impl CompletionProvider for PromptCompletionProvider { fn completions( &self, - _excerpt_id: ExcerptId, buffer: &Entity, buffer_position: Anchor, _trigger: CompletionContext, @@ -2169,28 +2165,45 @@ fn build_code_label_for_path( label.build() } -/// Returns terminal selections from all terminal views if the terminal panel is open. -fn terminal_selections_if_panel_open(workspace: &Entity, cx: &App) -> Vec { - let Some(panel) = workspace.read(cx).panel::(cx) else { - return Vec::new(); - }; +fn terminal_selections(workspace: &Entity, cx: &App) -> Vec { + let mut selections = Vec::new(); - // Check if the dock containing this panel is open - let position = match TerminalSettings::get_global(cx).dock { - TerminalDockPosition::Left => DockPosition::Left, - TerminalDockPosition::Bottom => DockPosition::Bottom, - TerminalDockPosition::Right => DockPosition::Right, - }; - let dock_is_open = workspace - .read(cx) - .dock_at_position(position) + // Check if the active item is a terminal (in a panel or not) + if let Some(terminal_view) = workspace .read(cx) - .is_open(); - if !dock_is_open { - return Vec::new(); + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + { + if let Some(text) = terminal_view + .read(cx) + .terminal() + .read(cx) + .last_content + .selection_text + .clone() + .filter(|text| !text.is_empty()) + { + selections.push(text); + } + } + + if let Some(panel) = workspace.read(cx).panel::(cx) { + let position = match TerminalSettings::get_global(cx).dock { + TerminalDockPosition::Left => DockPosition::Left, + TerminalDockPosition::Bottom => DockPosition::Bottom, + 
TerminalDockPosition::Right => DockPosition::Right, + }; + let dock_is_open = workspace + .read(cx) + .dock_at_position(position) + .read(cx) + .is_open(); + if dock_is_open { + selections.extend(panel.read(cx).terminal_selections(cx)); + } } - panel.read(cx).terminal_selections(cx) + selections } fn selection_ranges( @@ -2213,17 +2226,8 @@ fn selection_ranges( selections .into_iter() - .map(|s| { - let (start, end) = if s.is_empty() { - let row = multi_buffer::MultiBufferRow(s.start.row); - let line_start = text::Point::new(s.start.row, 0); - let line_end = text::Point::new(s.start.row, snapshot.line_len(row)); - (line_start, line_end) - } else { - (s.start, s.end) - }; - snapshot.anchor_after(start)..snapshot.anchor_before(end) - }) + .filter(|s| !s.is_empty()) + .map(|s| snapshot.anchor_after(s.start)..snapshot.anchor_before(s.end)) .flat_map(|range| { let (start_buffer, start) = buffer.text_anchor_for_position(range.start, cx)?; let (end_buffer, end) = buffer.text_anchor_for_position(range.end, cx)?; diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index 2231f421bc2af0d8038c002a72c226f551f243cc..9b8b3224a420b32b4f534869ded19b3be821c080 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -2340,7 +2340,7 @@ impl ConversationView { .is_some_and(|workspace| AgentPanel::is_visible(&workspace, cx)) }; #[cfg(feature = "audio")] - if settings.play_sound_when_agent_done && !_visible { + if settings.play_sound_when_agent_done.should_play(_visible) { Audio::play_sound(Sound::AgentDone, cx); } } diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index 63aa8b8529655a26b99ba74062f8d0a6a4812c5f..c065c3de3d83c0eb5b68bf9a3610ff925762c952 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -5152,9 +5152,12 @@ impl ThreadView { } 
pub(crate) fn auto_expand_streaming_thought(&mut self, cx: &mut Context) { - // Only auto-expand thinking blocks in Automatic mode. - // AlwaysExpanded shows them open by default; AlwaysCollapsed keeps them closed. - if AgentSettings::get_global(cx).thinking_display != ThinkingBlockDisplay::Automatic { + let thinking_display = AgentSettings::get_global(cx).thinking_display; + + if !matches!( + thinking_display, + ThinkingBlockDisplay::Auto | ThinkingBlockDisplay::Preview + ) { return; } @@ -5183,6 +5186,13 @@ impl ThreadView { cx.notify(); } } else if self.auto_expanded_thinking_block.is_some() { + if thinking_display == ThinkingBlockDisplay::Auto { + if let Some(key) = self.auto_expanded_thinking_block { + if !self.user_toggled_thinking_blocks.contains(&key) { + self.expanded_thinking_blocks.remove(&key); + } + } + } self.auto_expanded_thinking_block = None; cx.notify(); } @@ -5196,7 +5206,19 @@ impl ThreadView { let thinking_display = AgentSettings::get_global(cx).thinking_display; match thinking_display { - ThinkingBlockDisplay::Automatic => { + ThinkingBlockDisplay::Auto => { + let is_open = self.expanded_thinking_blocks.contains(&key) + || self.user_toggled_thinking_blocks.contains(&key); + + if is_open { + self.expanded_thinking_blocks.remove(&key); + self.user_toggled_thinking_blocks.remove(&key); + } else { + self.expanded_thinking_blocks.insert(key); + self.user_toggled_thinking_blocks.insert(key); + } + } + ThinkingBlockDisplay::Preview => { let is_user_expanded = self.user_toggled_thinking_blocks.contains(&key); let is_in_expanded_set = self.expanded_thinking_blocks.contains(&key); @@ -5249,7 +5271,11 @@ impl ThreadView { let is_in_expanded_set = self.expanded_thinking_blocks.contains(&key); let (is_open, is_constrained) = match thinking_display { - ThinkingBlockDisplay::Automatic => { + ThinkingBlockDisplay::Auto => { + let is_open = is_user_toggled || is_in_expanded_set; + (is_open, false) + } + ThinkingBlockDisplay::Preview => { let is_open = 
is_user_toggled || is_in_expanded_set; let is_constrained = is_in_expanded_set && !is_user_toggled; (is_open, is_constrained) @@ -7103,17 +7129,10 @@ impl ThreadView { }; active_editor.update_in(cx, |editor, window, cx| { - let singleton = editor - .buffer() - .read(cx) - .read(cx) - .as_singleton() - .map(|(a, b, _)| (a, b)); - if let Some((excerpt_id, buffer_id)) = singleton - && let Some(agent_buffer) = agent_location.buffer.upgrade() - && agent_buffer.read(cx).remote_id() == buffer_id + let snapshot = editor.buffer().read(cx).snapshot(cx); + if snapshot.as_singleton().is_some() + && let Some(anchor) = snapshot.anchor_in_excerpt(agent_location.position) { - let anchor = editor::Anchor::in_buffer(excerpt_id, agent_location.position); editor.change_selections(Default::default(), window, cx, |selections| { selections.select_anchor_ranges([anchor..anchor]); }) diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index 3b98e496d4732deaf54be9b4e14da380285f467f..20e0b702978b7e72a8526b03570854965335310c 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -1,5 +1,6 @@ -use language_model::AnthropicEventData; -use language_model::report_anthropic_event; +use language_models::provider::anthropic::telemetry::{ + AnthropicCompletionType, AnthropicEventData, AnthropicEventType, report_anthropic_event, +}; use std::cmp; use std::mem; use std::ops::Range; @@ -26,8 +27,8 @@ use editor::RowExt; use editor::SelectionEffects; use editor::scroll::ScrollOffset; use editor::{ - Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, HighlightKey, - MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint, + Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, HighlightKey, MultiBuffer, + MultiBufferSnapshot, ToOffset as _, ToPoint, actions::SelectAll, display_map::{ BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, EditorMargins, @@ 
-442,15 +443,17 @@ impl InlineAssistant { let newest_selection = newest_selection.unwrap(); let mut codegen_ranges = Vec::new(); - for (buffer, buffer_range, excerpt_id) in - snapshot.ranges_to_buffer_ranges(selections.iter().map(|selection| { - snapshot.anchor_before(selection.start)..snapshot.anchor_after(selection.end) - })) + for (buffer, buffer_range, _) in selections + .iter() + .flat_map(|selection| snapshot.range_to_buffer_ranges(selection.start..selection.end)) { - let anchor_range = Anchor::range_in_buffer( - excerpt_id, - buffer.anchor_before(buffer_range.start)..buffer.anchor_after(buffer_range.end), - ); + let (Some(start), Some(end)) = ( + snapshot.anchor_in_buffer(buffer.anchor_before(buffer_range.start)), + snapshot.anchor_in_buffer(buffer.anchor_after(buffer_range.end)), + ) else { + continue; + }; + let anchor_range = start..end; codegen_ranges.push(anchor_range); @@ -467,8 +470,8 @@ impl InlineAssistant { report_anthropic_event( &model.model, AnthropicEventData { - completion_type: language_model::AnthropicCompletionType::Editor, - event: language_model::AnthropicEventType::Invoked, + completion_type: AnthropicCompletionType::Editor, + event: AnthropicEventType::Invoked, language_name: buffer.language().map(|language| language.name().to_proto()), message_id: None, }, @@ -981,8 +984,7 @@ impl InlineAssistant { match event { EditorEvent::Edited { transaction_id } => { let buffer = editor.read(cx).buffer().read(cx); - let edited_ranges = - buffer.edited_ranges_for_transaction::(*transaction_id, cx); + let edited_ranges = buffer.edited_ranges_for_transaction(*transaction_id, cx); let snapshot = buffer.snapshot(cx); for assist_id in editor_assists.assist_ids.clone() { @@ -1088,7 +1090,7 @@ impl InlineAssistant { let multibuffer = editor.read(cx).buffer().read(cx); let snapshot = multibuffer.snapshot(cx); let ranges = - snapshot.range_to_buffer_ranges(assist.range.start..=assist.range.end); + 
snapshot.range_to_buffer_ranges(assist.range.start..assist.range.end); ranges .first() .and_then(|(buffer, _, _)| buffer.language()) @@ -1105,13 +1107,13 @@ impl InlineAssistant { ( "rejected", "Assistant Response Rejected", - language_model::AnthropicEventType::Reject, + AnthropicEventType::Reject, ) } else { ( "accepted", "Assistant Response Accepted", - language_model::AnthropicEventType::Accept, + AnthropicEventType::Accept, ) }; @@ -1128,8 +1130,8 @@ impl InlineAssistant { report_anthropic_event( &model.model, - language_model::AnthropicEventData { - completion_type: language_model::AnthropicCompletionType::Editor, + AnthropicEventData { + completion_type: AnthropicCompletionType::Editor, event: anthropic_event_type, language_name, message_id, @@ -1495,10 +1497,10 @@ impl InlineAssistant { let mut new_blocks = Vec::new(); for (new_row, old_row_range) in deleted_row_ranges { - let (_, start, _) = old_snapshot + let (_, start) = old_snapshot .point_to_buffer_point(Point::new(*old_row_range.start(), 0)) .unwrap(); - let (_, end, _) = old_snapshot + let (_, end) = old_snapshot .point_to_buffer_point(Point::new( *old_row_range.end(), old_snapshot.line_len(MultiBufferRow(*old_row_range.end())), @@ -1529,7 +1531,7 @@ impl InlineAssistant { editor.set_read_only(true); editor.set_show_edit_predictions(Some(false), window, cx); editor.highlight_rows::( - Anchor::min()..Anchor::max(), + Anchor::Min..Anchor::Max, cx.theme().status().deleted_background, Default::default(), cx, @@ -1937,9 +1939,8 @@ impl CodeActionProvider for AssistantCodeActionProvider { fn apply_code_action( &self, - buffer: Entity, + _buffer: Entity, action: CodeAction, - excerpt_id: ExcerptId, _push_to_history: bool, window: &mut Window, cx: &mut App, @@ -1969,31 +1970,8 @@ impl CodeActionProvider for AssistantCodeActionProvider { let range = editor .update(cx, |editor, cx| { editor.buffer().update(cx, |multibuffer, cx| { - let buffer = buffer.read(cx); - let multibuffer_snapshot = 
multibuffer.read(cx); - - let old_context_range = - multibuffer_snapshot.context_range_for_excerpt(excerpt_id)?; - let mut new_context_range = old_context_range.clone(); - if action - .range - .start - .cmp(&old_context_range.start, buffer) - .is_lt() - { - new_context_range.start = action.range.start; - } - if action.range.end.cmp(&old_context_range.end, buffer).is_gt() { - new_context_range.end = action.range.end; - } - drop(multibuffer_snapshot); - - if new_context_range != old_context_range { - multibuffer.resize_excerpt(excerpt_id, new_context_range, cx); - } - let multibuffer_snapshot = multibuffer.read(cx); - multibuffer_snapshot.anchor_range_in_excerpt(excerpt_id, action.range) + multibuffer_snapshot.buffer_anchor_range_to_anchor_range(action.range) }) }) .context("invalid range")?; diff --git a/crates/agent_ui/src/mention_set.rs b/crates/agent_ui/src/mention_set.rs index 2559edc566d4467eaaab180e0a16f4af5fae7ab9..4db856f9dd1e512a7b8b43eadcefccc22fe50188 100644 --- a/crates/agent_ui/src/mention_set.rs +++ b/crates/agent_ui/src/mention_set.rs @@ -6,7 +6,7 @@ use agent_servers::{AgentServer, AgentServerDelegate}; use anyhow::{Context as _, Result, anyhow}; use collections::{HashMap, HashSet}; use editor::{ - Anchor, Editor, EditorSnapshot, ExcerptId, FoldPlaceholder, ToOffset, + Anchor, Editor, EditorSnapshot, FoldPlaceholder, ToOffset, display_map::{Crease, CreaseId, CreaseMetadata, FoldId}, scroll::Autoscroll, }; @@ -204,10 +204,9 @@ impl MentionSet { }; let snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx)); - let Some(start_anchor) = snapshot.buffer_snapshot().as_singleton_anchor(start) else { + let Some(start_anchor) = snapshot.buffer_snapshot().anchor_in_excerpt(start) else { return Task::ready(()); }; - let excerpt_id = start_anchor.excerpt_id; let end_anchor = snapshot.buffer_snapshot().anchor_before( start_anchor.to_offset(&snapshot.buffer_snapshot()) + content_len + 1usize, ); @@ -234,7 +233,6 @@ impl MentionSet { }) .shared(); 
insert_crease_for_mention( - excerpt_id, start, content_len, mention_uri.name().into(), @@ -249,7 +247,6 @@ impl MentionSet { ) } else { insert_crease_for_mention( - excerpt_id, start, content_len, crease_text, @@ -468,7 +465,7 @@ impl MentionSet { }; let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx); - let Some(start) = snapshot.as_singleton_anchor(source_range.start) else { + let Some(start) = snapshot.anchor_in_excerpt(source_range.start) else { return; }; @@ -745,19 +742,17 @@ pub(crate) async fn insert_images_as_context( let replacement_text = MentionUri::PastedImage.as_link().to_string(); for (image, name) in images { - let Some((excerpt_id, text_anchor, multibuffer_anchor)) = editor + let Some((text_anchor, multibuffer_anchor)) = editor .update_in(cx, |editor, window, cx| { let snapshot = editor.snapshot(window, cx); - let (excerpt_id, _, buffer_snapshot) = - snapshot.buffer_snapshot().as_singleton().unwrap(); - - let cursor_anchor = editor.selections.newest_anchor().start.text_anchor; - let text_anchor = cursor_anchor.bias_left(&buffer_snapshot); - let multibuffer_anchor = snapshot + let (cursor_anchor, buffer_snapshot) = snapshot .buffer_snapshot() - .anchor_in_excerpt(excerpt_id, text_anchor); + .anchor_to_buffer_anchor(editor.selections.newest_anchor().start) + .unwrap(); + let text_anchor = cursor_anchor.bias_left(buffer_snapshot); + let multibuffer_anchor = snapshot.buffer_snapshot().anchor_in_excerpt(text_anchor); editor.insert(&format!("{replacement_text} "), window, cx); - (excerpt_id, text_anchor, multibuffer_anchor) + (text_anchor, multibuffer_anchor) }) .ok() else { @@ -775,7 +770,6 @@ pub(crate) async fn insert_images_as_context( let image = Arc::new(image); let Ok(Some((crease_id, tx))) = cx.update(|window, cx| { insert_crease_for_mention( - excerpt_id, text_anchor, content_len, name.clone(), @@ -909,7 +903,6 @@ pub(crate) fn paste_images_as_context( } pub(crate) fn insert_crease_for_mention( - excerpt_id: ExcerptId, anchor: 
text::Anchor, content_len: usize, crease_label: SharedString, @@ -927,7 +920,7 @@ pub(crate) fn insert_crease_for_mention( let crease_id = editor.update(cx, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); - let start = snapshot.anchor_in_excerpt(excerpt_id, anchor)?; + let start = snapshot.anchor_in_excerpt(anchor)?; let start = start.bias_right(&snapshot); let end = snapshot.anchor_before(start.to_offset(&snapshot) + content_len); diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index df36f38899c9abea165d0ff5a01834a2bb84c82f..8660e792cd23bc418b1d2c204bfafb2a81ba48df 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -203,12 +203,10 @@ fn insert_mention_for_project_path( MentionInsertPosition::AtCursor => editor.update(cx, |editor, cx| { let buffer = editor.buffer().read(cx); let snapshot = buffer.snapshot(cx); - let (_, _, buffer_snapshot) = snapshot.as_singleton()?; - let text_anchor = editor - .selections - .newest_anchor() - .start - .text_anchor + let buffer_snapshot = snapshot.as_singleton()?; + let text_anchor = snapshot + .anchor_to_buffer_anchor(editor.selections.newest_anchor().start)? 
+ .0 .bias_left(&buffer_snapshot); editor.insert(&mention_text, window, cx); @@ -224,7 +222,7 @@ fn insert_mention_for_project_path( editor.update(cx, |editor, cx| { editor.edit( [( - multi_buffer::Anchor::max()..multi_buffer::Anchor::max(), + multi_buffer::Anchor::Max..multi_buffer::Anchor::Max, new_text, )], cx, @@ -603,7 +601,7 @@ impl MessageEditor { COMMAND_HINT_INLAY_ID, hint_pos, &InlayHint { - position: hint_pos.text_anchor, + position: snapshot.anchor_to_buffer_anchor(hint_pos)?.0, label: InlayHintLabel::String(hint), kind: Some(InlayHintKind::Parameter), padding_left: false, @@ -640,12 +638,11 @@ impl MessageEditor { let start = self.editor.update(cx, |editor, cx| { editor.set_text(content, window, cx); - editor - .buffer() - .read(cx) - .snapshot(cx) - .anchor_before(Point::zero()) - .text_anchor + let snapshot = editor.buffer().read(cx).snapshot(cx); + snapshot + .anchor_to_buffer_anchor(snapshot.anchor_before(Point::zero())) + .unwrap() + .0 }); let supports_images = self.session_capabilities.read().supports_images(); @@ -999,13 +996,10 @@ impl MessageEditor { if should_insert_creases && let Some(selections) = editor_clipboard_selections { cx.stop_propagation(); - let insertion_target = self - .editor - .read(cx) - .selections - .newest_anchor() - .start - .text_anchor; + let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx); + let (insertion_target, _) = snapshot + .anchor_to_buffer_anchor(self.editor.read(cx).selections.newest_anchor().start) + .unwrap(); let project = workspace.read(cx).project().clone(); for selection in selections { @@ -1021,21 +1015,19 @@ impl MessageEditor { }; let mention_text = mention_uri.as_link().to_string(); - let (excerpt_id, text_anchor, content_len) = - self.editor.update(cx, |editor, cx| { - let buffer = editor.buffer().read(cx); - let snapshot = buffer.snapshot(cx); - let (excerpt_id, _, buffer_snapshot) = snapshot.as_singleton().unwrap(); - let text_anchor = insertion_target.bias_left(&buffer_snapshot); 
+ let (text_anchor, content_len) = self.editor.update(cx, |editor, cx| { + let buffer = editor.buffer().read(cx); + let snapshot = buffer.snapshot(cx); + let buffer_snapshot = snapshot.as_singleton().unwrap(); + let text_anchor = insertion_target.bias_left(&buffer_snapshot); - editor.insert(&mention_text, window, cx); - editor.insert(" ", window, cx); + editor.insert(&mention_text, window, cx); + editor.insert(" ", window, cx); - (excerpt_id, text_anchor, mention_text.len()) - }); + (text_anchor, mention_text.len()) + }); let Some((crease_id, tx)) = insert_crease_for_mention( - excerpt_id, text_anchor, content_len, crease_text.into(), @@ -1145,8 +1137,7 @@ impl MessageEditor { for (anchor, content_len, mention_uri) in all_mentions { let Some((crease_id, tx)) = insert_crease_for_mention( - anchor.excerpt_id, - anchor.text_anchor, + snapshot.anchor_to_buffer_anchor(anchor).unwrap().0, content_len, mention_uri.name().into(), mention_uri.icon_path(cx), @@ -1339,25 +1330,23 @@ impl MessageEditor { }; let mention_text = mention_uri.as_link().to_string(); - let (excerpt_id, text_anchor, content_len) = editor.update(cx, |editor, cx| { + let (text_anchor, content_len) = editor.update(cx, |editor, cx| { let buffer = editor.buffer().read(cx); let snapshot = buffer.snapshot(cx); - let (excerpt_id, _, buffer_snapshot) = snapshot.as_singleton().unwrap(); - let text_anchor = editor - .selections - .newest_anchor() - .start - .text_anchor + let buffer_snapshot = snapshot.as_singleton().unwrap(); + let text_anchor = snapshot + .anchor_to_buffer_anchor(editor.selections.newest_anchor().start) + .unwrap() + .0 .bias_left(&buffer_snapshot); editor.insert(&mention_text, window, cx); editor.insert(" ", window, cx); - (excerpt_id, text_anchor, mention_text.len()) + (text_anchor, mention_text.len()) }); let Some((crease_id, tx)) = insert_crease_for_mention( - excerpt_id, text_anchor, content_len, mention_uri.name().into(), @@ -1700,8 +1689,7 @@ impl MessageEditor { let adjusted_start = 
insertion_start + range.start; let anchor = snapshot.anchor_before(MultiBufferOffset(adjusted_start)); let Some((crease_id, tx)) = insert_crease_for_mention( - anchor.excerpt_id, - anchor.text_anchor, + snapshot.anchor_to_buffer_anchor(anchor).unwrap().0, range.end - range.start, mention_uri.name().into(), mention_uri.icon_path(cx), @@ -2077,23 +2065,13 @@ mod tests { cx.run_until_parked(); - let excerpt_id = editor.update(cx, |editor, cx| { - editor - .buffer() - .read(cx) - .excerpt_ids() - .into_iter() - .next() - .unwrap() - }); let completions = editor.update_in(cx, |editor, window, cx| { editor.set_text("Hello @file ", window, cx); let buffer = editor.buffer().read(cx).as_singleton().unwrap(); let completion_provider = editor.completion_provider().unwrap(); completion_provider.completions( - excerpt_id, &buffer, - text::Anchor::MAX, + text::Anchor::max_for_buffer(buffer.read(cx).remote_id()), CompletionContext { trigger_kind: CompletionTriggerKind::TRIGGER_CHARACTER, trigger_character: Some("@".into()), @@ -2114,7 +2092,7 @@ mod tests { editor.update_in(cx, |editor, window, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); let range = snapshot - .anchor_range_in_excerpt(excerpt_id, completion.replace_range) + .buffer_anchor_range_to_anchor_range(completion.replace_range) .unwrap(); editor.edit([(range, completion.new_text)], cx); (completion.confirm.unwrap())(CompletionIntent::Complete, window, cx); diff --git a/crates/agent_ui/src/terminal_codegen.rs b/crates/agent_ui/src/terminal_codegen.rs index e93d3d3991378ddb4156b264be1f0a5ab4d4faac..19adf2d880607091526e94bb0f8bfbb2d944dfa7 100644 --- a/crates/agent_ui/src/terminal_codegen.rs +++ b/crates/agent_ui/src/terminal_codegen.rs @@ -2,6 +2,9 @@ use crate::inline_prompt_editor::CodegenStatus; use futures::{SinkExt, StreamExt, channel::mpsc}; use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Task}; use language_model::{ConfiguredModel, LanguageModelRegistry, LanguageModelRequest}; +use 
language_models::provider::anthropic::telemetry::{ + AnthropicCompletionType, AnthropicEventData, AnthropicEventReporter, AnthropicEventType, +}; use std::time::Instant; use terminal::Terminal; use uuid::Uuid; @@ -40,7 +43,7 @@ impl TerminalCodegen { return; }; - let anthropic_reporter = language_model::AnthropicEventReporter::new(&model, cx); + let anthropic_reporter = AnthropicEventReporter::new(&model, cx); let session_id = self.session_id; let model_telemetry_id = model.telemetry_id(); let model_provider_id = model.provider_id().to_string(); @@ -94,9 +97,9 @@ impl TerminalCodegen { error_message = error_message, ); - anthropic_reporter.report(language_model::AnthropicEventData { - completion_type: language_model::AnthropicCompletionType::Terminal, - event: language_model::AnthropicEventType::Response, + anthropic_reporter.report(AnthropicEventData { + completion_type: AnthropicCompletionType::Terminal, + event: AnthropicEventType::Response, language_name: None, message_id, }); diff --git a/crates/agent_ui/src/terminal_inline_assistant.rs b/crates/agent_ui/src/terminal_inline_assistant.rs index d8bcabf276e76c4701894d2830af88171072fe49..89c1ec431386e548dc9188b46fe2f88ffef77668 100644 --- a/crates/agent_ui/src/terminal_inline_assistant.rs +++ b/crates/agent_ui/src/terminal_inline_assistant.rs @@ -17,7 +17,10 @@ use gpui::{App, Entity, Focusable, Global, Subscription, Task, UpdateGlobal, Wea use language::Buffer; use language_model::{ CompletionIntent, ConfiguredModel, LanguageModelRegistry, LanguageModelRequest, - LanguageModelRequestMessage, Role, report_anthropic_event, + LanguageModelRequestMessage, Role, +}; +use language_models::provider::anthropic::telemetry::{ + AnthropicCompletionType, AnthropicEventData, AnthropicEventType, report_anthropic_event, }; use project::Project; use prompt_store::{PromptBuilder, PromptStore}; @@ -312,13 +315,13 @@ impl TerminalInlineAssistant { ( "rejected", "Assistant Response Rejected", - 
language_model::AnthropicEventType::Reject, + AnthropicEventType::Reject, ) } else { ( "accepted", "Assistant Response Accepted", - language_model::AnthropicEventType::Accept, + AnthropicEventType::Accept, ) }; @@ -335,8 +338,8 @@ impl TerminalInlineAssistant { report_anthropic_event( &model, - language_model::AnthropicEventData { - completion_type: language_model::AnthropicCompletionType::Terminal, + AnthropicEventData { + completion_type: AnthropicCompletionType::Terminal, event: anthropic_event_type, language_name: None, message_id, diff --git a/crates/agent_ui/src/thread_metadata_store.rs b/crates/agent_ui/src/thread_metadata_store.rs index dfa60fa2551191ac022ea0394e03c08524546cb0..55c7f12d568a3d7089df112531f42655c36a21fb 100644 --- a/crates/agent_ui/src/thread_metadata_store.rs +++ b/crates/agent_ui/src/thread_metadata_store.rs @@ -58,7 +58,7 @@ fn migrate_thread_metadata(cx: &mut App) { .read(cx) .entries() .filter_map(|entry| { - if existing_entries.contains(&entry.id.0) || entry.folder_paths.is_empty() { + if existing_entries.contains(&entry.id.0) { return None; } @@ -84,6 +84,9 @@ fn migrate_thread_metadata(cx: &mut App) { if is_first_migration { let mut per_project: HashMap> = HashMap::default(); for entry in &mut to_migrate { + if entry.folder_paths.is_empty() { + continue; + } per_project .entry(entry.folder_paths.clone()) .or_default() @@ -330,6 +333,25 @@ impl ThreadMetadataStore { .log_err(); } + pub fn update_working_directories( + &mut self, + session_id: &acp::SessionId, + work_dirs: PathList, + cx: &mut Context, + ) { + if !cx.has_flag::() { + return; + } + + if let Some(thread) = self.threads.get(session_id) { + self.save_internal(ThreadMetadata { + folder_paths: work_dirs, + ..thread.clone() + }); + cx.notify(); + } + } + pub fn archive(&mut self, session_id: &acp::SessionId, cx: &mut Context) { self.update_archived(session_id, true, cx); } @@ -561,7 +583,13 @@ impl ThreadMetadataStore { PathList::new(&paths) }; - let archived = 
existing_thread.map(|t| t.archived).unwrap_or(false); + // Threads without a folder path (e.g. started in an empty + // window) are archived by default so they don't get lost, + // because they won't show up in the sidebar. Users can reload + // them from the archive. + let archived = existing_thread + .map(|t| t.archived) + .unwrap_or(folder_paths.is_empty()); let metadata = ThreadMetadata { session_id, @@ -1173,7 +1201,7 @@ mod tests { store.read(cx).entries().cloned().collect::>() }); - assert_eq!(list.len(), 3); + assert_eq!(list.len(), 4); assert!( list.iter() .all(|metadata| metadata.agent_id.as_ref() == agent::ZED_AGENT_ID.as_ref()) @@ -1192,17 +1220,12 @@ mod tests { .collect::>(); assert!(migrated_session_ids.contains(&"a-session-1")); assert!(migrated_session_ids.contains(&"b-session-0")); - assert!(!migrated_session_ids.contains(&"projectless")); + assert!(migrated_session_ids.contains(&"projectless")); let migrated_entries = list .iter() .filter(|metadata| metadata.session_id.0.as_ref() != "a-session-0") .collect::>(); - assert!( - migrated_entries - .iter() - .all(|metadata| !metadata.folder_paths.is_empty()) - ); assert!(migrated_entries.iter().all(|metadata| metadata.archived)); } @@ -1448,6 +1471,84 @@ mod tests { assert_eq!(metadata_ids, vec![session_id]); } + #[gpui::test] + async fn test_threads_without_project_association_are_archived_by_default( + cx: &mut TestAppContext, + ) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project_without_worktree = Project::test(fs.clone(), None::<&Path>, cx).await; + let project_with_worktree = Project::test(fs, [Path::new("/project-a")], cx).await; + let connection = Rc::new(StubAgentConnection::new()); + + let thread_without_worktree = cx + .update(|cx| { + connection.clone().new_session( + project_without_worktree.clone(), + PathList::default(), + cx, + ) + }) + .await + .unwrap(); + let session_without_worktree = + cx.read(|cx| thread_without_worktree.read(cx).session_id().clone()); + + 
cx.update(|cx| { + thread_without_worktree.update(cx, |thread, cx| { + thread.set_title("No Project Thread".into(), cx).detach(); + }); + }); + cx.run_until_parked(); + + let thread_with_worktree = cx + .update(|cx| { + connection.clone().new_session( + project_with_worktree.clone(), + PathList::default(), + cx, + ) + }) + .await + .unwrap(); + let session_with_worktree = + cx.read(|cx| thread_with_worktree.read(cx).session_id().clone()); + + cx.update(|cx| { + thread_with_worktree.update(cx, |thread, cx| { + thread.set_title("Project Thread".into(), cx).detach(); + }); + }); + cx.run_until_parked(); + + cx.update(|cx| { + let store = ThreadMetadataStore::global(cx); + let store = store.read(cx); + + let without_worktree = store + .entry(&session_without_worktree) + .expect("missing metadata for thread without project association"); + assert!(without_worktree.folder_paths.is_empty()); + assert!( + without_worktree.archived, + "expected thread without project association to be archived" + ); + + let with_worktree = store + .entry(&session_with_worktree) + .expect("missing metadata for thread with project association"); + assert_eq!( + with_worktree.folder_paths, + PathList::new(&[Path::new("/project-a")]) + ); + assert!( + !with_worktree.archived, + "expected thread with project association to remain unarchived" + ); + }); + } + #[gpui::test] async fn test_subagent_threads_excluded_from_sidebar_metadata(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/agent_ui/src/threads_archive_view.rs b/crates/agent_ui/src/threads_archive_view.rs index 74a93129d387e0aaac6e7092d9e086dd64e369f7..9aca31e1edbe729fccecfc0dd8f0530d2aed2564 100644 --- a/crates/agent_ui/src/threads_archive_view.rs +++ b/crates/agent_ui/src/threads_archive_view.rs @@ -1,3 +1,6 @@ +use std::collections::HashSet; +use std::sync::Arc; + use crate::agent_connection_store::AgentConnectionStore; use crate::thread_metadata_store::{ThreadMetadata, ThreadMetadataStore}; @@ -9,18 +12,31 @@ use 
agent_settings::AgentSettings; use chrono::{DateTime, Datelike as _, Local, NaiveDate, TimeDelta, Utc}; use editor::Editor; use fs::Fs; +use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ - AnyElement, App, Context, Entity, EventEmitter, FocusHandle, Focusable, ListState, Render, - SharedString, Subscription, Task, WeakEntity, Window, list, prelude::*, px, + AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, + ListState, Render, SharedString, Subscription, Task, WeakEntity, Window, list, prelude::*, px, }; use itertools::Itertools as _; use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious}; +use picker::{ + Picker, PickerDelegate, + highlighted_match_with_paths::{HighlightedMatch, HighlightedMatchWithPaths}, +}; use project::{AgentId, AgentServerStore}; use settings::Settings as _; use theme::ActiveTheme; use ui::ThreadItem; use ui::{ - Divider, KeyBinding, Tooltip, WithScrollbar, prelude::*, utils::platform_title_bar_height, + Divider, KeyBinding, ListItem, ListItemSpacing, ListSubHeader, Tooltip, WithScrollbar, + prelude::*, utils::platform_title_bar_height, +}; +use ui_input::ErasedEditor; +use util::ResultExt; +use util::paths::PathExt; +use workspace::{ + ModalView, PathList, SerializedWorkspaceLocation, Workspace, WorkspaceDb, WorkspaceId, + resolve_worktree_workspaces, }; use zed_actions::agents_sidebar::FocusSidebarFilter; @@ -110,12 +126,14 @@ pub struct ThreadsArchiveView { filter_editor: Entity, _subscriptions: Vec, _refresh_history_task: Task<()>, + workspace: WeakEntity, agent_connection_store: WeakEntity, agent_server_store: WeakEntity, } impl ThreadsArchiveView { pub fn new( + workspace: WeakEntity, agent_connection_store: WeakEntity, agent_server_store: WeakEntity, window: &mut Window, @@ -176,6 +194,7 @@ impl ThreadsArchiveView { thread_metadata_store_subscription, ], _refresh_history_task: Task::ready(()), + workspace, agent_connection_store, agent_server_store, }; @@ -254,7 +273,14 @@ 
impl ThreadsArchiveView { self.list_state.reset(items.len()); self.items = items; - self.hovered_index = None; + + if !preserve { + self.hovered_index = None; + } else if let Some(ix) = self.hovered_index { + if ix >= self.items.len() || !self.is_selectable_item(ix) { + self.hovered_index = None; + } + } if let Some(scroll_top) = saved_scroll { self.list_state.scroll_to(scroll_top); @@ -288,11 +314,57 @@ impl ThreadsArchiveView { window: &mut Window, cx: &mut Context, ) { + if thread.folder_paths.is_empty() { + self.show_project_picker_for_thread(thread, window, cx); + return; + } + self.selection = None; self.reset_filter_editor_text(window, cx); cx.emit(ThreadsArchiveViewEvent::Unarchive { thread }); } + fn show_project_picker_for_thread( + &mut self, + thread: ThreadMetadata, + window: &mut Window, + cx: &mut Context, + ) { + let Some(workspace) = self.workspace.upgrade() else { + return; + }; + + let archive_view = cx.weak_entity(); + let fs = workspace.read(cx).app_state().fs.clone(); + let current_workspace_id = workspace.read(cx).database_id(); + let sibling_workspace_ids: HashSet = workspace + .read(cx) + .multi_workspace() + .and_then(|mw| mw.upgrade()) + .map(|mw| { + mw.read(cx) + .workspaces() + .iter() + .filter_map(|ws| ws.read(cx).database_id()) + .collect() + }) + .unwrap_or_default(); + + workspace.update(cx, |workspace, cx| { + workspace.toggle_modal(window, cx, |window, cx| { + ProjectPickerModal::new( + thread, + fs, + archive_view, + current_workspace_id, + sibling_workspace_ids, + window, + cx, + ) + }); + }); + } + fn is_selectable_item(&self, ix: usize) -> bool { matches!(self.items.get(ix), Some(ArchiveListItem::Entry { .. 
})) } @@ -380,10 +452,6 @@ impl ThreadsArchiveView { return; }; - if thread.folder_paths.is_empty() { - return; - } - self.unarchive_thread(thread.clone(), window, cx); } @@ -471,6 +539,7 @@ impl ThreadsArchiveView { let agent = thread.agent_id.clone(); let session_id = thread.session_id.clone(); cx.listener(move |this, _, _, cx| { + this.preserve_selection_on_next_update = true; this.delete_thread(session_id.clone(), agent.clone(), cx); cx.stop_propagation(); }) @@ -683,3 +752,534 @@ impl Render for ThreadsArchiveView { .child(content) } } + +struct ProjectPickerModal { + picker: Entity>, + _subscription: Subscription, +} + +impl ProjectPickerModal { + fn new( + thread: ThreadMetadata, + fs: Arc, + archive_view: WeakEntity, + current_workspace_id: Option, + sibling_workspace_ids: HashSet, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let delegate = ProjectPickerDelegate { + thread, + archive_view, + workspaces: Vec::new(), + filtered_entries: Vec::new(), + selected_index: 0, + current_workspace_id, + sibling_workspace_ids, + focus_handle: cx.focus_handle(), + }; + + let picker = cx.new(|cx| { + Picker::list(delegate, window, cx) + .list_measure_all() + .modal(false) + }); + + let picker_focus_handle = picker.focus_handle(cx); + picker.update(cx, |picker, _| { + picker.delegate.focus_handle = picker_focus_handle; + }); + + let _subscription = + cx.subscribe(&picker, |_this: &mut Self, _, _event: &DismissEvent, cx| { + cx.emit(DismissEvent); + }); + + let db = WorkspaceDb::global(cx); + cx.spawn_in(window, async move |this, cx| { + let workspaces = db + .recent_workspaces_on_disk(fs.as_ref()) + .await + .log_err() + .unwrap_or_default(); + let workspaces = resolve_worktree_workspaces(workspaces, fs.as_ref()).await; + this.update_in(cx, move |this, window, cx| { + this.picker.update(cx, move |picker, cx| { + picker.delegate.workspaces = workspaces; + picker.update_matches(picker.query(cx), window, cx) + }) + }) + .ok(); + }) + .detach(); + + 
picker.focus_handle(cx).focus(window, cx); + + Self { + picker, + _subscription, + } + } +} + +impl EventEmitter for ProjectPickerModal {} + +impl Focusable for ProjectPickerModal { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.picker.focus_handle(cx) + } +} + +impl ModalView for ProjectPickerModal {} + +impl Render for ProjectPickerModal { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + v_flex() + .key_context("ProjectPickerModal") + .elevation_3(cx) + .w(rems(34.)) + .on_action(cx.listener(|this, _: &workspace::Open, window, cx| { + this.picker.update(cx, |picker, cx| { + picker.delegate.open_local_folder(window, cx) + }) + })) + .child(self.picker.clone()) + } +} + +enum ProjectPickerEntry { + Header(SharedString), + Workspace(StringMatch), +} + +struct ProjectPickerDelegate { + thread: ThreadMetadata, + archive_view: WeakEntity, + current_workspace_id: Option, + sibling_workspace_ids: HashSet, + workspaces: Vec<( + WorkspaceId, + SerializedWorkspaceLocation, + PathList, + DateTime, + )>, + filtered_entries: Vec, + selected_index: usize, + focus_handle: FocusHandle, +} + +impl ProjectPickerDelegate { + fn update_working_directories_and_unarchive( + &mut self, + paths: PathList, + window: &mut Window, + cx: &mut Context>, + ) { + self.thread.folder_paths = paths.clone(); + ThreadMetadataStore::global(cx).update(cx, |store, cx| { + store.update_working_directories(&self.thread.session_id, paths, cx); + }); + + self.archive_view + .update(cx, |view, cx| { + view.selection = None; + view.reset_filter_editor_text(window, cx); + cx.emit(ThreadsArchiveViewEvent::Unarchive { + thread: self.thread.clone(), + }); + }) + .log_err(); + } + + fn is_current_workspace(&self, workspace_id: WorkspaceId) -> bool { + self.current_workspace_id == Some(workspace_id) + } + + fn is_sibling_workspace(&self, workspace_id: WorkspaceId) -> bool { + self.sibling_workspace_ids.contains(&workspace_id) + && 
!self.is_current_workspace(workspace_id) + } + + fn selected_match(&self) -> Option<&StringMatch> { + match self.filtered_entries.get(self.selected_index)? { + ProjectPickerEntry::Workspace(hit) => Some(hit), + ProjectPickerEntry::Header(_) => None, + } + } + + fn open_local_folder(&mut self, window: &mut Window, cx: &mut Context>) { + let paths_receiver = cx.prompt_for_paths(gpui::PathPromptOptions { + files: false, + directories: true, + multiple: false, + prompt: None, + }); + cx.spawn_in(window, async move |this, cx| { + let Ok(Ok(Some(paths))) = paths_receiver.await else { + return; + }; + if paths.is_empty() { + return; + } + + let work_dirs = PathList::new(&paths); + + this.update_in(cx, |this, window, cx| { + this.delegate + .update_working_directories_and_unarchive(work_dirs, window, cx); + cx.emit(DismissEvent); + }) + .log_err(); + }) + .detach(); + } +} + +impl EventEmitter for ProjectPickerDelegate {} + +impl PickerDelegate for ProjectPickerDelegate { + type ListItem = AnyElement; + + fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { + format!("Associate the \"{}\" thread with...", self.thread.title).into() + } + + fn render_editor( + &self, + editor: &Arc, + window: &mut Window, + cx: &mut Context>, + ) -> Div { + h_flex() + .flex_none() + .h_9() + .px_2p5() + .justify_between() + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .child(editor.render(window, cx)) + } + + fn match_count(&self) -> usize { + self.filtered_entries.len() + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index( + &mut self, + ix: usize, + _window: &mut Window, + _cx: &mut Context>, + ) { + self.selected_index = ix; + } + + fn can_select(&self, ix: usize, _window: &mut Window, _cx: &mut Context>) -> bool { + matches!( + self.filtered_entries.get(ix), + Some(ProjectPickerEntry::Workspace(_)) + ) + } + + fn update_matches( + &mut self, + query: String, + _window: &mut Window, + cx: &mut Context>, 
+ ) -> Task<()> { + let query = query.trim_start(); + let smart_case = query.chars().any(|c| c.is_uppercase()); + let is_empty_query = query.is_empty(); + + let sibling_candidates: Vec<_> = self + .workspaces + .iter() + .enumerate() + .filter(|(_, (id, _, _, _))| self.is_sibling_workspace(*id)) + .map(|(id, (_, _, paths, _))| { + let combined_string = paths + .ordered_paths() + .map(|path| path.compact().to_string_lossy().into_owned()) + .collect::>() + .join(""); + StringMatchCandidate::new(id, &combined_string) + }) + .collect(); + + let mut sibling_matches = smol::block_on(fuzzy::match_strings( + &sibling_candidates, + query, + smart_case, + true, + 100, + &Default::default(), + cx.background_executor().clone(), + )); + + sibling_matches.sort_unstable_by(|a, b| { + b.score + .partial_cmp(&a.score) + .unwrap_or(std::cmp::Ordering::Equal) + .then_with(|| a.candidate_id.cmp(&b.candidate_id)) + }); + + let recent_candidates: Vec<_> = self + .workspaces + .iter() + .enumerate() + .filter(|(_, (id, _, _, _))| { + !self.is_current_workspace(*id) && !self.is_sibling_workspace(*id) + }) + .map(|(id, (_, _, paths, _))| { + let combined_string = paths + .ordered_paths() + .map(|path| path.compact().to_string_lossy().into_owned()) + .collect::>() + .join(""); + StringMatchCandidate::new(id, &combined_string) + }) + .collect(); + + let mut recent_matches = smol::block_on(fuzzy::match_strings( + &recent_candidates, + query, + smart_case, + true, + 100, + &Default::default(), + cx.background_executor().clone(), + )); + + recent_matches.sort_unstable_by(|a, b| { + b.score + .partial_cmp(&a.score) + .unwrap_or(std::cmp::Ordering::Equal) + .then_with(|| a.candidate_id.cmp(&b.candidate_id)) + }); + + let mut entries = Vec::new(); + + let has_siblings_to_show = if is_empty_query { + !sibling_candidates.is_empty() + } else { + !sibling_matches.is_empty() + }; + + if has_siblings_to_show { + entries.push(ProjectPickerEntry::Header("This Window".into())); + + if is_empty_query { + 
for (id, (workspace_id, _, _, _)) in self.workspaces.iter().enumerate() { + if self.is_sibling_workspace(*workspace_id) { + entries.push(ProjectPickerEntry::Workspace(StringMatch { + candidate_id: id, + score: 0.0, + positions: Vec::new(), + string: String::new(), + })); + } + } + } else { + for m in sibling_matches { + entries.push(ProjectPickerEntry::Workspace(m)); + } + } + } + + let has_recent_to_show = if is_empty_query { + !recent_candidates.is_empty() + } else { + !recent_matches.is_empty() + }; + + if has_recent_to_show { + entries.push(ProjectPickerEntry::Header("Recent Projects".into())); + + if is_empty_query { + for (id, (workspace_id, _, _, _)) in self.workspaces.iter().enumerate() { + if !self.is_current_workspace(*workspace_id) + && !self.is_sibling_workspace(*workspace_id) + { + entries.push(ProjectPickerEntry::Workspace(StringMatch { + candidate_id: id, + score: 0.0, + positions: Vec::new(), + string: String::new(), + })); + } + } + } else { + for m in recent_matches { + entries.push(ProjectPickerEntry::Workspace(m)); + } + } + } + + self.filtered_entries = entries; + + self.selected_index = self + .filtered_entries + .iter() + .position(|e| matches!(e, ProjectPickerEntry::Workspace(_))) + .unwrap_or(0); + + Task::ready(()) + } + + fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context>) { + let candidate_id = match self.filtered_entries.get(self.selected_index) { + Some(ProjectPickerEntry::Workspace(hit)) => hit.candidate_id, + _ => return, + }; + let Some((_workspace_id, _location, paths, _)) = self.workspaces.get(candidate_id) else { + return; + }; + + self.update_working_directories_and_unarchive(paths.clone(), window, cx); + cx.emit(DismissEvent); + } + + fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context>) {} + + fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option { + let text = if self.workspaces.is_empty() { + "No recent projects found" + } else { + "No matches" + }; + 
Some(text.into()) + } + + fn render_match( + &self, + ix: usize, + selected: bool, + window: &mut Window, + cx: &mut Context>, + ) -> Option { + match self.filtered_entries.get(ix)? { + ProjectPickerEntry::Header(title) => Some( + v_flex() + .w_full() + .gap_1() + .when(ix > 0, |this| this.mt_1().child(Divider::horizontal())) + .child(ListSubHeader::new(title.clone()).inset(true)) + .into_any_element(), + ), + ProjectPickerEntry::Workspace(hit) => { + let (_, location, paths, _) = self.workspaces.get(hit.candidate_id)?; + + let ordered_paths: Vec<_> = paths + .ordered_paths() + .map(|p| p.compact().to_string_lossy().to_string()) + .collect(); + + let tooltip_path: SharedString = ordered_paths.join("\n").into(); + + let mut path_start_offset = 0; + let match_labels: Vec<_> = paths + .ordered_paths() + .map(|p| p.compact()) + .map(|path| { + let path_string = path.to_string_lossy(); + let path_text = path_string.to_string(); + let path_byte_len = path_text.len(); + + let path_positions: Vec = hit + .positions + .iter() + .copied() + .skip_while(|pos| *pos < path_start_offset) + .take_while(|pos| *pos < path_start_offset + path_byte_len) + .map(|pos| pos - path_start_offset) + .collect(); + + let file_name_match = path.file_name().map(|file_name| { + let file_name_text = file_name.to_string_lossy().into_owned(); + let file_name_start = path_byte_len - file_name_text.len(); + let highlight_positions: Vec = path_positions + .iter() + .copied() + .skip_while(|pos| *pos < file_name_start) + .take_while(|pos| *pos < file_name_start + file_name_text.len()) + .map(|pos| pos - file_name_start) + .collect(); + HighlightedMatch { + text: file_name_text, + highlight_positions, + color: Color::Default, + } + }); + + path_start_offset += path_byte_len; + file_name_match + }) + .collect(); + + let highlighted_match = HighlightedMatchWithPaths { + prefix: match location { + SerializedWorkspaceLocation::Remote(options) => { + Some(SharedString::from(options.display_name())) + } + _ 
=> None, + }, + match_label: HighlightedMatch::join(match_labels.into_iter().flatten(), ", "), + paths: Vec::new(), + }; + + Some( + ListItem::new(ix) + .toggle_state(selected) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .child( + h_flex() + .gap_3() + .flex_grow() + .child(highlighted_match.render(window, cx)), + ) + .tooltip(Tooltip::text(tooltip_path)) + .into_any_element(), + ) + } + } + } + + fn render_footer(&self, _: &mut Window, cx: &mut Context>) -> Option { + let has_selection = self.selected_match().is_some(); + let focus_handle = self.focus_handle.clone(); + + Some( + h_flex() + .flex_1() + .p_1p5() + .gap_1() + .justify_end() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .child( + Button::new("open_local_folder", "Choose from Local Folders") + .key_binding(KeyBinding::for_action_in( + &workspace::Open::default(), + &focus_handle, + cx, + )) + .on_click(cx.listener(|this, _, window, cx| { + this.delegate.open_local_folder(window, cx); + })), + ) + .child( + Button::new("select_project", "Select") + .disabled(!has_selection) + .key_binding(KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx)) + .on_click(cx.listener(move |picker, _, window, cx| { + picker.delegate.confirm(false, window, cx); + })), + ) + .into_any(), + ) + } +} diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index 1cb1e801c2cd68d442321da76c0abb848f9fa0d8..c168bd2956e0687eca5e5adeb16edbe70e9edd54 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -171,9 +171,9 @@ impl sum_tree::Item for PendingHunk { impl sum_tree::Summary for DiffHunkSummary { type Context<'a> = &'a text::BufferSnapshot; - fn zero(_cx: Self::Context<'_>) -> Self { + fn zero(buffer: &text::BufferSnapshot) -> Self { DiffHunkSummary { - buffer_range: Anchor::MIN..Anchor::MIN, + buffer_range: Anchor::min_min_range_for_buffer(buffer.remote_id()), diff_base_byte_range: 0..0, added_rows: 0, 
removed_rows: 0, @@ -248,6 +248,10 @@ impl BufferDiffSnapshot { buffer_diff.update(cx, |buffer_diff, cx| buffer_diff.snapshot(cx)) } + pub fn buffer_id(&self) -> BufferId { + self.inner.buffer_snapshot.remote_id() + } + pub fn is_empty(&self) -> bool { self.inner.hunks.is_empty() } @@ -953,7 +957,7 @@ impl BufferDiffInner { .flat_map(move |hunk| { [ ( - &hunk.buffer_range.start, + hunk.buffer_range.start, ( hunk.buffer_range.start, hunk.diff_base_byte_range.start, @@ -961,7 +965,7 @@ impl BufferDiffInner { ), ), ( - &hunk.buffer_range.end, + hunk.buffer_range.end, (hunk.buffer_range.end, hunk.diff_base_byte_range.end, hunk), ), ] @@ -1653,7 +1657,7 @@ impl BufferDiff { ) { let hunks = self .snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer) + .hunks_intersecting_range(Anchor::min_max_range_for_buffer(buffer.remote_id()), buffer) .collect::>(); let Some(secondary) = self.secondary_diff.clone() else { return; diff --git a/crates/call/src/call_impl/room.rs b/crates/call/src/call_impl/room.rs index f92a8163d54de0c21c7318c4baab5aad5ce49b75..37a3fd823ec03d3b1d94419ac47662431d718708 100644 --- a/crates/call/src/call_impl/room.rs +++ b/crates/call/src/call_impl/room.rs @@ -21,7 +21,7 @@ use language::LanguageRegistry; use livekit::{LocalTrackPublication, ParticipantIdentity, RoomEvent}; use livekit_client::{self as livekit, AudioStream, TrackSid}; use postage::{sink::Sink, stream::Stream, watch}; -use project::Project; +use project::{CURRENT_PROJECT_FEATURES, Project}; use settings::Settings as _; use std::sync::atomic::AtomicU64; use std::{future::Future, mem, rc::Rc, sync::Arc, time::Duration, time::Instant}; @@ -1237,6 +1237,10 @@ impl Room { worktrees: project.read(cx).worktree_metadata_protos(cx), is_ssh_project: project.read(cx).is_via_remote_server(), windows_paths: Some(project.read(cx).path_style(cx) == PathStyle::Windows), + features: CURRENT_PROJECT_FEATURES + .iter() + .map(|s| s.to_string()) + .collect(), }); cx.spawn(async move |this, 
cx| { diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index f40d90a983978e8928477b5a2973dfa05e05b907..6a11a6b924eed3dfd79ff379638ed4085e2b7bcb 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -2141,11 +2141,13 @@ mod tests { project_id: 1, committer_name: None, committer_email: None, + features: Vec::new(), }); server.send(proto::JoinProject { project_id: 2, committer_name: None, committer_email: None, + features: Vec::new(), }); done_rx1.recv().await.unwrap(); done_rx2.recv().await.unwrap(); diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 75d7dbf194068f78b3d566e54bb0fa18f66a9878..2a87d617ebb19117ca87c00cc0887b07e416c8bd 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -48,7 +48,8 @@ CREATE TABLE "projects" ( "host_connection_id" INTEGER, "host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE, "unregistered" BOOLEAN NOT NULL DEFAULT FALSE, - "windows_paths" BOOLEAN NOT NULL DEFAULT FALSE + "windows_paths" BOOLEAN NOT NULL DEFAULT FALSE, + "features" TEXT NOT NULL DEFAULT '' ); CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id"); diff --git a/crates/collab/migrations/20251208000000_test_schema.sql b/crates/collab/migrations/20251208000000_test_schema.sql index 394deaf2c0d6a80a2ab6ab1b95a333081c816e23..8a56b9ce982f9a39a14bfc55fe8a34870ddea1c6 100644 --- a/crates/collab/migrations/20251208000000_test_schema.sql +++ b/crates/collab/migrations/20251208000000_test_schema.sql @@ -332,7 +332,8 @@ CREATE TABLE public.projects ( room_id integer, host_connection_id integer, host_connection_server_id integer, - windows_paths boolean DEFAULT false + windows_paths boolean DEFAULT false, + features text NOT NULL DEFAULT '' ); CREATE SEQUENCE 
public.projects_id_seq diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index d8803c253f5feef8ef5e040f3ea112abcc688f52..3e4c36631b29d35871cac101542bcc6904fbb271 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -589,6 +589,7 @@ pub struct Project { pub repositories: Vec, pub language_servers: Vec, pub path_style: PathStyle, + pub features: Vec, } pub struct ProjectCollaborator { diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 71365fb3846c1dccbf527d76779ed8816bde243b..3fc59f96332180d7d7bca4b6f71a345d9699e9e2 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -34,6 +34,7 @@ impl Database { worktrees: &[proto::WorktreeMetadata], is_ssh_project: bool, windows_paths: bool, + features: &[String], ) -> Result> { self.room_transaction(room_id, |tx| async move { let participant = room_participant::Entity::find() @@ -71,6 +72,7 @@ impl Database { ))), id: ActiveValue::NotSet, windows_paths: ActiveValue::set(windows_paths), + features: ActiveValue::set(serde_json::to_string(features).unwrap()), } .insert(&*tx) .await?; @@ -948,6 +950,7 @@ impl Database { } else { PathStyle::Posix }; + let features: Vec = serde_json::from_str(&project.features).unwrap_or_default(); let project = Project { id: project.id, @@ -977,6 +980,7 @@ impl Database { }) .collect(), path_style, + features, }; Ok((project, replica_id as ReplicaId)) } diff --git a/crates/collab/src/db/tables/project.rs b/crates/collab/src/db/tables/project.rs index 11a9b972ebcd7af29d6e6c234096384ce9ff7701..76d399cfc6445ca7c2516cc4cd76e885230868af 100644 --- a/crates/collab/src/db/tables/project.rs +++ b/crates/collab/src/db/tables/project.rs @@ -13,6 +13,7 @@ pub struct Model { pub host_connection_id: Option, pub host_connection_server_id: Option, pub windows_paths: bool, + pub features: String, } impl Model { diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 
3c4efe0580c18c938f8245de9f40bf216bab9c81..e05df1909db1e8afed0c06425d84799ff985f3c5 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -1775,6 +1775,7 @@ async fn share_project( &request.worktrees, request.is_ssh_project, request.windows_paths.unwrap_or(false), + &request.features, ) .await?; response.send(proto::ShareProjectResponse { @@ -1840,6 +1841,28 @@ async fn join_project( tracing::info!(%project_id, "join project"); let db = session.db().await; + let project_model = db.get_project(project_id).await?; + let host_features: Vec = + serde_json::from_str(&project_model.features).unwrap_or_default(); + let guest_features: HashSet<_> = request.features.iter().collect(); + let host_features_set: HashSet<_> = host_features.iter().collect(); + if guest_features != host_features_set { + let host_connection_id = project_model.host_connection()?; + let mut pool = session.connection_pool().await; + let host_version = pool + .connection(host_connection_id) + .map(|c| c.zed_version.to_string()); + let guest_version = pool + .connection(session.connection_id) + .map(|c| c.zed_version.to_string()); + drop(pool); + Err(anyhow!( + "The host (v{}) and guest (v{}) are using incompatible versions of Zed. 
The peer with the older version must update to collaborate.", + host_version.as_deref().unwrap_or("unknown"), + guest_version.as_deref().unwrap_or("unknown"), + ))?; + } + let (project, replica_id) = &mut *db .join_project( project_id, @@ -1850,6 +1873,7 @@ async fn join_project( ) .await?; drop(db); + tracing::info!(%project_id, "join remote project"); let collaborators = project .collaborators @@ -1909,6 +1933,7 @@ async fn join_project( language_server_capabilities, role: project.role.into(), windows_paths: project.path_style == PathStyle::Windows, + features: project.features.clone(), })?; for (worktree_id, worktree) in mem::take(&mut project.worktrees) { diff --git a/crates/collab/tests/integration/channel_buffer_tests.rs b/crates/collab/tests/integration/channel_buffer_tests.rs index a5aca7dd82ca23b1c348bea1fff5d2da2870c654..dd8ae9a2c02cfae6c6c7e8c369308c5092be113e 100644 --- a/crates/collab/tests/integration/channel_buffer_tests.rs +++ b/crates/collab/tests/integration/channel_buffer_tests.rs @@ -313,7 +313,7 @@ fn assert_remote_selections( let snapshot = editor.snapshot(window, cx); let hub = editor.collaboration_hub().unwrap(); let collaborators = hub.collaborators(cx); - let range = Anchor::min()..Anchor::max(); + let range = Anchor::Min..Anchor::Max; let remote_selections = snapshot .remote_selections_in_range(&range, hub, cx) .map(|s| { diff --git a/crates/collab/tests/integration/db_tests/db_tests.rs b/crates/collab/tests/integration/db_tests/db_tests.rs index e2006b7fb9984c4bd0cf16a62e9321b2f7007e9e..710f95dbf7d82e05a541b844b093a04ca88565f7 100644 --- a/crates/collab/tests/integration/db_tests/db_tests.rs +++ b/crates/collab/tests/integration/db_tests/db_tests.rs @@ -350,20 +350,41 @@ async fn test_project_count(db: &Arc) { .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0); - db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, false) - .await - .unwrap(); + db.share_project( + room_id, + ConnectionId { 
owner_id, id: 1 }, + &[], + false, + false, + &[], + ) + .await + .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1); - db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, false) - .await - .unwrap(); + db.share_project( + room_id, + ConnectionId { owner_id, id: 1 }, + &[], + false, + false, + &[], + ) + .await + .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); // Projects shared by admins aren't counted. - db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false, false) - .await - .unwrap(); + db.share_project( + room_id, + ConnectionId { owner_id, id: 0 }, + &[], + false, + false, + &[], + ) + .await + .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); db.leave_room(ConnectionId { owner_id, id: 1 }) diff --git a/crates/collab/tests/integration/following_tests.rs b/crates/collab/tests/integration/following_tests.rs index c4031788c87f747c3125f4dbc509d68ea3720b43..7109b0f31452d2573426aa2300e7967b8f5a6601 100644 --- a/crates/collab/tests/integration/following_tests.rs +++ b/crates/collab/tests/integration/following_tests.rs @@ -2184,6 +2184,7 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut ); mb }); + let multibuffer_snapshot = multibuffer.update(cx_a, |mb, cx| mb.snapshot(cx)); let snapshot = buffer.update(cx_a, |buffer, _| buffer.snapshot()); let editor: Entity = cx_a.new_window_entity(|window, cx| { Editor::for_multibuffer( @@ -2205,7 +2206,13 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut editor .selections .disjoint_anchor_ranges() - .map(|range| range.start.text_anchor.to_point(&snapshot)) + .map(|range| { + multibuffer_snapshot + .anchor_to_buffer_anchor(range.start) + .unwrap() + .0 + .to_point(&snapshot) + }) .collect::>() }); multibuffer.update(cx_a, |multibuffer, cx| { @@ -2232,7 +2239,13 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut 
editor .selections .disjoint_anchor_ranges() - .map(|range| range.start.text_anchor.to_point(&snapshot)) + .map(|range| { + multibuffer_snapshot + .anchor_to_buffer_anchor(range.start) + .unwrap() + .0 + .to_point(&snapshot) + }) .collect::>() }); assert_eq!(positions, new_positions); diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 6a53e590586ec2353feafe267501619e8bbfcc71..91385b298dc661c4a79e4fb52d5be0f38672bff5 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -1166,7 +1166,7 @@ impl CollabPanel { "Failed to join project", window, cx, - |_, _, _| None, + |error, _, _| Some(format!("{error:#}")), ); }) .ok(); @@ -1729,7 +1729,7 @@ impl CollabPanel { "Failed to join project", window, cx, - |_, _, _| None, + |error, _, _| Some(format!("{error:#}")), ); } } diff --git a/crates/csv_preview/src/csv_preview.rs b/crates/csv_preview/src/csv_preview.rs index b0b6ad4186758fd33693d5ee29bd2f0d4d28b816..1b99139b004a940dfa0902e185f67fb4b77ed6a1 100644 --- a/crates/csv_preview/src/csv_preview.rs +++ b/crates/csv_preview/src/csv_preview.rs @@ -9,7 +9,10 @@ use std::{ }; use crate::table_data_engine::TableDataEngine; -use ui::{SharedString, TableColumnWidths, TableInteractionState, prelude::*}; +use ui::{ + AbsoluteLength, DefiniteLength, RedistributableColumnsState, SharedString, + TableInteractionState, TableResizeBehavior, prelude::*, +}; use workspace::{Item, SplitDirection, Workspace}; use crate::{parser::EditorState, settings::CsvPreviewSettings, types::TableLikeContent}; @@ -52,6 +55,32 @@ pub fn init(cx: &mut App) { } impl CsvPreviewView { + pub(crate) fn sync_column_widths(&self, cx: &mut Context) { + // plus 1 for the rows column + let cols = self.engine.contents.headers.cols() + 1; + let remaining_col_number = cols.saturating_sub(1); + let fraction = if remaining_col_number > 0 { + 1. / remaining_col_number as f32 + } else { + 1. 
+ }; + let mut widths = vec![DefiniteLength::Fraction(fraction); cols]; + let line_number_width = self.calculate_row_identifier_column_width(); + widths[0] = DefiniteLength::Absolute(AbsoluteLength::Pixels(line_number_width.into())); + + let mut resize_behaviors = vec![TableResizeBehavior::Resizable; cols]; + resize_behaviors[0] = TableResizeBehavior::None; + + self.column_widths.widths.update(cx, |state, _cx| { + if state.cols() != cols + || state.initial_widths().as_slice() != widths.as_slice() + || state.resize_behavior().as_slice() != resize_behaviors.as_slice() + { + *state = RedistributableColumnsState::new(cols, widths, resize_behaviors); + } + }); + } + pub fn register(workspace: &mut Workspace) { workspace.register_action_renderer(|div, _, _, cx| { div.when(cx.has_flag::(), |div| { @@ -132,9 +161,7 @@ impl CsvPreviewView { editor, |this: &mut CsvPreviewView, _editor, event: &EditorEvent, cx| { match event { - EditorEvent::Edited { .. } - | EditorEvent::DirtyChanged - | EditorEvent::ExcerptsEdited { .. } => { + EditorEvent::Edited { .. } | EditorEvent::DirtyChanged => { this.parse_csv_from_active_editor(true, cx); } _ => {} @@ -286,18 +313,19 @@ impl PerformanceMetrics { /// Holds state of column widths for a table component in CSV preview. pub(crate) struct ColumnWidths { - pub widths: Entity, + pub widths: Entity, } impl ColumnWidths { pub(crate) fn new(cx: &mut Context, cols: usize) -> Self { Self { - widths: cx.new(|cx| TableColumnWidths::new(cols, cx)), + widths: cx.new(|_cx| { + RedistributableColumnsState::new( + cols, + vec![ui::DefiniteLength::Fraction(1.0 / cols as f32); cols], + vec![ui::TableResizeBehavior::Resizable; cols], + ) + }), } } - /// Replace the current `TableColumnWidths` entity with a new one for the given column count. 
- pub(crate) fn replace(&self, cx: &mut Context, cols: usize) { - self.widths - .update(cx, |entity, cx| *entity = TableColumnWidths::new(cols, cx)); - } } diff --git a/crates/csv_preview/src/parser.rs b/crates/csv_preview/src/parser.rs index b087404e0ebbd13cdaf20cab692f5470ea6ce292..efa3573d7aa53d97e2801ff00feb4665072830f4 100644 --- a/crates/csv_preview/src/parser.rs +++ b/crates/csv_preview/src/parser.rs @@ -80,11 +80,8 @@ impl CsvPreviewView { .insert("Parsing", (parse_duration, Instant::now())); log::debug!("Parsed {} rows", parsed_csv.rows.len()); - // Update table width so it can be rendered properly - let cols = parsed_csv.headers.cols(); - view.column_widths.replace(cx, cols + 1); // Add 1 for the line number column - view.engine.contents = parsed_csv; + view.sync_column_widths(cx); view.last_parse_end_time = Some(parse_end_time); view.apply_filter_sort(); diff --git a/crates/csv_preview/src/renderer/render_table.rs b/crates/csv_preview/src/renderer/render_table.rs index 0cc3bc3c46fb24570b3c99c9121dff3860c6b820..fb3d7e5fc603ba5b109319cfb19466dc3ad7652f 100644 --- a/crates/csv_preview/src/renderer/render_table.rs +++ b/crates/csv_preview/src/renderer/render_table.rs @@ -1,11 +1,9 @@ use crate::types::TableCell; use gpui::{AnyElement, Entity}; use std::ops::Range; -use ui::Table; -use ui::TableColumnWidths; -use ui::TableResizeBehavior; -use ui::UncheckedTableRow; -use ui::{DefiniteLength, div, prelude::*}; +use ui::{ + ColumnWidthConfig, RedistributableColumnsState, Table, UncheckedTableRow, div, prelude::*, +}; use crate::{ CsvPreviewView, @@ -15,44 +13,22 @@ use crate::{ impl CsvPreviewView { /// Creates a new table. - /// Column number is derived from the `TableColumnWidths` entity. + /// Column number is derived from the `RedistributableColumnsState` entity. 
pub(crate) fn create_table( &self, - current_widths: &Entity, + current_widths: &Entity, cx: &mut Context, ) -> AnyElement { - let cols = current_widths.read(cx).cols(); - let remaining_col_number = cols - 1; - let fraction = if remaining_col_number > 0 { - 1. / remaining_col_number as f32 - } else { - 1. // only column with line numbers is present. Put 100%, but it will be overwritten anyways :D - }; - let mut widths = vec![DefiniteLength::Fraction(fraction); cols]; - let line_number_width = self.calculate_row_identifier_column_width(); - widths[0] = DefiniteLength::Absolute(AbsoluteLength::Pixels(line_number_width.into())); - - let mut resize_behaviors = vec![TableResizeBehavior::Resizable; cols]; - resize_behaviors[0] = TableResizeBehavior::None; - - self.create_table_inner( - self.engine.contents.rows.len(), - widths, - resize_behaviors, - current_widths, - cx, - ) + self.create_table_inner(self.engine.contents.rows.len(), current_widths, cx) } fn create_table_inner( &self, row_count: usize, - widths: UncheckedTableRow, - resize_behaviors: UncheckedTableRow, - current_widths: &Entity, + current_widths: &Entity, cx: &mut Context, ) -> AnyElement { - let cols = widths.len(); + let cols = current_widths.read(cx).cols(); // Create headers array with interactive elements let mut headers = Vec::with_capacity(cols); @@ -78,8 +54,7 @@ impl CsvPreviewView { Table::new(cols) .interactable(&self.table_interaction_state) .striped() - .column_widths(widths) - .resizable_columns(resize_behaviors, current_widths, cx) + .width_config(ColumnWidthConfig::redistributable(current_widths.clone())) .header(headers) .disable_base_style() .map(|table| { diff --git a/crates/csv_preview/src/renderer/row_identifiers.rs b/crates/csv_preview/src/renderer/row_identifiers.rs index a122aa9bf3d803b9deb9c6211e117ba4aa593d93..fc8bf68845fd41917e7d60bf5f9276295534c902 100644 --- a/crates/csv_preview/src/renderer/row_identifiers.rs +++ b/crates/csv_preview/src/renderer/row_identifiers.rs @@ -139,6 
+139,7 @@ impl CsvPreviewView { RowIdentifiers::SrcLines => RowIdentifiers::RowNum, RowIdentifiers::RowNum => RowIdentifiers::SrcLines, }; + this.sync_column_widths(cx); cx.notify(); }); }), diff --git a/crates/csv_preview/src/renderer/table_cell.rs b/crates/csv_preview/src/renderer/table_cell.rs index 32900ab77708936e218e9af10a4de5fba796e6a7..733488110fbcdb39761b150a74c135426ca6514a 100644 --- a/crates/csv_preview/src/renderer/table_cell.rs +++ b/crates/csv_preview/src/renderer/table_cell.rs @@ -53,7 +53,6 @@ fn create_table_cell( .px_1() .bg(cx.theme().colors().editor_background) .border_b_1() - .border_r_1() .border_color(cx.theme().colors().border_variant) .map(|div| match vertical_alignment { VerticalAlignment::Top => div.items_start(), diff --git a/crates/debugger_ui/src/debugger_ui.rs b/crates/debugger_ui/src/debugger_ui.rs index 124967650b31cd88e72b2867838fb3a4ecbcf920..f5947a4393b2eeb8ca6ad3f844962500aa4ecf2d 100644 --- a/crates/debugger_ui/src/debugger_ui.rs +++ b/crates/debugger_ui/src/debugger_ui.rs @@ -299,7 +299,7 @@ pub fn init(cx: &mut App) { return; } maybe!({ - let (buffer, position, _) = editor + let (buffer, position) = editor .update(cx, |editor, cx| { let cursor_point: language::Point = editor .selections diff --git a/crates/debugger_ui/src/session/running/console.rs b/crates/debugger_ui/src/session/running/console.rs index c488e88d74e7f282bd0424a2213e08e2c9bec15f..65bc949b2b6ddb1a707abf2e001ffde151fb70b8 100644 --- a/crates/debugger_ui/src/session/running/console.rs +++ b/crates/debugger_ui/src/session/running/console.rs @@ -7,8 +7,8 @@ use anyhow::Result; use collections::HashMap; use dap::{CompletionItem, CompletionItemType, OutputEvent}; use editor::{ - Bias, CompletionProvider, Editor, EditorElement, EditorMode, EditorStyle, ExcerptId, - HighlightKey, MultiBufferOffset, SizingBehavior, + Bias, CompletionProvider, Editor, EditorElement, EditorMode, EditorStyle, HighlightKey, + MultiBufferOffset, SizingBehavior, }; use 
fuzzy::StringMatchCandidate; use gpui::{ @@ -528,7 +528,6 @@ struct ConsoleQueryBarCompletionProvider(WeakEntity); impl CompletionProvider for ConsoleQueryBarCompletionProvider { fn completions( &self, - _excerpt_id: ExcerptId, buffer: &Entity, buffer_position: language::Anchor, _trigger: editor::CompletionContext, diff --git a/crates/dev_container/Cargo.toml b/crates/dev_container/Cargo.toml index e3a67601c3837bd9579a477576e9c837f73c1e75..e04b965b076fe1ba6c5a8f47e548b922dab55d4a 100644 --- a/crates/dev_container/Cargo.toml +++ b/crates/dev_container/Cargo.toml @@ -5,21 +5,26 @@ publish.workspace = true edition.workspace = true [dependencies] +async-tar.workspace = true +async-trait.workspace = true serde.workspace = true serde_json.workspace = true +serde_json_lenient.workspace = true +shlex.workspace = true http_client.workspace = true http.workspace = true gpui.workspace = true +fs.workspace = true futures.workspace = true log.workspace = true -node_runtime.workspace = true menu.workspace = true paths.workspace = true picker.workspace = true +project.workspace = true settings.workspace = true -smol.workspace = true ui.workspace = true util.workspace = true +walkdir.workspace = true worktree.workspace = true workspace.workspace = true @@ -32,6 +37,8 @@ settings = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } worktree = { workspace = true, features = ["test-support"] } +util = { workspace = true, features = ["test-support"] } +env_logger.workspace = true [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/crates/dev_container/src/command_json.rs b/crates/dev_container/src/command_json.rs new file mode 100644 index 0000000000000000000000000000000000000000..9823fec4068f141efb4e306fa455bbb7b29a678e --- /dev/null +++ b/crates/dev_container/src/command_json.rs @@ -0,0 +1,64 @@ +use std::process::Output; + +use async_trait::async_trait; +use serde::Deserialize; +use 
util::command::Command; + +use crate::devcontainer_api::DevContainerError; + +pub(crate) struct DefaultCommandRunner; + +impl DefaultCommandRunner { + pub(crate) fn new() -> Self { + Self + } +} + +#[async_trait] +impl CommandRunner for DefaultCommandRunner { + async fn run_command(&self, command: &mut Command) -> Result { + command.output().await + } +} + +#[async_trait] +pub(crate) trait CommandRunner: Send + Sync { + async fn run_command(&self, command: &mut Command) -> Result; +} + +pub(crate) async fn evaluate_json_command( + mut command: Command, +) -> Result, DevContainerError> +where + T: for<'de> Deserialize<'de>, +{ + let output = command.output().await.map_err(|e| { + log::error!("Error running command {:?}: {e}", command); + DevContainerError::CommandFailed(command.get_program().display().to_string()) + })?; + + deserialize_json_output(output).map_err(|e| { + log::error!("Error running command {:?}: {e}", command); + DevContainerError::CommandFailed(command.get_program().display().to_string()) + }) +} + +pub(crate) fn deserialize_json_output(output: Output) -> Result, String> +where + T: for<'de> Deserialize<'de>, +{ + if output.status.success() { + let raw = String::from_utf8_lossy(&output.stdout); + if raw.is_empty() || raw.trim() == "[]" || raw.trim() == "{}" { + return Ok(None); + } + let value = serde_json_lenient::from_str(&raw) + .map_err(|e| format!("Error deserializing from raw json: {e}")); + value + } else { + let std_err = String::from_utf8_lossy(&output.stderr); + Err(format!( + "Sent non-successful output; cannot deserialize. 
StdErr: {std_err}" + )) + } +} diff --git a/crates/dev_container/src/devcontainer_api.rs b/crates/dev_container/src/devcontainer_api.rs index 15c39dde119be04e5c58f34f268a98935954d6fe..f9f0136fcfc5ffe29c643acb0371b89107ab3d47 100644 --- a/crates/dev_container/src/devcontainer_api.rs +++ b/crates/dev_container/src/devcontainer_api.rs @@ -2,18 +2,26 @@ use std::{ collections::{HashMap, HashSet}, fmt::Display, path::{Path, PathBuf}, + sync::Arc, }; -use node_runtime::NodeRuntime; +use futures::TryFutureExt; +use gpui::{AsyncWindowContext, Entity}; +use project::Worktree; use serde::Deserialize; -use settings::DevContainerConnection; -use smol::fs; -use util::command::Command; +use settings::{DevContainerConnection, infer_json_indent_size, replace_value_in_json_text}; use util::rel_path::RelPath; +use walkdir::WalkDir; use workspace::Workspace; use worktree::Snapshot; -use crate::{DevContainerContext, DevContainerFeature, DevContainerTemplate}; +use crate::{ + DevContainerContext, DevContainerFeature, DevContainerTemplate, + devcontainer_json::DevContainer, + devcontainer_manifest::{read_devcontainer_configuration, spawn_dev_container}, + devcontainer_templates_repository, get_latest_oci_manifest, get_oci_token, ghcr_registry, + oci::download_oci_tarball, +}; /// Represents a discovered devcontainer configuration #[derive(Debug, Clone, PartialEq, Eq)] @@ -42,63 +50,33 @@ impl DevContainerConfig { #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] -struct DevContainerUp { - _outcome: String, - container_id: String, - remote_user: String, - remote_workspace_folder: String, +pub(crate) struct DevContainerUp { + pub(crate) container_id: String, + pub(crate) remote_user: String, + pub(crate) remote_workspace_folder: String, + #[serde(default)] + pub(crate) extension_ids: Vec, + #[serde(default)] + pub(crate) remote_env: HashMap, } -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] +#[derive(Debug)] pub(crate) struct DevContainerApply { - 
pub(crate) files: Vec, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub(crate) struct DevContainerConfiguration { - name: Option, -} - -#[derive(Debug, Deserialize)] -pub(crate) struct DevContainerConfigurationOutput { - configuration: DevContainerConfiguration, -} - -pub(crate) struct DevContainerCli { - pub path: PathBuf, - node_runtime_path: Option, -} - -impl DevContainerCli { - fn command(&self, use_podman: bool) -> Command { - let mut command = if let Some(node_runtime_path) = &self.node_runtime_path { - let mut command = - util::command::new_command(node_runtime_path.as_os_str().display().to_string()); - command.arg(self.path.display().to_string()); - command - } else { - util::command::new_command(self.path.display().to_string()) - }; - - if use_podman { - command.arg("--docker-path"); - command.arg("podman"); - } - command - } + pub(crate) project_files: Vec>, } #[derive(Debug, Clone, PartialEq, Eq)] pub enum DevContainerError { + CommandFailed(String), DockerNotAvailable, - DevContainerCliNotAvailable, + ContainerNotValid(String), DevContainerTemplateApplyFailed(String), + DevContainerScriptsFailed, DevContainerUpFailed(String), DevContainerNotFound, DevContainerParseFailed, - NodeRuntimeNotAvailable, + FilesystemError, + ResourceFetchFailed, NotInValidProject, } @@ -110,8 +88,11 @@ impl Display for DevContainerError { match self { DevContainerError::DockerNotAvailable => "docker CLI not found on $PATH".to_string(), - DevContainerError::DevContainerCliNotAvailable => - "devcontainer CLI not found on path".to_string(), + DevContainerError::ContainerNotValid(id) => format!( + "docker image {id} did not have expected configuration for a dev container" + ), + DevContainerError::DevContainerScriptsFailed => + "lifecycle scripts could not execute for dev container".to_string(), DevContainerError::DevContainerUpFailed(_) => { "DevContainer creation failed".to_string() } @@ -122,14 +103,32 @@ impl Display for DevContainerError { "No 
valid dev container definition found in project".to_string(), DevContainerError::DevContainerParseFailed => "Failed to parse file .devcontainer/devcontainer.json".to_string(), - DevContainerError::NodeRuntimeNotAvailable => - "Cannot find a valid node runtime".to_string(), DevContainerError::NotInValidProject => "Not within a valid project".to_string(), + DevContainerError::CommandFailed(program) => + format!("Failure running external program {program}"), + DevContainerError::FilesystemError => + "Error downloading resources locally".to_string(), + DevContainerError::ResourceFetchFailed => + "Failed to fetch resources from template or feature repository".to_string(), } ) } } +pub(crate) async fn read_default_devcontainer_configuration( + cx: &DevContainerContext, + environment: HashMap, +) -> Result { + let default_config = DevContainerConfig::default_config(); + + read_devcontainer_configuration(default_config, cx, environment) + .await + .map_err(|e| { + log::error!("Default configuration not found: {:?}", e); + DevContainerError::DevContainerNotFound + }) +} + /// Finds all available devcontainer configurations in the project. /// /// See [`find_configs_in_snapshot`] for the locations that are scanned. 
@@ -241,27 +240,35 @@ pub fn find_configs_in_snapshot(snapshot: &Snapshot) -> Vec pub async fn start_dev_container_with_config( context: DevContainerContext, config: Option, + environment: HashMap, ) -> Result<(DevContainerConnection, String), DevContainerError> { check_for_docker(context.use_podman).await?; - let cli = ensure_devcontainer_cli(&context.node_runtime).await?; - let config_path = config.map(|c| context.project_directory.join(&c.config_path)); - match devcontainer_up(&context, &cli, config_path.as_deref()).await { + let Some(actual_config) = config.clone() else { + return Err(DevContainerError::NotInValidProject); + }; + + match spawn_dev_container( + &context, + environment.clone(), + actual_config.clone(), + context.project_directory.clone().as_ref(), + ) + .await + { Ok(DevContainerUp { container_id, remote_workspace_folder, remote_user, + extension_ids, + remote_env, .. }) => { let project_name = - match read_devcontainer_configuration(&context, &cli, config_path.as_deref()).await - { - Ok(DevContainerConfigurationOutput { - configuration: - DevContainerConfiguration { - name: Some(project_name), - }, - }) => project_name, + match read_devcontainer_configuration(actual_config, &context, environment).await { + Ok(DevContainer { + name: Some(name), .. 
+ }) => name, _ => get_backup_project_name(&remote_workspace_folder, &container_id), }; @@ -270,31 +277,19 @@ pub async fn start_dev_container_with_config( container_id, use_podman: context.use_podman, remote_user, + extension_ids, + remote_env: remote_env.into_iter().collect(), }; Ok((connection, remote_workspace_folder)) } Err(err) => { - let message = format!("Failed with nested error: {}", err); + let message = format!("Failed with nested error: {:?}", err); Err(DevContainerError::DevContainerUpFailed(message)) } } } -#[cfg(not(target_os = "windows"))] -fn dev_container_cli() -> String { - "devcontainer".to_string() -} - -#[cfg(target_os = "windows")] -fn dev_container_cli() -> String { - "devcontainer.cmd".to_string() -} - -fn dev_container_script() -> String { - "devcontainer.js".to_string() -} - async fn check_for_docker(use_podman: bool) -> Result<(), DevContainerError> { let mut command = if use_podman { util::command::new_command("podman") @@ -312,261 +307,157 @@ async fn check_for_docker(use_podman: bool) -> Result<(), DevContainerError> { } } -pub(crate) async fn ensure_devcontainer_cli( - node_runtime: &NodeRuntime, -) -> Result { - let mut command = util::command::new_command(&dev_container_cli()); - command.arg("--version"); - - if let Err(e) = command.output().await { - log::error!( - "Unable to find devcontainer CLI in $PATH. Checking for a zed installed version. 
Error: {:?}", - e - ); - - let Ok(node_runtime_path) = node_runtime.binary_path().await else { - return Err(DevContainerError::NodeRuntimeNotAvailable); +pub(crate) async fn apply_devcontainer_template( + worktree: Entity, + template: &DevContainerTemplate, + template_options: &HashMap, + features_selected: &HashSet, + context: &DevContainerContext, + cx: &mut AsyncWindowContext, +) -> Result { + let token = get_oci_token( + ghcr_registry(), + devcontainer_templates_repository(), + &context.http_client, + ) + .map_err(|e| { + log::error!("Failed to get OCI auth token: {e}"); + DevContainerError::ResourceFetchFailed + }) + .await?; + let manifest = get_latest_oci_manifest( + &token.token, + ghcr_registry(), + devcontainer_templates_repository(), + &context.http_client, + Some(&template.id), + ) + .map_err(|e| { + log::error!("Failed to fetch template from OCI repository: {e}"); + DevContainerError::ResourceFetchFailed + }) + .await?; + + let layer = &manifest.layers.get(0).ok_or_else(|| { + log::error!("Given manifest has no layers to query for blob. 
Aborting"); + DevContainerError::ResourceFetchFailed + })?; + + let timestamp = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .map(|d| d.as_millis()) + .unwrap_or(0); + let extract_dir = std::env::temp_dir() + .join(&template.id) + .join(format!("extracted-{timestamp}")); + + context.fs.create_dir(&extract_dir).await.map_err(|e| { + log::error!("Could not create temporary directory: {e}"); + DevContainerError::FilesystemError + })?; + + download_oci_tarball( + &token.token, + ghcr_registry(), + devcontainer_templates_repository(), + &layer.digest, + "application/vnd.oci.image.manifest.v1+json", + &extract_dir, + &context.http_client, + &context.fs, + Some(&template.id), + ) + .map_err(|e| { + log::error!("Error downloading tarball: {:?}", e); + DevContainerError::ResourceFetchFailed + }) + .await?; + + let downloaded_devcontainer_folder = &extract_dir.join(".devcontainer/"); + let mut project_files = Vec::new(); + for entry in WalkDir::new(downloaded_devcontainer_folder) { + let Ok(entry) = entry else { + continue; }; - - let datadir_cli_path = paths::devcontainer_dir() - .join("node_modules") - .join("@devcontainers") - .join("cli") - .join(&dev_container_script()); - - log::debug!( - "devcontainer not found in path, using local location: ${}", - datadir_cli_path.display() - ); - - let mut command = - util::command::new_command(node_runtime_path.as_os_str().display().to_string()); - command.arg(datadir_cli_path.display().to_string()); - command.arg("--version"); - - match command.output().await { - Err(e) => log::error!( - "Unable to find devcontainer CLI in Data dir. Will try to install. Error: {:?}", - e - ), - Ok(output) => { - if output.status.success() { - log::info!("Found devcontainer CLI in Data dir"); - return Ok(DevContainerCli { - path: datadir_cli_path.clone(), - node_runtime_path: Some(node_runtime_path.clone()), - }); - } else { - log::error!( - "Could not run devcontainer CLI from data_dir. Will try once more to install. 
Output: {:?}", - output - ); - } - } + if !entry.file_type().is_file() { + continue; } - - if let Err(e) = fs::create_dir_all(paths::devcontainer_dir()).await { - log::error!("Unable to create devcontainer directory. Error: {:?}", e); - return Err(DevContainerError::DevContainerCliNotAvailable); + let relative_path = entry.path().strip_prefix(&extract_dir).map_err(|e| { + log::error!("Can't create relative path: {e}"); + DevContainerError::FilesystemError + })?; + let rel_path = RelPath::unix(relative_path) + .map_err(|e| { + log::error!("Can't create relative path: {e}"); + DevContainerError::FilesystemError + })? + .into_arc(); + let content = context.fs.load(entry.path()).await.map_err(|e| { + log::error!("Unable to read file: {e}"); + DevContainerError::FilesystemError + })?; + + let mut content = expand_template_options(content, template_options); + if let Some("devcontainer.json") = &rel_path.file_name() { + content = insert_features_into_devcontainer_json(&content, features_selected) } - - if let Err(e) = node_runtime - .npm_install_packages( - &paths::devcontainer_dir(), - &[("@devcontainers/cli", "latest")], - ) - .await - { - log::error!( - "Unable to install devcontainer CLI to data directory. Error: {:?}", - e - ); - return Err(DevContainerError::DevContainerCliNotAvailable); - }; - - let mut command = - util::command::new_command(node_runtime_path.as_os_str().display().to_string()); - command.arg(datadir_cli_path.display().to_string()); - command.arg("--version"); - if let Err(e) = command.output().await { - log::error!( - "Unable to find devcontainer cli after NPM install. 
Error: {:?}", - e - ); - Err(DevContainerError::DevContainerCliNotAvailable) - } else { - Ok(DevContainerCli { - path: datadir_cli_path, - node_runtime_path: Some(node_runtime_path), + worktree + .update(cx, |worktree, cx| { + worktree.create_entry(rel_path.clone(), false, Some(content.into_bytes()), cx) }) - } - } else { - log::info!("Found devcontainer cli on $PATH, using it"); - Ok(DevContainerCli { - path: PathBuf::from(&dev_container_cli()), - node_runtime_path: None, - }) - } -} - -async fn devcontainer_up( - context: &DevContainerContext, - cli: &DevContainerCli, - config_path: Option<&Path>, -) -> Result { - let mut command = cli.command(context.use_podman); - command.arg("up"); - command.arg("--workspace-folder"); - command.arg(context.project_directory.display().to_string()); - - if let Some(config) = config_path { - command.arg("--config"); - command.arg(config.display().to_string()); + .await + .map_err(|e| { + log::error!("Unable to create entry in worktree: {e}"); + DevContainerError::NotInValidProject + })?; + project_files.push(rel_path); } - log::info!("Running full devcontainer up command: {:?}", command); - - match command.output().await { - Ok(output) => { - if output.status.success() { - let raw = String::from_utf8_lossy(&output.stdout); - parse_json_from_cli(&raw) - } else { - let message = format!( - "Non-success status running devcontainer up for workspace: out: {}, err: {}", - String::from_utf8_lossy(&output.stdout), - String::from_utf8_lossy(&output.stderr) - ); - - log::error!("{}", &message); - Err(DevContainerError::DevContainerUpFailed(message)) - } - } - Err(e) => { - let message = format!("Error running devcontainer up: {:?}", e); - log::error!("{}", &message); - Err(DevContainerError::DevContainerUpFailed(message)) - } - } + Ok(DevContainerApply { project_files }) } -pub(crate) async fn read_devcontainer_configuration( - context: &DevContainerContext, - cli: &DevContainerCli, - config_path: Option<&Path>, -) -> Result { - let mut 
command = cli.command(context.use_podman); - command.arg("read-configuration"); - command.arg("--workspace-folder"); - command.arg(context.project_directory.display().to_string()); - - if let Some(config) = config_path { - command.arg("--config"); - command.arg(config.display().to_string()); - } - - match command.output().await { - Ok(output) => { - if output.status.success() { - let raw = String::from_utf8_lossy(&output.stdout); - parse_json_from_cli(&raw) - } else { - let message = format!( - "Non-success status running devcontainer read-configuration for workspace: out: {:?}, err: {:?}", - String::from_utf8_lossy(&output.stdout), - String::from_utf8_lossy(&output.stderr) - ); - log::error!("{}", &message); - Err(DevContainerError::DevContainerNotFound) - } - } - Err(e) => { - let message = format!("Error running devcontainer read-configuration: {:?}", e); - log::error!("{}", &message); - Err(DevContainerError::DevContainerNotFound) - } +fn insert_features_into_devcontainer_json( + content: &str, + features: &HashSet, +) -> String { + if features.is_empty() { + return content.to_string(); } -} - -pub(crate) async fn apply_dev_container_template( - template: &DevContainerTemplate, - template_options: &HashMap, - features_selected: &HashSet, - context: &DevContainerContext, - cli: &DevContainerCli, -) -> Result { - let mut command = cli.command(context.use_podman); - - let Ok(serialized_options) = serde_json::to_string(template_options) else { - log::error!("Unable to serialize options for {:?}", template_options); - return Err(DevContainerError::DevContainerParseFailed); - }; - command.arg("templates"); - command.arg("apply"); - command.arg("--workspace-folder"); - command.arg(context.project_directory.display().to_string()); - command.arg("--template-id"); - command.arg(format!( - "{}/{}", - template - .source_repository - .as_ref() - .unwrap_or(&String::from("")), - template.id - )); - command.arg("--template-args"); - command.arg(serialized_options); - 
command.arg("--features"); - command.arg(template_features_to_json(features_selected)); - - log::debug!("Running full devcontainer apply command: {:?}", command); + let features_value: serde_json::Value = features + .iter() + .map(|f| { + let key = format!( + "{}/{}:{}", + f.source_repository.as_deref().unwrap_or(""), + f.id, + f.major_version() + ); + (key, serde_json::Value::Object(Default::default())) + }) + .collect::>() + .into(); + + let tab_size = infer_json_indent_size(content); + let (range, replacement) = replace_value_in_json_text( + content, + &["features"], + tab_size, + Some(&features_value), + None, + ); - match command.output().await { - Ok(output) => { - if output.status.success() { - let raw = String::from_utf8_lossy(&output.stdout); - parse_json_from_cli(&raw) - } else { - let message = format!( - "Non-success status running devcontainer templates apply for workspace: out: {:?}, err: {:?}", - String::from_utf8_lossy(&output.stdout), - String::from_utf8_lossy(&output.stderr) - ); + let mut result = content.to_string(); + result.replace_range(range, &replacement); + result +} - log::error!("{}", &message); - Err(DevContainerError::DevContainerTemplateApplyFailed(message)) - } - } - Err(e) => { - let message = format!("Error running devcontainer templates apply: {:?}", e); - log::error!("{}", &message); - Err(DevContainerError::DevContainerTemplateApplyFailed(message)) - } +fn expand_template_options(content: String, template_options: &HashMap) -> String { + let mut replaced_content = content; + for (key, val) in template_options { + replaced_content = replaced_content.replace(&format!("${{templateOption:{key}}}"), val) } -} -// Try to parse directly first (newer versions output pure JSON) -// If that fails, look for JSON start (older versions have plaintext prefix) -fn parse_json_from_cli(raw: &str) -> Result { - serde_json::from_str::(&raw) - .or_else(|e| { - log::error!("Error parsing json: {} - will try to find json object in larger plaintext", 
e); - let json_start = raw - .find(|c| c == '{') - .ok_or_else(|| { - log::error!("No JSON found in devcontainer up output"); - DevContainerError::DevContainerParseFailed - })?; - - serde_json::from_str(&raw[json_start..]).map_err(|e| { - log::error!( - "Unable to parse JSON from devcontainer up output (starting at position {}), error: {:?}", - json_start, - e - ); - DevContainerError::DevContainerParseFailed - }) - }) + replaced_content } fn get_backup_project_name(remote_workspace_folder: &str, container_id: &str) -> String { @@ -577,36 +468,11 @@ fn get_backup_project_name(remote_workspace_folder: &str, container_id: &str) -> .unwrap_or_else(|| container_id.to_string()) } -fn template_features_to_json(features_selected: &HashSet) -> String { - let features_map = features_selected - .iter() - .map(|feature| { - let mut map = HashMap::new(); - map.insert( - "id", - format!( - "{}/{}:{}", - feature - .source_repository - .as_ref() - .unwrap_or(&String::from("")), - feature.id, - feature.major_version() - ), - ); - map - }) - .collect::>>(); - serde_json::to_string(&features_map).unwrap() -} - #[cfg(test)] mod tests { use std::path::PathBuf; - use crate::devcontainer_api::{ - DevContainerConfig, DevContainerUp, find_configs_in_snapshot, parse_json_from_cli, - }; + use crate::devcontainer_api::{DevContainerConfig, find_configs_in_snapshot}; use fs::FakeFs; use gpui::TestAppContext; use project::Project; @@ -621,30 +487,6 @@ mod tests { }); } - #[test] - fn should_parse_from_devcontainer_json() { - let json = r#"{"outcome":"success","containerId":"826abcac45afd412abff083ab30793daff2f3c8ce2c831df728baf39933cb37a","remoteUser":"vscode","remoteWorkspaceFolder":"/workspaces/zed"}"#; - let up: DevContainerUp = parse_json_from_cli(json).unwrap(); - assert_eq!(up._outcome, "success"); - assert_eq!( - up.container_id, - "826abcac45afd412abff083ab30793daff2f3c8ce2c831df728baf39933cb37a" - ); - assert_eq!(up.remote_user, "vscode"); - assert_eq!(up.remote_workspace_folder, 
"/workspaces/zed"); - - let json_in_plaintext = r#"[2026-01-22T16:19:08.802Z] @devcontainers/cli 0.80.1. Node.js v22.21.1. darwin 24.6.0 arm64. - {"outcome":"success","containerId":"826abcac45afd412abff083ab30793daff2f3c8ce2c831df728baf39933cb37a","remoteUser":"vscode","remoteWorkspaceFolder":"/workspaces/zed"}"#; - let up: DevContainerUp = parse_json_from_cli(json_in_plaintext).unwrap(); - assert_eq!(up._outcome, "success"); - assert_eq!( - up.container_id, - "826abcac45afd412abff083ab30793daff2f3c8ce2c831df728baf39933cb37a" - ); - assert_eq!(up.remote_user, "vscode"); - assert_eq!(up.remote_workspace_folder, "/workspaces/zed"); - } - #[gpui::test] async fn test_find_configs_root_devcontainer_json(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/dev_container/src/devcontainer_json.rs b/crates/dev_container/src/devcontainer_json.rs new file mode 100644 index 0000000000000000000000000000000000000000..4429c63a37a87d1b54455b8169359ddf40511e24 --- /dev/null +++ b/crates/dev_container/src/devcontainer_json.rs @@ -0,0 +1,1358 @@ +use std::{collections::HashMap, fmt::Display, path::Path, sync::Arc}; + +use crate::{command_json::CommandRunner, devcontainer_api::DevContainerError}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_json_lenient::Value; +use util::command::Command; + +#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, Clone)] +#[serde(untagged)] +pub(crate) enum ForwardPort { + Number(u16), + String(String), +} + +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] +#[serde(rename_all = "camelCase")] +pub(crate) enum PortAttributeProtocol { + Https, + Http, +} + +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] +#[serde(rename_all = "camelCase")] +pub(crate) enum OnAutoForward { + Notify, + OpenBrowser, + OpenBrowserOnce, + OpenPreview, + Silent, + Ignore, +} + +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] +#[serde(rename_all = "camelCase")] +pub(crate) struct PortAttributes { + label: 
String, + on_auto_forward: OnAutoForward, + elevate_if_needed: bool, + require_local_port: bool, + protocol: PortAttributeProtocol, +} + +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] +#[serde(rename_all = "camelCase")] +pub(crate) enum UserEnvProbe { + None, + InteractiveShell, + LoginShell, + LoginInteractiveShell, +} + +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] +#[serde(rename_all = "camelCase")] +pub(crate) enum ShutdownAction { + None, + StopContainer, + StopCompose, +} + +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] +#[serde(rename_all = "camelCase")] +pub(crate) struct MountDefinition { + pub(crate) source: String, + pub(crate) target: String, + #[serde(rename = "type")] + pub(crate) mount_type: Option, +} + +impl Display for MountDefinition { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "type={},source={},target={},consistency=cached", + self.mount_type.clone().unwrap_or_else(|| { + if self.source.starts_with('/') { + "bind".to_string() + } else { + "volume".to_string() + } + }), + self.source, + self.target + ) + } +} + +/// Represents the value associated with a feature ID in the `features` map of devcontainer.json. 
+/// +/// Per the spec, the value can be: +/// - A boolean (`true` to enable with defaults) +/// - A string (shorthand for `{"version": ""}`) +/// - An object mapping option names to string or boolean values +/// +/// See: https://containers.dev/implementors/features/#devcontainerjson-properties +#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, Clone)] +#[serde(untagged)] +pub(crate) enum FeatureOptions { + Bool(bool), + String(String), + Options(HashMap), +} + +#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, Clone)] +#[serde(untagged)] +pub(crate) enum FeatureOptionValue { + Bool(bool), + String(String), +} +impl std::fmt::Display for FeatureOptionValue { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + FeatureOptionValue::Bool(b) => write!(f, "{}", b), + FeatureOptionValue::String(s) => write!(f, "{}", s), + } + } +} + +#[derive(Clone, Debug, Serialize, Eq, PartialEq, Default)] +pub(crate) struct ZedCustomizationsWrapper { + pub(crate) zed: ZedCustomization, +} + +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq, Default)] +pub(crate) struct ZedCustomization { + #[serde(default)] + pub(crate) extensions: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)] +#[serde(rename_all = "camelCase")] +pub(crate) struct ContainerBuild { + pub(crate) dockerfile: String, + context: Option, + pub(crate) args: Option>, + options: Option>, + target: Option, + #[serde(default, deserialize_with = "deserialize_string_or_array")] + cache_from: Option>, +} + +#[derive(Clone, Debug, Serialize, Eq, PartialEq)] +struct LifecycleScriptInternal { + command: Option, + args: Vec, +} + +impl LifecycleScriptInternal { + fn from_args(args: Vec) -> Self { + let command = args.get(0).map(|a| a.to_string()); + let remaining = args.iter().skip(1).map(|a| a.to_string()).collect(); + Self { + command, + args: remaining, + } + } +} + +#[derive(Clone, Debug, Serialize, Eq, PartialEq)] +pub struct LifecycleScript { + 
scripts: HashMap, +} + +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] +#[serde(rename_all = "camelCase")] +pub(crate) struct HostRequirements { + cpus: Option, + memory: Option, + storage: Option, +} + +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] +#[serde(rename_all = "camelCase")] +pub(crate) enum LifecycleCommand { + InitializeCommand, + OnCreateCommand, + UpdateContentCommand, + PostCreateCommand, + PostStartCommand, +} + +#[derive(Debug, PartialEq, Eq)] +pub(crate) enum DevContainerBuildType { + Image, + Dockerfile, + DockerCompose, + None, +} +#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq, Default)] +#[serde(rename_all = "camelCase")] +pub(crate) struct DevContainer { + pub(crate) image: Option, + pub(crate) name: Option, + pub(crate) remote_user: Option, + pub(crate) forward_ports: Option>, + pub(crate) ports_attributes: Option>, + pub(crate) other_ports_attributes: Option, + pub(crate) container_env: Option>, + pub(crate) remote_env: Option>, + pub(crate) container_user: Option, + #[serde(rename = "updateRemoteUserUID")] + pub(crate) update_remote_user_uid: Option, + user_env_probe: Option, + override_command: Option, + shutdown_action: Option, + init: Option, + pub(crate) privileged: Option, + cap_add: Option>, + security_opt: Option>, + #[serde(default, deserialize_with = "deserialize_mount_definitions")] + pub(crate) mounts: Option>, + pub(crate) features: Option>, + pub(crate) override_feature_install_order: Option>, + pub(crate) customizations: Option, + pub(crate) build: Option, + #[serde(default, deserialize_with = "deserialize_string_or_int")] + pub(crate) app_port: Option, + #[serde(default, deserialize_with = "deserialize_mount_definition")] + pub(crate) workspace_mount: Option, + pub(crate) workspace_folder: Option, + run_args: Option>, + #[serde(default, deserialize_with = "deserialize_string_or_array")] + pub(crate) docker_compose_file: Option>, + pub(crate) service: Option, + run_services: 
Option>, + pub(crate) initialize_command: Option, + pub(crate) on_create_command: Option, + pub(crate) update_content_command: Option, + pub(crate) post_create_command: Option, + pub(crate) post_start_command: Option, + pub(crate) post_attach_command: Option, + wait_for: Option, + host_requirements: Option, +} + +pub(crate) fn deserialize_devcontainer_json(json: &str) -> Result { + match serde_json_lenient::from_str(json) { + Ok(devcontainer) => Ok(devcontainer), + Err(e) => { + log::error!("Unable to deserialize devcontainer from json: {e}"); + Err(DevContainerError::DevContainerParseFailed) + } + } +} + +impl DevContainer { + pub(crate) fn build_type(&self) -> DevContainerBuildType { + if self.image.is_some() { + return DevContainerBuildType::Image; + } else if self.docker_compose_file.is_some() { + return DevContainerBuildType::DockerCompose; + } else if self.build.is_some() { + return DevContainerBuildType::Dockerfile; + } + return DevContainerBuildType::None; + } + + pub(crate) fn has_features(&self) -> bool { + self.features + .as_ref() + .map(|features| !features.is_empty()) + .unwrap_or(false) + } +} + +// Custom deserializer that parses the entire customizations object as a +// serde_json_lenient::Value first, then extracts the "zed" portion. +// This avoids a bug in serde_json_lenient's `ignore_value` codepath which +// does not handle trailing commas in skipped values. +impl<'de> Deserialize<'de> for ZedCustomizationsWrapper { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let value = Value::deserialize(deserializer)?; + let zed = value + .get("zed") + .map(|zed_value| serde_json_lenient::from_value::(zed_value.clone())) + .transpose() + .map_err(serde::de::Error::custom)? 
+ .unwrap_or_default(); + Ok(ZedCustomizationsWrapper { zed }) + } +} + +impl LifecycleScript { + fn from_map(args: HashMap>) -> Self { + Self { + scripts: args + .into_iter() + .map(|(k, v)| (k, LifecycleScriptInternal::from_args(v))) + .collect(), + } + } + fn from_str(args: &str) -> Self { + let script: Vec = args.split(" ").map(|a| a.to_string()).collect(); + + Self::from_args(script) + } + fn from_args(args: Vec) -> Self { + Self::from_map(HashMap::from([("default".to_string(), args)])) + } + pub fn script_commands(&self) -> HashMap { + self.scripts + .iter() + .filter_map(|(k, v)| { + if let Some(inner_command) = &v.command { + let mut command = Command::new(inner_command); + command.args(&v.args); + Some((k.clone(), command)) + } else { + log::warn!( + "Lifecycle script command {k}, value {:?} has no program to run. Skipping", + v + ); + None + } + }) + .collect() + } + + pub async fn run( + &self, + command_runnder: &Arc, + working_directory: &Path, + ) -> Result<(), DevContainerError> { + for (command_name, mut command) in self.script_commands() { + log::debug!("Running script {command_name}"); + + command.current_dir(working_directory); + + let output = command_runnder + .run_command(&mut command) + .await + .map_err(|e| { + log::error!("Error running command {command_name}: {e}"); + DevContainerError::CommandFailed(command_name.clone()) + })?; + if !output.status.success() { + let std_err = String::from_utf8_lossy(&output.stderr); + log::error!( + "Command {command_name} produced a non-successful output. 
StdErr: {std_err}" + ); + } + let std_out = String::from_utf8_lossy(&output.stdout); + log::debug!("Command {command_name} output:\n {std_out}"); + } + Ok(()) + } +} + +impl<'de> Deserialize<'de> for LifecycleScript { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + use serde::de::{self, Visitor}; + use std::fmt; + + struct LifecycleScriptVisitor; + + impl<'de> Visitor<'de> for LifecycleScriptVisitor { + type Value = LifecycleScript; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a string, an array of strings, or a map of arrays") + } + + fn visit_str(self, value: &str) -> Result + where + E: de::Error, + { + Ok(LifecycleScript::from_str(value)) + } + + fn visit_seq(self, mut seq: A) -> Result + where + A: de::SeqAccess<'de>, + { + let mut array = Vec::new(); + while let Some(elem) = seq.next_element()? { + array.push(elem); + } + Ok(LifecycleScript::from_args(array)) + } + + fn visit_map(self, mut map: A) -> Result + where + A: de::MapAccess<'de>, + { + let mut result = HashMap::new(); + while let Some(key) = map.next_key::()? 
{ + let value: Value = map.next_value()?; + let script_args = match value { + Value::String(s) => { + s.split(" ").map(|s| s.to_string()).collect::>() + } + Value::Array(arr) => { + let strings: Vec = arr + .into_iter() + .filter_map(|v| v.as_str().map(|s| s.to_string())) + .collect(); + strings + } + _ => continue, + }; + result.insert(key, script_args); + } + Ok(LifecycleScript::from_map(result)) + } + } + + deserializer.deserialize_any(LifecycleScriptVisitor) + } +} + +fn deserialize_mount_definition<'de, D>( + deserializer: D, +) -> Result, D::Error> +where + D: serde::Deserializer<'de>, +{ + use serde::Deserialize; + use serde::de::Error; + + #[derive(Deserialize)] + #[serde(untagged)] + enum MountItem { + Object(MountDefinition), + String(String), + } + + let item = MountItem::deserialize(deserializer)?; + + let mount = match item { + MountItem::Object(mount) => mount, + MountItem::String(s) => { + let mut source = None; + let mut target = None; + let mut mount_type = None; + + for part in s.split(',') { + let part = part.trim(); + if let Some((key, value)) = part.split_once('=') { + match key.trim() { + "source" => source = Some(value.trim().to_string()), + "target" => target = Some(value.trim().to_string()), + "type" => mount_type = Some(value.trim().to_string()), + _ => {} // Ignore unknown keys + } + } + } + + let source = source + .ok_or_else(|| D::Error::custom(format!("mount string missing 'source': {}", s)))?; + let target = target + .ok_or_else(|| D::Error::custom(format!("mount string missing 'target': {}", s)))?; + + MountDefinition { + source, + target, + mount_type, + } + } + }; + + Ok(Some(mount)) +} + +fn deserialize_mount_definitions<'de, D>( + deserializer: D, +) -> Result>, D::Error> +where + D: serde::Deserializer<'de>, +{ + use serde::Deserialize; + use serde::de::Error; + + #[derive(Deserialize)] + #[serde(untagged)] + enum MountItem { + Object(MountDefinition), + String(String), + } + + let items = Vec::::deserialize(deserializer)?; + 
let mut mounts = Vec::new(); + + for item in items { + match item { + MountItem::Object(mount) => mounts.push(mount), + MountItem::String(s) => { + let mut source = None; + let mut target = None; + let mut mount_type = None; + + for part in s.split(',') { + let part = part.trim(); + if let Some((key, value)) = part.split_once('=') { + match key.trim() { + "source" => source = Some(value.trim().to_string()), + "target" => target = Some(value.trim().to_string()), + "type" => mount_type = Some(value.trim().to_string()), + _ => {} // Ignore unknown keys + } + } + } + + let source = source.ok_or_else(|| { + D::Error::custom(format!("mount string missing 'source': {}", s)) + })?; + let target = target.ok_or_else(|| { + D::Error::custom(format!("mount string missing 'target': {}", s)) + })?; + + mounts.push(MountDefinition { + source, + target, + mount_type, + }); + } + } + } + + Ok(Some(mounts)) +} + +fn deserialize_string_or_int<'de, D>(deserializer: D) -> Result, D::Error> +where + D: serde::Deserializer<'de>, +{ + use serde::Deserialize; + + #[derive(Deserialize)] + #[serde(untagged)] + enum StringOrInt { + String(String), + Int(u32), + } + + match StringOrInt::deserialize(deserializer)? { + StringOrInt::String(s) => Ok(Some(s)), + StringOrInt::Int(b) => Ok(Some(b.to_string())), + } +} + +fn deserialize_string_or_array<'de, D>(deserializer: D) -> Result>, D::Error> +where + D: serde::Deserializer<'de>, +{ + use serde::Deserialize; + + #[derive(Deserialize)] + #[serde(untagged)] + enum StringOrArray { + String(String), + Array(Vec), + } + + match StringOrArray::deserialize(deserializer)? 
{ + StringOrArray::String(s) => Ok(Some(vec![s])), + StringOrArray::Array(b) => Ok(Some(b)), + } +} + +#[cfg(test)] +mod test { + use std::collections::HashMap; + + use crate::{ + devcontainer_api::DevContainerError, + devcontainer_json::{ + ContainerBuild, DevContainer, DevContainerBuildType, FeatureOptions, ForwardPort, + HostRequirements, LifecycleCommand, LifecycleScript, MountDefinition, OnAutoForward, + PortAttributeProtocol, PortAttributes, ShutdownAction, UserEnvProbe, ZedCustomization, + ZedCustomizationsWrapper, deserialize_devcontainer_json, + }, + }; + + #[test] + fn should_deserialize_customizations_with_unknown_keys() { + let json_with_other_customizations = r#" + { + "image": "mcr.microsoft.com/devcontainers/base:ubuntu", + "customizations": { + "vscode": { + "extensions": [ + "dbaeumer.vscode-eslint", + "GitHub.vscode-pull-request-github", + ], + }, + "zed": { + "extensions": ["vue", "ruby"], + }, + "codespaces": { + "repositories": { + "devcontainers/features": { + "permissions": { + "contents": "write", + "workflows": "write", + }, + }, + }, + }, + }, + } + "#; + + let result = deserialize_devcontainer_json(json_with_other_customizations); + + assert!( + result.is_ok(), + "Should ignore unknown customization keys, but got: {:?}", + result.err() + ); + let devcontainer = result.expect("ok"); + assert_eq!( + devcontainer.customizations, + Some(ZedCustomizationsWrapper { + zed: ZedCustomization { + extensions: vec!["vue".to_string(), "ruby".to_string()] + } + }) + ); + } + + #[test] + fn should_deserialize_customizations_without_zed_key() { + let json_without_zed = r#" + { + "image": "mcr.microsoft.com/devcontainers/base:ubuntu", + "customizations": { + "vscode": { + "extensions": ["dbaeumer.vscode-eslint"] + } + } + } + "#; + + let result = deserialize_devcontainer_json(json_without_zed); + + assert!( + result.is_ok(), + "Should handle missing zed key in customizations, but got: {:?}", + result.err() + ); + let devcontainer = result.expect("ok"); + 
assert_eq!( + devcontainer.customizations, + Some(ZedCustomizationsWrapper { + zed: ZedCustomization { extensions: vec![] } + }) + ); + } + + #[test] + fn should_deserialize_simple_devcontainer_json() { + let given_bad_json = "{ \"image\": 123 }"; + + let result = deserialize_devcontainer_json(given_bad_json); + + assert!(result.is_err()); + assert_eq!( + result.expect_err("err"), + DevContainerError::DevContainerParseFailed + ); + + let given_image_container_json = r#" + // These are some external comments. serde_lenient should handle them + { + // These are some internal comments + "image": "mcr.microsoft.com/devcontainers/base:ubuntu", + "name": "myDevContainer", + "remoteUser": "root", + "forwardPorts": [ + "db:5432", + 3000 + ], + "portsAttributes": { + "3000": { + "label": "This Port", + "onAutoForward": "notify", + "elevateIfNeeded": false, + "requireLocalPort": true, + "protocol": "https" + }, + "db:5432": { + "label": "This Port too", + "onAutoForward": "silent", + "elevateIfNeeded": true, + "requireLocalPort": false, + "protocol": "http" + } + }, + "otherPortsAttributes": { + "label": "Other Ports", + "onAutoForward": "openBrowser", + "elevateIfNeeded": true, + "requireLocalPort": true, + "protocol": "https" + }, + "updateRemoteUserUID": true, + "remoteEnv": { + "MYVAR1": "myvarvalue", + "MYVAR2": "myvarothervalue" + }, + "initializeCommand": ["echo", "initialize_command"], + "onCreateCommand": "echo on_create_command", + "updateContentCommand": { + "first": "echo update_content_command", + "second": ["echo", "update_content_command"] + }, + "postCreateCommand": ["echo", "post_create_command"], + "postStartCommand": "echo post_start_command", + "postAttachCommand": { + "something": "echo post_attach_command", + "something1": "echo something else", + }, + "waitFor": "postStartCommand", + "userEnvProbe": "loginShell", + "features": { + "ghcr.io/devcontainers/features/aws-cli:1": {}, + "ghcr.io/devcontainers/features/anaconda:1": {} + }, + 
"overrideFeatureInstallOrder": [ + "ghcr.io/devcontainers/features/anaconda:1", + "ghcr.io/devcontainers/features/aws-cli:1" + ], + "hostRequirements": { + "cpus": 2, + "memory": "8gb", + "storage": "32gb", + // Note that we're not parsing this currently + "gpu": true, + }, + "appPort": 8081, + "containerEnv": { + "MYVAR3": "myvar3", + "MYVAR4": "myvar4" + }, + "containerUser": "myUser", + "mounts": [ + { + "source": "/localfolder/app", + "target": "/workspaces/app", + "type": "volume" + } + ], + "runArgs": [ + "-c", + "some_command" + ], + "shutdownAction": "stopContainer", + "overrideCommand": true, + "workspaceFolder": "/workspaces", + "workspaceMount": "source=/app,target=/workspaces/app,type=bind,consistency=cached", + "customizations": { + "vscode": { + // Just confirm that this can be included and ignored + }, + "zed": { + "extensions": [ + "html" + ] + } + } + } + "#; + + let result = deserialize_devcontainer_json(given_image_container_json); + + assert!(result.is_ok()); + let devcontainer = result.expect("ok"); + assert_eq!( + devcontainer, + DevContainer { + image: Some(String::from("mcr.microsoft.com/devcontainers/base:ubuntu")), + name: Some(String::from("myDevContainer")), + remote_user: Some(String::from("root")), + forward_ports: Some(vec![ + ForwardPort::String("db:5432".to_string()), + ForwardPort::Number(3000), + ]), + ports_attributes: Some(HashMap::from([ + ( + "3000".to_string(), + PortAttributes { + label: "This Port".to_string(), + on_auto_forward: OnAutoForward::Notify, + elevate_if_needed: false, + require_local_port: true, + protocol: PortAttributeProtocol::Https + } + ), + ( + "db:5432".to_string(), + PortAttributes { + label: "This Port too".to_string(), + on_auto_forward: OnAutoForward::Silent, + elevate_if_needed: true, + require_local_port: false, + protocol: PortAttributeProtocol::Http + } + ) + ])), + other_ports_attributes: Some(PortAttributes { + label: "Other Ports".to_string(), + on_auto_forward: OnAutoForward::OpenBrowser, + 
elevate_if_needed: true, + require_local_port: true, + protocol: PortAttributeProtocol::Https + }), + update_remote_user_uid: Some(true), + remote_env: Some(HashMap::from([ + ("MYVAR1".to_string(), "myvarvalue".to_string()), + ("MYVAR2".to_string(), "myvarothervalue".to_string()) + ])), + initialize_command: Some(LifecycleScript::from_args(vec![ + "echo".to_string(), + "initialize_command".to_string() + ])), + on_create_command: Some(LifecycleScript::from_str("echo on_create_command")), + update_content_command: Some(LifecycleScript::from_map(HashMap::from([ + ( + "first".to_string(), + vec!["echo".to_string(), "update_content_command".to_string()] + ), + ( + "second".to_string(), + vec!["echo".to_string(), "update_content_command".to_string()] + ) + ]))), + post_create_command: Some(LifecycleScript::from_str("echo post_create_command")), + post_start_command: Some(LifecycleScript::from_args(vec![ + "echo".to_string(), + "post_start_command".to_string() + ])), + post_attach_command: Some(LifecycleScript::from_map(HashMap::from([ + ( + "something".to_string(), + vec!["echo".to_string(), "post_attach_command".to_string()] + ), + ( + "something1".to_string(), + vec![ + "echo".to_string(), + "something".to_string(), + "else".to_string() + ] + ) + ]))), + wait_for: Some(LifecycleCommand::PostStartCommand), + user_env_probe: Some(UserEnvProbe::LoginShell), + features: Some(HashMap::from([ + ( + "ghcr.io/devcontainers/features/aws-cli:1".to_string(), + FeatureOptions::Options(HashMap::new()) + ), + ( + "ghcr.io/devcontainers/features/anaconda:1".to_string(), + FeatureOptions::Options(HashMap::new()) + ) + ])), + override_feature_install_order: Some(vec![ + "ghcr.io/devcontainers/features/anaconda:1".to_string(), + "ghcr.io/devcontainers/features/aws-cli:1".to_string() + ]), + host_requirements: Some(HostRequirements { + cpus: Some(2), + memory: Some("8gb".to_string()), + storage: Some("32gb".to_string()), + }), + app_port: Some("8081".to_string()), + container_env: 
Some(HashMap::from([ + ("MYVAR3".to_string(), "myvar3".to_string()), + ("MYVAR4".to_string(), "myvar4".to_string()) + ])), + container_user: Some("myUser".to_string()), + mounts: Some(vec![MountDefinition { + source: "/localfolder/app".to_string(), + target: "/workspaces/app".to_string(), + mount_type: Some("volume".to_string()), + }]), + run_args: Some(vec!["-c".to_string(), "some_command".to_string()]), + shutdown_action: Some(ShutdownAction::StopContainer), + override_command: Some(true), + workspace_folder: Some("/workspaces".to_string()), + workspace_mount: Some(MountDefinition { + source: "/app".to_string(), + target: "/workspaces/app".to_string(), + mount_type: Some("bind".to_string()) + }), + customizations: Some(ZedCustomizationsWrapper { + zed: ZedCustomization { + extensions: vec!["html".to_string()] + } + }), + ..Default::default() + } + ); + + assert_eq!(devcontainer.build_type(), DevContainerBuildType::Image); + } + + #[test] + fn should_deserialize_docker_compose_devcontainer_json() { + let given_docker_compose_json = r#" + // These are some external comments. 
serde_lenient should handle them + { + // These are some internal comments + "name": "myDevContainer", + "remoteUser": "root", + "forwardPorts": [ + "db:5432", + 3000 + ], + "portsAttributes": { + "3000": { + "label": "This Port", + "onAutoForward": "notify", + "elevateIfNeeded": false, + "requireLocalPort": true, + "protocol": "https" + }, + "db:5432": { + "label": "This Port too", + "onAutoForward": "silent", + "elevateIfNeeded": true, + "requireLocalPort": false, + "protocol": "http" + } + }, + "otherPortsAttributes": { + "label": "Other Ports", + "onAutoForward": "openBrowser", + "elevateIfNeeded": true, + "requireLocalPort": true, + "protocol": "https" + }, + "updateRemoteUserUID": true, + "remoteEnv": { + "MYVAR1": "myvarvalue", + "MYVAR2": "myvarothervalue" + }, + "initializeCommand": ["echo", "initialize_command"], + "onCreateCommand": "echo on_create_command", + "updateContentCommand": { + "first": "echo update_content_command", + "second": ["echo", "update_content_command"] + }, + "postCreateCommand": ["echo", "post_create_command"], + "postStartCommand": "echo post_start_command", + "postAttachCommand": { + "something": "echo post_attach_command", + "something1": "echo something else", + }, + "waitFor": "postStartCommand", + "userEnvProbe": "loginShell", + "features": { + "ghcr.io/devcontainers/features/aws-cli:1": {}, + "ghcr.io/devcontainers/features/anaconda:1": {} + }, + "overrideFeatureInstallOrder": [ + "ghcr.io/devcontainers/features/anaconda:1", + "ghcr.io/devcontainers/features/aws-cli:1" + ], + "hostRequirements": { + "cpus": 2, + "memory": "8gb", + "storage": "32gb", + // Note that we're not parsing this currently + "gpu": true, + }, + "dockerComposeFile": "docker-compose.yml", + "service": "myService", + "runServices": [ + "myService", + "mySupportingService" + ], + "workspaceFolder": "/workspaces/thing", + "shutdownAction": "stopCompose", + "overrideCommand": true + } + "#; + let result = 
deserialize_devcontainer_json(given_docker_compose_json); + + assert!(result.is_ok()); + let devcontainer = result.expect("ok"); + assert_eq!( + devcontainer, + DevContainer { + name: Some(String::from("myDevContainer")), + remote_user: Some(String::from("root")), + forward_ports: Some(vec![ + ForwardPort::String("db:5432".to_string()), + ForwardPort::Number(3000), + ]), + ports_attributes: Some(HashMap::from([ + ( + "3000".to_string(), + PortAttributes { + label: "This Port".to_string(), + on_auto_forward: OnAutoForward::Notify, + elevate_if_needed: false, + require_local_port: true, + protocol: PortAttributeProtocol::Https + } + ), + ( + "db:5432".to_string(), + PortAttributes { + label: "This Port too".to_string(), + on_auto_forward: OnAutoForward::Silent, + elevate_if_needed: true, + require_local_port: false, + protocol: PortAttributeProtocol::Http + } + ) + ])), + other_ports_attributes: Some(PortAttributes { + label: "Other Ports".to_string(), + on_auto_forward: OnAutoForward::OpenBrowser, + elevate_if_needed: true, + require_local_port: true, + protocol: PortAttributeProtocol::Https + }), + update_remote_user_uid: Some(true), + remote_env: Some(HashMap::from([ + ("MYVAR1".to_string(), "myvarvalue".to_string()), + ("MYVAR2".to_string(), "myvarothervalue".to_string()) + ])), + initialize_command: Some(LifecycleScript::from_args(vec![ + "echo".to_string(), + "initialize_command".to_string() + ])), + on_create_command: Some(LifecycleScript::from_str("echo on_create_command")), + update_content_command: Some(LifecycleScript::from_map(HashMap::from([ + ( + "first".to_string(), + vec!["echo".to_string(), "update_content_command".to_string()] + ), + ( + "second".to_string(), + vec!["echo".to_string(), "update_content_command".to_string()] + ) + ]))), + post_create_command: Some(LifecycleScript::from_str("echo post_create_command")), + post_start_command: Some(LifecycleScript::from_args(vec![ + "echo".to_string(), + "post_start_command".to_string() + ])), + 
post_attach_command: Some(LifecycleScript::from_map(HashMap::from([ + ( + "something".to_string(), + vec!["echo".to_string(), "post_attach_command".to_string()] + ), + ( + "something1".to_string(), + vec![ + "echo".to_string(), + "something".to_string(), + "else".to_string() + ] + ) + ]))), + wait_for: Some(LifecycleCommand::PostStartCommand), + user_env_probe: Some(UserEnvProbe::LoginShell), + features: Some(HashMap::from([ + ( + "ghcr.io/devcontainers/features/aws-cli:1".to_string(), + FeatureOptions::Options(HashMap::new()) + ), + ( + "ghcr.io/devcontainers/features/anaconda:1".to_string(), + FeatureOptions::Options(HashMap::new()) + ) + ])), + override_feature_install_order: Some(vec![ + "ghcr.io/devcontainers/features/anaconda:1".to_string(), + "ghcr.io/devcontainers/features/aws-cli:1".to_string() + ]), + host_requirements: Some(HostRequirements { + cpus: Some(2), + memory: Some("8gb".to_string()), + storage: Some("32gb".to_string()), + }), + docker_compose_file: Some(vec!["docker-compose.yml".to_string()]), + service: Some("myService".to_string()), + run_services: Some(vec![ + "myService".to_string(), + "mySupportingService".to_string(), + ]), + workspace_folder: Some("/workspaces/thing".to_string()), + shutdown_action: Some(ShutdownAction::StopCompose), + override_command: Some(true), + ..Default::default() + } + ); + + assert_eq!( + devcontainer.build_type(), + DevContainerBuildType::DockerCompose + ); + } + + #[test] + fn should_deserialize_dockerfile_devcontainer_json() { + let given_dockerfile_container_json = r#" + // These are some external comments. 
serde_lenient should handle them + { + // These are some internal comments + "name": "myDevContainer", + "remoteUser": "root", + "forwardPorts": [ + "db:5432", + 3000 + ], + "portsAttributes": { + "3000": { + "label": "This Port", + "onAutoForward": "notify", + "elevateIfNeeded": false, + "requireLocalPort": true, + "protocol": "https" + }, + "db:5432": { + "label": "This Port too", + "onAutoForward": "silent", + "elevateIfNeeded": true, + "requireLocalPort": false, + "protocol": "http" + } + }, + "otherPortsAttributes": { + "label": "Other Ports", + "onAutoForward": "openBrowser", + "elevateIfNeeded": true, + "requireLocalPort": true, + "protocol": "https" + }, + "updateRemoteUserUID": true, + "remoteEnv": { + "MYVAR1": "myvarvalue", + "MYVAR2": "myvarothervalue" + }, + "initializeCommand": ["echo", "initialize_command"], + "onCreateCommand": "echo on_create_command", + "updateContentCommand": { + "first": "echo update_content_command", + "second": ["echo", "update_content_command"] + }, + "postCreateCommand": ["echo", "post_create_command"], + "postStartCommand": "echo post_start_command", + "postAttachCommand": { + "something": "echo post_attach_command", + "something1": "echo something else", + }, + "waitFor": "postStartCommand", + "userEnvProbe": "loginShell", + "features": { + "ghcr.io/devcontainers/features/aws-cli:1": {}, + "ghcr.io/devcontainers/features/anaconda:1": {} + }, + "overrideFeatureInstallOrder": [ + "ghcr.io/devcontainers/features/anaconda:1", + "ghcr.io/devcontainers/features/aws-cli:1" + ], + "hostRequirements": { + "cpus": 2, + "memory": "8gb", + "storage": "32gb", + // Note that we're not parsing this currently + "gpu": true, + }, + "appPort": 8081, + "containerEnv": { + "MYVAR3": "myvar3", + "MYVAR4": "myvar4" + }, + "containerUser": "myUser", + "mounts": [ + { + "source": "/localfolder/app", + "target": "/workspaces/app", + "type": "volume" + }, + "source=dev-containers-cli-bashhistory,target=/home/node/commandhistory", + ], + "runArgs": 
[ + "-c", + "some_command" + ], + "shutdownAction": "stopContainer", + "overrideCommand": true, + "workspaceFolder": "/workspaces", + "workspaceMount": "source=/folder,target=/workspace,type=bind,consistency=cached", + "build": { + "dockerfile": "DockerFile", + "context": "..", + "args": { + "MYARG": "MYVALUE" + }, + "options": [ + "--some-option", + "--mount" + ], + "target": "development", + "cacheFrom": "some_image" + } + } + "#; + + let result = deserialize_devcontainer_json(given_dockerfile_container_json); + + assert!(result.is_ok()); + let devcontainer = result.expect("ok"); + assert_eq!( + devcontainer, + DevContainer { + name: Some(String::from("myDevContainer")), + remote_user: Some(String::from("root")), + forward_ports: Some(vec![ + ForwardPort::String("db:5432".to_string()), + ForwardPort::Number(3000), + ]), + ports_attributes: Some(HashMap::from([ + ( + "3000".to_string(), + PortAttributes { + label: "This Port".to_string(), + on_auto_forward: OnAutoForward::Notify, + elevate_if_needed: false, + require_local_port: true, + protocol: PortAttributeProtocol::Https + } + ), + ( + "db:5432".to_string(), + PortAttributes { + label: "This Port too".to_string(), + on_auto_forward: OnAutoForward::Silent, + elevate_if_needed: true, + require_local_port: false, + protocol: PortAttributeProtocol::Http + } + ) + ])), + other_ports_attributes: Some(PortAttributes { + label: "Other Ports".to_string(), + on_auto_forward: OnAutoForward::OpenBrowser, + elevate_if_needed: true, + require_local_port: true, + protocol: PortAttributeProtocol::Https + }), + update_remote_user_uid: Some(true), + remote_env: Some(HashMap::from([ + ("MYVAR1".to_string(), "myvarvalue".to_string()), + ("MYVAR2".to_string(), "myvarothervalue".to_string()) + ])), + initialize_command: Some(LifecycleScript::from_args(vec![ + "echo".to_string(), + "initialize_command".to_string() + ])), + on_create_command: Some(LifecycleScript::from_str("echo on_create_command")), + update_content_command: 
Some(LifecycleScript::from_map(HashMap::from([ + ( + "first".to_string(), + vec!["echo".to_string(), "update_content_command".to_string()] + ), + ( + "second".to_string(), + vec!["echo".to_string(), "update_content_command".to_string()] + ) + ]))), + post_create_command: Some(LifecycleScript::from_str("echo post_create_command")), + post_start_command: Some(LifecycleScript::from_args(vec![ + "echo".to_string(), + "post_start_command".to_string() + ])), + post_attach_command: Some(LifecycleScript::from_map(HashMap::from([ + ( + "something".to_string(), + vec!["echo".to_string(), "post_attach_command".to_string()] + ), + ( + "something1".to_string(), + vec![ + "echo".to_string(), + "something".to_string(), + "else".to_string() + ] + ) + ]))), + wait_for: Some(LifecycleCommand::PostStartCommand), + user_env_probe: Some(UserEnvProbe::LoginShell), + features: Some(HashMap::from([ + ( + "ghcr.io/devcontainers/features/aws-cli:1".to_string(), + FeatureOptions::Options(HashMap::new()) + ), + ( + "ghcr.io/devcontainers/features/anaconda:1".to_string(), + FeatureOptions::Options(HashMap::new()) + ) + ])), + override_feature_install_order: Some(vec![ + "ghcr.io/devcontainers/features/anaconda:1".to_string(), + "ghcr.io/devcontainers/features/aws-cli:1".to_string() + ]), + host_requirements: Some(HostRequirements { + cpus: Some(2), + memory: Some("8gb".to_string()), + storage: Some("32gb".to_string()), + }), + app_port: Some("8081".to_string()), + container_env: Some(HashMap::from([ + ("MYVAR3".to_string(), "myvar3".to_string()), + ("MYVAR4".to_string(), "myvar4".to_string()) + ])), + container_user: Some("myUser".to_string()), + mounts: Some(vec![ + MountDefinition { + source: "/localfolder/app".to_string(), + target: "/workspaces/app".to_string(), + mount_type: Some("volume".to_string()), + }, + MountDefinition { + source: "dev-containers-cli-bashhistory".to_string(), + target: "/home/node/commandhistory".to_string(), + mount_type: None, + } + ]), + run_args: 
Some(vec!["-c".to_string(), "some_command".to_string()]), + shutdown_action: Some(ShutdownAction::StopContainer), + override_command: Some(true), + workspace_folder: Some("/workspaces".to_string()), + workspace_mount: Some(MountDefinition { + source: "/folder".to_string(), + target: "/workspace".to_string(), + mount_type: Some("bind".to_string()) + }), + build: Some(ContainerBuild { + dockerfile: "DockerFile".to_string(), + context: Some("..".to_string()), + args: Some(HashMap::from([( + "MYARG".to_string(), + "MYVALUE".to_string() + )])), + options: Some(vec!["--some-option".to_string(), "--mount".to_string()]), + target: Some("development".to_string()), + cache_from: Some(vec!["some_image".to_string()]), + }), + ..Default::default() + } + ); + + assert_eq!(devcontainer.build_type(), DevContainerBuildType::Dockerfile); + } +} diff --git a/crates/dev_container/src/devcontainer_manifest.rs b/crates/dev_container/src/devcontainer_manifest.rs new file mode 100644 index 0000000000000000000000000000000000000000..1c2863f96118b5bac006f3a590da8cf8980994e2 --- /dev/null +++ b/crates/dev_container/src/devcontainer_manifest.rs @@ -0,0 +1,6571 @@ +use std::{ + collections::HashMap, + fmt::Debug, + hash::{DefaultHasher, Hash, Hasher}, + path::{Path, PathBuf}, + sync::Arc, +}; + +use fs::Fs; +use http_client::HttpClient; +use util::{ResultExt, command::Command}; + +use crate::{ + DevContainerConfig, DevContainerContext, + command_json::{CommandRunner, DefaultCommandRunner}, + devcontainer_api::{DevContainerError, DevContainerUp}, + devcontainer_json::{ + DevContainer, DevContainerBuildType, FeatureOptions, ForwardPort, MountDefinition, + deserialize_devcontainer_json, + }, + docker::{ + Docker, DockerClient, DockerComposeConfig, DockerComposeService, DockerComposeServiceBuild, + DockerComposeVolume, DockerInspect, DockerPs, get_remote_dir_from_config, + }, + features::{DevContainerFeatureJson, FeatureManifest, parse_oci_feature_ref}, + get_oci_token, + oci::{TokenResponse, 
download_oci_tarball, get_oci_manifest}, + safe_id_lower, +}; + +enum ConfigStatus { + Deserialized(DevContainer), + VariableParsed(DevContainer), +} + +#[derive(Debug, Clone, Eq, PartialEq, Default)] +pub(crate) struct DockerComposeResources { + files: Vec, + config: DockerComposeConfig, +} + +struct DevContainerManifest { + http_client: Arc, + fs: Arc, + docker_client: Arc, + command_runner: Arc, + raw_config: String, + config: ConfigStatus, + local_environment: HashMap, + local_project_directory: PathBuf, + config_directory: PathBuf, + file_name: String, + root_image: Option, + features_build_info: Option, + features: Vec, +} +const DEFAULT_REMOTE_PROJECT_DIR: &str = "/workspaces/"; +impl DevContainerManifest { + async fn new( + context: &DevContainerContext, + environment: HashMap, + docker_client: Arc, + command_runner: Arc, + local_config: DevContainerConfig, + local_project_path: &Path, + ) -> Result { + let config_path = local_project_path.join(local_config.config_path.clone()); + log::debug!("parsing devcontainer json found in {:?}", &config_path); + let devcontainer_contents = context.fs.load(&config_path).await.map_err(|e| { + log::error!("Unable to read devcontainer contents: {e}"); + DevContainerError::DevContainerParseFailed + })?; + + let devcontainer = deserialize_devcontainer_json(&devcontainer_contents)?; + + let devcontainer_directory = config_path.parent().ok_or_else(|| { + log::error!("Dev container file should be in a directory"); + DevContainerError::NotInValidProject + })?; + let file_name = config_path + .file_name() + .and_then(|f| f.to_str()) + .ok_or_else(|| { + log::error!("Dev container file has no file name, or is invalid unicode"); + DevContainerError::DevContainerParseFailed + })?; + + Ok(Self { + fs: context.fs.clone(), + http_client: context.http_client.clone(), + docker_client, + command_runner, + raw_config: devcontainer_contents, + config: ConfigStatus::Deserialized(devcontainer), + local_project_directory: 
local_project_path.to_path_buf(), + local_environment: environment, + config_directory: devcontainer_directory.to_path_buf(), + file_name: file_name.to_string(), + root_image: None, + features_build_info: None, + features: Vec::new(), + }) + } + + fn devcontainer_id(&self) -> String { + let mut labels = self.identifying_labels(); + labels.sort_by_key(|(key, _)| *key); + + let mut hasher = DefaultHasher::new(); + for (key, value) in &labels { + key.hash(&mut hasher); + value.hash(&mut hasher); + } + + format!("{:016x}", hasher.finish()) + } + + fn identifying_labels(&self) -> Vec<(&str, String)> { + let labels = vec![ + ( + "devcontainer.local_folder", + (self.local_project_directory.display()).to_string(), + ), + ( + "devcontainer.config_file", + (self.config_file().display()).to_string(), + ), + ]; + labels + } + + fn parse_nonremote_vars_for_content(&self, content: &str) -> Result { + let mut replaced_content = content + .replace("${devcontainerId}", &self.devcontainer_id()) + .replace( + "${containerWorkspaceFolderBasename}", + &self.remote_workspace_base_name().unwrap_or_default(), + ) + .replace( + "${localWorkspaceFolderBasename}", + &self.local_workspace_base_name()?, + ) + .replace( + "${containerWorkspaceFolder}", + &self + .remote_workspace_folder() + .map(|path| path.display().to_string()) + .unwrap_or_default() + .replace('\\', "/"), + ) + .replace( + "${localWorkspaceFolder}", + &self.local_workspace_folder().replace('\\', "/"), + ); + for (k, v) in &self.local_environment { + let find = format!("${{localEnv:{k}}}"); + replaced_content = replaced_content.replace(&find, &v.replace('\\', "/")); + } + + Ok(replaced_content) + } + + fn parse_nonremote_vars(&mut self) -> Result<(), DevContainerError> { + let replaced_content = self.parse_nonremote_vars_for_content(&self.raw_config)?; + let parsed_config = deserialize_devcontainer_json(&replaced_content)?; + + self.config = ConfigStatus::VariableParsed(parsed_config); + + Ok(()) + } + + fn 
runtime_remote_env( + &self, + container_env: &HashMap, + ) -> Result, DevContainerError> { + let mut merged_remote_env = container_env.clone(); + // HOME is user-specific, and we will often not run as the image user + merged_remote_env.remove("HOME"); + if let Some(remote_env) = self.dev_container().remote_env.clone() { + let mut raw = serde_json_lenient::to_string(&remote_env).map_err(|e| { + log::error!( + "Unexpected error serializing dev container remote_env: {e} - {:?}", + remote_env + ); + DevContainerError::DevContainerParseFailed + })?; + for (k, v) in container_env { + raw = raw.replace(&format!("${{containerEnv:{k}}}"), v); + } + let reserialized: HashMap = serde_json_lenient::from_str(&raw) + .map_err(|e| { + log::error!( + "Unexpected error reserializing dev container remote env: {e} - {:?}", + &raw + ); + DevContainerError::DevContainerParseFailed + })?; + for (k, v) in reserialized { + merged_remote_env.insert(k, v); + } + } + Ok(merged_remote_env) + } + + fn config_file(&self) -> PathBuf { + self.config_directory.join(&self.file_name) + } + + fn dev_container(&self) -> &DevContainer { + match &self.config { + ConfigStatus::Deserialized(dev_container) => dev_container, + ConfigStatus::VariableParsed(dev_container) => dev_container, + } + } + + async fn dockerfile_location(&self) -> Option { + let dev_container = self.dev_container(); + match dev_container.build_type() { + DevContainerBuildType::Image => None, + DevContainerBuildType::Dockerfile => dev_container + .build + .as_ref() + .map(|build| self.config_directory.join(&build.dockerfile)), + DevContainerBuildType::DockerCompose => { + let Ok(docker_compose_manifest) = self.docker_compose_manifest().await else { + return None; + }; + let Ok((_, main_service)) = find_primary_service(&docker_compose_manifest, self) + else { + return None; + }; + main_service + .build + .and_then(|b| b.dockerfile) + .map(|dockerfile| self.config_directory.join(dockerfile)) + } + DevContainerBuildType::None => None, + 
} + } + + fn generate_features_image_tag(&self, dockerfile_build_path: String) -> String { + let mut hasher = DefaultHasher::new(); + let prefix = match &self.dev_container().name { + Some(name) => &safe_id_lower(name), + None => "zed-dc", + }; + let prefix = prefix.get(..6).unwrap_or(prefix); + + dockerfile_build_path.hash(&mut hasher); + + let hash = hasher.finish(); + format!("{}-{:x}-features", prefix, hash) + } + + /// Gets the base image from the devcontainer with the following precedence: + /// - The devcontainer image if an image is specified + /// - The image sourced in the Dockerfile if a Dockerfile is specified + /// - The image sourced in the docker-compose main service, if one is specified + /// - The image sourced in the docker-compose main service dockerfile, if one is specified + /// If no such image is available, return an error + async fn get_base_image_from_config(&self) -> Result { + if let Some(image) = &self.dev_container().image { + return Ok(image.to_string()); + } + if let Some(dockerfile) = self.dev_container().build.as_ref().map(|b| &b.dockerfile) { + let dockerfile_contents = self + .fs + .load(&self.config_directory.join(dockerfile)) + .await + .map_err(|e| { + log::error!("Error reading dockerfile: {e}"); + DevContainerError::DevContainerParseFailed + })?; + return image_from_dockerfile(self, dockerfile_contents); + } + if self.dev_container().docker_compose_file.is_some() { + let docker_compose_manifest = self.docker_compose_manifest().await?; + let (_, main_service) = find_primary_service(&docker_compose_manifest, &self)?; + + if let Some(dockerfile) = main_service + .build + .as_ref() + .and_then(|b| b.dockerfile.as_ref()) + { + let dockerfile_contents = self + .fs + .load(&self.config_directory.join(dockerfile)) + .await + .map_err(|e| { + log::error!("Error reading dockerfile: {e}"); + DevContainerError::DevContainerParseFailed + })?; + return image_from_dockerfile(self, dockerfile_contents); + } + if let Some(image) = 
&main_service.image { + return Ok(image.to_string()); + } + + log::error!("No valid base image found in docker-compose configuration"); + return Err(DevContainerError::DevContainerParseFailed); + } + log::error!("No valid base image found in dev container configuration"); + Err(DevContainerError::DevContainerParseFailed) + } + + async fn download_feature_and_dockerfile_resources(&mut self) -> Result<(), DevContainerError> { + let dev_container = match &self.config { + ConfigStatus::Deserialized(_) => { + log::error!( + "Dev container has not yet been parsed for variable expansion. Cannot yet download resources" + ); + return Err(DevContainerError::DevContainerParseFailed); + } + ConfigStatus::VariableParsed(dev_container) => dev_container, + }; + let root_image_tag = self.get_base_image_from_config().await?; + let root_image = self.docker_client.inspect(&root_image_tag).await?; + + if dev_container.build_type() == DevContainerBuildType::Image + && !dev_container.has_features() + { + log::debug!("No resources to download. 
Proceeding with just the image"); + return Ok(()); + } + + let temp_base = std::env::temp_dir().join("devcontainer-zed"); + let timestamp = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .map(|d| d.as_millis()) + .unwrap_or(0); + + let features_content_dir = temp_base.join(format!("container-features-{}", timestamp)); + let empty_context_dir = temp_base.join("empty-folder"); + + self.fs + .create_dir(&features_content_dir) + .await + .map_err(|e| { + log::error!("Failed to create features content dir: {e}"); + DevContainerError::FilesystemError + })?; + + self.fs.create_dir(&empty_context_dir).await.map_err(|e| { + log::error!("Failed to create empty context dir: {e}"); + DevContainerError::FilesystemError + })?; + + let dockerfile_path = features_content_dir.join("Dockerfile.extended"); + let image_tag = + self.generate_features_image_tag(dockerfile_path.clone().display().to_string()); + + let build_info = FeaturesBuildInfo { + dockerfile_path, + features_content_dir, + empty_context_dir, + build_image: dev_container.image.clone(), + image_tag, + }; + + let features = match &dev_container.features { + Some(features) => features, + None => &HashMap::new(), + }; + + let container_user = get_container_user_from_config(&root_image, self)?; + let remote_user = get_remote_user_from_config(&root_image, self)?; + + let builtin_env_content = format!( + "_CONTAINER_USER={}\n_REMOTE_USER={}\n", + container_user, remote_user + ); + + let builtin_env_path = build_info + .features_content_dir + .join("devcontainer-features.builtin.env"); + + self.fs + .write(&builtin_env_path, &builtin_env_content.as_bytes()) + .await + .map_err(|e| { + log::error!("Failed to write builtin env file: {e}"); + DevContainerError::FilesystemError + })?; + + let ordered_features = + resolve_feature_order(features, &dev_container.override_feature_install_order); + + for (index, (feature_ref, options)) in ordered_features.iter().enumerate() { + if matches!(options, 
FeatureOptions::Bool(false)) { + log::debug!( + "Feature '{}' is disabled (set to false), skipping", + feature_ref + ); + continue; + } + + let feature_id = extract_feature_id(feature_ref); + let consecutive_id = format!("{}_{}", feature_id, index); + let feature_dir = build_info.features_content_dir.join(&consecutive_id); + + self.fs.create_dir(&feature_dir).await.map_err(|e| { + log::error!( + "Failed to create feature directory for {}: {e}", + feature_ref + ); + DevContainerError::FilesystemError + })?; + + let oci_ref = parse_oci_feature_ref(feature_ref).ok_or_else(|| { + log::error!( + "Feature '{}' is not a supported OCI feature reference", + feature_ref + ); + DevContainerError::DevContainerParseFailed + })?; + let TokenResponse { token } = + get_oci_token(&oci_ref.registry, &oci_ref.path, &self.http_client) + .await + .map_err(|e| { + log::error!("Failed to get OCI token for feature '{}': {e}", feature_ref); + DevContainerError::ResourceFetchFailed + })?; + let manifest = get_oci_manifest( + &oci_ref.registry, + &oci_ref.path, + &token, + &self.http_client, + &oci_ref.version, + None, + ) + .await + .map_err(|e| { + log::error!( + "Failed to fetch OCI manifest for feature '{}': {e}", + feature_ref + ); + DevContainerError::ResourceFetchFailed + })?; + let digest = &manifest + .layers + .first() + .ok_or_else(|| { + log::error!( + "OCI manifest for feature '{}' contains no layers", + feature_ref + ); + DevContainerError::ResourceFetchFailed + })? 
+ .digest; + download_oci_tarball( + &token, + &oci_ref.registry, + &oci_ref.path, + digest, + "application/vnd.devcontainers.layer.v1+tar", + &feature_dir, + &self.http_client, + &self.fs, + None, + ) + .await?; + + let feature_json_path = &feature_dir.join("devcontainer-feature.json"); + if !self.fs.is_file(feature_json_path).await { + let message = format!( + "No devcontainer-feature.json found in {:?}, no defaults to apply", + feature_json_path + ); + log::error!("{}", &message); + return Err(DevContainerError::ResourceFetchFailed); + } + + let contents = self.fs.load(&feature_json_path).await.map_err(|e| { + log::error!("error reading devcontainer-feature.json: {:?}", e); + DevContainerError::FilesystemError + })?; + + let contents_parsed = self.parse_nonremote_vars_for_content(&contents)?; + + let feature_json: DevContainerFeatureJson = + serde_json_lenient::from_str(&contents_parsed).map_err(|e| { + log::error!("Failed to parse devcontainer-feature.json: {e}"); + DevContainerError::ResourceFetchFailed + })?; + + let feature_manifest = FeatureManifest::new(consecutive_id, feature_dir, feature_json); + + log::debug!("Downloaded OCI feature content for '{}'", feature_ref); + + let env_content = feature_manifest + .write_feature_env(&self.fs, options) + .await?; + + let wrapper_content = generate_install_wrapper(feature_ref, feature_id, &env_content)?; + + self.fs + .write( + &feature_manifest + .file_path() + .join("devcontainer-features-install.sh"), + &wrapper_content.as_bytes(), + ) + .await + .map_err(|e| { + log::error!("Failed to write install wrapper for {}: {e}", feature_ref); + DevContainerError::FilesystemError + })?; + + self.features.push(feature_manifest); + } + + // --- Phase 3: Generate extended Dockerfile from the inflated manifests --- + + let is_compose = dev_container.build_type() == DevContainerBuildType::DockerCompose; + let use_buildkit = self.docker_client.supports_compose_buildkit() || !is_compose; + + let dockerfile_base_content = if 
let Some(location) = &self.dockerfile_location().await { + self.fs.load(location).await.log_err() + } else { + None + }; + + let dockerfile_content = self.generate_dockerfile_extended( + &container_user, + &remote_user, + dockerfile_base_content, + use_buildkit, + ); + + self.fs + .write(&build_info.dockerfile_path, &dockerfile_content.as_bytes()) + .await + .map_err(|e| { + log::error!("Failed to write Dockerfile.extended: {e}"); + DevContainerError::FilesystemError + })?; + + log::debug!( + "Features build resources written to {:?}", + build_info.features_content_dir + ); + + self.root_image = Some(root_image); + self.features_build_info = Some(build_info); + + Ok(()) + } + + fn generate_dockerfile_extended( + &self, + container_user: &str, + remote_user: &str, + dockerfile_content: Option, + use_buildkit: bool, + ) -> String { + #[cfg(not(target_os = "windows"))] + let update_remote_user_uid = self.dev_container().update_remote_user_uid.unwrap_or(true); + #[cfg(target_os = "windows")] + let update_remote_user_uid = false; + let feature_layers: String = self + .features + .iter() + .map(|manifest| { + manifest.generate_dockerfile_feature_layer( + use_buildkit, + FEATURES_CONTAINER_TEMP_DEST_FOLDER, + ) + }) + .collect(); + + let container_home_cmd = get_ent_passwd_shell_command(container_user); + let remote_home_cmd = get_ent_passwd_shell_command(remote_user); + + let dockerfile_content = dockerfile_content + .map(|content| { + if dockerfile_alias(&content).is_some() { + content + } else { + dockerfile_inject_alias(&content, "dev_container_auto_added_stage_label") + } + }) + .unwrap_or("".to_string()); + + let dest = FEATURES_CONTAINER_TEMP_DEST_FOLDER; + + let feature_content_source_stage = if use_buildkit { + "".to_string() + } else { + "\nFROM dev_container_feature_content_temp as dev_containers_feature_content_source\n" + .to_string() + }; + + let builtin_env_source_path = if use_buildkit { + "./devcontainer-features.builtin.env" + } else { + 
"/tmp/build-features/devcontainer-features.builtin.env" + }; + + let mut extended_dockerfile = format!( + r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder + +{dockerfile_content} +{feature_content_source_stage} +FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize +USER root +COPY --from=dev_containers_feature_content_source {builtin_env_source_path} /tmp/build-features/ +RUN chmod -R 0755 /tmp/build-features/ + +FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_target_stage + +USER root + +RUN mkdir -p {dest} +COPY --from=dev_containers_feature_content_normalize /tmp/build-features/ {dest} + +RUN \ +echo "_CONTAINER_USER_HOME=$({container_home_cmd} | cut -d: -f6)" >> {dest}/devcontainer-features.builtin.env && \ +echo "_REMOTE_USER_HOME=$({remote_home_cmd} | cut -d: -f6)" >> {dest}/devcontainer-features.builtin.env + +{feature_layers} + +ARG _DEV_CONTAINERS_IMAGE_USER=root +USER $_DEV_CONTAINERS_IMAGE_USER +"# + ); + + // If we're not adding a uid update layer, then we should add env vars to this layer instead + if !update_remote_user_uid { + extended_dockerfile = format!( + r#"{extended_dockerfile} +# Ensure that /etc/profile does not clobber the existing path +RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true +"# + ); + + for feature in &self.features { + let container_env_layer = feature.generate_dockerfile_env(); + extended_dockerfile = format!("{extended_dockerfile}\n{container_env_layer}"); + } + + if let Some(env) = &self.dev_container().container_env { + for (key, value) in env { + extended_dockerfile = format!("{extended_dockerfile}ENV {key}={value}\n"); + } + } + } + + extended_dockerfile + } + + fn build_merged_resources( + &self, + base_image: DockerInspect, + ) -> Result { + let dev_container = match &self.config { + ConfigStatus::Deserialized(_) => { + log::error!( + "Dev container has not yet been parsed for variable expansion. 
Cannot yet merge resources" + ); + return Err(DevContainerError::DevContainerParseFailed); + } + ConfigStatus::VariableParsed(dev_container) => dev_container, + }; + let mut mounts = dev_container.mounts.clone().unwrap_or(Vec::new()); + + let mut feature_mounts = self.features.iter().flat_map(|f| f.mounts()).collect(); + + mounts.append(&mut feature_mounts); + + let privileged = dev_container.privileged.unwrap_or(false) + || self.features.iter().any(|f| f.privileged()); + + let mut entrypoint_script_lines = vec![ + "echo Container started".to_string(), + "trap \"exit 0\" 15".to_string(), + ]; + + for entrypoint in self.features.iter().filter_map(|f| f.entrypoint()) { + entrypoint_script_lines.push(entrypoint.clone()); + } + entrypoint_script_lines.append(&mut vec![ + "exec \"$@\"".to_string(), + "while sleep 1 & wait $!; do :; done".to_string(), + ]); + + Ok(DockerBuildResources { + image: base_image, + additional_mounts: mounts, + privileged, + entrypoint_script: entrypoint_script_lines.join("\n").trim().to_string(), + }) + } + + async fn build_resources(&self) -> Result { + if let ConfigStatus::Deserialized(_) = &self.config { + log::error!( + "Dev container has not yet been parsed for variable expansion. Cannot yet build resources" + ); + return Err(DevContainerError::DevContainerParseFailed); + } + let dev_container = self.dev_container(); + match dev_container.build_type() { + DevContainerBuildType::Image | DevContainerBuildType::Dockerfile => { + let built_docker_image = self.build_docker_image().await?; + let built_docker_image = self + .update_remote_user_uid(built_docker_image, None) + .await?; + + let resources = self.build_merged_resources(built_docker_image)?; + Ok(DevContainerBuildResources::Docker(resources)) + } + DevContainerBuildType::DockerCompose => { + log::debug!("Using docker compose. 
Building extended compose files"); + let docker_compose_resources = self.build_and_extend_compose_files().await?; + + return Ok(DevContainerBuildResources::DockerCompose( + docker_compose_resources, + )); + } + DevContainerBuildType::None => { + return Err(DevContainerError::DevContainerParseFailed); + } + } + } + + async fn run_dev_container( + &self, + build_resources: DevContainerBuildResources, + ) -> Result { + let ConfigStatus::VariableParsed(_) = &self.config else { + log::error!( + "Variables have not been parsed; cannot proceed with running the dev container" + ); + return Err(DevContainerError::DevContainerParseFailed); + }; + let running_container = match build_resources { + DevContainerBuildResources::DockerCompose(resources) => { + self.run_docker_compose(resources).await? + } + DevContainerBuildResources::Docker(resources) => { + self.run_docker_image(resources).await? + } + }; + + let remote_user = get_remote_user_from_config(&running_container, self)?; + let remote_workspace_folder = get_remote_dir_from_config( + &running_container, + (&self.local_project_directory.display()).to_string(), + )?; + + let remote_env = self.runtime_remote_env(&running_container.config.env_as_map()?)?; + + Ok(DevContainerUp { + container_id: running_container.id, + remote_user, + remote_workspace_folder, + extension_ids: self.extension_ids(), + remote_env, + }) + } + + async fn docker_compose_manifest(&self) -> Result { + let dev_container = match &self.config { + ConfigStatus::Deserialized(_) => { + log::error!( + "Dev container has not yet been parsed for variable expansion. 
Cannot yet get docker compose files" + ); + return Err(DevContainerError::DevContainerParseFailed); + } + ConfigStatus::VariableParsed(dev_container) => dev_container, + }; + let Some(docker_compose_files) = dev_container.docker_compose_file.clone() else { + return Err(DevContainerError::DevContainerParseFailed); + }; + let docker_compose_full_paths = docker_compose_files + .iter() + .map(|relative| self.config_directory.join(relative)) + .collect::>(); + + let Some(config) = self + .docker_client + .get_docker_compose_config(&docker_compose_full_paths) + .await? + else { + log::error!("Output could not deserialize into DockerComposeConfig"); + return Err(DevContainerError::DevContainerParseFailed); + }; + Ok(DockerComposeResources { + files: docker_compose_full_paths, + config, + }) + } + + async fn build_and_extend_compose_files( + &self, + ) -> Result { + let dev_container = match &self.config { + ConfigStatus::Deserialized(_) => { + log::error!( + "Dev container has not yet been parsed for variable expansion. 
Cannot yet build from compose files" + ); + return Err(DevContainerError::DevContainerParseFailed); + } + ConfigStatus::VariableParsed(dev_container) => dev_container, + }; + + let Some(features_build_info) = &self.features_build_info else { + log::error!( + "Cannot build and extend compose files: features build info is not yet constructed" + ); + return Err(DevContainerError::DevContainerParseFailed); + }; + let mut docker_compose_resources = self.docker_compose_manifest().await?; + let supports_buildkit = self.docker_client.supports_compose_buildkit(); + + let (main_service_name, main_service) = + find_primary_service(&docker_compose_resources, self)?; + let built_service_image = if main_service + .build + .as_ref() + .map(|b| b.dockerfile.as_ref()) + .is_some() + { + if !supports_buildkit { + self.build_feature_content_image().await?; + } + + let dockerfile_path = &features_build_info.dockerfile_path; + + let build_args = if !supports_buildkit { + HashMap::from([ + ( + "_DEV_CONTAINERS_BASE_IMAGE".to_string(), + "dev_container_auto_added_stage_label".to_string(), + ), + ("_DEV_CONTAINERS_IMAGE_USER".to_string(), "root".to_string()), + ]) + } else { + HashMap::from([ + ("BUILDKIT_INLINE_CACHE".to_string(), "1".to_string()), + ( + "_DEV_CONTAINERS_BASE_IMAGE".to_string(), + "dev_container_auto_added_stage_label".to_string(), + ), + ("_DEV_CONTAINERS_IMAGE_USER".to_string(), "root".to_string()), + ]) + }; + + let additional_contexts = if !supports_buildkit { + None + } else { + Some(HashMap::from([( + "dev_containers_feature_content_source".to_string(), + features_build_info + .features_content_dir + .display() + .to_string(), + )])) + }; + + let build_override = DockerComposeConfig { + name: None, + services: HashMap::from([( + main_service_name.clone(), + DockerComposeService { + image: Some(features_build_info.image_tag.clone()), + entrypoint: None, + cap_add: None, + security_opt: None, + labels: None, + build: Some(DockerComposeServiceBuild { + context: Some( 
+ features_build_info.empty_context_dir.display().to_string(), + ), + dockerfile: Some(dockerfile_path.display().to_string()), + args: Some(build_args), + additional_contexts, + }), + volumes: Vec::new(), + ..Default::default() + }, + )]), + volumes: HashMap::new(), + }; + + let temp_base = std::env::temp_dir().join("devcontainer-zed"); + let config_location = temp_base.join("docker_compose_build.json"); + + let config_json = serde_json_lenient::to_string(&build_override).map_err(|e| { + log::error!("Error serializing docker compose runtime override: {e}"); + DevContainerError::DevContainerParseFailed + })?; + + self.fs + .write(&config_location, config_json.as_bytes()) + .await + .map_err(|e| { + log::error!("Error writing the runtime override file: {e}"); + DevContainerError::FilesystemError + })?; + + docker_compose_resources.files.push(config_location); + + self.docker_client + .docker_compose_build(&docker_compose_resources.files, &self.project_name()) + .await?; + self.docker_client + .inspect(&features_build_info.image_tag) + .await? + } else if let Some(image) = &main_service.image { + if dev_container + .features + .as_ref() + .is_none_or(|features| features.is_empty()) + { + self.docker_client.inspect(image).await? 
+ } else { + if !supports_buildkit { + self.build_feature_content_image().await?; + } + + let dockerfile_path = &features_build_info.dockerfile_path; + + let build_args = if !supports_buildkit { + HashMap::from([ + ("_DEV_CONTAINERS_BASE_IMAGE".to_string(), image.clone()), + ("_DEV_CONTAINERS_IMAGE_USER".to_string(), "root".to_string()), + ]) + } else { + HashMap::from([ + ("BUILDKIT_INLINE_CACHE".to_string(), "1".to_string()), + ("_DEV_CONTAINERS_BASE_IMAGE".to_string(), image.clone()), + ("_DEV_CONTAINERS_IMAGE_USER".to_string(), "root".to_string()), + ]) + }; + + let additional_contexts = if !supports_buildkit { + None + } else { + Some(HashMap::from([( + "dev_containers_feature_content_source".to_string(), + features_build_info + .features_content_dir + .display() + .to_string(), + )])) + }; + + let build_override = DockerComposeConfig { + name: None, + services: HashMap::from([( + main_service_name.clone(), + DockerComposeService { + image: Some(features_build_info.image_tag.clone()), + entrypoint: None, + cap_add: None, + security_opt: None, + labels: None, + build: Some(DockerComposeServiceBuild { + context: Some( + features_build_info.empty_context_dir.display().to_string(), + ), + dockerfile: Some(dockerfile_path.display().to_string()), + args: Some(build_args), + additional_contexts, + }), + volumes: Vec::new(), + ..Default::default() + }, + )]), + volumes: HashMap::new(), + }; + + let temp_base = std::env::temp_dir().join("devcontainer-zed"); + let config_location = temp_base.join("docker_compose_build.json"); + + let config_json = serde_json_lenient::to_string(&build_override).map_err(|e| { + log::error!("Error serializing docker compose runtime override: {e}"); + DevContainerError::DevContainerParseFailed + })?; + + self.fs + .write(&config_location, config_json.as_bytes()) + .await + .map_err(|e| { + log::error!("Error writing the runtime override file: {e}"); + DevContainerError::FilesystemError + })?; + + 
docker_compose_resources.files.push(config_location); + + self.docker_client + .docker_compose_build(&docker_compose_resources.files, &self.project_name()) + .await?; + + self.docker_client + .inspect(&features_build_info.image_tag) + .await? + } + } else { + log::error!("Docker compose must have either image or dockerfile defined"); + return Err(DevContainerError::DevContainerParseFailed); + }; + + let built_service_image = self + .update_remote_user_uid(built_service_image, Some(&features_build_info.image_tag)) + .await?; + + let resources = self.build_merged_resources(built_service_image)?; + + let network_mode = main_service.network_mode.as_ref(); + let network_mode_service = network_mode.and_then(|mode| mode.strip_prefix("service:")); + let runtime_override_file = self + .write_runtime_override_file(&main_service_name, network_mode_service, resources) + .await?; + + docker_compose_resources.files.push(runtime_override_file); + + Ok(docker_compose_resources) + } + + async fn write_runtime_override_file( + &self, + main_service_name: &str, + network_mode_service: Option<&str>, + resources: DockerBuildResources, + ) -> Result { + let config = + self.build_runtime_override(main_service_name, network_mode_service, resources)?; + let temp_base = std::env::temp_dir().join("devcontainer-zed"); + let config_location = temp_base.join("docker_compose_runtime.json"); + + let config_json = serde_json_lenient::to_string(&config).map_err(|e| { + log::error!("Error serializing docker compose runtime override: {e}"); + DevContainerError::DevContainerParseFailed + })?; + + self.fs + .write(&config_location, config_json.as_bytes()) + .await + .map_err(|e| { + log::error!("Error writing the runtime override file: {e}"); + DevContainerError::FilesystemError + })?; + + Ok(config_location) + } + + fn build_runtime_override( + &self, + main_service_name: &str, + network_mode_service: Option<&str>, + resources: DockerBuildResources, + ) -> Result { + let mut runtime_labels = vec![]; + 
+ if let Some(metadata) = &resources.image.config.labels.metadata { + let serialized_metadata = serde_json_lenient::to_string(metadata).map_err(|e| { + log::error!("Error serializing docker image metadata: {e}"); + DevContainerError::ContainerNotValid(resources.image.id.clone()) + })?; + + runtime_labels.push(format!( + "{}={}", + "devcontainer.metadata", serialized_metadata + )); + } + + for (k, v) in self.identifying_labels() { + runtime_labels.push(format!("{}={}", k, v)); + } + + let config_volumes: HashMap = resources + .additional_mounts + .iter() + .filter_map(|mount| { + if let Some(mount_type) = &mount.mount_type + && mount_type.to_lowercase() == "volume" + { + Some(( + mount.source.clone(), + DockerComposeVolume { + name: mount.source.clone(), + }, + )) + } else { + None + } + }) + .collect(); + + let volumes: Vec = resources + .additional_mounts + .iter() + .map(|v| MountDefinition { + source: v.source.clone(), + target: v.target.clone(), + mount_type: v.mount_type.clone(), + }) + .collect(); + + let mut main_service = DockerComposeService { + entrypoint: Some(vec![ + "/bin/sh".to_string(), + "-c".to_string(), + resources.entrypoint_script, + "-".to_string(), + ]), + cap_add: Some(vec!["SYS_PTRACE".to_string()]), + security_opt: Some(vec!["seccomp=unconfined".to_string()]), + labels: Some(runtime_labels), + volumes, + privileged: Some(resources.privileged), + ..Default::default() + }; + // let mut extra_service_port_declarations: Vec<(String, DockerComposeService)> = Vec::new(); + let mut service_declarations: HashMap = HashMap::new(); + if let Some(forward_ports) = &self.dev_container().forward_ports { + let main_service_ports: Vec = forward_ports + .iter() + .filter_map(|f| match f { + ForwardPort::Number(port) => Some(port.to_string()), + ForwardPort::String(port) => { + let parts: Vec<&str> = port.split(":").collect(); + if parts.len() <= 1 { + Some(port.to_string()) + } else if parts.len() == 2 { + if parts[0] == main_service_name { + 
Some(parts[1].to_string()) + } else { + None + } + } else { + None + } + } + }) + .collect(); + for port in main_service_ports { + // If the main service uses a different service's network bridge, append to that service's ports instead + if let Some(network_service_name) = network_mode_service { + if let Some(service) = service_declarations.get_mut(network_service_name) { + service.ports.push(format!("{port}:{port}")); + } else { + service_declarations.insert( + network_service_name.to_string(), + DockerComposeService { + ports: vec![format!("{port}:{port}")], + ..Default::default() + }, + ); + } + } else { + main_service.ports.push(format!("{port}:{port}")); + } + } + let other_service_ports: Vec<(&str, &str)> = forward_ports + .iter() + .filter_map(|f| match f { + ForwardPort::Number(_) => None, + ForwardPort::String(port) => { + let parts: Vec<&str> = port.split(":").collect(); + if parts.len() != 2 { + None + } else { + if parts[0] == main_service_name { + None + } else { + Some((parts[0], parts[1])) + } + } + } + }) + .collect(); + for (service_name, port) in other_service_ports { + if let Some(service) = service_declarations.get_mut(service_name) { + service.ports.push(format!("{port}:{port}")); + } else { + service_declarations.insert( + service_name.to_string(), + DockerComposeService { + ports: vec![format!("{port}:{port}")], + ..Default::default() + }, + ); + } + } + } + if let Some(port) = &self.dev_container().app_port { + if let Some(network_service_name) = network_mode_service { + if let Some(service) = service_declarations.get_mut(network_service_name) { + service.ports.push(format!("{port}:{port}")); + } else { + service_declarations.insert( + network_service_name.to_string(), + DockerComposeService { + ports: vec![format!("{port}:{port}")], + ..Default::default() + }, + ); + } + } else { + main_service.ports.push(format!("{port}:{port}")); + } + } + + service_declarations.insert(main_service_name.to_string(), main_service); + let 
new_docker_compose_config = DockerComposeConfig { + name: None, + services: service_declarations, + volumes: config_volumes, + }; + + Ok(new_docker_compose_config) + } + + async fn build_docker_image(&self) -> Result { + let dev_container = match &self.config { + ConfigStatus::Deserialized(_) => { + log::error!( + "Dev container has not yet been parsed for variable expansion. Cannot yet build image" + ); + return Err(DevContainerError::DevContainerParseFailed); + } + ConfigStatus::VariableParsed(dev_container) => dev_container, + }; + + match dev_container.build_type() { + DevContainerBuildType::Image => { + let Some(image_tag) = &dev_container.image else { + return Err(DevContainerError::DevContainerParseFailed); + }; + let base_image = self.docker_client.inspect(image_tag).await?; + if dev_container + .features + .as_ref() + .is_none_or(|features| features.is_empty()) + { + log::debug!("No features to add. Using base image"); + return Ok(base_image); + } + } + DevContainerBuildType::Dockerfile => {} + DevContainerBuildType::DockerCompose | DevContainerBuildType::None => { + return Err(DevContainerError::DevContainerParseFailed); + } + }; + + let mut command = self.create_docker_build()?; + + let output = self + .command_runner + .run_command(&mut command) + .await + .map_err(|e| { + log::error!("Error building docker image: {e}"); + DevContainerError::CommandFailed(command.get_program().display().to_string()) + })?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + log::error!("docker buildx build failed: {stderr}"); + return Err(DevContainerError::CommandFailed( + command.get_program().display().to_string(), + )); + } + + // After a successful build, inspect the newly tagged image to get its metadata + let Some(features_build_info) = &self.features_build_info else { + log::error!("Features build info expected, but not created"); + return Err(DevContainerError::DevContainerParseFailed); + }; + let image = self + 
.docker_client + .inspect(&features_build_info.image_tag) + .await?; + + Ok(image) + } + + #[cfg(target_os = "windows")] + async fn update_remote_user_uid( + &self, + image: DockerInspect, + _override_tag: Option<&str>, + ) -> Result { + Ok(image) + } + #[cfg(not(target_os = "windows"))] + async fn update_remote_user_uid( + &self, + image: DockerInspect, + override_tag: Option<&str>, + ) -> Result { + let dev_container = self.dev_container(); + + let Some(features_build_info) = &self.features_build_info else { + return Ok(image); + }; + + // updateRemoteUserUID defaults to true per the devcontainers spec + if dev_container.update_remote_user_uid == Some(false) { + return Ok(image); + } + + let remote_user = get_remote_user_from_config(&image, self)?; + if remote_user == "root" || remote_user.chars().all(|c| c.is_ascii_digit()) { + return Ok(image); + } + + let image_user = image + .config + .image_user + .as_deref() + .unwrap_or("root") + .to_string(); + + let host_uid = Command::new("id") + .arg("-u") + .output() + .await + .map_err(|e| { + log::error!("Failed to get host UID: {e}"); + DevContainerError::CommandFailed("id -u".to_string()) + }) + .and_then(|output| { + String::from_utf8_lossy(&output.stdout) + .trim() + .parse::() + .map_err(|e| { + log::error!("Failed to parse host UID: {e}"); + DevContainerError::CommandFailed("id -u".to_string()) + }) + })?; + + let host_gid = Command::new("id") + .arg("-g") + .output() + .await + .map_err(|e| { + log::error!("Failed to get host GID: {e}"); + DevContainerError::CommandFailed("id -g".to_string()) + }) + .and_then(|output| { + String::from_utf8_lossy(&output.stdout) + .trim() + .parse::() + .map_err(|e| { + log::error!("Failed to parse host GID: {e}"); + DevContainerError::CommandFailed("id -g".to_string()) + }) + })?; + + let dockerfile_content = self.generate_update_uid_dockerfile(); + + let dockerfile_path = features_build_info + .features_content_dir + .join("updateUID.Dockerfile"); + self.fs + 
.write(&dockerfile_path, dockerfile_content.as_bytes()) + .await + .map_err(|e| { + log::error!("Failed to write updateUID Dockerfile: {e}"); + DevContainerError::FilesystemError + })?; + + let updated_image_tag = override_tag + .map(|t| t.to_string()) + .unwrap_or_else(|| format!("{}-uid", features_build_info.image_tag)); + + let mut command = Command::new(self.docker_client.docker_cli()); + command.args(["build"]); + command.args(["-f", &dockerfile_path.display().to_string()]); + command.args(["-t", &updated_image_tag]); + command.args([ + "--build-arg", + &format!("BASE_IMAGE={}", features_build_info.image_tag), + ]); + command.args(["--build-arg", &format!("REMOTE_USER={}", remote_user)]); + command.args(["--build-arg", &format!("NEW_UID={}", host_uid)]); + command.args(["--build-arg", &format!("NEW_GID={}", host_gid)]); + command.args(["--build-arg", &format!("IMAGE_USER={}", image_user)]); + command.arg(features_build_info.empty_context_dir.display().to_string()); + + let output = self + .command_runner + .run_command(&mut command) + .await + .map_err(|e| { + log::error!("Error building UID update image: {e}"); + DevContainerError::CommandFailed(command.get_program().display().to_string()) + })?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + log::error!("UID update build failed: {stderr}"); + return Err(DevContainerError::CommandFailed( + command.get_program().display().to_string(), + )); + } + + self.docker_client.inspect(&updated_image_tag).await + } + + #[cfg(not(target_os = "windows"))] + fn generate_update_uid_dockerfile(&self) -> String { + let mut dockerfile = r#"ARG BASE_IMAGE +FROM $BASE_IMAGE + +USER root + +ARG REMOTE_USER +ARG NEW_UID +ARG NEW_GID +SHELL ["/bin/sh", "-c"] +RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \ + eval $(sed 
-n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \ + if [ -z "$OLD_UID" ]; then \ + echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \ + elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \ + echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \ + elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \ + echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \ + else \ + if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \ + FREE_GID=65532; \ + while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \ + echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \ + sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \ + fi; \ + echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \ + sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \ + if [ "$OLD_GID" != "$NEW_GID" ]; then \ + sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \ + fi; \ + chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \ + fi; + +ARG IMAGE_USER +USER $IMAGE_USER + +# Ensure that /etc/profile does not clobber the existing path +RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true +"#.to_string(); + for feature in &self.features { + let container_env_layer = feature.generate_dockerfile_env(); + dockerfile = format!("{dockerfile}\n{container_env_layer}"); + } + + if let Some(env) = &self.dev_container().container_env { + for (key, value) in env { + dockerfile = format!("{dockerfile}ENV {key}={value}\n"); + } + } + dockerfile + } + + async fn build_feature_content_image(&self) -> Result<(), DevContainerError> { + let Some(features_build_info) = &self.features_build_info else { + log::error!("Features build info not available for building feature content image"); + return Err(DevContainerError::DevContainerParseFailed); + }; + let features_content_dir = 
&features_build_info.features_content_dir; + + let dockerfile_content = "FROM scratch\nCOPY . /tmp/build-features/\n"; + let dockerfile_path = features_content_dir.join("Dockerfile.feature-content"); + + self.fs + .write(&dockerfile_path, dockerfile_content.as_bytes()) + .await + .map_err(|e| { + log::error!("Failed to write feature content Dockerfile: {e}"); + DevContainerError::FilesystemError + })?; + + let mut command = Command::new(self.docker_client.docker_cli()); + command.args([ + "build", + "-t", + "dev_container_feature_content_temp", + "-f", + &dockerfile_path.display().to_string(), + &features_content_dir.display().to_string(), + ]); + + let output = self + .command_runner + .run_command(&mut command) + .await + .map_err(|e| { + log::error!("Error building feature content image: {e}"); + DevContainerError::CommandFailed(self.docker_client.docker_cli()) + })?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + log::error!("Feature content image build failed: {stderr}"); + return Err(DevContainerError::CommandFailed( + self.docker_client.docker_cli(), + )); + } + + Ok(()) + } + + fn create_docker_build(&self) -> Result { + let dev_container = match &self.config { + ConfigStatus::Deserialized(_) => { + log::error!( + "Dev container has not yet been parsed for variable expansion. 
Cannot yet proceed with docker build" + ); + return Err(DevContainerError::DevContainerParseFailed); + } + ConfigStatus::VariableParsed(dev_container) => dev_container, + }; + + let Some(features_build_info) = &self.features_build_info else { + log::error!( + "Cannot create docker build command; features build info has not been constructed" + ); + return Err(DevContainerError::DevContainerParseFailed); + }; + let mut command = Command::new(self.docker_client.docker_cli()); + + command.args(["buildx", "build"]); + + // --load is short for --output=docker, loading the built image into the local docker images + command.arg("--load"); + + // BuildKit build context: provides the features content directory as a named context + // that the Dockerfile.extended can COPY from via `--from=dev_containers_feature_content_source` + command.args([ + "--build-context", + &format!( + "dev_containers_feature_content_source={}", + features_build_info.features_content_dir.display() + ), + ]); + + // Build args matching the CLI reference implementation's `getFeaturesBuildOptions` + if let Some(build_image) = &features_build_info.build_image { + command.args([ + "--build-arg", + &format!("_DEV_CONTAINERS_BASE_IMAGE={}", build_image), + ]); + } else { + command.args([ + "--build-arg", + "_DEV_CONTAINERS_BASE_IMAGE=dev_container_auto_added_stage_label", + ]); + } + + command.args([ + "--build-arg", + &format!( + "_DEV_CONTAINERS_IMAGE_USER={}", + self.root_image + .as_ref() + .and_then(|docker_image| docker_image.config.image_user.as_ref()) + .unwrap_or(&"root".to_string()) + ), + ]); + + command.args([ + "--build-arg", + "_DEV_CONTAINERS_FEATURE_CONTENT_SOURCE=dev_container_feature_content_temp", + ]); + + if let Some(args) = dev_container.build.as_ref().and_then(|b| b.args.as_ref()) { + for (key, value) in args { + command.args(["--build-arg", &format!("{}={}", key, value)]); + } + } + + command.args(["--target", "dev_containers_target_stage"]); + + command.args([ + "-f", + 
&features_build_info.dockerfile_path.display().to_string(), + ]); + + command.args(["-t", &features_build_info.image_tag]); + + if dev_container.build_type() == DevContainerBuildType::Dockerfile { + command.arg(self.config_directory.display().to_string()); + } else { + // Use an empty folder as the build context to avoid pulling in unneeded files. + // The actual feature content is supplied via the BuildKit build context above. + command.arg(features_build_info.empty_context_dir.display().to_string()); + } + + Ok(command) + } + + async fn run_docker_compose( + &self, + resources: DockerComposeResources, + ) -> Result { + let mut command = Command::new(self.docker_client.docker_cli()); + command.args(&["compose", "--project-name", &self.project_name()]); + for docker_compose_file in resources.files { + command.args(&["-f", &docker_compose_file.display().to_string()]); + } + command.args(&["up", "-d"]); + + let output = self + .command_runner + .run_command(&mut command) + .await + .map_err(|e| { + log::error!("Error running docker compose up: {e}"); + DevContainerError::CommandFailed(command.get_program().display().to_string()) + })?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + log::error!("Non-success status from docker compose up: {}", stderr); + return Err(DevContainerError::CommandFailed( + command.get_program().display().to_string(), + )); + } + + if let Some(docker_ps) = self.check_for_existing_container().await? 
{ + log::debug!("Found newly created dev container"); + return self.docker_client.inspect(&docker_ps.id).await; + } + + log::error!("Could not find existing container after docker compose up"); + + Err(DevContainerError::DevContainerParseFailed) + } + + async fn run_docker_image( + &self, + build_resources: DockerBuildResources, + ) -> Result { + let mut docker_run_command = self.create_docker_run_command(build_resources)?; + + let output = self + .command_runner + .run_command(&mut docker_run_command) + .await + .map_err(|e| { + log::error!("Error running docker run: {e}"); + DevContainerError::CommandFailed( + docker_run_command.get_program().display().to_string(), + ) + })?; + + if !output.status.success() { + let std_err = String::from_utf8_lossy(&output.stderr); + log::error!("Non-success status from docker run. StdErr: {std_err}"); + return Err(DevContainerError::CommandFailed( + docker_run_command.get_program().display().to_string(), + )); + } + + log::debug!("Checking for container that was started"); + let Some(docker_ps) = self.check_for_existing_container().await? 
else { + log::error!("Could not locate container just created"); + return Err(DevContainerError::DevContainerParseFailed); + }; + self.docker_client.inspect(&docker_ps.id).await + } + + fn local_workspace_folder(&self) -> String { + self.local_project_directory.display().to_string() + } + fn local_workspace_base_name(&self) -> Result { + self.local_project_directory + .file_name() + .map(|f| f.display().to_string()) + .ok_or(DevContainerError::DevContainerParseFailed) + } + + fn remote_workspace_folder(&self) -> Result { + self.dev_container() + .workspace_folder + .as_ref() + .map(|folder| PathBuf::from(folder)) + .or(Some( + PathBuf::from(DEFAULT_REMOTE_PROJECT_DIR).join(self.local_workspace_base_name()?), + )) + .ok_or(DevContainerError::DevContainerParseFailed) + } + fn remote_workspace_base_name(&self) -> Result { + self.remote_workspace_folder().and_then(|f| { + f.file_name() + .map(|file_name| file_name.display().to_string()) + .ok_or(DevContainerError::DevContainerParseFailed) + }) + } + + fn remote_workspace_mount(&self) -> Result { + if let Some(mount) = &self.dev_container().workspace_mount { + return Ok(mount.clone()); + } + let Some(project_directory_name) = self.local_project_directory.file_name() else { + return Err(DevContainerError::DevContainerParseFailed); + }; + + Ok(MountDefinition { + source: self.local_workspace_folder(), + target: format!("/workspaces/{}", project_directory_name.display()), + mount_type: None, + }) + } + + fn create_docker_run_command( + &self, + build_resources: DockerBuildResources, + ) -> Result { + let remote_workspace_mount = self.remote_workspace_mount()?; + + let docker_cli = self.docker_client.docker_cli(); + let mut command = Command::new(&docker_cli); + + command.arg("run"); + + if build_resources.privileged { + command.arg("--privileged"); + } + + if &docker_cli == "podman" { + command.args(&["--security-opt", "label=disable", "--userns=keep-id"]); + } + + command.arg("--sig-proxy=false"); + command.arg("-d"); + 
command.arg("--mount"); + command.arg(remote_workspace_mount.to_string()); + + for mount in &build_resources.additional_mounts { + command.arg("--mount"); + command.arg(mount.to_string()); + } + + for (key, val) in self.identifying_labels() { + command.arg("-l"); + command.arg(format!("{}={}", key, val)); + } + + if let Some(metadata) = &build_resources.image.config.labels.metadata { + let serialized_metadata = serde_json_lenient::to_string(metadata).map_err(|e| { + log::error!("Problem serializing image metadata: {e}"); + DevContainerError::ContainerNotValid(build_resources.image.id.clone()) + })?; + command.arg("-l"); + command.arg(format!( + "{}={}", + "devcontainer.metadata", serialized_metadata + )); + } + + if let Some(forward_ports) = &self.dev_container().forward_ports { + for port in forward_ports { + if let ForwardPort::Number(port_number) = port { + command.arg("-p"); + command.arg(format!("{port_number}:{port_number}")); + } + } + } + if let Some(app_port) = &self.dev_container().app_port { + command.arg("-p"); + command.arg(format!("{app_port}:{app_port}")); + } + + command.arg("--entrypoint"); + command.arg("/bin/sh"); + command.arg(&build_resources.image.id); + command.arg("-c"); + + command.arg(build_resources.entrypoint_script); + command.arg("-"); + + Ok(command) + } + + fn extension_ids(&self) -> Vec { + self.dev_container() + .customizations + .as_ref() + .map(|c| c.zed.extensions.clone()) + .unwrap_or_default() + } + + async fn build_and_run(&mut self) -> Result { + self.run_initialize_commands().await?; + + self.download_feature_and_dockerfile_resources().await?; + + let build_resources = self.build_resources().await?; + + let devcontainer_up = self.run_dev_container(build_resources).await?; + + self.run_remote_scripts(&devcontainer_up, true).await?; + + Ok(devcontainer_up) + } + + async fn run_remote_scripts( + &self, + devcontainer_up: &DevContainerUp, + new_container: bool, + ) -> Result<(), DevContainerError> { + let 
ConfigStatus::VariableParsed(config) = &self.config else { + log::error!("Config not yet parsed, cannot proceed with remote scripts"); + return Err(DevContainerError::DevContainerScriptsFailed); + }; + let remote_folder = self.remote_workspace_folder()?.display().to_string(); + + if new_container { + if let Some(on_create_command) = &config.on_create_command { + for (command_name, command) in on_create_command.script_commands() { + log::debug!("Running on create command {command_name}"); + self.docker_client + .run_docker_exec( + &devcontainer_up.container_id, + &remote_folder, + "root", + &devcontainer_up.remote_env, + command, + ) + .await?; + } + } + if let Some(update_content_command) = &config.update_content_command { + for (command_name, command) in update_content_command.script_commands() { + log::debug!("Running update content command {command_name}"); + self.docker_client + .run_docker_exec( + &devcontainer_up.container_id, + &remote_folder, + "root", + &devcontainer_up.remote_env, + command, + ) + .await?; + } + } + + if let Some(post_create_command) = &config.post_create_command { + for (command_name, command) in post_create_command.script_commands() { + log::debug!("Running post create command {command_name}"); + self.docker_client + .run_docker_exec( + &devcontainer_up.container_id, + &remote_folder, + &devcontainer_up.remote_user, + &devcontainer_up.remote_env, + command, + ) + .await?; + } + } + if let Some(post_start_command) = &config.post_start_command { + for (command_name, command) in post_start_command.script_commands() { + log::debug!("Running post start command {command_name}"); + self.docker_client + .run_docker_exec( + &devcontainer_up.container_id, + &remote_folder, + &devcontainer_up.remote_user, + &devcontainer_up.remote_env, + command, + ) + .await?; + } + } + } + if let Some(post_attach_command) = &config.post_attach_command { + for (command_name, command) in post_attach_command.script_commands() { + log::debug!("Running post attach 
command {command_name}"); + self.docker_client + .run_docker_exec( + &devcontainer_up.container_id, + &remote_folder, + &devcontainer_up.remote_user, + &devcontainer_up.remote_env, + command, + ) + .await?; + } + } + + Ok(()) + } + + async fn run_initialize_commands(&self) -> Result<(), DevContainerError> { + let ConfigStatus::VariableParsed(config) = &self.config else { + log::error!("Config not yet parsed, cannot proceed with initializeCommand"); + return Err(DevContainerError::DevContainerParseFailed); + }; + + if let Some(initialize_command) = &config.initialize_command { + log::debug!("Running initialize command"); + initialize_command + .run(&self.command_runner, &self.local_project_directory) + .await + } else { + log::warn!("No initialize command found"); + Ok(()) + } + } + + async fn check_for_existing_devcontainer( + &self, + ) -> Result, DevContainerError> { + if let Some(docker_ps) = self.check_for_existing_container().await? { + log::debug!("Dev container already found. Proceeding with it"); + + let docker_inspect = self.docker_client.inspect(&docker_ps.id).await?; + + if !docker_inspect.is_running() { + log::debug!("Container not running. 
Will attempt to start, and then proceed"); + self.docker_client.start_container(&docker_ps.id).await?; + } + + let remote_user = get_remote_user_from_config(&docker_inspect, self)?; + + let remote_folder = get_remote_dir_from_config( + &docker_inspect, + (&self.local_project_directory.display()).to_string(), + )?; + + let remote_env = self.runtime_remote_env(&docker_inspect.config.env_as_map()?)?; + + let dev_container_up = DevContainerUp { + container_id: docker_ps.id, + remote_user: remote_user, + remote_workspace_folder: remote_folder, + extension_ids: self.extension_ids(), + remote_env, + }; + + self.run_remote_scripts(&dev_container_up, false).await?; + + Ok(Some(dev_container_up)) + } else { + log::debug!("Existing container not found."); + + Ok(None) + } + } + + async fn check_for_existing_container(&self) -> Result, DevContainerError> { + self.docker_client + .find_process_by_filters( + self.identifying_labels() + .iter() + .map(|(k, v)| format!("label={k}={v}")) + .collect(), + ) + .await + } + + fn project_name(&self) -> String { + if let Some(name) = &self.dev_container().name { + safe_id_lower(name) + } else { + let alternate_name = &self + .local_workspace_base_name() + .unwrap_or(self.local_workspace_folder()); + safe_id_lower(alternate_name) + } + } +} + +/// Holds all the information needed to construct a `docker buildx build` command +/// that extends a base image with dev container features. +/// +/// This mirrors the `ImageBuildOptions` interface in the CLI reference implementation +/// (cli/src/spec-node/containerFeatures.ts). +#[derive(Debug, Eq, PartialEq)] +pub(crate) struct FeaturesBuildInfo { + /// Path to the generated Dockerfile.extended + pub dockerfile_path: PathBuf, + /// Path to the features content directory (used as a BuildKit build context) + pub features_content_dir: PathBuf, + /// Path to an empty directory used as the Docker build context + pub empty_context_dir: PathBuf, + /// The base image name (e.g. 
"mcr.microsoft.com/devcontainers/rust:2-1-bookworm") + pub build_image: Option, + /// The tag to apply to the built image (e.g. "vsc-myproject-features") + pub image_tag: String, +} + +pub(crate) async fn read_devcontainer_configuration( + config: DevContainerConfig, + context: &DevContainerContext, + environment: HashMap, +) -> Result { + let docker = if context.use_podman { + Docker::new("podman") + } else { + Docker::new("docker") + }; + let mut dev_container = DevContainerManifest::new( + context, + environment, + Arc::new(docker), + Arc::new(DefaultCommandRunner::new()), + config, + &context.project_directory.as_ref(), + ) + .await?; + dev_container.parse_nonremote_vars()?; + Ok(dev_container.dev_container().clone()) +} + +pub(crate) async fn spawn_dev_container( + context: &DevContainerContext, + environment: HashMap, + config: DevContainerConfig, + local_project_path: &Path, +) -> Result { + let docker = if context.use_podman { + Docker::new("podman") + } else { + Docker::new("docker") + }; + let mut devcontainer_manifest = DevContainerManifest::new( + context, + environment, + Arc::new(docker), + Arc::new(DefaultCommandRunner::new()), + config, + local_project_path, + ) + .await?; + + devcontainer_manifest.parse_nonremote_vars()?; + + log::debug!("Checking for existing container"); + if let Some(devcontainer) = devcontainer_manifest + .check_for_existing_devcontainer() + .await? + { + Ok(devcontainer) + } else { + log::debug!("Existing container not found. 
Building"); + + devcontainer_manifest.build_and_run().await + } +} + +#[derive(Debug)] +struct DockerBuildResources { + image: DockerInspect, + additional_mounts: Vec, + privileged: bool, + entrypoint_script: String, +} + +#[derive(Debug)] +enum DevContainerBuildResources { + DockerCompose(DockerComposeResources), + Docker(DockerBuildResources), +} + +fn find_primary_service( + docker_compose: &DockerComposeResources, + devcontainer: &DevContainerManifest, +) -> Result<(String, DockerComposeService), DevContainerError> { + let Some(service_name) = &devcontainer.dev_container().service else { + return Err(DevContainerError::DevContainerParseFailed); + }; + + match docker_compose.config.services.get(service_name) { + Some(service) => Ok((service_name.clone(), service.clone())), + None => Err(DevContainerError::DevContainerParseFailed), + } +} + +/// Destination folder inside the container where feature content is staged during build. +/// Mirrors the CLI's `FEATURES_CONTAINER_TEMP_DEST_FOLDER`. +const FEATURES_CONTAINER_TEMP_DEST_FOLDER: &str = "/tmp/dev-container-features"; + +/// Escapes regex special characters in a string. +fn escape_regex_chars(input: &str) -> String { + let mut result = String::with_capacity(input.len() * 2); + for c in input.chars() { + if ".*+?^${}()|[]\\".contains(c) { + result.push('\\'); + } + result.push(c); + } + result +} + +/// Extracts the short feature ID from a full feature reference string. 
+/// +/// Examples: +/// - `ghcr.io/devcontainers/features/aws-cli:1` → `aws-cli` +/// - `ghcr.io/user/repo/go` → `go` +/// - `ghcr.io/devcontainers/features/rust@sha256:abc` → `rust` +/// - `./myFeature` → `myFeature` +fn extract_feature_id(feature_ref: &str) -> &str { + let without_version = if let Some(at_idx) = feature_ref.rfind('@') { + &feature_ref[..at_idx] + } else { + let last_slash = feature_ref.rfind('/'); + let last_colon = feature_ref.rfind(':'); + match (last_slash, last_colon) { + (Some(slash), Some(colon)) if colon > slash => &feature_ref[..colon], + _ => feature_ref, + } + }; + match without_version.rfind('/') { + Some(idx) => &without_version[idx + 1..], + None => without_version, + } +} + +/// Generates a shell command that looks up a user's passwd entry. +/// +/// Mirrors the CLI's `getEntPasswdShellCommand` in `commonUtils.ts`. +/// Tries `getent passwd` first, then falls back to grepping `/etc/passwd`. +fn get_ent_passwd_shell_command(user: &str) -> String { + let escaped_for_shell = user.replace('\\', "\\\\").replace('\'', "\\'"); + let escaped_for_regex = escape_regex_chars(user).replace('\'', "\\'"); + format!( + " (command -v getent >/dev/null 2>&1 && getent passwd '{shell}' || grep -E '^{re}|^[^:]*:[^:]*:{re}:' /etc/passwd || true)", + shell = escaped_for_shell, + re = escaped_for_regex, + ) +} + +/// Determines feature installation order, respecting `overrideFeatureInstallOrder`. +/// +/// Features listed in the override come first (in the specified order), followed +/// by any remaining features sorted lexicographically by their full reference ID. 
+fn resolve_feature_order<'a>( + features: &'a HashMap, + override_order: &Option>, +) -> Vec<(&'a String, &'a FeatureOptions)> { + if let Some(order) = override_order { + let mut ordered: Vec<(&'a String, &'a FeatureOptions)> = Vec::new(); + for ordered_id in order { + if let Some((key, options)) = features.get_key_value(ordered_id) { + ordered.push((key, options)); + } + } + let mut remaining: Vec<_> = features + .iter() + .filter(|(id, _)| !order.iter().any(|o| o == *id)) + .collect(); + remaining.sort_by_key(|(id, _)| id.as_str()); + ordered.extend(remaining); + ordered + } else { + let mut entries: Vec<_> = features.iter().collect(); + entries.sort_by_key(|(id, _)| id.as_str()); + entries + } +} + +/// Generates the `devcontainer-features-install.sh` wrapper script for one feature. +/// +/// Mirrors the CLI's `getFeatureInstallWrapperScript` in +/// `containerFeaturesConfiguration.ts`. +fn generate_install_wrapper( + feature_ref: &str, + feature_id: &str, + env_variables: &str, +) -> Result { + let escaped_id = shlex::try_quote(feature_ref).map_err(|e| { + log::error!("Error escaping feature ref {feature_ref}: {e}"); + DevContainerError::DevContainerParseFailed + })?; + let escaped_name = shlex::try_quote(feature_id).map_err(|e| { + log::error!("Error escaping feature {feature_id}: {e}"); + DevContainerError::DevContainerParseFailed + })?; + let options_indented: String = env_variables + .lines() + .filter(|l| !l.is_empty()) + .map(|l| format!(" {}", l)) + .collect::>() + .join("\n"); + let escaped_options = shlex::try_quote(&options_indented).map_err(|e| { + log::error!("Error escaping options {options_indented}: {e}"); + DevContainerError::DevContainerParseFailed + })?; + + let script = format!( + r#"#!/bin/sh +set -e + +on_exit () {{ + [ $? -eq 0 ] && exit + echo 'ERROR: Feature "{escaped_name}" ({escaped_id}) failed to install!' 
+}} + +trap on_exit EXIT + +echo =========================================================================== +echo 'Feature : {escaped_name}' +echo 'Id : {escaped_id}' +echo 'Options :' +echo {escaped_options} +echo =========================================================================== + +set -a +. ../devcontainer-features.builtin.env +. ./devcontainer-features.env +set +a + +chmod +x ./install.sh +./install.sh +"# + ); + + Ok(script) +} + +// Dockerfile actions need to be moved to their own file +fn dockerfile_alias(dockerfile_content: &str) -> Option { + dockerfile_content + .lines() + .find(|line| line.starts_with("FROM")) + .and_then(|line| { + let words: Vec<&str> = line.split(" ").collect(); + if words.len() > 2 && words[words.len() - 2].to_lowercase() == "as" { + return Some(words[words.len() - 1].to_string()); + } else { + return None; + } + }) +} + +fn dockerfile_inject_alias(dockerfile_content: &str, alias: &str) -> String { + if dockerfile_alias(dockerfile_content).is_some() { + dockerfile_content.to_string() + } else { + dockerfile_content + .lines() + .map(|line| { + if line.starts_with("FROM") { + format!("{} AS {}", line, alias) + } else { + line.to_string() + } + }) + .collect::>() + .join("\n") + } +} + +fn image_from_dockerfile( + devcontainer: &DevContainerManifest, + dockerfile_contents: String, +) -> Result { + let mut raw_contents = dockerfile_contents + .lines() + .find(|line| line.starts_with("FROM")) + .and_then(|from_line| { + from_line + .split(' ') + .collect::>() + .get(1) + .map(|s| s.to_string()) + }) + .ok_or_else(|| { + log::error!("Could not find an image definition in dockerfile"); + DevContainerError::DevContainerParseFailed + })?; + + for (k, v) in devcontainer + .dev_container() + .build + .as_ref() + .and_then(|b| b.args.as_ref()) + .unwrap_or(&HashMap::new()) + { + raw_contents = raw_contents.replace(&format!("${{{}}}", k), v); + } + Ok(raw_contents) +} + +// Container user things +// This should come from spec - see the 
docs +fn get_remote_user_from_config( + docker_config: &DockerInspect, + devcontainer: &DevContainerManifest, +) -> Result { + if let DevContainer { + remote_user: Some(user), + .. + } = &devcontainer.dev_container() + { + return Ok(user.clone()); + } + let Some(metadata) = &docker_config.config.labels.metadata else { + log::error!("Could not locate metadata"); + return Err(DevContainerError::ContainerNotValid( + docker_config.id.clone(), + )); + }; + for metadatum in metadata { + if let Some(remote_user) = metadatum.get("remoteUser") { + if let Some(remote_user_str) = remote_user.as_str() { + return Ok(remote_user_str.to_string()); + } + } + } + log::error!("Could not locate the remote user"); + Err(DevContainerError::ContainerNotValid( + docker_config.id.clone(), + )) +} + +// This should come from spec - see the docs +fn get_container_user_from_config( + docker_config: &DockerInspect, + devcontainer: &DevContainerManifest, +) -> Result { + if let Some(user) = &devcontainer.dev_container().container_user { + return Ok(user.to_string()); + } + if let Some(metadata) = &docker_config.config.labels.metadata { + for metadatum in metadata { + if let Some(container_user) = metadatum.get("containerUser") { + if let Some(container_user_str) = container_user.as_str() { + return Ok(container_user_str.to_string()); + } + } + } + } + if let Some(image_user) = &docker_config.config.image_user { + return Ok(image_user.to_string()); + } + + Err(DevContainerError::DevContainerParseFailed) +} + +#[cfg(test)] +mod test { + use std::{ + collections::HashMap, + ffi::OsStr, + path::PathBuf, + process::{ExitStatus, Output}, + sync::{Arc, Mutex}, + }; + + use async_trait::async_trait; + use fs::{FakeFs, Fs}; + use gpui::{AppContext, TestAppContext}; + use http_client::{AsyncBody, FakeHttpClient, HttpClient}; + use project::{ + ProjectEnvironment, + worktree_store::{WorktreeIdCounter, WorktreeStore}, + }; + use serde_json_lenient::Value; + use util::{command::Command, 
paths::SanitizedPath}; + + use crate::{ + DevContainerConfig, DevContainerContext, + command_json::CommandRunner, + devcontainer_api::DevContainerError, + devcontainer_json::MountDefinition, + devcontainer_manifest::{ + ConfigStatus, DevContainerManifest, DockerBuildResources, DockerComposeResources, + DockerInspect, extract_feature_id, find_primary_service, get_remote_user_from_config, + }, + docker::{ + DockerClient, DockerComposeConfig, DockerComposeService, DockerComposeServiceBuild, + DockerComposeVolume, DockerConfigLabels, DockerInspectConfig, DockerInspectMount, + DockerPs, + }, + oci::TokenResponse, + }; + const TEST_PROJECT_PATH: &str = "/path/to/local/project"; + + async fn build_tarball(content: Vec<(&str, &str)>) -> Vec { + let buffer = futures::io::Cursor::new(Vec::new()); + let mut builder = async_tar::Builder::new(buffer); + for (file_name, content) in content { + if content.is_empty() { + let mut header = async_tar::Header::new_gnu(); + header.set_size(0); + header.set_mode(0o755); + header.set_entry_type(async_tar::EntryType::Directory); + header.set_cksum(); + builder + .append_data(&mut header, file_name, &[] as &[u8]) + .await + .unwrap(); + } else { + let data = content.as_bytes(); + let mut header = async_tar::Header::new_gnu(); + header.set_size(data.len() as u64); + header.set_mode(0o755); + header.set_entry_type(async_tar::EntryType::Regular); + header.set_cksum(); + builder + .append_data(&mut header, file_name, data) + .await + .unwrap(); + } + } + let buffer = builder.into_inner().await.unwrap(); + buffer.into_inner() + } + + fn test_project_filename() -> String { + PathBuf::from(TEST_PROJECT_PATH) + .file_name() + .expect("is valid") + .display() + .to_string() + } + + async fn init_devcontainer_config( + fs: &Arc, + devcontainer_contents: &str, + ) -> DevContainerConfig { + fs.insert_tree( + format!("{TEST_PROJECT_PATH}/.devcontainer"), + serde_json::json!({"devcontainer.json": devcontainer_contents}), + ) + .await; + + 
DevContainerConfig::default_config() + } + + struct TestDependencies { + fs: Arc, + _http_client: Arc, + docker: Arc, + command_runner: Arc, + } + + async fn init_default_devcontainer_manifest( + cx: &mut TestAppContext, + devcontainer_contents: &str, + ) -> Result<(TestDependencies, DevContainerManifest), DevContainerError> { + let fs = FakeFs::new(cx.executor()); + let http_client = fake_http_client(); + let command_runner = Arc::new(TestCommandRunner::new()); + let docker = Arc::new(FakeDocker::new()); + let environment = HashMap::new(); + + init_devcontainer_manifest( + cx, + fs, + http_client, + docker, + command_runner, + environment, + devcontainer_contents, + ) + .await + } + + async fn init_devcontainer_manifest( + cx: &mut TestAppContext, + fs: Arc, + http_client: Arc, + docker_client: Arc, + command_runner: Arc, + environment: HashMap, + devcontainer_contents: &str, + ) -> Result<(TestDependencies, DevContainerManifest), DevContainerError> { + let local_config = init_devcontainer_config(&fs, devcontainer_contents).await; + let project_path = SanitizedPath::new_arc(&PathBuf::from(TEST_PROJECT_PATH)); + let worktree_store = + cx.new(|_cx| WorktreeStore::local(false, fs.clone(), WorktreeIdCounter::default())); + let project_environment = + cx.new(|cx| ProjectEnvironment::new(None, worktree_store.downgrade(), None, false, cx)); + + let context = DevContainerContext { + project_directory: SanitizedPath::cast_arc(project_path), + use_podman: false, + fs: fs.clone(), + http_client: http_client.clone(), + environment: project_environment.downgrade(), + }; + + let test_dependencies = TestDependencies { + fs: fs.clone(), + _http_client: http_client.clone(), + docker: docker_client.clone(), + command_runner: command_runner.clone(), + }; + let manifest = DevContainerManifest::new( + &context, + environment, + docker_client, + command_runner, + local_config, + &PathBuf::from(TEST_PROJECT_PATH), + ) + .await?; + + Ok((test_dependencies, manifest)) + } + + 
#[gpui::test] + async fn should_get_remote_user_from_devcontainer_if_available(cx: &mut TestAppContext) { + let (_, devcontainer_manifest) = init_default_devcontainer_manifest( + cx, + r#" +// These are some external comments. serde_lenient should handle them +{ + // These are some internal comments + "image": "image", + "remoteUser": "root", +} + "#, + ) + .await + .unwrap(); + + let mut metadata = HashMap::new(); + metadata.insert( + "remoteUser".to_string(), + serde_json_lenient::Value::String("vsCode".to_string()), + ); + let given_docker_config = DockerInspect { + id: "docker_id".to_string(), + config: DockerInspectConfig { + labels: DockerConfigLabels { + metadata: Some(vec![metadata]), + }, + image_user: None, + env: Vec::new(), + }, + mounts: None, + state: None, + }; + + let remote_user = + get_remote_user_from_config(&given_docker_config, &devcontainer_manifest).unwrap(); + + assert_eq!(remote_user, "root".to_string()) + } + + #[gpui::test] + async fn should_get_remote_user_from_docker_config(cx: &mut TestAppContext) { + let (_, devcontainer_manifest) = + init_default_devcontainer_manifest(cx, "{}").await.unwrap(); + let mut metadata = HashMap::new(); + metadata.insert( + "remoteUser".to_string(), + serde_json_lenient::Value::String("vsCode".to_string()), + ); + let given_docker_config = DockerInspect { + id: "docker_id".to_string(), + config: DockerInspectConfig { + labels: DockerConfigLabels { + metadata: Some(vec![metadata]), + }, + image_user: None, + env: Vec::new(), + }, + mounts: None, + state: None, + }; + + let remote_user = get_remote_user_from_config(&given_docker_config, &devcontainer_manifest); + + assert!(remote_user.is_ok()); + let remote_user = remote_user.expect("ok"); + assert_eq!(&remote_user, "vsCode") + } + + #[test] + fn should_extract_feature_id_from_references() { + assert_eq!( + extract_feature_id("ghcr.io/devcontainers/features/aws-cli:1"), + "aws-cli" + ); + assert_eq!( + extract_feature_id("ghcr.io/devcontainers/features/go"), 
+ "go" + ); + assert_eq!(extract_feature_id("ghcr.io/user/repo/node:18.0.0"), "node"); + assert_eq!(extract_feature_id("./myFeature"), "myFeature"); + assert_eq!( + extract_feature_id("ghcr.io/devcontainers/features/rust@sha256:abc123"), + "rust" + ); + } + + #[gpui::test] + async fn should_create_correct_docker_run_command(cx: &mut TestAppContext) { + let mut metadata = HashMap::new(); + metadata.insert( + "remoteUser".to_string(), + serde_json_lenient::Value::String("vsCode".to_string()), + ); + + let (_, devcontainer_manifest) = + init_default_devcontainer_manifest(cx, "{}").await.unwrap(); + let build_resources = DockerBuildResources { + image: DockerInspect { + id: "mcr.microsoft.com/devcontainers/base:ubuntu".to_string(), + config: DockerInspectConfig { + labels: DockerConfigLabels { metadata: None }, + image_user: None, + env: Vec::new(), + }, + mounts: None, + state: None, + }, + additional_mounts: vec![], + privileged: false, + entrypoint_script: "echo Container started\n trap \"exit 0\" 15\n exec \"$@\"\n while sleep 1 & wait $!; do :; done".to_string(), + }; + let docker_run_command = devcontainer_manifest.create_docker_run_command(build_resources); + + assert!(docker_run_command.is_ok()); + let docker_run_command = docker_run_command.expect("ok"); + + assert_eq!(docker_run_command.get_program(), "docker"); + let expected_config_file_label = PathBuf::from(TEST_PROJECT_PATH) + .join(".devcontainer") + .join("devcontainer.json"); + let expected_config_file_label = expected_config_file_label.display(); + assert_eq!( + docker_run_command.get_args().collect::>(), + vec![ + OsStr::new("run"), + OsStr::new("--sig-proxy=false"), + OsStr::new("-d"), + OsStr::new("--mount"), + OsStr::new( + "type=bind,source=/path/to/local/project,target=/workspaces/project,consistency=cached" + ), + OsStr::new("-l"), + OsStr::new("devcontainer.local_folder=/path/to/local/project"), + OsStr::new("-l"), + OsStr::new(&format!( + 
"devcontainer.config_file={expected_config_file_label}" + )), + OsStr::new("--entrypoint"), + OsStr::new("/bin/sh"), + OsStr::new("mcr.microsoft.com/devcontainers/base:ubuntu"), + OsStr::new("-c"), + OsStr::new( + " + echo Container started + trap \"exit 0\" 15 + exec \"$@\" + while sleep 1 & wait $!; do :; done + " + .trim() + ), + OsStr::new("-"), + ] + ) + } + + #[gpui::test] + async fn should_find_primary_service_in_docker_compose(cx: &mut TestAppContext) { + // State where service not defined in dev container + let (_, given_dev_container) = init_default_devcontainer_manifest(cx, "{}").await.unwrap(); + let given_docker_compose_config = DockerComposeResources { + config: DockerComposeConfig { + name: Some("devcontainers".to_string()), + services: HashMap::new(), + ..Default::default() + }, + ..Default::default() + }; + + let bad_result = find_primary_service(&given_docker_compose_config, &given_dev_container); + + assert!(bad_result.is_err()); + + // State where service defined in devcontainer, not found in DockerCompose config + let (_, given_dev_container) = + init_default_devcontainer_manifest(cx, r#"{"service": "not_found_service"}"#) + .await + .unwrap(); + let given_docker_compose_config = DockerComposeResources { + config: DockerComposeConfig { + name: Some("devcontainers".to_string()), + services: HashMap::new(), + ..Default::default() + }, + ..Default::default() + }; + + let bad_result = find_primary_service(&given_docker_compose_config, &given_dev_container); + + assert!(bad_result.is_err()); + // State where service defined in devcontainer and in DockerCompose config + + let (_, given_dev_container) = + init_default_devcontainer_manifest(cx, r#"{"service": "found_service"}"#) + .await + .unwrap(); + let given_docker_compose_config = DockerComposeResources { + config: DockerComposeConfig { + name: Some("devcontainers".to_string()), + services: HashMap::from([( + "found_service".to_string(), + DockerComposeService { + ..Default::default() + }, + )]), 
+ ..Default::default() + }, + ..Default::default() + }; + + let (service_name, _) = + find_primary_service(&given_docker_compose_config, &given_dev_container).unwrap(); + + assert_eq!(service_name, "found_service".to_string()); + } + + #[gpui::test] + async fn test_nonremote_variable_replacement_with_default_mount(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.executor()); + let given_devcontainer_contents = r#" +// These are some external comments. serde_lenient should handle them +{ + // These are some internal comments + "image": "mcr.microsoft.com/devcontainers/base:ubuntu", + "name": "myDevContainer-${devcontainerId}", + "remoteUser": "root", + "remoteEnv": { + "DEVCONTAINER_ID": "${devcontainerId}", + "MYVAR2": "myvarothervalue", + "REMOTE_WORKSPACE_FOLDER_BASENAME": "${containerWorkspaceFolderBasename}", + "LOCAL_WORKSPACE_FOLDER_BASENAME": "${localWorkspaceFolderBasename}", + "REMOTE_WORKSPACE_FOLDER": "${containerWorkspaceFolder}", + "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}", + "LOCAL_ENV_VAR_1": "${localEnv:local_env_1}", + "LOCAL_ENV_VAR_2": "${localEnv:my_other_env}" + + } +} + "#; + let (_, mut devcontainer_manifest) = init_devcontainer_manifest( + cx, + fs, + fake_http_client(), + Arc::new(FakeDocker::new()), + Arc::new(TestCommandRunner::new()), + HashMap::from([ + ("local_env_1".to_string(), "local_env_value1".to_string()), + ("my_other_env".to_string(), "THISVALUEHERE".to_string()), + ]), + given_devcontainer_contents, + ) + .await + .unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let ConfigStatus::VariableParsed(variable_replaced_devcontainer) = + &devcontainer_manifest.config + else { + panic!("Config not parsed"); + }; + + // ${devcontainerId} + let devcontainer_id = devcontainer_manifest.devcontainer_id(); + assert_eq!( + variable_replaced_devcontainer.name, + Some(format!("myDevContainer-{devcontainer_id}")) + ); + assert_eq!( + variable_replaced_devcontainer + .remote_env + .as_ref() + .and_then(|env| 
env.get("DEVCONTAINER_ID")), + Some(&devcontainer_id) + ); + + // ${containerWorkspaceFolderBasename} + assert_eq!( + variable_replaced_devcontainer + .remote_env + .as_ref() + .and_then(|env| env.get("REMOTE_WORKSPACE_FOLDER_BASENAME")), + Some(&test_project_filename()) + ); + + // ${localWorkspaceFolderBasename} + assert_eq!( + variable_replaced_devcontainer + .remote_env + .as_ref() + .and_then(|env| env.get("LOCAL_WORKSPACE_FOLDER_BASENAME")), + Some(&test_project_filename()) + ); + + // ${containerWorkspaceFolder} + assert_eq!( + variable_replaced_devcontainer + .remote_env + .as_ref() + .and_then(|env| env.get("REMOTE_WORKSPACE_FOLDER")), + Some(&format!("/workspaces/{}", test_project_filename())) + ); + + // ${localWorkspaceFolder} + assert_eq!( + variable_replaced_devcontainer + .remote_env + .as_ref() + .and_then(|env| env.get("LOCAL_WORKSPACE_FOLDER")), + Some(&TEST_PROJECT_PATH.to_string()) + ); + + // ${localEnv:VARIABLE_NAME} + assert_eq!( + variable_replaced_devcontainer + .remote_env + .as_ref() + .and_then(|env| env.get("LOCAL_ENV_VAR_1")), + Some(&"local_env_value1".to_string()) + ); + assert_eq!( + variable_replaced_devcontainer + .remote_env + .as_ref() + .and_then(|env| env.get("LOCAL_ENV_VAR_2")), + Some(&"THISVALUEHERE".to_string()) + ); + } + + #[gpui::test] + async fn test_nonremote_variable_replacement_with_explicit_mount(cx: &mut TestAppContext) { + let given_devcontainer_contents = r#" + // These are some external comments. 
serde_lenient should handle them + { + // These are some internal comments + "image": "mcr.microsoft.com/devcontainers/base:ubuntu", + "name": "myDevContainer-${devcontainerId}", + "remoteUser": "root", + "remoteEnv": { + "DEVCONTAINER_ID": "${devcontainerId}", + "MYVAR2": "myvarothervalue", + "REMOTE_WORKSPACE_FOLDER_BASENAME": "${containerWorkspaceFolderBasename}", + "LOCAL_WORKSPACE_FOLDER_BASENAME": "${localWorkspaceFolderBasename}", + "REMOTE_WORKSPACE_FOLDER": "${containerWorkspaceFolder}", + "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}" + + }, + "workspaceMount": "source=/local/folder,target=/workspace/subfolder,type=bind,consistency=cached", + "workspaceFolder": "/workspace/customfolder" + } + "#; + + let (_, mut devcontainer_manifest) = + init_default_devcontainer_manifest(cx, given_devcontainer_contents) + .await + .unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let ConfigStatus::VariableParsed(variable_replaced_devcontainer) = + &devcontainer_manifest.config + else { + panic!("Config not parsed"); + }; + + // ${devcontainerId} + let devcontainer_id = devcontainer_manifest.devcontainer_id(); + assert_eq!( + variable_replaced_devcontainer.name, + Some(format!("myDevContainer-{devcontainer_id}")) + ); + assert_eq!( + variable_replaced_devcontainer + .remote_env + .as_ref() + .and_then(|env| env.get("DEVCONTAINER_ID")), + Some(&devcontainer_id) + ); + + // ${containerWorkspaceFolderBasename} + assert_eq!( + variable_replaced_devcontainer + .remote_env + .as_ref() + .and_then(|env| env.get("REMOTE_WORKSPACE_FOLDER_BASENAME")), + Some(&"customfolder".to_string()) + ); + + // ${localWorkspaceFolderBasename} + assert_eq!( + variable_replaced_devcontainer + .remote_env + .as_ref() + .and_then(|env| env.get("LOCAL_WORKSPACE_FOLDER_BASENAME")), + Some(&"project".to_string()) + ); + + // ${containerWorkspaceFolder} + assert_eq!( + variable_replaced_devcontainer + .remote_env + .as_ref() + .and_then(|env| 
env.get("REMOTE_WORKSPACE_FOLDER")), + Some(&"/workspace/customfolder".to_string()) + ); + + // ${localWorkspaceFolder} + assert_eq!( + variable_replaced_devcontainer + .remote_env + .as_ref() + .and_then(|env| env.get("LOCAL_WORKSPACE_FOLDER")), + Some(&TEST_PROJECT_PATH.to_string()) + ); + } + + // updateRemoteUserUID is treated as false in Windows, so this test will fail + // It is covered by test_spawns_devcontainer_with_dockerfile_and_no_update_uid + #[cfg(not(target_os = "windows"))] + #[gpui::test] + async fn test_spawns_devcontainer_with_dockerfile_and_features(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + env_logger::try_init().ok(); + let given_devcontainer_contents = r#" + /*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + { + "name": "cli-${devcontainerId}", + // "image": "mcr.microsoft.com/devcontainers/typescript-node:16-bullseye", + "build": { + "dockerfile": "Dockerfile", + "args": { + "VARIANT": "18-bookworm", + "FOO": "bar", + }, + }, + "workspaceMount": "source=${localWorkspaceFolder},target=${containerWorkspaceFolder},type=bind,consistency=cached", + "workspaceFolder": "/workspace2", + "mounts": [ + // Keep command history across instances + "source=dev-containers-cli-bashhistory,target=/home/node/commandhistory", + ], + + "forwardPorts": [ + 8082, + 8083, + ], + "appPort": "8084", + + "containerEnv": { + "VARIABLE_VALUE": "value", + }, + + "initializeCommand": "touch IAM.md", + + "onCreateCommand": "echo 'onCreateCommand' >> ON_CREATE_COMMAND.md", + + "updateContentCommand": "echo 'updateContentCommand' >> UPDATE_CONTENT_COMMAND.md", + + "postCreateCommand": { + "yarn": "yarn install", + "debug": "echo 'postStartCommand' >> 
POST_START_COMMAND.md", + }, + + "postStartCommand": "echo 'postStartCommand' >> POST_START_COMMAND.md", + + "postAttachCommand": "echo 'postAttachCommand' >> POST_ATTACH_COMMAND.md", + + "remoteUser": "node", + + "remoteEnv": { + "PATH": "${containerEnv:PATH}:/some/other/path", + "OTHER_ENV": "other_env_value" + }, + + "features": { + "ghcr.io/devcontainers/features/docker-in-docker:2": { + "moby": false, + }, + "ghcr.io/devcontainers/features/go:1": {}, + }, + + "customizations": { + "vscode": { + "extensions": [ + "dbaeumer.vscode-eslint", + "GitHub.vscode-pull-request-github", + ], + }, + "zed": { + "extensions": ["vue", "ruby"], + }, + "codespaces": { + "repositories": { + "devcontainers/features": { + "permissions": { + "contents": "write", + "workflows": "write", + }, + }, + }, + }, + }, + } + "#; + + let (test_dependencies, mut devcontainer_manifest) = + init_default_devcontainer_manifest(cx, given_devcontainer_contents) + .await + .unwrap(); + + test_dependencies + .fs + .atomic_write( + PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"), + r#" +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+ARG VARIANT="16-bullseye" +FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} + +RUN mkdir -p /workspaces && chown node:node /workspaces + +ARG USERNAME=node +USER $USERNAME + +# Save command line history +RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/home/$USERNAME/.bashrc" \ +&& echo "export PROMPT_COMMAND='history -a'" >> "/home/$USERNAME/.bashrc" \ +&& mkdir -p /home/$USERNAME/commandhistory \ +&& touch /home/$USERNAME/commandhistory/.bash_history \ +&& chown -R $USERNAME /home/$USERNAME/commandhistory + "#.trim().to_string(), + ) + .await + .unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap(); + + assert_eq!( + devcontainer_up.extension_ids, + vec!["vue".to_string(), "ruby".to_string()] + ); + + let files = test_dependencies.fs.files(); + let feature_dockerfile = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "Dockerfile.extended") + }) + .expect("to be found"); + let feature_dockerfile = test_dependencies.fs.load(feature_dockerfile).await.unwrap(); + assert_eq!( + &feature_dockerfile, + r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder + +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+ARG VARIANT="16-bullseye" +FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label + +RUN mkdir -p /workspaces && chown node:node /workspaces + +ARG USERNAME=node +USER $USERNAME + +# Save command line history +RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/home/$USERNAME/.bashrc" \ +&& echo "export PROMPT_COMMAND='history -a'" >> "/home/$USERNAME/.bashrc" \ +&& mkdir -p /home/$USERNAME/commandhistory \ +&& touch /home/$USERNAME/commandhistory/.bash_history \ +&& chown -R $USERNAME /home/$USERNAME/commandhistory + +FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize +USER root +COPY --from=dev_containers_feature_content_source ./devcontainer-features.builtin.env /tmp/build-features/ +RUN chmod -R 0755 /tmp/build-features/ + +FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_target_stage + +USER root + +RUN mkdir -p /tmp/dev-container-features +COPY --from=dev_containers_feature_content_normalize /tmp/build-features/ /tmp/dev-container-features + +RUN \ +echo "_CONTAINER_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'root' || grep -E '^root|^[^:]*:[^:]*:root:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env && \ +echo "_REMOTE_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'node' || grep -E '^node|^[^:]*:[^:]*:node:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env + + +RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./docker-in-docker_0,target=/tmp/build-features-src/docker-in-docker_0 \ +cp -ar /tmp/build-features-src/docker-in-docker_0 /tmp/dev-container-features \ +&& chmod -R 0755 /tmp/dev-container-features/docker-in-docker_0 \ +&& cd /tmp/dev-container-features/docker-in-docker_0 \ +&& chmod +x ./devcontainer-features-install.sh \ +&& ./devcontainer-features-install.sh \ +&& rm -rf 
/tmp/dev-container-features/docker-in-docker_0 + +RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./go_1,target=/tmp/build-features-src/go_1 \ +cp -ar /tmp/build-features-src/go_1 /tmp/dev-container-features \ +&& chmod -R 0755 /tmp/dev-container-features/go_1 \ +&& cd /tmp/dev-container-features/go_1 \ +&& chmod +x ./devcontainer-features-install.sh \ +&& ./devcontainer-features-install.sh \ +&& rm -rf /tmp/dev-container-features/go_1 + + +ARG _DEV_CONTAINERS_IMAGE_USER=root +USER $_DEV_CONTAINERS_IMAGE_USER +"# + ); + + let uid_dockerfile = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile") + }) + .expect("to be found"); + let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap(); + + assert_eq!( + &uid_dockerfile, + r#"ARG BASE_IMAGE +FROM $BASE_IMAGE + +USER root + +ARG REMOTE_USER +ARG NEW_UID +ARG NEW_GID +SHELL ["/bin/sh", "-c"] +RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \ + if [ -z "$OLD_UID" ]; then \ + echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \ + elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \ + echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \ + elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \ + echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \ + else \ + if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \ + FREE_GID=65532; \ + while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \ + echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \ + sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \ + fi; \ + echo "Updating UID:GID 
from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \ + sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \ + if [ "$OLD_GID" != "$NEW_GID" ]; then \ + sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \ + fi; \ + chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \ + fi; + +ARG IMAGE_USER +USER $IMAGE_USER + +# Ensure that /etc/profile does not clobber the existing path +RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true + +ENV DOCKER_BUILDKIT=1 + +ENV GOPATH=/go +ENV GOROOT=/usr/local/go +ENV PATH=/usr/local/go/bin:/go/bin:${PATH} +ENV VARIABLE_VALUE=value +"# + ); + + let golang_install_wrapper = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "devcontainer-features-install.sh") + && f.to_str().is_some_and(|s| s.contains("/go_")) + }) + .expect("to be found"); + let golang_install_wrapper = test_dependencies + .fs + .load(golang_install_wrapper) + .await + .unwrap(); + assert_eq!( + &golang_install_wrapper, + r#"#!/bin/sh +set -e + +on_exit () { + [ $? -eq 0 ] && exit + echo 'ERROR: Feature "go" (ghcr.io/devcontainers/features/go:1) failed to install!' +} + +trap on_exit EXIT + +echo =========================================================================== +echo 'Feature : go' +echo 'Id : ghcr.io/devcontainers/features/go:1' +echo 'Options :' +echo ' GOLANGCILINTVERSION=latest + VERSION=latest' +echo =========================================================================== + +set -a +. ../devcontainer-features.builtin.env +. 
./devcontainer-features.env +set +a + +chmod +x ./install.sh +./install.sh +"# + ); + + let docker_commands = test_dependencies + .command_runner + .commands_by_program("docker"); + + let docker_run_command = docker_commands + .iter() + .find(|c| c.args.get(0).is_some_and(|a| a == "run")) + .expect("found"); + + assert_eq!( + docker_run_command.args, + vec![ + "run".to_string(), + "--privileged".to_string(), + "--sig-proxy=false".to_string(), + "-d".to_string(), + "--mount".to_string(), + "type=bind,source=/path/to/local/project,target=/workspace2,consistency=cached".to_string(), + "--mount".to_string(), + "type=volume,source=dev-containers-cli-bashhistory,target=/home/node/commandhistory,consistency=cached".to_string(), + "--mount".to_string(), + "type=volume,source=dind-var-lib-docker-42dad4b4ca7b8ced,target=/var/lib/docker,consistency=cached".to_string(), + "-l".to_string(), + "devcontainer.local_folder=/path/to/local/project".to_string(), + "-l".to_string(), + "devcontainer.config_file=/path/to/local/project/.devcontainer/devcontainer.json".to_string(), + "-l".to_string(), + "devcontainer.metadata=[{\"remoteUser\":\"node\"}]".to_string(), + "-p".to_string(), + "8082:8082".to_string(), + "-p".to_string(), + "8083:8083".to_string(), + "-p".to_string(), + "8084:8084".to_string(), + "--entrypoint".to_string(), + "/bin/sh".to_string(), + "sha256:610e6cfca95280188b021774f8cf69dd6f49bdb6eebc34c5ee2010f4d51cc105".to_string(), + "-c".to_string(), + "echo Container started\ntrap \"exit 0\" 15\n/usr/local/share/docker-init.sh\nexec \"$@\"\nwhile sleep 1 & wait $!; do :; done".to_string(), + "-".to_string() + ] + ); + + let docker_exec_commands = test_dependencies + .docker + .exec_commands_recorded + .lock() + .unwrap(); + + assert!(docker_exec_commands.iter().all(|exec| { + exec.env + == HashMap::from([ + ("OTHER_ENV".to_string(), "other_env_value".to_string()), + ( + "PATH".to_string(), + "/initial/path:/some/other/path".to_string(), + ), + ]) + })) + } + + // 
updateRemoteUserUID is treated as false in Windows, so this test will fail + // It is covered by test_spawns_devcontainer_with_docker_compose_and_no_update_uid + #[cfg(not(target_os = "windows"))] + #[gpui::test] + async fn test_spawns_devcontainer_with_docker_compose(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + env_logger::try_init().ok(); + let given_devcontainer_contents = r#" + // For format details, see https://aka.ms/devcontainer.json. For config options, see the + // README at: https://github.com/devcontainers/templates/tree/main/src/rust-postgres + { + "features": { + "ghcr.io/devcontainers/features/aws-cli:1": {}, + "ghcr.io/devcontainers/features/docker-in-docker:2": {}, + }, + "name": "Rust and PostgreSQL", + "dockerComposeFile": "docker-compose.yml", + "service": "app", + "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}", + + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + "forwardPorts": [ + 8083, + "db:5432", + "db:1234", + ], + "appPort": "8084", + + // Use 'postCreateCommand' to run commands after the container is created. + // "postCreateCommand": "rustc --version", + + // Configure tool-specific properties. + // "customizations": {}, + + // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. + // "remoteUser": "root" + } + "#; + let (test_dependencies, mut devcontainer_manifest) = + init_default_devcontainer_manifest(cx, given_devcontainer_contents) + .await + .unwrap(); + + test_dependencies + .fs + .atomic_write( + PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/docker-compose.yml"), + r#" +version: '3.8' + +volumes: + postgres-data: + +services: + app: + build: + context: . 
+ dockerfile: Dockerfile + env_file: + # Ensure that the variables in .env match the same variables in devcontainer.json + - .env + + volumes: + - ../..:/workspaces:cached + + # Overrides default command so things don't shut down after the process ends. + command: sleep infinity + + # Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function. + network_mode: service:db + + # Use "forwardPorts" in **devcontainer.json** to forward an app port locally. + # (Adding the "ports" property to this file will not forward from a Codespace.) + + db: + image: postgres:14.1 + restart: unless-stopped + volumes: + - postgres-data:/var/lib/postgresql/data + env_file: + # Ensure that the variables in .env match the same variables in devcontainer.json + - .env + + # Add "forwardPorts": ["5432"] to **devcontainer.json** to forward PostgreSQL locally. + # (Adding the "ports" property to this file will not forward from a Codespace.) + "#.trim().to_string(), + ) + .await + .unwrap(); + + test_dependencies.fs.atomic_write( + PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"), + r#" +FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm + +# Include lld linker to improve build times either by using environment variable +# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml). 
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ + && apt-get -y install clang lld \ + && apt-get autoremove -y && apt-get clean -y + "#.trim().to_string()).await.unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap(); + + let files = test_dependencies.fs.files(); + let feature_dockerfile = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "Dockerfile.extended") + }) + .expect("to be found"); + let feature_dockerfile = test_dependencies.fs.load(feature_dockerfile).await.unwrap(); + assert_eq!( + &feature_dockerfile, + r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder + +FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label + +# Include lld linker to improve build times either by using environment variable +# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml). 
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ + && apt-get -y install clang lld \ + && apt-get autoremove -y && apt-get clean -y + +FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize +USER root +COPY --from=dev_containers_feature_content_source ./devcontainer-features.builtin.env /tmp/build-features/ +RUN chmod -R 0755 /tmp/build-features/ + +FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_target_stage + +USER root + +RUN mkdir -p /tmp/dev-container-features +COPY --from=dev_containers_feature_content_normalize /tmp/build-features/ /tmp/dev-container-features + +RUN \ +echo "_CONTAINER_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'root' || grep -E '^root|^[^:]*:[^:]*:root:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env && \ +echo "_REMOTE_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'vscode' || grep -E '^vscode|^[^:]*:[^:]*:vscode:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env + + +RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./aws-cli_0,target=/tmp/build-features-src/aws-cli_0 \ +cp -ar /tmp/build-features-src/aws-cli_0 /tmp/dev-container-features \ +&& chmod -R 0755 /tmp/dev-container-features/aws-cli_0 \ +&& cd /tmp/dev-container-features/aws-cli_0 \ +&& chmod +x ./devcontainer-features-install.sh \ +&& ./devcontainer-features-install.sh \ +&& rm -rf /tmp/dev-container-features/aws-cli_0 + +RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./docker-in-docker_1,target=/tmp/build-features-src/docker-in-docker_1 \ +cp -ar /tmp/build-features-src/docker-in-docker_1 /tmp/dev-container-features \ +&& chmod -R 0755 /tmp/dev-container-features/docker-in-docker_1 \ +&& cd /tmp/dev-container-features/docker-in-docker_1 \ +&& chmod +x ./devcontainer-features-install.sh \ +&& ./devcontainer-features-install.sh \ +&& rm -rf 
/tmp/dev-container-features/docker-in-docker_1 + + +ARG _DEV_CONTAINERS_IMAGE_USER=root +USER $_DEV_CONTAINERS_IMAGE_USER +"# + ); + + let uid_dockerfile = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile") + }) + .expect("to be found"); + let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap(); + + assert_eq!( + &uid_dockerfile, + r#"ARG BASE_IMAGE +FROM $BASE_IMAGE + +USER root + +ARG REMOTE_USER +ARG NEW_UID +ARG NEW_GID +SHELL ["/bin/sh", "-c"] +RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \ + if [ -z "$OLD_UID" ]; then \ + echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \ + elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \ + echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \ + elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \ + echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \ + else \ + if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \ + FREE_GID=65532; \ + while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \ + echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \ + sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \ + fi; \ + echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \ + sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \ + if [ "$OLD_GID" != "$NEW_GID" ]; then \ + sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \ + fi; \ + chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \ + fi; + +ARG IMAGE_USER +USER $IMAGE_USER + +# Ensure that /etc/profile does not clobber the existing path 
+RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true + + +ENV DOCKER_BUILDKIT=1 +"# + ); + + let runtime_override = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "docker_compose_runtime.json") + }) + .expect("to be found"); + let runtime_override = test_dependencies.fs.load(runtime_override).await.unwrap(); + + let expected_runtime_override = DockerComposeConfig { + name: None, + services: HashMap::from([ + ( + "app".to_string(), + DockerComposeService { + entrypoint: Some(vec![ + "/bin/sh".to_string(), + "-c".to_string(), + "echo Container started\ntrap \"exit 0\" 15\n/usr/local/share/docker-init.sh\nexec \"$@\"\nwhile sleep 1 & wait $!; do :; done".to_string(), + "-".to_string(), + ]), + cap_add: Some(vec!["SYS_PTRACE".to_string()]), + security_opt: Some(vec!["seccomp=unconfined".to_string()]), + privileged: Some(true), + labels: Some(vec![ + "devcontainer.metadata=[{\"remoteUser\":\"vscode\"}]".to_string(), + "devcontainer.local_folder=/path/to/local/project".to_string(), + "devcontainer.config_file=/path/to/local/project/.devcontainer/devcontainer.json".to_string() + ]), + volumes: vec![ + MountDefinition { + source: "dind-var-lib-docker-42dad4b4ca7b8ced".to_string(), + target: "/var/lib/docker".to_string(), + mount_type: Some("volume".to_string()) + } + ], + ..Default::default() + }, + ), + ( + "db".to_string(), + DockerComposeService { + ports: vec![ + "8083:8083".to_string(), + "5432:5432".to_string(), + "1234:1234".to_string(), + "8084:8084".to_string() + ], + ..Default::default() + }, + ), + ]), + volumes: HashMap::from([( + "dind-var-lib-docker-42dad4b4ca7b8ced".to_string(), + DockerComposeVolume { + name: "dind-var-lib-docker-42dad4b4ca7b8ced".to_string(), + }, + )]), + }; + + assert_eq!( + serde_json_lenient::from_str::(&runtime_override).unwrap(), + expected_runtime_override + ) + } + + #[gpui::test] + async fn test_spawns_devcontainer_with_docker_compose_and_no_update_uid( + cx: 
&mut TestAppContext, + ) { + cx.executor().allow_parking(); + env_logger::try_init().ok(); + let given_devcontainer_contents = r#" + // For format details, see https://aka.ms/devcontainer.json. For config options, see the + // README at: https://github.com/devcontainers/templates/tree/main/src/rust-postgres + { + "features": { + "ghcr.io/devcontainers/features/aws-cli:1": {}, + "ghcr.io/devcontainers/features/docker-in-docker:2": {}, + }, + "name": "Rust and PostgreSQL", + "dockerComposeFile": "docker-compose.yml", + "service": "app", + "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}", + + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + "forwardPorts": [ + 8083, + "db:5432", + "db:1234", + ], + "updateRemoteUserUID": false, + "appPort": "8084", + + // Use 'postCreateCommand' to run commands after the container is created. + // "postCreateCommand": "rustc --version", + + // Configure tool-specific properties. + // "customizations": {}, + + // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. + // "remoteUser": "root" + } + "#; + let (test_dependencies, mut devcontainer_manifest) = + init_default_devcontainer_manifest(cx, given_devcontainer_contents) + .await + .unwrap(); + + test_dependencies + .fs + .atomic_write( + PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/docker-compose.yml"), + r#" +version: '3.8' + +volumes: +postgres-data: + +services: +app: + build: + context: . + dockerfile: Dockerfile + env_file: + # Ensure that the variables in .env match the same variables in devcontainer.json + - .env + + volumes: + - ../..:/workspaces:cached + + # Overrides default command so things don't shut down after the process ends. 
+ command: sleep infinity + + # Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function. + network_mode: service:db + + # Use "forwardPorts" in **devcontainer.json** to forward an app port locally. + # (Adding the "ports" property to this file will not forward from a Codespace.) + +db: + image: postgres:14.1 + restart: unless-stopped + volumes: + - postgres-data:/var/lib/postgresql/data + env_file: + # Ensure that the variables in .env match the same variables in devcontainer.json + - .env + + # Add "forwardPorts": ["5432"] to **devcontainer.json** to forward PostgreSQL locally. + # (Adding the "ports" property to this file will not forward from a Codespace.) + "#.trim().to_string(), + ) + .await + .unwrap(); + + test_dependencies.fs.atomic_write( + PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"), + r#" +FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm + +# Include lld linker to improve build times either by using environment variable +# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml). 
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ +&& apt-get -y install clang lld \ +&& apt-get autoremove -y && apt-get clean -y + "#.trim().to_string()).await.unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap(); + + let files = test_dependencies.fs.files(); + let feature_dockerfile = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "Dockerfile.extended") + }) + .expect("to be found"); + let feature_dockerfile = test_dependencies.fs.load(feature_dockerfile).await.unwrap(); + assert_eq!( + &feature_dockerfile, + r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder + +FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label + +# Include lld linker to improve build times either by using environment variable +# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml). 
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ +&& apt-get -y install clang lld \ +&& apt-get autoremove -y && apt-get clean -y + +FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize +USER root +COPY --from=dev_containers_feature_content_source ./devcontainer-features.builtin.env /tmp/build-features/ +RUN chmod -R 0755 /tmp/build-features/ + +FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_target_stage + +USER root + +RUN mkdir -p /tmp/dev-container-features +COPY --from=dev_containers_feature_content_normalize /tmp/build-features/ /tmp/dev-container-features + +RUN \ +echo "_CONTAINER_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'root' || grep -E '^root|^[^:]*:[^:]*:root:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env && \ +echo "_REMOTE_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'vscode' || grep -E '^vscode|^[^:]*:[^:]*:vscode:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env + + +RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./aws-cli_0,target=/tmp/build-features-src/aws-cli_0 \ +cp -ar /tmp/build-features-src/aws-cli_0 /tmp/dev-container-features \ +&& chmod -R 0755 /tmp/dev-container-features/aws-cli_0 \ +&& cd /tmp/dev-container-features/aws-cli_0 \ +&& chmod +x ./devcontainer-features-install.sh \ +&& ./devcontainer-features-install.sh \ +&& rm -rf /tmp/dev-container-features/aws-cli_0 + +RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./docker-in-docker_1,target=/tmp/build-features-src/docker-in-docker_1 \ +cp -ar /tmp/build-features-src/docker-in-docker_1 /tmp/dev-container-features \ +&& chmod -R 0755 /tmp/dev-container-features/docker-in-docker_1 \ +&& cd /tmp/dev-container-features/docker-in-docker_1 \ +&& chmod +x ./devcontainer-features-install.sh \ +&& ./devcontainer-features-install.sh \ +&& rm -rf 
/tmp/dev-container-features/docker-in-docker_1 + + +ARG _DEV_CONTAINERS_IMAGE_USER=root +USER $_DEV_CONTAINERS_IMAGE_USER + +# Ensure that /etc/profile does not clobber the existing path +RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true + + +ENV DOCKER_BUILDKIT=1 +"# + ); + } + + #[cfg(not(target_os = "windows"))] + #[gpui::test] + async fn test_spawns_devcontainer_with_docker_compose_and_podman(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + env_logger::try_init().ok(); + let given_devcontainer_contents = r#" + // For format details, see https://aka.ms/devcontainer.json. For config options, see the + // README at: https://github.com/devcontainers/templates/tree/main/src/rust-postgres + { + "features": { + "ghcr.io/devcontainers/features/aws-cli:1": {}, + "ghcr.io/devcontainers/features/docker-in-docker:2": {}, + }, + "name": "Rust and PostgreSQL", + "dockerComposeFile": "docker-compose.yml", + "service": "app", + "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}", + + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [5432], + + // Use 'postCreateCommand' to run commands after the container is created. + // "postCreateCommand": "rustc --version", + + // Configure tool-specific properties. + // "customizations": {}, + + // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. 
+ // "remoteUser": "root" + } + "#; + let mut fake_docker = FakeDocker::new(); + fake_docker.set_podman(true); + let (test_dependencies, mut devcontainer_manifest) = init_devcontainer_manifest( + cx, + FakeFs::new(cx.executor()), + fake_http_client(), + Arc::new(fake_docker), + Arc::new(TestCommandRunner::new()), + HashMap::new(), + given_devcontainer_contents, + ) + .await + .unwrap(); + + test_dependencies + .fs + .atomic_write( + PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/docker-compose.yml"), + r#" +version: '3.8' + +volumes: +postgres-data: + +services: +app: +build: + context: . + dockerfile: Dockerfile +env_file: + # Ensure that the variables in .env match the same variables in devcontainer.json + - .env + +volumes: + - ../..:/workspaces:cached + +# Overrides default command so things don't shut down after the process ends. +command: sleep infinity + +# Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function. +network_mode: service:db + +# Use "forwardPorts" in **devcontainer.json** to forward an app port locally. +# (Adding the "ports" property to this file will not forward from a Codespace.) + +db: +image: postgres:14.1 +restart: unless-stopped +volumes: + - postgres-data:/var/lib/postgresql/data +env_file: + # Ensure that the variables in .env match the same variables in devcontainer.json + - .env + +# Add "forwardPorts": ["5432"] to **devcontainer.json** to forward PostgreSQL locally. +# (Adding the "ports" property to this file will not forward from a Codespace.) + "#.trim().to_string(), + ) + .await + .unwrap(); + + test_dependencies.fs.atomic_write( + PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"), + r#" +FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm + +# Include lld linker to improve build times either by using environment variable +# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml). 
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ +&& apt-get -y install clang lld \ +&& apt-get autoremove -y && apt-get clean -y + "#.trim().to_string()).await.unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap(); + + let files = test_dependencies.fs.files(); + + let feature_dockerfile = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "Dockerfile.extended") + }) + .expect("to be found"); + let feature_dockerfile = test_dependencies.fs.load(feature_dockerfile).await.unwrap(); + assert_eq!( + &feature_dockerfile, + r#"ARG _DEV_CONTAINERS_BASE_IMAGE=placeholder + +FROM mcr.microsoft.com/devcontainers/rust:2-1-bookworm AS dev_container_auto_added_stage_label + +# Include lld linker to improve build times either by using environment variable +# RUSTFLAGS="-C link-arg=-fuse-ld=lld" or with Cargo's configuration file (i.e see .cargo/config.toml). 
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ +&& apt-get -y install clang lld \ +&& apt-get autoremove -y && apt-get clean -y + +FROM dev_container_feature_content_temp as dev_containers_feature_content_source + +FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize +USER root +COPY --from=dev_containers_feature_content_source /tmp/build-features/devcontainer-features.builtin.env /tmp/build-features/ +RUN chmod -R 0755 /tmp/build-features/ + +FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_target_stage + +USER root + +RUN mkdir -p /tmp/dev-container-features +COPY --from=dev_containers_feature_content_normalize /tmp/build-features/ /tmp/dev-container-features + +RUN \ +echo "_CONTAINER_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'root' || grep -E '^root|^[^:]*:[^:]*:root:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env && \ +echo "_REMOTE_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'vscode' || grep -E '^vscode|^[^:]*:[^:]*:vscode:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env + + +COPY --chown=root:root --from=dev_containers_feature_content_source /tmp/build-features/aws-cli_0 /tmp/dev-container-features/aws-cli_0 +RUN chmod -R 0755 /tmp/dev-container-features/aws-cli_0 \ +&& cd /tmp/dev-container-features/aws-cli_0 \ +&& chmod +x ./devcontainer-features-install.sh \ +&& ./devcontainer-features-install.sh + +COPY --chown=root:root --from=dev_containers_feature_content_source /tmp/build-features/docker-in-docker_1 /tmp/dev-container-features/docker-in-docker_1 +RUN chmod -R 0755 /tmp/dev-container-features/docker-in-docker_1 \ +&& cd /tmp/dev-container-features/docker-in-docker_1 \ +&& chmod +x ./devcontainer-features-install.sh \ +&& ./devcontainer-features-install.sh + + +ARG _DEV_CONTAINERS_IMAGE_USER=root +USER $_DEV_CONTAINERS_IMAGE_USER +"# + ); + + let 
uid_dockerfile = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile") + }) + .expect("to be found"); + let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap(); + + assert_eq!( + &uid_dockerfile, + r#"ARG BASE_IMAGE +FROM $BASE_IMAGE + +USER root + +ARG REMOTE_USER +ARG NEW_UID +ARG NEW_GID +SHELL ["/bin/sh", "-c"] +RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \ + if [ -z "$OLD_UID" ]; then \ + echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \ + elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \ + echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \ + elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \ + echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \ + else \ + if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \ + FREE_GID=65532; \ + while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \ + echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \ + sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \ + fi; \ + echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \ + sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \ + if [ "$OLD_GID" != "$NEW_GID" ]; then \ + sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \ + fi; \ + chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \ + fi; + +ARG IMAGE_USER +USER $IMAGE_USER + +# Ensure that /etc/profile does not clobber the existing path +RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true + + +ENV DOCKER_BUILDKIT=1 +"# + ); + } + + #[gpui::test] + 
async fn test_spawns_devcontainer_with_dockerfile_and_no_update_uid(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + env_logger::try_init().ok(); + let given_devcontainer_contents = r#" + /*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + { + "name": "cli-${devcontainerId}", + // "image": "mcr.microsoft.com/devcontainers/typescript-node:16-bullseye", + "build": { + "dockerfile": "Dockerfile", + "args": { + "VARIANT": "18-bookworm", + "FOO": "bar", + }, + }, + "workspaceMount": "source=${localWorkspaceFolder},target=${containerWorkspaceFolder},type=bind,consistency=cached", + "workspaceFolder": "/workspace2", + "mounts": [ + // Keep command history across instances + "source=dev-containers-cli-bashhistory,target=/home/node/commandhistory", + ], + + "forwardPorts": [ + 8082, + 8083, + ], + "appPort": "8084", + "updateRemoteUserUID": false, + + "containerEnv": { + "VARIABLE_VALUE": "value", + }, + + "initializeCommand": "touch IAM.md", + + "onCreateCommand": "echo 'onCreateCommand' >> ON_CREATE_COMMAND.md", + + "updateContentCommand": "echo 'updateContentCommand' >> UPDATE_CONTENT_COMMAND.md", + + "postCreateCommand": { + "yarn": "yarn install", + "debug": "echo 'postStartCommand' >> POST_START_COMMAND.md", + }, + + "postStartCommand": "echo 'postStartCommand' >> POST_START_COMMAND.md", + + "postAttachCommand": "echo 'postAttachCommand' >> POST_ATTACH_COMMAND.md", + + "remoteUser": "node", + + "remoteEnv": { + "PATH": "${containerEnv:PATH}:/some/other/path", + "OTHER_ENV": "other_env_value" + }, + + "features": { + "ghcr.io/devcontainers/features/docker-in-docker:2": { + "moby": false, + }, + "ghcr.io/devcontainers/features/go:1": {}, + }, + + 
"customizations": { + "vscode": { + "extensions": [ + "dbaeumer.vscode-eslint", + "GitHub.vscode-pull-request-github", + ], + }, + "zed": { + "extensions": ["vue", "ruby"], + }, + "codespaces": { + "repositories": { + "devcontainers/features": { + "permissions": { + "contents": "write", + "workflows": "write", + }, + }, + }, + }, + }, + } + "#; + + let (test_dependencies, mut devcontainer_manifest) = + init_default_devcontainer_manifest(cx, given_devcontainer_contents) + .await + .unwrap(); + + test_dependencies + .fs + .atomic_write( + PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/Dockerfile"), + r#" +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +ARG VARIANT="16-bullseye" +FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} + +RUN mkdir -p /workspaces && chown node:node /workspaces + +ARG USERNAME=node +USER $USERNAME + +# Save command line history +RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/home/$USERNAME/.bashrc" \ +&& echo "export PROMPT_COMMAND='history -a'" >> "/home/$USERNAME/.bashrc" \ +&& mkdir -p /home/$USERNAME/commandhistory \ +&& touch /home/$USERNAME/commandhistory/.bash_history \ +&& chown -R $USERNAME /home/$USERNAME/commandhistory + "#.trim().to_string(), + ) + .await + .unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap(); + + assert_eq!( + devcontainer_up.extension_ids, + vec!["vue".to_string(), "ruby".to_string()] + ); + + let files = test_dependencies.fs.files(); + let feature_dockerfile = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "Dockerfile.extended") + }) + .expect("to be found"); + let feature_dockerfile = test_dependencies.fs.load(feature_dockerfile).await.unwrap(); + assert_eq!( + &feature_dockerfile, + r#"ARG 
_DEV_CONTAINERS_BASE_IMAGE=placeholder + +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +ARG VARIANT="16-bullseye" +FROM mcr.microsoft.com/devcontainers/typescript-node:1-${VARIANT} AS dev_container_auto_added_stage_label + +RUN mkdir -p /workspaces && chown node:node /workspaces + +ARG USERNAME=node +USER $USERNAME + +# Save command line history +RUN echo "export HISTFILE=/home/$USERNAME/commandhistory/.bash_history" >> "/home/$USERNAME/.bashrc" \ +&& echo "export PROMPT_COMMAND='history -a'" >> "/home/$USERNAME/.bashrc" \ +&& mkdir -p /home/$USERNAME/commandhistory \ +&& touch /home/$USERNAME/commandhistory/.bash_history \ +&& chown -R $USERNAME /home/$USERNAME/commandhistory + +FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_feature_content_normalize +USER root +COPY --from=dev_containers_feature_content_source ./devcontainer-features.builtin.env /tmp/build-features/ +RUN chmod -R 0755 /tmp/build-features/ + +FROM $_DEV_CONTAINERS_BASE_IMAGE AS dev_containers_target_stage + +USER root + +RUN mkdir -p /tmp/dev-container-features +COPY --from=dev_containers_feature_content_normalize /tmp/build-features/ /tmp/dev-container-features + +RUN \ +echo "_CONTAINER_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'root' || grep -E '^root|^[^:]*:[^:]*:root:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env && \ +echo "_REMOTE_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'node' || grep -E '^node|^[^:]*:[^:]*:node:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env + + +RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./docker-in-docker_0,target=/tmp/build-features-src/docker-in-docker_0 \ +cp -ar /tmp/build-features-src/docker-in-docker_0 /tmp/dev-container-features \ +&& chmod -R 0755 
/tmp/dev-container-features/docker-in-docker_0 \ +&& cd /tmp/dev-container-features/docker-in-docker_0 \ +&& chmod +x ./devcontainer-features-install.sh \ +&& ./devcontainer-features-install.sh \ +&& rm -rf /tmp/dev-container-features/docker-in-docker_0 + +RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./go_1,target=/tmp/build-features-src/go_1 \ +cp -ar /tmp/build-features-src/go_1 /tmp/dev-container-features \ +&& chmod -R 0755 /tmp/dev-container-features/go_1 \ +&& cd /tmp/dev-container-features/go_1 \ +&& chmod +x ./devcontainer-features-install.sh \ +&& ./devcontainer-features-install.sh \ +&& rm -rf /tmp/dev-container-features/go_1 + + +ARG _DEV_CONTAINERS_IMAGE_USER=root +USER $_DEV_CONTAINERS_IMAGE_USER + +# Ensure that /etc/profile does not clobber the existing path +RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true + +ENV DOCKER_BUILDKIT=1 + +ENV GOPATH=/go +ENV GOROOT=/usr/local/go +ENV PATH=/usr/local/go/bin:/go/bin:${PATH} +ENV VARIABLE_VALUE=value +"# + ); + + let golang_install_wrapper = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "devcontainer-features-install.sh") + && f.to_str().is_some_and(|s| s.contains("go_")) + }) + .expect("to be found"); + let golang_install_wrapper = test_dependencies + .fs + .load(golang_install_wrapper) + .await + .unwrap(); + assert_eq!( + &golang_install_wrapper, + r#"#!/bin/sh +set -e + +on_exit () { + [ $? -eq 0 ] && exit + echo 'ERROR: Feature "go" (ghcr.io/devcontainers/features/go:1) failed to install!' +} + +trap on_exit EXIT + +echo =========================================================================== +echo 'Feature : go' +echo 'Id : ghcr.io/devcontainers/features/go:1' +echo 'Options :' +echo ' GOLANGCILINTVERSION=latest + VERSION=latest' +echo =========================================================================== + +set -a +. ../devcontainer-features.builtin.env +. 
./devcontainer-features.env +set +a + +chmod +x ./install.sh +./install.sh +"# + ); + + let docker_commands = test_dependencies + .command_runner + .commands_by_program("docker"); + + let docker_run_command = docker_commands + .iter() + .find(|c| c.args.get(0).is_some_and(|a| a == "run")); + + assert!(docker_run_command.is_some()); + + let docker_exec_commands = test_dependencies + .docker + .exec_commands_recorded + .lock() + .unwrap(); + + assert!(docker_exec_commands.iter().all(|exec| { + exec.env + == HashMap::from([ + ("OTHER_ENV".to_string(), "other_env_value".to_string()), + ( + "PATH".to_string(), + "/initial/path:/some/other/path".to_string(), + ), + ]) + })) + } + + pub(crate) struct RecordedExecCommand { + pub(crate) _container_id: String, + pub(crate) _remote_folder: String, + pub(crate) _user: String, + pub(crate) env: HashMap, + pub(crate) _inner_command: Command, + } + + pub(crate) struct FakeDocker { + exec_commands_recorded: Mutex>, + podman: bool, + } + + impl FakeDocker { + pub(crate) fn new() -> Self { + Self { + podman: false, + exec_commands_recorded: Mutex::new(Vec::new()), + } + } + #[cfg(not(target_os = "windows"))] + fn set_podman(&mut self, podman: bool) { + self.podman = podman; + } + } + + #[async_trait] + impl DockerClient for FakeDocker { + async fn inspect(&self, id: &String) -> Result { + if id == "mcr.microsoft.com/devcontainers/typescript-node:1-18-bookworm" { + return Ok(DockerInspect { + id: "sha256:610e6cfca95280188b021774f8cf69dd6f49bdb6eebc34c5ee2010f4d51cc104" + .to_string(), + config: DockerInspectConfig { + labels: DockerConfigLabels { + metadata: Some(vec![HashMap::from([( + "remoteUser".to_string(), + Value::String("node".to_string()), + )])]), + }, + env: Vec::new(), + image_user: Some("root".to_string()), + }, + mounts: None, + state: None, + }); + } + if id == "mcr.microsoft.com/devcontainers/rust:2-1-bookworm" { + return Ok(DockerInspect { + id: 
"sha256:39ad1c7264794d60e3bc449d9d8877a8e486d19ad8fba80f5369def6a2408392" + .to_string(), + config: DockerInspectConfig { + labels: DockerConfigLabels { + metadata: Some(vec![HashMap::from([( + "remoteUser".to_string(), + Value::String("vscode".to_string()), + )])]), + }, + image_user: Some("root".to_string()), + env: Vec::new(), + }, + mounts: None, + state: None, + }); + } + if id.starts_with("cli_") { + return Ok(DockerInspect { + id: "sha256:610e6cfca95280188b021774f8cf69dd6f49bdb6eebc34c5ee2010f4d51cc105" + .to_string(), + config: DockerInspectConfig { + labels: DockerConfigLabels { + metadata: Some(vec![HashMap::from([( + "remoteUser".to_string(), + Value::String("node".to_string()), + )])]), + }, + image_user: Some("root".to_string()), + env: vec!["PATH=/initial/path".to_string()], + }, + mounts: None, + state: None, + }); + } + if id == "found_docker_ps" { + return Ok(DockerInspect { + id: "sha256:610e6cfca95280188b021774f8cf69dd6f49bdb6eebc34c5ee2010f4d51cc105" + .to_string(), + config: DockerInspectConfig { + labels: DockerConfigLabels { + metadata: Some(vec![HashMap::from([( + "remoteUser".to_string(), + Value::String("node".to_string()), + )])]), + }, + image_user: Some("root".to_string()), + env: vec!["PATH=/initial/path".to_string()], + }, + mounts: Some(vec![DockerInspectMount { + source: "/path/to/local/project".to_string(), + destination: "/workspaces/project".to_string(), + }]), + state: None, + }); + } + if id.starts_with("rust_a-") { + return Ok(DockerInspect { + id: "sha256:9da65c34ab809e763b13d238fd7a0f129fcabd533627d340f293308cb63620a0" + .to_string(), + config: DockerInspectConfig { + labels: DockerConfigLabels { + metadata: Some(vec![HashMap::from([( + "remoteUser".to_string(), + Value::String("vscode".to_string()), + )])]), + }, + image_user: Some("root".to_string()), + env: Vec::new(), + }, + mounts: None, + state: None, + }); + } + + Err(DevContainerError::DockerNotAvailable) + } + async fn get_docker_compose_config( + &self, + 
config_files: &Vec, + ) -> Result, DevContainerError> { + if config_files.len() == 1 + && config_files.get(0) + == Some(&PathBuf::from( + "/path/to/local/project/.devcontainer/docker-compose.yml", + )) + { + return Ok(Some(DockerComposeConfig { + name: None, + services: HashMap::from([ + ( + "app".to_string(), + DockerComposeService { + build: Some(DockerComposeServiceBuild { + context: Some(".".to_string()), + dockerfile: Some("Dockerfile".to_string()), + args: None, + additional_contexts: None, + }), + volumes: vec![MountDefinition { + source: "../..".to_string(), + target: "/workspaces".to_string(), + mount_type: Some("bind".to_string()), + }], + network_mode: Some("service:db".to_string()), + ..Default::default() + }, + ), + ( + "db".to_string(), + DockerComposeService { + image: Some("postgres:14.1".to_string()), + volumes: vec![MountDefinition { + source: "postgres-data".to_string(), + target: "/var/lib/postgresql/data".to_string(), + mount_type: Some("volume".to_string()), + }], + env_file: Some(vec![".env".to_string()]), + ..Default::default() + }, + ), + ]), + volumes: HashMap::from([( + "postgres-data".to_string(), + DockerComposeVolume::default(), + )]), + })); + } + Err(DevContainerError::DockerNotAvailable) + } + async fn docker_compose_build( + &self, + _config_files: &Vec, + _project_name: &str, + ) -> Result<(), DevContainerError> { + Ok(()) + } + async fn run_docker_exec( + &self, + container_id: &str, + remote_folder: &str, + user: &str, + env: &HashMap, + inner_command: Command, + ) -> Result<(), DevContainerError> { + let mut record = self + .exec_commands_recorded + .lock() + .expect("should be available"); + record.push(RecordedExecCommand { + _container_id: container_id.to_string(), + _remote_folder: remote_folder.to_string(), + _user: user.to_string(), + env: env.clone(), + _inner_command: inner_command, + }); + Ok(()) + } + async fn start_container(&self, _id: &str) -> Result<(), DevContainerError> { + 
Err(DevContainerError::DockerNotAvailable) + } + async fn find_process_by_filters( + &self, + _filters: Vec, + ) -> Result, DevContainerError> { + Ok(Some(DockerPs { + id: "found_docker_ps".to_string(), + })) + } + fn supports_compose_buildkit(&self) -> bool { + !self.podman + } + fn docker_cli(&self) -> String { + if self.podman { + "podman".to_string() + } else { + "docker".to_string() + } + } + } + + #[derive(Debug, Clone)] + pub(crate) struct TestCommand { + pub(crate) program: String, + pub(crate) args: Vec, + } + + pub(crate) struct TestCommandRunner { + commands_recorded: Mutex>, + } + + impl TestCommandRunner { + fn new() -> Self { + Self { + commands_recorded: Mutex::new(Vec::new()), + } + } + + fn commands_by_program(&self, program: &str) -> Vec { + let record = self.commands_recorded.lock().expect("poisoned"); + record + .iter() + .filter(|r| r.program == program) + .map(|r| r.clone()) + .collect() + } + } + + #[async_trait] + impl CommandRunner for TestCommandRunner { + async fn run_command(&self, command: &mut Command) -> Result { + let mut record = self.commands_recorded.lock().expect("poisoned"); + + record.push(TestCommand { + program: command.get_program().display().to_string(), + args: command + .get_args() + .map(|a| a.display().to_string()) + .collect(), + }); + + Ok(Output { + status: ExitStatus::default(), + stdout: vec![], + stderr: vec![], + }) + } + } + + fn fake_http_client() -> Arc { + FakeHttpClient::create(|request| async move { + let (parts, _body) = request.into_parts(); + if parts.uri.path() == "/token" { + let token_response = TokenResponse { + token: "token".to_string(), + }; + return Ok(http::Response::builder() + .status(200) + .body(http_client::AsyncBody::from( + serde_json_lenient::to_string(&token_response).unwrap(), + )) + .unwrap()); + } + + // OCI specific things + if parts.uri.path() == "/v2/devcontainers/features/docker-in-docker/manifests/2" { + let response = r#" + { + "schemaVersion": 2, + "mediaType": 
"application/vnd.oci.image.manifest.v1+json", + "config": { + "mediaType": "application/vnd.devcontainers", + "digest": "sha256:44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a", + "size": 2 + }, + "layers": [ + { + "mediaType": "application/vnd.devcontainers.layer.v1+tar", + "digest": "sha256:bc7ab0d8d8339416e1491419ab9ffe931458d0130110f4b18351b0fa184e67d5", + "size": 59392, + "annotations": { + "org.opencontainers.image.title": "devcontainer-feature-docker-in-docker.tgz" + } + } + ], + "annotations": { + "dev.containers.metadata": "{\"id\":\"docker-in-docker\",\"version\":\"2.16.1\",\"name\":\"Docker (Docker-in-Docker)\",\"documentationURL\":\"https://github.com/devcontainers/features/tree/main/src/docker-in-docker\",\"description\":\"Create child containers *inside* a container, independent from the host's docker instance. Installs Docker extension in the container along with needed CLIs.\",\"options\":{\"version\":{\"type\":\"string\",\"proposals\":[\"latest\",\"none\",\"20.10\"],\"default\":\"latest\",\"description\":\"Select or enter a Docker/Moby Engine version. (Availability can vary by OS version.)\"},\"moby\":{\"type\":\"boolean\",\"default\":true,\"description\":\"Install OSS Moby build instead of Docker CE\"},\"mobyBuildxVersion\":{\"type\":\"string\",\"default\":\"latest\",\"description\":\"Install a specific version of moby-buildx when using Moby\"},\"dockerDashComposeVersion\":{\"type\":\"string\",\"enum\":[\"none\",\"v1\",\"v2\"],\"default\":\"v2\",\"description\":\"Default version of Docker Compose (v1, v2 or none)\"},\"azureDnsAutoDetection\":{\"type\":\"boolean\",\"default\":true,\"description\":\"Allow automatically setting the dockerd DNS server when the installation script detects it is running in Azure\"},\"dockerDefaultAddressPool\":{\"type\":\"string\",\"default\":\"\",\"proposals\":[],\"description\":\"Define default address pools for Docker networks. e.g. 
base=192.168.0.0/16,size=24\"},\"installDockerBuildx\":{\"type\":\"boolean\",\"default\":true,\"description\":\"Install Docker Buildx\"},\"installDockerComposeSwitch\":{\"type\":\"boolean\",\"default\":false,\"description\":\"Install Compose Switch (provided docker compose is available) which is a replacement to the Compose V1 docker-compose (python) executable. It translates the command line into Compose V2 docker compose then runs the latter.\"},\"disableIp6tables\":{\"type\":\"boolean\",\"default\":false,\"description\":\"Disable ip6tables (this option is only applicable for Docker versions 27 and greater)\"}},\"entrypoint\":\"/usr/local/share/docker-init.sh\",\"privileged\":true,\"containerEnv\":{\"DOCKER_BUILDKIT\":\"1\"},\"customizations\":{\"vscode\":{\"extensions\":[\"ms-azuretools.vscode-containers\"],\"settings\":{\"github.copilot.chat.codeGeneration.instructions\":[{\"text\":\"This dev container includes the Docker CLI (`docker`) pre-installed and available on the `PATH` for running and managing containers using a dedicated Docker daemon running inside the dev container.\"}]}}},\"mounts\":[{\"source\":\"dind-var-lib-docker-${devcontainerId}\",\"target\":\"/var/lib/docker\",\"type\":\"volume\"}],\"installsAfter\":[\"ghcr.io/devcontainers/features/common-utils\"]}", + "com.github.package.type": "devcontainer_feature" + } + } + "#; + return Ok(http::Response::builder() + .status(200) + .body(http_client::AsyncBody::from(response)) + .unwrap()); + } + + if parts.uri.path() + == "/v2/devcontainers/features/docker-in-docker/blobs/sha256:bc7ab0d8d8339416e1491419ab9ffe931458d0130110f4b18351b0fa184e67d5" + { + let response = build_tarball(vec![ + ("./NOTES.md", r#" + ## Limitations + + This docker-in-docker Dev Container Feature is roughly based on the [official docker-in-docker wrapper script](https://github.com/moby/moby/blob/master/hack/dind) that is part of the [Moby project](https://mobyproject.org/). 
With this in mind: + * As the name implies, the Feature is expected to work when the host is running Docker (or the OSS Moby container engine it is built on). It may be possible to get running in other container engines, but it has not been tested with them. + * The host and the container must be running on the same chip architecture. You will not be able to use it with an emulated x86 image with Docker Desktop on an Apple Silicon Mac, like in this example: + ``` + FROM --platform=linux/amd64 mcr.microsoft.com/devcontainers/typescript-node:16 + ``` + See [Issue #219](https://github.com/devcontainers/features/issues/219) for more details. + + + ## OS Support + + This Feature should work on recent versions of Debian/Ubuntu-based distributions with the `apt` package manager installed. + + Debian Trixie (13) does not include moby-cli and related system packages, so the feature cannot install with "moby": "true". To use this feature on Trixie, please set "moby": "false" or choose a different base image (for example, Ubuntu 24.04). + + `bash` is required to execute the `install.sh` script."#), + ("./README.md", r#" + # Docker (Docker-in-Docker) (docker-in-docker) + + Create child containers *inside* a container, independent from the host's docker instance. Installs Docker extension in the container along with needed CLIs. + + ## Example Usage + + ```json + "features": { + "ghcr.io/devcontainers/features/docker-in-docker:2": {} + } + ``` + + ## Options + + | Options Id | Description | Type | Default Value | + |-----|-----|-----|-----| + | version | Select or enter a Docker/Moby Engine version. (Availability can vary by OS version.) 
| string | latest | + | moby | Install OSS Moby build instead of Docker CE | boolean | true | + | mobyBuildxVersion | Install a specific version of moby-buildx when using Moby | string | latest | + | dockerDashComposeVersion | Default version of Docker Compose (v1, v2 or none) | string | v2 | + | azureDnsAutoDetection | Allow automatically setting the dockerd DNS server when the installation script detects it is running in Azure | boolean | true | + | dockerDefaultAddressPool | Define default address pools for Docker networks. e.g. base=192.168.0.0/16,size=24 | string | - | + | installDockerBuildx | Install Docker Buildx | boolean | true | + | installDockerComposeSwitch | Install Compose Switch (provided docker compose is available) which is a replacement to the Compose V1 docker-compose (python) executable. It translates the command line into Compose V2 docker compose then runs the latter. | boolean | false | + | disableIp6tables | Disable ip6tables (this option is only applicable for Docker versions 27 and greater) | boolean | false | + + ## Customizations + + ### VS Code Extensions + + - `ms-azuretools.vscode-containers` + + ## Limitations + + This docker-in-docker Dev Container Feature is roughly based on the [official docker-in-docker wrapper script](https://github.com/moby/moby/blob/master/hack/dind) that is part of the [Moby project](https://mobyproject.org/). With this in mind: + * As the name implies, the Feature is expected to work when the host is running Docker (or the OSS Moby container engine it is built on). It may be possible to get running in other container engines, but it has not been tested with them. + * The host and the container must be running on the same chip architecture. 
You will not be able to use it with an emulated x86 image with Docker Desktop on an Apple Silicon Mac, like in this example: + ``` + FROM --platform=linux/amd64 mcr.microsoft.com/devcontainers/typescript-node:16 + ``` + See [Issue #219](https://github.com/devcontainers/features/issues/219) for more details. + + + ## OS Support + + This Feature should work on recent versions of Debian/Ubuntu-based distributions with the `apt` package manager installed. + + `bash` is required to execute the `install.sh` script. + + + --- + + _Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers/features/blob/main/src/docker-in-docker/devcontainer-feature.json). Add additional notes to a `NOTES.md`._"#), + ("./devcontainer-feature.json", r#" + { + "id": "docker-in-docker", + "version": "2.16.1", + "name": "Docker (Docker-in-Docker)", + "documentationURL": "https://github.com/devcontainers/features/tree/main/src/docker-in-docker", + "description": "Create child containers *inside* a container, independent from the host's docker instance. Installs Docker extension in the container along with needed CLIs.", + "options": { + "version": { + "type": "string", + "proposals": [ + "latest", + "none", + "20.10" + ], + "default": "latest", + "description": "Select or enter a Docker/Moby Engine version. 
(Availability can vary by OS version.)" + }, + "moby": { + "type": "boolean", + "default": true, + "description": "Install OSS Moby build instead of Docker CE" + }, + "mobyBuildxVersion": { + "type": "string", + "default": "latest", + "description": "Install a specific version of moby-buildx when using Moby" + }, + "dockerDashComposeVersion": { + "type": "string", + "enum": [ + "none", + "v1", + "v2" + ], + "default": "v2", + "description": "Default version of Docker Compose (v1, v2 or none)" + }, + "azureDnsAutoDetection": { + "type": "boolean", + "default": true, + "description": "Allow automatically setting the dockerd DNS server when the installation script detects it is running in Azure" + }, + "dockerDefaultAddressPool": { + "type": "string", + "default": "", + "proposals": [], + "description": "Define default address pools for Docker networks. e.g. base=192.168.0.0/16,size=24" + }, + "installDockerBuildx": { + "type": "boolean", + "default": true, + "description": "Install Docker Buildx" + }, + "installDockerComposeSwitch": { + "type": "boolean", + "default": false, + "description": "Install Compose Switch (provided docker compose is available) which is a replacement to the Compose V1 docker-compose (python) executable. It translates the command line into Compose V2 docker compose then runs the latter." 
+ }, + "disableIp6tables": { + "type": "boolean", + "default": false, + "description": "Disable ip6tables (this option is only applicable for Docker versions 27 and greater)" + } + }, + "entrypoint": "/usr/local/share/docker-init.sh", + "privileged": true, + "containerEnv": { + "DOCKER_BUILDKIT": "1" + }, + "customizations": { + "vscode": { + "extensions": [ + "ms-azuretools.vscode-containers" + ], + "settings": { + "github.copilot.chat.codeGeneration.instructions": [ + { + "text": "This dev container includes the Docker CLI (`docker`) pre-installed and available on the `PATH` for running and managing containers using a dedicated Docker daemon running inside the dev container." + } + ] + } + } + }, + "mounts": [ + { + "source": "dind-var-lib-docker-${devcontainerId}", + "target": "/var/lib/docker", + "type": "volume" + } + ], + "installsAfter": [ + "ghcr.io/devcontainers/features/common-utils" + ] + }"#), + ("./install.sh", r#" + #!/usr/bin/env bash + #------------------------------------------------------------------------------------------------------------- + # Copyright (c) Microsoft Corporation. All rights reserved. + # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+ #------------------------------------------------------------------------------------------------------------- + # + # Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker-in-docker.md + # Maintainer: The Dev Container spec maintainers + + + DOCKER_VERSION="${VERSION:-"latest"}" # The Docker/Moby Engine + CLI should match in version + USE_MOBY="${MOBY:-"true"}" + MOBY_BUILDX_VERSION="${MOBYBUILDXVERSION:-"latest"}" + DOCKER_DASH_COMPOSE_VERSION="${DOCKERDASHCOMPOSEVERSION:-"v2"}" #v1, v2 or none + AZURE_DNS_AUTO_DETECTION="${AZUREDNSAUTODETECTION:-"true"}" + DOCKER_DEFAULT_ADDRESS_POOL="${DOCKERDEFAULTADDRESSPOOL:-""}" + USERNAME="${USERNAME:-"${_REMOTE_USER:-"automatic"}"}" + INSTALL_DOCKER_BUILDX="${INSTALLDOCKERBUILDX:-"true"}" + INSTALL_DOCKER_COMPOSE_SWITCH="${INSTALLDOCKERCOMPOSESWITCH:-"false"}" + MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc" + MICROSOFT_GPG_KEYS_ROLLING_URI="https://packages.microsoft.com/keys/microsoft-rolling.asc" + DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES="trixie bookworm buster bullseye bionic focal jammy noble" + DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES="trixie bookworm buster bullseye bionic focal hirsute impish jammy noble" + DISABLE_IP6_TABLES="${DISABLEIP6TABLES:-false}" + + # Default: Exit on any failure. + set -e + + # Clean up + rm -rf /var/lib/apt/lists/* + + # Setup STDERR. + err() { + echo "(!) $*" >&2 + } + + if [ "$(id -u)" -ne 0 ]; then + err 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' 
+ exit 1 + fi + + ################### + # Helper Functions + # See: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/shared/utils.sh + ################### + + # Determine the appropriate non-root user + if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then + USERNAME="" + POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)") + for CURRENT_USER in "${POSSIBLE_USERS[@]}"; do + if id -u ${CURRENT_USER} > /dev/null 2>&1; then + USERNAME=${CURRENT_USER} + break + fi + done + if [ "${USERNAME}" = "" ]; then + USERNAME=root + fi + elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then + USERNAME=root + fi + + # Package manager update function + pkg_mgr_update() { + case ${ADJUSTED_ID} in + debian) + if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then + echo "Running apt-get update..." + apt-get update -y + fi + ;; + rhel) + if [ ${PKG_MGR_CMD} = "microdnf" ]; then + cache_check_dir="/var/cache/yum" + else + cache_check_dir="/var/cache/${PKG_MGR_CMD}" + fi + if [ "$(ls ${cache_check_dir}/* 2>/dev/null | wc -l)" = 0 ]; then + echo "Running ${PKG_MGR_CMD} makecache ..." + ${PKG_MGR_CMD} makecache + fi + ;; + esac + } + + # Checks if packages are installed and installs them if not + check_packages() { + case ${ADJUSTED_ID} in + debian) + if ! dpkg -s "$@" > /dev/null 2>&1; then + pkg_mgr_update + apt-get -y install --no-install-recommends "$@" + fi + ;; + rhel) + if ! 
rpm -q "$@" > /dev/null 2>&1; then + pkg_mgr_update + ${PKG_MGR_CMD} -y install "$@" + fi + ;; + esac + } + + # Figure out correct version of a three part version number is not passed + find_version_from_git_tags() { + local variable_name=$1 + local requested_version=${!variable_name} + if [ "${requested_version}" = "none" ]; then return; fi + local repository=$2 + local prefix=${3:-"tags/v"} + local separator=${4:-"."} + local last_part_optional=${5:-"false"} + if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then + local escaped_separator=${separator//./\\.} + local last_part + if [ "${last_part_optional}" = "true" ]; then + last_part="(${escaped_separator}[0-9]+)?" + else + last_part="${escaped_separator}[0-9]+" + fi + local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$" + local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)" + if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then + declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)" + else + set +e + declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")" + set -e + fi + fi + if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then + err "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2 + exit 1 + fi + echo "${variable_name}=${!variable_name}" + } + + # Use semver logic to decrement a version number then look for the closest match + find_prev_version_from_git_tags() { + local variable_name=$1 + local current_version=${!variable_name} + local repository=$2 + # Normally a "v" is used before the version number, but support alternate cases + local prefix=${3:-"tags/v"} + # Some repositories use "_" instead of "." 
for version number part separation, support that + local separator=${4:-"."} + # Some tools release versions that omit the last digit (e.g. go) + local last_part_optional=${5:-"false"} + # Some repositories may have tags that include a suffix (e.g. actions/node-versions) + local version_suffix_regex=$6 + # Try one break fix version number less if we get a failure. Use "set +e" since "set -e" can cause failures in valid scenarios. + set +e + major="$(echo "${current_version}" | grep -oE '^[0-9]+' || echo '')" + minor="$(echo "${current_version}" | grep -oP '^[0-9]+\.\K[0-9]+' || echo '')" + breakfix="$(echo "${current_version}" | grep -oP '^[0-9]+\.[0-9]+\.\K[0-9]+' 2>/dev/null || echo '')" + + if [ "${minor}" = "0" ] && [ "${breakfix}" = "0" ]; then + ((major=major-1)) + declare -g ${variable_name}="${major}" + # Look for latest version from previous major release + find_version_from_git_tags "${variable_name}" "${repository}" "${prefix}" "${separator}" "${last_part_optional}" + # Handle situations like Go's odd version pattern where "0" releases omit the last part + elif [ "${breakfix}" = "" ] || [ "${breakfix}" = "0" ]; then + ((minor=minor-1)) + declare -g ${variable_name}="${major}.${minor}" + # Look for latest version from previous minor release + find_version_from_git_tags "${variable_name}" "${repository}" "${prefix}" "${separator}" "${last_part_optional}" + else + ((breakfix=breakfix-1)) + if [ "${breakfix}" = "0" ] && [ "${last_part_optional}" = "true" ]; then + declare -g ${variable_name}="${major}.${minor}" + else + declare -g ${variable_name}="${major}.${minor}.${breakfix}" + fi + fi + set -e + } + + # Function to fetch the version released prior to the latest version + get_previous_version() { + local url=$1 + local repo_url=$2 + local variable_name=$3 + prev_version=${!variable_name} + + output=$(curl -s "$repo_url"); + if echo "$output" | jq -e 'type == "object"' > /dev/null; then + message=$(echo "$output" | jq -r '.message') + + if [[ $message == 
"API rate limit exceeded"* ]]; then + echo -e "\nAn attempt to find latest version using GitHub Api Failed... \nReason: ${message}" + echo -e "\nAttempting to find latest version using GitHub tags." + find_prev_version_from_git_tags prev_version "$url" "tags/v" + declare -g ${variable_name}="${prev_version}" + fi + elif echo "$output" | jq -e 'type == "array"' > /dev/null; then + echo -e "\nAttempting to find latest version using GitHub Api." + version=$(echo "$output" | jq -r '.[1].tag_name') + declare -g ${variable_name}="${version#v}" + fi + echo "${variable_name}=${!variable_name}" + } + + get_github_api_repo_url() { + local url=$1 + echo "${url/https:\/\/github.com/https:\/\/api.github.com\/repos}/releases" + } + + ########################################### + # Start docker-in-docker installation + ########################################### + + # Ensure apt is in non-interactive to avoid prompts + export DEBIAN_FRONTEND=noninteractive + + # Source /etc/os-release to get OS info + . /etc/os-release + + # Determine adjusted ID and package manager + if [ "${ID}" = "debian" ] || [ "${ID_LIKE}" = "debian" ]; then + ADJUSTED_ID="debian" + PKG_MGR_CMD="apt-get" + # Use dpkg for Debian-based systems + architecture="$(dpkg --print-architecture 2>/dev/null || uname -m)" + elif [[ "${ID}" = "rhel" || "${ID}" = "fedora" || "${ID}" = "azurelinux" || "${ID}" = "mariner" || "${ID_LIKE}" = *"rhel"* || "${ID_LIKE}" = *"fedora"* || "${ID_LIKE}" = *"azurelinux"* || "${ID_LIKE}" = *"mariner"* ]]; then + ADJUSTED_ID="rhel" + # Determine the appropriate package manager for RHEL-based systems + for pkg_mgr in tdnf dnf microdnf yum; do + if command -v "$pkg_mgr" >/dev/null 2>&1; then + PKG_MGR_CMD="$pkg_mgr" + break + fi + done + + if [ -z "${PKG_MGR_CMD}" ]; then + err "Unable to find a supported package manager (tdnf, dnf, microdnf, yum)" + exit 1 + fi + + architecture="$(rpm --eval '%{_arch}' 2>/dev/null || uname -m)" + else + err "Linux distro ${ID} not supported." 
+ exit 1 + fi + + # Azure Linux specific setup + if [ "${ID}" = "azurelinux" ]; then + VERSION_CODENAME="azurelinux${VERSION_ID}" + fi + + # Prevent attempting to install Moby on Debian trixie (packages removed) + if [ "${USE_MOBY}" = "true" ] && [ "${ID}" = "debian" ] && [ "${VERSION_CODENAME}" = "trixie" ]; then + err "The 'moby' option is not supported on Debian 'trixie' because 'moby-cli' and related system packages have been removed from that distribution." + err "To continue, either set the feature option '\"moby\": false' or use a different base image (for example: 'debian:bookworm' or 'ubuntu-24.04')." + exit 1 + fi + + # Check if distro is supported + if [ "${USE_MOBY}" = "true" ]; then + if [ "${ADJUSTED_ID}" = "debian" ]; then + if [[ "${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then + err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS distribution" + err "Supported distributions include: ${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" + exit 1 + fi + echo "(*) ${VERSION_CODENAME} is supported for Moby installation - setting up Microsoft repository" + elif [ "${ADJUSTED_ID}" = "rhel" ]; then + if [ "${ID}" = "azurelinux" ] || [ "${ID}" = "mariner" ]; then + echo " (*) ${ID} ${VERSION_ID} detected - using Microsoft repositories for Moby packages" + else + echo "RHEL-based system (${ID}) detected - Moby packages may require additional configuration" + fi + fi + else + if [ "${ADJUSTED_ID}" = "debian" ]; then + if [[ "${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then + err "Unsupported distribution version '${VERSION_CODENAME}'. 
To resolve, please choose a compatible OS distribution" + err "Supported distributions include: ${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" + exit 1 + fi + echo "(*) ${VERSION_CODENAME} is supported for Docker CE installation (supported: ${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}) - setting up Docker repository" + elif [ "${ADJUSTED_ID}" = "rhel" ]; then + + echo "RHEL-based system (${ID}) detected - using Docker CE packages" + fi + fi + + # Install base dependencies + base_packages="curl ca-certificates pigz iptables gnupg2 wget jq" + case ${ADJUSTED_ID} in + debian) + check_packages apt-transport-https $base_packages dirmngr + ;; + rhel) + check_packages $base_packages tar gawk shadow-utils policycoreutils procps-ng systemd-libs systemd-devel + + ;; + esac + + # Install git if not already present + if ! command -v git >/dev/null 2>&1; then + check_packages git + fi + + # Update CA certificates to ensure HTTPS connections work properly + # This is especially important for Ubuntu 24.04 (Noble) and Debian Trixie + # Only run for Debian-based systems (RHEL uses update-ca-trust instead) + if [ "${ADJUSTED_ID}" = "debian" ] && command -v update-ca-certificates > /dev/null 2>&1; then + update-ca-certificates + fi + + # Swap to legacy iptables for compatibility (Debian only) + if [ "${ADJUSTED_ID}" = "debian" ] && type iptables-legacy > /dev/null 2>&1; then + update-alternatives --set iptables /usr/sbin/iptables-legacy + update-alternatives --set ip6tables /usr/sbin/ip6tables-legacy + fi + + # Set up the necessary repositories + if [ "${USE_MOBY}" = "true" ]; then + # Name of open source engine/cli + engine_package_name="moby-engine" + cli_package_name="moby-cli" + + case ${ADJUSTED_ID} in + debian) + # Import key safely and import Microsoft apt repo + { + curl -sSL ${MICROSOFT_GPG_KEYS_URI} + curl -sSL ${MICROSOFT_GPG_KEYS_ROLLING_URI} + } | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg + echo "deb [arch=${architecture} 
signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list + ;; + rhel) + echo "(*) ${ID} detected - checking for Moby packages..." + + # Check if moby packages are available in default repos + if ${PKG_MGR_CMD} list available moby-engine >/dev/null 2>&1; then + echo "(*) Using built-in ${ID} Moby packages" + else + case "${ID}" in + azurelinux) + echo "(*) Moby packages not found in Azure Linux repositories" + echo "(*) For Azure Linux, Docker CE ('moby': false) is recommended" + err "Moby packages are not available for Azure Linux ${VERSION_ID}." + err "Recommendation: Use '\"moby\": false' to install Docker CE instead." + exit 1 + ;; + mariner) + echo "(*) Adding Microsoft repository for CBL-Mariner..." + # Add Microsoft repository if packages aren't available locally + curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /etc/pki/rpm-gpg/microsoft.gpg + cat > /etc/yum.repos.d/microsoft.repo << EOF + [microsoft] + name=Microsoft Repository + baseurl=https://packages.microsoft.com/repos/microsoft-cbl-mariner-2.0-prod-base/ + enabled=1 + gpgcheck=1 + gpgkey=file:///etc/pki/rpm-gpg/microsoft.gpg + EOF + # Verify packages are available after adding repo + pkg_mgr_update + if ! ${PKG_MGR_CMD} list available moby-engine >/dev/null 2>&1; then + echo "(*) Moby packages not found in Microsoft repository either" + err "Moby packages are not available for CBL-Mariner ${VERSION_ID}." + err "Recommendation: Use '\"moby\": false' to install Docker CE instead." + exit 1 + fi + ;; + *) + err "Moby packages are not available for ${ID}. Please use 'moby': false option." 
+ exit 1 + ;; + esac + fi + ;; + esac + else + # Name of licensed engine/cli + engine_package_name="docker-ce" + cli_package_name="docker-ce-cli" + case ${ADJUSTED_ID} in + debian) + curl -fsSL https://download.docker.com/linux/${ID}/gpg | gpg --dearmor > /usr/share/keyrings/docker-archive-keyring.gpg + echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/${ID} ${VERSION_CODENAME} stable" > /etc/apt/sources.list.d/docker.list + ;; + rhel) + # Docker CE repository setup for RHEL-based systems + setup_docker_ce_repo() { + curl -fsSL https://download.docker.com/linux/centos/gpg > /etc/pki/rpm-gpg/docker-ce.gpg + cat > /etc/yum.repos.d/docker-ce.repo << EOF + [docker-ce-stable] + name=Docker CE Stable + baseurl=https://download.docker.com/linux/centos/9/\$basearch/stable + enabled=1 + gpgcheck=1 + gpgkey=file:///etc/pki/rpm-gpg/docker-ce.gpg + skip_if_unavailable=1 + module_hotfixes=1 + EOF + } + install_azure_linux_deps() { + echo "(*) Installing device-mapper libraries for Docker CE..." + [ "${ID}" != "mariner" ] && ${PKG_MGR_CMD} -y install device-mapper-libs 2>/dev/null || echo "(*) Device-mapper install failed, proceeding" + echo "(*) Installing additional Docker CE dependencies..." + ${PKG_MGR_CMD} -y install libseccomp libtool-ltdl systemd-libs libcgroup tar xz || { + echo "(*) Some optional dependencies could not be installed, continuing..." + } + } + setup_selinux_context() { + if command -v getenforce >/dev/null 2>&1 && [ "$(getenforce 2>/dev/null)" != "Disabled" ]; then + echo "(*) Creating minimal SELinux context for Docker compatibility..." + mkdir -p /etc/selinux/targeted/contexts/files/ 2>/dev/null || true + echo "/var/lib/docker(/.*)? 
system_u:object_r:container_file_t:s0" >> /etc/selinux/targeted/contexts/files/file_contexts.local 2>/dev/null || true + fi + } + + # Special handling for RHEL Docker CE installation + case "${ID}" in + azurelinux|mariner) + echo "(*) ${ID} detected" + echo "(*) Note: Moby packages work better on Azure Linux. Consider using 'moby': true" + echo "(*) Setting up Docker CE repository..." + + setup_docker_ce_repo + install_azure_linux_deps + + if [ "${USE_MOBY}" != "true" ]; then + echo "(*) Docker CE installation for Azure Linux - skipping container-selinux" + echo "(*) Note: SELinux policies will be minimal but Docker will function normally" + setup_selinux_context + else + echo "(*) Using Moby - container-selinux not required" + fi + ;; + *) + # Standard RHEL/CentOS/Fedora approach + if command -v dnf >/dev/null 2>&1; then + dnf config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo + elif command -v yum-config-manager >/dev/null 2>&1; then + yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo + else + # Manual fallback + setup_docker_ce_repo + fi + ;; + esac + ;; + esac + fi + + # Refresh package database + case ${ADJUSTED_ID} in + debian) + apt-get update + ;; + rhel) + pkg_mgr_update + ;; + esac + + # Soft version matching + if [ "${DOCKER_VERSION}" = "latest" ] || [ "${DOCKER_VERSION}" = "lts" ] || [ "${DOCKER_VERSION}" = "stable" ]; then + # Empty, meaning grab whatever "latest" is in apt repo + engine_version_suffix="" + cli_version_suffix="" + else + case ${ADJUSTED_ID} in + debian) + # Fetch a valid version from the apt-cache (eg: the Microsoft repo appends +azure, breakfix, etc...) 
+ docker_version_dot_escaped="${DOCKER_VERSION//./\\.}" + docker_version_dot_plus_escaped="${docker_version_dot_escaped//+/\\+}" + # Regex needs to handle debian package version number format: https://www.systutorials.com/docs/linux/man/5-deb-version/ + docker_version_regex="^(.+:)?${docker_version_dot_plus_escaped}([\\.\\+ ~:-]|$)" + set +e # Don't exit if finding version fails - will handle gracefully + cli_version_suffix="=$(apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")" + engine_version_suffix="=$(apt-cache madison ${engine_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")" + set -e + if [ -z "${engine_version_suffix}" ] || [ "${engine_version_suffix}" = "=" ] || [ -z "${cli_version_suffix}" ] || [ "${cli_version_suffix}" = "=" ] ; then + err "No full or partial Docker / Moby version match found for \"${DOCKER_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). 
Available versions:" + apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+' + exit 1 + fi + ;; + rhel) + # For RHEL-based systems, use dnf/yum to find versions + docker_version_escaped="${DOCKER_VERSION//./\\.}" + set +e # Don't exit if finding version fails - will handle gracefully + if [ "${USE_MOBY}" = "true" ]; then + available_versions=$(${PKG_MGR_CMD} list --available moby-engine 2>/dev/null | grep -v "Available Packages" | awk '{print $2}' | grep -E "^${docker_version_escaped}" | head -1) + else + available_versions=$(${PKG_MGR_CMD} list --available docker-ce 2>/dev/null | grep -v "Available Packages" | awk '{print $2}' | grep -E "^${docker_version_escaped}" | head -1) + fi + set -e + if [ -n "${available_versions}" ]; then + engine_version_suffix="-${available_versions}" + cli_version_suffix="-${available_versions}" + else + echo "(*) Exact version ${DOCKER_VERSION} not found, using latest available" + engine_version_suffix="" + cli_version_suffix="" + fi + ;; + esac + fi + + # Version matching for moby-buildx + if [ "${USE_MOBY}" = "true" ]; then + if [ "${MOBY_BUILDX_VERSION}" = "latest" ]; then + # Empty, meaning grab whatever "latest" is in apt repo + buildx_version_suffix="" + else + case ${ADJUSTED_ID} in + debian) + buildx_version_dot_escaped="${MOBY_BUILDX_VERSION//./\\.}" + buildx_version_dot_plus_escaped="${buildx_version_dot_escaped//+/\\+}" + buildx_version_regex="^(.+:)?${buildx_version_dot_plus_escaped}([\\.\\+ ~:-]|$)" + set +e + buildx_version_suffix="=$(apt-cache madison moby-buildx | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${buildx_version_regex}")" + set -e + if [ -z "${buildx_version_suffix}" ] || [ "${buildx_version_suffix}" = "=" ]; then + err "No full or partial moby-buildx version match found for \"${MOBY_BUILDX_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). 
Available versions:" + apt-cache madison moby-buildx | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+' + exit 1 + fi + ;; + rhel) + # For RHEL-based systems, try to find buildx version or use latest + buildx_version_escaped="${MOBY_BUILDX_VERSION//./\\.}" + set +e + available_buildx=$(${PKG_MGR_CMD} list --available moby-buildx 2>/dev/null | grep -v "Available Packages" | awk '{print $2}' | grep -E "^${buildx_version_escaped}" | head -1) + set -e + if [ -n "${available_buildx}" ]; then + buildx_version_suffix="-${available_buildx}" + else + echo "(*) Exact buildx version ${MOBY_BUILDX_VERSION} not found, using latest available" + buildx_version_suffix="" + fi + ;; + esac + echo "buildx_version_suffix ${buildx_version_suffix}" + fi + fi + + # Install Docker / Moby CLI if not already installed + if type docker > /dev/null 2>&1 && type dockerd > /dev/null 2>&1; then + echo "Docker / Moby CLI and Engine already installed." + else + case ${ADJUSTED_ID} in + debian) + if [ "${USE_MOBY}" = "true" ]; then + # Install engine + set +e # Handle error gracefully + apt-get -y install --no-install-recommends moby-cli${cli_version_suffix} moby-buildx${buildx_version_suffix} moby-engine${engine_version_suffix} + exit_code=$? + set -e + + if [ ${exit_code} -ne 0 ]; then + err "Packages for moby not available in OS ${ID} ${VERSION_CODENAME} (${architecture}). To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS version (eg: 'ubuntu-24.04')." + exit 1 + fi + + # Install compose + apt-get -y install --no-install-recommends moby-compose || err "Package moby-compose (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping." 
+ else + apt-get -y install --no-install-recommends docker-ce-cli${cli_version_suffix} docker-ce${engine_version_suffix} + # Install compose + apt-mark hold docker-ce docker-ce-cli + apt-get -y install --no-install-recommends docker-compose-plugin || echo "(*) Package docker-compose-plugin (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping." + fi + ;; + rhel) + if [ "${USE_MOBY}" = "true" ]; then + set +e # Handle error gracefully + ${PKG_MGR_CMD} -y install moby-cli${cli_version_suffix} moby-engine${engine_version_suffix} + exit_code=$? + set -e + + if [ ${exit_code} -ne 0 ]; then + err "Packages for moby not available in OS ${ID} ${VERSION_CODENAME} (${architecture}). To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS version." + exit 1 + fi + + # Install compose + if [ "${DOCKER_DASH_COMPOSE_VERSION}" != "none" ]; then + ${PKG_MGR_CMD} -y install moby-compose || echo "(*) Package moby-compose not available for ${ID} ${VERSION_CODENAME} (${architecture}). Skipping." + fi + else + # Special handling for Azure Linux Docker CE installation + if [ "${ID}" = "azurelinux" ] || [ "${ID}" = "mariner" ]; then + echo "(*) Installing Docker CE on Azure Linux (bypassing container-selinux dependency)..." + + # Use rpm with --force and --nodeps for Azure Linux + set +e # Don't exit on error for this section + ${PKG_MGR_CMD} -y install docker-ce${cli_version_suffix} docker-ce-cli${engine_version_suffix} containerd.io + install_result=$? + set -e + + if [ $install_result -ne 0 ]; then + echo "(*) Standard installation failed, trying manual installation..." + + echo "(*) Standard installation failed, trying manual installation..." + + # Create directory for downloading packages + mkdir -p /tmp/docker-ce-install + + # Download packages manually using curl since tdnf doesn't support download + echo "(*) Downloading Docker CE packages manually..." 
+ + # Get the repository baseurl + repo_baseurl="https://download.docker.com/linux/centos/9/x86_64/stable" + + # Download packages directly + cd /tmp/docker-ce-install + + # Get package names with versions + if [ -n "${cli_version_suffix}" ]; then + docker_ce_version="${cli_version_suffix#-}" + docker_cli_version="${engine_version_suffix#-}" + else + # Get latest version from repository + docker_ce_version="latest" + fi + + echo "(*) Attempting to download Docker CE packages from repository..." + + # Try to download latest packages if specific version fails + if ! curl -fsSL "${repo_baseurl}/Packages/docker-ce-${docker_ce_version}.el9.x86_64.rpm" -o docker-ce.rpm 2>/dev/null; then + # Fallback: try to get latest available version + echo "(*) Specific version not found, trying latest..." + latest_docker=$(curl -s "${repo_baseurl}/Packages/" | grep -o 'docker-ce-[0-9][^"]*\.el9\.x86_64\.rpm' | head -1) + latest_cli=$(curl -s "${repo_baseurl}/Packages/" | grep -o 'docker-ce-cli-[0-9][^"]*\.el9\.x86_64\.rpm' | head -1) + latest_containerd=$(curl -s "${repo_baseurl}/Packages/" | grep -o 'containerd\.io-[0-9][^"]*\.el9\.x86_64\.rpm' | head -1) + + if [ -n "${latest_docker}" ]; then + curl -fsSL "${repo_baseurl}/Packages/${latest_docker}" -o docker-ce.rpm + curl -fsSL "${repo_baseurl}/Packages/${latest_cli}" -o docker-ce-cli.rpm + curl -fsSL "${repo_baseurl}/Packages/${latest_containerd}" -o containerd.io.rpm + else + echo "(*) ERROR: Could not find Docker CE packages in repository" + echo "(*) Please check repository configuration or use 'moby': true" + exit 1 + fi + fi + # Install systemd libraries required by Docker CE + echo "(*) Installing systemd libraries required by Docker CE..." 
+ ${PKG_MGR_CMD} -y install systemd-libs || ${PKG_MGR_CMD} -y install systemd-devel || { + echo "(*) WARNING: Could not install systemd libraries" + echo "(*) Docker may fail to start without these" + } + + # Install with rpm --force --nodeps + echo "(*) Installing Docker CE packages with dependency override..." + rpm -Uvh --force --nodeps *.rpm + + # Cleanup + cd / + rm -rf /tmp/docker-ce-install + + echo "(*) Docker CE installation completed with dependency bypass" + echo "(*) Note: Some SELinux functionality may be limited without container-selinux" + fi + else + # Standard installation for other RHEL-based systems + ${PKG_MGR_CMD} -y install docker-ce${cli_version_suffix} docker-ce-cli${engine_version_suffix} containerd.io + fi + # Install compose + if [ "${DOCKER_DASH_COMPOSE_VERSION}" != "none" ]; then + ${PKG_MGR_CMD} -y install docker-compose-plugin || echo "(*) Package docker-compose-plugin not available for ${ID} ${VERSION_CODENAME} (${architecture}). Skipping." + fi + fi + ;; + esac + fi + + echo "Finished installing docker / moby!" + + docker_home="/usr/libexec/docker" + cli_plugins_dir="${docker_home}/cli-plugins" + + # fallback for docker-compose + fallback_compose(){ + local url=$1 + local repo_url=$(get_github_api_repo_url "$url") + echo -e "\n(!) Failed to fetch the latest artifacts for docker-compose v${compose_version}..." + get_previous_version "${url}" "${repo_url}" compose_version + echo -e "\nAttempting to install v${compose_version}" + curl -fsSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}" -o ${docker_compose_path} + } + + # If 'docker-compose' command is to be included + if [ "${DOCKER_DASH_COMPOSE_VERSION}" != "none" ]; then + case "${architecture}" in + amd64|x86_64) target_compose_arch=x86_64 ;; + arm64|aarch64) target_compose_arch=aarch64 ;; + *) + echo "(!) Docker in docker does not support machine architecture '$architecture'. 
Please use an x86-64 or ARM64 machine." + exit 1 + esac + + docker_compose_path="/usr/local/bin/docker-compose" + if [ "${DOCKER_DASH_COMPOSE_VERSION}" = "v1" ]; then + err "The final Compose V1 release, version 1.29.2, was May 10, 2021. These packages haven't received any security updates since then. Use at your own risk." + INSTALL_DOCKER_COMPOSE_SWITCH="false" + + if [ "${target_compose_arch}" = "x86_64" ]; then + echo "(*) Installing docker compose v1..." + curl -fsSL "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-Linux-x86_64" -o ${docker_compose_path} + chmod +x ${docker_compose_path} + + # Download the SHA256 checksum + DOCKER_COMPOSE_SHA256="$(curl -sSL "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-Linux-x86_64.sha256" | awk '{print $1}')" + echo "${DOCKER_COMPOSE_SHA256} ${docker_compose_path}" > docker-compose.sha256sum + sha256sum -c docker-compose.sha256sum --ignore-missing + elif [ "${VERSION_CODENAME}" = "bookworm" ]; then + err "Docker compose v1 is unavailable for 'bookworm' on Arm64. Kindly switch to use v2" + exit 1 + else + # Use pip to get a version that runs on this architecture + check_packages python3-minimal python3-pip libffi-dev python3-venv + echo "(*) Installing docker compose v1 via pip..." + export PYTHONUSERBASE=/usr/local + pip3 install --disable-pip-version-check --no-cache-dir --user "Cython<3.0" pyyaml wheel docker-compose --no-build-isolation + fi + else + compose_version=${DOCKER_DASH_COMPOSE_VERSION#v} + docker_compose_url="https://github.com/docker/compose" + find_version_from_git_tags compose_version "$docker_compose_url" "tags/v" + echo "(*) Installing docker-compose ${compose_version}..." + curl -fsSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}" -o ${docker_compose_path} || { + echo -e "\n(!) Failed to fetch the latest artifacts for docker-compose v${compose_version}..." 
+ fallback_compose "$docker_compose_url" + } + + chmod +x ${docker_compose_path} + + # Download the SHA256 checksum + DOCKER_COMPOSE_SHA256="$(curl -sSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}.sha256" | awk '{print $1}')" + echo "${DOCKER_COMPOSE_SHA256} ${docker_compose_path}" > docker-compose.sha256sum + sha256sum -c docker-compose.sha256sum --ignore-missing + + mkdir -p ${cli_plugins_dir} + cp ${docker_compose_path} ${cli_plugins_dir} + fi + fi + + # fallback method for compose-switch + fallback_compose-switch() { + local url=$1 + local repo_url=$(get_github_api_repo_url "$url") + echo -e "\n(!) Failed to fetch the latest artifacts for compose-switch v${compose_switch_version}..." + get_previous_version "$url" "$repo_url" compose_switch_version + echo -e "\nAttempting to install v${compose_switch_version}" + curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${target_switch_arch}" -o /usr/local/bin/compose-switch + } + # Install docker-compose switch if not already installed - https://github.com/docker/compose-switch#manual-installation + if [ "${INSTALL_DOCKER_COMPOSE_SWITCH}" = "true" ] && ! type compose-switch > /dev/null 2>&1; then + if type docker-compose > /dev/null 2>&1; then + echo "(*) Installing compose-switch..." + current_compose_path="$(command -v docker-compose)" + target_compose_path="$(dirname "${current_compose_path}")/docker-compose-v1" + compose_switch_version="latest" + compose_switch_url="https://github.com/docker/compose-switch" + # Try to get latest version, fallback to known stable version if GitHub API fails + set +e + find_version_from_git_tags compose_switch_version "$compose_switch_url" + if [ $? 
-ne 0 ] || [ -z "${compose_switch_version}" ] || [ "${compose_switch_version}" = "latest" ]; then + echo "(*) GitHub API rate limited or failed, using fallback method" + fallback_compose-switch "$compose_switch_url" + fi + set -e + + # Map architecture for compose-switch downloads + case "${architecture}" in + amd64|x86_64) target_switch_arch=amd64 ;; + arm64|aarch64) target_switch_arch=arm64 ;; + *) target_switch_arch=${architecture} ;; + esac + curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${target_switch_arch}" -o /usr/local/bin/compose-switch || fallback_compose-switch "$compose_switch_url" + chmod +x /usr/local/bin/compose-switch + # TODO: Verify checksum once available: https://github.com/docker/compose-switch/issues/11 + # Setup v1 CLI as alternative in addition to compose-switch (which maps to v2) + mv "${current_compose_path}" "${target_compose_path}" + update-alternatives --install ${docker_compose_path} docker-compose /usr/local/bin/compose-switch 99 + update-alternatives --install ${docker_compose_path} docker-compose "${target_compose_path}" 1 + else + err "Skipping installation of compose-switch as docker compose is unavailable..." + fi + fi + + # If init file already exists, exit + if [ -f "/usr/local/share/docker-init.sh" ]; then + echo "/usr/local/share/docker-init.sh already exists, so exiting." + # Clean up + rm -rf /var/lib/apt/lists/* + exit 0 + fi + echo "docker-init doesn't exist, adding..." + + if ! cat /etc/group | grep -e "^docker:" > /dev/null 2>&1; then + groupadd -r docker + fi + + usermod -aG docker ${USERNAME} + + # fallback for docker/buildx + fallback_buildx() { + local url=$1 + local repo_url=$(get_github_api_repo_url "$url") + echo -e "\n(!) Failed to fetch the latest artifacts for docker buildx v${buildx_version}..." 
+ get_previous_version "$url" "$repo_url" buildx_version + buildx_file_name="buildx-v${buildx_version}.linux-${target_buildx_arch}" + echo -e "\nAttempting to install v${buildx_version}" + wget https://github.com/docker/buildx/releases/download/v${buildx_version}/${buildx_file_name} + } + + if [ "${INSTALL_DOCKER_BUILDX}" = "true" ]; then + buildx_version="latest" + docker_buildx_url="https://github.com/docker/buildx" + find_version_from_git_tags buildx_version "$docker_buildx_url" "refs/tags/v" + echo "(*) Installing buildx ${buildx_version}..." + + # Map architecture for buildx downloads + case "${architecture}" in + amd64|x86_64) target_buildx_arch=amd64 ;; + arm64|aarch64) target_buildx_arch=arm64 ;; + *) target_buildx_arch=${architecture} ;; + esac + + buildx_file_name="buildx-v${buildx_version}.linux-${target_buildx_arch}" + + cd /tmp + wget https://github.com/docker/buildx/releases/download/v${buildx_version}/${buildx_file_name} || fallback_buildx "$docker_buildx_url" + + docker_home="/usr/libexec/docker" + cli_plugins_dir="${docker_home}/cli-plugins" + + mkdir -p ${cli_plugins_dir} + mv ${buildx_file_name} ${cli_plugins_dir}/docker-buildx + chmod +x ${cli_plugins_dir}/docker-buildx + + chown -R "${USERNAME}:docker" "${docker_home}" + chmod -R g+r+w "${docker_home}" + find "${docker_home}" -type d -print0 | xargs -n 1 -0 chmod g+s + fi + + DOCKER_DEFAULT_IP6_TABLES="" + if [ "$DISABLE_IP6_TABLES" == true ]; then + requested_version="" + # checking whether the version requested either is in semver format or just a number denoting the major version + # and, extracting the major version number out of the two scenarios + semver_regex="^(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(-([0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*))?(\+([0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*))?$" + if echo "$DOCKER_VERSION" | grep -Eq $semver_regex; then + requested_version=$(echo $DOCKER_VERSION | cut -d. 
-f1) + elif echo "$DOCKER_VERSION" | grep -Eq "^[1-9][0-9]*$"; then + requested_version=$DOCKER_VERSION + fi + if [ "$DOCKER_VERSION" = "latest" ] || [[ -n "$requested_version" && "$requested_version" -ge 27 ]] ; then + DOCKER_DEFAULT_IP6_TABLES="--ip6tables=false" + echo "(!) As requested, passing '${DOCKER_DEFAULT_IP6_TABLES}'" + fi + fi + + if [ ! -d /usr/local/share ]; then + mkdir -p /usr/local/share + fi + + tee /usr/local/share/docker-init.sh > /dev/null \ + << EOF + #!/bin/sh + #------------------------------------------------------------------------------------------------------------- + # Copyright (c) Microsoft Corporation. All rights reserved. + # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. + #------------------------------------------------------------------------------------------------------------- + + set -e + + AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION} + DOCKER_DEFAULT_ADDRESS_POOL=${DOCKER_DEFAULT_ADDRESS_POOL} + DOCKER_DEFAULT_IP6_TABLES=${DOCKER_DEFAULT_IP6_TABLES} + EOF + + tee -a /usr/local/share/docker-init.sh > /dev/null \ + << 'EOF' + dockerd_start="AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION} DOCKER_DEFAULT_ADDRESS_POOL=${DOCKER_DEFAULT_ADDRESS_POOL} DOCKER_DEFAULT_IP6_TABLES=${DOCKER_DEFAULT_IP6_TABLES} $(cat << 'INNEREOF' + # explicitly remove dockerd and containerd PID file to ensure that it can start properly if it was stopped uncleanly + find /run /var/run -iname 'docker*.pid' -delete || : + find /run /var/run -iname 'container*.pid' -delete || : + + # -- Start: dind wrapper script -- + # Maintained: https://github.com/moby/moby/blob/master/hack/dind + + export container=docker + + if [ -d /sys/kernel/security ] && ! mountpoint -q /sys/kernel/security; then + mount -t securityfs none /sys/kernel/security || { + echo >&2 'Could not mount /sys/kernel/security.' + echo >&2 'AppArmor detection and --privileged mode might break.' 
+ } + fi + + # Mount /tmp (conditionally) + if ! mountpoint -q /tmp; then + mount -t tmpfs none /tmp + fi + + set_cgroup_nesting() + { + # cgroup v2: enable nesting + if [ -f /sys/fs/cgroup/cgroup.controllers ]; then + # move the processes from the root group to the /init group, + # otherwise writing subtree_control fails with EBUSY. + # An error during moving non-existent process (i.e., "cat") is ignored. + mkdir -p /sys/fs/cgroup/init + xargs -rn1 < /sys/fs/cgroup/cgroup.procs > /sys/fs/cgroup/init/cgroup.procs || : + # enable controllers + sed -e 's/ / +/g' -e 's/^/+/' < /sys/fs/cgroup/cgroup.controllers \ + > /sys/fs/cgroup/cgroup.subtree_control + fi + } + + # Set cgroup nesting, retrying if necessary + retry_cgroup_nesting=0 + + until [ "${retry_cgroup_nesting}" -eq "5" ]; + do + set +e + set_cgroup_nesting + + if [ $? -ne 0 ]; then + echo "(*) cgroup v2: Failed to enable nesting, retrying..." + else + break + fi + + retry_cgroup_nesting=`expr $retry_cgroup_nesting + 1` + set -e + done + + # -- End: dind wrapper script -- + + # Handle DNS + set +e + cat /etc/resolv.conf | grep -i 'internal.cloudapp.net' > /dev/null 2>&1 + if [ $? -eq 0 ] && [ "${AZURE_DNS_AUTO_DETECTION}" = "true" ] + then + echo "Setting dockerd Azure DNS." + CUSTOMDNS="--dns 168.63.129.16" + else + echo "Not setting dockerd DNS manually." 
+ CUSTOMDNS="" + fi + set -e + + if [ -z "$DOCKER_DEFAULT_ADDRESS_POOL" ] + then + DEFAULT_ADDRESS_POOL="" + else + DEFAULT_ADDRESS_POOL="--default-address-pool $DOCKER_DEFAULT_ADDRESS_POOL" + fi + + # Start docker/moby engine + ( dockerd $CUSTOMDNS $DEFAULT_ADDRESS_POOL $DOCKER_DEFAULT_IP6_TABLES > /tmp/dockerd.log 2>&1 ) & + INNEREOF + )" + + sudo_if() { + COMMAND="$*" + + if [ "$(id -u)" -ne 0 ]; then + sudo $COMMAND + else + $COMMAND + fi + } + + retry_docker_start_count=0 + docker_ok="false" + + until [ "${docker_ok}" = "true" ] || [ "${retry_docker_start_count}" -eq "5" ]; + do + # Start using sudo if not invoked as root + if [ "$(id -u)" -ne 0 ]; then + sudo /bin/sh -c "${dockerd_start}" + else + eval "${dockerd_start}" + fi + + retry_count=0 + until [ "${docker_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; + do + sleep 1s + set +e + docker info > /dev/null 2>&1 && docker_ok="true" + set -e + + retry_count=`expr $retry_count + 1` + done + + if [ "${docker_ok}" != "true" ] && [ "${retry_docker_start_count}" != "4" ]; then + echo "(*) Failed to start docker, retrying..." + set +e + sudo_if pkill dockerd + sudo_if pkill containerd + set -e + fi + + retry_docker_start_count=`expr $retry_docker_start_count + 1` + done + + # Execute whatever commands were passed in (if any). This allows us + # to set this script to ENTRYPOINT while still executing the default CMD. 
+ exec "$@" + EOF + + chmod +x /usr/local/share/docker-init.sh + chown ${USERNAME}:root /usr/local/share/docker-init.sh + + # Clean up + rm -rf /var/lib/apt/lists/* + + echo 'docker-in-docker-debian script has completed!'"#), + ]).await; + + return Ok(http::Response::builder() + .status(200) + .body(AsyncBody::from(response)) + .unwrap()); + } + if parts.uri.path() == "/v2/devcontainers/features/go/manifests/1" { + let response = r#" + { + "schemaVersion": 2, + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "config": { + "mediaType": "application/vnd.devcontainers", + "digest": "sha256:44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a", + "size": 2 + }, + "layers": [ + { + "mediaType": "application/vnd.devcontainers.layer.v1+tar", + "digest": "sha256:eadd8a4757ee8ea6c1bc0aae22da49b7e5f2f1e32a87a5eac3cadeb7d2ccdad1", + "size": 20992, + "annotations": { + "org.opencontainers.image.title": "devcontainer-feature-go.tgz" + } + } + ], + "annotations": { + "dev.containers.metadata": "{\"id\":\"go\",\"version\":\"1.3.3\",\"name\":\"Go\",\"documentationURL\":\"https://github.com/devcontainers/features/tree/main/src/go\",\"description\":\"Installs Go and common Go utilities. 
Auto-detects latest version and installs needed dependencies.\",\"options\":{\"version\":{\"type\":\"string\",\"proposals\":[\"latest\",\"none\",\"1.24\",\"1.23\"],\"default\":\"latest\",\"description\":\"Select or enter a Go version to install\"},\"golangciLintVersion\":{\"type\":\"string\",\"default\":\"latest\",\"description\":\"Version of golangci-lint to install\"}},\"init\":true,\"customizations\":{\"vscode\":{\"extensions\":[\"golang.Go\"],\"settings\":{\"github.copilot.chat.codeGeneration.instructions\":[{\"text\":\"This dev container includes Go and common Go utilities pre-installed and available on the `PATH`, along with the Go language extension for Go development.\"}]}}},\"containerEnv\":{\"GOROOT\":\"/usr/local/go\",\"GOPATH\":\"/go\",\"PATH\":\"/usr/local/go/bin:/go/bin:${PATH}\"},\"capAdd\":[\"SYS_PTRACE\"],\"securityOpt\":[\"seccomp=unconfined\"],\"installsAfter\":[\"ghcr.io/devcontainers/features/common-utils\"]}", + "com.github.package.type": "devcontainer_feature" + } + } + "#; + + return Ok(http::Response::builder() + .status(200) + .body(http_client::AsyncBody::from(response)) + .unwrap()); + } + if parts.uri.path() + == "/v2/devcontainers/features/go/blobs/sha256:eadd8a4757ee8ea6c1bc0aae22da49b7e5f2f1e32a87a5eac3cadeb7d2ccdad1" + { + let response = build_tarball(vec![ + ("./devcontainer-feature.json", r#" + { + "id": "go", + "version": "1.3.3", + "name": "Go", + "documentationURL": "https://github.com/devcontainers/features/tree/main/src/go", + "description": "Installs Go and common Go utilities. 
Auto-detects latest version and installs needed dependencies.", + "options": { + "version": { + "type": "string", + "proposals": [ + "latest", + "none", + "1.24", + "1.23" + ], + "default": "latest", + "description": "Select or enter a Go version to install" + }, + "golangciLintVersion": { + "type": "string", + "default": "latest", + "description": "Version of golangci-lint to install" + } + }, + "init": true, + "customizations": { + "vscode": { + "extensions": [ + "golang.Go" + ], + "settings": { + "github.copilot.chat.codeGeneration.instructions": [ + { + "text": "This dev container includes Go and common Go utilities pre-installed and available on the `PATH`, along with the Go language extension for Go development." + } + ] + } + } + }, + "containerEnv": { + "GOROOT": "/usr/local/go", + "GOPATH": "/go", + "PATH": "/usr/local/go/bin:/go/bin:${PATH}" + }, + "capAdd": [ + "SYS_PTRACE" + ], + "securityOpt": [ + "seccomp=unconfined" + ], + "installsAfter": [ + "ghcr.io/devcontainers/features/common-utils" + ] + } + "#), + ("./install.sh", r#" + #!/usr/bin/env bash + #------------------------------------------------------------------------------------------------------------- + # Copyright (c) Microsoft Corporation. All rights reserved. + # Licensed under the MIT License. 
See https://go.microsoft.com/fwlink/?linkid=2090316 for license information + #------------------------------------------------------------------------------------------------------------- + # + # Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/go.md + # Maintainer: The VS Code and Codespaces Teams + + TARGET_GO_VERSION="${VERSION:-"latest"}" + GOLANGCILINT_VERSION="${GOLANGCILINTVERSION:-"latest"}" + + TARGET_GOROOT="${TARGET_GOROOT:-"/usr/local/go"}" + TARGET_GOPATH="${TARGET_GOPATH:-"/go"}" + USERNAME="${USERNAME:-"${_REMOTE_USER:-"automatic"}"}" + INSTALL_GO_TOOLS="${INSTALL_GO_TOOLS:-"true"}" + + # https://www.google.com/linuxrepositories/ + GO_GPG_KEY_URI="https://dl.google.com/linux/linux_signing_key.pub" + + set -e + + if [ "$(id -u)" -ne 0 ]; then + echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' + exit 1 + fi + + # Bring in ID, ID_LIKE, VERSION_ID, VERSION_CODENAME + . /etc/os-release + # Get an adjusted ID independent of distro variants + MAJOR_VERSION_ID=$(echo ${VERSION_ID} | cut -d . -f 1) + if [ "${ID}" = "debian" ] || [ "${ID_LIKE}" = "debian" ]; then + ADJUSTED_ID="debian" + elif [[ "${ID}" = "rhel" || "${ID}" = "fedora" || "${ID}" = "mariner" || "${ID_LIKE}" = *"rhel"* || "${ID_LIKE}" = *"fedora"* || "${ID_LIKE}" = *"mariner"* ]]; then + ADJUSTED_ID="rhel" + if [[ "${ID}" = "rhel" ]] || [[ "${ID}" = *"alma"* ]] || [[ "${ID}" = *"rocky"* ]]; then + VERSION_CODENAME="rhel${MAJOR_VERSION_ID}" + else + VERSION_CODENAME="${ID}${MAJOR_VERSION_ID}" + fi + else + echo "Linux distro ${ID} not supported." + exit 1 + fi + + if [ "${ADJUSTED_ID}" = "rhel" ] && [ "${VERSION_CODENAME-}" = "centos7" ]; then + # As of 1 July 2024, mirrorlist.centos.org no longer exists. + # Update the repo files to reference vault.centos.org. 
+ sed -i s/mirror.centos.org/vault.centos.org/g /etc/yum.repos.d/*.repo + sed -i s/^#.*baseurl=http/baseurl=http/g /etc/yum.repos.d/*.repo + sed -i s/^mirrorlist=http/#mirrorlist=http/g /etc/yum.repos.d/*.repo + fi + + # Setup INSTALL_CMD & PKG_MGR_CMD + if type apt-get > /dev/null 2>&1; then + PKG_MGR_CMD=apt-get + INSTALL_CMD="${PKG_MGR_CMD} -y install --no-install-recommends" + elif type microdnf > /dev/null 2>&1; then + PKG_MGR_CMD=microdnf + INSTALL_CMD="${PKG_MGR_CMD} ${INSTALL_CMD_ADDL_REPOS} -y install --refresh --best --nodocs --noplugins --setopt=install_weak_deps=0" + elif type dnf > /dev/null 2>&1; then + PKG_MGR_CMD=dnf + INSTALL_CMD="${PKG_MGR_CMD} ${INSTALL_CMD_ADDL_REPOS} -y install --refresh --best --nodocs --noplugins --setopt=install_weak_deps=0" + else + PKG_MGR_CMD=yum + INSTALL_CMD="${PKG_MGR_CMD} ${INSTALL_CMD_ADDL_REPOS} -y install --noplugins --setopt=install_weak_deps=0" + fi + + # Clean up + clean_up() { + case ${ADJUSTED_ID} in + debian) + rm -rf /var/lib/apt/lists/* + ;; + rhel) + rm -rf /var/cache/dnf/* /var/cache/yum/* + rm -rf /tmp/yum.log + rm -rf ${GPG_INSTALL_PATH} + ;; + esac + } + clean_up + + + # Figure out correct version of a three part version number is not passed + find_version_from_git_tags() { + local variable_name=$1 + local requested_version=${!variable_name} + if [ "${requested_version}" = "none" ]; then return; fi + local repository=$2 + local prefix=${3:-"tags/v"} + local separator=${4:-"."} + local last_part_optional=${5:-"false"} + if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then + local escaped_separator=${separator//./\\.} + local last_part + if [ "${last_part_optional}" = "true" ]; then + last_part="(${escaped_separator}[0-9]+)?" + else + last_part="${escaped_separator}[0-9]+" + fi + local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$" + local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." 
| sort -rV)" + if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then + declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)" + else + set +e + declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")" + set -e + fi + fi + if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then + echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2 + exit 1 + fi + echo "${variable_name}=${!variable_name}" + } + + pkg_mgr_update() { + case $ADJUSTED_ID in + debian) + if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then + echo "Running apt-get update..." + ${PKG_MGR_CMD} update -y + fi + ;; + rhel) + if [ ${PKG_MGR_CMD} = "microdnf" ]; then + if [ "$(ls /var/cache/yum/* 2>/dev/null | wc -l)" = 0 ]; then + echo "Running ${PKG_MGR_CMD} makecache ..." + ${PKG_MGR_CMD} makecache + fi + else + if [ "$(ls /var/cache/${PKG_MGR_CMD}/* 2>/dev/null | wc -l)" = 0 ]; then + echo "Running ${PKG_MGR_CMD} check-update ..." + set +e + ${PKG_MGR_CMD} check-update + rc=$? + if [ $rc != 0 ] && [ $rc != 100 ]; then + exit 1 + fi + set -e + fi + fi + ;; + esac + } + + # Checks if packages are installed and installs them if not + check_packages() { + case ${ADJUSTED_ID} in + debian) + if ! dpkg -s "$@" > /dev/null 2>&1; then + pkg_mgr_update + ${INSTALL_CMD} "$@" + fi + ;; + rhel) + if ! rpm -q "$@" > /dev/null 2>&1; then + pkg_mgr_update + ${INSTALL_CMD} "$@" + fi + ;; + esac + } + + # Ensure that login shells get the correct path if the user updated the PATH using ENV. + rm -f /etc/profile.d/00-restore-env.sh + echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh + chmod +x /etc/profile.d/00-restore-env.sh + + # Some distributions do not install awk by default (e.g. Mariner) + if ! 
type awk >/dev/null 2>&1; then + check_packages awk + fi + + # Determine the appropriate non-root user + if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then + USERNAME="" + POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)") + for CURRENT_USER in "${POSSIBLE_USERS[@]}"; do + if id -u ${CURRENT_USER} > /dev/null 2>&1; then + USERNAME=${CURRENT_USER} + break + fi + done + if [ "${USERNAME}" = "" ]; then + USERNAME=root + fi + elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then + USERNAME=root + fi + + export DEBIAN_FRONTEND=noninteractive + + check_packages ca-certificates gnupg2 tar gcc make pkg-config + + if [ $ADJUSTED_ID = "debian" ]; then + check_packages g++ libc6-dev + else + check_packages gcc-c++ glibc-devel + fi + # Install curl, git, other dependencies if missing + if ! type curl > /dev/null 2>&1; then + check_packages curl + fi + if ! type git > /dev/null 2>&1; then + check_packages git + fi + # Some systems, e.g. Mariner, still a few more packages + if ! type as > /dev/null 2>&1; then + check_packages binutils + fi + if ! [ -f /usr/include/linux/errno.h ]; then + check_packages kernel-headers + fi + # Minimal RHEL install may need findutils installed + if ! [ -f /usr/bin/find ]; then + check_packages findutils + fi + + # Get closest match for version number specified + find_version_from_git_tags TARGET_GO_VERSION "https://go.googlesource.com/go" "tags/go" "." "true" + + architecture="$(uname -m)" + case $architecture in + x86_64) architecture="amd64";; + aarch64 | armv8*) architecture="arm64";; + aarch32 | armv7* | armvhf*) architecture="armv6l";; + i?86) architecture="386";; + *) echo "(!) Architecture $architecture unsupported"; exit 1 ;; + esac + + # Install Go + umask 0002 + if ! 
cat /etc/group | grep -e "^golang:" > /dev/null 2>&1; then + groupadd -r golang + fi + usermod -a -G golang "${USERNAME}" + mkdir -p "${TARGET_GOROOT}" "${TARGET_GOPATH}" + + if [[ "${TARGET_GO_VERSION}" != "none" ]] && [[ "$(go version 2>/dev/null)" != *"${TARGET_GO_VERSION}"* ]]; then + # Use a temporary location for gpg keys to avoid polluting image + export GNUPGHOME="/tmp/tmp-gnupg" + mkdir -p ${GNUPGHOME} + chmod 700 ${GNUPGHOME} + curl -sSL -o /tmp/tmp-gnupg/golang_key "${GO_GPG_KEY_URI}" + gpg -q --import /tmp/tmp-gnupg/golang_key + echo "Downloading Go ${TARGET_GO_VERSION}..." + set +e + curl -fsSL -o /tmp/go.tar.gz "https://golang.org/dl/go${TARGET_GO_VERSION}.linux-${architecture}.tar.gz" + exit_code=$? + set -e + if [ "$exit_code" != "0" ]; then + echo "(!) Download failed." + # Try one break fix version number less if we get a failure. Use "set +e" since "set -e" can cause failures in valid scenarios. + set +e + major="$(echo "${TARGET_GO_VERSION}" | grep -oE '^[0-9]+' || echo '')" + minor="$(echo "${TARGET_GO_VERSION}" | grep -oP '^[0-9]+\.\K[0-9]+' || echo '')" + breakfix="$(echo "${TARGET_GO_VERSION}" | grep -oP '^[0-9]+\.[0-9]+\.\K[0-9]+' 2>/dev/null || echo '')" + # Handle Go's odd version pattern where "0" releases omit the last part + if [ "${breakfix}" = "" ] || [ "${breakfix}" = "0" ]; then + ((minor=minor-1)) + TARGET_GO_VERSION="${major}.${minor}" + # Look for latest version from previous minor release + find_version_from_git_tags TARGET_GO_VERSION "https://go.googlesource.com/go" "tags/go" "." "true" + else + ((breakfix=breakfix-1)) + if [ "${breakfix}" = "0" ]; then + TARGET_GO_VERSION="${major}.${minor}" + else + TARGET_GO_VERSION="${major}.${minor}.${breakfix}" + fi + fi + set -e + echo "Trying ${TARGET_GO_VERSION}..." 
+ curl -fsSL -o /tmp/go.tar.gz "https://golang.org/dl/go${TARGET_GO_VERSION}.linux-${architecture}.tar.gz" + fi + curl -fsSL -o /tmp/go.tar.gz.asc "https://golang.org/dl/go${TARGET_GO_VERSION}.linux-${architecture}.tar.gz.asc" + gpg --verify /tmp/go.tar.gz.asc /tmp/go.tar.gz + echo "Extracting Go ${TARGET_GO_VERSION}..." + tar -xzf /tmp/go.tar.gz -C "${TARGET_GOROOT}" --strip-components=1 + rm -rf /tmp/go.tar.gz /tmp/go.tar.gz.asc /tmp/tmp-gnupg + else + echo "(!) Go is already installed with version ${TARGET_GO_VERSION}. Skipping." + fi + + # Install Go tools that are isImportant && !replacedByGopls based on + # https://github.com/golang/vscode-go/blob/v0.38.0/src/goToolsInformation.ts + GO_TOOLS="\ + golang.org/x/tools/gopls@latest \ + honnef.co/go/tools/cmd/staticcheck@latest \ + golang.org/x/lint/golint@latest \ + github.com/mgechev/revive@latest \ + github.com/go-delve/delve/cmd/dlv@latest \ + github.com/fatih/gomodifytags@latest \ + github.com/haya14busa/goplay/cmd/goplay@latest \ + github.com/cweill/gotests/gotests@latest \ + github.com/josharian/impl@latest" + + if [ "${INSTALL_GO_TOOLS}" = "true" ]; then + echo "Installing common Go tools..." + export PATH=${TARGET_GOROOT}/bin:${PATH} + export GOPATH=/tmp/gotools + export GOCACHE="${GOPATH}/cache" + + mkdir -p "${GOPATH}" /usr/local/etc/vscode-dev-containers "${TARGET_GOPATH}/bin" + cd "${GOPATH}" + + # Use go get for versions of go under 1.16 + go_install_command=install + if [[ "1.16" > "$(go version | grep -oP 'go\K[0-9]+\.[0-9]+(\.[0-9]+)?')" ]]; then + export GO111MODULE=on + go_install_command=get + echo "Go version < 1.16, using go get." 
+ fi + + (echo "${GO_TOOLS}" | xargs -n 1 go ${go_install_command} -v )2>&1 | tee -a /usr/local/etc/vscode-dev-containers/go.log + + # Move Go tools into path + if [ -d "${GOPATH}/bin" ]; then + mv "${GOPATH}/bin"/* "${TARGET_GOPATH}/bin/" + fi + + # Install golangci-lint from precompiled binaries + if [ "$GOLANGCILINT_VERSION" = "latest" ] || [ "$GOLANGCILINT_VERSION" = "" ]; then + echo "Installing golangci-lint latest..." + curl -fsSL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | \ + sh -s -- -b "${TARGET_GOPATH}/bin" + else + echo "Installing golangci-lint ${GOLANGCILINT_VERSION}..." + curl -fsSL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | \ + sh -s -- -b "${TARGET_GOPATH}/bin" "v${GOLANGCILINT_VERSION}" + fi + + # Remove Go tools temp directory + rm -rf "${GOPATH}" + fi + + + chown -R "${USERNAME}:golang" "${TARGET_GOROOT}" "${TARGET_GOPATH}" + chmod -R g+r+w "${TARGET_GOROOT}" "${TARGET_GOPATH}" + find "${TARGET_GOROOT}" -type d -print0 | xargs -n 1 -0 chmod g+s + find "${TARGET_GOPATH}" -type d -print0 | xargs -n 1 -0 chmod g+s + + # Clean up + clean_up + + echo "Done!" 
+ "#), + ]) + .await; + return Ok(http::Response::builder() + .status(200) + .body(AsyncBody::from(response)) + .unwrap()); + } + if parts.uri.path() == "/v2/devcontainers/features/aws-cli/manifests/1" { + let response = r#" + { + "schemaVersion": 2, + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "config": { + "mediaType": "application/vnd.devcontainers", + "digest": "sha256:44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a", + "size": 2 + }, + "layers": [ + { + "mediaType": "application/vnd.devcontainers.layer.v1+tar", + "digest": "sha256:4e9b04b394fb63e297b3d5f58185406ea45bddb639c2ba83b5a8394643cd5b13", + "size": 19968, + "annotations": { + "org.opencontainers.image.title": "devcontainer-feature-aws-cli.tgz" + } + } + ], + "annotations": { + "dev.containers.metadata": "{\"id\":\"aws-cli\",\"version\":\"1.1.3\",\"name\":\"AWS CLI\",\"documentationURL\":\"https://github.com/devcontainers/features/tree/main/src/aws-cli\",\"description\":\"Installs the AWS CLI along with needed dependencies. 
Useful for base Dockerfiles that often are missing required install dependencies like gpg.\",\"options\":{\"version\":{\"type\":\"string\",\"proposals\":[\"latest\"],\"default\":\"latest\",\"description\":\"Select or enter an AWS CLI version.\"},\"verbose\":{\"type\":\"boolean\",\"default\":true,\"description\":\"Suppress verbose output.\"}},\"customizations\":{\"vscode\":{\"extensions\":[\"AmazonWebServices.aws-toolkit-vscode\"],\"settings\":{\"github.copilot.chat.codeGeneration.instructions\":[{\"text\":\"This dev container includes the AWS CLI along with needed dependencies pre-installed and available on the `PATH`, along with the AWS Toolkit extensions for AWS development.\"}]}}},\"installsAfter\":[\"ghcr.io/devcontainers/features/common-utils\"]}", + "com.github.package.type": "devcontainer_feature" + } + }"#; + return Ok(http::Response::builder() + .status(200) + .body(AsyncBody::from(response)) + .unwrap()); + } + if parts.uri.path() + == "/v2/devcontainers/features/aws-cli/blobs/sha256:4e9b04b394fb63e297b3d5f58185406ea45bddb639c2ba83b5a8394643cd5b13" + { + let response = build_tarball(vec![ + ( + "./devcontainer-feature.json", + r#" +{ + "id": "aws-cli", + "version": "1.1.3", + "name": "AWS CLI", + "documentationURL": "https://github.com/devcontainers/features/tree/main/src/aws-cli", + "description": "Installs the AWS CLI along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.", + "options": { + "version": { + "type": "string", + "proposals": [ + "latest" + ], + "default": "latest", + "description": "Select or enter an AWS CLI version." + }, + "verbose": { + "type": "boolean", + "default": true, + "description": "Suppress verbose output." 
+ } + }, + "customizations": { + "vscode": { + "extensions": [ + "AmazonWebServices.aws-toolkit-vscode" + ], + "settings": { + "github.copilot.chat.codeGeneration.instructions": [ + { + "text": "This dev container includes the AWS CLI along with needed dependencies pre-installed and available on the `PATH`, along with the AWS Toolkit extensions for AWS development." + } + ] + } + } + }, + "installsAfter": [ + "ghcr.io/devcontainers/features/common-utils" + ] +} + "#, + ), + ( + "./install.sh", + r#"#!/usr/bin/env bash + #------------------------------------------------------------------------------------------------------------- + # Copyright (c) Microsoft Corporation. All rights reserved. + # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. + #------------------------------------------------------------------------------------------------------------- + # + # Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/awscli.md + # Maintainer: The VS Code and Codespaces Teams + + set -e + + # Clean up + rm -rf /var/lib/apt/lists/* + + VERSION=${VERSION:-"latest"} + VERBOSE=${VERBOSE:-"true"} + + AWSCLI_GPG_KEY=FB5DB77FD5C118B80511ADA8A6310ACC4672475C + AWSCLI_GPG_KEY_MATERIAL="-----BEGIN PGP PUBLIC KEY BLOCK----- + + mQINBF2Cr7UBEADJZHcgusOJl7ENSyumXh85z0TRV0xJorM2B/JL0kHOyigQluUG + ZMLhENaG0bYatdrKP+3H91lvK050pXwnO/R7fB/FSTouki4ciIx5OuLlnJZIxSzx + PqGl0mkxImLNbGWoi6Lto0LYxqHN2iQtzlwTVmq9733zd3XfcXrZ3+LblHAgEt5G + TfNxEKJ8soPLyWmwDH6HWCnjZ/aIQRBTIQ05uVeEoYxSh6wOai7ss/KveoSNBbYz + gbdzoqI2Y8cgH2nbfgp3DSasaLZEdCSsIsK1u05CinE7k2qZ7KgKAUIcT/cR/grk + C6VwsnDU0OUCideXcQ8WeHutqvgZH1JgKDbznoIzeQHJD238GEu+eKhRHcz8/jeG + 94zkcgJOz3KbZGYMiTh277Fvj9zzvZsbMBCedV1BTg3TqgvdX4bdkhf5cH+7NtWO + lrFj6UwAsGukBTAOxC0l/dnSmZhJ7Z1KmEWilro/gOrjtOxqRQutlIqG22TaqoPG + fYVN+en3Zwbt97kcgZDwqbuykNt64oZWc4XKCa3mprEGC3IbJTBFqglXmZ7l9ywG + EEUJYOlb2XrSuPWml39beWdKM8kzr1OjnlOm6+lpTRCBfo0wa9F8YZRhHPAkwKkX + 
XDeOGpWRj4ohOx0d2GWkyV5xyN14p2tQOCdOODmz80yUTgRpPVQUtOEhXQARAQAB + tCFBV1MgQ0xJIFRlYW0gPGF3cy1jbGlAYW1hem9uLmNvbT6JAlQEEwEIAD4WIQT7 + Xbd/1cEYuAURraimMQrMRnJHXAUCXYKvtQIbAwUJB4TOAAULCQgHAgYVCgkICwIE + FgIDAQIeAQIXgAAKCRCmMQrMRnJHXJIXEAChLUIkg80uPUkGjE3jejvQSA1aWuAM + yzy6fdpdlRUz6M6nmsUhOExjVIvibEJpzK5mhuSZ4lb0vJ2ZUPgCv4zs2nBd7BGJ + MxKiWgBReGvTdqZ0SzyYH4PYCJSE732x/Fw9hfnh1dMTXNcrQXzwOmmFNNegG0Ox + au+VnpcR5Kz3smiTrIwZbRudo1ijhCYPQ7t5CMp9kjC6bObvy1hSIg2xNbMAN/Do + ikebAl36uA6Y/Uczjj3GxZW4ZWeFirMidKbtqvUz2y0UFszobjiBSqZZHCreC34B + hw9bFNpuWC/0SrXgohdsc6vK50pDGdV5kM2qo9tMQ/izsAwTh/d/GzZv8H4lV9eO + tEis+EpR497PaxKKh9tJf0N6Q1YLRHof5xePZtOIlS3gfvsH5hXA3HJ9yIxb8T0H + QYmVr3aIUse20i6meI3fuV36VFupwfrTKaL7VXnsrK2fq5cRvyJLNzXucg0WAjPF + RrAGLzY7nP1xeg1a0aeP+pdsqjqlPJom8OCWc1+6DWbg0jsC74WoesAqgBItODMB + rsal1y/q+bPzpsnWjzHV8+1/EtZmSc8ZUGSJOPkfC7hObnfkl18h+1QtKTjZme4d + H17gsBJr+opwJw/Zio2LMjQBOqlm3K1A4zFTh7wBC7He6KPQea1p2XAMgtvATtNe + YLZATHZKTJyiqA== + =vYOk + -----END PGP PUBLIC KEY BLOCK-----" + + if [ "$(id -u)" -ne 0 ]; then + echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' + exit 1 + fi + + apt_get_update() + { + if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then + echo "Running apt-get update..." + apt-get update -y + fi + } + + # Checks if packages are installed and installs them if not + check_packages() { + if ! dpkg -s "$@" > /dev/null 2>&1; then + apt_get_update + apt-get -y install --no-install-recommends "$@" + fi + } + + export DEBIAN_FRONTEND=noninteractive + + check_packages curl ca-certificates gpg dirmngr unzip bash-completion less + + verify_aws_cli_gpg_signature() { + local filePath=$1 + local sigFilePath=$2 + local awsGpgKeyring=aws-cli-public-key.gpg + + echo "${AWSCLI_GPG_KEY_MATERIAL}" | gpg --dearmor > "./${awsGpgKeyring}" + gpg --batch --quiet --no-default-keyring --keyring "./${awsGpgKeyring}" --verify "${sigFilePath}" "${filePath}" + local status=$? 
+ + rm "./${awsGpgKeyring}" + + return ${status} + } + + install() { + local scriptZipFile=awscli.zip + local scriptSigFile=awscli.sig + + # See Linux install docs at https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html + if [ "${VERSION}" != "latest" ]; then + local versionStr=-${VERSION} + fi + architecture=$(dpkg --print-architecture) + case "${architecture}" in + amd64) architectureStr=x86_64 ;; + arm64) architectureStr=aarch64 ;; + *) + echo "AWS CLI does not support machine architecture '$architecture'. Please use an x86-64 or ARM64 machine." + exit 1 + esac + local scriptUrl=https://awscli.amazonaws.com/awscli-exe-linux-${architectureStr}${versionStr}.zip + curl "${scriptUrl}" -o "${scriptZipFile}" + curl "${scriptUrl}.sig" -o "${scriptSigFile}" + + verify_aws_cli_gpg_signature "$scriptZipFile" "$scriptSigFile" + if (( $? > 0 )); then + echo "Could not verify GPG signature of AWS CLI install script. Make sure you provided a valid version." + exit 1 + fi + + if [ "${VERBOSE}" = "false" ]; then + unzip -q "${scriptZipFile}" + else + unzip "${scriptZipFile}" + fi + + ./aws/install + + # kubectl bash completion + mkdir -p /etc/bash_completion.d + cp ./scripts/vendor/aws_bash_completer /etc/bash_completion.d/aws + + # kubectl zsh completion + if [ -e "${USERHOME}/.oh-my-zsh" ]; then + mkdir -p "${USERHOME}/.oh-my-zsh/completions" + cp ./scripts/vendor/aws_zsh_completer.sh "${USERHOME}/.oh-my-zsh/completions/_aws" + chown -R "${USERNAME}" "${USERHOME}/.oh-my-zsh" + fi + + rm -rf ./aws + } + + echo "(*) Installing AWS CLI..." + + install + + # Clean up + rm -rf /var/lib/apt/lists/* + + echo "Done!""#, + ), + ("./scripts/", r#""#), + ( + "./scripts/fetch-latest-completer-scripts.sh", + r#" + #!/bin/bash + #------------------------------------------------------------------------------------------------------------- + # Copyright (c) Microsoft Corporation. All rights reserved. + # Licensed under the MIT License. 
See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. + #------------------------------------------------------------------------------------------------------------- + # + # Docs: https://github.com/devcontainers/features/tree/main/src/aws-cli + # Maintainer: The Dev Container spec maintainers + # + # Run this script to replace aws_bash_completer and aws_zsh_completer.sh with the latest and greatest available version + # + COMPLETER_SCRIPTS=$(dirname "${BASH_SOURCE[0]}") + BASH_COMPLETER_SCRIPT="$COMPLETER_SCRIPTS/vendor/aws_bash_completer" + ZSH_COMPLETER_SCRIPT="$COMPLETER_SCRIPTS/vendor/aws_zsh_completer.sh" + + wget https://raw.githubusercontent.com/aws/aws-cli/v2/bin/aws_bash_completer -O "$BASH_COMPLETER_SCRIPT" + chmod +x "$BASH_COMPLETER_SCRIPT" + + wget https://raw.githubusercontent.com/aws/aws-cli/v2/bin/aws_zsh_completer.sh -O "$ZSH_COMPLETER_SCRIPT" + chmod +x "$ZSH_COMPLETER_SCRIPT" + "#, + ), + ("./scripts/vendor/", r#""#), + ( + "./scripts/vendor/aws_bash_completer", + r#" + # Typically that would be added under one of the following paths: + # - /etc/bash_completion.d + # - /usr/local/etc/bash_completion.d + # - /usr/share/bash-completion/completions + + complete -C aws_completer aws + "#, + ), + ( + "./scripts/vendor/aws_zsh_completer.sh", + r#" + # Source this file to activate auto completion for zsh using the bash + # compatibility helper. Make sure to run `compinit` before, which should be + # given usually. + # + # % source /path/to/zsh_complete.sh + # + # Typically that would be called somewhere in your .zshrc. + # + # Note, the overwrite of _bash_complete() is to export COMP_LINE and COMP_POINT + # That is only required for zsh <= edab1d3dbe61da7efe5f1ac0e40444b2ec9b9570 + # + # https://github.com/zsh-users/zsh/commit/edab1d3dbe61da7efe5f1ac0e40444b2ec9b9570 + # + # zsh releases prior to that version do not export the required env variables! 
+ + autoload -Uz bashcompinit + bashcompinit -i + + _bash_complete() { + local ret=1 + local -a suf matches + local -x COMP_POINT COMP_CWORD + local -a COMP_WORDS COMPREPLY BASH_VERSINFO + local -x COMP_LINE="$words" + local -A savejobstates savejobtexts + + (( COMP_POINT = 1 + ${#${(j. .)words[1,CURRENT]}} + $#QIPREFIX + $#IPREFIX + $#PREFIX )) + (( COMP_CWORD = CURRENT - 1)) + COMP_WORDS=( $words ) + BASH_VERSINFO=( 2 05b 0 1 release ) + + savejobstates=( ${(kv)jobstates} ) + savejobtexts=( ${(kv)jobtexts} ) + + [[ ${argv[${argv[(I)nospace]:-0}-1]} = -o ]] && suf=( -S '' ) + + matches=( ${(f)"$(compgen $@ -- ${words[CURRENT]})"} ) + + if [[ -n $matches ]]; then + if [[ ${argv[${argv[(I)filenames]:-0}-1]} = -o ]]; then + compset -P '*/' && matches=( ${matches##*/} ) + compset -S '/*' && matches=( ${matches%%/*} ) + compadd -Q -f "${suf[@]}" -a matches && ret=0 + else + compadd -Q "${suf[@]}" -a matches && ret=0 + fi + fi + + if (( ret )); then + if [[ ${argv[${argv[(I)default]:-0}-1]} = -o ]]; then + _default "${suf[@]}" && ret=0 + elif [[ ${argv[${argv[(I)dirnames]:-0}-1]} = -o ]]; then + _directories "${suf[@]}" && ret=0 + fi + fi + + return ret + } + + complete -C aws_completer aws + "#, + ), + ]).await; + + return Ok(http::Response::builder() + .status(200) + .body(AsyncBody::from(response)) + .unwrap()); + } + + Ok(http::Response::builder() + .status(404) + .body(http_client::AsyncBody::default()) + .unwrap()) + }) + } +} diff --git a/crates/dev_container/src/docker.rs b/crates/dev_container/src/docker.rs new file mode 100644 index 0000000000000000000000000000000000000000..9594eae3d0faf67669e7d1ad487925b77a54fc34 --- /dev/null +++ b/crates/dev_container/src/docker.rs @@ -0,0 +1,898 @@ +use std::{collections::HashMap, path::PathBuf}; + +use async_trait::async_trait; +use serde::{Deserialize, Deserializer, Serialize}; +use util::command::Command; + +use crate::{ + command_json::evaluate_json_command, devcontainer_api::DevContainerError, + 
devcontainer_json::MountDefinition,
+};
+
+/// One row of `docker ps --format={{ json . }}` output.
+#[derive(Debug, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "PascalCase")]
+pub(crate) struct DockerPs {
+    // Docker prints the container id as "ID"; podman prints "Id"
+    // (handled by the PascalCase rename). Both are covered — see the
+    // `should_deserialize_docker_ps_with_filters` test below.
+    #[serde(alias = "ID")]
+    pub(crate) id: String,
+}
+
+/// Subset of the `State` object emitted by `docker inspect`.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "PascalCase")]
+pub(crate) struct DockerState {
+    pub(crate) running: bool,
+}
+
+/// Subset of `docker inspect` output for a container or image.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "PascalCase")]
+pub(crate) struct DockerInspect {
+    pub(crate) id: String,
+    pub(crate) config: DockerInspectConfig,
+    // `Mounts` is absent when inspecting an image rather than a container.
+    pub(crate) mounts: Option<Vec<DockerInspectMount>>,
+    pub(crate) state: Option<DockerState>,
+}
+
+/// The `Config.Labels` object from `docker inspect`.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)]
+pub(crate) struct DockerConfigLabels {
+    // The devcontainer CLI stores feature metadata as a JSON string inside
+    // this label; it is decoded by `deserialize_metadata` below.
+    // NOTE(review): value type reconstructed as `serde_json::Value` — confirm
+    // against callers of `metadata`.
+    #[serde(
+        rename = "devcontainer.metadata",
+        deserialize_with = "deserialize_metadata"
+    )]
+    pub(crate) metadata: Option<Vec<HashMap<String, serde_json::Value>>>,
+}
+
+/// The `Config` object from `docker inspect`.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "PascalCase")]
+pub(crate) struct DockerInspectConfig {
+    pub(crate) labels: DockerConfigLabels,
+    #[serde(rename = "User")]
+    pub(crate) image_user: Option<String>,
+    // `Env` entries are raw "KEY=VALUE" strings; see `env_as_map`.
+    #[serde(default)]
+    pub(crate) env: Vec<String>,
+}
+
+impl DockerInspectConfig {
+    /// Parses the raw `Env` list into a key/value map.
+    ///
+    /// Returns `DevContainerError::DevContainerParseFailed` if an entry
+    /// contains no `=` at all.
+    pub(crate) fn env_as_map(&self) -> Result<HashMap<String, String>, DevContainerError> {
+        let mut map = HashMap::new();
+        for env_var in &self.env {
+            // Split on the FIRST '=' only: env values routinely contain '='
+            // themselves (e.g. LS_COLORS, JAVA_TOOL_OPTIONS), so splitting on
+            // every '=' would incorrectly reject valid entries.
+            let Some((key, value)) = env_var.split_once('=') else {
+                log::error!("Unable to parse {env_var} into an environment key-value");
+                return Err(DevContainerError::DevContainerParseFailed);
+            };
+            map.insert(key.to_string(), value.to_string());
+        }
+        Ok(map)
+    }
+}
+
+/// One entry of the `Mounts` array from `docker inspect`.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)]
+#[serde(rename_all = "PascalCase")]
+pub(crate) struct DockerInspectMount {
+    pub(crate) source: String,
+    pub(crate) destination: String,
+}
+
+/// The `build` section of a compose service, as emitted by
+/// `docker compose config --format json`.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)]
+pub(crate) struct DockerComposeServiceBuild {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) context: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) dockerfile: Option<String>,
+    // NOTE(review): `compose config` normalizes build args to a map — confirm
+    // the value type against fixtures (null-valued args may need Option).
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) args: Option<HashMap<String, String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) additional_contexts: Option<HashMap<String, String>>,
+}
+
+/// One service from `docker compose config --format json`.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)]
+pub(crate) struct DockerComposeService {
+    pub(crate) image: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) entrypoint: Option<Vec<String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) cap_add: Option<Vec<String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) security_opt: Option<Vec<String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) labels: Option<HashMap<String, String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) build: Option<DockerComposeServiceBuild>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) privileged: Option<bool>,
+    // NOTE(review): no #[serde(default)] here — a service with no `volumes`
+    // key would fail to deserialize. Confirm `compose config` always emits it.
+    pub(crate) volumes: Vec<MountDefinition>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) env_file: Option<Vec<String>>,
+    // NOTE(review): port entry shape varies across compose versions; kept
+    // loose as raw JSON values — confirm against callers.
+    #[serde(default, skip_serializing_if = "Vec::is_empty")]
+    pub(crate) ports: Vec<serde_json::Value>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) network_mode: Option<String>,
+}
+
+/// One named volume from `docker compose config --format json`.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)]
+pub(crate) struct DockerComposeVolume {
+    pub(crate) name: String,
+}
+
+/// Top-level structure of `docker compose config --format json`.
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)]
+pub(crate) struct DockerComposeConfig {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) name: Option<String>,
+    pub(crate) services: HashMap<String, DockerComposeService>,
+    pub(crate) volumes: HashMap<String, DockerComposeVolume>,
+}
+
+/// A `DockerClient` backed by a real docker-compatible CLI
+/// (`docker` or `podman`).
+pub(crate) struct Docker {
+    docker_cli: String,
+}
+
+impl DockerInspect {
+    /// True only when the inspect output includes a `State` that is running.
+    pub(crate) fn is_running(&self) -> bool {
+        self.state.as_ref().is_some_and(|s| s.running)
+    }
+}
+
+impl Docker {
+    pub(crate) fn new(docker_cli: &str) -> Self {
+        Self {
+            docker_cli: docker_cli.to_string(),
+        }
+    }
+
+    fn is_podman(&self) -> bool {
+        self.docker_cli == "podman"
+    }
+
+    /// Runs `<cli> pull <image>`, mapping any failure to
+    /// `ResourceFetchFailed`.
+    async fn pull_image(&self, image: &str) -> Result<(), DevContainerError> {
+        let mut command = Command::new(&self.docker_cli);
+        command.args(&["pull", image]);
+
+        let output = command.output().await.map_err(|e| {
+            log::error!("Error pulling image: {e}");
+            DevContainerError::ResourceFetchFailed
+        })?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            log::error!("Non-success result from docker pull: {stderr}");
+            return Err(DevContainerError::ResourceFetchFailed);
+        }
+        Ok(())
+    }
+
+    /// Builds `<cli> ps -a [--filter <f>]... --format={{ json . }}`.
+    fn create_docker_query_containers(&self, filters: Vec<String>) -> Command {
+        let mut command = Command::new(&self.docker_cli);
+        command.args(&["ps", "-a"]);
+
+        for filter in filters {
+            command.arg("--filter");
+            command.arg(filter);
+        }
+        command.arg("--format={{ json . }}");
+        command
+    }
+
+    /// Builds `<cli> inspect --format={{json . }} <id>`.
+    fn create_docker_inspect(&self, id: &str) -> Command {
+        let mut command = Command::new(&self.docker_cli);
+        command.args(&["inspect", "--format={{json . }}", id]);
+        command
+    }
+
+    /// Builds `<cli> compose [-f <file>]... config --format json`.
+    fn create_docker_compose_config_command(&self, config_files: &Vec<PathBuf>) -> Command {
+        let mut command = Command::new(&self.docker_cli);
+        command.arg("compose");
+        for file_path in config_files {
+            command.args(&["-f", &file_path.display().to_string()]);
+        }
+        command.args(&["config", "--format", "json"]);
+        command
+    }
+}
+
+#[async_trait]
+impl DockerClient for Docker {
+    async fn inspect(&self, id: &String) -> Result<DockerInspect, DevContainerError> {
+        // Try to pull the image, continue on failure; Image may be local only, id a reference to a running container
+        self.pull_image(id).await.ok();
+
+        let command = self.create_docker_inspect(id);
+
+        let Some(docker_inspect): Option<DockerInspect> = evaluate_json_command(command).await?
+        else {
+            log::error!("Docker inspect produced no deserializable output");
+            return Err(DevContainerError::CommandFailed(self.docker_cli.clone()));
+        };
+        Ok(docker_inspect)
+    }
+
+    async fn get_docker_compose_config(
+        &self,
+        config_files: &Vec<PathBuf>,
+    ) -> Result<Option<DockerComposeConfig>, DevContainerError> {
+        let command = self.create_docker_compose_config_command(config_files);
+        evaluate_json_command(command).await
+    }
+
+    async fn docker_compose_build(
+        &self,
+        config_files: &Vec<PathBuf>,
+        project_name: &str,
+    ) -> Result<(), DevContainerError> {
+        let mut command = Command::new(&self.docker_cli);
+        // BuildKit is a docker-only feature; podman rejects the variable.
+        if !self.is_podman() {
+            command.env("DOCKER_BUILDKIT", "1");
+        }
+        command.args(&["compose", "--project-name", project_name]);
+        for docker_compose_file in config_files {
+            command.args(&["-f", &docker_compose_file.display().to_string()]);
+        }
+        command.arg("build");
+
+        let output = command.output().await.map_err(|e| {
+            log::error!("Error running docker compose build: {e}");
+            DevContainerError::CommandFailed(command.get_program().display().to_string())
+        })?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            log::error!("Non-success status from docker compose build: {}", stderr);
+            return Err(DevContainerError::CommandFailed(
+                command.get_program().display().to_string(),
+            ));
+        }
+
+        Ok(())
+    }
+
+    async fn run_docker_exec(
+        &self,
+        container_id: &str,
+        remote_folder: &str,
+        user: &str,
+        env: &HashMap<String, String>,
+        inner_command: Command,
+    ) -> Result<(), DevContainerError> {
+        let mut command = Command::new(&self.docker_cli);
+
+        command.args(&["exec", "-w", remote_folder, "-u", user]);
+
+        for (k, v) in env.iter() {
+            command.arg("-e");
+            let env_declaration = format!("{}={}", k, v);
+            command.arg(&env_declaration);
+        }
+
+        command.arg(container_id);
+
+        command.arg("sh");
+
+        // NOTE(review): joining argv with spaces into `sh -c` loses quoting —
+        // an argument containing whitespace or shell metacharacters will be
+        // mis-parsed inside the container. Consider shell-escaping each piece.
+        let mut inner_program_script: Vec<String> =
+            vec![inner_command.get_program().display().to_string()];
+        let mut args: Vec<String> = inner_command
+            .get_args()
+            .map(|arg| arg.display().to_string())
+            .collect();
+        inner_program_script.append(&mut args);
+        command.args(&["-c", &inner_program_script.join(" ")]);
+
+        let output = command.output().await.map_err(|e| {
+            log::error!("Error running command {e} in container exec");
+            DevContainerError::ContainerNotValid(container_id.to_string())
+        })?;
+        // A non-zero exit status is deliberately logged but not propagated:
+        // exec'd setup commands are best-effort.
+        if !output.status.success() {
+            let std_err = String::from_utf8_lossy(&output.stderr);
+            log::error!("Command produced a non-successful output. StdErr: {std_err}");
+        }
+        let std_out = String::from_utf8_lossy(&output.stdout);
+        log::debug!("Command output:\n {std_out}");
+
+        Ok(())
+    }
+
+    async fn start_container(&self, id: &str) -> Result<(), DevContainerError> {
+        let mut command = Command::new(&self.docker_cli);
+
+        command.args(&["start", id]);
+
+        let output = command.output().await.map_err(|e| {
+            log::error!("Error running docker start: {e}");
+            DevContainerError::CommandFailed(command.get_program().display().to_string())
+        })?;
+
+        if !output.status.success() {
+            let stderr = String::from_utf8_lossy(&output.stderr);
+            log::error!("Non-success status from docker start: {stderr}");
+            return Err(DevContainerError::CommandFailed(
+                command.get_program().display().to_string(),
+            ));
+        }
+
+        Ok(())
+    }
+
+    async fn find_process_by_filters(
+        &self,
+        filters: Vec<String>,
+    ) -> Result<Option<DockerPs>, DevContainerError> {
+        let command = self.create_docker_query_containers(filters);
+        evaluate_json_command(command).await
+    }
+
+    fn docker_cli(&self) -> String {
+        self.docker_cli.clone()
+    }
+
+    fn supports_compose_buildkit(&self) -> bool {
+        !self.is_podman()
+    }
+}
+
+/// Abstraction over a docker-compatible CLI, so the devcontainer logic can be
+/// exercised against fakes in tests.
+#[async_trait]
+pub(crate) trait DockerClient {
+    async fn inspect(&self, id: &String) -> Result<DockerInspect, DevContainerError>;
+    async fn get_docker_compose_config(
+        &self,
+        config_files: &Vec<PathBuf>,
+    ) -> Result<Option<DockerComposeConfig>, DevContainerError>;
+    async fn docker_compose_build(
+        &self,
+        config_files: &Vec<PathBuf>,
+        project_name: &str,
+    ) -> Result<(), DevContainerError>;
+    async fn run_docker_exec(
+        &self,
+        container_id: &str,
+        remote_folder: &str,
+        user: &str,
+        env: &HashMap<String, String>,
+        inner_command: Command,
+    ) -> Result<(), DevContainerError>;
+    async fn start_container(&self, id: &str) -> Result<(), DevContainerError>;
+    async fn find_process_by_filters(
+        &self,
+        filters: Vec<String>,
+    ) -> Result<Option<DockerPs>, DevContainerError>;
+    fn supports_compose_buildkit(&self) -> bool;
+    /// This operates as an escape hatch for more custom uses of the docker API.
+    /// See DevContainerManifest::create_docker_build as an example
+    fn docker_cli(&self) -> String;
+}
+
+/// Decodes the `devcontainer.metadata` label, which holds a JSON array
+/// serialized as a string inside the label value.
+fn deserialize_metadata<'de, D>(
+    deserializer: D,
+) -> Result<Option<Vec<HashMap<String, serde_json::Value>>>, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let s: Option<String> = Option::deserialize(deserializer)?;
+    match s {
+        Some(json_string) => {
+            // Lenient parsing: the label content is produced by third-party
+            // tooling and may contain JSON extensions.
+            let parsed: Vec<HashMap<String, serde_json::Value>> =
+                serde_json_lenient::from_str(&json_string).map_err(|e| {
+                    log::error!("Error deserializing metadata: {e}");
+                    serde::de::Error::custom(e)
+                })?;
+            Ok(Some(parsed))
+        }
+        None => Ok(None),
+    }
+}
+
+/// Maps a local project directory to its path inside the container, by
+/// finding the mount whose source contains (or equals) `local_dir`.
+///
+/// Returns `ContainerNotValid` if the container has no mounts or none of
+/// them cover the local directory.
+pub(crate) fn get_remote_dir_from_config(
+    config: &DockerInspect,
+    local_dir: String,
+) -> Result<String, DevContainerError> {
+    let local_path = PathBuf::from(&local_dir);
+
+    let Some(mounts) = &config.mounts else {
+        log::error!("No mounts defined for container");
+        return Err(DevContainerError::ContainerNotValid(config.id.clone()));
+    };
+
+    for mount in mounts {
+        // Sometimes docker will mount the local filesystem on host_mnt for system isolation
+        let mount_source = PathBuf::from(&mount.source.trim_start_matches("/host_mnt"));
+        if let Ok(relative_path_to_project) = local_path.strip_prefix(&mount_source) {
+            // strip_prefix also succeeds when the paths are equal, leaving an
+            // empty remainder; return the destination as-is in that case
+            // instead of producing "destination/" with a trailing slash.
+            if relative_path_to_project.as_os_str().is_empty() {
+                return Ok(mount.destination.clone());
+            }
+            let remote_dir = format!(
+                "{}/{}",
+                &mount.destination,
+                relative_path_to_project.display()
+            );
+            return Ok(remote_dir);
+        }
+        if mount.source == local_dir {
+            return Ok(mount.destination.clone());
+        }
+    }
+    log::error!("No mounts to local folder");
+    Err(DevContainerError::ContainerNotValid(config.id.clone()))
+}
+
+#[cfg(test)]
+mod test {
+    use std::{
+        collections::HashMap,
+        ffi::OsStr,
+        process::{ExitStatus, Output},
+    };
+
+    use 
crate::{ + command_json::deserialize_json_output, + devcontainer_json::MountDefinition, + docker::{ + Docker, DockerComposeConfig, DockerComposeService, DockerComposeVolume, DockerInspect, + DockerPs, get_remote_dir_from_config, + }, + }; + + #[test] + fn should_create_docker_inspect_command() { + let docker = Docker::new("docker"); + let given_id = "given_docker_id"; + + let command = docker.create_docker_inspect(given_id); + + assert_eq!( + command.get_args().collect::>(), + vec![ + OsStr::new("inspect"), + OsStr::new("--format={{json . }}"), + OsStr::new(given_id) + ] + ) + } + + #[test] + fn should_deserialize_docker_ps_with_filters() { + // First, deserializes empty + let empty_output = Output { + status: ExitStatus::default(), + stderr: vec![], + stdout: String::from("").into_bytes(), + }; + + let result: Option = deserialize_json_output(empty_output).unwrap(); + + assert!(result.is_none()); + + let full_output = Output { + status: ExitStatus::default(), + stderr: vec![], + stdout: String::from(r#" + { + "Command": "\"/bin/sh -c 'echo Co…\"", + "CreatedAt": "2026-02-04 15:44:21 -0800 PST", + "ID": "abdb6ab59573", + "Image": "mcr.microsoft.com/devcontainers/base:ubuntu", + "Labels": "desktop.docker.io/mounts/0/Source=/somepath/cli,desktop.docker.io/mounts/0/SourceKind=hostFile,desktop.docker.io/mounts/0/Target=/workspaces/cli,desktop.docker.io/ports.scheme=v2,dev.containers.features=common,dev.containers.id=base-ubuntu,dev.containers.release=v0.4.24,dev.containers.source=https://github.com/devcontainers/images,dev.containers.timestamp=Fri, 30 Jan 2026 16:52:34 GMT,dev.containers.variant=noble,devcontainer.config_file=/somepath/cli/.devcontainer/dev_container_2/devcontainer.json,devcontainer.local_folder=/somepath/cli,devcontainer.metadata=[{\"id\":\"ghcr.io/devcontainers/features/common-utils:2\"},{\"id\":\"ghcr.io/devcontainers/features/git:1\",\"customizations\":{\"vscode\":{\"settings\":{\"github.copilot.chat.codeGeneration.instructions\":[{\"text\":\"This 
dev container includes an up-to-date version of Git, built from source as needed, pre-installed and available on the `PATH`.\"}]}}}},{\"remoteUser\":\"vscode\"}],org.opencontainers.image.ref.name=ubuntu,org.opencontainers.image.version=24.04,version=2.1.6", + "LocalVolumes": "0", + "Mounts": "/host_mnt/User…", + "Names": "objective_haslett", + "Networks": "bridge", + "Platform": { + "architecture": "arm64", + "os": "linux" + }, + "Ports": "", + "RunningFor": "47 hours ago", + "Size": "0B", + "State": "running", + "Status": "Up 47 hours" + } + "#).into_bytes(), + }; + + let result: Option = deserialize_json_output(full_output).unwrap(); + + assert!(result.is_some()); + let result = result.unwrap(); + assert_eq!(result.id, "abdb6ab59573".to_string()); + + // Podman variant (Id, not ID) + let full_output = Output { + status: ExitStatus::default(), + stderr: vec![], + stdout: String::from(r#" + { + "Command": "\"/bin/sh -c 'echo Co…\"", + "CreatedAt": "2026-02-04 15:44:21 -0800 PST", + "Id": "abdb6ab59573", + "Image": "mcr.microsoft.com/devcontainers/base:ubuntu", + "Labels": "desktop.docker.io/mounts/0/Source=/somepath/cli,desktop.docker.io/mounts/0/SourceKind=hostFile,desktop.docker.io/mounts/0/Target=/workspaces/cli,desktop.docker.io/ports.scheme=v2,dev.containers.features=common,dev.containers.id=base-ubuntu,dev.containers.release=v0.4.24,dev.containers.source=https://github.com/devcontainers/images,dev.containers.timestamp=Fri, 30 Jan 2026 16:52:34 GMT,dev.containers.variant=noble,devcontainer.config_file=/somepath/cli/.devcontainer/dev_container_2/devcontainer.json,devcontainer.local_folder=/somepath/cli,devcontainer.metadata=[{\"id\":\"ghcr.io/devcontainers/features/common-utils:2\"},{\"id\":\"ghcr.io/devcontainers/features/git:1\",\"customizations\":{\"vscode\":{\"settings\":{\"github.copilot.chat.codeGeneration.instructions\":[{\"text\":\"This dev container includes an up-to-date version of Git, built from source as needed, pre-installed and available on the 
`PATH`.\"}]}}}},{\"remoteUser\":\"vscode\"}],org.opencontainers.image.ref.name=ubuntu,org.opencontainers.image.version=24.04,version=2.1.6", + "LocalVolumes": "0", + "Mounts": "/host_mnt/User…", + "Names": "objective_haslett", + "Networks": "bridge", + "Platform": { + "architecture": "arm64", + "os": "linux" + }, + "Ports": "", + "RunningFor": "47 hours ago", + "Size": "0B", + "State": "running", + "Status": "Up 47 hours" + } + "#).into_bytes(), + }; + + let result: Option = deserialize_json_output(full_output).unwrap(); + + assert!(result.is_some()); + let result = result.unwrap(); + assert_eq!(result.id, "abdb6ab59573".to_string()); + } + + #[test] + fn should_get_target_dir_from_docker_inspect() { + let given_config = r#" + { + "Id": "abdb6ab59573659b11dac9f4973796741be35b642c9b48960709304ce46dbf85", + "Created": "2026-02-04T23:44:21.802688084Z", + "Path": "/bin/sh", + "Args": [ + "-c", + "echo Container started\ntrap \"exit 0\" 15\n\nexec \"$@\"\nwhile sleep 1 & wait $!; do :; done", + "-" + ], + "State": { + "Status": "running", + "Running": true, + "Paused": false, + "Restarting": false, + "OOMKilled": false, + "Dead": false, + "Pid": 23087, + "ExitCode": 0, + "Error": "", + "StartedAt": "2026-02-04T23:44:21.954875084Z", + "FinishedAt": "0001-01-01T00:00:00Z" + }, + "Image": "sha256:3dcb059253b2ebb44de3936620e1cff3dadcd2c1c982d579081ca8128c1eb319", + "ResolvConfPath": "/var/lib/docker/containers/abdb6ab59573659b11dac9f4973796741be35b642c9b48960709304ce46dbf85/resolv.conf", + "HostnamePath": "/var/lib/docker/containers/abdb6ab59573659b11dac9f4973796741be35b642c9b48960709304ce46dbf85/hostname", + "HostsPath": "/var/lib/docker/containers/abdb6ab59573659b11dac9f4973796741be35b642c9b48960709304ce46dbf85/hosts", + "LogPath": "/var/lib/docker/containers/abdb6ab59573659b11dac9f4973796741be35b642c9b48960709304ce46dbf85/abdb6ab59573659b11dac9f4973796741be35b642c9b48960709304ce46dbf85-json.log", + "Name": "/objective_haslett", + "RestartCount": 0, + "Driver": 
"overlayfs", + "Platform": "linux", + "MountLabel": "", + "ProcessLabel": "", + "AppArmorProfile": "", + "ExecIDs": [ + "008019d93df4107fcbba78bcc6e1ed7e121844f36c26aca1a56284655a6adb53" + ], + "HostConfig": { + "Binds": null, + "ContainerIDFile": "", + "LogConfig": { + "Type": "json-file", + "Config": {} + }, + "NetworkMode": "bridge", + "PortBindings": {}, + "RestartPolicy": { + "Name": "no", + "MaximumRetryCount": 0 + }, + "AutoRemove": false, + "VolumeDriver": "", + "VolumesFrom": null, + "ConsoleSize": [ + 0, + 0 + ], + "CapAdd": null, + "CapDrop": null, + "CgroupnsMode": "private", + "Dns": [], + "DnsOptions": [], + "DnsSearch": [], + "ExtraHosts": null, + "GroupAdd": null, + "IpcMode": "private", + "Cgroup": "", + "Links": null, + "OomScoreAdj": 0, + "PidMode": "", + "Privileged": false, + "PublishAllPorts": false, + "ReadonlyRootfs": false, + "SecurityOpt": null, + "UTSMode": "", + "UsernsMode": "", + "ShmSize": 67108864, + "Runtime": "runc", + "Isolation": "", + "CpuShares": 0, + "Memory": 0, + "NanoCpus": 0, + "CgroupParent": "", + "BlkioWeight": 0, + "BlkioWeightDevice": [], + "BlkioDeviceReadBps": [], + "BlkioDeviceWriteBps": [], + "BlkioDeviceReadIOps": [], + "BlkioDeviceWriteIOps": [], + "CpuPeriod": 0, + "CpuQuota": 0, + "CpuRealtimePeriod": 0, + "CpuRealtimeRuntime": 0, + "CpusetCpus": "", + "CpusetMems": "", + "Devices": [], + "DeviceCgroupRules": null, + "DeviceRequests": null, + "MemoryReservation": 0, + "MemorySwap": 0, + "MemorySwappiness": null, + "OomKillDisable": null, + "PidsLimit": null, + "Ulimits": [], + "CpuCount": 0, + "CpuPercent": 0, + "IOMaximumIOps": 0, + "IOMaximumBandwidth": 0, + "Mounts": [ + { + "Type": "bind", + "Source": "/somepath/cli", + "Target": "/workspaces/cli", + "Consistency": "cached" + } + ], + "MaskedPaths": [ + "/proc/asound", + "/proc/acpi", + "/proc/interrupts", + "/proc/kcore", + "/proc/keys", + "/proc/latency_stats", + "/proc/timer_list", + "/proc/timer_stats", + "/proc/sched_debug", + "/proc/scsi", + 
"/sys/firmware", + "/sys/devices/virtual/powercap" + ], + "ReadonlyPaths": [ + "/proc/bus", + "/proc/fs", + "/proc/irq", + "/proc/sys", + "/proc/sysrq-trigger" + ] + }, + "GraphDriver": { + "Data": null, + "Name": "overlayfs" + }, + "Mounts": [ + { + "Type": "bind", + "Source": "/somepath/cli", + "Destination": "/workspaces/cli", + "Mode": "", + "RW": true, + "Propagation": "rprivate" + } + ], + "Config": { + "Hostname": "abdb6ab59573", + "Domainname": "", + "User": "root", + "AttachStdin": false, + "AttachStdout": true, + "AttachStderr": true, + "Tty": false, + "OpenStdin": false, + "StdinOnce": false, + "Env": [ + "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" + ], + "Cmd": [ + "-c", + "echo Container started\ntrap \"exit 0\" 15\n\nexec \"$@\"\nwhile sleep 1 & wait $!; do :; done", + "-" + ], + "Image": "mcr.microsoft.com/devcontainers/base:ubuntu", + "Volumes": null, + "WorkingDir": "", + "Entrypoint": [ + "/bin/sh" + ], + "OnBuild": null, + "Labels": { + "dev.containers.features": "common", + "dev.containers.id": "base-ubuntu", + "dev.containers.release": "v0.4.24", + "dev.containers.source": "https://github.com/devcontainers/images", + "dev.containers.timestamp": "Fri, 30 Jan 2026 16:52:34 GMT", + "dev.containers.variant": "noble", + "devcontainer.config_file": "/somepath/cli/.devcontainer/dev_container_2/devcontainer.json", + "devcontainer.local_folder": "/somepath/cli", + "devcontainer.metadata": "[{\"id\":\"ghcr.io/devcontainers/features/common-utils:2\"},{\"id\":\"ghcr.io/devcontainers/features/git:1\",\"customizations\":{\"vscode\":{\"settings\":{\"github.copilot.chat.codeGeneration.instructions\":[{\"text\":\"This dev container includes an up-to-date version of Git, built from source as needed, pre-installed and available on the `PATH`.\"}]}}}},{\"remoteUser\":\"vscode\"}]", + "org.opencontainers.image.ref.name": "ubuntu", + "org.opencontainers.image.version": "24.04", + "version": "2.1.6" + }, + "StopTimeout": 1 + }, + 
"NetworkSettings": { + "Bridge": "", + "SandboxID": "2a94990d542fe532deb75f1cc67f761df2d669e3b41161f914079e88516cc54b", + "SandboxKey": "/var/run/docker/netns/2a94990d542f", + "Ports": {}, + "HairpinMode": false, + "LinkLocalIPv6Address": "", + "LinkLocalIPv6PrefixLen": 0, + "SecondaryIPAddresses": null, + "SecondaryIPv6Addresses": null, + "EndpointID": "ef5b35a8fbb145565853e1a1d960e737fcc18c20920e96494e4c0cfc55683570", + "Gateway": "172.17.0.1", + "GlobalIPv6Address": "", + "GlobalIPv6PrefixLen": 0, + "IPAddress": "172.17.0.3", + "IPPrefixLen": 16, + "IPv6Gateway": "", + "MacAddress": "", + "Networks": { + "bridge": { + "IPAMConfig": null, + "Links": null, + "Aliases": null, + "MacAddress": "9a:ec:af:8a:ac:81", + "DriverOpts": null, + "GwPriority": 0, + "NetworkID": "51bb8ccc4d1281db44f16d915963fc728619d4a68e2f90e5ea8f1cb94885063e", + "EndpointID": "ef5b35a8fbb145565853e1a1d960e737fcc18c20920e96494e4c0cfc55683570", + "Gateway": "172.17.0.1", + "IPAddress": "172.17.0.3", + "IPPrefixLen": 16, + "IPv6Gateway": "", + "GlobalIPv6Address": "", + "GlobalIPv6PrefixLen": 0, + "DNSNames": null + } + } + }, + "ImageManifestDescriptor": { + "mediaType": "application/vnd.oci.image.manifest.v1+json", + "digest": "sha256:39c3436527190561948236894c55b59fa58aa08d68d8867e703c8d5ab72a3593", + "size": 2195, + "platform": { + "architecture": "arm64", + "os": "linux" + } + } + } + "#; + let config = serde_json_lenient::from_str::(given_config).unwrap(); + + let target_dir = get_remote_dir_from_config(&config, "/somepath/cli".to_string()); + + assert!(target_dir.is_ok()); + assert_eq!(target_dir.unwrap(), "/workspaces/cli/".to_string()); + } + + #[test] + fn should_deserialize_docker_compose_config() { + let given_config = r#" + { + "name": "devcontainer", + "networks": { + "default": { + "name": "devcontainer_default", + "ipam": {} + } + }, + "services": { + "app": { + "command": [ + "sleep", + "infinity" + ], + "depends_on": { + "db": { + "condition": "service_started", + "restart": 
true, + "required": true + } + }, + "entrypoint": null, + "environment": { + "POSTGRES_DB": "postgres", + "POSTGRES_HOSTNAME": "localhost", + "POSTGRES_PASSWORD": "postgres", + "POSTGRES_PORT": "5432", + "POSTGRES_USER": "postgres" + }, + "image": "mcr.microsoft.com/devcontainers/rust:2-1-bookworm", + "network_mode": "service:db", + "volumes": [ + { + "type": "bind", + "source": "/path/to", + "target": "/workspaces", + "bind": { + "create_host_path": true + } + } + ] + }, + "db": { + "command": null, + "entrypoint": null, + "environment": { + "POSTGRES_DB": "postgres", + "POSTGRES_HOSTNAME": "localhost", + "POSTGRES_PASSWORD": "postgres", + "POSTGRES_PORT": "5432", + "POSTGRES_USER": "postgres" + }, + "image": "postgres:14.1", + "networks": { + "default": null + }, + "restart": "unless-stopped", + "volumes": [ + { + "type": "volume", + "source": "postgres-data", + "target": "/var/lib/postgresql/data", + "volume": {} + } + ] + } + }, + "volumes": { + "postgres-data": { + "name": "devcontainer_postgres-data" + } + } + } + "#; + + let docker_compose_config: DockerComposeConfig = + serde_json_lenient::from_str(given_config).unwrap(); + + let expected_config = DockerComposeConfig { + name: Some("devcontainer".to_string()), + services: HashMap::from([ + ( + "app".to_string(), + DockerComposeService { + image: Some( + "mcr.microsoft.com/devcontainers/rust:2-1-bookworm".to_string(), + ), + volumes: vec![MountDefinition { + mount_type: Some("bind".to_string()), + source: "/path/to".to_string(), + target: "/workspaces".to_string(), + }], + network_mode: Some("service:db".to_string()), + ..Default::default() + }, + ), + ( + "db".to_string(), + DockerComposeService { + image: Some("postgres:14.1".to_string()), + volumes: vec![MountDefinition { + mount_type: Some("volume".to_string()), + source: "postgres-data".to_string(), + target: "/var/lib/postgresql/data".to_string(), + }], + ..Default::default() + }, + ), + ]), + volumes: HashMap::from([( + "postgres-data".to_string(), + 
DockerComposeVolume { + name: "devcontainer_postgres-data".to_string(), + }, + )]), + }; + + assert_eq!(docker_compose_config, expected_config); + } +} diff --git a/crates/dev_container/src/features.rs b/crates/dev_container/src/features.rs new file mode 100644 index 0000000000000000000000000000000000000000..5c35b785852735f2c5c5bf8a5b9f73a3300097c5 --- /dev/null +++ b/crates/dev_container/src/features.rs @@ -0,0 +1,254 @@ +use std::{collections::HashMap, path::PathBuf, sync::Arc}; + +use fs::Fs; +use serde::Deserialize; +use serde_json_lenient::Value; + +use crate::{ + devcontainer_api::DevContainerError, + devcontainer_json::{FeatureOptions, MountDefinition}, + safe_id_upper, +}; + +/// Parsed components of an OCI feature reference such as +/// `ghcr.io/devcontainers/features/aws-cli:1`. +/// +/// Mirrors the CLI's `OCIRef` in `containerCollectionsOCI.ts`. +#[derive(Debug, Clone)] +pub(crate) struct OciFeatureRef { + /// Registry hostname, e.g. `ghcr.io` + pub registry: String, + /// Full repository path within the registry, e.g. `devcontainers/features/aws-cli` + pub path: String, + /// Version tag, digest, or `latest` + pub version: String, +} + +/// Minimal representation of a `devcontainer-feature.json` file, used to +/// extract option default values after the feature tarball is downloaded. +/// +/// See: https://containers.dev/implementors/features/#devcontainer-featurejson-properties +#[derive(Debug, Deserialize, Eq, PartialEq, Default)] +#[serde(rename_all = "camelCase")] +pub(crate) struct DevContainerFeatureJson { + #[serde(rename = "id")] + pub(crate) _id: Option, + #[serde(default)] + pub(crate) options: HashMap, + pub(crate) mounts: Option>, + pub(crate) privileged: Option, + pub(crate) entrypoint: Option, + pub(crate) container_env: Option>, +} + +/// A single option definition inside `devcontainer-feature.json`. +/// We only need the `default` field to populate env variables. 
+#[derive(Debug, Deserialize, Eq, PartialEq)] +pub(crate) struct FeatureOptionDefinition { + pub(crate) default: Option, +} + +impl FeatureOptionDefinition { + fn serialize_default(&self) -> Option { + self.default.as_ref().map(|some_value| match some_value { + Value::Bool(b) => b.to_string(), + Value::String(s) => s.to_string(), + Value::Number(n) => n.to_string(), + other => other.to_string(), + }) + } +} + +#[derive(Debug, Eq, PartialEq, Default)] +pub(crate) struct FeatureManifest { + consecutive_id: String, + file_path: PathBuf, + feature_json: DevContainerFeatureJson, +} + +impl FeatureManifest { + pub(crate) fn new( + consecutive_id: String, + file_path: PathBuf, + feature_json: DevContainerFeatureJson, + ) -> Self { + Self { + consecutive_id, + file_path, + feature_json, + } + } + pub(crate) fn container_env(&self) -> HashMap { + self.feature_json.container_env.clone().unwrap_or_default() + } + + pub(crate) fn generate_dockerfile_feature_layer( + &self, + use_buildkit: bool, + dest: &str, + ) -> String { + let id = &self.consecutive_id; + if use_buildkit { + format!( + r#" +RUN --mount=type=bind,from=dev_containers_feature_content_source,source=./{id},target=/tmp/build-features-src/{id} \ +cp -ar /tmp/build-features-src/{id} {dest} \ +&& chmod -R 0755 {dest}/{id} \ +&& cd {dest}/{id} \ +&& chmod +x ./devcontainer-features-install.sh \ +&& ./devcontainer-features-install.sh \ +&& rm -rf {dest}/{id} +"#, + ) + } else { + let source = format!("/tmp/build-features/{id}"); + let full_dest = format!("{dest}/{id}"); + format!( + r#" +COPY --chown=root:root --from=dev_containers_feature_content_source {source} {full_dest} +RUN chmod -R 0755 {full_dest} \ +&& cd {full_dest} \ +&& chmod +x ./devcontainer-features-install.sh \ +&& ./devcontainer-features-install.sh +"# + ) + } + } + + pub(crate) fn generate_dockerfile_env(&self) -> String { + let mut layer = "".to_string(); + let env = self.container_env(); + let mut env: Vec<(&String, &String)> = 
env.iter().collect(); + env.sort(); + + for (key, value) in env { + layer = format!("{layer}ENV {key}={value}\n") + } + layer + } + + /// Merges user options from devcontainer.json with default options defined in this feature manifest + pub(crate) fn generate_merged_env(&self, options: &FeatureOptions) -> HashMap { + let mut merged: HashMap = self + .feature_json + .options + .iter() + .filter_map(|(k, v)| { + v.serialize_default() + .map(|v_some| (safe_id_upper(k), v_some)) + }) + .collect(); + + match options { + FeatureOptions::Bool(_) => {} + FeatureOptions::String(version) => { + merged.insert("VERSION".to_string(), version.clone()); + } + FeatureOptions::Options(map) => { + for (key, value) in map { + merged.insert(safe_id_upper(key), value.to_string()); + } + } + } + merged + } + + pub(crate) async fn write_feature_env( + &self, + fs: &Arc, + options: &FeatureOptions, + ) -> Result { + let merged_env = self.generate_merged_env(options); + + let mut env_vars: Vec<(&String, &String)> = merged_env.iter().collect(); + env_vars.sort(); + + let env_file_content = env_vars + .iter() + .fold("".to_string(), |acc, (k, v)| format!("{acc}{}={}\n", k, v)); + + fs.write( + &self.file_path.join("devcontainer-features.env"), + env_file_content.as_bytes(), + ) + .await + .map_err(|e| { + log::error!("error writing devcontainer feature environment: {e}"); + DevContainerError::FilesystemError + })?; + + Ok(env_file_content) + } + + pub(crate) fn mounts(&self) -> Vec { + if let Some(mounts) = &self.feature_json.mounts { + mounts.clone() + } else { + vec![] + } + } + + pub(crate) fn privileged(&self) -> bool { + self.feature_json.privileged.unwrap_or(false) + } + + pub(crate) fn entrypoint(&self) -> Option { + self.feature_json.entrypoint.clone() + } + + pub(crate) fn file_path(&self) -> PathBuf { + self.file_path.clone() + } +} + +/// Parses an OCI feature reference string into its components. 
+/// +/// Handles formats like: +/// - `ghcr.io/devcontainers/features/aws-cli:1` +/// - `ghcr.io/user/repo/go` (implicitly `:latest`) +/// - `ghcr.io/devcontainers/features/rust@sha256:abc123` +/// +/// Returns `None` for local paths (`./…`) and direct tarball URIs (`https://…`). +pub(crate) fn parse_oci_feature_ref(input: &str) -> Option { + if input.starts_with('.') + || input.starts_with('/') + || input.starts_with("https://") + || input.starts_with("http://") + { + return None; + } + + let input_lower = input.to_lowercase(); + + let (resource, version) = if let Some(at_idx) = input_lower.rfind('@') { + // Digest-based: ghcr.io/foo/bar@sha256:abc + ( + input_lower[..at_idx].to_string(), + input_lower[at_idx + 1..].to_string(), + ) + } else { + let last_slash = input_lower.rfind('/'); + let last_colon = input_lower.rfind(':'); + match (last_slash, last_colon) { + (Some(slash), Some(colon)) if colon > slash => ( + input_lower[..colon].to_string(), + input_lower[colon + 1..].to_string(), + ), + _ => (input_lower, "latest".to_string()), + } + }; + + let parts: Vec<&str> = resource.split('/').collect(); + if parts.len() < 3 { + return None; + } + + let registry = parts[0].to_string(); + let path = parts[1..].join("/"); + + Some(OciFeatureRef { + registry, + path, + version, + }) +} diff --git a/crates/dev_container/src/lib.rs b/crates/dev_container/src/lib.rs index 7fcacf8004bef6c4c26e2751df6f26c02b4629ce..601394c77760bc79587f5dad3cb7e0bf8a310af3 100644 --- a/crates/dev_container/src/lib.rs +++ b/crates/dev_container/src/lib.rs @@ -1,11 +1,14 @@ use std::path::Path; +use fs::Fs; use gpui::AppContext; use gpui::Entity; use gpui::Task; +use gpui::WeakEntity; use http_client::anyhow; use picker::Picker; use picker::PickerDelegate; +use project::ProjectEnvironment; use settings::RegisterSetting; use settings::Settings; use std::collections::HashMap; @@ -25,8 +28,9 @@ use ui::Tooltip; use ui::h_flex; use ui::rems_from_px; use ui::v_flex; +use util::shell::Shell; -use 
gpui::{Action, DismissEvent, EventEmitter, FocusHandle, Focusable, RenderOnce, WeakEntity}; +use gpui::{Action, DismissEvent, EventEmitter, FocusHandle, Focusable, RenderOnce}; use serde::Deserialize; use ui::{ AnyElement, App, Color, CommonAnimationExt, Context, Headline, HeadlineSize, Icon, IconName, @@ -37,40 +41,94 @@ use util::ResultExt; use util::rel_path::RelPath; use workspace::{ModalView, Workspace, with_active_or_new_workspace}; -use futures::AsyncReadExt; -use http::Request; -use http_client::{AsyncBody, HttpClient}; +use http_client::HttpClient; +mod command_json; mod devcontainer_api; +mod devcontainer_json; +mod devcontainer_manifest; +mod docker; +mod features; +mod oci; -use devcontainer_api::ensure_devcontainer_cli; -use devcontainer_api::read_devcontainer_configuration; +use devcontainer_api::read_default_devcontainer_configuration; use crate::devcontainer_api::DevContainerError; -use crate::devcontainer_api::apply_dev_container_template; +use crate::devcontainer_api::apply_devcontainer_template; +use crate::oci::get_deserializable_oci_blob; +use crate::oci::get_latest_oci_manifest; +use crate::oci::get_oci_token; pub use devcontainer_api::{ DevContainerConfig, find_configs_in_snapshot, find_devcontainer_configs, start_dev_container_with_config, }; +/// Converts a string to a safe environment variable name. +/// +/// Mirrors the CLI's `getSafeId` in `containerFeatures.ts`: +/// replaces non-alphanumeric/underscore characters with `_`, replaces a +/// leading sequence of digits/underscores with a single `_`, and uppercases. 
+pub(crate) fn safe_id_lower(input: &str) -> String { + get_safe_id(input).to_lowercase() +} +pub(crate) fn safe_id_upper(input: &str) -> String { + get_safe_id(input).to_uppercase() +} +fn get_safe_id(input: &str) -> String { + let replaced: String = input + .chars() + .map(|c| { + if c.is_alphanumeric() || c == '_' { + c + } else { + '_' + } + }) + .collect(); + let without_leading = replaced.trim_start_matches(|c: char| c.is_ascii_digit() || c == '_'); + let result = if without_leading.len() < replaced.len() { + format!("_{}", without_leading) + } else { + replaced + }; + result +} + pub struct DevContainerContext { pub project_directory: Arc, pub use_podman: bool, - pub node_runtime: node_runtime::NodeRuntime, + pub fs: Arc, + pub http_client: Arc, + pub environment: WeakEntity, } impl DevContainerContext { pub fn from_workspace(workspace: &Workspace, cx: &App) -> Option { let project_directory = workspace.project().read(cx).active_project_directory(cx)?; let use_podman = DevContainerSettings::get_global(cx).use_podman; - let node_runtime = workspace.app_state().node_runtime.clone(); + let http_client = cx.http_client().clone(); + let fs = workspace.app_state().fs.clone(); + let environment = workspace.project().read(cx).environment().downgrade(); Some(Self { project_directory, use_podman, - node_runtime, + fs, + http_client, + environment, }) } + + pub async fn environment(&self, cx: &mut impl AppContext) -> HashMap { + let Ok(task) = self.environment.update(cx, |this, cx| { + this.local_directory_environment(&Shell::System, self.project_directory.clone(), cx) + }) else { + return HashMap::default(); + }; + task.await + .map(|env| env.into_iter().collect::>()) + .unwrap_or_default() + } } #[derive(RegisterSetting)] @@ -1043,7 +1101,7 @@ impl StatefulModal for DevContainerModal { let Ok(client) = cx.update(|_, cx| cx.http_client()) else { return; }; - match get_templates(client).await { + match get_ghcr_templates(client).await { Ok(templates) => { let message = 
DevContainerMessage::TemplatesRetrieved(templates.templates); @@ -1209,7 +1267,7 @@ impl StatefulModal for DevContainerModal { let Ok(client) = cx.update(|_, cx| cx.http_client()) else { return; }; - let Some(features) = get_features(client).await.log_err() else { + let Some(features) = get_ghcr_features(client).await.log_err() else { return; }; let message = DevContainerMessage::FeaturesRetrieved(features.features); @@ -1328,17 +1386,7 @@ trait StatefulModal: ModalView + EventEmitter + Render { } } -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -struct GithubTokenResponse { - token: String, -} - -fn ghcr_url() -> &'static str { - "https://ghcr.io" -} - -fn ghcr_domain() -> &'static str { +fn ghcr_registry() -> &'static str { "ghcr.io" } @@ -1350,11 +1398,6 @@ fn devcontainer_features_repository() -> &'static str { "devcontainers/features" } -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -struct ManifestLayer { - digest: String, -} #[derive(Debug, Deserialize, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] struct TemplateOptions { @@ -1409,12 +1452,6 @@ impl TemplateOptions { } } -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -struct DockerManifestsResponse { - layers: Vec, -} - #[derive(Debug, Deserialize, Clone, PartialEq, Eq, Hash)] #[serde(rename_all = "camelCase")] struct DevContainerFeature { @@ -1480,23 +1517,11 @@ fn dispatch_apply_templates( return; }; - let Ok(cli) = ensure_devcontainer_cli(&context.node_runtime).await else { - this.update_in(cx, |this, window, cx| { - this.accept_message( - DevContainerMessage::FailedToWriteTemplate( - DevContainerError::DevContainerCliNotAvailable, - ), - window, - cx, - ); - }) - .log_err(); - return; - }; + let environment = context.environment(cx).await; { if check_for_existing - && read_devcontainer_configuration(&context, &cli, None) + && read_default_devcontainer_configuration(&context, environment) .await .is_ok() { @@ -1511,12 +1536,17 @@ fn 
dispatch_apply_templates( return; } - let files = match apply_dev_container_template( + let worktree = workspace.read_with(cx, |workspace, cx| { + workspace.project().read(cx).worktree_for_id(tree_id, cx) + }); + + let files = match apply_devcontainer_template( + worktree.unwrap(), &template_entry.template, &template_entry.options_selected, &template_entry.features_selected, &context, - &cli, + cx, ) .await { @@ -1524,7 +1554,9 @@ fn dispatch_apply_templates( Err(e) => { this.update_in(cx, |this, window, cx| { this.accept_message( - DevContainerMessage::FailedToWriteTemplate(e), + DevContainerMessage::FailedToWriteTemplate( + DevContainerError::DevContainerTemplateApplyFailed(e.to_string()), + ), window, cx, ); @@ -1534,10 +1566,9 @@ fn dispatch_apply_templates( } }; - if files - .files - .contains(&"./.devcontainer/devcontainer.json".to_string()) - { + if files.project_files.contains(&Arc::from( + RelPath::unix(".devcontainer/devcontainer.json").unwrap(), + )) { let Some(workspace_task) = workspace .update_in(cx, |workspace, window, cx| { let Ok(path) = RelPath::unix(".devcontainer/devcontainer.json") else { @@ -1563,250 +1594,90 @@ fn dispatch_apply_templates( .detach(); } -async fn get_templates( +async fn get_ghcr_templates( client: Arc, ) -> Result { - let token = get_ghcr_token(&client).await?; - let manifest = get_latest_manifest(&token.token, &client).await?; - - let mut template_response = - get_devcontainer_templates(&token.token, &manifest.layers[0].digest, &client).await?; + let token = get_oci_token( + ghcr_registry(), + devcontainer_templates_repository(), + &client, + ) + .await?; + let manifest = get_latest_oci_manifest( + &token.token, + ghcr_registry(), + devcontainer_templates_repository(), + &client, + None, + ) + .await?; + + let mut template_response: DevContainerTemplatesResponse = get_deserializable_oci_blob( + &token.token, + ghcr_registry(), + devcontainer_templates_repository(), + &manifest.layers[0].digest, + &client, + ) + .await?; for 
template in &mut template_response.templates { template.source_repository = Some(format!( "{}/{}", - ghcr_domain(), + ghcr_registry(), devcontainer_templates_repository() )); } Ok(template_response) } -async fn get_features(client: Arc) -> Result { - let token = get_ghcr_token(&client).await?; - let manifest = get_latest_feature_manifest(&token.token, &client).await?; +async fn get_ghcr_features( + client: Arc, +) -> Result { + let token = get_oci_token( + ghcr_registry(), + devcontainer_templates_repository(), + &client, + ) + .await?; - let mut features_response = - get_devcontainer_features(&token.token, &manifest.layers[0].digest, &client).await?; + let manifest = get_latest_oci_manifest( + &token.token, + ghcr_registry(), + devcontainer_features_repository(), + &client, + None, + ) + .await?; + + let mut features_response: DevContainerFeaturesResponse = get_deserializable_oci_blob( + &token.token, + ghcr_registry(), + devcontainer_features_repository(), + &manifest.layers[0].digest, + &client, + ) + .await?; for feature in &mut features_response.features { feature.source_repository = Some(format!( "{}/{}", - ghcr_domain(), + ghcr_registry(), devcontainer_features_repository() )); } Ok(features_response) } -async fn get_ghcr_token(client: &Arc) -> Result { - let url = format!( - "{}/token?service=ghcr.io&scope=repository:{}:pull", - ghcr_url(), - devcontainer_templates_repository() - ); - get_deserialized_response("", &url, client).await -} - -async fn get_latest_feature_manifest( - token: &str, - client: &Arc, -) -> Result { - let url = format!( - "{}/v2/{}/manifests/latest", - ghcr_url(), - devcontainer_features_repository() - ); - get_deserialized_response(token, &url, client).await -} - -async fn get_latest_manifest( - token: &str, - client: &Arc, -) -> Result { - let url = format!( - "{}/v2/{}/manifests/latest", - ghcr_url(), - devcontainer_templates_repository() - ); - get_deserialized_response(token, &url, client).await -} - -async fn 
get_devcontainer_features( - token: &str, - blob_digest: &str, - client: &Arc, -) -> Result { - let url = format!( - "{}/v2/{}/blobs/{}", - ghcr_url(), - devcontainer_features_repository(), - blob_digest - ); - get_deserialized_response(token, &url, client).await -} - -async fn get_devcontainer_templates( - token: &str, - blob_digest: &str, - client: &Arc, -) -> Result { - let url = format!( - "{}/v2/{}/blobs/{}", - ghcr_url(), - devcontainer_templates_repository(), - blob_digest - ); - get_deserialized_response(token, &url, client).await -} - -async fn get_deserialized_response( - token: &str, - url: &str, - client: &Arc, -) -> Result -where - T: for<'de> Deserialize<'de>, -{ - let request = match Request::get(url) - .header("Authorization", format!("Bearer {}", token)) - .header("Accept", "application/vnd.oci.image.manifest.v1+json") - .body(AsyncBody::default()) - { - Ok(request) => request, - Err(e) => return Err(format!("Failed to create request: {}", e)), - }; - let response = match client.send(request).await { - Ok(response) => response, - Err(e) => { - return Err(format!("Failed to send request: {}", e)); - } - }; - - let mut output = String::new(); - - if let Err(e) = response.into_body().read_to_string(&mut output).await { - return Err(format!("Failed to read response body: {}", e)); - }; - - match serde_json::from_str(&output) { - Ok(response) => Ok(response), - Err(e) => Err(format!("Failed to deserialize response: {}", e)), - } -} - #[cfg(test)] mod tests { - use gpui::TestAppContext; use http_client::{FakeHttpClient, anyhow}; use crate::{ - GithubTokenResponse, devcontainer_templates_repository, get_deserialized_response, - get_devcontainer_templates, get_ghcr_token, get_latest_manifest, + DevContainerTemplatesResponse, devcontainer_templates_repository, + get_deserializable_oci_blob, ghcr_registry, }; - #[gpui::test] - async fn test_get_deserialized_response(_cx: &mut TestAppContext) { - let client = FakeHttpClient::create(|_request| async move { - 
Ok(http_client::Response::builder() - .status(200) - .body("{ \"token\": \"thisisatoken\" }".into()) - .unwrap()) - }); - - let response = - get_deserialized_response::("", "https://ghcr.io/token", &client) - .await; - assert!(response.is_ok()); - assert_eq!(response.unwrap().token, "thisisatoken".to_string()) - } - - #[gpui::test] - async fn test_get_ghcr_token() { - let client = FakeHttpClient::create(|request| async move { - let host = request.uri().host(); - if host.is_none() || host.unwrap() != "ghcr.io" { - return Err(anyhow!("Unexpected host: {}", host.unwrap_or_default())); - } - let path = request.uri().path(); - if path != "/token" { - return Err(anyhow!("Unexpected path: {}", path)); - } - let query = request.uri().query(); - if query.is_none() - || query.unwrap() - != format!( - "service=ghcr.io&scope=repository:{}:pull", - devcontainer_templates_repository() - ) - { - return Err(anyhow!("Unexpected query: {}", query.unwrap_or_default())); - } - Ok(http_client::Response::builder() - .status(200) - .body("{ \"token\": \"thisisatoken\" }".into()) - .unwrap()) - }); - - let response = get_ghcr_token(&client).await; - assert!(response.is_ok()); - assert_eq!(response.unwrap().token, "thisisatoken".to_string()); - } - - #[gpui::test] - async fn test_get_latest_manifests() { - let client = FakeHttpClient::create(|request| async move { - let host = request.uri().host(); - if host.is_none() || host.unwrap() != "ghcr.io" { - return Err(anyhow!("Unexpected host: {}", host.unwrap_or_default())); - } - let path = request.uri().path(); - if path - != format!( - "/v2/{}/manifests/latest", - devcontainer_templates_repository() - ) - { - return Err(anyhow!("Unexpected path: {}", path)); - } - Ok(http_client::Response::builder() - .status(200) - .body("{ - \"schemaVersion\": 2, - \"mediaType\": \"application/vnd.oci.image.manifest.v1+json\", - \"config\": { - \"mediaType\": \"application/vnd.devcontainers\", - \"digest\": 
\"sha256:44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a\", - \"size\": 2 - }, - \"layers\": [ - { - \"mediaType\": \"application/vnd.devcontainers.collection.layer.v1+json\", - \"digest\": \"sha256:035e9c9fd9bd61f6d3965fa4bf11f3ddfd2490a8cf324f152c13cc3724d67d09\", - \"size\": 65235, - \"annotations\": { - \"org.opencontainers.image.title\": \"devcontainer-collection.json\" - } - } - ], - \"annotations\": { - \"com.github.package.type\": \"devcontainer_collection\" - } - }".into()) - .unwrap()) - }); - - let response = get_latest_manifest("", &client).await; - assert!(response.is_ok()); - let response = response.unwrap(); - - assert_eq!(response.layers.len(), 1); - assert_eq!( - response.layers[0].digest, - "sha256:035e9c9fd9bd61f6d3965fa4bf11f3ddfd2490a8cf324f152c13cc3724d67d09" - ); - } - #[gpui::test] async fn test_get_devcontainer_templates() { let client = FakeHttpClient::create(|request| async move { @@ -1872,8 +1743,10 @@ mod tests { }".into()) .unwrap()) }); - let response = get_devcontainer_templates( + let response: Result = get_deserializable_oci_blob( "", + ghcr_registry(), + devcontainer_templates_repository(), "sha256:035e9c9fd9bd61f6d3965fa4bf11f3ddfd2490a8cf324f152c13cc3724d67d09", &client, ) diff --git a/crates/dev_container/src/oci.rs b/crates/dev_container/src/oci.rs new file mode 100644 index 0000000000000000000000000000000000000000..483f706edb919fc4031577caddd7d33558112532 --- /dev/null +++ b/crates/dev_container/src/oci.rs @@ -0,0 +1,470 @@ +use std::{path::PathBuf, pin::Pin, sync::Arc}; + +use fs::Fs; +use futures::{AsyncRead, AsyncReadExt, io::BufReader}; +use http::Request; +use http_client::{AsyncBody, HttpClient}; +use serde::{Deserialize, Serialize}; + +use crate::devcontainer_api::DevContainerError; + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct TokenResponse { + pub(crate) token: String, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] 
+pub(crate) struct DockerManifestsResponse { + pub(crate) layers: Vec, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub(crate) struct ManifestLayer { + pub(crate) digest: String, +} + +/// Gets a bearer token for pulling from a container registry repository. +/// +/// This uses the registry's `/token` endpoint directly, which works for +/// `ghcr.io` and other registries that follow the same convention. For +/// registries that require a full `WWW-Authenticate` negotiation flow this +/// would need to be extended. +pub(crate) async fn get_oci_token( + registry: &str, + repository_path: &str, + client: &Arc, +) -> Result { + let url = format!( + "https://{registry}/token?service={registry}&scope=repository:{repository_path}:pull", + ); + log::debug!("Fetching OCI token from: {}", url); + get_deserialized_response("", &url, client) + .await + .map_err(|e| { + log::error!("OCI token request failed for {}: {e}", url); + e + }) +} + +pub(crate) async fn get_latest_oci_manifest( + token: &str, + registry: &str, + repository_path: &str, + client: &Arc, + id: Option<&str>, +) -> Result { + get_oci_manifest(registry, repository_path, token, client, "latest", id).await +} + +pub(crate) async fn get_oci_manifest( + registry: &str, + repository_path: &str, + token: &str, + client: &Arc, + version: &str, + id: Option<&str>, +) -> Result { + let url = match id { + Some(id) => format!("https://{registry}/v2/{repository_path}/{id}/manifests/{version}"), + None => format!("https://{registry}/v2/{repository_path}/manifests/{version}"), + }; + + get_deserialized_response(token, &url, client).await +} + +pub(crate) async fn get_deserializable_oci_blob( + token: &str, + registry: &str, + repository_path: &str, + blob_digest: &str, + client: &Arc, +) -> Result +where + T: for<'a> Deserialize<'a>, +{ + let url = format!("https://{registry}/v2/{repository_path}/blobs/{blob_digest}"); + get_deserialized_response(token, &url, client).await +} + +pub(crate) async 
fn download_oci_tarball( + token: &str, + registry: &str, + repository_path: &str, + blob_digest: &str, + accept_header: &str, + dest_dir: &PathBuf, + client: &Arc, + fs: &Arc, + id: Option<&str>, +) -> Result<(), DevContainerError> { + let url = match id { + Some(id) => format!("https://{registry}/v2/{repository_path}/{id}/blobs/{blob_digest}"), + None => format!("https://{registry}/v2/{repository_path}/blobs/{blob_digest}"), + }; + + let request = Request::get(&url) + .header("Authorization", format!("Bearer {}", token)) + .header("Accept", accept_header) + .body(AsyncBody::default()) + .map_err(|e| { + log::error!("Failed to create blob request: {e}"); + DevContainerError::ResourceFetchFailed + })?; + + let mut response = client.send(request).await.map_err(|e| { + log::error!("Failed to download feature blob: {e}"); + DevContainerError::ResourceFetchFailed + })?; + let status = response.status(); + + let body = BufReader::new(response.body_mut()); + + if !status.is_success() { + let body_text = String::from_utf8_lossy(body.buffer()); + log::error!( + "Feature blob download returned HTTP {}: {}", + status.as_u16(), + body_text, + ); + return Err(DevContainerError::ResourceFetchFailed); + } + + futures::pin_mut!(body); + let body: Pin<&mut (dyn AsyncRead + Send)> = body; + let archive = async_tar::Archive::new(body); + fs.extract_tar_file(dest_dir, archive).await.map_err(|e| { + log::error!("Failed to extract feature tarball: {e}"); + DevContainerError::FilesystemError + })?; + + Ok(()) +} + +pub(crate) async fn get_deserialized_response( + token: &str, + url: &str, + client: &Arc, +) -> Result +where + T: for<'de> Deserialize<'de>, +{ + let request = match Request::get(url) + .header("Authorization", format!("Bearer {}", token)) + .header("Accept", "application/vnd.oci.image.manifest.v1+json") + .body(AsyncBody::default()) + { + Ok(request) => request, + Err(e) => return Err(format!("Failed to create request: {}", e)), + }; + let response = match 
client.send(request).await { + Ok(response) => response, + Err(e) => { + return Err(format!("Failed to send request to {}: {}", url, e)); + } + }; + + let status = response.status(); + let mut output = String::new(); + + if let Err(e) = response.into_body().read_to_string(&mut output).await { + return Err(format!("Failed to read response body from {}: {}", url, e)); + }; + + if !status.is_success() { + return Err(format!( + "OCI request to {} returned HTTP {}: {}", + url, + status.as_u16(), + &output[..output.len().min(500)], + )); + } + + match serde_json_lenient::from_str(&output) { + Ok(response) => Ok(response), + Err(e) => Err(format!( + "Failed to deserialize response from {}: {} (body: {})", + url, + e, + &output[..output.len().min(500)], + )), + } +} + +#[cfg(test)] +mod test { + use std::{path::PathBuf, sync::Arc}; + + use fs::{FakeFs, Fs}; + use gpui::TestAppContext; + use http_client::{FakeHttpClient, anyhow}; + use serde::Deserialize; + + use crate::oci::{ + TokenResponse, download_oci_tarball, get_deserializable_oci_blob, + get_deserialized_response, get_latest_oci_manifest, get_oci_token, + }; + + async fn build_test_tarball() -> Vec { + let devcontainer_json = concat!( + "// For format details, see https://aka.ms/devcontainer.json. For config options, see the\n", + "// README at: https://github.com/devcontainers/templates/tree/main/src/alpine\n", + "{\n", + "\t\"name\": \"Alpine\",\n", + "\t// Or use a Dockerfile or Docker Compose file. 
More info: https://containers.dev/guide/dockerfile\n", + "\t\"image\": \"mcr.microsoft.com/devcontainers/base:alpine-${templateOption:imageVariant}\"\n", + "}\n", + ); + + let dependabot_yml = concat!( + "version: 2\n", + "updates:\n", + " - package-ecosystem: \"devcontainers\"\n", + " directory: \"/\"\n", + " schedule:\n", + " interval: weekly\n", + ); + + let buffer = futures::io::Cursor::new(Vec::new()); + let mut builder = async_tar::Builder::new(buffer); + + let files: &[(&str, &[u8], u32)] = &[ + ( + ".devcontainer/devcontainer.json", + devcontainer_json.as_bytes(), + 0o644, + ), + (".github/dependabot.yml", dependabot_yml.as_bytes(), 0o644), + ("NOTES.md", b"Some notes", 0o644), + ("README.md", b"# Alpine\n", 0o644), + ]; + + for (path, data, mode) in files { + let mut header = async_tar::Header::new_gnu(); + header.set_size(data.len() as u64); + header.set_mode(*mode); + header.set_entry_type(async_tar::EntryType::Regular); + header.set_cksum(); + builder.append_data(&mut header, path, *data).await.unwrap(); + } + + let buffer = builder.into_inner().await.unwrap(); + buffer.into_inner() + } + fn test_oci_registry() -> &'static str { + "ghcr.io" + } + fn test_oci_repository() -> &'static str { + "repository" + } + + #[gpui::test] + async fn test_get_deserialized_response(_cx: &mut TestAppContext) { + let client = FakeHttpClient::create(|_request| async move { + Ok(http_client::Response::builder() + .status(200) + .body("{ \"token\": \"thisisatoken\" }".into()) + .unwrap()) + }); + + let response = + get_deserialized_response::("", "https://ghcr.io/token", &client).await; + assert!(response.is_ok()); + assert_eq!(response.unwrap().token, "thisisatoken".to_string()) + } + + #[gpui::test] + async fn test_get_oci_token() { + let client = FakeHttpClient::create(|request| async move { + let host = request.uri().host(); + if host.is_none() || host.unwrap() != test_oci_registry() { + return Err(anyhow!("Unexpected host: {}", host.unwrap_or_default())); + } + let 
path = request.uri().path(); + if path != "/token" { + return Err(anyhow!("Unexpected path: {}", path)); + } + let query = request.uri().query(); + if query.is_none() + || query.unwrap() + != format!( + "service=ghcr.io&scope=repository:{}:pull", + test_oci_repository() + ) + { + return Err(anyhow!("Unexpected query: {}", query.unwrap_or_default())); + } + Ok(http_client::Response::builder() + .status(200) + .body("{ \"token\": \"thisisatoken\" }".into()) + .unwrap()) + }); + + let response = get_oci_token(test_oci_registry(), test_oci_repository(), &client).await; + + assert!(response.is_ok()); + assert_eq!(response.unwrap().token, "thisisatoken".to_string()); + } + + #[gpui::test] + async fn test_get_latest_manifests() { + let client = FakeHttpClient::create(|request| async move { + let host = request.uri().host(); + if host.is_none() || host.unwrap() != test_oci_registry() { + return Err(anyhow!("Unexpected host: {}", host.unwrap_or_default())); + } + let path = request.uri().path(); + if path != format!("/v2/{}/manifests/latest", test_oci_repository()) { + return Err(anyhow!("Unexpected path: {}", path)); + } + Ok(http_client::Response::builder() + .status(200) + .body("{ + \"schemaVersion\": 2, + \"mediaType\": \"application/vnd.oci.image.manifest.v1+json\", + \"config\": { + \"mediaType\": \"application/vnd.devcontainers\", + \"digest\": \"sha256:44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a\", + \"size\": 2 + }, + \"layers\": [ + { + \"mediaType\": \"application/vnd.devcontainers.collection.layer.v1+json\", + \"digest\": \"sha256:035e9c9fd9bd61f6d3965fa4bf11f3ddfd2490a8cf324f152c13cc3724d67d09\", + \"size\": 65235, + \"annotations\": { + \"org.opencontainers.image.title\": \"devcontainer-collection.json\" + } + } + ], + \"annotations\": { + \"com.github.package.type\": \"devcontainer_collection\" + } + }".into()) + .unwrap()) + }); + + let response = get_latest_oci_manifest( + "", + test_oci_registry(), + test_oci_repository(), + &client, 
+ None, + ) + .await; + assert!(response.is_ok()); + let response = response.unwrap(); + + assert_eq!(response.layers.len(), 1); + assert_eq!( + response.layers[0].digest, + "sha256:035e9c9fd9bd61f6d3965fa4bf11f3ddfd2490a8cf324f152c13cc3724d67d09" + ); + } + + #[gpui::test] + async fn test_get_oci_blob() { + #[derive(Debug, Deserialize)] + struct DeserializableTestStruct { + foo: String, + } + + let client = FakeHttpClient::create(|request| async move { + let host = request.uri().host(); + if host.is_none() || host.unwrap() != test_oci_registry() { + return Err(anyhow!("Unexpected host: {}", host.unwrap_or_default())); + } + let path = request.uri().path(); + if path != format!("/v2/{}/blobs/blobdigest", test_oci_repository()) { + return Err(anyhow!("Unexpected path: {}", path)); + } + Ok(http_client::Response::builder() + .status(200) + .body( + r#" + { + "foo": "bar" + } + "# + .into(), + ) + .unwrap()) + }); + + let response: Result = get_deserializable_oci_blob( + "", + test_oci_registry(), + test_oci_repository(), + "blobdigest", + &client, + ) + .await; + assert!(response.is_ok()); + let response = response.unwrap(); + + assert_eq!(response.foo, "bar".to_string()); + } + + #[gpui::test] + async fn test_download_oci_tarball(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + let fs: Arc = FakeFs::new(cx.executor()); + + let destination_dir = PathBuf::from("/tmp/extracted"); + fs.create_dir(&destination_dir).await.unwrap(); + + let tarball_bytes = build_test_tarball().await; + let tarball = std::sync::Arc::new(tarball_bytes); + + let client = FakeHttpClient::create(move |request| { + let tarball = tarball.clone(); + async move { + let host = request.uri().host(); + if host.is_none() || host.unwrap() != test_oci_registry() { + return Err(anyhow!("Unexpected host: {}", host.unwrap_or_default())); + } + let path = request.uri().path(); + if path != format!("/v2/{}/blobs/blobdigest", test_oci_repository()) { + return Err(anyhow!("Unexpected path: {}", 
path)); + } + Ok(http_client::Response::builder() + .status(200) + .body(tarball.to_vec().into()) + .unwrap()) + } + }); + + let response = download_oci_tarball( + "", + test_oci_registry(), + test_oci_repository(), + "blobdigest", + "header", + &destination_dir, + &client, + &fs, + None, + ) + .await; + assert!(response.is_ok()); + + let expected_devcontainer_json = concat!( + "// For format details, see https://aka.ms/devcontainer.json. For config options, see the\n", + "// README at: https://github.com/devcontainers/templates/tree/main/src/alpine\n", + "{\n", + "\t\"name\": \"Alpine\",\n", + "\t// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile\n", + "\t\"image\": \"mcr.microsoft.com/devcontainers/base:alpine-${templateOption:imageVariant}\"\n", + "}\n", + ); + + assert_eq!( + fs.load(&destination_dir.join(".devcontainer/devcontainer.json")) + .await + .unwrap(), + expected_devcontainer_json + ) + } +} diff --git a/crates/diagnostics/src/buffer_diagnostics.rs b/crates/diagnostics/src/buffer_diagnostics.rs index 56924585011921ddebc96b971fd15c3abd151a85..040aeae4742e18449523cbc255b4370814c1f8d7 100644 --- a/crates/diagnostics/src/buffer_diagnostics.rs +++ b/crates/diagnostics/src/buffer_diagnostics.rs @@ -24,6 +24,7 @@ use settings::Settings; use std::{ any::{Any, TypeId}, cmp::{self, Ordering}, + ops::Range, sync::Arc, }; use text::{Anchor, BufferSnapshot, OffsetRangeExt}; @@ -480,25 +481,35 @@ impl BufferDiagnosticsEditor { }) }); - let (anchor_ranges, _) = - buffer_diagnostics_editor - .multibuffer - .update(cx, |multibuffer, cx| { - let excerpt_ranges = excerpt_ranges - .into_iter() - .map(|range| ExcerptRange { - context: range.context.to_point(&buffer_snapshot), - primary: range.primary.to_point(&buffer_snapshot), - }) - .collect(); - multibuffer.set_excerpt_ranges_for_path( - PathKey::for_buffer(&buffer, cx), - buffer.clone(), - &buffer_snapshot, - excerpt_ranges, - cx, - ) - }); + let excerpt_ranges: Vec<_> = 
excerpt_ranges + .into_iter() + .map(|range| ExcerptRange { + context: range.context.to_point(&buffer_snapshot), + primary: range.primary.to_point(&buffer_snapshot), + }) + .collect(); + buffer_diagnostics_editor + .multibuffer + .update(cx, |multibuffer, cx| { + multibuffer.set_excerpt_ranges_for_path( + PathKey::for_buffer(&buffer, cx), + buffer.clone(), + &buffer_snapshot, + excerpt_ranges.clone(), + cx, + ) + }); + let multibuffer_snapshot = + buffer_diagnostics_editor.multibuffer.read(cx).snapshot(cx); + let anchor_ranges: Vec> = excerpt_ranges + .into_iter() + .filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range.primary); + let start = multibuffer_snapshot.anchor_in_buffer(text_range.start)?; + let end = multibuffer_snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + }) + .collect(); if was_empty { if let Some(anchor_range) = anchor_ranges.first() { diff --git a/crates/diagnostics/src/diagnostic_renderer.rs b/crates/diagnostics/src/diagnostic_renderer.rs index 27e1cbbac9c779056ecd9da00dd7a56ff3536f17..eaf414560845ea326fc508fe19d71fb01ebc1f32 100644 --- a/crates/diagnostics/src/diagnostic_renderer.rs +++ b/crates/diagnostics/src/diagnostic_renderer.rs @@ -8,9 +8,9 @@ use editor::{ use gpui::{AppContext, Entity, Focusable, WeakEntity}; use language::{BufferId, Diagnostic, DiagnosticEntryRef, LanguageRegistry}; use lsp::DiagnosticSeverity; -use markdown::{Markdown, MarkdownElement}; +use markdown::{CopyButtonVisibility, Markdown, MarkdownElement}; use settings::Settings; -use text::{AnchorRangeExt, Point}; +use text::Point; use theme_settings::ThemeSettings; use ui::{CopyButton, prelude::*}; use util::maybe; @@ -239,8 +239,7 @@ impl DiagnosticBlock { diagnostics_markdown_style(bcx.window, cx), ) .code_block_renderer(markdown::CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::Hidden, border: false, }) .on_url_click({ @@ -290,23 +289,12 @@ 
impl DiagnosticBlock { .nth(ix) { let multibuffer = editor.buffer().read(cx); - let Some(snapshot) = multibuffer - .buffer(buffer_id) - .map(|entity| entity.read(cx).snapshot()) - else { + if let Some(anchor_range) = multibuffer + .snapshot(cx) + .buffer_anchor_range_to_anchor_range(diagnostic.range) + { + Self::jump_to(editor, anchor_range, window, cx); return; - }; - - for (excerpt_id, _, range) in multibuffer.excerpts_for_buffer(buffer_id, cx) { - if range.context.overlaps(&diagnostic.range, &snapshot) { - Self::jump_to( - editor, - Anchor::range_in_buffer(excerpt_id, diagnostic.range), - window, - cx, - ); - return; - } } } } else if let Some(diagnostic) = editor diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index b200d01669a90c1e439338b9b01118cce8b8bb0c..9bedc2db4a138eec468857013f9f1a010923bbe6 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -12,7 +12,7 @@ use buffer_diagnostics::BufferDiagnosticsEditor; use collections::{BTreeSet, HashMap, HashSet}; use diagnostic_renderer::DiagnosticBlock; use editor::{ - Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey, + Anchor, Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey, display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId}, multibuffer_context_lines, }; @@ -301,17 +301,21 @@ impl ProjectDiagnosticsEditor { let snapshot = self .editor .update(cx, |editor, cx| editor.display_snapshot(cx)); - let buffer = self.multibuffer.read(cx); - let buffer_ids = buffer.all_buffer_ids(); let selected_buffers = self.editor.update(cx, |editor, _| { editor .selections .all_anchors(&snapshot) .iter() - .filter_map(|anchor| anchor.start.text_anchor.buffer_id) + .filter_map(|anchor| { + Some(snapshot.anchor_to_buffer_anchor(anchor.start)?.0.buffer_id) + }) .collect::>() }); - for buffer_id in buffer_ids { + for buffer_id in snapshot + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) + .dedup() + { 
if retain_selections && selected_buffers.contains(&buffer_id) { continue; } @@ -329,7 +333,7 @@ impl ProjectDiagnosticsEditor { continue; } self.multibuffer.update(cx, |b, cx| { - b.remove_excerpts_for_path(PathKey::for_buffer(&buffer, cx), cx); + b.remove_excerpts(PathKey::for_buffer(&buffer, cx), cx); }); } } @@ -581,9 +585,8 @@ impl ProjectDiagnosticsEditor { match retain_excerpts { RetainExcerpts::Dirty if !is_dirty => Vec::new(), RetainExcerpts::All | RetainExcerpts::Dirty => multi_buffer - .excerpts_for_buffer(buffer_id, cx) - .into_iter() - .map(|(_, _, range)| range) + .snapshot(cx) + .excerpts_for_buffer(buffer_id) .sorted_by(|a, b| cmp_excerpts(&buffer_snapshot, a, b)) .collect(), } @@ -621,22 +624,33 @@ impl ProjectDiagnosticsEditor { }); }) } - let (anchor_ranges, _) = this.multibuffer.update(cx, |multi_buffer, cx| { - let excerpt_ranges = excerpt_ranges - .into_iter() - .map(|range| ExcerptRange { - context: range.context.to_point(&buffer_snapshot), - primary: range.primary.to_point(&buffer_snapshot), - }) - .collect(); + let excerpt_ranges: Vec<_> = excerpt_ranges + .into_iter() + .map(|range| ExcerptRange { + context: range.context.to_point(&buffer_snapshot), + primary: range.primary.to_point(&buffer_snapshot), + }) + .collect(); + // TODO(cole): maybe should use the nonshrinking API? 
+ this.multibuffer.update(cx, |multi_buffer, cx| { multi_buffer.set_excerpt_ranges_for_path( PathKey::for_buffer(&buffer, cx), buffer.clone(), &buffer_snapshot, - excerpt_ranges, + excerpt_ranges.clone(), cx, ) }); + let multibuffer_snapshot = this.multibuffer.read(cx).snapshot(cx); + let anchor_ranges: Vec> = excerpt_ranges + .into_iter() + .filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range.primary); + let start = multibuffer_snapshot.anchor_in_buffer(text_range.start)?; + let end = multibuffer_snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + }) + .collect(); #[cfg(test)] let cloned_blocks = result_blocks.clone(); diff --git a/crates/edit_prediction/src/capture_example.rs b/crates/edit_prediction/src/capture_example.rs index d21df7868162d279cb18aeea3ef04d4ea9d7be7f..5eb422246775c4409f7f15e3a672a2d407386acc 100644 --- a/crates/edit_prediction/src/capture_example.rs +++ b/crates/edit_prediction/src/capture_example.rs @@ -414,7 +414,7 @@ mod tests { capture_example( project.clone(), buffer.clone(), - Anchor::MIN, + Anchor::min_for_buffer(buffer.read(cx).remote_id()), events, true, cx, diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 3a66f712e31d7853bede21ab96ca6c7e92bea967..61690c470829ca4bb16a6af9f1df2ea6e7cc6023 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -1676,7 +1676,7 @@ impl EditPredictionStore { buffer.pending_predictions.push(PendingSettledPrediction { request_id: request_id, editable_anchor_range: edited_buffer_snapshot - .anchor_range_around(editable_offset_range), + .anchor_range_inside(editable_offset_range), example, e2e_latency, enqueued_at: now, @@ -2351,7 +2351,10 @@ impl EditPredictionStore { cx: &mut AsyncApp, ) -> Result, language::Anchor)>> { let collaborator_cursor_rows: Vec = active_buffer_snapshot - .selections_in_range(Anchor::MIN..Anchor::MAX, false) + 
.selections_in_range( + Anchor::min_max_range_for_buffer(active_buffer_snapshot.remote_id()), + false, + ) .flat_map(|(_, _, _, selections)| { selections.map(|s| s.head().to_point(active_buffer_snapshot).row) }) @@ -2427,7 +2430,10 @@ impl EditPredictionStore { candidate_buffer.read_with(cx, |buffer, _cx| { let snapshot = buffer.snapshot(); let has_collaborators = snapshot - .selections_in_range(Anchor::MIN..Anchor::MAX, false) + .selections_in_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + false, + ) .next() .is_some(); let position = buffer @@ -2761,7 +2767,7 @@ fn collaborator_edit_overlaps_locality_region( (position..position).to_point(snapshot), COLLABORATOR_EDIT_LOCALITY_CONTEXT_TOKENS, ); - let locality_anchor_range = snapshot.anchor_range_around(locality_point_range); + let locality_anchor_range = snapshot.anchor_range_inside(locality_point_range); edit_range.overlaps(&locality_anchor_range, snapshot) } diff --git a/crates/edit_prediction/src/udiff.rs b/crates/edit_prediction/src/udiff.rs index 14be1991d34e985067f5ad8729fd7ac8485211db..407dc4fc7239fb1974ef8bc5be4b3a99cd31f187 100644 --- a/crates/edit_prediction/src/udiff.rs +++ b/crates/edit_prediction/src/udiff.rs @@ -54,7 +54,6 @@ pub async fn apply_diff( let mut included_files: HashMap> = HashMap::default(); - let ranges = [Anchor::MIN..Anchor::MAX]; let mut diff = DiffParser::new(diff_str); let mut current_file = None; let mut edits: Vec<(std::ops::Range, Arc)> = vec![]; @@ -115,7 +114,7 @@ pub async fn apply_diff( edits.extend(resolve_hunk_edits_in_buffer( hunk, buffer, - ranges.as_slice(), + &[Anchor::min_max_range_for_buffer(buffer.remote_id())], status, )?); anyhow::Ok(()) diff --git a/crates/edit_prediction_ui/src/edit_prediction_context_view.rs b/crates/edit_prediction_ui/src/edit_prediction_context_view.rs index 48e74dcdcc102f9ed7844f1b8829e0182fe2c97b..1407ffc73d82c6e564fe46e688b6d6d16a307c01 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_context_view.rs +++ 
b/crates/edit_prediction_ui/src/edit_prediction_context_view.rs @@ -201,10 +201,14 @@ impl EditPredictionContextView { multibuffer.clear(cx); for (path, buffer, ranges, orders, _) in paths { - let (anchor_ranges, _) = - multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx); - for (anchor_range, order) in anchor_ranges.into_iter().zip(orders) { - excerpt_anchors_with_orders.push((anchor_range.start, order)); + multibuffer.set_excerpts_for_path(path, buffer.clone(), ranges.clone(), 0, cx); + let snapshot = multibuffer.snapshot(cx); + let buffer_snapshot = buffer.read(cx).snapshot(); + for (range, order) in ranges.into_iter().zip(orders) { + let text_anchor = buffer_snapshot.anchor_range_inside(range); + if let Some(start) = snapshot.anchor_in_buffer(text_anchor.start) { + excerpt_anchors_with_orders.push((start, order)); + } } } }); diff --git a/crates/edit_prediction_ui/src/rate_prediction_modal.rs b/crates/edit_prediction_ui/src/rate_prediction_modal.rs index 1fb6c36bc9503e0a2fea7b3f77d1515747d1363c..eb071bf955cede173e74993c93ab5cd294338474 100644 --- a/crates/edit_prediction_ui/src/rate_prediction_modal.rs +++ b/crates/edit_prediction_ui/src/rate_prediction_modal.rs @@ -357,35 +357,26 @@ impl RatePredictionsModal { }); editor.disable_header_for_buffer(new_buffer_id, cx); - let excerpt_id = editor.buffer().update(cx, |multibuffer, cx| { + editor.buffer().update(cx, |multibuffer, cx| { multibuffer.clear(cx); - multibuffer.set_excerpts_for_buffer(new_buffer, [start..end], 0, cx); + multibuffer.set_excerpts_for_buffer(new_buffer.clone(), [start..end], 0, cx); multibuffer.add_diff(diff, cx); - multibuffer.excerpt_ids().into_iter().next() }); - if let Some((excerpt_id, cursor_position)) = - excerpt_id.zip(prediction.cursor_position.as_ref()) - { + if let Some(cursor_position) = prediction.cursor_position.as_ref() { let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx); - if let Some(buffer_snapshot) = - 
multibuffer_snapshot.buffer_for_excerpt(excerpt_id) - { - let cursor_offset = prediction - .edit_preview - .anchor_to_offset_in_result(cursor_position.anchor) - + cursor_position.offset; - let cursor_anchor = buffer_snapshot.anchor_after(cursor_offset); - - if let Some(anchor) = - multibuffer_snapshot.anchor_in_excerpt(excerpt_id, cursor_anchor) - { - editor.splice_inlays( - &[InlayId::EditPrediction(0)], - vec![Inlay::edit_prediction(0, anchor, "▏")], - cx, - ); - } + let cursor_offset = prediction + .edit_preview + .anchor_to_offset_in_result(cursor_position.anchor) + + cursor_position.offset; + let cursor_anchor = new_buffer.read(cx).snapshot().anchor_after(cursor_offset); + + if let Some(anchor) = multibuffer_snapshot.anchor_in_excerpt(cursor_anchor) { + editor.splice_inlays( + &[InlayId::EditPrediction(0)], + vec![Inlay::edit_prediction(0, anchor, "▏")], + cx, + ); } } }); @@ -991,7 +982,6 @@ impl FeedbackCompletionProvider { impl editor::CompletionProvider for FeedbackCompletionProvider { fn completions( &self, - _excerpt_id: editor::ExcerptId, buffer: &Entity, buffer_position: language::Anchor, _trigger: editor::CompletionContext, diff --git a/crates/editor/src/bracket_colorization.rs b/crates/editor/src/bracket_colorization.rs index 0c9fa29ae6a19ad81ec265cc832a5d3ec15cec51..8c8c3a36e9a73a0b3960f1239f49270647dabea7 100644 --- a/crates/editor/src/bracket_colorization.rs +++ b/crates/editor/src/bracket_colorization.rs @@ -7,9 +7,9 @@ use std::ops::Range; use crate::{Editor, HighlightKey}; use collections::{HashMap, HashSet}; use gpui::{AppContext as _, Context, HighlightStyle}; -use itertools::Itertools; use language::{BufferRow, BufferSnapshot, language_settings::LanguageSettings}; -use multi_buffer::{Anchor, ExcerptId}; +use multi_buffer::{Anchor, BufferOffset, ExcerptRange, MultiBufferSnapshot}; +use text::OffsetRangeExt as _; use ui::{ActiveTheme, utils::ensure_minimum_contrast}; impl Editor { @@ -25,55 +25,49 @@ impl Editor { let accents_count = 
cx.theme().accents().0.len(); let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); - let visible_excerpts = self.visible_excerpts(false, cx); - let excerpt_data: Vec<(ExcerptId, BufferSnapshot, Range)> = visible_excerpts + let visible_excerpts = self.visible_buffer_ranges(cx); + let excerpt_data: Vec<( + BufferSnapshot, + Range, + ExcerptRange, + )> = visible_excerpts .into_iter() - .filter_map(|(excerpt_id, (buffer, _, buffer_range))| { - let buffer = buffer.read(cx); - let buffer_snapshot = buffer.snapshot(); - if LanguageSettings::for_buffer(&buffer, cx).colorize_brackets { - Some((excerpt_id, buffer_snapshot, buffer_range)) - } else { - None - } + .filter(|(buffer_snapshot, _, _)| { + let Some(buffer) = self.buffer().read(cx).buffer(buffer_snapshot.remote_id()) + else { + return false; + }; + LanguageSettings::for_buffer(buffer.read(cx), cx).colorize_brackets }) .collect(); let mut fetched_tree_sitter_chunks = excerpt_data .iter() - .filter_map(|(excerpt_id, ..)| { + .filter_map(|(_, _, excerpt_range)| { + let key = excerpt_range.context.clone(); Some(( - *excerpt_id, - self.bracket_fetched_tree_sitter_chunks - .get(excerpt_id) - .cloned()?, + key.clone(), + self.bracket_fetched_tree_sitter_chunks.get(&key).cloned()?, )) }) - .collect::>>>(); + .collect::, HashSet>>>(); let bracket_matches_by_accent = cx.background_spawn(async move { - let anchors_in_multi_buffer = |current_excerpt: ExcerptId, - text_anchors: [text::Anchor; 4]| - -> Option<[Option<_>; 4]> { - multi_buffer_snapshot - .anchors_in_excerpt(current_excerpt, text_anchors)? 
- .collect_array() - }; - let bracket_matches_by_accent: HashMap>> = excerpt_data.into_iter().fold( HashMap::default(), - |mut acc, (excerpt_id, buffer_snapshot, buffer_range)| { - let fetched_chunks = - fetched_tree_sitter_chunks.entry(excerpt_id).or_default(); + |mut acc, (buffer_snapshot, buffer_range, excerpt_range)| { + let fetched_chunks = fetched_tree_sitter_chunks + .entry(excerpt_range.context.clone()) + .or_default(); let brackets_by_accent = compute_bracket_ranges( + &multi_buffer_snapshot, &buffer_snapshot, buffer_range, + excerpt_range, fetched_chunks, - excerpt_id, accents_count, - &anchors_in_multi_buffer, ); for (accent_number, new_ranges) in brackets_by_accent { @@ -144,15 +138,20 @@ impl Editor { } fn compute_bracket_ranges( + multi_buffer_snapshot: &MultiBufferSnapshot, buffer_snapshot: &BufferSnapshot, - buffer_range: Range, + buffer_range: Range, + excerpt_range: ExcerptRange, fetched_chunks: &mut HashSet>, - excerpt_id: ExcerptId, accents_count: usize, - anchors_in_multi_buffer: &impl Fn(ExcerptId, [text::Anchor; 4]) -> Option<[Option; 4]>, ) -> Vec<(usize, Vec>)> { + let context = excerpt_range.context.to_offset(buffer_snapshot); + buffer_snapshot - .fetch_bracket_ranges(buffer_range.start..buffer_range.end, Some(fetched_chunks)) + .fetch_bracket_ranges( + buffer_range.start.0..buffer_range.end.0, + Some(fetched_chunks), + ) .into_iter() .flat_map(|(chunk_range, pairs)| { if fetched_chunks.insert(chunk_range) { @@ -164,37 +163,25 @@ fn compute_bracket_ranges( .filter_map(|pair| { let color_index = pair.color_index?; - let buffer_open_range = buffer_snapshot.anchor_range_around(pair.open_range); - let buffer_close_range = buffer_snapshot.anchor_range_around(pair.close_range); - let [ - buffer_open_range_start, - buffer_open_range_end, - buffer_close_range_start, - buffer_close_range_end, - ] = anchors_in_multi_buffer( - excerpt_id, - [ - buffer_open_range.start, - buffer_open_range.end, - buffer_close_range.start, - buffer_close_range.end, - 
], - )?; - let multi_buffer_open_range = buffer_open_range_start.zip(buffer_open_range_end); - let multi_buffer_close_range = buffer_close_range_start.zip(buffer_close_range_end); + let mut ranges = Vec::new(); - let mut ranges = Vec::with_capacity(2); - if let Some((open_start, open_end)) = multi_buffer_open_range { - ranges.push(open_start..open_end); - } - if let Some((close_start, close_end)) = multi_buffer_close_range { - ranges.push(close_start..close_end); - } - if ranges.is_empty() { - None - } else { - Some((color_index % accents_count, ranges)) - } + if context.start <= pair.open_range.start && pair.open_range.end <= context.end { + let anchors = buffer_snapshot.anchor_range_inside(pair.open_range); + ranges.push( + multi_buffer_snapshot.anchor_in_buffer(anchors.start)? + ..multi_buffer_snapshot.anchor_in_buffer(anchors.end)?, + ); + }; + + if context.start <= pair.close_range.start && pair.close_range.end <= context.end { + let anchors = buffer_snapshot.anchor_range_inside(pair.close_range); + ranges.push( + multi_buffer_snapshot.anchor_in_buffer(anchors.start)? + ..multi_buffer_snapshot.anchor_in_buffer(anchors.end)?, + ); + }; + + Some((color_index % accents_count, ranges)) }) .collect() } @@ -1197,7 +1184,7 @@ mod foo «1{ ); } - let buffer_snapshot = snapshot.buffer().as_singleton().unwrap().2; + let buffer_snapshot = snapshot.buffer().as_singleton().unwrap(); for bracket_match in buffer_snapshot .fetch_bracket_ranges( snapshot @@ -1464,6 +1451,101 @@ mod foo «1{ ); } + #[gpui::test] + async fn test_multi_buffer_close_excerpts(cx: &mut gpui::TestAppContext) { + let comment_lines = 5; + + init_test(cx, |language_settings| { + language_settings.defaults.colorize_brackets = Some(true); + }); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/a"), + json!({ + "lib.rs": separate_with_comment_lines( + indoc! {r#" + fn process_data_1() { + let map: Option> = None; + } + "#}, + indoc! 
{r#" + fn process_data_2() { + let other_map: Option> = None; + } + "#}, + comment_lines, + ) + }), + ) + .await; + + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + + let buffer_1 = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/a/lib.rs"), cx) + }) + .await + .unwrap(); + + let second_excerpt_start = buffer_1.read_with(cx, |buffer, _| { + let text = buffer.text(); + text.lines() + .enumerate() + .find(|(_, line)| line.contains("process_data_2")) + .map(|(row, _)| row as u32) + .unwrap() + }); + + let multi_buffer = cx.new(|cx| { + let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite); + multi_buffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer_1.clone(), + [ + Point::new(0, 0)..Point::new(3, 0), + Point::new(second_excerpt_start, 0)..Point::new(second_excerpt_start + 3, 0), + ], + 0, + cx, + ); + multi_buffer + }); + + let editor = cx.add_window(|window, cx| { + Editor::for_multibuffer(multi_buffer, Some(project.clone()), window, cx) + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + let editor_snapshot = editor + .update(cx, |editor, window, cx| editor.snapshot(window, cx)) + .unwrap(); + assert_eq!( + concat!( + "\n", + "\n", + "fn process_data_1\u{00ab}1()1\u{00bb} \u{00ab}1{\n", + " let map: Option\u{00ab}23\u{00bb}>2\u{00bb} = None;\n", + "}1\u{00bb}\n", + "\n", + "\n", + "fn process_data_2\u{00ab}1()1\u{00bb} \u{00ab}1{\n", + " let other_map: Option\u{00ab}23\u{00bb}>2\u{00bb} = None;\n", + "}1\u{00bb}\n", + "\n", + "1 hsla(207.80, 16.20%, 69.19%, 1.00)\n", + "2 hsla(29.00, 54.00%, 65.88%, 1.00)\n", + "3 hsla(286.00, 51.00%, 75.25%, 1.00)\n", + "4 hsla(187.00, 47.00%, 59.22%, 1.00)\n", + ), + &editor_bracket_colors_markup(&editor_snapshot), + "Two close excerpts from the same buffer (within same tree-sitter 
chunk) should both have bracket colors" + ); + } + #[gpui::test] // reproduction of #47846 async fn test_bracket_colorization_with_folds(cx: &mut gpui::TestAppContext) { diff --git a/crates/editor/src/code_completion_tests.rs b/crates/editor/src/code_completion_tests.rs index 4602824486ebb88f78ed529abb91ddcc1c34646f..3211f0b818eb3079007db4bf268e84bd53d3cbf1 100644 --- a/crates/editor/src/code_completion_tests.rs +++ b/crates/editor/src/code_completion_tests.rs @@ -7,7 +7,7 @@ use project::{Completion, CompletionSource}; use settings::SnippetSortOrder; use std::sync::Arc; use std::sync::atomic::AtomicBool; -use text::Anchor; +use text::{Anchor, BufferId}; #[gpui::test] async fn test_sort_kind(cx: &mut TestAppContext) { @@ -393,7 +393,7 @@ impl CompletionBuilder { kind: Option, ) -> Completion { Completion { - replace_range: Anchor::MIN..Anchor::MAX, + replace_range: Anchor::min_max_range_for_buffer(BufferId::new(1).unwrap()), new_text: label.to_string(), label: CodeLabel::plain(label.to_string(), filter_text), documentation: None, diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index 3fc6080b4da8ca85d258d04de29d603ea7097623..2db2086eef422a87a0825c4a4ad820d422b160e9 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -9,8 +9,8 @@ use itertools::Itertools; use language::CodeLabel; use language::{Buffer, LanguageName, LanguageRegistry}; use lsp::CompletionItemTag; -use markdown::{Markdown, MarkdownElement}; -use multi_buffer::{Anchor, ExcerptId}; +use markdown::{CopyButtonVisibility, Markdown, MarkdownElement}; +use multi_buffer::Anchor; use ordered_float::OrderedFloat; use project::lsp_store::CompletionDocumentation; use project::{CodeAction, Completion, TaskSourceKind}; @@ -357,7 +357,8 @@ impl CompletionsMenu { id: CompletionId, sort_completions: bool, choices: &Vec, - selection: Range, + initial_position: Anchor, + selection: Range, buffer: Entity, scroll_handle: 
Option, snippet_sort_order: SnippetSortOrder, @@ -365,7 +366,7 @@ impl CompletionsMenu { let completions = choices .iter() .map(|choice| Completion { - replace_range: selection.start.text_anchor..selection.end.text_anchor, + replace_range: selection.clone(), new_text: choice.to_string(), label: CodeLabel::plain(choice.to_string(), None), match_start: None, @@ -400,7 +401,7 @@ impl CompletionsMenu { id, source: CompletionsMenuSource::SnippetChoices, sort_completions, - initial_position: selection.start, + initial_position, initial_query: None, is_incomplete: false, buffer, @@ -1118,8 +1119,7 @@ impl CompletionsMenu { div().child( MarkdownElement::new(markdown, hover_markdown_style(window, cx)) .code_block_renderer(markdown::CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::Hidden, border: false, }) .on_url_click(open_markdown_url), @@ -1381,7 +1381,6 @@ impl CompletionsMenu { #[derive(Clone)] pub struct AvailableCodeAction { - pub excerpt_id: ExcerptId, pub action: CodeAction, pub provider: Rc, } @@ -1434,7 +1433,6 @@ impl CodeActionContents { }) .chain(self.actions.iter().flat_map(|actions| { actions.iter().map(|available| CodeActionsItem::CodeAction { - excerpt_id: available.excerpt_id, action: available.action.clone(), provider: available.provider.clone(), }) @@ -1458,7 +1456,6 @@ impl CodeActionContents { if let Some(actions) = &self.actions { if let Some(available) = actions.get(index) { return Some(CodeActionsItem::CodeAction { - excerpt_id: available.excerpt_id, action: available.action.clone(), provider: available.provider.clone(), }); @@ -1478,7 +1475,6 @@ impl CodeActionContents { pub enum CodeActionsItem { Task(TaskSourceKind, ResolvedTask), CodeAction { - excerpt_id: ExcerptId, action: CodeAction, provider: Rc, }, diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 
933f0e6e18e57c38b6bcc3636f60bd1ae671d3a6..916391b32d580dc0cc86670056a42bd5a0861aab 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -103,7 +103,7 @@ use language::{ }; use multi_buffer::{ - Anchor, AnchorRangeExt, ExcerptId, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16, + Anchor, AnchorRangeExt, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16, MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, ToPoint, }; use project::project_settings::DiagnosticSeverity; @@ -125,7 +125,7 @@ use std::{ fmt::Debug, iter, num::NonZeroU32, - ops::{self, Add, Bound, Range, Sub}, + ops::{self, Add, Range, Sub}, sync::Arc, }; @@ -195,10 +195,9 @@ pub struct CompanionExcerptPatch { } pub type ConvertMultiBufferRows = fn( - &HashMap, &MultiBufferSnapshot, &MultiBufferSnapshot, - (Bound, Bound), + Range, ) -> Vec; /// Decides how text in a [`MultiBuffer`] should be displayed in a buffer, handling inlay hints, @@ -240,8 +239,6 @@ pub(crate) struct Companion { rhs_display_map_id: EntityId, rhs_buffer_to_lhs_buffer: HashMap, lhs_buffer_to_rhs_buffer: HashMap, - rhs_excerpt_to_lhs_excerpt: HashMap, - lhs_excerpt_to_rhs_excerpt: HashMap, rhs_rows_to_lhs_rows: ConvertMultiBufferRows, lhs_rows_to_rhs_rows: ConvertMultiBufferRows, rhs_custom_block_to_balancing_block: RefCell>, @@ -258,8 +255,6 @@ impl Companion { rhs_display_map_id, rhs_buffer_to_lhs_buffer: Default::default(), lhs_buffer_to_rhs_buffer: Default::default(), - rhs_excerpt_to_lhs_excerpt: Default::default(), - lhs_excerpt_to_rhs_excerpt: Default::default(), rhs_rows_to_lhs_rows, lhs_rows_to_rhs_rows, rhs_custom_block_to_balancing_block: Default::default(), @@ -287,14 +282,14 @@ impl Companion { display_map_id: EntityId, companion_snapshot: &MultiBufferSnapshot, our_snapshot: &MultiBufferSnapshot, - bounds: (Bound, Bound), + bounds: Range, ) -> Vec { - let (excerpt_map, convert_fn) = if self.is_rhs(display_map_id) { - (&self.rhs_excerpt_to_lhs_excerpt, 
self.rhs_rows_to_lhs_rows) + let convert_fn = if self.is_rhs(display_map_id) { + self.rhs_rows_to_lhs_rows } else { - (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows) + self.lhs_rows_to_rhs_rows }; - convert_fn(excerpt_map, companion_snapshot, our_snapshot, bounds) + convert_fn(companion_snapshot, our_snapshot, bounds) } pub(crate) fn convert_point_from_companion( @@ -304,20 +299,15 @@ impl Companion { companion_snapshot: &MultiBufferSnapshot, point: MultiBufferPoint, ) -> Range { - let (excerpt_map, convert_fn) = if self.is_rhs(display_map_id) { - (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows) + let convert_fn = if self.is_rhs(display_map_id) { + self.lhs_rows_to_rhs_rows } else { - (&self.rhs_excerpt_to_lhs_excerpt, self.rhs_rows_to_lhs_rows) + self.rhs_rows_to_lhs_rows }; - let excerpt = convert_fn( - excerpt_map, - our_snapshot, - companion_snapshot, - (Bound::Included(point), Bound::Included(point)), - ) - .into_iter() - .next(); + let excerpt = convert_fn(our_snapshot, companion_snapshot, point..point) + .into_iter() + .next(); let Some(excerpt) = excerpt else { return Point::zero()..our_snapshot.max_point(); @@ -332,20 +322,15 @@ impl Companion { companion_snapshot: &MultiBufferSnapshot, point: MultiBufferPoint, ) -> Range { - let (excerpt_map, convert_fn) = if self.is_rhs(display_map_id) { - (&self.rhs_excerpt_to_lhs_excerpt, self.rhs_rows_to_lhs_rows) + let convert_fn = if self.is_rhs(display_map_id) { + self.rhs_rows_to_lhs_rows } else { - (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows) + self.lhs_rows_to_rhs_rows }; - let excerpt = convert_fn( - excerpt_map, - companion_snapshot, - our_snapshot, - (Bound::Included(point), Bound::Included(point)), - ) - .into_iter() - .next(); + let excerpt = convert_fn(companion_snapshot, our_snapshot, point..point) + .into_iter() + .next(); let Some(excerpt) = excerpt else { return Point::zero()..companion_snapshot.max_point(); @@ -353,30 +338,6 @@ impl Companion { 
excerpt.patch.edit_for_old_position(point).new } - pub(crate) fn companion_excerpt_to_excerpt( - &self, - display_map_id: EntityId, - ) -> &HashMap { - if self.is_rhs(display_map_id) { - &self.lhs_excerpt_to_rhs_excerpt - } else { - &self.rhs_excerpt_to_lhs_excerpt - } - } - - #[cfg(test)] - pub(crate) fn excerpt_mappings( - &self, - ) -> ( - &HashMap, - &HashMap, - ) { - ( - &self.lhs_excerpt_to_rhs_excerpt, - &self.rhs_excerpt_to_lhs_excerpt, - ) - } - fn buffer_to_companion_buffer(&self, display_map_id: EntityId) -> &HashMap { if self.is_rhs(display_map_id) { &self.rhs_buffer_to_lhs_buffer @@ -385,24 +346,6 @@ impl Companion { } } - pub(crate) fn add_excerpt_mapping(&mut self, lhs_id: ExcerptId, rhs_id: ExcerptId) { - self.lhs_excerpt_to_rhs_excerpt.insert(lhs_id, rhs_id); - self.rhs_excerpt_to_lhs_excerpt.insert(rhs_id, lhs_id); - } - - pub(crate) fn remove_excerpt_mappings( - &mut self, - lhs_ids: impl IntoIterator, - rhs_ids: impl IntoIterator, - ) { - for id in lhs_ids { - self.lhs_excerpt_to_rhs_excerpt.remove(&id); - } - for id in rhs_ids { - self.rhs_excerpt_to_lhs_excerpt.remove(&id); - } - } - pub(crate) fn lhs_to_rhs_buffer(&self, lhs_buffer_id: BufferId) -> Option { self.lhs_buffer_to_rhs_buffer.get(&lhs_buffer_id).copied() } @@ -540,8 +483,7 @@ impl DisplayMap { .wrap_map .update(cx, |wrap_map, cx| wrap_map.sync(snapshot, edits, cx)); - let (snapshot, edits) = - writer.unfold_intersecting([Anchor::min()..Anchor::max()], true); + let (snapshot, edits) = writer.unfold_intersecting([Anchor::Min..Anchor::Max], true); let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); let (snapshot, _edits) = self .wrap_map @@ -632,18 +574,6 @@ impl DisplayMap { self.companion.as_ref().map(|(_, c)| c) } - pub(crate) fn companion_excerpt_to_my_excerpt( - &self, - their_id: ExcerptId, - cx: &App, - ) -> Option { - let (_, companion) = self.companion.as_ref()?; - let c = companion.read(cx); - c.companion_excerpt_to_excerpt(self.entity_id) - .get(&their_id) 
- .copied() - } - fn sync_through_wrap(&mut self, cx: &mut App) -> (WrapSnapshot, WrapPatch) { let tab_size = Self::tab_size(&self.buffer, cx); let buffer_snapshot = self.buffer.read(cx).snapshot(cx); @@ -1054,17 +984,10 @@ impl DisplayMap { return; } - let excerpt_ids = snapshot - .excerpts() - .filter(|(_, buf, _)| buf.remote_id() == buffer_id) - .map(|(id, _, _)| id) - .collect::>(); - let base_placeholder = self.fold_placeholder.clone(); let creases = ranges.into_iter().filter_map(|folding_range| { - let mb_range = excerpt_ids.iter().find_map(|&id| { - snapshot.anchor_range_in_excerpt(id, folding_range.range.clone()) - })?; + let mb_range = + snapshot.buffer_anchor_range_to_anchor_range(folding_range.range.clone())?; let placeholder = if let Some(collapsed_text) = folding_range.collapsed_text { FoldPlaceholder { render: Arc::new({ diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 531de6da49e375a4f7ba2833106e1716de551ff2..25874457a8e3d4787de22e3e8c0e2c61a49708f8 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -11,8 +11,8 @@ use collections::{Bound, HashMap, HashSet}; use gpui::{AnyElement, App, EntityId, Pixels, Window}; use language::{Patch, Point}; use multi_buffer::{ - Anchor, ExcerptId, ExcerptInfo, MultiBuffer, MultiBufferOffset, MultiBufferPoint, - MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, ToPoint as _, + Anchor, ExcerptBoundaryInfo, MultiBuffer, MultiBufferOffset, MultiBufferPoint, MultiBufferRow, + MultiBufferSnapshot, RowInfo, ToOffset, ToPoint as _, }; use parking_lot::Mutex; use std::{ @@ -298,10 +298,10 @@ pub struct BlockContext<'a, 'b> { pub indent_guide_padding: Pixels, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum BlockId { - ExcerptBoundary(ExcerptId), - FoldedBuffer(ExcerptId), + ExcerptBoundary(Anchor), + FoldedBuffer(BufferId), 
Custom(CustomBlockId), Spacer(SpacerId), } @@ -310,10 +310,8 @@ impl From for ElementId { fn from(value: BlockId) -> Self { match value { BlockId::Custom(CustomBlockId(id)) => ("Block", id).into(), - BlockId::ExcerptBoundary(excerpt_id) => { - ("ExcerptBoundary", EntityId::from(excerpt_id)).into() - } - BlockId::FoldedBuffer(id) => ("FoldedBuffer", EntityId::from(id)).into(), + BlockId::ExcerptBoundary(anchor) => anchor.opaque_id().unwrap().into(), + BlockId::FoldedBuffer(id) => ("FoldedBuffer", EntityId::from(id.to_proto())).into(), BlockId::Spacer(SpacerId(id)) => ("Spacer", id).into(), } } @@ -323,7 +321,7 @@ impl std::fmt::Display for BlockId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Custom(id) => write!(f, "Block({id:?})"), - Self::ExcerptBoundary(id) => write!(f, "ExcerptHeader({id:?})"), + Self::ExcerptBoundary(id) => write!(f, "ExcerptBoundary({id:?})"), Self::FoldedBuffer(id) => write!(f, "FoldedBuffer({id:?})"), Self::Spacer(id) => write!(f, "Spacer({id:?})"), } @@ -340,15 +338,15 @@ struct Transform { pub enum Block { Custom(Arc), FoldedBuffer { - first_excerpt: ExcerptInfo, + first_excerpt: ExcerptBoundaryInfo, height: u32, }, ExcerptBoundary { - excerpt: ExcerptInfo, + excerpt: ExcerptBoundaryInfo, height: u32, }, BufferHeader { - excerpt: ExcerptInfo, + excerpt: ExcerptBoundaryInfo, height: u32, }, Spacer { @@ -365,12 +363,14 @@ impl Block { Block::ExcerptBoundary { excerpt: next_excerpt, .. - } => BlockId::ExcerptBoundary(next_excerpt.id), - Block::FoldedBuffer { first_excerpt, .. } => BlockId::FoldedBuffer(first_excerpt.id), + } => BlockId::ExcerptBoundary(next_excerpt.start_anchor), + Block::FoldedBuffer { first_excerpt, .. } => { + BlockId::FoldedBuffer(first_excerpt.buffer_id()) + } Block::BufferHeader { excerpt: next_excerpt, .. - } => BlockId::ExcerptBoundary(next_excerpt.id), + } => BlockId::ExcerptBoundary(next_excerpt.start_anchor), Block::Spacer { id, .. 
} => BlockId::Spacer(*id), } } @@ -1174,10 +1174,10 @@ impl BlockMap { let wrap_row = wrap_row_for(Point::new(excerpt_boundary.row.0, 0), Bias::Left); let new_buffer_id = match (&excerpt_boundary.prev, &excerpt_boundary.next) { - (None, next) => Some(next.buffer_id), + (None, next) => Some(next.buffer_id()), (Some(prev), next) => { - if prev.buffer_id != next.buffer_id { - Some(next.buffer_id) + if prev.buffer_id() != next.buffer_id() { + Some(next.buffer_id()) } else { None } @@ -1195,7 +1195,7 @@ impl BlockMap { let mut last_excerpt_end_row = first_excerpt.end_row; while let Some(next_boundary) = boundaries.peek() { - if next_boundary.next.buffer_id == new_buffer_id { + if next_boundary.next.buffer_id() == new_buffer_id { last_excerpt_end_row = next_boundary.next.end_row; } else { break; @@ -1254,12 +1254,24 @@ impl BlockMap { let our_buffer = wrap_snapshot.buffer_snapshot(); let companion_buffer = companion_snapshot.buffer_snapshot(); - let patches = companion.convert_rows_to_companion( + let range = match bounds { + (Bound::Included(start), Bound::Excluded(end)) => start..end, + (Bound::Included(start), Bound::Unbounded) => start..wrap_snapshot.buffer().max_point(), + _ => unreachable!(), + }; + let mut patches = companion.convert_rows_to_companion( display_map_id, companion_buffer, our_buffer, - bounds, + range, ); + if let Some(patch) = patches.last() + && let Bound::Excluded(end) = bounds.1 + && end == wrap_snapshot.buffer().max_point() + && patch.source_excerpt_range.is_empty() + { + patches.pop(); + } let mut our_inlay_point_cursor = wrap_snapshot.inlay_point_cursor(); let mut our_fold_point_cursor = wrap_snapshot.fold_point_cursor(); @@ -1391,18 +1403,15 @@ impl BlockMap { } } - // Main loop: process one hunk/group at a time, possibly inserting spacers before and after. 
while let Some(source_point) = source_points.next() { let mut current_boundary = source_point; let current_range = excerpt.patch.edit_for_old_position(current_boundary).new; - // This can only occur at the end of an excerpt. if current_boundary.column > 0 { debug_assert_eq!(current_boundary, excerpt.source_excerpt_range.end); break; } - // Align the two sides at the start of this group. let (delta_at_start, mut spacer_at_start) = determine_spacer( &mut our_wrapper, &mut companion_wrapper, @@ -1434,7 +1443,6 @@ impl BlockMap { source_points.next(); } - // This can only occur at the end of an excerpt. if current_boundary.column > 0 { debug_assert_eq!(current_boundary, excerpt.source_excerpt_range.end); break; @@ -1538,7 +1546,8 @@ impl BlockMap { | Block::BufferHeader { excerpt: excerpt_b, .. }, - ) => Some(excerpt_a.id).cmp(&Some(excerpt_b.id)), + ) => Some(excerpt_a.start_text_anchor().opaque_id()) + .cmp(&Some(excerpt_b.start_text_anchor().opaque_id())), ( Block::ExcerptBoundary { .. } | Block::BufferHeader { .. }, Block::Spacer { .. 
} | Block::Custom(_), @@ -2042,7 +2051,7 @@ impl BlockMapWriter<'_> { } else { self.block_map.folded_buffers.remove(&buffer_id); } - ranges.extend(multi_buffer.excerpt_ranges_for_buffer(buffer_id, cx)); + ranges.extend(multi_buffer.range_for_buffer(buffer_id, cx)); if let Some(companion) = &self.companion && companion.inverse.is_some() { @@ -2268,14 +2277,16 @@ impl BlockSnapshot { let custom_block = self.custom_blocks_by_id.get(&custom_block_id)?; return Some(Block::Custom(custom_block.clone())); } - BlockId::ExcerptBoundary(next_excerpt_id) => { - let excerpt_range = buffer.range_for_excerpt(next_excerpt_id)?; - self.wrap_snapshot - .make_wrap_point(excerpt_range.start, Bias::Left) + BlockId::ExcerptBoundary(start_anchor) => { + let start_point = start_anchor.to_point(&buffer); + self.wrap_snapshot.make_wrap_point(start_point, Bias::Left) } - BlockId::FoldedBuffer(excerpt_id) => self - .wrap_snapshot - .make_wrap_point(buffer.range_for_excerpt(excerpt_id)?.start, Bias::Left), + BlockId::FoldedBuffer(buffer_id) => self.wrap_snapshot.make_wrap_point( + buffer + .anchor_in_excerpt(buffer.excerpts_for_buffer(buffer_id).next()?.context.start)? 
+ .to_point(buffer), + Bias::Left, + ), BlockId::Spacer(_) => return None, }; let wrap_row = wrap_point.row(); @@ -2571,7 +2582,7 @@ impl BlockChunks<'_> { } pub struct StickyHeaderExcerpt<'a> { - pub excerpt: &'a ExcerptInfo, + pub excerpt: &'a ExcerptBoundaryInfo, } impl<'a> Iterator for BlockChunks<'a> { @@ -3096,7 +3107,13 @@ mod tests { ); multi_buffer }); - let excerpt_ids = multi_buffer.read_with(cx, |mb, _| mb.excerpt_ids()); + let excerpt_start_anchors = multi_buffer.read_with(cx, |mb, _| { + let snapshot = mb.snapshot(cx); + snapshot + .excerpts() + .map(|e| snapshot.anchor_in_excerpt(e.context.start).unwrap()) + .collect::>() + }); let font = test_font(); let font_size = px(14.); @@ -3129,9 +3146,9 @@ mod tests { assert_eq!( blocks, vec![ - (0..1, BlockId::ExcerptBoundary(excerpt_ids[0])), // path, header - (3..4, BlockId::ExcerptBoundary(excerpt_ids[1])), // path, header - (6..7, BlockId::ExcerptBoundary(excerpt_ids[2])), // path, header + (0..1, BlockId::ExcerptBoundary(excerpt_start_anchors[0])), // path, header + (3..4, BlockId::ExcerptBoundary(excerpt_start_anchors[1])), // path, header + (6..7, BlockId::ExcerptBoundary(excerpt_start_anchors[2])), // path, header ] ); } @@ -3447,13 +3464,13 @@ mod tests { ], cx, ); - assert_eq!(multibuffer.read(cx).excerpt_ids().len(), 6); + assert_eq!(multibuffer.read(cx).snapshot(cx).excerpts().count(), 6); multibuffer }); let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); let buffer_ids = buffer_snapshot .excerpts() - .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id()) + .map(|excerpt| excerpt.context.start.buffer_id) .dedup() .collect::>(); assert_eq!(buffer_ids.len(), 3); @@ -3800,7 +3817,7 @@ mod tests { let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); let buffer_ids = buffer_snapshot .excerpts() - .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id()) + .map(|excerpt| excerpt.context.start.buffer_id) .dedup() .collect::>(); assert_eq!(buffer_ids.len(), 1); @@ 
-4008,17 +4025,16 @@ mod tests { wrap_map.sync(tab_snapshot, tab_edits, cx) }); let mut block_map = block_map.write(wraps_snapshot, wrap_edits, None); - let (unfolded_buffers, folded_buffers) = buffer.read_with(cx, |buffer, _| { - let folded_buffers: Vec<_> = - block_map.block_map.folded_buffers.iter().cloned().collect(); - let mut unfolded_buffers = buffer.excerpt_buffer_ids(); - unfolded_buffers.dedup(); - log::debug!("All buffers {unfolded_buffers:?}"); - log::debug!("Folded buffers {folded_buffers:?}"); - unfolded_buffers.retain(|buffer_id| { - !block_map.block_map.folded_buffers.contains(buffer_id) - }); - (unfolded_buffers, folded_buffers) + let folded_buffers: Vec<_> = + block_map.block_map.folded_buffers.iter().cloned().collect(); + let mut unfolded_buffers = buffer_snapshot + .buffer_ids_for_range(Anchor::Min..Anchor::Max) + .collect::>(); + unfolded_buffers.dedup(); + log::debug!("All buffers {unfolded_buffers:?}"); + log::debug!("Folded buffers {folded_buffers:?}"); + unfolded_buffers.retain(|buffer_id| { + !block_map.block_map.folded_buffers.contains(buffer_id) }); let mut folded_count = folded_buffers.len(); let mut unfolded_count = unfolded_buffers.len(); @@ -4039,12 +4055,14 @@ mod tests { log::info!("Folding {buffer_to_fold:?}"); let related_excerpts = buffer_snapshot .excerpts() - .filter_map(|(excerpt_id, buffer, range)| { - if buffer.remote_id() == buffer_to_fold { + .filter_map(|excerpt| { + if excerpt.context.start.buffer_id == buffer_to_fold { Some(( - excerpt_id, - buffer - .text_for_range(range.context) + excerpt.context.start, + buffer_snapshot + .buffer_for_id(buffer_to_fold) + .unwrap() + .text_for_range(excerpt.context) .collect::(), )) } else { @@ -4518,7 +4536,7 @@ mod tests { let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); let buffer_ids = buffer_snapshot .excerpts() - .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id()) + .map(|excerpt| excerpt.context.start.buffer_id) .dedup() .collect::>(); 
assert_eq!(buffer_ids.len(), 1); @@ -4563,7 +4581,7 @@ mod tests { let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); let buffer_ids = buffer_snapshot .excerpts() - .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id()) + .map(|excerpt| excerpt.context.start.buffer_id) .dedup() .collect::>(); assert_eq!(buffer_ids.len(), 1); @@ -4635,11 +4653,6 @@ mod tests { let subscription = rhs_multibuffer.update(cx, |rhs_multibuffer, _| rhs_multibuffer.subscribe()); - let lhs_excerpt_id = - lhs_multibuffer.read_with(cx, |mb, cx| mb.snapshot(cx).excerpts().next().unwrap().0); - let rhs_excerpt_id = - rhs_multibuffer.read_with(cx, |mb, cx| mb.snapshot(cx).excerpts().next().unwrap().0); - let lhs_buffer_snapshot = cx.update(|cx| lhs_multibuffer.read(cx).snapshot(cx)); let (mut _lhs_inlay_map, lhs_inlay_snapshot) = InlayMap::new(lhs_buffer_snapshot); let (mut _lhs_fold_map, lhs_fold_snapshot) = FoldMap::new(lhs_inlay_snapshot); @@ -4661,13 +4674,11 @@ mod tests { let rhs_entity_id = rhs_multibuffer.entity_id(); let companion = cx.new(|_| { - let mut c = Companion::new( + Companion::new( rhs_entity_id, convert_rhs_rows_to_lhs, convert_lhs_rows_to_rhs, - ); - c.add_excerpt_mapping(lhs_excerpt_id, rhs_excerpt_id); - c + ) }); let rhs_edits = Patch::new(vec![text::Edit { diff --git a/crates/editor/src/display_map/crease_map.rs b/crates/editor/src/display_map/crease_map.rs index 7c81562b7448bdb53bd0dd641eada92dff527aac..1664012b5eb43fb82c7c0fce38844d98ab0f7226 100644 --- a/crates/editor/src/display_map/crease_map.rs +++ b/crates/editor/src/display_map/crease_map.rs @@ -363,7 +363,7 @@ pub struct ItemSummary { impl Default for ItemSummary { fn default() -> Self { Self { - range: Anchor::min()..Anchor::min(), + range: Anchor::Min..Anchor::Min, } } } diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 95479e297cb82adcf8c3eb1f73e95f8b557eef43..1554bb96dab0e2f76a17df1396bd945f332af208 100644 --- 
a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -185,16 +185,18 @@ impl FoldMapWriter<'_> { continue; } + let fold_range = buffer.anchor_after(range.start)..buffer.anchor_before(range.end); // For now, ignore any ranges that span an excerpt boundary. - let fold_range = - FoldRange(buffer.anchor_after(range.start)..buffer.anchor_before(range.end)); - if fold_range.0.start.excerpt_id != fold_range.0.end.excerpt_id { + if buffer + .anchor_range_to_buffer_anchor_range(fold_range.clone()) + .is_none() + { continue; } folds.push(Fold { id: FoldId(post_inc(&mut self.0.next_fold_id.0)), - range: fold_range, + range: FoldRange(fold_range), placeholder: fold_text, }); @@ -510,7 +512,7 @@ impl FoldMap { .snapshot .folds .cursor::(&inlay_snapshot.buffer); - folds_cursor.seek(&FoldRange(anchor..Anchor::max()), Bias::Left); + folds_cursor.seek(&FoldRange(anchor..Anchor::Max), Bias::Left); let mut folds = iter::from_fn({ let inlay_snapshot = &inlay_snapshot; @@ -1226,7 +1228,7 @@ impl DerefMut for FoldRange { impl Default for FoldRange { fn default() -> Self { - Self(Anchor::min()..Anchor::max()) + Self(Anchor::Min..Anchor::Max) } } @@ -1262,10 +1264,10 @@ pub struct FoldSummary { impl Default for FoldSummary { fn default() -> Self { Self { - start: Anchor::min(), - end: Anchor::max(), - min_start: Anchor::max(), - max_end: Anchor::min(), + start: Anchor::Min, + end: Anchor::Max, + min_start: Anchor::Max, + max_end: Anchor::Min, count: 0, } } diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index 9c05a182ef56eb803ff545a1c9d3914b505767aa..47ca295ccb1a08768ce129b92d10506294a9cf78 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -1342,7 +1342,7 @@ mod tests { use settings::SettingsStore; use std::{cmp::Reverse, env, sync::Arc}; use sum_tree::TreeMap; - use text::{Patch, Rope}; + use text::{BufferId, Patch, Rope}; use 
util::RandomCharIter; use util::post_inc; @@ -1351,10 +1351,10 @@ mod tests { assert_eq!( Inlay::hint( InlayId::Hint(0), - Anchor::min(), + Anchor::Min, &InlayHint { label: InlayHintLabel::String("a".to_string()), - position: text::Anchor::MIN, + position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()), padding_left: false, padding_right: false, tooltip: None, @@ -1371,10 +1371,10 @@ mod tests { assert_eq!( Inlay::hint( InlayId::Hint(0), - Anchor::min(), + Anchor::Min, &InlayHint { label: InlayHintLabel::String("a".to_string()), - position: text::Anchor::MIN, + position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()), padding_left: true, padding_right: true, tooltip: None, @@ -1391,10 +1391,10 @@ mod tests { assert_eq!( Inlay::hint( InlayId::Hint(0), - Anchor::min(), + Anchor::Min, &InlayHint { label: InlayHintLabel::String(" a ".to_string()), - position: text::Anchor::MIN, + position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()), padding_left: false, padding_right: false, tooltip: None, @@ -1411,10 +1411,10 @@ mod tests { assert_eq!( Inlay::hint( InlayId::Hint(0), - Anchor::min(), + Anchor::Min, &InlayHint { label: InlayHintLabel::String(" a ".to_string()), - position: text::Anchor::MIN, + position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()), padding_left: true, padding_right: true, tooltip: None, @@ -1434,10 +1434,10 @@ mod tests { assert_eq!( Inlay::hint( InlayId::Hint(0), - Anchor::min(), + Anchor::Min, &InlayHint { label: InlayHintLabel::String("🎨".to_string()), - position: text::Anchor::MIN, + position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()), padding_left: true, padding_right: true, tooltip: None, diff --git a/crates/editor/src/document_colors.rs b/crates/editor/src/document_colors.rs index a38a0527f0641ef2d622b2f33fa1e932080ad7b5..8f8b70128ffc2bb66b2147baaa53d77e40c03c25 100644 --- a/crates/editor/src/document_colors.rs +++ b/crates/editor/src/document_colors.rs @@ -8,7 +8,7 @@ use 
language::point_from_lsp; use multi_buffer::Anchor; use project::{DocumentColor, InlayId}; use settings::Settings as _; -use text::{Bias, BufferId, OffsetRangeExt as _}; +use text::{Bias, BufferId}; use ui::{App, Context, Window}; use util::post_inc; @@ -160,9 +160,9 @@ impl Editor { } let buffers_to_query = self - .visible_excerpts(true, cx) - .into_values() - .map(|(buffer, ..)| buffer) + .visible_buffers(cx) + .into_iter() + .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx)) .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id))) .filter(|editor_buffer| { let editor_buffer_id = editor_buffer.read(cx).remote_id(); @@ -184,9 +184,9 @@ impl Editor { buffers_to_query .into_iter() .filter_map(|buffer| { - let buffer_id = buffer.read(cx).remote_id(); + let buffer_snapshot = buffer.read(cx).snapshot(); let colors_task = lsp_store.document_colors(buffer, cx)?; - Some(async move { (buffer_id, colors_task.await) }) + Some(async move { (buffer_snapshot, colors_task.await) }) }) .collect::>() }) @@ -200,40 +200,21 @@ impl Editor { if all_colors.is_empty() { return; } - let Ok((multi_buffer_snapshot, editor_excerpts)) = editor.update(cx, |editor, cx| { - let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx); - let editor_excerpts = multi_buffer_snapshot.excerpts().fold( - HashMap::default(), - |mut acc, (excerpt_id, buffer_snapshot, excerpt_range)| { - let excerpt_data = acc - .entry(buffer_snapshot.remote_id()) - .or_insert_with(Vec::new); - let excerpt_point_range = - excerpt_range.context.to_point_utf16(buffer_snapshot); - excerpt_data.push(( - excerpt_id, - buffer_snapshot.clone(), - excerpt_point_range, - )); - acc - }, - ); - (multi_buffer_snapshot, editor_excerpts) - }) else { + let Some(multi_buffer_snapshot) = editor + .update(cx, |editor, cx| editor.buffer.read(cx).snapshot(cx)) + .ok() + else { return; }; let mut new_editor_colors: HashMap, DocumentColor)>> = HashMap::default(); - for (buffer_id, colors) in 
all_colors { - let Some(excerpts) = editor_excerpts.get(&buffer_id) else { - continue; - }; + for (buffer_snapshot, colors) in all_colors { match colors { Ok(colors) => { if colors.colors.is_empty() { new_editor_colors - .entry(buffer_id) + .entry(buffer_snapshot.remote_id()) .or_insert_with(Vec::new) .clear(); } else { @@ -241,41 +222,33 @@ impl Editor { let color_start = point_from_lsp(color.lsp_range.start); let color_end = point_from_lsp(color.lsp_range.end); - for (excerpt_id, buffer_snapshot, excerpt_range) in excerpts { - if !excerpt_range.contains(&color_start.0) - || !excerpt_range.contains(&color_end.0) - { - continue; - } - let start = buffer_snapshot.anchor_before( - buffer_snapshot.clip_point_utf16(color_start, Bias::Left), - ); - let end = buffer_snapshot.anchor_after( - buffer_snapshot.clip_point_utf16(color_end, Bias::Right), - ); - let Some(range) = multi_buffer_snapshot - .anchor_range_in_excerpt(*excerpt_id, start..end) - else { - continue; - }; - - let new_buffer_colors = - new_editor_colors.entry(buffer_id).or_insert_with(Vec::new); - - let (Ok(i) | Err(i)) = - new_buffer_colors.binary_search_by(|(probe, _)| { - probe - .start - .cmp(&range.start, &multi_buffer_snapshot) - .then_with(|| { - probe - .end - .cmp(&range.end, &multi_buffer_snapshot) - }) - }); - new_buffer_colors.insert(i, (range, color)); - break; - } + let Some(range) = multi_buffer_snapshot + .buffer_anchor_range_to_anchor_range( + buffer_snapshot.anchor_range_outside( + buffer_snapshot + .clip_point_utf16(color_start, Bias::Left) + ..buffer_snapshot + .clip_point_utf16(color_end, Bias::Right), + ), + ) + else { + continue; + }; + + let new_buffer_colors = new_editor_colors + .entry(buffer_snapshot.remote_id()) + .or_insert_with(Vec::new); + + let (Ok(i) | Err(i)) = + new_buffer_colors.binary_search_by(|(probe, _)| { + probe + .start + .cmp(&range.start, &multi_buffer_snapshot) + .then_with(|| { + probe.end.cmp(&range.end, &multi_buffer_snapshot) + }) + }); + 
new_buffer_colors.insert(i, (range, color)); } } } diff --git a/crates/editor/src/document_symbols.rs b/crates/editor/src/document_symbols.rs index 0668a034c8755a8702e31ec3a060b7f3b79c6829..ef9159788a7a5c2b2c317015219090fdae6a4944 100644 --- a/crates/editor/src/document_symbols.rs +++ b/crates/editor/src/document_symbols.rs @@ -62,10 +62,10 @@ impl Editor { multi_buffer_snapshot: &MultiBufferSnapshot, cx: &Context, ) -> bool { - let Some(excerpt) = multi_buffer_snapshot.excerpt_containing(cursor..cursor) else { + let Some((anchor, _)) = multi_buffer_snapshot.anchor_to_buffer_anchor(cursor) else { return false; }; - let Some(buffer) = self.buffer.read(cx).buffer(excerpt.buffer_id()) else { + let Some(buffer) = self.buffer.read(cx).buffer(anchor.buffer_id) else { return false; }; lsp_symbols_enabled(buffer.read(cx), cx) @@ -77,19 +77,12 @@ impl Editor { &self, cursor: Anchor, multi_buffer_snapshot: &MultiBufferSnapshot, - cx: &Context, + _cx: &Context, ) -> Option<(BufferId, Vec>)> { - let excerpt = multi_buffer_snapshot.excerpt_containing(cursor..cursor)?; - let excerpt_id = excerpt.id(); - let buffer_id = excerpt.buffer_id(); - if Some(buffer_id) != cursor.text_anchor.buffer_id { - return None; - } - let buffer = self.buffer.read(cx).buffer(buffer_id)?; - let buffer_snapshot = buffer.read(cx).snapshot(); - let cursor_text_anchor = cursor.text_anchor; - - let all_items = self.lsp_document_symbols.get(&buffer_id)?; + let (cursor_text_anchor, buffer) = multi_buffer_snapshot.anchor_to_buffer_anchor(cursor)?; + let all_items = self + .lsp_document_symbols + .get(&cursor_text_anchor.buffer_id)?; if all_items.is_empty() { return None; } @@ -97,34 +90,36 @@ impl Editor { let mut symbols = all_items .iter() .filter(|item| { - item.range - .start - .cmp(&cursor_text_anchor, &buffer_snapshot) - .is_le() - && item - .range - .end - .cmp(&cursor_text_anchor, &buffer_snapshot) - .is_ge() + item.range.start.cmp(&cursor_text_anchor, buffer).is_le() + && 
item.range.end.cmp(&cursor_text_anchor, buffer).is_ge() }) - .map(|item| OutlineItem { - depth: item.depth, - range: Anchor::range_in_buffer(excerpt_id, item.range.clone()), - source_range_for_text: Anchor::range_in_buffer( - excerpt_id, - item.source_range_for_text.clone(), - ), - text: item.text.clone(), - highlight_ranges: item.highlight_ranges.clone(), - name_ranges: item.name_ranges.clone(), - body_range: item - .body_range - .as_ref() - .map(|r| Anchor::range_in_buffer(excerpt_id, r.clone())), - annotation_range: item - .annotation_range - .as_ref() - .map(|r| Anchor::range_in_buffer(excerpt_id, r.clone())), + .filter_map(|item| { + let range_start = multi_buffer_snapshot.anchor_in_buffer(item.range.start)?; + let range_end = multi_buffer_snapshot.anchor_in_buffer(item.range.end)?; + let source_range_for_text_start = + multi_buffer_snapshot.anchor_in_buffer(item.source_range_for_text.start)?; + let source_range_for_text_end = + multi_buffer_snapshot.anchor_in_buffer(item.source_range_for_text.end)?; + Some(OutlineItem { + depth: item.depth, + range: range_start..range_end, + source_range_for_text: source_range_for_text_start..source_range_for_text_end, + text: item.text.clone(), + highlight_ranges: item.highlight_ranges.clone(), + name_ranges: item.name_ranges.clone(), + body_range: item.body_range.as_ref().and_then(|r| { + Some( + multi_buffer_snapshot.anchor_in_buffer(r.start)? + ..multi_buffer_snapshot.anchor_in_buffer(r.end)?, + ) + }), + annotation_range: item.annotation_range.as_ref().and_then(|r| { + Some( + multi_buffer_snapshot.anchor_in_buffer(r.start)? 
+ ..multi_buffer_snapshot.anchor_in_buffer(r.end)?, + ) + }), + }) }) .collect::>(); @@ -135,7 +130,7 @@ impl Editor { retain }); - Some((buffer_id, symbols)) + Some((buffer.remote_id(), symbols)) } /// Fetches document symbols from the LSP for buffers that have the setting @@ -155,9 +150,10 @@ impl Editor { }; let buffers_to_query = self - .visible_excerpts(true, cx) + .visible_buffers(cx) .into_iter() - .filter_map(|(_, (buffer, _, _))| { + .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx)) + .filter_map(|buffer| { let id = buffer.read(cx).remote_id(); if for_buffer.is_none_or(|target| target == id) && lsp_symbols_enabled(buffer.read(cx), cx) diff --git a/crates/editor/src/edit_prediction_tests.rs b/crates/editor/src/edit_prediction_tests.rs index 52939a9e5a8fd1a35a3a3c0bcd2a04b893bd6628..d1e326bc93b8052f3ae089c211e65eb3ef020fdf 100644 --- a/crates/editor/src/edit_prediction_tests.rs +++ b/crates/editor/src/edit_prediction_tests.rs @@ -7,7 +7,7 @@ use gpui::{ use indoc::indoc; use language::EditPredictionsMode; use language::{Buffer, CodeLabel}; -use multi_buffer::{Anchor, ExcerptId, MultiBufferSnapshot, ToPoint}; +use multi_buffer::{Anchor, MultiBufferSnapshot, ToPoint}; use project::{Completion, CompletionResponse, CompletionSource}; use std::{ ops::Range, @@ -1242,15 +1242,14 @@ struct FakeCompletionMenuProvider; impl CompletionProvider for FakeCompletionMenuProvider { fn completions( &self, - _excerpt_id: ExcerptId, - _buffer: &Entity, + buffer: &Entity, _buffer_position: text::Anchor, _trigger: CompletionContext, _window: &mut Window, - _cx: &mut Context, + cx: &mut Context, ) -> Task>> { let completion = Completion { - replace_range: text::Anchor::MIN..text::Anchor::MAX, + replace_range: text::Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id()), new_text: "fake_completion".to_string(), label: CodeLabel::plain("fake_completion".to_string(), None), documentation: None, diff --git a/crates/editor/src/editor.rs 
b/crates/editor/src/editor.rs index 76ec95928dc729e12060e75f8ec7d61197624c5f..e4cccf3fc5607937a2a82b2ab1089e00bbda6fa7 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -76,7 +76,7 @@ pub use linked_editing_ranges::LinkedEdits; pub use lsp::CompletionContext; pub use lsp_ext::lsp_tasks; pub use multi_buffer::{ - Anchor, AnchorRangeExt, BufferOffset, ExcerptId, ExcerptRange, MBTextSummary, MultiBuffer, + Anchor, AnchorRangeExt, BufferOffset, ExcerptRange, MBTextSummary, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16, MultiBufferSnapshot, PathKey, RowInfo, ToOffset, ToPoint, }; @@ -150,7 +150,8 @@ use markdown::Markdown; use mouse_context_menu::MouseContextMenu; use movement::TextLayoutDetails; use multi_buffer::{ - ExcerptInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, + ExcerptBoundaryInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, + MultiBufferRow, }; use parking_lot::Mutex; use persistence::EditorDb; @@ -640,6 +641,7 @@ pub(crate) enum EditDisplayMode { enum EditPrediction { Edit { + // TODO could be a language::Anchor? edits: Vec<(Range, Arc)>, /// Predicted cursor position as (anchor, offset_from_anchor). 
/// The anchor is in multibuffer coordinates; after applying edits, @@ -887,7 +889,8 @@ pub trait Addon: 'static { fn render_buffer_header_controls( &self, - _: &ExcerptInfo, + _: &ExcerptBoundaryInfo, + _: &language::BufferSnapshot, _: &Window, _: &App, ) -> Option { @@ -1340,7 +1343,7 @@ pub struct Editor { suppress_selection_callback: bool, applicable_language_settings: HashMap, LanguageSettings>, accent_data: Option, - bracket_fetched_tree_sitter_chunks: HashMap>>, + bracket_fetched_tree_sitter_chunks: HashMap, HashSet>>, semantic_token_state: SemanticTokenState, pub(crate) refresh_matching_bracket_highlights_task: Task<()>, refresh_document_symbols_task: Shared>, @@ -1763,15 +1766,13 @@ impl ClipboardSelection { project.absolute_path(&project_path, cx) }); - let line_range = file_path.as_ref().and_then(|_| { - let (_, start_point, start_excerpt_id) = buffer.point_to_buffer_point(range.start)?; - let (_, end_point, end_excerpt_id) = buffer.point_to_buffer_point(range.end)?; - if start_excerpt_id == end_excerpt_id { - Some(start_point.row..=end_point.row) - } else { - None - } - }); + let line_range = if file_path.is_some() { + buffer + .range_to_buffer_range(range) + .map(|(_, buffer_range)| buffer_range.start.row..=buffer_range.end.row) + } else { + None + }; Self { len, @@ -1852,9 +1853,8 @@ pub enum JumpData { line_offset_from_top: u32, }, MultiBufferPoint { - excerpt_id: ExcerptId, + anchor: language::Anchor, position: Point, - anchor: text::Anchor, line_offset_from_top: u32, }, } @@ -1990,17 +1990,21 @@ impl Editor { if !self.mode.is_full() { return; } - let multi_buffer = display_snapshot.buffer_snapshot(); + let multi_buffer = display_snapshot.buffer_snapshot().clone(); let scroll_anchor = self .scroll_manager .native_anchor(display_snapshot, cx) .anchor; - let Some((excerpt_id, _, buffer)) = multi_buffer.as_singleton() else { + let Some(buffer_snapshot) = multi_buffer.as_singleton() else { return; }; - let buffer = buffer.clone(); - let 
buffer_visible_start = scroll_anchor.text_anchor.to_point(&buffer); + let buffer = buffer_snapshot.clone(); + let Some((buffer_visible_start, _)) = multi_buffer.anchor_to_buffer_anchor(scroll_anchor) + else { + return; + }; + let buffer_visible_start = buffer_visible_start.to_point(&buffer); let max_row = buffer.max_point().row; let start_row = buffer_visible_start.row.min(max_row); let end_row = (buffer_visible_start.row + 10).min(max_row); @@ -2014,22 +2018,24 @@ impl Editor { Some(syntax.as_ref()), ) .into_iter() - .map(|outline_item| OutlineItem { - depth: outline_item.depth, - range: Anchor::range_in_buffer(excerpt_id, outline_item.range), - source_range_for_text: Anchor::range_in_buffer( - excerpt_id, - outline_item.source_range_for_text, - ), - text: outline_item.text, - highlight_ranges: outline_item.highlight_ranges, - name_ranges: outline_item.name_ranges, - body_range: outline_item - .body_range - .map(|range| Anchor::range_in_buffer(excerpt_id, range)), - annotation_range: outline_item - .annotation_range - .map(|range| Anchor::range_in_buffer(excerpt_id, range)), + .filter_map(|outline_item| { + Some(OutlineItem { + depth: outline_item.depth, + range: multi_buffer + .buffer_anchor_range_to_anchor_range(outline_item.range)?, + source_range_for_text: multi_buffer.buffer_anchor_range_to_anchor_range( + outline_item.source_range_for_text, + )?, + text: outline_item.text, + highlight_ranges: outline_item.highlight_ranges, + name_ranges: outline_item.name_ranges, + body_range: outline_item.body_range.and_then(|range| { + multi_buffer.buffer_anchor_range_to_anchor_range(range) + }), + annotation_range: outline_item.annotation_range.and_then(|range| { + multi_buffer.buffer_anchor_range_to_anchor_range(range) + }), + }) }) .collect() }); @@ -3024,7 +3030,10 @@ impl Editor { fn edit_prediction_cursor_popover_prefers_preview( &self, completion: &EditPredictionState, + cx: &App, ) -> bool { + let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx); + match 
&completion.completion { EditPrediction::Edit { edits, snapshot, .. @@ -3033,8 +3042,13 @@ impl Editor { let mut end_row: Option = None; for (range, text) in edits { - let edit_start_row = range.start.text_anchor.to_point(snapshot).row; - let old_end_row = range.end.text_anchor.to_point(snapshot).row; + let Some((_, range)) = + multibuffer_snapshot.anchor_range_to_buffer_anchor_range(range.clone()) + else { + continue; + }; + let edit_start_row = range.start.to_point(snapshot).row; + let old_end_row = range.end.to_point(snapshot).row; let inserted_newline_count = text .as_ref() .chars() @@ -3083,7 +3097,7 @@ impl Editor { .active_edit_prediction .as_ref() .filter(|completion| { - self.edit_prediction_cursor_popover_prefers_preview(completion) + self.edit_prediction_cursor_popover_prefers_preview(completion, cx) }) .map_or(EditPredictionKeybindAction::Accept, |_| { EditPredictionKeybindAction::Preview @@ -3320,13 +3334,12 @@ impl Editor { self.buffer.read(cx).read(cx).file_at(point).cloned() } - pub fn active_excerpt( - &self, - cx: &App, - ) -> Option<(ExcerptId, Entity, Range)> { - self.buffer - .read(cx) - .excerpt_containing(self.selections.newest_anchor().head(), cx) + pub fn active_buffer(&self, cx: &App) -> Option> { + let multibuffer = self.buffer.read(cx); + let snapshot = multibuffer.snapshot(cx); + let (anchor, _) = + snapshot.anchor_to_buffer_anchor(self.selections.newest_anchor().head())?; + multibuffer.buffer(anchor.buffer_id) } pub fn mode(&self) -> &EditorMode { @@ -3695,8 +3708,8 @@ impl Editor { } if local { - if let Some(buffer_id) = new_cursor_position.text_anchor.buffer_id { - self.register_buffer(buffer_id, cx); + if let Some((anchor, _)) = buffer.anchor_to_buffer_anchor(new_cursor_position) { + self.register_buffer(anchor.buffer_id, cx); } let mut context_menu = self.context_menu.borrow_mut(); @@ -3778,12 +3791,13 @@ impl Editor { if selections.len() == 1 { cx.emit(SearchEvent::ActiveMatchChanged) } - if local && let Some((_, _, 
buffer_snapshot)) = buffer.as_singleton() { + if local && let Some(buffer_snapshot) = buffer.as_singleton() { let inmemory_selections = selections .iter() .map(|s| { - text::ToPoint::to_point(&s.range().start.text_anchor, buffer_snapshot) - ..text::ToPoint::to_point(&s.range().end.text_anchor, buffer_snapshot) + let start = s.range().start.text_anchor_in(buffer_snapshot); + let end = s.range().end.text_anchor_in(buffer_snapshot); + (start..end).to_point(buffer_snapshot) }) .collect(); self.update_restoration_data(cx, |data| { @@ -3829,7 +3843,6 @@ impl Editor { fn folds_did_change(&mut self, cx: &mut Context) { use text::ToOffset as _; - use text::ToPoint as _; if self.mode.is_minimap() || WorkspaceSettings::get(None, cx).restore_on_startup @@ -3838,21 +3851,18 @@ impl Editor { return; } - if !self.buffer().read(cx).is_singleton() { - return; - } - let display_snapshot = self .display_map .update(cx, |display_map, cx| display_map.snapshot(cx)); - let Some((.., snapshot)) = display_snapshot.buffer_snapshot().as_singleton() else { + let Some(buffer_snapshot) = display_snapshot.buffer_snapshot().as_singleton() else { return; }; let inmemory_folds = display_snapshot .folds_in_range(MultiBufferOffset(0)..display_snapshot.buffer_snapshot().len()) .map(|fold| { - fold.range.start.text_anchor.to_point(&snapshot) - ..fold.range.end.text_anchor.to_point(&snapshot) + let start = fold.range.start.text_anchor_in(buffer_snapshot); + let end = fold.range.end.text_anchor_in(buffer_snapshot); + (start..end).to_point(buffer_snapshot) }) .collect(); self.update_restoration_data(cx, |data| { @@ -3876,8 +3886,16 @@ impl Editor { let db_folds = display_snapshot .folds_in_range(MultiBufferOffset(0)..display_snapshot.buffer_snapshot().len()) .map(|fold| { - let start = fold.range.start.text_anchor.to_offset(&snapshot); - let end = fold.range.end.text_anchor.to_offset(&snapshot); + let start = fold + .range + .start + .text_anchor_in(buffer_snapshot) + .to_offset(buffer_snapshot); + let 
end = fold + .range + .end + .text_anchor_in(buffer_snapshot) + .to_offset(buffer_snapshot); // Extract fingerprints - content at fold boundaries for validation on restore // Both fingerprints must be INSIDE the fold to avoid capturing surrounding @@ -3886,12 +3904,14 @@ impl Editor { // end_fp: last min(32, fold_len) bytes of fold content // Clip to character boundaries to handle multibyte UTF-8 characters. let fold_len = end - start; - let start_fp_end = snapshot + let start_fp_end = buffer_snapshot .clip_offset(start + std::cmp::min(FINGERPRINT_LEN, fold_len), Bias::Left); - let start_fp: String = snapshot.text_for_range(start..start_fp_end).collect(); - let end_fp_start = snapshot + let start_fp: String = buffer_snapshot + .text_for_range(start..start_fp_end) + .collect(); + let end_fp_start = buffer_snapshot .clip_offset(end.saturating_sub(FINGERPRINT_LEN).max(start), Bias::Right); - let end_fp: String = snapshot.text_for_range(end_fp_start..end).collect(); + let end_fp: String = buffer_snapshot.text_for_range(end_fp_start..end).collect(); (start, end, start_fp, end_fp) }) @@ -4654,30 +4674,31 @@ impl Editor { fn linked_editing_ranges_for( &self, - selection: Range, + query_range: Range, cx: &App, ) -> Option, Vec>>> { + use text::ToOffset as TO; + if self.linked_edit_ranges.is_empty() { return None; } - let ((base_range, linked_ranges), buffer_snapshot, buffer) = - selection.end.buffer_id.and_then(|end_buffer_id| { - if selection.start.buffer_id != Some(end_buffer_id) { - return None; - } - let buffer = self.buffer.read(cx).buffer(end_buffer_id)?; - let snapshot = buffer.read(cx).snapshot(); - self.linked_edit_ranges - .get(end_buffer_id, selection.start..selection.end, &snapshot) - .map(|ranges| (ranges, snapshot, buffer)) - })?; - use text::ToOffset as TO; + if query_range.start.buffer_id != query_range.end.buffer_id { + return None; + }; + let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx); + let buffer = 
self.buffer.read(cx).buffer(query_range.end.buffer_id)?; + let buffer_snapshot = buffer.read(cx).snapshot(); + let (base_range, linked_ranges) = self.linked_edit_ranges.get( + buffer_snapshot.remote_id(), + query_range.clone(), + &buffer_snapshot, + )?; // find offset from the start of current range to current cursor position let start_byte_offset = TO::to_offset(&base_range.start, &buffer_snapshot); - let start_offset = TO::to_offset(&selection.start, &buffer_snapshot); + let start_offset = TO::to_offset(&query_range.start, &buffer_snapshot); let start_difference = start_offset - start_byte_offset; - let end_offset = TO::to_offset(&selection.end, &buffer_snapshot); + let end_offset = TO::to_offset(&query_range.end, &buffer_snapshot); let end_difference = end_offset - start_byte_offset; // Current range has associated linked ranges. @@ -4690,13 +4711,22 @@ impl Editor { continue; } if self.selections.disjoint_anchor_ranges().any(|s| { - if s.start.text_anchor.buffer_id != selection.start.buffer_id - || s.end.text_anchor.buffer_id != selection.end.buffer_id + let Some((selection_start, _)) = + multibuffer_snapshot.anchor_to_buffer_anchor(s.start) + else { + return false; + }; + let Some((selection_end, _)) = multibuffer_snapshot.anchor_to_buffer_anchor(s.end) + else { + return false; + }; + if selection_start.buffer_id != query_range.start.buffer_id + || selection_end.buffer_id != query_range.end.buffer_id { return false; } - TO::to_offset(&s.start.text_anchor, &buffer_snapshot) <= end_offset - && TO::to_offset(&s.end.text_anchor, &buffer_snapshot) >= start_offset + TO::to_offset(&selection_start, &buffer_snapshot) <= end_offset + && TO::to_offset(&selection_end, &buffer_snapshot) >= start_offset }) { continue; } @@ -5015,21 +5045,26 @@ impl Editor { if !self.linked_edit_ranges.is_empty() { let start_anchor = snapshot.anchor_before(selection.start); + let classifier = snapshot + .char_classifier_at(start_anchor) + .scope_context(Some(CharScopeContext::LinkedEdit)); 
- let is_word_char = text.chars().next().is_none_or(|char| { - let classifier = snapshot - .char_classifier_at(start_anchor.to_offset(&snapshot)) - .scope_context(Some(CharScopeContext::LinkedEdit)); - classifier.is_word(char) - }); - let is_dot = text.as_ref() == "."; - let should_apply_linked_edit = is_word_char || is_dot; + if let Some((_, anchor_range)) = + snapshot.anchor_range_to_buffer_anchor_range(start_anchor..anchor) + { + let is_word_char = text + .chars() + .next() + .is_none_or(|char| classifier.is_word(char)); - if should_apply_linked_edit { - let anchor_range = start_anchor.text_anchor..anchor.text_anchor; - linked_edits.push(&self, anchor_range, text.clone(), cx); - } else { - clear_linked_edit_ranges = true; + let is_dot = text.as_ref() == "."; + let should_apply_linked_edit = is_word_char || is_dot; + + if should_apply_linked_edit { + linked_edits.push(&self, anchor_range, text.clone(), cx); + } else { + clear_linked_edit_ranges = true; + } } } @@ -5522,7 +5557,7 @@ impl Editor { let row = cursor.row; let point = Point::new(row, 0); - let Some((buffer_handle, buffer_point, _)) = + let Some((buffer_handle, buffer_point)) = self.buffer.read(cx).point_to_buffer_point(point, cx) else { continue; @@ -5662,12 +5697,16 @@ impl Editor { /// Collects linked edits for the current selections, pairing each linked /// range with `text`. 
pub fn linked_edits_for_selections(&self, text: Arc, cx: &App) -> LinkedEdits { + let multibuffer_snapshot = self.buffer().read(cx).snapshot(cx); let mut linked_edits = LinkedEdits::new(); if !self.linked_edit_ranges.is_empty() { for selection in self.selections.disjoint_anchors() { - let start = selection.start.text_anchor; - let end = selection.end.text_anchor; - linked_edits.push(self, start..end, text.clone(), cx); + let Some((_, range)) = + multibuffer_snapshot.anchor_range_to_buffer_anchor_range(selection.range()) + else { + continue; + }; + linked_edits.push(self, range, text.clone(), cx); } } linked_edits @@ -5898,53 +5937,54 @@ impl Editor { } } - pub fn visible_excerpts( - &self, - lsp_related_only: bool, - cx: &mut Context, - ) -> HashMap, clock::Global, Range)> { - let project = self.project().cloned(); - let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + pub fn is_lsp_relevant(&self, file: Option<&Arc>, cx: &App) -> bool { + let Some(project) = self.project() else { + return false; + }; + let Some(buffer_file) = project::File::from_dyn(file) else { + return false; + }; + let Some(entry_id) = buffer_file.project_entry_id() else { + return false; + }; + let project = project.read(cx); + let Some(buffer_worktree) = project.worktree_for_id(buffer_file.worktree_id(cx), cx) else { + return false; + }; + let Some(worktree_entry) = buffer_worktree.read(cx).entry_for_id(entry_id) else { + return false; + }; + !worktree_entry.is_ignored + } + + pub fn visible_buffers(&self, cx: &mut Context) -> Vec> { + let display_snapshot = self.display_snapshot(cx); + let visible_range = self.multi_buffer_visible_range(&display_snapshot, cx); let multi_buffer = self.buffer().read(cx); - let multi_buffer_snapshot = multi_buffer.snapshot(cx); - multi_buffer_snapshot - .range_to_buffer_ranges( - self.multi_buffer_visible_range(&display_snapshot, cx) - .to_inclusive(), - ) + display_snapshot + .buffer_snapshot() + 
.range_to_buffer_ranges(visible_range) .into_iter() .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()) - .filter_map(|(buffer, excerpt_visible_range, excerpt_id)| { - if !lsp_related_only { - return Some(( - excerpt_id, - ( - multi_buffer.buffer(buffer.remote_id()).unwrap(), - buffer.version().clone(), - excerpt_visible_range.start.0..excerpt_visible_range.end.0, - ), - )); - } + .filter_map(|(buffer_snapshot, _, _)| multi_buffer.buffer(buffer_snapshot.remote_id())) + .collect() + } - let project = project.as_ref()?.read(cx); - let buffer_file = project::File::from_dyn(buffer.file())?; - let buffer_worktree = project.worktree_for_id(buffer_file.worktree_id(cx), cx)?; - let worktree_entry = buffer_worktree - .read(cx) - .entry_for_id(buffer_file.project_entry_id()?)?; - if worktree_entry.is_ignored { - None - } else { - Some(( - excerpt_id, - ( - multi_buffer.buffer(buffer.remote_id()).unwrap(), - buffer.version().clone(), - excerpt_visible_range.start.0..excerpt_visible_range.end.0, - ), - )) - } - }) + pub fn visible_buffer_ranges( + &self, + cx: &mut Context, + ) -> Vec<( + BufferSnapshot, + Range, + ExcerptRange, + )> { + let display_snapshot = self.display_snapshot(cx); + let visible_range = self.multi_buffer_visible_range(&display_snapshot, cx); + display_snapshot + .buffer_snapshot() + .range_to_buffer_ranges(visible_range) + .into_iter() + .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()) .collect() } @@ -6069,17 +6109,19 @@ impl Editor { .newest_anchor() .start .bias_right(&multibuffer_snapshot); - if position.diff_base_anchor.is_some() { + + if position.diff_base_anchor().is_some() { return; } - let buffer_position = multibuffer_snapshot.anchor_before(position); - let Some(buffer) = buffer_position - .text_anchor - .buffer_id - .and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)) + let multibuffer_position = multibuffer_snapshot.anchor_before(position); + let Some((buffer_position, _)) = + 
multibuffer_snapshot.anchor_to_buffer_anchor(multibuffer_position) else { return; }; + let Some(buffer) = self.buffer.read(cx).buffer(buffer_position.buffer_id) else { + return; + }; let buffer_snapshot = buffer.read(cx).snapshot(); let menu_is_open = matches!( @@ -6088,9 +6130,9 @@ impl Editor { ); let language = buffer_snapshot - .language_at(buffer_position.text_anchor) + .language_at(buffer_position) .map(|language| language.name()); - let language_settings = multibuffer_snapshot.language_settings_at(buffer_position, cx); + let language_settings = multibuffer_snapshot.language_settings_at(multibuffer_position, cx); let completion_settings = language_settings.completions.clone(); let show_completions_on_input = self @@ -6101,7 +6143,7 @@ impl Editor { } let query: Option> = - Self::completion_query(&multibuffer_snapshot, buffer_position) + Self::completion_query(&multibuffer_snapshot, multibuffer_position) .map(|query| query.into()); drop(multibuffer_snapshot); @@ -6143,7 +6185,7 @@ impl Editor { if filter_completions { menu.filter( query.clone().unwrap_or_default(), - buffer_position.text_anchor, + buffer_position, &buffer, provider.clone(), window, @@ -6177,12 +6219,6 @@ impl Editor { } }; - let Anchor { - excerpt_id: buffer_excerpt_id, - text_anchor: buffer_position, - .. 
- } = buffer_position; - let (word_replace_range, word_to_exclude) = if let (word_range, Some(CharKind::Word)) = buffer_snapshot.surrounding_word(buffer_position, None) { @@ -6225,7 +6261,7 @@ impl Editor { trigger.as_ref().is_none_or(|trigger| { provider.is_completion_trigger( &buffer, - position.text_anchor, + buffer_position, trigger, trigger_in_words, cx, @@ -6246,14 +6282,7 @@ impl Editor { trigger_character, }; - provider.completions( - buffer_excerpt_id, - &buffer, - buffer_position, - completion_context, - window, - cx, - ) + provider.completions(&buffer, buffer_position, completion_context, window, cx) } else { Task::ready(Ok(Vec::new())) }; @@ -6593,42 +6622,42 @@ impl Editor { cx.stop_propagation(); let buffer_handle = completions_menu.buffer.clone(); + let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx); + let (initial_position, _) = + multibuffer_snapshot.anchor_to_buffer_anchor(completions_menu.initial_position)?; let CompletionEdit { new_text, snippet, replace_range, - } = process_completion_for_edit( - &completion, - intent, - &buffer_handle, - &completions_menu.initial_position.text_anchor, - cx, - ); + } = process_completion_for_edit(&completion, intent, &buffer_handle, &initial_position, cx); - let buffer = buffer_handle.read(cx); - let snapshot = self.buffer.read(cx).snapshot(cx); - let newest_anchor = self.selections.newest_anchor(); - let replace_range_multibuffer = { - let mut excerpt = snapshot.excerpt_containing(newest_anchor.range()).unwrap(); - excerpt.map_range_from_buffer(replace_range.clone()) + let buffer = buffer_handle.read(cx).snapshot(); + let newest_selection = self.selections.newest_anchor(); + + let Some(replace_range_multibuffer) = + multibuffer_snapshot.buffer_anchor_range_to_anchor_range(replace_range.clone()) + else { + return None; }; - if snapshot.buffer_id_for_anchor(newest_anchor.head()) != Some(buffer.remote_id()) { + + let Some((buffer_snapshot, newest_range_buffer)) = + 
multibuffer_snapshot.anchor_range_to_buffer_anchor_range(newest_selection.range()) + else { return None; - } + }; let old_text = buffer .text_for_range(replace_range.clone()) .collect::(); - let lookbehind = newest_anchor + let lookbehind = newest_range_buffer .start - .text_anchor - .to_offset(buffer) - .saturating_sub(replace_range.start.0); + .to_offset(buffer_snapshot) + .saturating_sub(replace_range.start.to_offset(&buffer_snapshot)); let lookahead = replace_range .end - .0 - .saturating_sub(newest_anchor.end.text_anchor.to_offset(buffer)); + .to_offset(&buffer_snapshot) + .saturating_sub(newest_range_buffer.end.to_offset(&buffer)); let prefix = &old_text[..old_text.len().saturating_sub(lookahead)]; let suffix = &old_text[lookbehind.min(old_text.len())..]; @@ -6641,34 +6670,40 @@ impl Editor { let text: Arc = new_text.clone().into(); for selection in &selections { - let range = if selection.id == newest_anchor.id { + let range = if selection.id == newest_selection.id { replace_range_multibuffer.clone() } else { let mut range = selection.range(); // if prefix is present, don't duplicate it - if snapshot.contains_str_at(range.start.saturating_sub_usize(lookbehind), prefix) { + if multibuffer_snapshot + .contains_str_at(range.start.saturating_sub_usize(lookbehind), prefix) + { range.start = range.start.saturating_sub_usize(lookbehind); // if suffix is also present, mimic the newest cursor and replace it - if selection.id != newest_anchor.id - && snapshot.contains_str_at(range.end, suffix) + if selection.id != newest_selection.id + && multibuffer_snapshot.contains_str_at(range.end, suffix) { range.end += lookahead; } } - range + range.to_anchors(&multibuffer_snapshot) }; ranges.push(range.clone()); - let start_anchor = snapshot.anchor_before(range.start); - let end_anchor = snapshot.anchor_after(range.end); - let anchor_range = start_anchor.text_anchor..end_anchor.text_anchor; - all_commit_ranges.push(anchor_range.clone()); + let start_anchor = 
multibuffer_snapshot.anchor_before(range.start); + let end_anchor = multibuffer_snapshot.anchor_after(range.end); - if !self.linked_edit_ranges.is_empty() { - linked_edits.push(&self, anchor_range, text.clone(), cx); + if let Some((buffer_snapshot_2, anchor_range)) = + multibuffer_snapshot.anchor_range_to_buffer_anchor_range(start_anchor..end_anchor) + && buffer_snapshot_2.remote_id() == buffer_snapshot.remote_id() + { + all_commit_ranges.push(anchor_range.clone()); + if !self.linked_edit_ranges.is_empty() { + linked_edits.push(&self, anchor_range, text.clone(), cx); + } } } @@ -6687,8 +6722,12 @@ impl Editor { let tx_id = self.transact(window, cx, |editor, window, cx| { if let Some(mut snippet) = snippet { snippet.text = new_text.to_string(); + let offset_ranges = ranges + .iter() + .map(|range| range.to_offset(&multibuffer_snapshot)) + .collect::>(); editor - .insert_snippet(&ranges, snippet, window, cx) + .insert_snippet(&offset_ranges, snippet, window, cx) .log_err(); } else { editor.buffer.update(cx, |multi_buffer, cx| { @@ -6703,7 +6742,10 @@ impl Editor { linked_edits.apply(cx); editor.refresh_edit_prediction(true, false, window, cx); }); - self.invalidate_autoclose_regions(&self.selections.disjoint_anchors_arc(), &snapshot); + self.invalidate_autoclose_regions( + &self.selections.disjoint_anchors_arc(), + &multibuffer_snapshot, + ); let show_new_completions_on_confirm = completion .confirm @@ -6739,7 +6781,7 @@ impl Editor { if available_commands.contains(&lsp_command.command) { Some(CodeAction { server_id: *server_id, - range: language::Anchor::MIN..language::Anchor::MIN, + range: language::Anchor::min_min_range_for_buffer(buffer.remote_id()), lsp_action: LspAction::Command(lsp_command.clone()), resolved: false, }) @@ -7069,13 +7111,9 @@ impl Editor { Some(Task::ready(Ok(()))) }) } - CodeActionsItem::CodeAction { - excerpt_id, - action, - provider, - } => { + CodeActionsItem::CodeAction { action, provider } => { let apply_code_action = - 
provider.apply_code_action(buffer, action, excerpt_id, true, window, cx); + provider.apply_code_action(buffer, action, true, window, cx); let workspace = workspace.downgrade(); Some(cx.spawn_in(window, async move |editor, cx| { let project_transaction = apply_code_action.await?; @@ -7175,17 +7213,19 @@ impl Editor { // avoid opening a new editor to display them. if let [(buffer, transaction)] = &*entries { - let excerpt = editor.update(cx, |editor, cx| { - editor - .buffer() - .read(cx) - .excerpt_containing(editor.selections.newest_anchor().head(), cx) + let cursor_excerpt = editor.update(cx, |editor, cx| { + let snapshot = editor.buffer().read(cx).snapshot(cx); + let head = editor.selections.newest_anchor().head(); + let (buffer_snapshot, excerpt_range) = snapshot.excerpt_containing(head..head)?; + if buffer_snapshot.remote_id() != buffer.read(cx).remote_id() { + return None; + } + Some(excerpt_range) })?; - if let Some((_, excerpted_buffer, excerpt_range)) = excerpt - && excerpted_buffer == *buffer - { + + if let Some(excerpt_range) = cursor_excerpt { let all_edits_within_excerpt = buffer.read_with(cx, |buffer, _| { - let excerpt_range = excerpt_range.to_offset(buffer); + let excerpt_range = excerpt_range.context.to_offset(buffer); buffer .edited_ranges_for_transaction::(transaction) .all(|range| { @@ -7207,15 +7247,21 @@ impl Editor { .read(cx) .edited_ranges_for_transaction::(transaction) .collect::>(); - let (ranges, _) = multibuffer.set_excerpts_for_path( + multibuffer.set_excerpts_for_path( PathKey::for_buffer(buffer_handle, cx), buffer_handle.clone(), - edited_ranges, + edited_ranges.clone(), multibuffer_context_lines(cx), cx, ); - - ranges_to_highlight.extend(ranges); + let snapshot = multibuffer.snapshot(cx); + let buffer_snapshot = buffer_handle.read(cx).snapshot(); + ranges_to_highlight.extend(edited_ranges.into_iter().filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range); + let start = 
snapshot.anchor_in_buffer(text_range.start)?; + let end = snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + })); } multibuffer.push_transaction(entries.iter().map(|(b, t)| (b, t)), cx); multibuffer @@ -7339,10 +7385,10 @@ impl Editor { .timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT) .await; - let (start_buffer, start, _, end, newest_selection) = this + let (start_buffer, start, _, end, _newest_selection) = this .update(cx, |this, cx| { let newest_selection = this.selections.newest_anchor().clone(); - if newest_selection.head().diff_base_anchor.is_some() { + if newest_selection.head().diff_base_anchor().is_some() { return None; } let display_snapshot = this.display_snapshot(cx); @@ -7378,7 +7424,6 @@ impl Editor { if let Some(provider_actions) = provider_actions.log_err() { actions.extend(provider_actions.into_iter().map(|action| { AvailableCodeAction { - excerpt_id: newest_selection.start.excerpt_id, action, provider: provider.clone(), } @@ -7426,8 +7471,7 @@ impl Editor { .selections .newest::(&snapshot.display_snapshot) .head(); - let Some((buffer, point, _)) = snapshot.buffer_snapshot().point_to_buffer_point(cursor) - else { + let Some((buffer, point)) = snapshot.buffer_snapshot().point_to_buffer_point(cursor) else { return; }; @@ -7612,27 +7656,13 @@ impl Editor { return; } - let cursor_buffer_snapshot = cursor_buffer.read(cx); let mut write_ranges = Vec::new(); let mut read_ranges = Vec::new(); + let multibuffer_snapshot = buffer.snapshot(cx); for highlight in highlights { - let buffer_id = cursor_buffer.read(cx).remote_id(); - for (excerpt_id, _, excerpt_range) in - buffer.excerpts_for_buffer(buffer_id, cx) + for range in + multibuffer_snapshot.buffer_range_to_excerpt_ranges(highlight.range) { - let start = highlight - .range - .start - .max(&excerpt_range.context.start, cursor_buffer_snapshot); - let end = highlight - .range - .end - .min(&excerpt_range.context.end, cursor_buffer_snapshot); - if start.cmp(&end, cursor_buffer_snapshot).is_ge() { - 
continue; - } - - let range = Anchor::range_in_buffer(excerpt_id, *start..*end); if highlight.kind == lsp::DocumentHighlightKind::WRITE { write_ranges.push(range); } else { @@ -7713,7 +7743,7 @@ impl Editor { let match_task = cx.background_spawn(async move { let buffer_ranges = multi_buffer_snapshot .range_to_buffer_ranges( - multi_buffer_range_to_query.start..=multi_buffer_range_to_query.end, + multi_buffer_range_to_query.start..multi_buffer_range_to_query.end, ) .into_iter() .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()); @@ -7731,11 +7761,11 @@ impl Editor { return Vec::default(); }; let query_range = query_range.to_anchors(&multi_buffer_snapshot); - for (buffer_snapshot, search_range, excerpt_id) in buffer_ranges { + for (buffer_snapshot, search_range, _) in buffer_ranges { match_ranges.extend( regex .search( - buffer_snapshot, + &buffer_snapshot, Some(search_range.start.0..search_range.end.0), ) .await @@ -7745,9 +7775,14 @@ impl Editor { .anchor_after(search_range.start + match_range.start); let match_end = buffer_snapshot .anchor_before(search_range.start + match_range.end); - let match_anchor_range = - Anchor::range_in_buffer(excerpt_id, match_start..match_end); - (match_anchor_range != query_range).then_some(match_anchor_range) + { + let range = multi_buffer_snapshot + .anchor_in_buffer(match_start)? 
+ ..multi_buffer_snapshot.anchor_in_buffer(match_end)?; + Some(range).filter(|match_anchor_range| { + match_anchor_range != &query_range + }) + } }), ); } @@ -8434,13 +8469,15 @@ impl Editor { return; }; - let Some((_, buffer, _)) = self - .buffer - .read(cx) - .excerpt_containing(self.selections.newest_anchor().head(), cx) + let buffer_snapshot = self.buffer.read(cx).snapshot(cx); + let Some((position, _)) = + buffer_snapshot.anchor_to_buffer_anchor(self.selections.newest_anchor().head()) else { return; }; + let Some(buffer) = self.buffer.read(cx).buffer(position.buffer_id) else { + return; + }; let extension = buffer .read(cx) @@ -8687,17 +8724,16 @@ impl Editor { } let selection = self.selections.newest_anchor(); - let cursor = selection.head(); let multibuffer = self.buffer.read(cx).snapshot(cx); + let cursor = selection.head(); + let (cursor_text_anchor, _) = multibuffer.anchor_to_buffer_anchor(cursor)?; + let buffer = self.buffer.read(cx).buffer(cursor_text_anchor.buffer_id)?; // Check project-level disable_ai setting for the current buffer - if let Some((buffer, _)) = self.buffer.read(cx).text_anchor_for_position(cursor, cx) { - if DisableAiSettings::is_ai_disabled_for_buffer(Some(&buffer), cx) { - return None; - } + if DisableAiSettings::is_ai_disabled_for_buffer(Some(&buffer), cx) { + return None; } let offset_selection = selection.map(|endpoint| endpoint.to_offset(&multibuffer)); - let excerpt_id = cursor.excerpt_id; let show_in_menu = self.show_edit_predictions_in_menu(); let completions_menu_has_precedence = !show_in_menu @@ -8728,11 +8764,8 @@ impl Editor { return None; }; - let (buffer, cursor_buffer_position) = - self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; - self.edit_prediction_settings = - self.edit_prediction_settings_at_position(&buffer, cursor_buffer_position, cx); + self.edit_prediction_settings_at_position(&buffer, cursor_text_anchor, cx); self.in_leading_whitespace = multibuffer.is_line_whitespace_upto(cursor); @@ -8755,7 
+8788,7 @@ impl Editor { } } - let edit_prediction = provider.suggest(&buffer, cursor_buffer_position, cx)?; + let edit_prediction = provider.suggest(&buffer, cursor_text_anchor, cx)?; let (completion_id, edits, predicted_cursor_position, edit_preview) = match edit_prediction { @@ -8789,7 +8822,7 @@ impl Editor { .into_iter() .flat_map(|(range, new_text)| { Some(( - multibuffer.anchor_range_in_excerpt(excerpt_id, range)?, + multibuffer.buffer_anchor_range_to_anchor_range(range)?, new_text, )) }) @@ -8799,7 +8832,7 @@ impl Editor { } let cursor_position = predicted_cursor_position.and_then(|predicted| { - let anchor = multibuffer.anchor_in_excerpt(excerpt_id, predicted.anchor)?; + let anchor = multibuffer.anchor_in_excerpt(predicted.anchor)?; Some((anchor, predicted.offset)) }); @@ -8813,7 +8846,9 @@ impl Editor { let cursor_row = cursor.to_point(&multibuffer).row; - let snapshot = multibuffer.buffer_for_excerpt(excerpt_id).cloned()?; + let snapshot = multibuffer + .buffer_for_id(cursor_text_anchor.buffer_id) + .cloned()?; let mut inlay_ids = Vec::new(); let invalidation_row_range; @@ -8960,20 +8995,14 @@ impl Editor { let snapshot = self.snapshot(window, cx); let multi_buffer_snapshot = snapshot.buffer_snapshot(); - let Some(project) = self.project() else { - return breakpoint_display_points; - }; let range = snapshot.display_point_to_point(DisplayPoint::new(range.start, 0), Bias::Left) ..snapshot.display_point_to_point(DisplayPoint::new(range.end, 0), Bias::Right); - for (buffer_snapshot, range, excerpt_id) in - multi_buffer_snapshot.range_to_buffer_ranges(range.start..=range.end) + for (buffer_snapshot, range, _) in + multi_buffer_snapshot.range_to_buffer_ranges(range.start..range.end) { - let Some(buffer) = project - .read(cx) - .buffer_for_id(buffer_snapshot.remote_id(), cx) - else { + let Some(buffer) = self.buffer().read(cx).buffer(buffer_snapshot.remote_id()) else { continue; }; let breakpoints = breakpoint_store.read(cx).breakpoints( @@ -8982,11 +9011,15 @@ 
impl Editor { buffer_snapshot.anchor_before(range.start) ..buffer_snapshot.anchor_after(range.end), ), - buffer_snapshot, + &buffer_snapshot, cx, ); for (breakpoint, state) in breakpoints { - let multi_buffer_anchor = Anchor::in_buffer(excerpt_id, breakpoint.position); + let Some(multi_buffer_anchor) = + multi_buffer_snapshot.anchor_in_excerpt(breakpoint.position) + else { + continue; + }; let position = multi_buffer_anchor .to_point(&multi_buffer_snapshot) .to_display_point(&snapshot); @@ -9764,7 +9797,14 @@ impl Editor { } let highlighted_edits = if let Some(edit_preview) = edit_preview.as_ref() { - crate::edit_prediction_edit_text(snapshot, edits, edit_preview, false, cx) + crate::edit_prediction_edit_text( + snapshot, + edits, + edit_preview, + false, + editor_snapshot.buffer_snapshot(), + cx, + ) } else { // Fallback for providers without edit_preview crate::edit_prediction_fallback_text(edits, cx) @@ -10204,7 +10244,8 @@ impl Editor { .child(div().px_1p5().child(match &prediction.completion { EditPrediction::MoveWithin { target, snapshot } => { use text::ToPoint as _; - if target.text_anchor.to_point(snapshot).row > cursor_point.row + if target.text_anchor_in(&snapshot).to_point(snapshot).row + > cursor_point.row { Icon::new(icons.down) } else { @@ -10418,19 +10459,18 @@ impl Editor { if !supports_jump { return None; } + let (target, _) = self.display_snapshot(cx).anchor_to_buffer_anchor(*target)?; Some( h_flex() .px_2() .gap_2() .flex_1() - .child( - if target.text_anchor.to_point(snapshot).row > cursor_point.row { - Icon::new(icons.down) - } else { - Icon::new(icons.up) - }, - ) + .child(if target.to_point(snapshot).row > cursor_point.row { + Icon::new(icons.down) + } else { + Icon::new(icons.up) + }) .child(Label::new("Jump to Edit")), ) } @@ -10454,12 +10494,24 @@ impl Editor { snapshot, .. 
} => { - let first_edit_row = edits.first()?.0.start.text_anchor.to_point(snapshot).row; + let first_edit_row = self + .display_snapshot(cx) + .anchor_to_buffer_anchor(edits.first()?.0.start)? + .0 + .to_point(snapshot) + .row; let (highlighted_edits, has_more_lines) = if let Some(edit_preview) = edit_preview.as_ref() { - crate::edit_prediction_edit_text(snapshot, edits, edit_preview, true, cx) - .first_line_preview() + crate::edit_prediction_edit_text( + snapshot, + edits, + edit_preview, + true, + &self.display_snapshot(cx), + cx, + ) + .first_line_preview() } else { crate::edit_prediction_fallback_text(edits, cx).first_line_preview() }; @@ -10554,21 +10606,15 @@ impl Editor { selection: Range, cx: &mut Context, ) { - let Some((_, buffer, _)) = self - .buffer() - .read(cx) - .excerpt_containing(selection.start, cx) + let buffer_snapshot = self.buffer.read(cx).snapshot(cx); + let Some((buffer_snapshot, range)) = + buffer_snapshot.anchor_range_to_buffer_anchor_range(selection.clone()) else { return; }; - let Some((_, end_buffer, _)) = self.buffer().read(cx).excerpt_containing(selection.end, cx) - else { + let Some(buffer) = self.buffer.read(cx).buffer(buffer_snapshot.remote_id()) else { return; }; - if buffer != end_buffer { - log::error!("expected anchor range to have matching buffer IDs"); - return; - } let id = post_inc(&mut self.next_completion_id); let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order; @@ -10579,7 +10625,8 @@ impl Editor { id, true, choices, - selection, + selection.start, + range, buffer, old_menu.map(|menu| menu.primary_scroll_handle()), snippet_sort_order, @@ -11697,7 +11744,7 @@ impl Editor { let buffer = self.buffer().read(cx); let ranges = buffer_ids .into_iter() - .flat_map(|buffer_id| buffer.excerpt_ranges_for_buffer(buffer_id, cx)) + .flat_map(|buffer_id| buffer.range_for_buffer(buffer_id, cx)) .collect::>(); self.restore_hunks_in_ranges(ranges, window, cx); @@ -11767,8 +11814,11 @@ impl Editor { let hunks = 
self.snapshot(window, cx).hunks_for_ranges(ranges); self.transact(window, cx, |editor, window, cx| { editor.restore_diff_hunks(hunks, cx); - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| { - selections.refresh() + let selections = editor + .selections + .all::(&editor.display_snapshot(cx)); + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select(selections); }); }); } @@ -11822,7 +11872,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if let Some(working_directory) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + if let Some(working_directory) = self.active_buffer(cx).and_then(|buffer| { let project_path = buffer.read(cx).project_path(cx)?; let project = self.project()?.read(cx); let entry = project.entry_for_path(&project_path, cx)?; @@ -11934,22 +11984,19 @@ impl Editor { snapshot: &EditorSnapshot, cx: &mut Context, ) -> Option<(Anchor, Breakpoint)> { - let buffer = self - .buffer - .read(cx) - .buffer_for_anchor(breakpoint_position, cx)?; + let (breakpoint_position, _) = snapshot + .buffer_snapshot() + .anchor_to_buffer_anchor(breakpoint_position)?; + let buffer = self.buffer.read(cx).buffer(breakpoint_position.buffer_id)?; - let enclosing_excerpt = breakpoint_position.excerpt_id; let buffer_snapshot = buffer.read(cx).snapshot(); let row = buffer_snapshot - .summary_for_anchor::(&breakpoint_position.text_anchor) + .summary_for_anchor::(&breakpoint_position) .row; - let line_len = snapshot.buffer_snapshot().line_len(MultiBufferRow(row)); - let anchor_end = snapshot - .buffer_snapshot() - .anchor_after(Point::new(row, line_len)); + let line_len = buffer_snapshot.line_len(row); + let anchor_end = buffer_snapshot.anchor_after(Point::new(row, line_len)); self.breakpoint_store .as_ref()? 
@@ -11957,7 +12004,7 @@ impl Editor { breakpoint_store .breakpoints( &buffer, - Some(breakpoint_position.text_anchor..anchor_end.text_anchor), + Some(breakpoint_position..anchor_end), &buffer_snapshot, cx, ) @@ -11970,7 +12017,7 @@ impl Editor { if breakpoint_row == row { snapshot .buffer_snapshot() - .anchor_in_excerpt(enclosing_excerpt, bp.position) + .anchor_in_excerpt(bp.position) .map(|position| (position, bp.bp.clone())) } else { None @@ -12246,20 +12293,20 @@ impl Editor { let Some(breakpoint_store) = &self.breakpoint_store else { return; }; - - let Some(buffer) = self - .buffer - .read(cx) - .buffer_for_anchor(breakpoint_position, cx) + let buffer_snapshot = self.buffer.read(cx).snapshot(cx); + let Some((position, _)) = buffer_snapshot.anchor_to_buffer_anchor(breakpoint_position) else { return; }; + let Some(buffer) = self.buffer.read(cx).buffer(position.buffer_id) else { + return; + }; breakpoint_store.update(cx, |breakpoint_store, cx| { breakpoint_store.toggle_breakpoint( buffer, BreakpointWithPosition { - position: breakpoint_position.text_anchor, + position, bp: breakpoint, }, edit_action, @@ -15484,7 +15531,7 @@ impl Editor { } self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { - s.select_ranges(vec![Anchor::min()..Anchor::min()]); + s.select_ranges(vec![Anchor::Min..Anchor::Min]); }); } @@ -15601,7 +15648,7 @@ impl Editor { pub fn select_all(&mut self, _: &SelectAll, window: &mut Window, cx: &mut Context) { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([Anchor::min()..Anchor::max()]); + s.select_ranges(vec![Anchor::Min..Anchor::Max]); }); } @@ -17026,10 +17073,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - let old_selections: Box<[_]> = self - .selections - .all::(&self.display_snapshot(cx)) - .into(); + let old_selections = 
self.selections.all_anchors(&self.display_snapshot(cx)); if old_selections.is_empty() { return; } @@ -17042,21 +17086,25 @@ impl Editor { let new_selections = old_selections .iter() .map(|selection| { - let old_range = selection.start..selection.end; - let old_range = - old_range.start.to_offset(&buffer)..old_range.end.to_offset(&buffer); - let excerpt = buffer.excerpt_containing(old_range.clone()); - - if let Some(mut excerpt) = excerpt - && let Some(node) = excerpt - .buffer() - .syntax_next_sibling(excerpt.map_range_to_buffer(old_range)) + selection.start.to_offset(&buffer)..selection.end.to_offset(&buffer); + if let Some(results) = buffer.map_excerpt_ranges( + old_range, + |buf, _excerpt_range, input_buffer_range| { + let Some(node) = buf.syntax_next_sibling(input_buffer_range) else { + return Vec::new(); + }; + vec![( + BufferOffset(node.byte_range().start) + ..BufferOffset(node.byte_range().end), + (), + )] + }, + ) && let [(new_range, _)] = results.as_slice() { - let new_range = excerpt.map_range_from_buffer( - BufferOffset(node.byte_range().start)..BufferOffset(node.byte_range().end), - ); selected_sibling = true; + let new_range = + buffer.anchor_after(new_range.start)..buffer.anchor_before(new_range.end); Selection { id: selection.id, start: new_range.start, @@ -17088,36 +17136,35 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - let old_selections: Box<[_]> = self - .selections - .all::(&self.display_snapshot(cx)) - .into(); - if old_selections.is_empty() { - return; - } + let old_selections: Arc<[_]> = self.selections.all_anchors(&self.display_snapshot(cx)); self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let buffer = self.buffer.read(cx).snapshot(cx); + let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx); let mut selected_sibling = false; let new_selections = old_selections .iter() .map(|selection| { - let old_range = selection.start..selection.end; - let old_range = - 
old_range.start.to_offset(&buffer)..old_range.end.to_offset(&buffer); - let excerpt = buffer.excerpt_containing(old_range.clone()); - - if let Some(mut excerpt) = excerpt - && let Some(node) = excerpt - .buffer() - .syntax_prev_sibling(excerpt.map_range_to_buffer(old_range)) + let old_range = selection.start.to_offset(&multibuffer_snapshot) + ..selection.end.to_offset(&multibuffer_snapshot); + if let Some(results) = multibuffer_snapshot.map_excerpt_ranges( + old_range, + |buf, _excerpt_range, input_buffer_range| { + let Some(node) = buf.syntax_prev_sibling(input_buffer_range) else { + return Vec::new(); + }; + vec![( + BufferOffset(node.byte_range().start) + ..BufferOffset(node.byte_range().end), + (), + )] + }, + ) && let [(new_range, _)] = results.as_slice() { - let new_range = excerpt.map_range_from_buffer( - BufferOffset(node.byte_range().start)..BufferOffset(node.byte_range().end), - ); selected_sibling = true; + let new_range = multibuffer_snapshot.anchor_after(new_range.start) + ..multibuffer_snapshot.anchor_before(new_range.end); Selection { id: selection.id, start: new_range.start, @@ -17474,16 +17521,21 @@ impl Editor { }; let snapshot = self.buffer.read(cx).snapshot(cx); - let excerpt_ids = selections + let excerpt_anchors = selections .iter() - .flat_map(|selection| snapshot.excerpt_ids_for_range(selection.range())) - .unique() - .sorted() + .flat_map(|selection| { + snapshot + .range_to_buffer_ranges(selection.range()) + .into_iter() + .filter_map(|(buffer_snapshot, range, _)| { + snapshot.anchor_in_excerpt(buffer_snapshot.anchor_after(range.start)) + }) + }) .collect::>(); if self.delegate_expand_excerpts { cx.emit(EditorEvent::ExpandExcerptsRequested { - excerpt_ids, + excerpt_anchors, lines, direction, }); @@ -17491,13 +17543,13 @@ impl Editor { } self.buffer.update(cx, |buffer, cx| { - buffer.expand_excerpts(excerpt_ids, lines, direction, cx) + buffer.expand_excerpts(excerpt_anchors, lines, direction, cx) }) } - pub fn expand_excerpt( + pub(crate) 
fn expand_excerpt( &mut self, - excerpt: ExcerptId, + excerpt_anchor: Anchor, direction: ExpandExcerptDirection, window: &mut Window, cx: &mut Context, @@ -17506,7 +17558,7 @@ impl Editor { if self.delegate_expand_excerpts { cx.emit(EditorEvent::ExpandExcerptsRequested { - excerpt_ids: vec![excerpt], + excerpt_anchors: vec![excerpt_anchor], lines: lines_to_expand, direction, }); @@ -17519,12 +17571,11 @@ impl Editor { if direction == ExpandExcerptDirection::Down { let multi_buffer = self.buffer.read(cx); let snapshot = multi_buffer.snapshot(cx); - if let Some(buffer_id) = snapshot.buffer_id_for_excerpt(excerpt) - && let Some(buffer) = multi_buffer.buffer(buffer_id) - && let Some(excerpt_range) = snapshot.context_range_for_excerpt(excerpt) + if let Some((buffer_snapshot, excerpt_range)) = + snapshot.excerpt_containing(excerpt_anchor..excerpt_anchor) { - let buffer_snapshot = buffer.read(cx).snapshot(); - let excerpt_end_row = Point::from_anchor(&excerpt_range.end, &buffer_snapshot).row; + let excerpt_end_row = + Point::from_anchor(&excerpt_range.context.end, &buffer_snapshot).row; let last_row = buffer_snapshot.max_point().row; let lines_below = last_row.saturating_sub(excerpt_end_row); if lines_below >= lines_to_expand { @@ -17540,14 +17591,14 @@ impl Editor { .buffer .read(cx) .snapshot(cx) - .excerpt_before(excerpt) + .excerpt_before(excerpt_anchor) .is_none() { scroll = Some(current_scroll_position); } self.buffer.update(cx, |buffer, cx| { - buffer.expand_excerpts([excerpt], lines_to_expand, direction, cx) + buffer.expand_excerpts([excerpt_anchor], lines_to_expand, direction, cx) }); if let Some(new_scroll_position) = scroll { @@ -17571,20 +17622,15 @@ impl Editor { cx: &mut Context, ) { let multibuffer = self.buffer().read(cx); - let Some(buffer) = multibuffer.as_singleton() else { - return; - }; - let Some(start) = multibuffer.buffer_point_to_anchor(&buffer, range.start, cx) else { - return; - }; - let Some(end) = multibuffer.buffer_point_to_anchor(&buffer, 
range.end, cx) else { + if !multibuffer.is_singleton() { return; }; + let anchor_range = range.to_anchors(&multibuffer.snapshot(cx)); self.change_selections( SelectionEffects::default().nav_history(true), window, cx, - |s| s.select_anchor_ranges([start..end]), + |s| s.select_anchor_ranges([anchor_range]), ); } @@ -17685,9 +17731,10 @@ impl Editor { }; let next_diagnostic_start = buffer.anchor_after(next_diagnostic.range.start); - let Some(buffer_id) = buffer.buffer_id_for_anchor(next_diagnostic_start) else { + let Some((buffer_anchor, _)) = buffer.anchor_to_buffer_anchor(next_diagnostic_start) else { return; }; + let buffer_id = buffer_anchor.buffer_id; let snapshot = self.snapshot(window, cx); if snapshot.intersects_fold(next_diagnostic.range.start) { self.unfold_ranges( @@ -18560,9 +18607,9 @@ impl Editor { let editor_snapshot = self.snapshot(window, cx); // We don't care about multi-buffer symbols - let Some((excerpt_id, _, _)) = editor_snapshot.as_singleton() else { + if !editor_snapshot.is_singleton() { return Task::ready(Ok(())); - }; + } let cursor_offset = self .selections @@ -18582,7 +18629,11 @@ impl Editor { let multi_snapshot = editor_snapshot.buffer(); let buffer_range = |range: &Range<_>| { - Anchor::range_in_buffer(excerpt_id, range.clone()).to_offset(multi_snapshot) + Some( + multi_snapshot + .buffer_anchor_range_to_anchor_range(range.clone())? 
+ .to_offset(multi_snapshot), + ) }; wcx.update_window(wcx.window_handle(), |_, window, acx| { @@ -18591,7 +18642,7 @@ impl Editor { .enumerate() .filter_map(|(idx, item)| { // Find the closest outline item by distance between outline text and cursor location - let source_range = buffer_range(&item.source_range_for_text); + let source_range = buffer_range(&item.source_range_for_text)?; let distance_to_closest_endpoint = cmp::min( (source_range.start.0 as isize - cursor_offset.0 as isize).abs(), (source_range.end.0 as isize - cursor_offset.0 as isize).abs(), @@ -18616,7 +18667,9 @@ impl Editor { return; }; - let range = buffer_range(&outline_items[idx].source_range_for_text); + let Some(range) = buffer_range(&outline_items[idx].source_range_for_text) else { + return; + }; let selection = [range.start..range.start]; let _ = editor @@ -18686,24 +18739,15 @@ impl Editor { let (locations, current_location_index) = multi_buffer.update(cx, |multi_buffer, cx| { + let multi_buffer_snapshot = multi_buffer.snapshot(cx); let mut locations = locations .into_iter() .filter_map(|loc| { - let start = multi_buffer.buffer_anchor_to_anchor( - &loc.buffer, - loc.range.start, - cx, - )?; - let end = multi_buffer.buffer_anchor_to_anchor( - &loc.buffer, - loc.range.end, - cx, - )?; + let start = multi_buffer_snapshot.anchor_in_excerpt(loc.range.start)?; + let end = multi_buffer_snapshot.anchor_in_excerpt(loc.range.end)?; Some(start..end) }) .collect::>(); - - let multi_buffer_snapshot = multi_buffer.snapshot(cx); // There is an O(n) implementation, but given this list will be // small (usually <100 items), the extra O(log(n)) factor isn't // worth the (surprisingly large amount of) extra complexity. 
@@ -18959,14 +19003,21 @@ impl Editor { for (buffer, mut ranges_for_buffer) in locations { ranges_for_buffer.sort_by_key(|range| (range.start, Reverse(range.end))); key.push((buffer.read(cx).remote_id(), ranges_for_buffer.clone())); - let (new_ranges, _) = multibuffer.set_excerpts_for_path( + multibuffer.set_excerpts_for_path( PathKey::for_buffer(&buffer, cx), buffer.clone(), - ranges_for_buffer, + ranges_for_buffer.clone(), multibuffer_context_lines(cx), cx, ); - ranges.extend(new_ranges) + let snapshot = multibuffer.snapshot(cx); + let buffer_snapshot = buffer.read(cx).snapshot(); + ranges.extend(ranges_for_buffer.into_iter().filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range); + let start = snapshot.anchor_in_buffer(text_range.start)?; + let end = snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + })) } multibuffer.with_title(title) @@ -19074,28 +19125,11 @@ impl Editor { let snapshot = cursor_buffer.read(cx).snapshot(); let cursor_buffer_offset = cursor_buffer_position.to_offset(&snapshot); let cursor_buffer_offset_end = cursor_buffer_position_end.to_offset(&snapshot); - let prepare_rename = provider - .range_for_rename(&cursor_buffer, cursor_buffer_position, cx) - .unwrap_or_else(|| Task::ready(Ok(None))); + let prepare_rename = provider.range_for_rename(&cursor_buffer, cursor_buffer_position, cx); drop(snapshot); Some(cx.spawn_in(window, async move |this, cx| { - let rename_range = if let Some(range) = prepare_rename.await? { - Some(range) - } else { - this.update(cx, |this, cx| { - let buffer = this.buffer.read(cx).snapshot(cx); - let mut buffer_highlights = this - .document_highlights_for_position(selection.head(), &buffer) - .filter(|highlight| { - highlight.start.excerpt_id == selection.head().excerpt_id - && highlight.end.excerpt_id == selection.head().excerpt_id - }); - buffer_highlights - .next() - .map(|highlight| highlight.start.text_anchor..highlight.end.text_anchor) - })? 
- }; + let rename_range = prepare_rename.await?; if let Some(rename_range) = rename_range { this.update_in(cx, |this, window, cx| { let snapshot = cursor_buffer.read(cx).snapshot(); @@ -19417,12 +19451,12 @@ impl Editor { let mut buffer_id_to_ranges: BTreeMap>> = BTreeMap::new(); for selection_range in selection_ranges { - for (buffer, buffer_range, _) in - snapshot.range_to_buffer_ranges(selection_range.start..=selection_range.end) + for (buffer_snapshot, buffer_range, _) in + snapshot.range_to_buffer_ranges(selection_range.start..selection_range.end) { - let buffer_id = buffer.remote_id(); - let start = buffer.anchor_before(buffer_range.start); - let end = buffer.anchor_after(buffer_range.end); + let buffer_id = buffer_snapshot.remote_id(); + let start = buffer_snapshot.anchor_before(buffer_range.start); + let end = buffer_snapshot.anchor_after(buffer_range.end); buffers.insert(multi_buffer.buffer(buffer_id).unwrap()); buffer_id_to_ranges .entry(buffer_id) @@ -20200,10 +20234,10 @@ impl Editor { .is_some(); has_folds } else { - let buffer_ids = self.buffer.read(cx).excerpt_buffer_ids(); - let has_folds = buffer_ids - .iter() - .any(|buffer_id| self.is_buffer_folded(*buffer_id, cx)); + let snapshot = self.buffer.read(cx).snapshot(cx); + let has_folds = snapshot + .all_buffer_ids() + .any(|buffer_id| self.is_buffer_folded(buffer_id, cx)); has_folds }; @@ -20368,7 +20402,8 @@ impl Editor { self.toggle_fold_multiple_buffers = cx.spawn_in(window, async move |editor, cx| { editor .update_in(cx, |editor, _, cx| { - for buffer_id in editor.buffer.read(cx).excerpt_buffer_ids() { + let snapshot = editor.buffer.read(cx).snapshot(cx); + for buffer_id in snapshot.all_buffer_ids() { editor.fold_buffer(buffer_id, cx); } }) @@ -20556,7 +20591,8 @@ impl Editor { self.toggle_fold_multiple_buffers = cx.spawn(async move |editor, cx| { editor .update(cx, |editor, cx| { - for buffer_id in editor.buffer.read(cx).excerpt_buffer_ids() { + let snapshot = 
editor.buffer.read(cx).snapshot(cx); + for buffer_id in snapshot.all_buffer_ids() { editor.unfold_buffer(buffer_id, cx); } }) @@ -20655,25 +20691,19 @@ impl Editor { return; } - let mut all_folded_excerpt_ids = Vec::new(); - for buffer_id in &ids_to_fold { - let folded_excerpts = self.buffer().read(cx).excerpts_for_buffer(*buffer_id, cx); - all_folded_excerpt_ids.extend(folded_excerpts.into_iter().map(|(id, _, _)| id)); - } - self.display_map.update(cx, |display_map, cx| { display_map.fold_buffers(ids_to_fold.clone(), cx) }); let snapshot = self.display_snapshot(cx); self.selections.change_with(&snapshot, |selections| { - for buffer_id in ids_to_fold { + for buffer_id in ids_to_fold.iter().copied() { selections.remove_selections_from_buffer(buffer_id); } }); cx.emit(EditorEvent::BufferFoldToggled { - ids: all_folded_excerpt_ids, + ids: ids_to_fold, folded: true, }); cx.notify(); @@ -20683,12 +20713,11 @@ impl Editor { if self.buffer().read(cx).is_singleton() || !self.is_buffer_folded(buffer_id, cx) { return; } - let unfolded_excerpts = self.buffer().read(cx).excerpts_for_buffer(buffer_id, cx); self.display_map.update(cx, |display_map, cx| { display_map.unfold_buffers([buffer_id], cx); }); cx.emit(EditorEvent::BufferFoldToggled { - ids: unfolded_excerpts.iter().map(|&(id, _, _)| id).collect(), + ids: vec![buffer_id], folded: false, }); cx.notify(); @@ -20741,14 +20770,6 @@ impl Editor { return; } - let mut buffers_affected = HashSet::default(); - let multi_buffer = self.buffer().read(cx); - for range in ranges { - if let Some((_, buffer, _)) = multi_buffer.excerpt_containing(range.start.clone(), cx) { - buffers_affected.insert(buffer.read(cx).remote_id()); - }; - } - self.display_map.update(cx, update); if auto_scroll { @@ -20786,7 +20807,7 @@ impl Editor { cx: &mut Context, ) { self.buffer.update(cx, |buffer, cx| { - buffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx) + buffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx) }); } @@ -20797,7 
+20818,7 @@ impl Editor { cx: &mut Context, ) { self.buffer.update(cx, |buffer, cx| { - buffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx) + buffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx) }); } @@ -20822,7 +20843,7 @@ impl Editor { buffer: &'a MultiBufferSnapshot, ) -> impl 'a + Iterator { ranges.iter().flat_map(move |range| { - let end_excerpt_id = range.end.excerpt_id; + let end_excerpt = buffer.excerpt_containing(range.end..range.end); let range = range.to_point(buffer); let mut peek_end = range.end; if range.end.row < buffer.max_row().0 { @@ -20830,7 +20851,19 @@ impl Editor { } buffer .diff_hunks_in_range(range.start..peek_end) - .filter(move |hunk| hunk.excerpt_id.cmp(&end_excerpt_id, buffer).is_le()) + .filter(move |hunk| { + if let Some((_, excerpt_range)) = &end_excerpt + && let Some(end_anchor) = + buffer.anchor_in_excerpt(excerpt_range.context.end) + && let Some(hunk_end_anchor) = + buffer.anchor_in_excerpt(hunk.excerpt_range.context.end) + && hunk_end_anchor.cmp(&end_anchor, buffer).is_gt() + { + false + } else { + true + } + }) }) } @@ -21032,7 +21065,7 @@ impl Editor { pub fn clear_expanded_diff_hunks(&mut self, cx: &mut Context) -> bool { self.buffer.update(cx, |buffer, cx| { - let ranges = vec![Anchor::min()..Anchor::max()]; + let ranges = vec![Anchor::Min..Anchor::Max]; if !buffer.all_diff_hunks_expanded() && buffer.has_expanded_diff_hunks_in_ranges(&ranges, cx) { @@ -21048,7 +21081,7 @@ impl Editor { if self.buffer.read(cx).all_diff_hunks_expanded() { return true; } - let ranges = vec![Anchor::min()..Anchor::max()]; + let ranges = vec![Anchor::Min..Anchor::Max]; self.buffer .read(cx) .has_expanded_diff_hunks_in_ranges(&ranges, cx) @@ -22103,11 +22136,11 @@ impl Editor { let end_point = overlay.anchor_range.end.to_point(&snapshot); let start_row = snapshot .point_to_buffer_point(start_point) - .map(|(_, p, _)| p.row) + .map(|(_, p)| p.row) .unwrap_or(start_point.row); let end_row = snapshot 
.point_to_buffer_point(end_point) - .map(|(_, p, _)| p.row) + .map(|(_, p)| p.row) .unwrap_or(end_point.row); Some((start_row, end_row)) } @@ -22607,9 +22640,9 @@ impl Editor { snapshot.range_to_buffer_ranges(start_point..end_point); let ranges: Vec<(u32, u32)> = buffer_ranges .iter() - .map(|(buffer, range, _)| { - let start = buffer.offset_to_point(range.start.0).row; - let end = buffer.offset_to_point(range.end.0).row; + .map(|(buffer_snapshot, range, _)| { + let start = buffer_snapshot.offset_to_point(range.start.0).row; + let end = buffer_snapshot.offset_to_point(range.end.0).row; (start, end) }) .collect(); @@ -22935,15 +22968,14 @@ impl Editor { } fn target_file<'a>(&self, cx: &'a App) -> Option<&'a dyn language::LocalFile> { - self.active_excerpt(cx)? - .1 + self.active_buffer(cx)? .read(cx) .file() .and_then(|f| f.as_local()) } pub fn target_file_abs_path(&self, cx: &mut Context) -> Option { - self.active_excerpt(cx).and_then(|(_, buffer, _)| { + self.active_buffer(cx).and_then(|buffer| { let buffer = buffer.read(cx); if let Some(project_path) = buffer.project_path(cx) { let project = self.project()?.read(cx); @@ -22992,7 +23024,7 @@ impl Editor { _window: &mut Window, cx: &mut Context, ) { - if let Some(path) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + if let Some(path) = self.active_buffer(cx).and_then(|buffer| { let project = self.project()?.read(cx); let path = buffer.read(cx).file()?.path(); let path = path.display(project.path_style(cx)); @@ -23050,41 +23082,22 @@ impl Editor { } let position = active_stack_frame.position; - let buffer_id = position.buffer_id?; - let snapshot = self - .project - .as_ref()? - .read(cx) - .buffer_for_id(buffer_id, cx)? - .read(cx) - .snapshot(); - let mut handled = false; - for (id, _, ExcerptRange { context, .. 
}) in - self.buffer.read(cx).excerpts_for_buffer(buffer_id, cx) - { - if context.start.cmp(&position, &snapshot).is_ge() - || context.end.cmp(&position, &snapshot).is_lt() - { - continue; - } - let snapshot = self.buffer.read(cx).snapshot(cx); - let multibuffer_anchor = snapshot.anchor_in_excerpt(id, position)?; + let snapshot = self.buffer.read(cx).snapshot(cx); + let multibuffer_anchor = snapshot.anchor_in_excerpt(position)?; - handled = true; - self.clear_row_highlights::(); + self.clear_row_highlights::(); - self.go_to_line::( - multibuffer_anchor, - Some(cx.theme().colors().editor_debugger_active_line_background), - window, - cx, - ); + self.go_to_line::( + multibuffer_anchor, + Some(cx.theme().colors().editor_debugger_active_line_background), + window, + cx, + ); - cx.notify(); - } + cx.notify(); - handled.then_some(()) + Some(()) }) .is_some() } @@ -23095,7 +23108,7 @@ impl Editor { _: &mut Window, cx: &mut Context, ) { - if let Some(file_stem) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + if let Some(file_stem) = self.active_buffer(cx).and_then(|buffer| { let file = buffer.read(cx).file()?; file.path().file_stem() }) { @@ -23104,7 +23117,7 @@ impl Editor { } pub fn copy_file_name(&mut self, _: &CopyFileName, _: &mut Window, cx: &mut Context) { - if let Some(file_name) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + if let Some(file_name) = self.active_buffer(cx).and_then(|buffer| { let file = buffer.read(cx).file()?; Some(file.file_name(cx)) }) { @@ -23157,7 +23170,7 @@ impl Editor { .selections .newest::(&snapshot.display_snapshot) .head(); - let (buffer, point, _) = snapshot.buffer_snapshot().point_to_buffer_point(cursor)?; + let (buffer, point) = snapshot.buffer_snapshot().point_to_buffer_point(cursor)?; let (_, blame_entry) = blame .update(cx, |blame, cx| { blame @@ -23304,33 +23317,28 @@ impl Editor { let multi_buffer = self.buffer().read(cx); let multi_buffer_snapshot = multi_buffer.snapshot(cx); let buffer_ranges = 
multi_buffer_snapshot - .range_to_buffer_ranges(selection_range.start..=selection_range.end); + .range_to_buffer_ranges(selection_range.start..selection_range.end); - let (buffer, range, _) = if selection.reversed { + let (buffer_snapshot, range, _) = if selection.reversed { buffer_ranges.first() } else { buffer_ranges.last() }?; - let buffer_range = range.to_point(buffer); + let buffer_range = range.to_point(buffer_snapshot); + let buffer = multi_buffer.buffer(buffer_snapshot.remote_id()).unwrap(); - let Some(buffer_diff) = multi_buffer.diff_for(buffer.remote_id()) else { - return Some(( - multi_buffer.buffer(buffer.remote_id()).unwrap(), - buffer_range.start.row..buffer_range.end.row, - )); + let Some(buffer_diff) = multi_buffer.diff_for(buffer_snapshot.remote_id()) else { + return Some((buffer, buffer_range.start.row..buffer_range.end.row)); }; let buffer_diff_snapshot = buffer_diff.read(cx).snapshot(cx); - let start = - buffer_diff_snapshot.buffer_point_to_base_text_point(buffer_range.start, buffer); - let end = - buffer_diff_snapshot.buffer_point_to_base_text_point(buffer_range.end, buffer); + let start = buffer_diff_snapshot + .buffer_point_to_base_text_point(buffer_range.start, &buffer_snapshot); + let end = buffer_diff_snapshot + .buffer_point_to_base_text_point(buffer_range.end, &buffer_snapshot); - Some(( - multi_buffer.buffer(buffer.remote_id()).unwrap(), - start.row..end.row, - )) + Some((buffer, start.row..end.row)) }); let Some((buffer, selection)) = buffer_and_selection else { @@ -23404,7 +23412,7 @@ impl Editor { end_line }; - if let Some(file_location) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + if let Some(file_location) = self.active_buffer(cx).and_then(|buffer| { let project = self.project()?.read(cx); let file = buffer.read(cx).file()?; let path = file.path().display(project.path_style(cx)); @@ -23505,6 +23513,7 @@ impl Editor { let Some(buffer) = multibuffer.as_singleton() else { return; }; + let buffer_snapshot = 
buffer.read(cx).snapshot(); let Some(workspace) = self.workspace() else { return; @@ -23519,7 +23528,8 @@ impl Editor { .map(|selection| { ( buffer.clone(), - (selection.start.text_anchor..selection.end.text_anchor) + (selection.start.text_anchor_in(&buffer_snapshot) + ..selection.end.text_anchor_in(&buffer_snapshot)) .to_point(buffer.read(cx)), ) }) @@ -23688,8 +23698,7 @@ impl Editor { let start = highlight.range.start.to_display_point(&snapshot); let end = highlight.range.end.to_display_point(&snapshot); let start_row = start.row().0; - let end_row = if !highlight.range.end.text_anchor.is_max() && end.column() == 0 - { + let end_row = if !highlight.range.end.is_max() && end.column() == 0 { end.row().0.saturating_sub(1) } else { end.row().0 @@ -23925,42 +23934,6 @@ impl Editor { } } - fn document_highlights_for_position<'a>( - &'a self, - position: Anchor, - buffer: &'a MultiBufferSnapshot, - ) -> impl 'a + Iterator> { - let read_highlights = self - .background_highlights - .get(&HighlightKey::DocumentHighlightRead) - .map(|h| &h.1); - let write_highlights = self - .background_highlights - .get(&HighlightKey::DocumentHighlightWrite) - .map(|h| &h.1); - let left_position = position.bias_left(buffer); - let right_position = position.bias_right(buffer); - read_highlights - .into_iter() - .chain(write_highlights) - .flat_map(move |ranges| { - let start_ix = match ranges.binary_search_by(|probe| { - let cmp = probe.end.cmp(&left_position, buffer); - if cmp.is_ge() { - Ordering::Greater - } else { - Ordering::Less - } - }) { - Ok(i) | Err(i) => i, - }; - - ranges[start_ix..] 
- .iter() - .take_while(move |range| range.start.cmp(&right_position, buffer).is_le()) - }) - } - pub fn has_background_highlights(&self, key: HighlightKey) -> bool { self.background_highlights .get(&key) @@ -24182,26 +24155,16 @@ impl Editor { return Some(Task::ready(Ok(Vec::new()))); }; - let buffer = editor.buffer.read_with(cx, |buffer, cx| { - let snapshot = buffer.snapshot(cx); - - let excerpt = snapshot.excerpt_containing( - current_execution_position..current_execution_position, - )?; - - editor.buffer.read(cx).buffer(excerpt.buffer_id()) - })?; - - if current_execution_position - .text_anchor - .buffer_id - .is_some_and(|id| id != buffer.read(cx).remote_id()) - { - return Some(Task::ready(Ok(Vec::new()))); - } + let (buffer, buffer_anchor) = + editor.buffer.read_with(cx, |multibuffer, cx| { + let multibuffer_snapshot = multibuffer.snapshot(cx); + let (buffer_anchor, _) = multibuffer_snapshot + .anchor_to_buffer_anchor(current_execution_position)?; + let buffer = multibuffer.buffer(buffer_anchor.buffer_id)?; + Some((buffer, buffer_anchor)) + })?; - let range = - buffer.read(cx).anchor_before(0)..current_execution_position.text_anchor; + let range = buffer.read(cx).anchor_before(0)..buffer_anchor; semantics.inline_values(buffer, range, cx) }) @@ -24215,7 +24178,7 @@ impl Editor { for (buffer_id, inline_value) in inline_values .into_iter() - .filter_map(|hint| Some((hint.position.buffer_id?, hint))) + .map(|hint| (hint.position.buffer_id, hint)) { buffer_inline_values .entry(buffer_id) @@ -24228,22 +24191,20 @@ impl Editor { let snapshot = editor.buffer.read(cx).snapshot(cx); let mut new_inlays = Vec::default(); - for (excerpt_id, buffer_snapshot, _) in snapshot.excerpts() { - let buffer_id = buffer_snapshot.remote_id(); - buffer_inline_values - .get(&buffer_id) - .into_iter() - .flatten() - .for_each(|hint| { - let inlay = Inlay::debugger( - post_inc(&mut editor.next_inlay_id), - Anchor::in_buffer(excerpt_id, hint.position), - hint.text(), - ); - if 
!inlay.text().chars().contains(&'\n') { - new_inlays.push(inlay); - } - }); + for (_buffer_id, inline_values) in buffer_inline_values { + for hint in inline_values { + let Some(anchor) = snapshot.anchor_in_excerpt(hint.position) else { + continue; + }; + let inlay = Inlay::debugger( + post_inc(&mut editor.next_inlay_id), + anchor, + hint.text(), + ); + if !inlay.text().chars().contains(&'\n') { + new_inlays.push(inlay); + } + } } let mut inlay_ids = new_inlays.iter().map(|inlay| inlay.id).collect(); @@ -24312,11 +24273,12 @@ impl Editor { }; telemetry.log_edit_event("editor", is_via_ssh); } - multi_buffer::Event::ExcerptsAdded { + multi_buffer::Event::BufferRangesUpdated { buffer, - predecessor, - excerpts, + ranges, + path_key, } => { + self.refresh_document_highlights(cx); let buffer_id = buffer.read(cx).remote_id(); if self.buffer.read(cx).diff_for(buffer_id).is_none() && let Some(project) = &self.project @@ -24330,27 +24292,29 @@ impl Editor { ) .detach(); } - self.semantic_token_state - .invalidate_buffer(&buffer.read(cx).remote_id()); + self.register_visible_buffers(cx); self.update_lsp_data(Some(buffer_id), window, cx); self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); self.refresh_runnables(None, window, cx); + self.bracket_fetched_tree_sitter_chunks + .retain(|range, _| range.start.buffer_id != buffer_id); self.colorize_brackets(false, cx); self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx); - cx.emit(EditorEvent::ExcerptsAdded { + self.semantic_token_state.invalidate_buffer(&buffer_id); + cx.emit(EditorEvent::BufferRangesUpdated { buffer: buffer.clone(), - predecessor: *predecessor, - excerpts: excerpts.clone(), + ranges: ranges.clone(), + path_key: path_key.clone(), }); } - multi_buffer::Event::ExcerptsRemoved { - ids, - removed_buffer_ids, - } => { + multi_buffer::Event::BuffersRemoved { removed_buffer_ids } => { if let Some(inlay_hints) = &mut self.inlay_hints { 
inlay_hints.remove_inlay_chunk_data(removed_buffer_ids); } - self.refresh_inlay_hints(InlayHintRefreshReason::ExcerptsRemoved(ids.clone()), cx); + self.refresh_inlay_hints( + InlayHintRefreshReason::BuffersRemoved(removed_buffer_ids.clone()), + cx, + ); for buffer_id in removed_buffer_ids { self.registered_buffers.remove(buffer_id); self.clear_runnables(Some(*buffer_id)); @@ -24366,38 +24330,18 @@ impl Editor { }); jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); - cx.emit(EditorEvent::ExcerptsRemoved { - ids: ids.clone(), + cx.emit(EditorEvent::BuffersRemoved { removed_buffer_ids: removed_buffer_ids.clone(), }); } - multi_buffer::Event::ExcerptsEdited { - excerpt_ids, - buffer_ids, - } => { + multi_buffer::Event::BuffersEdited { buffer_ids } => { self.display_map.update(cx, |map, cx| { map.unfold_buffers(buffer_ids.iter().copied(), cx) }); - cx.emit(EditorEvent::ExcerptsEdited { - ids: excerpt_ids.clone(), + cx.emit(EditorEvent::BuffersEdited { + buffer_ids: buffer_ids.clone(), }); } - multi_buffer::Event::ExcerptsExpanded { ids } => { - self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); - self.refresh_document_highlights(cx); - let snapshot = multibuffer.read(cx).snapshot(cx); - for id in ids { - self.bracket_fetched_tree_sitter_chunks.remove(id); - if let Some(buffer) = snapshot.buffer_for_excerpt(*id) { - self.semantic_token_state - .invalidate_buffer(&buffer.remote_id()); - } - } - self.colorize_brackets(false, cx); - self.update_lsp_data(None, window, cx); - self.refresh_runnables(None, window, cx); - cx.emit(EditorEvent::ExcerptsExpanded { ids: ids.clone() }) - } multi_buffer::Event::Reparsed(buffer_id) => { self.refresh_runnables(Some(*buffer_id), window, cx); self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx); @@ -24700,18 +24644,13 @@ impl Editor { let mut new_selections_by_buffer = HashMap::default(); match &jump_data { Some(JumpData::MultiBufferPoint { - excerpt_id, - position, 
anchor, + position, line_offset_from_top, }) => { - let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); - if let Some(buffer) = multi_buffer_snapshot - .buffer_id_for_excerpt(*excerpt_id) - .and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)) - { + if let Some(buffer) = self.buffer.read(cx).buffer(anchor.buffer_id) { let buffer_snapshot = buffer.read(cx).snapshot(); - let jump_to_point = if buffer_snapshot.can_resolve(anchor) { + let jump_to_point = if buffer_snapshot.can_resolve(&anchor) { language::ToPoint::to_point(anchor, &buffer_snapshot) } else { buffer_snapshot.clip_point(*position, Bias::Left) @@ -24731,7 +24670,7 @@ impl Editor { line_offset_from_top, }) => { let point = MultiBufferPoint::new(row.0, 0); - if let Some((buffer, buffer_point, _)) = + if let Some((buffer, buffer_point)) = self.buffer.read(cx).point_to_buffer_point(point, cx) { let buffer_offset = buffer.read(cx).point_to_offset(buffer_point); @@ -24747,18 +24686,20 @@ impl Editor { .selections .all::(&self.display_snapshot(cx)); let multi_buffer = self.buffer.read(cx); + let multi_buffer_snapshot = multi_buffer.snapshot(cx); for selection in selections { - for (snapshot, range, _, anchor) in multi_buffer - .snapshot(cx) + for (snapshot, range, anchor) in multi_buffer_snapshot .range_to_buffer_ranges_with_deleted_hunks(selection.range()) { - if let Some(anchor) = anchor { - let Some(buffer_handle) = multi_buffer.buffer_for_anchor(anchor, cx) + if let Some((text_anchor, _)) = anchor.and_then(|anchor| { + multi_buffer_snapshot.anchor_to_buffer_anchor(anchor) + }) { + let Some(buffer_handle) = multi_buffer.buffer(text_anchor.buffer_id) else { continue; }; let offset = text::ToOffset::to_offset( - &anchor.text_anchor, + &text_anchor, &buffer_handle.read(cx).snapshot(), ); let range = BufferOffset(offset)..BufferOffset(offset); @@ -24907,9 +24848,7 @@ impl Editor { }; let nav_history = editor.nav_history.take(); let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx); - 
let Some((excerpt_id, _, buffer_snapshot)) = - multibuffer_snapshot.as_singleton() - else { + let Some(buffer_snapshot) = multibuffer_snapshot.as_singleton() else { return; }; editor.change_selections( @@ -24921,7 +24860,7 @@ impl Editor { let range = buffer_snapshot.anchor_before(range.start) ..buffer_snapshot.anchor_after(range.end); multibuffer_snapshot - .anchor_range_in_excerpt(excerpt_id, range) + .buffer_anchor_range_to_anchor_range(range) .unwrap() })); }, @@ -25415,8 +25354,11 @@ impl Editor { } } }); - self.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| { - selections.refresh() + let selections = self + .selections + .all::(&self.display_snapshot(cx)); + self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select(selections); }); } @@ -25798,7 +25740,12 @@ impl Editor { if !self.lsp_data_enabled() { return; } - for (_, (visible_buffer, _, _)) in self.visible_excerpts(true, cx) { + let visible_buffers: Vec<_> = self + .visible_buffers(cx) + .into_iter() + .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx)) + .collect(); + for visible_buffer in visible_buffers { self.register_buffer(visible_buffer.read(cx).remote_id(), cx); } } @@ -26090,17 +26037,16 @@ fn process_completion_for_edit( range_to_replace.end = *cursor_position; } - let replace_range = range_to_replace.to_offset(buffer); CompletionEdit { new_text, - replace_range: BufferOffset(replace_range.start)..BufferOffset(replace_range.end), + replace_range: range_to_replace, snippet, } } struct CompletionEdit { new_text: String, - replace_range: Range, + replace_range: Range, snippet: Option, } @@ -26542,10 +26488,10 @@ impl NewlineConfig { range: Range, ) -> bool { let (buffer, range) = match buffer - .range_to_buffer_ranges(range.start..=range.end) + .range_to_buffer_ranges(range.start..range.end) .as_slice() { - [(buffer, range, _)] => (*buffer, range.clone()), + [(buffer_snapshot, range, _)] => (buffer_snapshot.clone(), range.clone()), 
_ => return false, }; let pair = { @@ -27084,7 +27030,7 @@ pub trait SemanticsProvider { buffer: &Entity, position: text::Anchor, cx: &mut App, - ) -> Option>>>>; + ) -> Task>>>; fn perform_rename( &self, @@ -27098,7 +27044,6 @@ pub trait SemanticsProvider { pub trait CompletionProvider { fn completions( &self, - excerpt_id: ExcerptId, buffer: &Entity, buffer_position: text::Anchor, trigger: CompletionContext, @@ -27167,7 +27112,6 @@ pub trait CodeActionProvider { &self, buffer_handle: Entity, action: CodeAction, - excerpt_id: ExcerptId, push_to_history: bool, window: &mut Window, cx: &mut App, @@ -27210,7 +27154,6 @@ impl CodeActionProvider for Entity { &self, buffer_handle: Entity, action: CodeAction, - _excerpt_id: ExcerptId, push_to_history: bool, _window: &mut Window, cx: &mut App, @@ -27458,7 +27401,6 @@ fn snippet_completions( impl CompletionProvider for Entity { fn completions( &self, - _excerpt_id: ExcerptId, buffer: &Entity, buffer_position: text::Anchor, options: CompletionContext, @@ -27680,8 +27622,12 @@ impl SemanticsProvider for WeakEntity { buffer: &Entity, position: text::Anchor, cx: &mut App, - ) -> Option>>>> { - self.update(cx, |project, cx| { + ) -> Task>>> { + let Some(this) = self.upgrade() else { + return Task::ready(Ok(None)); + }; + + this.update(cx, |project, cx| { let buffer = buffer.clone(); let task = project.prepare_rename(buffer.clone(), position, cx); cx.spawn(async move |_, cx| { @@ -27705,7 +27651,6 @@ impl SemanticsProvider for WeakEntity { }) }) }) - .ok() } fn perform_rename( @@ -27882,6 +27827,7 @@ impl EditorSnapshot { end_row.0 += 1; } let is_created_file = hunk.is_created_file(); + let multi_buffer_range = hunk.multi_buffer_range.clone(); DisplayDiffHunk::Unfolded { status: hunk.status(), @@ -27889,10 +27835,7 @@ impl EditorSnapshot { ..hunk.diff_base_byte_range.end.0, word_diffs: hunk.word_diffs, display_row_range: hunk_display_start.row()..end_row, - multi_buffer_range: Anchor::range_in_buffer( - hunk.excerpt_id, - 
hunk.buffer_range, - ), + multi_buffer_range, is_created_file, } }; @@ -28213,27 +28156,23 @@ pub enum EditorEvent { utf16_range_to_replace: Option>, text: Arc, }, - ExcerptsAdded { + BufferRangesUpdated { buffer: Entity, - predecessor: ExcerptId, - excerpts: Vec<(ExcerptId, ExcerptRange)>, + path_key: PathKey, + ranges: Vec>, }, - ExcerptsRemoved { - ids: Vec, + BuffersRemoved { removed_buffer_ids: Vec, }, + BuffersEdited { + buffer_ids: Vec, + }, BufferFoldToggled { - ids: Vec, + ids: Vec, folded: bool, }, - ExcerptsEdited { - ids: Vec, - }, - ExcerptsExpanded { - ids: Vec, - }, ExpandExcerptsRequested { - excerpt_ids: Vec, + excerpt_anchors: Vec, lines: u32, direction: ExpandExcerptDirection, }, @@ -28834,11 +28773,19 @@ fn edit_prediction_edit_text( edits: &[(Range, impl AsRef)], edit_preview: &EditPreview, include_deletions: bool, + multibuffer_snapshot: &MultiBufferSnapshot, cx: &App, ) -> HighlightedText { let edits = edits .iter() - .map(|(anchor, text)| (anchor.start.text_anchor..anchor.end.text_anchor, text)) + .filter_map(|(anchor, text)| { + Some(( + multibuffer_snapshot + .anchor_range_to_buffer_anchor_range(anchor.clone())? 
+ .1, + text, + )) + }) .collect::>(); edit_preview.highlight_edits(current_snapshot, &edits, include_deletions, cx) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 65a872e6035565bb01fdd78e00d6cf0f35d35ef8..2afd724f5e4a7332b713e14f1e4da5ad32517f13 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -59,7 +59,6 @@ use std::{ sync::atomic::{self, AtomicUsize}, }; use test::build_editor_with_project; -use text::ToPoint as _; use unindent::Unindent; use util::{ assert_set_eq, path, @@ -1030,12 +1029,13 @@ async fn test_navigation_history(cx: &mut TestAppContext) { original_scroll_position ); + let other_buffer = + cx.new(|cx| MultiBuffer::singleton(cx.new(|cx| Buffer::local("test", cx)), cx)); + // Ensure we don't panic when navigation data contains invalid anchors *and* points. - let mut invalid_anchor = editor - .scroll_manager - .native_anchor(&editor.display_snapshot(cx), cx) - .anchor; - invalid_anchor.text_anchor.buffer_id = BufferId::new(999).ok(); + let invalid_anchor = other_buffer.update(cx, |buffer, cx| { + buffer.snapshot(cx).anchor_after(MultiBufferOffset(3)) + }); let invalid_point = Point::new(9999, 0); editor.navigate( Arc::new(NavigationData { @@ -13836,7 +13836,7 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) { 0, cx, ); - assert_eq!(multi_buffer.excerpt_ids().len(), 9); + assert_eq!(multi_buffer.read(cx).excerpts().count(), 9); multi_buffer }); let multi_buffer_editor = cx.new_window_entity(|window, cx| { @@ -18946,157 +18946,6 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { }); } -#[gpui::test] -fn test_refresh_selections(cx: &mut TestAppContext) { - init_test(cx, |_| {}); - - let buffer = cx.new(|cx| Buffer::local(sample_text(5, 4, 'a'), cx)); - let multibuffer = cx.new(|cx| { - let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.set_excerpts_for_path( - PathKey::sorted(0), - buffer.clone(), - [ - Point::new(0, 
0)..Point::new(1, 4), - Point::new(3, 0)..Point::new(4, 4), - ], - 0, - cx, - ); - multibuffer - }); - - let editor = cx.add_window(|window, cx| { - let mut editor = build_editor(multibuffer.clone(), window, cx); - let snapshot = editor.snapshot(window, cx); - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([Point::new(1, 3)..Point::new(1, 3)]) - }); - editor.begin_selection( - Point::new(2, 1).to_display_point(&snapshot), - true, - 1, - window, - cx, - ); - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [ - Point::new(1, 3)..Point::new(1, 3), - Point::new(2, 1)..Point::new(2, 1), - ] - ); - editor - }); - - // Refreshing selections is a no-op when excerpts haven't changed. - _ = editor.update(cx, |editor, window, cx| { - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh()); - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [ - Point::new(1, 3)..Point::new(1, 3), - Point::new(2, 1)..Point::new(2, 1), - ] - ); - }); - - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.set_excerpts_for_path( - PathKey::sorted(0), - buffer.clone(), - [Point::new(3, 0)..Point::new(4, 4)], - 0, - cx, - ); - }); - _ = editor.update(cx, |editor, window, cx| { - // Removing an excerpt causes the first selection to become degenerate. - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [ - Point::new(0, 0)..Point::new(0, 0), - Point::new(0, 1)..Point::new(0, 1) - ] - ); - - // Refreshing selections will relocate the first selection to the original buffer - // location. 
- editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh()); - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [ - Point::new(0, 0)..Point::new(0, 0), - Point::new(0, 1)..Point::new(0, 1), - ] - ); - assert!(editor.selections.pending_anchor().is_some()); - }); -} - -#[gpui::test] -fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) { - init_test(cx, |_| {}); - - let buffer = cx.new(|cx| Buffer::local(sample_text(5, 4, 'a'), cx)); - let multibuffer = cx.new(|cx| { - let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.set_excerpts_for_path( - PathKey::sorted(0), - buffer.clone(), - [ - Point::new(0, 0)..Point::new(1, 4), - Point::new(3, 0)..Point::new(4, 4), - ], - 0, - cx, - ); - assert_eq!(multibuffer.read(cx).text(), "aaaa\nbbbb\ndddd\neeee"); - multibuffer - }); - - let editor = cx.add_window(|window, cx| { - let mut editor = build_editor(multibuffer.clone(), window, cx); - let snapshot = editor.snapshot(window, cx); - editor.begin_selection( - Point::new(1, 3).to_display_point(&snapshot), - false, - 1, - window, - cx, - ); - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [Point::new(1, 3)..Point::new(1, 3)] - ); - editor - }); - - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.set_excerpts_for_path( - PathKey::sorted(0), - buffer.clone(), - [Point::new(3, 0)..Point::new(4, 4)], - 0, - cx, - ); - }); - _ = editor.update(cx, |editor, window, cx| { - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [Point::new(0, 0)..Point::new(0, 0)] - ); - - // Ensure we don't panic when selections are refreshed and that the pending selection is finalized. 
- editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh()); - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [Point::new(0, 0)..Point::new(0, 0)] - ); - assert!(editor.selections.pending_anchor().is_some()); - }); -} - #[gpui::test] async fn test_extra_newline_insertion(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -19738,8 +19587,8 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) { let (buffer_1, buffer_2) = project.update(cx, |project, cx| { ( - project.create_local_buffer("abc\ndef\nghi\njkl\n", None, false, cx), - project.create_local_buffer("mno\npqr\nstu\nvwx\n", None, false, cx), + project.create_local_buffer("abc\ndef\nghi\njkl\nmno\npqr\nstu\nvwx\nyza\nbcd\nefg\nhij\nklm\nnop\nqrs\ntuv\nwxy\nzab\ncde\nfgh\n", None, false, cx), + project.create_local_buffer("aaa\nbbb\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj\nkkk\nlll\nmmm\nnnn\nooo\nppp\nqqq\nrrr\nsss\nttt\n", None, false, cx), ) }); @@ -19814,7 +19663,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) { // Remove some excerpts. 
leader.update(cx, |leader, cx| { leader.buffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts_for_path( + multibuffer.remove_excerpts( PathKey::with_sort_prefix(1, rel_path("b.txt").into_arc()), cx, ); @@ -23318,7 +23167,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut TestAppContext) { 0, cx, ); - assert_eq!(multibuffer.excerpt_ids().len(), 9); + assert_eq!(multibuffer.read(cx).excerpts().count(), 9); multibuffer }); @@ -23422,7 +23271,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut TestAppContext) { 0, cx, ); - assert_eq!(multibuffer.excerpt_ids().len(), 3); + assert_eq!(multibuffer.read(cx).excerpts().count(), 3); multibuffer }); @@ -24191,9 +24040,13 @@ async fn setup_indent_guides_editor( let buffer_id = cx.update_editor(|editor, window, cx| { editor.set_text(text, window, cx); - let buffer_ids = editor.buffer().read(cx).excerpt_buffer_ids(); - - buffer_ids[0] + editor + .buffer() + .read(cx) + .as_singleton() + .unwrap() + .read(cx) + .remote_id() }); (buffer_id, cx) @@ -24902,7 +24755,7 @@ async fn test_indent_guide_with_expanded_diff_hunks(cx: &mut TestAppContext) { editor .snapshot(window, cx) .buffer_snapshot() - .indent_guides_in_range(Anchor::min()..Anchor::max(), false, cx) + .indent_guides_in_range(Anchor::Min..Anchor::Max, false, cx) .map(|guide| (guide.start_row..=guide.end_row, guide.depth)) .collect::>() }); @@ -24957,12 +24810,19 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp let hunk_ranges = cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); let hunks = editor - .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) + .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot()) .collect::>(); - let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0]; + let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx); hunks .into_iter() - .map(|hunk| Anchor::range_in_buffer(excerpt_id, 
hunk.buffer_range)) + .map(|hunk| { + multibuffer_snapshot + .anchor_in_excerpt(hunk.buffer_range.start) + .unwrap() + ..multibuffer_snapshot + .anchor_in_excerpt(hunk.buffer_range.end) + .unwrap() + }) .collect::>() }); assert_eq!(hunk_ranges.len(), 2); @@ -25047,12 +24907,19 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp let hunk_ranges = cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); let hunks = editor - .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) + .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot()) .collect::>(); - let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0]; + let multibuffer_snapshot = snapshot.buffer_snapshot(); hunks .into_iter() - .map(|hunk| Anchor::range_in_buffer(excerpt_id, hunk.buffer_range)) + .map(|hunk| { + multibuffer_snapshot + .anchor_in_excerpt(hunk.buffer_range.start) + .unwrap() + ..multibuffer_snapshot + .anchor_in_excerpt(hunk.buffer_range.end) + .unwrap() + }) .collect::>() }); assert_eq!(hunk_ranges.len(), 2); @@ -25112,12 +24979,19 @@ async fn test_toggle_deletion_hunk_at_start_of_file( let hunk_ranges = cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); let hunks = editor - .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) + .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot()) .collect::>(); - let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0]; + let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx); hunks .into_iter() - .map(|hunk| Anchor::range_in_buffer(excerpt_id, hunk.buffer_range)) + .map(|hunk| { + multibuffer_snapshot + .anchor_in_excerpt(hunk.buffer_range.start) + .unwrap() + ..multibuffer_snapshot + .anchor_in_excerpt(hunk.buffer_range.end) + .unwrap() + }) .collect::>() }); assert_eq!(hunk_ranges.len(), 1); @@ -25217,12 +25091,17 @@ async fn 
test_expand_first_line_diff_hunk_keeps_deleted_lines_visible( // Expanding a diff hunk at the first line inserts deleted lines above the first buffer line. cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); - let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0]; + let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx); let hunks = editor - .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) + .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot()) .collect::>(); assert_eq!(hunks.len(), 1); - let hunk_range = Anchor::range_in_buffer(excerpt_id, hunks[0].buffer_range.clone()); + let hunk_range = multibuffer_snapshot + .anchor_in_excerpt(hunks[0].buffer_range.start) + .unwrap() + ..multibuffer_snapshot + .anchor_in_excerpt(hunks[0].buffer_range.end) + .unwrap(); editor.toggle_single_diff_hunk(hunk_range, cx) }); executor.run_until_parked(); @@ -25279,7 +25158,7 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) { multibuffer.set_excerpts_for_path( PathKey::with_sort_prefix(0, buffer.read(cx).file().unwrap().path().clone()), buffer.clone(), - vec![text::Anchor::MIN.to_point(&snapshot)..text::Anchor::MAX.to_point(&snapshot)], + vec![Point::zero()..snapshot.max_point()], 2, cx, ); @@ -25365,7 +25244,7 @@ async fn test_partially_staged_hunk(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); let hunks = editor - .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) + .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot()) .collect::>(); assert_eq!(hunks.len(), 1); assert_eq!( @@ -26450,7 +26329,7 @@ async fn test_folded_buffers_cleared_on_excerpts_removed(cx: &mut TestAppContext // `multi_buffer::Event::ExcerptsRemoved` event is emitted, which should be // picked up by the editor and update the display map accordingly. 
multi_buffer.update(cx, |multi_buffer, cx| { - multi_buffer.remove_excerpts_for_path(PathKey::sorted(0), cx) + multi_buffer.remove_excerpts(PathKey::sorted(0), cx) }); assert!(!editor.update(cx, |editor, cx| editor.has_any_buffer_folded(cx))); } @@ -26702,7 +26581,12 @@ async fn test_multi_buffer_navigation_with_folded_buffers(cx: &mut TestAppContex ); let mut editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx); - let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids(); + let buffer_ids = multi_buffer + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) + .collect::>(); // fold all but the second buffer, so that we test navigating between two // adjacent folded buffers, as well as folded buffers at the start and // end the multibuffer @@ -27038,7 +26922,12 @@ async fn assert_highlighted_edits( let text_anchor_edits = edits .clone() .into_iter() - .map(|(range, edit)| (range.start.text_anchor..range.end.text_anchor, edit.into())) + .map(|(range, edit)| { + ( + range.start.expect_text_anchor()..range.end.expect_text_anchor(), + edit.into(), + ) + }) .collect::>(); let edit_preview = window @@ -27055,10 +26944,11 @@ async fn assert_highlighted_edits( cx.update(|_window, cx| { let highlighted_edits = edit_prediction_edit_text( - snapshot.as_singleton().unwrap().2, + snapshot.as_singleton().unwrap(), &edits, &edit_preview, include_deletions, + &snapshot, cx, ); assertion_fn(highlighted_edits, cx) @@ -31479,12 +31369,8 @@ async fn test_paste_url_from_other_app_creates_markdown_link_selectively_in_mult Point::new(1, 21)..Point::new(1, 25), ]) }); - let first_buffer_id = multi_buffer - .read(cx) - .excerpt_buffer_ids() - .into_iter() - .next() - .unwrap(); + let snapshot = multi_buffer.read(cx).snapshot(cx); + let first_buffer_id = snapshot.all_buffer_ids().next().unwrap(); let first_buffer = multi_buffer.read(cx).buffer(first_buffer_id).unwrap(); first_buffer.update(cx, |buffer, cx| { 
buffer.set_language(Some(markdown_language.clone()), cx); @@ -32309,6 +32195,91 @@ async fn test_scroll_by_clicking_sticky_header(cx: &mut TestAppContext) { assert_eq!(selections, vec![empty_range(4, 5)]); } +#[gpui::test] +async fn test_clicking_sticky_header_sets_character_select_mode(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.editor.sticky_scroll = Some(settings::StickyScrollContent { + enabled: Some(true), + }) + }); + }); + }); + let mut cx = EditorTestContext::new(cx).await; + + let line_height = cx.update_editor(|editor, window, cx| { + editor + .style(cx) + .text + .line_height_in_pixels(window.rem_size()) + }); + + let buffer = indoc! {" + fn foo() { + let abc = 123; + } + ˇstruct Bar; + "}; + cx.set_state(&buffer); + + cx.update_editor(|editor, _, cx| { + editor + .buffer() + .read(cx) + .as_singleton() + .unwrap() + .update(cx, |buffer, cx| { + buffer.set_language(Some(rust_lang()), cx); + }) + }); + + let text_origin_x = cx.update_editor(|editor, _, _| { + editor + .last_position_map + .as_ref() + .unwrap() + .text_hitbox + .bounds + .origin + .x + }); + + cx.update_editor(|editor, window, cx| { + // Double click on `struct` to select it + editor.begin_selection(DisplayPoint::new(DisplayRow(3), 1), false, 2, window, cx); + editor.end_selection(window, cx); + + // Scroll down one row to make `fn foo() {` a sticky header + editor.scroll(gpui::Point { x: 0., y: 1. 
}, None, window, cx); + }); + cx.run_until_parked(); + + // Click at the start of the `fn foo() {` sticky header + cx.simulate_click( + gpui::Point { + x: text_origin_x, + y: 0.5 * line_height, + }, + Modifiers::none(), + ); + cx.run_until_parked(); + + // Shift-click at the end of `fn foo() {` to select the whole row + cx.update_editor(|editor, window, cx| { + editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 1, window, cx); + editor.end_selection(window, cx); + }); + cx.run_until_parked(); + + let selections = cx.update_editor(|editor, _, cx| display_ranges(editor, cx)); + assert_eq!( + selections, + vec![DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 10)] + ); +} + #[gpui::test] async fn test_next_prev_reference(cx: &mut TestAppContext) { const CYCLE_POSITIONS: &[&'static str] = &[ @@ -32445,7 +32416,12 @@ async fn test_multibuffer_selections_with_folding(cx: &mut TestAppContext) { }); let mut cx = EditorTestContext::for_editor_in(editor.clone(), cx).await; - let buffer_ids = cx.multibuffer(|mb, _| mb.excerpt_buffer_ids()); + let buffer_ids = cx.multibuffer(|mb, cx| { + mb.snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) + .collect::>() + }); cx.assert_excerpts_with_selections(indoc! {" [EXCERPT] @@ -33685,7 +33661,7 @@ async fn test_diff_review_button_shown_when_ai_enabled(cx: &mut TestAppContext) } /// Helper function to create a DiffHunkKey for testing. -/// Uses Anchor::min() as a placeholder anchor since these tests don't need +/// Uses Anchor::Min as a placeholder anchor since these tests don't need /// real buffer positioning. 
fn test_hunk_key(file_path: &str) -> DiffHunkKey { DiffHunkKey { @@ -33694,7 +33670,7 @@ fn test_hunk_key(file_path: &str) -> DiffHunkKey { } else { Arc::from(util::rel_path::RelPath::unix(file_path).unwrap()) }, - hunk_start_anchor: Anchor::min(), + hunk_start_anchor: Anchor::Min, } } @@ -33717,7 +33693,7 @@ fn add_test_comment( comment: &str, cx: &mut Context, ) -> usize { - editor.add_review_comment(key, comment.to_string(), Anchor::min()..Anchor::max(), cx) + editor.add_review_comment(key, comment.to_string(), Anchor::Min..Anchor::Max, cx) } #[gpui::test] diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 9ce080c87bf82ec1098e2a4b1db6bc6a65d22828..7a532dc7a75ea3583456be6611ef072cd7692bc7 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -54,7 +54,7 @@ use itertools::Itertools; use language::{HighlightedText, IndentGuideSettings, language_settings::ShowWhitespaceSetting}; use markdown::Markdown; use multi_buffer::{ - Anchor, ExcerptId, ExcerptInfo, ExpandExcerptDirection, ExpandInfo, MultiBufferPoint, + Anchor, ExcerptBoundaryInfo, ExpandExcerptDirection, ExpandInfo, MultiBufferPoint, MultiBufferRow, RowInfo, }; @@ -1289,7 +1289,9 @@ impl EditorElement { cx.notify(); } - if let Some((bounds, buffer_id, blame_entry)) = &position_map.inline_blame_bounds { + if text_hovered + && let Some((bounds, buffer_id, blame_entry)) = &position_map.inline_blame_bounds + { let mouse_over_inline_blame = bounds.contains(&event.position); let mouse_over_popover = editor .inline_blame_popover @@ -1388,13 +1390,13 @@ impl EditorElement { .snapshot .display_point_to_anchor(valid_point, Bias::Left); - if let Some((buffer_snapshot, file)) = position_map + if let Some((buffer_anchor, buffer_snapshot)) = position_map .snapshot .buffer_snapshot() - .buffer_for_excerpt(buffer_anchor.excerpt_id) - .and_then(|buffer| buffer.file().map(|file| (buffer, file))) + .anchor_to_buffer_anchor(buffer_anchor) + && let Some(file) = 
buffer_snapshot.file() { - let as_point = text::ToPoint::to_point(&buffer_anchor.text_anchor, buffer_snapshot); + let as_point = text::ToPoint::to_point(&buffer_anchor, buffer_snapshot); let is_visible = editor .gutter_breakpoint_indicator @@ -1750,7 +1752,7 @@ impl EditorElement { // Remote cursors if let Some(collaboration_hub) = &editor.collaboration_hub { for remote_selection in snapshot.remote_selections_in_range( - &(Anchor::min()..Anchor::max()), + &(Anchor::Min..Anchor::Max), collaboration_hub.deref(), cx, ) { @@ -2587,12 +2589,6 @@ impl EditorElement { const INLINE_SLOT_CHAR_LIMIT: u32 = 4; const MAX_ALTERNATE_DISTANCE: u32 = 8; - let excerpt_id = snapshot - .display_snapshot - .buffer_snapshot() - .excerpt_containing(buffer_point..buffer_point) - .map(|excerpt| excerpt.id()); - let is_valid_row = |row_candidate: u32| -> bool { // move to other row if folded row if snapshot.is_line_folded(MultiBufferRow(row_candidate)) { @@ -2608,13 +2604,18 @@ impl EditorElement { row: row_candidate, column: 0, }; - let candidate_excerpt_id = snapshot + // move to other row if different excerpt + let range = if candidate_point < buffer_point { + candidate_point..buffer_point + } else { + buffer_point..candidate_point + }; + if snapshot .display_snapshot .buffer_snapshot() - .excerpt_containing(candidate_point..candidate_point) - .map(|excerpt| excerpt.id()); - // move to other row if different excerpt - if excerpt_id != candidate_excerpt_id { + .excerpt_containing(range) + .is_none() + { return false; } } @@ -2794,7 +2795,7 @@ impl EditorElement { .newest::(&editor_snapshot.display_snapshot) .head(); - let Some((buffer, buffer_point, _)) = editor_snapshot + let Some((buffer, buffer_point)) = editor_snapshot .buffer_snapshot() .point_to_buffer_point(cursor_point) else { @@ -3387,8 +3388,8 @@ impl EditorElement { .enumerate() .map(|(ix, row_info)| { let ExpandInfo { - excerpt_id, direction, + start_anchor, } = row_info.expand_info?; let icon_name = match direction { @@ 
-3417,7 +3418,7 @@ impl EditorElement { .width(width) .on_click(move |_, window, cx| { editor.update(cx, |editor, cx| { - editor.expand_excerpt(excerpt_id, direction, window, cx); + editor.expand_excerpt(start_anchor, direction, window, cx); }); }) .tooltip(Tooltip::for_action_title( @@ -3884,7 +3885,7 @@ impl EditorElement { selected_buffer_ids: &Vec, latest_selection_anchors: &HashMap, is_row_soft_wrapped: impl Copy + Fn(usize) -> bool, - sticky_header_excerpt_id: Option, + sticky_header_excerpt_id: Option, indent_guides: &Option>, block_resize_offset: &mut i32, window: &mut Window, @@ -3972,7 +3973,7 @@ impl EditorElement { let mut result = v_flex().id(block_id).w_full().pr(editor_margins.right); if self.should_show_buffer_headers() { - let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id); + let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id()); let jump_data = header_jump_data( snapshot, block_row_start, @@ -4027,8 +4028,8 @@ impl EditorElement { latest_selection_anchors, ); - if sticky_header_excerpt_id != Some(excerpt.id) { - let selected = selected_buffer_ids.contains(&excerpt.buffer_id); + if sticky_header_excerpt_id != Some(excerpt.buffer_id()) { + let selected = selected_buffer_ids.contains(&excerpt.buffer_id()); result = result.child(div().pr(editor_margins.right).child( self.render_buffer_header( @@ -4188,7 +4189,7 @@ impl EditorElement { fn render_buffer_header( &self, - for_excerpt: &ExcerptInfo, + for_excerpt: &ExcerptBoundaryInfo, is_folded: bool, is_selected: bool, is_sticky: bool, @@ -4225,7 +4226,7 @@ impl EditorElement { selected_buffer_ids: &Vec, latest_selection_anchors: &HashMap, is_row_soft_wrapped: impl Copy + Fn(usize) -> bool, - sticky_header_excerpt_id: Option, + sticky_header_excerpt_id: Option, indent_guides: &Option>, window: &mut Window, cx: &mut App, @@ -4518,7 +4519,7 @@ impl EditorElement { let editor_bg_color = cx.theme().colors().editor_background; - let selected = 
selected_buffer_ids.contains(&excerpt.buffer_id); + let selected = selected_buffer_ids.contains(&excerpt.buffer_id()); let available_width = hitbox.bounds.size.width - right_margin; @@ -6732,7 +6733,13 @@ impl EditorElement { SelectionEffects::scroll(Autoscroll::top_relative(line_index)), window, cx, - |selections| selections.select_ranges([anchor..anchor]), + |selections| { + selections.clear_disjoint(); + selections.set_pending_anchor_range( + anchor..anchor, + crate::SelectMode::Character, + ); + }, ); cx.stop_propagation(); }); @@ -7886,23 +7893,26 @@ impl EditorElement { return; } let buffer_snapshot = &display_snapshot.buffer_snapshot(); - for (buffer, buffer_range, excerpt_id) in - buffer_snapshot.range_to_buffer_ranges(anchor_range.start..=anchor_range.end) + for (excerpt_buffer_snapshot, buffer_range, _) in + buffer_snapshot.range_to_buffer_ranges(anchor_range.start..anchor_range.end) { - let buffer_range = - buffer.anchor_after(buffer_range.start)..buffer.anchor_before(buffer_range.end); + let buffer_range = excerpt_buffer_snapshot.anchor_after(buffer_range.start) + ..excerpt_buffer_snapshot.anchor_before(buffer_range.end); selections.extend(debug_ranges.ranges.iter().flat_map(|debug_range| { - let player_color = theme - .players() - .color_for_participant(debug_range.occurrence_index as u32 + 1); - debug_range.ranges.iter().filter_map(move |range| { - if range.start.buffer_id != Some(buffer.remote_id()) { + debug_range.ranges.iter().filter_map(|range| { + let player_color = theme + .players() + .color_for_participant(debug_range.occurrence_index as u32 + 1); + if range.start.buffer_id != excerpt_buffer_snapshot.remote_id() { return None; } - let clipped_start = range.start.max(&buffer_range.start, buffer); - let clipped_end = range.end.min(&buffer_range.end, buffer); + let clipped_start = range + .start + .max(&buffer_range.start, &excerpt_buffer_snapshot); + let clipped_end = + range.end.min(&buffer_range.end, &excerpt_buffer_snapshot); let range = 
buffer_snapshot - .anchor_range_in_excerpt(excerpt_id, *clipped_start..*clipped_end)?; + .buffer_anchor_range_to_anchor_range(*clipped_start..*clipped_end)?; let start = range.start.to_display_point(display_snapshot); let end = range.end.to_display_point(display_snapshot); let selection_layout = SelectionLayout { @@ -8142,49 +8152,23 @@ pub(crate) fn header_jump_data( editor_snapshot: &EditorSnapshot, block_row_start: DisplayRow, height: u32, - first_excerpt: &ExcerptInfo, + first_excerpt: &ExcerptBoundaryInfo, latest_selection_anchors: &HashMap, ) -> JumpData { - let jump_target = if let Some(anchor) = latest_selection_anchors.get(&first_excerpt.buffer_id) - && let Some(range) = editor_snapshot.context_range_for_excerpt(anchor.excerpt_id) - && let Some(buffer) = editor_snapshot - .buffer_snapshot() - .buffer_for_excerpt(anchor.excerpt_id) + let multibuffer_snapshot = editor_snapshot.buffer_snapshot(); + let buffer = first_excerpt.buffer(multibuffer_snapshot); + let (jump_anchor, jump_buffer) = if let Some(anchor) = + latest_selection_anchors.get(&first_excerpt.buffer_id()) + && let Some((jump_anchor, selection_buffer)) = + multibuffer_snapshot.anchor_to_buffer_anchor(*anchor) { - JumpTargetInExcerptInput { - id: anchor.excerpt_id, - buffer, - excerpt_start_anchor: range.start, - jump_anchor: anchor.text_anchor, - } + (jump_anchor, selection_buffer) } else { - JumpTargetInExcerptInput { - id: first_excerpt.id, - buffer: &first_excerpt.buffer, - excerpt_start_anchor: first_excerpt.range.context.start, - jump_anchor: first_excerpt.range.primary.start, - } + (first_excerpt.range.primary.start, buffer) }; - header_jump_data_inner(editor_snapshot, block_row_start, height, &jump_target) -} - -struct JumpTargetInExcerptInput<'a> { - id: ExcerptId, - buffer: &'a language::BufferSnapshot, - excerpt_start_anchor: text::Anchor, - jump_anchor: text::Anchor, -} - -fn header_jump_data_inner( - snapshot: &EditorSnapshot, - block_row_start: DisplayRow, - height: u32, - 
for_excerpt: &JumpTargetInExcerptInput, -) -> JumpData { - let buffer = &for_excerpt.buffer; - let jump_position = language::ToPoint::to_point(&for_excerpt.jump_anchor, buffer); - let excerpt_start = for_excerpt.excerpt_start_anchor; - let rows_from_excerpt_start = if for_excerpt.jump_anchor == excerpt_start { + let excerpt_start = first_excerpt.range.context.start; + let jump_position = language::ToPoint::to_point(&jump_anchor, jump_buffer); + let rows_from_excerpt_start = if jump_anchor == excerpt_start { 0 } else { let excerpt_start_point = language::ToPoint::to_point(&excerpt_start, buffer); @@ -8193,15 +8177,14 @@ fn header_jump_data_inner( let line_offset_from_top = (block_row_start.0 + height + rows_from_excerpt_start) .saturating_sub( - snapshot + editor_snapshot .scroll_anchor - .scroll_position(&snapshot.display_snapshot) + .scroll_position(&editor_snapshot.display_snapshot) .y as u32, ); JumpData::MultiBufferPoint { - excerpt_id: for_excerpt.id, - anchor: for_excerpt.jump_anchor, + anchor: jump_anchor, position: jump_position, line_offset_from_top, } @@ -8209,7 +8192,7 @@ fn header_jump_data_inner( pub(crate) fn render_buffer_header( editor: &Entity, - for_excerpt: &ExcerptInfo, + for_excerpt: &ExcerptBoundaryInfo, is_folded: bool, is_selected: bool, is_sticky: bool, @@ -8221,6 +8204,8 @@ pub(crate) fn render_buffer_header( let multi_buffer = editor_read.buffer.read(cx); let is_read_only = editor_read.read_only(cx); let editor_handle: &dyn ItemHandle = editor; + let multibuffer_snapshot = multi_buffer.snapshot(cx); + let buffer = for_excerpt.buffer(&multibuffer_snapshot); let breadcrumbs = if is_selected { editor_read.breadcrumbs_inner(cx) @@ -8228,31 +8213,30 @@ pub(crate) fn render_buffer_header( None }; + let buffer_id = for_excerpt.buffer_id(); let file_status = multi_buffer .all_diff_hunks_expanded() - .then(|| editor_read.status_for_buffer_id(for_excerpt.buffer_id, cx)) + .then(|| editor_read.status_for_buffer_id(buffer_id, cx)) .flatten(); - let 
indicator = multi_buffer - .buffer(for_excerpt.buffer_id) - .and_then(|buffer| { - let buffer = buffer.read(cx); - let indicator_color = match (buffer.has_conflict(), buffer.is_dirty()) { - (true, _) => Some(Color::Warning), - (_, true) => Some(Color::Accent), - (false, false) => None, - }; - indicator_color.map(|indicator_color| Indicator::dot().color(indicator_color)) - }); + let indicator = multi_buffer.buffer(buffer_id).and_then(|buffer| { + let buffer = buffer.read(cx); + let indicator_color = match (buffer.has_conflict(), buffer.is_dirty()) { + (true, _) => Some(Color::Warning), + (_, true) => Some(Color::Accent), + (false, false) => None, + }; + indicator_color.map(|indicator_color| Indicator::dot().color(indicator_color)) + }); let include_root = editor_read .project .as_ref() .map(|project| project.read(cx).visible_worktrees(cx).count() > 1) .unwrap_or_default(); - let file = for_excerpt.buffer.file(); + let file = buffer.file(); let can_open_excerpts = file.is_none_or(|file| file.can_open()); let path_style = file.map(|file| file.path_style(cx)); - let relative_path = for_excerpt.buffer.resolve_file_path(include_root, cx); + let relative_path = buffer.resolve_file_path(include_root, cx); let (parent_path, filename) = if let Some(path) = &relative_path { if let Some(path_style) = path_style { let (dir, file_name) = path_style.split(path); @@ -8267,7 +8251,7 @@ pub(crate) fn render_buffer_header( let colors = cx.theme().colors(); let header = div() - .id(("buffer-header", for_excerpt.buffer_id.to_proto())) + .id(("buffer-header", buffer_id.to_proto())) .p(BUFFER_HEADER_PADDING) .w_full() .h(FILE_HEADER_HEIGHT as f32 * window.line_height()) @@ -8295,7 +8279,7 @@ pub(crate) fn render_buffer_header( .hover(|style| style.bg(colors.element_hover)) .map(|header| { let editor = editor.clone(); - let buffer_id = for_excerpt.buffer_id; + let buffer_id = for_excerpt.buffer_id(); let toggle_chevron_icon = FileIcons::get_chevron_icon(!is_folded, 
cx).map(Icon::from_path); let button_size = rems_from_px(28.); @@ -8359,7 +8343,7 @@ pub(crate) fn render_buffer_header( .addons .values() .filter_map(|addon| { - addon.render_buffer_header_controls(for_excerpt, window, cx) + addon.render_buffer_header_controls(for_excerpt, buffer, window, cx) }) .take(1), ) @@ -8452,7 +8436,7 @@ pub(crate) fn render_buffer_header( ), ) }) - .when(!for_excerpt.buffer.capability.editable(), |el| { + .when(!buffer.capability.editable(), |el| { el.child(Icon::new(IconName::FileLock).color(Color::Muted)) }) .when_some(breadcrumbs, |then, breadcrumbs| { @@ -8503,7 +8487,7 @@ pub(crate) fn render_buffer_header( }) .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) .on_click(window.listener_for(editor, { - let buffer_id = for_excerpt.buffer_id; + let buffer_id = for_excerpt.buffer_id(); move |editor, e: &ClickEvent, window, cx| { if e.modifiers().alt { editor.open_excerpts_common( @@ -8525,7 +8509,7 @@ pub(crate) fn render_buffer_header( ), ); - let file = for_excerpt.buffer.file().cloned(); + let file = buffer.file().cloned(); let editor = editor.clone(); right_click_menu("buffer-header-context-menu") @@ -9847,14 +9831,14 @@ impl Element for EditorElement { }; let start_anchor = if start_row == Default::default() { - Anchor::min() + Anchor::Min } else { snapshot.buffer_snapshot().anchor_before( DisplayPoint::new(start_row, 0).to_offset(&snapshot, Bias::Left), ) }; let end_anchor = if end_row > max_row { - Anchor::max() + Anchor::Max } else { snapshot.buffer_snapshot().anchor_before( DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right), @@ -9880,7 +9864,7 @@ impl Element for EditorElement { editor.update(cx, |editor, cx| { let snapshot = editor.snapshot(window, cx); let start_anchor = if start_row == Default::default() { - Anchor::min() + Anchor::Min } else { snapshot.buffer_snapshot().anchor_before( DisplayPoint::new(start_row, 0) @@ -9888,7 +9872,7 @@ impl Element for EditorElement { ) }; let end_anchor = if 
end_row > max_row { - Anchor::max() + Anchor::Max } else { snapshot.buffer_snapshot().anchor_before( DisplayPoint::new(end_row, 0) @@ -10044,9 +10028,11 @@ impl Element for EditorElement { HashMap::default(); for selection in all_anchor_selections.iter() { let head = selection.head(); - if let Some(buffer_id) = head.text_anchor.buffer_id { + if let Some((text_anchor, _)) = + snapshot.buffer_snapshot().anchor_to_buffer_anchor(head) + { anchors_by_buffer - .entry(buffer_id) + .entry(text_anchor.buffer_id) .and_modify(|(latest_id, latest_anchor)| { if selection.id > *latest_id { *latest_id = selection.id; @@ -10314,8 +10300,9 @@ impl Element for EditorElement { } else { None }; - let sticky_header_excerpt_id = - sticky_header_excerpt.as_ref().map(|top| top.excerpt.id); + let sticky_header_excerpt_id = sticky_header_excerpt + .as_ref() + .map(|top| top.excerpt.buffer_id()); let buffer = snapshot.buffer_snapshot(); let start_buffer_row = MultiBufferRow(start_anchor.to_point(&buffer).row); @@ -12960,7 +12947,7 @@ mod tests { editor.insert_blocks( [BlockProperties { style: BlockStyle::Fixed, - placement: BlockPlacement::Above(Anchor::min()), + placement: BlockPlacement::Above(Anchor::Min), height: Some(3), render: Arc::new(|cx| div().h(3. 
* cx.window.line_height()).into_any()), priority: 0, diff --git a/crates/editor/src/folding_ranges.rs b/crates/editor/src/folding_ranges.rs index de32f481d52e501eea8f7814f4b114fbdbbd0458..c59a3e004a8b4f791af2d44be19878239ece1d42 100644 --- a/crates/editor/src/folding_ranges.rs +++ b/crates/editor/src/folding_ranges.rs @@ -21,9 +21,9 @@ impl Editor { }; let buffers_to_query = self - .visible_excerpts(true, cx) - .into_values() - .map(|(buffer, ..)| buffer) + .visible_buffers(cx) + .into_iter() + .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx)) .chain(for_buffer.and_then(|id| self.buffer.read(cx).buffer(id))) .filter(|buffer| { let id = buffer.read(cx).remote_id(); diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 827d182a0f11508ae301691f832e7ec04a728364..9ba5c4aa19cd66c454bf633a04636cd63bd180b8 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -204,8 +204,8 @@ impl GitBlame { git_blame.generate(cx); } } - multi_buffer::Event::ExcerptsAdded { .. } - | multi_buffer::Event::ExcerptsEdited { .. } => git_blame.regenerate_on_edit(cx), + multi_buffer::Event::BufferRangesUpdated { .. } + | multi_buffer::Event::BuffersEdited { .. 
} => git_blame.regenerate_on_edit(cx), _ => {} }, ); @@ -346,11 +346,10 @@ impl GitBlame { let Some(multi_buffer) = self.multi_buffer.upgrade() else { return; }; - multi_buffer - .read(cx) - .excerpt_buffer_ids() - .into_iter() - .for_each(|id| self.sync(cx, id)); + let snapshot = multi_buffer.read(cx).snapshot(cx); + for id in snapshot.all_buffer_ids() { + self.sync(cx, id) + } } fn sync(&mut self, cx: &mut App, buffer_id: BufferId) { @@ -497,10 +496,10 @@ impl GitBlame { } let buffers_to_blame = self .multi_buffer - .update(cx, |multi_buffer, _| { - multi_buffer + .update(cx, |multi_buffer, cx| { + let snapshot = multi_buffer.snapshot(cx); + snapshot .all_buffer_ids() - .into_iter() .filter_map(|id| Some(multi_buffer.buffer(id)?.downgrade())) .collect::>() }) diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 3a6ff4ec0e4fc53d19bfb51a10b1f7790933b175..7f05f4355bfaa218dbc26aab77d949b2146816d7 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -237,7 +237,8 @@ impl Editor { let Some(mb_anchor) = self .buffer() .read(cx) - .buffer_anchor_to_anchor(&buffer, anchor, cx) + .snapshot(cx) + .anchor_in_excerpt(anchor) else { return Task::ready(Ok(Navigated::No)); }; @@ -324,16 +325,13 @@ pub fn show_link_definition( return; } - let trigger_anchor = trigger_point.anchor(); - let anchor = snapshot.buffer_snapshot().anchor_before(*trigger_anchor); - let Some(buffer) = editor.buffer().read(cx).buffer_for_anchor(anchor, cx) else { + let anchor = trigger_point.anchor().bias_left(snapshot.buffer_snapshot()); + let Some((anchor, _)) = snapshot.buffer_snapshot().anchor_to_buffer_anchor(anchor) else { + return; + }; + let Some(buffer) = editor.buffer.read(cx).buffer(anchor.buffer_id) else { return; }; - let Anchor { - excerpt_id, - text_anchor, - .. 
- } = anchor; let same_kind = hovered_link_state.preferred_kind == preferred_kind || hovered_link_state .links @@ -363,39 +361,39 @@ pub fn show_link_definition( async move { let result = match &trigger_point { TriggerPoint::Text(_) => { - if let Some((url_range, url)) = find_url(&buffer, text_anchor, cx.clone()) { + if let Some((url_range, url)) = find_url(&buffer, anchor, cx.clone()) { this.read_with(cx, |_, _| { let range = maybe!({ let range = - snapshot.anchor_range_in_excerpt(excerpt_id, url_range)?; + snapshot.buffer_anchor_range_to_anchor_range(url_range)?; Some(RangeInEditor::Text(range)) }); (range, vec![HoverLink::Url(url)]) }) .ok() } else if let Some((filename_range, filename)) = - find_file(&buffer, project.clone(), text_anchor, cx).await + find_file(&buffer, project.clone(), anchor, cx).await { let range = maybe!({ let range = - snapshot.anchor_range_in_excerpt(excerpt_id, filename_range)?; + snapshot.buffer_anchor_range_to_anchor_range(filename_range)?; Some(RangeInEditor::Text(range)) }); Some((range, vec![HoverLink::File(filename)])) } else if let Some(provider) = provider { let task = cx.update(|_, cx| { - provider.definitions(&buffer, text_anchor, preferred_kind, cx) + provider.definitions(&buffer, anchor, preferred_kind, cx) })?; if let Some(task) = task { task.await.ok().flatten().map(|definition_result| { ( definition_result.iter().find_map(|link| { link.origin.as_ref().and_then(|origin| { - let range = snapshot.anchor_range_in_excerpt( - excerpt_id, - origin.range.clone(), - )?; + let range = snapshot + .buffer_anchor_range_to_anchor_range( + origin.range.clone(), + )?; Some(RangeInEditor::Text(range)) }) }), @@ -1602,7 +1600,11 @@ mod tests { cx.set_state(input); let (position, snapshot) = cx.editor(|editor, _, cx| { - let positions = editor.selections.newest_anchor().head().text_anchor; + let positions = editor + .selections + .newest_anchor() + .head() + .expect_text_anchor(); let snapshot = editor .buffer() .clone() diff --git 
a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 9b127a8f1bc089d9cee28254c6b8ffc181677765..55350a9c679a10ea8597ae8c923c33af34d71360 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -17,7 +17,7 @@ use gpui::{ use itertools::Itertools; use language::{DiagnosticEntry, Language, LanguageRegistry}; use lsp::DiagnosticSeverity; -use markdown::{Markdown, MarkdownElement, MarkdownStyle}; +use markdown::{CopyButtonVisibility, Markdown, MarkdownElement, MarkdownStyle}; use multi_buffer::{MultiBufferOffset, ToOffset, ToPoint}; use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart}; use settings::Settings; @@ -275,12 +275,12 @@ fn show_hover( let snapshot = editor.snapshot(window, cx); - let (buffer, buffer_position) = editor + let (buffer_position, _) = editor .buffer .read(cx) - .text_anchor_for_position(anchor, cx)?; - - let (excerpt_id, _, _) = editor.buffer().read(cx).excerpt_containing(anchor, cx)?; + .snapshot(cx) + .anchor_to_buffer_anchor(anchor)?; + let buffer = editor.buffer.read(cx).buffer(buffer_position.buffer_id)?; let language_registry = editor .project() @@ -515,7 +515,7 @@ fn show_hover( .and_then(|range| { let range = snapshot .buffer_snapshot() - .anchor_range_in_excerpt(excerpt_id, range)?; + .buffer_anchor_range_to_anchor_range(range)?; Some(range) }) .or_else(|| { @@ -1040,8 +1040,7 @@ impl InfoPopover { .child( MarkdownElement::new(markdown, hover_markdown_style(window, cx)) .code_block_renderer(markdown::CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::Hidden, border: false, }) .on_url_click(open_markdown_url) @@ -1155,8 +1154,7 @@ impl DiagnosticPopover { diagnostics_markdown_style(window, cx), ) .code_block_renderer(markdown::CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::Hidden, border: false, }) 
.on_url_click( diff --git a/crates/editor/src/inlays.rs b/crates/editor/src/inlays.rs index 8c46e797cada703c9101fd91e670cbdd4ea713ac..689e273ce28310cb5051b0eae108b74de48d3ac1 100644 --- a/crates/editor/src/inlays.rs +++ b/crates/editor/src/inlays.rs @@ -45,6 +45,7 @@ impl InlaySplice { #[derive(Debug, Clone)] pub struct Inlay { pub id: InlayId, + // TODO this could be an ExcerptAnchor pub position: Anchor, pub content: InlayContent, } diff --git a/crates/editor/src/inlays/inlay_hints.rs b/crates/editor/src/inlays/inlay_hints.rs index 8422937ab81a392ad7d1187adcab765cc7f6875f..ac3133ea89c5da7cd861d608bcbd61975ee9535c 100644 --- a/crates/editor/src/inlays/inlay_hints.rs +++ b/crates/editor/src/inlays/inlay_hints.rs @@ -14,7 +14,7 @@ use language::{ language_settings::{InlayHintKind, InlayHintSettings}, }; use lsp::LanguageServerId; -use multi_buffer::{Anchor, ExcerptId, MultiBufferSnapshot}; +use multi_buffer::{Anchor, MultiBufferSnapshot}; use project::{ HoverBlock, HoverBlockKind, InlayHintLabel, InlayHintLabelPartTooltip, InlayHintTooltip, InvalidationStrategy, ResolveState, @@ -110,14 +110,15 @@ impl LspInlayHintData { &mut self, buffer_ids: &HashSet, current_hints: impl IntoIterator, + snapshot: &MultiBufferSnapshot, ) { for buffer_id in buffer_ids { self.hint_refresh_tasks.remove(buffer_id); self.hint_chunk_fetching.remove(buffer_id); } for hint in current_hints { - if let Some(buffer_id) = hint.position.text_anchor.buffer_id { - if buffer_ids.contains(&buffer_id) { + if let Some((text_anchor, _)) = snapshot.anchor_to_buffer_anchor(hint.position) { + if buffer_ids.contains(&text_anchor.buffer_id) { self.added_hints.remove(&hint.id); } } @@ -237,7 +238,7 @@ pub enum InlayHintRefreshReason { server_id: LanguageServerId, request_id: Option, }, - ExcerptsRemoved(Vec), + BuffersRemoved(Vec), } impl Editor { @@ -303,7 +304,7 @@ impl Editor { let debounce = match &reason { InlayHintRefreshReason::SettingsChange(_) | InlayHintRefreshReason::Toggle(_) - | 
InlayHintRefreshReason::ExcerptsRemoved(_) + | InlayHintRefreshReason::BuffersRemoved(_) | InlayHintRefreshReason::ModifiersChanged(_) => None, _may_need_lsp_call => self.inlay_hints.as_ref().and_then(|inlay_hints| { if invalidate_cache.should_invalidate() { @@ -314,7 +315,8 @@ impl Editor { }), }; - let mut visible_excerpts = self.visible_excerpts(true, cx); + let mut visible_excerpts = self.visible_buffer_ranges(cx); + visible_excerpts.retain(|(snapshot, _, _)| self.is_lsp_relevant(snapshot.file(), cx)); let mut invalidate_hints_for_buffers = HashSet::default(); let ignore_previous_fetches = match reason { @@ -324,7 +326,7 @@ impl Editor { | InlayHintRefreshReason::ServerRemoved => true, InlayHintRefreshReason::NewLinesShown | InlayHintRefreshReason::RefreshRequested { .. } - | InlayHintRefreshReason::ExcerptsRemoved(_) => false, + | InlayHintRefreshReason::BuffersRemoved(_) => false, InlayHintRefreshReason::BufferEdited(buffer_id) => { let Some(affected_language) = self .buffer() @@ -351,8 +353,8 @@ impl Editor { ); semantics_provider.invalidate_inlay_hints(&invalidate_hints_for_buffers, cx); - visible_excerpts.retain(|_, (visible_buffer, _, _)| { - visible_buffer.read(cx).language() == Some(&affected_language) + visible_excerpts.retain(|(buffer_snapshot, _, _)| { + buffer_snapshot.language() == Some(&affected_language) }); false } @@ -371,6 +373,7 @@ impl Editor { inlay_hints.clear_for_buffers( &invalidate_hints_for_buffers, Self::visible_inlay_hints(self.display_map.read(cx)), + &multi_buffer.read(cx).snapshot(cx), ); } } @@ -379,14 +382,18 @@ impl Editor { .extend(invalidate_hints_for_buffers); let mut buffers_to_query = HashMap::default(); - for (_, (buffer, buffer_version, visible_range)) in visible_excerpts { - let buffer_id = buffer.read(cx).remote_id(); + for (buffer_snapshot, visible_range, _) in visible_excerpts { + let buffer_id = buffer_snapshot.remote_id(); if !self.registered_buffers.contains_key(&buffer_id) { continue; } - let buffer_snapshot = 
buffer.read(cx).snapshot(); + let Some(buffer) = multi_buffer.read(cx).buffer(buffer_id) else { + continue; + }; + + let buffer_version = buffer_snapshot.version().clone(); let buffer_anchor_range = buffer_snapshot.anchor_before(visible_range.start) ..buffer_snapshot.anchor_after(visible_range.end); @@ -514,13 +521,14 @@ impl Editor { } } } - InlayHintRefreshReason::ExcerptsRemoved(excerpts_removed) => { + InlayHintRefreshReason::BuffersRemoved(buffers_removed) => { let to_remove = self .display_map .read(cx) .current_inlays() .filter_map(|inlay| { - if excerpts_removed.contains(&inlay.position.excerpt_id) { + let anchor = inlay.position.raw_text_anchor()?; + if buffers_removed.contains(&anchor.buffer_id) { Some(inlay.id) } else { None @@ -610,13 +618,11 @@ impl Editor { }) .max_by_key(|hint| hint.id) { - if let Some(ResolvedHint::Resolved(cached_hint)) = hovered_hint - .position - .text_anchor - .buffer_id - .and_then(|buffer_id| { + if let Some(ResolvedHint::Resolved(cached_hint)) = buffer_snapshot + .anchor_to_buffer_anchor(hovered_hint.position) + .and_then(|(anchor, _)| { lsp_store.update(cx, |lsp_store, cx| { - lsp_store.resolved_hint(buffer_id, hovered_hint.id, cx) + lsp_store.resolved_hint(anchor.buffer_id, hovered_hint.id, cx) }) }) { @@ -787,15 +793,19 @@ impl Editor { new_hints: Vec<(Range, anyhow::Result)>, cx: &mut Context, ) { + let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); let visible_inlay_hint_ids = Self::visible_inlay_hints(self.display_map.read(cx)) - .filter(|inlay| inlay.position.text_anchor.buffer_id == Some(buffer_id)) + .filter(|inlay| { + multi_buffer_snapshot + .anchor_to_buffer_anchor(inlay.position) + .map(|(anchor, _)| anchor.buffer_id) + == Some(buffer_id) + }) .map(|inlay| inlay.id) .collect::>(); let Some(inlay_hints) = &mut self.inlay_hints else { return; }; - - let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); let Some(buffer_snapshot) = self .buffer .read(cx) @@ -910,12 +920,10 @@ impl Editor { 
hints_to_remove.extend( Self::visible_inlay_hints(self.display_map.read(cx)) .filter(|inlay| { - inlay - .position - .text_anchor - .buffer_id - .is_none_or(|buffer_id| { - invalidate_hints_for_buffers.contains(&buffer_id) + multi_buffer_snapshot + .anchor_to_buffer_anchor(inlay.position) + .is_none_or(|(anchor, _)| { + invalidate_hints_for_buffers.contains(&anchor.buffer_id) }) }) .map(|inlay| inlay.id), @@ -2285,17 +2293,15 @@ pub mod tests { cx: &mut gpui::TestAppContext, ) -> Range { let ranges = editor - .update(cx, |editor, _window, cx| editor.visible_excerpts(true, cx)) + .update(cx, |editor, _window, cx| editor.visible_buffer_ranges(cx)) .unwrap(); assert_eq!( ranges.len(), 1, "Single buffer should produce a single excerpt with visible range" ); - let (_, (excerpt_buffer, _, excerpt_visible_range)) = ranges.into_iter().next().unwrap(); - excerpt_buffer.read_with(cx, |buffer, _| { - excerpt_visible_range.to_point(&buffer.snapshot()) - }) + let (buffer_snapshot, visible_range, _) = ranges.into_iter().next().unwrap(); + visible_range.to_point(&buffer_snapshot) } #[gpui::test] @@ -2968,7 +2974,7 @@ let c = 3;"# .await .unwrap(); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - let (buffer_1_excerpts, buffer_2_excerpts) = multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( PathKey::sorted(0), buffer_1.clone(), @@ -2983,15 +2989,8 @@ let c = 3;"# 0, cx, ); - let excerpt_ids = multibuffer.excerpt_ids(); - let buffer_1_excerpts = vec![excerpt_ids[0]]; - let buffer_2_excerpts = vec![excerpt_ids[1]]; - (buffer_1_excerpts, buffer_2_excerpts) }); - assert!(!buffer_1_excerpts.is_empty()); - assert!(!buffer_2_excerpts.is_empty()); - cx.executor().run_until_parked(); let editor = cx.add_window(|window, cx| { Editor::for_multibuffer(multibuffer, Some(project.clone()), window, cx) @@ -3092,7 +3091,7 @@ let c = 3;"# editor .update(cx, |editor, _, cx| { editor.buffer().update(cx, 
|multibuffer, cx| { - multibuffer.remove_excerpts_for_path(PathKey::sorted(1), cx); + multibuffer.remove_excerpts(PathKey::sorted(1), cx); }) }) .unwrap(); diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index d14078e79abdbfe40879da09221bad7bef47475a..28e920c28bd9854a38a5019622248fa79cd0a8e1 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1,7 +1,7 @@ use crate::{ ActiveDebugLine, Anchor, Autoscroll, BufferSerialization, Capability, Editor, EditorEvent, - EditorSettings, ExcerptId, ExcerptRange, FormatTarget, MultiBuffer, MultiBufferSnapshot, - NavigationData, ReportEditorEvent, SelectionEffects, ToPoint as _, + EditorSettings, ExcerptRange, FormatTarget, MultiBuffer, MultiBufferSnapshot, NavigationData, + ReportEditorEvent, SelectionEffects, ToPoint as _, display_map::HighlightKey, editor_settings::SeedQuerySetting, persistence::{EditorDb, SerializedEditor}, @@ -22,7 +22,7 @@ use language::{ SelectionGoal, proto::serialize_anchor as serialize_text_anchor, }; use lsp::DiagnosticSeverity; -use multi_buffer::MultiBufferOffset; +use multi_buffer::{MultiBufferOffset, PathKey}; use project::{ File, Project, ProjectItem as _, ProjectPath, lsp_store::FormatTrigger, project_settings::ProjectSettings, search::SearchQuery, @@ -33,14 +33,13 @@ use std::{ any::{Any, TypeId}, borrow::Cow, cmp::{self, Ordering}, - iter, ops::Range, path::{Path, PathBuf}, sync::Arc, }; use text::{BufferId, BufferSnapshot, Selection}; use ui::{IconDecorationKind, prelude::*}; -use util::{ResultExt, TryFutureExt, paths::PathExt}; +use util::{ResultExt, TryFutureExt, paths::PathExt, rel_path::RelPath}; use workspace::item::{Dedup, ItemSettings, SerializableItem, TabContentParams}; use workspace::{ CollaboratorId, ItemId, ItemNavHistory, ToolbarItemLocation, ViewId, Workspace, WorkspaceId, @@ -83,10 +82,11 @@ impl FollowableItem for Editor { }; let buffer_ids = state - .excerpts + .path_excerpts .iter() .map(|excerpt| excerpt.buffer_id) .collect::>(); 
+ let buffers = project.update(cx, |project, cx| { buffer_ids .iter() @@ -106,38 +106,32 @@ impl FollowableItem for Editor { multibuffer = MultiBuffer::singleton(buffers.pop().unwrap(), cx) } else { multibuffer = MultiBuffer::new(project.read(cx).capability()); - let mut sorted_excerpts = state.excerpts.clone(); - sorted_excerpts.sort_by_key(|e| e.id); - let sorted_excerpts = sorted_excerpts.into_iter().peekable(); - - for excerpt in sorted_excerpts { - let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else { + for path_with_ranges in state.path_excerpts { + let Some(path_key) = + path_with_ranges.path_key.and_then(deserialize_path_key) + else { continue; }; - - let mut insert_position = ExcerptId::min(); - for e in &state.excerpts { - if e.id == excerpt.id { - break; - } - if e.id < excerpt.id { - insert_position = ExcerptId::from_proto(e.id); - } - } - - let buffer = - buffers.iter().find(|b| b.read(cx).remote_id() == buffer_id); - - let Some(excerpt) = deserialize_excerpt_range(excerpt) else { + let Some(buffer_id) = BufferId::new(path_with_ranges.buffer_id).ok() + else { continue; }; - - let Some(buffer) = buffer else { continue }; - - multibuffer.insert_excerpts_with_ids_after( - insert_position, + let Some(buffer) = + buffers.iter().find(|b| b.read(cx).remote_id() == buffer_id) + else { + continue; + }; + let buffer_snapshot = buffer.read(cx).snapshot(); + let ranges = path_with_ranges + .ranges + .into_iter() + .filter_map(deserialize_excerpt_range) + .collect::>(); + multibuffer.update_path_excerpts( + path_key, buffer.clone(), - [excerpt], + &buffer_snapshot, + &ranges, cx, ); } @@ -158,6 +152,7 @@ impl FollowableItem for Editor { }) })?; + editor.update(cx, |editor, cx| editor.text(cx)); update_editor_from_message( editor.downgrade(), project, @@ -215,38 +210,43 @@ impl FollowableItem for Editor { let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let scroll_anchor = self.scroll_manager.native_anchor(&display_snapshot, cx); 
let buffer = self.buffer.read(cx); - let excerpts = buffer - .read(cx) - .excerpts() - .map(|(id, buffer, range)| proto::Excerpt { - id: id.to_proto(), - buffer_id: buffer.remote_id().into(), - context_start: Some(serialize_text_anchor(&range.context.start)), - context_end: Some(serialize_text_anchor(&range.context.end)), - primary_start: Some(serialize_text_anchor(&range.primary.start)), - primary_end: Some(serialize_text_anchor(&range.primary.end)), - }) - .collect(); let snapshot = buffer.snapshot(cx); + let mut path_excerpts: Vec = Vec::new(); + for excerpt in snapshot.excerpts() { + if let Some(prev_entry) = path_excerpts.last_mut() + && prev_entry.buffer_id == excerpt.context.start.buffer_id.to_proto() + { + prev_entry.ranges.push(serialize_excerpt_range(excerpt)); + } else if let Some(path_key) = snapshot.path_for_buffer(excerpt.context.start.buffer_id) + { + path_excerpts.push(proto::PathExcerpts { + path_key: Some(serialize_path_key(path_key)), + buffer_id: excerpt.context.start.buffer_id.to_proto(), + ranges: vec![serialize_excerpt_range(excerpt)], + }); + } + } Some(proto::view::Variant::Editor(proto::view::Editor { singleton: buffer.is_singleton(), title: buffer.explicit_title().map(ToOwned::to_owned), - excerpts, - scroll_top_anchor: Some(serialize_anchor(&scroll_anchor.anchor, &snapshot)), + excerpts: Vec::new(), + scroll_top_anchor: Some(serialize_anchor(&scroll_anchor.anchor)), scroll_x: scroll_anchor.offset.x, scroll_y: scroll_anchor.offset.y, selections: self .selections .disjoint_anchors_arc() .iter() - .map(|s| serialize_selection(s, &snapshot)) + .map(serialize_selection) .collect(), pending_selection: self .selections .pending_anchor() .as_ref() - .map(|s| serialize_selection(s, &snapshot)), + .copied() + .map(serialize_selection), + path_excerpts, })) } @@ -277,56 +277,52 @@ impl FollowableItem for Editor { match update { proto::update_view::Variant::Editor(update) => match event { - EditorEvent::ExcerptsAdded { + 
EditorEvent::BufferRangesUpdated { buffer, - predecessor, - excerpts, + path_key, + ranges, } => { - let buffer_id = buffer.read(cx).remote_id(); - let mut excerpts = excerpts.iter(); - if let Some((id, range)) = excerpts.next() { - update.inserted_excerpts.push(proto::ExcerptInsertion { - previous_excerpt_id: Some(predecessor.to_proto()), - excerpt: serialize_excerpt(buffer_id, id, range), - }); - update.inserted_excerpts.extend(excerpts.map(|(id, range)| { - proto::ExcerptInsertion { - previous_excerpt_id: None, - excerpt: serialize_excerpt(buffer_id, id, range), - } - })) - } + let buffer_id = buffer.read(cx).remote_id().to_proto(); + let path_key = serialize_path_key(path_key); + let ranges = ranges + .iter() + .cloned() + .map(serialize_excerpt_range) + .collect::>(); + update.updated_paths.push(proto::PathExcerpts { + path_key: Some(path_key), + buffer_id, + ranges, + }); true } - EditorEvent::ExcerptsRemoved { ids, .. } => { + EditorEvent::BuffersRemoved { removed_buffer_ids } => { update - .deleted_excerpts - .extend(ids.iter().copied().map(ExcerptId::to_proto)); + .deleted_buffers + .extend(removed_buffer_ids.iter().copied().map(BufferId::to_proto)); true } EditorEvent::ScrollPositionChanged { autoscroll, .. } if !autoscroll => { let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let snapshot = self.buffer.read(cx).snapshot(cx); let scroll_anchor = self.scroll_manager.native_anchor(&display_snapshot, cx); - update.scroll_top_anchor = - Some(serialize_anchor(&scroll_anchor.anchor, &snapshot)); + update.scroll_top_anchor = Some(serialize_anchor(&scroll_anchor.anchor)); update.scroll_x = scroll_anchor.offset.x; update.scroll_y = scroll_anchor.offset.y; true } EditorEvent::SelectionsChanged { .. 
} => { - let snapshot = self.buffer.read(cx).snapshot(cx); update.selections = self .selections .disjoint_anchors_arc() .iter() - .map(|s| serialize_selection(s, &snapshot)) + .map(serialize_selection) .collect(); update.pending_selection = self .selections .pending_anchor() .as_ref() - .map(|s| serialize_selection(s, &snapshot)); + .copied() + .map(serialize_selection); true } _ => false, @@ -370,7 +366,7 @@ impl FollowableItem for Editor { ) { let buffer = self.buffer.read(cx); let buffer = buffer.read(cx); - let Some(position) = buffer.as_singleton_anchor(location) else { + let Some(position) = buffer.anchor_in_excerpt(location) else { return; }; let selection = Selection { @@ -394,9 +390,9 @@ async fn update_editor_from_message( ) -> Result<()> { // Open all of the buffers of which excerpts were added to the editor. let inserted_excerpt_buffer_ids = message - .inserted_excerpts + .updated_paths .iter() - .filter_map(|insertion| Some(insertion.excerpt.as_ref()?.buffer_id)) + .map(|insertion| insertion.buffer_id) .collect::>(); let inserted_excerpt_buffers = project.update(cx, |project, cx| { inserted_excerpt_buffer_ids @@ -407,66 +403,53 @@ async fn update_editor_from_message( let _inserted_excerpt_buffers = try_join_all(inserted_excerpt_buffers).await?; // Update the editor's excerpts. 
- this.update(cx, |editor, cx| { + let buffer_snapshot = this.update(cx, |editor, cx| { editor.buffer.update(cx, |multibuffer, cx| { - let mut removed_excerpt_ids = message - .deleted_excerpts - .into_iter() - .map(ExcerptId::from_proto) - .collect::>(); - removed_excerpt_ids.sort_by({ - let multibuffer = multibuffer.read(cx); - move |a, b| a.cmp(b, &multibuffer) - }); - - let mut insertions = message.inserted_excerpts.into_iter().peekable(); - while let Some(insertion) = insertions.next() { - let Some(excerpt) = insertion.excerpt else { + for path_with_excerpts in message.updated_paths { + let Some(path_key) = path_with_excerpts.path_key.and_then(deserialize_path_key) + else { continue; }; - let Some(previous_excerpt_id) = insertion.previous_excerpt_id else { - continue; - }; - let buffer_id = BufferId::new(excerpt.buffer_id)?; - let Some(buffer) = project.read(cx).buffer_for_id(buffer_id, cx) else { + let ranges = path_with_excerpts + .ranges + .into_iter() + .filter_map(deserialize_excerpt_range) + .collect::>(); + let Some(buffer) = BufferId::new(path_with_excerpts.buffer_id) + .ok() + .and_then(|buffer_id| project.read(cx).buffer_for_id(buffer_id, cx)) + else { continue; }; - let adjacent_excerpts = iter::from_fn(|| { - let insertion = insertions.peek()?; - if insertion.previous_excerpt_id.is_none() - && insertion.excerpt.as_ref()?.buffer_id == u64::from(buffer_id) - { - insertions.next()?.excerpt - } else { - None - } - }); + let buffer_snapshot = buffer.read(cx).snapshot(); + multibuffer.update_path_excerpts(path_key, buffer, &buffer_snapshot, &ranges, cx); + } - multibuffer.insert_excerpts_with_ids_after( - ExcerptId::from_proto(previous_excerpt_id), - buffer, - [excerpt] - .into_iter() - .chain(adjacent_excerpts) - .filter_map(deserialize_excerpt_range), - cx, - ); + for buffer_id in message + .deleted_buffers + .into_iter() + .filter_map(|buffer_id| BufferId::new(buffer_id).ok()) + { + multibuffer.remove_excerpts_for_buffer(buffer_id, cx); } - 
multibuffer.remove_excerpts(removed_excerpt_ids, cx); - anyhow::Ok(()) + multibuffer.snapshot(cx) }) - })??; + })?; // Deserialize the editor state. let selections = message .selections .into_iter() - .filter_map(deserialize_selection) + .filter_map(|selection| deserialize_selection(selection, &buffer_snapshot)) .collect::>(); - let pending_selection = message.pending_selection.and_then(deserialize_selection); - let scroll_top_anchor = message.scroll_top_anchor.and_then(deserialize_anchor); + let pending_selection = message + .pending_selection + .and_then(|selection| deserialize_selection(selection, &buffer_snapshot)); + let scroll_top_anchor = message + .scroll_top_anchor + .and_then(|selection| deserialize_anchor(selection, &buffer_snapshot)); // Wait until the buffer has received all of the operations referenced by // the editor's new state. @@ -503,79 +486,103 @@ async fn update_editor_from_message( Ok(()) } -fn serialize_excerpt( - buffer_id: BufferId, - id: &ExcerptId, - range: &ExcerptRange, -) -> Option { - Some(proto::Excerpt { - id: id.to_proto(), - buffer_id: buffer_id.into(), - context_start: Some(serialize_text_anchor(&range.context.start)), - context_end: Some(serialize_text_anchor(&range.context.end)), - primary_start: Some(serialize_text_anchor(&range.primary.start)), - primary_end: Some(serialize_text_anchor(&range.primary.end)), - }) -} - -fn serialize_selection( - selection: &Selection, - buffer: &MultiBufferSnapshot, -) -> proto::Selection { +fn serialize_selection(selection: &Selection) -> proto::Selection { proto::Selection { id: selection.id as u64, - start: Some(serialize_anchor(&selection.start, buffer)), - end: Some(serialize_anchor(&selection.end, buffer)), + start: Some(serialize_anchor(&selection.start)), + end: Some(serialize_anchor(&selection.end)), reversed: selection.reversed, } } -fn serialize_anchor(anchor: &Anchor, buffer: &MultiBufferSnapshot) -> proto::EditorAnchor { - proto::EditorAnchor { - excerpt_id: 
buffer.latest_excerpt_id(anchor.excerpt_id).to_proto(), - anchor: Some(serialize_text_anchor(&anchor.text_anchor)), +fn serialize_anchor(anchor: &Anchor) -> proto::EditorAnchor { + match anchor { + Anchor::Min => proto::EditorAnchor { + excerpt_id: None, + anchor: Some(proto::Anchor { + replica_id: 0, + timestamp: 0, + offset: 0, + bias: proto::Bias::Left as i32, + buffer_id: None, + }), + }, + Anchor::Excerpt(_) => proto::EditorAnchor { + excerpt_id: None, + anchor: anchor.raw_text_anchor().map(|a| serialize_text_anchor(&a)), + }, + Anchor::Max => proto::EditorAnchor { + excerpt_id: None, + anchor: Some(proto::Anchor { + replica_id: u32::MAX, + timestamp: u32::MAX, + offset: u64::MAX, + bias: proto::Bias::Right as i32, + buffer_id: None, + }), + }, + } +} + +fn serialize_excerpt_range(range: ExcerptRange) -> proto::ExcerptRange { + let context_start = language::proto::serialize_anchor(&range.context.start); + let context_end = language::proto::serialize_anchor(&range.context.end); + let primary_start = language::proto::serialize_anchor(&range.primary.start); + let primary_end = language::proto::serialize_anchor(&range.primary.end); + proto::ExcerptRange { + context_start: Some(context_start), + context_end: Some(context_end), + primary_start: Some(primary_start), + primary_end: Some(primary_end), } } fn deserialize_excerpt_range( - excerpt: proto::Excerpt, -) -> Option<(ExcerptId, ExcerptRange)> { + excerpt_range: proto::ExcerptRange, +) -> Option> { let context = { - let start = language::proto::deserialize_anchor(excerpt.context_start?)?; - let end = language::proto::deserialize_anchor(excerpt.context_end?)?; + let start = language::proto::deserialize_anchor(excerpt_range.context_start?)?; + let end = language::proto::deserialize_anchor(excerpt_range.context_end?)?; start..end }; - let primary = excerpt + let primary = excerpt_range .primary_start - .zip(excerpt.primary_end) + .zip(excerpt_range.primary_end) .and_then(|(start, end)| { let start = 
language::proto::deserialize_anchor(start)?; let end = language::proto::deserialize_anchor(end)?; Some(start..end) }) .unwrap_or_else(|| context.clone()); - Some(( - ExcerptId::from_proto(excerpt.id), - ExcerptRange { context, primary }, - )) + Some(ExcerptRange { context, primary }) } -fn deserialize_selection(selection: proto::Selection) -> Option> { +fn deserialize_selection( + selection: proto::Selection, + buffer: &MultiBufferSnapshot, +) -> Option> { Some(Selection { id: selection.id as usize, - start: deserialize_anchor(selection.start?)?, - end: deserialize_anchor(selection.end?)?, + start: deserialize_anchor(selection.start?, buffer)?, + end: deserialize_anchor(selection.end?, buffer)?, reversed: selection.reversed, goal: SelectionGoal::None, }) } -fn deserialize_anchor(anchor: proto::EditorAnchor) -> Option { - let excerpt_id = ExcerptId::from_proto(anchor.excerpt_id); - Some(Anchor::in_buffer( - excerpt_id, - language::proto::deserialize_anchor(anchor.anchor?)?, - )) +fn deserialize_anchor(anchor: proto::EditorAnchor, buffer: &MultiBufferSnapshot) -> Option { + let anchor = anchor.anchor?; + if let Some(buffer_id) = anchor.buffer_id + && BufferId::new(buffer_id).is_ok() + { + let text_anchor = language::proto::deserialize_anchor(anchor)?; + buffer.anchor_in_buffer(text_anchor) + } else { + match proto::Bias::from_i32(anchor.bias)? { + proto::Bias::Left => Some(Anchor::Min), + proto::Bias::Right => Some(Anchor::Max), + } + } } impl Item for Editor { @@ -1071,7 +1078,7 @@ impl Item for Editor { f(ItemEvent::UpdateBreadcrumbs); } - EditorEvent::ExcerptsAdded { .. } | EditorEvent::ExcerptsRemoved { .. } => { + EditorEvent::BufferRangesUpdated { .. } | EditorEvent::BuffersRemoved { .. 
} => { f(ItemEvent::Edit); } @@ -1434,9 +1441,9 @@ impl ProjectItem for Editor { cx: &mut Context, ) -> Self { let mut editor = Self::for_buffer(buffer.clone(), Some(project), window, cx); + let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx); - if let Some((excerpt_id, _, snapshot)) = - editor.buffer().read(cx).snapshot(cx).as_singleton() + if let Some(buffer_snapshot) = editor.buffer().read(cx).snapshot(cx).as_singleton() && WorkspaceSettings::get(None, cx).restore_on_file_reopen && let Some(restoration_data) = Self::project_item_kind() .and_then(|kind| pane.as_ref()?.project_item_restoration_data.get(&kind)) @@ -1448,7 +1455,7 @@ impl ProjectItem for Editor { { if !restoration_data.folds.is_empty() { editor.fold_ranges( - clip_ranges(&restoration_data.folds, snapshot), + clip_ranges(&restoration_data.folds, buffer_snapshot), false, window, cx, @@ -1456,12 +1463,11 @@ impl ProjectItem for Editor { } if !restoration_data.selections.is_empty() { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges(clip_ranges(&restoration_data.selections, snapshot)); + s.select_ranges(clip_ranges(&restoration_data.selections, buffer_snapshot)); }); } let (top_row, offset) = restoration_data.scroll_position; - let anchor = - Anchor::in_buffer(excerpt_id, snapshot.anchor_before(Point::new(top_row, 0))); + let anchor = multibuffer_snapshot.anchor_before(Point::new(top_row, 0)); editor.set_scroll_anchor(ScrollAnchor { anchor, offset }, window, cx); } @@ -1838,7 +1844,7 @@ impl SearchableItem for Editor { }; for range in search_within_ranges { - for (search_buffer, search_range, excerpt_id, deleted_hunk_anchor) in + for (search_buffer, search_range, deleted_hunk_anchor) in buffer.range_to_buffer_ranges_with_deleted_hunks(range) { ranges.extend( @@ -1849,20 +1855,22 @@ impl SearchableItem for Editor { ) .await .into_iter() - .map(|match_range| { + .filter_map(|match_range| { if let Some(deleted_hunk_anchor) = deleted_hunk_anchor { let 
start = search_buffer .anchor_after(search_range.start + match_range.start); let end = search_buffer .anchor_before(search_range.start + match_range.end); - deleted_hunk_anchor.with_diff_base_anchor(start) - ..deleted_hunk_anchor.with_diff_base_anchor(end) + Some( + deleted_hunk_anchor.with_diff_base_anchor(start) + ..deleted_hunk_anchor.with_diff_base_anchor(end), + ) } else { let start = search_buffer .anchor_after(search_range.start + match_range.start); let end = search_buffer .anchor_before(search_range.start + match_range.end); - Anchor::range_in_buffer(excerpt_id, start..end) + buffer.buffer_anchor_range_to_anchor_range(start..end) } }), ); @@ -2050,6 +2058,20 @@ fn restore_serialized_buffer_contents( } } +fn serialize_path_key(path_key: &PathKey) -> proto::PathKey { + proto::PathKey { + sort_prefix: path_key.sort_prefix, + path: path_key.path.to_proto(), + } +} + +fn deserialize_path_key(path_key: proto::PathKey) -> Option { + Some(PathKey { + sort_prefix: path_key.sort_prefix, + path: RelPath::from_proto(&path_key.path).ok()?, + }) +} + #[cfg(test)] mod tests { use crate::editor_tests::init_test; diff --git a/crates/editor/src/jsx_tag_auto_close.rs b/crates/editor/src/jsx_tag_auto_close.rs index b91f039aff7cfb8bc7997cfbf63abb8dbe4662e5..d57941f6d082a929f6547c38ddbc21908304d76c 100644 --- a/crates/editor/src/jsx_tag_auto_close.rs +++ b/crates/editor/src/jsx_tag_auto_close.rs @@ -352,11 +352,12 @@ pub(crate) fn construct_initial_buffer_versions_map< } for (edit_range, _) in edits { - let edit_range_buffer = editor - .buffer() - .read(cx) - .excerpt_containing(edit_range.end, cx) - .map(|e| e.1); + let multibuffer = editor.buffer.read(cx); + let snapshot = multibuffer.snapshot(cx); + let anchor = snapshot.anchor_before(edit_range.end); + let edit_range_buffer = snapshot + .anchor_to_buffer_anchor(anchor) + .and_then(|(text_anchor, _)| multibuffer.buffer(text_anchor.buffer_id)); if let Some(buffer) = edit_range_buffer { let (buffer_id, buffer_version) = 
buffer.read_with(cx, |buffer, _| (buffer.remote_id(), buffer.version.clone())); diff --git a/crates/editor/src/linked_editing_ranges.rs b/crates/editor/src/linked_editing_ranges.rs index ccd0e64bd850f6ce84e225fe77f1c0a0d5385dc1..148bb27addecfb4982625a2d6129e7d3827d7883 100644 --- a/crates/editor/src/linked_editing_ranges.rs +++ b/crates/editor/src/linked_editing_ranges.rs @@ -2,7 +2,6 @@ use collections::HashMap; use gpui::{AppContext, Context, Entity, Window}; use itertools::Itertools; use language::Buffer; -use multi_buffer::MultiBufferOffset; use std::{ops::Range, sync::Arc, time::Duration}; use text::{Anchor, AnchorRangeExt, Bias, BufferId, ToOffset, ToPoint}; use util::ResultExt; @@ -62,27 +61,15 @@ pub(super) fn refresh_linked_ranges( editor .update(cx, |editor, cx| { let display_snapshot = editor.display_snapshot(cx); - let selections = editor - .selections - .all::(&display_snapshot); + let selections = editor.selections.all_anchors(&display_snapshot); let snapshot = display_snapshot.buffer_snapshot(); let buffer = editor.buffer.read(cx); - for selection in selections { - let cursor_position = selection.head(); - let start_position = snapshot.anchor_before(cursor_position); - let end_position = snapshot.anchor_after(selection.tail()); - if start_position.text_anchor.buffer_id != end_position.text_anchor.buffer_id - || end_position.text_anchor.buffer_id.is_none() + for selection in selections.iter() { + if let Some((_, range)) = + snapshot.anchor_range_to_buffer_anchor_range(selection.range()) + && let Some(buffer) = buffer.buffer(range.start.buffer_id) { - // Throw away selections spanning multiple buffers. 
- continue; - } - if let Some(buffer) = buffer.buffer_for_anchor(end_position, cx) { - applicable_selections.push(( - buffer, - start_position.text_anchor, - end_position.text_anchor, - )); + applicable_selections.push((buffer, range.start, range.end)); } } }) diff --git a/crates/editor/src/lsp_ext.rs b/crates/editor/src/lsp_ext.rs index ef0f92de79b0fe7a7e4a495dc29c1305b2f5eefa..6f9f94bc72227f7f30bdca1c9ae1ce436f3d5aa4 100644 --- a/crates/editor/src/lsp_ext.rs +++ b/crates/editor/src/lsp_ext.rs @@ -9,7 +9,6 @@ use language::Buffer; use language::Language; use lsp::LanguageServerId; use lsp::LanguageServerName; -use multi_buffer::Anchor; use project::LanguageServerToQuery; use project::LocationLink; use project::Project; @@ -27,7 +26,12 @@ pub(crate) fn find_specific_language_server_in_selection( cx: &mut App, filter_language: F, language_server_name: LanguageServerName, -) -> Option<(Anchor, Arc, LanguageServerId, Entity)> +) -> Option<( + text::Anchor, + Arc, + LanguageServerId, + Entity, +)> where F: Fn(&Language) -> bool, { @@ -40,19 +44,15 @@ where .iter() .find_map(|selection| { let multi_buffer = multi_buffer.read(cx); - let (position, buffer) = multi_buffer - .buffer_for_anchor(selection.head(), cx) - .map(|buffer| (selection.head(), buffer)) - .or_else(|| { - multi_buffer - .buffer_for_anchor(selection.tail(), cx) - .map(|buffer| (selection.tail(), buffer)) - })?; + let multi_buffer_snapshot = multi_buffer.snapshot(cx); + let (position, buffer) = multi_buffer_snapshot + .anchor_to_buffer_anchor(selection.head()) + .and_then(|(anchor, _)| Some((anchor, multi_buffer.buffer(anchor.buffer_id)?)))?; if !seen_buffer_ids.insert(buffer.read(cx).remote_id()) { return None; } - let language = buffer.read(cx).language_at(position.text_anchor)?; + let language = buffer.read(cx).language_at(position)?; if filter_language(&language) { let server_id = buffer.update(cx, |buffer, cx| { project @@ -108,7 +108,7 @@ pub fn lsp_tasks( let buffers = buffer_ids .iter() 
.filter(|&&buffer_id| match for_position { - Some(for_position) => for_position.buffer_id == Some(buffer_id), + Some(for_position) => for_position.buffer_id == buffer_id, None => true, }) .filter_map(|&buffer_id| project.read(cx).buffer_for_id(buffer_id, cx)) @@ -194,7 +194,7 @@ mod tests { use language::{FakeLspAdapter, Language}; use languages::rust_lang; use lsp::{LanguageServerId, LanguageServerName}; - use multi_buffer::{Anchor, MultiBuffer}; + use multi_buffer::MultiBuffer; use project::{FakeFs, Project}; use util::path; @@ -236,7 +236,7 @@ mod tests { let filter = |language: &Language| language.name().as_ref() == "Rust"; let assert_result = |result: Option<( - Anchor, + text::Anchor, Arc, LanguageServerId, Entity, diff --git a/crates/editor/src/mouse_context_menu.rs b/crates/editor/src/mouse_context_menu.rs index 2ddbb48b5fc434f65521c6dd230537aedb71dabb..0028f52d3d91ca9e6ea660dec0628e7ca6b9e520 100644 --- a/crates/editor/src/mouse_context_menu.rs +++ b/crates/editor/src/mouse_context_menu.rs @@ -205,16 +205,17 @@ pub fn deploy_context_menu( .all::(&display_map) .into_iter() .any(|s| !s.is_empty()); - let has_git_repo = buffer - .buffer_id_for_anchor(anchor) - .is_some_and(|buffer_id| { - project - .read(cx) - .git_store() - .read(cx) - .repository_and_path_for_buffer_id(buffer_id, cx) - .is_some() - }); + let has_git_repo = + buffer + .anchor_to_buffer_anchor(anchor) + .is_some_and(|(buffer_anchor, _)| { + project + .read(cx) + .git_store() + .read(cx) + .repository_and_path_for_buffer_id(buffer_anchor.buffer_id, cx) + .is_some() + }); let evaluate_selection = window.is_action_available(&EvaluateSelectedText, cx); let run_to_cursor = window.is_action_available(&RunToCursor, cx); diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index 955f511577d2cbfede1a4cb4eb6d99e429c879d6..67869f770b81f315680388165111bbc1a2e0f111 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -588,22 +588,30 @@ pub fn 
start_of_excerpt( direction: Direction, ) -> DisplayPoint { let point = map.display_point_to_point(display_point, Bias::Left); - let Some(excerpt) = map.buffer_snapshot().excerpt_containing(point..point) else { + let Some((_, excerpt_range)) = map.buffer_snapshot().excerpt_containing(point..point) else { return display_point; }; match direction { Direction::Prev => { - let mut start = excerpt.start_anchor().to_display_point(map); + let Some(start_anchor) = map.anchor_in_excerpt(excerpt_range.context.start) else { + return display_point; + }; + let mut start = start_anchor.to_display_point(map); if start >= display_point && start.row() > DisplayRow(0) { - let Some(excerpt) = map.buffer_snapshot().excerpt_before(excerpt.id()) else { + let Some(excerpt) = map.buffer_snapshot().excerpt_before(start_anchor) else { return display_point; }; - start = excerpt.start_anchor().to_display_point(map); + if let Some(start_anchor) = map.anchor_in_excerpt(excerpt.context.start) { + start = start_anchor.to_display_point(map); + } } start } Direction::Next => { - let mut end = excerpt.end_anchor().to_display_point(map); + let Some(end_anchor) = map.anchor_in_excerpt(excerpt_range.context.end) else { + return display_point; + }; + let mut end = end_anchor.to_display_point(map); *end.row_mut() += 1; map.clip_point(end, Bias::Right) } @@ -616,12 +624,15 @@ pub fn end_of_excerpt( direction: Direction, ) -> DisplayPoint { let point = map.display_point_to_point(display_point, Bias::Left); - let Some(excerpt) = map.buffer_snapshot().excerpt_containing(point..point) else { + let Some((_, excerpt_range)) = map.buffer_snapshot().excerpt_containing(point..point) else { return display_point; }; match direction { Direction::Prev => { - let mut start = excerpt.start_anchor().to_display_point(map); + let Some(start_anchor) = map.anchor_in_excerpt(excerpt_range.context.start) else { + return display_point; + }; + let mut start = start_anchor.to_display_point(map); if start.row() > DisplayRow(0) { 
*start.row_mut() -= 1; } @@ -630,18 +641,23 @@ pub fn end_of_excerpt( start } Direction::Next => { - let mut end = excerpt.end_anchor().to_display_point(map); + let Some(end_anchor) = map.anchor_in_excerpt(excerpt_range.context.end) else { + return display_point; + }; + let mut end = end_anchor.to_display_point(map); *end.column_mut() = 0; if end <= display_point { *end.row_mut() += 1; let point_end = map.display_point_to_point(end, Bias::Right); - let Some(excerpt) = map + let Some((_, excerpt_range)) = map .buffer_snapshot() .excerpt_containing(point_end..point_end) else { return display_point; }; - end = excerpt.end_anchor().to_display_point(map); + if let Some(end_anchor) = map.anchor_in_excerpt(excerpt_range.context.end) { + end = end_anchor.to_display_point(map); + } *end.column_mut() = 0; } end diff --git a/crates/editor/src/runnables.rs b/crates/editor/src/runnables.rs index 92663ff9a96d1f84e2de387917e2d6a32b16aa00..f451eb7d61d6a2513e1ebf6ec96062b600cbecb6 100644 --- a/crates/editor/src/runnables.rs +++ b/crates/editor/src/runnables.rs @@ -8,9 +8,7 @@ use gpui::{ }; use language::{Buffer, BufferRow, Runnable}; use lsp::LanguageServerName; -use multi_buffer::{ - Anchor, BufferOffset, MultiBufferOffset, MultiBufferRow, MultiBufferSnapshot, ToPoint as _, -}; +use multi_buffer::{Anchor, BufferOffset, MultiBufferRow, MultiBufferSnapshot, ToPoint as _}; use project::{ Location, Project, TaskSourceKind, debugger::breakpoint_store::{Breakpoint, BreakpointSessionState}, @@ -165,7 +163,7 @@ impl Editor { .update(cx, |editor, cx| { let multi_buffer = editor.buffer().read(cx); if multi_buffer.is_singleton() { - Some((multi_buffer.snapshot(cx), Anchor::min()..Anchor::max())) + Some((multi_buffer.snapshot(cx), Anchor::Min..Anchor::Max)) } else { let display_snapshot = editor.display_map.update(cx, |map, cx| map.snapshot(cx)); @@ -209,16 +207,8 @@ impl Editor { .fold(HashMap::default(), |mut acc, (kind, location, task)| { let buffer = location.target.buffer; let 
buffer_snapshot = buffer.read(cx).snapshot(); - let offset = multi_buffer_snapshot.excerpts().find_map( - |(excerpt_id, snapshot, _)| { - if snapshot.remote_id() == buffer_snapshot.remote_id() { - multi_buffer_snapshot - .anchor_in_excerpt(excerpt_id, location.target.range.start) - } else { - None - } - }, - ); + let offset = + multi_buffer_snapshot.anchor_in_excerpt(location.target.range.start); if let Some(offset) = offset { let task_buffer_range = location.target.range.to_point(&buffer_snapshot); @@ -369,20 +359,23 @@ impl Editor { (selection, buffer, snapshot) }; let selection_range = selection.range(); - let start = editor_snapshot + let Some((_, range)) = editor_snapshot .display_snapshot .buffer_snapshot() - .anchor_after(selection_range.start) - .text_anchor; - let end = editor_snapshot - .display_snapshot - .buffer_snapshot() - .anchor_after(selection_range.end) - .text_anchor; - let location = Location { - buffer, - range: start..end, + .anchor_range_to_buffer_anchor_range( + editor_snapshot + .display_snapshot + .buffer_snapshot() + .anchor_after(selection_range.start) + ..editor_snapshot + .display_snapshot + .buffer_snapshot() + .anchor_before(selection_range.end), + ) + else { + return Task::ready(None); }; + let location = Location { buffer, range }; let captured_variables = { let mut variables = TaskVariables::default(); let buffer = location.buffer.read(cx); @@ -430,9 +423,9 @@ impl Editor { return HashMap::default(); } let buffers = if visible_only { - self.visible_excerpts(true, cx) - .into_values() - .map(|(buffer, _, _)| buffer) + self.visible_buffers(cx) + .into_iter() + .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx)) .collect() } else { self.buffer().read(cx).all_buffers() @@ -482,19 +475,15 @@ impl Editor { cx: &mut Context, ) -> Option<(Entity, u32, Arc)> { let snapshot = self.buffer.read(cx).snapshot(cx); - let offset = self - .selections - .newest::(&self.display_snapshot(cx)) - .head(); - let mut excerpt = 
snapshot.excerpt_containing(offset..offset)?; - let offset = excerpt.map_offset_to_buffer(offset); - let buffer_id = excerpt.buffer().remote_id(); + let anchor = self.selections.newest_anchor().head(); + let (anchor, buffer_snapshot) = snapshot.anchor_to_buffer_anchor(anchor)?; + let offset = anchor.to_offset(buffer_snapshot); - let layer = excerpt.buffer().syntax_layer_at(offset)?; + let layer = buffer_snapshot.syntax_layer_at(offset)?; let mut cursor = layer.node().walk(); - while cursor.goto_first_child_for_byte(offset.0).is_some() { - if cursor.node().end_byte() == offset.0 { + while cursor.goto_first_child_for_byte(offset).is_some() { + if cursor.node().end_byte() == offset { cursor.goto_next_sibling(); } } @@ -503,18 +492,18 @@ impl Editor { loop { let node = cursor.node(); let node_range = node.byte_range(); - let symbol_start_row = excerpt.buffer().offset_to_point(node.start_byte()).row; + let symbol_start_row = buffer_snapshot.offset_to_point(node.start_byte()).row; // Check if this node contains our offset - if node_range.start <= offset.0 && node_range.end >= offset.0 { + if node_range.start <= offset && node_range.end >= offset { // If it contains offset, check for task if let Some(tasks) = self .runnables .runnables - .get(&buffer_id) + .get(&buffer_snapshot.remote_id()) .and_then(|(_, tasks)| tasks.get(&symbol_start_row)) { - let buffer = self.buffer.read(cx).buffer(buffer_id)?; + let buffer = self.buffer.read(cx).buffer(buffer_snapshot.remote_id())?; return Some((buffer, symbol_start_row, Arc::new(tasks.to_owned()))); } } diff --git a/crates/editor/src/rust_analyzer_ext.rs b/crates/editor/src/rust_analyzer_ext.rs index 6ffdf1a248a0e605f623254bbfa36776adf77cda..6d4d599961761789dbf14c77cd3843b036d05b5e 100644 --- a/crates/editor/src/rust_analyzer_ext.rs +++ b/crates/editor/src/rust_analyzer_ext.rs @@ -88,7 +88,7 @@ pub fn go_to_parent_module( let request = proto::LspExtGoToParentModule { project_id, buffer_id: buffer_id.to_proto(), - position: 
Some(serialize_anchor(&trigger_anchor.text_anchor)), + position: Some(serialize_anchor(&trigger_anchor)), }; let response = client .request(request) @@ -106,7 +106,7 @@ pub fn go_to_parent_module( .context("go to parent module via collab")? } else { let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot); + let position = trigger_anchor.to_point_utf16(&buffer_snapshot); project .update(cx, |project, cx| { project.request_lsp( @@ -168,7 +168,7 @@ pub fn expand_macro_recursively( let request = proto::LspExtExpandMacro { project_id, buffer_id: buffer_id.to_proto(), - position: Some(serialize_anchor(&trigger_anchor.text_anchor)), + position: Some(serialize_anchor(&trigger_anchor)), }; let response = client .request(request) @@ -180,7 +180,7 @@ pub fn expand_macro_recursively( } } else { let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot); + let position = trigger_anchor.to_point_utf16(&buffer_snapshot); project .update(cx, |project, cx| { project.request_lsp( @@ -195,10 +195,7 @@ pub fn expand_macro_recursively( }; if macro_expansion.is_empty() { - log::info!( - "Empty macro expansion for position {:?}", - trigger_anchor.text_anchor - ); + log::info!("Empty macro expansion for position {:?}", trigger_anchor); return Ok(()); } @@ -260,7 +257,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu let request = proto::LspExtOpenDocs { project_id, buffer_id: buffer_id.to_proto(), - position: Some(serialize_anchor(&trigger_anchor.text_anchor)), + position: Some(serialize_anchor(&trigger_anchor)), }; let response = client .request(request) @@ -272,7 +269,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu } } else { let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - let position = 
trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot); + let position = trigger_anchor.to_point_utf16(&buffer_snapshot); project .update(cx, |project, cx| { project.request_lsp( @@ -287,10 +284,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu }; if docs_urls.is_empty() { - log::debug!( - "Empty docs urls for position {:?}", - trigger_anchor.text_anchor - ); + log::debug!("Empty docs urls for position {:?}", trigger_anchor); return Ok(()); } @@ -322,16 +316,18 @@ fn cancel_flycheck_action( let Some(project) = &editor.project else { return; }; + let multibuffer_snapshot = editor + .buffer + .read_with(cx, |buffer, cx| buffer.snapshot(cx)); let buffer_id = editor .selections .disjoint_anchors_arc() .iter() .find_map(|selection| { - let buffer_id = selection - .start - .text_anchor - .buffer_id - .or(selection.end.text_anchor.buffer_id)?; + let buffer_id = multibuffer_snapshot + .anchor_to_buffer_anchor(selection.start)? + .0 + .buffer_id; let project = project.read(cx); let entry_id = project .buffer_for_id(buffer_id, cx)? @@ -351,16 +347,18 @@ fn run_flycheck_action( let Some(project) = &editor.project else { return; }; + let multibuffer_snapshot = editor + .buffer + .read_with(cx, |buffer, cx| buffer.snapshot(cx)); let buffer_id = editor .selections .disjoint_anchors_arc() .iter() .find_map(|selection| { - let buffer_id = selection - .start - .text_anchor - .buffer_id - .or(selection.end.text_anchor.buffer_id)?; + let buffer_id = multibuffer_snapshot + .anchor_to_buffer_anchor(selection.head())? + .0 + .buffer_id; let project = project.read(cx); let entry_id = project .buffer_for_id(buffer_id, cx)? 
@@ -380,16 +378,18 @@ fn clear_flycheck_action( let Some(project) = &editor.project else { return; }; + let multibuffer_snapshot = editor + .buffer + .read_with(cx, |buffer, cx| buffer.snapshot(cx)); let buffer_id = editor .selections .disjoint_anchors_arc() .iter() .find_map(|selection| { - let buffer_id = selection - .start - .text_anchor - .buffer_id - .or(selection.end.text_anchor.buffer_id)?; + let buffer_id = multibuffer_snapshot + .anchor_to_buffer_anchor(selection.head())? + .0 + .buffer_id; let project = project.read(cx); let entry_id = project .buffer_for_id(buffer_id, cx)? diff --git a/crates/editor/src/scroll.rs b/crates/editor/src/scroll.rs index c2280e90f7d30d53c0818119df70b7c32161b78b..42b865b17ca4e241b8f0728488cacd42d52d257c 100644 --- a/crates/editor/src/scroll.rs +++ b/crates/editor/src/scroll.rs @@ -44,13 +44,13 @@ impl ScrollAnchor { pub(super) fn new() -> Self { Self { offset: gpui::Point::default(), - anchor: Anchor::min(), + anchor: Anchor::Min, } } pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> gpui::Point { self.offset.apply_along(Axis::Vertical, |offset| { - if self.anchor == Anchor::min() { + if self.anchor == Anchor::Min { 0. 
} else { let scroll_top = self.anchor.to_display_point(snapshot).row().as_f64(); diff --git a/crates/editor/src/scroll/actions.rs b/crates/editor/src/scroll/actions.rs index 3d22db2a4dc3c9339e51b0dae02d6d598400ad64..48438b6592a3a75c405fee496fbbd55091389a8f 100644 --- a/crates/editor/src/scroll/actions.rs +++ b/crates/editor/src/scroll/actions.rs @@ -78,7 +78,7 @@ impl Editor { let selection_head = self.selections.newest_display(&display_snapshot).head(); let sticky_headers_len = if EditorSettings::get_global(cx).sticky_scroll.enabled - && let Some((_, _, buffer_snapshot)) = display_snapshot.buffer_snapshot().as_singleton() + && let Some(buffer_snapshot) = display_snapshot.buffer_snapshot().as_singleton() { let select_head_point = rope::Point::new(selection_head.to_point(&display_snapshot).row, 0); diff --git a/crates/editor/src/selections_collection.rs b/crates/editor/src/selections_collection.rs index 7331205d22b779b17af2186757a6b96f59b5616c..51dcca149ce597df076a083f7d0bc3ad223edae2 100644 --- a/crates/editor/src/selections_collection.rs +++ b/crates/editor/src/selections_collection.rs @@ -4,7 +4,6 @@ use std::{ sync::Arc, }; -use collections::HashMap; use gpui::Pixels; use itertools::Itertools as _; use language::{Bias, Point, PointUtf16, Selection, SelectionGoal}; @@ -12,7 +11,7 @@ use multi_buffer::{MultiBufferDimension, MultiBufferOffset}; use util::post_inc; use crate::{ - Anchor, DisplayPoint, DisplayRow, ExcerptId, MultiBufferSnapshot, SelectMode, ToOffset, + Anchor, DisplayPoint, DisplayRow, MultiBufferSnapshot, SelectMode, ToOffset, display_map::{DisplaySnapshot, ToDisplayPoint}, movement::TextLayoutDetails, }; @@ -45,8 +44,8 @@ impl SelectionsCollection { pending: Some(PendingSelection { selection: Selection { id: 0, - start: Anchor::min(), - end: Anchor::min(), + start: Anchor::Min, + end: Anchor::Min, reversed: false, goal: SelectionGoal::None, }, @@ -547,13 +546,11 @@ impl SelectionsCollection { ); assert!( snapshot.can_resolve(&selection.start), - 
"disjoint selection start is not resolvable for the given snapshot:\n{selection:?}, {excerpt:?}", - excerpt = snapshot.buffer_for_excerpt(selection.start.excerpt_id).map(|snapshot| snapshot.remote_id()), + "disjoint selection start is not resolvable for the given snapshot:\n{selection:?}", ); assert!( snapshot.can_resolve(&selection.end), - "disjoint selection end is not resolvable for the given snapshot: {selection:?}, {excerpt:?}", - excerpt = snapshot.buffer_for_excerpt(selection.end.excerpt_id).map(|snapshot| snapshot.remote_id()), + "disjoint selection start is not resolvable for the given snapshot:\n{selection:?}", ); }); assert!( @@ -572,17 +569,11 @@ impl SelectionsCollection { ); assert!( snapshot.can_resolve(&selection.start), - "pending selection start is not resolvable for the given snapshot: {pending:?}, {excerpt:?}", - excerpt = snapshot - .buffer_for_excerpt(selection.start.excerpt_id) - .map(|snapshot| snapshot.remote_id()), + "pending selection start is not resolvable for the given snapshot: {pending:?}", ); assert!( snapshot.can_resolve(&selection.end), - "pending selection end is not resolvable for the given snapshot: {pending:?}, {excerpt:?}", - excerpt = snapshot - .buffer_for_excerpt(selection.end.excerpt_id) - .map(|snapshot| snapshot.remote_id()), + "pending selection end is not resolvable for the given snapshot: {pending:?}", ); } } @@ -665,10 +656,10 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { self.disjoint .iter() .filter(|selection| { - if let Some(selection_buffer_id) = - self.snapshot.buffer_id_for_anchor(selection.start) + if let Some((selection_buffer_anchor, _)) = + self.snapshot.anchor_to_buffer_anchor(selection.start) { - let should_remove = selection_buffer_id == buffer_id; + let should_remove = selection_buffer_anchor.buffer_id == buffer_id; changed |= should_remove; !should_remove } else { @@ -683,10 +674,8 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { let buffer_snapshot = 
self.snapshot.buffer_snapshot(); let anchor = buffer_snapshot .excerpts() - .find(|(_, buffer, _)| buffer.remote_id() == buffer_id) - .and_then(|(excerpt_id, _, range)| { - buffer_snapshot.anchor_in_excerpt(excerpt_id, range.context.start) - }) + .find(|excerpt| excerpt.context.start.buffer_id == buffer_id) + .and_then(|excerpt| buffer_snapshot.anchor_in_excerpt(excerpt.context.start)) .unwrap_or_else(|| self.snapshot.anchor_before(MultiBufferOffset(0))); self.collection.disjoint = Arc::from([Selection { id: post_inc(&mut self.collection.next_selection_id), @@ -1077,80 +1066,6 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { self.selections_changed = true; self.pending.as_mut().map(|pending| &mut pending.selection) } - - /// Compute new ranges for any selections that were located in excerpts that have - /// since been removed. - /// - /// Returns a `HashMap` indicating which selections whose former head position - /// was no longer present. The keys of the map are selection ids. The values are - /// the id of the new excerpt where the head of the selection has been moved. 
- pub fn refresh(&mut self) -> HashMap { - let mut pending = self.collection.pending.take(); - let mut selections_with_lost_position = HashMap::default(); - - let anchors_with_status = { - let disjoint_anchors = self - .disjoint - .iter() - .flat_map(|selection| [&selection.start, &selection.end]); - self.snapshot.refresh_anchors(disjoint_anchors) - }; - let adjusted_disjoint: Vec<_> = anchors_with_status - .chunks(2) - .map(|selection_anchors| { - let (anchor_ix, start, kept_start) = selection_anchors[0]; - let (_, end, kept_end) = selection_anchors[1]; - let selection = &self.disjoint[anchor_ix / 2]; - let kept_head = if selection.reversed { - kept_start - } else { - kept_end - }; - if !kept_head { - selections_with_lost_position.insert(selection.id, selection.head().excerpt_id); - } - - Selection { - id: selection.id, - start, - end, - reversed: selection.reversed, - goal: selection.goal, - } - }) - .collect(); - - if !adjusted_disjoint.is_empty() { - let map = self.display_snapshot(); - let resolved_selections = - resolve_selections_wrapping_blocks(adjusted_disjoint.iter(), &map).collect(); - self.select::(resolved_selections); - } - - if let Some(pending) = pending.as_mut() { - let anchors = self - .snapshot - .refresh_anchors([&pending.selection.start, &pending.selection.end]); - let (_, start, kept_start) = anchors[0]; - let (_, end, kept_end) = anchors[1]; - let kept_head = if pending.selection.reversed { - kept_start - } else { - kept_end - }; - if !kept_head { - selections_with_lost_position - .insert(pending.selection.id, pending.selection.head().excerpt_id); - } - - pending.selection.start = start; - pending.selection.end = end; - } - self.collection.pending = pending; - self.selections_changed = true; - - selections_with_lost_position - } } impl Deref for MutableSelectionsCollection<'_, '_> { diff --git a/crates/editor/src/semantic_tokens.rs b/crates/editor/src/semantic_tokens.rs index 
8408438f17533098f906c75bcc03983edfb7acf8..ebb4454f0d30f5d6343bfa3392cb795e031272fa 100644 --- a/crates/editor/src/semantic_tokens.rs +++ b/crates/editor/src/semantic_tokens.rs @@ -148,9 +148,9 @@ impl Editor { }; let buffers_to_query = self - .visible_excerpts(true, cx) - .into_values() - .map(|(buffer, ..)| buffer) + .visible_buffers(cx) + .into_iter() + .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx)) .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id))) .filter_map(|editor_buffer| { let editor_buffer_id = editor_buffer.read(cx).remote_id(); @@ -1214,11 +1214,19 @@ mod tests { ); // Get the excerpt id for the TOML excerpt and expand it down by 2 lines. - let toml_excerpt_id = - editor.read_with(cx, |editor, cx| editor.buffer().read(cx).excerpt_ids()[0]); + let toml_anchor = editor.read_with(cx, |editor, cx| { + editor + .buffer() + .read(cx) + .snapshot(cx) + .anchor_in_excerpt(text::Anchor::min_for_buffer( + toml_buffer.read(cx).remote_id(), + )) + .unwrap() + }); editor.update_in(cx, |editor, _, cx| { editor.buffer().update(cx, |buffer, cx| { - buffer.expand_excerpts([toml_excerpt_id], 2, ExpandExcerptDirection::Down, cx); + buffer.expand_excerpts([toml_anchor], 2, ExpandExcerptDirection::Down, cx); }); }); diff --git a/crates/editor/src/signature_help.rs b/crates/editor/src/signature_help.rs index 27c26d4691686c16bcbafbf74bba6b5f1156b835..6305fc73e44d745e943c1d4c8ec573e0cce7d9ed 100644 --- a/crates/editor/src/signature_help.rs +++ b/crates/editor/src/signature_help.rs @@ -7,7 +7,7 @@ use gpui::{ }; use language::BufferSnapshot; -use markdown::{Markdown, MarkdownElement}; +use markdown::{CopyButtonVisibility, Markdown, MarkdownElement}; use multi_buffer::{Anchor, MultiBufferOffset, ToOffset}; use settings::Settings; use std::ops::Range; @@ -408,9 +408,8 @@ impl SignatureHelpPopover { hover_markdown_style(window, cx), ) .code_block_renderer(markdown::CodeBlockRenderer::Default { - copy_button: false, + 
copy_button_visibility: CopyButtonVisibility::Hidden, border: false, - copy_button_on_hover: false, }) .on_url_click(open_markdown_url), ) @@ -421,9 +420,8 @@ impl SignatureHelpPopover { .child( MarkdownElement::new(description, hover_markdown_style(window, cx)) .code_block_renderer(markdown::CodeBlockRenderer::Default { - copy_button: false, + copy_button_visibility: CopyButtonVisibility::Hidden, border: false, - copy_button_on_hover: false, }) .on_url_click(open_markdown_url), ) diff --git a/crates/editor/src/split.rs b/crates/editor/src/split.rs index cdb016ea4b612aaae288acd008f745ef2ecf0f1d..ee15583072144ca170328988ebec9959b391dbf1 100644 --- a/crates/editor/src/split.rs +++ b/crates/editor/src/split.rs @@ -1,5 +1,5 @@ use std::{ - ops::{Bound, Range, RangeInclusive}, + ops::{Range, RangeInclusive}, sync::Arc, }; @@ -13,7 +13,7 @@ use gpui::{ use itertools::Itertools; use language::{Buffer, Capability, HighlightedText}; use multi_buffer::{ - Anchor, BufferOffset, ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer, + Anchor, AnchorRangeExt as _, BufferOffset, ExcerptRange, ExpandExcerptDirection, MultiBuffer, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferSnapshot, PathKey, }; use project::Project; @@ -44,13 +44,11 @@ use crate::{ use zed_actions::assistant::InlineAssist; pub(crate) fn convert_lhs_rows_to_rhs( - lhs_excerpt_to_rhs_excerpt: &HashMap, rhs_snapshot: &MultiBufferSnapshot, lhs_snapshot: &MultiBufferSnapshot, - lhs_bounds: (Bound, Bound), + lhs_bounds: Range, ) -> Vec { patches_for_range( - lhs_excerpt_to_rhs_excerpt, lhs_snapshot, rhs_snapshot, lhs_bounds, @@ -59,13 +57,11 @@ pub(crate) fn convert_lhs_rows_to_rhs( } pub(crate) fn convert_rhs_rows_to_lhs( - rhs_excerpt_to_lhs_excerpt: &HashMap, lhs_snapshot: &MultiBufferSnapshot, rhs_snapshot: &MultiBufferSnapshot, - rhs_bounds: (Bound, Bound), + rhs_bounds: Range, ) -> Vec { patches_for_range( - rhs_excerpt_to_lhs_excerpt, rhs_snapshot, lhs_snapshot, rhs_bounds, @@ -73,6 +69,21 @@ 
pub(crate) fn convert_rhs_rows_to_lhs( ) } +fn rhs_range_to_base_text_range( + rhs_range: &Range, + diff_snapshot: &BufferDiffSnapshot, + rhs_buffer_snapshot: &text::BufferSnapshot, +) -> Range { + let start = diff_snapshot + .buffer_point_to_base_text_range(Point::new(rhs_range.start.row, 0), rhs_buffer_snapshot) + .start; + let end = diff_snapshot + .buffer_point_to_base_text_range(Point::new(rhs_range.end.row, 0), rhs_buffer_snapshot) + .end; + let end_column = diff_snapshot.base_text().line_len(end.row); + Point::new(start.row, 0)..Point::new(end.row, end_column) +} + fn translate_lhs_selections_to_rhs( selections_by_buffer: &HashMap>, Option)>, splittable: &SplittableEditor, @@ -168,22 +179,18 @@ fn translate_lhs_hunks_to_rhs( } fn patches_for_range( - excerpt_map: &HashMap, source_snapshot: &MultiBufferSnapshot, target_snapshot: &MultiBufferSnapshot, - source_bounds: (Bound, Bound), + source_bounds: Range, translate_fn: F, ) -> Vec where F: Fn(&BufferDiffSnapshot, RangeInclusive, &text::BufferSnapshot) -> Patch, { - struct PendingExcerpt<'a> { - source_excerpt_id: ExcerptId, - target_excerpt_id: ExcerptId, - source_buffer: &'a text::BufferSnapshot, - target_buffer: &'a text::BufferSnapshot, + struct PendingExcerpt { + source_buffer_snapshot: language::BufferSnapshot, + source_excerpt_range: ExcerptRange, buffer_point_range: Range, - source_context_range: Range, } let mut result = Vec::new(); @@ -201,41 +208,55 @@ where }; let diff = source_snapshot - .diff_for_buffer_id(first.source_buffer.remote_id()) + .diff_for_buffer_id(first.source_buffer_snapshot.remote_id()) .expect("buffer with no diff when creating patches"); - let rhs_buffer = if first.source_buffer.remote_id() == diff.base_text().remote_id() { - first.target_buffer + let source_is_lhs = + first.source_buffer_snapshot.remote_id() == diff.base_text().remote_id(); + let target_buffer_id = if source_is_lhs { + diff.buffer_id() } else { - first.source_buffer + diff.base_text().remote_id() + }; + let 
target_buffer = target_snapshot + .buffer_for_id(target_buffer_id) + .expect("missing corresponding buffer"); + let rhs_buffer = if source_is_lhs { + target_buffer + } else { + &first.source_buffer_snapshot }; let patch = translate_fn(diff, union_start..=union_end, rhs_buffer); for excerpt in pending.drain(..) { + let target_position = patch.old_to_new(excerpt.buffer_point_range.start); + let target_position = target_buffer.anchor_before(target_position); + let Some(target_position) = target_snapshot.anchor_in_excerpt(target_position) else { + continue; + }; + let Some((target_buffer_snapshot, target_excerpt_range)) = + target_snapshot.excerpt_containing(target_position..target_position) + else { + continue; + }; + result.push(patch_for_excerpt( source_snapshot, target_snapshot, - excerpt.source_excerpt_id, - excerpt.target_excerpt_id, - excerpt.target_buffer, - excerpt.source_context_range, + &excerpt.source_buffer_snapshot, + target_buffer_snapshot, + excerpt.source_excerpt_range, + target_excerpt_range, &patch, excerpt.buffer_point_range, )); } }; - for (source_buffer, buffer_offset_range, source_excerpt_id, source_context_range) in - source_snapshot.range_to_buffer_ranges_with_context(source_bounds) + for (buffer_snapshot, source_range, source_excerpt_range) in + source_snapshot.range_to_buffer_ranges(source_bounds) { - let Some(target_excerpt_id) = excerpt_map.get(&source_excerpt_id).copied() else { - continue; - }; - let Some(target_buffer) = target_snapshot.buffer_for_excerpt(target_excerpt_id) else { - continue; - }; - - let buffer_id = source_buffer.remote_id(); + let buffer_id = buffer_snapshot.remote_id(); if current_buffer_id != Some(buffer_id) { if let (Some(start), Some(end)) = (union_context_start.take(), union_context_end.take()) @@ -245,8 +266,8 @@ where current_buffer_id = Some(buffer_id); } - let buffer_point_range = buffer_offset_range.to_point(source_buffer); - let source_context_range = source_context_range.to_point(source_buffer); + let 
buffer_point_range = source_range.to_point(&buffer_snapshot); + let source_context_range = source_excerpt_range.context.to_point(&buffer_snapshot); union_context_start = Some(union_context_start.map_or(source_context_range.start, |s| { s.min(source_context_range.start) @@ -256,12 +277,9 @@ where })); pending_excerpts.push(PendingExcerpt { - source_excerpt_id, - target_excerpt_id, - source_buffer, - target_buffer, + source_buffer_snapshot: buffer_snapshot, + source_excerpt_range, buffer_point_range, - source_context_range, }); } @@ -275,55 +293,60 @@ where fn patch_for_excerpt( source_snapshot: &MultiBufferSnapshot, target_snapshot: &MultiBufferSnapshot, - source_excerpt_id: ExcerptId, - target_excerpt_id: ExcerptId, - target_buffer: &text::BufferSnapshot, - source_context_range: Range, + source_buffer_snapshot: &language::BufferSnapshot, + target_buffer_snapshot: &language::BufferSnapshot, + source_excerpt_range: ExcerptRange, + target_excerpt_range: ExcerptRange, patch: &Patch, source_edited_range: Range, ) -> CompanionExcerptPatch { - let source_multibuffer_range = source_snapshot - .range_for_excerpt(source_excerpt_id) - .expect("no excerpt for source id when creating patch"); - let source_excerpt_start_in_multibuffer = source_multibuffer_range.start; - let source_excerpt_start_in_buffer = source_context_range.start; - let source_excerpt_end_in_buffer = source_context_range.end; - let target_multibuffer_range = target_snapshot - .range_for_excerpt(target_excerpt_id) - .expect("no excerpt for target id when creating patch"); - let target_excerpt_start_in_multibuffer = target_multibuffer_range.start; - let target_context_range = target_snapshot - .context_range_for_excerpt(target_excerpt_id) - .expect("no range for target id when creating patch"); - let target_excerpt_start_in_buffer = target_context_range.start.to_point(&target_buffer); - let target_excerpt_end_in_buffer = target_context_range.end.to_point(&target_buffer); + let source_buffer_range = 
source_excerpt_range + .context + .to_point(source_buffer_snapshot); + let source_multibuffer_range = (source_snapshot + .anchor_in_buffer(source_excerpt_range.context.start) + .expect("buffer should exist in multibuffer") + ..source_snapshot + .anchor_in_buffer(source_excerpt_range.context.end) + .expect("buffer should exist in multibuffer")) + .to_point(source_snapshot); + let target_buffer_range = target_excerpt_range + .context + .to_point(target_buffer_snapshot); + let target_multibuffer_range = (target_snapshot + .anchor_in_buffer(target_excerpt_range.context.start) + .expect("buffer should exist in multibuffer") + ..target_snapshot + .anchor_in_buffer(target_excerpt_range.context.end) + .expect("buffer should exist in multibuffer")) + .to_point(target_snapshot); let edits = patch .edits() .iter() - .skip_while(|edit| edit.old.end < source_excerpt_start_in_buffer) - .take_while(|edit| edit.old.start <= source_excerpt_end_in_buffer) + .skip_while(|edit| edit.old.end < source_buffer_range.start) + .take_while(|edit| edit.old.start <= source_buffer_range.end) .map(|edit| { - let clamped_source_start = edit.old.start.max(source_excerpt_start_in_buffer); - let clamped_source_end = edit.old.end.min(source_excerpt_end_in_buffer); - let source_multibuffer_start = source_excerpt_start_in_multibuffer - + (clamped_source_start - source_excerpt_start_in_buffer); - let source_multibuffer_end = source_excerpt_start_in_multibuffer - + (clamped_source_end - source_excerpt_start_in_buffer); + let clamped_source_start = edit.old.start.max(source_buffer_range.start); + let clamped_source_end = edit.old.end.min(source_buffer_range.end); + let source_multibuffer_start = + source_multibuffer_range.start + (clamped_source_start - source_buffer_range.start); + let source_multibuffer_end = + source_multibuffer_range.start + (clamped_source_end - source_buffer_range.start); let clamped_target_start = edit .new .start - .max(target_excerpt_start_in_buffer) - 
.min(target_excerpt_end_in_buffer); + .max(target_buffer_range.start) + .min(target_buffer_range.end); let clamped_target_end = edit .new .end - .max(target_excerpt_start_in_buffer) - .min(target_excerpt_end_in_buffer); - let target_multibuffer_start = target_excerpt_start_in_multibuffer - + (clamped_target_start - target_excerpt_start_in_buffer); - let target_multibuffer_end = target_excerpt_start_in_multibuffer - + (clamped_target_end - target_excerpt_start_in_buffer); + .max(target_buffer_range.start) + .min(target_buffer_range.end); + let target_multibuffer_start = + target_multibuffer_range.start + (clamped_target_start - target_buffer_range.start); + let target_multibuffer_end = + target_multibuffer_range.start + (clamped_target_end - target_buffer_range.start); text::Edit { old: source_multibuffer_start..source_multibuffer_end, new: target_multibuffer_start..target_multibuffer_end, @@ -331,8 +354,8 @@ fn patch_for_excerpt( }); let edits = [text::Edit { - old: source_excerpt_start_in_multibuffer..source_excerpt_start_in_multibuffer, - new: target_excerpt_start_in_multibuffer..target_excerpt_start_in_multibuffer, + old: source_multibuffer_range.start..source_multibuffer_range.start, + new: target_multibuffer_range.start..target_multibuffer_range.start, }] .into_iter() .chain(edits); @@ -349,21 +372,20 @@ fn patch_for_excerpt( merged_edits.push(edit); } - let edited_range = source_excerpt_start_in_multibuffer - + (source_edited_range.start - source_excerpt_start_in_buffer) - ..source_excerpt_start_in_multibuffer - + (source_edited_range.end - source_excerpt_start_in_buffer); + let edited_range = source_multibuffer_range.start + + (source_edited_range.start - source_buffer_range.start) + ..source_multibuffer_range.start + (source_edited_range.end - source_buffer_range.start); - let source_excerpt_end = source_excerpt_start_in_multibuffer - + (source_excerpt_end_in_buffer - source_excerpt_start_in_buffer); - let target_excerpt_end = 
target_excerpt_start_in_multibuffer - + (target_excerpt_end_in_buffer - target_excerpt_start_in_buffer); + let source_excerpt_end = + source_multibuffer_range.start + (source_buffer_range.end - source_buffer_range.start); + let target_excerpt_end = + target_multibuffer_range.start + (target_buffer_range.end - target_buffer_range.start); CompanionExcerptPatch { patch: Patch::new(merged_edits), edited_range, - source_excerpt_range: source_excerpt_start_in_multibuffer..source_excerpt_end, - target_excerpt_range: target_excerpt_start_in_multibuffer..target_excerpt_end, + source_excerpt_range: source_multibuffer_range.start..source_excerpt_end, + target_excerpt_range: target_multibuffer_range.start..target_excerpt_end, } } @@ -390,6 +412,7 @@ pub struct SplittableEditor { struct LhsEditor { multibuffer: Entity, editor: Entity, + companion: Entity, was_last_focused: bool, _subscriptions: Vec, } @@ -470,11 +493,16 @@ impl SplittableEditor { &rhs_editor, |this, _, event: &EditorEvent, cx| match event { EditorEvent::ExpandExcerptsRequested { - excerpt_ids, + excerpt_anchors, lines, direction, } => { - this.expand_excerpts(excerpt_ids.iter().copied(), *lines, *direction, cx); + this.expand_excerpts( + excerpt_anchors.iter().copied(), + *lines, + *direction, + cx, + ); } _ => cx.emit(event.clone()), }, @@ -563,19 +591,31 @@ impl SplittableEditor { window, |this, _, event: &EditorEvent, window, cx| match event { EditorEvent::ExpandExcerptsRequested { - excerpt_ids, + excerpt_anchors, lines, direction, } => { - if this.lhs.is_some() { - let rhs_display_map = this.rhs_editor.read(cx).display_map.read(cx); - let rhs_ids: Vec<_> = excerpt_ids + if let Some(lhs) = &this.lhs { + let rhs_snapshot = this.rhs_multibuffer.read(cx).snapshot(cx); + let lhs_snapshot = lhs.multibuffer.read(cx).snapshot(cx); + let rhs_anchors = excerpt_anchors .iter() - .filter_map(|id| { - rhs_display_map.companion_excerpt_to_my_excerpt(*id, cx) + .filter_map(|anchor| { + let (anchor, lhs_buffer) = + 
lhs_snapshot.anchor_to_buffer_anchor(*anchor)?; + let rhs_buffer_id = + lhs.companion.read(cx).lhs_to_rhs_buffer(anchor.buffer_id)?; + let rhs_buffer = rhs_snapshot.buffer_for_id(rhs_buffer_id)?; + let diff = this.rhs_multibuffer.read(cx).diff_for(rhs_buffer_id)?; + let diff_snapshot = diff.read(cx).snapshot(cx); + let rhs_point = diff_snapshot.base_text_point_to_buffer_point( + anchor.to_point(&lhs_buffer), + &rhs_buffer, + ); + rhs_snapshot.anchor_in_excerpt(rhs_buffer.anchor_before(rhs_point)) }) - .collect(); - this.expand_excerpts(rhs_ids.into_iter(), *lines, *direction, cx); + .collect::>(); + this.expand_excerpts(rhs_anchors.into_iter(), *lines, *direction, cx); } } EditorEvent::StageOrUnstageRequested { stage, hunks } => { @@ -654,15 +694,23 @@ impl SplittableEditor { }), ); + let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); + let lhs_display_map = lhs_editor.read(cx).display_map.clone(); + let rhs_display_map_id = rhs_display_map.entity_id(); + let companion = cx.new(|_| { + Companion::new( + rhs_display_map_id, + convert_rhs_rows_to_lhs, + convert_lhs_rows_to_rhs, + ) + }); let lhs = LhsEditor { editor: lhs_editor, multibuffer: lhs_multibuffer, was_last_focused: false, + companion: companion.clone(), _subscriptions: subscriptions, }; - let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); - let lhs_display_map = lhs.editor.read(cx).display_map.clone(); - let rhs_display_map_id = rhs_display_map.entity_id(); self.rhs_editor.update(cx, |editor, cx| { editor.set_delegate_expand_excerpts(true); @@ -672,35 +720,21 @@ impl SplittableEditor { }) }); - let path_diffs: Vec<_> = { + let all_paths: Vec<_> = { let rhs_multibuffer = self.rhs_multibuffer.read(cx); - rhs_multibuffer - .paths() - .filter_map(|path| { - let excerpt_id = rhs_multibuffer.excerpts_for_path(path).next()?; - let snapshot = rhs_multibuffer.snapshot(cx); - let buffer = snapshot.buffer_for_excerpt(excerpt_id)?; + let rhs_multibuffer_snapshot = 
rhs_multibuffer.snapshot(cx); + rhs_multibuffer_snapshot + .buffers_with_paths() + .filter_map(|(buffer, path)| { let diff = rhs_multibuffer.diff_for(buffer.remote_id())?; Some((path.clone(), diff)) }) .collect() }; - let companion = cx.new(|_| { - Companion::new( - rhs_display_map_id, - convert_rhs_rows_to_lhs, - convert_lhs_rows_to_rhs, - ) - }); - self.lhs = Some(lhs); - let paths_for_sync: Vec<_> = path_diffs - .into_iter() - .map(|(path, diff)| (path, vec![], diff)) - .collect(); - self.sync_lhs_for_paths(paths_for_sync, &companion, cx); + self.sync_lhs_for_paths(all_paths, &companion, cx); rhs_display_map.update(cx, |dm, cx| { dm.set_companion(Some((lhs_display_map, companion.clone())), cx); @@ -1004,7 +1038,7 @@ impl SplittableEditor { cx.notify(); } - pub fn set_excerpts_for_path( + pub fn update_excerpts_for_path( &mut self, path: PathKey, buffer: Entity, @@ -1012,122 +1046,94 @@ impl SplittableEditor { context_line_count: u32, diff: Entity, cx: &mut Context, - ) -> (Vec>, bool) { + ) -> bool { + let has_ranges = ranges.clone().into_iter().next().is_some(); let Some(companion) = self.companion(cx) else { return self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path( + let added_a_new_excerpt = rhs_multibuffer.update_excerpts_for_path( path, buffer.clone(), ranges, context_line_count, cx, ); - if !anchors.is_empty() + if has_ranges && rhs_multibuffer .diff_for(buffer.read(cx).remote_id()) .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) { rhs_multibuffer.add_diff(diff, cx); } - (anchors, added_a_new_excerpt) + added_a_new_excerpt }); }; - let old_rhs_ids: Vec = self - .rhs_multibuffer - .read(cx) - .excerpts_for_path(&path) - .collect(); - let result = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path( + let added_a_new_excerpt = rhs_multibuffer.update_excerpts_for_path( 
path.clone(), buffer.clone(), ranges, context_line_count, cx, ); - if !anchors.is_empty() + if has_ranges && rhs_multibuffer .diff_for(buffer.read(cx).remote_id()) .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) { rhs_multibuffer.add_diff(diff.clone(), cx); } - (anchors, added_a_new_excerpt) + added_a_new_excerpt }); - self.sync_lhs_for_paths(vec![(path, old_rhs_ids, diff)], &companion, cx); + self.sync_lhs_for_paths(vec![(path, diff)], &companion, cx); result } fn expand_excerpts( &mut self, - excerpt_ids: impl Iterator + Clone, + excerpt_anchors: impl Iterator + Clone, lines: u32, direction: ExpandExcerptDirection, cx: &mut Context, ) { let Some(companion) = self.companion(cx) else { self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx); + rhs_multibuffer.expand_excerpts(excerpt_anchors, lines, direction, cx); }); return; }; - let paths_with_old_ids: Vec<_> = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + let paths: Vec<_> = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { let snapshot = rhs_multibuffer.snapshot(cx); - let paths = excerpt_ids + let paths = excerpt_anchors .clone() - .filter_map(|excerpt_id| { - let path = rhs_multibuffer.path_for_excerpt(excerpt_id)?; - let buffer = snapshot.buffer_for_excerpt(excerpt_id)?; - let diff = rhs_multibuffer.diff_for(buffer.remote_id())?; - Some((path, diff)) + .filter_map(|anchor| { + let (anchor, _) = snapshot.anchor_to_buffer_anchor(anchor)?; + let path = snapshot.path_for_buffer(anchor.buffer_id)?; + let diff = rhs_multibuffer.diff_for(anchor.buffer_id)?; + Some((path.clone(), diff)) }) .collect::>() .into_iter() - .map(|(path, diff)| { - let old_ids = rhs_multibuffer.excerpts_for_path(&path).collect(); - (path, old_ids, diff) - }) .collect(); - rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx); + rhs_multibuffer.expand_excerpts(excerpt_anchors, lines, direction, cx); paths }); - 
self.sync_lhs_for_paths(paths_with_old_ids, &companion, cx); + self.sync_lhs_for_paths(paths, &companion, cx); } pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context) { - let Some(lhs) = &self.lhs else { - self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - rhs_multibuffer.remove_excerpts_for_path(path, cx); - }); - return; - }; - - let rhs_excerpt_ids: Vec = self - .rhs_multibuffer - .read(cx) - .excerpts_for_path(&path) - .collect(); - let lhs_excerpt_ids: Vec = - lhs.multibuffer.read(cx).excerpts_for_path(&path).collect(); + self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + rhs_multibuffer.remove_excerpts(path.clone(), cx); + }); - let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); - if let Some(companion) = rhs_display_map.read(cx).companion().cloned() { - companion.update(cx, |c, _| { - c.remove_excerpt_mappings(lhs_excerpt_ids, rhs_excerpt_ids); + if let Some(lhs) = &self.lhs { + lhs.multibuffer.update(cx, |lhs_multibuffer, cx| { + lhs_multibuffer.remove_excerpts(path, cx); }); } - - self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - rhs_multibuffer.remove_excerpts_for_path(path.clone(), cx); - }); - lhs.multibuffer.update(cx, |lhs_multibuffer, cx| { - lhs_multibuffer.remove_excerpts_for_path(path, cx); - }); } fn search_token(&self) -> SearchToken { @@ -1151,122 +1157,95 @@ impl SplittableEditor { fn sync_lhs_for_paths( &self, - paths_with_old_rhs_ids: Vec<(PathKey, Vec, Entity)>, + paths: Vec<(PathKey, Entity)>, companion: &Entity, cx: &mut Context, ) { let Some(lhs) = &self.lhs else { return }; self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - for (path, old_rhs_ids, diff) in paths_with_old_rhs_ids { - let old_lhs_ids: Vec = - lhs.multibuffer.read(cx).excerpts_for_path(&path).collect(); - - companion.update(cx, |c, _| { - c.remove_excerpt_mappings(old_lhs_ids, old_rhs_ids); - }); - - let rhs_excerpt_ids: Vec = - rhs_multibuffer.excerpts_for_path(&path).collect(); - let Some(excerpt_id) = 
rhs_excerpt_ids.first().copied() else { + for (path, diff) in paths { + let main_buffer_id = diff.read(cx).buffer_id; + let Some(main_buffer) = rhs_multibuffer.buffer(diff.read(cx).buffer_id) else { lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { - lhs_multibuffer.remove_excerpts_for_path(path, lhs_cx); + lhs_multibuffer.remove_excerpts(path, lhs_cx); }); continue; }; - let Some(main_buffer_snapshot) = rhs_multibuffer - .snapshot(cx) - .buffer_for_excerpt(excerpt_id) - .cloned() - else { - continue; - }; - let Some(main_buffer) = rhs_multibuffer.buffer(main_buffer_snapshot.remote_id()) - else { - continue; - }; + let main_buffer_snapshot = main_buffer.read(cx).snapshot(); let base_text_buffer = diff.read(cx).base_text_buffer().clone(); let diff_snapshot = diff.read(cx).snapshot(cx); let base_text_buffer_snapshot = base_text_buffer.read(cx).snapshot(); - let lhs_ranges: Vec> = rhs_multibuffer - .excerpts_for_buffer(main_buffer_snapshot.remote_id(), cx) + let mut paired_ranges: Vec<(Range, ExcerptRange)> = Vec::new(); + + let mut have_excerpt = false; + let mut did_merge = false; + let rhs_multibuffer_snapshot = rhs_multibuffer.snapshot(cx); + for info in rhs_multibuffer_snapshot.excerpts_for_buffer(main_buffer_id) { + have_excerpt = true; + let rhs_context = info.context.to_point(&main_buffer_snapshot); + let lhs_context = rhs_range_to_base_text_range( + &rhs_context, + &diff_snapshot, + &main_buffer_snapshot, + ); + + if let Some((prev_lhs_context, prev_rhs_range)) = paired_ranges.last_mut() + && prev_lhs_context.end >= lhs_context.start + { + did_merge = true; + prev_lhs_context.end = lhs_context.end; + prev_rhs_range.context.end = info.context.end; + continue; + } + + paired_ranges.push((lhs_context, info)); + } + + let (lhs_ranges, rhs_ranges): (Vec<_>, Vec<_>) = paired_ranges.into_iter().unzip(); + let lhs_ranges = lhs_ranges .into_iter() - .filter(|(id, _, _)| rhs_excerpt_ids.contains(id)) - .map(|(_, _, excerpt_range)| { - let to_base_text = |range: 
Range| { - let start = diff_snapshot - .buffer_point_to_base_text_range( - Point::new(range.start.row, 0), - &main_buffer_snapshot, - ) - .start; - let end = diff_snapshot - .buffer_point_to_base_text_range( - Point::new(range.end.row, 0), - &main_buffer_snapshot, - ) - .end; - let end_column = diff_snapshot.base_text().line_len(end.row); - Point::new(start.row, 0)..Point::new(end.row, end_column) - }; - let primary = excerpt_range.primary.to_point(&main_buffer_snapshot); - let context = excerpt_range.context.to_point(&main_buffer_snapshot); - ExcerptRange { - primary: to_base_text(primary), - context: to_base_text(context), - } + .map(|range| { + ExcerptRange::new(base_text_buffer_snapshot.anchor_range_outside(range)) }) - .collect(); + .collect::>(); - let groups = lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { - let lhs_result = lhs_multibuffer.update_path_excerpts( - path, + lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { + lhs_multibuffer.update_path_excerpts( + path.clone(), base_text_buffer, &base_text_buffer_snapshot, - lhs_ranges, + &lhs_ranges, lhs_cx, ); - if !lhs_result.excerpt_ids.is_empty() + if have_excerpt && lhs_multibuffer .diff_for(base_text_buffer_snapshot.remote_id()) .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) { - lhs_multibuffer.add_inverted_diff(diff.clone(), main_buffer, lhs_cx); - } - - let mut groups = Vec::new(); - for (lhs_id, chunk) in &lhs_result - .excerpt_ids - .iter() - .copied() - .zip(rhs_excerpt_ids) - .chunk_by(|(lhs_id, _)| *lhs_id) - { - groups.push((lhs_id, chunk.map(|(_, rhs_id)| rhs_id).collect::>())); + lhs_multibuffer.add_inverted_diff( + diff.clone(), + main_buffer.clone(), + lhs_cx, + ); } - groups }); - let pairs = groups - .into_iter() - .map(|(lhs_id, rhs_group)| { - let rhs_id = if rhs_group.len() == 1 { - rhs_group[0] - } else { - rhs_multibuffer.merge_excerpts(&rhs_group, cx) - }; - (lhs_id, rhs_id) - }) - .collect::>(); + if did_merge { + rhs_multibuffer.update_path_excerpts( 
+ path, + main_buffer, + &main_buffer_snapshot, + &rhs_ranges, + cx, + ); + } let lhs_buffer_id = diff.read(cx).base_text(cx).remote_id(); let rhs_buffer_id = diff.read(cx).buffer_id; companion.update(cx, |c, _| { - for (lhs_id, rhs_id) in pairs { - c.add_excerpt_mapping(lhs_id, rhs_id); - } c.add_buffer_mapping(lhs_buffer_id, rhs_buffer_id); }); } @@ -1312,7 +1291,7 @@ impl SplittableEditor { use crate::display_map::DisplayRow; self.debug_print(cx); - self.check_excerpt_mapping_invariants(cx); + self.check_excerpt_invariants(quiesced, cx); let lhs = self.lhs.as_ref().unwrap(); @@ -1362,15 +1341,21 @@ impl SplittableEditor { let (lhs_point, rhs_point) = if lhs_hunk.row_range.is_empty() || rhs_hunk.row_range.is_empty() { + use multi_buffer::ToPoint as _; + let lhs_end = Point::new(lhs_hunk.row_range.end.0, 0); let rhs_end = Point::new(rhs_hunk.row_range.end.0, 0); - let lhs_exceeds = lhs_snapshot - .range_for_excerpt(lhs_hunk.excerpt_id) - .map_or(false, |range| lhs_end >= range.end); - let rhs_exceeds = rhs_snapshot - .range_for_excerpt(rhs_hunk.excerpt_id) - .map_or(false, |range| rhs_end >= range.end); + let lhs_excerpt_end = lhs_snapshot + .anchor_in_excerpt(lhs_hunk.excerpt_range.context.end) + .unwrap() + .to_point(&lhs_snapshot); + let lhs_exceeds = lhs_end >= lhs_excerpt_end; + let rhs_excerpt_end = rhs_snapshot + .anchor_in_excerpt(rhs_hunk.excerpt_range.context.end) + .unwrap() + .to_point(&rhs_snapshot); + let rhs_exceeds = rhs_end >= rhs_excerpt_end; if lhs_exceeds != rhs_exceeds { continue; } @@ -1664,109 +1649,53 @@ impl SplittableEditor { eprintln!(); } - fn check_excerpt_mapping_invariants(&self, cx: &gpui::App) { - use multi_buffer::{ExcerptId, PathKey}; - + fn check_excerpt_invariants(&self, quiesced: bool, cx: &gpui::App) { let lhs = self.lhs.as_ref().expect("should have lhs editor"); - let rhs_excerpt_ids = self.rhs_multibuffer.read(cx).excerpt_ids(); - let lhs_excerpt_ids = lhs.multibuffer.read(cx).excerpt_ids(); - assert_eq!( - 
rhs_excerpt_ids.len(), - lhs_excerpt_ids.len(), - "excerpt count mismatch: rhs has {}, lhs has {}", - rhs_excerpt_ids.len(), - lhs_excerpt_ids.len(), - ); - - let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); - let companion = rhs_display_map - .read(cx) - .companion() - .cloned() - .expect("should have companion"); - let (lhs_to_rhs, rhs_to_lhs) = { - let c = companion.read(cx); - let (l, r) = c.excerpt_mappings(); - (l.clone(), r.clone()) - }; - - assert_eq!( - lhs_to_rhs.len(), - rhs_to_lhs.len(), - "mapping size mismatch: lhs_to_rhs has {}, rhs_to_lhs has {}", - lhs_to_rhs.len(), - rhs_to_lhs.len(), - ); + let rhs_snapshot = self.rhs_multibuffer.read(cx).snapshot(cx); + let rhs_excerpts = rhs_snapshot.excerpts().collect::>(); + let lhs_snapshot = lhs.multibuffer.read(cx).snapshot(cx); + let lhs_excerpts = lhs_snapshot.excerpts().collect::>(); + assert_eq!(lhs_excerpts.len(), rhs_excerpts.len()); - for (&lhs_id, &rhs_id) in &lhs_to_rhs { - let reverse = rhs_to_lhs.get(&rhs_id); - assert_eq!( - reverse, - Some(&lhs_id), - "lhs_to_rhs maps {lhs_id:?} -> {rhs_id:?}, but rhs_to_lhs maps {rhs_id:?} -> {reverse:?}", - ); - } - for (&rhs_id, &lhs_id) in &rhs_to_lhs { - let reverse = lhs_to_rhs.get(&lhs_id); + for (lhs_excerpt, rhs_excerpt) in lhs_excerpts.into_iter().zip(rhs_excerpts) { assert_eq!( - reverse, - Some(&rhs_id), - "rhs_to_lhs maps {rhs_id:?} -> {lhs_id:?}, but lhs_to_rhs maps {lhs_id:?} -> {reverse:?}", + lhs_snapshot + .path_for_buffer(lhs_excerpt.context.start.buffer_id) + .unwrap(), + rhs_snapshot + .path_for_buffer(rhs_excerpt.context.start.buffer_id) + .unwrap(), + "corresponding excerpts should have the same path" ); - } - - assert_eq!( - lhs_to_rhs.len(), - rhs_excerpt_ids.len(), - "mapping covers {} excerpts but rhs has {}", - lhs_to_rhs.len(), - rhs_excerpt_ids.len(), - ); - - let rhs_mapped_order: Vec = rhs_excerpt_ids - .iter() - .map(|rhs_id| { - *rhs_to_lhs.get(rhs_id).unwrap_or_else(|| { - panic!("rhs excerpt {rhs_id:?} has 
no mapping in rhs_to_lhs") - }) - }) - .collect(); - assert_eq!( - rhs_mapped_order, lhs_excerpt_ids, - "excerpt ordering mismatch: mapping rhs order through rhs_to_lhs doesn't match lhs order", - ); - - let rhs_paths: Vec = self.rhs_multibuffer.read(cx).paths().cloned().collect(); - let lhs_paths: Vec = lhs.multibuffer.read(cx).paths().cloned().collect(); - assert_eq!( - rhs_paths, lhs_paths, - "path set mismatch between rhs and lhs" - ); - - for path in &rhs_paths { - let rhs_path_excerpts: Vec = self + let diff = self .rhs_multibuffer .read(cx) - .excerpts_for_path(path) - .collect(); - let lhs_path_excerpts: Vec = - lhs.multibuffer.read(cx).excerpts_for_path(path).collect(); + .diff_for(rhs_excerpt.context.start.buffer_id) + .expect("missing diff"); assert_eq!( - rhs_path_excerpts.len(), - lhs_path_excerpts.len(), - "excerpt count mismatch for path {path:?}: rhs has {}, lhs has {}", - rhs_path_excerpts.len(), - lhs_path_excerpts.len(), - ); - let rhs_path_mapped: Vec = rhs_path_excerpts - .iter() - .map(|rhs_id| *rhs_to_lhs.get(rhs_id).unwrap()) - .collect(); - assert_eq!( - rhs_path_mapped, lhs_path_excerpts, - "per-path excerpt ordering mismatch for {path:?}", + lhs_excerpt.context.start.buffer_id, + diff.read(cx).base_text(cx).remote_id(), + "corresponding lhs excerpt should show diff base text" ); + + if quiesced { + let diff_snapshot = diff.read(cx).snapshot(cx); + let lhs_buffer_snapshot = lhs_snapshot + .buffer_for_id(lhs_excerpt.context.start.buffer_id) + .unwrap(); + let rhs_buffer_snapshot = rhs_snapshot + .buffer_for_id(rhs_excerpt.context.start.buffer_id) + .unwrap(); + let lhs_range = lhs_excerpt.context.to_point(&lhs_buffer_snapshot); + let rhs_range = rhs_excerpt.context.to_point(&rhs_buffer_snapshot); + let expected_lhs_range = + rhs_range_to_base_text_range(&rhs_range, &diff_snapshot, &rhs_buffer_snapshot); + assert_eq!( + lhs_range, expected_lhs_range, + "corresponding lhs excerpt should have a matching range" + ) + } } } } @@ -2316,7 +2245,7 
@@ mod tests { let context_lines = rng.random_range(0..2); editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx); + editor.update_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx); }); editor.update(cx, |editor, cx| { editor.check_invariants(true, cx); @@ -2351,7 +2280,14 @@ mod tests { let context_lines = rng.random_range(0..2); editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx); + editor.update_excerpts_for_path( + path, + buffer, + ranges, + context_lines, + diff, + cx, + ); }); } 15..=29 => { @@ -2395,7 +2331,14 @@ mod tests { let buffer = buffer.clone(); editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx); + editor.update_excerpts_for_path( + path, + buffer, + ranges, + context_lines, + diff, + cx, + ); }); } 55..=64 => { @@ -2407,16 +2350,14 @@ mod tests { } 65..=74 => { log::info!("removing excerpts for a random path"); - let paths = editor.update(cx, |editor, cx| { - editor - .rhs_multibuffer - .read(cx) - .paths() - .cloned() - .collect::>() + let ids = editor.update(cx, |editor, cx| { + let snapshot = editor.rhs_multibuffer.read(cx).snapshot(cx); + snapshot.all_buffer_ids().collect::>() }); - if let Some(path) = paths.choose(rng) { + if let Some(id) = ids.choose(rng) { editor.update(cx, |editor, cx| { + let snapshot = editor.rhs_multibuffer.read(cx).snapshot(cx); + let path = snapshot.path_for_buffer(*id).unwrap(); editor.remove_excerpts_for_path(path.clone(), cx); }); } @@ -2432,18 +2373,21 @@ mod tests { }); } 80..=89 => { - let excerpt_ids = editor.update(cx, |editor, cx| { - editor.rhs_multibuffer.read(cx).excerpt_ids() + let snapshot = editor.update(cx, |editor, cx| { + editor.rhs_multibuffer.read(cx).snapshot(cx) }); - if 
!excerpt_ids.is_empty() { - let count = rng.random_range(1..=excerpt_ids.len().min(3)); + let excerpts = snapshot.excerpts().collect::>(); + if !excerpts.is_empty() { + let count = rng.random_range(1..=excerpts.len().min(3)); let chosen: Vec<_> = - excerpt_ids.choose_multiple(rng, count).copied().collect(); + excerpts.choose_multiple(rng, count).cloned().collect(); let line_count = rng.random_range(1..5); log::info!("expanding {count} excerpts by {line_count} lines"); editor.update(cx, |editor, cx| { editor.expand_excerpts( - chosen.into_iter(), + chosen.into_iter().map(|excerpt| { + snapshot.anchor_in_excerpt(excerpt.context.start).unwrap() + }), line_count, ExpandExcerptDirection::UpAndDown, cx, @@ -2474,7 +2418,7 @@ mod tests { .collect::>(); editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path(path, buffer, ranges, 2, diff, cx); + editor.update_excerpts_for_path(path, buffer, ranges, 2, diff, cx); }); } quiesced = true; @@ -2511,7 +2455,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path(path, buffer.clone(), ranges, 0, diff.clone(), cx); + editor.update_excerpts_for_path(path, buffer.clone(), ranges, 0, diff.clone(), cx); }); cx.run_until_parked(); @@ -2523,12 +2467,16 @@ mod tests { ); }); - let excerpt_ids = editor.update(cx, |editor, cx| { - editor.rhs_multibuffer.read(cx).excerpt_ids() + let excerpts = editor.update(cx, |editor, cx| { + let snapshot = editor.rhs_multibuffer.read(cx).snapshot(cx); + snapshot + .excerpts() + .map(|excerpt| snapshot.anchor_in_excerpt(excerpt.context.start).unwrap()) + .collect::>() }); editor.update(cx, |editor, cx| { editor.expand_excerpts( - excerpt_ids.iter().copied(), + excerpts.into_iter(), 2, multi_buffer::ExpandExcerptDirection::UpAndDown, cx, @@ -2564,7 +2512,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + 
editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -2693,7 +2641,7 @@ mod tests { editor.update(cx, |editor, cx| { let path1 = PathKey::for_buffer(&buffer1, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path1, buffer1.clone(), vec![Point::new(0, 0)..buffer1.read(cx).max_point()], @@ -2702,7 +2650,7 @@ mod tests { cx, ); let path2 = PathKey::for_buffer(&buffer2, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path2, buffer2.clone(), vec![Point::new(0, 0)..buffer2.read(cx).max_point()], @@ -2851,7 +2799,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -2978,7 +2926,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3097,7 +3045,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3227,7 +3175,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3324,7 +3272,7 @@ mod tests { editor.update(cx, |editor, cx| { let end = Point::new(0, text.len() as u32); let path1 = PathKey::for_buffer(&buffer1, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path1, buffer1.clone(), vec![Point::new(0, 0)..end], @@ -3333,7 +3281,7 @@ mod tests { cx, ); let path2 = PathKey::for_buffer(&buffer2, cx); - 
editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path2, buffer2.clone(), vec![Point::new(0, 0)..end], @@ -3401,7 +3349,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3464,7 +3412,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3525,7 +3473,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3641,7 +3589,7 @@ mod tests { editor.update(cx, |editor, cx| { let path1 = PathKey::for_buffer(&buffer1, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path1, buffer1.clone(), vec![Point::new(0, 0)..buffer1.read(cx).max_point()], @@ -3651,7 +3599,7 @@ mod tests { ); let path2 = PathKey::for_buffer(&buffer2, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path2, buffer2.clone(), vec![Point::new(0, 0)..buffer2.read(cx).max_point()], @@ -3749,7 +3697,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3825,7 +3773,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3912,7 +3860,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - 
editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4026,7 +3974,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4110,7 +4058,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4194,7 +4142,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4286,7 +4234,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4414,7 +4362,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4561,7 +4509,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4783,7 +4731,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -5122,7 +5070,7 @@ mod tests { editor.update(cx, |editor, cx| { let path1 = 
PathKey::for_buffer(&buffer1, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path1, buffer1.clone(), vec![Point::new(0, 0)..buffer1.read(cx).max_point()], @@ -5131,7 +5079,7 @@ mod tests { cx, ); let path2 = PathKey::for_buffer(&buffer2, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path2, buffer2.clone(), vec![Point::new(0, 0)..buffer2.read(cx).max_point()], @@ -5287,7 +5235,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -5448,7 +5396,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -5607,7 +5555,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -5738,7 +5686,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![ @@ -5799,7 +5747,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -5882,7 +5830,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..Point::new(3, 3)], @@ -5994,7 +5942,7 @@ mod tests { let path_b = cx.read(|cx| PathKey::for_buffer(&buffer_b, cx)); editor.update(cx, 
|editor, cx| { - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path_a.clone(), buffer_a.clone(), vec![Point::new(0, 0)..buffer_a.read(cx).max_point()], @@ -6002,7 +5950,7 @@ mod tests { diff_a.clone(), cx, ); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path_b.clone(), buffer_b.clone(), vec![Point::new(0, 0)..buffer_b.read(cx).max_point()], @@ -6032,7 +5980,7 @@ mod tests { cx.run_until_parked(); editor.update(cx, |editor, cx| { - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path_a.clone(), buffer_a.clone(), vec![Point::new(0, 0)..buffer_a.read(cx).max_point()], @@ -6089,7 +6037,7 @@ mod tests { }; editor.update(cx, |editor, cx| { - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path_key_1.clone(), buffer.clone(), vec![Point::new(0, 0)..Point::new(1, 0)], @@ -6097,7 +6045,7 @@ mod tests { diff.clone(), cx, ); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path_key_2.clone(), buffer.clone(), vec![Point::new(1, 0)..buffer.read(cx).max_point()], diff --git a/crates/editor/src/split_editor_view.rs b/crates/editor/src/split_editor_view.rs index 454013c530ab8389314892011e5eb115ee6e0957..02388df9a7516e72810b91d65292795e6375470e 100644 --- a/crates/editor/src/split_editor_view.rs +++ b/crates/editor/src/split_editor_view.rs @@ -7,7 +7,7 @@ use gpui::{ ParentElement, Pixels, StatefulInteractiveElement, Styled, TextStyleRefinement, Window, div, linear_color_stop, linear_gradient, point, px, size, }; -use multi_buffer::{Anchor, ExcerptId}; +use multi_buffer::{Anchor, ExcerptBoundaryInfo}; use settings::Settings; use smallvec::smallvec; use text::BufferId; @@ -429,7 +429,7 @@ impl SplitBufferHeadersElement { let sticky_header_excerpt_id = snapshot .sticky_header_excerpt(scroll_position.y) - .map(|e| e.excerpt.id); + .map(|e| e.excerpt); let non_sticky_headers = self.build_non_sticky_headers( &snapshot, @@ -476,9 +476,10 @@ impl SplitBufferHeadersElement { let mut 
anchors_by_buffer: HashMap = HashMap::default(); for selection in all_anchor_selections.iter() { let head = selection.head(); - if let Some(buffer_id) = head.text_anchor.buffer_id { + if let Some((text_anchor, _)) = snapshot.buffer_snapshot().anchor_to_buffer_anchor(head) + { anchors_by_buffer - .entry(buffer_id) + .entry(text_anchor.buffer_id) .and_modify(|(latest_id, latest_anchor)| { if selection.id > *latest_id { *latest_id = selection.id; @@ -520,7 +521,7 @@ impl SplitBufferHeadersElement { ); let editor_bg_color = cx.theme().colors().editor_background; - let selected = selected_buffer_ids.contains(&excerpt.buffer_id); + let selected = selected_buffer_ids.contains(&excerpt.buffer_id()); let mut header = v_flex() .id("sticky-buffer-header") @@ -594,7 +595,7 @@ impl SplitBufferHeadersElement { end_row: DisplayRow, selected_buffer_ids: &HashSet, latest_selection_anchors: &HashMap, - sticky_header_excerpt_id: Option, + sticky_header: Option<&ExcerptBoundaryInfo>, window: &mut Window, cx: &mut App, ) -> Vec { @@ -603,7 +604,7 @@ impl SplitBufferHeadersElement { for (block_row, block) in snapshot.blocks_in_range(start_row..end_row) { let (excerpt, is_folded) = match block { Block::BufferHeader { excerpt, .. } => { - if sticky_header_excerpt_id == Some(excerpt.id) { + if sticky_header == Some(excerpt) { continue; } (excerpt, false) @@ -613,7 +614,7 @@ impl SplitBufferHeadersElement { Block::ExcerptBoundary { .. } | Block::Custom(_) | Block::Spacer { .. 
} => continue, }; - let selected = selected_buffer_ids.contains(&excerpt.buffer_id); + let selected = selected_buffer_ids.contains(&excerpt.buffer_id()); let jump_data = header_jump_data( snapshot, block_row, diff --git a/crates/editor/src/tasks.rs b/crates/editor/src/tasks.rs new file mode 100644 index 0000000000000000000000000000000000000000..7323d4159cec58a5a7db7daa42ca201125200fae --- /dev/null +++ b/crates/editor/src/tasks.rs @@ -0,0 +1,101 @@ +use crate::Editor; + +use collections::HashMap; +use gpui::{App, Task, Window}; +use lsp::LanguageServerName; +use project::{Location, project_settings::ProjectSettings}; +use settings::Settings as _; +use task::{TaskContext, TaskVariables, VariableName}; +use text::{BufferId, ToOffset, ToPoint}; + +impl Editor { + pub fn task_context(&self, window: &mut Window, cx: &mut App) -> Task> { + let Some(project) = self.project.clone() else { + return Task::ready(None); + }; + let display_snapshot = self.display_snapshot(cx); + let selection = self.selections.newest_adjusted(&display_snapshot); + let start = display_snapshot + .buffer_snapshot() + .anchor_after(selection.start); + let end = display_snapshot + .buffer_snapshot() + .anchor_after(selection.end); + let Some((buffer_snapshot, range)) = display_snapshot + .buffer_snapshot() + .anchor_range_to_buffer_anchor_range(start..end) + else { + return Task::ready(None); + }; + let Some(buffer) = self.buffer.read(cx).buffer(buffer_snapshot.remote_id()) else { + return Task::ready(None); + }; + let location = Location { buffer, range }; + let captured_variables = { + let mut variables = TaskVariables::default(); + let buffer = location.buffer.read(cx); + let buffer_id = buffer.remote_id(); + let snapshot = buffer.snapshot(); + let starting_point = location.range.start.to_point(&snapshot); + let starting_offset = starting_point.to_offset(&snapshot); + for (_, tasks) in self + .tasks + .range((buffer_id, 0)..(buffer_id, starting_point.row + 1)) + { + if !tasks + .context_range + 
.contains(&crate::BufferOffset(starting_offset)) + { + continue; + } + for (capture_name, value) in tasks.extra_variables.iter() { + variables.insert( + VariableName::Custom(capture_name.to_owned().into()), + value.clone(), + ); + } + } + variables + }; + + project.update(cx, |project, cx| { + project.task_store().update(cx, |task_store, cx| { + task_store.task_context_for_location(captured_variables, location, cx) + }) + }) + } + + pub fn lsp_task_sources(&self, cx: &App) -> HashMap> { + let lsp_settings = &ProjectSettings::get_global(cx).lsp; + + self.buffer() + .read(cx) + .all_buffers() + .into_iter() + .filter_map(|buffer| { + let lsp_tasks_source = buffer + .read(cx) + .language()? + .context_provider()? + .lsp_task_source()?; + if lsp_settings + .get(&lsp_tasks_source) + .is_none_or(|s| s.enable_lsp_tasks) + { + let buffer_id = buffer.read(cx).remote_id(); + Some((lsp_tasks_source, buffer_id)) + } else { + None + } + }) + .fold( + HashMap::default(), + |mut acc, (lsp_task_source, buffer_id)| { + acc.entry(lsp_task_source) + .or_insert_with(Vec::new) + .push(buffer_id); + acc + }, + ) + } +} diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index bef2b3fc3ec2b949ffb8288d59b1201f6f3dde90..22f686668bd98b4c5b5235e34c0881d6583ed3bc 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -245,7 +245,7 @@ pub fn editor_content_with_blocks_and_size( format!( "§ {}", first_excerpt - .buffer + .buffer(snapshot.buffer_snapshot()) .file() .map(|file| file.file_name(cx)) .unwrap_or("") @@ -274,7 +274,7 @@ pub fn editor_content_with_blocks_and_size( format!( "§ {}", excerpt - .buffer + .buffer(snapshot.buffer_snapshot()) .file() .map(|file| file.file_name(cx)) .unwrap_or("") diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 101c1559a7a0fb6e5d0d5bba7281a0cb78ab4b65..84b03d91ca1cf2e0ba858398bcf8134ce16edb41 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ 
b/crates/editor/src/test/editor_test_context.rs @@ -1,5 +1,5 @@ use crate::{ - AnchorRangeExt, DisplayPoint, Editor, ExcerptId, MultiBuffer, MultiBufferSnapshot, RowExt, + DisplayPoint, Editor, MultiBuffer, MultiBufferSnapshot, RowExt, display_map::{HighlightKey, ToDisplayPoint}, }; use buffer_diff::DiffHunkStatusKind; @@ -13,7 +13,9 @@ use gpui::{ }; use itertools::Itertools; use language::{Buffer, BufferSnapshot, LanguageRegistry}; -use multi_buffer::{Anchor, ExcerptRange, MultiBufferOffset, MultiBufferRow, PathKey}; +use multi_buffer::{ + Anchor, AnchorRangeExt, ExcerptRange, MultiBufferOffset, MultiBufferRow, PathKey, +}; use parking_lot::RwLock; use project::{FakeFs, Project}; use std::{ @@ -464,7 +466,21 @@ impl EditorTestContext { let selections = editor.selections.disjoint_anchors_arc(); let excerpts = multibuffer_snapshot .excerpts() - .map(|(e_id, snapshot, range)| (e_id, snapshot.clone(), range)) + .map(|info| { + ( + multibuffer_snapshot + .buffer_for_id(info.context.start.buffer_id) + .cloned() + .unwrap(), + multibuffer_snapshot + .anchor_in_excerpt(info.context.start) + .unwrap() + ..multibuffer_snapshot + .anchor_in_excerpt(info.context.end) + .unwrap(), + info, + ) + }) .collect::>(); (multibuffer_snapshot, selections, excerpts) @@ -478,14 +494,23 @@ impl EditorTestContext { fmt_additional_notes(), ); - for (ix, (excerpt_id, snapshot, range)) in excerpts.into_iter().enumerate() { + for (ix, (snapshot, multibuffer_range, excerpt_range)) in excerpts.into_iter().enumerate() { let is_folded = self .update_editor(|editor, _, cx| editor.is_buffer_folded(snapshot.remote_id(), cx)); let (expected_text, expected_selections) = marked_text_ranges(expected_excerpts[ix], true); if expected_text == "[FOLDED]\n" { assert!(is_folded, "excerpt {} should be folded", ix); - let is_selected = selections.iter().any(|s| s.head().excerpt_id == excerpt_id); + let is_selected = selections.iter().any(|s| { + multibuffer_range + .start + .cmp(&s.head(), 
&multibuffer_snapshot) + .is_le() + && multibuffer_range + .end + .cmp(&s.head(), &multibuffer_snapshot) + .is_ge() + }); if !expected_selections.is_empty() { assert!( is_selected, @@ -510,7 +535,7 @@ impl EditorTestContext { ); assert_eq!( multibuffer_snapshot - .text_for_range(Anchor::range_in_buffer(excerpt_id, range.context.clone())) + .text_for_range(multibuffer_range.clone()) .collect::(), expected_text, "{}", @@ -519,13 +544,24 @@ impl EditorTestContext { let selections = selections .iter() - .filter(|s| s.head().excerpt_id == excerpt_id) - .map(|s| { - let head = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot) - - text::ToOffset::to_offset(&range.context.start, &snapshot); - let tail = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot) - - text::ToOffset::to_offset(&range.context.start, &snapshot); - tail..head + .filter(|s| { + multibuffer_range + .start + .cmp(&s.head(), &multibuffer_snapshot) + .is_le() + && multibuffer_range + .end + .cmp(&s.head(), &multibuffer_snapshot) + .is_ge() + }) + .filter_map(|s| { + let (head_anchor, buffer_snapshot) = + multibuffer_snapshot.anchor_to_buffer_anchor(s.head())?; + let head = text::ToOffset::to_offset(&head_anchor, buffer_snapshot) + - text::ToOffset::to_offset(&excerpt_range.context.start, buffer_snapshot); + let tail = text::ToOffset::to_offset(&head_anchor, buffer_snapshot) + - text::ToOffset::to_offset(&excerpt_range.context.start, buffer_snapshot); + Some(tail..head) }) .collect::>(); // todo: selections that cross excerpt boundaries.. 
@@ -546,9 +582,12 @@ impl EditorTestContext { let selections = editor.selections.disjoint_anchors_arc().to_vec(); let excerpts = multibuffer_snapshot .excerpts() - .map(|(e_id, snapshot, range)| { - let is_folded = editor.is_buffer_folded(snapshot.remote_id(), cx); - (e_id, snapshot.clone(), range, is_folded) + .map(|info| { + let buffer_snapshot = multibuffer_snapshot + .buffer_for_id(info.context.start.buffer_id) + .unwrap(); + let is_folded = editor.is_buffer_folded(buffer_snapshot.remote_id(), cx); + (buffer_snapshot.clone(), info, is_folded) }) .collect::>(); @@ -673,7 +712,7 @@ impl EditorTestContext { struct FormatMultiBufferAsMarkedText { multibuffer_snapshot: MultiBufferSnapshot, selections: Vec>, - excerpts: Vec<(ExcerptId, BufferSnapshot, ExcerptRange, bool)>, + excerpts: Vec<(BufferSnapshot, ExcerptRange, bool)>, } impl std::fmt::Display for FormatMultiBufferAsMarkedText { @@ -684,25 +723,40 @@ impl std::fmt::Display for FormatMultiBufferAsMarkedText { excerpts, } = self; - for (excerpt_id, snapshot, range, is_folded) in excerpts.into_iter() { + for (_snapshot, range, is_folded) in excerpts.into_iter() { write!(f, "[EXCERPT]\n")?; if *is_folded { write!(f, "[FOLDED]\n")?; } + let multibuffer_range = multibuffer_snapshot + .buffer_anchor_range_to_anchor_range(range.context.clone()) + .unwrap(); + let mut text = multibuffer_snapshot - .text_for_range(Anchor::range_in_buffer(*excerpt_id, range.context.clone())) + .text_for_range(multibuffer_range.clone()) .collect::(); let selections = selections .iter() - .filter(|&s| s.head().excerpt_id == *excerpt_id) - .map(|s| { - let head = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot) - - text::ToOffset::to_offset(&range.context.start, &snapshot); - let tail = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot) - - text::ToOffset::to_offset(&range.context.start, &snapshot); - tail..head + .filter(|&s| { + multibuffer_range + .start + .cmp(&s.head(), multibuffer_snapshot) + .is_le() + && 
multibuffer_range + .end + .cmp(&s.head(), multibuffer_snapshot) + .is_ge() + }) + .filter_map(|s| { + let (head_anchor, buffer_snapshot) = + multibuffer_snapshot.anchor_to_buffer_anchor(s.head())?; + let head = text::ToOffset::to_offset(&head_anchor, buffer_snapshot) + - text::ToOffset::to_offset(&range.context.start, buffer_snapshot); + let tail = text::ToOffset::to_offset(&head_anchor, buffer_snapshot) + - text::ToOffset::to_offset(&range.context.start, buffer_snapshot); + Some(tail..head) }) .rev() .collect::>(); diff --git a/crates/encoding_selector/src/active_buffer_encoding.rs b/crates/encoding_selector/src/active_buffer_encoding.rs index 417ff241b72300aa90496f896fcf6c3ed3a363c7..42fd5f662f66c8e9f1eaa18953c6765c51244e77 100644 --- a/crates/encoding_selector/src/active_buffer_encoding.rs +++ b/crates/encoding_selector/src/active_buffer_encoding.rs @@ -47,7 +47,7 @@ impl ActiveBufferEncoding { self.is_shared = project.is_shared(); self.is_via_remote_server = project.is_via_remote_server(); - if let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) { + if let Some(buffer) = editor.read(cx).active_buffer(cx) { let buffer = buffer.read(cx); self.active_encoding = Some(buffer.encoding()); self.has_bom = buffer.has_bom(); diff --git a/crates/encoding_selector/src/encoding_selector.rs b/crates/encoding_selector/src/encoding_selector.rs index 3954bf29a30a0981c25bee3eb88829a7002881ad..e99b475de6773c647ef19195ef42052d37769346 100644 --- a/crates/encoding_selector/src/encoding_selector.rs +++ b/crates/encoding_selector/src/encoding_selector.rs @@ -47,11 +47,11 @@ impl EncodingSelector { window: &mut Window, cx: &mut Context, ) -> Option<()> { - let (_, buffer, _) = workspace + let buffer = workspace .active_item(cx)? .act_as::(cx)? 
.read(cx) - .active_excerpt(cx)?; + .active_buffer(cx)?; let buffer_handle = buffer.read(cx); let project = workspace.project().read(cx); diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index d169ba686098dddd4881915ece11c8a97148affa..d473fbbec618c6e7b309ab2ff9dc9eb5787ddc43 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -41,9 +41,9 @@ use theme::AccentColors; use theme_settings::ThemeSettings; use time::{OffsetDateTime, UtcOffset, format_description::BorrowedFormatItem}; use ui::{ - ButtonLike, Chip, CommonAnimationExt as _, ContextMenu, DiffStat, Divider, HighlightedLabel, - ScrollableHandle, Table, TableColumnWidths, TableInteractionState, TableResizeBehavior, - Tooltip, WithScrollbar, prelude::*, + ButtonLike, Chip, ColumnWidthConfig, CommonAnimationExt as _, ContextMenu, DiffStat, Divider, + HighlightedLabel, RedistributableColumnsState, ScrollableHandle, Table, TableInteractionState, + TableResizeBehavior, Tooltip, WithScrollbar, prelude::*, }; use workspace::{ Workspace, @@ -901,7 +901,7 @@ pub struct GitGraph { context_menu: Option<(Entity, Point, Subscription)>, row_height: Pixels, table_interaction_state: Entity, - table_column_widths: Entity, + table_column_widths: Entity, horizontal_scroll_offset: Pixels, graph_viewport_width: Pixels, selected_entry_idx: Option, @@ -972,7 +972,23 @@ impl GitGraph { }); let table_interaction_state = cx.new(|cx| TableInteractionState::new(cx)); - let table_column_widths = cx.new(|cx| TableColumnWidths::new(4, cx)); + let table_column_widths = cx.new(|_cx| { + RedistributableColumnsState::new( + 4, + vec![ + DefiniteLength::Fraction(0.72), + DefiniteLength::Fraction(0.12), + DefiniteLength::Fraction(0.10), + DefiniteLength::Fraction(0.06), + ], + vec![ + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + ], + ) + }); let mut row_height = Self::row_height(cx); 
cx.observe_global_in::(window, move |this, _window, cx| { @@ -2172,6 +2188,8 @@ impl GitGraph { builder.move_to(point(line_x, from_y)); let segments = &line.segments[start_segment_idx..]; + let desired_curve_height = row_height / 3.0; + let desired_curve_width = LANE_WIDTH / 3.0; for (segment_idx, segment) in segments.iter().enumerate() { let is_last = segment_idx + 1 == segments.len(); @@ -2225,66 +2243,69 @@ impl GitGraph { if is_last { to_column -= column_shift; } - builder.move_to(point(current_column, current_row)); - if (to_column - current_column).abs() > LANE_WIDTH { - // Multi-lane checkout: straight down, small - // curve turn, then straight horizontal. - if (to_row - current_row).abs() > row_height { - let vertical_end = - point(current_column, to_row - row_height); - builder.line_to(vertical_end); - builder.move_to(vertical_end); - } - - let lane_shift = if going_right { - LANE_WIDTH - } else { - -LANE_WIDTH - }; - let curve_end = - point(current_column + lane_shift, to_row); - let curve_control = point(current_column, to_row); - builder.curve_to(curve_end, curve_control); - builder.move_to(curve_end); - - builder.line_to(point(to_column, to_row)); + let available_curve_width = + (to_column - current_column).abs(); + let available_curve_height = + (to_row - current_row).abs(); + let curve_width = + desired_curve_width.min(available_curve_width); + let curve_height = + desired_curve_height.min(available_curve_height); + let signed_curve_width = if going_right { + curve_width } else { - if (to_row - current_row).abs() > row_height { - let start_curve = - point(current_column, to_row - row_height); - builder.line_to(start_curve); - builder.move_to(start_curve); - } - let control = point(current_column, to_row); - builder.curve_to(point(to_column, to_row), control); - } + -curve_width + }; + let curve_start = + point(current_column, to_row - curve_height); + let curve_end = + point(current_column + signed_curve_width, to_row); + let curve_control = 
point(current_column, to_row); + + builder.move_to(point(current_column, current_row)); + builder.line_to(curve_start); + builder.move_to(curve_start); + builder.curve_to(curve_end, curve_control); + builder.move_to(curve_end); + builder.line_to(point(to_column, to_row)); } CurveKind::Merge => { if is_last { to_row -= COMMIT_CIRCLE_RADIUS; } - builder.move_to(point( + + let merge_start = point( current_column + column_shift, current_row - COMMIT_CIRCLE_RADIUS, - )); - - if (to_column - current_column).abs() > LANE_WIDTH { - let column_shift = if going_right { - LANE_WIDTH - } else { - -LANE_WIDTH - }; - let start_curve = point( - current_column + column_shift, - current_row - COMMIT_CIRCLE_RADIUS, - ); - builder.line_to(start_curve); - builder.move_to(start_curve); - } - - let control = point(to_column, current_row); - builder.curve_to(point(to_column, to_row), control); + ); + let available_curve_width = + (to_column - merge_start.x).abs(); + let available_curve_height = + (to_row - merge_start.y).abs(); + let curve_width = + desired_curve_width.min(available_curve_width); + let curve_height = + desired_curve_height.min(available_curve_height); + let signed_curve_width = if going_right { + curve_width + } else { + -curve_width + }; + let curve_start = point( + to_column - signed_curve_width, + merge_start.y, + ); + let curve_end = + point(to_column, merge_start.y + curve_height); + let curve_control = point(to_column, merge_start.y); + + builder.move_to(merge_start); + builder.line_to(curve_start); + builder.move_to(curve_start); + builder.curve_to(curve_end, curve_control); + builder.move_to(curve_end); + builder.line_to(point(to_column, to_row)); } } current_row = to_row; @@ -2459,11 +2480,6 @@ impl Render for GitGraph { self.search_state.state = QueryState::Empty; self.search(query, cx); } - let description_width_fraction = 0.72; - let date_width_fraction = 0.12; - let author_width_fraction = 0.10; - let commit_width_fraction = 0.06; - let (commit_count, 
is_loading) = match self.graph_data.max_commit_count { AllCommitCount::Loaded(count) => (count, true), AllCommitCount::NotLoaded => { @@ -2523,7 +2539,10 @@ impl Render for GitGraph { .flex_col() .child( div() - .p_2() + .flex() + .items_center() + .px_1() + .py_0p5() .border_b_1() .whitespace_nowrap() .border_color(cx.theme().colors().border) @@ -2565,25 +2584,9 @@ impl Render for GitGraph { Label::new("Author").color(Color::Muted).into_any_element(), Label::new("Commit").color(Color::Muted).into_any_element(), ]) - .column_widths( - [ - DefiniteLength::Fraction(description_width_fraction), - DefiniteLength::Fraction(date_width_fraction), - DefiniteLength::Fraction(author_width_fraction), - DefiniteLength::Fraction(commit_width_fraction), - ] - .to_vec(), - ) - .resizable_columns( - vec![ - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, - ], - &self.table_column_widths, - cx, - ) + .width_config(ColumnWidthConfig::redistributable( + self.table_column_widths.clone(), + )) .map_row(move |(index, row), window, cx| { let is_selected = selected_entry_idx == Some(index); let is_hovered = hovered_entry_idx == Some(index); diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index 757ec1e0ebb92431e110e20f0833e2fcd0a88177..a298380336515aad24e9c55d637d392fa6898b35 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -212,7 +212,7 @@ impl CommitView { editor.insert_blocks( [BlockProperties { - placement: BlockPlacement::Above(editor::Anchor::min()), + placement: BlockPlacement::Above(editor::Anchor::Min), height: Some(1), style: BlockStyle::Sticky, render: Arc::new(|_| gpui::Empty.into_any_element()), @@ -223,7 +223,10 @@ impl CommitView { editor .buffer() .read(cx) - .buffer_anchor_to_anchor(&message_buffer, Anchor::MAX, cx) + .snapshot(cx) + .anchor_in_buffer(Anchor::max_for_buffer( + message_buffer.read(cx).remote_id(), + )) 
.map(|anchor| BlockProperties { placement: BlockPlacement::Below(anchor), height: Some(1), diff --git a/crates/git_ui/src/conflict_view.rs b/crates/git_ui/src/conflict_view.rs index c61214123dff8cbd414c89b586f1176f7255266e..95d46676a80ebca3b2db1ba1d7c88edee32df9ea 100644 --- a/crates/git_ui/src/conflict_view.rs +++ b/crates/git_ui/src/conflict_view.rs @@ -2,7 +2,7 @@ use agent_settings::AgentSettings; use collections::{HashMap, HashSet}; use editor::{ ConflictsOurs, ConflictsOursMarker, ConflictsOuter, ConflictsTheirs, ConflictsTheirsMarker, - Editor, EditorEvent, ExcerptId, MultiBuffer, RowHighlightOptions, + Editor, EditorEvent, MultiBuffer, RowHighlightOptions, display_map::{BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId}, }; use gpui::{ @@ -67,62 +67,22 @@ pub fn register_editor(editor: &mut Editor, buffer: Entity, cx: &mu let buffers = buffer.read(cx).all_buffers(); for buffer in buffers { - buffer_added(editor, buffer, cx); + buffer_ranges_updated(editor, buffer, cx); } cx.subscribe(&cx.entity(), |editor, _, event, cx| match event { - EditorEvent::ExcerptsAdded { buffer, .. } => buffer_added(editor, buffer.clone(), cx), - EditorEvent::ExcerptsExpanded { ids } => { - let multibuffer = editor.buffer().read(cx).snapshot(cx); - for excerpt_id in ids { - let Some(buffer) = multibuffer.buffer_for_excerpt(*excerpt_id) else { - continue; - }; - let addon = editor.addon::().unwrap(); - let Some(conflict_set) = addon.conflict_set(buffer.remote_id()).clone() else { - return; - }; - excerpt_for_buffer_updated(editor, conflict_set, cx); - } + EditorEvent::BufferRangesUpdated { buffer, .. } => { + buffer_ranges_updated(editor, buffer.clone(), cx) + } + EditorEvent::BuffersRemoved { removed_buffer_ids } => { + buffers_removed(editor, removed_buffer_ids, cx) } - EditorEvent::ExcerptsRemoved { - removed_buffer_ids, .. 
- } => buffers_removed(editor, removed_buffer_ids, cx), _ => {} }) .detach(); } -fn excerpt_for_buffer_updated( - editor: &mut Editor, - conflict_set: Entity, - cx: &mut Context, -) { - let conflicts_len = conflict_set.read(cx).snapshot().conflicts.len(); - let buffer_id = conflict_set.read(cx).snapshot().buffer_id; - let Some(buffer_conflicts) = editor - .addon_mut::() - .unwrap() - .buffers - .get(&buffer_id) - else { - return; - }; - let addon_conflicts_len = buffer_conflicts.block_ids.len(); - conflicts_updated( - editor, - conflict_set, - &ConflictSetUpdate { - buffer_range: None, - old_range: 0..addon_conflicts_len, - new_range: 0..conflicts_len, - }, - cx, - ); -} - -#[ztracing::instrument(skip_all)] -fn buffer_added(editor: &mut Editor, buffer: Entity, cx: &mut Context) { +fn buffer_ranges_updated(editor: &mut Editor, buffer: Entity, cx: &mut Context) { let Some(project) = editor.project() else { return; }; @@ -188,14 +148,6 @@ fn conflicts_updated( let conflict_set = conflict_set.read(cx).snapshot(); let multibuffer = editor.buffer().read(cx); let snapshot = multibuffer.snapshot(cx); - let excerpts = multibuffer.excerpts_for_buffer(buffer_id, cx); - let Some(buffer_snapshot) = excerpts - .first() - .and_then(|(excerpt_id, _, _)| snapshot.buffer_for_excerpt(*excerpt_id)) - else { - return; - }; - let old_range = maybe!({ let conflict_addon = editor.addon_mut::().unwrap(); let buffer_conflicts = conflict_addon.buffers.get(&buffer_id)?; @@ -230,23 +182,7 @@ fn conflicts_updated( let mut removed_highlighted_ranges = Vec::new(); let mut removed_block_ids = HashSet::default(); for (conflict_range, block_id) in old_conflicts { - let Some((excerpt_id, _, _)) = excerpts.iter().find(|(_, _, range)| { - let precedes_start = range - .context - .start - .cmp(&conflict_range.start, buffer_snapshot) - .is_le(); - let follows_end = range - .context - .end - .cmp(&conflict_range.start, buffer_snapshot) - .is_ge(); - precedes_start && follows_end - }) else { - continue; - 
}; - let excerpt_id = *excerpt_id; - let Some(range) = snapshot.anchor_range_in_excerpt(excerpt_id, conflict_range) else { + let Some(range) = snapshot.buffer_anchor_range_to_anchor_range(conflict_range) else { continue; }; removed_highlighted_ranges.push(range.clone()); @@ -272,26 +208,9 @@ fn conflicts_updated( let new_conflicts = &conflict_set.conflicts[event.new_range.clone()]; let mut blocks = Vec::new(); for conflict in new_conflicts { - let Some((excerpt_id, _, _)) = excerpts.iter().find(|(_, _, range)| { - let precedes_start = range - .context - .start - .cmp(&conflict.range.start, buffer_snapshot) - .is_le(); - let follows_end = range - .context - .end - .cmp(&conflict.range.start, buffer_snapshot) - .is_ge(); - precedes_start && follows_end - }) else { - continue; - }; - let excerpt_id = *excerpt_id; - - update_conflict_highlighting(editor, conflict, &snapshot, excerpt_id, cx); + update_conflict_highlighting(editor, conflict, &snapshot, cx); - let Some(anchor) = snapshot.anchor_in_excerpt(excerpt_id, conflict.range.start) else { + let Some(anchor) = snapshot.anchor_in_excerpt(conflict.range.start) else { continue; }; @@ -302,7 +221,7 @@ fn conflicts_updated( style: BlockStyle::Sticky, render: Arc::new({ let conflict = conflict.clone(); - move |cx| render_conflict_buttons(&conflict, excerpt_id, editor_handle.clone(), cx) + move |cx| render_conflict_buttons(&conflict, editor_handle.clone(), cx) }), priority: 0, }) @@ -328,14 +247,13 @@ fn update_conflict_highlighting( editor: &mut Editor, conflict: &ConflictRegion, buffer: &editor::MultiBufferSnapshot, - excerpt_id: editor::ExcerptId, cx: &mut Context, ) -> Option<()> { log::debug!("update conflict highlighting for {conflict:?}"); - let outer = buffer.anchor_range_in_excerpt(excerpt_id, conflict.range.clone())?; - let ours = buffer.anchor_range_in_excerpt(excerpt_id, conflict.ours.clone())?; - let theirs = buffer.anchor_range_in_excerpt(excerpt_id, conflict.theirs.clone())?; + let outer = 
buffer.buffer_anchor_range_to_anchor_range(conflict.range.clone())?; + let ours = buffer.buffer_anchor_range_to_anchor_range(conflict.ours.clone())?; + let theirs = buffer.buffer_anchor_range_to_anchor_range(conflict.theirs.clone())?; let ours_background = cx.theme().colors().version_control_conflict_marker_ours; let theirs_background = cx.theme().colors().version_control_conflict_marker_theirs; @@ -373,7 +291,6 @@ fn update_conflict_highlighting( fn render_conflict_buttons( conflict: &ConflictRegion, - excerpt_id: ExcerptId, editor: WeakEntity, cx: &mut BlockContext, ) -> AnyElement { @@ -395,7 +312,6 @@ fn render_conflict_buttons( move |_, window, cx| { resolve_conflict( editor.clone(), - excerpt_id, conflict.clone(), vec![ours.clone()], window, @@ -415,7 +331,6 @@ fn render_conflict_buttons( move |_, window, cx| { resolve_conflict( editor.clone(), - excerpt_id, conflict.clone(), vec![theirs.clone()], window, @@ -436,7 +351,6 @@ fn render_conflict_buttons( move |_, window, cx| { resolve_conflict( editor.clone(), - excerpt_id, conflict.clone(), vec![ours.clone(), theirs.clone()], window, @@ -461,7 +375,7 @@ fn render_conflict_buttons( let content = editor .update(cx, |editor, cx| { let multibuffer = editor.buffer().read(cx); - let buffer_id = conflict.ours.end.buffer_id?; + let buffer_id = conflict.ours.end.buffer_id; let buffer = multibuffer.buffer(buffer_id)?; let buffer_read = buffer.read(cx); let snapshot = buffer_read.snapshot(); @@ -589,7 +503,6 @@ pub(crate) fn register_conflict_notification( pub(crate) fn resolve_conflict( editor: WeakEntity, - excerpt_id: ExcerptId, resolved_conflict: ConflictRegion, ranges: Vec>, window: &mut Window, @@ -601,7 +514,7 @@ pub(crate) fn resolve_conflict( let workspace = editor.workspace()?; let project = editor.project()?.clone(); let multibuffer = editor.buffer().clone(); - let buffer_id = resolved_conflict.ours.end.buffer_id?; + let buffer_id = resolved_conflict.ours.end.buffer_id; let buffer = 
multibuffer.read(cx).buffer(buffer_id)?; resolved_conflict.resolve(buffer.clone(), &ranges, cx); let conflict_addon = editor.addon_mut::().unwrap(); @@ -620,7 +533,7 @@ pub(crate) fn resolve_conflict( .ok()?; let &(_, block_id) = &state.block_ids[ix]; let range = - snapshot.anchor_range_in_excerpt(excerpt_id, resolved_conflict.range)?; + snapshot.buffer_anchor_range_to_anchor_range(resolved_conflict.range)?; editor.remove_gutter_highlights::(vec![range.clone()], cx); diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index c162317b68b917db7054b2a00bf395e482689497..ec8569988200079877d8edc999ebb0dcd155b88c 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -49,7 +49,7 @@ use language_model::{ LanguageModelRequestMessage, Role, }; use menu; -use multi_buffer::ExcerptInfo; +use multi_buffer::ExcerptBoundaryInfo; use notifications::status_toast::{StatusToast, ToastIcon}; use panel::{PanelHeader, panel_button, panel_filled_button, panel_icon_button}; use project::{ @@ -5760,11 +5760,12 @@ impl editor::Addon for GitPanelAddon { fn render_buffer_header_controls( &self, - excerpt_info: &ExcerptInfo, + _excerpt_info: &ExcerptBoundaryInfo, + buffer: &language::BufferSnapshot, window: &Window, cx: &App, ) -> Option { - let file = excerpt_info.buffer.file()?; + let file = buffer.file()?; let git_panel = self.workspace.upgrade()?.read(cx).panel::(cx)?; git_panel diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index ae27b6e51fcb8f72b86f819a1aa4ac05c17c6e5f..8fa4680593a7565c84efd7503f6cf9d188d3be35 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -501,9 +501,11 @@ impl ProjectDiff { pub fn active_path(&self, cx: &App) -> Option { let editor = self.editor.read(cx).focused_editor().read(cx); + let multibuffer = editor.buffer().read(cx); let position = editor.selections.newest_anchor().head(); - let multi_buffer = editor.buffer().read(cx); - let (_, 
buffer, _) = multi_buffer.excerpt_containing(position, cx)?; + let snapshot = multibuffer.snapshot(cx); + let (text_anchor, _) = snapshot.anchor_to_buffer_anchor(position)?; + let buffer = multibuffer.buffer(text_anchor.buffer_id)?; let file = buffer.read(cx).file()?; Some(ProjectPath { @@ -516,9 +518,7 @@ impl ProjectDiff { self.editor.update(cx, |editor, cx| { editor.rhs_editor().update(cx, |editor, cx| { editor.change_selections(Default::default(), window, cx, |s| { - s.select_ranges(vec![ - multi_buffer::Anchor::min()..multi_buffer::Anchor::min(), - ]); + s.select_ranges(vec![multi_buffer::Anchor::Min..multi_buffer::Anchor::Min]); }); }); }); @@ -569,17 +569,17 @@ impl ProjectDiff { .collect::>(); if !ranges.iter().any(|range| range.start != range.end) { selection = false; - if let Some((excerpt_id, _, range)) = self - .editor - .read(cx) - .rhs_editor() - .read(cx) - .active_excerpt(cx) + let anchor = editor.selections.newest_anchor().head(); + if let Some((_, excerpt_range)) = snapshot.excerpt_containing(anchor..anchor) + && let Some(range) = snapshot + .anchor_in_buffer(excerpt_range.context.start) + .zip(snapshot.anchor_in_buffer(excerpt_range.context.end)) + .map(|(start, end)| start..end) { - ranges = vec![multi_buffer::Anchor::range_in_buffer(excerpt_id, range)]; + ranges = vec![range]; } else { ranges = Vec::default(); - } + }; } let mut has_staged_hunks = false; let mut has_unstaged_hunks = false; @@ -715,7 +715,7 @@ impl ProjectDiff { let (was_empty, is_excerpt_newly_added) = self.editor.update(cx, |editor, cx| { let was_empty = editor.rhs_editor().read(cx).buffer().read(cx).is_empty(); - let (_, is_newly_added) = editor.set_excerpts_for_path( + let is_newly_added = editor.update_excerpts_for_path( path_key.clone(), buffer, excerpt_ranges, @@ -735,7 +735,7 @@ impl ProjectDiff { cx, |selections| { selections.select_ranges([ - multi_buffer::Anchor::min()..multi_buffer::Anchor::min() + multi_buffer::Anchor::Min..multi_buffer::Anchor::Min ]) }, ); @@ 
-785,8 +785,9 @@ impl ProjectDiff { let mut previous_paths = this .multibuffer .read(cx) - .paths() - .cloned() + .snapshot(cx) + .buffers_with_paths() + .map(|(_, path_key)| path_key.clone()) .collect::>(); if let Some(repo) = repo { @@ -877,10 +878,23 @@ impl ProjectDiff { #[cfg(any(test, feature = "test-support"))] pub fn excerpt_paths(&self, cx: &App) -> Vec> { - self.multibuffer + let snapshot = self + .editor() + .read(cx) + .rhs_editor() + .read(cx) + .buffer() .read(cx) - .paths() - .map(|key| key.path.clone()) + .snapshot(cx); + snapshot + .excerpts() + .map(|excerpt| { + snapshot + .path_for_buffer(excerpt.context.start.buffer_id) + .unwrap() + .path + .clone() + }) .collect() } } @@ -1937,7 +1951,7 @@ mod tests { let snapshot = buffer_editor.snapshot(window, cx); let snapshot = &snapshot.buffer_snapshot(); let prev_buffer_hunks = buffer_editor - .diff_hunks_in_ranges(&[editor::Anchor::min()..editor::Anchor::max()], snapshot) + .diff_hunks_in_ranges(&[editor::Anchor::Min..editor::Anchor::Max], snapshot) .collect::>(); buffer_editor.git_restore(&Default::default(), window, cx); prev_buffer_hunks @@ -1950,7 +1964,7 @@ mod tests { let snapshot = buffer_editor.snapshot(window, cx); let snapshot = &snapshot.buffer_snapshot(); buffer_editor - .diff_hunks_in_ranges(&[editor::Anchor::min()..editor::Anchor::max()], snapshot) + .diff_hunks_in_ranges(&[editor::Anchor::Min..editor::Anchor::Max], snapshot) .collect::>() }); assert_eq!(new_buffer_hunks.as_slice(), &[]); @@ -2209,9 +2223,14 @@ mod tests { cx.update(|window, cx| { let editor = diff.read(cx).editor.read(cx).rhs_editor().clone(); - let excerpt_ids = editor.read(cx).buffer().read(cx).excerpt_ids(); - assert_eq!(excerpt_ids.len(), 1); - let excerpt_id = excerpt_ids[0]; + let excerpts = editor + .read(cx) + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .collect::>(); + assert_eq!(excerpts.len(), 1); let buffer = editor .read(cx) .buffer() @@ -2239,7 +2258,6 @@ mod tests { resolve_conflict( 
editor.downgrade(), - excerpt_id, snapshot.conflicts[0].clone(), vec![ours_range], window, diff --git a/crates/git_ui/src/text_diff_view.rs b/crates/git_ui/src/text_diff_view.rs index 2dfef13f72681456174737af61380b87caae0ae1..fe2add8177e2c9ca92eb8d08776d561e1adaba91 100644 --- a/crates/git_ui/src/text_diff_view.rs +++ b/crates/git_ui/src/text_diff_view.rs @@ -11,7 +11,7 @@ use gpui::{ AnyElement, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, FocusHandle, Focusable, IntoElement, Render, Task, Window, }; -use language::{self, Buffer, Point}; +use language::{self, Buffer, OffsetRangeExt, Point}; use project::Project; use settings::Settings; use std::{ @@ -52,36 +52,26 @@ impl TextDiffView { let selection_data = source_editor.update(cx, |editor, cx| { let multibuffer = editor.buffer(); - let selections = editor.selections.all::(&editor.display_snapshot(cx)); - let first_selection = selections.first()?; - - let (source_buffer, buffer_start, start_excerpt) = multibuffer - .read(cx) - .point_to_buffer_point(first_selection.start, cx)?; - let buffer_end = multibuffer - .read(cx) - .point_to_buffer_point(first_selection.end, cx) - .and_then(|(buf, pt, end_excerpt)| { - (buf.read(cx).remote_id() == source_buffer.read(cx).remote_id() - && end_excerpt == start_excerpt) - .then_some(pt) - }) - .unwrap_or(buffer_start); + let multibuffer_snapshot = multibuffer.read(cx).snapshot(cx); + let first_selection = editor.selections.newest_anchor(); - let buffer_snapshot = source_buffer.read(cx); - let max_point = buffer_snapshot.max_point(); + let (source_buffer, buffer_range) = multibuffer_snapshot + .anchor_range_to_buffer_anchor_range(first_selection.range())?; + let max_point = source_buffer.max_point(); + let buffer_range = buffer_range.to_point(source_buffer); + let source_buffer = multibuffer.read(cx).buffer(source_buffer.remote_id())?; - if first_selection.is_empty() { + if buffer_range.is_empty() { let full_range = Point::new(0, 0)..max_point; return 
Some((source_buffer, full_range)); } - let expanded_start = Point::new(buffer_start.row, 0); - let expanded_end = if buffer_end.column > 0 { - let next_row = buffer_end.row + 1; + let expanded_start = Point::new(buffer_range.start.row, 0); + let expanded_end = if buffer_range.end.column > 0 { + let next_row = buffer_range.end.row + 1; cmp::min(max_point, Point::new(next_row, 0)) } else { - buffer_end + buffer_range.end }; Some((source_buffer, expanded_start..expanded_end)) }); diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index 042d9a46b6c76a461e60d9002a2362190e253cd4..03bec51ac209fd6e3c254689b3b7caa2695fa450 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -42,23 +42,22 @@ impl UserCaretPosition { snapshot: &MultiBufferSnapshot, ) -> Self { let selection_end = selection.head(); - let (line, character) = if let Some((buffer_snapshot, point, _)) = - snapshot.point_to_buffer_point(selection_end) - { - let line_start = Point::new(point.row, 0); + let (line, character) = + if let Some((buffer_snapshot, point)) = snapshot.point_to_buffer_point(selection_end) { + let line_start = Point::new(point.row, 0); - let chars_to_last_position = buffer_snapshot - .text_summary_for_range::(line_start..point) - .chars as u32; - (line_start.row, chars_to_last_position) - } else { - let line_start = Point::new(selection_end.row, 0); + let chars_to_last_position = buffer_snapshot + .text_summary_for_range::(line_start..point) + .chars as u32; + (line_start.row, chars_to_last_position) + } else { + let line_start = Point::new(selection_end.row, 0); - let chars_to_last_position = snapshot - .text_summary_for_range::(line_start..selection_end) - .chars as u32; - (selection_end.row, chars_to_last_position) - }; + let chars_to_last_position = snapshot + .text_summary_for_range::(line_start..selection_end) + .chars as u32; + (selection_end.row, chars_to_last_position) + }; Self { line: 
NonZeroU32::new(line + 1).expect("added 1"), @@ -232,7 +231,7 @@ impl Render for CursorPosition { if let Some(editor) = workspace .active_item(cx) .and_then(|item| item.act_as::(cx)) - && let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) + && let Some(buffer) = editor.read(cx).active_buffer(cx) { workspace.toggle_modal(window, cx, |window, cx| { crate::GoToLine::new(editor, buffer, window, cx) diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index a5332e96c731a29027ea6a69288d7d9556cb2da0..561d6a7d31398ab2a8eb74042fc1a617b7159d33 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -63,7 +63,7 @@ impl GoToLine { return; }; let editor = editor_handle.read(cx); - let Some((_, buffer, _)) = editor.active_excerpt(cx) else { + let Some(buffer) = editor.active_buffer(cx) else { return; }; workspace.update(cx, |workspace, cx| { @@ -93,11 +93,9 @@ impl GoToLine { let last_line = editor .buffer() .read(cx) - .excerpts_for_buffer(snapshot.remote_id(), cx) - .into_iter() - .map(move |(_, _, range)| { - text::ToPoint::to_point(&range.context.end, &snapshot).row - }) + .snapshot(cx) + .excerpts_for_buffer(snapshot.remote_id()) + .map(move |range| text::ToPoint::to_point(&range.context.end, &snapshot).row) .max() .unwrap_or(0); @@ -230,7 +228,7 @@ impl GoToLine { let character = query_char.unwrap_or(0).saturating_sub(1); let target_multi_buffer_row = MultiBufferRow(row); - let (buffer_snapshot, target_in_buffer, _) = snapshot.point_to_buffer_point(Point::new( + let (buffer_snapshot, target_in_buffer) = snapshot.point_to_buffer_point(Point::new( target_multi_buffer_row.min(snapshot.max_row()).0, 0, ))?; diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index cc4f586a3dce937c310e177eefaff1c81c6a4b89..bdda213dfd0f45c8d57b94bd830f966beb1c0050 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -3067,21 +3067,29 @@ fn 
handle_tooltip_mouse_move( } Action::ScheduleShow => { let delayed_show_task = window.spawn(cx, { - let active_tooltip = active_tooltip.clone(); + let weak_active_tooltip = Rc::downgrade(active_tooltip); let build_tooltip = build_tooltip.clone(); let check_is_hovered_during_prepaint = check_is_hovered_during_prepaint.clone(); async move |cx| { cx.background_executor().timer(TOOLTIP_SHOW_DELAY).await; + let Some(active_tooltip) = weak_active_tooltip.upgrade() else { + return; + }; cx.update(|window, cx| { let new_tooltip = build_tooltip(window, cx).map(|(view, tooltip_is_hoverable)| { - let active_tooltip = active_tooltip.clone(); + let weak_active_tooltip = Rc::downgrade(&active_tooltip); ActiveTooltip::Visible { tooltip: AnyTooltip { view, mouse_position: window.mouse_position(), check_visible_and_update: Rc::new( move |tooltip_bounds, window, cx| { + let Some(active_tooltip) = + weak_active_tooltip.upgrade() + else { + return false; + }; handle_tooltip_check_visible_and_update( &active_tooltip, tooltip_is_hoverable, @@ -3160,11 +3168,14 @@ fn handle_tooltip_check_visible_and_update( Action::Hide => clear_active_tooltip(active_tooltip, window), Action::ScheduleHide(tooltip) => { let delayed_hide_task = window.spawn(cx, { - let active_tooltip = active_tooltip.clone(); + let weak_active_tooltip = Rc::downgrade(active_tooltip); async move |cx| { cx.background_executor() .timer(HOVERABLE_TOOLTIP_HIDE_DELAY) .await; + let Some(active_tooltip) = weak_active_tooltip.upgrade() else { + return; + }; if active_tooltip.borrow_mut().take().is_some() { cx.update(|window, _cx| window.refresh()).ok(); } @@ -3577,6 +3588,112 @@ impl ScrollHandle { #[cfg(test)] mod tests { use super::*; + use crate::{AppContext as _, Context, InputEvent, MouseMoveEvent, TestAppContext}; + use std::rc::Weak; + + struct TestTooltipView; + + impl Render for TestTooltipView { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + 
div().w(px(20.)).h(px(20.)).child("tooltip") + } + } + + type CapturedActiveTooltip = Rc>>>>>; + + struct TooltipCaptureElement { + child: AnyElement, + captured_active_tooltip: CapturedActiveTooltip, + } + + impl IntoElement for TooltipCaptureElement { + type Element = Self; + + fn into_element(self) -> Self::Element { + self + } + } + + impl Element for TooltipCaptureElement { + type RequestLayoutState = (); + type PrepaintState = (); + + fn id(&self) -> Option { + None + } + + fn source_location(&self) -> Option<&'static core::panic::Location<'static>> { + None + } + + fn request_layout( + &mut self, + _id: Option<&GlobalElementId>, + _inspector_id: Option<&InspectorElementId>, + window: &mut Window, + cx: &mut App, + ) -> (LayoutId, Self::RequestLayoutState) { + (self.child.request_layout(window, cx), ()) + } + + fn prepaint( + &mut self, + _id: Option<&GlobalElementId>, + _inspector_id: Option<&InspectorElementId>, + _bounds: Bounds, + _request_layout: &mut Self::RequestLayoutState, + window: &mut Window, + cx: &mut App, + ) -> Self::PrepaintState { + self.child.prepaint(window, cx); + } + + fn paint( + &mut self, + _id: Option<&GlobalElementId>, + _inspector_id: Option<&InspectorElementId>, + _bounds: Bounds, + _request_layout: &mut Self::RequestLayoutState, + _prepaint: &mut Self::PrepaintState, + window: &mut Window, + cx: &mut App, + ) { + self.child.paint(window, cx); + window.with_global_id("target".into(), |global_id, window| { + window.with_element_state::( + global_id, + |state, _window| { + let state = state.unwrap(); + *self.captured_active_tooltip.borrow_mut() = + state.active_tooltip.as_ref().map(Rc::downgrade); + ((), state) + }, + ) + }); + } + } + + struct TooltipOwner { + captured_active_tooltip: CapturedActiveTooltip, + } + + impl Render for TooltipOwner { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + TooltipCaptureElement { + child: div() + .size_full() + .child( + div() + .id("target") + .w(px(50.)) 
+ .h(px(50.)) + .tooltip(|_, cx| cx.new(|_| TestTooltipView).into()), + ) + .into_any_element(), + captured_active_tooltip: self.captured_active_tooltip.clone(), + } + } + } #[test] fn scroll_handle_aligns_wide_children_to_left_edge() { @@ -3615,4 +3732,96 @@ mod tests { assert_eq!(handle.offset().y, px(-25.)); } + + fn setup_tooltip_owner_test() -> ( + TestAppContext, + crate::AnyWindowHandle, + CapturedActiveTooltip, + ) { + let mut test_app = TestAppContext::single(); + let captured_active_tooltip: CapturedActiveTooltip = Rc::new(RefCell::new(None)); + let window = test_app.add_window({ + let captured_active_tooltip = captured_active_tooltip.clone(); + move |_, _| TooltipOwner { + captured_active_tooltip, + } + }); + let any_window = window.into(); + + test_app + .update_window(any_window, |_, window, cx| { + window.draw(cx).clear(); + }) + .unwrap(); + + test_app + .update_window(any_window, |_, window, cx| { + window.dispatch_event( + MouseMoveEvent { + position: point(px(10.), px(10.)), + modifiers: Default::default(), + pressed_button: None, + } + .to_platform_input(), + cx, + ); + }) + .unwrap(); + + test_app + .update_window(any_window, |_, window, cx| { + window.draw(cx).clear(); + }) + .unwrap(); + + (test_app, any_window, captured_active_tooltip) + } + + #[test] + fn tooltip_waiting_for_show_is_released_when_its_owner_disappears() { + let (mut test_app, any_window, captured_active_tooltip) = setup_tooltip_owner_test(); + + let weak_active_tooltip = captured_active_tooltip.borrow().clone().unwrap(); + let active_tooltip = weak_active_tooltip.upgrade().unwrap(); + assert!(matches!( + active_tooltip.borrow().as_ref(), + Some(ActiveTooltip::WaitingForShow { .. 
}) + )); + + test_app + .update_window(any_window, |_, window, _| { + window.remove_window(); + }) + .unwrap(); + test_app.run_until_parked(); + drop(active_tooltip); + + assert!(weak_active_tooltip.upgrade().is_none()); + } + + #[test] + fn tooltip_is_released_when_its_owner_disappears() { + let (mut test_app, any_window, captured_active_tooltip) = setup_tooltip_owner_test(); + + let weak_active_tooltip = captured_active_tooltip.borrow().clone().unwrap(); + let active_tooltip = weak_active_tooltip.upgrade().unwrap(); + + test_app.dispatcher.advance_clock(TOOLTIP_SHOW_DELAY); + test_app.run_until_parked(); + + assert!(matches!( + active_tooltip.borrow().as_ref(), + Some(ActiveTooltip::Visible { .. }) + )); + + test_app + .update_window(any_window, |_, window, _| { + window.remove_window(); + }) + .unwrap(); + test_app.run_until_parked(); + drop(active_tooltip); + + assert!(weak_active_tooltip.upgrade().is_none()); + } } diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 7790480e32149fa33dfd082df7a8cdbb09568134..f9885f634d962b167bcf32cc459d5bf6e0d5661e 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -5496,6 +5496,8 @@ pub enum ElementId { CodeLocation(core::panic::Location<'static>), /// A labeled child of an element. 
NamedChild(Arc, SharedString), + /// A byte array ID (used for text-anchors) + OpaqueId([u8; 20]), } impl ElementId { @@ -5517,6 +5519,7 @@ impl Display for ElementId { ElementId::Path(path) => write!(f, "{}", path.display())?, ElementId::CodeLocation(location) => write!(f, "{}", location)?, ElementId::NamedChild(id, name) => write!(f, "{}-{}", id, name)?, + ElementId::OpaqueId(opaque_id) => write!(f, "{:x?}", opaque_id)?, } Ok(()) @@ -5631,6 +5634,12 @@ impl From<&'static core::panic::Location<'static>> for ElementId { } } +impl From<[u8; 20]> for ElementId { + fn from(opaque_id: [u8; 20]) -> Self { + ElementId::OpaqueId(opaque_id) + } +} + /// A rectangle to be rendered in the window at the given position and size. /// Passed as an argument [`Window::paint_quad`]. #[derive(Clone)] diff --git a/crates/inspector_ui/src/div_inspector.rs b/crates/inspector_ui/src/div_inspector.rs index a7616e134a16bbe2b96a6d23d20453b9a5ee4e5f..7ec2d7ba8303e899331d3f38642a9a51f4c14d4c 100644 --- a/crates/inspector_ui/src/div_inspector.rs +++ b/crates/inspector_ui/src/div_inspector.rs @@ -1,7 +1,6 @@ use anyhow::{Result, anyhow}; use editor::{ - Bias, CompletionProvider, Editor, EditorEvent, EditorMode, ExcerptId, MinimapVisibility, - MultiBuffer, + Bias, CompletionProvider, Editor, EditorEvent, EditorMode, MinimapVisibility, MultiBuffer, }; use fuzzy::StringMatch; use gpui::{ @@ -641,7 +640,6 @@ struct RustStyleCompletionProvider { impl CompletionProvider for RustStyleCompletionProvider { fn completions( &self, - _excerpt_id: ExcerptId, buffer: &Entity, position: Anchor, _: editor::CompletionContext, diff --git a/crates/keymap_editor/src/action_completion_provider.rs b/crates/keymap_editor/src/action_completion_provider.rs index 98428baeb2f7b419ba7354130e12f1a4710c8aea..10d977572b9c52cba1ad9d87c7035bd1552d5e33 100644 --- a/crates/keymap_editor/src/action_completion_provider.rs +++ b/crates/keymap_editor/src/action_completion_provider.rs @@ -26,7 +26,6 @@ impl ActionCompletionProvider 
{ impl CompletionProvider for ActionCompletionProvider { fn completions( &self, - _excerpt_id: editor::ExcerptId, buffer: &Entity, buffer_position: language::Anchor, _trigger: editor::CompletionContext, diff --git a/crates/keymap_editor/src/keymap_editor.rs b/crates/keymap_editor/src/keymap_editor.rs index 6a02289353f7fc0df8fd2b3fd99313d2ce650951..ee9f6a11c2b51f7993b17c01352cfb97b535049a 100644 --- a/crates/keymap_editor/src/keymap_editor.rs +++ b/crates/keymap_editor/src/keymap_editor.rs @@ -31,10 +31,10 @@ use settings::{ BaseKeymap, KeybindSource, KeymapFile, Settings as _, SettingsAssets, infer_json_indent_size, }; use ui::{ - ActiveTheme as _, App, Banner, BorrowAppContext, ContextMenu, IconButtonShape, IconPosition, - Indicator, Modal, ModalFooter, ModalHeader, ParentElement as _, PopoverMenu, Render, Section, - SharedString, Styled as _, Table, TableColumnWidths, TableInteractionState, - TableResizeBehavior, Tooltip, Window, prelude::*, + ActiveTheme as _, App, Banner, BorrowAppContext, ColumnWidthConfig, ContextMenu, + IconButtonShape, IconPosition, Indicator, Modal, ModalFooter, ModalHeader, ParentElement as _, + PopoverMenu, RedistributableColumnsState, Render, Section, SharedString, Styled as _, Table, + TableInteractionState, TableResizeBehavior, Tooltip, Window, prelude::*, }; use ui_input::InputField; use util::ResultExt; @@ -450,7 +450,7 @@ struct KeymapEditor { context_menu: Option<(Entity, Point, Subscription)>, previous_edit: Option, humanized_action_names: HumanizedActionNameCache, - current_widths: Entity, + current_widths: Entity, show_hover_menus: bool, actions_with_schemas: HashSet<&'static str>, /// In order for the JSON LSP to run in the actions arguments editor, we @@ -623,7 +623,27 @@ impl KeymapEditor { actions_with_schemas: HashSet::default(), action_args_temp_dir: None, action_args_temp_dir_worktree: None, - current_widths: cx.new(|cx| TableColumnWidths::new(COLS, cx)), + current_widths: cx.new(|_cx| { + 
RedistributableColumnsState::new( + COLS, + vec![ + DefiniteLength::Absolute(AbsoluteLength::Pixels(px(36.))), + DefiniteLength::Fraction(0.25), + DefiniteLength::Fraction(0.20), + DefiniteLength::Fraction(0.14), + DefiniteLength::Fraction(0.45), + DefiniteLength::Fraction(0.08), + ], + vec![ + TableResizeBehavior::None, + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, + ], + ) + }), }; this.on_keymap_changed(window, cx); @@ -2095,26 +2115,9 @@ impl Render for KeymapEditor { let this = cx.entity(); move |window, cx| this.read(cx).render_no_matches_hint(window, cx) }) - .column_widths(vec![ - DefiniteLength::Absolute(AbsoluteLength::Pixels(px(36.))), - DefiniteLength::Fraction(0.25), - DefiniteLength::Fraction(0.20), - DefiniteLength::Fraction(0.14), - DefiniteLength::Fraction(0.45), - DefiniteLength::Fraction(0.08), - ]) - .resizable_columns( - vec![ - TableResizeBehavior::None, - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, - TableResizeBehavior::Resizable, // this column doesn't matter - ], - &self.current_widths, - cx, - ) + .width_config(ColumnWidthConfig::redistributable( + self.current_widths.clone(), + )) .header(vec!["", "Action", "Arguments", "Keystrokes", "Context", "Source"]) .uniform_list( "keymap-editor-table", @@ -3477,7 +3480,6 @@ struct KeyContextCompletionProvider { impl CompletionProvider for KeyContextCompletionProvider { fn completions( &self, - _excerpt_id: editor::ExcerptId, buffer: &Entity, buffer_position: language::Anchor, _trigger: editor::CompletionContext, diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index fa3263df48ff773b32332980e7341fa8a453ba4f..04564ecd6575f9470315e0571a60126c69d81d2b 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ 
-326,23 +326,17 @@ impl DiagnosticEntry { } } -impl Default for Summary { - fn default() -> Self { - Self { - start: Anchor::MIN, - end: Anchor::MAX, - min_start: Anchor::MAX, - max_end: Anchor::MIN, - count: 0, - } - } -} - impl sum_tree::Summary for Summary { type Context<'a> = &'a text::BufferSnapshot; - fn zero(_cx: Self::Context<'_>) -> Self { - Default::default() + fn zero(buffer: &text::BufferSnapshot) -> Self { + Self { + start: Anchor::min_for_buffer(buffer.remote_id()), + end: Anchor::max_for_buffer(buffer.remote_id()), + min_start: Anchor::max_for_buffer(buffer.remote_id()), + max_end: Anchor::min_for_buffer(buffer.remote_id()), + count: 0, + } } fn add_summary(&mut self, other: &Self, buffer: Self::Context<'_>) { diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 89c44513067f6d2309d68a9f38984988358d8877..5e3179e929da012cce8e7da6b436e89c0c4519de 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -174,11 +174,11 @@ pub fn serialize_selection(selection: &Selection) -> proto::Selection { id: selection.id as u64, start: Some(proto::EditorAnchor { anchor: Some(serialize_anchor(&selection.start)), - excerpt_id: 0, + excerpt_id: None, }), end: Some(proto::EditorAnchor { anchor: Some(serialize_anchor(&selection.end)), - excerpt_id: 0, + excerpt_id: None, }), reversed: selection.reversed, } @@ -260,7 +260,7 @@ pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor { Bias::Left => proto::Bias::Left as i32, Bias::Right => proto::Bias::Right as i32, }, - buffer_id: anchor.buffer_id.map(Into::into), + buffer_id: Some(anchor.buffer_id.into()), } } @@ -498,7 +498,7 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option { timestamp, anchor.offset as u32, bias, - buffer_id, + buffer_id?, )) } diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index f2f79b9a793f303fef66fb4266d67f1fbd2ed52d..b73276ffd92be8915e2272b5242770fc52854af1 100644 --- 
a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -18,7 +18,7 @@ use std::{ }; use streaming_iterator::StreamingIterator; use sum_tree::{Bias, Dimensions, SeekTarget, SumTree}; -use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint}; +use text::{Anchor, BufferId, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint}; use tree_sitter::{ Node, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatch, QueryMatches, QueryPredicateArg, @@ -56,7 +56,15 @@ impl Drop for SyntaxSnapshot { // This does allocate a new Arc, but it's cheap and avoids blocking the main thread without needing to use an `Option` or `MaybeUninit`. let _ = DROP_TX.send(std::mem::replace( &mut self.layers, - SumTree::from_summary(Default::default()), + SumTree::from_summary(SyntaxLayerSummary { + min_depth: Default::default(), + max_depth: Default::default(), + // Deliberately bogus anchors, doesn't matter in this context + range: Anchor::min_min_range_for_buffer(BufferId::new(1).unwrap()), + last_layer_range: Anchor::min_min_range_for_buffer(BufferId::new(1).unwrap()), + last_layer_language: Default::default(), + contains_unknown_injections: Default::default(), + }), )); } } @@ -588,7 +596,7 @@ impl SyntaxSnapshot { let bounded_position = SyntaxLayerPositionBeforeChange { position: position.clone(), - change: changed_regions.start_position(), + change: changed_regions.start_position(text.remote_id()), }; if bounded_position.cmp(cursor.start(), text).is_gt() { let slice = cursor.slice(&bounded_position, Bias::Left); @@ -1946,11 +1954,11 @@ impl ChangedRegion { } impl ChangeRegionSet { - fn start_position(&self) -> ChangeStartPosition { + fn start_position(&self, buffer_id: BufferId) -> ChangeStartPosition { self.0.first().map_or( ChangeStartPosition { depth: usize::MAX, - position: Anchor::MAX, + position: Anchor::max_for_buffer(buffer_id), }, |region| ChangeStartPosition { depth: region.depth, @@ -1999,32 +2007,28 @@ impl 
ChangeRegionSet { } } -impl Default for SyntaxLayerSummary { - fn default() -> Self { +impl sum_tree::Summary for SyntaxLayerSummary { + type Context<'a> = &'a BufferSnapshot; + + fn zero(buffer: &BufferSnapshot) -> Self { Self { max_depth: 0, min_depth: 0, - range: Anchor::MAX..Anchor::MIN, - last_layer_range: Anchor::MIN..Anchor::MAX, + range: Anchor::max_for_buffer(buffer.remote_id()) + ..Anchor::min_for_buffer(buffer.remote_id()), + last_layer_range: Anchor::min_for_buffer(buffer.remote_id()) + ..Anchor::max_for_buffer(buffer.remote_id()), last_layer_language: None, contains_unknown_injections: false, } } -} - -impl sum_tree::Summary for SyntaxLayerSummary { - type Context<'a> = &'a BufferSnapshot; - - fn zero(_cx: &BufferSnapshot) -> Self { - Default::default() - } fn add_summary(&mut self, other: &Self, buffer: Self::Context<'_>) { if other.max_depth > self.max_depth { self.max_depth = other.max_depth; self.range = other.range.clone(); } else { - if self.range == (Anchor::MAX..Anchor::MAX) { + if self.range.start.is_max() && self.range.end.is_max() { self.range.start = other.range.start; } if other.range.end.cmp(&self.range.end, buffer).is_gt() { diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index 2e802e4205fb82aa89cc6beb67ed9e3e68ed1cf6..911100fc25b498ba5471c85d6177052495974665 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -37,7 +37,6 @@ parking_lot.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true -settings.workspace = true smol.workspace = true thiserror.workspace = true util.workspace = true diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index 2f715007ec4d8c2906c0254d4bb458d056d8585e..ce71cee6bcaf4f7ea1e210cc3756bd3162715f55 100644 --- a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -1,16 +1,15 @@ mod api_key; mod model; +mod 
provider; mod rate_limiter; mod registry; mod request; mod role; -mod telemetry; pub mod tool_schema; #[cfg(any(test, feature = "test-support"))] pub mod fake_provider; -use anthropic::{AnthropicError, parse_prompt_too_long}; use anyhow::{Result, anyhow}; use client::Client; use client::UserStore; @@ -20,10 +19,8 @@ use futures::{StreamExt, future::BoxFuture, stream::BoxStream}; use gpui::{AnyView, App, AsyncApp, Entity, SharedString, Task, Window}; use http_client::{StatusCode, http}; use icons::IconName; -use open_router::OpenRouterError; use parking_lot::Mutex; use serde::{Deserialize, Serialize}; -pub use settings::LanguageModelCacheConfiguration; use std::ops::{Add, Sub}; use std::str::FromStr; use std::sync::Arc; @@ -38,30 +35,10 @@ pub use crate::rate_limiter::*; pub use crate::registry::*; pub use crate::request::*; pub use crate::role::*; -pub use crate::telemetry::*; pub use crate::tool_schema::LanguageModelToolSchemaFormat; +pub use provider::*; pub use zed_env_vars::{EnvVar, env_var}; -pub const ANTHROPIC_PROVIDER_ID: LanguageModelProviderId = - LanguageModelProviderId::new("anthropic"); -pub const ANTHROPIC_PROVIDER_NAME: LanguageModelProviderName = - LanguageModelProviderName::new("Anthropic"); - -pub const GOOGLE_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("google"); -pub const GOOGLE_PROVIDER_NAME: LanguageModelProviderName = - LanguageModelProviderName::new("Google AI"); - -pub const OPEN_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openai"); -pub const OPEN_AI_PROVIDER_NAME: LanguageModelProviderName = - LanguageModelProviderName::new("OpenAI"); - -pub const X_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("x_ai"); -pub const X_AI_PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("xAI"); - -pub const ZED_CLOUD_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("zed.dev"); -pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName 
= - LanguageModelProviderName::new("Zed"); - pub fn init(user_store: Entity, client: Arc, cx: &mut App) { init_settings(cx); RefreshLlmTokenListener::register(client, user_store, cx); @@ -71,6 +48,13 @@ pub fn init_settings(cx: &mut App) { registry::init(cx); } +#[derive(Clone, Debug)] +pub struct LanguageModelCacheConfiguration { + pub max_cache_anchors: usize, + pub should_speculate: bool, + pub min_total_token: u64, +} + /// A completion event from a language model. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub enum LanguageModelCompletionEvent { @@ -310,165 +294,6 @@ impl LanguageModelCompletionError { } } -impl From for LanguageModelCompletionError { - fn from(error: AnthropicError) -> Self { - let provider = ANTHROPIC_PROVIDER_NAME; - match error { - AnthropicError::SerializeRequest(error) => Self::SerializeRequest { provider, error }, - AnthropicError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error }, - AnthropicError::HttpSend(error) => Self::HttpSend { provider, error }, - AnthropicError::DeserializeResponse(error) => { - Self::DeserializeResponse { provider, error } - } - AnthropicError::ReadResponse(error) => Self::ApiReadResponseError { provider, error }, - AnthropicError::HttpResponseError { - status_code, - message, - } => Self::HttpResponseError { - provider, - status_code, - message, - }, - AnthropicError::RateLimit { retry_after } => Self::RateLimitExceeded { - provider, - retry_after: Some(retry_after), - }, - AnthropicError::ServerOverloaded { retry_after } => Self::ServerOverloaded { - provider, - retry_after, - }, - AnthropicError::ApiError(api_error) => api_error.into(), - } - } -} - -impl From for LanguageModelCompletionError { - fn from(error: anthropic::ApiError) -> Self { - use anthropic::ApiErrorCode::*; - let provider = ANTHROPIC_PROVIDER_NAME; - match error.code() { - Some(code) => match code { - InvalidRequestError => Self::BadRequestFormat { - provider, - message: error.message, - }, - 
AuthenticationError => Self::AuthenticationError { - provider, - message: error.message, - }, - PermissionError => Self::PermissionError { - provider, - message: error.message, - }, - NotFoundError => Self::ApiEndpointNotFound { provider }, - RequestTooLarge => Self::PromptTooLarge { - tokens: parse_prompt_too_long(&error.message), - }, - RateLimitError => Self::RateLimitExceeded { - provider, - retry_after: None, - }, - ApiError => Self::ApiInternalServerError { - provider, - message: error.message, - }, - OverloadedError => Self::ServerOverloaded { - provider, - retry_after: None, - }, - }, - None => Self::Other(error.into()), - } - } -} - -impl From for LanguageModelCompletionError { - fn from(error: open_ai::RequestError) -> Self { - match error { - open_ai::RequestError::HttpResponseError { - provider, - status_code, - body, - headers, - } => { - let retry_after = headers - .get(http::header::RETRY_AFTER) - .and_then(|val| val.to_str().ok()?.parse::().ok()) - .map(Duration::from_secs); - - Self::from_http_status(provider.into(), status_code, body, retry_after) - } - open_ai::RequestError::Other(e) => Self::Other(e), - } - } -} - -impl From for LanguageModelCompletionError { - fn from(error: OpenRouterError) -> Self { - let provider = LanguageModelProviderName::new("OpenRouter"); - match error { - OpenRouterError::SerializeRequest(error) => Self::SerializeRequest { provider, error }, - OpenRouterError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error }, - OpenRouterError::HttpSend(error) => Self::HttpSend { provider, error }, - OpenRouterError::DeserializeResponse(error) => { - Self::DeserializeResponse { provider, error } - } - OpenRouterError::ReadResponse(error) => Self::ApiReadResponseError { provider, error }, - OpenRouterError::RateLimit { retry_after } => Self::RateLimitExceeded { - provider, - retry_after: Some(retry_after), - }, - OpenRouterError::ServerOverloaded { retry_after } => Self::ServerOverloaded { - provider, - retry_after, 
- }, - OpenRouterError::ApiError(api_error) => api_error.into(), - } - } -} - -impl From for LanguageModelCompletionError { - fn from(error: open_router::ApiError) -> Self { - use open_router::ApiErrorCode::*; - let provider = LanguageModelProviderName::new("OpenRouter"); - match error.code { - InvalidRequestError => Self::BadRequestFormat { - provider, - message: error.message, - }, - AuthenticationError => Self::AuthenticationError { - provider, - message: error.message, - }, - PaymentRequiredError => Self::AuthenticationError { - provider, - message: format!("Payment required: {}", error.message), - }, - PermissionError => Self::PermissionError { - provider, - message: error.message, - }, - RequestTimedOut => Self::HttpResponseError { - provider, - status_code: StatusCode::REQUEST_TIMEOUT, - message: error.message, - }, - RateLimitError => Self::RateLimitExceeded { - provider, - retry_after: None, - }, - ApiError => Self::ApiInternalServerError { - provider, - message: error.message, - }, - OverloadedError => Self::ServerOverloaded { - provider, - retry_after: None, - }, - } - } -} - #[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)] #[serde(rename_all = "snake_case")] pub enum StopReason { diff --git a/crates/language_model/src/provider.rs b/crates/language_model/src/provider.rs new file mode 100644 index 0000000000000000000000000000000000000000..707d8e2d618894e2898e253450dbfbb5e9483bba --- /dev/null +++ b/crates/language_model/src/provider.rs @@ -0,0 +1,12 @@ +pub mod anthropic; +pub mod google; +pub mod open_ai; +pub mod open_router; +pub mod x_ai; +pub mod zed; + +pub use anthropic::*; +pub use google::*; +pub use open_ai::*; +pub use x_ai::*; +pub use zed::*; diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs new file mode 100644 index 0000000000000000000000000000000000000000..0878be2070fdbb9e57145684f59c962a32bb9fd2 --- /dev/null +++ b/crates/language_model/src/provider/anthropic.rs 
@@ -0,0 +1,80 @@ +use crate::{LanguageModelCompletionError, LanguageModelProviderId, LanguageModelProviderName}; +use anthropic::AnthropicError; +pub use anthropic::parse_prompt_too_long; + +pub const ANTHROPIC_PROVIDER_ID: LanguageModelProviderId = + LanguageModelProviderId::new("anthropic"); +pub const ANTHROPIC_PROVIDER_NAME: LanguageModelProviderName = + LanguageModelProviderName::new("Anthropic"); + +impl From for LanguageModelCompletionError { + fn from(error: AnthropicError) -> Self { + let provider = ANTHROPIC_PROVIDER_NAME; + match error { + AnthropicError::SerializeRequest(error) => Self::SerializeRequest { provider, error }, + AnthropicError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error }, + AnthropicError::HttpSend(error) => Self::HttpSend { provider, error }, + AnthropicError::DeserializeResponse(error) => { + Self::DeserializeResponse { provider, error } + } + AnthropicError::ReadResponse(error) => Self::ApiReadResponseError { provider, error }, + AnthropicError::HttpResponseError { + status_code, + message, + } => Self::HttpResponseError { + provider, + status_code, + message, + }, + AnthropicError::RateLimit { retry_after } => Self::RateLimitExceeded { + provider, + retry_after: Some(retry_after), + }, + AnthropicError::ServerOverloaded { retry_after } => Self::ServerOverloaded { + provider, + retry_after, + }, + AnthropicError::ApiError(api_error) => api_error.into(), + } + } +} + +impl From for LanguageModelCompletionError { + fn from(error: anthropic::ApiError) -> Self { + use anthropic::ApiErrorCode::*; + let provider = ANTHROPIC_PROVIDER_NAME; + match error.code() { + Some(code) => match code { + InvalidRequestError => Self::BadRequestFormat { + provider, + message: error.message, + }, + AuthenticationError => Self::AuthenticationError { + provider, + message: error.message, + }, + PermissionError => Self::PermissionError { + provider, + message: error.message, + }, + NotFoundError => Self::ApiEndpointNotFound { provider 
}, + RequestTooLarge => Self::PromptTooLarge { + tokens: parse_prompt_too_long(&error.message), + }, + RateLimitError => Self::RateLimitExceeded { + provider, + retry_after: None, + }, + ApiError => Self::ApiInternalServerError { + provider, + message: error.message, + }, + OverloadedError => Self::ServerOverloaded { + provider, + retry_after: None, + }, + }, + None => Self::Other(error.into()), + } + } +} diff --git a/crates/language_model/src/provider/google.rs b/crates/language_model/src/provider/google.rs new file mode 100644 index 0000000000000000000000000000000000000000..1caee496b519f395dd10744b127bc29ee893849f --- /dev/null +++ b/crates/language_model/src/provider/google.rs @@ -0,0 +1,5 @@ +use crate::{LanguageModelProviderId, LanguageModelProviderName}; + +pub const GOOGLE_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("google"); +pub const GOOGLE_PROVIDER_NAME: LanguageModelProviderName = + LanguageModelProviderName::new("Google AI"); diff --git a/crates/language_model/src/provider/open_ai.rs b/crates/language_model/src/provider/open_ai.rs new file mode 100644 index 0000000000000000000000000000000000000000..3796eb9a3aef78628c52d92e92fabb3812249e04 --- /dev/null +++ b/crates/language_model/src/provider/open_ai.rs @@ -0,0 +1,28 @@ +use crate::{LanguageModelCompletionError, LanguageModelProviderId, LanguageModelProviderName}; +use http_client::http; +use std::time::Duration; + +pub const OPEN_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("openai"); +pub const OPEN_AI_PROVIDER_NAME: LanguageModelProviderName = + LanguageModelProviderName::new("OpenAI"); + +impl From for LanguageModelCompletionError { + fn from(error: open_ai::RequestError) -> Self { + match error { + open_ai::RequestError::HttpResponseError { + provider, + status_code, + body, + headers, + } => { + let retry_after = headers + .get(http::header::RETRY_AFTER) + .and_then(|val| val.to_str().ok()?.parse::().ok()) + .map(Duration::from_secs); + + 
Self::from_http_status(provider.into(), status_code, body, retry_after) + } + open_ai::RequestError::Other(e) => Self::Other(e), + } + } +} diff --git a/crates/language_model/src/provider/open_router.rs b/crates/language_model/src/provider/open_router.rs new file mode 100644 index 0000000000000000000000000000000000000000..809e22f1fec0f2d205caa3ebbcb0baaf129b062c --- /dev/null +++ b/crates/language_model/src/provider/open_router.rs @@ -0,0 +1,69 @@ +use crate::{LanguageModelCompletionError, LanguageModelProviderName}; +use http_client::StatusCode; +use open_router::OpenRouterError; + +impl From for LanguageModelCompletionError { + fn from(error: OpenRouterError) -> Self { + let provider = LanguageModelProviderName::new("OpenRouter"); + match error { + OpenRouterError::SerializeRequest(error) => Self::SerializeRequest { provider, error }, + OpenRouterError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error }, + OpenRouterError::HttpSend(error) => Self::HttpSend { provider, error }, + OpenRouterError::DeserializeResponse(error) => { + Self::DeserializeResponse { provider, error } + } + OpenRouterError::ReadResponse(error) => Self::ApiReadResponseError { provider, error }, + OpenRouterError::RateLimit { retry_after } => Self::RateLimitExceeded { + provider, + retry_after: Some(retry_after), + }, + OpenRouterError::ServerOverloaded { retry_after } => Self::ServerOverloaded { + provider, + retry_after, + }, + OpenRouterError::ApiError(api_error) => api_error.into(), + } + } +} + +impl From for LanguageModelCompletionError { + fn from(error: open_router::ApiError) -> Self { + use open_router::ApiErrorCode::*; + let provider = LanguageModelProviderName::new("OpenRouter"); + match error.code { + InvalidRequestError => Self::BadRequestFormat { + provider, + message: error.message, + }, + AuthenticationError => Self::AuthenticationError { + provider, + message: error.message, + }, + PaymentRequiredError => Self::AuthenticationError { + provider, + message: 
format!("Payment required: {}", error.message), + }, + PermissionError => Self::PermissionError { + provider, + message: error.message, + }, + RequestTimedOut => Self::HttpResponseError { + provider, + status_code: StatusCode::REQUEST_TIMEOUT, + message: error.message, + }, + RateLimitError => Self::RateLimitExceeded { + provider, + retry_after: None, + }, + ApiError => Self::ApiInternalServerError { + provider, + message: error.message, + }, + OverloadedError => Self::ServerOverloaded { + provider, + retry_after: None, + }, + } + } +} diff --git a/crates/language_model/src/provider/x_ai.rs b/crates/language_model/src/provider/x_ai.rs new file mode 100644 index 0000000000000000000000000000000000000000..3d0f794fa4087a4beeb4a9b6253d016a9b592f0e --- /dev/null +++ b/crates/language_model/src/provider/x_ai.rs @@ -0,0 +1,4 @@ +use crate::{LanguageModelProviderId, LanguageModelProviderName}; + +pub const X_AI_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("x_ai"); +pub const X_AI_PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("xAI"); diff --git a/crates/language_model/src/provider/zed.rs b/crates/language_model/src/provider/zed.rs new file mode 100644 index 0000000000000000000000000000000000000000..0ba793e99aad1caa25f049a96faf02c16e8970fa --- /dev/null +++ b/crates/language_model/src/provider/zed.rs @@ -0,0 +1,5 @@ +use crate::{LanguageModelProviderId, LanguageModelProviderName}; + +pub const ZED_CLOUD_PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("zed.dev"); +pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName = + LanguageModelProviderName::new("Zed"); diff --git a/crates/language_model/src/registry.rs b/crates/language_model/src/registry.rs index cf7718f7b102010cc0c8a981a0425583436176b7..bf14fbb0b5804505b33074e6e4cbcc36ddf21fab 100644 --- a/crates/language_model/src/registry.rs +++ b/crates/language_model/src/registry.rs @@ -101,7 +101,7 @@ impl ConfiguredModel { } pub fn 
is_provided_by_zed(&self) -> bool { - self.provider.id() == crate::ZED_CLOUD_PROVIDER_ID + self.provider.id() == crate::provider::ZED_CLOUD_PROVIDER_ID } } diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index 1fd79fb3a93d978d0912abbc4f0688e0bbe846e6..a98a0ce142dfdbaaaddc056ab378455a45147830 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -1,3 +1,5 @@ +pub mod telemetry; + use anthropic::{ ANTHROPIC_API_URL, AnthropicError, AnthropicModelMode, ContentDelta, CountTokensRequest, Event, ResponseContent, ToolResultContent, ToolResultPart, Usage, @@ -8,7 +10,8 @@ use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream use gpui::{AnyView, App, AsyncApp, Context, Entity, Task}; use http_client::HttpClient; use language_model::{ - ApiKeyState, AuthenticateError, ConfigurationViewTargetAgent, EnvVar, IconOrSvg, LanguageModel, + ANTHROPIC_PROVIDER_ID, ANTHROPIC_PROVIDER_NAME, ApiKeyState, AuthenticateError, + ConfigurationViewTargetAgent, EnvVar, IconOrSvg, LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, @@ -28,8 +31,8 @@ use crate::provider::util::{fix_streamed_json, parse_tool_arguments}; pub use settings::AnthropicAvailableModel as AvailableModel; -const PROVIDER_ID: LanguageModelProviderId = language_model::ANTHROPIC_PROVIDER_ID; -const PROVIDER_NAME: LanguageModelProviderName = language_model::ANTHROPIC_PROVIDER_NAME; +const PROVIDER_ID: LanguageModelProviderId = ANTHROPIC_PROVIDER_ID; +const PROVIDER_NAME: LanguageModelProviderName = ANTHROPIC_PROVIDER_NAME; #[derive(Default, Clone, Debug, PartialEq)] pub struct AnthropicSettings { diff --git a/crates/language_model/src/telemetry.rs 
b/crates/language_models/src/provider/anthropic/telemetry.rs similarity index 95% rename from crates/language_model/src/telemetry.rs rename to crates/language_models/src/provider/anthropic/telemetry.rs index 6d7f4df7f644115cae7b2148f4d78fde19674344..75fb11a81b479635ea02db77a2df8a769e795e01 100644 --- a/crates/language_model/src/telemetry.rs +++ b/crates/language_models/src/provider/anthropic/telemetry.rs @@ -1,8 +1,8 @@ -use crate::ANTHROPIC_PROVIDER_ID; use anthropic::ANTHROPIC_API_URL; use anyhow::{Context as _, anyhow}; use gpui::BackgroundExecutor; use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; +use language_model::{ANTHROPIC_PROVIDER_ID, LanguageModel}; use std::env; use std::sync::Arc; use util::ResultExt; @@ -52,7 +52,7 @@ impl AnthropicEventType { } pub fn report_anthropic_event( - model: &Arc, + model: &Arc, event: AnthropicEventData, cx: &gpui::App, ) { @@ -69,7 +69,7 @@ pub struct AnthropicEventReporter { } impl AnthropicEventReporter { - pub fn new(model: &Arc, cx: &gpui::App) -> Self { + pub fn new(model: &Arc, cx: &gpui::App) -> Self { Self { http_client: cx.http_client(), executor: cx.background_executor().clone(), diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 161ee6e9abd5283dfbe10c4e7c9dc5597fc4b5b9..f9372a4d7ea9c078c58f633cc58bd5597ef49212 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -19,12 +19,15 @@ use gpui::{AnyElement, AnyView, App, AsyncApp, Context, Entity, Subscription, Ta use http_client::http::{HeaderMap, HeaderValue}; use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Response, StatusCode}; use language_model::{ - AuthenticateError, IconOrSvg, LanguageModel, LanguageModelCacheConfiguration, + ANTHROPIC_PROVIDER_ID, ANTHROPIC_PROVIDER_NAME, AuthenticateError, GOOGLE_PROVIDER_ID, + GOOGLE_PROVIDER_NAME, IconOrSvg, LanguageModel, LanguageModelCacheConfiguration, 
LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelEffortLevel, LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken, NeedsLlmTokenRefresh, - PaymentRequiredError, RateLimiter, RefreshLlmTokenListener, + OPEN_AI_PROVIDER_ID, OPEN_AI_PROVIDER_NAME, PaymentRequiredError, RateLimiter, + RefreshLlmTokenListener, X_AI_PROVIDER_ID, X_AI_PROVIDER_NAME, ZED_CLOUD_PROVIDER_ID, + ZED_CLOUD_PROVIDER_NAME, }; use release_channel::AppVersion; use schemars::JsonSchema; @@ -53,8 +56,8 @@ use crate::provider::open_ai::{ }; use crate::provider::x_ai::count_xai_tokens; -const PROVIDER_ID: LanguageModelProviderId = language_model::ZED_CLOUD_PROVIDER_ID; -const PROVIDER_NAME: LanguageModelProviderName = language_model::ZED_CLOUD_PROVIDER_NAME; +const PROVIDER_ID: LanguageModelProviderId = ZED_CLOUD_PROVIDER_ID; +const PROVIDER_NAME: LanguageModelProviderName = ZED_CLOUD_PROVIDER_NAME; #[derive(Default, Clone, Debug, PartialEq)] pub struct ZedDotDevSettings { @@ -568,20 +571,20 @@ impl LanguageModel for CloudLanguageModel { fn upstream_provider_id(&self) -> LanguageModelProviderId { use cloud_llm_client::LanguageModelProvider::*; match self.model.provider { - Anthropic => language_model::ANTHROPIC_PROVIDER_ID, - OpenAi => language_model::OPEN_AI_PROVIDER_ID, - Google => language_model::GOOGLE_PROVIDER_ID, - XAi => language_model::X_AI_PROVIDER_ID, + Anthropic => ANTHROPIC_PROVIDER_ID, + OpenAi => OPEN_AI_PROVIDER_ID, + Google => GOOGLE_PROVIDER_ID, + XAi => X_AI_PROVIDER_ID, } } fn upstream_provider_name(&self) -> LanguageModelProviderName { use cloud_llm_client::LanguageModelProvider::*; match self.model.provider { - Anthropic => language_model::ANTHROPIC_PROVIDER_NAME, - OpenAi => language_model::OPEN_AI_PROVIDER_NAME, - Google => language_model::GOOGLE_PROVIDER_NAME, - XAi => 
language_model::X_AI_PROVIDER_NAME, + Anthropic => ANTHROPIC_PROVIDER_NAME, + OpenAi => OPEN_AI_PROVIDER_NAME, + Google => GOOGLE_PROVIDER_NAME, + XAi => X_AI_PROVIDER_NAME, } } @@ -1047,12 +1050,10 @@ where fn provider_name(provider: &cloud_llm_client::LanguageModelProvider) -> LanguageModelProviderName { match provider { - cloud_llm_client::LanguageModelProvider::Anthropic => { - language_model::ANTHROPIC_PROVIDER_NAME - } - cloud_llm_client::LanguageModelProvider::OpenAi => language_model::OPEN_AI_PROVIDER_NAME, - cloud_llm_client::LanguageModelProvider::Google => language_model::GOOGLE_PROVIDER_NAME, - cloud_llm_client::LanguageModelProvider::XAi => language_model::X_AI_PROVIDER_NAME, + cloud_llm_client::LanguageModelProvider::Anthropic => ANTHROPIC_PROVIDER_NAME, + cloud_llm_client::LanguageModelProvider::OpenAi => OPEN_AI_PROVIDER_NAME, + cloud_llm_client::LanguageModelProvider::Google => GOOGLE_PROVIDER_NAME, + cloud_llm_client::LanguageModelProvider::XAi => X_AI_PROVIDER_NAME, } } diff --git a/crates/language_models/src/provider/google.rs b/crates/language_models/src/provider/google.rs index 334a5cbe64e6cdefbaa7c15c309ca4632109e323..8fdfb514ac6e872bd24968d33f2c1169401d5a9c 100644 --- a/crates/language_models/src/provider/google.rs +++ b/crates/language_models/src/provider/google.rs @@ -13,9 +13,9 @@ use language_model::{ LanguageModelToolUse, LanguageModelToolUseId, MessageContent, StopReason, }; use language_model::{ - IconOrSvg, LanguageModel, LanguageModelId, LanguageModelName, LanguageModelProvider, - LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, - LanguageModelRequest, RateLimiter, Role, + GOOGLE_PROVIDER_ID, GOOGLE_PROVIDER_NAME, IconOrSvg, LanguageModel, LanguageModelId, + LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, + LanguageModelProviderState, LanguageModelRequest, RateLimiter, Role, }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -33,8 +33,8 @@ 
use util::ResultExt; use language_model::ApiKeyState; -const PROVIDER_ID: LanguageModelProviderId = language_model::GOOGLE_PROVIDER_ID; -const PROVIDER_NAME: LanguageModelProviderName = language_model::GOOGLE_PROVIDER_NAME; +const PROVIDER_ID: LanguageModelProviderId = GOOGLE_PROVIDER_ID; +const PROVIDER_NAME: LanguageModelProviderName = GOOGLE_PROVIDER_NAME; #[derive(Default, Clone, Debug, PartialEq)] pub struct GoogleSettings { diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 8de1eaaf8465cf48838c49f6b24d3eb16d6e3487..9289c66b2a4c9213826d2d027555511c9746d00e 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -10,7 +10,8 @@ use language_model::{ LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, LanguageModelRequestMessage, LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, - LanguageModelToolUseId, MessageContent, RateLimiter, Role, StopReason, TokenUsage, env_var, + LanguageModelToolUseId, MessageContent, OPEN_AI_PROVIDER_ID, OPEN_AI_PROVIDER_NAME, + RateLimiter, Role, StopReason, TokenUsage, env_var, }; use menu; use open_ai::responses::{ @@ -35,8 +36,8 @@ use util::ResultExt; use crate::provider::util::{fix_streamed_json, parse_tool_arguments}; -const PROVIDER_ID: LanguageModelProviderId = language_model::OPEN_AI_PROVIDER_ID; -const PROVIDER_NAME: LanguageModelProviderName = language_model::OPEN_AI_PROVIDER_NAME; +const PROVIDER_ID: LanguageModelProviderId = OPEN_AI_PROVIDER_ID; +const PROVIDER_NAME: LanguageModelProviderName = OPEN_AI_PROVIDER_NAME; const API_KEY_ENV_VAR_NAME: &str = "OPENAI_API_KEY"; static API_KEY_ENV_VAR: LazyLock = env_var!(API_KEY_ENV_VAR_NAME); diff --git a/crates/language_selector/src/active_buffer_language.rs b/crates/language_selector/src/active_buffer_language.rs index 
c75c3954cc6590c2e0cb4326c073ed004eaac280..1f280282af933094cf46cd9e7ab790efd07b8a12 100644 --- a/crates/language_selector/src/active_buffer_language.rs +++ b/crates/language_selector/src/active_buffer_language.rs @@ -29,7 +29,7 @@ impl ActiveBufferLanguage { self.active_language = Some(None); let editor = editor.read(cx); - if let Some((_, buffer, _)) = editor.active_excerpt(cx) + if let Some(buffer) = editor.active_buffer(cx) && let Some(language) = buffer.read(cx).language() { self.active_language = Some(Some(language.name())); diff --git a/crates/language_selector/src/language_selector.rs b/crates/language_selector/src/language_selector.rs index e5e6a2e264dbb923390e05b283fe341a3336af97..70a03514f45371d58d0a8ee0a14eb87565d3a514 100644 --- a/crates/language_selector/src/language_selector.rs +++ b/crates/language_selector/src/language_selector.rs @@ -51,11 +51,11 @@ impl LanguageSelector { cx: &mut Context, ) -> Option<()> { let registry = workspace.app_state().languages.clone(); - let (_, buffer, _) = workspace + let buffer = workspace .active_item(cx)? .act_as::(cx)? 
.read(cx) - .active_excerpt(cx)?; + .active_buffer(cx)?; let project = workspace.project().clone(); workspace.toggle_modal(window, cx, move |window, cx| { @@ -414,10 +414,10 @@ mod tests { ) -> Entity { let editor = open_new_buffer_editor(workspace, project, cx).await; // Ensure the buffer has no language after the editor is created - let (_, buffer, _) = editor.read_with(cx, |editor, cx| { + let buffer = editor.read_with(cx, |editor, cx| { editor - .active_excerpt(cx) - .expect("editor should have an active excerpt") + .active_buffer(cx) + .expect("editor should have an active buffer") }); buffer.update(cx, |buffer, cx| { buffer.set_language(None, cx); @@ -454,8 +454,8 @@ mod tests { .await .expect("language should exist in registry"); editor.update(cx, move |editor, cx| { - let (_, buffer, _) = editor - .active_excerpt(cx) + let buffer = editor + .active_buffer(cx) .expect("editor should have an active excerpt"); buffer.update(cx, |buffer, cx| { buffer.set_language(Some(language), cx); @@ -578,6 +578,15 @@ mod tests { assert_selected_language_for_editor(&workspace, &rust_editor, Some("Rust"), cx); assert_selected_language_for_editor(&workspace, &typescript_editor, Some("TypeScript"), cx); + // Ensure the empty editor's buffer has no language before asserting + let buffer = empty_editor.read_with(cx, |editor, cx| { + editor + .active_buffer(cx) + .expect("editor should have an active excerpt") + }); + buffer.update(cx, |buffer, cx| { + buffer.set_language(None, cx); + }); assert_selected_language_for_editor(&workspace, &empty_editor, None, cx); } diff --git a/crates/language_tools/src/highlights_tree_view.rs b/crates/language_tools/src/highlights_tree_view.rs index c2f684c11dc148c8f66b6cf20e0ca06e40905db7..aec0cad5b1cf4be043ca21298995b08ceb93f3f2 100644 --- a/crates/language_tools/src/highlights_tree_view.rs +++ b/crates/language_tools/src/highlights_tree_view.rs @@ -1,5 +1,5 @@ use editor::{ - Anchor, Editor, ExcerptId, HighlightKey, MultiBufferSnapshot, 
SelectionEffects, ToPoint, + Anchor, Editor, HighlightKey, MultiBufferSnapshot, SelectionEffects, ToPoint, scroll::Autoscroll, }; use gpui::{ @@ -8,8 +8,7 @@ use gpui::{ MouseDownEvent, MouseMoveEvent, ParentElement, Render, ScrollStrategy, SharedString, Styled, Task, UniformListScrollHandle, WeakEntity, Window, actions, div, rems, uniform_list, }; -use language::ToOffset; - +use language::{BufferId, Point, ToOffset}; use menu::{SelectNext, SelectPrevious}; use std::{mem, ops::Range}; use theme::ActiveTheme; @@ -114,12 +113,12 @@ impl HighlightCategory { #[derive(Debug, Clone)] struct HighlightEntry { - excerpt_id: ExcerptId, range: Range, + buffer_id: BufferId, + buffer_point_range: Range, range_display: SharedString, style: HighlightStyle, category: HighlightCategory, - sort_key: (ExcerptId, u32, u32, u32, u32), } /// An item in the display list: either a separator between excerpts or a highlight entry. @@ -319,20 +318,18 @@ impl HighlightsTreeView { display_map.update(cx, |display_map, cx| { for (key, text_highlights) in display_map.all_text_highlights() { for range in &text_highlights.1 { - let excerpt_id = range.start.excerpt_id; - let (range_display, sort_key) = format_anchor_range( - range, - excerpt_id, - &multi_buffer_snapshot, - is_singleton, - ); + let Some((range_display, buffer_id, buffer_point_range)) = + format_anchor_range(range, &multi_buffer_snapshot) + else { + continue; + }; entries.push(HighlightEntry { - excerpt_id, range: range.clone(), + buffer_id, range_display, style: text_highlights.0, category: HighlightCategory::Text(*key), - sort_key, + buffer_point_range, }); } } @@ -345,13 +342,11 @@ impl HighlightsTreeView { .and_then(|buf| buf.read(cx).language().map(|l| l.name())); for token in tokens.iter() { let range = token.range.start..token.range.end; - let excerpt_id = range.start.excerpt_id; - let (range_display, sort_key) = format_anchor_range( - &range, - excerpt_id, - &multi_buffer_snapshot, - is_singleton, - ); + let 
Some((range_display, entry_buffer_id, buffer_point_range)) = + format_anchor_range(&range, &multi_buffer_snapshot) + else { + continue; + }; let Some(stylizer) = lsp_store.get_or_create_token_stylizer( token.server_id, language_name.as_ref(), @@ -388,8 +383,8 @@ impl HighlightsTreeView { }); entries.push(HighlightEntry { - excerpt_id, range, + buffer_id: entry_buffer_id, range_display, style: interner[token.style], category: HighlightCategory::SemanticToken { @@ -399,7 +394,7 @@ impl HighlightsTreeView { .map(SharedString::from), theme_key, }, - sort_key, + buffer_point_range, }); } } @@ -407,7 +402,13 @@ impl HighlightsTreeView { }); let syntax_theme = cx.theme().syntax().clone(); - for (excerpt_id, buffer_snapshot, excerpt_range) in multi_buffer_snapshot.excerpts() { + for excerpt_range in multi_buffer_snapshot.excerpts() { + let Some(buffer_snapshot) = + multi_buffer_snapshot.buffer_for_id(excerpt_range.context.start.buffer_id) + else { + continue; + }; + let start_offset = excerpt_range.context.start.to_offset(buffer_snapshot); let end_offset = excerpt_range.context.end.to_offset(buffer_snapshot); let range = start_offset..end_offset; @@ -438,8 +439,8 @@ impl HighlightsTreeView { let start_anchor = buffer_snapshot.anchor_before(capture.node.start_byte()); let end_anchor = buffer_snapshot.anchor_after(capture.node.end_byte()); - let start = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, start_anchor); - let end = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, end_anchor); + let start = multi_buffer_snapshot.anchor_in_excerpt(start_anchor); + let end = multi_buffer_snapshot.anchor_in_excerpt(end_anchor); let (start, end) = match (start, end) { (Some(s), Some(e)) => (s, e), @@ -447,29 +448,38 @@ impl HighlightsTreeView { }; let range = start..end; - let (range_display, sort_key) = - format_anchor_range(&range, excerpt_id, &multi_buffer_snapshot, is_singleton); + let Some((range_display, buffer_id, buffer_point_range)) = + format_anchor_range(&range, 
&multi_buffer_snapshot) + else { + continue; + }; entries.push(HighlightEntry { - excerpt_id, range, + buffer_id, range_display, style, category: HighlightCategory::SyntaxToken { capture_name, theme_key, }, - sort_key, + buffer_point_range, }); } } entries.sort_by(|a, b| { - a.sort_key - .cmp(&b.sort_key) + a.buffer_id + .cmp(&b.buffer_id) + .then_with(|| a.buffer_point_range.start.cmp(&b.buffer_point_range.start)) + .then_with(|| a.buffer_point_range.end.cmp(&b.buffer_point_range.end)) .then_with(|| a.category.cmp(&b.category)) }); - entries.dedup_by(|a, b| a.sort_key == b.sort_key && a.category == b.category); + entries.dedup_by(|a, b| { + a.buffer_id == b.buffer_id + && a.buffer_point_range == b.buffer_point_range + && a.category == b.category + }); self.cached_entries = entries; self.rebuild_display_items(&multi_buffer_snapshot, cx); @@ -485,7 +495,7 @@ impl HighlightsTreeView { fn rebuild_display_items(&mut self, snapshot: &MultiBufferSnapshot, cx: &App) { self.display_items.clear(); - let mut last_excerpt_id: Option = None; + let mut last_range_end: Option = None; for (entry_ix, entry) in self.cached_entries.iter().enumerate() { if !self.should_show_entry(entry) { @@ -493,11 +503,14 @@ impl HighlightsTreeView { } if !self.is_singleton { - let excerpt_changed = - last_excerpt_id.is_none_or(|last_id| last_id != entry.excerpt_id); + let excerpt_changed = last_range_end.is_none_or(|anchor| { + snapshot + .excerpt_containing(anchor..entry.range.start) + .is_none() + }); if excerpt_changed { - last_excerpt_id = Some(entry.excerpt_id); - let label = excerpt_label_for(entry.excerpt_id, snapshot, cx); + last_range_end = Some(entry.range.end); + let label = excerpt_label_for(entry, snapshot, cx); self.display_items .push(DisplayItem::ExcerptSeparator { label }); } @@ -516,10 +529,6 @@ impl HighlightsTreeView { } fn scroll_to_cursor_position(&mut self, cursor: &Anchor, snapshot: &MultiBufferSnapshot) { - let cursor_point = cursor.to_point(snapshot); - let cursor_key = 
(cursor_point.row, cursor_point.column); - let cursor_excerpt = cursor.excerpt_id; - let best = self .display_items .iter() @@ -532,17 +541,18 @@ impl HighlightsTreeView { _ => None, }) .filter(|(_, _, entry)| { - let (excerpt_id, start_row, start_col, end_row, end_col) = entry.sort_key; - if !self.is_singleton && excerpt_id != cursor_excerpt { - return false; - } - let start = (start_row, start_col); - let end = (end_row, end_col); - cursor_key >= start && cursor_key <= end + entry.range.start.cmp(&cursor, snapshot).is_le() + && cursor.cmp(&entry.range.end, snapshot).is_lt() }) .min_by_key(|(_, _, entry)| { - let (_, start_row, start_col, end_row, end_col) = entry.sort_key; - (end_row - start_row, end_col.saturating_sub(start_col)) + ( + entry.buffer_point_range.end.row - entry.buffer_point_range.start.row, + entry + .buffer_point_range + .end + .column + .saturating_sub(entry.buffer_point_range.start.column), + ) }) .map(|(display_ix, entry_ix, _)| (display_ix, entry_ix)); @@ -1076,12 +1086,13 @@ impl ToolbarItemView for HighlightsTreeToolbarItemView { } fn excerpt_label_for( - excerpt_id: ExcerptId, + entry: &HighlightEntry, snapshot: &MultiBufferSnapshot, cx: &App, ) -> SharedString { - let buffer = snapshot.buffer_for_excerpt(excerpt_id); - let path_label = buffer + let path_label = snapshot + .anchor_to_buffer_anchor(entry.range.start) + .and_then(|(anchor, _)| snapshot.buffer_for_id(anchor.buffer_id)) .and_then(|buf| buf.file()) .map(|file| { let full_path = file.full_path(cx); @@ -1093,50 +1104,21 @@ fn excerpt_label_for( fn format_anchor_range( range: &Range, - excerpt_id: ExcerptId, snapshot: &MultiBufferSnapshot, - is_singleton: bool, -) -> (SharedString, (ExcerptId, u32, u32, u32, u32)) { - if is_singleton { - let start = range.start.to_point(snapshot); - let end = range.end.to_point(snapshot); - let display = SharedString::from(format!( - "[{}:{} - {}:{}]", - start.row + 1, - start.column + 1, - end.row + 1, - end.column + 1, - )); - let sort_key = 
(excerpt_id, start.row, start.column, end.row, end.column); - (display, sort_key) - } else { - let buffer = snapshot.buffer_for_excerpt(excerpt_id); - if let Some(buffer) = buffer { - let start = language::ToPoint::to_point(&range.start.text_anchor, buffer); - let end = language::ToPoint::to_point(&range.end.text_anchor, buffer); - let display = SharedString::from(format!( - "[{}:{} - {}:{}]", - start.row + 1, - start.column + 1, - end.row + 1, - end.column + 1, - )); - let sort_key = (excerpt_id, start.row, start.column, end.row, end.column); - (display, sort_key) - } else { - let start = range.start.to_point(snapshot); - let end = range.end.to_point(snapshot); - let display = SharedString::from(format!( - "[{}:{} - {}:{}]", - start.row + 1, - start.column + 1, - end.row + 1, - end.column + 1, - )); - let sort_key = (excerpt_id, start.row, start.column, end.row, end.column); - (display, sort_key) - } - } +) -> Option<(SharedString, BufferId, Range)> { + let start = range.start.to_point(snapshot); + let end = range.end.to_point(snapshot); + let ((start_buffer, start), (_, end)) = snapshot + .point_to_buffer_point(start) + .zip(snapshot.point_to_buffer_point(end))?; + let display = SharedString::from(format!( + "[{}:{} - {}:{}]", + start.row + 1, + start.column + 1, + end.row + 1, + end.column + 1, + )); + Some((display, start_buffer.remote_id(), start..end)) } fn render_style_preview(style: HighlightStyle, selected: bool, cx: &App) -> Div { diff --git a/crates/language_tools/src/lsp_button.rs b/crates/language_tools/src/lsp_button.rs index 59b14d470003f3a8a4f45b7b2b3e51505f562e56..43b1736223478fe29f45aac0a712fafad1d2dcbe 100644 --- a/crates/language_tools/src/lsp_button.rs +++ b/crates/language_tools/src/lsp_button.rs @@ -1179,13 +1179,20 @@ impl StatusItemView for LspButton { .and_then(|active_editor| active_editor.editor.upgrade()) .as_ref() { - let editor_buffers = - HashSet::from_iter(editor.read(cx).buffer().read(cx).excerpt_buffer_ids()); + let editor_buffers 
= HashSet::from_iter( + editor + .read(cx) + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id), + ); let _editor_subscription = cx.subscribe_in( &editor, window, |lsp_button, _, e: &EditorEvent, window, cx| match e { - EditorEvent::ExcerptsAdded { buffer, .. } => { + EditorEvent::BufferRangesUpdated { buffer, .. } => { let updated = lsp_button.server_state.update(cx, |state, cx| { if let Some(active_editor) = state.active_editor.as_mut() { let buffer_id = buffer.read(cx).remote_id(); @@ -1198,9 +1205,7 @@ impl StatusItemView for LspButton { lsp_button.refresh_lsp_menu(false, window, cx); } } - EditorEvent::ExcerptsRemoved { - removed_buffer_ids, .. - } => { + EditorEvent::BuffersRemoved { removed_buffer_ids } => { let removed = lsp_button.server_state.update(cx, |state, _| { let mut removed = false; if let Some(active_editor) = state.active_editor.as_mut() { diff --git a/crates/language_tools/src/syntax_tree_view.rs b/crates/language_tools/src/syntax_tree_view.rs index b44d2e05d90733469a5385c2695b3fda3ff47c5e..9c751dd8eaf71272b649b037425caa4aa73b39cc 100644 --- a/crates/language_tools/src/syntax_tree_view.rs +++ b/crates/language_tools/src/syntax_tree_view.rs @@ -1,7 +1,6 @@ use command_palette_hooks::CommandPaletteFilter; use editor::{ - Anchor, Editor, ExcerptId, HighlightKey, MultiBufferOffset, SelectionEffects, - scroll::Autoscroll, + Anchor, Editor, HighlightKey, MultiBufferOffset, SelectionEffects, scroll::Autoscroll, }; use gpui::{ App, AppContext as _, Context, Div, Entity, EntityId, EventEmitter, FocusHandle, Focusable, @@ -125,7 +124,6 @@ impl EditorState { #[derive(Clone)] struct BufferState { buffer: Entity, - excerpt_id: ExcerptId, active_layer: Option, } @@ -253,18 +251,18 @@ impl SyntaxTreeView { let snapshot = editor_state .editor .update(cx, |editor, cx| editor.snapshot(window, cx)); - let (buffer, range, excerpt_id) = editor_state.editor.update(cx, |editor, cx| { + let (buffer, range) = 
editor_state.editor.update(cx, |editor, cx| { let selection_range = editor .selections .last::(&editor.display_snapshot(cx)) .range(); let multi_buffer = editor.buffer().read(cx); - let (buffer, range, excerpt_id) = snapshot + let (buffer, range, _) = snapshot .buffer_snapshot() - .range_to_buffer_ranges(selection_range.start..=selection_range.end) + .range_to_buffer_ranges(selection_range.start..selection_range.end) .pop()?; let buffer = multi_buffer.buffer(buffer.remote_id()).unwrap(); - Some((buffer, range, excerpt_id)) + Some((buffer, range)) })?; // If the cursor has moved into a different excerpt, retrieve a new syntax layer @@ -273,16 +271,14 @@ impl SyntaxTreeView { .active_buffer .get_or_insert_with(|| BufferState { buffer: buffer.clone(), - excerpt_id, active_layer: None, }); let mut prev_layer = None; if did_reparse { prev_layer = buffer_state.active_layer.take(); } - if buffer_state.buffer != buffer || buffer_state.excerpt_id != excerpt_id { + if buffer_state.buffer != buffer { buffer_state.buffer = buffer.clone(); - buffer_state.excerpt_id = excerpt_id; buffer_state.active_layer = None; } @@ -360,8 +356,7 @@ impl SyntaxTreeView { // Build a multibuffer anchor range. let multibuffer = editor_state.editor.read(cx).buffer(); let multibuffer = multibuffer.read(cx).snapshot(cx); - let excerpt_id = buffer_state.excerpt_id; - let range = multibuffer.anchor_range_in_excerpt(excerpt_id, range)?; + let range = multibuffer.buffer_anchor_range_to_anchor_range(range)?; let key = cx.entity_id().as_u64() as usize; // Update the editor with the anchor range. 
diff --git a/crates/languages/src/eslint.rs b/crates/languages/src/eslint.rs index 943034652de852b2c39b4887218c3c8e28f329e1..7ef55c64ef1b35fa42f35e779c4cf46b30a18ee5 100644 --- a/crates/languages/src/eslint.rs +++ b/crates/languages/src/eslint.rs @@ -7,8 +7,10 @@ use http_client::{ }; use language::{LspAdapter, LspAdapterDelegate, LspInstaller, Toolchain}; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerName, Uri}; -use node_runtime::NodeRuntime; +use node_runtime::{NodeRuntime, read_package_installed_version}; +use project::Fs; use project::lsp_store::language_server_settings_for; +use semver::Version; use serde::{Deserialize, Serialize}; use serde_json::{Value, json}; use settings::SettingsLocation; @@ -31,11 +33,12 @@ fn eslint_server_binary_arguments(server_path: &Path) -> Vec { pub struct EsLintLspAdapter { node: NodeRuntime, + fs: Arc, } impl EsLintLspAdapter { - const CURRENT_VERSION: &'static str = "2.4.4"; - const CURRENT_VERSION_TAG_NAME: &'static str = "release/2.4.4"; + const CURRENT_VERSION: &'static str = "3.0.24"; + const CURRENT_VERSION_TAG_NAME: &'static str = "release/3.0.24"; #[cfg(not(windows))] const GITHUB_ASSET_KIND: AssetKind = AssetKind::TarGz; @@ -45,7 +48,10 @@ impl EsLintLspAdapter { const SERVER_PATH: &'static str = "vscode-eslint/server/out/eslintServer.js"; const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("eslint"); - const FLAT_CONFIG_FILE_NAMES: &'static [&'static str] = &[ + const FLAT_CONFIG_FILE_NAMES_V8_21: &'static [&'static str] = &["eslint.config.js"]; + const FLAT_CONFIG_FILE_NAMES_V8_57: &'static [&'static str] = + &["eslint.config.js", "eslint.config.mjs", "eslint.config.cjs"]; + const FLAT_CONFIG_FILE_NAMES_V10: &'static [&'static str] = &[ "eslint.config.js", "eslint.config.mjs", "eslint.config.cjs", @@ -53,9 +59,17 @@ impl EsLintLspAdapter { "eslint.config.cts", "eslint.config.mts", ]; + const LEGACY_CONFIG_FILE_NAMES: &'static [&'static str] = &[ + ".eslintrc", + ".eslintrc.js", + 
".eslintrc.cjs", + ".eslintrc.yaml", + ".eslintrc.yml", + ".eslintrc.json", + ]; - pub fn new(node: NodeRuntime) -> Self { - EsLintLspAdapter { node } + pub fn new(node: NodeRuntime, fs: Arc) -> Self { + EsLintLspAdapter { node, fs } } fn build_destination_path(container_dir: &Path) -> PathBuf { @@ -73,7 +87,7 @@ impl LspInstaller for EsLintLspAdapter { _: &mut AsyncApp, ) -> Result { let url = build_asset_url( - "zed-industries/vscode-eslint", + "microsoft/vscode-eslint", Self::CURRENT_VERSION_TAG_NAME, Self::GITHUB_ASSET_KIND, )?; @@ -148,6 +162,7 @@ impl LspInstaller for EsLintLspAdapter { ) -> Option { let server_path = Self::build_destination_path(&container_dir).join(EsLintLspAdapter::SERVER_PATH); + fs::metadata(&server_path).await.ok()?; Some(LanguageServerBinary { path: self.node.binary_path().await.ok()?, env: None, @@ -156,6 +171,42 @@ impl LspInstaller for EsLintLspAdapter { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum EslintConfigKind { + Flat, + Legacy, +} + +#[derive(Debug, Default, Clone, PartialEq, Eq)] +struct EslintSettingsOverrides { + use_flat_config: Option, + experimental_use_flat_config: Option, +} + +impl EslintSettingsOverrides { + fn apply_to(self, workspace_configuration: &mut Value) { + if let Some(use_flat_config) = self.use_flat_config + && let Some(workspace_configuration) = workspace_configuration.as_object_mut() + { + workspace_configuration.insert("useFlatConfig".to_string(), json!(use_flat_config)); + } + + if let Some(experimental_use_flat_config) = self.experimental_use_flat_config + && let Some(workspace_configuration) = workspace_configuration.as_object_mut() + { + let experimental = workspace_configuration + .entry("experimental") + .or_insert_with(|| json!({})); + if let Some(experimental) = experimental.as_object_mut() { + experimental.insert( + "useFlatConfig".to_string(), + json!(experimental_use_flat_config), + ); + } + } + } +} + #[async_trait(?Send)] impl LspAdapter for EsLintLspAdapter { fn 
code_action_kinds(&self) -> Option> { @@ -173,9 +224,26 @@ impl LspAdapter for EsLintLspAdapter { cx: &mut AsyncApp, ) -> Result { let worktree_root = delegate.worktree_root_path(); - let use_flat_config = Self::FLAT_CONFIG_FILE_NAMES - .iter() - .any(|file| worktree_root.join(file).is_file()); + let requested_file_path = requested_uri + .as_ref() + .filter(|uri| uri.scheme() == "file") + .and_then(|uri| uri.to_file_path().ok()) + .filter(|path| path.starts_with(worktree_root)); + let eslint_version = find_eslint_version( + delegate.as_ref(), + worktree_root, + requested_file_path.as_deref(), + ) + .await?; + let config_kind = find_eslint_config_kind( + worktree_root, + requested_file_path.as_deref(), + eslint_version.as_ref(), + self.fs.as_ref(), + ) + .await; + let eslint_settings_overrides = + eslint_settings_overrides_for(eslint_version.as_ref(), config_kind); let mut default_workspace_configuration = json!({ "validate": "on", @@ -205,26 +273,13 @@ impl LspAdapter for EsLintLspAdapter { "showDocumentation": { "enable": true } - }, - "experimental": { - "useFlatConfig": use_flat_config, } }); + eslint_settings_overrides.apply_to(&mut default_workspace_configuration); - let file_path = requested_uri + let file_path = requested_file_path .as_ref() - .and_then(|uri| { - (uri.scheme() == "file") - .then(|| uri.to_file_path().ok()) - .flatten() - }) - .and_then(|abs_path| { - abs_path - .strip_prefix(&worktree_root) - .ok() - .map(ToOwned::to_owned) - }); - let file_path = file_path + .and_then(|abs_path| abs_path.strip_prefix(worktree_root).ok()) .and_then(|p| RelPath::unix(&p).ok().map(ToOwned::to_owned)) .unwrap_or_else(|| RelPath::empty().to_owned()); let override_options = cx.update(|cx| { @@ -271,6 +326,109 @@ impl LspAdapter for EsLintLspAdapter { } } +fn ancestor_directories<'a>( + worktree_root: &'a Path, + requested_file: Option<&'a Path>, +) -> impl Iterator + 'a { + let start = requested_file + .filter(|file| file.starts_with(worktree_root)) + 
.and_then(Path::parent) + .unwrap_or(worktree_root); + + start + .ancestors() + .take_while(move |dir| dir.starts_with(worktree_root)) +} + +fn flat_config_file_names(version: Option<&Version>) -> &'static [&'static str] { + match version { + Some(version) if version.major >= 10 => EsLintLspAdapter::FLAT_CONFIG_FILE_NAMES_V10, + Some(version) if version.major == 9 => EsLintLspAdapter::FLAT_CONFIG_FILE_NAMES_V8_57, + Some(version) if version.major == 8 && version.minor >= 57 => { + EsLintLspAdapter::FLAT_CONFIG_FILE_NAMES_V8_57 + } + Some(version) if version.major == 8 && version.minor >= 21 => { + EsLintLspAdapter::FLAT_CONFIG_FILE_NAMES_V8_21 + } + _ => &[], + } +} + +async fn find_eslint_config_kind( + worktree_root: &Path, + requested_file: Option<&Path>, + version: Option<&Version>, + fs: &dyn Fs, +) -> Option { + let flat_config_file_names = flat_config_file_names(version); + + for directory in ancestor_directories(worktree_root, requested_file) { + for file_name in flat_config_file_names { + if fs.is_file(&directory.join(file_name)).await { + return Some(EslintConfigKind::Flat); + } + } + + for file_name in EsLintLspAdapter::LEGACY_CONFIG_FILE_NAMES { + if fs.is_file(&directory.join(file_name)).await { + return Some(EslintConfigKind::Legacy); + } + } + } + + None +} + +fn eslint_settings_overrides_for( + version: Option<&Version>, + config_kind: Option, +) -> EslintSettingsOverrides { + // vscode-eslint 3.x already discovers config files and chooses a working + // directory from the active file on its own. Zed only overrides settings + // for the two cases where leaving everything unset is known to be wrong: + // + // - ESLint 8.21-8.56 flat config still needs experimental.useFlatConfig. + // - ESLint 9.x legacy config needs useFlatConfig = false. + // + // All other cases should defer to the server's own defaults and discovery. 
+ let Some(version) = version else { + return EslintSettingsOverrides::default(); + }; + + match config_kind { + Some(EslintConfigKind::Flat) if version.major == 8 && (21..57).contains(&version.minor) => { + EslintSettingsOverrides { + use_flat_config: None, + experimental_use_flat_config: Some(true), + } + } + Some(EslintConfigKind::Legacy) if version.major == 9 => EslintSettingsOverrides { + use_flat_config: Some(false), + experimental_use_flat_config: None, + }, + _ => EslintSettingsOverrides::default(), + } +} + +async fn find_eslint_version( + delegate: &dyn LspAdapterDelegate, + worktree_root: &Path, + requested_file: Option<&Path>, +) -> Result> { + for directory in ancestor_directories(worktree_root, requested_file) { + if let Some(version) = + read_package_installed_version(directory.join("node_modules"), "eslint").await? + { + return Ok(Some(version)); + } + } + + Ok(delegate + .npm_package_installed_version("eslint") + .await? + .map(|(_, version)| version)) +} + /// On Windows, converts Unix-style separators (/) to Windows-style (\). 
/// On Unix, returns the path unchanged fn normalize_path_separators(path: &str) -> String { @@ -623,6 +781,217 @@ mod tests { } } + mod eslint_settings { + use super::*; + use ::fs::FakeFs; + use gpui::TestAppContext; + + #[test] + fn test_ancestor_directories_for_package_local_file() { + let worktree_root = PathBuf::from(unix_path_to_platform("/workspace")); + let requested_file = PathBuf::from(unix_path_to_platform( + "/workspace/packages/web/src/index.js", + )); + + let directories: Vec<&Path> = + ancestor_directories(&worktree_root, Some(&requested_file)).collect(); + + assert_eq!( + directories, + vec![ + Path::new(&unix_path_to_platform("/workspace/packages/web/src")), + Path::new(&unix_path_to_platform("/workspace/packages/web")), + Path::new(&unix_path_to_platform("/workspace/packages")), + Path::new(&unix_path_to_platform("/workspace")), + ] + ); + } + + #[test] + fn test_eslint_8_flat_root_repo_uses_experimental_flag() { + let version = Version::parse("8.56.0").expect("valid ESLint version"); + let settings = + eslint_settings_overrides_for(Some(&version), Some(EslintConfigKind::Flat)); + + assert_eq!( + settings, + EslintSettingsOverrides { + use_flat_config: None, + experimental_use_flat_config: Some(true), + } + ); + } + + #[test] + fn test_eslint_8_57_flat_repo_uses_no_override() { + let version = Version::parse("8.57.0").expect("valid ESLint version"); + let settings = + eslint_settings_overrides_for(Some(&version), Some(EslintConfigKind::Flat)); + + assert_eq!(settings, EslintSettingsOverrides::default()); + } + + #[test] + fn test_eslint_9_legacy_repo_uses_use_flat_config_false() { + let version = Version::parse("9.0.0").expect("valid ESLint version"); + let settings = + eslint_settings_overrides_for(Some(&version), Some(EslintConfigKind::Legacy)); + + assert_eq!( + settings, + EslintSettingsOverrides { + use_flat_config: Some(false), + experimental_use_flat_config: None, + } + ); + } + + #[test] + fn test_eslint_10_repo_uses_no_override() { + let 
version = Version::parse("10.0.0").expect("valid ESLint version"); + let settings = + eslint_settings_overrides_for(Some(&version), Some(EslintConfigKind::Flat)); + + assert_eq!(settings, EslintSettingsOverrides::default()); + } + + #[gpui::test] + async fn test_eslint_8_56_does_not_treat_cjs_as_flat_config(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + unix_path_to_platform("/workspace"), + json!({ "eslint.config.cjs": "" }), + ) + .await; + let worktree_root = PathBuf::from(unix_path_to_platform("/workspace")); + let requested_file = PathBuf::from(unix_path_to_platform("/workspace/src/index.js")); + let version = Version::parse("8.56.0").expect("valid ESLint version"); + + let config_kind = find_eslint_config_kind( + &worktree_root, + Some(&requested_file), + Some(&version), + fs.as_ref(), + ) + .await; + + assert_eq!(config_kind, None); + } + + #[gpui::test] + async fn test_eslint_8_57_treats_cjs_as_flat_config(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + unix_path_to_platform("/workspace"), + json!({ "eslint.config.cjs": "" }), + ) + .await; + let worktree_root = PathBuf::from(unix_path_to_platform("/workspace")); + let requested_file = PathBuf::from(unix_path_to_platform("/workspace/src/index.js")); + let version = Version::parse("8.57.0").expect("valid ESLint version"); + + let config_kind = find_eslint_config_kind( + &worktree_root, + Some(&requested_file), + Some(&version), + fs.as_ref(), + ) + .await; + + assert_eq!(config_kind, Some(EslintConfigKind::Flat)); + } + + #[gpui::test] + async fn test_eslint_10_treats_typescript_config_as_flat_config(cx: &mut TestAppContext) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + unix_path_to_platform("/workspace"), + json!({ "eslint.config.ts": "" }), + ) + .await; + let worktree_root = PathBuf::from(unix_path_to_platform("/workspace")); + let requested_file = PathBuf::from(unix_path_to_platform("/workspace/src/index.js")); + 
let version = Version::parse("10.0.0").expect("valid ESLint version"); + + let config_kind = find_eslint_config_kind( + &worktree_root, + Some(&requested_file), + Some(&version), + fs.as_ref(), + ) + .await; + + assert_eq!(config_kind, Some(EslintConfigKind::Flat)); + } + + #[gpui::test] + async fn test_package_local_flat_config_is_preferred_for_monorepo_file( + cx: &mut TestAppContext, + ) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + unix_path_to_platform("/workspace"), + json!({ + "eslint.config.js": "", + "packages": { + "web": { + "eslint.config.js": "" + } + } + }), + ) + .await; + let worktree_root = PathBuf::from(unix_path_to_platform("/workspace")); + let requested_file = PathBuf::from(unix_path_to_platform( + "/workspace/packages/web/src/index.js", + )); + let version = Version::parse("8.56.0").expect("valid ESLint version"); + + let config_kind = find_eslint_config_kind( + &worktree_root, + Some(&requested_file), + Some(&version), + fs.as_ref(), + ) + .await; + + assert_eq!(config_kind, Some(EslintConfigKind::Flat)); + } + + #[gpui::test] + async fn test_package_local_legacy_config_is_detected_for_eslint_9( + cx: &mut TestAppContext, + ) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + unix_path_to_platform("/workspace"), + json!({ + "packages": { + "web": { + ".eslintrc.cjs": "" + } + } + }), + ) + .await; + let worktree_root = PathBuf::from(unix_path_to_platform("/workspace")); + let requested_file = PathBuf::from(unix_path_to_platform( + "/workspace/packages/web/src/index.js", + )); + let version = Version::parse("9.0.0").expect("valid ESLint version"); + + let config_kind = find_eslint_config_kind( + &worktree_root, + Some(&requested_file), + Some(&version), + fs.as_ref(), + ) + .await; + + assert_eq!(config_kind, Some(EslintConfigKind::Legacy)); + } + } + #[cfg(windows)] mod windows_style_paths { use super::*; diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index 
9a0524dffd238b566931a4a612edd91b1e6361c3..9010bbde022e765b53ccceec042a075f85fc102b 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -59,7 +59,7 @@ pub fn init(languages: Arc, fs: Arc, node: NodeRuntime let c_lsp_adapter = Arc::new(c::CLspAdapter); let css_lsp_adapter = Arc::new(css::CssLspAdapter::new(node.clone())); - let eslint_adapter = Arc::new(eslint::EsLintLspAdapter::new(node.clone())); + let eslint_adapter = Arc::new(eslint::EsLintLspAdapter::new(node.clone(), fs.clone())); let go_context_provider = Arc::new(go::GoContextProvider); let go_lsp_adapter = Arc::new(go::GoLspAdapter); let json_context_provider = Arc::new(JsonTaskProvider); diff --git a/crates/line_ending_selector/src/line_ending_indicator.rs b/crates/line_ending_selector/src/line_ending_indicator.rs index ee858d706b3a8152c868a5bd629c112a4d1b225f..9c493344e757174035a30e42126389ced9ea1624 100644 --- a/crates/line_ending_selector/src/line_ending_indicator.rs +++ b/crates/line_ending_selector/src/line_ending_indicator.rs @@ -18,7 +18,7 @@ impl LineEndingIndicator { self.line_ending = None; self.active_editor = None; - if let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) { + if let Some(buffer) = editor.read(cx).active_buffer(cx) { let line_ending = buffer.read(cx).line_ending(); self.line_ending = Some(line_ending); self.active_editor = Some(editor.downgrade()); diff --git a/crates/line_ending_selector/src/line_ending_selector.rs b/crates/line_ending_selector/src/line_ending_selector.rs index 504c327a349c97214e801f6bd375d61c7847f2be..455807565f8be52e574327f10d5881bb575c60f3 100644 --- a/crates/line_ending_selector/src/line_ending_selector.rs +++ b/crates/line_ending_selector/src/line_ending_selector.rs @@ -40,7 +40,7 @@ impl LineEndingSelector { fn toggle(editor: &WeakEntity, window: &mut Window, cx: &mut App) { let Some((workspace, buffer)) = editor .update(cx, |editor, cx| { - Some((editor.workspace()?, editor.active_excerpt(cx)?.1)) + 
Some((editor.workspace()?, editor.active_buffer(cx)?)) }) .ok() .flatten() diff --git a/crates/markdown/src/html/html_rendering.rs b/crates/markdown/src/html/html_rendering.rs index 56ab2db26b682e197c194157a87e646d9e55019d..103e2a6accb7dce9bc429419aafd27cbdf5080ce 100644 --- a/crates/markdown/src/html/html_rendering.rs +++ b/crates/markdown/src/html/html_rendering.rs @@ -497,7 +497,10 @@ mod tests { use gpui::{TestAppContext, size}; use ui::prelude::*; - use crate::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownOptions, MarkdownStyle}; + use crate::{ + CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownOptions, + MarkdownStyle, + }; fn ensure_theme_initialized(cx: &mut TestAppContext) { cx.update(|cx| { @@ -530,8 +533,7 @@ mod tests { |_window, _cx| { MarkdownElement::new(markdown, MarkdownStyle::default()).code_block_renderer( CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::Hidden, border: false, }, ) @@ -591,8 +593,7 @@ mod tests { |_window, _cx| { MarkdownElement::new(markdown, MarkdownStyle::default()).code_block_renderer( CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::Hidden, border: false, }, ) diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index 024e377c2538214c9579c8f025250e2166cf7ace..c31ca79e7581926e7696fa596aaccc9371512841 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -270,10 +270,16 @@ pub struct MarkdownOptions { pub render_mermaid_diagrams: bool, } +#[derive(Clone, Copy, PartialEq, Eq)] +pub enum CopyButtonVisibility { + Hidden, + AlwaysVisible, + VisibleOnHover, +} + pub enum CodeBlockRenderer { Default { - copy_button: bool, - copy_button_on_hover: bool, + copy_button_visibility: CopyButtonVisibility, border: bool, }, Custom { @@ -826,8 +832,7 @@ impl MarkdownElement { markdown, 
style, code_block_renderer: CodeBlockRenderer::Default { - copy_button: true, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::VisibleOnHover, border: false, }, on_url_click: None, @@ -1686,38 +1691,10 @@ impl Element for MarkdownElement { builder.pop_text_style(); if let CodeBlockRenderer::Default { - copy_button: true, .. - } = &self.code_block_renderer - { - builder.modify_current_div(|el| { - let content_range = parser::extract_code_block_content_range( - &parsed_markdown.source()[range.clone()], - ); - let content_range = content_range.start + range.start - ..content_range.end + range.start; - - let code = parsed_markdown.source()[content_range].to_string(); - let codeblock = render_copy_code_block_button( - range.end, - code, - self.markdown.clone(), - ); - el.child( - h_flex() - .w_4() - .absolute() - .top_1p5() - .right_1p5() - .justify_end() - .child(codeblock), - ) - }); - } - - if let CodeBlockRenderer::Default { - copy_button_on_hover: true, + copy_button_visibility, .. 
} = &self.code_block_renderer + && *copy_button_visibility != CopyButtonVisibility::Hidden { builder.modify_current_div(|el| { let content_range = parser::extract_code_block_content_range( @@ -1736,10 +1713,17 @@ impl Element for MarkdownElement { h_flex() .w_4() .absolute() - .top_0() - .right_0() .justify_end() - .visible_on_hover("code_block") + .when_else( + *copy_button_visibility + == CopyButtonVisibility::VisibleOnHover, + |this| { + this.top_0() + .right_0() + .visible_on_hover("code_block") + }, + |this| this.top_1p5().right_1p5(), + ) .child(codeblock), ) }); @@ -2772,8 +2756,7 @@ mod tests { |_window, _cx| { MarkdownElement::new(markdown, MarkdownStyle::default()).code_block_renderer( CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::Hidden, border: false, }, ) diff --git a/crates/markdown/src/mermaid.rs b/crates/markdown/src/mermaid.rs index 15f3de4d8e8c64010fe96846b05d75f012c5fc0d..b8e40ebe7ec16cbbb8d9b11ab3edfc75da46f3a9 100644 --- a/crates/markdown/src/mermaid.rs +++ b/crates/markdown/src/mermaid.rs @@ -266,7 +266,10 @@ mod tests { CachedMermaidDiagram, MermaidDiagramCache, MermaidState, ParsedMarkdownMermaidDiagramContents, extract_mermaid_diagrams, parse_mermaid_info, }; - use crate::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownOptions, MarkdownStyle}; + use crate::{ + CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownOptions, + MarkdownStyle, + }; use collections::HashMap; use gpui::{Context, IntoElement, Render, RenderImage, TestAppContext, Window, size}; use std::sync::Arc; @@ -309,8 +312,7 @@ mod tests { |_window, _cx| { MarkdownElement::new(markdown, MarkdownStyle::default()).code_block_renderer( CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::Hidden, border: false, }, ) @@ -581,8 +583,7 @@ mod tests { |_window, _cx| { 
MarkdownElement::new(markdown.clone(), MarkdownStyle::default()) .code_block_renderer(CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::Hidden, border: false, }) }, diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index 0b9c63c3b16f5686afcfdafdba119ede8c37fe3f..f978fdfcce13808b58cd1d7467379c44b95e7433 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -13,7 +13,8 @@ use gpui::{ }; use language::LanguageRegistry; use markdown::{ - CodeBlockRenderer, Markdown, MarkdownElement, MarkdownFont, MarkdownOptions, MarkdownStyle, + CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownFont, + MarkdownOptions, MarkdownStyle, }; use settings::Settings; use theme_settings::ThemeSettings; @@ -294,7 +295,7 @@ impl MarkdownPreviewView { EditorEvent::Edited { .. } | EditorEvent::BufferEdited { .. } | EditorEvent::DirtyChanged - | EditorEvent::ExcerptsEdited { .. } => { + | EditorEvent::BuffersEdited { .. } => { this.update_markdown_from_active_editor(true, false, window, cx); } EditorEvent::SelectionsChanged { .. 
} => { @@ -580,20 +581,33 @@ impl MarkdownPreviewView { .as_ref() .map(|state| state.editor.clone()); + let mut workspace_directory = None; + if let Some(workspace_entity) = self.workspace.upgrade() { + let project = workspace_entity.read(cx).project(); + if let Some(tree) = project.read(cx).worktrees(cx).next() { + workspace_directory = Some(tree.read(cx).abs_path().to_path_buf()); + } + } + let mut markdown_element = MarkdownElement::new( self.markdown.clone(), MarkdownStyle::themed(MarkdownFont::Editor, window, cx), ) .code_block_renderer(CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: true, + copy_button_visibility: CopyButtonVisibility::VisibleOnHover, border: false, }) .scroll_handle(self.scroll_handle.clone()) .show_root_block_markers() .image_resolver({ let base_directory = self.base_directory.clone(); - move |dest_url| resolve_preview_image(dest_url, base_directory.as_deref()) + move |dest_url| { + resolve_preview_image( + dest_url, + base_directory.as_deref(), + workspace_directory.as_deref(), + ) + } }) .on_url_click(move |url, window, cx| { open_preview_url(url, base_directory.clone(), &workspace, window, cx); @@ -687,7 +701,11 @@ fn resolve_preview_path(url: &str, base_directory: Option<&Path>) -> Option) -> Option { +fn resolve_preview_image( + dest_url: &str, + base_directory: Option<&Path>, + workspace_directory: Option<&Path>, +) -> Option { if dest_url.starts_with("data:") { return None; } @@ -702,6 +720,19 @@ fn resolve_preview_image(dest_url: &str, base_directory: Option<&Path>) -> Optio .map(|decoded| decoded.into_owned()) .unwrap_or_else(|_| dest_url.to_string()); + let decoded_path = Path::new(&decoded); + + if let Ok(relative_path) = decoded_path.strip_prefix("/") { + if let Some(root) = workspace_directory { + let absolute_path = root.join(relative_path); + if absolute_path.exists() { + return Some(ImageSource::Resource(Resource::Path(Arc::from( + absolute_path.as_path(), + )))); + } + } + } + let path = if 
Path::new(&decoded).is_absolute() { PathBuf::from(decoded) } else { @@ -778,6 +809,9 @@ impl Render for MarkdownPreviewView { #[cfg(test)] mod tests { + use crate::markdown_preview_view::ImageSource; + use crate::markdown_preview_view::Resource; + use crate::markdown_preview_view::resolve_preview_image; use anyhow::Result; use std::fs; use tempfile::TempDir; @@ -819,6 +853,54 @@ mod tests { Ok(()) } + #[test] + fn resolves_workspace_absolute_preview_images() -> Result<()> { + let temp_dir = TempDir::new()?; + let workspace_directory = temp_dir.path(); + + let base_directory = workspace_directory.join("docs"); + fs::create_dir_all(&base_directory)?; + + let image_file = workspace_directory.join("test_image.png"); + fs::write(&image_file, "mock data")?; + + let resolved_success = resolve_preview_image( + "/test_image.png", + Some(&base_directory), + Some(workspace_directory), + ); + + match resolved_success { + Some(ImageSource::Resource(Resource::Path(p))) => { + assert_eq!(p.as_ref(), image_file.as_path()); + } + _ => panic!("Expected successful resolution to be a Resource::Path"), + } + + let resolved_missing = resolve_preview_image( + "/missing_image.png", + Some(&base_directory), + Some(workspace_directory), + ); + + let expected_missing_path = if std::path::Path::new("/missing_image.png").is_absolute() { + std::path::PathBuf::from("/missing_image.png") + } else { + // join is to retain windows path prefix C:/ + #[expect(clippy::join_absolute_paths)] + base_directory.join("/missing_image.png") + }; + + match resolved_missing { + Some(ImageSource::Resource(Resource::Path(p))) => { + assert_eq!(p.as_ref(), expected_missing_path.as_path()); + } + _ => panic!("Expected missing file to fallback to a Resource::Path"), + } + + Ok(()) + } + #[test] fn does_not_treat_web_links_as_preview_paths() { assert_eq!(resolve_preview_path("https://zed.dev", None), None); diff --git a/crates/migrator/src/migrations.rs b/crates/migrator/src/migrations.rs index 
c49df39d59abaa924edb6c986c63701952dce01e..bc779908da7542c0bec34f799482929e96362770 100644 --- a/crates/migrator/src/migrations.rs +++ b/crates/migrator/src/migrations.rs @@ -317,8 +317,8 @@ pub(crate) mod m_2026_03_23 { pub(crate) use keymap::KEYMAP_PATTERNS; } -pub(crate) mod m_2026_03_31 { +pub(crate) mod m_2026_03_30 { mod settings; - pub(crate) use settings::remove_text_thread_settings; + pub(crate) use settings::make_play_sound_when_agent_done_an_enum; } diff --git a/crates/migrator/src/migrations/m_2026_03_30/settings.rs b/crates/migrator/src/migrations/m_2026_03_30/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..598941a6212442a4562814d43df6184e4eb76640 --- /dev/null +++ b/crates/migrator/src/migrations/m_2026_03_30/settings.rs @@ -0,0 +1,29 @@ +use anyhow::Result; +use serde_json::Value; + +use crate::migrations::migrate_settings; + +pub fn make_play_sound_when_agent_done_an_enum(value: &mut Value) -> Result<()> { + migrate_settings(value, &mut migrate_one) +} + +fn migrate_one(obj: &mut serde_json::Map) -> Result<()> { + let Some(play_sound) = obj + .get_mut("agent") + .and_then(|agent| agent.as_object_mut()) + .and_then(|agent| agent.get_mut("play_sound_when_agent_done")) + else { + return Ok(()); + }; + + *play_sound = match play_sound { + Value::Bool(true) => Value::String("always".to_string()), + Value::Bool(false) => Value::String("never".to_string()), + Value::String(s) if s == "never" || s == "when_hidden" || s == "always" => return Ok(()), + _ => { + anyhow::bail!("Expected play_sound_when_agent_done to be a boolean or valid enum value") + } + }; + + Ok(()) +} diff --git a/crates/migrator/src/migrations/m_2026_03_31/settings.rs b/crates/migrator/src/migrations/m_2026_03_31/settings.rs deleted file mode 100644 index 1a3fdb109f3773bada7a5fd5c00b1947e556e4c9..0000000000000000000000000000000000000000 --- a/crates/migrator/src/migrations/m_2026_03_31/settings.rs +++ /dev/null @@ -1,29 +0,0 @@ -use anyhow::Result; -use 
serde_json::Value; - -use crate::migrations::migrate_settings; - -pub fn remove_text_thread_settings(value: &mut Value) -> Result<()> { - migrate_settings(value, &mut migrate_one) -} - -fn migrate_one(obj: &mut serde_json::Map) -> Result<()> { - // Remove `agent.default_view` - if let Some(agent) = obj.get_mut("agent") { - if let Some(agent_obj) = agent.as_object_mut() { - agent_obj.remove("default_view"); - } - } - - // Remove `edit_predictions.enabled_in_text_threads` - if let Some(edit_predictions) = obj.get_mut("edit_predictions") { - if let Some(edit_predictions_obj) = edit_predictions.as_object_mut() { - edit_predictions_obj.remove("enabled_in_text_threads"); - } - } - - // Remove top-level `slash_commands` - obj.remove("slash_commands"); - - Ok(()) -} diff --git a/crates/migrator/src/migrator.rs b/crates/migrator/src/migrator.rs index 46cccfc4055a78a27d12da54ee187a0fdc202917..136ace8a12c03c831c3eebed97e2f5915ae6afa3 100644 --- a/crates/migrator/src/migrator.rs +++ b/crates/migrator/src/migrator.rs @@ -247,7 +247,7 @@ pub fn migrate_settings(text: &str) -> Result> { migrations::m_2026_03_16::SETTINGS_PATTERNS, &SETTINGS_QUERY_2026_03_16, ), - MigrationType::Json(migrations::m_2026_03_31::remove_text_thread_settings), + MigrationType::Json(migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum), ]; run_migrations(text, migrations) } @@ -941,7 +941,8 @@ mod tests { "foo": "bar" }, "edit_predictions": { - } + "enabled_in_text_threads": false, + } }"#, ), ); @@ -2400,6 +2401,132 @@ mod tests { ); } + #[test] + fn test_make_play_sound_when_agent_done_an_enum() { + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#"{ }"#.unindent(), + None, + ); + + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#"{ + "agent": { + "play_sound_when_agent_done": true + } + }"# + .unindent(), + Some( 
+ &r#"{ + "agent": { + "play_sound_when_agent_done": "always" + } + }"# + .unindent(), + ), + ); + + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#"{ + "agent": { + "play_sound_when_agent_done": false + } + }"# + .unindent(), + Some( + &r#"{ + "agent": { + "play_sound_when_agent_done": "never" + } + }"# + .unindent(), + ), + ); + + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#"{ + "agent": { + "play_sound_when_agent_done": "when_hidden" + } + }"# + .unindent(), + None, + ); + + // Platform key: settings nested inside "macos" should be migrated + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#" + { + "macos": { + "agent": { + "play_sound_when_agent_done": true + } + } + } + "# + .unindent(), + Some( + &r#" + { + "macos": { + "agent": { + "play_sound_when_agent_done": "always" + } + } + } + "# + .unindent(), + ), + ); + + // Profile: settings nested inside profiles should be migrated + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#" + { + "profiles": { + "work": { + "agent": { + "play_sound_when_agent_done": false + } + } + } + } + "# + .unindent(), + Some( + &r#" + { + "profiles": { + "work": { + "agent": { + "play_sound_when_agent_done": "never" + } + } + } + } + "# + .unindent(), + ), + ); + } + #[test] fn test_remove_context_server_source() { assert_migrate_settings( @@ -4480,109 +4607,4 @@ mod tests { ), ); } - - #[test] - fn test_remove_text_thread_settings() { - assert_migrate_with_migrations( - &[MigrationType::Json( - migrations::m_2026_03_31::remove_text_thread_settings, - )], - r#"{ - "agent": { - "default_model": { - "provider": "anthropic", - "model": "claude-sonnet" - }, - "default_view": 
"text_thread" - }, - "edit_predictions": { - "mode": "eager", - "enabled_in_text_threads": true - }, - "slash_commands": { - "cargo_workspace": { - "enabled": true - } - } -}"#, - Some( - r#"{ - "agent": { - "default_model": { - "provider": "anthropic", - "model": "claude-sonnet" - } - }, - "edit_predictions": { - "mode": "eager" - } -}"#, - ), - ); - } - - #[test] - fn test_remove_text_thread_settings_only_default_view() { - assert_migrate_with_migrations( - &[MigrationType::Json( - migrations::m_2026_03_31::remove_text_thread_settings, - )], - r#"{ - "agent": { - "default_model": "claude-sonnet", - "default_view": "thread" - } -}"#, - Some( - r#"{ - "agent": { - "default_model": "claude-sonnet" - } -}"#, - ), - ); - } - - #[test] - fn test_remove_text_thread_settings_only_slash_commands() { - assert_migrate_with_migrations( - &[MigrationType::Json( - migrations::m_2026_03_31::remove_text_thread_settings, - )], - r#"{ - "slash_commands": { - "cargo_workspace": { - "enabled": true - } - }, - "vim_mode": true -}"#, - Some( - r#"{ - "vim_mode": true -}"#, - ), - ); - } - - #[test] - fn test_remove_text_thread_settings_none_present() { - assert_migrate_with_migrations( - &[MigrationType::Json( - migrations::m_2026_03_31::remove_text_thread_settings, - )], - r#"{ - "agent": { - "default_model": { - "provider": "anthropic", - "model": "claude-sonnet" - } - }, - "edit_predictions": { - "mode": "eager" - } -}"#, - None, - ); - } } diff --git a/crates/multi_buffer/src/anchor.rs b/crates/multi_buffer/src/anchor.rs index cf4df9f53ccd2ca86fc6c064d51b7557404dd251..08b159effafa2f34dbf1b10768bf356aaf74ae31 100644 --- a/crates/multi_buffer/src/anchor.rs +++ b/crates/multi_buffer/src/anchor.rs @@ -1,192 +1,331 @@ -use crate::{MultiBufferDimension, MultiBufferOffset, MultiBufferOffsetUtf16}; +use crate::{ + ExcerptSummary, MultiBufferDimension, MultiBufferOffset, MultiBufferOffsetUtf16, PathKey, + PathKeyIndex, find_diff_state, +}; -use super::{ExcerptId, MultiBufferSnapshot, 
ToOffset, ToPoint}; -use language::Point; +use super::{MultiBufferSnapshot, ToOffset, ToPoint}; +use language::{BufferSnapshot, Point}; use std::{ cmp::Ordering, ops::{Add, AddAssign, Range, Sub}, }; use sum_tree::Bias; +use text::BufferId; + +/// A multibuffer anchor derived from an anchor into a specific excerpted buffer. +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct ExcerptAnchor { + pub(crate) text_anchor: text::Anchor, + pub(crate) path: PathKeyIndex, + pub(crate) diff_base_anchor: Option, +} /// A stable reference to a position within a [`MultiBuffer`](super::MultiBuffer). /// /// Unlike simple offsets, anchors remain valid as the text is edited, automatically /// adjusting to reflect insertions and deletions around them. #[derive(Clone, Copy, Eq, PartialEq, Hash)] -pub struct Anchor { - /// Identifies which excerpt within the multi-buffer this anchor belongs to. - /// A multi-buffer can contain multiple excerpts from different buffers. - pub excerpt_id: ExcerptId, - /// The position within the excerpt's underlying buffer. This is a stable - /// reference that remains valid as the buffer text is edited. - pub text_anchor: text::Anchor, - /// When present, indicates this anchor points into deleted text within an - /// expanded diff hunk. The anchor references a position in the diff base - /// (original) text rather than the current buffer text. This is used when - /// displaying inline diffs where deleted lines are shown. - pub diff_base_anchor: Option, +pub enum Anchor { + /// An anchor that always resolves to the start of the multibuffer. + Min, + /// An anchor that's attached to a specific excerpted buffer. + Excerpt(ExcerptAnchor), + /// An anchor that always resolves to the end of the multibuffer. 
+ Max, } -impl std::fmt::Debug for Anchor { +pub(crate) enum AnchorSeekTarget { + Excerpt { + path_key: PathKey, + anchor: ExcerptAnchor, + // None when the buffer no longer exists in the multibuffer + snapshot: Option, + }, + Empty, +} + +impl std::fmt::Debug for AnchorSeekTarget { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - if self.is_min() { - return write!(f, "Anchor::min({:?})", self.text_anchor.buffer_id); + match self { + Self::Excerpt { + path_key, + anchor, + snapshot: _, + } => f + .debug_struct("Excerpt") + .field("path_key", path_key) + .field("anchor", anchor) + .finish(), + Self::Empty => write!(f, "Empty"), } - if self.is_max() { - return write!(f, "Anchor::max({:?})", self.text_anchor.buffer_id); + } +} + +impl std::fmt::Debug for Anchor { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Anchor::Min => write!(f, "Anchor::Min"), + Anchor::Max => write!(f, "Anchor::Max"), + Anchor::Excerpt(excerpt_anchor) => write!(f, "{excerpt_anchor:?}"), } + } +} - f.debug_struct("Anchor") - .field("excerpt_id", &self.excerpt_id) - .field("text_anchor", &self.text_anchor) - .field("diff_base_anchor", &self.diff_base_anchor) - .finish() +impl From for Anchor { + fn from(anchor: ExcerptAnchor) -> Self { + Anchor::Excerpt(anchor) } } -impl Anchor { - pub fn with_diff_base_anchor(self, diff_base_anchor: text::Anchor) -> Self { - Self { - diff_base_anchor: Some(diff_base_anchor), - ..self +impl ExcerptAnchor { + pub(crate) fn buffer_id(&self) -> BufferId { + self.text_anchor.buffer_id + } + + pub(crate) fn text_anchor(&self) -> text::Anchor { + self.text_anchor + } + + pub(crate) fn with_diff_base_anchor(mut self, diff_base_anchor: text::Anchor) -> Self { + self.diff_base_anchor = Some(diff_base_anchor); + self + } + + pub(crate) fn cmp(&self, other: &Self, snapshot: &MultiBufferSnapshot) -> Ordering { + let Some(self_path_key) = snapshot.path_keys_by_index.get(&self.path) else { + panic!("anchor's path 
was never added to multibuffer") + }; + let Some(other_path_key) = snapshot.path_keys_by_index.get(&other.path) else { + panic!("anchor's path was never added to multibuffer") + }; + + if self_path_key.cmp(other_path_key) != Ordering::Equal { + return self_path_key.cmp(other_path_key); + } + + // in the case that you removed the buffer containing self, + // and added the buffer containing other with the same path key + // (ordering is arbitrary but consistent) + if self.text_anchor.buffer_id != other.text_anchor.buffer_id { + return self.text_anchor.buffer_id.cmp(&other.text_anchor.buffer_id); + } + + let Some(buffer) = snapshot.buffer_for_path(&self_path_key) else { + return Ordering::Equal; + }; + // Comparing two anchors into buffer A that formerly existed at path P, + // when path P has since been reused for a different buffer B + if buffer.remote_id() != self.text_anchor.buffer_id { + return Ordering::Equal; + }; + assert_eq!(self.text_anchor.buffer_id, buffer.remote_id()); + let text_cmp = self.text_anchor().cmp(&other.text_anchor(), buffer); + if text_cmp != Ordering::Equal { + return text_cmp; + } + + if (self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some()) + && let Some(base_text) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id) + .map(|diff| diff.base_text()) + { + let self_anchor = self.diff_base_anchor.filter(|a| a.is_valid(base_text)); + let other_anchor = other.diff_base_anchor.filter(|a| a.is_valid(base_text)); + return match (self_anchor, other_anchor) { + (Some(a), Some(b)) => a.cmp(&b, base_text), + (Some(_), None) => match other.text_anchor().bias { + Bias::Left => Ordering::Greater, + Bias::Right => Ordering::Less, + }, + (None, Some(_)) => match self.text_anchor().bias { + Bias::Left => Ordering::Less, + Bias::Right => Ordering::Greater, + }, + (None, None) => Ordering::Equal, + }; } + + Ordering::Equal } - pub fn in_buffer(excerpt_id: ExcerptId, text_anchor: text::Anchor) -> Self { - Self { - excerpt_id, - 
text_anchor, - diff_base_anchor: None, + fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Self { + if self.text_anchor.bias == Bias::Left { + return *self; + } + let Some(buffer) = snapshot.buffer_for_id(self.text_anchor.buffer_id) else { + return *self; + }; + let text_anchor = self.text_anchor().bias_left(&buffer); + let ret = Self::in_buffer(self.path, text_anchor); + if let Some(diff_base_anchor) = self.diff_base_anchor { + if let Some(diff) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id) + && diff_base_anchor.is_valid(&diff.base_text()) + { + ret.with_diff_base_anchor(diff_base_anchor.bias_left(diff.base_text())) + } else { + ret.with_diff_base_anchor(diff_base_anchor) + } + } else { + ret } } - pub fn range_in_buffer(excerpt_id: ExcerptId, range: Range) -> Range { - Self::in_buffer(excerpt_id, range.start)..Self::in_buffer(excerpt_id, range.end) + fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Self { + if self.text_anchor.bias == Bias::Right { + return *self; + } + let Some(buffer) = snapshot.buffer_for_id(self.text_anchor.buffer_id) else { + return *self; + }; + let text_anchor = self.text_anchor().bias_right(&buffer); + let ret = Self::in_buffer(self.path, text_anchor); + if let Some(diff_base_anchor) = self.diff_base_anchor { + if let Some(diff) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id) + && diff_base_anchor.is_valid(&diff.base_text()) + { + ret.with_diff_base_anchor(diff_base_anchor.bias_right(diff.base_text())) + } else { + ret.with_diff_base_anchor(diff_base_anchor) + } + } else { + ret + } } - pub fn min() -> Self { - Self { - excerpt_id: ExcerptId::min(), - text_anchor: text::Anchor::MIN, + #[track_caller] + pub(crate) fn in_buffer(path: PathKeyIndex, text_anchor: text::Anchor) -> Self { + ExcerptAnchor { + path, diff_base_anchor: None, + text_anchor, } } - pub fn max() -> Self { - Self { - excerpt_id: ExcerptId::max(), - text_anchor: text::Anchor::MAX, - diff_base_anchor: None, + fn is_valid(&self, 
snapshot: &MultiBufferSnapshot) -> bool { + let Some(target) = self.try_seek_target(snapshot) else { + return false; + }; + let Some(buffer_snapshot) = snapshot.buffer_for_id(self.buffer_id()) else { + return false; + }; + // Early check to avoid invalid comparisons when seeking + if !buffer_snapshot.can_resolve(&self.text_anchor) { + return false; } + let mut cursor = snapshot.excerpts.cursor::(()); + cursor.seek(&target, Bias::Left); + let Some(excerpt) = cursor.item() else { + return false; + }; + let is_valid = self.text_anchor == excerpt.range.context.start + || self.text_anchor == excerpt.range.context.end + || self.text_anchor.is_valid(&buffer_snapshot); + is_valid + && excerpt + .range + .context + .start + .cmp(&self.text_anchor(), buffer_snapshot) + .is_le() + && excerpt + .range + .context + .end + .cmp(&self.text_anchor(), buffer_snapshot) + .is_ge() + } + + pub(crate) fn seek_target(&self, snapshot: &MultiBufferSnapshot) -> AnchorSeekTarget { + self.try_seek_target(snapshot) + .expect("anchor is from different multi-buffer") + } + + pub(crate) fn try_seek_target( + &self, + snapshot: &MultiBufferSnapshot, + ) -> Option { + let path_key = snapshot.try_path_for_anchor(*self)?; + let buffer = snapshot.buffer_for_path(&path_key).cloned(); + Some(AnchorSeekTarget::Excerpt { + path_key, + anchor: *self, + snapshot: buffer, + }) + } +} + +impl ToOffset for ExcerptAnchor { + fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffset { + Anchor::from(*self).to_offset(snapshot) + } + + fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffsetUtf16 { + Anchor::from(*self).to_offset_utf16(snapshot) + } +} + +impl ToPoint for ExcerptAnchor { + fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point { + Anchor::from(*self).to_point(snapshot) } + fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> rope::PointUtf16 { + Anchor::from(*self).to_point_utf16(snapshot) + } +} + +impl Anchor { pub fn is_min(&self) -> bool { - 
self.excerpt_id == ExcerptId::min() - && self.text_anchor.is_min() - && self.diff_base_anchor.is_none() + matches!(self, Self::Min) } pub fn is_max(&self) -> bool { - self.excerpt_id == ExcerptId::max() - && self.text_anchor.is_max() - && self.diff_base_anchor.is_none() + matches!(self, Self::Max) } - pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering { - if self == other { - return Ordering::Equal; - } + pub(crate) fn in_buffer(path: PathKeyIndex, text_anchor: text::Anchor) -> Self { + Self::Excerpt(ExcerptAnchor::in_buffer(path, text_anchor)) + } - let self_excerpt_id = snapshot.latest_excerpt_id(self.excerpt_id); - let other_excerpt_id = snapshot.latest_excerpt_id(other.excerpt_id); + pub(crate) fn range_in_buffer(path: PathKeyIndex, range: Range) -> Range { + Self::in_buffer(path, range.start)..Self::in_buffer(path, range.end) + } - let excerpt_id_cmp = self_excerpt_id.cmp(&other_excerpt_id, snapshot); - if excerpt_id_cmp.is_ne() { - return excerpt_id_cmp; - } - if self_excerpt_id == ExcerptId::max() - && self.text_anchor.is_max() - && self.text_anchor.is_max() - && self.diff_base_anchor.is_none() - && other.diff_base_anchor.is_none() - { - return Ordering::Equal; - } - if let Some(excerpt) = snapshot.excerpt(self_excerpt_id) { - let text_cmp = self.text_anchor.cmp(&other.text_anchor, &excerpt.buffer); - if text_cmp.is_ne() { - return text_cmp; - } - if (self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some()) - && let Some(base_text) = snapshot - .diff_state(excerpt.buffer_id) - .map(|diff| diff.base_text()) - { - let self_anchor = self.diff_base_anchor.filter(|a| a.is_valid(base_text)); - let other_anchor = other.diff_base_anchor.filter(|a| a.is_valid(base_text)); - return match (self_anchor, other_anchor) { - (Some(a), Some(b)) => a.cmp(&b, base_text), - (Some(_), None) => match other.text_anchor.bias { - Bias::Left => Ordering::Greater, - Bias::Right => Ordering::Less, - }, - (None, Some(_)) => match 
self.text_anchor.bias { - Bias::Left => Ordering::Less, - Bias::Right => Ordering::Greater, - }, - (None, None) => Ordering::Equal, - }; + pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering { + match (self, other) { + (Anchor::Min, Anchor::Min) => return Ordering::Equal, + (Anchor::Max, Anchor::Max) => return Ordering::Equal, + (Anchor::Min, _) => return Ordering::Less, + (Anchor::Max, _) => return Ordering::Greater, + (_, Anchor::Max) => return Ordering::Less, + (_, Anchor::Min) => return Ordering::Greater, + (Anchor::Excerpt(self_excerpt_anchor), Anchor::Excerpt(other_excerpt_anchor)) => { + self_excerpt_anchor.cmp(other_excerpt_anchor, snapshot) } } - Ordering::Equal } pub fn bias(&self) -> Bias { - self.text_anchor.bias + match self { + Anchor::Min => Bias::Left, + Anchor::Max => Bias::Right, + Anchor::Excerpt(anchor) => anchor.text_anchor.bias, + } } pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor { - if self.text_anchor.bias != Bias::Left - && let Some(excerpt) = snapshot.excerpt(self.excerpt_id) - { - return Self { - excerpt_id: excerpt.id, - text_anchor: self.text_anchor.bias_left(&excerpt.buffer), - diff_base_anchor: self.diff_base_anchor.map(|a| { - if let Some(base_text) = snapshot - .diff_state(excerpt.buffer_id) - .map(|diff| diff.base_text()) - && a.is_valid(&base_text) - { - return a.bias_left(base_text); - } - a - }), - }; + match self { + Anchor::Min => *self, + Anchor::Max => snapshot.anchor_before(snapshot.max_point()), + Anchor::Excerpt(anchor) => Anchor::Excerpt(anchor.bias_left(snapshot)), } - *self } pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor { - if self.text_anchor.bias != Bias::Right - && let Some(excerpt) = snapshot.excerpt(self.excerpt_id) - { - return Self { - excerpt_id: excerpt.id, - text_anchor: self.text_anchor.bias_right(&excerpt.buffer), - diff_base_anchor: self.diff_base_anchor.map(|a| { - if let Some(base_text) = snapshot - .diff_state(excerpt.buffer_id) - 
.map(|diff| diff.base_text()) - && a.is_valid(&base_text) - { - return a.bias_right(base_text); - } - a - }), - }; + match self { + Anchor::Max => *self, + Anchor::Min => snapshot.anchor_after(Point::zero()), + Anchor::Excerpt(anchor) => Anchor::Excerpt(anchor.bias_right(snapshot)), } - *self } pub fn summary(&self, snapshot: &MultiBufferSnapshot) -> D @@ -203,16 +342,111 @@ impl Anchor { } pub fn is_valid(&self, snapshot: &MultiBufferSnapshot) -> bool { - if self.is_min() || self.is_max() { - true - } else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) { - (self.text_anchor == excerpt.range.context.start - || self.text_anchor == excerpt.range.context.end - || self.text_anchor.is_valid(&excerpt.buffer)) - && excerpt.contains(self) - } else { - false + match self { + Anchor::Min | Anchor::Max => true, + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor.is_valid(snapshot), + } + } + + fn to_excerpt_anchor(&self, snapshot: &MultiBufferSnapshot) -> Option { + match self { + Anchor::Min => { + let excerpt = snapshot.excerpts.first()?; + + Some(ExcerptAnchor { + text_anchor: excerpt.range.context.start, + path: excerpt.path_key_index, + diff_base_anchor: None, + }) + } + Anchor::Excerpt(excerpt_anchor) => Some(*excerpt_anchor), + Anchor::Max => { + let excerpt = snapshot.excerpts.last()?; + + Some(ExcerptAnchor { + text_anchor: excerpt.range.context.end, + path: excerpt.path_key_index, + diff_base_anchor: None, + }) + } + } + } + + pub(crate) fn seek_target(&self, snapshot: &MultiBufferSnapshot) -> AnchorSeekTarget { + let Some(excerpt_anchor) = self.to_excerpt_anchor(snapshot) else { + return AnchorSeekTarget::Empty; + }; + + excerpt_anchor.seek_target(snapshot) + } + + pub(crate) fn excerpt_anchor(&self) -> Option { + match self { + Anchor::Min | Anchor::Max => None, + Anchor::Excerpt(excerpt_anchor) => Some(*excerpt_anchor), + } + } + + pub(crate) fn text_anchor(&self) -> Option { + match self { + Anchor::Min | Anchor::Max => None, + 
Anchor::Excerpt(excerpt_anchor) => Some(excerpt_anchor.text_anchor()), + } + } + + pub fn opaque_id(&self) -> Option<[u8; 20]> { + self.text_anchor().map(|a| a.opaque_id()) + } + + /// Note: anchor_to_buffer_anchor is probably what you want + pub fn raw_text_anchor(&self) -> Option { + match self { + Anchor::Min | Anchor::Max => None, + Anchor::Excerpt(excerpt_anchor) => Some(excerpt_anchor.text_anchor), + } + } + + pub(crate) fn try_seek_target( + &self, + snapshot: &MultiBufferSnapshot, + ) -> Option { + let Some(excerpt_anchor) = self.to_excerpt_anchor(snapshot) else { + return Some(AnchorSeekTarget::Empty); + }; + excerpt_anchor.try_seek_target(snapshot) + } + + /// Returns the text anchor for this anchor. + /// Panics if the anchor is from a different buffer. + pub fn text_anchor_in(&self, buffer: &BufferSnapshot) -> text::Anchor { + match self { + Anchor::Min => text::Anchor::min_for_buffer(buffer.remote_id()), + Anchor::Excerpt(excerpt_anchor) => { + let text_anchor = excerpt_anchor.text_anchor; + assert_eq!(text_anchor.buffer_id, buffer.remote_id()); + text_anchor + } + Anchor::Max => text::Anchor::max_for_buffer(buffer.remote_id()), + } + } + + pub fn diff_base_anchor(&self) -> Option { + self.excerpt_anchor()?.diff_base_anchor + } + + #[cfg(any(test, feature = "test-support"))] + pub fn expect_text_anchor(&self) -> text::Anchor { + self.excerpt_anchor().unwrap().text_anchor + } + + pub fn with_diff_base_anchor(mut self, diff_base_anchor: text::Anchor) -> Self { + match &mut self { + Anchor::Min | Anchor::Max => {} + Anchor::Excerpt(excerpt_anchor) => { + excerpt_anchor.diff_base_anchor = Some(diff_base_anchor); + } } + self } } diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 21b4d0e1a6c84189a9926d2d181f097c2bdf4ea7..44e2152f5258b19aada8b5b602075c2b57a1baf1 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -8,6 +8,7 @@ use self::transaction::History; 
pub use anchor::{Anchor, AnchorRangeExt}; +use anchor::{AnchorSeekTarget, ExcerptAnchor}; use anyhow::{Result, anyhow}; use buffer_diff::{ BufferDiff, BufferDiffEvent, BufferDiffSnapshot, DiffChanged, DiffHunkSecondaryStatus, @@ -15,14 +16,14 @@ use buffer_diff::{ }; use clock::ReplicaId; use collections::{BTreeMap, Bound, HashMap, HashSet}; -use gpui::{App, Context, Entity, EntityId, EventEmitter}; +use gpui::{App, Context, Entity, EventEmitter}; use itertools::Itertools; use language::{ - AutoindentMode, BracketMatch, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability, - CharClassifier, CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, File, - IndentGuideSettings, IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, - OutlineItem, Point, PointUtf16, Selection, TextDimension, TextObject, ToOffset as _, - ToPoint as _, TransactionId, TreeSitterOptions, Unclipped, + AutoindentMode, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability, CharClassifier, + CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, File, IndentGuideSettings, + IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, OutlineItem, Point, + PointUtf16, Selection, TextDimension, TextObject, ToOffset as _, ToPoint as _, TransactionId, + TreeSitterOptions, Unclipped, language_settings::{AllLanguageSettings, LanguageSettings}, }; @@ -37,7 +38,8 @@ use std::{ any::type_name, borrow::Cow, cell::{Cell, OnceCell, Ref, RefCell}, - cmp, fmt, + cmp::{self, Ordering}, + fmt, future::Future, io, iter::{self, FromIterator}, @@ -51,15 +53,13 @@ use std::{ use sum_tree::{Bias, Cursor, Dimension, Dimensions, SumTree, TreeMap}; use text::{ BufferId, Edit, LineIndent, TextSummary, - locator::Locator, subscription::{Subscription, Topic}, }; use theme::SyntaxTheme; use unicode_segmentation::UnicodeSegmentation; -use util::post_inc; use ztracing::instrument; -pub use self::path_key::{PathExcerptInsertResult, PathKey}; +pub use 
self::path_key::PathKey; pub static EXCERPT_CONTEXT_LINES: OnceLock u32> = OnceLock::new(); @@ -67,9 +67,6 @@ pub fn excerpt_context_lines(cx: &App) -> u32 { EXCERPT_CONTEXT_LINES.get().map(|f| f(cx)).unwrap_or(2) } -#[derive(Debug, Default, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct ExcerptId(u32); - /// One or more [`Buffers`](Buffer) being edited in a single view. /// /// See @@ -79,10 +76,6 @@ pub struct MultiBuffer { snapshot: RefCell, /// Contains the state of the buffers being edited buffers: BTreeMap, - /// Mapping from path keys to their excerpts. - excerpts_by_path: BTreeMap>, - /// Mapping from excerpt IDs to their path key. - paths_by_excerpt: HashMap, /// Mapping from buffer IDs to their diff states diffs: HashMap, subscriptions: Topic, @@ -98,24 +91,20 @@ pub struct MultiBuffer { buffer_changed_since_sync: Rc>, } +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +struct PathKeyIndex(u64); + #[derive(Clone, Debug, PartialEq, Eq)] pub enum Event { - ExcerptsAdded { + BufferRangesUpdated { buffer: Entity, - predecessor: ExcerptId, - excerpts: Vec<(ExcerptId, ExcerptRange)>, + path_key: PathKey, + ranges: Vec>, }, - ExcerptsRemoved { - ids: Vec, - /// Contains only buffer IDs for which all excerpts have been removed. - /// Buffers that still have remaining excerpts are never included. + BuffersRemoved { removed_buffer_ids: Vec, }, - ExcerptsExpanded { - ids: Vec, - }, - ExcerptsEdited { - excerpt_ids: Vec, + BuffersEdited { buffer_ids: Vec, }, DiffHunksToggled, @@ -145,14 +134,14 @@ pub struct MultiBufferDiffHunk { pub buffer_id: BufferId, /// The range of the underlying buffer that this hunk corresponds to. pub buffer_range: Range, - /// The excerpt that contains the diff hunk. - pub excerpt_id: ExcerptId, /// The range within the buffer's diff base that this hunk corresponds to. pub diff_base_byte_range: Range, /// The status of this hunk (added/modified/deleted and secondary status). 
pub status: DiffHunkStatus, /// The word diffs for this hunk. pub word_diffs: Vec>, + pub excerpt_range: ExcerptRange, + pub multi_buffer_range: Range, } impl MultiBufferDiffHunk { @@ -165,17 +154,12 @@ impl MultiBufferDiffHunk { && self.buffer_range.start.is_min() && self.buffer_range.end.is_max() } - - pub fn multi_buffer_range(&self) -> Range { - let start = Anchor::in_buffer(self.excerpt_id, self.buffer_range.start); - let end = Anchor::in_buffer(self.excerpt_id, self.buffer_range.end); - start..end - } } pub type MultiBufferPoint = Point; +/// ExcerptOffset is offset into the non-deleted text of the multibuffer type ExcerptOffset = ExcerptDimension; -type ExcerptPoint = ExcerptDimension; +/// ExcerptOffset is based on the non-deleted text of the multibuffer #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq, Hash, serde::Deserialize)] #[serde(transparent)] @@ -518,10 +502,6 @@ pub trait ToPoint: 'static + fmt::Debug { struct BufferState { buffer: Entity, - last_version: RefCell, - last_non_text_state_update_count: Cell, - // Note, any changes to this field value require updating snapshot.buffer_locators as well - excerpts: Vec, _subscriptions: [gpui::Subscription; 2], } @@ -694,15 +674,31 @@ impl DiffState { } } +#[derive(Clone)] +struct BufferStateSnapshot { + path_key: PathKey, + path_key_index: PathKeyIndex, + buffer_snapshot: BufferSnapshot, +} + +impl fmt::Debug for BufferStateSnapshot { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("BufferStateSnapshot") + .field("path_key", &self.path_key) + .field("buffer_id", &self.buffer_snapshot.remote_id()) + .finish() + } +} + /// The contents of a [`MultiBuffer`] at a single point in time. 
#[derive(Clone, Default)] pub struct MultiBufferSnapshot { excerpts: SumTree, - buffer_locators: TreeMap>, + buffers: TreeMap, + path_keys_by_index: TreeMap, + indices_by_path_key: TreeMap, diffs: SumTree, diff_transforms: SumTree, - excerpt_ids: SumTree, - replaced_excerpts: Arc>, non_text_state_update_count: usize, edit_count: usize, is_dirty: bool, @@ -717,24 +713,12 @@ pub struct MultiBufferSnapshot { show_headers: bool, } -// follower: None -// - BufferContent(Some) -// - BufferContent(None) -// - DeletedHunk -// -// follower: Some -// - BufferContent(Some) -// - BufferContent(None) - #[derive(Debug, Clone)] enum DiffTransform { - // RealText BufferContent { summary: MBTextSummary, - // modified_hunk_info inserted_hunk_info: Option, }, - // ExpandedHunkText DeletedHunk { summary: TextSummary, buffer_id: BufferId, @@ -746,52 +730,71 @@ enum DiffTransform { #[derive(Clone, Copy, Debug)] struct DiffTransformHunkInfo { - excerpt_id: ExcerptId, + buffer_id: BufferId, hunk_start_anchor: text::Anchor, hunk_secondary_status: DiffHunkSecondaryStatus, is_logically_deleted: bool, + excerpt_end: ExcerptAnchor, } impl Eq for DiffTransformHunkInfo {} impl PartialEq for DiffTransformHunkInfo { fn eq(&self, other: &DiffTransformHunkInfo) -> bool { - self.excerpt_id == other.excerpt_id && self.hunk_start_anchor == other.hunk_start_anchor + self.buffer_id == other.buffer_id && self.hunk_start_anchor == other.hunk_start_anchor } } impl std::hash::Hash for DiffTransformHunkInfo { fn hash(&self, state: &mut H) { - self.excerpt_id.hash(state); + self.buffer_id.hash(state); self.hunk_start_anchor.hash(state); } } #[derive(Clone)] -pub struct ExcerptInfo { - pub id: ExcerptId, - pub buffer: Arc, - pub buffer_id: BufferId, +pub struct ExcerptBoundaryInfo { + pub start_anchor: Anchor, pub range: ExcerptRange, pub end_row: MultiBufferRow, } -impl std::fmt::Debug for ExcerptInfo { +impl ExcerptBoundaryInfo { + pub fn start_text_anchor(&self) -> text::Anchor { + self.range.context.start + 
} + pub fn buffer_id(&self) -> BufferId { + self.start_text_anchor().buffer_id + } + pub fn buffer<'a>(&self, snapshot: &'a MultiBufferSnapshot) -> &'a BufferSnapshot { + snapshot + .buffer_for_id(self.buffer_id()) + .expect("buffer snapshot not found for excerpt boundary") + } +} + +impl std::fmt::Debug for ExcerptBoundaryInfo { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct(type_name::()) - .field("id", &self.id) - .field("buffer_id", &self.buffer_id) - .field("path", &self.buffer.file().map(|f| f.path())) + .field("buffer_id", &self.buffer_id()) .field("range", &self.range) .finish() } } +impl PartialEq for ExcerptBoundaryInfo { + fn eq(&self, other: &Self) -> bool { + self.start_anchor == other.start_anchor && self.range == other.range + } +} + +impl Eq for ExcerptBoundaryInfo {} + /// A boundary between `Excerpt`s in a [`MultiBuffer`] #[derive(Debug)] pub struct ExcerptBoundary { - pub prev: Option, - pub next: ExcerptInfo, + pub prev: Option, + pub next: ExcerptBoundaryInfo, /// The row in the `MultiBuffer` where the boundary is located pub row: MultiBufferRow, } @@ -800,7 +803,7 @@ impl ExcerptBoundary { pub fn starts_new_buffer(&self) -> bool { match (self.prev.as_ref(), &self.next) { (None, _) => true, - (Some(prev), next) => prev.buffer_id != next.buffer_id, + (Some(prev), next) => prev.buffer_id() != next.buffer_id(), } } } @@ -808,7 +811,7 @@ impl ExcerptBoundary { #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct ExpandInfo { pub direction: ExpandExcerptDirection, - pub excerpt_id: ExcerptId, + pub start_anchor: Anchor, } #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] @@ -822,45 +825,20 @@ pub struct RowInfo { } /// A slice into a [`Buffer`] that is being edited in a [`MultiBuffer`]. 
-#[derive(Clone)] -struct Excerpt { - /// The unique identifier for this excerpt - id: ExcerptId, +#[derive(Clone, Debug)] +pub(crate) struct Excerpt { /// The location of the excerpt in the [`MultiBuffer`] - locator: Locator, - /// The buffer being excerpted - buffer_id: BufferId, - /// A snapshot of the buffer being excerpted - buffer: Arc, + pub(crate) path_key: PathKey, + pub(crate) path_key_index: PathKeyIndex, + pub(crate) buffer_id: BufferId, /// The range of the buffer to be shown in the excerpt - range: ExcerptRange, + pub(crate) range: ExcerptRange, + /// The last row in the excerpted slice of the buffer - max_buffer_row: BufferRow, + pub(crate) max_buffer_row: BufferRow, /// A summary of the text in the excerpt - text_summary: TextSummary, - has_trailing_newline: bool, -} - -/// A public view into an `Excerpt` in a [`MultiBuffer`]. -/// -/// Contains methods for getting the [`Buffer`] of the excerpt, -/// as well as mapping offsets to/from buffer and multibuffer coordinates. -#[derive(Clone)] -pub struct MultiBufferExcerpt<'a> { - excerpt: &'a Excerpt, - diff_transforms: - sum_tree::Cursor<'a, 'static, DiffTransform, DiffTransforms>, - /// The offset in the multibuffer considering diff transforms. - offset: MultiBufferOffset, - /// The offset in the multibuffer without diff transforms. - excerpt_offset: ExcerptOffset, - buffer_offset: BufferOffset, -} - -#[derive(Clone, Debug)] -struct ExcerptIdMapping { - id: ExcerptId, - locator: Locator, + pub(crate) text_summary: TextSummary, + pub(crate) has_trailing_newline: bool, } /// A range of text from a single [`Buffer`], to be shown as an `Excerpt`. 
@@ -883,16 +861,37 @@ impl ExcerptRange { } } -#[derive(Clone, Debug, Default)] +impl ExcerptRange { + pub fn contains(&self, t: &text::Anchor, snapshot: &BufferSnapshot) -> bool { + self.context.start.cmp(t, snapshot).is_le() && self.context.end.cmp(t, snapshot).is_ge() + } +} + +#[derive(Clone, Debug)] pub struct ExcerptSummary { - excerpt_id: ExcerptId, - /// The location of the last [`Excerpt`] being summarized - excerpt_locator: Locator, + path_key: PathKey, + max_anchor: Option, widest_line_number: u32, text: MBTextSummary, count: usize, } +impl ExcerptSummary { + pub fn min() -> Self { + ExcerptSummary { + path_key: PathKey::min(), + max_anchor: None, + widest_line_number: 0, + text: MBTextSummary::default(), + count: 0, + } + } + + fn len(&self) -> ExcerptOffset { + ExcerptDimension(self.text.len) + } +} + #[derive(Debug, Clone)] pub struct DiffTransformSummary { input: MBTextSummary, @@ -1068,13 +1067,13 @@ pub struct MultiBufferChunks<'a> { excerpts: Cursor<'a, 'static, Excerpt, ExcerptOffset>, diff_transforms: Cursor<'a, 'static, DiffTransform, Dimensions>, - diffs: &'a SumTree, diff_base_chunks: Option<(BufferId, BufferChunks<'a>)>, buffer_chunk: Option>, range: Range, excerpt_offset_range: Range, excerpt_chunks: Option>, language_aware: bool, + snapshot: &'a MultiBufferSnapshot, } pub struct ReversedMultiBufferChunks<'a> { @@ -1128,8 +1127,8 @@ impl<'a, MBD: MultiBufferDimension> Dimension<'a, DiffTransformSummary> for Diff struct MultiBufferCursor<'a, MBD, BD> { excerpts: Cursor<'a, 'static, Excerpt, ExcerptDimension>, diff_transforms: Cursor<'a, 'static, DiffTransform, DiffTransforms>, - diffs: &'a SumTree, cached_region: OnceCell>>, + snapshot: &'a MultiBufferSnapshot, } #[derive(Clone)] @@ -1144,8 +1143,8 @@ struct MultiBufferRegion<'a, MBD, BD> { } struct ExcerptChunks<'a> { - excerpt_id: ExcerptId, content_chunks: BufferChunks<'a>, + end: ExcerptAnchor, has_footer: bool, } @@ -1155,7 +1154,6 @@ struct BufferEdit { new_text: Arc, is_insertion: 
bool, original_indent_column: Option, - excerpt_id: ExcerptId, } #[derive(Clone, Copy, Debug, PartialEq)] @@ -1258,8 +1256,6 @@ impl MultiBuffer { singleton: false, capability, title: None, - excerpts_by_path: Default::default(), - paths_by_excerpt: Default::default(), buffer_changed_since_sync: Default::default(), history: History::default(), } @@ -1276,11 +1272,6 @@ impl MultiBuffer { *buffer_id, BufferState { buffer: buffer_state.buffer.clone(), - last_version: buffer_state.last_version.clone(), - last_non_text_state_update_count: buffer_state - .last_non_text_state_update_count - .clone(), - excerpts: buffer_state.excerpts.clone(), _subscriptions: [ new_cx.observe(&buffer_state.buffer, |_, _, cx| cx.notify()), new_cx.subscribe(&buffer_state.buffer, Self::on_buffer_event), @@ -1295,8 +1286,6 @@ impl MultiBuffer { Self { snapshot: RefCell::new(self.snapshot.borrow().clone()), buffers, - excerpts_by_path: Default::default(), - paths_by_excerpt: Default::default(), diffs: diff_bases, subscriptions: Default::default(), singleton: self.singleton, @@ -1451,7 +1440,7 @@ impl MultiBuffer { _ => Default::default(), }; - let (buffer_edits, edited_excerpt_ids) = MultiBuffer::convert_edits_to_buffer_edits( + let buffer_edits = MultiBuffer::convert_edits_to_buffer_edits( edits, this.snapshot.get_mut(), &original_indent_columns, @@ -1472,14 +1461,12 @@ impl MultiBuffer { mut new_text, mut is_insertion, original_indent_column, - excerpt_id, }) = edits.next() { while let Some(BufferEdit { range: next_range, is_insertion: next_is_insertion, new_text: next_new_text, - excerpt_id: next_excerpt_id, .. 
}) = edits.peek() { @@ -1492,9 +1479,7 @@ impl MultiBuffer { if should_coalesce { range.end = cmp::max(next_range.end, range.end); is_insertion |= *next_is_insertion; - if excerpt_id == *next_excerpt_id { - new_text = format!("{new_text}{next_new_text}").into(); - } + new_text = format!("{new_text}{next_new_text}").into(); edits.next(); } else { break; @@ -1542,10 +1527,7 @@ impl MultiBuffer { }) } - cx.emit(Event::ExcerptsEdited { - excerpt_ids: edited_excerpt_ids, - buffer_ids, - }); + cx.emit(Event::BuffersEdited { buffer_ids }); } } @@ -1553,9 +1535,8 @@ impl MultiBuffer { edits: Vec<(Range, Arc)>, snapshot: &MultiBufferSnapshot, original_indent_columns: &[Option], - ) -> (HashMap>, Vec) { + ) -> HashMap> { let mut buffer_edits: HashMap> = Default::default(); - let mut edited_excerpt_ids = Vec::new(); let mut cursor = snapshot.cursor::(); for (ix, (range, new_text)) in edits.into_iter().enumerate() { let original_indent_column = original_indent_columns.get(ix).copied().flatten(); @@ -1600,11 +1581,10 @@ impl MultiBuffer { let buffer_end = (end_region.buffer_range.start + end_overshoot).min(end_region.buffer_range.end); - if start_region.excerpt.id == end_region.excerpt.id { + if start_region.excerpt == end_region.excerpt { if start_region.buffer.capability == Capability::ReadWrite && start_region.is_main_buffer { - edited_excerpt_ids.push(start_region.excerpt.id); buffer_edits .entry(start_region.buffer.remote_id()) .or_default() @@ -1613,7 +1593,6 @@ impl MultiBuffer { new_text, is_insertion: true, original_indent_column, - excerpt_id: start_region.excerpt.id, }); } } else { @@ -1622,7 +1601,6 @@ impl MultiBuffer { if start_region.buffer.capability == Capability::ReadWrite && start_region.is_main_buffer { - edited_excerpt_ids.push(start_region.excerpt.id); buffer_edits .entry(start_region.buffer.remote_id()) .or_default() @@ -1631,14 +1609,11 @@ impl MultiBuffer { new_text: new_text.clone(), is_insertion: true, original_indent_column, - excerpt_id: 
start_region.excerpt.id, }); } - let excerpt_id = end_region.excerpt.id; if end_region.buffer.capability == Capability::ReadWrite && end_region.is_main_buffer { - edited_excerpt_ids.push(excerpt_id); buffer_edits .entry(end_region.buffer.remote_id()) .or_default() @@ -1647,18 +1622,17 @@ impl MultiBuffer { new_text: new_text.clone(), is_insertion: false, original_indent_column, - excerpt_id, }); } + let end_region_excerpt = end_region.excerpt.clone(); cursor.seek(&range.start); cursor.next_excerpt(); while let Some(region) = cursor.region() { - if region.excerpt.id == excerpt_id { + if region.excerpt == &end_region_excerpt { break; } if region.buffer.capability == Capability::ReadWrite && region.is_main_buffer { - edited_excerpt_ids.push(region.excerpt.id); buffer_edits .entry(region.buffer.remote_id()) .or_default() @@ -1667,14 +1641,13 @@ impl MultiBuffer { new_text: new_text.clone(), is_insertion: false, original_indent_column, - excerpt_id: region.excerpt.id, }); } cursor.next_excerpt(); } } } - (buffer_edits, edited_excerpt_ids) + buffer_edits } pub fn autoindent_ranges(&mut self, ranges: I, cx: &mut Context) @@ -1706,7 +1679,7 @@ impl MultiBuffer { edits: Vec<(Range, Arc)>, cx: &mut Context, ) { - let (buffer_edits, edited_excerpt_ids) = + let buffer_edits = MultiBuffer::convert_edits_to_buffer_edits(edits, this.snapshot.get_mut(), &[]); let mut buffer_ids = Vec::new(); @@ -1730,10 +1703,7 @@ impl MultiBuffer { }) } - cx.emit(Event::ExcerptsEdited { - excerpt_ids: edited_excerpt_ids, - buffer_ids, - }); + cx.emit(Event::BuffersEdited { buffer_ids }); } } @@ -1746,26 +1716,39 @@ impl MultiBuffer { ) { let mut selections_by_buffer: HashMap>> = Default::default(); - let snapshot = self.read(cx); - let mut cursor = snapshot.excerpts.cursor::>(()); + let snapshot = self.snapshot(cx); + let mut cursor = snapshot.excerpts.cursor::(()); for selection in selections { - let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id); - let end_locator = 
snapshot.excerpt_locator_for_id(selection.end.excerpt_id); + let start = selection.start.seek_target(&snapshot); - cursor.seek(&Some(start_locator), Bias::Left); - while let Some(excerpt) = cursor.item() - && excerpt.locator <= *end_locator - { - let mut start = excerpt.range.context.start; - let mut end = excerpt.range.context.end; - if excerpt.id == selection.start.excerpt_id { - start = selection.start.text_anchor; - } - if excerpt.id == selection.end.excerpt_id { - end = selection.end.text_anchor; + cursor.seek(&start, Bias::Left); + while let Some(excerpt) = cursor.item() { + let excerpt_start = + Anchor::in_buffer(excerpt.path_key_index, excerpt.range.context.start); + if excerpt_start.cmp(&selection.end, &snapshot).is_gt() { + break; } + let buffer = excerpt.buffer_snapshot(&snapshot); + let start = *text::Anchor::max( + &excerpt.range.context.start, + &selection + .start + .excerpt_anchor() + .map(|excerpt_anchor| excerpt_anchor.text_anchor()) + .unwrap_or(text::Anchor::min_for_buffer(excerpt.buffer_id)), + buffer, + ); + let end = *text::Anchor::min( + &excerpt.range.context.end, + &selection + .end + .excerpt_anchor() + .map(|excerpt_anchor| excerpt_anchor.text_anchor()) + .unwrap_or(text::Anchor::max_for_buffer(excerpt.buffer_id)), + buffer, + ); selections_by_buffer - .entry(excerpt.buffer_id) + .entry(buffer.remote_id()) .or_default() .push(Selection { id: selection.id, @@ -1787,25 +1770,9 @@ impl MultiBuffer { } } - for (buffer_id, mut selections) in selections_by_buffer { + for (buffer_id, selections) in selections_by_buffer { self.buffers[&buffer_id].buffer.update(cx, |buffer, cx| { - selections.sort_unstable_by(|a, b| a.start.cmp(&b.start, buffer)); - let mut selections = selections.into_iter().peekable(); - let merged_selections = Arc::from_iter(iter::from_fn(|| { - let mut selection = selections.next()?; - while let Some(next_selection) = selections.peek() { - if selection.end.cmp(&next_selection.start, buffer).is_ge() { - let next_selection = 
selections.next().unwrap(); - if next_selection.end.cmp(&selection.end, buffer).is_ge() { - selection.end = next_selection.end; - } - } else { - break; - } - } - Some(selection) - })); - buffer.set_active_selections(merged_selections, line_mode, cursor_shape, cx); + buffer.set_active_selections(selections.into(), line_mode, cursor_shape, cx); }); } } @@ -1821,200 +1788,31 @@ impl MultiBuffer { #[instrument(skip_all)] fn merge_excerpt_ranges<'a>( expanded_ranges: impl IntoIterator> + 'a, - ) -> (Vec>, Vec) { + ) -> Vec> { + let mut sorted: Vec<_> = expanded_ranges.into_iter().collect(); + sorted.sort_by_key(|range| range.context.start); let mut merged_ranges: Vec> = Vec::new(); - let mut counts: Vec = Vec::new(); - for range in expanded_ranges { + for range in sorted { if let Some(last_range) = merged_ranges.last_mut() { - assert!( - last_range.context.start <= range.context.start, - "ranges must be sorted: {last_range:?} <= {range:?}" - ); if last_range.context.end >= range.context.start || last_range.context.end.row + 1 == range.context.start.row { last_range.context.end = range.context.end.max(last_range.context.end); - *counts.last_mut().unwrap() += 1; continue; } } merged_ranges.push(range.clone()); - counts.push(1); - } - (merged_ranges, counts) - } - - pub fn insert_excerpts_after( - &mut self, - prev_excerpt_id: ExcerptId, - buffer: Entity, - ranges: impl IntoIterator>, - cx: &mut Context, - ) -> Vec - where - O: text::ToOffset, - { - let mut ids = Vec::new(); - let mut next_excerpt_id = - if let Some(last_entry) = self.snapshot.borrow().excerpt_ids.last() { - last_entry.id.0 + 1 - } else { - 1 - }; - self.insert_excerpts_with_ids_after( - prev_excerpt_id, - buffer, - ranges.into_iter().map(|range| { - let id = ExcerptId(post_inc(&mut next_excerpt_id)); - ids.push(id); - (id, range) - }), - cx, - ); - ids - } - - pub fn insert_excerpts_with_ids_after( - &mut self, - prev_excerpt_id: ExcerptId, - buffer: Entity, - ranges: impl IntoIterator)>, - cx: &mut 
Context, - ) where - O: text::ToOffset, - { - assert_eq!(self.history.transaction_depth(), 0); - let mut ranges = ranges.into_iter().peekable(); - if ranges.peek().is_none() { - return Default::default(); - } - - self.sync_mut(cx); - - let buffer_snapshot = buffer.read(cx).snapshot(); - let buffer_id = buffer_snapshot.remote_id(); - - let buffer_state = self.buffers.entry(buffer_id).or_insert_with(|| { - self.buffer_changed_since_sync.replace(true); - buffer.update(cx, |buffer, _| { - buffer.record_changes(Rc::downgrade(&self.buffer_changed_since_sync)); - }); - BufferState { - last_version: RefCell::new(buffer_snapshot.version().clone()), - last_non_text_state_update_count: Cell::new( - buffer_snapshot.non_text_state_update_count(), - ), - excerpts: Default::default(), - _subscriptions: [ - cx.observe(&buffer, |_, _, cx| cx.notify()), - cx.subscribe(&buffer, Self::on_buffer_event), - ], - buffer: buffer.clone(), - } - }); - - let mut snapshot = self.snapshot.get_mut(); - - let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone(); - let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids); - let mut cursor = snapshot.excerpts.cursor::>(()); - let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right); - prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone(); - - let edit_start = ExcerptDimension(new_excerpts.summary().text.len); - new_excerpts.update_last( - |excerpt| { - excerpt.has_trailing_newline = true; - }, - (), - ); - - let next_locator = if let Some(excerpt) = cursor.item() { - excerpt.locator.clone() - } else { - Locator::max() - }; - - let mut excerpts = Vec::new(); - let buffer_snapshot = Arc::new(buffer_snapshot); - while let Some((id, range)) = ranges.next() { - let locator = Locator::between(&prev_locator, &next_locator); - if let Err(ix) = buffer_state.excerpts.binary_search(&locator) { - buffer_state.excerpts.insert(ix, locator.clone()); - } - let range = ExcerptRange { - context: 
buffer_snapshot.anchor_before(&range.context.start) - ..buffer_snapshot.anchor_after(&range.context.end), - primary: buffer_snapshot.anchor_before(&range.primary.start) - ..buffer_snapshot.anchor_after(&range.primary.end), - }; - excerpts.push((id, range.clone())); - let excerpt = Excerpt::new( - id, - locator.clone(), - buffer_id, - buffer_snapshot.clone(), - range, - ranges.peek().is_some() || cursor.item().is_some(), - ); - new_excerpts.push(excerpt, ()); - prev_locator = locator.clone(); - - if let Some(last_mapping_entry) = new_excerpt_ids.last() { - assert!(id > last_mapping_entry.id, "excerpt ids must be increasing"); - } - new_excerpt_ids.push(ExcerptIdMapping { id, locator }, ()); - } - snapshot - .buffer_locators - .insert(buffer_id, buffer_state.excerpts.iter().cloned().collect()); - - let edit_end = ExcerptDimension(new_excerpts.summary().text.len); - - let suffix = cursor.suffix(); - let changed_trailing_excerpt = suffix.is_empty(); - new_excerpts.append(suffix, ()); - drop(cursor); - snapshot.excerpts = new_excerpts; - snapshot.excerpt_ids = new_excerpt_ids; - if changed_trailing_excerpt { - snapshot.trailing_excerpt_update_count += 1; } - - let edits = Self::sync_diff_transforms( - &mut snapshot, - vec![Edit { - old: edit_start..edit_start, - new: edit_start..edit_end, - }], - DiffChangeKind::BufferEdited, - ); - if !edits.is_empty() { - self.subscriptions.publish(edits); - } - - cx.emit(Event::Edited { - edited_buffer: None, - is_local: true, - }); - cx.emit(Event::ExcerptsAdded { - buffer, - predecessor: prev_excerpt_id, - excerpts, - }); - cx.notify(); + merged_ranges } pub fn clear(&mut self, cx: &mut Context) { self.sync_mut(cx); - let ids = self.excerpt_ids(); let removed_buffer_ids = std::mem::take(&mut self.buffers).into_keys().collect(); - self.excerpts_by_path.clear(); - self.paths_by_excerpt.clear(); + self.diffs.clear(); let MultiBufferSnapshot { excerpts, - buffer_locators, - diffs: _, + diffs, diff_transforms: _, 
non_text_state_update_count: _, edit_count: _, @@ -2023,27 +1821,25 @@ impl MultiBuffer { has_conflict, has_inverted_diff, singleton: _, - excerpt_ids: _, - replaced_excerpts, trailing_excerpt_update_count, all_diff_hunks_expanded: _, show_deleted_hunks: _, use_extended_diff_range: _, show_headers: _, + path_keys_by_index: _, + indices_by_path_key: _, + buffers, } = self.snapshot.get_mut(); - buffer_locators.clear(); let start = ExcerptDimension(MultiBufferOffset::ZERO); let prev_len = ExcerptDimension(excerpts.summary().text.len); *excerpts = Default::default(); + *buffers = Default::default(); + *diffs = Default::default(); *trailing_excerpt_update_count += 1; *is_dirty = false; *has_deleted_file = false; *has_conflict = false; *has_inverted_diff = false; - match Arc::get_mut(replaced_excerpts) { - Some(replaced_excerpts) => replaced_excerpts.clear(), - None => *replaced_excerpts = Default::default(), - } let edits = Self::sync_diff_transforms( self.snapshot.get_mut(), @@ -2060,118 +1856,16 @@ impl MultiBuffer { edited_buffer: None, is_local: true, }); - cx.emit(Event::ExcerptsRemoved { - ids, - removed_buffer_ids, - }); + cx.emit(Event::BuffersRemoved { removed_buffer_ids }); cx.notify(); } - #[ztracing::instrument(skip_all)] - pub fn excerpts_for_buffer( - &self, - buffer_id: BufferId, - cx: &App, - ) -> Vec<(ExcerptId, Arc, ExcerptRange)> { - let mut excerpts = Vec::new(); - let snapshot = self.read(cx); - let mut cursor = snapshot.excerpts.cursor::>(()); - if let Some(locators) = snapshot.buffer_locators.get(&buffer_id) { - for locator in &**locators { - cursor.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = cursor.item() - && excerpt.locator == *locator - { - excerpts.push((excerpt.id, excerpt.buffer.clone(), excerpt.range.clone())); - } - } - } - - excerpts - } - - pub fn excerpt_ranges_for_buffer(&self, buffer_id: BufferId, cx: &App) -> Vec> { - let snapshot = self.read(cx); - let mut excerpts = snapshot - .excerpts - .cursor::, 
ExcerptPoint>>(()); - let mut diff_transforms = snapshot - .diff_transforms - .cursor::>>(()); - diff_transforms.next(); - let locators = snapshot - .buffer_locators - .get(&buffer_id) - .into_iter() - .flat_map(|v| &**v); - let mut result = Vec::new(); - for locator in locators { - excerpts.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = excerpts.item() - && excerpt.locator == *locator - { - let excerpt_start = excerpts.start().1; - let excerpt_end = excerpt_start + excerpt.text_summary.lines; - - diff_transforms.seek_forward(&excerpt_start, Bias::Left); - let overshoot = excerpt_start - diff_transforms.start().0; - let start = diff_transforms.start().1 + overshoot; - - diff_transforms.seek_forward(&excerpt_end, Bias::Right); - let overshoot = excerpt_end - diff_transforms.start().0; - let end = diff_transforms.start().1 + overshoot; - - result.push(start.0..end.0) - } - } - result - } - - pub fn excerpt_buffer_ids(&self) -> Vec { - self.snapshot - .borrow() - .excerpts - .iter() - .map(|entry| entry.buffer_id) - .collect() - } - - pub fn excerpt_ids(&self) -> Vec { - let snapshot = self.snapshot.borrow(); - let mut ids = Vec::with_capacity(snapshot.excerpts.summary().count); - ids.extend(snapshot.excerpts.iter().map(|entry| entry.id)); - ids - } - - pub fn excerpt_containing( - &self, - position: impl ToOffset, - cx: &App, - ) -> Option<(ExcerptId, Entity, Range)> { + pub fn range_for_buffer(&self, buffer_id: BufferId, cx: &App) -> Option> { let snapshot = self.read(cx); - let offset = position.to_offset(&snapshot); - - let mut cursor = snapshot.cursor::(); - cursor.seek(&offset); - cursor - .excerpt() - .or_else(|| snapshot.excerpts.last()) - .map(|excerpt| { - ( - excerpt.id, - self.buffers.get(&excerpt.buffer_id).unwrap().buffer.clone(), - excerpt.range.context.clone(), - ) - }) - } - - pub fn buffer_for_anchor(&self, anchor: Anchor, cx: &App) -> Option> { - if let Some(buffer_id) = anchor.text_anchor.buffer_id { - self.buffer(buffer_id) - } 
else { - let (_, buffer, _) = self.excerpt_containing(anchor, cx)?; - Some(buffer) - } + let path_key = snapshot.path_key_index_for_buffer(buffer_id)?; + let start = Anchor::in_buffer(path_key, text::Anchor::min_for_buffer(buffer_id)); + let end = Anchor::in_buffer(path_key, text::Anchor::max_for_buffer(buffer_id)); + Some((start..end).to_point(&snapshot)) } // If point is at the end of the buffer, the last excerpt is returned @@ -2193,15 +1887,10 @@ impl MultiBuffer { &self, point: T, cx: &App, - ) -> Option<(Entity, Point, ExcerptId)> { + ) -> Option<(Entity, Point)> { let snapshot = self.read(cx); - let (buffer, point, is_main_buffer) = - snapshot.point_to_buffer_point(point.to_point(&snapshot))?; - Some(( - self.buffers.get(&buffer.remote_id())?.buffer.clone(), - point, - is_main_buffer, - )) + let (buffer, point) = snapshot.point_to_buffer_point(point.to_point(&snapshot))?; + Some((self.buffers.get(&buffer.remote_id())?.buffer.clone(), point)) } pub fn buffer_point_to_anchor( @@ -2212,266 +1901,86 @@ impl MultiBuffer { cx: &App, ) -> Option { let mut found = None; - let snapshot = buffer.read(cx).snapshot(); - for (excerpt_id, _, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) { - let start = range.context.start.to_point(&snapshot); - let end = range.context.end.to_point(&snapshot); - if start <= point && point < end { - found = Some((snapshot.clip_point(point, Bias::Left), excerpt_id)); + let buffer_snapshot = buffer.read(cx).snapshot(); + let text_anchor = buffer_snapshot.anchor_after(&point); + let snapshot = self.snapshot(cx); + let path_key_index = snapshot.path_key_index_for_buffer(buffer_snapshot.remote_id())?; + for excerpt in snapshot.excerpts_for_buffer(buffer_snapshot.remote_id()) { + if excerpt + .context + .start + .cmp(&text_anchor, &buffer_snapshot) + .is_gt() + { + found = Some(Anchor::in_buffer(path_key_index, excerpt.context.start)); + break; + } else if excerpt + .context + .end + .cmp(&text_anchor, &buffer_snapshot) + .is_ge() 
+ { + found = Some(Anchor::in_buffer(path_key_index, text_anchor)); break; } - if point < start { - found = Some((start, excerpt_id)); - } - if point >= end { - found = Some((end, excerpt_id)); - } + found = Some(Anchor::in_buffer(path_key_index, excerpt.context.end)); } - found.map(|(point, excerpt_id)| { - let text_anchor = snapshot.anchor_after(point); - Anchor::in_buffer(excerpt_id, text_anchor) - }) + found } - pub fn buffer_anchor_to_anchor( + pub fn wait_for_anchors<'a, Anchors: 'a + Iterator>( &self, - // todo(lw): We shouldn't need this? - buffer: &Entity, - anchor: text::Anchor, - cx: &App, - ) -> Option { - let snapshot = buffer.read(cx).snapshot(); - for (excerpt_id, _, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) { - if range.context.start.cmp(&anchor, &snapshot).is_le() - && range.context.end.cmp(&anchor, &snapshot).is_ge() - { - return Some(Anchor::in_buffer(excerpt_id, anchor)); + anchors: Anchors, + cx: &mut Context, + ) -> impl 'static + Future> + use { + let mut error = None; + let mut futures = Vec::new(); + for anchor in anchors { + if let Some(excerpt_anchor) = anchor.excerpt_anchor() { + if let Some(buffer) = self.buffers.get(&excerpt_anchor.text_anchor.buffer_id) { + buffer.buffer.update(cx, |buffer, _| { + futures.push(buffer.wait_for_anchors([excerpt_anchor.text_anchor()])) + }); + } else { + error = Some(anyhow!( + "buffer {:?} is not part of this multi-buffer", + excerpt_anchor.text_anchor.buffer_id + )); + break; + } + } + } + async move { + if let Some(error) = error { + Err(error)?; + } + for future in futures { + future.await?; } + Ok(()) } + } - None + pub fn text_anchor_for_position( + &self, + position: T, + cx: &App, + ) -> Option<(Entity, text::Anchor)> { + let snapshot = self.read(cx); + let anchor = snapshot.anchor_before(position).excerpt_anchor()?; + let buffer = self + .buffers + .get(&anchor.text_anchor.buffer_id)? 
+ .buffer + .clone(); + Some((buffer, anchor.text_anchor())) } - pub fn merge_excerpts( + fn on_buffer_event( &mut self, - excerpt_ids: &[ExcerptId], - cx: &mut Context, - ) -> ExcerptId { - debug_assert!(!excerpt_ids.is_empty()); - if excerpt_ids.len() == 1 { - return excerpt_ids[0]; - } - - let snapshot = self.snapshot(cx); - - let first_range = snapshot - .context_range_for_excerpt(excerpt_ids[0]) - .expect("first excerpt must exist"); - let last_range = snapshot - .context_range_for_excerpt(*excerpt_ids.last().unwrap()) - .expect("last excerpt must exist"); - - let union_range = first_range.start..last_range.end; - - drop(snapshot); - - self.resize_excerpt(excerpt_ids[0], union_range, cx); - let removed = &excerpt_ids[1..]; - for &excerpt_id in removed { - if let Some(path) = self.paths_by_excerpt.get(&excerpt_id) { - if let Some(excerpt_list) = self.excerpts_by_path.get_mut(path) { - excerpt_list.retain(|id| *id != excerpt_id); - if excerpt_list.is_empty() { - let path = path.clone(); - self.excerpts_by_path.remove(&path); - } - } - } - } - self.remove_excerpts(removed.iter().copied(), cx); - - excerpt_ids[0] - } - - pub fn remove_excerpts( - &mut self, - excerpt_ids: impl IntoIterator, - cx: &mut Context, - ) { - self.sync_mut(cx); - let ids = excerpt_ids.into_iter().collect::>(); - if ids.is_empty() { - return; - } - self.buffer_changed_since_sync.replace(true); - - let mut snapshot = self.snapshot.get_mut(); - let mut new_excerpts = SumTree::default(); - let mut cursor = snapshot - .excerpts - .cursor::, ExcerptOffset>>(()); - let mut edits = Vec::new(); - let mut excerpt_ids = ids.iter().copied().peekable(); - let mut removed_buffer_ids = Vec::new(); - let mut removed_excerpts_for_buffers = HashSet::default(); - - while let Some(excerpt_id) = excerpt_ids.next() { - self.paths_by_excerpt.remove(&excerpt_id); - // Seek to the next excerpt to remove, preserving any preceding excerpts. 
- let locator = snapshot.excerpt_locator_for_id(excerpt_id); - new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), ()); - - if let Some(mut excerpt) = cursor.item() { - if excerpt.id != excerpt_id { - continue; - } - let mut old_start = cursor.start().1; - - // Skip over the removed excerpt. - 'remove_excerpts: loop { - if let Some(buffer_state) = self.buffers.get_mut(&excerpt.buffer_id) { - removed_excerpts_for_buffers.insert(excerpt.buffer_id); - buffer_state.excerpts.retain(|l| l != &excerpt.locator); - if buffer_state.excerpts.is_empty() { - log::debug!( - "removing buffer and diff for buffer {}", - excerpt.buffer_id - ); - self.buffers.remove(&excerpt.buffer_id); - removed_buffer_ids.push(excerpt.buffer_id); - } - } - cursor.next(); - - // Skip over any subsequent excerpts that are also removed. - if let Some(&next_excerpt_id) = excerpt_ids.peek() { - let next_locator = snapshot.excerpt_locator_for_id(next_excerpt_id); - if let Some(next_excerpt) = cursor.item() - && next_excerpt.locator == *next_locator - { - excerpt_ids.next(); - excerpt = next_excerpt; - continue 'remove_excerpts; - } - } - - break; - } - - // When removing the last excerpt, remove the trailing newline from - // the previous excerpt. - if cursor.item().is_none() && old_start > MultiBufferOffset::ZERO { - old_start -= 1; - new_excerpts.update_last(|e| e.has_trailing_newline = false, ()); - } - - // Push an edit for the removal of this run of excerpts. 
- let old_end = cursor.start().1; - let new_start = ExcerptDimension(new_excerpts.summary().text.len); - edits.push(Edit { - old: old_start..old_end, - new: new_start..new_start, - }); - } - } - let suffix = cursor.suffix(); - let changed_trailing_excerpt = suffix.is_empty(); - new_excerpts.append(suffix, ()); - drop(cursor); - for buffer_id in removed_excerpts_for_buffers { - match self.buffers.get(&buffer_id) { - Some(buffer_state) => { - snapshot - .buffer_locators - .insert(buffer_id, buffer_state.excerpts.iter().cloned().collect()); - } - None => { - snapshot.buffer_locators.remove(&buffer_id); - } - } - } - snapshot.excerpts = new_excerpts; - for buffer_id in &removed_buffer_ids { - self.diffs.remove(buffer_id); - remove_diff_state(&mut snapshot.diffs, *buffer_id); - } - - if !removed_buffer_ids.is_empty() { - snapshot.has_inverted_diff = - snapshot.diffs.iter().any(|diff| diff.main_buffer.is_some()); - } - - if changed_trailing_excerpt { - snapshot.trailing_excerpt_update_count += 1; - } - - let edits = Self::sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); - if !edits.is_empty() { - self.subscriptions.publish(edits); - } - cx.emit(Event::Edited { - edited_buffer: None, - is_local: true, - }); - cx.emit(Event::ExcerptsRemoved { - ids, - removed_buffer_ids, - }); - cx.notify(); - } - - pub fn wait_for_anchors<'a, Anchors: 'a + Iterator>( - &self, - anchors: Anchors, - cx: &mut Context, - ) -> impl 'static + Future> + use { - let mut error = None; - let mut futures = Vec::new(); - for anchor in anchors { - if let Some(buffer_id) = anchor.text_anchor.buffer_id { - if let Some(buffer) = self.buffers.get(&buffer_id) { - buffer.buffer.update(cx, |buffer, _| { - futures.push(buffer.wait_for_anchors([anchor.text_anchor])) - }); - } else { - error = Some(anyhow!( - "buffer {buffer_id} is not part of this multi-buffer" - )); - break; - } - } - } - async move { - if let Some(error) = error { - Err(error)?; - } - for future in futures { - 
future.await?; - } - Ok(()) - } - } - - pub fn text_anchor_for_position( - &self, - position: T, - cx: &App, - ) -> Option<(Entity, language::Anchor)> { - let snapshot = self.read(cx); - let anchor = snapshot.anchor_before(position); - let buffer = self - .buffers - .get(&anchor.text_anchor.buffer_id?)? - .buffer - .clone(); - Some((buffer, anchor.text_anchor)) - } - - fn on_buffer_event( - &mut self, - buffer: Entity, - event: &language::BufferEvent, + buffer: Entity, + event: &language::BufferEvent, cx: &mut Context, ) { use language::BufferEvent; @@ -2532,12 +2041,15 @@ impl MultiBuffer { range: Range, cx: &mut Context, ) { - self.sync_mut(cx); + let Some(buffer) = self.buffer(diff.read(cx).buffer_id) else { + return; + }; + let snapshot = self.sync_mut(cx); let diff = diff.read(cx); let buffer_id = diff.buffer_id; - let Some(buffer_state) = self.buffers.get(&buffer_id) else { + let Some(path) = snapshot.path_for_buffer(buffer_id).cloned() else { return; }; let new_diff = DiffStateSnapshot { @@ -2545,17 +2057,17 @@ impl MultiBuffer { diff: diff.snapshot(cx), main_buffer: None, }; - let mut snapshot = self.snapshot.get_mut(); + let snapshot = self.snapshot.get_mut(); let base_text_changed = find_diff_state(&snapshot.diffs, buffer_id) .is_none_or(|old_diff| !new_diff.base_texts_definitely_eq(old_diff)); snapshot.diffs.insert_or_replace(new_diff, ()); - let buffer = buffer_state.buffer.read(cx); + let buffer = buffer.read(cx); let diff_change_range = range.to_offset(buffer); - let excerpt_edits = snapshot.excerpt_edits_for_diff_change(buffer_state, diff_change_range); + let excerpt_edits = snapshot.excerpt_edits_for_diff_change(&path, diff_change_range); let edits = Self::sync_diff_transforms( - &mut snapshot, + snapshot, excerpt_edits, DiffChangeKind::DiffUpdated { base_changed: base_text_changed, @@ -2577,10 +2089,10 @@ impl MultiBuffer { diff_change_range: Option>, cx: &mut Context, ) { - self.sync_mut(cx); + let snapshot = self.sync_mut(cx); let 
base_text_buffer_id = diff.read(cx).base_text_buffer().read(cx).remote_id(); - let Some(buffer_state) = self.buffers.get(&base_text_buffer_id) else { + let Some(path) = snapshot.path_for_buffer(base_text_buffer_id).cloned() else { return; }; @@ -2591,16 +2103,16 @@ impl MultiBuffer { diff: diff.snapshot(cx), main_buffer: Some(main_buffer_snapshot), }; - let mut snapshot = self.snapshot.get_mut(); + let snapshot = self.snapshot.get_mut(); snapshot.diffs.insert_or_replace(new_diff, ()); let Some(diff_change_range) = diff_change_range else { return; }; - let excerpt_edits = snapshot.excerpt_edits_for_diff_change(buffer_state, diff_change_range); + let excerpt_edits = snapshot.excerpt_edits_for_diff_change(&path, diff_change_range); let edits = Self::sync_diff_transforms( - &mut snapshot, + snapshot, excerpt_edits, DiffChangeKind::DiffUpdated { // We don't read this field for inverted diffs. @@ -2624,14 +2136,6 @@ impl MultiBuffer { self.all_buffers_iter().collect() } - pub fn all_buffer_ids_iter(&self) -> impl Iterator { - self.buffers.keys().copied() - } - - pub fn all_buffer_ids(&self) -> Vec { - self.all_buffer_ids_iter().collect() - } - pub fn buffer(&self, buffer_id: BufferId) -> Option> { self.buffers .get(&buffer_id) @@ -2644,14 +2148,11 @@ impl MultiBuffer { } pub fn language_settings<'a>(&'a self, cx: &'a App) -> Cow<'a, LanguageSettings> { - let buffer_id = self - .snapshot - .borrow() + let snapshot = self.snapshot(cx); + snapshot .excerpts .first() - .map(|excerpt| excerpt.buffer.remote_id()); - buffer_id - .and_then(|buffer_id| self.buffer(buffer_id)) + .and_then(|excerpt| self.buffer(excerpt.range.context.start.buffer_id)) .map(|buffer| LanguageSettings::for_buffer(&buffer.read(cx), cx)) .unwrap_or_else(move || self.language_settings_at(MultiBufferOffset::default(), cx)) } @@ -2814,7 +2315,7 @@ impl MultiBuffer { pub fn set_all_diff_hunks_expanded(&mut self, cx: &mut Context) { self.snapshot.get_mut().all_diff_hunks_expanded = true; - 
self.expand_or_collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], true, cx); + self.expand_or_collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], true, cx); } pub fn all_diff_hunks_expanded(&self) -> bool { @@ -2823,7 +2324,7 @@ impl MultiBuffer { pub fn set_all_diff_hunks_collapsed(&mut self, cx: &mut Context) { self.snapshot.get_mut().all_diff_hunks_expanded = false; - self.expand_or_collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], false, cx); + self.expand_or_collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], false, cx); } pub fn set_show_deleted_hunks(&mut self, show: bool, cx: &mut Context) { @@ -2833,7 +2334,7 @@ impl MultiBuffer { let old_len = self.snapshot.borrow().len(); - let ranges = std::iter::once((Point::zero()..Point::MAX, ExcerptId::max())); + let ranges = std::iter::once((Point::zero()..Point::MAX, None)); let _ = self.expand_or_collapse_diff_hunks_inner(ranges, true, cx); let new_len = self.snapshot.borrow().len(); @@ -2856,7 +2357,7 @@ impl MultiBuffer { pub fn has_multiple_hunks(&self, cx: &App) -> bool { self.read(cx) - .diff_hunks_in_range(Anchor::min()..Anchor::max()) + .diff_hunks_in_range(Anchor::Min..Anchor::Max) .nth(1) .is_some() } @@ -2902,7 +2403,7 @@ impl MultiBuffer { pub fn expand_or_collapse_diff_hunks_inner( &mut self, - ranges: impl IntoIterator, ExcerptId)>, + ranges: impl IntoIterator, Option)>, expand: bool, cx: &mut Context, ) -> Vec> { @@ -2913,18 +2414,34 @@ impl MultiBuffer { let mut snapshot = self.snapshot.get_mut(); let mut excerpt_edits = Vec::new(); let mut last_hunk_row = None; - for (range, end_excerpt_id) in ranges { + for (range, end_anchor) in ranges { for diff_hunk in snapshot.diff_hunks_in_range(range) { - if diff_hunk.excerpt_id.cmp(&end_excerpt_id, &snapshot).is_gt() { + if let Some(end_anchor) = &end_anchor + && let Some(hunk_end_anchor) = + snapshot.anchor_in_excerpt(diff_hunk.excerpt_range.context.end) + && hunk_end_anchor.cmp(end_anchor, snapshot).is_gt() + { + continue; + } + let hunk_range 
= diff_hunk.multi_buffer_range; + if let Some(excerpt_start_anchor) = + snapshot.anchor_in_excerpt(diff_hunk.excerpt_range.context.start) + && hunk_range.start.to_point(snapshot) < excerpt_start_anchor.to_point(snapshot) + { continue; } if last_hunk_row.is_some_and(|row| row >= diff_hunk.row_range.start) { continue; } - let start = Anchor::in_buffer(diff_hunk.excerpt_id, diff_hunk.buffer_range.start); - let end = Anchor::in_buffer(diff_hunk.excerpt_id, diff_hunk.buffer_range.end); - let start = snapshot.excerpt_offset_for_anchor(&start); - let end = snapshot.excerpt_offset_for_anchor(&end); + let mut start = snapshot.excerpt_offset_for_anchor(&hunk_range.start); + let mut end = snapshot.excerpt_offset_for_anchor(&hunk_range.end); + if let Some(excerpt_end_anchor) = + snapshot.anchor_in_excerpt(diff_hunk.excerpt_range.context.end) + { + let excerpt_end = snapshot.excerpt_offset_for_anchor(&excerpt_end_anchor); + start = start.min(excerpt_end); + end = end.min(excerpt_end); + }; last_hunk_row = Some(diff_hunk.row_range.start); excerpt_edits.push(text::Edit { old: start..end, @@ -2947,15 +2464,18 @@ impl MultiBuffer { cx: &mut Context, ) { let snapshot = self.snapshot.borrow().clone(); - let ranges = ranges.iter().map(move |range| { - let end_excerpt_id = range.end.excerpt_id; - let range = range.to_point(&snapshot); - let mut peek_end = range.end; - if range.end.row < snapshot.max_row().0 { - peek_end = Point::new(range.end.row + 1, 0); - }; - (range.start..peek_end, end_excerpt_id) - }); + let ranges = + ranges.iter().map(move |range| { + let excerpt_end = snapshot.excerpt_containing(range.end..range.end).and_then( + |(_, excerpt_range)| snapshot.anchor_in_excerpt(excerpt_range.context.end), + ); + let range = range.to_point(&snapshot); + let mut peek_end = range.end; + if range.end.row < snapshot.max_row().0 { + peek_end = Point::new(range.end.row + 1, 0); + }; + (range.start..peek_end, excerpt_end) + }); let edits = self.expand_or_collapse_diff_hunks_inner(ranges, 
expand, cx); if !edits.is_empty() { self.subscriptions.publish(edits); @@ -2967,184 +2487,6 @@ impl MultiBuffer { }); } - pub fn resize_excerpt( - &mut self, - id: ExcerptId, - range: Range, - cx: &mut Context, - ) { - self.sync_mut(cx); - - let mut snapshot = self.snapshot.get_mut(); - let locator = snapshot.excerpt_locator_for_id(id); - let mut new_excerpts = SumTree::default(); - let mut cursor = snapshot - .excerpts - .cursor::, ExcerptOffset>>(()); - let mut edits = Vec::>::new(); - - let prefix = cursor.slice(&Some(locator), Bias::Left); - new_excerpts.append(prefix, ()); - - let mut excerpt = cursor.item().unwrap().clone(); - let old_text_len = excerpt.text_summary.len; - - excerpt.range.context.start = range.start; - excerpt.range.context.end = range.end; - excerpt.max_buffer_row = range.end.to_point(&excerpt.buffer).row; - - excerpt.text_summary = excerpt - .buffer - .text_summary_for_range(excerpt.range.context.clone()); - - let new_start_offset = ExcerptDimension(new_excerpts.summary().text.len); - let old_start_offset = cursor.start().1; - let new_text_len = excerpt.text_summary.len; - let edit = Edit { - old: old_start_offset..old_start_offset + old_text_len, - new: new_start_offset..new_start_offset + new_text_len, - }; - - if let Some(last_edit) = edits.last_mut() { - if last_edit.old.end == edit.old.start { - last_edit.old.end = edit.old.end; - last_edit.new.end = edit.new.end; - } else { - edits.push(edit); - } - } else { - edits.push(edit); - } - - new_excerpts.push(excerpt, ()); - - cursor.next(); - - new_excerpts.append(cursor.suffix(), ()); - - drop(cursor); - snapshot.excerpts = new_excerpts; - - let edits = Self::sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); - if !edits.is_empty() { - self.subscriptions.publish(edits); - } - cx.emit(Event::Edited { - edited_buffer: None, - is_local: true, - }); - cx.emit(Event::ExcerptsExpanded { ids: vec![id] }); - cx.notify(); - } - - pub fn expand_excerpts( - &mut self, - ids: 
impl IntoIterator, - line_count: u32, - direction: ExpandExcerptDirection, - cx: &mut Context, - ) { - if line_count == 0 { - return; - } - self.sync_mut(cx); - if !self.excerpts_by_path.is_empty() { - self.expand_excerpts_with_paths(ids, line_count, direction, cx); - return; - } - let mut snapshot = self.snapshot.get_mut(); - - let ids = ids.into_iter().collect::>(); - let locators = snapshot.excerpt_locators_for_ids(ids.iter().copied()); - let mut new_excerpts = SumTree::default(); - let mut cursor = snapshot - .excerpts - .cursor::, ExcerptOffset>>(()); - let mut edits = Vec::>::new(); - - for locator in &locators { - let prefix = cursor.slice(&Some(locator), Bias::Left); - new_excerpts.append(prefix, ()); - - let mut excerpt = cursor.item().unwrap().clone(); - let old_text_len = excerpt.text_summary.len; - - let up_line_count = if direction.should_expand_up() { - line_count - } else { - 0 - }; - - let start_row = excerpt - .range - .context - .start - .to_point(&excerpt.buffer) - .row - .saturating_sub(up_line_count); - let start_point = Point::new(start_row, 0); - excerpt.range.context.start = excerpt.buffer.anchor_before(start_point); - - let down_line_count = if direction.should_expand_down() { - line_count - } else { - 0 - }; - - let mut end_point = excerpt.buffer.clip_point( - excerpt.range.context.end.to_point(&excerpt.buffer) - + Point::new(down_line_count, 0), - Bias::Left, - ); - end_point.column = excerpt.buffer.line_len(end_point.row); - excerpt.range.context.end = excerpt.buffer.anchor_after(end_point); - excerpt.max_buffer_row = end_point.row; - - excerpt.text_summary = excerpt - .buffer - .text_summary_for_range(excerpt.range.context.clone()); - - let new_start_offset = ExcerptDimension(new_excerpts.summary().text.len); - let old_start_offset = cursor.start().1; - let new_text_len = excerpt.text_summary.len; - let edit = Edit { - old: old_start_offset..old_start_offset + old_text_len, - new: new_start_offset..new_start_offset + new_text_len, - }; 
- - if let Some(last_edit) = edits.last_mut() { - if last_edit.old.end == edit.old.start { - last_edit.old.end = edit.old.end; - last_edit.new.end = edit.new.end; - } else { - edits.push(edit); - } - } else { - edits.push(edit); - } - - new_excerpts.push(excerpt, ()); - - cursor.next(); - } - - new_excerpts.append(cursor.suffix(), ()); - - drop(cursor); - snapshot.excerpts = new_excerpts; - - let edits = Self::sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); - if !edits.is_empty() { - self.subscriptions.publish(edits); - } - cx.emit(Event::Edited { - edited_buffer: None, - is_local: true, - }); - cx.emit(Event::ExcerptsExpanded { ids }); - cx.notify(); - } - #[ztracing::instrument(skip_all)] fn sync(&self, cx: &App) { let changed = self.buffer_changed_since_sync.replace(false); @@ -3162,17 +2504,19 @@ impl MultiBuffer { } } - fn sync_mut(&mut self, cx: &App) { + fn sync_mut(&mut self, cx: &App) -> &mut MultiBufferSnapshot { + let snapshot = self.snapshot.get_mut(); let changed = self.buffer_changed_since_sync.replace(false); if !changed { - return; + return snapshot; } - let edits = - Self::sync_from_buffer_changes(self.snapshot.get_mut(), &self.buffers, &self.diffs, cx); + let edits = Self::sync_from_buffer_changes(snapshot, &self.buffers, &self.diffs, cx); if !edits.is_empty() { self.subscriptions.publish(edits); } + + snapshot } fn sync_from_buffer_changes( @@ -3183,8 +2527,10 @@ impl MultiBuffer { ) -> Vec> { let MultiBufferSnapshot { excerpts, - buffer_locators: _, diffs: buffer_diff, + buffers: buffer_snapshots, + path_keys_by_index: _, + indices_by_path_key: _, diff_transforms: _, non_text_state_update_count, edit_count, @@ -3193,8 +2539,6 @@ impl MultiBuffer { has_conflict, has_inverted_diff: _, singleton: _, - excerpt_ids: _, - replaced_excerpts: _, trailing_excerpt_update_count: _, all_diff_hunks_expanded: _, show_deleted_hunks: _, @@ -3227,28 +2571,32 @@ impl MultiBuffer { buffer_diff.edit(diffs_to_add, ()); } - let mut 
excerpts_to_edit = Vec::new(); + let mut paths_to_edit = Vec::new(); let mut non_text_state_updated = false; let mut edited = false; for buffer_state in buffers.values() { let buffer = buffer_state.buffer.read(cx); - let version = buffer.version(); + let last_snapshot = buffer_snapshots + .get(&buffer.remote_id()) + .expect("each buffer should have a snapshot"); + let current_version = buffer.version(); let non_text_state_update_count = buffer.non_text_state_update_count(); - let buffer_edited = version.changed_since(&buffer_state.last_version.borrow()); - let buffer_non_text_state_updated = - non_text_state_update_count > buffer_state.last_non_text_state_update_count.get(); + let buffer_edited = + current_version.changed_since(last_snapshot.buffer_snapshot.version()); + let buffer_non_text_state_updated = non_text_state_update_count + > last_snapshot.buffer_snapshot.non_text_state_update_count(); if buffer_edited || buffer_non_text_state_updated { - *buffer_state.last_version.borrow_mut() = version; - buffer_state - .last_non_text_state_update_count - .set(non_text_state_update_count); - excerpts_to_edit.extend( - buffer_state - .excerpts - .iter() - .map(|locator| (locator, buffer_state.buffer.clone(), buffer_edited)), - ); + paths_to_edit.push(( + last_snapshot.path_key.clone(), + last_snapshot.path_key_index, + buffer_state.buffer.clone(), + if buffer_edited { + Some(last_snapshot.buffer_snapshot.version().clone()) + } else { + None + }, + )); } edited |= buffer_edited; @@ -3266,55 +2614,64 @@ impl MultiBuffer { *non_text_state_update_count += 1; } - excerpts_to_edit.sort_unstable_by_key(|&(locator, _, _)| locator); + paths_to_edit.sort_unstable_by_key(|(path, _, _, _)| path.clone()); let mut edits = Vec::new(); let mut new_excerpts = SumTree::default(); - let mut cursor = excerpts.cursor::, ExcerptOffset>>(()); + let mut cursor = excerpts.cursor::(()); - for (locator, buffer, buffer_edited) in excerpts_to_edit { - 
new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), ()); - let old_excerpt = cursor.item().unwrap(); + for (path, path_key_index, buffer, prev_version) in paths_to_edit { + new_excerpts.append(cursor.slice(&path, Bias::Left), ()); let buffer = buffer.read(cx); let buffer_id = buffer.remote_id(); - let mut new_excerpt; - if buffer_edited { - edits.extend( - buffer - .edits_since_in_range::( - old_excerpt.buffer.version(), - old_excerpt.range.context.clone(), - ) - .map(|edit| { - let excerpt_old_start = cursor.start().1; - let excerpt_new_start = - ExcerptDimension(new_excerpts.summary().text.len); - let old_start = excerpt_old_start + edit.old.start; - let old_end = excerpt_old_start + edit.old.end; - let new_start = excerpt_new_start + edit.new.start; - let new_end = excerpt_new_start + edit.new.end; - Edit { - old: old_start..old_end, - new: new_start..new_end, - } - }), - ); - new_excerpt = Excerpt::new( - old_excerpt.id, - locator.clone(), - buffer_id, - Arc::new(buffer.snapshot()), - old_excerpt.range.clone(), - old_excerpt.has_trailing_newline, - ); - } else { - new_excerpt = old_excerpt.clone(); - new_excerpt.buffer = Arc::new(buffer.snapshot()); - } + buffer_snapshots.insert( + buffer_id, + BufferStateSnapshot { + path_key: path.clone(), + path_key_index, + buffer_snapshot: buffer.snapshot(), + }, + ); - new_excerpts.push(new_excerpt, ()); - cursor.next(); + if let Some(prev_version) = &prev_version { + while let Some(old_excerpt) = cursor.item() + && &old_excerpt.path_key == &path + { + edits.extend( + buffer + .edits_since_in_range::( + prev_version, + old_excerpt.range.context.clone(), + ) + .map(|edit| { + let excerpt_old_start = cursor.start().len(); + let excerpt_new_start = + ExcerptDimension(new_excerpts.summary().text.len); + let old_start = excerpt_old_start + edit.old.start; + let old_end = excerpt_old_start + edit.old.end; + let new_start = excerpt_new_start + edit.new.start; + let new_end = excerpt_new_start + edit.new.end; + Edit { + 
old: old_start..old_end, + new: new_start..new_end, + } + }), + ); + + let excerpt = Excerpt::new( + old_excerpt.path_key.clone(), + old_excerpt.path_key_index, + &buffer.snapshot(), + old_excerpt.range.clone(), + old_excerpt.has_trailing_newline, + ); + new_excerpts.push(excerpt, ()); + cursor.next(); + } + } else { + new_excerpts.append(cursor.slice(&path, Bias::Right), ()); + }; } new_excerpts.append(cursor.suffix(), ()); @@ -3416,7 +2773,8 @@ impl MultiBuffer { { return true; } - hunk.hunk_start_anchor.is_valid(&excerpt.buffer) + hunk.hunk_start_anchor + .is_valid(&excerpt.buffer_snapshot(&snapshot)) }), _ => true, }; @@ -3513,10 +2871,10 @@ impl MultiBuffer { // Recompute the expanded hunks in the portion of the excerpt that // intersects the edit. if let Some(diff) = find_diff_state(&snapshot.diffs, excerpt.buffer_id) { - let buffer = &excerpt.buffer; + let buffer_snapshot = &excerpt.buffer_snapshot(&snapshot); let excerpt_start = *excerpts.start(); let excerpt_end = excerpt_start + excerpt.text_summary.len; - let excerpt_buffer_start = excerpt.range.context.start.to_offset(buffer); + let excerpt_buffer_start = excerpt.range.context.start.to_offset(buffer_snapshot); let excerpt_buffer_end = excerpt_buffer_start + excerpt.text_summary.len; let edit_buffer_start = excerpt_buffer_start + edit.new.start.saturating_sub(excerpt_start); @@ -3535,7 +2893,6 @@ impl MultiBuffer { log::trace!("skipping hunk that starts before excerpt"); continue; } - hunk_buffer_range.end.to_point(&excerpt.buffer); let hunk_excerpt_start = excerpt_start + hunk_buffer_range.start.saturating_sub(excerpt_buffer_start); let hunk_excerpt_end = excerpt_end @@ -3548,9 +2905,10 @@ impl MultiBuffer { ); if !hunk_buffer_range.is_empty() { let hunk_info = DiffTransformHunkInfo { - excerpt_id: excerpt.id, + buffer_id: buffer_snapshot.remote_id(), hunk_start_anchor: hunk.buffer_range.start, hunk_secondary_status: hunk.secondary_status, + excerpt_end: excerpt.end_anchor(), is_logically_deleted: true, 
}; *end_of_current_insert = @@ -3558,23 +2916,24 @@ impl MultiBuffer { } } } else { - let edit_anchor_range = buffer.anchor_before(edit_buffer_start) - ..buffer.anchor_after(edit_buffer_end); - for hunk in diff.hunks_intersecting_range(edit_anchor_range, buffer) { + let edit_anchor_range = buffer_snapshot.anchor_before(edit_buffer_start) + ..buffer_snapshot.anchor_after(edit_buffer_end); + for hunk in diff.hunks_intersecting_range(edit_anchor_range, buffer_snapshot) { if hunk.is_created_file() && !all_diff_hunks_expanded { continue; } - let hunk_buffer_range = hunk.buffer_range.to_offset(buffer); + let hunk_buffer_range = hunk.buffer_range.to_offset(buffer_snapshot); if hunk_buffer_range.start < excerpt_buffer_start { log::trace!("skipping hunk that starts before excerpt"); continue; } let hunk_info = DiffTransformHunkInfo { - excerpt_id: excerpt.id, + buffer_id: buffer_snapshot.remote_id(), hunk_start_anchor: hunk.buffer_range.start, hunk_secondary_status: hunk.secondary_status, + excerpt_end: excerpt.end_anchor(), is_logically_deleted: false, }; @@ -3599,7 +2958,7 @@ impl MultiBuffer { } DiffChangeKind::ExpandOrCollapseHunks { expand } => { let intersects = hunk_buffer_range.is_empty() - || hunk_buffer_range.end > edit_buffer_start; + || (hunk_buffer_range.end > edit_buffer_start); if *expand { intersects || was_previously_expanded || all_diff_hunks_expanded } else { @@ -3613,9 +2972,8 @@ impl MultiBuffer { if should_expand_hunk { did_expand_hunks = true; log::trace!( - "expanding hunk {:?}, excerpt:{:?}", + "expanding hunk {:?}", hunk_excerpt_start..hunk_excerpt_end, - excerpt.id ); if !hunk.diff_base_byte_range.is_empty() @@ -3639,7 +2997,7 @@ impl MultiBuffer { DiffTransform::DeletedHunk { base_text_byte_range: hunk.diff_base_byte_range.clone(), summary: base_text_summary, - buffer_id: excerpt.buffer_id, + buffer_id: buffer_snapshot.remote_id(), hunk_info, has_trailing_newline, }, @@ -3766,11 +3124,13 @@ impl MultiBuffer { pub fn toggle_single_diff_hunk(&mut 
self, range: Range, cx: &mut Context) { let snapshot = self.snapshot(cx); - let excerpt_id = range.end.excerpt_id; + let excerpt_end = snapshot + .excerpt_containing(range.end..range.end) + .and_then(|(_, excerpt_range)| snapshot.anchor_in_excerpt(excerpt_range.context.end)); let point_range = range.to_point(&snapshot); let expand = !self.single_hunk_is_expanded(range, cx); let edits = - self.expand_or_collapse_diff_hunks_inner([(point_range, excerpt_id)], expand, cx); + self.expand_or_collapse_diff_hunks_inner([(point_range, excerpt_end)], expand, cx); if !edits.is_empty() { self.subscriptions.publish(edits); } @@ -3896,38 +3256,15 @@ impl MultiBuffer { use std::env; use util::RandomCharIter; - let max_excerpts = env::var("MAX_EXCERPTS") + let max_buffers = env::var("MAX_BUFFERS") .map(|i| i.parse().expect("invalid `MAX_EXCERPTS` variable")) .unwrap_or(5); let mut buffers = Vec::new(); for _ in 0..mutation_count { - if rng.random_bool(0.05) { - log::info!("Clearing multi-buffer"); - self.clear(cx); - continue; - } else if rng.random_bool(0.1) && !self.excerpt_ids().is_empty() { - let ids = self.excerpt_ids(); - let mut excerpts = HashSet::default(); - for _ in 0..rng.random_range(0..ids.len()) { - excerpts.extend(ids.choose(rng).copied()); - } - - let line_count = rng.random_range(0..5); - - log::info!("Expanding excerpts {excerpts:?} by {line_count} lines"); - - self.expand_excerpts( - excerpts.iter().cloned(), - line_count, - ExpandExcerptDirection::UpAndDown, - cx, - ); - continue; - } - - let excerpt_ids = self.excerpt_ids(); - if excerpt_ids.is_empty() || (rng.random() && excerpt_ids.len() < max_excerpts) { + let snapshot = self.snapshot(cx); + let buffer_ids = snapshot.all_buffer_ids().collect::>(); + if buffer_ids.is_empty() || (rng.random() && buffer_ids.len() < max_buffers) { let buffer_handle = if rng.random() || self.buffers.is_empty() { let text = RandomCharIter::new(&mut *rng).take(10).collect::(); buffers.push(cx.new(|cx| Buffer::local(text, cx))); 
@@ -3944,12 +3281,21 @@ impl MultiBuffer { let buffer = buffer_handle.read(cx); let buffer_text = buffer.text(); + let buffer_snapshot = buffer.snapshot(); + let mut next_min_start_ix = 0; let ranges = (0..rng.random_range(0..5)) - .map(|_| { - let end_ix = - buffer.clip_offset(rng.random_range(0..=buffer.len()), Bias::Right); - let start_ix = buffer.clip_offset(rng.random_range(0..=end_ix), Bias::Left); - ExcerptRange::new(start_ix..end_ix) + .filter_map(|_| { + if next_min_start_ix >= buffer.len() { + return None; + } + let end_ix = buffer.clip_offset( + rng.random_range(next_min_start_ix..=buffer.len()), + Bias::Right, + ); + let start_ix = buffer + .clip_offset(rng.random_range(next_min_start_ix..=end_ix), Bias::Left); + next_min_start_ix = buffer.text().ceil_char_boundary(end_ix + 1); + Some(ExcerptRange::new(start_ix..end_ix)) }) .collect::>(); log::info!( @@ -3961,21 +3307,27 @@ impl MultiBuffer { .map(|r| &buffer_text[r.context.clone()]) .collect::>() ); - - let excerpt_id = - self.insert_excerpts_after(ExcerptId::max(), buffer_handle, ranges, cx); - log::info!("Inserted with ids: {:?}", excerpt_id); + + let path_key = PathKey::for_buffer(&buffer_handle, cx); + self.set_merged_excerpt_ranges_for_path( + path_key.clone(), + buffer_handle, + &buffer_snapshot, + ranges, + cx, + ); + log::info!("Inserted with path_key: {:?}", path_key); } else { - let remove_count = rng.random_range(1..=excerpt_ids.len()); - let mut excerpts_to_remove = excerpt_ids - .choose_multiple(rng, remove_count) - .cloned() - .collect::>(); - let snapshot = self.snapshot.borrow(); - excerpts_to_remove.sort_unstable_by(|a, b| a.cmp(b, &snapshot)); - drop(snapshot); - log::info!("Removing excerpts {:?}", excerpts_to_remove); - self.remove_excerpts(excerpts_to_remove, cx); + let path_key = self + .snapshot + .borrow() + .buffers + .get(&buffer_ids.choose(rng).unwrap()) + .unwrap() + .path_key + .clone(); + log::info!("Removing excerpts {:?}", path_key); + self.remove_excerpts(path_key, cx); 
} } } @@ -4083,7 +3435,7 @@ impl MultiBufferSnapshot { } pub fn diff_hunks(&self) -> impl Iterator + '_ { - self.diff_hunks_in_range(Anchor::min()..Anchor::max()) + self.diff_hunks_in_range(Anchor::Min..Anchor::Max) } pub fn diff_hunks_in_range( @@ -4121,6 +3473,7 @@ impl MultiBufferSnapshot { })) }) .filter_map(move |(range, (hunk, is_inverted), excerpt)| { + let buffer_snapshot = excerpt.buffer_snapshot(self); if range.start != range.end && range.end == query_range.start && !hunk.range.is_empty() { return None; @@ -4139,12 +3492,12 @@ impl MultiBufferSnapshot { if self.show_deleted_hunks || is_inverted { let hunk_start_offset = if is_inverted { Anchor::in_buffer( - excerpt.id, - excerpt.buffer.anchor_after(hunk.diff_base_byte_range.start), + excerpt.path_key_index, + buffer_snapshot.anchor_after(hunk.diff_base_byte_range.start), ) .to_offset(self) } else { - Anchor::in_buffer(excerpt.id, hunk.buffer_range.start) + Anchor::in_buffer(excerpt.path_key_index, hunk.buffer_range.start) .to_offset(self) }; @@ -4155,7 +3508,8 @@ impl MultiBufferSnapshot { if !is_inverted { word_diffs.extend(hunk.buffer_word_diffs.into_iter().map(|diff| { - Anchor::range_in_buffer(excerpt.id, diff).to_offset(self) + Anchor::range_in_buffer(excerpt.path_key_index, diff) + .to_offset(self) })); } word_diffs @@ -4163,8 +3517,8 @@ impl MultiBufferSnapshot { .unwrap_or_default(); let buffer_range = if is_inverted { - excerpt.buffer.anchor_after(hunk.diff_base_byte_range.start) - ..excerpt.buffer.anchor_before(hunk.diff_base_byte_range.end) + buffer_snapshot.anchor_after(hunk.diff_base_byte_range.start) + ..buffer_snapshot.anchor_before(hunk.diff_base_byte_range.end) } else { hunk.buffer_range.clone() }; @@ -4175,10 +3529,11 @@ impl MultiBufferSnapshot { } else { DiffHunkStatusKind::Modified }; + let multi_buffer_range = + Anchor::range_in_buffer(excerpt.path_key_index, buffer_range.clone()); Some(MultiBufferDiffHunk { row_range: MultiBufferRow(range.start.row)..MultiBufferRow(end_row), - 
buffer_id: excerpt.buffer_id, - excerpt_id: excerpt.id, + buffer_id: buffer_snapshot.remote_id(), buffer_range, word_diffs, diff_base_byte_range: BufferOffset(hunk.diff_base_byte_range.start) @@ -4187,6 +3542,8 @@ impl MultiBufferSnapshot { kind: status_kind, secondary: hunk.secondary_status, }, + excerpt_range: excerpt.range.clone(), + multi_buffer_range, }) }) } @@ -4211,19 +3568,12 @@ impl MultiBufferSnapshot { }) } - pub fn excerpt_ids_for_range( - &self, - range: Range, - ) -> impl Iterator + '_ { - self.excerpts_for_range(range).map(|excerpt| excerpt.id) - } - pub fn buffer_ids_for_range( &self, range: Range, ) -> impl Iterator + '_ { self.excerpts_for_range(range) - .map(|excerpt| excerpt.buffer_id) + .map(|excerpt| excerpt.buffer_snapshot(self).remote_id()) } /// Resolves the given [`text::Anchor`]s to [`crate::Anchor`]s if the anchor is within a visible excerpt. @@ -4236,69 +3586,66 @@ impl MultiBufferSnapshot { let anchors = anchors.into_iter(); let mut result = Vec::with_capacity(anchors.size_hint().0); let mut anchors = anchors.peekable(); - let mut cursor = self.excerpts.cursor::>(()); + let mut cursor = self.excerpts.cursor::(()); 'anchors: while let Some(anchor) = anchors.peek() { - let Some(buffer_id) = anchor.buffer_id else { - anchors.next(); - result.push(None); - continue 'anchors; - }; - let mut same_buffer_anchors = - anchors.peeking_take_while(|a| a.buffer_id.is_some_and(|b| buffer_id == b)); + let buffer_id = anchor.buffer_id; + let mut same_buffer_anchors = anchors.peeking_take_while(|a| a.buffer_id == buffer_id); - if let Some(locators) = self.buffer_locators.get(&buffer_id) { + if let Some(buffer) = self.buffers.get(&buffer_id) { + let path = &buffer.path_key; let Some(mut next) = same_buffer_anchors.next() else { continue 'anchors; }; - 'excerpts: for locator in locators.iter() { - if cursor.seek_forward(&Some(locator), Bias::Left) - && let Some(excerpt) = cursor.item() - { - loop { - // anchor is before the first excerpt - if excerpt - 
.range - .context - .start - .cmp(&next, &excerpt.buffer) - .is_gt() - { - // so we skip it and try the next anchor - result.push(None); - match same_buffer_anchors.next() { - Some(anchor) => next = anchor, - None => continue 'anchors, - } - // anchor is within the excerpt - } else if excerpt - .range - .context - .end - .cmp(&next, &excerpt.buffer) - .is_ge() - { - // record it and all following anchors that are within - result.push(Some(Anchor::in_buffer(excerpt.id, next))); - result.extend( - same_buffer_anchors - .peeking_take_while(|a| { - excerpt - .range - .context - .end - .cmp(a, &excerpt.buffer) - .is_ge() - }) - .map(|a| Some(Anchor::in_buffer(excerpt.id, a))), - ); - match same_buffer_anchors.next() { - Some(anchor) => next = anchor, - None => continue 'anchors, - } - // anchor is after the excerpt, try the next one - } else { - continue 'excerpts; + cursor.seek_forward(path, Bias::Left); + 'excerpts: loop { + let Some(excerpt) = cursor.item() else { + break; + }; + if &excerpt.path_key != path { + break; + } + let buffer_snapshot = excerpt.buffer_snapshot(self); + + loop { + // anchor is before the first excerpt + if excerpt + .range + .context + .start + .cmp(&next, &buffer_snapshot) + .is_gt() + { + // so we skip it and try the next anchor + result.push(None); + match same_buffer_anchors.next() { + Some(anchor) => next = anchor, + None => continue 'anchors, + } + // anchor is within the excerpt + } else if excerpt + .range + .context + .end + .cmp(&next, &buffer_snapshot) + .is_ge() + { + // record it and all following anchors that are within + result.push(Some(Anchor::in_buffer(excerpt.path_key_index, next))); + result.extend( + same_buffer_anchors + .peeking_take_while(|a| { + excerpt.range.context.end.cmp(a, &buffer_snapshot).is_ge() + }) + .map(|a| Some(Anchor::in_buffer(excerpt.path_key_index, a))), + ); + match same_buffer_anchors.next() { + Some(anchor) => next = anchor, + None => continue 'anchors, } + // anchor is after the excerpt, try the 
next one + } else { + cursor.next(); + continue 'excerpts; } } } @@ -4311,79 +3658,31 @@ impl MultiBufferSnapshot { result } - pub fn ranges_to_buffer_ranges( - &self, - ranges: impl Iterator>, - ) -> impl Iterator, ExcerptId)> { - ranges.flat_map(|range| { - self.range_to_buffer_ranges((Bound::Included(range.start), Bound::Included(range.end))) - .into_iter() - }) - } - - pub fn range_to_buffer_ranges( - &self, - range: R, - ) -> Vec<(&BufferSnapshot, Range, ExcerptId)> - where - R: RangeBounds, - T: ToOffset, - { - self.range_to_buffer_ranges_with_context(range) - .into_iter() - .map(|(buffer, range, id, _context)| (buffer, range, id)) - .collect() - } - - pub fn range_to_buffer_ranges_with_context( + pub fn range_to_buffer_ranges( &self, - range: R, + range: Range, ) -> Vec<( - &BufferSnapshot, + BufferSnapshot, Range, - ExcerptId, - Range, - )> - where - R: RangeBounds, - T: ToOffset, - { - let start = match range.start_bound() { - Bound::Included(start) => start.to_offset(self), - Bound::Excluded(_) => panic!("excluded start bound not supported"), - Bound::Unbounded => MultiBufferOffset::ZERO, - }; - let end_bound = match range.end_bound() { - Bound::Included(end) => Bound::Included(end.to_offset(self)), - Bound::Excluded(end) => Bound::Excluded(end.to_offset(self)), - Bound::Unbounded => Bound::Unbounded, - }; - let bounds = (Bound::Included(start), end_bound); - + ExcerptRange, + )> { let mut cursor = self.cursor::(); + let start = range.start.to_offset(self); + let end = range.end.to_offset(self); cursor.seek(&start); let mut result: Vec<( - &BufferSnapshot, + BufferSnapshot, Range, - ExcerptId, - Range, + ExcerptRange, )> = Vec::new(); while let Some(region) = cursor.region() { - let dominated_by_end_bound = match end_bound { - Bound::Included(end) => region.range.start > end, - Bound::Excluded(end) => region.range.start >= end, - Bound::Unbounded => false, - }; - if dominated_by_end_bound { + if region.range.start >= end { break; } if 
region.is_main_buffer { let start_overshoot = start.saturating_sub(region.range.start); - let end_offset = match end_bound { - Bound::Included(end) | Bound::Excluded(end) => end, - Bound::Unbounded => region.range.end, - }; + let end_offset = end; let end_overshoot = end_offset.saturating_sub(region.range.start); let start = region .buffer_range @@ -4393,34 +3692,46 @@ impl MultiBufferSnapshot { .buffer_range .end .min(region.buffer_range.start + end_overshoot); - let context = region.excerpt.range.context.clone(); - if let Some(prev) = result.last_mut().filter(|(_, prev_range, excerpt_id, _)| { - *excerpt_id == region.excerpt.id && prev_range.end == start - }) { + let excerpt_range = region.excerpt.range.clone(); + if let Some(prev) = + result + .last_mut() + .filter(|(prev_buffer, prev_range, prev_excerpt)| { + prev_buffer.remote_id() == region.buffer.remote_id() + && prev_range.end == start + && prev_excerpt.context.start == excerpt_range.context.start + }) + { prev.1.end = end; } else { - result.push((region.buffer, start..end, region.excerpt.id, context)); + result.push((region.buffer.clone(), start..end, excerpt_range)); } } cursor.next(); } - if let Some(excerpt) = cursor.excerpt() { - let dominated_by_prev_excerpt = - result.last().is_some_and(|(_, _, id, _)| *id == excerpt.id); - if !dominated_by_prev_excerpt && excerpt.text_summary.len == 0 { - let excerpt_position = self.len(); - if bounds.contains(&excerpt_position) { - let buffer_offset = - BufferOffset(excerpt.range.context.start.to_offset(&excerpt.buffer)); - let context = excerpt.range.context.clone(); - result.push(( - &excerpt.buffer, - buffer_offset..buffer_offset, - excerpt.id, - context, - )); - } + if let Some(excerpt) = cursor.excerpt() + && excerpt.text_summary.len == 0 + && end == self.len() + { + let buffer_snapshot = excerpt.buffer_snapshot(self); + + let buffer_offset = + BufferOffset(excerpt.range.context.start.to_offset(buffer_snapshot)); + let excerpt_range = excerpt.range.clone(); + 
if result + .last_mut() + .is_none_or(|(prev_buffer, prev_range, prev_excerpt)| { + prev_buffer.remote_id() != buffer_snapshot.remote_id() + || prev_range.end != buffer_offset + || prev_excerpt.context.start != excerpt_range.context.start + }) + { + result.push(( + buffer_snapshot.clone(), + buffer_offset..buffer_offset, + excerpt_range, + )); } } @@ -4430,14 +3741,7 @@ impl MultiBufferSnapshot { pub fn range_to_buffer_ranges_with_deleted_hunks( &self, range: Range, - ) -> impl Iterator< - Item = ( - &BufferSnapshot, - Range, - ExcerptId, - Option, - ), - > + '_ { + ) -> impl Iterator, Option)> + '_ { let start = range.start.to_offset(self); let end = range.end.to_offset(self); @@ -4460,18 +3764,12 @@ impl MultiBufferSnapshot { .end .min(region.buffer_range.start + end_overshoot); - let region_excerpt_id = region.excerpt.id; let deleted_hunk_anchor = if region.is_main_buffer { None } else { Some(self.anchor_before(region.range.start)) }; - let result = ( - region.buffer, - start..end, - region_excerpt_id, - deleted_hunk_anchor, - ); + let result = (region.buffer, start..end, deleted_hunk_anchor); cursor.next(); Some(result) }) @@ -4504,7 +3802,7 @@ impl MultiBufferSnapshot { + AddAssign + Ord, { - let mut current_excerpt_metadata: Option<(ExcerptId, I)> = None; + let mut current_excerpt_metadata: Option<(ExcerptRange, I)> = None; let mut cursor = self.cursor::(); // Find the excerpt and buffer offset where the given range ends. @@ -4519,7 +3817,7 @@ impl MultiBufferSnapshot { ::default() }; buffer_end = buffer_end + overshoot; - range_end = Some((region.excerpt.id, buffer_end)); + range_end = Some((region.excerpt.range.clone(), buffer_end)); break; } cursor.next(); @@ -4544,11 +3842,12 @@ impl MultiBufferSnapshot { iter::from_fn(move || { loop { let excerpt = cursor.excerpt()?; + let buffer_snapshot = excerpt.buffer_snapshot(self); // If we have already retrieved metadata for this excerpt, continue to use it. 
let metadata_iter = if let Some((_, metadata)) = current_excerpt_metadata .as_mut() - .filter(|(excerpt_id, _)| *excerpt_id == excerpt.id) + .filter(|(excerpt_info, _)| excerpt_info == &excerpt.range) { Some(metadata) } @@ -4571,16 +3870,20 @@ impl MultiBufferSnapshot { .range .context .end - .summary::(&excerpt.buffer); - if let Some((end_excerpt_id, end_buffer_offset)) = range_end - && excerpt.id == end_excerpt_id + .summary::(&buffer_snapshot); + if let Some((end_excerpt, end_buffer_offset)) = &range_end + && &excerpt.range == end_excerpt { - buffer_end = buffer_end.min(end_buffer_offset); + buffer_end = buffer_end.min(*end_buffer_offset); } - get_buffer_metadata(&excerpt.buffer, buffer_start..buffer_end).map(|iterator| { - &mut current_excerpt_metadata.insert((excerpt.id, iterator)).1 - }) + get_buffer_metadata(&buffer_snapshot, buffer_start..buffer_end).map( + |iterator| { + &mut current_excerpt_metadata + .insert((excerpt.range.clone(), iterator)) + .1 + }, + ) }; // Visit each metadata item. @@ -4644,8 +3947,8 @@ impl MultiBufferSnapshot { // When there are no more metadata items for this excerpt, move to the next excerpt. 
else { current_excerpt_metadata.take(); - if let Some((end_excerpt_id, _)) = range_end - && excerpt.id == end_excerpt_id + if let Some((end_excerpt, _)) = &range_end + && &excerpt.range == end_excerpt { return None; } @@ -4668,12 +3971,14 @@ impl MultiBufferSnapshot { cursor.seek_to_start_of_current_excerpt(); let excerpt = cursor.excerpt()?; - let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); - let excerpt_end = excerpt.range.context.end.to_offset(&excerpt.buffer); - let current_position = self - .anchor_before(offset) - .text_anchor - .to_offset(&excerpt.buffer); + let buffer = excerpt.buffer_snapshot(self); + let excerpt_start = excerpt.range.context.start.to_offset(buffer); + let excerpt_end = excerpt.range.context.end.to_offset(buffer); + let current_position = match self.anchor_before(offset) { + Anchor::Min => 0, + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor.text_anchor().to_offset(buffer), + Anchor::Max => unreachable!(), + }; if let Some(diff) = self.diff_state(excerpt.buffer_id) { if let Some(main_buffer) = &diff.main_buffer { @@ -4683,24 +3988,22 @@ impl MultiBufferSnapshot { if hunk.diff_base_byte_range.end >= current_position { continue; } - let hunk_start = excerpt.buffer.anchor_after(hunk.diff_base_byte_range.start); - let start = Anchor::in_buffer(excerpt.id, hunk_start).to_point(self); + let hunk_start = buffer.anchor_after(hunk.diff_base_byte_range.start); + let start = + Anchor::in_buffer(excerpt.path_key_index, hunk_start).to_point(self); return Some(MultiBufferRow(start.row)); } } else { - let excerpt_end = excerpt - .buffer - .anchor_before(excerpt_end.min(current_position)); - for hunk in diff.hunks_intersecting_range_rev( - excerpt.range.context.start..excerpt_end, - &excerpt.buffer, - ) { - let hunk_end = hunk.buffer_range.end.to_offset(&excerpt.buffer); + let excerpt_end = buffer.anchor_before(excerpt_end.min(current_position)); + for hunk in diff + 
.hunks_intersecting_range_rev(excerpt.range.context.start..excerpt_end, buffer) + { + let hunk_end = hunk.buffer_range.end.to_offset(buffer); if hunk_end >= current_position { continue; } - let start = - Anchor::in_buffer(excerpt.id, hunk.buffer_range.start).to_point(self); + let start = Anchor::in_buffer(excerpt.path_key_index, hunk.buffer_range.start) + .to_point(self); return Some(MultiBufferRow(start.row)); } } @@ -4709,6 +4012,7 @@ impl MultiBufferSnapshot { loop { cursor.prev_excerpt(); let excerpt = cursor.excerpt()?; + let buffer = excerpt.buffer_snapshot(self); let Some(diff) = self.diff_state(excerpt.buffer_id) else { continue; @@ -4716,24 +4020,25 @@ impl MultiBufferSnapshot { if let Some(main_buffer) = &diff.main_buffer { let Some(hunk) = diff .hunks_intersecting_base_text_range_rev( - excerpt.range.context.to_offset(&excerpt.buffer), + excerpt.range.context.to_offset(buffer), main_buffer, ) .next() else { continue; }; - let hunk_start = excerpt.buffer.anchor_after(hunk.diff_base_byte_range.start); - let start = Anchor::in_buffer(excerpt.id, hunk_start).to_point(self); + let hunk_start = buffer.anchor_after(hunk.diff_base_byte_range.start); + let start = Anchor::in_buffer(excerpt.path_key_index, hunk_start).to_point(self); return Some(MultiBufferRow(start.row)); } else { let Some(hunk) = diff - .hunks_intersecting_range_rev(excerpt.range.context.clone(), &excerpt.buffer) + .hunks_intersecting_range_rev(excerpt.range.context.clone(), buffer) .next() else { continue; }; - let start = Anchor::in_buffer(excerpt.id, hunk.buffer_range.start).to_point(self); + let start = Anchor::in_buffer(excerpt.path_key_index, hunk.buffer_range.start) + .to_point(self); return Some(MultiBufferRow(start.row)); } } @@ -4808,16 +4113,17 @@ impl MultiBufferSnapshot { .map(|ch| classifier.kind(ch)) } + pub fn all_buffer_ids(&self) -> impl Iterator + '_ { + self.buffers.iter().map(|(id, _)| *id) + } + pub fn is_singleton(&self) -> bool { self.singleton } - pub fn 
as_singleton(&self) -> Option<(ExcerptId, BufferId, &BufferSnapshot)> { - if self.singleton { - self.excerpts - .iter() - .next() - .map(|e| (e.id, e.buffer_id, &*e.buffer)) + pub fn as_singleton(&self) -> Option<&BufferSnapshot> { + if self.is_singleton() { + Some(self.excerpts.first()?.buffer_snapshot(&self)) } else { None } @@ -4914,11 +4220,11 @@ impl MultiBufferSnapshot { range: MultiBufferOffset::ZERO..MultiBufferOffset::ZERO, excerpts: self.excerpts.cursor(()), diff_transforms: self.diff_transforms.cursor(()), - diffs: &self.diffs, diff_base_chunks: None, excerpt_chunks: None, buffer_chunk: None, language_aware, + snapshot: self, }; let range = range.start.to_offset(self)..range.end.to_offset(self); chunks.seek(range); @@ -5065,7 +4371,7 @@ impl MultiBufferSnapshot { && !region.is_main_buffer { let main_buffer_position = cursor.main_buffer_position()?; - let buffer_snapshot = &cursor.excerpt()?.buffer; + let buffer_snapshot = cursor.excerpt()?.buffer_snapshot(self); return Some((buffer_snapshot, main_buffer_position)); } else if buffer_offset > BufferOffset(region.buffer.len()) { return None; @@ -5073,10 +4379,7 @@ impl MultiBufferSnapshot { Some((region.buffer, buffer_offset)) } - pub fn point_to_buffer_point( - &self, - point: Point, - ) -> Option<(&BufferSnapshot, Point, ExcerptId)> { + pub fn point_to_buffer_point(&self, point: Point) -> Option<(&BufferSnapshot, Point)> { let mut cursor = self.cursor::(); cursor.seek(&point); let region = cursor.region()?; @@ -5087,11 +4390,14 @@ impl MultiBufferSnapshot { && region.has_trailing_newline && !region.is_main_buffer { - return Some((&excerpt.buffer, cursor.main_buffer_position()?, excerpt.id)); + return Some(( + &excerpt.buffer_snapshot(self), + cursor.main_buffer_position()?, + )); } else if buffer_point > region.buffer.max_point() { return None; } - Some((region.buffer, buffer_point, excerpt.id)) + Some((region.buffer, buffer_point)) } pub fn suggested_indents( @@ -5408,21 +4714,20 @@ impl 
MultiBufferSnapshot { let mut cursor = self.excerpts.cursor::(()); cursor.seek(&range.start, Bias::Right); if let Some(excerpt) = cursor.item() { + let buffer_snapshot = excerpt.buffer_snapshot(self); let mut end_before_newline = cursor.end(); if excerpt.has_trailing_newline { end_before_newline -= 1; } - let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); + let excerpt_start = excerpt.range.context.start.to_offset(&buffer_snapshot); let start_in_excerpt = excerpt_start + (range.start - *cursor.start()); let end_in_excerpt = excerpt_start + (cmp::min(end_before_newline, range.end) - *cursor.start()); summary.add_text_dim( - &excerpt - .buffer - .text_summary_for_range::( - start_in_excerpt..end_in_excerpt, - ), + &buffer_snapshot.text_summary_for_range::( + start_in_excerpt..end_in_excerpt, + ), ); if range.end > end_before_newline { @@ -5437,16 +4742,15 @@ impl MultiBufferSnapshot { .summary::<_, ExcerptDimension>(&range.end, Bias::Right) .0; if let Some(excerpt) = cursor.item() { + let buffer_snapshot = excerpt.buffer_snapshot(self); range.end = cmp::max(*cursor.start(), range.end); - let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); + let excerpt_start = excerpt.range.context.start.to_offset(&buffer_snapshot); let end_in_excerpt = excerpt_start + (range.end - *cursor.start()); summary.add_text_dim( - &excerpt - .buffer - .text_summary_for_range::( - excerpt_start..end_in_excerpt, - ), + &buffer_snapshot.text_summary_for_range::( + excerpt_start..end_in_excerpt, + ), ); } } @@ -5464,38 +4768,42 @@ impl MultiBufferSnapshot { + Add, MBD::TextDimension: Sub + Ord, { - let excerpt_id = self.latest_excerpt_id(anchor.excerpt_id); - let locator = self.excerpt_locator_for_id(excerpt_id); - let (start, _, mut item) = self - .excerpts - .find::((), locator, Bias::Left); - let mut start = MBD::from_summary(&start.text); - if item.is_none() && excerpt_id == ExcerptId::max() { - item = self.excerpts.last(); - if let 
Some(last_summary) = self.excerpts.last_summary() { - start = start - ::from_text_summary(&last_summary.text.into()); + let target = anchor.seek_target(self); + let anchor = match anchor { + Anchor::Min => { + return MBD::default(); } - } + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor, + Anchor::Max => { + return MBD::from_summary(&self.text_summary()); + } + }; + + let (start, _, item) = self + .excerpts + .find::((), &target, Bias::Left); + let start = MBD::from_summary(&start.text); let excerpt_start_position = ExcerptDimension(start); if self.diff_transforms.is_empty() { if let Some(excerpt) = item { - if excerpt.id != excerpt_id && excerpt_id != ExcerptId::max() { + if !excerpt.contains(anchor, self) { return excerpt_start_position.0; } + let buffer_snapshot = excerpt.buffer_snapshot(self); let excerpt_buffer_start = excerpt .range .context .start - .summary::(&excerpt.buffer); + .summary::(&buffer_snapshot); let excerpt_buffer_end = excerpt .range .context .end - .summary::(&excerpt.buffer); + .summary::(&buffer_snapshot); let buffer_summary = anchor - .text_anchor - .summary::(&excerpt.buffer); + .text_anchor() + .summary::(&buffer_snapshot); let summary = cmp::min(excerpt_buffer_end, buffer_summary); let mut position = excerpt_start_position; if summary > excerpt_buffer_start { @@ -5513,26 +4821,27 @@ impl MultiBufferSnapshot { diff_transforms_cursor.next(); if let Some(excerpt) = item { - if excerpt.id != excerpt_id && excerpt_id != ExcerptId::max() { - return self.resolve_summary_for_min_or_max_anchor( - &Anchor::min(), + if !excerpt.contains(anchor, self) { + return self.summary_for_excerpt_position_without_hunks( + Bias::Left, excerpt_start_position, &mut diff_transforms_cursor, ); } + let buffer_snapshot = excerpt.buffer_snapshot(self); let excerpt_buffer_start = excerpt .range .context .start - .summary::(&excerpt.buffer); + .summary::(&buffer_snapshot); let excerpt_buffer_end = excerpt .range .context .end - .summary::(&excerpt.buffer); + 
.summary::(&buffer_snapshot); let buffer_summary = anchor - .text_anchor - .summary::(&excerpt.buffer); + .text_anchor() + .summary::(&buffer_snapshot); let summary = cmp::min(excerpt_buffer_end, buffer_summary); let mut position = excerpt_start_position; if summary > excerpt_buffer_start { @@ -5542,16 +4851,16 @@ impl MultiBufferSnapshot { if diff_transforms_cursor.start().0 < position { diff_transforms_cursor.seek_forward(&position, Bias::Left); } - self.resolve_summary_for_anchor( - &anchor, + self.summary_for_anchor_with_excerpt_position( + *anchor, position, &mut diff_transforms_cursor, - &excerpt.buffer, + &buffer_snapshot, ) } else { diff_transforms_cursor.seek_forward(&excerpt_start_position, Bias::Left); - self.resolve_summary_for_min_or_max_anchor( - &Anchor::max(), + self.summary_for_excerpt_position_without_hunks( + Bias::Right, excerpt_start_position, &mut diff_transforms_cursor, ) @@ -5562,9 +4871,9 @@ impl MultiBufferSnapshot { /// Maps an anchor's excerpt-space position to its output-space position by /// walking the diff transforms. The cursor is shared across consecutive /// calls, so it may already be partway through the transform list. - fn resolve_summary_for_anchor( + fn summary_for_anchor_with_excerpt_position( &self, - anchor: &Anchor, + anchor: ExcerptAnchor, excerpt_position: ExcerptDimension, diff_transforms: &mut Cursor< DiffTransform, @@ -5595,7 +4904,7 @@ impl MultiBufferSnapshot { hunk_info, .. 
}) => { - if let Some(diff_base_anchor) = &anchor.diff_base_anchor + if let Some(diff_base_anchor) = anchor.diff_base_anchor && let Some(base_text) = self.diff_state(*buffer_id).map(|diff| diff.base_text()) && diff_base_anchor.is_valid(&base_text) @@ -5619,7 +4928,7 @@ impl MultiBufferSnapshot { } } else if at_transform_end && anchor - .text_anchor + .text_anchor() .cmp(&hunk_info.hunk_start_anchor, excerpt_buffer) .is_gt() { @@ -5654,9 +4963,9 @@ impl MultiBufferSnapshot { } /// Like `resolve_summary_for_anchor` but optimized for min/max anchors. - fn resolve_summary_for_min_or_max_anchor( + fn summary_for_excerpt_position_without_hunks( &self, - anchor: &Anchor, + bias: Bias, excerpt_position: ExcerptDimension, diff_transforms: &mut Cursor< DiffTransform, @@ -5673,7 +4982,7 @@ impl MultiBufferSnapshot { // A right-biased anchor at a transform boundary belongs to the // *next* transform, so advance past the current one. - if anchor.text_anchor.bias == Bias::Right && at_transform_end { + if bias == Bias::Right && at_transform_end { diff_transforms.next(); continue; } @@ -5689,27 +4998,27 @@ impl MultiBufferSnapshot { } fn excerpt_offset_for_anchor(&self, anchor: &Anchor) -> ExcerptOffset { - let mut cursor = self - .excerpts - .cursor::, ExcerptOffset>>(()); - let locator = self.excerpt_locator_for_id(anchor.excerpt_id); + let anchor = match anchor { + Anchor::Min => return ExcerptOffset::default(), + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor, + Anchor::Max => return self.excerpts.summary().len(), + }; + let mut cursor = self.excerpts.cursor::(()); + let target = anchor.seek_target(self); - cursor.seek(&Some(locator), Bias::Left); - if cursor.item().is_none() && anchor.excerpt_id == ExcerptId::max() { - cursor.prev(); - } + cursor.seek(&target, Bias::Left); - let mut position = cursor.start().1; + let mut position = cursor.start().len(); if let Some(excerpt) = cursor.item() - && (excerpt.id == anchor.excerpt_id || anchor.excerpt_id == ExcerptId::max()) + && 
excerpt.contains(anchor, self) { - let excerpt_buffer_start = excerpt - .buffer - .offset_for_anchor(&excerpt.range.context.start); - let excerpt_buffer_end = excerpt.buffer.offset_for_anchor(&excerpt.range.context.end); + let buffer_snapshot = excerpt.buffer_snapshot(self); + let excerpt_buffer_start = + buffer_snapshot.offset_for_anchor(&excerpt.range.context.start); + let excerpt_buffer_end = buffer_snapshot.offset_for_anchor(&excerpt.range.context.end); let buffer_position = cmp::min( excerpt_buffer_end, - excerpt.buffer.offset_for_anchor(&anchor.text_anchor), + buffer_snapshot.offset_for_anchor(&anchor.text_anchor()), ); if buffer_position > excerpt_buffer_start { position += buffer_position - excerpt_buffer_start; @@ -5718,13 +5027,6 @@ impl MultiBufferSnapshot { position } - pub fn latest_excerpt_id(&self, mut excerpt_id: ExcerptId) -> ExcerptId { - while let Some(replacement) = self.replaced_excerpts.get(&excerpt_id) { - excerpt_id = *replacement; - } - excerpt_id - } - pub fn summaries_for_anchors<'a, MBD, I>(&'a self, anchors: I) -> Vec where MBD: MultiBufferDimension @@ -5743,43 +5045,56 @@ impl MultiBufferSnapshot { let mut summaries = Vec::new(); while let Some(anchor) = anchors.peek() { - let excerpt_id = self.latest_excerpt_id(anchor.excerpt_id); - - let excerpt_anchors = anchors.peeking_take_while(|anchor| { - self.latest_excerpt_id(anchor.excerpt_id) == excerpt_id - }); + let target = anchor.seek_target(self); + let excerpt_anchor = match anchor { + Anchor::Min => { + summaries.push(MBD::default()); + anchors.next(); + continue; + } + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor, + Anchor::Max => { + summaries.push(MBD::from_summary(&self.text_summary())); + anchors.next(); + continue; + } + }; - let locator = self.excerpt_locator_for_id(excerpt_id); - cursor.seek_forward(locator, Bias::Left); - if cursor.item().is_none() && excerpt_id == ExcerptId::max() { - cursor.prev(); - } + cursor.seek_forward(&target, Bias::Left); let 
excerpt_start_position = ExcerptDimension(MBD::from_summary(&cursor.start().text)); if let Some(excerpt) = cursor.item() { - if excerpt.id != excerpt_id && excerpt_id != ExcerptId::max() { - let position = self.resolve_summary_for_min_or_max_anchor( - &Anchor::min(), + let buffer_snapshot = excerpt.buffer_snapshot(self); + if !excerpt.contains(&excerpt_anchor, self) { + let position = self.summary_for_excerpt_position_without_hunks( + Bias::Left, excerpt_start_position, &mut diff_transforms_cursor, ); - summaries.extend(excerpt_anchors.map(|_| position)); + summaries.push(position); + anchors.next(); continue; } let excerpt_buffer_start = excerpt .range .context .start - .summary::(&excerpt.buffer); + .summary::(buffer_snapshot); let excerpt_buffer_end = excerpt .range .context .end - .summary::(&excerpt.buffer); - for (buffer_summary, anchor) in excerpt - .buffer + .summary::(buffer_snapshot); + for (buffer_summary, excerpt_anchor) in buffer_snapshot .summaries_for_anchors_with_payload::( - excerpt_anchors.map(|a| (&a.text_anchor, a)), + std::iter::from_fn(|| { + let excerpt_anchor = anchors.peek()?.excerpt_anchor()?; + if !excerpt.contains(&excerpt_anchor, self) { + return None; + } + anchors.next(); + Some((excerpt_anchor.text_anchor(), excerpt_anchor)) + }), ) { let summary = cmp::min(excerpt_buffer_end, buffer_summary); @@ -5792,21 +5107,22 @@ impl MultiBufferSnapshot { diff_transforms_cursor.seek_forward(&position, Bias::Left); } - summaries.push(self.resolve_summary_for_anchor( - anchor, + summaries.push(self.summary_for_anchor_with_excerpt_position( + excerpt_anchor, position, &mut diff_transforms_cursor, - &excerpt.buffer, + &buffer_snapshot, )); } } else { diff_transforms_cursor.seek_forward(&excerpt_start_position, Bias::Left); - let position = self.resolve_summary_for_min_or_max_anchor( - &Anchor::max(), + let position = self.summary_for_excerpt_position_without_hunks( + Bias::Right, excerpt_start_position, &mut diff_transforms_cursor, ); - 
summaries.extend(excerpt_anchors.map(|_| position)); + summaries.push(position); + anchors.next(); } } @@ -5853,92 +5169,27 @@ impl MultiBufferSnapshot { }) } - pub fn refresh_anchors<'a, I>(&'a self, anchors: I) -> Vec<(usize, Anchor, bool)> - where - I: 'a + IntoIterator, - { - let mut anchors = anchors.into_iter().enumerate().peekable(); - let mut cursor = self.excerpts.cursor::>(()); - cursor.next(); - - let mut result = Vec::new(); - - while let Some((_, anchor)) = anchors.peek() { - let old_excerpt_id = anchor.excerpt_id; - - // Find the location where this anchor's excerpt should be. - let old_locator = self.excerpt_locator_for_id(old_excerpt_id); - cursor.seek_forward(&Some(old_locator), Bias::Left); - - let next_excerpt = cursor.item(); - let prev_excerpt = cursor.prev_item(); - - // Process all of the anchors for this excerpt. - while let Some((anchor_ix, &anchor)) = - anchors.next_if(|(_, anchor)| anchor.excerpt_id == old_excerpt_id) - { - let mut anchor = anchor; - - // Leave min and max anchors unchanged if invalid or - // if the old excerpt still exists at this location - let mut kept_position = next_excerpt - .is_some_and(|e| e.id == old_excerpt_id && e.contains(&anchor)) - || old_excerpt_id == ExcerptId::max() - || old_excerpt_id == ExcerptId::min(); - - // If the old excerpt no longer exists at this location, then attempt to - // find an equivalent position for this anchor in an adjacent excerpt. - if !kept_position { - for excerpt in [next_excerpt, prev_excerpt].iter().filter_map(|e| *e) { - if excerpt.contains(&anchor) { - anchor.excerpt_id = excerpt.id; - kept_position = true; - break; - } - } - } - - // If there's no adjacent excerpt that contains the anchor's position, - // then report that the anchor has lost its position. 
- if !kept_position { - anchor = if let Some(excerpt) = next_excerpt { - let mut text_anchor = excerpt - .range - .context - .start - .bias(anchor.text_anchor.bias, &excerpt.buffer); - if text_anchor - .cmp(&excerpt.range.context.end, &excerpt.buffer) - .is_gt() - { - text_anchor = excerpt.range.context.end; - } - Anchor::in_buffer(excerpt.id, text_anchor) - } else if let Some(excerpt) = prev_excerpt { - let mut text_anchor = excerpt - .range - .context - .end - .bias(anchor.text_anchor.bias, &excerpt.buffer); - if text_anchor - .cmp(&excerpt.range.context.start, &excerpt.buffer) - .is_lt() - { - text_anchor = excerpt.range.context.start; - } - Anchor::in_buffer(excerpt.id, text_anchor) - } else if anchor.text_anchor.bias == Bias::Left { - Anchor::min() - } else { - Anchor::max() - }; + pub fn excerpts_for_buffer( + &self, + buffer_id: BufferId, + ) -> impl Iterator> { + if let Some(buffer_state) = self.buffers.get(&buffer_id) { + let path_key = buffer_state.path_key.clone(); + let mut cursor = self.excerpts.cursor::(()); + cursor.seek_forward(&path_key, Bias::Left); + Some(iter::from_fn(move || { + let excerpt = cursor.item()?; + if excerpt.path_key != path_key { + return None; } - - result.push((anchor_ix, anchor, kept_position)); - } + cursor.next(); + Some(excerpt.range.clone()) + })) + } else { + None } - result.sort_unstable_by(|a, b| a.1.cmp(&b.1, self)); - result + .into_iter() + .flatten() } pub fn anchor_before(&self, position: T) -> Anchor { @@ -5993,132 +5244,158 @@ impl MultiBufferSnapshot { let mut excerpts = self .excerpts - .cursor::>>(()); + .cursor::>(()); excerpts.seek(&excerpt_offset, Bias::Right); if excerpts.item().is_none() && excerpt_offset == excerpts.start().0 && bias == Bias::Left { excerpts.prev(); } if let Some(excerpt) = excerpts.item() { + let buffer_snapshot = excerpt.buffer_snapshot(self); let mut overshoot = excerpt_offset.saturating_sub(excerpts.start().0); if excerpt.has_trailing_newline && excerpt_offset == excerpts.end().0 { 
overshoot -= 1; bias = Bias::Right; } - let buffer_start = excerpt.range.context.start.to_offset(&excerpt.buffer); - let text_anchor = - excerpt.clip_anchor(excerpt.buffer.anchor_at(buffer_start + overshoot, bias)); - let anchor = Anchor::in_buffer(excerpt.id, text_anchor); - match diff_base_anchor { + let buffer_start = excerpt.range.context.start.to_offset(&buffer_snapshot); + let text_anchor = excerpt.clip_anchor( + buffer_snapshot.anchor_at(buffer_start + overshoot, bias), + self, + ); + let anchor = ExcerptAnchor::in_buffer(excerpt.path_key_index, text_anchor); + let anchor = match diff_base_anchor { Some(diff_base_anchor) => anchor.with_diff_base_anchor(diff_base_anchor), None => anchor, - } + }; + anchor.into() } else if excerpt_offset == ExcerptDimension(MultiBufferOffset::ZERO) && bias == Bias::Left { - Anchor::min() + Anchor::Min } else { - Anchor::max() + Anchor::Max } } - /// Wraps the [`text::Anchor`] in a [`crate::Anchor`] if this multi-buffer is a singleton. - pub fn as_singleton_anchor(&self, text_anchor: text::Anchor) -> Option { - let (excerpt, buffer, _) = self.as_singleton()?; - if text_anchor.buffer_id.is_none_or(|id| id == buffer) { - Some(Anchor::in_buffer(excerpt, text_anchor)) - } else { - None - } + /// Lifts a buffer anchor to a multibuffer anchor without checking against excerpt boundaries. Returns `None` if there are no excerpts for the buffer + pub fn anchor_in_buffer(&self, anchor: text::Anchor) -> Option { + let path_key_index = self.path_key_index_for_buffer(anchor.buffer_id)?; + Some(Anchor::in_buffer(path_key_index, anchor)) } - /// Returns an anchor for the given excerpt and text anchor, - /// Returns [`None`] if the excerpt_id is no longer valid or the text anchor range is out of excerpt's bounds. 
- pub fn anchor_range_in_excerpt( - &self, - excerpt_id: ExcerptId, - text_anchor: Range, - ) -> Option> { - let excerpt = self.excerpt(self.latest_excerpt_id(excerpt_id))?; - - Some( - Self::anchor_in_excerpt_(excerpt, text_anchor.start)? - ..Self::anchor_in_excerpt_(excerpt, text_anchor.end)?, - ) - } + /// Creates a multibuffer anchor for the given buffer anchor, if it is contained in any excerpt. + pub fn anchor_in_excerpt(&self, text_anchor: text::Anchor) -> Option { + for excerpt in { + let this = &self; + let buffer_id = text_anchor.buffer_id; + if let Some(buffer_state) = this.buffers.get(&buffer_id) { + let path_key = buffer_state.path_key.clone(); + let mut cursor = this.excerpts.cursor::(()); + cursor.seek_forward(&path_key, Bias::Left); + Some(iter::from_fn(move || { + let excerpt = cursor.item()?; + if excerpt.path_key != path_key { + return None; + } + cursor.next(); + Some(excerpt) + })) + } else { + None + } + .into_iter() + .flatten() + } { + let buffer_snapshot = excerpt.buffer_snapshot(self); + if excerpt.range.contains(&text_anchor, &buffer_snapshot) { + return Some(Anchor::in_buffer(excerpt.path_key_index, text_anchor)); + } + } - /// Returns an anchor for the given excerpt and text anchor, - /// Returns [`None`] if the excerpt_id is no longer valid or the text anchor range is out of excerpt's bounds. - pub fn anchor_in_excerpt( - &self, - excerpt_id: ExcerptId, - text_anchor: text::Anchor, - ) -> Option { - let excerpt = self.excerpt(self.latest_excerpt_id(excerpt_id))?; - Self::anchor_in_excerpt_(excerpt, text_anchor) + None } - /// Same as [`MultiBuffer::anchor_in_excerpt`], but more efficient than calling it multiple times. - pub fn anchors_in_excerpt( + /// Creates a multibuffer anchor for the given buffer anchor, if it is contained in any excerpt. 
+ pub fn buffer_anchor_range_to_anchor_range( &self, - excerpt_id: ExcerptId, - text_anchors: impl IntoIterator, - ) -> Option>> { - let excerpt = self.excerpt(self.latest_excerpt_id(excerpt_id))?; - Some( - text_anchors - .into_iter() - .map(|text_anchor| Self::anchor_in_excerpt_(excerpt, text_anchor)), - ) - } - - fn anchor_in_excerpt_(excerpt: &Excerpt, text_anchor: text::Anchor) -> Option { - match text_anchor.buffer_id { - Some(buffer_id) if buffer_id == excerpt.buffer_id => (), - Some(_) => return None, - None if text_anchor.is_max() || text_anchor.is_min() => { - return Some(Anchor::in_buffer(excerpt.id, text_anchor)); + text_anchor: Range, + ) -> Option> { + for excerpt in { + let this = &self; + let buffer_id = text_anchor.start.buffer_id; + if let Some(buffer_state) = this.buffers.get(&buffer_id) { + let path_key = buffer_state.path_key.clone(); + let mut cursor = this.excerpts.cursor::(()); + cursor.seek_forward(&path_key, Bias::Left); + Some(iter::from_fn(move || { + let excerpt = cursor.item()?; + if excerpt.path_key != path_key { + return None; + } + cursor.next(); + Some(excerpt) + })) + } else { + None + } + .into_iter() + .flatten() + } { + let buffer_snapshot = excerpt.buffer_snapshot(self); + if excerpt.range.contains(&text_anchor.start, &buffer_snapshot) + && excerpt.range.contains(&text_anchor.end, &buffer_snapshot) + { + return Some(Anchor::range_in_buffer(excerpt.path_key_index, text_anchor)); } - None => return None, - } - - let context = &excerpt.range.context; - if context.start.cmp(&text_anchor, &excerpt.buffer).is_gt() - || context.end.cmp(&text_anchor, &excerpt.buffer).is_lt() - { - return None; } - Some(Anchor::in_buffer(excerpt.id, text_anchor)) - } - - pub fn context_range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option> { - Some(self.excerpt(excerpt_id)?.range.context.clone()) + None } - pub fn excerpt_range_for_excerpt( + /// Returns a buffer anchor and its buffer snapshot for the given anchor, if it is in the multibuffer. 
+ pub fn anchor_to_buffer_anchor( &self, - excerpt_id: ExcerptId, - ) -> Option> { - Some(self.excerpt(excerpt_id)?.range.clone()) + anchor: Anchor, + ) -> Option<(text::Anchor, &BufferSnapshot)> { + match anchor { + Anchor::Min => { + let excerpt = self.excerpts.first()?; + let buffer = excerpt.buffer_snapshot(self); + Some((excerpt.range.context.start, buffer)) + } + Anchor::Excerpt(excerpt_anchor) => { + let buffer = self.buffer_for_id(excerpt_anchor.buffer_id())?; + Some((excerpt_anchor.text_anchor, buffer)) + } + Anchor::Max => { + let excerpt = self.excerpts.last()?; + let buffer = excerpt.buffer_snapshot(self); + Some((excerpt.range.context.end, buffer)) + } + } } pub fn can_resolve(&self, anchor: &Anchor) -> bool { - if anchor.is_min() || anchor.is_max() { + match anchor { // todo(lw): should be `!self.is_empty()` - true - } else if let Some(excerpt) = self.excerpt(anchor.excerpt_id) { - excerpt.buffer.can_resolve(&anchor.text_anchor) - } else { - false + Anchor::Min | Anchor::Max => true, + Anchor::Excerpt(excerpt_anchor) => { + let Some(target) = excerpt_anchor.try_seek_target(self) else { + return false; + }; + let mut cursor = self.excerpts.cursor::(()); + cursor.seek(&target, Bias::Left); + let Some(excerpt) = cursor.item() else { + return false; + }; + excerpt + .buffer_snapshot(self) + .can_resolve(&excerpt_anchor.text_anchor()) + } } } - pub fn excerpts( - &self, - ) -> impl Iterator)> { - self.excerpts - .iter() - .map(|excerpt| (excerpt.id, &*excerpt.buffer, excerpt.range.clone())) + pub fn excerpts(&self) -> impl Iterator> { + self.excerpts.iter().map(|excerpt| excerpt.range.clone()) } fn cursor<'a, MBD, BD>(&'a self) -> MultiBufferCursor<'a, MBD, BD> @@ -6131,35 +5408,17 @@ impl MultiBufferSnapshot { MultiBufferCursor { excerpts, diff_transforms, - diffs: &self.diffs, cached_region: OnceCell::new(), + snapshot: self, } } - pub fn excerpt_before(&self, excerpt_id: ExcerptId) -> Option> { - let start_locator = 
self.excerpt_locator_for_id(excerpt_id); - let mut excerpts = self - .excerpts - .cursor::, ExcerptOffset>>(()); - excerpts.seek(&Some(start_locator), Bias::Left); + pub fn excerpt_before(&self, anchor: Anchor) -> Option> { + let target = anchor.try_seek_target(&self)?; + let mut excerpts = self.excerpts.cursor::(()); + excerpts.seek(&target, Bias::Left); excerpts.prev(); - - let mut diff_transforms = self - .diff_transforms - .cursor::>(()); - diff_transforms.seek(&excerpts.start().1, Bias::Left); - if diff_transforms.end().excerpt_dimension < excerpts.start().1 { - diff_transforms.next(); - } - - let excerpt = excerpts.item()?; - Some(MultiBufferExcerpt { - excerpt, - offset: diff_transforms.start().output_dimension.0, - buffer_offset: BufferOffset(excerpt.range.context.start.to_offset(&excerpt.buffer)), - excerpt_offset: excerpts.start().1, - diff_transforms, - }) + Some(excerpts.item()?.range.clone()) } pub fn excerpt_boundaries_in_range( @@ -6204,7 +5463,7 @@ impl MultiBufferSnapshot { } else { cursor.seek_to_start_of_current_excerpt(); } - let mut prev_region = cursor + let mut prev_excerpt = cursor .fetch_excerpt_with_range() .map(|(excerpt, _)| excerpt); @@ -6219,7 +5478,7 @@ impl MultiBufferSnapshot { let (next_excerpt, next_range) = cursor.fetch_excerpt_with_range()?; cursor.next_excerpt_forwards(); if !bounds.contains(&next_range.start.key) { - prev_region = Some(next_excerpt); + prev_excerpt = Some(next_excerpt); continue; } @@ -6230,18 +5489,20 @@ impl MultiBufferSnapshot { self.max_point() }; - let prev = prev_region.as_ref().map(|region| ExcerptInfo { - id: region.id, - buffer: region.buffer.clone(), - buffer_id: region.buffer_id, - range: region.range.clone(), + let prev = prev_excerpt.as_ref().map(|excerpt| ExcerptBoundaryInfo { + start_anchor: Anchor::in_buffer( + excerpt.path_key_index, + excerpt.range.context.start, + ), + range: excerpt.range.clone(), end_row: MultiBufferRow(next_region_start.row), }); - let next = ExcerptInfo { - id: 
next_excerpt.id, - buffer: next_excerpt.buffer.clone(), - buffer_id: next_excerpt.buffer_id, + let next = ExcerptBoundaryInfo { + start_anchor: Anchor::in_buffer( + next_excerpt.path_key_index, + next_excerpt.range.context.start, + ), range: next_excerpt.range.clone(), end_row: if next_excerpt.has_trailing_newline { MultiBufferRow(next_region_end.row - 1) @@ -6252,7 +5513,7 @@ impl MultiBufferSnapshot { let row = MultiBufferRow(next_region_start.row); - prev_region = Some(next_excerpt); + prev_excerpt = Some(next_excerpt); return Some(ExcerptBoundary { row, prev, next }); } @@ -6267,6 +5528,91 @@ impl MultiBufferSnapshot { self.non_text_state_update_count } + /// Allows converting several ranges within the same excerpt between buffer offsets and multibuffer offsets. + /// + /// If the input range is contained in a single excerpt, invokes the callback with the full range of that excerpt + /// and the input range both converted to buffer coordinates. The buffer ranges returned by the callback are lifted back + /// to multibuffer offsets and returned. + /// + /// Returns `None` if the input range spans multiple excerpts. + pub fn map_excerpt_ranges<'a, T>( + &'a self, + position: Range, + f: impl FnOnce( + &'a BufferSnapshot, + ExcerptRange, + Range, + ) -> Vec<(Range, T)>, + ) -> Option, T)>> { + let mut cursor = self.cursor::(); + cursor.seek(&position.start); + + let region = cursor.region()?; + if !region.is_main_buffer { + return None; + } + let excerpt = cursor.excerpt()?; + let excerpt_start = *cursor.excerpts.start(); + let input_buffer_start = cursor.buffer_position_at(&position.start)?; + + cursor.seek_forward(&position.end); + if cursor.excerpt()? 
!= excerpt { + return None; + } + let region = cursor.region()?; + if !region.is_main_buffer { + return None; + } + let input_buffer_end = cursor.buffer_position_at(&position.end)?; + let input_buffer_range = input_buffer_start..input_buffer_end; + let buffer = excerpt.buffer_snapshot(self); + let excerpt_context_range = excerpt.range.context.to_offset(buffer); + let excerpt_context_range = + BufferOffset(excerpt_context_range.start)..BufferOffset(excerpt_context_range.end); + let excerpt_primary_range = excerpt.range.primary.to_offset(buffer); + let excerpt_primary_range = + BufferOffset(excerpt_primary_range.start)..BufferOffset(excerpt_primary_range.end); + let results = f( + buffer, + ExcerptRange { + context: excerpt_context_range.clone(), + primary: excerpt_primary_range, + }, + input_buffer_range, + ); + let mut diff_transforms = cursor.diff_transforms; + Some( + results + .into_iter() + .map(|(buffer_range, metadata)| { + let clamped_start = buffer_range + .start + .max(excerpt_context_range.start) + .min(excerpt_context_range.end); + let clamped_end = buffer_range + .end + .max(clamped_start) + .min(excerpt_context_range.end); + let excerpt_offset_start = + excerpt_start + (clamped_start.0 - excerpt_context_range.start.0); + let excerpt_offset_end = + excerpt_start + (clamped_end.0 - excerpt_context_range.start.0); + + diff_transforms.seek(&excerpt_offset_start, Bias::Right); + let mut output_start = diff_transforms.start().output_dimension; + output_start += + excerpt_offset_start - diff_transforms.start().excerpt_dimension; + + diff_transforms.seek_forward(&excerpt_offset_end, Bias::Right); + let mut output_end = diff_transforms.start().output_dimension; + output_end += excerpt_offset_end - diff_transforms.start().excerpt_dimension; + + (output_start.0..output_end.0, metadata) + }) + .collect(), + ) + } + /// Returns the smallest enclosing bracket ranges containing the given range or /// None if no brackets contain range or the range is not contained in 
a single /// excerpt @@ -6281,32 +5627,31 @@ impl MultiBufferSnapshot { >, ) -> Option<(Range, Range)> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpt = self.excerpt_containing(range.clone())?; - let buffer = excerpt.buffer(); - let excerpt_buffer_range = excerpt.buffer_range(); - - // Filter to ranges contained in the excerpt - let range_filter = |open: Range, close: Range| -> bool { - excerpt_buffer_range.contains(&BufferOffset(open.start)) - && excerpt_buffer_range.contains(&BufferOffset(close.end)) - && range_filter.is_none_or(|filter| { - filter( - buffer, - BufferOffset(open.start)..BufferOffset(close.end), - BufferOffset(close.start)..BufferOffset(close.end), - ) - }) - }; - - let (open, close) = excerpt.buffer().innermost_enclosing_bracket_ranges( - excerpt.map_range_to_buffer(range), - Some(&range_filter), - )?; - - Some(( - excerpt.map_range_from_buffer(BufferOffset(open.start)..BufferOffset(open.end)), - excerpt.map_range_from_buffer(BufferOffset(close.start)..BufferOffset(close.end)), - )) + let results = + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + let filter = |open: Range, close: Range| -> bool { + excerpt_range.context.start.0 <= open.start + && close.end <= excerpt_range.context.end.0 + && range_filter.is_none_or(|filter| { + filter( + buffer, + BufferOffset(open.start)..BufferOffset(close.end), + BufferOffset(close.start)..BufferOffset(close.end), + ) + }) + }; + let Some((open, close)) = + buffer.innermost_enclosing_bracket_ranges(input_buffer_range, Some(&filter)) + else { + return Vec::new(); + }; + vec![ + (BufferOffset(open.start)..BufferOffset(open.end), ()), + (BufferOffset(close.start)..BufferOffset(close.end), ()), + ] + })?; + let [(open, _), (close, _)] = results.try_into().ok()?; + Some((open, close)) } /// Returns enclosing bracket ranges containing the given range or returns None if the range is @@ -6314,30 +5659,33 @@ impl MultiBufferSnapshot { pub fn 
enclosing_bracket_ranges( &self, range: Range, - ) -> Option, Range)> + '_> - { + ) -> Option, Range)>> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpt = self.excerpt_containing(range.clone())?; - - Some( - excerpt - .buffer() - .enclosing_bracket_ranges(excerpt.map_range_to_buffer(range)) - .filter_map(move |pair| { - let open_range = - BufferOffset(pair.open_range.start)..BufferOffset(pair.open_range.end); - let close_range = - BufferOffset(pair.close_range.start)..BufferOffset(pair.close_range.end); - if excerpt.contains_buffer_range(open_range.start..close_range.end) { - Some(( - excerpt.map_range_from_buffer(open_range), - excerpt.map_range_from_buffer(close_range), - )) - } else { - None - } - }), - ) + let results = + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + buffer + .enclosing_bracket_ranges(input_buffer_range) + .filter(|pair| { + excerpt_range.context.start.0 <= pair.open_range.start + && pair.close_range.end <= excerpt_range.context.end.0 + }) + .flat_map(|pair| { + [ + ( + BufferOffset(pair.open_range.start) + ..BufferOffset(pair.open_range.end), + (), + ), + ( + BufferOffset(pair.close_range.start) + ..BufferOffset(pair.close_range.end), + (), + ), + ] + }) + .collect() + })?; + Some(results.into_iter().map(|(range, _)| range).tuples()) } /// Returns enclosing bracket ranges containing the given range or returns None if the range is @@ -6348,54 +5696,55 @@ impl MultiBufferSnapshot { options: TreeSitterOptions, ) -> impl Iterator, TextObject)> + '_ { let range = range.start.to_offset(self)..range.end.to_offset(self); - self.excerpt_containing(range.clone()) - .map(|mut excerpt| { - excerpt - .buffer() - .text_object_ranges(excerpt.map_range_to_buffer(range), options) - .filter_map(move |(range, text_object)| { - let range = BufferOffset(range.start)..BufferOffset(range.end); - if excerpt.contains_buffer_range(range.clone()) { - Some((excerpt.map_range_from_buffer(range), 
text_object)) - } else { - None - } - }) - }) - .into_iter() - .flatten() + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + buffer + .text_object_ranges(input_buffer_range, options) + .filter(|(range, _)| { + excerpt_range.context.start.0 <= range.start + && range.end <= excerpt_range.context.end.0 + }) + .map(|(range, text_object)| { + ( + BufferOffset(range.start)..BufferOffset(range.end), + text_object, + ) + }) + .collect() + }) + .into_iter() + .flatten() } - /// Returns bracket range pairs overlapping the given `range` or returns None if the `range` is - /// not contained in a single excerpt pub fn bracket_ranges( &self, range: Range, - ) -> Option, Range)> + '_> - { + ) -> Option, Range)>> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpt = self.excerpt_containing(range.clone())?; - Some( - excerpt - .buffer() - .bracket_ranges(excerpt.map_range_to_buffer(range)) - .filter_map(move |pair| { - let open_range = - BufferOffset(pair.open_range.start)..BufferOffset(pair.open_range.end); - let close_range = - BufferOffset(pair.close_range.start)..BufferOffset(pair.close_range.end); - excerpt - .contains_buffer_range(open_range.start..close_range.end) - .then(|| BracketMatch { - open_range: excerpt.map_range_from_buffer(open_range), - close_range: excerpt.map_range_from_buffer(close_range), - color_index: pair.color_index, - newline_only: pair.newline_only, - syntax_layer_depth: pair.syntax_layer_depth, - }) - }) - .map(BracketMatch::bracket_ranges), - ) + let results = + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + buffer + .bracket_ranges(input_buffer_range) + .filter(|pair| { + excerpt_range.context.start.0 <= pair.open_range.start + && pair.close_range.end <= excerpt_range.context.end.0 + }) + .flat_map(|pair| { + [ + ( + BufferOffset(pair.open_range.start) + ..BufferOffset(pair.open_range.end), + (), + ), + ( + BufferOffset(pair.close_range.start) + 
..BufferOffset(pair.close_range.end), + (), + ), + ] + }) + .collect() + })?; + Some(results.into_iter().map(|(range, _)| range).tuples()) } pub fn redacted_ranges<'a, T: ToOffset>( @@ -6448,7 +5797,7 @@ impl MultiBufferSnapshot { cursor.seek(&Point::new(start_row.0, 0)); iter::from_fn(move || { let mut region = cursor.region()?; - while !buffer_filter(®ion.excerpt.buffer) { + while !buffer_filter(®ion.excerpt.buffer_snapshot(self)) { cursor.next(); region = cursor.region()?; } @@ -6470,11 +5819,11 @@ impl MultiBufferSnapshot { .line_indents_in_row_range(buffer_start_row..buffer_end_row); let region_buffer_row = region.buffer_range.start.row; let region_row = region.range.start.row; - let region_buffer = ®ion.excerpt.buffer; + let region_buffer = region.excerpt.buffer_snapshot(self); cursor.next(); Some(line_indents.map(move |(buffer_row, indent)| { let row = region_row + (buffer_row - region_buffer_row); - (MultiBufferRow(row), indent, region_buffer.as_ref()) + (MultiBufferRow(row), indent, region_buffer) })) }) .flatten() @@ -6490,7 +5839,7 @@ impl MultiBufferSnapshot { cursor.seek(&Point::new(end_row.0, 0)); iter::from_fn(move || { let mut region = cursor.region()?; - while !buffer_filter(®ion.excerpt.buffer) { + while !buffer_filter(®ion.excerpt.buffer_snapshot(self)) { cursor.prev(); region = cursor.region()?; } @@ -6514,11 +5863,11 @@ impl MultiBufferSnapshot { .reversed_line_indents_in_row_range(buffer_start_row..buffer_end_row); let region_buffer_row = region.buffer_range.start.row; let region_row = region.range.start.row; - let region_buffer = ®ion.excerpt.buffer; + let region_buffer = region.excerpt.buffer_snapshot(self); cursor.prev(); Some(line_indents.map(move |(buffer_row, indent)| { let row = region_row + (buffer_row - region_buffer_row); - (MultiBufferRow(row), indent, region_buffer.as_ref()) + (MultiBufferRow(row), indent, region_buffer) })) }) .flatten() @@ -6788,7 +6137,7 @@ impl MultiBufferSnapshot { fn language_settings<'a>(&'a self, cx: &'a 
App) -> Cow<'a, LanguageSettings> { self.excerpts .first() - .map(|excerpt| &excerpt.buffer) + .map(|excerpt| excerpt.buffer_snapshot(self)) .map(|buffer| LanguageSettings::for_buffer_snapshot(buffer, None, cx)) .unwrap_or_else(move || self.language_settings_at(MultiBufferOffset::ZERO, cx)) } @@ -6840,7 +6189,7 @@ impl MultiBufferSnapshot { pub fn has_diagnostics(&self) -> bool { self.excerpts .iter() - .any(|excerpt| excerpt.buffer.has_diagnostics()) + .any(|excerpt| excerpt.buffer_snapshot(self).has_diagnostics()) } pub fn diagnostic_group( @@ -6919,7 +6268,12 @@ impl MultiBufferSnapshot { .map(|entry| (entry.range, entry.diagnostic)), ) }) - .map(|(range, diagnostic, b)| (b.buffer_id, DiagnosticEntryRef { diagnostic, range })) + .map(|(range, diagnostic, excerpt)| { + ( + excerpt.buffer_snapshot(self).remote_id(), + DiagnosticEntryRef { diagnostic, range }, + ) + }) } pub fn syntax_ancestor( @@ -6927,41 +6281,52 @@ impl MultiBufferSnapshot { range: Range, ) -> Option<(tree_sitter::Node<'_>, Range)> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpt = self.excerpt_containing(range.clone())?; - let node = excerpt - .buffer() - .syntax_ancestor(excerpt.map_range_to_buffer(range))?; - let node_range = node.byte_range(); - let node_range = BufferOffset(node_range.start)..BufferOffset(node_range.end); - if !excerpt.contains_buffer_range(node_range.clone()) { - return None; - }; - Some((node, excerpt.map_range_from_buffer(node_range))) + let results = + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + let Some(node) = buffer.syntax_ancestor(input_buffer_range) else { + return vec![]; + }; + let node_range = node.byte_range(); + if excerpt_range.context.start.0 <= node_range.start + && node_range.end <= excerpt_range.context.end.0 + { + vec![( + BufferOffset(node_range.start)..BufferOffset(node_range.end), + node, + )] + } else { + vec![] + } + })?; + let (output_range, node) = 
results.into_iter().next()?; + Some((node, output_range)) } pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option> { - let (excerpt_id, _, buffer) = self.as_singleton()?; - let outline = buffer.outline(theme); + let buffer_snapshot = self.as_singleton()?; + let excerpt = self.excerpts.first()?; + let path_key_index = excerpt.path_key_index; + let outline = buffer_snapshot.outline(theme); Some(Outline::new( outline .items .into_iter() - .flat_map(|item| { - Some(OutlineItem { - depth: item.depth, - range: self.anchor_range_in_excerpt(excerpt_id, item.range)?, - source_range_for_text: self - .anchor_range_in_excerpt(excerpt_id, item.source_range_for_text)?, - text: item.text, - highlight_ranges: item.highlight_ranges, - name_ranges: item.name_ranges, - body_range: item.body_range.and_then(|body_range| { - self.anchor_range_in_excerpt(excerpt_id, body_range) - }), - annotation_range: item.annotation_range.and_then(|annotation_range| { - self.anchor_range_in_excerpt(excerpt_id, annotation_range) - }), - }) + .map(|item| OutlineItem { + depth: item.depth, + range: Anchor::range_in_buffer(path_key_index, item.range), + source_range_for_text: Anchor::range_in_buffer( + path_key_index, + item.source_range_for_text, + ), + text: item.text, + highlight_ranges: item.highlight_ranges, + name_ranges: item.name_ranges, + body_range: item + .body_range + .map(|body_range| Anchor::range_in_buffer(path_key_index, body_range)), + annotation_range: item.annotation_range.map(|annotation_range| { + Anchor::range_in_buffer(path_key_index, annotation_range) + }), }) .collect(), )) @@ -6973,173 +6338,90 @@ impl MultiBufferSnapshot { theme: Option<&SyntaxTheme>, ) -> Option<(BufferId, Vec>)> { let anchor = self.anchor_before(offset); - let excerpt @ &Excerpt { - id: excerpt_id, - buffer_id, - ref buffer, - .. 
- } = self.excerpt(anchor.excerpt_id)?; - if cfg!(debug_assertions) { - match anchor.text_anchor.buffer_id { - // we clearly are hitting this according to sentry, but in what situations can this occur? - Some(anchor_buffer_id) => { - assert_eq!( - anchor_buffer_id, buffer_id, - "anchor {anchor:?} does not match with resolved excerpt {excerpt:?}" - ) - } - None => assert!(anchor.is_max()), - } - }; + let target = anchor.try_seek_target(&self)?; + let (_, _, excerpt) = self.excerpts.find((), &target, Bias::Left); + let excerpt = excerpt?; + let buffer_snapshot = excerpt.buffer_snapshot(self); Some(( - buffer_id, - buffer - .symbols_containing(anchor.text_anchor, theme) + buffer_snapshot.remote_id(), + buffer_snapshot + .symbols_containing( + anchor + .excerpt_anchor() + .map(|anchor| anchor.text_anchor()) + .unwrap_or(text::Anchor::min_for_buffer(buffer_snapshot.remote_id())), + theme, + ) .into_iter() .flat_map(|item| { Some(OutlineItem { depth: item.depth, source_range_for_text: Anchor::range_in_buffer( - excerpt_id, + excerpt.path_key_index, item.source_range_for_text, ), - range: Anchor::range_in_buffer(excerpt_id, item.range), + range: Anchor::range_in_buffer(excerpt.path_key_index, item.range), text: item.text, highlight_ranges: item.highlight_ranges, name_ranges: item.name_ranges, - body_range: item - .body_range - .map(|body_range| Anchor::range_in_buffer(excerpt_id, body_range)), - annotation_range: item - .annotation_range - .map(|body_range| Anchor::range_in_buffer(excerpt_id, body_range)), + body_range: item.body_range.map(|body_range| { + Anchor::range_in_buffer(excerpt.path_key_index, body_range) + }), + annotation_range: item.annotation_range.map(|body_range| { + Anchor::range_in_buffer(excerpt.path_key_index, body_range) + }), }) }) .collect(), )) } - fn excerpt_locator_for_id(&self, id: ExcerptId) -> &Locator { - self.try_excerpt_locator_for_id(id) - .unwrap_or_else(|| panic!("invalid excerpt id {id:?}")) + pub fn buffer_for_path(&self, path: 
&PathKey) -> Option<&BufferSnapshot> { + let (_, _, excerpt) = self + .excerpts + .find::((), path, Bias::Left); + Some(excerpt?.buffer_snapshot(self)) } - fn try_excerpt_locator_for_id(&self, id: ExcerptId) -> Option<&Locator> { - if id == ExcerptId::min() { - Some(Locator::min_ref()) - } else if id == ExcerptId::max() { - Some(Locator::max_ref()) - } else { - let (_, _, item) = self.excerpt_ids.find::((), &id, Bias::Left); - if let Some(entry) = item - && entry.id == id - { - return Some(&entry.locator); - } - None - } + pub fn path_for_buffer(&self, buffer_id: BufferId) -> Option<&PathKey> { + Some(&self.buffers.get(&buffer_id)?.path_key) } - /// Returns the locators referenced by the given excerpt IDs, sorted by locator. - fn excerpt_locators_for_ids( - &self, - ids: impl IntoIterator, - ) -> SmallVec<[Locator; 1]> { - let mut sorted_ids = ids.into_iter().collect::>(); - sorted_ids.sort_unstable(); - sorted_ids.dedup(); - let mut locators = SmallVec::new(); - - while sorted_ids.last() == Some(&ExcerptId::max()) { - sorted_ids.pop(); - locators.push(Locator::max()); - } - - let mut sorted_ids = sorted_ids.into_iter().peekable(); - locators.extend( - sorted_ids - .peeking_take_while(|excerpt| *excerpt == ExcerptId::min()) - .map(|_| Locator::min()), - ); - - let mut cursor = self.excerpt_ids.cursor::(()); - for id in sorted_ids { - if cursor.seek_forward(&id, Bias::Left) { - locators.push(cursor.item().unwrap().locator.clone()); - } else { - panic!("invalid excerpt id {:?}", id); - } - } + pub(crate) fn path_key_index_for_buffer(&self, buffer_id: BufferId) -> Option { + let snapshot = self.buffers.get(&buffer_id)?; + Some(snapshot.path_key_index) + } - locators.sort_unstable(); - locators + fn first_excerpt_for_buffer(&self, buffer_id: BufferId) -> Option<&Excerpt> { + let path_key = &self.buffers.get(&buffer_id)?.path_key; + self.first_excerpt_for_path(path_key) } - pub fn buffer_id_for_excerpt(&self, excerpt_id: ExcerptId) -> Option { - 
Some(self.excerpt(excerpt_id)?.buffer_id) + fn first_excerpt_for_path(&self, path_key: &PathKey) -> Option<&Excerpt> { + let (_, _, first_excerpt) = + self.excerpts + .find::((), path_key, Bias::Left); + first_excerpt } - pub fn buffer_for_excerpt(&self, excerpt_id: ExcerptId) -> Option<&BufferSnapshot> { - Some(&self.excerpt(excerpt_id)?.buffer) + pub fn buffer_for_id(&self, id: BufferId) -> Option<&BufferSnapshot> { + self.buffers.get(&id).map(|state| &state.buffer_snapshot) } - pub fn range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option> { - let mut cursor = self - .excerpts - .cursor::, ExcerptPoint>>(()); - let locator = self.excerpt_locator_for_id(excerpt_id); - let mut sought_exact = cursor.seek(&Some(locator), Bias::Left); - if cursor.item().is_none() && excerpt_id == ExcerptId::max() { - sought_exact = true; - cursor.prev(); - } else if excerpt_id == ExcerptId::min() { - sought_exact = true; - } - if sought_exact { - let start = cursor.start().1; - let end = cursor.end().1; - let mut diff_transforms = self - .diff_transforms - .cursor::>>(()); - diff_transforms.seek(&start, Bias::Left); - let overshoot = start - diff_transforms.start().0; - let start = diff_transforms.start().1 + overshoot; - diff_transforms.seek(&end, Bias::Right); - let overshoot = end - diff_transforms.start().0; - let end = diff_transforms.start().1 + overshoot; - Some(start.0..end.0) - } else { - None - } + fn try_path_for_anchor(&self, anchor: ExcerptAnchor) -> Option { + self.path_keys_by_index.get(&anchor.path).cloned() } - /// Returns the excerpt for the given id. The returned excerpt is guaranteed - /// to have the latest excerpt id for the one passed in and will also remap - /// `ExcerptId::max()` to the corresponding excertp ID. - /// - /// Callers of this function should generally use the resulting excerpt's `id` field - /// afterwards. 
- fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> { - let excerpt_id = self.latest_excerpt_id(excerpt_id); - let locator = self.try_excerpt_locator_for_id(excerpt_id)?; - let (_, _, item) = - self.excerpts - .find::, _>((), &Some(locator), Bias::Left); - if let Some(excerpt) = item - && excerpt.id == excerpt_id - { - return Some(excerpt); - } else if item.is_none() && excerpt_id == ExcerptId::max() { - return self.excerpts.last(); - } - None + pub fn path_for_anchor(&self, anchor: ExcerptAnchor) -> PathKey { + self.try_path_for_anchor(anchor) + .expect("invalid anchor: path was never added to multibuffer") } /// Returns the excerpt containing range and its offset start within the multibuffer or none if `range` spans multiple excerpts pub fn excerpt_containing( &self, range: Range, - ) -> Option> { + ) -> Option<(&BufferSnapshot, ExcerptRange)> { let range = range.start.to_offset(self)..range.end.to_offset(self); let mut cursor = self.cursor::(); cursor.seek(&range.start); @@ -7147,31 +6429,15 @@ impl MultiBufferSnapshot { let start_excerpt = cursor.excerpt()?; if range.end != range.start { cursor.seek_forward(&range.end); - if cursor.excerpt()?.id != start_excerpt.id { + if cursor.excerpt()? 
!= start_excerpt { return None; } } - cursor.seek_to_start_of_current_excerpt(); - let region = cursor.region()?; - let offset = region.range.start; - let buffer_offset = start_excerpt.buffer_start_offset(); - let excerpt_offset = *cursor.excerpts.start(); - Some(MultiBufferExcerpt { - diff_transforms: cursor.diff_transforms, - excerpt: start_excerpt, - offset, - buffer_offset, - excerpt_offset, - }) - } - - pub fn buffer_id_for_anchor(&self, anchor: Anchor) -> Option { - if let Some(id) = anchor.text_anchor.buffer_id { - return Some(id); - } - let excerpt = self.excerpt_containing(anchor..anchor)?; - Some(excerpt.buffer_id()) + Some(( + start_excerpt.buffer_snapshot(self), + start_excerpt.range.clone(), + )) } pub fn selections_in_range<'a>( @@ -7180,27 +6446,34 @@ impl MultiBufferSnapshot { include_local: bool, ) -> impl 'a + Iterator)> { let mut cursor = self.excerpts.cursor::(()); - let start_locator = self.excerpt_locator_for_id(range.start.excerpt_id); - let end_locator = self.excerpt_locator_for_id(range.end.excerpt_id); - cursor.seek(start_locator, Bias::Left); + cursor.seek(&range.start.seek_target(self), Bias::Left); cursor - .take_while(move |excerpt| excerpt.locator <= *end_locator) + .take_while(move |excerpt| { + let excerpt_start = + Anchor::in_buffer(excerpt.path_key_index, excerpt.range.context.start); + excerpt_start.cmp(&range.end, self).is_le() + }) .flat_map(move |excerpt| { + let buffer_snapshot = excerpt.buffer_snapshot(self); let mut query_range = excerpt.range.context.start..excerpt.range.context.end; - if excerpt.id == range.start.excerpt_id { - query_range.start = range.start.text_anchor; + if let Some(excerpt_anchor) = range.start.excerpt_anchor() + && excerpt.contains(&excerpt_anchor, self) + { + query_range.start = excerpt_anchor.text_anchor(); } - if excerpt.id == range.end.excerpt_id { - query_range.end = range.end.text_anchor; + if let Some(excerpt_anchor) = range.end.excerpt_anchor() + && excerpt.contains(&excerpt_anchor, self) + { 
+ query_range.end = excerpt_anchor.text_anchor(); } - excerpt - .buffer + buffer_snapshot .selections_in_range(query_range, include_local) .flat_map(move |(replica_id, line_mode, cursor_shape, selections)| { selections.map(move |selection| { - let mut start = Anchor::in_buffer(excerpt.id, selection.start); - let mut end = Anchor::in_buffer(excerpt.id, selection.end); + let mut start = + Anchor::in_buffer(excerpt.path_key_index, selection.start); + let mut end = Anchor::in_buffer(excerpt.path_key_index, selection.end); if range.start.cmp(&start, self).is_gt() { start = range.start; } @@ -7237,91 +6510,251 @@ impl MultiBufferSnapshot { find_diff_state(&self.diffs, buffer_id) } - pub fn total_changed_lines(&self) -> (u32, u32) { - let summary = self.diffs.summary(); - (summary.added_rows, summary.removed_rows) - } + pub fn total_changed_lines(&self) -> (u32, u32) { + let summary = self.diffs.summary(); + (summary.added_rows, summary.removed_rows) + } + + pub fn all_diff_hunks_expanded(&self) -> bool { + self.all_diff_hunks_expanded + } + + /// Visually annotates a position or range with the `Debug` representation of a value. The + /// callsite of this function is used as a key - previous annotations will be removed. + #[cfg(debug_assertions)] + #[track_caller] + pub fn debug(&self, ranges: &R, value: V) + where + R: debug::ToMultiBufferDebugRanges, + V: std::fmt::Debug, + { + self.debug_with_key(std::panic::Location::caller(), ranges, value); + } + + /// Visually annotates a position or range with the `Debug` representation of a value. Previous + /// debug annotations with the same key will be removed. The key is also used to determine the + /// annotation's color. 
+ #[cfg(debug_assertions)] + #[track_caller] + pub fn debug_with_key(&self, key: &K, ranges: &R, value: V) + where + K: std::hash::Hash + 'static, + R: debug::ToMultiBufferDebugRanges, + V: std::fmt::Debug, + { + let text_ranges = ranges + .to_multi_buffer_debug_ranges(self) + .into_iter() + .flat_map(|range| { + self.range_to_buffer_ranges(range) + .into_iter() + .map(|(buffer_snapshot, range, _)| { + buffer_snapshot.anchor_after(range.start) + ..buffer_snapshot.anchor_before(range.end) + }) + }) + .collect(); + text::debug::GlobalDebugRanges::with_locked(|debug_ranges| { + debug_ranges.insert(key, text_ranges, format!("{value:?}").into()) + }); + } + + fn excerpt_edits_for_diff_change( + &self, + path: &PathKey, + diff_change_range: Range, + ) -> Vec>> { + let mut excerpt_edits = Vec::new(); + let mut cursor = self.excerpts.cursor::(()); + cursor.seek(path, Bias::Left); + while let Some(excerpt) = cursor.item() + && &excerpt.path_key == path + { + let buffer_snapshot = excerpt.buffer_snapshot(self); + let excerpt_buffer_range = excerpt.range.context.to_offset(buffer_snapshot); + let excerpt_start = cursor.start().clone(); + let excerpt_len = excerpt.text_summary.len; + cursor.next(); + if diff_change_range.end < excerpt_buffer_range.start + || diff_change_range.start > excerpt_buffer_range.end + { + continue; + } + let diff_change_start_in_excerpt = diff_change_range + .start + .saturating_sub(excerpt_buffer_range.start); + let diff_change_end_in_excerpt = diff_change_range + .end + .saturating_sub(excerpt_buffer_range.start); + let edit_start = excerpt_start.len() + diff_change_start_in_excerpt.min(excerpt_len); + let edit_end = excerpt_start.len() + diff_change_end_in_excerpt.min(excerpt_len); + excerpt_edits.push(Edit { + old: edit_start..edit_end, + new: edit_start..edit_end, + }); + } + excerpt_edits + } + + fn excerpts_for_path<'a>( + &'a self, + path_key: &'a PathKey, + ) -> impl Iterator> + 'a { + let mut cursor = self.excerpts.cursor::(()); + 
cursor.seek(path_key, Bias::Left); + cursor + .take_while(move |item| &item.path_key == path_key) + .map(|excerpt| excerpt.range.clone()) + } + + /// If the given multibuffer range is contained in a single excerpt and contains no deleted hunks, + /// returns the corresponding buffer range. + /// + /// Otherwise, returns None. + pub fn range_to_buffer_range( + &self, + range: Range, + ) -> Option<(&BufferSnapshot, Range)> + where + MBD: MultiBufferDimension + Ord + Sub + ops::AddAssign<::Output>, + MBD::TextDimension: AddAssign<::Output>, + { + let mut cursor = self.cursor::(); + cursor.seek(&range.start); + + let start_region = cursor.region()?.clone(); + + while let Some(region) = cursor.region() + && region.range.end < range.end + { + if !region.is_main_buffer { + return None; + } + cursor.next(); + } + + let end_region = cursor.region()?; + if end_region.buffer.remote_id() != start_region.buffer.remote_id() { + return None; + } + + let mut buffer_start = start_region.buffer_range.start; + buffer_start += range.start - start_region.range.start; + let mut buffer_end = end_region.buffer_range.start; + buffer_end += range.end - end_region.range.start; + + Some((start_region.buffer, buffer_start..buffer_end)) + } + + /// If the two endpoints of the range lie in the same excerpt, return the corresponding + /// buffer range. Intervening deleted hunks are allowed. 
+ pub fn anchor_range_to_buffer_anchor_range( + &self, + range: Range, + ) -> Option<(&BufferSnapshot, Range)> { + let mut cursor = self.excerpts.cursor::(()); + cursor.seek(&range.start.seek_target(&self), Bias::Left); + + let start_excerpt = cursor.item()?; + + let snapshot = start_excerpt.buffer_snapshot(&self); + + cursor.seek(&range.end.seek_target(&self), Bias::Left); + + let end_excerpt = cursor.item()?; + + if start_excerpt != end_excerpt { + return None; + } + + if let Anchor::Excerpt(excerpt_anchor) = range.start + && (excerpt_anchor.path != start_excerpt.path_key_index + || excerpt_anchor.buffer_id() != snapshot.remote_id()) + { + return None; + } + if let Anchor::Excerpt(excerpt_anchor) = range.end + && (excerpt_anchor.path != end_excerpt.path_key_index + || excerpt_anchor.buffer_id() != snapshot.remote_id()) + { + return None; + } + + Some(( + snapshot, + range.start.text_anchor_in(snapshot)..range.end.text_anchor_in(snapshot), + )) + } + + /// Returns all nonempty intersections of the given buffer range with excerpts in the multibuffer in order. + /// + /// The multibuffer ranges are split to not intersect deleted hunks. 
+ pub fn buffer_range_to_excerpt_ranges( + &self, + range: Range, + ) -> impl Iterator> { + assert!(range.start.buffer_id == range.end.buffer_id); + + let buffer_id = range.start.buffer_id; + self.buffers + .get(&buffer_id) + .map(|buffer_state_snapshot| { + let path_key_index = buffer_state_snapshot.path_key_index; + let buffer_snapshot = &buffer_state_snapshot.buffer_snapshot; + let buffer_range = range.to_offset(buffer_snapshot); + + let start = Anchor::in_buffer(path_key_index, range.start).to_offset(self); + let mut cursor = self.cursor::(); + cursor.seek(&start); + std::iter::from_fn(move || { + while let Some(region) = cursor.region() + && !region.is_main_buffer + { + cursor.next(); + } - pub fn all_diff_hunks_expanded(&self) -> bool { - self.all_diff_hunks_expanded - } + let region = cursor.region()?; + if region.buffer.remote_id() != buffer_id + || region.buffer_range.start > BufferOffset(buffer_range.end) + { + return None; + } - /// Visually annotates a position or range with the `Debug` representation of a value. The - /// callsite of this function is used as a key - previous annotations will be removed. - #[cfg(debug_assertions)] - #[track_caller] - pub fn debug(&self, ranges: &R, value: V) - where - R: debug::ToMultiBufferDebugRanges, - V: std::fmt::Debug, - { - self.debug_with_key(std::panic::Location::caller(), ranges, value); - } + let start = region + .buffer_range + .start + .max(BufferOffset(buffer_range.start)); + let mut end = region.buffer_range.end.min(BufferOffset(buffer_range.end)); - /// Visually annotates a position or range with the `Debug` representation of a value. Previous - /// debug annotations with the same key will be removed. The key is also used to determine the - /// annotation's color. 
- #[cfg(debug_assertions)] - #[track_caller] - pub fn debug_with_key(&self, key: &K, ranges: &R, value: V) - where - K: std::hash::Hash + 'static, - R: debug::ToMultiBufferDebugRanges, - V: std::fmt::Debug, - { - let text_ranges = ranges - .to_multi_buffer_debug_ranges(self) - .into_iter() - .flat_map(|range| { - self.range_to_buffer_ranges(range.start..=range.end) - .into_iter() - .map(|(buffer, range, _excerpt_id)| { - buffer.anchor_after(range.start)..buffer.anchor_before(range.end) - }) + cursor.next(); + while let Some(region) = cursor.region() + && region.is_main_buffer + && region.buffer.remote_id() == buffer_id + && region.buffer_range.start <= end + { + end = end + .max(region.buffer_range.end) + .min(BufferOffset(buffer_range.end)); + cursor.next(); + } + + let multibuffer_range = Anchor::range_in_buffer( + path_key_index, + buffer_snapshot.anchor_range_inside(start..end), + ); + Some(multibuffer_range) + }) }) - .collect(); - text::debug::GlobalDebugRanges::with_locked(|debug_ranges| { - debug_ranges.insert(key, text_ranges, format!("{value:?}").into()) - }); + .into_iter() + .flatten() } - fn excerpt_edits_for_diff_change( - &self, - buffer_state: &BufferState, - diff_change_range: Range, - ) -> Vec>> { - let mut excerpt_edits = Vec::new(); - for locator in &buffer_state.excerpts { - let mut cursor = self - .excerpts - .cursor::, ExcerptOffset>>(()); - cursor.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = cursor.item() - && excerpt.locator == *locator - { - let excerpt_buffer_range = excerpt.range.context.to_offset(&excerpt.buffer); - if diff_change_range.end < excerpt_buffer_range.start - || diff_change_range.start > excerpt_buffer_range.end - { - continue; - } - let excerpt_start = cursor.start().1; - let excerpt_len = excerpt.text_summary.len; - let diff_change_start_in_excerpt = diff_change_range - .start - .saturating_sub(excerpt_buffer_range.start); - let diff_change_end_in_excerpt = diff_change_range - .end - 
.saturating_sub(excerpt_buffer_range.start); - let edit_start = excerpt_start + diff_change_start_in_excerpt.min(excerpt_len); - let edit_end = excerpt_start + diff_change_end_in_excerpt.min(excerpt_len); - excerpt_edits.push(Edit { - old: edit_start..edit_end, - new: edit_start..edit_end, - }); - } - } - excerpt_edits + pub fn buffers_with_paths<'a>( + &'a self, + ) -> impl 'a + Iterator { + self.buffers + .values() + .map(|buffer| (&buffer.buffer_snapshot, &buffer.path_key)) } /// Returns the number of graphemes in `range`. @@ -7350,27 +6783,74 @@ impl MultiBufferSnapshot { #[cfg(any(test, feature = "test-support"))] fn check_invariants(&self) { let excerpts = self.excerpts.items(()); - let excerpt_ids = self.excerpt_ids.items(()); + + let mut all_buffer_path_keys = HashSet::default(); + for buffer in self.buffers.values() { + let path_key = buffer.path_key.clone(); + assert!( + all_buffer_path_keys.insert(path_key), + "path key reused for multiple buffers: {:#?}", + self.buffers + ); + } + + let all_excerpt_path_keys = HashSet::from_iter(excerpts.iter().map(|e| e.path_key.clone())); for (ix, excerpt) in excerpts.iter().enumerate() { - if ix == 0 { - if excerpt.locator <= Locator::min() { - panic!("invalid first excerpt locator {:?}", excerpt.locator); + if ix > 0 { + let prev = &excerpts[ix - 1]; + + if excerpt.path_key < prev.path_key { + panic!("excerpt path_keys are out-of-order: {:#?}", excerpts); + } else if excerpt.path_key == prev.path_key { + assert_eq!( + excerpt.buffer_id, prev.buffer_id, + "excerpts with same path_key have different buffer_ids: {:#?}", + excerpts + ); + if excerpt + .start_anchor() + .cmp(&prev.end_anchor(), &self) + .is_le() + { + panic!("excerpt anchors are out-of-order: {:#?}", excerpts); + } + if excerpt + .start_anchor() + .cmp(&excerpt.end_anchor(), &self) + .is_ge() + { + panic!("excerpt with backward range: {:#?}", excerpts); + } } - } else if excerpt.locator <= excerpts[ix - 1].locator { - panic!("excerpts are out-of-order: 
{:?}", excerpts); } - } - for (ix, entry) in excerpt_ids.iter().enumerate() { - if ix == 0 { - if entry.id.cmp(&ExcerptId::min(), self).is_le() { - panic!("invalid first excerpt id {:?}", entry.id); - } - } else if entry.id <= excerpt_ids[ix - 1].id { - panic!("excerpt ids are out-of-order: {:?}", excerpt_ids); + if ix < excerpts.len() - 1 { + assert!( + excerpt.has_trailing_newline, + "non-trailing excerpt has no trailing newline: {:#?}", + excerpts + ); + } else { + assert!( + !excerpt.has_trailing_newline, + "trailing excerpt has trailing newline: {:#?}", + excerpts + ); } + assert!( + all_buffer_path_keys.contains(&excerpt.path_key), + "excerpt path key not found in active path keys: {:#?}", + excerpt.path_key + ); + assert_eq!( + self.path_keys_by_index.get(&excerpt.path_key_index), + Some(&excerpt.path_key), + "excerpt path key index does not match path key: {:#?}", + excerpt.path_key, + ); } + assert_eq!(all_buffer_path_keys, all_excerpt_path_keys); if self.diff_transforms.summary().input != self.excerpts.summary().text { panic!( @@ -7518,7 +6998,7 @@ where && self .excerpts .item() - .is_some_and(|excerpt| excerpt.id != hunk_info.excerpt_id) + .is_some_and(|excerpt| excerpt.end_anchor() != hunk_info.excerpt_end) { self.excerpts.next(); } @@ -7584,13 +7064,13 @@ where DiffTransform::DeletedHunk { hunk_info, .. 
} => self .excerpts .item() - .is_some_and(|excerpt| excerpt.id != hunk_info.excerpt_id), + .is_some_and(|excerpt| excerpt.end_anchor() != hunk_info.excerpt_end), }) } fn main_buffer_position(&self) -> Option { let excerpt = self.excerpts.item()?; - let buffer = &excerpt.buffer; + let buffer = excerpt.buffer_snapshot(self.snapshot); let buffer_context_start = excerpt.range.context.start.summary::(buffer); let mut buffer_start = buffer_context_start; let overshoot = self.diff_transforms.end().excerpt_dimension - *self.excerpts.start(); @@ -7598,6 +7078,19 @@ where Some(buffer_start) } + fn buffer_position_at(&self, output_position: &MBD) -> Option { + let excerpt = self.excerpts.item()?; + let buffer = excerpt.buffer_snapshot(self.snapshot); + let buffer_context_start = excerpt.range.context.start.summary::(buffer); + let mut excerpt_offset = self.diff_transforms.start().excerpt_dimension; + if let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() { + excerpt_offset += *output_position - self.diff_transforms.start().output_dimension.0; + } + let mut result = buffer_context_start; + result += excerpt_offset - *self.excerpts.start(); + Some(result) + } + fn build_region(&self) -> Option> { let excerpt = self.excerpts.item()?; match self.diff_transforms.item()? { @@ -7608,7 +7101,7 @@ where hunk_info, .. } => { - let diff = find_diff_state(self.diffs, *buffer_id)?; + let diff = find_diff_state(&self.snapshot.diffs, *buffer_id)?; let buffer = diff.base_text(); let mut rope_cursor = buffer.as_rope().cursor(0); let buffer_start = rope_cursor.summary::(base_text_byte_range.start); @@ -7632,7 +7125,7 @@ where DiffTransform::BufferContent { inserted_hunk_info, .. 
} => { - let buffer = &excerpt.buffer; + let buffer = excerpt.buffer_snapshot(self.snapshot); let buffer_context_start = excerpt.range.context.start.summary::(buffer); let mut start = self.diff_transforms.start().output_dimension.0; @@ -7726,28 +7219,47 @@ where impl Excerpt { fn new( - id: ExcerptId, - locator: Locator, - buffer_id: BufferId, - buffer: Arc, + path_key: PathKey, + path_key_index: PathKeyIndex, + buffer_snapshot: &BufferSnapshot, range: ExcerptRange, has_trailing_newline: bool, ) -> Self { Excerpt { - id, - locator, - max_buffer_row: range.context.end.to_point(&buffer).row, - text_summary: buffer - .text_summary_for_range::(range.context.to_offset(&buffer)), - buffer_id, - buffer, + path_key, + path_key_index, + buffer_id: buffer_snapshot.remote_id(), + max_buffer_row: range.context.end.to_point(&buffer_snapshot).row, + text_summary: buffer_snapshot.text_summary_for_range::( + range.context.to_offset(&buffer_snapshot), + ), range, has_trailing_newline, } } - fn chunks_in_range(&self, range: Range, language_aware: bool) -> ExcerptChunks<'_> { - let content_start = self.range.context.start.to_offset(&self.buffer); + fn buffer_snapshot<'a>(&self, snapshot: &'a MultiBufferSnapshot) -> &'a BufferSnapshot { + &snapshot + .buffers + .get(&self.buffer_id) + .expect("buffer snapshot not found for excerpt") + .buffer_snapshot + } + + fn buffer(&self, multibuffer: &MultiBuffer) -> Entity { + multibuffer + .buffer(self.buffer_id) + .expect("buffer entity not found for excerpt") + } + + fn chunks_in_range<'a>( + &'a self, + range: Range, + language_aware: bool, + snapshot: &'a MultiBufferSnapshot, + ) -> ExcerptChunks<'a> { + let buffer = self.buffer_snapshot(snapshot); + let content_start = self.range.context.start.to_offset(buffer); let chunks_start = content_start + range.start; let chunks_end = content_start + cmp::min(range.end, self.text_summary.len); @@ -7755,17 +7267,23 @@ impl Excerpt { && range.start <= self.text_summary.len && range.end > 
self.text_summary.len; - let content_chunks = self.buffer.chunks(chunks_start..chunks_end, language_aware); + let content_chunks = buffer.chunks(chunks_start..chunks_end, language_aware); ExcerptChunks { - excerpt_id: self.id, content_chunks, has_footer, + end: self.end_anchor(), } } - fn seek_chunks(&self, excerpt_chunks: &mut ExcerptChunks, range: Range) { - let content_start = self.range.context.start.to_offset(&self.buffer); + fn seek_chunks( + &self, + excerpt_chunks: &mut ExcerptChunks, + range: Range, + snapshot: &MultiBufferSnapshot, + ) { + let buffer = self.buffer_snapshot(snapshot); + let content_start = self.range.context.start.to_offset(buffer); let chunks_start = content_start + range.start; let chunks_end = content_start + cmp::min(range.end, self.text_summary.len); excerpt_chunks.content_chunks.seek(chunks_start..chunks_end); @@ -7774,218 +7292,43 @@ impl Excerpt { && range.end > self.text_summary.len; } - fn clip_anchor(&self, text_anchor: text::Anchor) -> text::Anchor { - if text_anchor - .cmp(&self.range.context.start, &self.buffer) - .is_lt() - { + fn clip_anchor( + &self, + text_anchor: text::Anchor, + snapshot: &MultiBufferSnapshot, + ) -> text::Anchor { + let buffer = self.buffer_snapshot(snapshot); + if text_anchor.cmp(&self.range.context.start, buffer).is_lt() { self.range.context.start - } else if text_anchor - .cmp(&self.range.context.end, &self.buffer) - .is_gt() - { + } else if text_anchor.cmp(&self.range.context.end, buffer).is_gt() { self.range.context.end } else { text_anchor } } - fn contains(&self, anchor: &Anchor) -> bool { - (anchor.text_anchor.buffer_id == None - || anchor.text_anchor.buffer_id == Some(self.buffer_id)) - && self - .range - .context - .start - .cmp(&anchor.text_anchor, &self.buffer) - .is_le() + pub(crate) fn contains(&self, anchor: &ExcerptAnchor, snapshot: &MultiBufferSnapshot) -> bool { + self.path_key_index == anchor.path + && self.buffer_id == anchor.text_anchor.buffer_id && self .range - .context - .end - 
.cmp(&anchor.text_anchor, &self.buffer) - .is_ge() - } - - /// The [`Excerpt`]'s start offset in its [`Buffer`] - fn buffer_start_offset(&self) -> BufferOffset { - BufferOffset(self.range.context.start.to_offset(&self.buffer)) - } - - /// The [`Excerpt`]'s end offset in its [`Buffer`] - fn buffer_end_offset(&self) -> BufferOffset { - self.buffer_start_offset() + self.text_summary.len - } -} - -impl<'a> MultiBufferExcerpt<'a> { - pub fn id(&self) -> ExcerptId { - self.excerpt.id - } - - pub fn buffer_id(&self) -> BufferId { - self.excerpt.buffer_id - } - - pub fn start_anchor(&self) -> Anchor { - Anchor::in_buffer(self.excerpt.id, self.excerpt.range.context.start) - } - - pub fn end_anchor(&self) -> Anchor { - Anchor::in_buffer(self.excerpt.id, self.excerpt.range.context.end) - } - - pub fn buffer(&self) -> &'a BufferSnapshot { - &self.excerpt.buffer - } - - pub fn buffer_range(&self) -> Range { - self.buffer_offset - ..BufferOffset( - self.excerpt - .range - .context - .end - .to_offset(&self.excerpt.buffer.text), - ) - } - - pub fn start_offset(&self) -> MultiBufferOffset { - self.offset - } - - /// Maps an offset within the [`MultiBuffer`] to an offset within the [`Buffer`] - pub fn map_offset_to_buffer(&mut self, offset: MultiBufferOffset) -> BufferOffset { - self.map_range_to_buffer(offset..offset).start - } - - /// Maps a range within the [`MultiBuffer`] to a range within the [`Buffer`] - pub fn map_range_to_buffer(&mut self, range: Range) -> Range { - self.diff_transforms - .seek(&OutputDimension(range.start), Bias::Right); - let start = self.map_offset_to_buffer_internal(range.start); - let end = if range.end > range.start { - self.diff_transforms - .seek_forward(&OutputDimension(range.end), Bias::Right); - self.map_offset_to_buffer_internal(range.end) - } else { - start - }; - start..end - } - - fn map_offset_to_buffer_internal(&self, offset: MultiBufferOffset) -> BufferOffset { - let mut excerpt_offset = self.diff_transforms.start().excerpt_dimension; - if 
let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() { - excerpt_offset += offset - self.diff_transforms.start().output_dimension.0; - }; - let offset_in_excerpt = excerpt_offset.saturating_sub(self.excerpt_offset); - self.buffer_offset + offset_in_excerpt - } - - /// Map an offset within the [`Buffer`] to an offset within the [`MultiBuffer`] - pub fn map_offset_from_buffer(&mut self, buffer_offset: BufferOffset) -> MultiBufferOffset { - self.map_range_from_buffer(buffer_offset..buffer_offset) - .start - } - - /// Map a range within the [`Buffer`] to a range within the [`MultiBuffer`] - pub fn map_range_from_buffer( - &mut self, - buffer_range: Range, - ) -> Range { - if buffer_range.start < self.buffer_offset { - log::warn!( - "Attempting to map a range from a buffer offset that starts before the current buffer offset" - ); - return self.offset..self.offset; - } - let overshoot = buffer_range.start - self.buffer_offset; - let excerpt_offset = self.excerpt_offset + overshoot; - let excerpt_seek_dim = excerpt_offset; - self.diff_transforms.seek(&excerpt_seek_dim, Bias::Right); - if self.diff_transforms.start().excerpt_dimension > excerpt_offset { - log::warn!( - "Attempting to map a range from a buffer offset that starts before the current buffer offset" - ); - return self.offset..self.offset; - } - let overshoot = excerpt_offset - self.diff_transforms.start().excerpt_dimension; - let start = self.diff_transforms.start().output_dimension.0 + overshoot; - - let end = if buffer_range.start < buffer_range.end { - let overshoot = buffer_range.end - self.buffer_offset; - let excerpt_offset = self.excerpt_offset + overshoot; - let excerpt_seek_dim = excerpt_offset; - self.diff_transforms - .seek_forward(&excerpt_seek_dim, Bias::Right); - let overshoot = excerpt_offset - self.diff_transforms.start().excerpt_dimension; - // todo(lw): Clamp end to the excerpt boundaries - self.diff_transforms.start().output_dimension.0 + overshoot - } else { - start - 
}; - - start..end - } - - /// Returns true if the entirety of the given range is in the buffer's excerpt - pub fn contains_buffer_range(&self, range: Range) -> bool { - range.start >= self.excerpt.buffer_start_offset() - && range.end <= self.excerpt.buffer_end_offset() - } - - /// Returns true if any part of the given range is in the buffer's excerpt - pub fn contains_partial_buffer_range(&self, range: Range) -> bool { - range.start <= self.excerpt.buffer_end_offset() - && range.end >= self.excerpt.buffer_start_offset() - } - - pub fn max_buffer_row(&self) -> u32 { - self.excerpt.max_buffer_row - } -} - -impl ExcerptId { - pub fn min() -> Self { - Self(0) - } - - pub fn max() -> Self { - Self(u32::MAX) - } - - pub fn to_proto(self) -> u64 { - self.0 as _ + .contains(&anchor.text_anchor(), self.buffer_snapshot(snapshot)) } - pub fn from_proto(proto: u64) -> Self { - Self(proto as _) + fn start_anchor(&self) -> ExcerptAnchor { + ExcerptAnchor::in_buffer(self.path_key_index, self.range.context.start) } - pub fn cmp(&self, other: &Self, snapshot: &MultiBufferSnapshot) -> cmp::Ordering { - let a = snapshot.excerpt_locator_for_id(*self); - let b = snapshot.excerpt_locator_for_id(*other); - a.cmp(b).then_with(|| self.0.cmp(&other.0)) + fn end_anchor(&self) -> ExcerptAnchor { + ExcerptAnchor::in_buffer(self.path_key_index, self.range.context.end) } } -impl From for usize { - fn from(val: ExcerptId) -> Self { - val.0 as usize - } -} - -impl fmt::Debug for Excerpt { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Excerpt") - .field("id", &self.id) - .field("locator", &self.locator) - .field("buffer_id", &self.buffer_id) - .field("range", &self.range) - .field("text_summary", &self.text_summary) - .field("has_trailing_newline", &self.has_trailing_newline) - .finish() +impl PartialEq for Excerpt { + fn eq(&self, other: &Self) -> bool { + self.path_key_index == other.path_key_index + && self.buffer_id == other.buffer_id + && self.range.context == 
other.range.context } } @@ -7998,8 +7341,8 @@ impl sum_tree::Item for Excerpt { text += TextSummary::from("\n"); } ExcerptSummary { - excerpt_id: self.id, - excerpt_locator: self.locator.clone(), + path_key: self.path_key.clone(), + max_anchor: Some(self.range.context.end), widest_line_number: self.max_buffer_row, text: text.into(), count: 1, @@ -8007,22 +7350,6 @@ impl sum_tree::Item for Excerpt { } } -impl sum_tree::Item for ExcerptIdMapping { - type Summary = ExcerptId; - - fn summary(&self, _cx: ()) -> Self::Summary { - self.id - } -} - -impl sum_tree::KeyedItem for ExcerptIdMapping { - type Key = ExcerptId; - - fn key(&self) -> Self::Key { - self.id - } -} - impl DiffTransform { fn hunk_info(&self) -> Option { match self { @@ -8071,45 +7398,98 @@ impl sum_tree::ContextLessSummary for DiffTransformSummary { } } -impl sum_tree::ContextLessSummary for ExcerptId { - fn zero() -> Self { - Self(0) +impl sum_tree::Dimension<'_, ExcerptSummary> for PathKey { + fn zero(_: ::Context<'_>) -> Self { + PathKey::min() } - fn add_summary(&mut self, summary: &Self) { - *self = cmp::max(*self, *summary); + fn add_summary( + &mut self, + summary: &'_ ExcerptSummary, + _cx: ::Context<'_>, + ) { + *self = summary.path_key.clone(); + } +} + +impl sum_tree::Dimension<'_, ExcerptSummary> for MultiBufferOffset { + fn zero(_: ::Context<'_>) -> Self { + MultiBufferOffset::ZERO + } + + fn add_summary( + &mut self, + summary: &'_ ExcerptSummary, + _cx: ::Context<'_>, + ) { + *self += summary.text.len } } impl sum_tree::ContextLessSummary for ExcerptSummary { fn zero() -> Self { - Self::default() + Self::min() } fn add_summary(&mut self, summary: &Self) { debug_assert!( - summary.excerpt_locator > self.excerpt_locator - || self.excerpt_locator == Locator::min(), - "Excerpt locators must be in ascending order: {:?} > {:?}", - summary.excerpt_locator, - self.excerpt_locator + summary.path_key >= self.path_key, + "Path keys must be in ascending order: {:?} > {:?}", + summary.path_key, + 
self.path_key ); - self.excerpt_locator = summary.excerpt_locator.clone(); + + self.path_key = summary.path_key.clone(); + self.max_anchor = summary.max_anchor; self.text += summary.text; self.widest_line_number = cmp::max(self.widest_line_number, summary.widest_line_number); self.count += summary.count; } } -impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, Option<&'a Locator>> for Locator { - fn cmp(&self, cursor_location: &Option<&'a Locator>, _: ()) -> cmp::Ordering { - Ord::cmp(&Some(self), cursor_location) +impl sum_tree::SeekTarget<'_, ExcerptSummary, ExcerptSummary> for AnchorSeekTarget { + fn cmp( + &self, + cursor_location: &ExcerptSummary, + _cx: ::Context<'_>, + ) -> cmp::Ordering { + match self { + AnchorSeekTarget::Excerpt { + path_key, + anchor, + snapshot, + } => { + let path_comparison = Ord::cmp(path_key, &cursor_location.path_key); + if path_comparison.is_ne() { + path_comparison + } else if let Some(snapshot) = snapshot { + if anchor.text_anchor.buffer_id != snapshot.remote_id() { + Ordering::Greater + } else if let Some(max_anchor) = cursor_location.max_anchor { + debug_assert_eq!(max_anchor.buffer_id, snapshot.remote_id()); + anchor.text_anchor().cmp(&max_anchor, snapshot) + } else { + Ordering::Greater + } + } else { + // shouldn't happen because we expect this buffer not to have any excerpts + // (otherwise snapshot would have been Some) + Ordering::Equal + } + } + // This should be dead code because Empty is only constructed for an empty snapshot + AnchorSeekTarget::Empty => Ordering::Equal, + } } } -impl sum_tree::SeekTarget<'_, ExcerptSummary, ExcerptSummary> for Locator { - fn cmp(&self, cursor_location: &ExcerptSummary, _: ()) -> cmp::Ordering { - Ord::cmp(self, &cursor_location.excerpt_locator) +impl sum_tree::SeekTarget<'_, ExcerptSummary, ExcerptSummary> for PathKey { + fn cmp( + &self, + cursor_location: &ExcerptSummary, + _cx: ::Context<'_>, + ) -> cmp::Ordering { + Ord::cmp(self, &cursor_location.path_key) } } @@ -8126,26 +7506,6 
@@ where } } -impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a Locator> { - fn zero(_cx: ()) -> Self { - Default::default() - } - - fn add_summary(&mut self, summary: &'a ExcerptSummary, _: ()) { - *self = Some(&summary.excerpt_locator); - } -} - -impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option { - fn zero(_cx: ()) -> Self { - Default::default() - } - - fn add_summary(&mut self, summary: &'a ExcerptSummary, _: ()) { - *self = Some(summary.excerpt_id); - } -} - #[derive(Copy, Clone, PartialOrd, Ord, Eq, PartialEq, Debug)] struct OutputDimension(T); @@ -8201,7 +7561,7 @@ where } } -#[derive(Copy, Clone, PartialOrd, Ord, Eq, PartialEq, Debug)] +#[derive(Copy, Clone, PartialOrd, Ord, Eq, PartialEq, Debug, Default)] struct ExcerptDimension(T); impl PartialEq for ExcerptDimension { @@ -8361,18 +7721,14 @@ impl Iterator for MultiBufferRows<'_> { .excerpts .item() .or(self.cursor.excerpts.prev_item())?; - let last_row = last_excerpt - .range - .context - .end - .to_point(&last_excerpt.buffer) - .row; + let buffer_snapshot = last_excerpt.buffer_snapshot(self.cursor.snapshot); + let last_row = last_excerpt.range.context.end.to_point(buffer_snapshot).row; let first_row = last_excerpt .range .context .start - .to_point(&last_excerpt.buffer) + .to_point(buffer_snapshot) .row; let expand_info = if self.is_singleton { @@ -8381,7 +7737,7 @@ impl Iterator for MultiBufferRows<'_> { let needs_expand_up = first_row == last_row && last_row > 0 && !region.diff_hunk_status.is_some_and(|d| d.is_deleted()); - let needs_expand_down = last_row < last_excerpt.buffer.max_point().row; + let needs_expand_down = last_row < buffer_snapshot.max_point().row; if needs_expand_up && needs_expand_down { Some(ExpandExcerptDirection::UpAndDown) @@ -8394,7 +7750,7 @@ impl Iterator for MultiBufferRows<'_> { } .map(|direction| ExpandInfo { direction, - excerpt_id: last_excerpt.id, + start_anchor: Anchor::Excerpt(last_excerpt.start_anchor()), }) }; self.point += Point::new(1, 0); @@ 
-8436,7 +7792,7 @@ impl Iterator for MultiBufferRows<'_> { } .map(|direction| ExpandInfo { direction, - excerpt_id: region.excerpt.id, + start_anchor: Anchor::Excerpt(region.excerpt.start_anchor()), }) }; @@ -8488,18 +7844,20 @@ impl<'a> MultiBufferChunks<'a> { if let Some(excerpt_chunks) = self .excerpt_chunks .as_mut() - .filter(|chunks| excerpt.id == chunks.excerpt_id) + .filter(|chunks| excerpt.end_anchor() == chunks.end) { excerpt.seek_chunks( excerpt_chunks, (self.excerpt_offset_range.start - excerpt_start) ..(self.excerpt_offset_range.end - excerpt_start), + self.snapshot, ); } else { self.excerpt_chunks = Some(excerpt.chunks_in_range( (self.excerpt_offset_range.start - excerpt_start) ..(self.excerpt_offset_range.end - excerpt_start), self.language_aware, + self.snapshot, )); } } else { @@ -8521,6 +7879,7 @@ impl<'a> MultiBufferChunks<'a> { self.excerpt_chunks = Some(excerpt.chunks_in_range( 0..(self.excerpt_offset_range.end - *self.excerpts.start()), self.language_aware, + self.snapshot, )); } } @@ -8636,7 +7995,8 @@ impl<'a> Iterator for MultiBufferChunks<'a> { } chunks } else { - let base_buffer = &find_diff_state(self.diffs, *buffer_id)?.base_text(); + let base_buffer = + &find_diff_state(&self.snapshot.diffs, *buffer_id)?.base_text(); base_buffer.chunks(base_text_start..base_text_end, self.language_aware) }; @@ -8833,12 +8193,6 @@ impl ToPoint for PointUtf16 { } } -impl From for EntityId { - fn from(id: ExcerptId) -> Self { - EntityId::from(id.0 as u64) - } -} - #[cfg(debug_assertions)] pub mod debug { use super::*; diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index e44a38e4abed8438bcdcbf1f2c8c55c465d98e2d..b0e541ed11d1e9200b22ce682cf3175fae30e8cf 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -77,22 +77,19 @@ fn test_buffer_point_to_anchor_at_end_of_singleton_buffer(cx: &mut App) { let buffer = cx.new(|cx| 
Buffer::local("abc", cx)); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx)); - let excerpt_id = multibuffer + let anchor = multibuffer .read(cx) - .excerpt_ids() - .into_iter() - .next() + .buffer_point_to_anchor(&buffer, Point::new(0, 3), cx) .unwrap(); - let anchor = multibuffer + let (anchor, _) = multibuffer .read(cx) - .buffer_point_to_anchor(&buffer, Point::new(0, 3), cx); + .snapshot(cx) + .anchor_to_buffer_anchor(anchor) + .unwrap(); assert_eq!( anchor, - Some(Anchor::in_buffer( - excerpt_id, - buffer.read(cx).snapshot().anchor_after(Point::new(0, 3)), - )) + buffer.read(cx).snapshot().anchor_after(Point::new(0, 3)), ); } @@ -346,7 +343,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { ); let snapshot = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts_for_path(PathKey::sorted(1), cx); + multibuffer.remove_excerpts(PathKey::sorted(1), cx); multibuffer.snapshot(cx) }); @@ -373,7 +370,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { boundary.row, boundary .next - .buffer + .buffer(snapshot) .text_for_range(boundary.next.range.context) .collect::(), starts_new_buffer, @@ -440,7 +437,7 @@ async fn test_diff_hunks_in_range(cx: &mut TestAppContext) { multibuffer.update(cx, |multibuffer, cx| { multibuffer.add_diff(diff, cx); - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -480,7 +477,7 @@ async fn test_diff_hunks_in_range(cx: &mut TestAppContext) { ); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -521,7 +518,7 @@ async fn test_diff_hunks_in_range_query_starting_at_added_row(cx: &mut TestAppCo multibuffer.update(cx, |multibuffer, cx| { multibuffer.add_diff(diff, cx); - 
multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -766,12 +763,27 @@ fn test_excerpt_events(cx: &mut App) { cx.subscribe( &leader_multibuffer, move |follower, _, event, cx| match event.clone() { - Event::ExcerptsAdded { + Event::BufferRangesUpdated { buffer, - predecessor, - excerpts, - } => follower.insert_excerpts_with_ids_after(predecessor, buffer, excerpts, cx), - Event::ExcerptsRemoved { ids, .. } => follower.remove_excerpts(ids, cx), + path_key, + ranges, + } => { + let buffer_snapshot = buffer.read(cx).snapshot(); + follower.set_merged_excerpt_ranges_for_path( + path_key, + buffer, + &buffer_snapshot, + ranges, + cx, + ); + } + Event::BuffersRemoved { + removed_buffer_ids, .. + } => { + for id in removed_buffer_ids { + follower.remove_excerpts_for_buffer(id, cx); + } + } Event::Edited { .. } => { *follower_edit_event_count.write() += 1; } @@ -885,9 +897,14 @@ fn test_expand_excerpts(cx: &mut App) { drop(snapshot); multibuffer.update(cx, |multibuffer, cx| { - let line_zero = multibuffer.snapshot(cx).anchor_before(Point::new(0, 0)); + let multibuffer_snapshot = multibuffer.snapshot(cx); + let line_zero = multibuffer_snapshot.anchor_before(Point::new(0, 0)); multibuffer.expand_excerpts( - multibuffer.excerpt_ids(), + multibuffer.snapshot(cx).excerpts().map(|excerpt| { + multibuffer_snapshot + .anchor_in_excerpt(excerpt.context.start) + .unwrap() + }), 1, ExpandExcerptDirection::UpAndDown, cx, @@ -1184,16 +1201,10 @@ fn test_multibuffer_anchors(cx: &mut App) { .to_offset(&old_snapshot), MultiBufferOffset(0) ); - assert_eq!(Anchor::min().to_offset(&old_snapshot), MultiBufferOffset(0)); - assert_eq!(Anchor::min().to_offset(&old_snapshot), MultiBufferOffset(0)); - assert_eq!( - Anchor::max().to_offset(&old_snapshot), - MultiBufferOffset(10) - ); - assert_eq!( - Anchor::max().to_offset(&old_snapshot), - MultiBufferOffset(10) - ); + 
assert_eq!(Anchor::Min.to_offset(&old_snapshot), MultiBufferOffset(0)); + assert_eq!(Anchor::Min.to_offset(&old_snapshot), MultiBufferOffset(0)); + assert_eq!(Anchor::Max.to_offset(&old_snapshot), MultiBufferOffset(10)); + assert_eq!(Anchor::Max.to_offset(&old_snapshot), MultiBufferOffset(10)); buffer_1.update(cx, |buffer, cx| { buffer.edit([(0..0, "W")], None, cx); @@ -1270,153 +1281,6 @@ fn test_multibuffer_anchors(cx: &mut App) { ); } -#[gpui::test] -fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut App) { - let buffer_1 = cx.new(|cx| Buffer::local("abcd", cx)); - let buffer_2 = cx.new(|cx| Buffer::local("ABCDEFGHIJKLMNOP", cx)); - let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - - // Create an insertion id in buffer 1 that doesn't exist in buffer 2. - // Add an excerpt from buffer 1 that spans this new insertion. - buffer_1.update(cx, |buffer, cx| buffer.edit([(4..4, "123")], None, cx)); - let excerpt_id_1 = multibuffer.update(cx, |multibuffer, cx| { - let buffer_1_snapshot = buffer_1.read(cx).snapshot(); - multibuffer.set_excerpt_ranges_for_path( - PathKey::sorted(0), - buffer_1, - &buffer_1_snapshot, - vec![ExcerptRange::new((0..7).to_point(&buffer_1_snapshot))], - cx, - ); - multibuffer.excerpt_ids().into_iter().next().unwrap() - }); - - let snapshot_1 = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot_1.text(), "abcd123"); - - // Replace the buffer 1 excerpt with new excerpts from buffer 2. 
- let (excerpt_id_2, _excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts_for_path(PathKey::sorted(0), cx); - let snapshot_2 = buffer_2.read(cx).snapshot(); - multibuffer.set_excerpt_ranges_for_path( - PathKey::sorted(1), - buffer_2.clone(), - &buffer_2.read(cx).snapshot(), - vec![ - ExcerptRange::new((0..4).to_point(&snapshot_2)), - ExcerptRange::new((6..10).to_point(&snapshot_2)), - ExcerptRange::new((12..16).to_point(&snapshot_2)), - ], - cx, - ); - let mut ids = multibuffer - .excerpts_for_buffer(buffer_2.read(cx).remote_id(), cx) - .into_iter() - .map(|(id, _, _)| id); - (ids.next().unwrap(), ids.next().unwrap()) - }); - let snapshot_2 = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot_2.text(), "ABCD\nGHIJ\nMNOP"); - - // The old excerpt id doesn't get reused. - assert_ne!(excerpt_id_2, excerpt_id_1); - - // Resolve some anchors from the previous snapshot in the new snapshot. - // The current excerpts are from a different buffer, so we don't attempt to - // resolve the old text anchor in the new buffer. - assert_eq!( - snapshot_2.summary_for_anchor::( - &snapshot_1.anchor_before(MultiBufferOffset(2)) - ), - MultiBufferOffset(0) - ); - assert_eq!( - snapshot_2.summaries_for_anchors::(&[ - snapshot_1.anchor_before(MultiBufferOffset(2)), - snapshot_1.anchor_after(MultiBufferOffset(3)) - ]), - vec![MultiBufferOffset(0), MultiBufferOffset(0)] - ); - - // Refresh anchors from the old snapshot. The return value indicates that both - // anchors lost their original excerpt. - let refresh = snapshot_2.refresh_anchors(&[ - snapshot_1.anchor_before(MultiBufferOffset(2)), - snapshot_1.anchor_after(MultiBufferOffset(3)), - ]); - assert_eq!( - refresh, - &[ - (0, snapshot_2.anchor_before(MultiBufferOffset(0)), false), - (1, snapshot_2.anchor_after(MultiBufferOffset(0)), false), - ] - ); - - // Replace the middle excerpt with a smaller excerpt in buffer 2, - // that intersects the old excerpt. 
- multibuffer.update(cx, |multibuffer, cx| { - let snapshot_2 = buffer_2.read(cx).snapshot(); - multibuffer.set_excerpt_ranges_for_path( - PathKey::sorted(1), - buffer_2.clone(), - &buffer_2.read(cx).snapshot(), - vec![ - ExcerptRange::new((0..4).to_point(&snapshot_2)), - ExcerptRange::new((12..16).to_point(&snapshot_2)), - ], - cx, - ); - multibuffer.set_excerpt_ranges_for_path( - PathKey::sorted(1), - buffer_2.clone(), - &buffer_2.read(cx).snapshot(), - vec![ - ExcerptRange::new((0..4).to_point(&snapshot_2)), - ExcerptRange::new((5..8).to_point(&snapshot_2)), - ExcerptRange::new((12..16).to_point(&snapshot_2)), - ], - cx, - ); - }); - - let snapshot_3 = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot_3.text(), "ABCD\nFGH\nMNOP"); - - // Resolve some anchors from the previous snapshot in the new snapshot. - // The third anchor can't be resolved, since its excerpt has been removed, - // so it resolves to the same position as its predecessor. - let anchors = [ - snapshot_2.anchor_before(MultiBufferOffset(0)), - snapshot_2.anchor_after(MultiBufferOffset(2)), - snapshot_2.anchor_after(MultiBufferOffset(6)), - snapshot_2.anchor_after(MultiBufferOffset(14)), - ]; - assert_eq!( - snapshot_3.summaries_for_anchors::(&anchors), - &[ - MultiBufferOffset(0), - MultiBufferOffset(2), - MultiBufferOffset(9), - MultiBufferOffset(13) - ] - ); - - let new_anchors = snapshot_3.refresh_anchors(&anchors); - assert_eq!( - new_anchors.iter().map(|a| (a.0, a.2)).collect::>(), - &[(0, true), (1, true), (2, true), (3, true)] - ); - assert_eq!( - snapshot_3.summaries_for_anchors::(new_anchors.iter().map(|a| &a.1)), - &[ - MultiBufferOffset(0), - MultiBufferOffset(2), - MultiBufferOffset(7), - MultiBufferOffset(13) - ] - ); -} - #[gpui::test] async fn test_basic_diff_hunks(cx: &mut TestAppContext) { let text = indoc!( @@ -1467,7 +1331,7 @@ async fn test_basic_diff_hunks(cx: &mut TestAppContext) { ); multibuffer.update(cx, |multibuffer, cx| { - 
multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -1513,7 +1377,7 @@ async fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_line_indents(&snapshot); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx) + multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx) }); assert_new_snapshot( &multibuffer, @@ -1700,7 +1564,7 @@ async fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) { }); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -1751,7 +1615,7 @@ async fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) { // Now collapse all diff hunks multibuffer.update(cx, |multibuffer, cx| { - multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -2097,6 +1961,203 @@ fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) { }); } +#[gpui::test] +fn test_update_excerpt_ranges_for_path(cx: &mut TestAppContext) { + let buffer = cx.new(|cx| { + Buffer::local( + indoc! { + "row 0 + row 1 + row 2 + row 3 + row 4 + row 5 + row 6 + row 7 + row 8 + row 9 + row 10 + row 11 + row 12 + row 13 + row 14 + "}, + cx, + ) + }); + let path = PathKey::with_sort_prefix(0, rel_path("test.rs").into_arc()); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(2..4), Point::row_range(8..10)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! 
{"----- + row 2 + row 3 + row 4 + ----- + row 8 + row 9 + row 10 + "}, + ); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(12..13)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 12 + row 13 + "}, + ); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(2..4)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 2 + row 3 + row 4 + "}, + ); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(3..5)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 2 + row 3 + row 4 + row 5 + "}, + ); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![ + Point::row_range(0..1), + Point::row_range(6..8), + Point::row_range(12..13), + ], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 0 + row 1 + ----- + row 6 + row 7 + row 8 + ----- + row 12 + row 13 + "}, + ); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(7..9)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 6 + row 7 + row 8 + row 9 + "}, + ); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(2..3), Point::row_range(6..7)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! 
{"----- + row 2 + row 3 + ----- + row 6 + row 7 + "}, + ); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(3..6)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 2 + row 3 + row 4 + row 5 + row 6 + row 7 + "}, + ); +} + #[gpui::test] fn test_set_excerpts_for_buffer_rename(cx: &mut TestAppContext) { let buf1 = cx.new(|cx| { @@ -2178,6 +2239,405 @@ fn test_set_excerpts_for_buffer_rename(cx: &mut TestAppContext) { ); } +#[gpui::test] +fn test_set_excerpts_for_path_replaces_previous_buffer(cx: &mut TestAppContext) { + let buffer_a = cx.new(|cx| { + Buffer::local( + indoc! { + "alpha + beta + gamma + delta + epsilon + ", + }, + cx, + ) + }); + let buffer_b = cx.new(|cx| { + Buffer::local( + indoc! { + "one + two + three + four + ", + }, + cx, + ) + }); + let path: PathKey = PathKey::with_sort_prefix(0, rel_path("shared/path").into_arc()); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + let removed_buffer_ids: Arc>> = Default::default(); + multibuffer.update(cx, |_, cx| { + let removed_buffer_ids = removed_buffer_ids.clone(); + cx.subscribe(&multibuffer, move |_, _, event, _| { + if let Event::BuffersRemoved { + removed_buffer_ids: ids, + } = event + { + removed_buffer_ids.write().extend(ids.iter().copied()); + } + }) + .detach(); + }); + + let ranges_a = vec![Point::row_range(0..1), Point::row_range(3..4)]; + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path(path.clone(), buffer_a.clone(), ranges_a.clone(), 0, cx); + }); + let (anchor_a1, anchor_a2) = multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let buffer_snapshot = buffer_a.read(cx).snapshot(); + let mut anchors = ranges_a.into_iter().filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range); + let start = snapshot.anchor_in_buffer(text_range.start)?; + 
let end = snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + }); + ( + anchors.next().expect("should have first anchor"), + anchors.next().expect("should have second anchor"), + ) + }); + + assert_excerpts_match( + &multibuffer, + cx, + indoc! { + "----- + alpha + beta + ----- + delta + epsilon + " + }, + ); + + let buffer_a_id = buffer_a.read_with(cx, |buffer, _| buffer.remote_id()); + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + assert!( + snapshot + .excerpts() + .any(|excerpt| excerpt.context.start.buffer_id == buffer_a_id), + ); + }); + + let ranges_b = vec![Point::row_range(1..2)]; + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path(path.clone(), buffer_b.clone(), ranges_b.clone(), 1, cx); + }); + let anchor_b = multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let buffer_snapshot = buffer_b.read(cx).snapshot(); + ranges_b + .into_iter() + .filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range); + let start = snapshot.anchor_in_buffer(text_range.start)?; + let end = snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + }) + .next() + .expect("should have an anchor") + }); + + let buffer_b_id = buffer_b.read_with(cx, |buffer, _| buffer.remote_id()); + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + assert!( + !snapshot + .excerpts() + .any(|excerpt| excerpt.context.start.buffer_id == buffer_a_id), + ); + assert!( + snapshot + .excerpts() + .any(|excerpt| excerpt.context.start.buffer_id == buffer_b_id), + ); + assert!( + multibuffer.buffer(buffer_a_id).is_none(), + "old buffer should be fully removed from the multibuffer" + ); + assert!( + multibuffer.buffer(buffer_b_id).is_some(), + "new buffer should be present in the multibuffer" + ); + }); + assert!( + removed_buffer_ids.read().contains(&buffer_a_id), + "BuffersRemoved event should have been emitted 
for the old buffer" + ); + + assert_excerpts_match( + &multibuffer, + cx, + indoc! { + "----- + one + two + three + four + " + }, + ); + + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + anchor_a1.start.cmp(&anchor_b.start, &snapshot); + anchor_a1.end.cmp(&anchor_b.end, &snapshot); + anchor_a1.start.cmp(&anchor_a2.start, &snapshot); + anchor_a1.end.cmp(&anchor_a2.end, &snapshot); + }); +} + +#[gpui::test] +fn test_stale_anchor_after_buffer_removal_and_path_reuse(cx: &mut TestAppContext) { + let buffer_a = cx.new(|cx| Buffer::local("aaa\nbbb\nccc\n", cx)); + let buffer_b = cx.new(|cx| Buffer::local("xxx\nyyy\nzzz\n", cx)); + let buffer_other = cx.new(|cx| Buffer::local("111\n222\n333\n", cx)); + let path = PathKey::with_sort_prefix(0, rel_path("the/path").into_arc()); + let other_path = PathKey::with_sort_prefix(1, rel_path("other/path").into_arc()); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer_a.clone(), + [Point::new(0, 0)..Point::new(2, 3)], + 0, + cx, + ); + multibuffer.set_excerpts_for_path( + other_path.clone(), + buffer_other.clone(), + [Point::new(0, 0)..Point::new(2, 3)], + 0, + cx, + ); + }); + + buffer_a.update(cx, |buffer, cx| { + buffer.edit( + [(Point::new(1, 0)..Point::new(1, 0), "INSERTED ")], + None, + cx, + ); + }); + + let stale_anchor = multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + snapshot.anchor_before(Point::new(1, 5)) + }); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.remove_excerpts(path.clone(), cx); + }); + + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let offset = stale_anchor.to_offset(&snapshot); + assert!( + offset.0 <= snapshot.len().0, + "stale anchor resolved to offset {offset:?} but multibuffer len is {:?}", + snapshot.len() + ); + }); + + 
multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer_b.clone(), + [Point::new(0, 0)..Point::new(2, 3)], + 0, + cx, + ); + }); + + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let offset = stale_anchor.to_offset(&snapshot); + assert!( + offset.0 <= snapshot.len().0, + "stale anchor resolved to offset {offset:?} but multibuffer len is {:?}", + snapshot.len() + ); + }); +} + +#[gpui::test] +async fn test_map_excerpt_ranges(cx: &mut TestAppContext) { + let base_text = indoc!( + " + { + (aaa) + (bbb) + (ccc) + } + xxx + yyy + zzz + [ + (ddd) + (EEE) + ] + " + ); + let text = indoc!( + " + { + (aaa) + (CCC) + } + xxx + yyy + zzz + [ + (ddd) + (EEE) + ] + " + ); + + let buffer = cx.new(|cx| Buffer::local(text, cx)); + let diff = cx + .new(|cx| BufferDiff::new_with_base_text(base_text, &buffer.read(cx).text_snapshot(), cx)); + cx.run_until_parked(); + + let multibuffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer.clone(), + [ + Point::new(0, 0)..Point::new(3, 1), + Point::new(7, 0)..Point::new(10, 1), + ], + 0, + cx, + ); + multibuffer.add_diff(diff.clone(), cx); + multibuffer + }); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); + }); + cx.run_until_parked(); + + let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); + + let actual_diff = format_diff( + &snapshot.text(), + &snapshot.row_infos(MultiBufferRow(0)).collect::>(), + &Default::default(), + None, + ); + pretty_assertions::assert_eq!( + actual_diff, + indoc!( + " + { + (aaa) + - (bbb) + - (ccc) + + (CCC) + } [\u{2193}] + [ [\u{2191}] + (ddd) + (EEE) + ] [\u{2193}]" + ) + ); + + assert_eq!( + snapshot.map_excerpt_ranges( + snapshot.point_to_offset(Point::new(1, 3))..snapshot.point_to_offset(Point::new(1, 3)), + |buffer, 
excerpt_range, input_range| { + assert_eq!( + buffer.offset_to_point(input_range.start.0) + ..buffer.offset_to_point(input_range.end.0), + Point::new(1, 3)..Point::new(1, 3), + ); + assert_eq!( + buffer.offset_to_point(excerpt_range.context.start.0) + ..buffer.offset_to_point(excerpt_range.context.end.0), + Point::new(0, 0)..Point::new(3, 1), + ); + vec![ + (input_range.start..BufferOffset(input_range.start.0 + 3), ()), + (excerpt_range.context, ()), + ( + BufferOffset(text::ToOffset::to_offset(&Point::new(2, 2), buffer)) + ..BufferOffset(text::ToOffset::to_offset(&Point::new(2, 7), buffer)), + (), + ), + ( + BufferOffset(text::ToOffset::to_offset(&Point::new(0, 0), buffer)) + ..BufferOffset(text::ToOffset::to_offset(&Point::new(2, 0), buffer)), + (), + ), + ] + }, + ), + Some(vec![ + ( + snapshot.point_to_offset(Point::new(1, 3)) + ..snapshot.point_to_offset(Point::new(1, 6)), + (), + ), + ( + snapshot.point_to_offset(Point::zero())..snapshot.point_to_offset(Point::new(5, 1)), + () + ), + ( + snapshot.point_to_offset(Point::new(4, 2)) + ..snapshot.point_to_offset(Point::new(4, 7)), + (), + ), + ( + snapshot.point_to_offset(Point::zero())..snapshot.point_to_offset(Point::new(4, 0)), + () + ), + ]), + ); + + assert_eq!( + snapshot.map_excerpt_ranges( + snapshot.point_to_offset(Point::new(5, 0))..snapshot.point_to_offset(Point::new(7, 0)), + |_, _, range| vec![(range, ())], + ), + None, + ); + + assert_eq!( + snapshot.map_excerpt_ranges( + snapshot.point_to_offset(Point::new(7, 3))..snapshot.point_to_offset(Point::new(7, 6)), + |buffer, excerpt_range, input_range| { + assert_eq!( + buffer.offset_to_point(input_range.start.0) + ..buffer.offset_to_point(input_range.end.0), + Point::new(8, 3)..Point::new(8, 6), + ); + assert_eq!( + buffer.offset_to_point(excerpt_range.context.start.0) + ..buffer.offset_to_point(excerpt_range.context.end.0), + Point::new(7, 0)..Point::new(10, 1), + ); + vec![(input_range, ())] + }, + ), + Some(vec![( + 
snapshot.point_to_offset(Point::new(7, 3))..snapshot.point_to_offset(Point::new(7, 6)), + (), + )]), + ); +} + #[gpui::test] async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { let base_text_1 = indoc!( @@ -2273,7 +2733,7 @@ async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { ); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -2423,101 +2883,136 @@ struct ReferenceMultibuffer { excerpts: Vec, diffs: HashMap>, inverted_diffs: HashMap, Entity)>, + expanded_diff_hunks_by_buffer: HashMap>, } -#[derive(Debug)] +#[derive(Clone, Debug)] struct ReferenceExcerpt { - id: ExcerptId, + path_key: PathKey, + path_key_index: PathKeyIndex, buffer: Entity, range: Range, - expanded_diff_hunks: Vec, } -#[derive(Debug)] +#[derive(Clone, Debug)] struct ReferenceRegion { buffer_id: Option, range: Range, buffer_range: Option>, status: Option, - excerpt_id: Option, + excerpt_range: Option>, + excerpt_path_key_index: Option, } impl ReferenceMultibuffer { - fn expand_excerpts(&mut self, excerpts: &HashSet, line_count: u32, cx: &App) { - if line_count == 0 { + fn expand_excerpts( + &mut self, + excerpts: &HashSet>, + line_count: u32, + cx: &mut App, + ) { + use text::AnchorRangeExt as _; + + if line_count == 0 || excerpts.is_empty() { return; } - for id in excerpts { - let excerpt = self.excerpts.iter_mut().find(|e| e.id == *id).unwrap(); - let snapshot = excerpt.buffer.read(cx).snapshot(); - let mut point_range = excerpt.range.to_point(&snapshot); - point_range.start = Point::new(point_range.start.row.saturating_sub(line_count), 0); - point_range.end = - snapshot.clip_point(Point::new(point_range.end.row + line_count, 0), Bias::Left); - point_range.end.column = snapshot.line_len(point_range.end.row); - excerpt.range = - 
snapshot.anchor_before(point_range.start)..snapshot.anchor_after(point_range.end); + let mut excerpts_by_buffer: HashMap>> = + HashMap::default(); + for excerpt in excerpts { + excerpts_by_buffer + .entry(excerpt.context.start.buffer_id) + .or_default() + .push(excerpt.clone()) } - } - fn remove_excerpt(&mut self, id: ExcerptId, cx: &App) { - let ix = self - .excerpts - .iter() - .position(|excerpt| excerpt.id == id) - .unwrap(); - let excerpt = self.excerpts.remove(ix); - let buffer = excerpt.buffer.read(cx); - let buffer_id = buffer.remote_id(); - log::info!( - "Removing excerpt {}: {:?}", - ix, - buffer - .text_for_range(excerpt.range.to_offset(buffer)) - .collect::(), - ); - if !self - .excerpts - .iter() - .any(|excerpt| excerpt.buffer.read(cx).remote_id() == buffer_id) - { - self.diffs.remove(&buffer_id); - self.inverted_diffs.remove(&buffer_id); + for (buffer_id, excerpts_to_expand) in excerpts_by_buffer { + let mut buffer = None; + let mut buffer_snapshot = None; + let mut path = None; + let mut path_key_index = None; + let mut new_ranges = + self.excerpts + .iter() + .filter(|excerpt| excerpt.range.start.buffer_id == buffer_id) + .map(|excerpt| { + let snapshot = excerpt.buffer.read(cx).snapshot(); + let mut range = excerpt.range.to_point(&snapshot); + if excerpts_to_expand.iter().any(|info| { + excerpt.range.contains_anchor(info.context.start, &snapshot) + }) { + range.start = Point::new(range.start.row.saturating_sub(line_count), 0); + range.end = snapshot + .clip_point(Point::new(range.end.row + line_count, 0), Bias::Left); + range.end.column = snapshot.line_len(range.end.row); + } + buffer = Some(excerpt.buffer.clone()); + buffer_snapshot = Some(snapshot); + path = Some(excerpt.path_key.clone()); + path_key_index = Some(excerpt.path_key_index); + ExcerptRange::new(range) + }) + .collect::>(); + + new_ranges.sort_by(|l, r| l.context.start.cmp(&r.context.start)); + + self.set_excerpts( + path.unwrap(), + path_key_index.unwrap(), + buffer.unwrap(), + 
&buffer_snapshot.unwrap(), + new_ranges, + cx, + ); } } - fn insert_excerpt_after( + fn set_excerpts( &mut self, - prev_id: ExcerptId, - new_excerpt_id: ExcerptId, - (buffer_handle, anchor_range): (Entity, Range), + path_key: PathKey, + path_key_index: PathKeyIndex, + buffer: Entity, + buffer_snapshot: &BufferSnapshot, + ranges: Vec>, + cx: &mut App, ) { - let excerpt_ix = if prev_id == ExcerptId::max() { - self.excerpts.len() - } else { - self.excerpts - .iter() - .position(|excerpt| excerpt.id == prev_id) - .unwrap() - + 1 - }; - self.excerpts.insert( - excerpt_ix, - ReferenceExcerpt { - id: new_excerpt_id, - buffer: buffer_handle, - range: anchor_range, - expanded_diff_hunks: Vec::new(), - }, + self.excerpts.retain(|excerpt| { + excerpt.path_key != path_key && excerpt.buffer.entity_id() != buffer.entity_id() + }); + + let ranges = MultiBuffer::merge_excerpt_ranges(&ranges); + + let (Ok(ix) | Err(ix)) = self + .excerpts + .binary_search_by(|probe| probe.path_key.cmp(&path_key)); + self.excerpts.splice( + ix..ix, + ranges.into_iter().map(|range| ReferenceExcerpt { + path_key: path_key.clone(), + path_key_index, + buffer: buffer.clone(), + range: buffer_snapshot.anchor_before(range.context.start) + ..buffer_snapshot.anchor_after(range.context.end), + }), ); + self.update_expanded_diff_hunks_for_buffer(buffer_snapshot.remote_id(), cx); } - fn expand_diff_hunks(&mut self, excerpt_id: ExcerptId, range: Range, cx: &App) { + fn expand_diff_hunks(&mut self, path_key: PathKey, range: Range, cx: &App) { let excerpt = self .excerpts .iter_mut() - .find(|e| e.id == excerpt_id) + .find(|e| { + e.path_key == path_key + && e.range + .start + .cmp(&range.start, &e.buffer.read(cx).snapshot()) + .is_le() + && e.range + .end + .cmp(&range.end, &e.buffer.read(cx).snapshot()) + .is_ge() + }) .unwrap(); let buffer = excerpt.buffer.read(cx).snapshot(); let buffer_id = buffer.remote_id(); @@ -2530,36 +3025,39 @@ impl ReferenceMultibuffer { let Some(diff) = self.diffs.get(&buffer_id) 
else { return; }; - let excerpt_range = excerpt.range.to_offset(&buffer); + let excerpt_range = excerpt.range.to_point(&buffer); + let expanded_diff_hunks = self + .expanded_diff_hunks_by_buffer + .entry(buffer_id) + .or_default(); for hunk in diff .read(cx) .snapshot(cx) .hunks_intersecting_range(range, &buffer) { - let hunk_range = hunk.buffer_range.to_offset(&buffer); + let hunk_range = hunk.buffer_range.to_point(&buffer); if hunk_range.start < excerpt_range.start || hunk_range.start > excerpt_range.end { continue; } - if let Err(ix) = excerpt - .expanded_diff_hunks + if let Err(ix) = expanded_diff_hunks .binary_search_by(|anchor| anchor.cmp(&hunk.buffer_range.start, &buffer)) { log::info!( - "expanding diff hunk {:?}. excerpt:{:?}, excerpt range:{:?}", + "expanding diff hunk {:?}. excerpt range: {:?}, buffer {:?}", hunk_range, - excerpt_id, - excerpt_range + excerpt_range, + buffer.remote_id() ); - excerpt - .expanded_diff_hunks - .insert(ix, hunk.buffer_range.start); + expanded_diff_hunks.insert(ix, hunk.buffer_range.start); } else { - log::trace!("hunk {hunk_range:?} already expanded in excerpt {excerpt_id:?}"); + log::trace!("hunk {hunk_range:?} already expanded in excerpt"); } } } fn expected_content(&self, cx: &App) -> (String, Vec, HashSet) { + use util::maybe; + let mut text = String::new(); let mut regions = Vec::::new(); let mut excerpt_boundary_rows = HashSet::default(); @@ -2599,7 +3097,8 @@ impl ReferenceMultibuffer { (offset..hunk_base_range.start).to_point(&buffer), ), status: None, - excerpt_id: Some(excerpt.id), + excerpt_range: Some(excerpt.range.clone()), + excerpt_path_key_index: Some(excerpt.path_key_index), }); } } @@ -2613,7 +3112,8 @@ impl ReferenceMultibuffer { range: len..text.len(), buffer_range: Some(hunk_base_range.to_point(&buffer)), status: Some(DiffHunkStatus::deleted(hunk.secondary_status)), - excerpt_id: Some(excerpt.id), + excerpt_range: Some(excerpt.range.clone()), + excerpt_path_key_index: Some(excerpt.path_key_index), }); } 
@@ -2629,7 +3129,8 @@ impl ReferenceMultibuffer { range: len..text.len(), buffer_range: Some((offset..buffer_range.end).to_point(&buffer)), status: None, - excerpt_id: Some(excerpt.id), + excerpt_range: Some(excerpt.range.clone()), + excerpt_path_key_index: Some(excerpt.path_key_index), }); } else { let diff = self.diffs.get(&buffer_id).unwrap().read(cx).snapshot(cx); @@ -2651,10 +3152,18 @@ impl ReferenceMultibuffer { continue; } - if !excerpt.expanded_diff_hunks.iter().any(|expanded_anchor| { - expanded_anchor.to_offset(buffer).max(buffer_range.start) - == hunk_range.start.max(buffer_range.start) - }) { + if !self + .expanded_diff_hunks_by_buffer + .get(&buffer_id) + .cloned() + .into_iter() + .flatten() + .any(|expanded_anchor| { + expanded_anchor + .cmp(&hunk.buffer_range.start, buffer) + .is_eq() + }) + { log::trace!("skipping a hunk that's not marked as expanded"); continue; } @@ -2674,7 +3183,8 @@ impl ReferenceMultibuffer { range: len..text.len(), buffer_range: Some((offset..hunk_range.start).to_point(&buffer)), status: None, - excerpt_id: Some(excerpt.id), + excerpt_range: Some(excerpt.range.clone()), + excerpt_path_key_index: Some(excerpt.path_key_index), }); } @@ -2695,7 +3205,8 @@ impl ReferenceMultibuffer { hunk.diff_base_byte_range.to_point(&base_buffer), ), status: Some(DiffHunkStatus::deleted(hunk.secondary_status)), - excerpt_id: Some(excerpt.id), + excerpt_range: Some(excerpt.range.clone()), + excerpt_path_key_index: Some(excerpt.path_key_index), }); } @@ -2712,7 +3223,8 @@ impl ReferenceMultibuffer { range, buffer_range: Some((offset..hunk_range.end).to_point(&buffer)), status: Some(DiffHunkStatus::added(hunk.secondary_status)), - excerpt_id: Some(excerpt.id), + excerpt_range: Some(excerpt.range.clone()), + excerpt_path_key_index: Some(excerpt.path_key_index), }; offset = hunk_range.end; regions.push(region); @@ -2728,7 +3240,8 @@ impl ReferenceMultibuffer { range: len..text.len(), buffer_range: Some((offset..buffer_range.end).to_point(&buffer)), 
status: None, - excerpt_id: Some(excerpt.id), + excerpt_range: Some(excerpt.range.clone()), + excerpt_path_key_index: Some(excerpt.path_key_index), }); } } @@ -2740,7 +3253,8 @@ impl ReferenceMultibuffer { range: 0..1, buffer_range: Some(Point::new(0, 0)..Point::new(0, 1)), status: None, - excerpt_id: None, + excerpt_range: None, + excerpt_path_key_index: None, }); } else { text.pop(); @@ -2756,7 +3270,7 @@ impl ReferenceMultibuffer { .iter() .position(|region| region.range.contains(&ix)) .map_or(RowInfo::default(), |region_ix| { - let region = ®ions[region_ix]; + let region = regions[region_ix].clone(); let buffer_row = region.buffer_range.as_ref().map(|buffer_range| { buffer_range.start.row + text[region.range.start..ix].matches('\n').count() as u32 @@ -2764,13 +3278,13 @@ impl ReferenceMultibuffer { let main_buffer = self .excerpts .iter() - .find(|e| e.id == region.excerpt_id.unwrap()) + .find(|e| e.range == region.excerpt_range.clone().unwrap()) .map(|e| e.buffer.clone()); let is_excerpt_start = region_ix == 0 - || ®ions[region_ix - 1].excerpt_id != ®ion.excerpt_id + || ®ions[region_ix - 1].excerpt_range != ®ion.excerpt_range || regions[region_ix - 1].range.is_empty(); let mut is_excerpt_end = region_ix == regions.len() - 1 - || ®ions[region_ix + 1].excerpt_id != ®ion.excerpt_id; + || ®ions[region_ix + 1].excerpt_range != ®ion.excerpt_range; let is_start = !text[region.range.start..ix].contains('\n'); let mut is_end = if region.range.end > text.len() { !text[ix..].contains('\n') @@ -2784,7 +3298,7 @@ impl ReferenceMultibuffer { && !text[ix..].contains("\n") && (region.status == Some(DiffHunkStatus::added_none()) || region.status.is_some_and(|s| s.is_deleted())) - && regions[region_ix + 1].excerpt_id == region.excerpt_id + && regions[region_ix + 1].excerpt_range == region.excerpt_range && regions[region_ix + 1].range.start == text.len() { is_end = true; @@ -2816,12 +3330,18 @@ impl ReferenceMultibuffer { wrapped_buffer_row: None, multibuffer_row: 
Some(multibuffer_row), - expand_info: expand_direction.zip(region.excerpt_id).map( - |(direction, excerpt_id)| ExpandInfo { + expand_info: maybe!({ + let direction = expand_direction?; + let excerpt_range = region.excerpt_range?; + let path_key_index = region.excerpt_path_key_index?; + Some(ExpandInfo { direction, - excerpt_id, - }, - ), + start_anchor: Anchor::in_buffer( + path_key_index, + excerpt_range.start, + ), + }) + }), } }); ix += line.len() + 1; @@ -2832,41 +3352,10 @@ impl ReferenceMultibuffer { (text, row_infos, excerpt_boundary_rows) } - fn diffs_updated(&mut self, cx: &App) { - for excerpt in &mut self.excerpts { - let buffer = excerpt.buffer.read(cx).snapshot(); - let buffer_id = buffer.remote_id(); - - // Skip inverted diff excerpts - hunks are always expanded - if self.inverted_diffs.contains_key(&buffer_id) { - continue; - } - - let excerpt_range = excerpt.range.to_offset(&buffer); - let Some(diff) = self.diffs.get(&buffer_id) else { - continue; - }; - let diff = diff.read(cx).snapshot(cx); - let mut hunks = diff.hunks_in_row_range(0..u32::MAX, &buffer).peekable(); - excerpt.expanded_diff_hunks.retain(|hunk_anchor| { - if !hunk_anchor.is_valid(&buffer) { - return false; - } - while let Some(hunk) = hunks.peek() { - match hunk.buffer_range.start.cmp(hunk_anchor, &buffer) { - cmp::Ordering::Less => { - hunks.next(); - } - cmp::Ordering::Equal => { - let hunk_range = hunk.buffer_range.to_offset(&buffer); - return hunk_range.end >= excerpt_range.start - && hunk_range.start <= excerpt_range.end; - } - cmp::Ordering::Greater => break, - } - } - false - }); + fn diffs_updated(&mut self, cx: &mut App) { + let buffer_ids = self.diffs.keys().copied().collect::>(); + for buffer_id in buffer_ids { + self.update_expanded_diff_hunks_for_buffer(buffer_id, cx); } } @@ -2885,6 +3374,46 @@ impl ReferenceMultibuffer { self.inverted_diffs .insert(base_text_buffer_id, (diff, main_buffer)); } + + fn update_expanded_diff_hunks_for_buffer(&mut self, buffer_id: BufferId, 
cx: &mut App) { + let excerpts = self + .excerpts + .iter() + .filter(|excerpt| excerpt.buffer.read(cx).remote_id() == buffer_id) + .collect::>(); + let Some(buffer) = excerpts.first().map(|excerpt| excerpt.buffer.clone()) else { + self.expanded_diff_hunks_by_buffer.remove(&buffer_id); + return; + }; + let buffer_snapshot = buffer.read(cx).snapshot(); + let Some(diff) = self.diffs.get(&buffer_id) else { + self.expanded_diff_hunks_by_buffer.remove(&buffer_id); + return; + }; + let diff = diff.read(cx).snapshot(cx); + let hunks = diff + .hunks_in_row_range(0..u32::MAX, &buffer_snapshot) + .collect::>(); + self.expanded_diff_hunks_by_buffer + .entry(buffer_id) + .or_default() + .retain(|hunk_anchor| { + if !hunk_anchor.is_valid(&buffer_snapshot) { + return false; + } + + let Ok(ix) = hunks.binary_search_by(|hunk| { + hunk.buffer_range.start.cmp(hunk_anchor, &buffer_snapshot) + }) else { + return false; + }; + let hunk_range = hunks[ix].buffer_range.to_point(&buffer_snapshot); + excerpts.iter().any(|excerpt| { + let excerpt_range = excerpt.range.to_point(&buffer_snapshot); + hunk_range.start >= excerpt_range.start && hunk_range.start <= excerpt_range.end + }) + }); + } } #[gpui::test(iterations = 100)] @@ -2917,7 +3446,7 @@ async fn test_random_set_ranges(cx: &mut TestAppContext, mut rng: StdRng) { .collect::>(); ranges.sort_by_key(|range| range.start); log::info!("Setting ranges: {:?}", row_ranges(&ranges)); - let (created, _) = multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( PathKey::for_buffer(&buf, cx), buf.clone(), @@ -2927,15 +3456,16 @@ async fn test_random_set_ranges(cx: &mut TestAppContext, mut rng: StdRng) { ) }); - assert_eq!(created.len(), ranges.len()); - let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); let mut last_end = None; let mut seen_ranges = Vec::default(); - for (_, buf, range) in snapshot.excerpts() { - let start = 
range.context.start.to_point(buf); - let end = range.context.end.to_point(buf); + for info in snapshot.excerpts() { + let buffer_snapshot = snapshot + .buffer_for_id(info.context.start.buffer_id) + .unwrap(); + let start = info.context.start.to_point(buffer_snapshot); + let end = info.context.end.to_point(buffer_snapshot); seen_ranges.push(start..end); if let Some(last_end) = last_end.take() { @@ -2987,23 +3517,32 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { }); cx.update(|cx| reference.diffs_updated(cx)); } - 15..=19 if !reference.excerpts.is_empty() => { + 15..=24 if !reference.excerpts.is_empty() => { multibuffer.update(cx, |multibuffer, cx| { - let ids = multibuffer.excerpt_ids(); + let snapshot = multibuffer.snapshot(cx); + let infos = snapshot.excerpts().collect::>(); let mut excerpts = HashSet::default(); - for _ in 0..rng.random_range(0..ids.len()) { - excerpts.extend(ids.choose(&mut rng).copied()); + for _ in 0..rng.random_range(0..infos.len()) { + excerpts.extend(infos.choose(&mut rng).cloned()); } let line_count = rng.random_range(0..5); let excerpt_ixs = excerpts .iter() - .map(|id| reference.excerpts.iter().position(|e| e.id == *id).unwrap()) + .map(|info| { + reference + .excerpts + .iter() + .position(|e| e.range == info.context) + .unwrap() + }) .collect::>(); log::info!("Expanding excerpts {excerpt_ixs:?} by {line_count} lines"); multibuffer.expand_excerpts( - excerpts.iter().cloned(), + excerpts + .iter() + .map(|info| snapshot.anchor_in_excerpt(info.context.end).unwrap()), line_count, ExpandExcerptDirection::UpAndDown, cx, @@ -3012,25 +3551,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { reference.expand_excerpts(&excerpts, line_count, cx); }); } - 20..=29 if !reference.excerpts.is_empty() => { - let mut ids_to_remove = vec![]; - for _ in 0..rng.random_range(1..=3) { - let Some(excerpt) = reference.excerpts.choose(&mut rng) else { - break; - }; - let id = excerpt.id; - 
cx.update(|cx| reference.remove_excerpt(id, cx)); - ids_to_remove.push(id); - } - let snapshot = - multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); - ids_to_remove.sort_unstable_by(|a, b| a.cmp(b, &snapshot)); - drop(snapshot); - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts(ids_to_remove, cx) - }); - } - 30..=39 if !reference.excerpts.is_empty() => { + 25..=34 if !reference.excerpts.is_empty() => { let multibuffer = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); let offset = multibuffer.clip_offset( @@ -3046,32 +3567,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { anchors.push(multibuffer.anchor_at(offset, bias)); anchors.sort_by(|a, b| a.cmp(b, &multibuffer)); } - 40..=44 if !anchors.is_empty() => { - let multibuffer = - multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); - let prev_len = anchors.len(); - anchors = multibuffer - .refresh_anchors(&anchors) - .into_iter() - .map(|a| a.1) - .collect(); - - // Ensure the newly-refreshed anchors point to a valid excerpt and don't - // overshoot its boundaries. 
- assert_eq!(anchors.len(), prev_len); - for anchor in &anchors { - if anchor.excerpt_id == ExcerptId::min() - || anchor.excerpt_id == ExcerptId::max() - { - continue; - } - - let excerpt = multibuffer.excerpt(anchor.excerpt_id).unwrap(); - assert_eq!(excerpt.id, anchor.excerpt_id); - assert!(excerpt.contains(anchor)); - } - } - 45..=55 if !reference.excerpts.is_empty() => { + 35..=45 if !reference.excerpts.is_empty() => { multibuffer.update(cx, |multibuffer, cx| { let snapshot = multibuffer.snapshot(cx); let excerpt_ix = rng.random_range(0..reference.excerpts.len()); @@ -3085,20 +3581,19 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { let start = excerpt.range.start; let end = excerpt.range.end; - let range = snapshot.anchor_in_excerpt(excerpt.id, start).unwrap() - ..snapshot.anchor_in_excerpt(excerpt.id, end).unwrap(); + let range = snapshot.anchor_in_excerpt(start).unwrap() + ..snapshot.anchor_in_excerpt(end).unwrap(); log::info!( - "expanding diff hunks in range {:?} (excerpt id {:?}, index {excerpt_ix:?}, buffer id {:?})", - range.to_offset(&snapshot), - excerpt.id, + "expanding diff hunks in range {:?} (excerpt index {excerpt_ix:?}, buffer id {:?})", + range.to_point(&snapshot), buffer_id, ); - reference.expand_diff_hunks(excerpt.id, start..end, cx); + reference.expand_diff_hunks(excerpt.path_key.clone(), start..end, cx); multibuffer.expand_diff_hunks(vec![range], cx); }); } - 56..=85 if needs_diff_calculation => { + 46..=75 if needs_diff_calculation => { multibuffer.update(cx, |multibuffer, cx| { for buffer in multibuffer.all_buffers() { let snapshot = buffer.read(cx).snapshot(); @@ -3129,13 +3624,6 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { // Decide if we're creating a new buffer or reusing an existing one let create_new_buffer = buffers.is_empty() || rng.random_bool(0.4); - let prev_excerpt_ix = rng.random_range(0..=reference.excerpts.len()); - let prev_excerpt_id = reference - .excerpts 
- .get(prev_excerpt_ix) - .map_or(ExcerptId::max(), |e| e.id); - let excerpt_ix = (prev_excerpt_ix + 1).min(reference.excerpts.len()); - let (excerpt_buffer, diff, inverted_main_buffer) = if create_new_buffer { let create_inverted = rng.random_bool(0.3); @@ -3213,43 +3701,45 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { } }; - let (range, anchor_range) = excerpt_buffer.read_with(cx, |buffer, _| { - let end_row = rng.random_range(0..=buffer.max_point().row); - let start_row = rng.random_range(0..=end_row); - let end_ix = buffer.point_to_offset(Point::new(end_row, 0)); - let start_ix = buffer.point_to_offset(Point::new(start_row, 0)); - let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix); - - log::info!( - "Inserting excerpt at {} of {} for buffer {}: {:?}[{:?}] = {:?}", - excerpt_ix, - reference.excerpts.len(), - buffer.remote_id(), - buffer.text(), - start_ix..end_ix, - &buffer.text()[start_ix..end_ix] - ); - - (start_ix..end_ix, anchor_range) + let excerpt_buffer_snapshot = + excerpt_buffer.read_with(cx, |excerpt_buffer, _| excerpt_buffer.snapshot()); + let mut ranges = reference + .excerpts + .iter() + .filter(|excerpt| excerpt.buffer == excerpt_buffer) + .map(|excerpt| excerpt.range.to_point(&excerpt_buffer_snapshot)) + .collect::>(); + mutate_excerpt_ranges(&mut rng, &mut ranges, &excerpt_buffer_snapshot, 1); + let ranges = ranges + .iter() + .cloned() + .map(ExcerptRange::new) + .collect::>(); + let path = cx.update(|cx| PathKey::for_buffer(&excerpt_buffer, cx)); + let path_key_index = multibuffer.update(cx, |multibuffer, _| { + multibuffer.get_or_create_path_key_index(&path) }); - let excerpt_id = multibuffer.update(cx, |multibuffer, cx| { - multibuffer - .insert_excerpts_after( - prev_excerpt_id, - excerpt_buffer.clone(), - [ExcerptRange::new(range.clone())], - cx, - ) - .pop() - .unwrap() + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpt_ranges_for_path( + path.clone(), + 
excerpt_buffer.clone(), + &excerpt_buffer_snapshot, + ranges.clone(), + cx, + ) }); - reference.insert_excerpt_after( - prev_excerpt_id, - excerpt_id, - (excerpt_buffer.clone(), anchor_range), - ); + cx.update(|cx| { + reference.set_excerpts( + path, + path_key_index, + excerpt_buffer.clone(), + &excerpt_buffer_snapshot, + ranges, + cx, + ) + }); let excerpt_buffer_id = excerpt_buffer.read_with(cx, |buffer, _| buffer.remote_id()); @@ -3283,6 +3773,38 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { } } +fn mutate_excerpt_ranges( + rng: &mut StdRng, + existing_ranges: &mut Vec>, + buffer: &BufferSnapshot, + operations: u32, +) { + let mut ranges_to_add = Vec::new(); + + for _ in 0..operations { + match rng.random_range(0..5) { + 0..=1 if !existing_ranges.is_empty() => { + let index = rng.random_range(0..existing_ranges.len()); + log::info!("Removing excerpt at index {index}"); + existing_ranges.remove(index); + } + _ => { + let end_row = rng.random_range(0..=buffer.max_point().row); + let start_row = rng.random_range(0..=end_row); + log::info!( + "Inserting excerpt for buffer {:?}, row range {:?}", + buffer.remote_id(), + start_row..end_row + ); + ranges_to_add.push(Point::new(start_row, 0)..Point::new(end_row, 0)); + } + } + } + + existing_ranges.extend(ranges_to_add); + existing_ranges.sort_by(|l, r| l.start.cmp(&r.start)); +} + fn check_multibuffer( multibuffer: &MultiBuffer, reference: &ReferenceMultibuffer, @@ -3364,24 +3886,15 @@ fn check_multibuffer( .unwrap() + 1 ); - let reference_ranges = reference - .excerpts - .iter() - .map(|excerpt| { - ( - excerpt.id, - excerpt.range.to_offset(&excerpt.buffer.read(cx).snapshot()), - ) - }) - .collect::>(); for i in 0..snapshot.len().0 { - let excerpt = snapshot + let (_, excerpt_range) = snapshot .excerpt_containing(MultiBufferOffset(i)..MultiBufferOffset(i)) .unwrap(); - assert_eq!( - excerpt.buffer_range().start.0..excerpt.buffer_range().end.0, - reference_ranges[&excerpt.id()] - ); + 
reference + .excerpts + .iter() + .find(|reference_excerpt| reference_excerpt.range == excerpt_range.context) + .expect("corresponding excerpt should exist in reference multibuffer"); } assert_consistent_line_numbers(&snapshot); @@ -3560,8 +4073,8 @@ fn test_history(cx: &mut App) { assert_eq!( multibuffer.edited_ranges_for_transaction(transaction_1, cx), &[ - Point::new(0, 0)..Point::new(0, 2), - Point::new(1, 0)..Point::new(1, 2) + MultiBufferOffset(0)..MultiBufferOffset(2), + MultiBufferOffset(7)..MultiBufferOffset(9), ] ); @@ -3777,7 +4290,6 @@ async fn test_summaries_for_anchors(cx: &mut TestAppContext) { }); cx.run_until_parked(); - let mut ids = vec![]; let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); multibuffer.set_all_diff_hunks_expanded(cx); @@ -3797,7 +4309,6 @@ async fn test_summaries_for_anchors(cx: &mut TestAppContext) { ); multibuffer.add_diff(diff_1.clone(), cx); multibuffer.add_diff(diff_2.clone(), cx); - ids = multibuffer.excerpt_ids(); multibuffer }); @@ -3821,11 +4332,21 @@ async fn test_summaries_for_anchors(cx: &mut TestAppContext) { ), ); - let anchor_1 = Anchor::in_buffer(ids[0], text::Anchor::MIN); + let anchor_1 = multibuffer.read_with(cx, |multibuffer, cx| { + multibuffer + .snapshot(cx) + .anchor_in_excerpt(text::Anchor::min_for_buffer(buffer_1.read(cx).remote_id())) + .unwrap() + }); let point_1 = snapshot.summaries_for_anchors::([&anchor_1])[0]; assert_eq!(point_1, Point::new(0, 0)); - let anchor_2 = Anchor::in_buffer(ids[1], text::Anchor::MIN); + let anchor_2 = multibuffer.read_with(cx, |multibuffer, cx| { + multibuffer + .snapshot(cx) + .anchor_in_excerpt(text::Anchor::min_for_buffer(buffer_2.read(cx).remote_id())) + .unwrap() + }); let point_2 = snapshot.summaries_for_anchors::([&anchor_2])[0]; assert_eq!(point_2, Point::new(3, 0)); } @@ -3851,7 +4372,7 @@ async fn test_trailing_deletion_without_newline(cx: &mut TestAppContext) { cx, ); multibuffer.add_diff(diff_1.clone(), cx); - 
multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); multibuffer }); @@ -3884,7 +4405,7 @@ async fn test_trailing_deletion_without_newline(cx: &mut TestAppContext) { let (_, translated_offset) = snapshot.point_to_buffer_offset(Point::new(2, 0)).unwrap(); assert_eq!(translated_offset.0, "one\n".len()); - let (_, translated_point, _) = snapshot.point_to_buffer_point(Point::new(2, 0)).unwrap(); + let (_, translated_point) = snapshot.point_to_buffer_point(Point::new(2, 0)).unwrap(); assert_eq!(translated_point, Point::new(1, 0)); // The same, for an excerpt that's not at the end of the multibuffer. @@ -3927,7 +4448,7 @@ async fn test_trailing_deletion_without_newline(cx: &mut TestAppContext) { let (buffer, translated_offset) = snapshot.point_to_buffer_offset(Point::new(2, 0)).unwrap(); assert_eq!(buffer.remote_id(), buffer_1_id); assert_eq!(translated_offset.0, "one\n".len()); - let (buffer, translated_point, _) = snapshot.point_to_buffer_point(Point::new(2, 0)).unwrap(); + let (buffer, translated_point) = snapshot.point_to_buffer_point(Point::new(2, 0)).unwrap(); assert_eq!(buffer.remote_id(), buffer_1_id); assert_eq!(translated_point, Point::new(1, 0)); } @@ -3967,6 +4488,7 @@ fn format_diff( }; let expand = info .expand_info + .as_ref() .map(|expand_info| match expand_info.direction { ExpandExcerptDirection::Up => " [↑]", ExpandExcerptDirection::Down => " [↓]", @@ -4310,9 +4832,15 @@ fn assert_excerpts_match( ) { let mut output = String::new(); multibuffer.read_with(cx, |multibuffer, cx| { - for (_, buffer, range) in multibuffer.snapshot(cx).excerpts() { + let snapshot = multibuffer.snapshot(cx); + for excerpt in multibuffer.snapshot(cx).excerpts() { output.push_str("-----\n"); - output.extend(buffer.text_for_range(range.context)); + output.extend( + snapshot + .buffer_for_id(excerpt.context.start.buffer_id) + .unwrap() + .text_for_range(excerpt.context), + ); if 
!output.ends_with('\n') { output.push('\n'); } @@ -4525,14 +5053,14 @@ fn assert_position_translation(snapshot: &MultiBufferSnapshot) { if let Some((buffer, offset)) = snapshot.point_to_buffer_offset(snapshot.max_point()) { assert!(offset.0 <= buffer.len()); } - if let Some((buffer, point, _)) = snapshot.point_to_buffer_point(snapshot.max_point()) { + if let Some((buffer, point)) = snapshot.point_to_buffer_point(snapshot.max_point()) { assert!(point <= buffer.max_point()); } } fn assert_line_indents(snapshot: &MultiBufferSnapshot) { let max_row = snapshot.max_point().row; - let buffer_id = snapshot.excerpts().next().unwrap().1.remote_id(); + let buffer_id = snapshot.excerpts().next().unwrap().context.start.buffer_id; let text = text::Buffer::new(ReplicaId::LOCAL, buffer_id, snapshot.text()); let mut line_indents = text .line_indents_in_row_range(0..max_row + 1) @@ -4720,7 +5248,8 @@ fn test_random_chunk_bitmaps_with_diffs(cx: &mut App, mut rng: StdRng) { let mut diffs = Vec::new(); multibuffer.update(cx, |multibuffer, cx| { - for buffer_id in multibuffer.excerpt_buffer_ids() { + let snapshot = multibuffer.snapshot(cx); + for buffer_id in snapshot.all_buffer_ids() { if rng.random_bool(0.7) { if let Some(buffer_handle) = multibuffer.buffer(buffer_id) { let buffer_text = buffer_handle.read(cx).text(); @@ -4881,7 +5410,7 @@ fn collect_word_diffs( }); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); @@ -4996,38 +5525,40 @@ fn test_excerpts_containment_functions(cx: &mut App) { let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - let (excerpt_1_id, excerpt_2_id, excerpt_3_id) = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.set_excerpts_for_path( - PathKey::sorted(0), - buffer_1.clone(), - [Point::new(0, 
0)..Point::new(1, 3)], - 0, - cx, - ); + let (excerpt_1_info, excerpt_2_info, excerpt_3_info) = + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer_1.clone(), + [Point::new(0, 0)..Point::new(1, 3)], + 0, + cx, + ); - multibuffer.set_excerpts_for_path( - PathKey::sorted(1), - buffer_2.clone(), - [Point::new(0, 0)..Point::new(1, 3)], - 0, - cx, - ); + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), + buffer_2.clone(), + [Point::new(0, 0)..Point::new(1, 3)], + 0, + cx, + ); - multibuffer.set_excerpts_for_path( - PathKey::sorted(2), - buffer_3.clone(), - [Point::new(0, 0)..Point::new(0, 3)], - 0, - cx, - ); + multibuffer.set_excerpts_for_path( + PathKey::sorted(2), + buffer_3.clone(), + [Point::new(0, 0)..Point::new(0, 3)], + 0, + cx, + ); - let mut ids = multibuffer.excerpt_ids().into_iter(); - ( - ids.next().unwrap(), - ids.next().unwrap(), - ids.next().unwrap(), - ) - }); + let snapshot = multibuffer.snapshot(cx); + let mut excerpts = snapshot.excerpts(); + ( + excerpts.next().unwrap(), + excerpts.next().unwrap(), + excerpts.next().unwrap(), + ) + }); let snapshot = multibuffer.read(cx).snapshot(cx); @@ -5045,24 +5576,24 @@ fn test_excerpts_containment_functions(cx: &mut App) { let excerpts: Vec<_> = snapshot.excerpts_for_range(p00..p00).collect(); assert_eq!(excerpts.len(), 1); - assert_eq!(excerpts[0].id, excerpt_1_id); + assert_eq!(excerpts[0].range, excerpt_1_info); // Cursor at very end of excerpt 3 let excerpts: Vec<_> = snapshot.excerpts_for_range(p43..p43).collect(); assert_eq!(excerpts.len(), 1); - assert_eq!(excerpts[0].id, excerpt_3_id); + assert_eq!(excerpts[0].range, excerpt_3_info); let excerpts: Vec<_> = snapshot.excerpts_for_range(p00..p23).collect(); assert_eq!(excerpts.len(), 2); - assert_eq!(excerpts[0].id, excerpt_1_id); - assert_eq!(excerpts[1].id, excerpt_2_id); + assert_eq!(excerpts[0].range, excerpt_1_info); + assert_eq!(excerpts[1].range, excerpt_2_info); // This range 
represent an selection with end-point just inside excerpt_2 // Today we only expand the first excerpt, but another interpretation that // we could consider is expanding both here let excerpts: Vec<_> = snapshot.excerpts_for_range(p10..p20).collect(); assert_eq!(excerpts.len(), 1); - assert_eq!(excerpts[0].id, excerpt_1_id); + assert_eq!(excerpts[0].range, excerpt_1_info); //// Test that `excerpts_for_range` and `excerpt_containing` agree for all single offsets (cursor positions) for offset in 0..=snapshot.len().0 { @@ -5074,15 +5605,15 @@ fn test_excerpts_containment_functions(cx: &mut App) { "Expected exactly one excerpt for offset {offset}", ); - let excerpt_containing = snapshot.excerpt_containing(offset..offset); - assert!( - excerpt_containing.is_some(), - "Expected excerpt_containing to find excerpt for offset {offset}", - ); + let (_, excerpt_containing) = + snapshot + .excerpt_containing(offset..offset) + .unwrap_or_else(|| { + panic!("Expected excerpt_containing to find excerpt for offset {offset}") + }); assert_eq!( - excerpts_for_range[0].id, - excerpt_containing.unwrap().id(), + excerpts_for_range[0].range, excerpt_containing, "excerpts_for_range and excerpt_containing should agree for offset {offset}", ); } @@ -5090,9 +5621,8 @@ fn test_excerpts_containment_functions(cx: &mut App) { //// Test `excerpt_containing` behavior with ranges: // Ranges intersecting a single-excerpt - let containing = snapshot.excerpt_containing(p00..p13); - assert!(containing.is_some()); - assert_eq!(containing.unwrap().id(), excerpt_1_id); + let (_, containing) = snapshot.excerpt_containing(p00..p13).unwrap(); + assert_eq!(containing, excerpt_1_info); // Ranges intersecting multiple excerpts (should return None) let containing = snapshot.excerpt_containing(p20..p40); @@ -5103,14 +5633,12 @@ fn test_excerpts_containment_functions(cx: &mut App) { } #[gpui::test] -fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { - use std::ops::Bound; - +fn 
test_range_to_buffer_ranges(cx: &mut App) { let buffer_1 = cx.new(|cx| Buffer::local("aaa\nbbb", cx)); let buffer_2 = cx.new(|cx| Buffer::local("ccc", cx)); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - let (excerpt_1_id, excerpt_2_id) = multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( PathKey::sorted(0), buffer_1.clone(), @@ -5126,10 +5654,6 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { 0, cx, ); - - let excerpt_ids = multibuffer.excerpt_ids(); - - (excerpt_ids[0], excerpt_ids[1]) }); let snapshot = multibuffer.read(cx).snapshot(cx); @@ -5143,41 +5667,15 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { 1, "Half-open range ending at excerpt start should EXCLUDE that excerpt" ); - assert_eq!(ranges_half_open[0].2, excerpt_1_id); - - let ranges_inclusive = snapshot.range_to_buffer_ranges(Point::zero()..=excerpt_2_start); - assert_eq!( - ranges_inclusive.len(), - 2, - "Inclusive range ending at excerpt start should INCLUDE that excerpt" - ); - assert_eq!(ranges_inclusive[0].2, excerpt_1_id); - assert_eq!(ranges_inclusive[1].2, excerpt_2_id); - - let ranges_unbounded = - snapshot.range_to_buffer_ranges((Bound::Included(Point::zero()), Bound::Unbounded)); - assert_eq!( - ranges_unbounded.len(), - 2, - "Unbounded end should include all excerpts" - ); - assert_eq!(ranges_unbounded[0].2, excerpt_1_id); - assert_eq!(ranges_unbounded[1].2, excerpt_2_id); - - let ranges_excluded_end = snapshot.range_to_buffer_ranges(( - Bound::Included(Point::zero()), - Bound::Excluded(excerpt_2_start), - )); + assert_eq!(ranges_half_open[0].1, BufferOffset(0)..BufferOffset(7)); assert_eq!( - ranges_excluded_end.len(), - 1, - "Excluded end bound should exclude excerpt starting at that point" + ranges_half_open[0].0.remote_id(), + buffer_1.read(cx).remote_id() ); - assert_eq!(ranges_excluded_end[0].2, excerpt_1_id); let buffer_empty = cx.new(|cx| 
Buffer::local("", cx)); let multibuffer_trailing_empty = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - let (te_excerpt_1_id, te_excerpt_2_id) = + let (_te_excerpt_1_info, _te_excerpt_2_info) = multibuffer_trailing_empty.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( PathKey::sorted(0), @@ -5195,8 +5693,9 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { cx, ); - let excerpt_ids = multibuffer.excerpt_ids(); - (excerpt_ids[0], excerpt_ids[1]) + let snapshot = multibuffer.snapshot(cx); + let mut infos = snapshot.excerpts(); + (infos.next().unwrap(), infos.next().unwrap()) }); let snapshot_trailing = multibuffer_trailing_empty.read(cx).snapshot(cx); @@ -5207,29 +5706,130 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { let ranges_half_open_max = snapshot_trailing.range_to_buffer_ranges(Point::zero()..max_point); assert_eq!( ranges_half_open_max.len(), - 1, - "Half-open range to max_point should EXCLUDE trailing empty excerpt at max_point" + 2, + "Should include trailing empty excerpts" + ); + assert_eq!(ranges_half_open_max[1].1, BufferOffset(0)..BufferOffset(0)); +} + +#[gpui::test] +async fn test_buffer_range_to_excerpt_ranges(cx: &mut TestAppContext) { + let base_text = indoc!( + " + aaa + bbb + ccc + ddd + eee + ppp + qqq + rrr + fff + ggg + hhh + " + ); + let text = indoc!( + " + aaa + BBB + ddd + eee + ppp + qqq + rrr + FFF + ggg + hhh + " + ); + + let buffer = cx.new(|cx| Buffer::local(text, cx)); + let diff = cx + .new(|cx| BufferDiff::new_with_base_text(base_text, &buffer.read(cx).text_snapshot(), cx)); + cx.run_until_parked(); + + let multibuffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer.clone(), + [ + Point::new(0, 0)..Point::new(3, 3), + Point::new(7, 0)..Point::new(9, 3), + ], + 0, + cx, + ); + multibuffer.add_diff(diff.clone(), cx); + multibuffer + }); + + multibuffer.update(cx, 
|multibuffer, cx| { + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); + }); + cx.run_until_parked(); + + let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); + + let actual_diff = format_diff( + &snapshot.text(), + &snapshot.row_infos(MultiBufferRow(0)).collect::>(), + &Default::default(), + None, + ); + let expected_diff = indoc!( + " + aaa + - bbb + - ccc + + BBB + ddd + eee [\u{2193}] + - fff [\u{2191}] + + FFF + ggg + hhh [\u{2193}]" ); - assert_eq!(ranges_half_open_max[0].2, te_excerpt_1_id); + pretty_assertions::assert_eq!(actual_diff, expected_diff); + + let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - let ranges_inclusive_max = snapshot_trailing.range_to_buffer_ranges(Point::zero()..=max_point); + let query_spanning_deleted_hunk = buffer_snapshot.anchor_after(Point::new(0, 0)) + ..buffer_snapshot.anchor_before(Point::new(1, 3)); assert_eq!( - ranges_inclusive_max.len(), - 2, - "Inclusive range to max_point should INCLUDE trailing empty excerpt" + snapshot + .buffer_range_to_excerpt_ranges(query_spanning_deleted_hunk) + .map(|range| range.to_point(&snapshot)) + .collect::>(), + vec![ + Point::new(0, 0)..Point::new(1, 0), + Point::new(3, 0)..Point::new(3, 3), + ], ); - assert_eq!(ranges_inclusive_max[0].2, te_excerpt_1_id); - assert_eq!(ranges_inclusive_max[1].2, te_excerpt_2_id); - let ranges_unbounded_trailing = snapshot_trailing - .range_to_buffer_ranges((Bound::Included(Point::zero()), Bound::Unbounded)); + let query_within_contiguous_main_buffer = buffer_snapshot.anchor_after(Point::new(1, 0)) + ..buffer_snapshot.anchor_before(Point::new(2, 3)); assert_eq!( - ranges_unbounded_trailing.len(), - 2, - "Unbounded end should include trailing empty excerpt" + snapshot + .buffer_range_to_excerpt_ranges(query_within_contiguous_main_buffer) + .map(|range| range.to_point(&snapshot)) + .collect::>(), + vec![Point::new(3, 0)..Point::new(4, 3)], + ); + + let query_spanning_both_excerpts = 
buffer_snapshot.anchor_after(Point::new(2, 0)) + ..buffer_snapshot.anchor_before(Point::new(8, 3)); + assert_eq!( + snapshot + .buffer_range_to_excerpt_ranges(query_spanning_both_excerpts) + .map(|range| range.to_point(&snapshot)) + .collect::>(), + vec![ + Point::new(4, 0)..Point::new(5, 3), + Point::new(7, 0)..Point::new(8, 3), + ], ); - assert_eq!(ranges_unbounded_trailing[0].2, te_excerpt_1_id); - assert_eq!(ranges_unbounded_trailing[1].2, te_excerpt_2_id); } #[gpui::test] @@ -5275,17 +5875,14 @@ fn test_cannot_seek_backward_after_excerpt_replacement(cx: &mut TestAppContext) let (anchor_in_e_b2, anchor_in_e_b3) = multibuffer.read_with(cx, |multibuffer, cx| { let snapshot = multibuffer.snapshot(cx); - let excerpt_ids: Vec = snapshot.excerpts().map(|(id, _, _)| id).collect(); - assert_eq!(excerpt_ids.len(), 4, "expected 4 excerpts (3×B + 1×C)"); - - let e_b2_id = excerpt_ids[1]; - let e_b3_id = excerpt_ids[2]; + let excerpt_infos = snapshot.excerpts().collect::>(); + assert_eq!(excerpt_infos.len(), 4, "expected 4 excerpts (3×B + 1×C)"); - let e_b2 = snapshot.excerpt(e_b2_id).expect("E_B2 should exist"); - let e_b3 = snapshot.excerpt(e_b3_id).expect("E_B3 should exist"); + let e_b2_info = excerpt_infos[1].clone(); + let e_b3_info = excerpt_infos[2].clone(); - let anchor_b2 = Anchor::in_buffer(e_b2_id, e_b2.range.context.start); - let anchor_b3 = Anchor::in_buffer(e_b3_id, e_b3.range.context.start); + let anchor_b2 = snapshot.anchor_in_excerpt(e_b2_info.context.start).unwrap(); + let anchor_b3 = snapshot.anchor_in_excerpt(e_b3_info.context.start).unwrap(); (anchor_b2, anchor_b3) }); diff --git a/crates/multi_buffer/src/path_key.rs b/crates/multi_buffer/src/path_key.rs index 09d17d7b7fe2e9e666ba6c5777216c9c8ba4dea0..5c2123d0f9c1b09c16fd99531973df81c45140f7 100644 --- a/crates/multi_buffer/src/path_key.rs +++ b/crates/multi_buffer/src/path_key.rs @@ -1,24 +1,20 @@ -use std::{mem, ops::Range, sync::Arc}; +use std::{ops::Range, rc::Rc, sync::Arc}; -use 
collections::HashSet; use gpui::{App, AppContext, Context, Entity}; use itertools::Itertools; use language::{Buffer, BufferSnapshot}; use rope::Point; -use text::{Bias, OffsetRangeExt, locator::Locator}; -use util::{post_inc, rel_path::RelPath}; +use sum_tree::{Dimensions, SumTree}; +use text::{Bias, BufferId, Edit, OffsetRangeExt, Patch}; +use util::rel_path::RelPath; use ztracing::instrument; use crate::{ - Anchor, ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer, build_excerpt_ranges, + Anchor, BufferState, BufferStateSnapshot, DiffChangeKind, Event, Excerpt, ExcerptOffset, + ExcerptRange, ExcerptSummary, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, + PathKeyIndex, build_excerpt_ranges, remove_diff_state, }; -#[derive(Debug, Clone)] -pub struct PathExcerptInsertResult { - pub excerpt_ids: Vec, - pub added_new_excerpt: bool, -} - #[derive(PartialEq, Eq, Ord, PartialOrd, Clone, Hash, Debug)] pub struct PathKey { // Used by the derived PartialOrd & Ord @@ -27,6 +23,13 @@ pub struct PathKey { } impl PathKey { + pub fn min() -> Self { + Self { + sort_prefix: None, + path: RelPath::empty().into_arc(), + } + } + pub fn sorted(sort_prefix: u64) -> Self { Self { sort_prefix: Some(sort_prefix), @@ -55,41 +58,17 @@ impl PathKey { } impl MultiBuffer { - pub fn paths(&self) -> impl Iterator + '_ { - self.excerpts_by_path.keys() - } - - pub fn excerpts_for_path(&self, path: &PathKey) -> impl '_ + Iterator { - self.excerpts_by_path - .get(path) - .map(|excerpts| excerpts.as_slice()) - .unwrap_or_default() - .iter() - .copied() - } - - pub fn path_for_excerpt(&self, excerpt: ExcerptId) -> Option { - self.paths_by_excerpt.get(&excerpt).cloned() - } - - pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context) { - if let Some(to_remove) = self.excerpts_by_path.remove(&path) { - self.remove_excerpts(to_remove, cx) - } - } - pub fn buffer_for_path(&self, path: &PathKey, cx: &App) -> Option> { - let excerpt_id = 
self.excerpts_by_path.get(path)?.first()?; - let snapshot = self.read(cx); - let excerpt = snapshot.excerpt(*excerpt_id)?; - self.buffer(excerpt.buffer_id) + let snapshot = self.snapshot(cx); + let excerpt = snapshot.excerpts_for_path(path).next()?; + self.buffer(excerpt.context.start.buffer_id) } pub fn location_for_path(&self, path: &PathKey, cx: &App) -> Option { - let excerpt_id = self.excerpts_by_path.get(path)?.first()?; - let snapshot = self.read(cx); - let excerpt = snapshot.excerpt(*excerpt_id)?; - Some(Anchor::in_buffer(excerpt.id, excerpt.range.context.start)) + let snapshot = self.snapshot(cx); + let excerpt = snapshot.excerpts_for_path(path).next()?; + let path_key_index = snapshot.path_key_index_for_buffer(excerpt.context.start.buffer_id)?; + Some(Anchor::in_buffer(path_key_index, excerpt.context.start)) } pub fn set_excerpts_for_buffer( @@ -98,12 +77,14 @@ impl MultiBuffer { ranges: impl IntoIterator>, context_line_count: u32, cx: &mut Context, - ) -> (Vec>, bool) { + ) -> bool { let path = PathKey::for_buffer(&buffer, cx); self.set_excerpts_for_path(path, buffer, ranges, context_line_count, cx) } /// Sets excerpts, returns `true` if at least one new excerpt was added. + /// + /// Any existing excerpts for this buffer or this path will be replaced by the provided ranges. 
#[instrument(skip_all)] pub fn set_excerpts_for_path( &mut self, @@ -112,20 +93,83 @@ impl MultiBuffer { ranges: impl IntoIterator>, context_line_count: u32, cx: &mut Context, - ) -> (Vec>, bool) { + ) -> bool { let buffer_snapshot = buffer.read(cx).snapshot(); + let ranges: Vec<_> = ranges.into_iter().collect(); let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot); - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - self.set_merged_excerpt_ranges_for_path( - path, - buffer, - excerpt_ranges, + let merged = Self::merge_excerpt_ranges(&excerpt_ranges); + let (inserted, _path_key_index) = + self.set_merged_excerpt_ranges_for_path(path, buffer, &buffer_snapshot, merged, cx); + inserted + } + + /// Like [`Self::set_excerpts_for_path`], but expands the provided ranges to cover any overlapping existing excerpts + /// for the same buffer and path. + /// + /// Existing excerpts that do not overlap any of the provided ranges are discarded. + pub fn update_excerpts_for_path( + &mut self, + path: PathKey, + buffer: Entity, + ranges: impl IntoIterator>, + context_line_count: u32, + cx: &mut Context, + ) -> bool { + let buffer_snapshot = buffer.read(cx).snapshot(); + let ranges: Vec<_> = ranges.into_iter().collect(); + let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot); + let merged = self.merge_new_with_existing_excerpt_ranges( + &path, &buffer_snapshot, - new, - counts, + excerpt_ranges, cx, - ) + ); + + let (inserted, _path_key_index) = + self.set_merged_excerpt_ranges_for_path(path, buffer, &buffer_snapshot, merged, cx); + inserted + } + + pub fn merge_new_with_existing_excerpt_ranges( + &self, + path: &PathKey, + buffer_snapshot: &BufferSnapshot, + mut excerpt_ranges: Vec>, + cx: &App, + ) -> Vec> { + let multibuffer_snapshot = self.snapshot(cx); + + if multibuffer_snapshot.path_for_buffer(buffer_snapshot.remote_id()) == Some(path) { + excerpt_ranges.sort_by_key(|range| 
range.context.start); + let mut combined_ranges = Vec::new(); + let mut new_ranges = excerpt_ranges.into_iter().peekable(); + for existing_range in + multibuffer_snapshot.excerpts_for_buffer(buffer_snapshot.remote_id()) + { + let existing_range = ExcerptRange { + context: existing_range.context.to_point(buffer_snapshot), + primary: existing_range.primary.to_point(buffer_snapshot), + }; + while let Some(new_range) = new_ranges.peek() + && new_range.context.end < existing_range.context.start + { + combined_ranges.push(new_range.clone()); + new_ranges.next(); + } + + if let Some(new_range) = new_ranges.peek() + && new_range.context.start <= existing_range.context.end + { + combined_ranges.push(existing_range) + } + } + combined_ranges.extend(new_ranges); + excerpt_ranges = combined_ranges; + } + + excerpt_ranges.sort_by_key(|range| range.context.start); + Self::merge_excerpt_ranges(&excerpt_ranges) } pub fn set_excerpt_ranges_for_path( @@ -135,17 +179,11 @@ impl MultiBuffer { buffer_snapshot: &BufferSnapshot, excerpt_ranges: Vec>, cx: &mut Context, - ) -> (Vec>, bool) { - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - self.set_merged_excerpt_ranges_for_path( - path, - buffer, - excerpt_ranges, - buffer_snapshot, - new, - counts, - cx, - ) + ) -> bool { + let merged = Self::merge_excerpt_ranges(&excerpt_ranges); + let (inserted, _path_key_index) = + self.set_merged_excerpt_ranges_for_path(path, buffer, buffer_snapshot, merged, cx); + inserted } pub fn set_anchored_excerpts_for_path( @@ -161,350 +199,505 @@ impl MultiBuffer { let mut app = cx.to_async(); async move { let snapshot = buffer_snapshot.clone(); - let (excerpt_ranges, new, counts) = app + let (ranges, merged_excerpt_ranges) = app .background_spawn(async move { - let ranges = ranges.into_iter().map(|range| range.to_point(&snapshot)); + let point_ranges = ranges.iter().map(|range| range.to_point(&snapshot)); let excerpt_ranges = - build_excerpt_ranges(ranges, context_line_count, &snapshot); 
- let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - (excerpt_ranges, new, counts) + build_excerpt_ranges(point_ranges, context_line_count, &snapshot); + let merged = Self::merge_excerpt_ranges(&excerpt_ranges); + (ranges, merged) }) .await; multi_buffer .update(&mut app, move |multi_buffer, cx| { - let (ranges, _) = multi_buffer.set_merged_excerpt_ranges_for_path( + let (_, path_key_index) = multi_buffer.set_merged_excerpt_ranges_for_path( path_key, buffer, - excerpt_ranges, &buffer_snapshot, - new, - counts, + merged_excerpt_ranges, cx, ); ranges + .into_iter() + .map(|range| Anchor::range_in_buffer(path_key_index, range)) + .collect() }) .ok() .unwrap_or_default() } } - pub(super) fn expand_excerpts_with_paths( + pub fn expand_excerpts( &mut self, - ids: impl IntoIterator, + anchors: impl IntoIterator, line_count: u32, direction: ExpandExcerptDirection, cx: &mut Context, ) { - let mut sorted_ids: Vec = ids.into_iter().collect(); - sorted_ids.sort_by(|a, b| { - let path_a = self.paths_by_excerpt.get(a); - let path_b = self.paths_by_excerpt.get(b); - path_a.cmp(&path_b) - }); - let grouped = sorted_ids - .into_iter() - .chunk_by(|id| self.paths_by_excerpt.get(id).cloned()) + if line_count == 0 { + return; + } + + let snapshot = self.snapshot(cx); + let mut sorted_anchors = anchors .into_iter() - .filter_map(|(k, v)| Some((k?, v.into_iter().collect::>()))) + .filter_map(|anchor| anchor.excerpt_anchor()) .collect::>(); - let snapshot = self.snapshot(cx); - - for (path, ids) in grouped.into_iter() { - let Some(excerpt_ids) = self.excerpts_by_path.get(&path) else { + if sorted_anchors.is_empty() { + return; + } + sorted_anchors.sort_by(|a, b| a.cmp(b, &snapshot)); + let buffers = sorted_anchors.into_iter().chunk_by(|anchor| anchor.path); + let mut cursor = snapshot.excerpts.cursor::(()); + + for (path_index, excerpt_anchors) in &buffers { + let path = snapshot + .path_keys_by_index + .get(&path_index) + .expect("anchor from wrong multibuffer"); + + let 
mut excerpt_anchors = excerpt_anchors.peekable(); + let mut ranges = Vec::new(); + + cursor.seek_forward(path, Bias::Left); + let Some((buffer, buffer_snapshot)) = cursor + .item() + .map(|excerpt| (excerpt.buffer(&self), excerpt.buffer_snapshot(&snapshot))) + else { continue; }; - let ids_to_expand = HashSet::from_iter(ids); - let mut excerpt_id_ = None; - let expanded_ranges = excerpt_ids.iter().filter_map(|excerpt_id| { - let excerpt = snapshot.excerpt(*excerpt_id)?; - let excerpt_id = excerpt.id; - if excerpt_id_.is_none() { - excerpt_id_ = Some(excerpt_id); + while let Some(excerpt) = cursor.item() + && &excerpt.path_key == path + { + let mut range = ExcerptRange { + context: excerpt.range.context.to_point(buffer_snapshot), + primary: excerpt.range.primary.to_point(buffer_snapshot), + }; + + let mut needs_expand = false; + while excerpt_anchors.peek().is_some_and(|anchor| { + excerpt + .range + .contains(&anchor.text_anchor(), buffer_snapshot) + }) { + needs_expand = true; + excerpt_anchors.next(); } - let mut context = excerpt.range.context.to_point(&excerpt.buffer); - if ids_to_expand.contains(&excerpt_id) { + if needs_expand { match direction { ExpandExcerptDirection::Up => { - context.start.row = context.start.row.saturating_sub(line_count); - context.start.column = 0; + range.context.start.row = + range.context.start.row.saturating_sub(line_count); + range.context.start.column = 0; } ExpandExcerptDirection::Down => { - context.end.row = - (context.end.row + line_count).min(excerpt.buffer.max_point().row); - context.end.column = excerpt.buffer.line_len(context.end.row); + range.context.end.row = (range.context.end.row + line_count) + .min(excerpt.buffer_snapshot(&snapshot).max_point().row); + range.context.end.column = excerpt + .buffer_snapshot(&snapshot) + .line_len(range.context.end.row); } ExpandExcerptDirection::UpAndDown => { - context.start.row = context.start.row.saturating_sub(line_count); - context.start.column = 0; - context.end.row = - 
(context.end.row + line_count).min(excerpt.buffer.max_point().row); - context.end.column = excerpt.buffer.line_len(context.end.row); + range.context.start.row = + range.context.start.row.saturating_sub(line_count); + range.context.start.column = 0; + range.context.end.row = (range.context.end.row + line_count) + .min(excerpt.buffer_snapshot(&snapshot).max_point().row); + range.context.end.column = excerpt + .buffer_snapshot(&snapshot) + .line_len(range.context.end.row); } } } - Some(ExcerptRange { - context, - primary: excerpt.range.primary.to_point(&excerpt.buffer), - }) - }); - let mut merged_ranges: Vec> = Vec::new(); - for range in expanded_ranges { - if let Some(last_range) = merged_ranges.last_mut() - && last_range.context.end >= range.context.start - { - last_range.context.end = range.context.end; - continue; - } - merged_ranges.push(range) + ranges.push(range); + cursor.next(); } - let Some(excerpt_id) = excerpt_id_ else { - continue; - }; - let Some(buffer_id) = &snapshot.buffer_id_for_excerpt(excerpt_id) else { - continue; - }; - let Some(buffer) = self.buffers.get(buffer_id).map(|b| b.buffer.clone()) else { - continue; - }; + ranges.sort_by(|l, r| l.context.start.cmp(&r.context.start)); - let buffer_snapshot = buffer.read(cx).snapshot(); - self.update_path_excerpts(path.clone(), buffer, &buffer_snapshot, merged_ranges, cx); + self.set_excerpt_ranges_for_path(path.clone(), buffer, buffer_snapshot, ranges, cx); } } /// Sets excerpts, returns `true` if at least one new excerpt was added. 
- fn set_merged_excerpt_ranges_for_path( + pub(crate) fn set_merged_excerpt_ranges_for_path( &mut self, path: PathKey, buffer: Entity, - ranges: Vec>, buffer_snapshot: &BufferSnapshot, - new: Vec>, - counts: Vec, + new: Vec>, cx: &mut Context, - ) -> (Vec>, bool) { - let insert_result = self.update_path_excerpts(path, buffer, buffer_snapshot, new, cx); - - let mut result = Vec::new(); - let mut ranges = ranges.into_iter(); - for (excerpt_id, range_count) in insert_result - .excerpt_ids + ) -> (bool, PathKeyIndex) + where + T: language::ToOffset, + { + let anchor_ranges = new .into_iter() - .zip(counts.into_iter()) - { - for range in ranges.by_ref().take(range_count) { - let range = Anchor::range_in_buffer( - excerpt_id, - buffer_snapshot.anchor_before(&range.primary.start) - ..buffer_snapshot.anchor_after(&range.primary.end), - ); - result.push(range) - } + .map(|r| ExcerptRange { + context: buffer_snapshot.anchor_before(r.context.start) + ..buffer_snapshot.anchor_after(r.context.end), + primary: buffer_snapshot.anchor_before(r.primary.start) + ..buffer_snapshot.anchor_after(r.primary.end), + }) + .collect::>(); + let inserted = + self.update_path_excerpts(path.clone(), buffer, buffer_snapshot, &anchor_ranges, cx); + let path_key_index = self.get_or_create_path_key_index(&path); + (inserted, path_key_index) + } + + pub(crate) fn get_or_create_path_key_index(&mut self, path_key: &PathKey) -> PathKeyIndex { + let mut snapshot = self.snapshot.borrow_mut(); + + if let Some(&existing) = snapshot.indices_by_path_key.get(path_key) { + return existing; } - (result, insert_result.added_new_excerpt) + + let index = snapshot + .path_keys_by_index + .last() + .map(|(index, _)| PathKeyIndex(index.0 + 1)) + .unwrap_or(PathKeyIndex(0)); + snapshot.path_keys_by_index.insert(index, path_key.clone()); + snapshot.indices_by_path_key.insert(path_key.clone(), index); + index } pub fn update_path_excerpts( &mut self, - path: PathKey, + path_key: PathKey, buffer: Entity, buffer_snapshot: 
&BufferSnapshot, - new: Vec>, + to_insert: &Vec>, cx: &mut Context, - ) -> PathExcerptInsertResult { - let mut insert_after = self - .excerpts_by_path - .range(..path.clone()) - .next_back() - .and_then(|(_, value)| value.last().copied()) - .unwrap_or(ExcerptId::min()); - - let existing = self - .excerpts_by_path - .get(&path) - .cloned() - .unwrap_or_default(); - let mut new_iter = new.into_iter().peekable(); - let mut existing_iter = existing.into_iter().peekable(); - - let mut excerpt_ids = Vec::new(); - let mut to_remove = Vec::new(); - let mut to_insert: Vec<(ExcerptId, ExcerptRange)> = Vec::new(); - let mut added_a_new_excerpt = false; - let snapshot = self.snapshot(cx); + ) -> bool { + let path_key_index = self.get_or_create_path_key_index(&path_key); + if let Some(old_path_key) = self + .snapshot(cx) + .path_for_buffer(buffer_snapshot.remote_id()) + && old_path_key != &path_key + { + self.remove_excerpts(old_path_key.clone(), cx); + } - let mut next_excerpt_id = - if let Some(last_entry) = self.snapshot.get_mut().excerpt_ids.last() { - last_entry.id.0 + 1 - } else { - 1 - }; + if to_insert.len() == 0 { + self.remove_excerpts(path_key.clone(), cx); - let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id)); + return false; + } + assert_eq!(self.history.transaction_depth(), 0); + self.sync_mut(cx); - let mut excerpts_cursor = snapshot.excerpts.cursor::>(()); - excerpts_cursor.next(); + let buffer_id = buffer_snapshot.remote_id(); - loop { - let existing = if let Some(&existing_id) = existing_iter.peek() { - let locator = snapshot.excerpt_locator_for_id(existing_id); - excerpts_cursor.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = excerpts_cursor.item() { - if excerpt.buffer_id != buffer_snapshot.remote_id() { - to_remove.push(existing_id); - existing_iter.next(); - continue; - } - Some((existing_id, excerpt.range.context.to_point(buffer_snapshot))) - } else { - None - } - } else { - None + let mut snapshot = 
self.snapshot.get_mut(); + let mut cursor = snapshot + .excerpts + .cursor::>(()); + let mut new_excerpts = SumTree::new(()); + + let new_ranges = to_insert.clone(); + let mut to_insert = to_insert.iter().peekable(); + let mut patch = Patch::empty(); + let mut added_new_excerpt = false; + + new_excerpts.append(cursor.slice(&path_key, Bias::Left), ()); + + // handle the case where the path key used to be associated + // with a different buffer by removing its excerpts. + if let Some(excerpt) = cursor.item() + && &excerpt.path_key == &path_key + && excerpt.buffer_id != buffer_id + { + let old_buffer_id = excerpt.buffer_id; + self.buffers.remove(&old_buffer_id); + snapshot.buffers.remove(&old_buffer_id); + remove_diff_state(&mut snapshot.diffs, old_buffer_id); + self.diffs.remove(&old_buffer_id); + let before = cursor.position.1; + cursor.seek_forward(&path_key, Bias::Right); + let after = cursor.position.1; + patch.push(Edit { + old: before..after, + new: new_excerpts.summary().len()..new_excerpts.summary().len(), + }); + cx.emit(Event::BuffersRemoved { + removed_buffer_ids: vec![old_buffer_id], + }); + } + + while let Some(excerpt) = cursor.item() + && excerpt.path_key == path_key + { + assert_eq!(excerpt.buffer_id, buffer_id); + let Some(next_excerpt) = to_insert.peek() else { + break; }; + if &excerpt.range == *next_excerpt { + let before = new_excerpts.summary().len(); + new_excerpts.update_last( + |prev_excerpt| { + if !prev_excerpt.has_trailing_newline { + prev_excerpt.has_trailing_newline = true; + patch.push(Edit { + old: cursor.position.1..cursor.position.1, + new: before..before + MultiBufferOffset(1), + }); + } + }, + (), + ); + new_excerpts.push(excerpt.clone(), ()); + to_insert.next(); + cursor.next(); + continue; + } - let new = new_iter.peek(); - // Try to merge the next new range or existing excerpt into the last - // queued insert. 
- if let Some((last_id, last)) = to_insert.last_mut() { - // Next new range overlaps the last queued insert: absorb it by - // extending the insert's end. - if let Some(new) = new - && last.context.end >= new.context.start - { - last.context.end = last.context.end.max(new.context.end); - excerpt_ids.push(*last_id); - new_iter.next(); - continue; - } - // Next existing excerpt overlaps the last queued insert: absorb - // it by extending the insert's end, and record the existing - // excerpt as replaced so anchors in it resolve to the new one. - if let Some((existing_id, existing_range)) = &existing - && last.context.end >= existing_range.start - { - last.context.end = last.context.end.max(existing_range.end); - to_remove.push(*existing_id); - Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts) - .insert(*existing_id, *last_id); - existing_iter.next(); - continue; - } + if excerpt + .range + .context + .start + .cmp(&next_excerpt.context.start, &buffer_snapshot) + .is_le() + { + // remove old excerpt + let before = cursor.position.1; + cursor.next(); + let after = cursor.position.1; + patch.push(Edit { + old: before..after, + new: new_excerpts.summary().len()..new_excerpts.summary().len(), + }); + } else { + // insert new excerpt + let next_excerpt = to_insert.next().unwrap(); + added_new_excerpt = true; + let before = new_excerpts.summary().len(); + new_excerpts.update_last( + |prev_excerpt| { + prev_excerpt.has_trailing_newline = true; + }, + (), + ); + new_excerpts.push( + Excerpt::new( + path_key.clone(), + path_key_index, + &buffer_snapshot, + next_excerpt.clone(), + false, + ), + (), + ); + let after = new_excerpts.summary().len(); + patch.push_maybe_empty(Edit { + old: cursor.position.1..cursor.position.1, + new: before..after, + }); } + } - match (new, existing) { - (None, None) => break, + // remove any further trailing excerpts + let mut before = cursor.position.1; + cursor.seek_forward(&path_key, Bias::Right); + let after = cursor.position.1; + 
// if we removed the previous last excerpt, remove the trailing newline from the new last excerpt + if cursor.item().is_none() && to_insert.peek().is_none() { + new_excerpts.update_last( + |excerpt| { + if excerpt.has_trailing_newline { + before.0.0 = before + .0 + .0 + .checked_sub(1) + .expect("should have preceding excerpt"); + excerpt.has_trailing_newline = false; + } + }, + (), + ); + } + patch.push(Edit { + old: before..after, + new: new_excerpts.summary().len()..new_excerpts.summary().len(), + }); - // No more new ranges; remove the remaining existing excerpt. - (None, Some((existing_id, _))) => { - existing_iter.next(); - to_remove.push(existing_id); - } + while let Some(next_excerpt) = to_insert.next() { + added_new_excerpt = true; + let before = new_excerpts.summary().len(); + new_excerpts.update_last( + |prev_excerpt| { + prev_excerpt.has_trailing_newline = true; + }, + (), + ); + new_excerpts.push( + Excerpt::new( + path_key.clone(), + path_key_index, + &buffer_snapshot, + next_excerpt.clone(), + false, + ), + (), + ); + let after = new_excerpts.summary().len(); + patch.push_maybe_empty(Edit { + old: cursor.position.1..cursor.position.1, + new: before..after, + }); + } - // No more existing excerpts; queue the new range for insertion. 
- (Some(_), None) => { - added_a_new_excerpt = true; - let new_id = next_excerpt_id(); - excerpt_ids.push(new_id); - to_insert.push((new_id, new_iter.next().unwrap())); - } + let suffix_start = cursor.position.1; + let suffix = cursor.suffix(); + let changed_trailing_excerpt = suffix.is_empty(); + if !suffix.is_empty() { + let before = new_excerpts.summary().len(); + new_excerpts.update_last( + |prev_excerpt| { + if !prev_excerpt.has_trailing_newline { + prev_excerpt.has_trailing_newline = true; + patch.push(Edit { + old: suffix_start..suffix_start, + new: before..before + MultiBufferOffset(1), + }); + } + }, + (), + ); + } + new_excerpts.append(suffix, ()); + drop(cursor); + + snapshot.excerpts = new_excerpts; + snapshot.buffers.insert( + buffer_id, + BufferStateSnapshot { + path_key: path_key.clone(), + path_key_index, + buffer_snapshot: buffer_snapshot.clone(), + }, + ); + + self.buffers.entry(buffer_id).or_insert_with(|| { + self.buffer_changed_since_sync.replace(true); + buffer.update(cx, |buffer, _| { + buffer.record_changes(Rc::downgrade(&self.buffer_changed_since_sync)); + }); + BufferState { + _subscriptions: [ + cx.observe(&buffer, |_, _, cx| cx.notify()), + cx.subscribe(&buffer, Self::on_buffer_event), + ], + buffer: buffer.clone(), + } + }); - // Existing excerpt ends before the new range starts, so it - // has no corresponding new range and must be removed. Flush - // pending inserts and advance `insert_after` past it so that - // future inserts receive locators *after* this excerpt's - // locator, preserving forward ordering. - (Some(new), Some((_, existing_range))) - if existing_range.end < new.context.start => - { - self.insert_excerpts_with_ids_after( - insert_after, - buffer.clone(), - mem::take(&mut to_insert), - cx, - ); - insert_after = existing_iter.next().unwrap(); - to_remove.push(insert_after); - } - // New range ends before the existing excerpt starts, so the - // new range has no corresponding existing excerpt. 
Queue it - // for insertion at the current `insert_after` position - // (before the existing excerpt), which is the correct - // spatial ordering. - (Some(new), Some((_, existing_range))) - if existing_range.start > new.context.end => - { - let new_id = next_excerpt_id(); - excerpt_ids.push(new_id); - to_insert.push((new_id, new_iter.next().unwrap())); - } - // Exact match: keep the existing excerpt in place, flush - // any pending inserts before it, and use it as the new - // `insert_after` anchor. - (Some(new), Some((_, existing_range))) - if existing_range.start == new.context.start - && existing_range.end == new.context.end => - { - self.insert_excerpts_with_ids_after( - insert_after, - buffer.clone(), - mem::take(&mut to_insert), - cx, - ); - insert_after = existing_iter.next().unwrap(); - excerpt_ids.push(insert_after); - new_iter.next(); - } + if changed_trailing_excerpt { + snapshot.trailing_excerpt_update_count += 1; + } - // Partial overlap: replace the existing excerpt with a new - // one whose range is the union of both, and record the - // replacement so that anchors in the old excerpt resolve to - // the new one. 
- (Some(_), Some((_, existing_range))) => { - let existing_id = existing_iter.next().unwrap(); - let new_id = next_excerpt_id(); - Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts) - .insert(existing_id, new_id); - to_remove.push(existing_id); - let mut range = new_iter.next().unwrap(); - range.context.start = range.context.start.min(existing_range.start); - range.context.end = range.context.end.max(existing_range.end); - excerpt_ids.push(new_id); - to_insert.push((new_id, range)); - } - }; + let edits = Self::sync_diff_transforms( + &mut snapshot, + patch.into_inner(), + DiffChangeKind::BufferEdited, + ); + if !edits.is_empty() { + self.subscriptions.publish(edits); } - self.insert_excerpts_with_ids_after(insert_after, buffer, to_insert, cx); - // todo(lw): There is a logic bug somewhere that causes the to_remove vector to be not ordered correctly - to_remove.sort_by_cached_key(|&id| snapshot.excerpt_locator_for_id(id)); - self.remove_excerpts(to_remove, cx); + cx.emit(Event::Edited { + edited_buffer: None, + is_local: true, + }); + cx.emit(Event::BufferRangesUpdated { + buffer, + path_key: path_key.clone(), + ranges: new_ranges, + }); + cx.notify(); - if excerpt_ids.is_empty() { - self.excerpts_by_path.remove(&path); - } else { - let snapshot = &*self.snapshot.get_mut(); - let excerpt_ids = excerpt_ids - .iter() - .dedup() - .cloned() - // todo(lw): There is a logic bug somewhere that causes excerpt_ids to not necessarily be in order by locator - .sorted_by_cached_key(|&id| snapshot.excerpt_locator_for_id(id)) - .collect(); - for &excerpt_id in &excerpt_ids { - self.paths_by_excerpt.insert(excerpt_id, path.clone()); - } - self.excerpts_by_path.insert(path, excerpt_ids); + added_new_excerpt + } + + pub fn remove_excerpts_for_buffer(&mut self, buffer: BufferId, cx: &mut Context) { + let snapshot = self.sync_mut(cx); + let Some(path) = snapshot.path_for_buffer(buffer).cloned() else { + return; + }; + self.remove_excerpts(path, cx); + } + + pub fn 
remove_excerpts(&mut self, path: PathKey, cx: &mut Context) { + assert_eq!(self.history.transaction_depth(), 0); + self.sync_mut(cx); + + let mut snapshot = self.snapshot.get_mut(); + let mut cursor = snapshot + .excerpts + .cursor::>(()); + let mut new_excerpts = SumTree::new(()); + new_excerpts.append(cursor.slice(&path, Bias::Left), ()); + let mut edit_start = cursor.position.1; + let mut buffer_id = None; + if let Some(excerpt) = cursor.item() + && excerpt.path_key == path + { + buffer_id = Some(excerpt.buffer_id); } + cursor.seek(&path, Bias::Right); + let edit_end = cursor.position.1; + let suffix = cursor.suffix(); + let changed_trailing_excerpt = suffix.is_empty(); + new_excerpts.append(suffix, ()); + + if let Some(buffer_id) = buffer_id { + snapshot.buffers.remove(&buffer_id); + remove_diff_state(&mut snapshot.diffs, buffer_id); + self.buffers.remove(&buffer_id); + self.diffs.remove(&buffer_id); + cx.emit(Event::BuffersRemoved { + removed_buffer_ids: vec![buffer_id], + }) + } + drop(cursor); + if changed_trailing_excerpt { + snapshot.trailing_excerpt_update_count += 1; + new_excerpts.update_last( + |excerpt| { + if excerpt.has_trailing_newline { + excerpt.has_trailing_newline = false; + edit_start.0.0 = edit_start + .0 + .0 + .checked_sub(1) + .expect("should have at least one excerpt"); + } + }, + (), + ) + } + + let edit = Edit { + old: edit_start..edit_end, + new: edit_start..edit_start, + }; + snapshot.excerpts = new_excerpts; - PathExcerptInsertResult { - excerpt_ids, - added_new_excerpt: added_a_new_excerpt, + let edits = + Self::sync_diff_transforms(&mut snapshot, vec![edit], DiffChangeKind::BufferEdited); + if !edits.is_empty() { + self.subscriptions.publish(edits); } + + cx.emit(Event::Edited { + edited_buffer: None, + is_local: true, + }); + cx.notify(); } } diff --git a/crates/multi_buffer/src/transaction.rs b/crates/multi_buffer/src/transaction.rs index a65e394c8f1834a95ccbc70532aa03d2a3e6e34c..a3afe55cd6928b9e908d0249af5fb8fe7fc4bbe4 100644 
--- a/crates/multi_buffer/src/transaction.rs +++ b/crates/multi_buffer/src/transaction.rs @@ -2,15 +2,15 @@ use gpui::{App, Context, Entity}; use language::{self, Buffer, TransactionId}; use std::{ collections::HashMap, - ops::{AddAssign, Range, Sub}, + ops::Range, time::{Duration, Instant}, }; use sum_tree::Bias; use text::BufferId; -use crate::{BufferState, MultiBufferDimension}; +use crate::{Anchor, BufferState, MultiBufferOffset}; -use super::{Event, ExcerptSummary, MultiBuffer}; +use super::{Event, MultiBuffer}; #[derive(Clone)] pub(super) struct History { @@ -314,71 +314,50 @@ impl MultiBuffer { } } - pub fn edited_ranges_for_transaction( + pub fn edited_ranges_for_transaction( &self, transaction_id: TransactionId, cx: &App, - ) -> Vec> - where - D: MultiBufferDimension - + Ord - + Sub - + AddAssign, - D::TextDimension: PartialOrd + Sub, - { + ) -> Vec> { let Some(transaction) = self.history.transaction(transaction_id) else { return Vec::new(); }; - let mut ranges = Vec::new(); let snapshot = self.read(cx); - let mut cursor = snapshot.excerpts.cursor::(()); + let mut buffer_anchors = Vec::new(); for (buffer_id, buffer_transaction) in &transaction.buffer_transactions { - let Some(buffer_state) = self.buffers.get(buffer_id) else { + let Some(buffer) = self.buffer(*buffer_id) else { continue; }; + let Some(excerpt) = snapshot.first_excerpt_for_buffer(*buffer_id) else { + continue; + }; + let buffer_snapshot = buffer.read(cx).snapshot(); - let buffer = buffer_state.buffer.read(cx); - for range in - buffer.edited_ranges_for_transaction_id::(*buffer_transaction) + for range in buffer + .read(cx) + .edited_ranges_for_transaction_id::(*buffer_transaction) { - for excerpt_id in &buffer_state.excerpts { - cursor.seek(excerpt_id, Bias::Left); - if let Some(excerpt) = cursor.item() - && excerpt.locator == *excerpt_id - { - let excerpt_buffer_start = excerpt - .range - .context - .start - .summary::(buffer); - let excerpt_buffer_end = excerpt - .range - .context - .end - 
.summary::(buffer); - let excerpt_range = excerpt_buffer_start..excerpt_buffer_end; - if excerpt_range.contains(&range.start) - && excerpt_range.contains(&range.end) - { - let excerpt_start = D::from_summary(&cursor.start().text); - - let mut start = excerpt_start; - start += range.start - excerpt_buffer_start; - let mut end = excerpt_start; - end += range.end - excerpt_buffer_start; - - ranges.push(start..end); - break; - } - } - } + buffer_anchors.push(Anchor::in_buffer( + excerpt.path_key_index, + buffer_snapshot.anchor_at(range.start, Bias::Left), + )); + buffer_anchors.push(Anchor::in_buffer( + excerpt.path_key_index, + buffer_snapshot.anchor_at(range.end, Bias::Right), + )); } } + buffer_anchors.sort_unstable_by(|a, b| a.cmp(b, &snapshot)); - ranges.sort_by_key(|range| range.start); - ranges + snapshot + .summaries_for_anchors(buffer_anchors.iter()) + .as_chunks::<2>() + .0 + .iter() + .map(|&[s, e]| s..e) + .collect::>() } pub fn merge_transactions( diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index a03c87d9f68e41dd29d9d614f714db47083831ef..af5671632fdac175e5d31ae15c5890d439b7860f 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -79,29 +79,37 @@ fn outline_for_editor( cx: &mut App, ) -> Option>>> { let multibuffer = editor.read(cx).buffer().read(cx).snapshot(cx); - let (excerpt_id, _, buffer_snapshot) = multibuffer.as_singleton()?; + let buffer_snapshot = multibuffer.as_singleton()?; let buffer_id = buffer_snapshot.remote_id(); let task = editor.update(cx, |editor, cx| editor.buffer_outline_items(buffer_id, cx)); Some(cx.background_executor().spawn(async move { task.await .into_iter() - .map(|item| OutlineItem { - depth: item.depth, - range: Anchor::range_in_buffer(excerpt_id, item.range), - source_range_for_text: Anchor::range_in_buffer( - excerpt_id, - item.source_range_for_text, - ), - text: item.text, - highlight_ranges: item.highlight_ranges, - name_ranges: item.name_ranges, - body_range: 
item - .body_range - .map(|r| Anchor::range_in_buffer(excerpt_id, r)), - annotation_range: item - .annotation_range - .map(|r| Anchor::range_in_buffer(excerpt_id, r)), + .filter_map(|item| { + Some(OutlineItem { + depth: item.depth, + range: multibuffer.anchor_in_buffer(item.range.start)? + ..multibuffer.anchor_in_buffer(item.range.end)?, + source_range_for_text: multibuffer + .anchor_in_buffer(item.source_range_for_text.start)? + ..multibuffer.anchor_in_buffer(item.source_range_for_text.end)?, + text: item.text, + highlight_ranges: item.highlight_ranges, + name_ranges: item.name_ranges, + body_range: item.body_range.and_then(|r| { + Some( + multibuffer.anchor_in_buffer(r.start)? + ..multibuffer.anchor_in_buffer(r.end)?, + ) + }), + annotation_range: item.annotation_range.and_then(|r| { + Some( + multibuffer.anchor_in_buffer(r.start)? + ..multibuffer.anchor_in_buffer(r.end)?, + ) + }), + }) }) .collect() })) diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index aa6f89cb8c11c40d4121ab12720069ee7fe66844..b7d5afcb687c017fdf253717a9dae2c95c55b53b 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -1,11 +1,11 @@ mod outline_panel_settings; use anyhow::Context as _; -use collections::{BTreeSet, HashMap, HashSet, hash_map}; +use collections::{BTreeSet, HashMap, HashSet}; use db::kvp::KeyValueStore; use editor::{ - AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, ExcerptId, ExcerptRange, - MultiBufferSnapshot, RangeToAnchorExt, SelectionEffects, + AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, ExcerptRange, MultiBufferSnapshot, + RangeToAnchorExt, SelectionEffects, display_map::ToDisplayPoint, items::{entry_git_aware_label_color, entry_label_color}, scroll::{Autoscroll, ScrollAnchor}, @@ -129,12 +129,12 @@ pub struct OutlinePanel { selected_entry: SelectedEntry, active_item: Option, _subscriptions: Vec, - new_entries_for_fs_update: HashSet, + 
new_entries_for_fs_update: HashSet, fs_entries_update_task: Task<()>, cached_entries_update_task: Task<()>, reveal_selection_task: Task>, outline_fetch_tasks: HashMap>, - excerpts: HashMap>, + buffers: HashMap, cached_entries: Vec, filter_editor: Entity, mode: ItemsDisplayMode, @@ -334,42 +334,41 @@ enum CollapsedEntry { Dir(WorktreeId, ProjectEntryId), File(WorktreeId, BufferId), ExternalFile(BufferId), - Excerpt(BufferId, ExcerptId), - Outline(BufferId, ExcerptId, Range), + Excerpt(ExcerptRange), + Outline(Range), } -#[derive(Debug)] -struct Excerpt { - range: ExcerptRange, - outlines: ExcerptOutlines, +struct BufferOutlines { + excerpts: Vec>, + outlines: OutlineState, } -impl Excerpt { +impl BufferOutlines { fn invalidate_outlines(&mut self) { - if let ExcerptOutlines::Outlines(valid_outlines) = &mut self.outlines { - self.outlines = ExcerptOutlines::Invalidated(std::mem::take(valid_outlines)); + if let OutlineState::Outlines(valid_outlines) = &mut self.outlines { + self.outlines = OutlineState::Invalidated(std::mem::take(valid_outlines)); } } fn iter_outlines(&self) -> impl Iterator { match &self.outlines { - ExcerptOutlines::Outlines(outlines) => outlines.iter(), - ExcerptOutlines::Invalidated(outlines) => outlines.iter(), - ExcerptOutlines::NotFetched => [].iter(), + OutlineState::Outlines(outlines) => outlines.iter(), + OutlineState::Invalidated(outlines) => outlines.iter(), + OutlineState::NotFetched => [].iter(), } } fn should_fetch_outlines(&self) -> bool { match &self.outlines { - ExcerptOutlines::Outlines(_) => false, - ExcerptOutlines::Invalidated(_) => true, - ExcerptOutlines::NotFetched => true, + OutlineState::Outlines(_) => false, + OutlineState::Invalidated(_) => true, + OutlineState::NotFetched => true, } } } #[derive(Debug)] -enum ExcerptOutlines { +enum OutlineState { Outlines(Vec), Invalidated(Vec), NotFetched, @@ -536,54 +535,24 @@ impl SearchData { } } -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -struct OutlineEntryExcerpt { - id: 
ExcerptId, - buffer_id: BufferId, - range: ExcerptRange, -} - -#[derive(Clone, Debug, Eq)] -struct OutlineEntryOutline { - buffer_id: BufferId, - excerpt_id: ExcerptId, - outline: Outline, -} - -impl PartialEq for OutlineEntryOutline { - fn eq(&self, other: &Self) -> bool { - self.buffer_id == other.buffer_id - && self.excerpt_id == other.excerpt_id - && self.outline.depth == other.outline.depth - && self.outline.range == other.outline.range - && self.outline.text == other.outline.text - } -} - -impl Hash for OutlineEntryOutline { - fn hash(&self, state: &mut H) { - ( - self.buffer_id, - self.excerpt_id, - self.outline.depth, - &self.outline.range, - &self.outline.text, - ) - .hash(state); - } -} - #[derive(Clone, Debug, PartialEq, Eq)] enum OutlineEntry { - Excerpt(OutlineEntryExcerpt), - Outline(OutlineEntryOutline), + Excerpt(ExcerptRange), + Outline(Outline), } impl OutlineEntry { - fn ids(&self) -> (BufferId, ExcerptId) { + fn buffer_id(&self) -> BufferId { match self { - OutlineEntry::Excerpt(excerpt) => (excerpt.buffer_id, excerpt.id), - OutlineEntry::Outline(outline) => (outline.buffer_id, outline.excerpt_id), + OutlineEntry::Excerpt(excerpt) => excerpt.context.start.buffer_id, + OutlineEntry::Outline(outline) => outline.range.start.buffer_id, + } + } + + fn range(&self) -> Range { + match self { + OutlineEntry::Excerpt(excerpt) => excerpt.context.clone(), + OutlineEntry::Outline(outline) => outline.range.clone(), } } } @@ -593,7 +562,7 @@ struct FsEntryFile { worktree_id: WorktreeId, entry: GitEntry, buffer_id: BufferId, - excerpts: Vec, + excerpts: Vec>, } impl PartialEq for FsEntryFile { @@ -631,7 +600,7 @@ impl Hash for FsEntryDirectory { #[derive(Debug, Clone, Eq)] struct FsEntryExternalFile { buffer_id: BufferId, - excerpts: Vec, + excerpts: Vec>, } impl PartialEq for FsEntryExternalFile { @@ -787,10 +756,8 @@ impl OutlinePanel { if ¤t_theme != new_theme { outline_panel_settings = *new_settings; current_theme = new_theme.clone(); - for excerpts in 
outline_panel.excerpts.values_mut() { - for excerpt in excerpts.values_mut() { - excerpt.invalidate_outlines(); - } + for buffer in outline_panel.buffers.values_mut() { + buffer.invalidate_outlines(); } outlines_invalidated = true; let update_cached_items = outline_panel.update_non_fs_items(window, cx); @@ -809,30 +776,23 @@ impl OutlinePanel { let new_depth = new_settings.expand_outlines_with_depth; - for (buffer_id, excerpts) in &outline_panel.excerpts { - for (excerpt_id, excerpt) in excerpts { - if let ExcerptOutlines::Outlines(outlines) = &excerpt.outlines { - for outline in outlines { - if outline_panel - .outline_children_cache - .get(buffer_id) - .and_then(|children_map| { - let key = - (outline.range.clone(), outline.depth); - children_map.get(&key) - }) - .copied() - .unwrap_or(false) - && (new_depth == 0 || outline.depth >= new_depth) - { - outline_panel.collapsed_entries.insert( - CollapsedEntry::Outline( - *buffer_id, - *excerpt_id, - outline.range.clone(), - ), - ); - } + for (buffer_id, buffer) in &outline_panel.buffers { + if let OutlineState::Outlines(outlines) = &buffer.outlines { + for outline in outlines { + if outline_panel + .outline_children_cache + .get(buffer_id) + .and_then(|children_map| { + let key = (outline.range.clone(), outline.depth); + children_map.get(&key) + }) + .copied() + .unwrap_or(false) + && (new_depth == 0 || outline.depth >= new_depth) + { + outline_panel.collapsed_entries.insert( + CollapsedEntry::Outline(outline.range.clone()), + ); } } } @@ -852,7 +812,7 @@ impl OutlinePanel { if !outlines_invalidated { let new_document_symbols = outline_panel - .excerpts + .buffers .keys() .filter_map(|buffer_id| { let buffer = outline_panel @@ -867,10 +827,8 @@ impl OutlinePanel { .collect(); if new_document_symbols != document_symbols_by_buffer { document_symbols_by_buffer = new_document_symbols; - for excerpts in outline_panel.excerpts.values_mut() { - for excerpt in excerpts.values_mut() { - excerpt.invalidate_outlines(); - } + 
for buffer in outline_panel.buffers.values_mut() { + buffer.invalidate_outlines(); } let update_cached_items = outline_panel.update_non_fs_items(window, cx); if update_cached_items { @@ -914,7 +872,7 @@ impl OutlinePanel { cached_entries_update_task: Task::ready(()), reveal_selection_task: Task::ready(Ok(())), outline_fetch_tasks: HashMap::default(), - excerpts: HashMap::default(), + buffers: HashMap::default(), cached_entries: Vec::new(), _subscriptions: vec![ settings_subscription, @@ -1110,16 +1068,13 @@ impl OutlinePanel { PanelEntry::Fs(FsEntry::ExternalFile(file)) => { change_selection = false; scroll_to_buffer = Some(file.buffer_id); - multi_buffer_snapshot.excerpts().find_map( - |(excerpt_id, buffer_snapshot, excerpt_range)| { - if buffer_snapshot.remote_id() == file.buffer_id { - multi_buffer_snapshot - .anchor_in_excerpt(excerpt_id, excerpt_range.context.start) - } else { - None - } - }, - ) + multi_buffer_snapshot.excerpts().find_map(|excerpt_range| { + if excerpt_range.context.start.buffer_id == file.buffer_id { + multi_buffer_snapshot.anchor_in_excerpt(excerpt_range.context.start) + } else { + None + } + }) } PanelEntry::Fs(FsEntry::File(file)) => { @@ -1132,26 +1087,20 @@ impl OutlinePanel { .and_then(|path| project.get_open_buffer(&path, cx)) }) .map(|buffer| { - active_multi_buffer - .read(cx) - .excerpts_for_buffer(buffer.read(cx).remote_id(), cx) + multi_buffer_snapshot.excerpts_for_buffer(buffer.read(cx).remote_id()) }) - .and_then(|excerpts| { - let (excerpt_id, _, excerpt_range) = excerpts.first()?; - multi_buffer_snapshot - .anchor_in_excerpt(*excerpt_id, excerpt_range.context.start) + .and_then(|mut excerpts| { + let excerpt_range = excerpts.next()?; + multi_buffer_snapshot.anchor_in_excerpt(excerpt_range.context.start) }) } PanelEntry::Outline(OutlineEntry::Outline(outline)) => multi_buffer_snapshot - .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.start) - .or_else(|| { - multi_buffer_snapshot - 
.anchor_in_excerpt(outline.excerpt_id, outline.outline.range.end) - }), + .anchor_in_excerpt(outline.range.start) + .or_else(|| multi_buffer_snapshot.anchor_in_excerpt(outline.range.end)), PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { change_selection = false; change_focus = false; - multi_buffer_snapshot.anchor_in_excerpt(excerpt.id, excerpt.range.context.start) + multi_buffer_snapshot.anchor_in_excerpt(excerpt.context.start) } PanelEntry::Search(search_entry) => Some(search_entry.match_range.start), }; @@ -1359,12 +1308,12 @@ impl OutlinePanel { PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { previous_entries.find(|entry| match entry { PanelEntry::Fs(FsEntry::File(file)) => { - file.buffer_id == excerpt.buffer_id - && file.excerpts.contains(&excerpt.id) + file.buffer_id == excerpt.context.start.buffer_id + && file.excerpts.contains(&excerpt) } PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { - external_file.buffer_id == excerpt.buffer_id - && external_file.excerpts.contains(&excerpt.id) + external_file.buffer_id == excerpt.context.start.buffer_id + && external_file.excerpts.contains(&excerpt) } _ => false, }) @@ -1372,8 +1321,16 @@ impl OutlinePanel { PanelEntry::Outline(OutlineEntry::Outline(outline)) => { previous_entries.find(|entry| { if let PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) = entry { - outline.buffer_id == excerpt.buffer_id - && outline.excerpt_id == excerpt.id + if outline.range.start.buffer_id != excerpt.context.start.buffer_id { + return false; + } + let Some(buffer_snapshot) = + self.buffer_snapshot_for_id(outline.range.start.buffer_id, cx) + else { + return false; + }; + excerpt.contains(&outline.range.start, &buffer_snapshot) + || excerpt.contains(&outline.range.end, &buffer_snapshot) } else { false } @@ -1584,13 +1541,11 @@ impl OutlinePanel { Some(CollapsedEntry::ExternalFile(external_file.buffer_id)) } PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { - 
Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) + Some(CollapsedEntry::Excerpt(excerpt.clone())) + } + PanelEntry::Outline(OutlineEntry::Outline(outline)) => { + Some(CollapsedEntry::Outline(outline.range.clone())) } - PanelEntry::Outline(OutlineEntry::Outline(outline)) => Some(CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - )), PanelEntry::Search(_) => return, }; let Some(collapsed_entry) = entry_to_expand else { @@ -1691,14 +1646,10 @@ impl OutlinePanel { } PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self .collapsed_entries - .insert(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)), - PanelEntry::Outline(OutlineEntry::Outline(outline)) => { - self.collapsed_entries.insert(CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - )) - } + .insert(CollapsedEntry::Excerpt(excerpt.clone())), + PanelEntry::Outline(OutlineEntry::Outline(outline)) => self + .collapsed_entries + .insert(CollapsedEntry::Outline(outline.range.clone())), PanelEntry::Search(_) => false, }; @@ -1753,31 +1704,26 @@ impl OutlinePanel { } } - for (&buffer_id, excerpts) in &self.excerpts { - for (&excerpt_id, excerpt) in excerpts { - match &excerpt.outlines { - ExcerptOutlines::Outlines(outlines) => { - for outline in outlines { - to_uncollapse.insert(CollapsedEntry::Outline( - buffer_id, - excerpt_id, - outline.range.clone(), - )); - } + for (_buffer_id, buffer) in &self.buffers { + match &buffer.outlines { + OutlineState::Outlines(outlines) => { + for outline in outlines { + to_uncollapse.insert(CollapsedEntry::Outline(outline.range.clone())); } - ExcerptOutlines::Invalidated(outlines) => { - for outline in outlines { - to_uncollapse.insert(CollapsedEntry::Outline( - buffer_id, - excerpt_id, - outline.range.clone(), - )); - } + } + OutlineState::Invalidated(outlines) => { + for outline in outlines { + 
to_uncollapse.insert(CollapsedEntry::Outline(outline.range.clone())); } - ExcerptOutlines::NotFetched => {} } - to_uncollapse.insert(CollapsedEntry::Excerpt(buffer_id, excerpt_id)); + OutlineState::NotFetched => {} } + to_uncollapse.extend( + buffer + .excerpts + .iter() + .map(|excerpt| CollapsedEntry::Excerpt(excerpt.clone())), + ); } for cached in &self.cached_entries { @@ -1844,14 +1790,10 @@ impl OutlinePanel { .. }) => Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)), PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { - Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) + Some(CollapsedEntry::Excerpt(excerpt.clone())) } PanelEntry::Outline(OutlineEntry::Outline(outline)) => { - Some(CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - )) + Some(CollapsedEntry::Outline(outline.range.clone())) } PanelEntry::Search(_) => None, }, @@ -1939,17 +1881,13 @@ impl OutlinePanel { } } PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { - let collapsed_entry = CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id); + let collapsed_entry = CollapsedEntry::Excerpt(excerpt.clone()); if !self.collapsed_entries.remove(&collapsed_entry) { self.collapsed_entries.insert(collapsed_entry); } } PanelEntry::Outline(OutlineEntry::Outline(outline)) => { - let collapsed_entry = CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - ); + let collapsed_entry = CollapsedEntry::Outline(outline.range.clone()); if !self.collapsed_entries.remove(&collapsed_entry) { self.collapsed_entries.insert(collapsed_entry); } @@ -2103,6 +2041,8 @@ impl OutlinePanel { let project = self.project.clone(); self.reveal_selection_task = cx.spawn_in(window, async move |outline_panel, cx| { cx.background_executor().timer(UPDATE_DEBOUNCE).await; + let multibuffer_snapshot = + editor.read_with(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx)); let entry_with_selection = 
outline_panel.update_in(cx, |outline_panel, window, cx| { outline_panel.location_for_editor_selection(&editor, window, cx) @@ -2132,14 +2072,28 @@ impl OutlinePanel { }) }), PanelEntry::Outline(outline_entry) => { - let (buffer_id, excerpt_id) = outline_entry.ids(); + let buffer_id = outline_entry.buffer_id(); + let outline_range = outline_entry.range(); outline_panel.update(cx, |outline_panel, cx| { outline_panel .collapsed_entries .remove(&CollapsedEntry::ExternalFile(buffer_id)); - outline_panel - .collapsed_entries - .remove(&CollapsedEntry::Excerpt(buffer_id, excerpt_id)); + if let Some(buffer_snapshot) = + outline_panel.buffer_snapshot_for_id(buffer_id, cx) + { + outline_panel.collapsed_entries.retain(|entry| match entry { + CollapsedEntry::Excerpt(excerpt_range) => { + let intersects = excerpt_range.context.start.buffer_id + == buffer_id + && (excerpt_range + .contains(&outline_range.start, &buffer_snapshot) + || excerpt_range + .contains(&outline_range.end, &buffer_snapshot)); + !intersects + } + _ => true, + }); + } let project = outline_panel.project.read(cx); let entry_id = project .buffer_for_id(buffer_id, cx) @@ -2160,11 +2114,9 @@ impl OutlinePanel { })? } PanelEntry::Fs(FsEntry::ExternalFile(..)) => None, - PanelEntry::Search(SearchEntry { match_range, .. }) => match_range - .start - .text_anchor - .buffer_id - .or(match_range.end.text_anchor.buffer_id) + PanelEntry::Search(SearchEntry { match_range, .. 
}) => multibuffer_snapshot + .anchor_to_buffer_anchor(match_range.start) + .map(|(anchor, _)| anchor.buffer_id) .map(|buffer_id| { outline_panel.update(cx, |outline_panel, cx| { outline_panel @@ -2246,30 +2198,30 @@ impl OutlinePanel { fn render_excerpt( &self, - excerpt: &OutlineEntryExcerpt, + excerpt: &ExcerptRange, depth: usize, window: &mut Window, cx: &mut Context, ) -> Option> { - let item_id = ElementId::from(excerpt.id.to_proto() as usize); + let item_id = ElementId::from(format!("{excerpt:?}")); let is_active = match self.selected_entry() { Some(PanelEntry::Outline(OutlineEntry::Excerpt(selected_excerpt))) => { - selected_excerpt.buffer_id == excerpt.buffer_id && selected_excerpt.id == excerpt.id + selected_excerpt == excerpt } _ => false, }; let has_outlines = self - .excerpts - .get(&excerpt.buffer_id) - .and_then(|excerpts| match &excerpts.get(&excerpt.id)?.outlines { - ExcerptOutlines::Outlines(outlines) => Some(outlines), - ExcerptOutlines::Invalidated(outlines) => Some(outlines), - ExcerptOutlines::NotFetched => None, + .buffers + .get(&excerpt.context.start.buffer_id) + .and_then(|buffer| match &buffer.outlines { + OutlineState::Outlines(outlines) => Some(outlines), + OutlineState::Invalidated(outlines) => Some(outlines), + OutlineState::NotFetched => None, }) .is_some_and(|outlines| !outlines.is_empty()); let is_expanded = !self .collapsed_entries - .contains(&CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)); + .contains(&CollapsedEntry::Excerpt(excerpt.clone())); let color = entry_label_color(is_active); let icon = if has_outlines { FileIcons::get_chevron_icon(is_expanded, cx) @@ -2279,7 +2231,7 @@ impl OutlinePanel { } .unwrap_or_else(empty_icon); - let label = self.excerpt_label(excerpt.buffer_id, &excerpt.range, cx)?; + let label = self.excerpt_label(&excerpt, cx)?; let label_element = Label::new(label) .single_line() .color(color) @@ -2297,13 +2249,8 @@ impl OutlinePanel { )) } - fn excerpt_label( - &self, - buffer_id: BufferId, - 
range: &ExcerptRange, - cx: &App, - ) -> Option { - let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx)?; + fn excerpt_label(&self, range: &ExcerptRange, cx: &App) -> Option { + let buffer_snapshot = self.buffer_snapshot_for_id(range.context.start.buffer_id, cx)?; let excerpt_range = range.context.to_point(&buffer_snapshot); Some(format!( "Lines {}- {}", @@ -2314,19 +2261,19 @@ impl OutlinePanel { fn render_outline( &self, - outline: &OutlineEntryOutline, + outline: &Outline, depth: usize, string_match: Option<&StringMatch>, window: &mut Window, cx: &mut Context, ) -> Stateful
{ let item_id = ElementId::from(SharedString::from(format!( - "{:?}|{:?}{:?}|{:?}", - outline.buffer_id, outline.excerpt_id, outline.outline.range, &outline.outline.text, + "{:?}|{:?}", + outline.range, &outline.text, ))); let label_element = outline::render_item( - &outline.outline, + &outline, string_match .map(|string_match| string_match.ranges().collect::>()) .unwrap_or_default(), @@ -2335,26 +2282,22 @@ impl OutlinePanel { .into_any_element(); let is_active = match self.selected_entry() { - Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => { - outline == selected && outline.outline == selected.outline - } + Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => outline == selected, _ => false, }; let has_children = self .outline_children_cache - .get(&outline.buffer_id) + .get(&outline.range.start.buffer_id) .and_then(|children_map| { - let key = (outline.outline.range.clone(), outline.outline.depth); + let key = (outline.range.clone(), outline.depth); children_map.get(&key) }) .copied() .unwrap_or(false); - let is_expanded = !self.collapsed_entries.contains(&CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - )); + let is_expanded = !self + .collapsed_entries + .contains(&CollapsedEntry::Outline(outline.range.clone())); let icon = if has_children { FileIcons::get_chevron_icon(is_expanded, cx) @@ -2784,7 +2727,7 @@ impl OutlinePanel { let mut new_collapsed_entries = HashSet::default(); let mut new_unfolded_dirs = HashMap::default(); let mut root_entries = HashSet::default(); - let mut new_excerpts = HashMap::>::default(); + let mut new_buffers = HashMap::::default(); let Ok(buffer_excerpts) = outline_panel.update(cx, |outline_panel, cx| { let git_store = outline_panel.project.read(cx).git_store().clone(); new_collapsed_entries = outline_panel.collapsed_entries.clone(); @@ -2793,13 +2736,18 @@ impl OutlinePanel { multi_buffer_snapshot.excerpts().fold( HashMap::default(), - |mut 
buffer_excerpts, (excerpt_id, buffer_snapshot, excerpt_range)| { + |mut buffer_excerpts, excerpt_range| { + let Some(buffer_snapshot) = multi_buffer_snapshot + .buffer_for_id(excerpt_range.context.start.buffer_id) + else { + return buffer_excerpts; + }; let buffer_id = buffer_snapshot.remote_id(); let file = File::from_dyn(buffer_snapshot.file()); let entry_id = file.and_then(|file| file.project_entry_id()); let worktree = file.map(|file| file.worktree.read(cx).snapshot()); - let is_new = new_entries.contains(&excerpt_id) - || !outline_panel.excerpts.contains_key(&buffer_id); + let is_new = new_entries.contains(&buffer_id) + || !outline_panel.buffers.contains_key(&buffer_id); let is_folded = active_editor.read(cx).is_buffer_folded(buffer_id, cx); let status = git_store .read(cx) @@ -2813,29 +2761,28 @@ impl OutlinePanel { (is_new, is_folded, Vec::new(), entry_id, worktree, status) }) .2 - .push(excerpt_id); + .push(excerpt_range.clone()); - let outlines = match outline_panel - .excerpts - .get(&buffer_id) - .and_then(|excerpts| excerpts.get(&excerpt_id)) - { - Some(old_excerpt) => match &old_excerpt.outlines { - ExcerptOutlines::Outlines(outlines) => { - ExcerptOutlines::Outlines(outlines.clone()) + new_buffers + .entry(buffer_id) + .or_insert_with(|| { + let outlines = match outline_panel.buffers.get(&buffer_id) { + Some(old_buffer) => match &old_buffer.outlines { + OutlineState::Outlines(outlines) => { + OutlineState::Outlines(outlines.clone()) + } + OutlineState::Invalidated(_) => OutlineState::NotFetched, + OutlineState::NotFetched => OutlineState::NotFetched, + }, + None => OutlineState::NotFetched, + }; + BufferOutlines { + outlines, + excerpts: Vec::new(), } - ExcerptOutlines::Invalidated(_) => ExcerptOutlines::NotFetched, - ExcerptOutlines::NotFetched => ExcerptOutlines::NotFetched, - }, - None => ExcerptOutlines::NotFetched, - }; - new_excerpts.entry(buffer_id).or_default().insert( - excerpt_id, - Excerpt { - range: excerpt_range, - outlines, - }, - ); + 
}) + .excerpts + .push(excerpt_range); buffer_excerpts }, ) @@ -2856,7 +2803,7 @@ impl OutlinePanel { BTreeMap::>::default(); let mut worktree_excerpts = HashMap::< WorktreeId, - HashMap)>, + HashMap>)>, >::default(); let mut external_excerpts = HashMap::default(); @@ -3134,7 +3081,7 @@ impl OutlinePanel { outline_panel .update_in(cx, |outline_panel, window, cx| { outline_panel.new_entries_for_fs_update.clear(); - outline_panel.excerpts = new_excerpts; + outline_panel.buffers = new_buffers; outline_panel.collapsed_entries = new_collapsed_entries; outline_panel.unfolded_dirs = new_unfolded_dirs; outline_panel.fs_entries = new_fs_entries; @@ -3144,7 +3091,7 @@ impl OutlinePanel { // Only update cached entries if we don't have outlines to fetch // If we do have outlines to fetch, let fetch_outdated_outlines handle the update - if outline_panel.excerpt_fetch_ranges(cx).is_empty() { + if outline_panel.buffers_to_fetch().is_empty() { outline_panel.update_cached_entries(debounce, window, cx); } @@ -3192,8 +3139,15 @@ impl OutlinePanel { item_handle: new_active_item.downgrade_item(), active_editor: new_active_editor.downgrade(), }); - self.new_entries_for_fs_update - .extend(new_active_editor.read(cx).buffer().read(cx).excerpt_ids()); + self.new_entries_for_fs_update.extend( + new_active_editor + .read(cx) + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id), + ); self.selected_entry.invalidate(); self.update_fs_entries(new_active_editor, None, window, cx); } @@ -3211,7 +3165,7 @@ impl OutlinePanel { self.fs_entries.clear(); self.fs_entries_depth.clear(); self.fs_children_count.clear(); - self.excerpts.clear(); + self.buffers.clear(); self.cached_entries = Vec::new(); self.selected_entry = SelectedEntry::None; self.pinned = false; @@ -3225,23 +3179,14 @@ impl OutlinePanel { window: &mut Window, cx: &mut Context, ) -> Option { - let selection = editor.update(cx, |editor, cx| { - editor - .selections - 
.newest::(&editor.display_snapshot(cx)) - .head() - }); let editor_snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx)); let multi_buffer = editor.read(cx).buffer(); let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx); - let (excerpt_id, buffer, _) = editor - .read(cx) - .buffer() - .read(cx) - .excerpt_containing(selection, cx)?; - let buffer_id = buffer.read(cx).remote_id(); + let anchor = editor.update(cx, |editor, _| editor.selections.newest_anchor().head()); + let selection_display_point = anchor.to_display_point(&editor_snapshot); + let (anchor, _) = multi_buffer_snapshot.anchor_to_buffer_anchor(anchor)?; - if editor.read(cx).is_buffer_folded(buffer_id, cx) { + if editor.read(cx).is_buffer_folded(anchor.buffer_id, cx) { return self .fs_entries .iter() @@ -3254,14 +3199,12 @@ impl OutlinePanel { | FsEntry::ExternalFile(FsEntryExternalFile { buffer_id: other_buffer_id, .. - }) => buffer_id == *other_buffer_id, + }) => anchor.buffer_id == *other_buffer_id, }) .cloned() .map(PanelEntry::Fs); } - let selection_display_point = selection.to_display_point(&editor_snapshot); - match &self.mode { ItemsDisplayMode::Search(search_state) => search_state .matches @@ -3298,32 +3241,31 @@ impl OutlinePanel { }) }), ItemsDisplayMode::Outline => self.outline_location( - buffer_id, - excerpt_id, + anchor, multi_buffer_snapshot, editor_snapshot, selection_display_point, + cx, ), } } fn outline_location( &self, - buffer_id: BufferId, - excerpt_id: ExcerptId, + selection_anchor: Anchor, multi_buffer_snapshot: editor::MultiBufferSnapshot, editor_snapshot: editor::EditorSnapshot, selection_display_point: DisplayPoint, + cx: &App, ) -> Option { let excerpt_outlines = self - .excerpts - .get(&buffer_id) - .and_then(|excerpts| excerpts.get(&excerpt_id)) + .buffers + .get(&selection_anchor.buffer_id) .into_iter() - .flat_map(|excerpt| excerpt.iter_outlines()) + .flat_map(|buffer| buffer.iter_outlines()) .flat_map(|outline| { let range = multi_buffer_snapshot 
- .anchor_range_in_excerpt(excerpt_id, outline.range.clone())?; + .buffer_anchor_range_to_anchor_range(outline.range.clone())?; Some(( range.start.to_display_point(&editor_snapshot) ..range.end.to_display_point(&editor_snapshot), @@ -3411,16 +3353,16 @@ impl OutlinePanel { .cloned(); let closest_container = match outline_item { - Some(outline) => PanelEntry::Outline(OutlineEntry::Outline(OutlineEntryOutline { - buffer_id, - excerpt_id, - outline, - })), + Some(outline) => PanelEntry::Outline(OutlineEntry::Outline(outline)), None => { self.cached_entries.iter().rev().find_map(|cached_entry| { match &cached_entry.entry { PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { - if excerpt.buffer_id == buffer_id && excerpt.id == excerpt_id { + if excerpt.context.start.buffer_id == selection_anchor.buffer_id + && let Some(buffer_snapshot) = + self.buffer_snapshot_for_id(excerpt.context.start.buffer_id, cx) + && excerpt.contains(&selection_anchor, &buffer_snapshot) + { Some(cached_entry.entry.clone()) } else { None @@ -3430,6 +3372,7 @@ impl OutlinePanel { FsEntry::ExternalFile(FsEntryExternalFile { buffer_id: file_buffer_id, excerpts: file_excerpts, + .. }) | FsEntry::File(FsEntryFile { buffer_id: file_buffer_id, @@ -3437,7 +3380,13 @@ impl OutlinePanel { .. 
}), ) => { - if file_buffer_id == &buffer_id && file_excerpts.contains(&excerpt_id) { + if *file_buffer_id == selection_anchor.buffer_id + && let Some(buffer_snapshot) = + self.buffer_snapshot_for_id(*file_buffer_id, cx) + && file_excerpts.iter().any(|excerpt| { + excerpt.contains(&selection_anchor, &buffer_snapshot) + }) + { Some(cached_entry.entry.clone()) } else { None @@ -3452,18 +3401,17 @@ impl OutlinePanel { } fn fetch_outdated_outlines(&mut self, window: &mut Window, cx: &mut Context) { - let excerpt_fetch_ranges = self.excerpt_fetch_ranges(cx); - if excerpt_fetch_ranges.is_empty() { + let buffers_to_fetch = self.buffers_to_fetch(); + if buffers_to_fetch.is_empty() { return; } let first_update = Arc::new(AtomicBool::new(true)); - for (buffer_id, (_buffer_snapshot, excerpt_ranges)) in excerpt_fetch_ranges { + for buffer_id in buffers_to_fetch { let outline_task = self.active_editor().map(|editor| { editor.update(cx, |editor, cx| editor.buffer_outline_items(buffer_id, cx)) }); - let excerpt_ids = excerpt_ranges.keys().copied().collect::>(); let first_update = first_update.clone(); self.outline_fetch_tasks.insert( @@ -3498,40 +3446,26 @@ impl OutlinePanel { Some(UPDATE_DEBOUNCE) }; - for excerpt_id in &excerpt_ids { - if let Some(excerpt) = outline_panel - .excerpts - .entry(buffer_id) - .or_default() - .get_mut(excerpt_id) + if let Some(buffer) = outline_panel.buffers.get_mut(&buffer_id) { + buffer.outlines = OutlineState::Outlines(fetched_outlines.clone()); + + if let Some(default_depth) = pending_default_depth + && let OutlineState::Outlines(outlines) = &buffer.outlines { - excerpt.outlines = - ExcerptOutlines::Outlines(fetched_outlines.clone()); - - if let Some(default_depth) = pending_default_depth - && let ExcerptOutlines::Outlines(outlines) = - &excerpt.outlines - { - outlines - .iter() - .filter(|outline| { - (default_depth == 0 - || outline.depth >= default_depth) - && outlines_with_children.contains(&( - outline.range.clone(), - outline.depth, - )) - 
}) - .for_each(|outline| { - outline_panel.collapsed_entries.insert( - CollapsedEntry::Outline( - buffer_id, - *excerpt_id, - outline.range.clone(), - ), - ); - }); - } + outlines + .iter() + .filter(|outline| { + (default_depth == 0 || outline.depth >= default_depth) + && outlines_with_children.contains(&( + outline.range.clone(), + outline.depth, + )) + }) + .for_each(|outline| { + outline_panel.collapsed_entries.insert( + CollapsedEntry::Outline(outline.range.clone()), + ); + }); } } @@ -3548,73 +3482,35 @@ impl OutlinePanel { .is_some_and(|active_editor| active_editor.read(cx).buffer().read(cx).is_singleton()) } - fn invalidate_outlines(&mut self, ids: &[ExcerptId]) { + fn invalidate_outlines(&mut self, ids: &[BufferId]) { self.outline_fetch_tasks.clear(); let mut ids = ids.iter().collect::>(); - for excerpts in self.excerpts.values_mut() { - ids.retain(|id| { - if let Some(excerpt) = excerpts.get_mut(id) { - excerpt.invalidate_outlines(); - false - } else { - true - } - }); + for (buffer_id, buffer) in self.buffers.iter_mut() { + if ids.remove(&buffer_id) { + buffer.invalidate_outlines(); + } if ids.is_empty() { break; } } } - fn excerpt_fetch_ranges( - &self, - cx: &App, - ) -> HashMap< - BufferId, - ( - BufferSnapshot, - HashMap>, - ), - > { + fn buffers_to_fetch(&self) -> HashSet { self.fs_entries .iter() - .fold(HashMap::default(), |mut excerpts_to_fetch, fs_entry| { + .fold(HashSet::default(), |mut buffers_to_fetch, fs_entry| { match fs_entry { - FsEntry::File(FsEntryFile { - buffer_id, - excerpts: file_excerpts, - .. 
- }) - | FsEntry::ExternalFile(FsEntryExternalFile { - buffer_id, - excerpts: file_excerpts, - }) => { - let excerpts = self.excerpts.get(buffer_id); - for &file_excerpt in file_excerpts { - if let Some(excerpt) = excerpts - .and_then(|excerpts| excerpts.get(&file_excerpt)) - .filter(|excerpt| excerpt.should_fetch_outlines()) - { - match excerpts_to_fetch.entry(*buffer_id) { - hash_map::Entry::Occupied(mut o) => { - o.get_mut().1.insert(file_excerpt, excerpt.range.clone()); - } - hash_map::Entry::Vacant(v) => { - if let Some(buffer_snapshot) = - self.buffer_snapshot_for_id(*buffer_id, cx) - { - v.insert((buffer_snapshot, HashMap::default())) - .1 - .insert(file_excerpt, excerpt.range.clone()); - } - } - } - } + FsEntry::File(FsEntryFile { buffer_id, .. }) + | FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }) => { + if let Some(buffer) = self.buffers.get(buffer_id) + && buffer.should_fetch_outlines() + { + buffers_to_fetch.insert(*buffer_id); } } FsEntry::Directory(..) => {} } - excerpts_to_fetch + buffers_to_fetch }) } @@ -4012,13 +3908,12 @@ impl OutlinePanel { } else { None }; - if let Some((buffer_id, entry_excerpts)) = excerpts_to_consider + if let Some((buffer_id, _entry_excerpts)) = excerpts_to_consider && !active_editor.read(cx).is_buffer_folded(buffer_id, cx) { - outline_panel.add_excerpt_entries( + outline_panel.add_buffer_entries( &mut generation_state, buffer_id, - entry_excerpts, depth, track_matches, is_singleton, @@ -4166,7 +4061,7 @@ impl OutlinePanel { } PanelEntry::Outline(OutlineEntry::Outline(outline_entry)) => state .match_candidates - .push(StringMatchCandidate::new(id, &outline_entry.outline.text)), + .push(StringMatchCandidate::new(id, &outline_entry.text)), PanelEntry::Outline(OutlineEntry::Excerpt(_)) => {} PanelEntry::Search(new_search_entry) => { if let Some(search_data) = new_search_entry.render_data.get() { @@ -4333,131 +4228,118 @@ impl OutlinePanel { update_cached_entries } - fn add_excerpt_entries( + fn add_buffer_entries( 
&mut self, state: &mut GenerationState, buffer_id: BufferId, - entries_to_add: &[ExcerptId], parent_depth: usize, track_matches: bool, is_singleton: bool, query: Option<&str>, cx: &mut Context, ) { - if let Some(excerpts) = self.excerpts.get(&buffer_id) { - let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx); + let Some(buffer) = self.buffers.get(&buffer_id) else { + return; + }; - for &excerpt_id in entries_to_add { - let Some(excerpt) = excerpts.get(&excerpt_id) else { - continue; - }; - let excerpt_depth = parent_depth + 1; - self.push_entry( - state, - track_matches, - PanelEntry::Outline(OutlineEntry::Excerpt(OutlineEntryExcerpt { - buffer_id, - id: excerpt_id, - range: excerpt.range.clone(), - })), - excerpt_depth, - cx, - ); + let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx); - let mut outline_base_depth = excerpt_depth + 1; - if is_singleton { - outline_base_depth = 0; - state.clear(); - } else if query.is_none() - && self - .collapsed_entries - .contains(&CollapsedEntry::Excerpt(buffer_id, excerpt_id)) - { - continue; - } + for excerpt in &buffer.excerpts { + let excerpt_depth = parent_depth + 1; + self.push_entry( + state, + track_matches, + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt.clone())), + excerpt_depth, + cx, + ); + + let mut outline_base_depth = excerpt_depth + 1; + if is_singleton { + outline_base_depth = 0; + state.clear(); + } else if query.is_none() + && self + .collapsed_entries + .contains(&CollapsedEntry::Excerpt(excerpt.clone())) + { + continue; + } - let mut last_depth_at_level: Vec>> = vec![None; 10]; + let mut last_depth_at_level: Vec>> = vec![None; 10]; - let all_outlines: Vec<_> = excerpt.iter_outlines().collect(); + let all_outlines: Vec<_> = buffer.iter_outlines().collect(); - let mut outline_has_children = HashMap::default(); - let mut visible_outlines = Vec::new(); - let mut collapsed_state: Option<(usize, Range)> = None; + let mut outline_has_children = HashMap::default(); + let mut 
visible_outlines = Vec::new(); + let mut collapsed_state: Option<(usize, Range)> = None; - for (i, &outline) in all_outlines.iter().enumerate() { - let has_children = all_outlines - .get(i + 1) - .map(|next| next.depth > outline.depth) - .unwrap_or(false); + for (i, &outline) in all_outlines.iter().enumerate() { + let has_children = all_outlines + .get(i + 1) + .map(|next| next.depth > outline.depth) + .unwrap_or(false); - outline_has_children - .insert((outline.range.clone(), outline.depth), has_children); + outline_has_children.insert((outline.range.clone(), outline.depth), has_children); - let mut should_include = true; + let mut should_include = true; - if let Some((collapsed_depth, collapsed_range)) = &collapsed_state { - if outline.depth <= *collapsed_depth { + if let Some((collapsed_depth, collapsed_range)) = &collapsed_state { + if outline.depth <= *collapsed_depth { + collapsed_state = None; + } else if let Some(buffer_snapshot) = buffer_snapshot.as_ref() { + let outline_start = outline.range.start; + if outline_start + .cmp(&collapsed_range.start, buffer_snapshot) + .is_ge() + && outline_start + .cmp(&collapsed_range.end, buffer_snapshot) + .is_lt() + { + should_include = false; // Skip - inside collapsed range + } else { collapsed_state = None; - } else if let Some(buffer_snapshot) = buffer_snapshot.as_ref() { - let outline_start = outline.range.start; - if outline_start - .cmp(&collapsed_range.start, buffer_snapshot) - .is_ge() - && outline_start - .cmp(&collapsed_range.end, buffer_snapshot) - .is_lt() - { - should_include = false; // Skip - inside collapsed range - } else { - collapsed_state = None; - } } } + } - // Check if this outline itself is collapsed - if should_include - && self.collapsed_entries.contains(&CollapsedEntry::Outline( - buffer_id, - excerpt_id, - outline.range.clone(), - )) - { - collapsed_state = Some((outline.depth, outline.range.clone())); - } + // Check if this outline itself is collapsed + if should_include + && self + 
.collapsed_entries + .contains(&CollapsedEntry::Outline(outline.range.clone())) + { + collapsed_state = Some((outline.depth, outline.range.clone())); + } - if should_include { - visible_outlines.push(outline); - } + if should_include { + visible_outlines.push(outline); } + } - self.outline_children_cache - .entry(buffer_id) - .or_default() - .extend(outline_has_children); + self.outline_children_cache + .entry(buffer_id) + .or_default() + .extend(outline_has_children); - for outline in visible_outlines { - let outline_entry = OutlineEntryOutline { - buffer_id, - excerpt_id, - outline: outline.clone(), - }; + for outline in visible_outlines { + let outline_entry = outline.clone(); - if outline.depth < last_depth_at_level.len() { - last_depth_at_level[outline.depth] = Some(outline.range.clone()); - // Clear deeper levels when we go back to a shallower depth - for d in (outline.depth + 1)..last_depth_at_level.len() { - last_depth_at_level[d] = None; - } + if outline.depth < last_depth_at_level.len() { + last_depth_at_level[outline.depth] = Some(outline.range.clone()); + // Clear deeper levels when we go back to a shallower depth + for d in (outline.depth + 1)..last_depth_at_level.len() { + last_depth_at_level[d] = None; } - - self.push_entry( - state, - track_matches, - PanelEntry::Outline(OutlineEntry::Outline(outline_entry)), - outline_base_depth + outline.depth, - cx, - ); } + + self.push_entry( + state, + track_matches, + PanelEntry::Outline(OutlineEntry::Outline(outline_entry)), + outline_base_depth + outline.depth, + cx, + ); } } } @@ -4483,32 +4365,37 @@ impl OutlinePanel { FsEntry::File(file) => &file.excerpts, } .iter() - .copied() + .cloned() .collect::>(); let depth = if is_singleton { 0 } else { parent_depth + 1 }; - let new_search_matches = search_state - .matches - .iter() - .filter(|(match_range, _)| { - related_excerpts.contains(&match_range.start.excerpt_id) - || related_excerpts.contains(&match_range.end.excerpt_id) - }) - .filter(|(match_range, _)| 
{ - let editor = active_editor.read(cx); - let snapshot = editor.buffer().read(cx).snapshot(cx); - if let Some(buffer_id) = snapshot.buffer_id_for_anchor(match_range.start) - && editor.is_buffer_folded(buffer_id, cx) - { - return false; - } - if let Some(buffer_id) = snapshot.buffer_id_for_anchor(match_range.end) - && editor.is_buffer_folded(buffer_id, cx) - { + let new_search_matches = search_state.matches.iter().filter(|(match_range, _)| { + let editor = active_editor.read(cx); + let snapshot = editor.buffer().read(cx).snapshot(cx); + if !related_excerpts.iter().any(|excerpt| { + let (Some(start), Some(end)) = ( + snapshot.anchor_in_buffer(excerpt.context.start), + snapshot.anchor_in_buffer(excerpt.context.end), + ) else { return false; - } - true - }); + }; + let excerpt_range = start..end; + excerpt_range.overlaps(match_range, &snapshot) + }) { + return false; + }; + if let Some((buffer_anchor, _)) = snapshot.anchor_to_buffer_anchor(match_range.start) + && editor.is_buffer_folded(buffer_anchor.buffer_id, cx) + { + return false; + } + if let Some((buffer_anchor, _)) = snapshot.anchor_to_buffer_anchor(match_range.end) + && editor.is_buffer_folded(buffer_anchor.buffer_id, cx) + { + return false; + } + true + }); let new_search_entries = new_search_matches .map(|(match_range, search_data)| SearchEntry { @@ -4626,10 +4513,10 @@ impl OutlinePanel { + folded_dirs.entries.len().saturating_sub(1) * "/".len() } PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self - .excerpt_label(excerpt.buffer_id, &excerpt.range, cx) + .excerpt_label(&excerpt, cx) .map(|label| label.len()) .unwrap_or_default(), - PanelEntry::Outline(OutlineEntry::Outline(entry)) => entry.outline.text.len(), + PanelEntry::Outline(OutlineEntry::Outline(entry)) => entry.text.len(), PanelEntry::Search(search) => search .render_data .get() @@ -5212,31 +5099,21 @@ fn subscribe_for_editor_events( outline_panel.reveal_entry_for_selection(editor.clone(), window, cx); cx.notify(); } - 
EditorEvent::ExcerptsAdded { excerpts, .. } => { + EditorEvent::BuffersRemoved { removed_buffer_ids } => { outline_panel - .new_entries_for_fs_update - .extend(excerpts.iter().map(|&(excerpt_id, _)| excerpt_id)); + .buffers + .retain(|buffer_id, _| !removed_buffer_ids.contains(buffer_id)); outline_panel.update_fs_entries(editor.clone(), debounce, window, cx); } - EditorEvent::ExcerptsRemoved { ids, .. } => { - let mut ids = ids.iter().collect::>(); - for excerpts in outline_panel.excerpts.values_mut() { - excerpts.retain(|excerpt_id, _| !ids.remove(excerpt_id)); - if ids.is_empty() { - break; - } - } + EditorEvent::BufferRangesUpdated { buffer, .. } => { + outline_panel + .new_entries_for_fs_update + .insert(buffer.read(cx).remote_id()); + outline_panel.invalidate_outlines(&[buffer.read(cx).remote_id()]); outline_panel.update_fs_entries(editor.clone(), debounce, window, cx); } - EditorEvent::ExcerptsExpanded { ids } => { - outline_panel.invalidate_outlines(ids); - let update_cached_items = outline_panel.update_non_fs_items(window, cx); - if update_cached_items { - outline_panel.update_cached_entries(Some(UPDATE_DEBOUNCE), window, cx); - } - } - EditorEvent::ExcerptsEdited { ids } => { - outline_panel.invalidate_outlines(ids); + EditorEvent::BuffersEdited { buffer_ids } => { + outline_panel.invalidate_outlines(buffer_ids); let update_cached_items = outline_panel.update_non_fs_items(window, cx); if update_cached_items { outline_panel.update_cached_entries(Some(UPDATE_DEBOUNCE), window, cx); @@ -5250,29 +5127,20 @@ fn subscribe_for_editor_events( outline_panel.new_entries_for_fs_update.extend( ids.iter() .filter(|id| { - outline_panel - .excerpts - .iter() - .find_map(|(buffer_id, excerpts)| { - if excerpts.contains_key(id) { - ignore_selections_change |= outline_panel - .preserve_selection_on_buffer_fold_toggles - .remove(buffer_id); - Some(buffer_id) - } else { - None - } - }) - .map(|buffer_id| { - if editor.read(cx).is_buffer_folded(*buffer_id, cx) { - 
latest_folded_buffer_id = Some(*buffer_id); - false - } else { - latest_unfolded_buffer_id = Some(*buffer_id); - true - } - }) - .unwrap_or(true) + if outline_panel.buffers.contains_key(&id) { + ignore_selections_change |= outline_panel + .preserve_selection_on_buffer_fold_toggles + .remove(&id); + if editor.read(cx).is_buffer_folded(**id, cx) { + latest_folded_buffer_id = Some(**id); + false + } else { + latest_unfolded_buffer_id = Some(**id); + true + } + } else { + false + } }) .copied(), ); @@ -5308,10 +5176,8 @@ fn subscribe_for_editor_events( outline_panel.update_fs_entries(editor.clone(), debounce, window, cx); } EditorEvent::Reparsed(buffer_id) => { - if let Some(excerpts) = outline_panel.excerpts.get_mut(buffer_id) { - for excerpt in excerpts.values_mut() { - excerpt.invalidate_outlines(); - } + if let Some(buffer) = outline_panel.buffers.get_mut(buffer_id) { + buffer.invalidate_outlines(); } let update_cached_items = outline_panel.update_non_fs_items(window, cx); if update_cached_items { @@ -5319,10 +5185,8 @@ fn subscribe_for_editor_events( } } EditorEvent::OutlineSymbolsChanged => { - for excerpts in outline_panel.excerpts.values_mut() { - for excerpt in excerpts.values_mut() { - excerpt.invalidate_outlines(); - } + for buffer in outline_panel.buffers.values_mut() { + buffer.invalidate_outlines(); } if matches!( outline_panel.selected_entry(), @@ -6875,7 +6739,7 @@ outline: struct OutlineEntryExcerpt PanelEntry::Outline(outline_entry) => match outline_entry { OutlineEntry::Excerpt(_) => continue, OutlineEntry::Outline(outline_entry) => { - format!("outline: {}", outline_entry.outline.text) + format!("outline: {}", outline_entry.text) } }, PanelEntry::Search(search_entry) => { @@ -7243,10 +7107,9 @@ outline: fn main" PanelEntry::Outline(OutlineEntry::Outline(outline)) if panel .outline_children_cache - .get(&outline.buffer_id) + .get(&outline.range.start.buffer_id) .and_then(|children_map| { - let key = - (outline.outline.range.clone(), 
outline.outline.depth); + let key = (outline.range.clone(), outline.depth); children_map.get(&key) }) .copied() @@ -7333,9 +7196,9 @@ outline: fn main" PanelEntry::Outline(OutlineEntry::Outline(outline)) if panel .outline_children_cache - .get(&outline.buffer_id) + .get(&outline.range.start.buffer_id) .and_then(|children_map| { - let key = (outline.outline.range.clone(), outline.outline.depth); + let key = (outline.range.clone(), outline.depth); children_map.get(&key) }) .copied() @@ -7711,10 +7574,9 @@ outline: fn main" PanelEntry::Outline(OutlineEntry::Outline(outline)) if panel .outline_children_cache - .get(&outline.buffer_id) + .get(&outline.range.start.buffer_id) .and_then(|children_map| { - let key = - (outline.outline.range.clone(), outline.outline.depth); + let key = (outline.range.clone(), outline.depth); children_map.get(&key) }) .copied() @@ -8173,7 +8035,7 @@ outline: struct Foo <==== selected outline_panel.read_with(cx, |panel, _cx| { panel.selected_entry().and_then(|entry| match entry { PanelEntry::Outline(OutlineEntry::Outline(outline)) => { - Some(outline.outline.text.clone()) + Some(outline.text.clone()) } _ => None, }) diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 8fd18d784dd37d845ffa76c9483b30ae77577a03..8f0c7e80d1abc5c49fc4e5de9346eba35f250f14 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -4706,12 +4706,11 @@ impl Repository { .commit_oid_to_index .insert(commit_data.sha, graph_data.commit_data.len()); graph_data.commit_data.push(commit_data); - - cx.emit(RepositoryEvent::GraphEvent( - graph_data_key.clone(), - GitGraphEvent::CountUpdated(graph_data.commit_data.len()), - )); } + cx.emit(RepositoryEvent::GraphEvent( + graph_data_key.clone(), + GitGraphEvent::CountUpdated(graph_data.commit_data.len()), + )); }); match &graph_data { diff --git a/crates/project/src/lsp_store/semantic_tokens.rs b/crates/project/src/lsp_store/semantic_tokens.rs index 
7865e8f20ca0e4dbc9d06c2ffd808fe4090634ed..0f01c6350ece89569535dca571c28597ff77384b 100644 --- a/crates/project/src/lsp_store/semantic_tokens.rs +++ b/crates/project/src/lsp_store/semantic_tokens.rs @@ -585,7 +585,7 @@ async fn raw_to_buffer_semantic_tokens( } Some(BufferSemanticToken { - range: buffer_snapshot.anchor_range_around(start..end), + range: buffer_snapshot.anchor_range_inside(start..end), token_type: token.token_type, token_modifiers: token.token_modifiers, }) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 96b82a16930543028b7588a843433c6a70bf34e6..0c1b8942cc26976d51d406bfa9f67da714110623 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1032,6 +1032,8 @@ impl DirectoryLister { } } +pub const CURRENT_PROJECT_FEATURES: &[&str] = &["new-style-anchors"]; + #[cfg(feature = "test-support")] pub const DEFAULT_COMPLETION_CONTEXT: CompletionContext = CompletionContext { trigger_kind: lsp::CompletionTriggerKind::INVOKED, @@ -1644,6 +1646,10 @@ impl Project { project_id: remote_id, committer_email: committer.email, committer_name: committer.name, + features: CURRENT_PROJECT_FEATURES + .iter() + .map(|s| s.to_string()) + .collect(), }) .await?; Self::from_join_project_response( diff --git a/crates/project/tests/integration/project_tests.rs b/crates/project/tests/integration/project_tests.rs index 8603a904acd2c0cd52fcdc9d102be0f2efeb0636..6601b0744aa770917390e03b16ae93d3bc7f637f 100644 --- a/crates/project/tests/integration/project_tests.rs +++ b/crates/project/tests/integration/project_tests.rs @@ -1771,7 +1771,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { DiagnosticSet::from_sorted_entries( vec![DiagnosticEntry { diagnostic: Default::default(), - range: Anchor::MIN..Anchor::MAX, + range: Anchor::min_max_range_for_buffer(buffer.remote_id()), }], &buffer.snapshot(), ), @@ -8525,9 +8525,10 @@ async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) { 
unstaged_diff.update(cx, |unstaged_diff, cx| { let snapshot = buffer.read(cx).snapshot(); assert_hunks( - unstaged_diff - .snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + unstaged_diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &unstaged_diff.base_text(cx).text(), &[( @@ -8616,8 +8617,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { diff_1.update(cx, |diff, cx| { let snapshot = buffer_1.read(cx).snapshot(); assert_hunks( - diff.snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &diff.base_text_string(cx).unwrap(), &[ @@ -8658,8 +8661,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { diff_1.update(cx, |diff, cx| { let snapshot = buffer_1.read(cx).snapshot(); assert_hunks( - diff.snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &diff.base_text(cx).text(), &[( @@ -8688,8 +8693,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { diff_2.update(cx, |diff, cx| { let snapshot = buffer_2.read(cx).snapshot(); assert_hunks( - diff.snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &diff.base_text_string(cx).unwrap(), &[( @@ -8710,8 +8717,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { diff_2.update(cx, |diff, cx| { let snapshot = buffer_2.read(cx).snapshot(); assert_hunks( - diff.snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + 
diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &diff.base_text_string(cx).unwrap(), &[( diff --git a/crates/proto/proto/buffer.proto b/crates/proto/proto/buffer.proto index 01f4bda9e9f450ed65d4f6cb8dc9abc7c35451dd..69bd844ee743ef9038beb25b98b9b31ffb130b2c 100644 --- a/crates/proto/proto/buffer.proto +++ b/crates/proto/proto/buffer.proto @@ -212,10 +212,15 @@ message Selection { } message EditorAnchor { - uint64 excerpt_id = 1; + optional uint64 excerpt_id = 1; Anchor anchor = 2; } +message PathKey { + optional uint64 sort_prefix = 1; + string path = 2; +} + enum CursorShape { CursorBar = 0; CursorBlock = 1; diff --git a/crates/proto/proto/call.proto b/crates/proto/proto/call.proto index 4d2bf62eade7aaf633ea899cd106e8d9cb3be25d..aa964c64cd04db71a71ac081e034be10cbf95048 100644 --- a/crates/proto/proto/call.proto +++ b/crates/proto/proto/call.proto @@ -174,6 +174,7 @@ message ShareProject { reserved 3; bool is_ssh_project = 4; optional bool windows_paths = 5; + repeated string features = 6; } message ShareProjectResponse { @@ -193,6 +194,7 @@ message JoinProject { uint64 project_id = 1; optional string committer_email = 2; optional string committer_name = 3; + repeated string features = 4; } message JoinProjectResponse { @@ -204,6 +206,7 @@ message JoinProjectResponse { repeated string language_server_capabilities = 8; ChannelRole role = 6; bool windows_paths = 9; + repeated string features = 10; reserved 7; } @@ -359,6 +362,8 @@ message UpdateView { reserved 7; double scroll_x = 8; double scroll_y = 9; + repeated PathExcerpts updated_paths = 10; + repeated uint64 deleted_buffers = 11; } } @@ -385,6 +390,7 @@ message View { reserved 8; double scroll_x = 9; double scroll_y = 10; + repeated PathExcerpts path_excerpts = 11; } message ChannelView { @@ -407,6 +413,19 @@ message Excerpt { Anchor primary_end = 6; } +message ExcerptRange { + Anchor context_start = 1; + Anchor context_end = 2; 
+ Anchor primary_start = 3; + Anchor primary_end = 4; +} + +message PathExcerpts { + PathKey path_key = 1; + uint64 buffer_id = 2; + repeated ExcerptRange ranges = 3; +} + message Contact { uint64 user_id = 1; bool online = 2; diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 22987f6c56669e1972a9bfc940449991d9f55642..7194e8868fd2a0015edd5c18c96f2fe164206fb7 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -2068,9 +2068,16 @@ mod tests { ) .await; + // Open a file path (not a directory) so that the worktree root is a + // file. This means `active_project_directory` returns `None`, which + // causes `DevContainerContext::from_workspace` to return `None`, + // preventing `open_dev_container` from spawning real I/O (docker + // commands, shell environment loading) that is incompatible with the + // test scheduler. The modal is still created and the re-entrancy + // guard that this test validates is still exercised. 
cx.update(|cx| { open_paths( - &[PathBuf::from(path!("/project"))], + &[PathBuf::from(path!("/project/src/main.rs"))], app_state, workspace::OpenOptions::default(), cx, diff --git a/crates/recent_projects/src/remote_connections.rs b/crates/recent_projects/src/remote_connections.rs index 3611b55ec65c94695e4e8835fa7afe8badc80a29..869568edfcdbe9260a13aaa5c0ed7eed6b87e675 100644 --- a/crates/recent_projects/src/remote_connections.rs +++ b/crates/recent_projects/src/remote_connections.rs @@ -96,6 +96,7 @@ impl From for RemoteConnectionOptions { container_id: conn.container_id, upload_binary_over_docker_exec: false, use_podman: conn.use_podman, + remote_env: conn.remote_env, }) } } diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index 26592a8035d50caa4e267a5478d5aceb9fba6e3e..404b0673ab8cf220385d1a0ce41a40156d469a01 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -11,6 +11,7 @@ use dev_container::{ }; use editor::Editor; +use extension_host::ExtensionStore; use futures::{FutureExt, channel::oneshot, future::Shared}; use gpui::{ Action, AnyElement, App, ClickEvent, ClipboardItem, Context, DismissEvent, Entity, @@ -41,6 +42,7 @@ use std::{ atomic::{self, AtomicUsize}, }, }; + use ui::{ CommonAnimationExt, IconButtonShape, KeyBinding, List, ListItem, ListSeparator, Modal, ModalFooter, ModalHeader, Navigable, NavigableEntry, Section, Tooltip, WithScrollbar, @@ -1854,10 +1856,13 @@ impl RemoteServerProjects { ) { let replace_window = window.window_handle().downcast::(); let app_state = Arc::downgrade(&app_state); + cx.spawn_in(window, async move |entity, cx| { - let (connection, starting_dir) = - match start_dev_container_with_config(context, config).await { - Ok((c, s)) => (Connection::DevContainer(c), s), + let environment = context.environment(cx).await; + + let (dev_container_connection, starting_dir) = + match start_dev_container_with_config(context, 
config, environment).await { + Ok((c, s)) => (c, s), Err(e) => { log::error!("Failed to start dev container: {:?}", e); cx.prompt( @@ -1881,6 +1886,16 @@ impl RemoteServerProjects { return; } }; + cx.update(|_, cx| { + ExtensionStore::global(cx).update(cx, |this, cx| { + for extension in &dev_container_connection.extension_ids { + log::info!("Installing extension {extension} from devcontainer"); + this.install_latest_extension(Arc::from(extension.clone()), cx); + } + }) + }) + .log_err(); + entity .update(cx, |_, cx| { cx.emit(DismissEvent); @@ -1891,7 +1906,7 @@ impl RemoteServerProjects { return; }; let result = open_remote_project( - connection.into(), + Connection::DevContainer(dev_container_connection).into(), vec![starting_dir].into_iter().map(PathBuf::from).collect(), app_state, OpenOptions { diff --git a/crates/remote/src/transport/docker.rs b/crates/remote/src/transport/docker.rs index 74076b58e35bd1ea7759927bad255925e7f7d9b9..2b935e50fa49054a2668a71d30818fdd2fb57b1d 100644 --- a/crates/remote/src/transport/docker.rs +++ b/crates/remote/src/transport/docker.rs @@ -6,6 +6,7 @@ use collections::HashMap; use parking_lot::Mutex; use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use semver::Version as SemanticVersion; +use std::collections::BTreeMap; use std::time::Instant; use std::{ path::{Path, PathBuf}, @@ -36,6 +37,7 @@ pub struct DockerConnectionOptions { pub remote_user: String, pub upload_binary_over_docker_exec: bool, pub use_podman: bool, + pub remote_env: BTreeMap, } pub(crate) struct DockerExecConnection { @@ -499,10 +501,14 @@ impl DockerExecConnection { args.push("-u".to_string()); args.push(self.connection_options.remote_user.clone()); + for (k, v) in self.connection_options.remote_env.iter() { + args.push("-e".to_string()); + args.push(format!("{k}={v}")); + } + for (k, v) in env.iter() { args.push("-e".to_string()); - let env_declaration = format!("{}={}", k, v); - args.push(env_declaration); + args.push(format!("{k}={v}")); } 
args.push(self.connection_options.container_id.clone()); @@ -632,6 +638,11 @@ impl RemoteConnection for DockerExecConnection { }; let mut docker_args = vec!["exec".to_string()]; + + for (k, v) in self.connection_options.remote_env.iter() { + docker_args.push("-e".to_string()); + docker_args.push(format!("{k}={v}")); + } for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] { if let Some(value) = std::env::var(env_var).ok() { docker_args.push("-e".to_string()); @@ -768,9 +779,14 @@ impl RemoteConnection for DockerExecConnection { docker_args.push(parsed_working_dir); } + for (k, v) in self.connection_options.remote_env.iter() { + docker_args.push("-e".to_string()); + docker_args.push(format!("{k}={v}")); + } + for (k, v) in env.iter() { docker_args.push("-e".to_string()); - docker_args.push(format!("{}={}", k, v)); + docker_args.push(format!("{k}={v}")); } match interactive { diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index cab8e20cd22e1f4155232f36416be77d4f2ca24d..93fbab59a6f1b9da0cb9faf0657fc4a1c5f679bd 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -3550,7 +3550,16 @@ mod tests { // Manually unfold one buffer (simulating a chevron click) let first_buffer_id = editor.read_with(cx, |editor, cx| { - editor.buffer().read(cx).excerpt_buffer_ids()[0] + editor + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .nth(0) + .unwrap() + .context + .start + .buffer_id }); editor.update_in(cx, |editor, _window, cx| { editor.unfold_buffer(first_buffer_id, cx); @@ -3564,7 +3573,16 @@ mod tests { // Manually unfold the second buffer too let second_buffer_id = editor.read_with(cx, |editor, cx| { - editor.buffer().read(cx).excerpt_buffer_ids()[1] + editor + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .nth(1) + .unwrap() + .context + .start + .buffer_id }); editor.update_in(cx, |editor, _window, cx| { editor.unfold_buffer(second_buffer_id, cx); diff --git 
a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 991f8d1076a985e1413b0045aa42d424f094cd9c..1bccf1ae52fb2c52a8d01e53aabb1b3ff5c7c16f 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -11,8 +11,8 @@ use crate::{ use anyhow::Context as _; use collections::HashMap; use editor::{ - Anchor, Editor, EditorEvent, EditorSettings, ExcerptId, MAX_TAB_TITLE_LEN, MultiBuffer, - PathKey, SelectionEffects, + Anchor, Editor, EditorEvent, EditorSettings, MAX_TAB_TITLE_LEN, MultiBuffer, PathKey, + SelectionEffects, actions::{Backtab, FoldAll, SelectAll, Tab, UnfoldAll}, items::active_match_index, multibuffer_context_lines, @@ -342,41 +342,32 @@ impl ProjectSearch { } fn remove_deleted_buffers(&mut self, cx: &mut Context) { - let (deleted_paths, removed_excerpt_ids) = { - let excerpts = self.excerpts.read(cx); - let deleted_paths: Vec = excerpts - .paths() - .filter(|path| { - excerpts.buffer_for_path(path, cx).is_some_and(|buffer| { - buffer - .read(cx) - .file() - .is_some_and(|file| file.disk_state().is_deleted()) - }) - }) - .cloned() - .collect(); - - let removed_excerpt_ids: collections::HashSet = deleted_paths - .iter() - .flat_map(|path| excerpts.excerpts_for_path(path)) - .collect(); - - (deleted_paths, removed_excerpt_ids) - }; + let deleted_buffer_ids = self + .excerpts + .read(cx) + .all_buffers_iter() + .filter(|buffer| { + buffer + .read(cx) + .file() + .is_some_and(|file| file.disk_state().is_deleted()) + }) + .map(|buffer| buffer.read(cx).remote_id()) + .collect::>(); - if deleted_paths.is_empty() { + if deleted_buffer_ids.is_empty() { return; } - self.excerpts.update(cx, |excerpts, cx| { - for path in deleted_paths { - excerpts.remove_excerpts_for_path(path, cx); + let snapshot = self.excerpts.update(cx, |excerpts, cx| { + for buffer_id in deleted_buffer_ids { + excerpts.remove_excerpts_for_buffer(buffer_id, cx); } + excerpts.snapshot(cx) }); self.match_ranges - .retain(|range| 
!removed_excerpt_ids.contains(&range.start.excerpt_id)); + .retain(|range| snapshot.anchor_to_buffer_anchor(range.start).is_some()); cx.notify(); } @@ -2990,7 +2981,13 @@ pub mod tests { .read(cx) .buffer() .read(cx) - .excerpt_buffer_ids()[0] + .snapshot(cx) + .excerpts() + .next() + .unwrap() + .context + .start + .buffer_id }) .expect("should read buffer ids"); diff --git a/crates/settings_content/src/agent.rs b/crates/settings_content/src/agent.rs index f9d3376a26b8d84d89e563b21a969bfca68ee2f7..7ec6a6b5bbdee57cbe75c13d1abe5277ac4f1825 100644 --- a/crates/settings_content/src/agent.rs +++ b/crates/settings_content/src/agent.rs @@ -81,11 +81,14 @@ pub enum SidebarSide { )] #[serde(rename_all = "snake_case")] pub enum ThinkingBlockDisplay { + /// Thinking blocks fully expand during streaming, then auto-collapse + /// when the model finishes thinking. Users can re-expand after collapse. + #[default] + Auto, /// Thinking blocks auto-expand with a height constraint during streaming, /// then remain in their constrained state when complete. Users can click /// to fully expand or collapse. - #[default] - Automatic, + Preview, /// Thinking blocks are always fully expanded by default (no height constraint). AlwaysExpanded, /// Thinking blocks are always collapsed by default. @@ -156,10 +159,10 @@ pub struct AgentSettingsContent { /// /// Default: "primary_screen" pub notify_when_agent_waiting: Option, - /// Whether to play a sound when the agent has either completed its response, or needs user input. + /// When to play a sound when the agent has either completed its response, or needs user input. /// - /// Default: false - pub play_sound_when_agent_done: Option, + /// Default: never + pub play_sound_when_agent_done: Option, /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane. 
/// /// Default: true @@ -344,6 +347,37 @@ pub enum NotifyWhenAgentWaiting { Never, } +#[derive( + Copy, + Clone, + Default, + Debug, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + PartialEq, + strum::VariantArray, + strum::VariantNames, +)] +#[serde(rename_all = "snake_case")] +pub enum PlaySoundWhenAgentDone { + #[default] + Never, + WhenHidden, + Always, +} + +impl PlaySoundWhenAgentDone { + pub fn should_play(&self, visible: bool) -> bool { + match self { + PlaySoundWhenAgentDone::Never => false, + PlaySoundWhenAgentDone::WhenHidden => !visible, + PlaySoundWhenAgentDone::Always => true, + } + } +} + #[with_fallible_options] #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq)] pub struct LanguageModelSelection { diff --git a/crates/settings_content/src/settings_content.rs b/crates/settings_content/src/settings_content.rs index 6e0021d6c49d80628206151545476ffcd644516a..f8c64191dfe2602744e783f6d52484c45a7756d2 100644 --- a/crates/settings_content/src/settings_content.rs +++ b/crates/settings_content/src/settings_content.rs @@ -65,7 +65,8 @@ macro_rules! 
settings_overrides { } } } -use std::collections::BTreeSet; +use std::collections::{BTreeMap, BTreeSet}; +use std::hash::Hash; use std::sync::Arc; pub use util::serde::default_true; @@ -1023,6 +1024,8 @@ pub struct DevContainerConnection { pub remote_user: String, pub container_id: String, pub use_podman: bool, + pub extension_ids: Vec, + pub remote_env: BTreeMap, } #[with_fallible_options] diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 08a597dc992913e144ba70e30c1a81b2ab8de1aa..8496620f9b4db94f93b2ea65952423b73512e724 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -7278,7 +7278,7 @@ fn ai_page(cx: &App) -> SettingsPage { }), SettingsPageItem::SettingItem(SettingItem { title: "Play Sound When Agent Done", - description: "Whether to play a sound when the agent has either completed its response, or needs user input.", + description: "When to play a sound when the agent has either completed its response, or needs user input.", field: Box::new(SettingField { json_path: Some("agent.play_sound_when_agent_done"), pick: |settings_content| { @@ -7340,7 +7340,7 @@ fn ai_page(cx: &App) -> SettingsPage { }), SettingsPageItem::SettingItem(SettingItem { title: "Thinking Display", - description: "How thinking blocks should be displayed by default. 'Automatic' auto-expands with a height constraint during streaming. 'Always Expanded' shows full content. 'Always Collapsed' keeps them collapsed.", + description: "How thinking blocks should be displayed by default. 'Auto' fully expands during streaming, then auto-collapses when done. 'Preview' auto-expands with a height constraint during streaming. 'Always Expanded' shows full content. 
'Always Collapsed' keeps them collapsed.", field: Box::new(SettingField { json_path: Some("agent.thinking_display"), pick: |settings_content| { diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index 89268b66f4c2f20411358eb63925187c6c3f382d..70aaaa15412793aae54c7c29fe8a2613854c8adb 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -523,6 +523,7 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 72099378bdfd3bcf9dc900a210f35fd11e77ba1f..9bef59fee04ed847b8311192728cb0bbfa59ce7f 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -4504,6 +4504,7 @@ impl Sidebar { let archive_view = cx.new(|cx| { ThreadsArchiveView::new( + active_workspace.downgrade(), agent_connection_store.clone(), agent_server_store.clone(), window, diff --git a/crates/tasks_ui/src/tasks_ui.rs b/crates/tasks_ui/src/tasks_ui.rs index da351ad410d078e79aa4c3038fcf88184bc648fa..d83cdfc830fc1abb19b8d05261aba711dbb14c1d 100644 --- a/crates/tasks_ui/src/tasks_ui.rs +++ b/crates/tasks_ui/src/tasks_ui.rs @@ -321,13 +321,11 @@ pub fn task_contexts( }) .unwrap_or_default(); - let latest_selection = active_editor.as_ref().map(|active_editor| { - active_editor - .read(cx) - .selections - .newest_anchor() - .head() - .text_anchor + let latest_selection = active_editor.as_ref().and_then(|active_editor| { + let snapshot = active_editor.read(cx).buffer().read(cx).snapshot(cx); + snapshot + .anchor_to_buffer_anchor(active_editor.read(cx).selections.newest_anchor().head()) + .map(|(anchor, _)| anchor) }); let mut worktree_abs_paths = workspace diff --git 
a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 5c4cce0f11d7db7b7593631e796c0f5e3d50adab..4dbe0e377afb86d176e8cd336e186d209a9d3c78 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -24,7 +24,7 @@ pub struct Anchor { /// Whether this anchor stays attached to the character *before* or *after* /// the offset. pub bias: Bias, - pub buffer_id: Option, + pub buffer_id: BufferId, } impl Debug for Anchor { @@ -46,28 +46,7 @@ impl Debug for Anchor { } impl Anchor { - pub const MIN: Self = Self { - timestamp_replica_id: clock::Lamport::MIN.replica_id, - timestamp_value: clock::Lamport::MIN.value, - offset: u32::MIN, - bias: Bias::Left, - buffer_id: None, - }; - - pub const MAX: Self = Self { - timestamp_replica_id: clock::Lamport::MAX.replica_id, - timestamp_value: clock::Lamport::MAX.value, - offset: u32::MAX, - bias: Bias::Right, - buffer_id: None, - }; - - pub fn new( - timestamp: clock::Lamport, - offset: u32, - bias: Bias, - buffer_id: Option, - ) -> Self { + pub fn new(timestamp: clock::Lamport, offset: u32, bias: Bias, buffer_id: BufferId) -> Self { Self { timestamp_replica_id: timestamp.replica_id, timestamp_value: timestamp.value, @@ -83,7 +62,7 @@ impl Anchor { timestamp_value: clock::Lamport::MIN.value, offset: u32::MIN, bias: Bias::Left, - buffer_id: Some(buffer_id), + buffer_id, } } @@ -93,7 +72,7 @@ impl Anchor { timestamp_value: clock::Lamport::MAX.value, offset: u32::MAX, bias: Bias::Right, - buffer_id: Some(buffer_id), + buffer_id, } } @@ -171,7 +150,7 @@ impl Anchor { pub fn is_valid(&self, buffer: &BufferSnapshot) -> bool { if self.is_min() || self.is_max() { true - } else if self.buffer_id.is_none_or(|id| id != buffer.remote_id) { + } else if self.buffer_id != buffer.remote_id { false } else { let Some(fragment_id) = buffer.try_fragment_id_for_anchor(self) else { @@ -207,6 +186,18 @@ impl Anchor { value: self.timestamp_value, } } + + pub fn opaque_id(&self) -> [u8; 20] { + let mut bytes = [0u8; 20]; + let buffer_id: 
u64 = self.buffer_id.into(); + bytes[0..8].copy_from_slice(&buffer_id.to_le_bytes()); + bytes[8..12].copy_from_slice(&self.offset.to_le_bytes()); + bytes[12..16].copy_from_slice(&self.timestamp_value.to_le_bytes()); + let replica_id = self.timestamp_replica_id.as_u16(); + bytes[16..18].copy_from_slice(&replica_id.to_le_bytes()); + bytes[18] = self.bias as u8; + bytes + } } pub trait OffsetRangeExt { @@ -237,6 +228,7 @@ where pub trait AnchorRangeExt { fn cmp(&self, b: &Range, buffer: &BufferSnapshot) -> Ordering; fn overlaps(&self, b: &Range, buffer: &BufferSnapshot) -> bool; + fn contains_anchor(&self, b: Anchor, buffer: &BufferSnapshot) -> bool; } impl AnchorRangeExt for Range { @@ -250,4 +242,8 @@ impl AnchorRangeExt for Range { fn overlaps(&self, other: &Range, buffer: &BufferSnapshot) -> bool { self.start.cmp(&other.end, buffer).is_lt() && other.start.cmp(&self.end, buffer).is_lt() } + + fn contains_anchor(&self, other: Anchor, buffer: &BufferSnapshot) -> bool { + self.start.cmp(&other, buffer).is_le() && self.end.cmp(&other, buffer).is_ge() + } } diff --git a/crates/text/src/patch.rs b/crates/text/src/patch.rs index eff3d0af110763074d7ca9fdc7842d45eece03c1..376d284473d09df16b93a609c8d49c443aa8a4ab 100644 --- a/crates/text/src/patch.rs +++ b/crates/text/src/patch.rs @@ -56,7 +56,10 @@ where if edit.is_empty() { return; } + self.push_maybe_empty(edit); + } + pub fn push_maybe_empty(&mut self, edit: Edit) { if let Some(last) = self.0.last_mut() { if last.old.end >= edit.old.start { last.old.end = edit.old.end; diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index b8f2ce6ce9b66040b4e633d28bfb42e1791a38ca..026f1272790740c9c2277004e8e96800d87bab15 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -2377,7 +2377,7 @@ impl BufferSnapshot { pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator where D: 'a + TextDimension, - A: 'a + IntoIterator, + A: 'a + IntoIterator, { let anchors = anchors.into_iter(); 
self.summaries_for_anchors_with_payload::(anchors.map(|a| (a, ()))) @@ -2390,7 +2390,7 @@ impl BufferSnapshot { ) -> impl 'a + Iterator where D: 'a + TextDimension, - A: 'a + IntoIterator, + A: 'a + IntoIterator, { let anchors = anchors.into_iter(); let mut fragment_cursor = self @@ -2406,7 +2406,7 @@ impl BufferSnapshot { return (D::from_text_summary(&self.visible_text.summary()), payload); } - let Some(insertion) = self.try_find_fragment(anchor) else { + let Some(insertion) = self.try_find_fragment(&anchor) else { panic!( "invalid insertion for buffer {}@{:?} with anchor {:?}", self.remote_id(), @@ -2457,7 +2457,7 @@ impl BufferSnapshot { } else if anchor.is_max() { self.visible_text.len() } else { - debug_assert_eq!(anchor.buffer_id, Some(self.remote_id)); + debug_assert_eq!(anchor.buffer_id, self.remote_id); debug_assert!( self.version.observed(anchor.timestamp()), "Anchor timestamp {:?} not observed by buffer {:?}", @@ -2489,7 +2489,7 @@ impl BufferSnapshot { #[cold] fn panic_bad_anchor(&self, anchor: &Anchor) -> ! { - if anchor.buffer_id.is_some_and(|id| id != self.remote_id) { + if anchor.buffer_id != self.remote_id { panic!( "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}", self.remote_id, self.version @@ -2553,12 +2553,12 @@ impl BufferSnapshot { } /// Returns an anchor range for the given input position range that is anchored to the text in the range. - pub fn anchor_range_around(&self, position: Range) -> Range { + pub fn anchor_range_inside(&self, position: Range) -> Range { self.anchor_after(position.start)..self.anchor_before(position.end) } /// Returns an anchor range for the given input position range that is anchored to the text before and after. 
- pub fn anchor_range_between(&self, position: Range) -> Range { + pub fn anchor_range_outside(&self, position: Range) -> Range { self.anchor_before(position.start)..self.anchor_after(position.end) } @@ -2608,7 +2608,7 @@ impl BufferSnapshot { fragment.timestamp, fragment.insertion_offset + overshoot as u32, bias, - Some(self.remote_id), + self.remote_id, ) } } @@ -2616,8 +2616,7 @@ impl BufferSnapshot { pub fn can_resolve(&self, anchor: &Anchor) -> bool { anchor.is_min() || anchor.is_max() - || (Some(self.remote_id) == anchor.buffer_id - && self.version.observed(anchor.timestamp())) + || (self.remote_id == anchor.buffer_id && self.version.observed(anchor.timestamp())) } pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { @@ -2643,7 +2642,10 @@ impl BufferSnapshot { where D: TextDimension + Ord, { - self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX) + self.edits_since_in_range( + since, + Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id), + ) } pub fn anchored_edits_since<'a, D>( @@ -2653,7 +2655,10 @@ impl BufferSnapshot { where D: TextDimension + Ord, { - self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX) + self.anchored_edits_since_in_range( + since, + Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id), + ) } pub fn edits_since_in_range<'a, D>( @@ -2916,13 +2921,13 @@ impl bool> Iterator for Ed fragment.timestamp, fragment.insertion_offset, Bias::Right, - Some(self.buffer_id), + self.buffer_id, ); let end_anchor = Anchor::new( fragment.timestamp, fragment.insertion_offset + fragment.len, Bias::Left, - Some(self.buffer_id), + self.buffer_id, ); if !fragment.was_visible(self.since, self.undos) && fragment.visible { diff --git a/crates/toolchain_selector/src/active_toolchain.rs b/crates/toolchain_selector/src/active_toolchain.rs index e3766e73bbc29d9548f785018e9f4aa40ab968a1..a9218564b5567d86f097781b224ac0658a0d5221 100644 --- 
a/crates/toolchain_selector/src/active_toolchain.rs +++ b/crates/toolchain_selector/src/active_toolchain.rs @@ -117,7 +117,7 @@ impl ActiveToolchain { cx: &mut Context, ) { let editor = editor.read(cx); - if let Some((_, buffer, _)) = editor.active_excerpt(cx) + if let Some(buffer) = editor.active_buffer(cx) && let Some(worktree_id) = buffer.read(cx).file().map(|file| file.worktree_id(cx)) { let subscription = cx.subscribe_in( diff --git a/crates/toolchain_selector/src/toolchain_selector.rs b/crates/toolchain_selector/src/toolchain_selector.rs index 7447975aa835c7a4c73068d20b55619f7db5231c..010003cd572f85b1aa8e6d31b0fc0a511f2ebd7f 100644 --- a/crates/toolchain_selector/src/toolchain_selector.rs +++ b/crates/toolchain_selector/src/toolchain_selector.rs @@ -584,11 +584,11 @@ impl ToolchainSelector { window: &mut Window, cx: &mut Context, ) -> Option<()> { - let (_, buffer, _) = workspace + let buffer = workspace .active_item(cx)? .act_as::(cx)? .read(cx) - .active_excerpt(cx)?; + .active_buffer(cx)?; let project = workspace.project().clone(); let language_name = buffer.read(cx).language()?.name(); diff --git a/crates/ui/src/components/data_table.rs b/crates/ui/src/components/data_table.rs index 3da30838ca8313b68608e432ce1e76870157c1fd..2012defc47d9cccea87849fa41470ad1183b552f 100644 --- a/crates/ui/src/components/data_table.rs +++ b/crates/ui/src/components/data_table.rs @@ -1,14 +1,15 @@ use std::{ops::Range, rc::Rc}; use gpui::{ - AbsoluteLength, AppContext, Context, DefiniteLength, DragMoveEvent, Entity, EntityId, - FocusHandle, Length, ListHorizontalSizingBehavior, ListSizingBehavior, ListState, Point, - Stateful, UniformListScrollHandle, WeakEntity, list, transparent_black, uniform_list, + AbsoluteLength, AppContext as _, DefiniteLength, DragMoveEvent, Entity, EntityId, FocusHandle, + Length, ListHorizontalSizingBehavior, ListSizingBehavior, ListState, Point, Stateful, + UniformListScrollHandle, WeakEntity, list, transparent_black, uniform_list, }; +use 
itertools::intersperse_with; use crate::{ ActiveTheme as _, AnyElement, App, Button, ButtonCommon as _, ButtonStyle, Color, Component, - ComponentScope, Div, ElementId, FixedWidth as _, FluentBuilder as _, Indicator, + ComponentScope, Context, Div, ElementId, FixedWidth as _, FluentBuilder as _, Indicator, InteractiveElement, IntoElement, ParentElement, Pixels, RegisterComponent, RenderOnce, ScrollAxes, ScrollableHandle, Scrollbars, SharedString, StatefulInteractiveElement, Styled, StyledExt as _, StyledTypography, Window, WithScrollbar, div, example_group_with_title, h_flex, @@ -16,20 +17,20 @@ use crate::{ table_row::{IntoTableRow as _, TableRow}, v_flex, }; -use itertools::intersperse_with; pub mod table_row; #[cfg(test)] mod tests; const RESIZE_COLUMN_WIDTH: f32 = 8.0; +const RESIZE_DIVIDER_WIDTH: f32 = 1.0; /// Represents an unchecked table row, which is a vector of elements. /// Will be converted into `TableRow` internally pub type UncheckedTableRow = Vec; #[derive(Debug)] -struct DraggedColumn(usize); +pub(crate) struct DraggedColumn(pub(crate) usize); struct UniformListData { render_list_of_rows_fn: @@ -110,106 +111,103 @@ impl TableInteractionState { view.update(cx, |view, cx| f(view, e, window, cx)).ok(); } } +} - /// Renders invisible resize handles overlaid on top of table content. - /// - /// - Spacer: invisible element that matches the width of table column content - /// - Divider: contains the actual resize handle that users can drag to resize columns - /// - /// Structure: [spacer] [divider] [spacer] [divider] [spacer] - /// - /// Business logic: - /// 1. Creates spacers matching each column width - /// 2. Intersperses (inserts) resize handles between spacers (interactive only for resizable columns) - /// 3. Each handle supports hover highlighting, double-click to reset, and drag to resize - /// 4. 
Returns an absolute-positioned overlay that sits on top of table content - fn render_resize_handles( - &self, - column_widths: &TableRow, - resizable_columns: &TableRow, - initial_sizes: &TableRow, - columns: Option>, - window: &mut Window, - cx: &mut App, - ) -> AnyElement { - let spacers = column_widths - .as_slice() - .iter() - .map(|width| base_cell_style(Some(*width)).into_any_element()); - - let mut column_ix = 0; - let resizable_columns_shared = Rc::new(resizable_columns.clone()); - let initial_sizes_shared = Rc::new(initial_sizes.clone()); - let mut resizable_columns_iter = resizable_columns.as_slice().iter(); - - // Insert dividers between spacers (column content) - let dividers = intersperse_with(spacers, || { - let resizable_columns = Rc::clone(&resizable_columns_shared); - let initial_sizes = Rc::clone(&initial_sizes_shared); - window.with_id(column_ix, |window| { - let mut resize_divider = div() - // This is required because this is evaluated at a different time than the use_state call above - .id(column_ix) - .relative() - .top_0() - .w_px() - .h_full() - .bg(cx.theme().colors().border.opacity(0.8)); - - let mut resize_handle = div() - .id("column-resize-handle") - .absolute() - .left_neg_0p5() - .w(px(RESIZE_COLUMN_WIDTH)) - .h_full(); - - if resizable_columns_iter - .next() - .is_some_and(TableResizeBehavior::is_resizable) - { - let hovered = window.use_state(cx, |_window, _cx| false); - - resize_divider = resize_divider.when(*hovered.read(cx), |div| { - div.bg(cx.theme().colors().border_focused) - }); - - resize_handle = resize_handle - .on_hover(move |&was_hovered, _, cx| hovered.write(cx, was_hovered)) - .cursor_col_resize() - .when_some(columns.clone(), |this, columns| { - this.on_click(move |event, window, cx| { - if event.click_count() >= 2 { - columns.update(cx, |columns, _| { - columns.on_double_click( - column_ix, - &initial_sizes, - &resizable_columns, - window, - ); - }) - } +/// Renders invisible resize handles overlaid on top of table 
content. +/// +/// - Spacer: invisible element that matches the width of table column content +/// - Divider: contains the actual resize handle that users can drag to resize columns +/// +/// Structure: [spacer] [divider] [spacer] [divider] [spacer] +/// +/// Business logic: +/// 1. Creates spacers matching each column width +/// 2. Intersperses (inserts) resize handles between spacers (interactive only for resizable columns) +/// 3. Each handle supports hover highlighting, double-click to reset, and drag to resize +/// 4. Returns an absolute-positioned overlay that sits on top of table content +fn render_resize_handles( + column_widths: &TableRow, + resizable_columns: &TableRow, + initial_sizes: &TableRow, + columns: Option>, + window: &mut Window, + cx: &mut App, +) -> AnyElement { + let spacers = column_widths + .as_slice() + .iter() + .map(|width| base_cell_style(Some(*width)).into_any_element()); + + let mut column_ix = 0; + let resizable_columns_shared = Rc::new(resizable_columns.clone()); + let initial_sizes_shared = Rc::new(initial_sizes.clone()); + let mut resizable_columns_iter = resizable_columns.as_slice().iter(); + + let dividers = intersperse_with(spacers, || { + let resizable_columns = Rc::clone(&resizable_columns_shared); + let initial_sizes = Rc::clone(&initial_sizes_shared); + window.with_id(column_ix, |window| { + let mut resize_divider = div() + .id(column_ix) + .relative() + .top_0() + .w(px(RESIZE_DIVIDER_WIDTH)) + .h_full() + .bg(cx.theme().colors().border.opacity(0.8)); + + let mut resize_handle = div() + .id("column-resize-handle") + .absolute() + .left_neg_0p5() + .w(px(RESIZE_COLUMN_WIDTH)) + .h_full(); + + if resizable_columns_iter + .next() + .is_some_and(TableResizeBehavior::is_resizable) + { + let hovered = window.use_state(cx, |_window, _cx| false); + + resize_divider = resize_divider.when(*hovered.read(cx), |div| { + div.bg(cx.theme().colors().border_focused) + }); + + resize_handle = resize_handle + .on_hover(move |&was_hovered, _, 
cx| hovered.write(cx, was_hovered)) + .cursor_col_resize() + .when_some(columns.clone(), |this, columns| { + this.on_click(move |event, window, cx| { + if event.click_count() >= 2 { + columns.update(cx, |columns, _| { + columns.on_double_click( + column_ix, + &initial_sizes, + &resizable_columns, + window, + ); + }) + } - cx.stop_propagation(); - }) + cx.stop_propagation(); }) - .on_drag(DraggedColumn(column_ix), |_, _offset, _window, cx| { - cx.new(|_cx| gpui::Empty) - }) - } + }) + .on_drag(DraggedColumn(column_ix), |_, _offset, _window, cx| { + cx.new(|_cx| gpui::Empty) + }) + } - column_ix += 1; - resize_divider.child(resize_handle).into_any_element() - }) - }); + column_ix += 1; + resize_divider.child(resize_handle).into_any_element() + }) + }); - h_flex() - .id("resize-handles") - .absolute() - .inset_0() - .w_full() - .children(dividers) - .into_any_element() - } + h_flex() + .id("resize-handles") + .absolute() + .inset_0() + .w_full() + .children(dividers) + .into_any_element() } #[derive(Debug, Copy, Clone, PartialEq)] @@ -233,25 +231,181 @@ impl TableResizeBehavior { } } -pub struct TableColumnWidths { - widths: TableRow, - visible_widths: TableRow, - cached_bounds_width: Pixels, - initialized: bool, +pub enum ColumnWidthConfig { + /// Static column widths (no resize handles). + Static { + widths: StaticColumnWidths, + /// Controls widths of the whole table. + table_width: Option, + }, + /// Redistributable columns — dragging redistributes the fixed available space + /// among columns without changing the overall table width. + Redistributable { + columns_state: Entity, + table_width: Option, + }, +} + +pub enum StaticColumnWidths { + /// All columns share space equally (flex-1 / Length::Auto). + Auto, + /// Each column has a specific width. + Explicit(TableRow), } -impl TableColumnWidths { - pub fn new(cols: usize, _: &mut App) -> Self { +impl ColumnWidthConfig { + /// Auto-width columns, auto-size table. 
+ pub fn auto() -> Self { + ColumnWidthConfig::Static { + widths: StaticColumnWidths::Auto, + table_width: None, + } + } + + /// Redistributable columns with no fixed table width. + pub fn redistributable(columns_state: Entity) -> Self { + ColumnWidthConfig::Redistributable { + columns_state, + table_width: None, + } + } + + /// Auto-width columns, fixed table width. + pub fn auto_with_table_width(width: impl Into) -> Self { + ColumnWidthConfig::Static { + widths: StaticColumnWidths::Auto, + table_width: Some(width.into()), + } + } + + /// Column widths for rendering. + pub fn widths_to_render(&self, cx: &App) -> Option> { + match self { + ColumnWidthConfig::Static { + widths: StaticColumnWidths::Auto, + .. + } => None, + ColumnWidthConfig::Static { + widths: StaticColumnWidths::Explicit(widths), + .. + } => Some(widths.map_cloned(Length::Definite)), + ColumnWidthConfig::Redistributable { + columns_state: entity, + .. + } => { + let state = entity.read(cx); + Some(state.preview_widths.map_cloned(Length::Definite)) + } + } + } + + /// Table-level width. + pub fn table_width(&self) -> Option { + match self { + ColumnWidthConfig::Static { table_width, .. } + | ColumnWidthConfig::Redistributable { table_width, .. } => { + table_width.map(Length::Definite) + } + } + } + + /// ListHorizontalSizingBehavior for uniform_list. + pub fn list_horizontal_sizing(&self) -> ListHorizontalSizingBehavior { + match self.table_width() { + Some(_) => ListHorizontalSizingBehavior::Unconstrained, + None => ListHorizontalSizingBehavior::FitList, + } + } + + /// Render resize handles overlay if applicable. + pub fn render_resize_handles(&self, window: &mut Window, cx: &mut App) -> Option { + match self { + ColumnWidthConfig::Redistributable { + columns_state: entity, + .. 
+ } => { + let (column_widths, resize_behavior, initial_widths) = { + let state = entity.read(cx); + ( + state.preview_widths.map_cloned(Length::Definite), + state.resize_behavior.clone(), + state.initial_widths.clone(), + ) + }; + Some(render_resize_handles( + &column_widths, + &resize_behavior, + &initial_widths, + Some(entity.clone()), + window, + cx, + )) + } + _ => None, + } + } + + /// Returns info needed for header double-click-to-reset, if applicable. + pub fn header_resize_info(&self, cx: &App) -> Option { + match self { + ColumnWidthConfig::Redistributable { columns_state, .. } => { + let state = columns_state.read(cx); + Some(HeaderResizeInfo { + columns_state: columns_state.downgrade(), + resize_behavior: state.resize_behavior.clone(), + initial_widths: state.initial_widths.clone(), + }) + } + _ => None, + } + } +} + +#[derive(Clone)] +pub struct HeaderResizeInfo { + pub columns_state: WeakEntity, + pub resize_behavior: TableRow, + pub initial_widths: TableRow, +} + +pub struct RedistributableColumnsState { + pub(crate) initial_widths: TableRow, + pub(crate) committed_widths: TableRow, + pub(crate) preview_widths: TableRow, + pub(crate) resize_behavior: TableRow, + pub(crate) cached_table_width: Pixels, +} + +impl RedistributableColumnsState { + pub fn new( + cols: usize, + initial_widths: UncheckedTableRow>, + resize_behavior: UncheckedTableRow, + ) -> Self { + let widths: TableRow = initial_widths + .into_iter() + .map(Into::into) + .collect::>() + .into_table_row(cols); Self { - widths: vec![DefiniteLength::default(); cols].into_table_row(cols), - visible_widths: vec![DefiniteLength::default(); cols].into_table_row(cols), - cached_bounds_width: Default::default(), - initialized: false, + initial_widths: widths.clone(), + committed_widths: widths.clone(), + preview_widths: widths, + resize_behavior: resize_behavior.into_table_row(cols), + cached_table_width: Default::default(), } } pub fn cols(&self) -> usize { - self.widths.cols() + 
self.committed_widths.cols() + } + + pub fn initial_widths(&self) -> &TableRow { + &self.initial_widths + } + + pub fn resize_behavior(&self) -> &TableRow { + &self.resize_behavior } fn get_fraction(length: &DefiniteLength, bounds_width: Pixels, rem_size: Pixels) -> f32 { @@ -264,19 +418,19 @@ impl TableColumnWidths { } } - fn on_double_click( + pub(crate) fn on_double_click( &mut self, double_click_position: usize, initial_sizes: &TableRow, resize_behavior: &TableRow, window: &mut Window, ) { - let bounds_width = self.cached_bounds_width; + let bounds_width = self.cached_table_width; let rem_size = window.rem_size(); let initial_sizes = initial_sizes.map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); let widths = self - .widths + .committed_widths .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); let updated_widths = Self::reset_to_initial_size( @@ -285,53 +439,16 @@ impl TableColumnWidths { initial_sizes, resize_behavior, ); - self.widths = updated_widths.map(DefiniteLength::Fraction); - self.visible_widths = self.widths.clone(); // previously was copy + self.committed_widths = updated_widths.map(DefiniteLength::Fraction); + self.preview_widths = self.committed_widths.clone(); } - fn reset_to_initial_size( + pub(crate) fn reset_to_initial_size( col_idx: usize, mut widths: TableRow, initial_sizes: TableRow, resize_behavior: &TableRow, ) -> TableRow { - // RESET: - // Part 1: - // Figure out if we should shrink/grow the selected column - // Get diff which represents the change in column we want to make initial size delta curr_size = diff - // - // Part 2: We need to decide which side column we should move and where - // - // If we want to grow our column we should check the left/right columns diff to see what side - // has a greater delta than their initial size. Likewise, if we shrink our column we should check - // the left/right column diffs to see what side has the smallest delta. 
- // - // Part 3: resize - // - // col_idx represents the column handle to the right of an active column - // - // If growing and right has the greater delta { - // shift col_idx to the right - // } else if growing and left has the greater delta { - // shift col_idx - 1 to the left - // } else if shrinking and the right has the greater delta { - // shift - // } { - // - // } - // } - // - // if we need to shrink, then if the right - // - - // DRAGGING - // we get diff which represents the change in the _drag handle_ position - // -diff => dragging left -> - // grow the column to the right of the handle as much as we can shrink columns to the left of the handle - // +diff => dragging right -> growing handles column - // grow the column to the left of the handle as much as we can shrink columns to the right of the handle - // - let diff = initial_sizes[col_idx] - widths[col_idx]; let left_diff = @@ -376,10 +493,9 @@ impl TableColumnWidths { widths } - fn on_drag_move( + pub(crate) fn on_drag_move( &mut self, drag_event: &DragMoveEvent, - resize_behavior: &TableRow, window: &mut Window, cx: &mut Context, ) { @@ -391,43 +507,42 @@ impl TableColumnWidths { let bounds_width = bounds.right() - bounds.left(); let col_idx = drag_event.drag(cx).0; - let column_handle_width = Self::get_fraction( - &DefiniteLength::Absolute(AbsoluteLength::Pixels(px(RESIZE_COLUMN_WIDTH))), + let divider_width = Self::get_fraction( + &DefiniteLength::Absolute(AbsoluteLength::Pixels(px(RESIZE_DIVIDER_WIDTH))), bounds_width, rem_size, ); let mut widths = self - .widths + .committed_widths .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); for length in widths[0..=col_idx].iter() { - col_position += length + column_handle_width; + col_position += length + divider_width; } let mut total_length_ratio = col_position; for length in widths[col_idx + 1..].iter() { total_length_ratio += length; } - let cols = resize_behavior.cols(); - total_length_ratio += (cols - 1 - col_idx) as f32 
* column_handle_width; + let cols = self.resize_behavior.cols(); + total_length_ratio += (cols - 1 - col_idx) as f32 * divider_width; let drag_fraction = (drag_position.x - bounds.left()) / bounds_width; let drag_fraction = drag_fraction * total_length_ratio; - let diff = drag_fraction - col_position - column_handle_width / 2.0; + let diff = drag_fraction - col_position - divider_width / 2.0; - Self::drag_column_handle(diff, col_idx, &mut widths, resize_behavior); + Self::drag_column_handle(diff, col_idx, &mut widths, &self.resize_behavior); - self.visible_widths = widths.map(DefiniteLength::Fraction); + self.preview_widths = widths.map(DefiniteLength::Fraction); } - fn drag_column_handle( + pub(crate) fn drag_column_handle( diff: f32, col_idx: usize, widths: &mut TableRow, resize_behavior: &TableRow, ) { - // if diff > 0.0 then go right if diff > 0.0 { Self::propagate_resize_diff(diff, col_idx, widths, resize_behavior, 1); } else { @@ -435,7 +550,7 @@ impl TableColumnWidths { } } - fn propagate_resize_diff( + pub(crate) fn propagate_resize_diff( diff: f32, col_idx: usize, widths: &mut TableRow, @@ -493,44 +608,16 @@ impl TableColumnWidths { } } -pub struct TableWidths { - initial: TableRow, - current: Option>, - resizable: TableRow, -} - -impl TableWidths { - pub fn new(widths: TableRow>) -> Self { - let widths = widths.map(Into::into); - - let expected_length = widths.cols(); - TableWidths { - initial: widths, - current: None, - resizable: vec![TableResizeBehavior::None; expected_length] - .into_table_row(expected_length), - } - } - - fn lengths(&self, cx: &App) -> TableRow { - self.current - .as_ref() - .map(|entity| entity.read(cx).visible_widths.map_cloned(Length::Definite)) - .unwrap_or_else(|| self.initial.map_cloned(Length::Definite)) - } -} - /// A table component #[derive(RegisterComponent, IntoElement)] pub struct Table { striped: bool, show_row_borders: bool, show_row_hover: bool, - width: Option, headers: Option>, rows: TableContents, 
interaction_state: Option>, - col_widths: Option, + column_width_config: ColumnWidthConfig, map_row: Option), &mut Window, &mut App) -> AnyElement>>, use_ui_font: bool, empty_table_callback: Option AnyElement>>, @@ -547,15 +634,14 @@ impl Table { striped: false, show_row_borders: true, show_row_hover: true, - width: None, headers: None, rows: TableContents::Vec(Vec::new()), interaction_state: None, map_row: None, use_ui_font: true, empty_table_callback: None, - col_widths: None, disable_base_cell_style: false, + column_width_config: ColumnWidthConfig::auto(), } } @@ -626,10 +712,18 @@ impl Table { self } - /// Sets the width of the table. - /// Will enable horizontal scrolling if [`Self::interactable`] is also called. - pub fn width(mut self, width: impl Into) -> Self { - self.width = Some(width.into()); + /// Sets a fixed table width with auto column widths. + /// + /// This is a shorthand for `.width_config(ColumnWidthConfig::auto_with_table_width(width))`. + /// For resizable columns or explicit column widths, use [`Table::width_config`] directly. + pub fn width(mut self, width: impl Into) -> Self { + self.column_width_config = ColumnWidthConfig::auto_with_table_width(width); + self + } + + /// Sets the column width configuration for the table. + pub fn width_config(mut self, config: ColumnWidthConfig) -> Self { + self.column_width_config = config; self } @@ -637,10 +731,8 @@ impl Table { /// /// Vertical scrolling will be enabled by default if the table is taller than its container. /// - /// Horizontal scrolling will only be enabled if [`Self::width`] is also called, otherwise - /// the list will always shrink the table columns to fit their contents I.e. If [`Self::uniform_list`] - /// is used without a width and with [`Self::interactable`], the [`ListHorizontalSizingBehavior`] will - /// be set to [`ListHorizontalSizingBehavior::FitList`]. 
+ /// Horizontal scrolling will only be enabled if a table width is set via [`ColumnWidthConfig`], + /// otherwise the list will always shrink the table columns to fit their contents. pub fn interactable(mut self, interaction_state: &Entity) -> Self { self.interaction_state = Some(interaction_state.downgrade()); self @@ -666,36 +758,6 @@ impl Table { self } - pub fn column_widths(mut self, widths: UncheckedTableRow>) -> Self { - if self.col_widths.is_none() { - self.col_widths = Some(TableWidths::new(widths.into_table_row(self.cols))); - } - self - } - - pub fn resizable_columns( - mut self, - resizable: UncheckedTableRow, - column_widths: &Entity, - cx: &mut App, - ) -> Self { - if let Some(table_widths) = self.col_widths.as_mut() { - table_widths.resizable = resizable.into_table_row(self.cols); - let column_widths = table_widths - .current - .get_or_insert_with(|| column_widths.clone()); - - column_widths.update(cx, |widths, _| { - if !widths.initialized { - widths.initialized = true; - widths.widths = table_widths.initial.clone(); - widths.visible_widths = widths.widths.clone(); - } - }) - } - self - } - pub fn no_ui_font(mut self) -> Self { self.use_ui_font = false; self @@ -812,11 +874,7 @@ pub fn render_table_row( pub fn render_table_header( headers: TableRow, table_context: TableRenderContext, - columns_widths: Option<( - WeakEntity, - TableRow, - TableRow, - )>, + resize_info: Option, entity_id: Option, cx: &mut App, ) -> impl IntoElement { @@ -837,9 +895,7 @@ pub fn render_table_header( .flex() .flex_row() .items_center() - .justify_between() .w_full() - .p_2() .border_b_1() .border_color(cx.theme().colors().border) .children( @@ -850,34 +906,33 @@ pub fn render_table_header( .zip(column_widths.into_vec()) .map(|((header_idx, h), width)| { base_cell_style_text(width, table_context.use_ui_font, cx) + .px_1() + .py_0p5() .child(h) .id(ElementId::NamedInteger( shared_element_id.clone(), header_idx as u64, )) - .when_some( - columns_widths.as_ref().cloned(), - 
|this, (column_widths, resizables, initial_sizes)| { - if resizables[header_idx].is_resizable() { - this.on_click(move |event, window, cx| { - if event.click_count() > 1 { - column_widths - .update(cx, |column, _| { - column.on_double_click( - header_idx, - &initial_sizes, - &resizables, - window, - ); - }) - .ok(); - } - }) - } else { - this - } - }, - ) + .when_some(resize_info.as_ref().cloned(), |this, info| { + if info.resize_behavior[header_idx].is_resizable() { + this.on_click(move |event, window, cx| { + if event.click_count() > 1 { + info.columns_state + .update(cx, |column, _| { + column.on_double_click( + header_idx, + &info.initial_widths, + &info.resize_behavior, + window, + ); + }) + .ok(); + } + }) + } else { + this + } + }) }), ) } @@ -901,7 +956,7 @@ impl TableRenderContext { show_row_borders: table.show_row_borders, show_row_hover: table.show_row_hover, total_row_count: table.rows.len(), - column_widths: table.col_widths.as_ref().map(|widths| widths.lengths(cx)), + column_widths: table.column_width_config.widths_to_render(cx), map_row: table.map_row.clone(), use_ui_font: table.use_ui_font, disable_base_cell_style: table.disable_base_cell_style, @@ -913,48 +968,52 @@ impl RenderOnce for Table { fn render(mut self, window: &mut Window, cx: &mut App) -> impl IntoElement { let table_context = TableRenderContext::new(&self, cx); let interaction_state = self.interaction_state.and_then(|state| state.upgrade()); - let current_widths = self - .col_widths - .as_ref() - .and_then(|widths| Some((widths.current.as_ref()?, widths.resizable.clone()))) - .map(|(curr, resize_behavior)| (curr.downgrade(), resize_behavior)); - let current_widths_with_initial_sizes = self - .col_widths + let header_resize_info = interaction_state .as_ref() - .and_then(|widths| { - Some(( - widths.current.as_ref()?, - widths.resizable.clone(), - widths.initial.clone(), - )) - }) - .map(|(curr, resize_behavior, initial)| (curr.downgrade(), resize_behavior, initial)); + .and_then(|_| 
self.column_width_config.header_resize_info(cx)); - let width = self.width; + let table_width = self.column_width_config.table_width(); + let horizontal_sizing = self.column_width_config.list_horizontal_sizing(); let no_rows_rendered = self.rows.is_empty(); + // Extract redistributable entity for drag/drop/prepaint handlers + let redistributable_entity = + interaction_state + .as_ref() + .and_then(|_| match &self.column_width_config { + ColumnWidthConfig::Redistributable { + columns_state: entity, + .. + } => Some(entity.downgrade()), + _ => None, + }); + + let resize_handles = interaction_state + .as_ref() + .and_then(|_| self.column_width_config.render_resize_handles(window, cx)); + let table = div() - .when_some(width, |this, width| this.w(width)) + .when_some(table_width, |this, width| this.w(width)) .h_full() .v_flex() .when_some(self.headers.take(), |this, headers| { this.child(render_table_header( headers, table_context.clone(), - current_widths_with_initial_sizes, + header_resize_info, interaction_state.as_ref().map(Entity::entity_id), cx, )) }) - .when_some(current_widths, { - |this, (widths, resize_behavior)| { + .when_some(redistributable_entity, { + |this, widths| { this.on_drag_move::({ let widths = widths.clone(); move |e, window, cx| { widths .update(cx, |widths, cx| { - widths.on_drag_move(e, &resize_behavior, window, cx); + widths.on_drag_move(e, window, cx); }) .ok(); } @@ -965,7 +1024,7 @@ impl RenderOnce for Table { widths .update(cx, |widths, _| { // This works because all children x axis bounds are the same - widths.cached_bounds_width = + widths.cached_table_width = bounds[0].right() - bounds[0].left(); }) .ok(); @@ -974,10 +1033,9 @@ impl RenderOnce for Table { .on_drop::(move |_, _, cx| { widths .update(cx, |widths, _| { - widths.widths = widths.visible_widths.clone(); + widths.committed_widths = widths.preview_widths.clone(); }) .ok(); - // Finish the resize operation }) } }) @@ -1029,11 +1087,7 @@ impl RenderOnce for Table { .size_full() 
.flex_grow() .with_sizing_behavior(ListSizingBehavior::Auto) - .with_horizontal_sizing_behavior(if width.is_some() { - ListHorizontalSizingBehavior::Unconstrained - } else { - ListHorizontalSizingBehavior::FitList - }) + .with_horizontal_sizing_behavior(horizontal_sizing) .when_some( interaction_state.as_ref(), |this, state| { @@ -1063,25 +1117,7 @@ impl RenderOnce for Table { .with_sizing_behavior(ListSizingBehavior::Auto), ), }) - .when_some( - self.col_widths.as_ref().zip(interaction_state.as_ref()), - |parent, (table_widths, state)| { - parent.child(state.update(cx, |state, cx| { - let resizable_columns = &table_widths.resizable; - let column_widths = table_widths.lengths(cx); - let columns = table_widths.current.clone(); - let initial_sizes = &table_widths.initial; - state.render_resize_handles( - &column_widths, - resizable_columns, - initial_sizes, - columns, - window, - cx, - ) - })) - }, - ); + .when_some(resize_handles, |parent, handles| parent.child(handles)); if let Some(state) = interaction_state.as_ref() { let scrollbars = state diff --git a/crates/ui/src/components/data_table/tests.rs b/crates/ui/src/components/data_table/tests.rs index f0982a8aa5abe5f5a9351ebaaaf4072ca17839e6..0936cd3088cc50bc08bf0a0a09d9a6fa7a2cdaf0 100644 --- a/crates/ui/src/components/data_table/tests.rs +++ b/crates/ui/src/components/data_table/tests.rs @@ -82,7 +82,7 @@ mod reset_column_size { let cols = initial_sizes.len(); let resize_behavior_vec = parse_resize_behavior(resize_behavior, total_1, cols); let resize_behavior = TableRow::from_vec(resize_behavior_vec, cols); - let result = TableColumnWidths::reset_to_initial_size( + let result = RedistributableColumnsState::reset_to_initial_size( column_index, TableRow::from_vec(widths, cols), TableRow::from_vec(initial_sizes, cols), @@ -259,7 +259,7 @@ mod drag_handle { let distance = distance as f32 / total_1; let mut widths_table_row = TableRow::from_vec(widths, cols); - TableColumnWidths::drag_column_handle( + 
RedistributableColumnsState::drag_column_handle( distance, column_index, &mut widths_table_row, diff --git a/crates/util/src/command.rs b/crates/util/src/command.rs index 44db592640bc70362b924ffca674fd02a4126f3a..a131d3c15b9fed351cc1d3a86bad7771b7d53167 100644 --- a/crates/util/src/command.rs +++ b/crates/util/src/command.rs @@ -68,6 +68,10 @@ impl Command { self } + pub fn get_args(&self) -> impl Iterator { + self.0.get_args() + } + pub fn env(&mut self, key: impl AsRef, val: impl AsRef) -> &mut Self { self.0.env(key, val); self @@ -129,4 +133,8 @@ impl Command { pub async fn status(&mut self) -> std::io::Result { self.0.status().await } + + pub fn get_program(&self) -> &OsStr { + self.0.get_program() + } } diff --git a/crates/util/src/command/darwin.rs b/crates/util/src/command/darwin.rs index 347fc8180ed9325d4f36a3fcce2f3c68964321d5..a3d7561f4e3cfde1f6ff33cdc469af071044fa0b 100644 --- a/crates/util/src/command/darwin.rs +++ b/crates/util/src/command/darwin.rs @@ -104,6 +104,10 @@ impl Command { self } + pub fn get_args(&self) -> impl Iterator { + self.args.iter().map(|s| s.as_os_str()) + } + pub fn env(&mut self, key: impl AsRef, val: impl AsRef) -> &mut Self { self.envs .insert(key.as_ref().to_owned(), Some(val.as_ref().to_owned())); @@ -217,6 +221,10 @@ impl Command { let mut child = self.spawn()?; child.status().await } + + pub fn get_program(&self) -> &OsStr { + self.program.as_os_str() + } } #[derive(Debug)] diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 362fed2df3543c5571f83db2c964a8c17fcebcb3..fd19a5dc400a24b9f27617c44bd71fe38073c757 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -1348,7 +1348,7 @@ impl Position { let snapshot = editor.snapshot(window, cx); let target = match self { Position::Line { row, offset } => { - if let Some(anchor) = editor.active_excerpt(cx).and_then(|(_, buffer, _)| { + if let Some(anchor) = editor.active_buffer(cx).and_then(|buffer| { 
editor.buffer().read(cx).buffer_point_to_anchor( &buffer, Point::new(row.saturating_sub(1), 0), @@ -2336,7 +2336,7 @@ impl Vim { match c { '%' => { self.update_editor(cx, |_, editor, cx| { - if let Some((_, buffer, _)) = editor.active_excerpt(cx) + if let Some(buffer) = editor.active_buffer(cx) && let Some(file) = buffer.read(cx).file() && let Some(local) = file.as_local() { diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs index c1e766c03a897facb3c7acf76b3ef7811e6910a8..d2c8f4b78dcde8c4f2135b63ee3d07f04e01ebd5 100644 --- a/crates/vim/src/helix.rs +++ b/crates/vim/src/helix.rs @@ -648,6 +648,7 @@ impl Vim { self.search = SearchState { direction: searchable::Direction::Next, count: 1, + cmd_f_search: false, prior_selections, prior_operator: self.operator_stack.last().cloned(), prior_mode: self.mode, diff --git a/crates/vim/src/motion.rs b/crates/vim/src/motion.rs index 579af3d314ef114381de892b147d8d0a540656fb..6bf2afd09ae07ff8453a481a8d6e6e6a254e670f 100644 --- a/crates/vim/src/motion.rs +++ b/crates/vim/src/motion.rs @@ -1,5 +1,6 @@ use editor::{ Anchor, Bias, BufferOffset, DisplayPoint, Editor, MultiBufferOffset, RowExt, ToOffset, + ToPoint as _, display_map::{DisplayRow, DisplaySnapshot, FoldPoint, ToDisplayPoint}, movement::{ self, FindRange, TextLayoutDetails, find_boundary, find_preceding_boundary_display_point, @@ -11,6 +12,7 @@ use multi_buffer::MultiBufferRow; use schemars::JsonSchema; use serde::Deserialize; use std::{f64, ops::Range}; + use workspace::searchable::Direction; use crate::{ @@ -2340,39 +2342,19 @@ fn start_of_next_sentence( fn go_to_line(map: &DisplaySnapshot, display_point: DisplayPoint, line: usize) -> DisplayPoint { let point = map.display_point_to_point(display_point, Bias::Left); - let Some(mut excerpt) = map.buffer_snapshot().excerpt_containing(point..point) else { + let snapshot = map.buffer_snapshot(); + let Some((buffer_snapshot, _)) = snapshot.point_to_buffer_point(point) else { + return display_point; + }; + + let 
Some(anchor) = snapshot.anchor_in_excerpt(buffer_snapshot.anchor_after( + buffer_snapshot.clip_point(Point::new((line - 1) as u32, point.column), Bias::Left), + )) else { return display_point; }; - let offset = excerpt.buffer().point_to_offset( - excerpt - .buffer() - .clip_point(Point::new((line - 1) as u32, point.column), Bias::Left), - ); - let buffer_range = excerpt.buffer_range(); - if offset >= buffer_range.start.0 && offset <= buffer_range.end.0 { - let point = map - .buffer_snapshot() - .offset_to_point(excerpt.map_offset_from_buffer(BufferOffset(offset))); - return map.clip_point(map.point_to_display_point(point, Bias::Left), Bias::Left); - } - for (excerpt, buffer, range) in map.buffer_snapshot().excerpts() { - let excerpt_range = language::ToOffset::to_offset(&range.context.start, buffer) - ..language::ToOffset::to_offset(&range.context.end, buffer); - if offset >= excerpt_range.start && offset <= excerpt_range.end { - let text_anchor = buffer.anchor_after(offset); - let anchor = Anchor::in_buffer(excerpt, text_anchor); - return anchor.to_display_point(map); - } else if offset <= excerpt_range.start { - let anchor = Anchor::in_buffer(excerpt, range.context.start); - return anchor.to_display_point(map); - } - } map.clip_point( - map.point_to_display_point( - map.buffer_snapshot().clip_point(point, Bias::Left), - Bias::Left, - ), + map.point_to_display_point(anchor.to_point(snapshot), Bias::Left), Bias::Left, ) } diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 118805586118e36269a1f0c1d1d619058133da30..b54a0262744afddbefbd3d4ce5a737dfe3ee7502 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -932,7 +932,7 @@ impl Vim { Vim::take_forced_motion(cx); self.update_editor(cx, |vim, editor, cx| { let selection = editor.selections.newest_anchor(); - let Some((buffer, point, _)) = editor + let Some((buffer, point)) = editor .buffer() .read(cx) .point_to_buffer_point(selection.head(), cx) diff --git 
a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index 248f43c08192182cb266dbfc43a5a769f87429cd..6a8394f44710b7e241b7ba38f4913899a5afbce6 100644 --- a/crates/vim/src/normal/search.rs +++ b/crates/vim/src/normal/search.rs @@ -284,6 +284,7 @@ impl Vim { self.search = SearchState { direction, count, + cmd_f_search: false, prior_selections, prior_operator: self.operator_stack.last().cloned(), prior_mode, @@ -298,6 +299,7 @@ impl Vim { let current_mode = self.mode; self.search = Default::default(); self.search.prior_mode = current_mode; + self.search.cmd_f_search = true; cx.propagate(); } @@ -957,6 +959,45 @@ mod test { cx.assert_editor_state("«oneˇ» one one one"); } + #[gpui::test] + async fn test_non_vim_search_in_vim_mode(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.cx.set_state("ˇone one one one"); + cx.run_until_parked(); + cx.simulate_keystrokes("cmd-f"); + cx.run_until_parked(); + + cx.assert_state("«oneˇ» one one one", Mode::Visual); + cx.simulate_keystrokes("enter"); + cx.run_until_parked(); + cx.assert_state("one «oneˇ» one one", Mode::Visual); + cx.simulate_keystrokes("shift-enter"); + cx.run_until_parked(); + cx.assert_state("«oneˇ» one one one", Mode::Visual); + + cx.simulate_keystrokes("escape"); + cx.run_until_parked(); + cx.assert_state("«oneˇ» one one one", Mode::Visual); + } + + #[gpui::test] + async fn test_non_vim_search_in_vim_insert_mode(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.set_state("ˇone one one one", Mode::Insert); + cx.run_until_parked(); + cx.simulate_keystrokes("cmd-f"); + cx.run_until_parked(); + + cx.assert_state("«oneˇ» one one one", Mode::Insert); + cx.simulate_keystrokes("enter"); + cx.run_until_parked(); + cx.assert_state("one «oneˇ» one one", Mode::Insert); + + cx.simulate_keystrokes("escape"); + cx.run_until_parked(); + cx.assert_state("one «oneˇ» one one", Mode::Insert); + } + #[gpui::test] async fn 
test_visual_star_hash(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/src/object.rs b/crates/vim/src/object.rs index 1c96ba74b455c5d94e53a0ab9c78cd3ae8af5b3c..67b4b16b178e75316eb10b051ab9153737777e3f 100644 --- a/crates/vim/src/object.rs +++ b/crates/vim/src/object.rs @@ -203,33 +203,24 @@ fn find_mini_delimiters( is_valid_delimiter: &DelimiterPredicate, ) -> Option> { let point = map.clip_at_line_end(display_point).to_point(map); - let offset = point.to_offset(&map.buffer_snapshot()); + let offset = map.buffer_snapshot().point_to_offset(point); let line_range = get_line_range(map, point); let visible_line_range = get_visible_line_range(&line_range); let snapshot = &map.buffer_snapshot(); - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let buffer = excerpt.buffer(); - let buffer_offset = excerpt.map_offset_to_buffer(offset); - let bracket_filter = |open: Range, close: Range| { - is_valid_delimiter(buffer, open.start, close.start) - }; - - // Try to find delimiters in visible range first let ranges = map .buffer_snapshot() .bracket_ranges(visible_line_range) .map(|ranges| { ranges.filter_map(|(open, close)| { - // Convert the ranges from multibuffer space to buffer space as - // that is what `is_valid_delimiter` expects, otherwise it might - // panic as the values might be out of bounds. 
- let buffer_open = excerpt.map_range_to_buffer(open.clone()); - let buffer_close = excerpt.map_range_to_buffer(close.clone()); + let (buffer, buffer_open) = + snapshot.range_to_buffer_range::(open.clone())?; + let (_, buffer_close) = + snapshot.range_to_buffer_range::(close.clone())?; - if is_valid_delimiter(buffer, buffer_open.start.0, buffer_close.start.0) { + if is_valid_delimiter(buffer, buffer_open.start, buffer_close.start) { Some((open, close)) } else { None @@ -247,18 +238,31 @@ fn find_mini_delimiters( ); } - // Fall back to innermost enclosing brackets - let (open_bracket, close_bracket) = buffer - .innermost_enclosing_bracket_ranges(buffer_offset..buffer_offset, Some(&bracket_filter))?; + let results = snapshot.map_excerpt_ranges(offset..offset, |buffer, _, input_range| { + let buffer_offset = input_range.start.0; + let bracket_filter = |open: Range, close: Range| { + is_valid_delimiter(buffer, open.start, close.start) + }; + let Some((open, close)) = buffer.innermost_enclosing_bracket_ranges( + buffer_offset..buffer_offset, + Some(&bracket_filter), + ) else { + return vec![]; + }; + vec![ + (BufferOffset(open.start)..BufferOffset(open.end), ()), + (BufferOffset(close.start)..BufferOffset(close.end), ()), + ] + })?; + + if results.len() < 2 { + return None; + } Some( DelimiterRange { - open: excerpt.map_range_from_buffer( - BufferOffset(open_bracket.start)..BufferOffset(open_bracket.end), - ), - close: excerpt.map_range_from_buffer( - BufferOffset(close_bracket.start)..BufferOffset(close_bracket.end), - ), + open: results[0].0.clone(), + close: results[1].0.clone(), } .to_display_range(map, around), ) @@ -935,61 +939,64 @@ pub fn surrounding_html_tag( } let snapshot = &map.buffer_snapshot(); - let offset = head.to_offset(map, Bias::Left); - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let buffer = excerpt.buffer(); - let offset = excerpt.map_offset_to_buffer(offset); - - // Find the most closest to current offset - let mut cursor = 
buffer.syntax_layer_at(offset)?.node().walk(); - let mut last_child_node = cursor.node(); - while cursor.goto_first_child_for_byte(offset.0).is_some() { - last_child_node = cursor.node(); - } - - let mut last_child_node = Some(last_child_node); - while let Some(cur_node) = last_child_node { - if cur_node.child_count() >= 2 { - let first_child = cur_node.child(0); - let last_child = cur_node.child(cur_node.child_count() as u32 - 1); - if let (Some(first_child), Some(last_child)) = (first_child, last_child) { - let open_tag = open_tag(buffer.chars_for_range(first_child.byte_range())); - let close_tag = close_tag(buffer.chars_for_range(last_child.byte_range())); - // It needs to be handled differently according to the selection length - let is_valid = if range.end.to_offset(map, Bias::Left) - - range.start.to_offset(map, Bias::Left) - <= 1 - { - offset.0 <= last_child.end_byte() - } else { - excerpt - .map_offset_to_buffer(range.start.to_offset(map, Bias::Left)) - .0 - >= first_child.start_byte() - && excerpt - .map_offset_to_buffer(range.end.to_offset(map, Bias::Left)) - .0 - <= last_child.start_byte() + 1 - }; - if open_tag.is_some() && open_tag == close_tag && is_valid { - let range = if around { - first_child.byte_range().start..last_child.byte_range().end - } else { - first_child.byte_range().end..last_child.byte_range().start - }; - let range = BufferOffset(range.start)..BufferOffset(range.end); - if excerpt.contains_buffer_range(range.clone()) { - let result = excerpt.map_range_from_buffer(range); - return Some( - result.start.to_display_point(map)..result.end.to_display_point(map), - ); + let head_offset = head.to_offset(map, Bias::Left); + let range_start = range.start.to_offset(map, Bias::Left); + let range_end = range.end.to_offset(map, Bias::Left); + let head_is_start = head_offset <= range_start; + + let results = snapshot.map_excerpt_ranges( + range_start..range_end, + |buffer, _excerpt_range, input_buffer_range| { + let buffer_offset = if head_is_start 
{ + input_buffer_range.start + } else { + input_buffer_range.end + }; + + let Some(layer) = buffer.syntax_layer_at(buffer_offset) else { + return Vec::new(); + }; + let mut cursor = layer.node().walk(); + let mut last_child_node = cursor.node(); + while cursor.goto_first_child_for_byte(buffer_offset.0).is_some() { + last_child_node = cursor.node(); + } + + let mut last_child_node = Some(last_child_node); + while let Some(cur_node) = last_child_node { + if cur_node.child_count() >= 2 { + let first_child = cur_node.child(0); + let last_child = cur_node.child(cur_node.child_count() as u32 - 1); + if let (Some(first_child), Some(last_child)) = (first_child, last_child) { + let open_tag = open_tag(buffer.chars_for_range(first_child.byte_range())); + let close_tag = close_tag(buffer.chars_for_range(last_child.byte_range())); + let is_valid = if range_end.saturating_sub(range_start) <= 1 { + buffer_offset.0 <= last_child.end_byte() + } else { + input_buffer_range.start.0 >= first_child.start_byte() + && input_buffer_range.end.0 <= last_child.start_byte() + 1 + }; + if open_tag.is_some() && open_tag == close_tag && is_valid { + let buffer_range = if around { + first_child.byte_range().start..last_child.byte_range().end + } else { + first_child.byte_range().end..last_child.byte_range().start + }; + return vec![( + BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end), + (), + )]; + } } } + last_child_node = cur_node.parent(); } - } - last_child_node = cur_node.parent(); - } - None + Vec::new() + }, + )?; + + let (result, ()) = results.into_iter().next()?; + Some(result.start.to_display_point(map)..result.end.to_display_point(map)) } /// Returns a range that surrounds the word and following whitespace @@ -1163,44 +1170,55 @@ fn text_object( let snapshot = &map.buffer_snapshot(); let offset = relative_to.to_offset(map, Bias::Left); - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let buffer = excerpt.buffer(); - let offset = 
excerpt.map_offset_to_buffer(offset); - - let mut matches: Vec> = buffer - .text_object_ranges(offset..offset, TreeSitterOptions::default()) - .filter_map(|(r, m)| if m == target { Some(r) } else { None }) - .collect(); - matches.sort_by_key(|r| r.end - r.start); - if let Some(buffer_range) = matches.first() { - let buffer_range = BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end); - let range = excerpt.map_range_from_buffer(buffer_range); - return Some(range.start.to_display_point(map)..range.end.to_display_point(map)); - } - - let around = target.around()?; - let mut matches: Vec> = buffer - .text_object_ranges(offset..offset, TreeSitterOptions::default()) - .filter_map(|(r, m)| if m == around { Some(r) } else { None }) - .collect(); - matches.sort_by_key(|r| r.end - r.start); - let around_range = matches.first()?; - - let mut matches: Vec> = buffer - .text_object_ranges(around_range.clone(), TreeSitterOptions::default()) - .filter_map(|(r, m)| if m == target { Some(r) } else { None }) - .collect(); - matches.sort_by_key(|r| r.start); - if let Some(buffer_range) = matches.first() - && !buffer_range.is_empty() - { - let buffer_range = BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end); - let range = excerpt.map_range_from_buffer(buffer_range); - return Some(range.start.to_display_point(map)..range.end.to_display_point(map)); - } - let around_range = BufferOffset(around_range.start)..BufferOffset(around_range.end); - let buffer_range = excerpt.map_range_from_buffer(around_range); - return Some(buffer_range.start.to_display_point(map)..buffer_range.end.to_display_point(map)); + let results = + snapshot.map_excerpt_ranges(offset..offset, |buffer, _excerpt_range, buffer_range| { + let buffer_offset = buffer_range.start; + + let mut matches: Vec> = buffer + .text_object_ranges(buffer_offset..buffer_offset, TreeSitterOptions::default()) + .filter_map(|(r, m)| if m == target { Some(r) } else { None }) + .collect(); + matches.sort_by_key(|r| 
r.end - r.start); + if let Some(buffer_range) = matches.first() { + return vec![( + BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end), + (), + )]; + } + + let Some(around) = target.around() else { + return vec![]; + }; + let mut matches: Vec> = buffer + .text_object_ranges(buffer_offset..buffer_offset, TreeSitterOptions::default()) + .filter_map(|(r, m)| if m == around { Some(r) } else { None }) + .collect(); + matches.sort_by_key(|r| r.end - r.start); + let Some(around_range) = matches.first() else { + return vec![]; + }; + + let mut matches: Vec> = buffer + .text_object_ranges(around_range.clone(), TreeSitterOptions::default()) + .filter_map(|(r, m)| if m == target { Some(r) } else { None }) + .collect(); + matches.sort_by_key(|r| r.start); + if let Some(buffer_range) = matches.first() + && !buffer_range.is_empty() + { + return vec![( + BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end), + (), + )]; + } + vec![( + BufferOffset(around_range.start)..BufferOffset(around_range.end), + (), + )] + })?; + + let (range, ()) = results.into_iter().next()?; + Some(range.start.to_display_point(map)..range.end.to_display_point(map)) } fn argument( @@ -1211,16 +1229,11 @@ fn argument( let snapshot = &map.buffer_snapshot(); let offset = relative_to.to_offset(map, Bias::Left); - // The `argument` vim text object uses the syntax tree, so we operate at the buffer level and map back to the display level - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let buffer = excerpt.buffer(); - fn comma_delimited_range_at( buffer: &BufferSnapshot, mut offset: BufferOffset, include_comma: bool, ) -> Option> { - // Seek to the first non-whitespace character offset += buffer .chars_at(offset) .take_while(|c| c.is_whitespace()) @@ -1228,25 +1241,20 @@ fn argument( .sum::(); let bracket_filter = |open: Range, close: Range| { - // Filter out empty ranges if open.end == close.start { return false; } - // If the cursor is outside the brackets, ignore 
them if open.start == offset.0 || close.end == offset.0 { return false; } - // TODO: Is there any better way to filter out string brackets? - // Used to filter out string brackets matches!( buffer.chars_at(open.start).next(), Some('(' | '[' | '{' | '<' | '|') ) }; - // Find the brackets containing the cursor let (open_bracket, close_bracket) = buffer.innermost_enclosing_bracket_ranges(offset..offset, Some(&bracket_filter))?; @@ -1256,7 +1264,6 @@ fn argument( let node = layer.node(); let mut cursor = node.walk(); - // Loop until we find the smallest node whose parent covers the bracket range. This node is the argument in the parent argument list let mut parent_covers_bracket_range = false; loop { let node = cursor.node(); @@ -1268,20 +1275,17 @@ fn argument( } parent_covers_bracket_range = covers_bracket_range; - // Unable to find a child node with a parent that covers the bracket range, so no argument to select cursor.goto_first_child_for_byte(offset.0)?; } let mut argument_node = cursor.node(); - // If the child node is the open bracket, move to the next sibling. if argument_node.byte_range() == open_bracket { if !cursor.goto_next_sibling() { return Some(inner_bracket_range); } argument_node = cursor.node(); } - // While the child node is the close bracket or a comma, move to the previous sibling while argument_node.byte_range() == close_bracket || argument_node.kind() == "," { if !cursor.goto_previous_sibling() { return Some(inner_bracket_range); @@ -1292,14 +1296,11 @@ fn argument( } } - // The start and end of the argument range, defaulting to the start and end of the argument node let mut start = argument_node.start_byte(); let mut end = argument_node.end_byte(); let mut needs_surrounding_comma = include_comma; - // Seek backwards to find the start of the argument - either the previous comma or the opening bracket. 
- // We do this because multiple nodes can represent a single argument, such as with rust `vec![a.b.c, d.e.f]` while cursor.goto_previous_sibling() { let prev = cursor.node(); @@ -1317,7 +1318,6 @@ fn argument( } } - // Do the same for the end of the argument, extending to next comma or the end of the argument list while cursor.goto_next_sibling() { let next = cursor.node(); @@ -1326,7 +1326,6 @@ fn argument( break; } else if next.kind() == "," { if needs_surrounding_comma { - // Select up to the beginning of the next argument if there is one, otherwise to the end of the comma if let Some(next_arg) = next.next_sibling() { end = next_arg.start_byte(); } else { @@ -1342,14 +1341,17 @@ fn argument( Some(BufferOffset(start)..BufferOffset(end)) } - let result = comma_delimited_range_at(buffer, excerpt.map_offset_to_buffer(offset), around)?; + let results = + snapshot.map_excerpt_ranges(offset..offset, |buffer, _excerpt_range, buffer_range| { + let buffer_offset = buffer_range.start; + match comma_delimited_range_at(buffer, buffer_offset, around) { + Some(result) => vec![(result, ())], + None => vec![], + } + })?; - if excerpt.contains_buffer_range(result.clone()) { - let result = excerpt.map_range_from_buffer(result); - Some(result.start.to_display_point(map)..result.end.to_display_point(map)) - } else { - None - } + let (range, ()) = results.into_iter().next()?; + Some(range.start.to_display_point(map)..range.end.to_display_point(map)) } fn indent( @@ -3369,7 +3371,12 @@ mod test { // but, since this is being set manually, the language isn't // automatically set. 
let editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx); - let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids(); + let buffer_ids = multi_buffer + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) + .collect::>(); if let Some(buffer) = multi_buffer.read(cx).buffer(buffer_ids[1]) { buffer.update(cx, |buffer, cx| { buffer.set_language(Some(language::rust_lang()), cx); diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index 2ae4abe33a0fbb4bc6f8a838e60dc0857949e0dc..4dd557199ab9aebe0a2b26438bdaa0e321a956b2 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -426,7 +426,7 @@ impl MarksState { name.clone(), buffer .read(cx) - .summaries_for_anchors::(anchors) + .summaries_for_anchors::(anchors.iter().copied()) .collect(), ) }) @@ -492,7 +492,14 @@ impl MarksState { { let buffer_marks = old_marks .into_iter() - .map(|(k, v)| (k, v.into_iter().map(|anchor| anchor.text_anchor).collect())) + .map(|(k, v)| { + ( + k, + v.into_iter() + .filter_map(|anchor| anchor.raw_text_anchor()) + .collect(), + ) + }) .collect(); self.buffer_marks .insert(buffer.read(cx).remote_id(), buffer_marks); @@ -569,6 +576,7 @@ impl MarksState { anchors: Vec, cx: &mut Context, ) { + let multibuffer_snapshot = multibuffer.read(cx).snapshot(cx); let buffer = multibuffer.read(cx).as_singleton(); let abs_path = buffer.as_ref().and_then(|b| self.path_for_buffer(b, cx)); @@ -602,7 +610,7 @@ impl MarksState { name.clone(), anchors .into_iter() - .map(|anchor| anchor.text_anchor) + .filter_map(|anchor| Some(multibuffer_snapshot.anchor_to_buffer_anchor(anchor)?.0)) .collect(), ); if !self.watched_buffers.contains_key(&buffer_id) { @@ -629,12 +637,13 @@ impl MarksState { return Some(Mark::Local(anchors.get(name)?.clone())); } - let (excerpt_id, buffer_id, _) = multi_buffer.read(cx).read(cx).as_singleton()?; - if let Some(anchors) = self.buffer_marks.get(&buffer_id) { + let multibuffer_snapshot = 
multi_buffer.read(cx).snapshot(cx); + let buffer_snapshot = multibuffer_snapshot.as_singleton()?; + if let Some(anchors) = self.buffer_marks.get(&buffer_snapshot.remote_id()) { let text_anchors = anchors.get(name)?; let anchors = text_anchors .iter() - .map(|anchor| Anchor::in_buffer(excerpt_id, *anchor)) + .filter_map(|anchor| multibuffer_snapshot.anchor_in_excerpt(*anchor)) .collect(); return Some(Mark::Local(anchors)); } @@ -895,14 +904,13 @@ impl VimGlobals { } } '%' => editor.and_then(|editor| { - let selection = editor - .selections - .newest::(&editor.display_snapshot(cx)); - if let Some((_, buffer, _)) = editor - .buffer() - .read(cx) - .excerpt_containing(selection.head(), cx) - { + let multibuffer = editor.buffer().read(cx); + let snapshot = multibuffer.snapshot(cx); + let selection = editor.selections.newest_anchor(); + let buffer = snapshot + .anchor_to_buffer_anchor(selection.head()) + .and_then(|(text_anchor, _)| multibuffer.buffer(text_anchor.buffer_id)); + if let Some(buffer) = buffer { buffer .read(cx) .file() @@ -1022,6 +1030,7 @@ impl Clone for ReplayableAction { pub struct SearchState { pub direction: Direction, pub count: usize, + pub cmd_f_search: bool, pub prior_selections: Vec>, pub prior_operator: Option, diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index 2d0ec4f69a0aaa93b191933565b9db27d8fb3198..961729e0e24a66a624e30ca7c72bfe5f13e10bca 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -2117,7 +2117,12 @@ async fn test_folded_multibuffer_excerpts(cx: &mut gpui::TestAppContext) { ); let mut editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx); - let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids(); + let buffer_ids = multi_buffer + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) + .collect::>(); // fold all but the second buffer, so that we test navigating between two // adjacent folded buffers, as well as folded buffers at the 
start and // end the multibuffer @@ -2262,7 +2267,13 @@ async fn test_folded_multibuffer_excerpts(cx: &mut gpui::TestAppContext) { " }); cx.update_editor(|editor, _, cx| { - let buffer_ids = editor.buffer().read(cx).excerpt_buffer_ids(); + let buffer_ids = editor + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) + .collect::>(); editor.fold_buffer(buffer_ids[1], cx); }); diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 05046899b6164f7c5884e3ad64ad69caaeb2015f..6e1849340f17b776a34546dd9a118dc55e8dab84 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -432,8 +432,12 @@ pub fn init(cx: &mut App) { .and_then(|item| item.act_as::(cx)) .and_then(|editor| editor.read(cx).addon::().cloned()); let Some(vim) = vim else { return }; - vim.entity.update(cx, |_, cx| { - cx.defer_in(window, |vim, window, cx| vim.search_submit(window, cx)) + vim.entity.update(cx, |vim, cx| { + if !vim.search.cmd_f_search { + cx.defer_in(window, |vim, window, cx| vim.search_submit(window, cx)) + } else { + cx.propagate() + } }) }); workspace.register_action(|_, _: &GoToTab, window, cx| { @@ -2086,7 +2090,7 @@ impl Vim { VimEditorSettingsState { cursor_shape: self.cursor_shape(cx), clip_at_line_ends: self.clip_at_line_ends(), - collapse_matches: !HelixModeSetting::get_global(cx).0, + collapse_matches: !HelixModeSetting::get_global(cx).0 && !self.search.cmd_f_search, input_enabled: self.editor_input_enabled(), expects_character_input: self.expects_character_input(), autoindent: self.should_autoindent(), diff --git a/crates/workspace/src/notifications.rs b/crates/workspace/src/notifications.rs index dbf2accf3dd9910426ca3557daf9cee0e5b0a82b..ce54765e3ff81fde015d465d18b03cea44bbbe8f 100644 --- a/crates/workspace/src/notifications.rs +++ b/crates/workspace/src/notifications.rs @@ -5,7 +5,7 @@ use gpui::{ DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, PromptLevel, Render, ScrollHandle, Task, 
TextStyleRefinement, UnderlineStyle, WeakEntity, svg, }; -use markdown::{Markdown, MarkdownElement, MarkdownStyle}; +use markdown::{CopyButtonVisibility, Markdown, MarkdownElement, MarkdownStyle}; use parking_lot::Mutex; use project::project_settings::ProjectSettings; use settings::Settings; @@ -401,8 +401,7 @@ impl Render for LanguageServerPrompt { MarkdownElement::new(self.markdown.clone(), markdown_style(window, cx)) .text_size(TextSize::Small.rems(cx)) .code_block_renderer(markdown::CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: false, + copy_button_visibility: CopyButtonVisibility::Hidden, border: false, }) .on_url_click(|link, _, cx| cx.open_url(&link)), @@ -1227,10 +1226,8 @@ where let mut display = format!("{err:#}"); if !display.ends_with('\n') { display.push('.'); - display.push(' ') } - let detail = - f(err, window, cx).unwrap_or_else(|| format!("{display}Please try again.")); + let detail = f(err, window, cx).unwrap_or(display); window.prompt(PromptLevel::Critical, &msg, Some(&detail), &["Ok"], cx) }) { prompt.await.ok(); diff --git a/crates/workspace/src/pane_group.rs b/crates/workspace/src/pane_group.rs index 3fa4800afb6088e0d106c8b60a835073978e598c..c5f78eef6c4a7403589cb4e947326f9fe87ec610 100644 --- a/crates/workspace/src/pane_group.rs +++ b/crates/workspace/src/pane_group.rs @@ -1,6 +1,7 @@ use crate::{ AnyActiveCall, AppState, CollaboratorId, FollowerState, Pane, ParticipantLocation, Workspace, WorkspaceSettings, + notifications::DetachAndPromptErr, pane_group::element::pane_axis, workspace_settings::{PaneSplitDirectionHorizontal, PaneSplitDirectionVertical}, }; @@ -438,14 +439,19 @@ impl PaneLeaderDecorator for PaneRenderContext<'_> { let app_state = self.app_state.clone(); this.cursor_pointer().on_mouse_down( MouseButton::Left, - move |_, _, cx| { + move |_, window, cx| { crate::join_in_room_project( leader_project_id, leader_user_id, app_state.clone(), cx, ) - .detach_and_log_err(cx); + .detach_and_prompt_err( + 
"Failed to join project", + window, + cx, + |error, _, _| Some(format!("{error:#}")), + ); }, ) }, diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 334ad0925fd62a2ea529ed0e755d605924be266c..d38602ea768e8edc4f3de1ec439e67f0ee432a63 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -971,6 +971,9 @@ impl Domain for WorkspaceDb { sql!( ALTER TABLE remote_connections ADD COLUMN use_podman BOOLEAN; ), + sql!( + ALTER TABLE remote_connections ADD COLUMN remote_env TEXT; + ), ]; // Allow recovering from bad migration that was initially shipped to nightly @@ -1500,6 +1503,7 @@ impl WorkspaceDb { let mut name = None; let mut container_id = None; let mut use_podman = None; + let mut remote_env = None; match options { RemoteConnectionOptions::Ssh(options) => { kind = RemoteConnectionKind::Ssh; @@ -1518,6 +1522,7 @@ impl WorkspaceDb { name = Some(options.name); use_podman = Some(options.use_podman); user = Some(options.remote_user); + remote_env = serde_json::to_string(&options.remote_env).ok(); } #[cfg(any(test, feature = "test-support"))] RemoteConnectionOptions::Mock(options) => { @@ -1536,6 +1541,7 @@ impl WorkspaceDb { name, container_id, use_podman, + remote_env, ) } @@ -1549,6 +1555,7 @@ impl WorkspaceDb { name: Option, container_id: Option, use_podman: Option, + remote_env: Option, ) -> Result { if let Some(id) = this.select_row_bound(sql!( SELECT id @@ -1582,8 +1589,9 @@ impl WorkspaceDb { distro, name, container_id, - use_podman - ) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8) + use_podman, + remote_env + ) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9) RETURNING id ))?(( kind.serialize(), @@ -1594,6 +1602,7 @@ impl WorkspaceDb { name, container_id, use_podman, + remote_env, ))? 
.context("failed to insert remote project")?; Ok(RemoteConnectionId(id)) @@ -1695,13 +1704,13 @@ impl WorkspaceDb { fn remote_connections(&self) -> Result> { Ok(self.select(sql!( SELECT - id, kind, host, port, user, distro, container_id, name, use_podman + id, kind, host, port, user, distro, container_id, name, use_podman, remote_env FROM remote_connections ))?()? .into_iter() .filter_map( - |(id, kind, host, port, user, distro, container_id, name, use_podman)| { + |(id, kind, host, port, user, distro, container_id, name, use_podman, remote_env)| { Some(( RemoteConnectionId(id), Self::remote_connection_from_row( @@ -1713,6 +1722,7 @@ impl WorkspaceDb { container_id, name, use_podman, + remote_env, )?, )) }, @@ -1724,9 +1734,9 @@ impl WorkspaceDb { &self, id: RemoteConnectionId, ) -> Result { - let (kind, host, port, user, distro, container_id, name, use_podman) = + let (kind, host, port, user, distro, container_id, name, use_podman, remote_env) = self.select_row_bound(sql!( - SELECT kind, host, port, user, distro, container_id, name, use_podman + SELECT kind, host, port, user, distro, container_id, name, use_podman, remote_env FROM remote_connections WHERE id = ? ))?(id.0)? @@ -1740,6 +1750,7 @@ impl WorkspaceDb { container_id, name, use_podman, + remote_env, ) .context("invalid remote_connection row") } @@ -1753,6 +1764,7 @@ impl WorkspaceDb { container_id: Option, name: Option, use_podman: Option, + remote_env: Option, ) -> Option { match RemoteConnectionKind::deserialize(&kind)? 
{ RemoteConnectionKind::Wsl => Some(RemoteConnectionOptions::Wsl(WslConnectionOptions { @@ -1766,12 +1778,15 @@ impl WorkspaceDb { ..Default::default() })), RemoteConnectionKind::Docker => { + let remote_env: BTreeMap = + serde_json::from_str(&remote_env?).ok()?; Some(RemoteConnectionOptions::Docker(DockerConnectionOptions { container_id: container_id?, name: name?, remote_user: user?, upload_binary_over_docker_exec: false, use_podman: use_podman?, + remote_env, })) } } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index ae05c2c59012b2caf217ac54a80b377aee87f09d..aa692ab39a6084126c9b15b07856549364b13842 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -5528,7 +5528,9 @@ impl Workspace { if let Some(project_id) = other_project_id { let app_state = self.app_state.clone(); crate::join_in_room_project(project_id, remote_participant.user.id, app_state, cx) - .detach_and_log_err(cx); + .detach_and_prompt_err("Failed to join project", window, cx, |error, _, _| { + Some(format!("{error:#}")) + }); } } diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index c24371667e7d3f984f0960f6b3f18d5d0f1e5f4c..9feaa59c9762208e4e4e85748f21a7a3e0afc3db 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." 
edition.workspace = true name = "zed" -version = "0.231.0" +version = "0.232.0" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs index e5713e90df397a01af850af55338897f9d437e55..17ce65bea4f3354bec1efd9b14d1b0ae08a6263f 100644 --- a/crates/zed/src/visual_test_runner.rs +++ b/crates/zed/src/visual_test_runner.rs @@ -109,7 +109,7 @@ use { image::RgbaImage, project::{AgentId, Project}, project_panel::ProjectPanel, - settings::{NotifyWhenAgentWaiting, Settings as _}, + settings::{NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, Settings as _}, settings_ui::SettingsWindow, std::{ any::Any, @@ -231,7 +231,7 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> agent_settings::AgentSettings::override_global( agent_settings::AgentSettings { notify_when_agent_waiting: NotifyWhenAgentWaiting::Never, - play_sound_when_agent_done: false, + play_sound_when_agent_done: PlaySoundWhenAgentDone::Never, ..agent_settings::AgentSettings::get_global(cx).clone() }, cx, diff --git a/crates/zed/src/zed/telemetry_log.rs b/crates/zed/src/zed/telemetry_log.rs index cc07783f57b27cc57a281089effb208fc3947050..7df7e83d25804edb1a7a73abf055d9adaf080a90 100644 --- a/crates/zed/src/zed/telemetry_log.rs +++ b/crates/zed/src/zed/telemetry_log.rs @@ -12,7 +12,7 @@ use gpui::{ StyleRefinement, Task, TextStyleRefinement, Window, list, prelude::*, }; use language::LanguageRegistry; -use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle}; +use markdown::{CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownStyle}; use project::Project; use settings::Settings; use telemetry_events::{Event, EventWrapper}; @@ -424,8 +424,11 @@ impl TelemetryLogView { }, ) .code_block_renderer(CodeBlockRenderer::Default { - copy_button: false, - copy_button_on_hover: expanded, + copy_button_visibility: if expanded { + 
CopyButtonVisibility::VisibleOnHover + } else { + CopyButtonVisibility::Hidden + }, border: false, }), ), diff --git a/docs/src/ai/agent-settings.md b/docs/src/ai/agent-settings.md index e1de9fba5e79d56ef73236b2e07c70c93819a2c7..28ee927e4ab4110e6e46a4a8d551093243d72a09 100644 --- a/docs/src/ai/agent-settings.md +++ b/docs/src/ai/agent-settings.md @@ -292,13 +292,16 @@ The default value is `false`. ### Sound Notification -Control whether to hear a notification sound when the agent is done generating changes or needs your input. -The default value is `false`. +Control whether to hear a notification sound when the agent is done generating changes or needs your input. The default value is `never`. + +- `"never"` (default) — Never play the sound. +- `"when_hidden"` — Only play the sound when the agent panel is not visible. +- `"always"` — Always play the sound on completion. ```json [settings] { "agent": { - "play_sound_when_agent_done": true + "play_sound_when_agent_done": "never" } } ``` diff --git a/flake.lock b/flake.lock index 4228411894ebc0472e1a2c7fbc0656eb73c5dfe4..c32629aedd533082e43ea3667f1b9cdc6dccfd1b 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "crane": { "locked": { - "lastModified": 1774313767, - "narHash": "sha256-hy0XTQND6avzGEUFrJtYBBpFa/POiiaGBr2vpU6Y9tY=", + "lastModified": 1769737823, + "narHash": "sha256-DrBaNpZ+sJ4stXm+0nBX7zqZT9t9P22zbk6m5YhQxS4=", "owner": "ipetkov", "repo": "crane", - "rev": "3d9df76e29656c679c744968b17fbaf28f0e923d", + "rev": "b2f45c3830aa96b7456a4c4bc327d04d7a43e1ba", "type": "github" }, "original": { @@ -20,11 +20,11 @@ "nixpkgs-lib": "nixpkgs-lib" }, "locked": { - "lastModified": 1772408722, - "narHash": "sha256-rHuJtdcOjK7rAHpHphUb1iCvgkU3GpfvicLMwwnfMT0=", + "lastModified": 1769996383, + "narHash": "sha256-AnYjnFWgS49RlqX7LrC4uA+sCCDBj0Ry/WOJ5XWAsa0=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "f20dc5d9b8027381c474144ecabc9034d6a839a3", + "rev": 
"57928607ea566b5db3ad13af0e57e921e6b12381", "type": "github" }, "original": { @@ -35,11 +35,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1774709303, - "narHash": "sha256-D3Q07BbIA2KnTcSXIqqu9P586uWxN74zNoCH3h2ESHg=", + "lastModified": 1769789167, + "narHash": "sha256-kKB3bqYJU5nzYeIROI82Ef9VtTbu4uA3YydSk/Bioa8=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "8110df5ad7abf5d4c0f6fb0f8f978390e77f9685", + "rev": "62c8382960464ceb98ea593cb8321a2cf8f9e3e5", "type": "github" }, "original": { @@ -51,11 +51,11 @@ }, "nixpkgs-lib": { "locked": { - "lastModified": 1772328832, - "narHash": "sha256-e+/T/pmEkLP6BHhYjx6GmwP5ivonQQn0bJdH9YrRB+Q=", + "lastModified": 1769909678, + "narHash": "sha256-cBEymOf4/o3FD5AZnzC3J9hLbiZ+QDT/KDuyHXVJOpM=", "owner": "nix-community", "repo": "nixpkgs.lib", - "rev": "c185c7a5e5dd8f9add5b2f8ebeff00888b070742", + "rev": "72716169fe93074c333e8d0173151350670b824c", "type": "github" }, "original": { @@ -79,11 +79,11 @@ ] }, "locked": { - "lastModified": 1774840424, - "narHash": "sha256-3Oi4mBKzOCFQYLUyEjyc0s5cnlNj1MzmhpVKoLptpe8=", + "lastModified": 1775013181, + "narHash": "sha256-zPrt6oNM1r/RO5bWYaZ3hthfG9vzkr6kQdoqDd5x4Qw=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "d9f52b51548e76ab8b6e7d647763047ebdec835c", + "rev": "e8046c1d9ccadd497c2344d8fa49dab62f22f7be", "type": "github" }, "original": { diff --git a/nix/build.nix b/nix/build.nix index 9270abbe6f747e0ed78400d13561eadd97edd184..2f283f83a4d8b215d12933178f1e9b3b33617067 100644 --- a/nix/build.nix +++ b/nix/build.nix @@ -38,6 +38,8 @@ libxfixes, libxkbcommon, libxrandr, + libx11, + libxcb, nodejs_22, openssl, perl, @@ -181,8 +183,8 @@ let wayland gpu-lib libglvnd - xorg.libX11 - xorg.libxcb + libx11 + libxcb libdrm libgbm libva diff --git a/nix/livekit-libwebrtc/package.nix b/nix/livekit-libwebrtc/package.nix index 4c0d99926200e619b567cf7a90549f4f882eda42..2a07f5c2170e2db00eb1547b2b820e015f8683ff 100644 --- a/nix/livekit-libwebrtc/package.nix +++ 
b/nix/livekit-libwebrtc/package.nix @@ -37,6 +37,8 @@ libxfixes, libxrandr, libxtst, + libx11, + libxi, pipewire, xorg, }: @@ -224,8 +226,8 @@ stdenv.mkDerivation { libxrandr libxtst pipewire - xorg.libX11 - xorg.libXi + libx11 + libxi ]); preConfigure = ''