Detailed changes
@@ -10159,6 +10159,7 @@ dependencies = [
"language",
"log",
"markdown",
+ "project",
"settings",
"tempfile",
"theme_settings",
@@ -22242,7 +22243,7 @@ dependencies = [
[[package]]
name = "zed_glsl"
-version = "0.2.2"
+version = "0.2.3"
dependencies = [
"zed_extension_api 0.1.0",
]
@@ -22256,7 +22257,7 @@ dependencies = [
[[package]]
name = "zed_proto"
-version = "0.3.1"
+version = "0.3.2"
dependencies = [
"zed_extension_api 0.7.0",
]
@@ -598,6 +598,7 @@
// Change the default action on `menu::Confirm` by setting the parameter
// "alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": true }],
"alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": false }],
+ "ctrl-r": ["projects::OpenRecent", { "create_new_window": false }],
"alt-shift-open": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
// Change to open path modal for existing remote connection by setting the parameter
// "alt-ctrl-shift-o": "["projects::OpenRemote", { "from_existing_connection": true }]",
@@ -1275,6 +1276,8 @@
"alt-down": "markdown::ScrollDownByItem",
"ctrl-home": "markdown::ScrollToTop",
"ctrl-end": "markdown::ScrollToBottom",
+ "find": "buffer_search::Deploy",
+ "ctrl-f": "buffer_search::Deploy",
},
},
{
@@ -668,6 +668,7 @@
// Change the default action on `menu::Confirm` by setting the parameter
// "alt-cmd-o": ["projects::OpenRecent", {"create_new_window": true }],
"alt-cmd-o": ["projects::OpenRecent", { "create_new_window": false }],
+ "ctrl-r": ["projects::OpenRecent", { "create_new_window": false }],
"ctrl-cmd-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
"ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true, "create_new_window": false }],
"cmd-ctrl-b": "branches::OpenRecent",
@@ -1375,6 +1376,7 @@
"alt-down": "markdown::ScrollDownByItem",
"cmd-up": "markdown::ScrollToTop",
"cmd-down": "markdown::ScrollToBottom",
+ "cmd-f": "buffer_search::Deploy",
},
},
{
@@ -1300,6 +1300,8 @@
"alt-down": "markdown::ScrollDownByItem",
"ctrl-home": "markdown::ScrollToTop",
"ctrl-end": "markdown::ScrollToBottom",
+ "find": "buffer_search::Deploy",
+ "ctrl-f": "buffer_search::Deploy",
},
},
{
@@ -1096,6 +1096,7 @@
"ctrl-e": "markdown::ScrollDown",
"g g": "markdown::ScrollToTop",
"shift-g": "markdown::ScrollToBottom",
+ "/": "buffer_search::Deploy",
},
},
{
@@ -1144,6 +1144,11 @@
//
// Default: false
"show_turn_stats": false,
+ // Whether to show the merge conflict indicator in the status bar
+ // that offers to resolve conflicts using the agent.
+ //
+ // Default: true
+ "show_merge_conflict_indicator": true,
},
// Whether the screen sharing icon is shown in the os status bar.
"show_call_status_icon": true,
@@ -19,7 +19,9 @@ pub enum MentionUri {
File {
abs_path: PathBuf,
},
- PastedImage,
+ PastedImage {
+ name: String,
+ },
Directory {
abs_path: PathBuf,
},
@@ -155,7 +157,9 @@ impl MentionUri {
include_warnings,
})
} else if path.starts_with("/agent/pasted-image") {
- Ok(Self::PastedImage)
+ let name =
+ single_query_param(&url, "name")?.unwrap_or_else(|| "Image".to_string());
+ Ok(Self::PastedImage { name })
} else if path.starts_with("/agent/untitled-buffer") {
let fragment = url
.fragment()
@@ -227,7 +231,7 @@ impl MentionUri {
.unwrap_or_default()
.to_string_lossy()
.into_owned(),
- MentionUri::PastedImage => "Image".to_string(),
+ MentionUri::PastedImage { name } => name.clone(),
MentionUri::Symbol { name, .. } => name.clone(),
MentionUri::Thread { name, .. } => name.clone(),
MentionUri::Rule { name, .. } => name.clone(),
@@ -296,7 +300,7 @@ impl MentionUri {
MentionUri::File { abs_path } => {
FileIcons::get_icon(abs_path, cx).unwrap_or_else(|| IconName::File.path().into())
}
- MentionUri::PastedImage => IconName::Image.path().into(),
+ MentionUri::PastedImage { .. } => IconName::Image.path().into(),
MentionUri::Directory { abs_path } => FileIcons::get_folder_icon(false, abs_path, cx)
.unwrap_or_else(|| IconName::Folder.path().into()),
MentionUri::Symbol { .. } => IconName::Code.path().into(),
@@ -322,10 +326,18 @@ impl MentionUri {
url.set_path(&abs_path.to_string_lossy());
url
}
- MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(),
+ MentionUri::PastedImage { name } => {
+ let mut url = Url::parse("zed:///agent/pasted-image").unwrap();
+ url.query_pairs_mut().append_pair("name", name);
+ url
+ }
MentionUri::Directory { abs_path } => {
let mut url = Url::parse("file:///").unwrap();
- url.set_path(&abs_path.to_string_lossy());
+ let mut path = abs_path.to_string_lossy().into_owned();
+ if !path.ends_with('/') && !path.ends_with('\\') {
+ path.push('/');
+ }
+ url.set_path(&path);
url
}
MentionUri::Symbol {
@@ -490,6 +502,21 @@ mod tests {
assert_eq!(uri.to_uri().to_string(), expected);
}
+ #[test]
+ fn test_directory_uri_round_trip_without_trailing_slash() {
+ let uri = MentionUri::Directory {
+ abs_path: PathBuf::from(path!("/path/to/dir")),
+ };
+ let serialized = uri.to_uri().to_string();
+ assert!(serialized.ends_with('/'), "directory URI must end with /");
+ let parsed = MentionUri::parse(&serialized, PathStyle::local()).unwrap();
+ assert!(
+ matches!(parsed, MentionUri::Directory { .. }),
+ "expected Directory variant, got {:?}",
+ parsed
+ );
+ }
+
#[test]
fn test_parse_symbol_uri() {
let symbol_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20");
@@ -253,7 +253,7 @@ impl UserMessage {
)
.ok();
}
- MentionUri::PastedImage => {
+ MentionUri::PastedImage { .. } => {
debug_panic!("pasted image URI should not be used in mention content")
}
MentionUri::Directory { .. } => {
@@ -595,6 +595,7 @@ mod tests {
message_editor_min_lines: 1,
tool_permissions,
show_turn_stats: false,
+ show_merge_conflict_indicator: true,
new_thread_location: Default::default(),
sidebar_side: Default::default(),
thinking_display: Default::default(),
@@ -176,6 +176,7 @@ pub struct AgentSettings {
pub use_modifier_to_send: bool,
pub message_editor_min_lines: usize,
pub show_turn_stats: bool,
+ pub show_merge_conflict_indicator: bool,
pub tool_permissions: ToolPermissions,
pub new_thread_location: NewThreadLocation,
}
@@ -629,6 +630,7 @@ impl Settings for AgentSettings {
use_modifier_to_send: agent.use_modifier_to_send.unwrap(),
message_editor_min_lines: agent.message_editor_min_lines.unwrap(),
show_turn_stats: agent.show_turn_stats.unwrap(),
+ show_merge_conflict_indicator: agent.show_merge_conflict_indicator.unwrap(),
tool_permissions: compile_tool_permissions(agent.tool_permissions),
new_thread_location: agent.new_thread_location.unwrap_or_default(),
}
@@ -734,6 +734,7 @@ mod tests {
message_editor_min_lines: 1,
tool_permissions: Default::default(),
show_turn_stats: false,
+ show_merge_conflict_indicator: true,
new_thread_location: Default::default(),
sidebar_side: Default::default(),
thinking_display: Default::default(),
@@ -816,13 +816,10 @@ impl ThreadView {
}
}
}));
- if self.parent_id.is_none() {
- self.suppress_merge_conflict_notification(cx);
- }
generation
}
- pub fn stop_turn(&mut self, generation: usize, cx: &mut Context<Self>) {
+ pub fn stop_turn(&mut self, generation: usize, _cx: &mut Context<Self>) {
if self.turn_fields.turn_generation != generation {
return;
}
@@ -833,25 +830,6 @@ impl ThreadView {
.map(|started| started.elapsed());
self.turn_fields.last_turn_tokens = self.turn_fields.turn_tokens.take();
self.turn_fields._turn_timer_task = None;
- if self.parent_id.is_none() {
- self.unsuppress_merge_conflict_notification(cx);
- }
- }
-
- fn suppress_merge_conflict_notification(&self, cx: &mut Context<Self>) {
- self.workspace
- .update(cx, |workspace, cx| {
- workspace.suppress_notification(&workspace::merge_conflict_notification_id(), cx);
- })
- .ok();
- }
-
- fn unsuppress_merge_conflict_notification(&self, cx: &mut Context<Self>) {
- self.workspace
- .update(cx, |workspace, _cx| {
- workspace.unsuppress(workspace::merge_conflict_notification_id());
- })
- .ok();
}
pub fn update_turn_tokens(&mut self, cx: &App) {
@@ -8819,7 +8797,7 @@ pub(crate) fn open_link(
.open_path(path, None, true, window, cx)
.detach_and_log_err(cx);
}
- MentionUri::PastedImage => {}
+ MentionUri::PastedImage { .. } => {}
MentionUri::Directory { abs_path } => {
let project = workspace.project();
let Some(entry_id) = project.update(cx, |project, cx| {
@@ -154,7 +154,7 @@ impl MentionSet {
MentionUri::Selection { abs_path: None, .. } => Task::ready(Err(anyhow!(
"Untitled buffer selection mentions are not supported for paste"
))),
- MentionUri::PastedImage
+ MentionUri::PastedImage { .. }
| MentionUri::TerminalSelection { .. }
| MentionUri::MergeConflict { .. } => {
Task::ready(Err(anyhow!("Unsupported mention URI type for paste")))
@@ -283,7 +283,7 @@ impl MentionSet {
include_errors,
include_warnings,
} => self.confirm_mention_for_diagnostics(include_errors, include_warnings, cx),
- MentionUri::PastedImage => {
+ MentionUri::PastedImage { .. } => {
debug_panic!("pasted image URI should not be included in completions");
Task::ready(Err(anyhow!(
"pasted imaged URI should not be included in completions"
@@ -739,9 +739,11 @@ pub(crate) async fn insert_images_as_context(
return;
}
- let replacement_text = MentionUri::PastedImage.as_link().to_string();
-
for (image, name) in images {
+ let mention_uri = MentionUri::PastedImage {
+ name: name.to_string(),
+ };
+ let replacement_text = mention_uri.as_link().to_string();
let Some((text_anchor, multibuffer_anchor)) = editor
.update_in(cx, |editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
@@ -804,7 +806,13 @@ pub(crate) async fn insert_images_as_context(
.shared();
mention_set.update(cx, |mention_set, _cx| {
- mention_set.insert_mention(crease_id, MentionUri::PastedImage, task.clone())
+ mention_set.insert_mention(
+ crease_id,
+ MentionUri::PastedImage {
+ name: name.to_string(),
+ },
+ task.clone(),
+ )
});
if task
@@ -873,7 +881,7 @@ pub(crate) fn paste_images_as_context(
Some(window.spawn(cx, async move |mut cx| {
use itertools::Itertools;
- let default_name: SharedString = MentionUri::PastedImage.name().into();
+ let default_name: SharedString = "Image".into();
let (mut images, paths): (Vec<(gpui::Image, SharedString)>, Vec<_>) = clipboard
.into_entries()
.filter_map(|entry| match entry {
@@ -261,7 +261,7 @@ async fn resolve_pasted_context_items(
) -> (Vec<ResolvedPastedContextItem>, Vec<Entity<Worktree>>) {
let mut items = Vec::new();
let mut added_worktrees = Vec::new();
- let default_image_name: SharedString = MentionUri::PastedImage.name().into();
+ let default_image_name: SharedString = "Image".into();
for entry in entries {
match entry {
@@ -812,7 +812,9 @@ impl MessageEditor {
)
.uri(match uri {
MentionUri::File { .. } => Some(uri.to_uri().to_string()),
- MentionUri::PastedImage => None,
+ MentionUri::PastedImage { .. } => {
+ Some(uri.to_uri().to_string())
+ }
other => {
debug_panic!(
"unexpected mention uri for image: {:?}",
@@ -1638,7 +1640,9 @@ impl MessageEditor {
let mention_uri = if let Some(uri) = uri {
MentionUri::parse(&uri, path_style)
} else {
- Ok(MentionUri::PastedImage)
+ Ok(MentionUri::PastedImage {
+ name: "Image".to_string(),
+ })
};
let Some(mention_uri) = mention_uri.log_err() else {
continue;
@@ -4074,6 +4078,11 @@ mod tests {
&mut cx,
);
+ let image_name = temporary_image_path
+ .file_name()
+ .and_then(|n| n.to_str())
+ .unwrap_or("Image")
+ .to_string();
std::fs::remove_file(&temporary_image_path).expect("remove temp png");
let expected_file_uri = MentionUri::File {
@@ -4081,12 +4090,16 @@ mod tests {
}
.to_uri()
.to_string();
- let expected_image_uri = MentionUri::PastedImage.to_uri().to_string();
+ let expected_image_uri = MentionUri::PastedImage {
+ name: image_name.clone(),
+ }
+ .to_uri()
+ .to_string();
editor.update(&mut cx, |editor, cx| {
assert_eq!(
editor.text(cx),
- format!("[@Image]({expected_image_uri}) [@file.txt]({expected_file_uri}) ")
+ format!("[@{image_name}]({expected_image_uri}) [@file.txt]({expected_file_uri}) ")
);
});
@@ -4094,7 +4107,7 @@ mod tests {
assert_eq!(contents.len(), 2);
assert!(contents.iter().any(|(uri, mention)| {
- *uri == MentionUri::PastedImage && matches!(mention, Mention::Image(_))
+ matches!(uri, MentionUri::PastedImage { .. }) && matches!(mention, Mention::Image(_))
}));
assert!(contents.iter().any(|(uri, mention)| {
*uri == MentionUri::File {
@@ -184,7 +184,7 @@ fn open_mention_uri(
MentionUri::Fetch { url } => {
cx.open_url(url.as_str());
}
- MentionUri::PastedImage
+ MentionUri::PastedImage { .. }
| MentionUri::Selection { abs_path: None, .. }
| MentionUri::Diagnostics { .. }
| MentionUri::TerminalSelection { .. }
@@ -1086,6 +1086,7 @@ impl SearchableItem for DapLogView {
// DAP log is read-only.
replacement: false,
selection: false,
+ select_all: true,
}
}
fn active_match_index(
@@ -60,7 +60,8 @@ pub(crate) enum ShutdownAction {
#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub(crate) struct MountDefinition {
- pub(crate) source: String,
+ #[serde(default)]
+ pub(crate) source: Option<String>,
pub(crate) target: String,
#[serde(rename = "type")]
pub(crate) mount_type: Option<String>,
@@ -68,23 +69,23 @@ pub(crate) struct MountDefinition {
impl Display for MountDefinition {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- write!(
- f,
- "type={},source={},target={},consistency=cached",
- self.mount_type.clone().unwrap_or_else(|| {
- if self.source.starts_with('/')
- || self.source.starts_with("\\\\")
- || self.source.get(1..3) == Some(":\\")
- || self.source.get(1..3) == Some(":/")
+ let mount_type = self.mount_type.clone().unwrap_or_else(|| {
+ if let Some(source) = &self.source {
+ if source.starts_with('/')
+ || source.starts_with("\\\\")
+ || source.get(1..3) == Some(":\\")
+ || source.get(1..3) == Some(":/")
{
- "bind".to_string()
- } else {
- "volume".to_string()
+ return "bind".to_string();
}
- }),
- self.source,
- self.target
- )
+ }
+ "volume".to_string()
+ });
+ write!(f, "type={}", mount_type)?;
+ if let Some(source) = &self.source {
+ write!(f, ",source={}", source)?;
+ }
+ write!(f, ",target={},consistency=cached", self.target)
}
}
@@ -257,13 +258,6 @@ impl DevContainer {
}
return DevContainerBuildType::None;
}
-
- pub(crate) fn has_features(&self) -> bool {
- self.features
- .as_ref()
- .map(|features| !features.is_empty())
- .unwrap_or(false)
- }
}
// Custom deserializer that parses the entire customizations object as a
@@ -454,8 +448,6 @@ where
}
}
- let source = source
- .ok_or_else(|| D::Error::custom(format!("mount string missing 'source': {}", s)))?;
let target = target
.ok_or_else(|| D::Error::custom(format!("mount string missing 'target': {}", s)))?;
@@ -509,9 +501,6 @@ where
}
}
- let source = source.ok_or_else(|| {
- D::Error::custom(format!("mount string missing 'source': {}", s))
- })?;
let target = target.ok_or_else(|| {
D::Error::custom(format!("mount string missing 'target': {}", s))
})?;
@@ -880,7 +869,7 @@ mod test {
])),
container_user: Some("myUser".to_string()),
mounts: Some(vec![MountDefinition {
- source: "/localfolder/app".to_string(),
+ source: Some("/localfolder/app".to_string()),
target: "/workspaces/app".to_string(),
mount_type: Some("volume".to_string()),
}]),
@@ -889,7 +878,7 @@ mod test {
override_command: Some(true),
workspace_folder: Some("/workspaces".to_string()),
workspace_mount: Some(MountDefinition {
- source: "/app".to_string(),
+ source: Some("/app".to_string()),
target: "/workspaces/app".to_string(),
mount_type: Some("bind".to_string())
}),
@@ -1323,12 +1312,12 @@ mod test {
container_user: Some("myUser".to_string()),
mounts: Some(vec![
MountDefinition {
- source: "/localfolder/app".to_string(),
+ source: Some("/localfolder/app".to_string()),
target: "/workspaces/app".to_string(),
mount_type: Some("volume".to_string()),
},
MountDefinition {
- source: "dev-containers-cli-bashhistory".to_string(),
+ source: Some("dev-containers-cli-bashhistory".to_string()),
target: "/home/node/commandhistory".to_string(),
mount_type: None,
}
@@ -1338,7 +1327,7 @@ mod test {
override_command: Some(true),
workspace_folder: Some("/workspaces".to_string()),
workspace_mount: Some(MountDefinition {
- source: "/folder".to_string(),
+ source: Some("/folder".to_string()),
target: "/workspace".to_string(),
mount_type: Some("bind".to_string())
}),
@@ -1363,7 +1352,7 @@ mod test {
#[test]
fn mount_definition_should_use_bind_type_for_unix_absolute_paths() {
let mount = MountDefinition {
- source: "/home/user/project".to_string(),
+ source: Some("/home/user/project".to_string()),
target: "/workspaces/project".to_string(),
mount_type: None,
};
@@ -1379,7 +1368,7 @@ mod test {
#[test]
fn mount_definition_should_use_bind_type_for_windows_unc_paths() {
let mount = MountDefinition {
- source: "\\\\server\\share\\project".to_string(),
+ source: Some("\\\\server\\share\\project".to_string()),
target: "/workspaces/project".to_string(),
mount_type: None,
};
@@ -1395,7 +1384,7 @@ mod test {
#[test]
fn mount_definition_should_use_bind_type_for_windows_absolute_paths() {
let mount = MountDefinition {
- source: "C:\\Users\\mrg\\cli".to_string(),
+ source: Some("C:\\Users\\mrg\\cli".to_string()),
target: "/workspaces/cli".to_string(),
mount_type: None,
};
@@ -1407,4 +1396,17 @@ mod test {
"Expected mount type 'bind' for Windows absolute path, but got: {rendered}"
);
}
+
+ #[test]
+ fn mount_definition_should_omit_source_when_none() {
+ let mount = MountDefinition {
+ source: None,
+ target: "/tmp".to_string(),
+ mount_type: Some("tmpfs".to_string()),
+ };
+
+ let rendered = mount.to_string();
+
+ assert_eq!(rendered, "type=tmpfs,target=/tmp,consistency=cached");
+ }
}
@@ -317,13 +317,6 @@ impl DevContainerManifest {
let root_image_tag = self.get_base_image_from_config().await?;
let root_image = self.docker_client.inspect(&root_image_tag).await?;
- if dev_container.build_type() == DevContainerBuildType::Image
- && !dev_container.has_features()
- {
- log::debug!("No resources to download. Proceeding with just the image");
- return Ok(());
- }
-
let temp_base = std::env::temp_dir().join("devcontainer-zed");
let timestamp = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
@@ -701,10 +694,29 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
}
let dev_container = self.dev_container();
match dev_container.build_type() {
- DevContainerBuildType::Image | DevContainerBuildType::Dockerfile => {
+ DevContainerBuildType::Image => {
+ let built_docker_image = self.build_docker_image().await?;
+ let Some(base_image) = dev_container.image.as_ref() else {
+ log::error!("Dev container is using and image which can't be referenced");
+ return Err(DevContainerError::DevContainerParseFailed);
+ };
+ let built_docker_image = self
+ .update_remote_user_uid(built_docker_image, base_image)
+ .await?;
+
+ let resources = self.build_merged_resources(built_docker_image)?;
+ Ok(DevContainerBuildResources::Docker(resources))
+ }
+ DevContainerBuildType::Dockerfile => {
let built_docker_image = self.build_docker_image().await?;
+ let Some(features_build_info) = &self.features_build_info else {
+ log::error!(
+ "Can't attempt to build update UID dockerfile before initial docker build"
+ );
+ return Err(DevContainerError::DevContainerParseFailed);
+ };
let built_docker_image = self
- .update_remote_user_uid(built_docker_image, None)
+ .update_remote_user_uid(built_docker_image, &features_build_info.image_tag)
.await?;
let resources = self.build_merged_resources(built_docker_image)?;
@@ -816,7 +828,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
let (main_service_name, main_service) =
find_primary_service(&docker_compose_resources, self)?;
- let built_service_image = if main_service
+ let (built_service_image, built_service_image_tag) = if main_service
.build
.as_ref()
.map(|b| b.dockerfile.as_ref())
@@ -905,16 +917,19 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
self.docker_client
.docker_compose_build(&docker_compose_resources.files, &self.project_name())
.await?;
- self.docker_client
- .inspect(&features_build_info.image_tag)
- .await?
+ (
+ self.docker_client
+ .inspect(&features_build_info.image_tag)
+ .await?,
+ &features_build_info.image_tag,
+ )
} else if let Some(image) = &main_service.image {
if dev_container
.features
.as_ref()
.is_none_or(|features| features.is_empty())
{
- self.docker_client.inspect(image).await?
+ (self.docker_client.inspect(image).await?, image)
} else {
if !supports_buildkit {
self.build_feature_content_image().await?;
@@ -994,9 +1009,12 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
.docker_compose_build(&docker_compose_resources.files, &self.project_name())
.await?;
- self.docker_client
- .inspect(&features_build_info.image_tag)
- .await?
+ (
+ self.docker_client
+ .inspect(&features_build_info.image_tag)
+ .await?,
+ &features_build_info.image_tag,
+ )
}
} else {
log::error!("Docker compose must have either image or dockerfile defined");
@@ -1004,7 +1022,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
};
let built_service_image = self
- .update_remote_user_uid(built_service_image, Some(&features_build_info.image_tag))
+ .update_remote_user_uid(built_service_image, built_service_image_tag)
.await?;
let resources = self.build_merged_resources(built_service_image)?;
@@ -1074,11 +1092,12 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
.filter_map(|mount| {
if let Some(mount_type) = &mount.mount_type
&& mount_type.to_lowercase() == "volume"
+ && let Some(source) = &mount.source
{
Some((
- mount.source.clone(),
+ source.clone(),
DockerComposeVolume {
- name: mount.source.clone(),
+ name: source.clone(),
},
))
} else {
@@ -1312,7 +1331,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
async fn update_remote_user_uid(
&self,
image: DockerInspect,
- _override_tag: Option<&str>,
+ _base_image: &str,
) -> Result<DockerInspect, DevContainerError> {
Ok(image)
}
@@ -1320,7 +1339,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
async fn update_remote_user_uid(
&self,
image: DockerInspect,
- override_tag: Option<&str>,
+ base_image: &str,
) -> Result<DockerInspect, DevContainerError> {
let dev_container = self.dev_container();
@@ -1394,18 +1413,13 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
DevContainerError::FilesystemError
})?;
- let updated_image_tag = override_tag
- .map(|t| t.to_string())
- .unwrap_or_else(|| format!("{}-uid", features_build_info.image_tag));
+ let updated_image_tag = format!("{}-uid", features_build_info.image_tag);
let mut command = Command::new(self.docker_client.docker_cli());
command.args(["build"]);
command.args(["-f", &dockerfile_path.display().to_string()]);
command.args(["-t", &updated_image_tag]);
- command.args([
- "--build-arg",
- &format!("BASE_IMAGE={}", features_build_info.image_tag),
- ]);
+ command.args(["--build-arg", &format!("BASE_IMAGE={}", base_image)]);
command.args(["--build-arg", &format!("REMOTE_USER={}", remote_user)]);
command.args(["--build-arg", &format!("NEW_UID={}", host_uid)]);
command.args(["--build-arg", &format!("NEW_GID={}", host_gid)]);
@@ -1731,7 +1745,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
};
Ok(MountDefinition {
- source: self.local_workspace_folder(),
+ source: Some(self.local_workspace_folder()),
target: format!("/workspaces/{}", project_directory_name.display()),
mount_type: None,
})
@@ -2384,6 +2398,8 @@ mod test {
use serde_json_lenient::Value;
use util::{command::Command, paths::SanitizedPath};
+ #[cfg(not(target_os = "windows"))]
+ use crate::docker::DockerComposeServicePort;
use crate::{
DevContainerConfig, DevContainerContext,
command_json::CommandRunner,
@@ -3311,8 +3327,6 @@ chmod +x ./install.sh
#[cfg(not(target_os = "windows"))]
#[gpui::test]
async fn test_spawns_devcontainer_with_docker_compose(cx: &mut TestAppContext) {
- use crate::docker::DockerComposeServicePort;
-
cx.executor().allow_parking();
env_logger::try_init().ok();
let given_devcontainer_contents = r#"
@@ -3563,7 +3577,7 @@ ENV DOCKER_BUILDKIT=1
])),
volumes: vec![
MountDefinition {
- source: "dind-var-lib-docker-42dad4b4ca7b8ced".to_string(),
+ source: Some("dind-var-lib-docker-42dad4b4ca7b8ced".to_string()),
target: "/var/lib/docker".to_string(),
mount_type: Some("volume".to_string())
}
@@ -4296,6 +4310,175 @@ chmod +x ./install.sh
}))
}
+ #[cfg(not(target_os = "windows"))]
+ #[gpui::test]
+ async fn test_spawns_devcontainer_with_plain_image(cx: &mut TestAppContext) {
+ cx.executor().allow_parking();
+ env_logger::try_init().ok();
+ let given_devcontainer_contents = r#"
+ {
+ "name": "cli-${devcontainerId}",
+ "image": "test_image:latest",
+ }
+ "#;
+
+ let (test_dependencies, mut devcontainer_manifest) =
+ init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+ .await
+ .unwrap();
+
+ devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+ let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap();
+
+ let files = test_dependencies.fs.files();
+ let uid_dockerfile = files
+ .iter()
+ .find(|f| {
+ f.file_name()
+ .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile")
+ })
+ .expect("to be found");
+ let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap();
+
+ assert_eq!(
+ &uid_dockerfile,
+ r#"ARG BASE_IMAGE
+FROM $BASE_IMAGE
+
+USER root
+
+ARG REMOTE_USER
+ARG NEW_UID
+ARG NEW_GID
+SHELL ["/bin/sh", "-c"]
+RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \
+ eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \
+ eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \
+ if [ -z "$OLD_UID" ]; then \
+ echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \
+ elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \
+ echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \
+ elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \
+ echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \
+ else \
+ if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \
+ FREE_GID=65532; \
+ while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \
+ echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \
+ sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \
+ fi; \
+ echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \
+ sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \
+ if [ "$OLD_GID" != "$NEW_GID" ]; then \
+ sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \
+ fi; \
+ chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \
+ fi;
+
+ARG IMAGE_USER
+USER $IMAGE_USER
+
+# Ensure that /etc/profile does not clobber the existing path
+RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
+"#
+ );
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ #[gpui::test]
+ async fn test_spawns_devcontainer_with_docker_compose_and_plain_image(cx: &mut TestAppContext) {
+ cx.executor().allow_parking();
+ env_logger::try_init().ok();
+ let given_devcontainer_contents = r#"
+ {
+ "name": "cli-${devcontainerId}",
+ "dockerComposeFile": "docker-compose-plain.yml",
+ "service": "app",
+ }
+ "#;
+
+ let (test_dependencies, mut devcontainer_manifest) =
+ init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+ .await
+ .unwrap();
+
+ test_dependencies
+ .fs
+ .atomic_write(
+ PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/docker-compose-plain.yml"),
+ r#"
+services:
+ app:
+ image: test_image:latest
+ command: sleep infinity
+ volumes:
+ - ..:/workspace:cached
+ "#
+ .trim()
+ .to_string(),
+ )
+ .await
+ .unwrap();
+
+ devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+ let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap();
+
+ let files = test_dependencies.fs.files();
+ let uid_dockerfile = files
+ .iter()
+ .find(|f| {
+ f.file_name()
+ .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile")
+ })
+ .expect("to be found");
+ let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap();
+
+ assert_eq!(
+ &uid_dockerfile,
+ r#"ARG BASE_IMAGE
+FROM $BASE_IMAGE
+
+USER root
+
+ARG REMOTE_USER
+ARG NEW_UID
+ARG NEW_GID
+SHELL ["/bin/sh", "-c"]
+RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \
+ eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \
+ eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \
+ if [ -z "$OLD_UID" ]; then \
+ echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \
+ elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \
+ echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \
+ elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \
+ echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \
+ else \
+ if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \
+ FREE_GID=65532; \
+ while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \
+ echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \
+ sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \
+ fi; \
+ echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \
+ sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \
+ if [ "$OLD_GID" != "$NEW_GID" ]; then \
+ sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \
+ fi; \
+ chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \
+ fi;
+
+ARG IMAGE_USER
+USER $IMAGE_USER
+
+# Ensure that /etc/profile does not clobber the existing path
+RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
+"#
+ );
+ }
+
pub(crate) struct RecordedExecCommand {
pub(crate) _container_id: String,
pub(crate) _remote_folder: String,
@@ -4418,6 +4601,24 @@ chmod +x ./install.sh
state: None,
});
}
+ if id == "test_image:latest" {
+ return Ok(DockerInspect {
+ id: "sha256:610e6cfca95280188b021774f8cf69dd6f49bdb6eebc34c5ee2010f4d51cc104"
+ .to_string(),
+ config: DockerInspectConfig {
+ labels: DockerConfigLabels {
+ metadata: Some(vec![HashMap::from([(
+ "remoteUser".to_string(),
+ Value::String("node".to_string()),
+ )])]),
+ },
+ env: Vec::new(),
+ image_user: Some("root".to_string()),
+ },
+ mounts: None,
+ state: None,
+ });
+ }
Err(DevContainerError::DockerNotAvailable)
}
@@ -4444,7 +4645,7 @@ chmod +x ./install.sh
additional_contexts: None,
}),
volumes: vec![MountDefinition {
- source: "../..".to_string(),
+ source: Some("../..".to_string()),
target: "/workspaces".to_string(),
mount_type: Some("bind".to_string()),
}],
@@ -4457,7 +4658,7 @@ chmod +x ./install.sh
DockerComposeService {
image: Some("postgres:14.1".to_string()),
volumes: vec![MountDefinition {
- source: "postgres-data".to_string(),
+ source: Some("postgres-data".to_string()),
target: "/var/lib/postgresql/data".to_string(),
mount_type: Some("volume".to_string()),
}],
@@ -4472,6 +4673,25 @@ chmod +x ./install.sh
)]),
}));
}
+ if config_files.len() == 1
+ && config_files.get(0)
+ == Some(&PathBuf::from(
+ "/path/to/local/project/.devcontainer/docker-compose-plain.yml",
+ ))
+ {
+ return Ok(Some(DockerComposeConfig {
+ name: None,
+ services: HashMap::from([(
+ "app".to_string(),
+ DockerComposeService {
+ image: Some("test_image:latest".to_string()),
+ command: vec!["sleep".to_string(), "infinity".to_string()],
+ ..Default::default()
+ },
+ )]),
+ ..Default::default()
+ }));
+ }
Err(DevContainerError::DockerNotAvailable)
}
async fn docker_compose_build(
@@ -56,12 +56,11 @@ impl DockerInspectConfig {
pub(crate) fn env_as_map(&self) -> Result<HashMap<String, String>, DevContainerError> {
let mut map = HashMap::new();
for env_var in &self.env {
- let parts: Vec<&str> = env_var.split("=").collect();
- if parts.len() != 2 {
- log::error!("Unable to parse {env_var} into and environment key-value");
+ let Some((key, value)) = env_var.split_once('=') else {
+ log::error!("Unable to parse {env_var} into an environment key-value");
return Err(DevContainerError::DevContainerParseFailed);
- }
- map.insert(parts[0].to_string(), parts[1].to_string());
+ };
+ map.insert(key.to_string(), value.to_string());
}
Ok(map)
}
@@ -142,6 +141,7 @@ pub(crate) struct DockerComposeService {
pub(crate) build: Option<DockerComposeServiceBuild>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) privileged: Option<bool>,
+ #[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) volumes: Vec<MountDefinition>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) env_file: Option<Vec<String>>,
@@ -149,6 +149,12 @@ pub(crate) struct DockerComposeService {
pub(crate) ports: Vec<DockerComposeServicePort>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) network_mode: Option<String>,
+ #[serde(
+ default,
+ skip_serializing_if = "Vec::is_empty",
+ deserialize_with = "deserialize_nullable_vec"
+ )]
+ pub(crate) command: Vec<String>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)]
@@ -422,12 +428,8 @@ where
values
.iter()
.filter_map(|v| {
- let parts: Vec<&str> = v.split("=").collect();
- if parts.len() != 2 {
- None
- } else {
- Some((parts[0].to_string(), parts[1].to_string()))
- }
+ let (key, value) = v.split_once('=')?;
+ Some((key.to_string(), value.to_string()))
})
.collect(),
))
@@ -459,6 +461,14 @@ where
deserializer.deserialize_any(LabelsVisitor)
}
+fn deserialize_nullable_vec<'de, D, T>(deserializer: D) -> Result<Vec<T>, D::Error>
+where
+ D: Deserializer<'de>,
+ T: Deserialize<'de>,
+{
+ Option::<Vec<T>>::deserialize(deserializer).map(|opt| opt.unwrap_or_default())
+}
+
fn deserialize_nullable_labels<'de, D>(deserializer: D) -> Result<DockerConfigLabels, D::Error>
where
D: Deserializer<'de>,
@@ -533,6 +543,46 @@ mod test {
},
};
+ #[test]
+ fn should_parse_simple_env_var() {
+ let config = super::DockerInspectConfig {
+ labels: super::DockerConfigLabels { metadata: None },
+ image_user: None,
+ env: vec!["KEY=value".to_string()],
+ };
+
+ let map = config.env_as_map().unwrap();
+ assert_eq!(map.get("KEY").unwrap(), "value");
+ }
+
+ #[test]
+ fn should_parse_env_var_with_equals_in_value() {
+ let config = super::DockerInspectConfig {
+ labels: super::DockerConfigLabels { metadata: None },
+ image_user: None,
+ env: vec!["COMPLEX=key=val other>=1.0".to_string()],
+ };
+
+ let map = config.env_as_map().unwrap();
+ assert_eq!(map.get("COMPLEX").unwrap(), "key=val other>=1.0");
+ }
+
+ #[test]
+ fn should_parse_simple_label() {
+ let json = r#"{"volumes": [], "labels": ["com.example.key=value"]}"#;
+ let service: DockerComposeService = serde_json_lenient::from_str(json).unwrap();
+ let labels = service.labels.unwrap();
+ assert_eq!(labels.get("com.example.key").unwrap(), "value");
+ }
+
+ #[test]
+ fn should_parse_label_with_equals_in_value() {
+ let json = r#"{"volumes": [], "labels": ["com.example.key=value=with=equals"]}"#;
+ let service: DockerComposeService = serde_json_lenient::from_str(json).unwrap();
+ let labels = service.labels.unwrap();
+ assert_eq!(labels.get("com.example.key").unwrap(), "value=with=equals");
+ }
+
#[test]
fn should_create_docker_inspect_command() {
let docker = Docker::new("docker");
@@ -987,12 +1037,13 @@ mod test {
(
"app".to_string(),
DockerComposeService {
+ command: vec!["sleep".to_string(), "infinity".to_string()],
image: Some(
"mcr.microsoft.com/devcontainers/rust:2-1-bookworm".to_string(),
),
volumes: vec![MountDefinition {
mount_type: Some("bind".to_string()),
- source: "/path/to".to_string(),
+ source: Some("/path/to".to_string()),
target: "/workspaces".to_string(),
}],
network_mode: Some("service:db".to_string()),
@@ -1022,7 +1073,7 @@ mod test {
image: Some("postgres:14.1".to_string()),
volumes: vec![MountDefinition {
mount_type: Some("volume".to_string()),
- source: "postgres-data".to_string(),
+ source: Some("postgres-data".to_string()),
target: "/var/lib/postgresql/data".to_string(),
}],
..Default::default()
@@ -1114,6 +1165,51 @@ mod test {
assert!(config.volumes.is_empty());
}
+ #[test]
+ fn should_deserialize_compose_with_missing_volumes_field() {
+ let given_config = r#"
+ {
+ "name": "devcontainer",
+ "services": {
+ "sidecar": {
+ "image": "ubuntu:24.04"
+ }
+ }
+ }
+ "#;
+
+ let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap();
+ let service = config.services.get("sidecar").unwrap();
+ assert!(service.volumes.is_empty());
+ }
+
+ #[test]
+ fn should_deserialize_compose_volume_without_source() {
+ let given_config = r#"
+ {
+ "name": "devcontainer",
+ "services": {
+ "app": {
+ "image": "ubuntu:24.04",
+ "volumes": [
+ {
+ "type": "tmpfs",
+ "target": "/tmp"
+ }
+ ]
+ }
+ }
+ }
+ "#;
+
+ let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap();
+ let service = config.services.get("app").unwrap();
+ assert_eq!(service.volumes.len(), 1);
+ assert_eq!(service.volumes[0].source, None);
+ assert_eq!(service.volumes[0].target, "/tmp");
+ assert_eq!(service.volumes[0].mount_type, Some("tmpfs".to_string()));
+ }
+
#[test]
fn should_deserialize_inspect_without_labels() {
let given_config = r#"
@@ -1630,6 +1630,7 @@ impl SearchableItem for Editor {
regex: true,
replacement: false,
selection: false,
+ select_all: true,
find_in_results: true,
}
} else {
@@ -1639,6 +1640,7 @@ impl SearchableItem for Editor {
regex: true,
replacement: true,
selection: true,
+ select_all: true,
find_in_results: false,
}
}
@@ -10,6 +10,7 @@ use git::{
GRAPH_CHUNK_SIZE, GitRepository, GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder,
LogSource, PushOptions, Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree,
},
+ stash::GitStash,
status::{
DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
UnmergedStatus,
@@ -61,6 +62,7 @@ pub struct FakeGitRepositoryState {
pub simulated_create_worktree_error: Option<String>,
pub refs: HashMap<String, String>,
pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
+ pub stash_entries: GitStash,
}
impl FakeGitRepositoryState {
@@ -81,6 +83,7 @@ impl FakeGitRepositoryState {
remotes: HashMap::default(),
graph_commits: Vec::new(),
commit_history: Vec::new(),
+ stash_entries: Default::default(),
}
}
}
@@ -428,13 +431,13 @@ impl GitRepository for FakeGitRepository {
}
fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
- async { Ok(git::stash::GitStash::default()) }.boxed()
+ self.with_state_async(false, |state| Ok(state.stash_entries.clone()))
}
fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
self.with_state_async(false, move |state| {
let current_branch = &state.current_branch_name;
- Ok(state
+ let mut branches = state
.branches
.iter()
.map(|branch_name| {
@@ -452,7 +455,11 @@ impl GitRepository for FakeGitRepository {
upstream: None,
}
})
- .collect())
+ .collect::<Vec<_>>();
+ // compute snapshot expects these to be sorted by ref_name
+ // because that's what git itself does
+ branches.sort_by(|a, b| a.ref_name.cmp(&b.ref_name));
+ Ok(branches)
})
}
@@ -1148,7 +1148,7 @@ impl GitGraph {
}
}
}
- RepositoryEvent::BranchChanged => {
+ RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
self.pending_select_sha = None;
// Only invalidate if we scanned atleast once,
// meaning we are not inside the initial repo loading state
@@ -1157,6 +1157,12 @@ impl GitGraph {
self.invalidate_state(cx);
}
}
+ RepositoryEvent::StashEntriesChanged if self.log_source == LogSource::All => {
+ self.pending_select_sha = None;
+ if repository.read(cx).scan_id > 1 {
+ self.invalidate_state(cx);
+ }
+ }
RepositoryEvent::GraphEvent(_, _) => {}
_ => {}
}
@@ -2388,9 +2394,8 @@ impl GitGraph {
let local_y = position_y - canvas_bounds.origin.y;
if local_y >= px(0.) && local_y < canvas_bounds.size.height {
- let row_in_viewport = (local_y / self.row_height).floor() as usize;
- let scroll_rows = (scroll_offset_y / self.row_height).floor() as usize;
- let absolute_row = scroll_rows + row_in_viewport;
+ let absolute_y = local_y + scroll_offset_y;
+ let absolute_row = (absolute_y / self.row_height).floor() as usize;
if absolute_row < self.graph_data.commits.len() {
return Some(absolute_row);
@@ -3737,8 +3742,8 @@ mod tests {
assert!(
observed_repository_events
.iter()
- .any(|event| matches!(event, RepositoryEvent::BranchChanged)),
- "initial repository scan should emit BranchChanged"
+ .any(|event| matches!(event, RepositoryEvent::HeadChanged)),
+ "initial repository scan should emit HeadChanged"
);
let commit_count_after = repository.read_with(cx, |repo, _| {
repo.get_graph_data(crate::LogSource::default(), crate::LogOrder::default())
@@ -3856,11 +3861,220 @@ mod tests {
);
cx.run_until_parked();
- let commit_count_after_switch_back =
+ // Verify graph data is reloaded from repository cache on switch back
+ let reloaded_commit_count =
git_graph.read_with(&*cx, |graph, _| graph.graph_data.commits.len());
assert_eq!(
- initial_commit_count, commit_count_after_switch_back,
- "graph_data should be repopulated from cache after switching back to the same repo"
+ reloaded_commit_count,
+ commits.len(),
+ "graph data should be reloaded after switching back"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_graph_data_reloaded_after_stash_change(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ Path::new("/project"),
+ json!({
+ ".git": {},
+ "file.txt": "content",
+ }),
+ )
+ .await;
+
+ let initial_head = Oid::from_bytes(&[1; 20]).unwrap();
+ let initial_stash = Oid::from_bytes(&[2; 20]).unwrap();
+ let updated_head = Oid::from_bytes(&[3; 20]).unwrap();
+ let updated_stash = Oid::from_bytes(&[4; 20]).unwrap();
+
+ fs.set_graph_commits(
+ Path::new("/project/.git"),
+ vec![
+ Arc::new(InitialGraphCommitData {
+ sha: initial_head,
+ parents: smallvec![initial_stash],
+ ref_names: vec!["HEAD".into(), "refs/heads/main".into()],
+ }),
+ Arc::new(InitialGraphCommitData {
+ sha: initial_stash,
+ parents: smallvec![],
+ ref_names: vec!["refs/stash".into()],
+ }),
+ ],
+ );
+ fs.with_git_state(Path::new("/project/.git"), true, |state| {
+ state.stash_entries = git::stash::GitStash {
+ entries: vec![git::stash::StashEntry {
+ index: 0,
+ oid: initial_stash,
+ message: "initial stash".to_string(),
+ branch: Some("main".to_string()),
+ timestamp: 1,
+ }]
+ .into(),
+ };
+ })
+ .unwrap();
+
+ let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
+ cx.run_until_parked();
+
+ let repository = project.read_with(cx, |project, cx| {
+ project
+ .active_repository(cx)
+ .expect("should have a repository")
+ });
+
+ let (multi_workspace, cx) = cx.add_window_view(|window, cx| {
+ workspace::MultiWorkspace::test_new(project.clone(), window, cx)
+ });
+ let workspace_weak =
+ multi_workspace.read_with(&*cx, |multi, _| multi.workspace().downgrade());
+ let git_graph = cx.new_window_entity(|window, cx| {
+ GitGraph::new(
+ repository.read(cx).id,
+ project.read(cx).git_store().clone(),
+ workspace_weak,
+ window,
+ cx,
+ )
+ });
+ cx.run_until_parked();
+
+ let initial_shas = git_graph.read_with(&*cx, |graph, _| {
+ graph
+ .graph_data
+ .commits
+ .iter()
+ .map(|commit| commit.data.sha)
+ .collect::<Vec<_>>()
+ });
+ assert_eq!(initial_shas, vec![initial_head, initial_stash]);
+
+ fs.set_graph_commits(
+ Path::new("/project/.git"),
+ vec![
+ Arc::new(InitialGraphCommitData {
+ sha: updated_head,
+ parents: smallvec![updated_stash],
+ ref_names: vec!["HEAD".into(), "refs/heads/main".into()],
+ }),
+ Arc::new(InitialGraphCommitData {
+ sha: updated_stash,
+ parents: smallvec![],
+ ref_names: vec!["refs/stash".into()],
+ }),
+ ],
);
+ fs.with_git_state(Path::new("/project/.git"), true, |state| {
+ state.stash_entries = git::stash::GitStash {
+ entries: vec![git::stash::StashEntry {
+ index: 0,
+ oid: updated_stash,
+ message: "updated stash".to_string(),
+ branch: Some("main".to_string()),
+ timestamp: 1,
+ }]
+ .into(),
+ };
+ })
+ .unwrap();
+
+ project
+ .update(cx, |project, cx| project.git_scans_complete(cx))
+ .await;
+ cx.run_until_parked();
+
+ cx.draw(
+ point(px(0.), px(0.)),
+ gpui::size(px(1200.), px(800.)),
+ |_, _| git_graph.clone().into_any_element(),
+ );
+ cx.run_until_parked();
+
+ let reloaded_shas = git_graph.read_with(&*cx, |graph, _| {
+ graph
+ .graph_data
+ .commits
+ .iter()
+ .map(|commit| commit.data.sha)
+ .collect::<Vec<_>>()
+ });
+ assert_eq!(reloaded_shas, vec![updated_head, updated_stash]);
+ }
+
+ #[gpui::test]
+ async fn test_git_graph_row_at_position_rounding(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ Path::new("/project"),
+ serde_json::json!({
+ ".git": {},
+ "file.txt": "content",
+ }),
+ )
+ .await;
+
+ let mut rng = StdRng::seed_from_u64(42);
+ let commits = generate_random_commit_dag(&mut rng, 10, false);
+ fs.set_graph_commits(Path::new("/project/.git"), commits.clone());
+
+ let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
+ cx.run_until_parked();
+
+ let repository = project.read_with(cx, |project, cx| {
+ project
+ .active_repository(cx)
+ .expect("should have a repository")
+ });
+
+ let (multi_workspace, cx) = cx.add_window_view(|window, cx| {
+ workspace::MultiWorkspace::test_new(project.clone(), window, cx)
+ });
+
+ let workspace_weak =
+ multi_workspace.read_with(&*cx, |multi, _| multi.workspace().downgrade());
+
+ let git_graph = cx.new_window_entity(|window, cx| {
+ GitGraph::new(
+ repository.read(cx).id,
+ project.read(cx).git_store().clone(),
+ workspace_weak,
+ window,
+ cx,
+ )
+ });
+ cx.run_until_parked();
+
+ git_graph.update(cx, |graph, cx| {
+ assert!(
+ graph.graph_data.commits.len() >= 10,
+ "graph should load dummy commits"
+ );
+
+ graph.row_height = px(20.0);
+ let origin_y = px(100.0);
+ graph.graph_canvas_bounds.set(Some(Bounds {
+ origin: point(px(0.0), origin_y),
+ size: gpui::size(px(100.0), px(1000.0)),
+ }));
+
+ graph.table_interaction_state.update(cx, |state, _| {
+ state.set_scroll_offset(point(px(0.0), px(-15.0)))
+ });
+ let pos_y = origin_y + px(10.0);
+ let absolute_calc_row = graph.row_at_position(pos_y, cx);
+
+ assert_eq!(
+ absolute_calc_row,
+ Some(1),
+ "Row calculation should yield absolute row exactly"
+ );
+ });
}
}
@@ -6,19 +6,19 @@ use editor::{
display_map::{BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId},
};
use gpui::{
- App, Context, DismissEvent, Entity, InteractiveElement as _, ParentElement as _, Subscription,
- Task, WeakEntity,
+ App, ClickEvent, Context, Empty, Entity, InteractiveElement as _, ParentElement as _,
+ Subscription, Task, WeakEntity,
};
use language::{Anchor, Buffer, BufferId};
use project::{
ConflictRegion, ConflictSet, ConflictSetUpdate, Project, ProjectItem as _,
- git_store::{GitStoreEvent, RepositoryEvent},
+ git_store::{GitStore, GitStoreEvent, RepositoryEvent},
};
use settings::Settings;
-use std::{cell::RefCell, ops::Range, rc::Rc, sync::Arc};
-use ui::{ActiveTheme, Divider, Element as _, Styled, Window, prelude::*};
+use std::{ops::Range, sync::Arc};
+use ui::{ButtonLike, Divider, Tooltip, prelude::*};
use util::{ResultExt as _, debug_panic, maybe};
-use workspace::{Workspace, notifications::simple_message_notification::MessageNotification};
+use workspace::{StatusItemView, Workspace, item::ItemHandle};
use zed_actions::agent::{
ConflictContent, ResolveConflictedFilesWithAgent, ResolveConflictsWithAgent,
};
@@ -433,74 +433,6 @@ fn collect_conflicted_file_paths(project: &Project, cx: &App) -> Vec<String> {
paths
}
-pub(crate) fn register_conflict_notification(
- workspace: &mut Workspace,
- cx: &mut Context<Workspace>,
-) {
- let git_store = workspace.project().read(cx).git_store().clone();
-
- let last_shown_paths: Rc<RefCell<HashSet<String>>> = Rc::new(RefCell::new(HashSet::default()));
-
- cx.subscribe(&git_store, move |workspace, _git_store, event, cx| {
- let conflicts_changed = matches!(
- event,
- GitStoreEvent::ConflictsUpdated
- | GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::StatusesChanged, _)
- );
- if !AgentSettings::get_global(cx).enabled(cx) || !conflicts_changed {
- return;
- }
- let project = workspace.project().read(cx);
- if project.is_via_collab() {
- return;
- }
-
- if workspace.is_notification_suppressed(workspace::merge_conflict_notification_id()) {
- return;
- }
-
- let paths = collect_conflicted_file_paths(project, cx);
- let notification_id = workspace::merge_conflict_notification_id();
- let current_paths_set: HashSet<String> = paths.iter().cloned().collect();
-
- if paths.is_empty() {
- last_shown_paths.borrow_mut().clear();
- workspace.dismiss_notification(¬ification_id, cx);
- } else if *last_shown_paths.borrow() != current_paths_set {
- // Only show the notification if the set of conflicted paths has changed.
- // This prevents re-showing after the user dismisses it while working on the same conflicts.
- *last_shown_paths.borrow_mut() = current_paths_set;
- let file_count = paths.len();
- workspace.show_notification(notification_id, cx, |cx| {
- cx.new(|cx| {
- let message = format!(
- "{file_count} file{} have unresolved merge conflicts",
- if file_count == 1 { "" } else { "s" }
- );
-
- MessageNotification::new(message, cx)
- .primary_message("Resolve with Agent")
- .primary_icon(IconName::ZedAssistant)
- .primary_icon_color(Color::Muted)
- .primary_on_click({
- let paths = paths.clone();
- move |window, cx| {
- window.dispatch_action(
- Box::new(ResolveConflictedFilesWithAgent {
- conflicted_file_paths: paths.clone(),
- }),
- cx,
- );
- cx.emit(DismissEvent);
- }
- })
- })
- });
- }
- })
- .detach();
-}
-
pub(crate) fn resolve_conflict(
editor: WeakEntity<Editor>,
resolved_conflict: ConflictRegion,
@@ -573,3 +505,171 @@ pub(crate) fn resolve_conflict(
}
})
}
+
+pub struct MergeConflictIndicator {
+ project: Entity<Project>,
+ conflicted_paths: Vec<String>,
+ last_shown_paths: HashSet<String>,
+ dismissed: bool,
+ _subscription: Subscription,
+}
+
+impl MergeConflictIndicator {
+ pub fn new(workspace: &Workspace, cx: &mut Context<Self>) -> Self {
+ let project = workspace.project().clone();
+ let git_store = project.read(cx).git_store().clone();
+
+ let subscription = cx.subscribe(&git_store, Self::on_git_store_event);
+
+ let conflicted_paths = collect_conflicted_file_paths(project.read(cx), cx);
+ let last_shown_paths: HashSet<String> = conflicted_paths.iter().cloned().collect();
+
+ Self {
+ project,
+ conflicted_paths,
+ last_shown_paths,
+ dismissed: false,
+ _subscription: subscription,
+ }
+ }
+
+ fn on_git_store_event(
+ &mut self,
+ _git_store: Entity<GitStore>,
+ event: &GitStoreEvent,
+ cx: &mut Context<Self>,
+ ) {
+ let conflicts_changed = matches!(
+ event,
+ GitStoreEvent::ConflictsUpdated
+ | GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::StatusesChanged, _)
+ );
+
+ let agent_settings = AgentSettings::get_global(cx);
+ if !agent_settings.enabled(cx)
+ || !agent_settings.show_merge_conflict_indicator
+ || !conflicts_changed
+ {
+ return;
+ }
+
+ let project = self.project.read(cx);
+ if project.is_via_collab() {
+ return;
+ }
+
+ let paths = collect_conflicted_file_paths(project, cx);
+ let current_paths_set: HashSet<String> = paths.iter().cloned().collect();
+
+ if paths.is_empty() {
+ self.conflicted_paths.clear();
+ self.last_shown_paths.clear();
+ self.dismissed = false;
+ cx.notify();
+ } else if self.last_shown_paths != current_paths_set {
+ self.last_shown_paths = current_paths_set;
+ self.conflicted_paths = paths;
+ self.dismissed = false;
+ cx.notify();
+ }
+ }
+
+ fn resolve_with_agent(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ window.dispatch_action(
+ Box::new(ResolveConflictedFilesWithAgent {
+ conflicted_file_paths: self.conflicted_paths.clone(),
+ }),
+ cx,
+ );
+ self.dismissed = true;
+ cx.notify();
+ }
+
+ fn dismiss(&mut self, _: &ClickEvent, _window: &mut Window, cx: &mut Context<Self>) {
+ self.dismissed = true;
+ cx.notify();
+ }
+}
+
+impl Render for MergeConflictIndicator {
+ fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let agent_settings = AgentSettings::get_global(cx);
+ if !agent_settings.enabled(cx)
+ || !agent_settings.show_merge_conflict_indicator
+ || self.conflicted_paths.is_empty()
+ || self.dismissed
+ {
+ return Empty.into_any_element();
+ }
+
+ let file_count = self.conflicted_paths.len();
+
+ let message: SharedString = format!(
+ "Resolve Merge Conflict{} with Agent",
+ if file_count == 1 { "" } else { "s" }
+ )
+ .into();
+
+ let tooltip_label: SharedString = format!(
+ "Found {} {} across the codebase",
+ file_count,
+ if file_count == 1 {
+ "conflict"
+ } else {
+ "conflicts"
+ }
+ )
+ .into();
+
+ let border_color = cx.theme().colors().text_accent.opacity(0.2);
+
+ h_flex()
+ .h(rems_from_px(22.))
+ .rounded_sm()
+ .border_1()
+ .border_color(border_color)
+ .child(
+ ButtonLike::new("update-button")
+ .child(
+ h_flex()
+ .h_full()
+ .gap_1()
+ .child(
+ Icon::new(IconName::GitMergeConflict)
+ .size(IconSize::Small)
+ .color(Color::Muted),
+ )
+ .child(Label::new(message).size(LabelSize::Small)),
+ )
+ .tooltip(move |_, cx| {
+ Tooltip::with_meta(
+ tooltip_label.clone(),
+ None,
+ "Click to Resolve with Agent",
+ cx,
+ )
+ })
+ .on_click(cx.listener(|this, _, window, cx| {
+ this.resolve_with_agent(window, cx);
+ })),
+ )
+ .child(
+ div().border_l_1().border_color(border_color).child(
+ IconButton::new("dismiss-merge-conflicts", IconName::Close)
+ .icon_size(IconSize::XSmall)
+ .on_click(cx.listener(Self::dismiss)),
+ ),
+ )
+ .into_any_element()
+ }
+}
+
+impl StatusItemView for MergeConflictIndicator {
+ fn set_active_pane_item(
+ &mut self,
+ _: Option<&dyn ItemHandle>,
+ _window: &mut Window,
+ _: &mut Context<Self>,
+ ) {
+ }
+}
@@ -780,7 +780,7 @@ impl GitPanel {
move |this, _git_store, event, window, cx| match event {
GitStoreEvent::RepositoryUpdated(
_,
- RepositoryEvent::StatusesChanged | RepositoryEvent::BranchChanged,
+ RepositoryEvent::StatusesChanged | RepositoryEvent::HeadChanged,
true,
)
| GitStoreEvent::RepositoryAdded
@@ -47,6 +47,8 @@ pub mod stash_picker;
pub mod text_diff_view;
pub mod worktree_picker;
+pub use conflict_view::MergeConflictIndicator;
+
pub fn init(cx: &mut App) {
editor::set_blame_renderer(blame_ui::GitBlameRenderer, cx);
commit_view::init(cx);
@@ -62,7 +64,6 @@ pub fn init(cx: &mut App) {
git_panel::register(workspace);
repository_selector::register(workspace);
git_picker::register(workspace);
- conflict_view::register_conflict_notification(workspace, cx);
let project = workspace.project().read(cx);
if project.is_read_only(cx) {
@@ -880,6 +880,7 @@ impl SearchableItem for LspLogView {
// LSP log is read-only.
replacement: false,
selection: false,
+ select_all: true,
}
}
fn active_match_index(
@@ -154,6 +154,8 @@ impl MarkdownStyle {
base_text_style: text_style.clone(),
syntax: cx.theme().syntax().clone(),
selection_background_color: colors.element_selection_background,
+ rule_color: colors.border,
+ block_quote_border_color: colors.border,
code_block_overflow_x_scroll: true,
heading_level_styles: Some(HeadingLevelStyles {
h1: Some(TextStyleRefinement {
@@ -261,6 +263,8 @@ pub struct Markdown {
copied_code_blocks: HashSet<ElementId>,
code_block_scroll_handles: BTreeMap<usize, ScrollHandle>,
context_menu_selected_text: Option<String>,
+ search_highlights: Vec<Range<usize>>,
+ active_search_highlight: Option<usize>,
}
#[derive(Clone, Copy, Default)]
@@ -428,6 +432,8 @@ impl Markdown {
copied_code_blocks: HashSet::default(),
code_block_scroll_handles: BTreeMap::default(),
context_menu_selected_text: None,
+ search_highlights: Vec::new(),
+ active_search_highlight: None,
};
this.parse(cx);
this
@@ -539,6 +545,8 @@ impl Markdown {
self.autoscroll_request = None;
self.pending_parse = None;
self.should_reparse = false;
+ self.search_highlights.clear();
+ self.active_search_highlight = None;
// Don't clear parsed_markdown here - keep existing content visible until new parse completes
self.parse(cx);
}
@@ -574,6 +582,40 @@ impl Markdown {
}
}
+ pub fn set_search_highlights(
+ &mut self,
+ highlights: Vec<Range<usize>>,
+ active: Option<usize>,
+ cx: &mut Context<Self>,
+ ) {
+ self.search_highlights = highlights;
+ self.active_search_highlight = active;
+ cx.notify();
+ }
+
+ pub fn clear_search_highlights(&mut self, cx: &mut Context<Self>) {
+ if !self.search_highlights.is_empty() || self.active_search_highlight.is_some() {
+ self.search_highlights.clear();
+ self.active_search_highlight = None;
+ cx.notify();
+ }
+ }
+
+ pub fn set_active_search_highlight(&mut self, active: Option<usize>, cx: &mut Context<Self>) {
+ if self.active_search_highlight != active {
+ self.active_search_highlight = active;
+ cx.notify();
+ }
+ }
+
+ pub fn search_highlights(&self) -> &[Range<usize>] {
+ &self.search_highlights
+ }
+
+ pub fn active_search_highlight(&self) -> Option<usize> {
+ self.active_search_highlight
+ }
+
fn copy(&self, text: &RenderedText, _: &mut Window, cx: &mut Context<Self>) {
if self.selection.end <= self.selection.start {
return;
@@ -1082,18 +1124,18 @@ impl MarkdownElement {
builder.pop_div();
}
- fn paint_selection(
- &self,
+ fn paint_highlight_range(
bounds: Bounds<Pixels>,
+ start: usize,
+ end: usize,
+ color: Hsla,
rendered_text: &RenderedText,
window: &mut Window,
- cx: &mut App,
) {
- let selection = self.markdown.read(cx).selection.clone();
- let selection_start = rendered_text.position_for_source_index(selection.start);
- let selection_end = rendered_text.position_for_source_index(selection.end);
+ let start_pos = rendered_text.position_for_source_index(start);
+ let end_pos = rendered_text.position_for_source_index(end);
if let Some(((start_position, start_line_height), (end_position, end_line_height))) =
- selection_start.zip(selection_end)
+ start_pos.zip(end_pos)
{
if start_position.y == end_position.y {
window.paint_quad(quad(
@@ -1102,7 +1144,7 @@ impl MarkdownElement {
point(end_position.x, end_position.y + end_line_height),
),
Pixels::ZERO,
- self.style.selection_background_color,
+ color,
Edges::default(),
Hsla::transparent_black(),
BorderStyle::default(),
@@ -1114,7 +1156,7 @@ impl MarkdownElement {
point(bounds.right(), start_position.y + start_line_height),
),
Pixels::ZERO,
- self.style.selection_background_color,
+ color,
Edges::default(),
Hsla::transparent_black(),
BorderStyle::default(),
@@ -1127,7 +1169,7 @@ impl MarkdownElement {
point(bounds.right(), end_position.y),
),
Pixels::ZERO,
- self.style.selection_background_color,
+ color,
Edges::default(),
Hsla::transparent_black(),
BorderStyle::default(),
@@ -1140,7 +1182,7 @@ impl MarkdownElement {
point(end_position.x, end_position.y + end_line_height),
),
Pixels::ZERO,
- self.style.selection_background_color,
+ color,
Edges::default(),
Hsla::transparent_black(),
BorderStyle::default(),
@@ -1149,6 +1191,52 @@ impl MarkdownElement {
}
}
+ fn paint_selection(
+ &self,
+ bounds: Bounds<Pixels>,
+ rendered_text: &RenderedText,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ let selection = self.markdown.read(cx).selection.clone();
+ Self::paint_highlight_range(
+ bounds,
+ selection.start,
+ selection.end,
+ self.style.selection_background_color,
+ rendered_text,
+ window,
+ );
+ }
+
+ fn paint_search_highlights(
+ &self,
+ bounds: Bounds<Pixels>,
+ rendered_text: &RenderedText,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ let markdown = self.markdown.read(cx);
+ let active_index = markdown.active_search_highlight;
+ let colors = cx.theme().colors();
+
+ for (i, highlight_range) in markdown.search_highlights.iter().enumerate() {
+ let color = if Some(i) == active_index {
+ colors.search_active_match_background
+ } else {
+ colors.search_match_background
+ };
+ Self::paint_highlight_range(
+ bounds,
+ highlight_range.start,
+ highlight_range.end,
+ color,
+ rendered_text,
+ window,
+ );
+ }
+ }
+
fn paint_mouse_listeners(
&mut self,
hitbox: &Hitbox,
@@ -1953,6 +2041,7 @@ impl Element for MarkdownElement {
self.paint_mouse_listeners(hitbox, &rendered_markdown.text, window, cx);
rendered_markdown.element.paint(window, cx);
+ self.paint_search_highlights(bounds, &rendered_markdown.text, window, cx);
self.paint_selection(bounds, &rendered_markdown.text, window, cx);
}
}
@@ -21,6 +21,7 @@ gpui.workspace = true
language.workspace = true
log.workspace = true
markdown.workspace = true
+project.workspace = true
settings.workspace = true
theme_settings.workspace = true
ui.workspace = true
@@ -1,4 +1,5 @@
use std::cmp::min;
+use std::ops::Range;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::Duration;
@@ -16,11 +17,15 @@ use markdown::{
CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownFont,
MarkdownOptions, MarkdownStyle,
};
+use project::search::SearchQuery;
use settings::Settings;
use theme_settings::ThemeSettings;
use ui::{WithScrollbar, prelude::*};
use util::normalize_path;
-use workspace::item::{Item, ItemHandle};
+use workspace::item::{Item, ItemBufferKind, ItemHandle};
+use workspace::searchable::{
+ Direction, SearchEvent, SearchOptions, SearchToken, SearchableItem, SearchableItemHandle,
+};
use workspace::{OpenOptions, OpenVisible, Pane, Workspace};
use crate::{
@@ -382,6 +387,7 @@ impl MarkdownPreviewView {
markdown.reset(contents, cx);
});
view.sync_preview_to_source_index(selection_start, should_reveal_selection, cx);
+ cx.emit(SearchEvent::MatchesInvalidated);
}
view.pending_update_task = None;
cx.notify();
@@ -751,6 +757,7 @@ impl Focusable for MarkdownPreviewView {
}
impl EventEmitter<()> for MarkdownPreviewView {}
+impl EventEmitter<SearchEvent> for MarkdownPreviewView {}
impl Item for MarkdownPreviewView {
type Event = ();
@@ -775,6 +782,18 @@ impl Item for MarkdownPreviewView {
}
fn to_item_events(_event: &Self::Event, _f: &mut dyn FnMut(workspace::item::ItemEvent)) {}
+
+ fn buffer_kind(&self, _cx: &App) -> ItemBufferKind {
+ ItemBufferKind::Singleton
+ }
+
+ fn as_searchable(
+ &self,
+ handle: &Entity<Self>,
+ _: &App,
+ ) -> Option<Box<dyn SearchableItemHandle>> {
+ Some(Box::new(handle.clone()))
+ }
}
impl Render for MarkdownPreviewView {
@@ -807,6 +826,140 @@ impl Render for MarkdownPreviewView {
}
}
+impl SearchableItem for MarkdownPreviewView {
+ type Match = Range<usize>;
+
+ fn supported_options(&self) -> SearchOptions {
+ SearchOptions {
+ case: true,
+ word: true,
+ regex: true,
+ replacement: false,
+ selection: false,
+ select_all: false,
+ find_in_results: false,
+ }
+ }
+
+ fn get_matches(&self, _window: &mut Window, cx: &mut App) -> (Vec<Self::Match>, SearchToken) {
+ (
+ self.markdown.read(cx).search_highlights().to_vec(),
+ SearchToken::default(),
+ )
+ }
+
+ fn clear_matches(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
+ let had_highlights = !self.markdown.read(cx).search_highlights().is_empty();
+ self.markdown.update(cx, |markdown, cx| {
+ markdown.clear_search_highlights(cx);
+ });
+ if had_highlights {
+ cx.emit(SearchEvent::MatchesInvalidated);
+ }
+ }
+
+ fn update_matches(
+ &mut self,
+ matches: &[Self::Match],
+ active_match_index: Option<usize>,
+ _token: SearchToken,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let old_highlights = self.markdown.read(cx).search_highlights();
+ let changed = old_highlights != matches;
+ self.markdown.update(cx, |markdown, cx| {
+ markdown.set_search_highlights(matches.to_vec(), active_match_index, cx);
+ });
+ if changed {
+ cx.emit(SearchEvent::MatchesInvalidated);
+ }
+ }
+
+ fn query_suggestion(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> String {
+ self.markdown.read(cx).selected_text().unwrap_or_default()
+ }
+
+ fn activate_match(
+ &mut self,
+ index: usize,
+ matches: &[Self::Match],
+ _token: SearchToken,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if let Some(match_range) = matches.get(index) {
+ let start = match_range.start;
+ self.markdown.update(cx, |markdown, cx| {
+ markdown.set_active_search_highlight(Some(index), cx);
+ markdown.request_autoscroll_to_source_index(start, cx);
+ });
+ cx.emit(SearchEvent::ActiveMatchChanged);
+ }
+ }
+
+ fn select_matches(
+ &mut self,
+ _matches: &[Self::Match],
+ _token: SearchToken,
+ _window: &mut Window,
+ _cx: &mut Context<Self>,
+ ) {
+ }
+
+ fn replace(
+ &mut self,
+ _: &Self::Match,
+ _: &SearchQuery,
+ _token: SearchToken,
+ _window: &mut Window,
+ _: &mut Context<Self>,
+ ) {
+ }
+
+ fn find_matches(
+ &mut self,
+ query: Arc<SearchQuery>,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Task<Vec<Self::Match>> {
+ let source = self.markdown.read(cx).source().to_string();
+ cx.background_spawn(async move { query.search_str(&source) })
+ }
+
+ fn active_match_index(
+ &mut self,
+ direction: Direction,
+ matches: &[Self::Match],
+ _token: SearchToken,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Option<usize> {
+ if matches.is_empty() {
+ return None;
+ }
+
+ let markdown = self.markdown.read(cx);
+ let current_source_index = markdown
+ .active_search_highlight()
+ .and_then(|i| markdown.search_highlights().get(i))
+ .map(|m| m.start)
+ .or(self.active_source_index)
+ .unwrap_or(0);
+
+ match direction {
+ Direction::Next => matches
+ .iter()
+ .position(|m| m.start >= current_source_index)
+ .or(Some(0)),
+ Direction::Prev => matches
+ .iter()
+ .rposition(|m| m.start <= current_source_index)
+ .or(Some(matches.len().saturating_sub(1))),
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
use crate::markdown_preview_view::ImageSource;
@@ -287,6 +287,7 @@ pub struct RepositorySnapshot {
pub original_repo_abs_path: Arc<Path>,
pub path_style: PathStyle,
pub branch: Option<Branch>,
+ pub branch_list: Arc<[Branch]>,
pub head_commit: Option<CommitDetails>,
pub scan_id: u64,
pub merge: MergeDetails,
@@ -428,7 +429,8 @@ pub enum GitGraphEvent {
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
StatusesChanged,
- BranchChanged,
+ HeadChanged,
+ BranchListChanged,
StashEntriesChanged,
GitWorktreeListChanged,
PendingOpsChanged { pending_ops: SumTree<PendingOps> },
@@ -3687,6 +3689,7 @@ impl RepositorySnapshot {
.unwrap_or_else(|| work_directory_abs_path.clone()),
work_directory_abs_path,
branch: None,
+ branch_list: Arc::from([]),
head_commit: None,
scan_id: 0,
merge: Default::default(),
@@ -4049,11 +4052,17 @@ impl Repository {
.shared();
cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
- RepositoryEvent::BranchChanged => {
+ RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
if this.scan_id > 1 {
this.initial_graph_data.clear();
}
}
+ RepositoryEvent::StashEntriesChanged => {
+ if this.scan_id > 1 {
+ this.initial_graph_data
+ .retain(|(log_source, _), _| *log_source != LogSource::All);
+ }
+ }
_ => {}
})
.detach();
@@ -5596,7 +5605,7 @@ impl Repository {
log::info!("head branch after scan is {branch:?}");
let snapshot = this.update(&mut cx, |this, cx| {
this.snapshot.branch = branch;
- cx.emit(RepositoryEvent::BranchChanged);
+ cx.emit(RepositoryEvent::HeadChanged);
this.snapshot.clone()
})?;
if let Some(updates_tx) = updates_tx {
@@ -6464,7 +6473,7 @@ impl Repository {
.as_ref()
.map(proto_to_commit_details);
if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
- cx.emit(RepositoryEvent::BranchChanged)
+ cx.emit(RepositoryEvent::HeadChanged)
}
self.snapshot.branch = new_branch;
self.snapshot.head_commit = new_head_commit;
@@ -7402,7 +7411,8 @@ async fn compute_snapshot(
}
})
.await?;
- let branch = branches.into_iter().find(|branch| branch.is_head);
+ let branch = branches.iter().find(|branch| branch.is_head).cloned();
+ let branch_list: Arc<[Branch]> = branches.into();
let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
.into_iter()
@@ -7425,14 +7435,16 @@ async fn compute_snapshot(
.await?;
let snapshot = this.update(cx, |this, cx| {
- let branch_changed =
+ let head_changed =
branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
+ let branch_list_changed = *branch_list != *this.snapshot.branch_list;
let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;
this.snapshot = RepositorySnapshot {
id,
work_directory_abs_path,
branch,
+ branch_list: branch_list.clone(),
head_commit,
remote_origin_url,
remote_upstream_url,
@@ -7441,8 +7453,12 @@ async fn compute_snapshot(
..prev_snapshot
};
- if branch_changed {
- cx.emit(RepositoryEvent::BranchChanged);
+ if head_changed {
+ cx.emit(RepositoryEvent::HeadChanged);
+ }
+
+ if branch_list_changed {
+ cx.emit(RepositoryEvent::BranchListChanged);
}
if worktrees_changed {
@@ -70,7 +70,7 @@ impl BranchDiff {
}
GitStoreEvent::RepositoryUpdated(
event_repo_id,
- RepositoryEvent::StatusesChanged | RepositoryEvent::BranchChanged,
+ RepositoryEvent::StatusesChanged | RepositoryEvent::HeadChanged,
_,
) => this
.repo
@@ -620,4 +620,56 @@ impl SearchQuery {
Self::Text { .. } => None,
}
}
+
+ pub fn search_str(&self, text: &str) -> Vec<Range<usize>> {
+ if self.as_str().is_empty() {
+ return Vec::new();
+ }
+
+ let is_word_char = |c: char| c.is_alphanumeric() || c == '_';
+
+ let mut matches = Vec::new();
+ match self {
+ Self::Text {
+ search, whole_word, ..
+ } => {
+ for mat in search.find_iter(text.as_bytes()) {
+ if *whole_word {
+ let prev_char = text[..mat.start()].chars().last();
+ let next_char = text[mat.end()..].chars().next();
+ if prev_char.is_some_and(&is_word_char)
+ || next_char.is_some_and(&is_word_char)
+ {
+ continue;
+ }
+ }
+ matches.push(mat.start()..mat.end());
+ }
+ }
+ Self::Regex {
+ regex,
+ multiline,
+ one_match_per_line,
+ ..
+ } => {
+ if *multiline {
+ for mat in regex.find_iter(text).flatten() {
+ matches.push(mat.start()..mat.end());
+ }
+ } else {
+ let mut line_offset = 0;
+ for line in text.split('\n') {
+ for mat in regex.find_iter(line).flatten() {
+ matches.push((line_offset + mat.start())..(line_offset + mat.end()));
+ if *one_match_per_line {
+ break;
+ }
+ }
+ line_offset += line.len() + 1;
+ }
+ }
+ }
+ }
+ matches
+ }
}
@@ -11161,7 +11161,7 @@ async fn test_odd_events_for_ignored_dirs(
assert_eq!(
repository_updates.lock().drain(..).collect::<Vec<_>>(),
vec![
- RepositoryEvent::BranchChanged,
+ RepositoryEvent::HeadChanged,
RepositoryEvent::StatusesChanged,
RepositoryEvent::StatusesChanged,
],
@@ -140,11 +140,20 @@ impl PickerDelegate for ProjectSymbolsDelegate {
);
editor.update(cx, |editor, cx| {
+ let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx);
+ let Some(buffer_snapshot) = multibuffer_snapshot.as_singleton() else {
+ return;
+ };
+ let text_anchor = buffer_snapshot.anchor_before(position);
+ let Some(anchor) = multibuffer_snapshot.anchor_in_buffer(text_anchor)
+ else {
+ return;
+ };
editor.change_selections(
SelectionEffects::scroll(Autoscroll::center()),
window,
cx,
- |s| s.select_ranges([position..position]),
+ |s| s.select_ranges([anchor..anchor]),
);
});
})?;
@@ -2003,7 +2003,7 @@ mod tests {
use std::path::PathBuf;
use editor::Editor;
- use gpui::{TestAppContext, UpdateGlobal, WindowHandle};
+ use gpui::{TestAppContext, UpdateGlobal, VisualTestContext, WindowHandle};
use serde_json::json;
use settings::SettingsStore;
@@ -2242,6 +2242,71 @@ mod tests {
.unwrap();
}
+ #[gpui::test]
+ async fn test_dev_container_modal_not_dismissed_on_backdrop_click(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ path!("/project"),
+ json!({
+ ".devcontainer": {
+ "devcontainer.json": "{}"
+ },
+ "src": {
+ "main.rs": "fn main() {}"
+ }
+ }),
+ )
+ .await;
+
+ cx.update(|cx| {
+ open_paths(
+ &[PathBuf::from(path!("/project"))],
+ app_state,
+ workspace::OpenOptions::default(),
+ cx,
+ )
+ })
+ .await
+ .unwrap();
+
+ assert_eq!(cx.update(|cx| cx.windows().len()), 1);
+ let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::<MultiWorkspace>().unwrap());
+
+ cx.run_until_parked();
+
+ cx.dispatch_action(*multi_workspace, OpenDevContainer);
+
+ multi_workspace
+ .update(cx, |multi_workspace, _, cx| {
+ assert!(
+ multi_workspace
+ .active_modal::<RemoteServerProjects>(cx)
+ .is_some(),
+ "Dev container modal should be open"
+ );
+ })
+ .unwrap();
+
+ // Click outside the modal (on the backdrop) to try to dismiss it
+ let mut vcx = VisualTestContext::from_window(*multi_workspace, cx);
+ vcx.simulate_click(gpui::point(px(1.0), px(1.0)), gpui::Modifiers::default());
+
+ multi_workspace
+ .update(cx, |multi_workspace, _, cx| {
+ assert!(
+ multi_workspace
+ .active_modal::<RemoteServerProjects>(cx)
+ .is_some(),
+ "Dev container modal should remain open during creation"
+ );
+ })
+ .unwrap();
+ }
+
#[gpui::test]
async fn test_open_dev_container_action_with_multiple_configs(cx: &mut TestAppContext) {
let app_state = init_test(cx);
@@ -54,7 +54,7 @@ use util::{
rel_path::RelPath,
};
use workspace::{
- AppState, ModalView, MultiWorkspace, OpenLog, OpenOptions, Toast, Workspace,
+ AppState, DismissDecision, ModalView, MultiWorkspace, OpenLog, OpenOptions, Toast, Workspace,
notifications::{DetachAndPromptErr, NotificationId},
open_remote_project_with_existing_connection,
};
@@ -69,6 +69,7 @@ pub struct RemoteServerProjects {
create_new_window: bool,
dev_container_picker: Option<Entity<Picker<DevContainerPickerDelegate>>>,
_subscription: Subscription,
+ allow_dismissal: bool,
}
struct CreateRemoteServer {
@@ -920,6 +921,7 @@ impl RemoteServerProjects {
create_new_window,
dev_container_picker: None,
_subscription,
+ allow_dismissal: true,
}
}
@@ -1140,6 +1142,7 @@ impl RemoteServerProjects {
}
fn view_in_progress_dev_container(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ self.allow_dismissal = false;
self.mode = Mode::CreateRemoteDevContainer(CreateRemoteDevContainer::new(
DevContainerCreationProgress::Creating,
cx,
@@ -1309,6 +1312,7 @@ impl RemoteServerProjects {
cx.emit(DismissEvent);
}
_ => {
+ self.allow_dismissal = true;
self.mode = Mode::default_mode(&self.ssh_config_servers, cx);
self.focus_handle(cx).focus(window, cx);
cx.notify();
@@ -1875,6 +1879,7 @@ impl RemoteServerProjects {
.ok();
entity
.update_in(cx, |remote_server_projects, window, cx| {
+ remote_server_projects.allow_dismissal = true;
remote_server_projects.mode =
Mode::CreateRemoteDevContainer(CreateRemoteDevContainer::new(
DevContainerCreationProgress::Error(format!("{e}")),
@@ -1897,7 +1902,8 @@ impl RemoteServerProjects {
.log_err();
entity
- .update(cx, |_, cx| {
+ .update(cx, |this, cx| {
+ this.allow_dismissal = true;
cx.emit(DismissEvent);
})
.log_err();
@@ -2948,7 +2954,15 @@ fn get_text(element: &Entity<Editor>, cx: &mut App) -> String {
element.read(cx).text(cx).trim().to_string()
}
-impl ModalView for RemoteServerProjects {}
+impl ModalView for RemoteServerProjects {
+ fn on_before_dismiss(
+ &mut self,
+ _window: &mut Window,
+ _cx: &mut Context<Self>,
+ ) -> DismissDecision {
+ DismissDecision::Dismiss(self.allow_dismissal)
+ }
+}
impl Focusable for RemoteServerProjects {
fn focus_handle(&self, cx: &App) -> FocusHandle {
@@ -411,12 +411,16 @@ impl PickerDelegate for SidebarRecentProjectsDelegate {
.border_t_1()
.border_color(cx.theme().colors().border_variant)
.child({
- let open_action = workspace::Open::default();
+ let open_action = workspace::Open {
+ create_new_window: false,
+ };
+
Button::new("open_local_folder", "Add Local Project")
.key_binding(KeyBinding::for_action_in(&open_action, &focus_handle, cx))
- .on_click(move |_, window, cx| {
- window.dispatch_action(open_action.boxed_clone(), cx)
- })
+ .on_click(cx.listener(move |_, _, window, cx| {
+ window.dispatch_action(open_action.boxed_clone(), cx);
+ cx.emit(DismissEvent);
+ }))
})
.child(
Button::new("open_remote_folder", "Add Remote Project")
@@ -427,7 +431,7 @@ impl PickerDelegate for SidebarRecentProjectsDelegate {
},
cx,
))
- .on_click(|_, window, cx| {
+ .on_click(cx.listener(|_, _, window, cx| {
window.dispatch_action(
OpenRemote {
from_existing_connection: false,
@@ -435,8 +439,9 @@ impl PickerDelegate for SidebarRecentProjectsDelegate {
}
.boxed_clone(),
cx,
- )
- }),
+ );
+ cx.emit(DismissEvent);
+ })),
)
.into_any(),
)
@@ -177,6 +177,13 @@ impl PythonEnvKernelSpecification {
kernelspec: self.kernelspec.clone(),
}
}
+
+ pub fn is_uv(&self) -> bool {
+ matches!(
+ self.environment_kind.as_deref(),
+ Some("uv" | "uv (Workspace)")
+ )
+ }
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -87,6 +87,7 @@ pub fn install_ipykernel_and_assign(
let python_path = env_spec.path.clone();
let env_name = env_spec.name.clone();
+ let is_uv = env_spec.is_uv();
let env_spec = env_spec.clone();
struct IpykernelInstall;
@@ -109,11 +110,25 @@ pub fn install_ipykernel_and_assign(
let window_handle = window.window_handle();
let install_task = cx.background_spawn(async move {
- let output = util::command::new_command(python_path.to_string_lossy().as_ref())
- .args(&["-m", "pip", "install", "ipykernel"])
- .output()
- .await
- .context("failed to run pip install ipykernel")?;
+ let output = if is_uv {
+ util::command::new_command("uv")
+ .args(&[
+ "pip",
+ "install",
+ "ipykernel",
+ "--python",
+ &python_path.to_string_lossy(),
+ ])
+ .output()
+ .await
+ .context("failed to run uv pip install ipykernel")?
+ } else {
+ util::command::new_command(python_path.to_string_lossy().as_ref())
+ .args(&["-m", "pip", "install", "ipykernel"])
+ .output()
+ .await
+ .context("failed to run pip install ipykernel")?
+ };
if output.status.success() {
anyhow::Ok(())
@@ -146,6 +161,11 @@ pub fn install_ipykernel_and_assign(
window_handle
.update(cx, |_, window, cx| {
+ let store = ReplStore::global(cx);
+ store.update(cx, |store, cx| {
+ store.mark_ipykernel_installed(cx, &env_spec);
+ });
+
let updated_spec =
KernelSpecification::PythonEnv(PythonEnvKernelSpecification {
has_ipykernel: true,
@@ -13,8 +13,8 @@ use settings::{Settings, SettingsStore};
use util::rel_path::RelPath;
use crate::kernels::{
- Kernel, list_remote_kernelspecs, local_kernel_specifications, python_env_kernel_specifications,
- wsl_kernel_specifications,
+ Kernel, PythonEnvKernelSpecification, list_remote_kernelspecs, local_kernel_specifications,
+ python_env_kernel_specifications, wsl_kernel_specifications,
};
use crate::{JupyterSettings, KernelSpecification, Session};
@@ -136,6 +136,23 @@ impl ReplStore {
cx.notify();
}
+ pub fn mark_ipykernel_installed(
+ &mut self,
+ cx: &mut Context<Self>,
+ spec: &PythonEnvKernelSpecification,
+ ) {
+ for specs in self.kernel_specifications_for_worktree.values_mut() {
+ for kernel_spec in specs.iter_mut() {
+ if let KernelSpecification::PythonEnv(env_spec) = kernel_spec {
+ if env_spec == spec {
+ env_spec.has_ipykernel = true;
+ }
+ }
+ }
+ }
+ cx.notify();
+ }
+
pub fn refresh_python_kernelspecs(
&mut self,
worktree_id: WorktreeId,
@@ -291,6 +291,7 @@ impl Render for BufferSearchBar {
regex,
replacement,
selection,
+ select_all,
find_in_results,
} = self.supported_options(cx);
@@ -461,14 +462,16 @@ impl Render for BufferSearchBar {
))
});
- el.child(render_action_button(
- "buffer-search-nav-button",
- IconName::SelectAll,
- Default::default(),
- "Select All Matches",
- &SelectAllMatches,
- query_focus,
- ))
+ el.when(select_all, |el| {
+ el.child(render_action_button(
+ "buffer-search-nav-button",
+ IconName::SelectAll,
+ Default::default(),
+ "Select All Matches",
+ &SelectAllMatches,
+ query_focus.clone(),
+ ))
+ })
.child(matches_column)
})
.when(find_in_results, |el| {
@@ -209,6 +209,11 @@ pub struct AgentSettingsContent {
///
/// Default: false
pub show_turn_stats: Option<bool>,
+ /// Whether to show the merge conflict indicator in the status bar
+ /// that offers to resolve conflicts using the agent.
+ ///
+ /// Default: true
+ pub show_merge_conflict_indicator: Option<bool>,
/// Per-tool permission rules for granular control over which tool actions
/// require confirmation.
///
@@ -7516,6 +7516,24 @@ fn ai_page(cx: &App) -> SettingsPage {
metadata: None,
files: USER,
}),
+ SettingsPageItem::SettingItem(SettingItem {
+ title: "Show Merge Conflict Indicator",
+ description: "Whether to show the merge conflict indicator in the status bar that offers to resolve conflicts using the agent.",
+ field: Box::new(SettingField {
+ json_path: Some("agent.show_merge_conflict_indicator"),
+ pick: |settings_content| {
+ settings_content.agent.as_ref()?.show_merge_conflict_indicator.as_ref()
+ },
+ write: |settings_content, value| {
+ settings_content
+ .agent
+ .get_or_insert_default()
+ .show_merge_conflict_indicator = value;
+ },
+ }),
+ metadata: None,
+ files: USER,
+ }),
]);
items.into_boxed_slice()
@@ -4759,6 +4759,120 @@ async fn test_linked_worktree_workspace_shows_main_worktree_threads(cx: &mut Tes
);
}
+#[gpui::test]
+async fn test_legacy_thread_with_canonical_path_opens_main_repo_workspace(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+
+ fs.insert_tree(
+ "/project",
+ serde_json::json!({
+ ".git": {
+ "worktrees": {
+ "feature-a": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature-a",
+ },
+ },
+ },
+ "src": {},
+ }),
+ )
+ .await;
+
+ fs.insert_tree(
+ "/wt-feature-a",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature-a",
+ "src": {},
+ }),
+ )
+ .await;
+
+ fs.add_linked_worktree_for_repo(
+ Path::new("/project/.git"),
+ false,
+ git::repository::Worktree {
+ path: PathBuf::from("/wt-feature-a"),
+ ref_name: Some("refs/heads/feature-a".into()),
+ sha: "abc".into(),
+ is_main: false,
+ },
+ )
+ .await;
+
+ cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
+
+ // Only a linked worktree workspace is open — no workspace for /project.
+ let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await;
+ worktree_project
+ .update(cx, |p, cx| p.git_scans_complete(cx))
+ .await;
+
+ let (multi_workspace, cx) = cx.add_window_view(|window, cx| {
+ MultiWorkspace::test_new(worktree_project.clone(), window, cx)
+ });
+ let sidebar = setup_sidebar(&multi_workspace, cx);
+
+ // Save a legacy thread: folder_paths = main repo, main_worktree_paths = empty.
+ let legacy_session = acp::SessionId::new(Arc::from("legacy-main-thread"));
+ cx.update(|_, cx| {
+ let metadata = ThreadMetadata {
+ session_id: legacy_session.clone(),
+ agent_id: agent::ZED_AGENT_ID.clone(),
+ title: "Legacy Main Thread".into(),
+ updated_at: chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(),
+ created_at: None,
+ folder_paths: PathList::new(&[PathBuf::from("/project")]),
+ main_worktree_paths: PathList::default(),
+ archived: false,
+ };
+ ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx));
+ });
+ cx.run_until_parked();
+
+ multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
+ cx.run_until_parked();
+
+ // The legacy thread should appear in the sidebar under the project group.
+ let entries = visible_entries_as_strings(&sidebar, cx);
+ assert!(
+ entries.iter().any(|e| e.contains("Legacy Main Thread")),
+ "legacy thread should be visible: {entries:?}",
+ );
+
+ // Verify only 1 workspace before clicking.
+ assert_eq!(
+ multi_workspace.read_with(cx, |mw, _| mw.workspaces().len()),
+ 1,
+ );
+
+ // Focus and select the legacy thread, then confirm.
+ open_and_focus_sidebar(&sidebar, cx);
+ let thread_index = sidebar.read_with(cx, |sidebar, _| {
+ sidebar
+ .contents
+ .entries
+ .iter()
+ .position(|e| e.session_id().is_some_and(|id| id == &legacy_session))
+ .expect("legacy thread should be in entries")
+ });
+ sidebar.update_in(cx, |sidebar, _window, _cx| {
+ sidebar.selection = Some(thread_index);
+ });
+ cx.dispatch_action(Confirm);
+ cx.run_until_parked();
+
+ let new_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+ let new_path_list =
+ new_workspace.read_with(cx, |_, cx| workspace_path_list(&new_workspace, cx));
+ assert_eq!(
+ new_path_list,
+ PathList::new(&[PathBuf::from("/project")]),
+ "the new workspace should be for the main repo, not the linked worktree",
+ );
+}
+
mod property_test {
use super::*;
@@ -1820,6 +1820,7 @@ impl SearchableItem for TerminalView {
regex: true,
replacement: false,
selection: false,
+ select_all: false,
find_in_results: false,
}
}
@@ -474,6 +474,16 @@ impl MultiWorkspace {
self.project_group_keys.push(project_group_key);
}
+ pub fn restore_project_group_keys(&mut self, keys: Vec<ProjectGroupKey>) {
+ let mut restored = keys;
+ for existing_key in &self.project_group_keys {
+ if !restored.contains(existing_key) {
+ restored.push(existing_key.clone());
+ }
+ }
+ self.project_group_keys = restored;
+ }
+
pub fn project_group_keys(&self) -> impl Iterator<Item = &ProjectGroupKey> {
self.project_group_keys.iter()
}
@@ -639,7 +649,7 @@ impl MultiWorkspace {
if let Some(workspace) = self
.workspaces
.iter()
- .find(|ws| ws.read(cx).project_group_key(cx).path_list() == &path_list)
+ .find(|ws| PathList::new(&ws.read(cx).root_paths(cx)) == path_list)
.cloned()
{
self.activate(workspace.clone(), window, cx);
@@ -337,15 +337,20 @@ pub fn read_serialized_multi_workspaces(
window_groups
.into_iter()
- .map(|group| {
+ .filter_map(|group| {
let window_id = group.first().and_then(|sw| sw.window_id);
let state = window_id
.map(|wid| read_multi_workspace_state(wid, cx))
.unwrap_or_default();
- model::SerializedMultiWorkspace {
- workspaces: group,
+ let active_workspace = state
+ .active_workspace_id
+ .and_then(|id| group.iter().position(|ws| ws.workspace_id == id))
+ .or(Some(0))
+ .and_then(|index| group.into_iter().nth(index))?;
+ Some(model::SerializedMultiWorkspace {
+ active_workspace,
state,
- }
+ })
})
.collect()
}
@@ -2488,11 +2493,20 @@ pub fn delete_unloaded_items(
#[cfg(test)]
mod tests {
use super::*;
- use crate::persistence::model::{
- SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace, SessionWorkspace,
+ use crate::{
+ multi_workspace::MultiWorkspace,
+ persistence::{
+ model::{
+ SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace,
+ SessionWorkspace,
+ },
+ read_multi_workspace_state,
+ },
};
- use gpui;
+ use feature_flags::FeatureFlagAppExt;
+ use gpui::AppContext as _;
use pretty_assertions::assert_eq;
+ use project::{Project, ProjectGroupKey};
use remote::SshConnectionOptions;
use serde_json::json;
use std::{thread, time::Duration};
@@ -2507,12 +2521,6 @@ mod tests {
#[gpui::test]
async fn test_multi_workspace_serializes_on_add_and_remove(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use crate::persistence::read_multi_workspace_state;
- use feature_flags::FeatureFlagAppExt;
- use gpui::AppContext as _;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4042,35 +4050,30 @@ mod tests {
let results = cx.update(|cx| read_serialized_multi_workspaces(session_workspaces, cx));
- // Should produce 3 groups: window 10, window 20, and the orphan.
+ // Should produce 3 results: window 10, window 20, and the orphan.
assert_eq!(results.len(), 3);
- // Window 10 group: 2 workspaces, active_workspace_id = 2, sidebar open.
+ // Window 10: active_workspace_id = 2 picks workspace 2 (paths /b), sidebar open.
let group_10 = &results[0];
- assert_eq!(group_10.workspaces.len(), 2);
+ assert_eq!(group_10.active_workspace.workspace_id, WorkspaceId(2));
assert_eq!(group_10.state.active_workspace_id, Some(WorkspaceId(2)));
assert_eq!(group_10.state.sidebar_open, true);
- // Window 20 group: 1 workspace, active_workspace_id = 3, sidebar closed.
+ // Window 20: active_workspace_id = 3 picks workspace 3 (paths /c), sidebar closed.
let group_20 = &results[1];
- assert_eq!(group_20.workspaces.len(), 1);
+ assert_eq!(group_20.active_workspace.workspace_id, WorkspaceId(3));
assert_eq!(group_20.state.active_workspace_id, Some(WorkspaceId(3)));
assert_eq!(group_20.state.sidebar_open, false);
- // Orphan group: no window_id, so state is default.
+ // Orphan: no active_workspace_id, falls back to first workspace (id 4).
let group_none = &results[2];
- assert_eq!(group_none.workspaces.len(), 1);
+ assert_eq!(group_none.active_workspace.workspace_id, WorkspaceId(4));
assert_eq!(group_none.state.active_workspace_id, None);
assert_eq!(group_none.state.sidebar_open, false);
}
#[gpui::test]
async fn test_flush_serialization_completes_before_quit(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
-
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4116,12 +4119,6 @@ mod tests {
#[gpui::test]
async fn test_create_workspace_serialization(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use crate::persistence::read_multi_workspace_state;
- use feature_flags::FeatureFlagAppExt;
-
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4179,11 +4176,6 @@ mod tests {
#[gpui::test]
async fn test_remove_workspace_clears_session_binding(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use gpui::AppContext as _;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4271,11 +4263,6 @@ mod tests {
#[gpui::test]
async fn test_remove_workspace_not_restored_as_zombie(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use gpui::AppContext as _;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4378,11 +4365,6 @@ mod tests {
#[gpui::test]
async fn test_pending_removal_tasks_drained_on_flush(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use gpui::AppContext as _;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4484,10 +4466,6 @@ mod tests {
#[gpui::test]
async fn test_create_workspace_bounds_observer_uses_fresh_id(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4545,10 +4523,6 @@ mod tests {
#[gpui::test]
async fn test_flush_serialization_writes_bounds(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4704,4 +4678,219 @@ mod tests {
assert_eq!(result[2].2.paths(), &[PathBuf::from("/plain-project")]);
assert_eq!(result[2].0, WorkspaceId(4));
}
+
+ #[gpui::test]
+ async fn test_restore_window_with_linked_worktree_and_multiple_project_groups(
+ cx: &mut gpui::TestAppContext,
+ ) {
+ crate::tests::init_test(cx);
+
+ cx.update(|cx| {
+ cx.set_staff(true);
+ cx.update_flags(true, vec!["agent-v2".to_string()]);
+ });
+
+ let fs = fs::FakeFs::new(cx.executor());
+
+ // Main git repo at /repo
+ fs.insert_tree(
+ "/repo",
+ json!({
+ ".git": {
+ "HEAD": "ref: refs/heads/main",
+ "worktrees": {
+ "feature": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature"
+ }
+ }
+ },
+ "src": { "main.rs": "" }
+ }),
+ )
+ .await;
+
+ // Linked worktree checkout pointing back to /repo
+ fs.insert_tree(
+ "/worktree-feature",
+ json!({
+ ".git": "gitdir: /repo/.git/worktrees/feature",
+ "src": { "lib.rs": "" }
+ }),
+ )
+ .await;
+
+ // --- Phase 1: Set up the original multi-workspace window ---
+
+ let project_1 = Project::test(fs.clone(), ["/repo".as_ref()], cx).await;
+ let project_1_linked_worktree =
+ Project::test(fs.clone(), ["/worktree-feature".as_ref()], cx).await;
+
+ // Wait for git discovery to finish.
+ cx.run_until_parked();
+
+ // Create a second, unrelated project so we have two distinct project groups.
+ fs.insert_tree(
+ "/other-project",
+ json!({
+ ".git": { "HEAD": "ref: refs/heads/main" },
+ "readme.md": ""
+ }),
+ )
+ .await;
+ let project_2 = Project::test(fs.clone(), ["/other-project".as_ref()], cx).await;
+ cx.run_until_parked();
+
+ // Create the MultiWorkspace with project_2, then add the main repo
+ // and its linked worktree. The linked worktree is added last and
+ // becomes the active workspace.
+ let (multi_workspace, cx) = cx
+ .add_window_view(|window, cx| MultiWorkspace::test_new(project_2.clone(), window, cx));
+
+ multi_workspace.update_in(cx, |mw, window, cx| {
+ mw.test_add_workspace(project_1.clone(), window, cx);
+ });
+
+ let workspace_worktree = multi_workspace.update_in(cx, |mw, window, cx| {
+ mw.test_add_workspace(project_1_linked_worktree.clone(), window, cx)
+ });
+
+ // Assign database IDs and set up session bindings so serialization
+ // writes real rows.
+ multi_workspace.update_in(cx, |mw, _, cx| {
+ for workspace in mw.workspaces() {
+ workspace.update(cx, |ws, _cx| {
+ ws.set_random_database_id();
+ });
+ }
+ });
+
+ // Flush serialization for each individual workspace (writes to SQLite)
+ // and for the MultiWorkspace (writes to KVP).
+ let tasks = multi_workspace.update_in(cx, |mw, window, cx| {
+ let session_id = mw.workspace().read(cx).session_id();
+ let window_id_u64 = window.window_handle().window_id().as_u64();
+
+ let mut tasks: Vec<Task<()>> = Vec::new();
+ for workspace in mw.workspaces() {
+ tasks.push(workspace.update(cx, |ws, cx| ws.flush_serialization(window, cx)));
+ if let Some(db_id) = workspace.read(cx).database_id() {
+ let db = WorkspaceDb::global(cx);
+ let session_id = session_id.clone();
+ tasks.push(cx.background_spawn(async move {
+ db.set_session_binding(db_id, session_id, Some(window_id_u64))
+ .await
+ .log_err();
+ }));
+ }
+ }
+ mw.serialize(cx);
+ tasks
+ });
+ cx.run_until_parked();
+ for task in tasks {
+ task.await;
+ }
+ cx.run_until_parked();
+
+ let active_db_id = workspace_worktree.read_with(cx, |ws, _| ws.database_id());
+ assert!(
+ active_db_id.is_some(),
+ "Active workspace should have a database ID"
+ );
+
+ // --- Phase 2: Read back and verify the serialized state ---
+
+ let session_id = multi_workspace
+ .read_with(cx, |mw, cx| mw.workspace().read(cx).session_id())
+ .unwrap();
+ let db = cx.update(|_, cx| WorkspaceDb::global(cx));
+ let session_workspaces = db
+ .last_session_workspace_locations(&session_id, None, fs.as_ref())
+ .await
+ .expect("should load session workspaces");
+ assert!(
+ !session_workspaces.is_empty(),
+ "Should have at least one session workspace"
+ );
+
+ let multi_workspaces =
+ cx.update(|_, cx| read_serialized_multi_workspaces(session_workspaces, cx));
+ assert_eq!(
+ multi_workspaces.len(),
+ 1,
+ "All workspaces share one window, so there should be exactly one multi-workspace"
+ );
+
+ let serialized = &multi_workspaces[0];
+ assert_eq!(
+ serialized.active_workspace.workspace_id,
+ active_db_id.unwrap(),
+ );
+ assert_eq!(serialized.state.project_group_keys.len(), 2,);
+
+ // Verify the serialized project group keys round-trip back to the
+ // originals.
+ let restored_keys: Vec<ProjectGroupKey> = serialized
+ .state
+ .project_group_keys
+ .iter()
+ .cloned()
+ .map(Into::into)
+ .collect();
+ let expected_keys = vec![
+ ProjectGroupKey::new(None, PathList::new(&["/other-project"])),
+ ProjectGroupKey::new(None, PathList::new(&["/repo"])),
+ ];
+ assert_eq!(
+ restored_keys, expected_keys,
+ "Deserialized project group keys should match the originals"
+ );
+
+ // --- Phase 3: Restore the window and verify the result ---
+
+ let app_state =
+ multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).app_state().clone());
+
+ let serialized_mw = multi_workspaces.into_iter().next().unwrap();
+ let restored_handle: gpui::WindowHandle<MultiWorkspace> = cx
+ .update(|_, cx| {
+ cx.spawn(async move |mut cx| {
+ crate::restore_multiworkspace(serialized_mw, app_state, &mut cx).await
+ })
+ })
+ .await
+ .expect("restore_multiworkspace should succeed");
+
+ cx.run_until_parked();
+
+ // The restored window should have the same project group keys.
+ let restored_keys: Vec<ProjectGroupKey> = restored_handle
+ .read_with(cx, |mw: &MultiWorkspace, _cx| {
+ mw.project_group_keys().cloned().collect()
+ })
+ .unwrap();
+ assert_eq!(
+ restored_keys, expected_keys,
+ "Restored window should have the same project group keys as the original"
+ );
+
+ // The active workspace in the restored window should have the linked
+ // worktree paths.
+ let active_paths: Vec<PathBuf> = restored_handle
+ .read_with(cx, |mw: &MultiWorkspace, cx| {
+ mw.workspace()
+ .read(cx)
+ .root_paths(cx)
+ .into_iter()
+ .map(|p: Arc<Path>| p.to_path_buf())
+ .collect()
+ })
+ .unwrap();
+ assert_eq!(
+ active_paths,
+ vec![PathBuf::from("/worktree-feature")],
+ "The restored active workspace should be the linked worktree project"
+ );
+ }
}
@@ -77,6 +77,17 @@ impl From<ProjectGroupKey> for SerializedProjectGroupKey {
}
}
+impl From<SerializedProjectGroupKey> for ProjectGroupKey {
+ fn from(value: SerializedProjectGroupKey) -> Self {
+ let path_list = PathList::deserialize(&value.path_list);
+ let host = match value.location {
+ SerializedWorkspaceLocation::Local => None,
+ SerializedWorkspaceLocation::Remote(opts) => Some(opts),
+ };
+ ProjectGroupKey::new(host, path_list)
+ }
+}
+
/// Per-window state for a MultiWorkspace, persisted to KVP.
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)]
pub struct MultiWorkspaceState {
@@ -88,11 +99,11 @@ pub struct MultiWorkspaceState {
}
/// The serialized state of a single MultiWorkspace window from a previous session:
-/// all workspaces that shared the window, which one was active, and whether the
-/// sidebar was open.
+/// the active workspace to restore plus window-level state (project group keys,
+/// sidebar).
#[derive(Debug, Clone)]
pub struct SerializedMultiWorkspace {
- pub workspaces: Vec<SessionWorkspace>,
+ pub active_workspace: SessionWorkspace,
pub state: MultiWorkspaceState,
}
@@ -55,6 +55,7 @@ pub struct SearchOptions {
/// Specifies whether the supports search & replace.
pub replacement: bool,
pub selection: bool,
+ pub select_all: bool,
pub find_in_results: bool,
}
@@ -78,6 +79,7 @@ pub trait SearchableItem: Item + EventEmitter<SearchEvent> {
regex: true,
replacement: true,
selection: true,
+ select_all: true,
find_in_results: false,
}
}
@@ -7,7 +7,7 @@ use std::{
};
use collections::{HashMap, HashSet};
-use gpui::{DismissEvent, EventEmitter, FocusHandle, Focusable, WeakEntity};
+use gpui::{DismissEvent, EventEmitter, FocusHandle, Focusable, ScrollHandle, WeakEntity};
use project::{
WorktreeId,
@@ -17,7 +17,8 @@ use project::{
use smallvec::SmallVec;
use theme::ActiveTheme;
use ui::{
- AlertModal, Checkbox, FluentBuilder, KeyBinding, ListBulletItem, ToggleState, prelude::*,
+ AlertModal, Checkbox, FluentBuilder, KeyBinding, ListBulletItem, ToggleState, WithScrollbar,
+ prelude::*,
};
use crate::{DismissDecision, ModalView, ToggleWorktreeSecurity};
@@ -29,6 +30,7 @@ pub struct SecurityModal {
worktree_store: WeakEntity<WorktreeStore>,
remote_host: Option<RemoteHostLocation>,
focus_handle: FocusHandle,
+ project_list_scroll_handle: ScrollHandle,
trusted: Option<bool>,
}
@@ -63,16 +65,17 @@ impl ModalView for SecurityModal {
}
impl Render for SecurityModal {
- fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
if self.restricted_paths.is_empty() {
self.dismiss(cx);
return v_flex().into_any_element();
}
- let header_label = if self.restricted_paths.len() == 1 {
- "Unrecognized Project"
+ let restricted_count = self.restricted_paths.len();
+ let header_label: SharedString = if restricted_count == 1 {
+ "Unrecognized Project".into()
} else {
- "Unrecognized Projects"
+ format!("Unrecognized Projects ({})", restricted_count).into()
};
let trust_label = self.build_trust_label();
@@ -102,32 +105,61 @@ impl Render for SecurityModal {
.child(Icon::new(IconName::Warning).color(Color::Warning))
.child(Label::new(header_label)),
)
- .children(self.restricted_paths.values().filter_map(|restricted_path| {
- let abs_path = if restricted_path.is_file {
- restricted_path.abs_path.parent()
- } else {
- Some(restricted_path.abs_path.as_ref())
- }?;
- let label = match &restricted_path.host {
- Some(remote_host) => match &remote_host.user_name {
- Some(user_name) => format!(
- "{} ({}@{})",
- self.shorten_path(abs_path).display(),
- user_name,
- remote_host.host_identifier
- ),
- None => format!(
- "{} ({})",
- self.shorten_path(abs_path).display(),
- remote_host.host_identifier
- ),
- },
- None => self.shorten_path(abs_path).display().to_string(),
- };
- Some(h_flex()
- .pl(IconSize::default().rems() + rems(0.5))
- .child(Label::new(label).color(Color::Muted)))
- })),
+ .child(
+ div()
+ .size_full()
+ .vertical_scrollbar_for(&self.project_list_scroll_handle, window, cx)
+ .child(
+ v_flex()
+ .id("paths_container")
+ .max_h_24()
+ .overflow_y_scroll()
+ .track_scroll(&self.project_list_scroll_handle)
+ .children(
+ self.restricted_paths.values().filter_map(
+ |restricted_path| {
+ let abs_path = if restricted_path.is_file {
+ restricted_path.abs_path.parent()
+ } else {
+ Some(restricted_path.abs_path.as_ref())
+ }?;
+ let label = match &restricted_path.host {
+ Some(remote_host) => {
+ match &remote_host.user_name {
+ Some(user_name) => format!(
+ "{} ({}@{})",
+ self.shorten_path(abs_path)
+ .display(),
+ user_name,
+ remote_host.host_identifier
+ ),
+ None => format!(
+ "{} ({})",
+ self.shorten_path(abs_path)
+ .display(),
+ remote_host.host_identifier
+ ),
+ }
+ }
+ None => self
+ .shorten_path(abs_path)
+ .display()
+ .to_string(),
+ };
+ Some(
+ h_flex()
+ .pl(
+ IconSize::default().rems() + rems(0.5),
+ )
+ .child(
+ Label::new(label).color(Color::Muted),
+ ),
+ )
+ },
+ ),
+ ),
+ ),
+ ),
)
.child(
v_flex()
@@ -219,6 +251,7 @@ impl SecurityModal {
remote_host: remote_host.map(|host| host.into()),
restricted_paths: HashMap::default(),
focus_handle: cx.focus_handle(),
+ project_list_scroll_handle: ScrollHandle::new(),
trust_parents: false,
home_dir: std::env::home_dir(),
trusted: None,
@@ -84,8 +84,8 @@ use persistence::{SerializedWindowBounds, model::SerializedWorkspace};
pub use persistence::{
WorkspaceDb, delete_unloaded_items,
model::{
- DockStructure, ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation,
- SessionWorkspace,
+ DockStructure, ItemId, MultiWorkspaceState, SerializedMultiWorkspace,
+ SerializedWorkspaceLocation, SessionWorkspace,
},
read_serialized_multi_workspaces, resolve_worktree_workspaces,
};
@@ -656,13 +656,25 @@ impl From<WorkspaceId> for i64 {
}
}
-fn prompt_and_open_paths(app_state: Arc<AppState>, options: PathPromptOptions, cx: &mut App) {
+fn prompt_and_open_paths(
+ app_state: Arc<AppState>,
+ options: PathPromptOptions,
+ create_new_window: bool,
+ cx: &mut App,
+) {
if let Some(workspace_window) = local_workspace_windows(cx).into_iter().next() {
workspace_window
.update(cx, |multi_workspace, window, cx| {
let workspace = multi_workspace.workspace().clone();
workspace.update(cx, |workspace, cx| {
- prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx);
+ prompt_for_open_path_and_open(
+ workspace,
+ app_state,
+ options,
+ create_new_window,
+ window,
+ cx,
+ );
});
})
.ok();
@@ -682,7 +694,14 @@ fn prompt_and_open_paths(app_state: Arc<AppState>, options: PathPromptOptions, c
window.activate_window();
let workspace = multi_workspace.workspace().clone();
workspace.update(cx, |workspace, cx| {
- prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx);
+ prompt_for_open_path_and_open(
+ workspace,
+ app_state,
+ options,
+ create_new_window,
+ window,
+ cx,
+ );
});
})?;
anyhow::Ok(())
@@ -743,7 +762,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut App) {
cx.on_action(|_: &CloseWindow, cx| Workspace::close_global(cx))
.on_action(|_: &Reload, cx| reload(cx))
- .on_action(|_: &Open, cx: &mut App| {
+ .on_action(|action: &Open, cx: &mut App| {
let app_state = AppState::global(cx);
prompt_and_open_paths(
app_state,
@@ -753,6 +772,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut App) {
multiple: true,
prompt: None,
},
+ action.create_new_window,
cx,
);
})
@@ -767,6 +787,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut App) {
multiple: true,
prompt: None,
},
+ true,
cx,
);
});
@@ -7693,11 +7714,6 @@ impl GlobalAnyActiveCall {
}
}
-pub fn merge_conflict_notification_id() -> NotificationId {
- struct MergeConflictNotification;
- NotificationId::unique::<MergeConflictNotification>()
-}
-
/// Workspace-local view of a remote participant's location.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ParticipantLocation {
@@ -8621,30 +8637,32 @@ pub async fn last_session_workspace_locations(
.log_err()
}
-pub struct MultiWorkspaceRestoreResult {
- pub window_handle: WindowHandle<MultiWorkspace>,
- pub errors: Vec<anyhow::Error>,
-}
-
pub async fn restore_multiworkspace(
multi_workspace: SerializedMultiWorkspace,
app_state: Arc<AppState>,
cx: &mut AsyncApp,
-) -> anyhow::Result<MultiWorkspaceRestoreResult> {
- let SerializedMultiWorkspace { workspaces, state } = multi_workspace;
- let mut group_iter = workspaces.into_iter();
- let first = group_iter
- .next()
- .context("window group must not be empty")?;
-
- let window_handle = if first.paths.is_empty() {
- cx.update(|cx| open_workspace_by_id(first.workspace_id, app_state.clone(), None, cx))
- .await?
+) -> anyhow::Result<WindowHandle<MultiWorkspace>> {
+ let SerializedMultiWorkspace {
+ active_workspace,
+ state,
+ } = multi_workspace;
+ let MultiWorkspaceState {
+ sidebar_open,
+ project_group_keys,
+ sidebar_state,
+ ..
+ } = state;
+
+ let window_handle = if active_workspace.paths.is_empty() {
+ cx.update(|cx| {
+ open_workspace_by_id(active_workspace.workspace_id, app_state.clone(), None, cx)
+ })
+ .await?
} else {
let OpenResult { window, .. } = cx
.update(|cx| {
Workspace::new_local(
- first.paths.paths().to_vec(),
+ active_workspace.paths.paths().to_vec(),
app_state.clone(),
None,
None,
@@ -8657,65 +8675,17 @@ pub async fn restore_multiworkspace(
window
};
- let mut errors = Vec::new();
-
- for session_workspace in group_iter {
- let error = if session_workspace.paths.is_empty() {
- cx.update(|cx| {
- open_workspace_by_id(
- session_workspace.workspace_id,
- app_state.clone(),
- Some(window_handle),
- cx,
- )
- })
- .await
- .err()
- } else {
- cx.update(|cx| {
- Workspace::new_local(
- session_workspace.paths.paths().to_vec(),
- app_state.clone(),
- Some(window_handle),
- None,
- None,
- OpenMode::Add,
- cx,
- )
- })
- .await
- .err()
- };
-
- if let Some(error) = error {
- errors.push(error);
- }
- }
-
- if let Some(target_id) = state.active_workspace_id {
+ if !project_group_keys.is_empty() {
+ let restored_keys: Vec<ProjectGroupKey> =
+ project_group_keys.into_iter().map(Into::into).collect();
window_handle
- .update(cx, |multi_workspace, window, cx| {
- let target_index = multi_workspace
- .workspaces()
- .iter()
- .position(|ws| ws.read(cx).database_id() == Some(target_id));
- let index = target_index.unwrap_or(0);
- if let Some(workspace) = multi_workspace.workspaces().get(index).cloned() {
- multi_workspace.activate(workspace, window, cx);
- }
- })
- .ok();
- } else {
- window_handle
- .update(cx, |multi_workspace, window, cx| {
- if let Some(workspace) = multi_workspace.workspaces().first().cloned() {
- multi_workspace.activate(workspace, window, cx);
- }
+ .update(cx, |multi_workspace, _window, _cx| {
+ multi_workspace.restore_project_group_keys(restored_keys);
})
.ok();
}
- if state.sidebar_open {
+ if sidebar_open {
window_handle
.update(cx, |multi_workspace, _, cx| {
multi_workspace.open_sidebar(cx);
@@ -8723,8 +8693,7 @@ pub async fn restore_multiworkspace(
.ok();
}
- if let Some(sidebar_state) = &state.sidebar_state {
- let sidebar_state = sidebar_state.clone();
+ if let Some(sidebar_state) = sidebar_state {
window_handle
.update(cx, |multi_workspace, window, cx| {
if let Some(sidebar) = multi_workspace.sidebar() {
@@ -8741,10 +8710,7 @@ pub async fn restore_multiworkspace(
})
.ok();
- Ok(MultiWorkspaceRestoreResult {
- window_handle,
- errors,
- })
+ Ok(window_handle)
}
actions!(
@@ -9359,7 +9325,7 @@ pub fn open_workspace_by_id(
pub fn open_paths(
abs_paths: &[PathBuf],
app_state: Arc<AppState>,
- open_options: OpenOptions,
+ mut open_options: OpenOptions,
cx: &mut App,
) -> Task<anyhow::Result<OpenResult>> {
let abs_paths = abs_paths.to_vec();
@@ -9384,10 +9350,9 @@ pub fn open_paths(
let all_metadatas = futures::future::join_all(all_paths)
.await
.into_iter()
- .filter_map(|result| result.ok().flatten())
- .collect::<Vec<_>>();
+ .filter_map(|result| result.ok().flatten());
- if all_metadatas.iter().all(|file| !file.is_dir) {
+ if all_metadatas.into_iter().all(|file| !file.is_dir) {
cx.update(|cx| {
let windows = workspace_windows_for_location(
&SerializedWorkspaceLocation::Local,
@@ -9409,6 +9374,35 @@ pub fn open_paths(
}
}
+ // Fallback for directories: when no flag is specified and no existing
+ // workspace matched, add the directory as a new workspace in the
+ // active window's MultiWorkspace (instead of opening a new window).
+ if open_options.open_new_workspace.is_none() && existing.is_none() {
+ let target_window = cx.update(|cx| {
+ let windows = workspace_windows_for_location(
+ &SerializedWorkspaceLocation::Local,
+ cx,
+ );
+ let window = cx
+ .active_window()
+ .and_then(|window| window.downcast::<MultiWorkspace>())
+ .filter(|window| windows.contains(window))
+ .or_else(|| windows.into_iter().next());
+ window.filter(|window| {
+ window.read(cx).is_ok_and(|mw| mw.multi_workspace_enabled(cx))
+ })
+ });
+
+ if let Some(window) = target_window {
+ open_options.requesting_window = Some(window);
+ window
+ .update(cx, |multi_workspace, _, cx| {
+ multi_workspace.open_sidebar(cx);
+ })
+ .log_err();
+ }
+ }
+
let open_in_dev_container = open_options.open_in_dev_container;
let result = if let Some((existing, target_workspace)) = existing {
@@ -1363,16 +1363,10 @@ pub(crate) async fn restore_or_create_workspace(
let mut tasks = Vec::new();
for multi_workspace in multi_workspaces {
- match restore_multiworkspace(multi_workspace, app_state.clone(), cx).await {
- Ok(result) => {
- for error in result.errors {
- log::error!("Failed to restore workspace in group: {error:#}");
- results.push(Err(error));
- }
- }
- Err(e) => {
- results.push(Err(e));
- }
+ if let Err(error) = restore_multiworkspace(multi_workspace, app_state.clone(), cx).await
+ {
+ log::error!("Failed to restore workspace: {error:#}");
+ results.push(Err(error));
}
}
@@ -502,12 +502,15 @@ pub fn initialize_workspace(app_state: Arc<AppState>, cx: &mut App) {
cx.new(|_| go_to_line::cursor_position::CursorPosition::new(workspace));
let line_ending_indicator =
cx.new(|_| line_ending_selector::LineEndingIndicator::default());
+ let merge_conflict_indicator =
+ cx.new(|cx| git_ui::MergeConflictIndicator::new(workspace, cx));
workspace.status_bar().update(cx, |status_bar, cx| {
status_bar.add_left_item(search_button, window, cx);
status_bar.add_left_item(lsp_button, window, cx);
status_bar.add_left_item(diagnostic_summary, window, cx);
status_bar.add_left_item(active_file_name, window, cx);
status_bar.add_left_item(activity_indicator, window, cx);
+ status_bar.add_left_item(merge_conflict_indicator, window, cx);
status_bar.add_right_item(edit_prediction_ui, window, cx);
status_bar.add_right_item(active_buffer_encoding, window, cx);
status_bar.add_right_item(active_buffer_language, window, cx);
@@ -2603,18 +2606,33 @@ mod tests {
})
.await
.unwrap();
- assert_eq!(cx.read(|cx| cx.windows().len()), 2);
-
- // Replace existing windows
- let window = cx
- .update(|cx| cx.windows()[0].downcast::<MultiWorkspace>())
+ assert_eq!(cx.read(|cx| cx.windows().len()), 1);
+ cx.run_until_parked();
+ multi_workspace_1
+ .update(cx, |multi_workspace, _window, cx| {
+ assert_eq!(multi_workspace.workspaces().len(), 2);
+ assert!(multi_workspace.sidebar_open());
+ let workspace = multi_workspace.workspace().read(cx);
+ assert_eq!(
+ workspace
+ .worktrees(cx)
+ .map(|w| w.read(cx).abs_path())
+ .collect::<Vec<_>>(),
+ &[
+ Path::new(path!("/root/c")).into(),
+ Path::new(path!("/root/d")).into(),
+ ]
+ );
+ })
.unwrap();
+
+ // Opening with -n (open_new_workspace: Some(true)) still creates a new window.
cx.update(|cx| {
open_paths(
&[PathBuf::from(path!("/root/e"))],
app_state,
workspace::OpenOptions {
- requesting_window: Some(window),
+ open_new_workspace: Some(true),
..Default::default()
},
cx,
@@ -2624,23 +2642,6 @@ mod tests {
.unwrap();
cx.background_executor.run_until_parked();
assert_eq!(cx.read(|cx| cx.windows().len()), 2);
- let multi_workspace_1 = cx
- .update(|cx| cx.windows()[0].downcast::<MultiWorkspace>())
- .unwrap();
- multi_workspace_1
- .update(cx, |multi_workspace, window, cx| {
- let workspace = multi_workspace.workspace().read(cx);
- assert_eq!(
- workspace
- .worktrees(cx)
- .map(|w| w.read(cx).abs_path())
- .collect::<Vec<_>>(),
- &[Path::new(path!("/root/e")).into()]
- );
- assert!(workspace.right_dock().read(cx).is_open());
- assert!(workspace.active_pane().focus_handle(cx).is_focused(window));
- })
- .unwrap();
}
#[gpui::test]
@@ -2721,7 +2722,6 @@ mod tests {
.await
.unwrap();
assert_eq!(cx.update(|cx| cx.windows().len()), 1);
- let window1 = cx.update(|cx| cx.active_window().unwrap());
cx.update(|cx| {
open_paths(
@@ -2735,6 +2735,8 @@ mod tests {
.unwrap();
assert_eq!(cx.update(|cx| cx.windows().len()), 1);
+ // Opening a directory with default options adds to the existing window
+ // rather than creating a new one.
cx.update(|cx| {
open_paths(
&[PathBuf::from(path!("/root/dir2"))],
@@ -2745,25 +2747,23 @@ mod tests {
})
.await
.unwrap();
- assert_eq!(cx.update(|cx| cx.windows().len()), 2);
- let window2 = cx.update(|cx| cx.active_window().unwrap());
- assert!(window1 != window2);
- cx.update_window(window1, |_, window, _| window.activate_window())
- .unwrap();
+ assert_eq!(cx.update(|cx| cx.windows().len()), 1);
+ // Opening a directory with -n creates a new window.
cx.update(|cx| {
open_paths(
- &[PathBuf::from(path!("/root/dir2/c"))],
+ &[PathBuf::from(path!("/root/dir2"))],
app_state.clone(),
- workspace::OpenOptions::default(),
+ workspace::OpenOptions {
+ open_new_workspace: Some(true),
+ ..Default::default()
+ },
cx,
)
})
.await
.unwrap();
assert_eq!(cx.update(|cx| cx.windows().len()), 2);
- // should have opened in window2 because that has dir2 visibly open (window1 has it open, but not in the project panel)
- assert!(cx.update(|cx| cx.active_window().unwrap()) == window2);
}
#[gpui::test]
@@ -5957,7 +5957,9 @@ mod tests {
#[gpui::test]
async fn test_multi_workspace_session_restore(cx: &mut TestAppContext) {
use collections::HashMap;
+ use project::ProjectGroupKey;
use session::Session;
+ use util::path_list::PathList;
use workspace::{OpenMode, Workspace, WorkspaceId};
let app_state = init_test(cx);
@@ -6117,94 +6119,50 @@ mod tests {
.filter_map(|window| window.downcast::<MultiWorkspace>())
.collect()
});
+ assert_eq!(restored_windows.len(), 2);
+
+ // Identify restored windows by their active workspace root paths.
+ let (restored_a, restored_b) = {
+ let (mut with_dir1, mut with_dir3) = (None, None);
+ for window in &restored_windows {
+ let active_paths = window
+ .read_with(cx, |mw, cx| mw.workspace().read(cx).root_paths(cx))
+ .unwrap();
+ if active_paths.iter().any(|p| p.as_ref() == Path::new(dir1)) {
+ with_dir1 = Some(window);
+ } else {
+ with_dir3 = Some(window);
+ }
+ }
+ (
+ with_dir1.expect("expected a window with dir1 active"),
+ with_dir3.expect("expected a window with dir3 active"),
+ )
+ };
- assert_eq!(
- restored_windows.len(),
- 2,
- "expected 2 restored windows, got {}",
- restored_windows.len()
- );
-
- let workspace_counts: Vec<usize> = restored_windows
- .iter()
- .map(|window| {
- window
- .read_with(cx, |multi_workspace, _| multi_workspace.workspaces().len())
- .unwrap()
- })
- .collect();
- let mut sorted_counts = workspace_counts.clone();
- sorted_counts.sort();
- assert_eq!(
- sorted_counts,
- vec![1, 2],
- "expected one window with 1 workspace and one with 2, got {workspace_counts:?}"
- );
-
- let dir1_path: Arc<Path> = Path::new(dir1).into();
- let dir2_path: Arc<Path> = Path::new(dir2).into();
- let dir3_path: Arc<Path> = Path::new(dir3).into();
-
- let all_restored_paths: Vec<Vec<Vec<Arc<Path>>>> = restored_windows
- .iter()
- .map(|window| {
- window
- .read_with(cx, |multi_workspace, cx| {
- multi_workspace
- .workspaces()
- .iter()
- .map(|ws| ws.read(cx).root_paths(cx))
- .collect()
- })
- .unwrap()
+ // Window A (dir1+dir2): 1 workspace restored, but 2 project group keys.
+ restored_a
+ .read_with(cx, |mw, _| {
+ assert_eq!(
+ mw.project_group_keys().cloned().collect::<Vec<_>>(),
+ vec![
+ ProjectGroupKey::new(None, PathList::new(&[dir1])),
+ ProjectGroupKey::new(None, PathList::new(&[dir2])),
+ ]
+ );
+ assert_eq!(mw.workspaces().len(), 1);
})
- .collect();
-
- let two_ws_window = all_restored_paths
- .iter()
- .find(|paths| paths.len() == 2)
- .expect("expected a window with 2 workspaces");
- assert!(
- two_ws_window.iter().any(|p| p.contains(&dir1_path)),
- "2-workspace window should contain dir1, got {two_ws_window:?}"
- );
- assert!(
- two_ws_window.iter().any(|p| p.contains(&dir2_path)),
- "2-workspace window should contain dir2, got {two_ws_window:?}"
- );
-
- let one_ws_window = all_restored_paths
- .iter()
- .find(|paths| paths.len() == 1)
- .expect("expected a window with 1 workspace");
- assert!(
- one_ws_window[0].contains(&dir3_path),
- "1-workspace window should contain dir3, got {one_ws_window:?}"
- );
-
- // --- Verify the active workspace is preserved ---
- for window in &restored_windows {
- let (active_paths, workspace_count) = window
- .read_with(cx, |multi_workspace, cx| {
- let active = multi_workspace.workspace();
- (
- active.read(cx).root_paths(cx),
- multi_workspace.workspaces().len(),
- )
- })
- .unwrap();
+ .unwrap();
- if workspace_count == 2 {
- assert!(
- active_paths.contains(&dir1_path),
- "2-workspace window should have dir1 active, got {active_paths:?}"
- );
- } else {
- assert!(
- active_paths.contains(&dir3_path),
- "1-workspace window should have dir3 active, got {active_paths:?}"
+ // Window B (dir3): 1 workspace, 1 project group key.
+ restored_b
+ .read_with(cx, |mw, _| {
+ assert_eq!(
+ mw.project_group_keys().cloned().collect::<Vec<_>>(),
+ vec![ProjectGroupKey::new(None, PathList::new(&[dir3]))]
);
- }
- }
+ assert_eq!(mw.workspaces().len(), 1);
+ })
+ .unwrap();
}
}
@@ -1,6 +1,6 @@
[package]
name = "zed_glsl"
-version = "0.2.2"
+version = "0.2.3"
edition.workspace = true
publish.workspace = true
license = "Apache-2.0"
@@ -1,7 +1,7 @@
id = "glsl"
name = "GLSL"
description = "GLSL support."
-version = "0.2.2"
+version = "0.2.3"
schema_version = 1
authors = ["Mikayla Maki <mikayla@zed.dev>"]
repository = "https://github.com/zed-industries/zed"
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
@@ -1,6 +1,6 @@
[package]
name = "zed_proto"
-version = "0.3.1"
+version = "0.3.2"
edition.workspace = true
publish.workspace = true
license = "Apache-2.0"
@@ -1,7 +1,7 @@
id = "proto"
name = "Proto"
description = "Protocol Buffers support."
-version = "0.3.1"
+version = "0.3.2"
schema_version = 1
authors = ["Zed Industries <support@zed.dev>"]
repository = "https://github.com/zed-industries/zed"
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))