Detailed changes
@@ -332,6 +332,7 @@ dependencies = [
"buffer_diff",
"chrono",
"client",
+ "clock",
"cloud_api_types",
"collections",
"command_palette_hooks",
@@ -366,6 +367,7 @@ dependencies = [
"markdown",
"menu",
"multi_buffer",
+ "node_runtime",
"notifications",
"ordered-float 2.10.1",
"parking_lot",
@@ -378,6 +380,9 @@ dependencies = [
"proto",
"rand 0.9.2",
"release_channel",
+ "remote",
+ "remote_connection",
+ "remote_server",
"reqwest_client",
"rope",
"rules_library",
@@ -16080,19 +16085,29 @@ dependencies = [
"agent_ui",
"anyhow",
"chrono",
+ "client",
+ "clock",
"editor",
+ "extension",
"fs",
"git",
"gpui",
+ "http_client",
+ "language",
"language_model",
"log",
"menu",
+ "node_runtime",
"platform_title_bar",
"pretty_assertions",
"project",
"prompt_store",
"recent_projects",
+ "release_channel",
"remote",
+ "remote_connection",
+ "remote_server",
+ "semver",
"serde",
"serde_json",
"settings",
@@ -82,6 +82,8 @@ prompt_store.workspace = true
proto.workspace = true
rand.workspace = true
release_channel.workspace = true
+remote.workspace = true
+remote_connection.workspace = true
rope.workspace = true
rules_library.workspace = true
schemars.workspace = true
@@ -115,17 +117,23 @@ reqwest_client = { workspace = true, optional = true }
acp_thread = { workspace = true, features = ["test-support"] }
agent = { workspace = true, features = ["test-support"] }
buffer_diff = { workspace = true, features = ["test-support"] }
-
+client = { workspace = true, features = ["test-support"] }
+clock = { workspace = true, features = ["test-support"] }
db = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
eval_utils.workspace = true
gpui = { workspace = true, "features" = ["test-support"] }
+http_client = { workspace = true, features = ["test-support"] }
indoc.workspace = true
language = { workspace = true, "features" = ["test-support"] }
languages = { workspace = true, features = ["test-support"] }
language_model = { workspace = true, "features" = ["test-support"] }
+node_runtime = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
+remote = { workspace = true, features = ["test-support"] }
+remote_connection = { workspace = true, features = ["test-support"] }
+remote_server = { workspace = true, features = ["test-support"] }
semver.workspace = true
reqwest_client.workspace = true
@@ -65,6 +65,7 @@ use project::git_store::{GitStoreEvent, RepositoryEvent};
use project::project_settings::ProjectSettings;
use project::{Project, ProjectPath, Worktree, linked_worktree_short_name};
use prompt_store::{PromptStore, UserPromptId};
+use remote::RemoteConnectionOptions;
use rules_library::{RulesLibrary, open_rules_library};
use settings::TerminalDockPosition;
use settings::{Settings, update_settings_file};
@@ -77,8 +78,8 @@ use ui::{
};
use util::{ResultExt as _, debug_panic};
use workspace::{
- CollaboratorId, DraggedSelection, DraggedTab, OpenMode, OpenResult, PathList,
- SerializedPathList, ToggleWorkspaceSidebar, ToggleZoom, Workspace, WorkspaceId,
+ CollaboratorId, DraggedSelection, DraggedTab, PathList, SerializedPathList,
+ ToggleWorkspaceSidebar, ToggleZoom, Workspace, WorkspaceId,
dock::{DockPosition, Panel, PanelEvent},
};
use zed_actions::{
@@ -2941,6 +2942,24 @@ impl AgentPanel {
.absolute_path(&project_path, cx)
});
+ let remote_connection_options = self.project.read(cx).remote_connection_options(cx);
+
+ if remote_connection_options.is_some() {
+ let is_disconnected = self
+ .project
+ .read(cx)
+ .remote_client()
+ .is_some_and(|client| client.read(cx).is_disconnected());
+ if is_disconnected {
+ self.set_worktree_creation_error(
+ "Cannot create worktree: remote connection is not active".into(),
+ window,
+ cx,
+ );
+ return;
+ }
+ }
+
let workspace = self.workspace.clone();
let window_handle = window
.window_handle()
@@ -3067,25 +3086,21 @@ impl AgentPanel {
}
};
- let app_state = match workspace.upgrade() {
- Some(workspace) => cx.update(|_, cx| workspace.read(cx).app_state().clone())?,
- None => {
- this.update_in(cx, |this, window, cx| {
- this.set_worktree_creation_error(
- "Workspace no longer available".into(),
- window,
- cx,
- );
- })?;
- return anyhow::Ok(());
- }
- };
+ if workspace.upgrade().is_none() {
+ this.update_in(cx, |this, window, cx| {
+ this.set_worktree_creation_error(
+ "Workspace no longer available".into(),
+ window,
+ cx,
+ );
+ })?;
+ return anyhow::Ok(());
+ }
let this_for_error = this.clone();
if let Err(err) = Self::open_worktree_workspace_and_start_thread(
this,
all_paths,
- app_state,
window_handle,
active_file_path,
path_remapping,
@@ -3093,6 +3108,7 @@ impl AgentPanel {
has_non_git,
content,
selected_agent,
+ remote_connection_options,
cx,
)
.await
@@ -3118,7 +3134,6 @@ impl AgentPanel {
async fn open_worktree_workspace_and_start_thread(
this: WeakEntity<Self>,
all_paths: Vec<PathBuf>,
- app_state: Arc<workspace::AppState>,
window_handle: Option<gpui::WindowHandle<workspace::MultiWorkspace>>,
active_file_path: Option<PathBuf>,
path_remapping: Vec<(PathBuf, PathBuf)>,
@@ -3126,25 +3141,34 @@ impl AgentPanel {
has_non_git: bool,
content: Vec<acp::ContentBlock>,
selected_agent: Option<Agent>,
+ remote_connection_options: Option<RemoteConnectionOptions>,
cx: &mut AsyncWindowContext,
) -> Result<()> {
- let OpenResult {
- window: new_window_handle,
- workspace: new_workspace,
- ..
- } = cx
- .update(|_window, cx| {
- Workspace::new_local(
- all_paths,
- app_state,
- window_handle,
- None,
- None,
- OpenMode::Add,
- cx,
- )
- })?
- .await?;
+ let window_handle = window_handle
+ .ok_or_else(|| anyhow!("No window handle available for workspace creation"))?;
+
+ let workspace_task = window_handle.update(cx, |multi_workspace, window, cx| {
+ let path_list = PathList::new(&all_paths);
+ let active_workspace = multi_workspace.workspace().clone();
+
+ multi_workspace.find_or_create_workspace(
+ path_list,
+ remote_connection_options,
+ None,
+ move |connection_options, window, cx| {
+ remote_connection::connect_with_modal(
+ &active_workspace,
+ connection_options,
+ window,
+ cx,
+ )
+ },
+ window,
+ cx,
+ )
+ })?;
+
+ let new_workspace = workspace_task.await?;
let panels_task = new_workspace.update(cx, |workspace, _cx| workspace.take_panels_task());
@@ -3180,7 +3204,7 @@ impl AgentPanel {
auto_submit: true,
};
- new_window_handle.update(cx, |_multi_workspace, window, cx| {
+ window_handle.update(cx, |_multi_workspace, window, cx| {
new_workspace.update(cx, |workspace, cx| {
if has_non_git {
let toast_id = workspace::notifications::NotificationId::unique::<AgentPanel>();
@@ -3265,7 +3289,7 @@ impl AgentPanel {
});
})?;
- new_window_handle.update(cx, |multi_workspace, window, cx| {
+ window_handle.update(cx, |multi_workspace, window, cx| {
multi_workspace.activate(new_workspace.clone(), window, cx);
new_workspace.update(cx, |workspace, cx| {
@@ -6646,4 +6670,218 @@ mod tests {
);
});
}
+
+ // End-to-end test: worktree creation from the agent panel against a
+ // remote (headless) project. Spins up a fake remote server with a git
+ // repo, connects a mock client, then triggers worktree creation and
+ // asserts that `find_or_create_workspace` reuses the existing remote
+ // workspace (matching paths + host) instead of creating a new one.
+ #[gpui::test]
+ async fn test_worktree_creation_for_remote_project(
+ cx: &mut TestAppContext,
+ server_cx: &mut TestAppContext,
+ ) {
+ init_test(cx);
+
+ let app_state = cx.update(|cx| {
+ agent::ThreadStore::init_global(cx);
+ language_model::LanguageModelRegistry::test(cx);
+
+ let app_state = workspace::AppState::test(cx);
+ workspace::init(app_state.clone(), cx);
+ app_state
+ });
+
+ server_cx.update(|cx| {
+ release_channel::init(semver::Version::new(0, 0, 0), cx);
+ });
+
+ // Set up the remote server side with a git repo.
+ let server_fs = FakeFs::new(server_cx.executor());
+ server_fs
+ .insert_tree(
+ "/project",
+ json!({
+ ".git": {},
+ "src": {
+ "main.rs": "fn main() {}"
+ }
+ }),
+ )
+ .await;
+ server_fs.set_branch_name(Path::new("/project/.git"), Some("main"));
+
+ // Create a mock remote connection.
+ let (opts, server_session, _) = remote::RemoteClient::fake_server(cx, server_cx);
+
+ server_cx.update(remote_server::HeadlessProject::init);
+ let server_executor = server_cx.executor();
+ let _headless = server_cx.new(|cx| {
+ remote_server::HeadlessProject::new(
+ remote_server::HeadlessAppState {
+ session: server_session,
+ fs: server_fs.clone(),
+ http_client: Arc::new(http_client::BlockedHttpClient),
+ node_runtime: node_runtime::NodeRuntime::unavailable(),
+ languages: Arc::new(language::LanguageRegistry::new(server_executor.clone())),
+ extension_host_proxy: Arc::new(extension::ExtensionHostProxy::new()),
+ startup_time: Instant::now(),
+ },
+ false,
+ cx,
+ )
+ });
+
+ // Connect the client side and build a remote project.
+ // Use a separate Client to avoid double-registering proto handlers
+ // (Workspace::test_new creates its own WorkspaceStore from the
+ // project's client).
+ let remote_client = remote::RemoteClient::connect_mock(opts, cx).await;
+ let project = cx.update(|cx| {
+ let project_client = client::Client::new(
+ Arc::new(clock::FakeSystemClock::new()),
+ http_client::FakeHttpClient::with_404_response(),
+ cx,
+ );
+ let user_store = cx.new(|cx| client::UserStore::new(project_client.clone(), cx));
+ project::Project::remote(
+ remote_client,
+ project_client,
+ node_runtime::NodeRuntime::unavailable(),
+ user_store,
+ app_state.languages.clone(),
+ app_state.fs.clone(),
+ false,
+ cx,
+ )
+ });
+
+ // Open the remote path as a worktree in the project.
+ let worktree_path = Path::new("/project");
+ project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree(worktree_path, true, cx)
+ })
+ .await
+ .expect("should be able to open remote worktree");
+ cx.run_until_parked();
+
+ // Verify the project is indeed remote.
+ project.read_with(cx, |project, cx| {
+ assert!(!project.is_local(), "project should be remote, not local");
+ assert!(
+ project.remote_connection_options(cx).is_some(),
+ "project should have remote connection options"
+ );
+ });
+
+ // Create the workspace and agent panel.
+ let multi_workspace =
+ cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ multi_workspace
+ .update(cx, |multi_workspace, _, cx| {
+ multi_workspace.open_sidebar(cx);
+ })
+ .unwrap();
+
+ let workspace = multi_workspace
+ .read_with(cx, |mw, _cx| mw.workspace().clone())
+ .unwrap();
+
+ // NOTE(review): presumably needed so the workspace can persist thread
+ // metadata during the test — confirm against set_random_database_id.
+ workspace.update(cx, |workspace, _cx| {
+ workspace.set_random_database_id();
+ });
+
+ // Register a callback so new workspaces also get an AgentPanel.
+ cx.update(|cx| {
+ cx.observe_new(
+ |workspace: &mut Workspace,
+ window: Option<&mut Window>,
+ cx: &mut Context<Workspace>| {
+ if let Some(window) = window {
+ let panel = cx.new(|cx| AgentPanel::new(workspace, None, window, cx));
+ workspace.add_panel(panel, window, cx);
+ }
+ },
+ )
+ .detach();
+ });
+
+ let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx);
+ cx.run_until_parked();
+
+ let panel = workspace.update_in(cx, |workspace, window, cx| {
+ let panel = cx.new(|cx| AgentPanel::new(workspace, None, window, cx));
+ workspace.add_panel(panel.clone(), window, cx);
+ panel
+ });
+
+ cx.run_until_parked();
+
+ // Open a thread.
+ panel.update_in(cx, |panel, window, cx| {
+ panel.open_external_thread_with_server(
+ Rc::new(StubAgentServer::default_response()),
+ window,
+ cx,
+ );
+ });
+ cx.run_until_parked();
+
+ // Set start_thread_in to LinkedWorktree to bypass git worktree
+ // creation and directly test workspace opening for a known path.
+ let linked_path = PathBuf::from("/project");
+ panel.update_in(cx, |panel, window, cx| {
+ panel.set_start_thread_in(
+ &StartThreadIn::LinkedWorktree {
+ path: linked_path.clone(),
+ display_name: "project".to_string(),
+ },
+ window,
+ cx,
+ );
+ });
+
+ // Trigger worktree creation.
+ let content = vec![acp::ContentBlock::Text(acp::TextContent::new(
+ "Hello from remote test",
+ ))];
+ panel.update_in(cx, |panel, window, cx| {
+ panel.handle_worktree_requested(
+ content,
+ WorktreeCreationArgs::Linked {
+ worktree_path: linked_path,
+ },
+ window,
+ cx,
+ );
+ });
+
+ // The refactored code uses `find_or_create_workspace`, which
+ // finds the existing remote workspace (matching paths + host)
+ // and reuses it instead of creating a new connection.
+ cx.run_until_parked();
+
+ // The task should have completed: the existing workspace was
+ // found and reused.
+ panel.read_with(cx, |panel, _cx| {
+ assert!(
+ panel.worktree_creation_status.is_none(),
+ "worktree creation should have completed, but status is: {:?}",
+ panel.worktree_creation_status
+ );
+ });
+
+ // The existing remote workspace was reused — no new workspace
+ // should have been created.
+ multi_workspace
+ .read_with(cx, |multi_workspace, cx| {
+ let project = workspace.read(cx).project().clone();
+ assert!(
+ !project.read(cx).is_local(),
+ "workspace project should still be remote, not local"
+ );
+ assert_eq!(
+ multi_workspace.workspaces().count(),
+ 1,
+ "existing remote workspace should be reused, not a new one created"
+ );
+ })
+ .unwrap();
+ }
}
@@ -532,6 +532,7 @@ impl RejoinedProject {
root_name: worktree.root_name.clone(),
visible: worktree.visible,
abs_path: worktree.abs_path.clone(),
+ root_repo_common_dir: None,
})
.collect(),
collaborators: self
@@ -1894,6 +1894,7 @@ async fn join_project(
root_name: worktree.root_name.clone(),
visible: worktree.visible,
abs_path: worktree.abs_path.clone(),
+ root_repo_common_dir: None,
})
.collect::<Vec<_>>();
@@ -648,6 +648,7 @@ async fn open_remote_worktree(
paths,
app_state,
window_to_use,
+ None,
cx,
)
.await?;
@@ -4430,7 +4430,8 @@ impl LspStore {
WorktreeStoreEvent::WorktreeReleased(..)
| WorktreeStoreEvent::WorktreeOrderChanged
| WorktreeStoreEvent::WorktreeUpdatedGitRepositories(..)
- | WorktreeStoreEvent::WorktreeDeletedEntry(..) => {}
+ | WorktreeStoreEvent::WorktreeDeletedEntry(..)
+ | WorktreeStoreEvent::WorktreeUpdatedRootRepoCommonDir(..) => {}
}
}
@@ -360,6 +360,7 @@ pub enum Event {
WorktreeOrderChanged,
WorktreeRemoved(WorktreeId),
WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
+ WorktreeUpdatedRootRepoCommonDir(WorktreeId),
DiskBasedDiagnosticsStarted {
language_server_id: LanguageServerId,
},
@@ -3681,6 +3682,9 @@ impl Project {
}
// Listen to the GitStore instead.
WorktreeStoreEvent::WorktreeUpdatedGitRepositories(_, _) => {}
+ WorktreeStoreEvent::WorktreeUpdatedRootRepoCommonDir(worktree_id) => {
+ cx.emit(Event::WorktreeUpdatedRootRepoCommonDir(*worktree_id));
+ }
}
}
@@ -6093,6 +6097,7 @@ impl Project {
/// workspaces by main repos.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
pub struct ProjectGroupKey {
+ /// The paths of the main worktrees for this project group.
paths: PathList,
host: Option<RemoteConnectionOptions>,
}
@@ -91,6 +91,7 @@ pub enum WorktreeStoreEvent {
WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
WorktreeUpdatedGitRepositories(WorktreeId, UpdatedGitRepositoriesSet),
WorktreeDeletedEntry(WorktreeId, ProjectEntryId),
+ WorktreeUpdatedRootRepoCommonDir(WorktreeId),
}
impl EventEmitter<WorktreeStoreEvent> for WorktreeStore {}
@@ -712,6 +713,7 @@ impl WorktreeStore {
root_name,
visible,
abs_path: response.canonicalized_path,
+ root_repo_common_dir: response.root_repo_common_dir,
},
client,
path_style,
@@ -812,7 +814,11 @@ impl WorktreeStore {
// The worktree root itself has been deleted (for single-file worktrees)
// The worktree will be removed via the observe_release callback
}
- worktree::Event::UpdatedRootRepoCommonDir => {}
+ worktree::Event::UpdatedRootRepoCommonDir => {
+ cx.emit(WorktreeStoreEvent::WorktreeUpdatedRootRepoCommonDir(
+ worktree_id,
+ ));
+ }
}
})
.detach();
@@ -1049,6 +1055,9 @@ impl WorktreeStore {
root_name: worktree.root_name_str().to_owned(),
visible: worktree.is_visible(),
abs_path: worktree.abs_path().to_string_lossy().into_owned(),
+ root_repo_common_dir: worktree
+ .root_repo_common_dir()
+ .map(|p| p.to_string_lossy().into_owned()),
}
})
.collect()
@@ -40,6 +40,7 @@ message AddWorktree {
message AddWorktreeResponse {
uint64 worktree_id = 1;
string canonicalized_path = 2;
+ optional string root_repo_common_dir = 3;
}
message RemoveWorktree {
@@ -62,6 +63,7 @@ message WorktreeMetadata {
string root_name = 2;
bool visible = 3;
string abs_path = 4;
+ optional string root_repo_common_dir = 5;
}
message ProjectPath {
@@ -132,7 +132,7 @@ pub async fn open_remote_project(
app_state: Arc<AppState>,
open_options: workspace::OpenOptions,
cx: &mut AsyncApp,
-) -> Result<()> {
+) -> Result<WindowHandle<MultiWorkspace>> {
let created_new_window = open_options.requesting_window.is_none();
let (existing, open_visible) = find_existing_workspace(
@@ -193,7 +193,7 @@ pub async fn open_remote_project(
.collect::<Vec<_>>();
navigate_to_positions(&existing_window, items, &paths_with_positions, cx);
- return Ok(());
+ return Ok(existing_window);
}
// If the remote connection is dead (e.g. server not running after failed reconnect),
// fall through to establish a fresh connection instead of showing an error.
@@ -341,7 +341,7 @@ pub async fn open_remote_project(
.update(cx, |_, window, _| window.remove_window())
.ok();
}
- return Ok(());
+ return Ok(window);
}
};
@@ -436,7 +436,7 @@ pub async fn open_remote_project(
});
})
.ok();
- Ok(())
+ Ok(window)
}
pub fn navigate_to_positions(
@@ -502,7 +502,7 @@ impl ProjectPicker {
.log_err()?;
let items = open_remote_project_with_existing_connection(
- connection, project, paths, app_state, window, cx,
+ connection, project, paths, app_state, window, None, cx,
)
.await
.log_err();
@@ -9,7 +9,7 @@ pub use remote_client::OpenWslPath;
pub use remote_client::{
CommandTemplate, ConnectionIdentifier, ConnectionState, Interactive, RemoteArch, RemoteClient,
RemoteClientDelegate, RemoteClientEvent, RemoteConnection, RemoteConnectionOptions, RemoteOs,
- RemotePlatform, connect,
+ RemotePlatform, connect, has_active_connection,
};
pub use transport::docker::DockerConnectionOptions;
pub use transport::ssh::{SshConnectionOptions, SshPortForwardOption};
@@ -377,6 +377,20 @@ pub async fn connect(
.map_err(|e| e.cloned())
}
+/// Returns `true` if the global [`ConnectionPool`] already has a live
+/// connection for the given options. Callers can use this to decide
+/// whether to show interactive UI (e.g., a password modal) before
+/// connecting.
+pub fn has_active_connection(opts: &RemoteConnectionOptions, cx: &App) -> bool {
+ cx.try_global::<ConnectionPool>().is_some_and(|pool| {
+ // "Live" means all of: a pool entry exists for these options, it is
+ // in the `Connected` state, the weak handle still upgrades (the
+ // connection hasn't been dropped), and it hasn't been killed.
+ matches!(
+ pool.connections.get(opts),
+ Some(ConnectionPoolEntry::Connected(remote))
+ if remote.upgrade().is_some_and(|r| !r.has_been_killed())
+ )
+ })
+}
+
impl RemoteClient {
pub fn new(
unique_identifier: ConnectionIdentifier,
@@ -19,7 +19,7 @@ use ui::{
prelude::*,
};
use ui_input::{ERASED_EDITOR_FACTORY, ErasedEditor};
-use workspace::{DismissDecision, ModalView};
+use workspace::{DismissDecision, ModalView, Workspace};
pub struct RemoteConnectionPrompt {
connection_string: SharedString,
@@ -536,6 +536,142 @@ impl RemoteClientDelegate {
}
}
+/// Shows a [`RemoteConnectionModal`] on the given workspace and establishes
+/// a remote connection. This is a convenience wrapper around
+/// [`RemoteConnectionModal`] and [`connect`] suitable for use as the
+/// `connect_remote` callback in [`MultiWorkspace::find_or_create_workspace`].
+///
+/// When the global connection pool already has a live connection for the
+/// given options, the modal is skipped entirely and the connection is
+/// reused silently.
+pub fn connect_with_modal(
+ workspace: &Entity<Workspace>,
+ connection_options: RemoteConnectionOptions,
+ window: &mut Window,
+ cx: &mut App,
+) -> Task<Result<Option<Entity<RemoteClient>>>> {
+ // Fast path: a pooled connection exists — reuse it without any UI.
+ if remote::has_active_connection(&connection_options, cx) {
+ return connect_reusing_pool(connection_options, cx);
+ }
+
+ workspace.update(cx, |workspace, cx| {
+ // NOTE(review): `toggle_modal` toggles — if a RemoteConnectionModal
+ // were already open this would dismiss it and the `active_modal`
+ // lookup below would fail with the error path; confirm callers
+ // cannot reach here with one already showing.
+ workspace.toggle_modal(window, cx, |window, cx| {
+ RemoteConnectionModal::new(&connection_options, Vec::new(), window, cx)
+ });
+ let Some(modal) = workspace.active_modal::<RemoteConnectionModal>(cx) else {
+ return Task::ready(Err(anyhow::anyhow!(
+ "Failed to open remote connection dialog"
+ )));
+ };
+ // The modal owns the interactive prompt (passwords, status); hand it
+ // to `connect` as the delegate-facing UI.
+ let prompt = modal.read(cx).prompt.clone();
+ connect(
+ ConnectionIdentifier::setup(),
+ connection_options,
+ prompt,
+ window,
+ cx,
+ )
+ })
+}
+
+/// Creates a [`RemoteClient`] by reusing an existing connection from the
+/// global pool. No interactive UI is shown. This should only be called
+/// when [`remote::has_active_connection`] returns `true`.
+fn connect_reusing_pool(
+ connection_options: RemoteConnectionOptions,
+ cx: &mut App,
+) -> Task<Result<Option<Entity<RemoteClient>>>> {
+ // Headless delegate: no password UI; downloads go through AutoUpdater.
+ let delegate: Arc<dyn remote::RemoteClientDelegate> = Arc::new(BackgroundRemoteClientDelegate);
+
+ cx.spawn(async move |cx| {
+ let connection = remote::connect(connection_options, delegate.clone(), cx).await?;
+
+ // NOTE(review): the cancel sender is kept alive (named binding, not
+ // `_`) only until this task finishes; confirm RemoteClient treats
+ // the sender being dropped afterwards as "no cancellation" rather
+ // than as a cancel signal.
+ let (_cancel_guard, cancel_rx) = oneshot::channel::<()>();
+ cx.update(|cx| {
+ RemoteClient::new(
+ ConnectionIdentifier::setup(),
+ connection,
+ cancel_rx,
+ delegate,
+ cx,
+ )
+ })
+ .await
+ })
+}
+
+/// Delegate for remote connections that reuse an existing pooled
+/// connection. Password prompts are not expected (the SSH transport
+/// is already established), but server binary downloads are supported
+/// via [`AutoUpdater`].
+struct BackgroundRemoteClientDelegate;
+
+impl remote::RemoteClientDelegate for BackgroundRemoteClientDelegate {
+ // `_tx` is dropped without sending a password — presumably the pending
+ // connect attempt then fails on its end; we only log, since a pooled
+ // connection shouldn't prompt at all. TODO confirm the failure mode.
+ fn ask_password(
+ &self,
+ prompt: String,
+ _tx: oneshot::Sender<EncryptedPassword>,
+ _cx: &mut AsyncApp,
+ ) {
+ log::warn!(
+ "Pooled remote connection unexpectedly requires a password \
+ (prompt: {prompt})"
+ );
+ }
+
+ // No UI to surface status to; intentionally a no-op.
+ fn set_status(&self, _status: Option<&str>, _cx: &mut AsyncApp) {}
+
+ // Downloads the remote server binary for the given platform/channel,
+ // attaching version/os/arch context to any failure.
+ fn download_server_binary_locally(
+ &self,
+ platform: RemotePlatform,
+ release_channel: ReleaseChannel,
+ version: Option<Version>,
+ cx: &mut AsyncApp,
+ ) -> Task<anyhow::Result<PathBuf>> {
+ cx.spawn(async move |cx| {
+ AutoUpdater::download_remote_server_release(
+ release_channel,
+ version.clone(),
+ platform.os.as_str(),
+ platform.arch.as_str(),
+ |_status, _cx| {},
+ cx,
+ )
+ .await
+ .with_context(|| {
+ format!(
+ "Downloading remote server binary (version: {}, os: {}, arch: {})",
+ version
+ .as_ref()
+ .map(|v| format!("{v}"))
+ .unwrap_or("unknown".to_string()),
+ platform.os,
+ platform.arch,
+ )
+ })
+ })
+ }
+
+ // Resolves the release URL for the remote server binary (if any).
+ fn get_download_url(
+ &self,
+ platform: RemotePlatform,
+ release_channel: ReleaseChannel,
+ version: Option<Version>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<Option<String>>> {
+ cx.spawn(async move |cx| {
+ AutoUpdater::get_remote_server_release_url(
+ release_channel,
+ version,
+ platform.os.as_str(),
+ platform.arch.as_str(),
+ cx,
+ )
+ .await
+ })
+ }
+}
+
pub fn connect(
unique_identifier: ConnectionIdentifier,
connection_options: RemoteConnectionOptions,
@@ -523,6 +523,9 @@ impl HeadlessProject {
proto::AddWorktreeResponse {
worktree_id: worktree.id().to_proto(),
canonicalized_path: canonicalized.to_string_lossy().into_owned(),
+ root_repo_common_dir: worktree
+ .root_repo_common_dir()
+ .map(|p| p.to_string_lossy().into_owned()),
}
});
@@ -33,6 +33,7 @@ platform_title_bar.workspace = true
project.workspace = true
recent_projects.workspace = true
remote.workspace = true
+remote_connection.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
@@ -50,7 +51,11 @@ acp_thread = { workspace = true, features = ["test-support"] }
agent = { workspace = true, features = ["test-support"] }
agent_ui = { workspace = true, features = ["test-support"] }
editor.workspace = true
+extension.workspace = true
+language = { workspace = true, features = ["test-support"] }
language_model = { workspace = true, features = ["test-support"] }
+release_channel.workspace = true
+semver.workspace = true
pretty_assertions.workspace = true
prompt_store.workspace = true
recent_projects = { workspace = true, features = ["test-support"] }
@@ -58,6 +63,13 @@ serde_json.workspace = true
fs = { workspace = true, features = ["test-support"] }
git.workspace = true
gpui = { workspace = true, features = ["test-support"] }
+client = { workspace = true, features = ["test-support"] }
+clock = { workspace = true, features = ["test-support"] }
+http_client = { workspace = true, features = ["test-support"] }
+node_runtime = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
+remote = { workspace = true, features = ["test-support"] }
+remote_connection = { workspace = true, features = ["test-support"] }
+remote_server = { workspace = true, features = ["test-support"] }
settings = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
@@ -43,12 +43,12 @@ use ui::{
WithScrollbar, prelude::*,
};
use util::ResultExt as _;
-use util::path_list::{PathList, SerializedPathList};
+use util::path_list::PathList;
use workspace::{
AddFolderToProject, CloseWindow, FocusWorkspaceSidebar, MultiWorkspace, MultiWorkspaceEvent,
- NextProject, NextThread, Open, PreviousProject, PreviousThread, ShowFewerThreads,
- ShowMoreThreads, Sidebar as WorkspaceSidebar, SidebarSide, Toast, ToggleWorkspaceSidebar,
- Workspace, notifications::NotificationId, sidebar_side_context_menu,
+ NextProject, NextThread, Open, PreviousProject, PreviousThread, SerializedProjectGroupKey,
+ ShowFewerThreads, ShowMoreThreads, Sidebar as WorkspaceSidebar, SidebarSide, Toast,
+ ToggleWorkspaceSidebar, Workspace, notifications::NotificationId, sidebar_side_context_menu,
};
use zed_actions::OpenRecent;
@@ -96,9 +96,9 @@ struct SerializedSidebar {
#[serde(default)]
width: Option<f32>,
#[serde(default)]
- collapsed_groups: Vec<SerializedPathList>,
+ collapsed_groups: Vec<SerializedProjectGroupKey>,
#[serde(default)]
- expanded_groups: Vec<(SerializedPathList, usize)>,
+ expanded_groups: Vec<(SerializedProjectGroupKey, usize)>,
#[serde(default)]
active_view: SerializedSidebarView,
}
@@ -167,7 +167,25 @@ struct ActiveThreadInfo {
#[derive(Clone)]
enum ThreadEntryWorkspace {
Open(Entity<Workspace>),
- Closed(PathList),
+ Closed {
+ /// The paths this thread uses (may point to linked worktrees).
+ folder_paths: PathList,
+ /// The project group this thread belongs to.
+ project_group_key: ProjectGroupKey,
+ },
+}
+
+impl ThreadEntryWorkspace {
+ /// Whether this thread's workspace refers to a remote project: an open
+ /// workspace is checked via its project's `is_local`, a closed one via
+ /// its project group key having a host.
+ fn is_remote(&self, cx: &App) -> bool {
+ match self {
+ ThreadEntryWorkspace::Open(workspace) => {
+ !workspace.read(cx).project().read(cx).is_local()
+ }
+ ThreadEntryWorkspace::Closed {
+ project_group_key, ..
+ } => project_group_key.host().is_some(),
+ }
+ }
+}
#[derive(Clone)]
@@ -253,7 +271,7 @@ impl ListEntry {
match self {
ListEntry::Thread(thread) => match &thread.workspace {
ThreadEntryWorkspace::Open(ws) => vec![ws.clone()],
- ThreadEntryWorkspace::Closed(_) => Vec::new(),
+ ThreadEntryWorkspace::Closed { .. } => Vec::new(),
},
ListEntry::DraftThread { workspace, .. } => {
if let Some(ws) = workspace {
@@ -378,6 +396,19 @@ fn worktree_info_from_thread_paths(
})
}
+/// Shows a [`RemoteConnectionModal`] on the given workspace and establishes
+/// a remote connection (`RemoteConnectionOptions` covers SSH as well as the
+/// other supported transports). Suitable for passing to
+/// [`MultiWorkspace::find_or_create_workspace`] as the `connect_remote`
+/// argument. Thin adapter over [`remote_connection::connect_with_modal`].
+fn connect_remote(
+ modal_workspace: Entity<Workspace>,
+ connection_options: RemoteConnectionOptions,
+ window: &mut Window,
+ cx: &mut Context<MultiWorkspace>,
+) -> gpui::Task<anyhow::Result<Option<Entity<remote::RemoteClient>>>> {
+ remote_connection::connect_with_modal(&modal_workspace, connection_options, window, cx)
+}
+
/// The sidebar re-derives its entire entry list from scratch on every
/// change via `update_entries` → `rebuild_contents`. Avoid adding
/// incremental or inter-event coordination state — if something can
@@ -396,8 +427,8 @@ pub struct Sidebar {
/// Tracks which sidebar entry is currently active (highlighted).
active_entry: Option<ActiveEntry>,
hovered_thread_index: Option<usize>,
- collapsed_groups: HashSet<PathList>,
- expanded_groups: HashMap<PathList, usize>,
+ collapsed_groups: HashSet<ProjectGroupKey>,
+ expanded_groups: HashMap<ProjectGroupKey, usize>,
/// Updated only in response to explicit user actions (clicking a
/// thread, confirming in the thread switcher, etc.) — never from
/// background data changes. Used to sort the thread switcher popup.
@@ -408,6 +439,7 @@ pub struct Sidebar {
thread_last_message_sent_or_queued: HashMap<acp::SessionId, DateTime<Utc>>,
thread_switcher: Option<Entity<ThreadSwitcher>>,
_thread_switcher_subscriptions: Vec<gpui::Subscription>,
+ pending_remote_thread_activation: Option<acp::SessionId>,
view: SidebarView,
recent_projects_popover_handle: PopoverMenuHandle<SidebarRecentProjects>,
project_header_menu_ix: Option<usize>,
@@ -494,6 +526,7 @@ impl Sidebar {
thread_last_message_sent_or_queued: HashMap::new(),
thread_switcher: None,
_thread_switcher_subscriptions: Vec::new(),
+ pending_remote_thread_activation: None,
view: SidebarView::default(),
recent_projects_popover_handle: PopoverMenuHandle::default(),
project_header_menu_ix: None,
@@ -704,26 +737,31 @@ impl Sidebar {
result
}
- /// Finds the main worktree workspace for a project group.
- fn workspace_for_group(&self, path_list: &PathList, cx: &App) -> Option<Entity<Workspace>> {
- let mw = self.multi_workspace.upgrade()?;
- mw.read(cx).workspace_for_paths(path_list, cx)
- }
-
/// Opens a new workspace for a group that has no open workspaces.
fn open_workspace_for_group(
&mut self,
- path_list: &PathList,
+ project_group_key: &ProjectGroupKey,
window: &mut Window,
cx: &mut Context<Self>,
) {
let Some(multi_workspace) = self.multi_workspace.upgrade() else {
return;
};
+ let path_list = project_group_key.path_list().clone();
+ let host = project_group_key.host();
+ let provisional_key = Some(project_group_key.clone());
+ let active_workspace = multi_workspace.read(cx).workspace().clone();
multi_workspace
.update(cx, |this, cx| {
- this.find_or_create_local_workspace(path_list.clone(), window, cx)
+ this.find_or_create_workspace(
+ path_list,
+ host,
+ provisional_key,
+ |options, window, cx| connect_remote(active_workspace, options, window, cx),
+ window,
+ cx,
+ )
})
.detach_and_log_err(cx);
}
@@ -763,15 +801,25 @@ impl Sidebar {
// also appears as a "draft" (no messages yet).
if let Some(active_ws) = &active_workspace {
if let Some(panel) = active_ws.read(cx).panel::<AgentPanel>(cx) {
- if panel.read(cx).active_thread_is_draft(cx)
- || panel.read(cx).active_conversation_view().is_none()
- {
- let conversation_parent_id = panel
- .read(cx)
- .active_conversation_view()
- .and_then(|cv| cv.read(cx).parent_id(cx));
- let preserving_thread =
- if let Some(ActiveEntry::Thread { session_id, .. }) = &self.active_entry {
+ let active_thread_is_draft = panel.read(cx).active_thread_is_draft(cx);
+ let active_conversation_view = panel.read(cx).active_conversation_view();
+
+ if active_thread_is_draft || active_conversation_view.is_none() {
+ if active_conversation_view.is_none()
+ && let Some(session_id) = self.pending_remote_thread_activation.clone()
+ {
+ self.active_entry = Some(ActiveEntry::Thread {
+ session_id,
+ workspace: active_ws.clone(),
+ });
+ } else {
+ let conversation_parent_id =
+ active_conversation_view.and_then(|cv| cv.read(cx).parent_id(cx));
+ let preserving_thread = if let Some(ActiveEntry::Thread {
+ session_id,
+ ..
+ }) = &self.active_entry
+ {
self.active_entry_workspace() == Some(active_ws)
&& conversation_parent_id
.as_ref()
@@ -779,14 +827,16 @@ impl Sidebar {
} else {
false
};
- if !preserving_thread {
- self.active_entry = Some(ActiveEntry::Draft(active_ws.clone()));
+ if !preserving_thread {
+ self.active_entry = Some(ActiveEntry::Draft(active_ws.clone()));
+ }
}
- } else if let Some(session_id) = panel
- .read(cx)
- .active_conversation_view()
- .and_then(|cv| cv.read(cx).parent_id(cx))
+ } else if let Some(session_id) =
+ active_conversation_view.and_then(|cv| cv.read(cx).parent_id(cx))
{
+ if self.pending_remote_thread_activation.as_ref() == Some(&session_id) {
+ self.pending_remote_thread_activation = None;
+ }
self.active_entry = Some(ActiveEntry::Thread {
session_id,
workspace: active_ws.clone(),
@@ -832,14 +882,13 @@ impl Sidebar {
};
for (group_key, group_workspaces) in mw.project_groups(cx) {
- let path_list = group_key.path_list().clone();
- if path_list.paths().is_empty() {
+ if group_key.path_list().paths().is_empty() {
continue;
}
let label = group_key.display_name();
- let is_collapsed = self.collapsed_groups.contains(&path_list);
+ let is_collapsed = self.collapsed_groups.contains(&group_key);
let should_load_threads = !is_collapsed || !query.is_empty();
let is_active = active_workspace
@@ -876,7 +925,10 @@ impl Sidebar {
workspace_by_path_list
.get(&row.folder_paths)
.map(|ws| ThreadEntryWorkspace::Open((*ws).clone()))
- .unwrap_or_else(|| ThreadEntryWorkspace::Closed(row.folder_paths.clone()))
+ .unwrap_or_else(|| ThreadEntryWorkspace::Closed {
+ folder_paths: row.folder_paths.clone(),
+ project_group_key: group_key.clone(),
+ })
};
// Build a ThreadEntry from a metadata row.
@@ -907,7 +959,7 @@ impl Sidebar {
// linked worktree the thread was opened in.
for row in thread_store
.read(cx)
- .entries_for_main_worktree_path(&path_list)
+ .entries_for_main_worktree_path(group_key.path_list())
.cloned()
{
if !seen_session_ids.insert(row.session_id.clone()) {
@@ -921,7 +973,11 @@ impl Sidebar {
// must be queried by their `folder_paths`.
// Load any legacy threads for the main worktrees of this project group.
- for row in thread_store.read(cx).entries_for_path(&path_list).cloned() {
+ for row in thread_store
+ .read(cx)
+ .entries_for_path(group_key.path_list())
+ .cloned()
+ {
if !seen_session_ids.insert(row.session_id.clone()) {
continue;
}
@@ -953,7 +1009,10 @@ impl Sidebar {
}
threads.push(make_thread_entry(
row,
- ThreadEntryWorkspace::Closed(worktree_path_list.clone()),
+ ThreadEntryWorkspace::Closed {
+ folder_paths: worktree_path_list.clone(),
+ project_group_key: group_key.clone(),
+ },
));
}
}
@@ -1031,10 +1090,13 @@ impl Sidebar {
} else {
let store = ThreadMetadataStore::global(cx).read(cx);
store
- .entries_for_main_worktree_path(&path_list)
+ .entries_for_main_worktree_path(group_key.path_list())
.next()
.is_some()
- || store.entries_for_path(&path_list).next().is_some()
+ || store
+ .entries_for_path(group_key.path_list())
+ .next()
+ .is_some()
};
if !query.is_empty() {
@@ -1161,7 +1223,7 @@ impl Sidebar {
let total = threads.len();
- let extra_batches = self.expanded_groups.get(&path_list).copied().unwrap_or(0);
+ let extra_batches = self.expanded_groups.get(&group_key).copied().unwrap_or(0);
let threads_to_show =
DEFAULT_THREADS_SHOWN + (extra_batches * DEFAULT_THREADS_SHOWN);
let count = threads_to_show.min(total);
@@ -1312,7 +1374,7 @@ impl Sidebar {
ListEntry::ViewMore {
key,
is_fully_expanded,
- } => self.render_view_more(ix, key.path_list(), *is_fully_expanded, is_selected, cx),
+ } => self.render_view_more(ix, key, *is_fully_expanded, is_selected, cx),
ListEntry::DraftThread {
key,
workspace,
@@ -1376,7 +1438,6 @@ impl Sidebar {
has_threads: bool,
cx: &mut Context<Self>,
) -> AnyElement {
- let path_list = key.path_list();
let host = key.host();
let id_prefix = if is_sticky { "sticky-" } else { "" };
@@ -1384,16 +1445,27 @@ impl Sidebar {
let disclosure_id = SharedString::from(format!("disclosure-{ix}"));
let group_name = SharedString::from(format!("{id_prefix}header-group-{ix}"));
- let is_collapsed = self.collapsed_groups.contains(path_list);
+ let is_collapsed = self.collapsed_groups.contains(key);
let (disclosure_icon, disclosure_tooltip) = if is_collapsed {
(IconName::ChevronRight, "Expand Project")
} else {
(IconName::ChevronDown, "Collapse Project")
};
- let path_list_for_toggle = path_list.clone();
- let path_list_for_collapse = path_list.clone();
- let view_more_expanded = self.expanded_groups.contains_key(path_list);
+ let has_new_thread_entry = self
+ .contents
+ .entries
+ .get(ix + 1)
+ .is_some_and(|entry| matches!(entry, ListEntry::DraftThread { .. }));
+ let show_new_thread_button = !has_new_thread_entry && !self.has_filter_query(cx);
+ let workspace = self.multi_workspace.upgrade().and_then(|mw| {
+ mw.read(cx)
+ .workspace_for_paths(key.path_list(), key.host().as_ref(), cx)
+ });
+
+ let key_for_toggle = key.clone();
+ let key_for_collapse = key.clone();
+ let view_more_expanded = self.expanded_groups.contains_key(key);
let label = if highlight_positions.is_empty() {
Label::new(label.clone())
@@ -1442,7 +1514,7 @@ impl Sidebar {
.tooltip(Tooltip::text(disclosure_tooltip))
.on_click(cx.listener(move |this, _, window, cx| {
this.selection = None;
- this.toggle_collapse(&path_list_for_toggle, window, cx);
+ this.toggle_collapse(&key_for_toggle, window, cx);
})),
)
.child(label)
@@ -1500,59 +1572,64 @@ impl Sidebar {
.icon_size(IconSize::Small)
.tooltip(Tooltip::text("Collapse Displayed Threads"))
.on_click(cx.listener({
- let path_list_for_collapse = path_list_for_collapse.clone();
+ let key_for_collapse = key_for_collapse.clone();
move |this, _, _window, cx| {
this.selection = None;
- this.expanded_groups.remove(&path_list_for_collapse);
+ this.expanded_groups.remove(&key_for_collapse);
this.serialize(cx);
this.update_entries(cx);
}
})),
)
})
- .child({
- let path_list = path_list.clone();
- let focus_handle = self.focus_handle.clone();
- IconButton::new(
- SharedString::from(format!(
- "{id_prefix}project-header-new-thread-{ix}",
- )),
- IconName::Plus,
- )
- .icon_size(IconSize::Small)
- .tooltip(move |_, cx| {
- Tooltip::for_action_in("New Thread", &NewThread, &focus_handle, cx)
- })
- .on_click(cx.listener(
- move |this, _, window, cx| {
- this.collapsed_groups.remove(&path_list);
- this.selection = None;
- let workspace = this
- .active_workspace(cx)
- .filter(|ws| {
- let key = ws.read(cx).project_group_key(cx);
- *key.path_list() == path_list
- })
- .or_else(|| this.workspace_for_group(&path_list, cx));
- if let Some(workspace) = workspace {
- this.create_new_thread(&workspace, window, cx);
- } else {
- this.open_workspace_for_group(&path_list, window, cx);
- }
- },
- ))
- }),
+ .when_some(
+ workspace.filter(|_| show_new_thread_button),
+ |this, workspace| {
+ let key = key.clone();
+ let focus_handle = self.focus_handle.clone();
+ this.child(
+ IconButton::new(
+ SharedString::from(format!(
+ "{id_prefix}project-header-new-thread-{ix}",
+ )),
+ IconName::Plus,
+ )
+ .icon_size(IconSize::Small)
+ .tooltip(move |_, cx| {
+ Tooltip::for_action_in(
+ "New Thread",
+ &NewThread,
+ &focus_handle,
+ cx,
+ )
+ })
+ .on_click(cx.listener(
+ move |this, _, window, cx| {
+ this.collapsed_groups.remove(&key);
+ this.selection = None;
+ this.create_new_thread(&workspace, window, cx);
+ },
+ )),
+ )
+ },
+ ),
)
.map(|this| {
if !has_threads && is_active {
this
} else {
- let path_list = path_list.clone();
+ let key = key.clone();
this.cursor_pointer()
.when(!is_active, |this| this.hover(|s| s.bg(hover_color)))
.tooltip(Tooltip::text("Open Workspace"))
.on_click(cx.listener(move |this, _, window, cx| {
- if let Some(workspace) = this.workspace_for_group(&path_list, cx) {
+ if let Some(workspace) = this.multi_workspace.upgrade().and_then(|mw| {
+ mw.read(cx).workspace_for_paths(
+ key.path_list(),
+ key.host().as_ref(),
+ cx,
+ )
+ }) {
this.active_entry = Some(ActiveEntry::Draft(workspace.clone()));
if let Some(multi_workspace) = this.multi_workspace.upgrade() {
multi_workspace.update(cx, |multi_workspace, cx| {
@@ -1565,7 +1642,7 @@ impl Sidebar {
});
}
} else {
- this.open_workspace_for_group(&path_list, window, cx);
+ this.open_workspace_for_group(&key, window, cx);
}
}))
}
@@ -1784,14 +1861,14 @@ impl Sidebar {
fn toggle_collapse(
&mut self,
- path_list: &PathList,
+ project_group_key: &ProjectGroupKey,
_window: &mut Window,
cx: &mut Context<Self>,
) {
- if self.collapsed_groups.contains(path_list) {
- self.collapsed_groups.remove(path_list);
+ if self.collapsed_groups.contains(project_group_key) {
+ self.collapsed_groups.remove(project_group_key);
} else {
- self.collapsed_groups.insert(path_list.clone());
+ self.collapsed_groups.insert(project_group_key.clone());
}
self.serialize(cx);
self.update_entries(cx);
@@ -1965,8 +2042,8 @@ impl Sidebar {
match entry {
ListEntry::ProjectHeader { key, .. } => {
- let path_list = key.path_list().clone();
- self.toggle_collapse(&path_list, window, cx);
+ let key = key.clone();
+ self.toggle_collapse(&key, window, cx);
}
ListEntry::Thread(thread) => {
let metadata = thread.metadata.clone();
@@ -1975,10 +2052,16 @@ impl Sidebar {
let workspace = workspace.clone();
self.activate_thread(metadata, &workspace, false, window, cx);
}
- ThreadEntryWorkspace::Closed(path_list) => {
+ ThreadEntryWorkspace::Closed {
+ folder_paths,
+ project_group_key,
+ } => {
+ let folder_paths = folder_paths.clone();
+ let project_group_key = project_group_key.clone();
self.open_workspace_and_activate_thread(
metadata,
- path_list.clone(),
+ folder_paths,
+ &project_group_key,
window,
cx,
);
@@ -1990,25 +2073,25 @@ impl Sidebar {
is_fully_expanded,
..
} => {
- let path_list = key.path_list().clone();
+ let key = key.clone();
if *is_fully_expanded {
- self.reset_thread_group_expansion(&path_list, cx);
+ self.reset_thread_group_expansion(&key, cx);
} else {
- self.expand_thread_group(&path_list, cx);
+ self.expand_thread_group(&key, cx);
}
}
ListEntry::DraftThread { key, workspace, .. } => {
- if let Some(workspace) = workspace.clone() {
+ let key = key.clone();
+ let workspace = workspace.clone();
+ if let Some(workspace) = workspace.or_else(|| {
+ self.multi_workspace.upgrade().and_then(|mw| {
+ mw.read(cx)
+ .workspace_for_paths(key.path_list(), key.host().as_ref(), cx)
+ })
+ }) {
self.create_new_thread(&workspace, window, cx);
} else {
- let path_list = key.path_list().clone();
- if let Some(workspace) = self.workspace_for_group(&path_list, cx) {
- if !AgentPanel::is_visible(&workspace, cx) {
- workspace.update(cx, |workspace, cx| {
- workspace.focus_panel::<AgentPanel>(window, cx);
- });
- }
- }
+ self.open_workspace_for_group(&key, window, cx);
}
}
}
@@ -2174,7 +2257,8 @@ impl Sidebar {
fn open_workspace_and_activate_thread(
&mut self,
metadata: ThreadMetadata,
- path_list: PathList,
+ folder_paths: PathList,
+ project_group_key: &ProjectGroupKey,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -2182,12 +2266,40 @@ impl Sidebar {
return;
};
+ let pending_session_id = metadata.session_id.clone();
+ let is_remote = project_group_key.host().is_some();
+ if is_remote {
+ self.pending_remote_thread_activation = Some(pending_session_id.clone());
+ }
+
+ let host = project_group_key.host();
+ let provisional_key = Some(project_group_key.clone());
+ let active_workspace = multi_workspace.read(cx).workspace().clone();
+
let open_task = multi_workspace.update(cx, |this, cx| {
- this.find_or_create_local_workspace(path_list, window, cx)
+ this.find_or_create_workspace(
+ folder_paths,
+ host,
+ provisional_key,
+ |options, window, cx| connect_remote(active_workspace, options, window, cx),
+ window,
+ cx,
+ )
});
cx.spawn_in(window, async move |this, cx| {
- let workspace = open_task.await?;
+ let result = open_task.await;
+
+ if result.is_err() || is_remote {
+ this.update(cx, |this, _cx| {
+ if this.pending_remote_thread_activation.as_ref() == Some(&pending_session_id) {
+ this.pending_remote_thread_activation = None;
+ }
+ })
+ .ok();
+ }
+
+ let workspace = result?;
this.update_in(cx, |this, window, cx| {
this.activate_thread(metadata, &workspace, false, window, cx);
})?;
@@ -2234,6 +2346,18 @@ impl Sidebar {
if let Some(workspace) = active_workspace {
self.activate_thread_locally(&metadata, &workspace, false, window, cx);
+ } else {
+ let path_list = metadata.folder_paths.clone();
+ if let Some((target_window, workspace)) =
+ self.find_open_workspace_for_path_list(&path_list, cx)
+ {
+ self.activate_thread_in_other_window(metadata, workspace, target_window, cx);
+ } else {
+ // Archived thread metadata doesn't carry the remote host,
+ // so we construct a local-only key as a best-effort fallback.
+ let key = ProjectGroupKey::new(None, path_list.clone());
+ self.open_workspace_and_activate_thread(metadata, path_list, &key, window, cx);
+ }
}
return;
}
@@ -2266,7 +2390,10 @@ impl Sidebar {
cx,
);
} else {
- this.open_workspace_and_activate_thread(metadata, path_list, window, cx);
+ let key = ProjectGroupKey::new(None, path_list.clone());
+ this.open_workspace_and_activate_thread(
+ metadata, path_list, &key, window, cx,
+ );
}
})?;
return anyhow::Ok(());
@@ -2329,9 +2456,11 @@ impl Sidebar {
if let Some(updated_metadata) = updated_metadata {
let new_paths = updated_metadata.folder_paths.clone();
this.update_in(cx, |this, window, cx| {
+ let key = ProjectGroupKey::new(None, new_paths.clone());
this.open_workspace_and_activate_thread(
updated_metadata,
new_paths,
+ &key,
window,
cx,
);
@@ -2354,9 +2483,8 @@ impl Sidebar {
match self.contents.entries.get(ix) {
Some(ListEntry::ProjectHeader { key, .. }) => {
- if self.collapsed_groups.contains(key.path_list()) {
- let path_list = key.path_list().clone();
- self.collapsed_groups.remove(&path_list);
+ if self.collapsed_groups.contains(key) {
+ self.collapsed_groups.remove(key);
self.update_entries(cx);
} else if ix + 1 < self.contents.entries.len() {
self.selection = Some(ix + 1);
@@ -2378,8 +2506,8 @@ impl Sidebar {
match self.contents.entries.get(ix) {
Some(ListEntry::ProjectHeader { key, .. }) => {
- if !self.collapsed_groups.contains(key.path_list()) {
- self.collapsed_groups.insert(key.path_list().clone());
+ if !self.collapsed_groups.contains(key) {
+ self.collapsed_groups.insert(key.clone());
self.update_entries(cx);
}
}
@@ -2390,7 +2518,7 @@ impl Sidebar {
if let Some(ListEntry::ProjectHeader { key, .. }) = self.contents.entries.get(i)
{
self.selection = Some(i);
- self.collapsed_groups.insert(key.path_list().clone());
+ self.collapsed_groups.insert(key.clone());
self.update_entries(cx);
break;
}
@@ -2425,12 +2553,11 @@ impl Sidebar {
if let Some(header_ix) = header_ix {
if let Some(ListEntry::ProjectHeader { key, .. }) = self.contents.entries.get(header_ix)
{
- let path_list = key.path_list();
- if self.collapsed_groups.contains(path_list) {
- self.collapsed_groups.remove(path_list);
+ if self.collapsed_groups.contains(key) {
+ self.collapsed_groups.remove(key);
} else {
self.selection = Some(header_ix);
- self.collapsed_groups.insert(path_list.clone());
+ self.collapsed_groups.insert(key.clone());
}
self.update_entries(cx);
}
@@ -2445,7 +2572,7 @@ impl Sidebar {
) {
for entry in &self.contents.entries {
if let ListEntry::ProjectHeader { key, .. } = entry {
- self.collapsed_groups.insert(key.path_list().clone());
+ self.collapsed_groups.insert(key.clone());
}
}
self.update_entries(cx);
@@ -2549,7 +2676,9 @@ impl Sidebar {
ThreadEntryWorkspace::Open(ws) => {
PathList::new(&ws.read(cx).root_paths(cx))
}
- ThreadEntryWorkspace::Closed(paths) => paths.clone(),
+ ThreadEntryWorkspace::Closed { folder_paths, .. } => {
+ folder_paths.clone()
+ }
};
Some((t.metadata.clone(), workspace_paths))
}
@@ -2574,9 +2703,12 @@ impl Sidebar {
}
let multi_workspace = self.multi_workspace.upgrade()?;
+ // Thread metadata doesn't carry host info yet, so we pass
+ // `None` here. This may match a local workspace with the same
+ // paths instead of the intended remote one.
let workspace = multi_workspace
.read(cx)
- .workspace_for_paths(folder_paths, cx)?;
+ .workspace_for_paths(folder_paths, None, cx)?;
// Don't remove the main worktree workspace — the project
// header always provides access to it.
@@ -2690,7 +2822,7 @@ impl Sidebar {
if let Some(workspace) = self
.multi_workspace
.upgrade()
- .and_then(|mw| mw.read(cx).workspace_for_paths(folder_paths, cx))
+ .and_then(|mw| mw.read(cx).workspace_for_paths(folder_paths, None, cx))
{
if let Some(panel) = workspace.read(cx).panel::<AgentPanel>(cx) {
let panel_shows_archived = panel
@@ -2714,11 +2846,10 @@ impl Sidebar {
// `rebuild_contents` will reconcile `active_entry` once the thread
// finishes loading.
if let Some(metadata) = neighbor {
- if let Some(workspace) = self
- .multi_workspace
- .upgrade()
- .and_then(|mw| mw.read(cx).workspace_for_paths(&metadata.folder_paths, cx))
- {
+ if let Some(workspace) = self.multi_workspace.upgrade().and_then(|mw| {
+ mw.read(cx)
+ .workspace_for_paths(&metadata.folder_paths, None, cx)
+ }) {
self.activate_workspace(&workspace, window, cx);
Self::load_agent_thread_in_workspace(&workspace, metadata, true, window, cx);
return;
@@ -2735,9 +2866,9 @@ impl Sidebar {
let mw = mw.read(cx);
// Find the group's main workspace (whose root paths match
// the project group key, not the thread's folder paths).
- let thread_workspace = mw.workspace_for_paths(folder_paths, cx)?;
+ let thread_workspace = mw.workspace_for_paths(folder_paths, None, cx)?;
let group_key = thread_workspace.read(cx).project_group_key(cx);
- mw.workspace_for_paths(group_key.path_list(), cx)
+ mw.workspace_for_paths(group_key.path_list(), None, cx)
})
.or_else(|| self.active_entry_workspace().cloned());
@@ -2915,7 +3046,7 @@ impl Sidebar {
fn mru_threads_for_switcher(&self, cx: &App) -> Vec<ThreadSwitcherEntry> {
let mut current_header_label: Option<SharedString> = None;
- let mut current_header_path_list: Option<PathList> = None;
+ let mut current_header_key: Option<ProjectGroupKey> = None;
let mut entries: Vec<ThreadSwitcherEntry> = self
.contents
.entries
@@ -2923,15 +3054,23 @@ impl Sidebar {
.filter_map(|entry| match entry {
ListEntry::ProjectHeader { label, key, .. } => {
current_header_label = Some(label.clone());
- current_header_path_list = Some(key.path_list().clone());
+ current_header_key = Some(key.clone());
None
}
ListEntry::Thread(thread) => {
let workspace = match &thread.workspace {
ThreadEntryWorkspace::Open(workspace) => Some(workspace.clone()),
- ThreadEntryWorkspace::Closed(_) => current_header_path_list
- .as_ref()
- .and_then(|pl| self.workspace_for_group(pl, cx)),
+ ThreadEntryWorkspace::Closed { .. } => {
+ current_header_key.as_ref().and_then(|key| {
+ self.multi_workspace.upgrade().and_then(|mw| {
+ mw.read(cx).workspace_for_paths(
+ key.path_list(),
+ key.host().as_ref(),
+ cx,
+ )
+ })
+ })
+ }
}?;
let notified = self
.contents
@@ -3219,10 +3358,13 @@ impl Sidebar {
.unwrap_or(thread.metadata.updated_at),
);
+ let is_remote = thread.workspace.is_remote(cx);
+
ThreadItem::new(id, title)
.base_bg(sidebar_bg)
.icon(thread.icon)
.status(thread.status)
+ .is_remote(is_remote)
.when_some(thread.icon_from_external_svg.clone(), |this, svg| {
this.custom_icon_from_external_svg(svg)
})
@@ -3305,10 +3447,14 @@ impl Sidebar {
ThreadEntryWorkspace::Open(workspace) => {
this.activate_thread(metadata.clone(), workspace, false, window, cx);
}
- ThreadEntryWorkspace::Closed(path_list) => {
+ ThreadEntryWorkspace::Closed {
+ folder_paths,
+ project_group_key,
+ } => {
this.open_workspace_and_activate_thread(
metadata.clone(),
- path_list.clone(),
+ folder_paths.clone(),
+ project_group_key,
window,
cx,
);
@@ -3388,12 +3534,12 @@ impl Sidebar {
fn render_view_more(
&self,
ix: usize,
- path_list: &PathList,
+ key: &ProjectGroupKey,
is_fully_expanded: bool,
is_selected: bool,
cx: &mut Context<Self>,
) -> AnyElement {
- let path_list = path_list.clone();
+ let key = key.clone();
let id = SharedString::from(format!("view-more-{}", ix));
let label: SharedString = if is_fully_expanded {
@@ -3409,9 +3555,9 @@ impl Sidebar {
.on_click(cx.listener(move |this, _, _window, cx| {
this.selection = None;
if is_fully_expanded {
- this.reset_thread_group_expansion(&path_list, cx);
+ this.reset_thread_group_expansion(&key, cx);
} else {
- this.expand_thread_group(&path_list, cx);
+ this.expand_thread_group(&key, cx);
}
}))
.into_any_element()
@@ -3434,7 +3580,13 @@ impl Sidebar {
.find(|&&header_ix| header_ix <= selected_ix)
.and_then(|&header_ix| match &self.contents.entries[header_ix] {
ListEntry::ProjectHeader { key, .. } => {
- self.workspace_for_group(key.path_list(), cx)
+ self.multi_workspace.upgrade().and_then(|mw| {
+ mw.read(cx).workspace_for_paths(
+ key.path_list(),
+ key.host().as_ref(),
+ cx,
+ )
+ })
}
_ => None,
})
@@ -3480,8 +3632,8 @@ impl Sidebar {
fn active_project_group_key(&self, cx: &App) -> Option<ProjectGroupKey> {
let multi_workspace = self.multi_workspace.upgrade()?;
- let mw = multi_workspace.read(cx);
- Some(mw.workspace().read(cx).project_group_key(cx))
+ let multi_workspace = multi_workspace.read(cx);
+ Some(multi_workspace.project_group_key_for_workspace(multi_workspace.workspace(), cx))
}
fn active_project_header_position(&self, cx: &App) -> Option<usize> {
@@ -3525,18 +3677,21 @@ impl Sidebar {
else {
return;
};
- let path_list = key.path_list().clone();
+ let key = key.clone();
// Uncollapse the target group so that threads become visible.
- self.collapsed_groups.remove(&path_list);
+ self.collapsed_groups.remove(&key);
- if let Some(workspace) = self.workspace_for_group(&path_list, cx) {
+ if let Some(workspace) = self.multi_workspace.upgrade().and_then(|mw| {
+ mw.read(cx)
+ .workspace_for_paths(key.path_list(), key.host().as_ref(), cx)
+ }) {
multi_workspace.update(cx, |multi_workspace, cx| {
multi_workspace.activate(workspace, window, cx);
multi_workspace.retain_active_workspace(cx);
});
} else {
- self.open_workspace_for_group(&path_list, window, cx);
+ self.open_workspace_for_group(&key, window, cx);
}
}
@@ -3598,8 +3753,19 @@ impl Sidebar {
let workspace = workspace.clone();
self.activate_thread(metadata, &workspace, true, window, cx);
}
- ThreadEntryWorkspace::Closed(path_list) => {
- self.open_workspace_and_activate_thread(metadata, path_list.clone(), window, cx);
+ ThreadEntryWorkspace::Closed {
+ folder_paths,
+ project_group_key,
+ } => {
+ let folder_paths = folder_paths.clone();
+ let project_group_key = project_group_key.clone();
+ self.open_workspace_and_activate_thread(
+ metadata,
+ folder_paths,
+ &project_group_key,
+ window,
+ cx,
+ );
}
}
}
@@ -3617,26 +3783,40 @@ impl Sidebar {
self.cycle_thread_impl(false, window, cx);
}
- fn expand_thread_group(&mut self, path_list: &PathList, cx: &mut Context<Self>) {
- let current = self.expanded_groups.get(path_list).copied().unwrap_or(0);
- self.expanded_groups.insert(path_list.clone(), current + 1);
+ fn expand_thread_group(&mut self, project_group_key: &ProjectGroupKey, cx: &mut Context<Self>) {
+ let current = self
+ .expanded_groups
+ .get(project_group_key)
+ .copied()
+ .unwrap_or(0);
+ self.expanded_groups
+ .insert(project_group_key.clone(), current + 1);
self.serialize(cx);
self.update_entries(cx);
}
- fn reset_thread_group_expansion(&mut self, path_list: &PathList, cx: &mut Context<Self>) {
- self.expanded_groups.remove(path_list);
+ fn reset_thread_group_expansion(
+ &mut self,
+ project_group_key: &ProjectGroupKey,
+ cx: &mut Context<Self>,
+ ) {
+ self.expanded_groups.remove(project_group_key);
self.serialize(cx);
self.update_entries(cx);
}
- fn collapse_thread_group(&mut self, path_list: &PathList, cx: &mut Context<Self>) {
- match self.expanded_groups.get(path_list).copied() {
+ fn collapse_thread_group(
+ &mut self,
+ project_group_key: &ProjectGroupKey,
+ cx: &mut Context<Self>,
+ ) {
+ match self.expanded_groups.get(project_group_key).copied() {
Some(batches) if batches > 1 => {
- self.expanded_groups.insert(path_list.clone(), batches - 1);
+ self.expanded_groups
+ .insert(project_group_key.clone(), batches - 1);
}
Some(_) => {
- self.expanded_groups.remove(path_list);
+ self.expanded_groups.remove(project_group_key);
}
None => return,
}
@@ -3653,7 +3833,7 @@ impl Sidebar {
let Some(active_key) = self.active_project_group_key(cx) else {
return;
};
- self.expand_thread_group(active_key.path_list(), cx);
+ self.expand_thread_group(&active_key, cx);
}
fn on_show_fewer_threads(
@@ -3665,7 +3845,7 @@ impl Sidebar {
let Some(active_key) = self.active_project_group_key(cx) else {
return;
};
- self.collapse_thread_group(active_key.path_list(), cx);
+ self.collapse_thread_group(&active_key, cx);
}
fn on_new_thread(
@@ -60,6 +60,75 @@ fn has_thread_entry(sidebar: &Sidebar, session_id: &acp::SessionId) -> bool {
.any(|entry| matches!(entry, ListEntry::Thread(t) if &t.metadata.session_id == session_id))
}
+#[track_caller]
+fn assert_remote_project_integration_sidebar_state(
+ sidebar: &mut Sidebar,
+ main_thread_id: &acp::SessionId,
+ remote_thread_id: &acp::SessionId,
+) {
+ let mut project_headers = sidebar.contents.entries.iter().filter_map(|entry| {
+ if let ListEntry::ProjectHeader { label, .. } = entry {
+ Some(label.as_ref())
+ } else {
+ None
+ }
+ });
+
+ let Some(project_header) = project_headers.next() else {
+ panic!("expected exactly one sidebar project header named `project`, found none");
+ };
+ assert_eq!(
+ project_header, "project",
+ "expected the only sidebar project header to be `project`"
+ );
+ if let Some(unexpected_header) = project_headers.next() {
+ panic!(
+ "expected exactly one sidebar project header named `project`, found extra header `{unexpected_header}`"
+ );
+ }
+
+ let mut saw_main_thread = false;
+ let mut saw_remote_thread = false;
+ for entry in &sidebar.contents.entries {
+ match entry {
+ ListEntry::ProjectHeader { label, .. } => {
+ assert_eq!(
+ label.as_ref(),
+ "project",
+ "expected the only sidebar project header to be `project`"
+ );
+ }
+ ListEntry::Thread(thread) if &thread.metadata.session_id == main_thread_id => {
+ saw_main_thread = true;
+ }
+ ListEntry::Thread(thread) if &thread.metadata.session_id == remote_thread_id => {
+ saw_remote_thread = true;
+ }
+ ListEntry::Thread(thread) => {
+ let title = thread.metadata.title.as_ref();
+ panic!(
+ "unexpected sidebar thread while simulating remote project integration flicker: title=`{title}`"
+ );
+ }
+ ListEntry::ViewMore { .. } => {
+ panic!(
+ "unexpected `View More` entry while simulating remote project integration flicker"
+ );
+ }
+ ListEntry::DraftThread { .. } => {}
+ }
+ }
+
+ assert!(
+ saw_main_thread,
+ "expected the sidebar to keep showing `Main Thread` under `project`"
+ );
+ assert!(
+ saw_remote_thread,
+ "expected the sidebar to keep showing `Worktree Thread` under `project`"
+ );
+}
+
async fn init_test_project(
worktree_path: &str,
cx: &mut TestAppContext,
@@ -260,7 +329,7 @@ fn visible_entries_as_strings(
highlight_positions: _,
..
} => {
- let icon = if sidebar.collapsed_groups.contains(key.path_list()) {
+ let icon = if sidebar.collapsed_groups.contains(key) {
">"
} else {
"v"
@@ -323,15 +392,13 @@ async fn test_serialization_round_trip(cx: &mut TestAppContext) {
save_n_test_threads(3, &project, cx).await;
- let path_list = project.read_with(cx, |project, cx| {
- project.project_group_key(cx).path_list().clone()
- });
+ let project_group_key = project.read_with(cx, |project, cx| project.project_group_key(cx));
// Set a custom width, collapse the group, and expand "View More".
sidebar.update_in(cx, |sidebar, window, cx| {
sidebar.set_width(Some(px(420.0)), cx);
- sidebar.toggle_collapse(&path_list, window, cx);
- sidebar.expanded_groups.insert(path_list.clone(), 2);
+ sidebar.toggle_collapse(&project_group_key, window, cx);
+ sidebar.expanded_groups.insert(project_group_key.clone(), 2);
});
cx.run_until_parked();
@@ -369,8 +436,8 @@ async fn test_serialization_round_trip(cx: &mut TestAppContext) {
assert_eq!(collapsed1, collapsed2);
assert_eq!(expanded1, expanded2);
assert_eq!(width1, px(420.0));
- assert!(collapsed1.contains(&path_list));
- assert_eq!(expanded1.get(&path_list), Some(&2));
+ assert!(collapsed1.contains(&project_group_key));
+ assert_eq!(expanded1.get(&project_group_key), Some(&2));
}
#[gpui::test]
@@ -593,9 +660,7 @@ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) {
// Create 17 threads: initially shows 5, then 10, then 15, then all 17 with Collapse
save_n_test_threads(17, &project, cx).await;
- let path_list = project.read_with(cx, |project, cx| {
- project.project_group_key(cx).path_list().clone()
- });
+ let project_group_key = project.read_with(cx, |project, cx| project.project_group_key(cx));
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
@@ -620,8 +685,13 @@ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) {
// Expand again by one batch
sidebar.update_in(cx, |s, _window, cx| {
- let current = s.expanded_groups.get(&path_list).copied().unwrap_or(0);
- s.expanded_groups.insert(path_list.clone(), current + 1);
+ let current = s
+ .expanded_groups
+ .get(&project_group_key)
+ .copied()
+ .unwrap_or(0);
+ s.expanded_groups
+ .insert(project_group_key.clone(), current + 1);
s.update_entries(cx);
});
cx.run_until_parked();
@@ -633,8 +703,13 @@ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) {
// Expand one more time - should show all 17 threads with Collapse button
sidebar.update_in(cx, |s, _window, cx| {
- let current = s.expanded_groups.get(&path_list).copied().unwrap_or(0);
- s.expanded_groups.insert(path_list.clone(), current + 1);
+ let current = s
+ .expanded_groups
+ .get(&project_group_key)
+ .copied()
+ .unwrap_or(0);
+ s.expanded_groups
+ .insert(project_group_key.clone(), current + 1);
s.update_entries(cx);
});
cx.run_until_parked();
@@ -647,7 +722,7 @@ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) {
// Click collapse - should go back to showing 5 threads
sidebar.update_in(cx, |s, _window, cx| {
- s.expanded_groups.remove(&path_list);
+ s.expanded_groups.remove(&project_group_key);
s.update_entries(cx);
});
cx.run_until_parked();
@@ -667,9 +742,7 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
save_n_test_threads(1, &project, cx).await;
- let path_list = project.read_with(cx, |project, cx| {
- project.project_group_key(cx).path_list().clone()
- });
+ let project_group_key = project.read_with(cx, |project, cx| project.project_group_key(cx));
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
@@ -681,7 +754,7 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
// Collapse
sidebar.update_in(cx, |s, window, cx| {
- s.toggle_collapse(&path_list, window, cx);
+ s.toggle_collapse(&project_group_key, window, cx);
});
cx.run_until_parked();
@@ -692,7 +765,7 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
// Expand
sidebar.update_in(cx, |s, window, cx| {
- s.toggle_collapse(&path_list, window, cx);
+ s.toggle_collapse(&project_group_key, window, cx);
});
cx.run_until_parked();
@@ -714,7 +787,8 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
let collapsed_path = PathList::new(&[std::path::PathBuf::from("/collapsed")]);
sidebar.update_in(cx, |s, _window, _cx| {
- s.collapsed_groups.insert(collapsed_path.clone());
+ s.collapsed_groups
+ .insert(project::ProjectGroupKey::new(None, collapsed_path.clone()));
s.contents
.notified_threads
.insert(acp::SessionId::new(Arc::from("t-5")));
@@ -2012,7 +2086,8 @@ async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppCo
sidebar.update_in(cx, |sidebar, window, cx| {
sidebar.selection = None;
let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
- sidebar.toggle_collapse(&path_list, window, cx);
+ let project_group_key = project::ProjectGroupKey::new(None, path_list);
+ sidebar.toggle_collapse(&project_group_key, window, cx);
});
assert_eq!(sidebar.read_with(cx, |sidebar, _| sidebar.selection), None);
@@ -6102,17 +6177,17 @@ async fn test_linked_worktree_workspace_reachable_after_adding_unrelated_project
// Force a full sidebar rebuild with all groups expanded.
sidebar.update_in(cx, |sidebar, _window, cx| {
sidebar.collapsed_groups.clear();
- let path_lists: Vec<PathList> = sidebar
+ let group_keys: Vec<project::ProjectGroupKey> = sidebar
.contents
.entries
.iter()
.filter_map(|entry| match entry {
- ListEntry::ProjectHeader { key, .. } => Some(key.path_list().clone()),
+ ListEntry::ProjectHeader { key, .. } => Some(key.clone()),
_ => None,
})
.collect();
- for path_list in path_lists {
- sidebar.expanded_groups.insert(path_list, 10_000);
+ for group_key in group_keys {
+ sidebar.expanded_groups.insert(group_key, 10_000);
}
sidebar.update_entries(cx);
});
@@ -6524,17 +6599,17 @@ mod property_test {
fn update_sidebar(sidebar: &Entity<Sidebar>, cx: &mut gpui::VisualTestContext) {
sidebar.update_in(cx, |sidebar, _window, cx| {
sidebar.collapsed_groups.clear();
- let path_lists: Vec<PathList> = sidebar
+ let group_keys: Vec<project::ProjectGroupKey> = sidebar
.contents
.entries
.iter()
.filter_map(|entry| match entry {
- ListEntry::ProjectHeader { key, .. } => Some(key.path_list().clone()),
+ ListEntry::ProjectHeader { key, .. } => Some(key.clone()),
_ => None,
})
.collect();
- for path_list in path_lists {
- sidebar.expanded_groups.insert(path_list, 10_000);
+ for group_key in group_keys {
+ sidebar.expanded_groups.insert(group_key, 10_000);
}
sidebar.update_entries(cx);
});
@@ -6872,3 +6947,294 @@ mod property_test {
}
}
}
+
+#[gpui::test]
+async fn test_remote_project_integration_does_not_briefly_render_as_separate_project(
+ cx: &mut TestAppContext,
+ server_cx: &mut TestAppContext,
+) {
+ init_test(cx);
+
+ cx.update(|cx| {
+ release_channel::init(semver::Version::new(0, 0, 0), cx);
+ });
+
+ let app_state = cx.update(|cx| {
+ let app_state = workspace::AppState::test(cx);
+ workspace::init(app_state.clone(), cx);
+ app_state
+ });
+
+ // Set up the remote server side.
+ let server_fs = FakeFs::new(server_cx.executor());
+ server_fs
+ .insert_tree(
+ "/project",
+ serde_json::json!({
+ ".git": {},
+ "src": { "main.rs": "fn main() {}" }
+ }),
+ )
+ .await;
+ server_fs.set_branch_name(Path::new("/project/.git"), Some("main"));
+
+ // Create the linked worktree checkout path on the remote server,
+ // but do not yet register it as a git-linked worktree. The real
+ // regrouping update in this test should happen only after the
+ // sidebar opens the closed remote thread.
+ server_fs
+ .insert_tree(
+ "/project-wt-1",
+ serde_json::json!({
+ "src": { "main.rs": "fn main() {}" }
+ }),
+ )
+ .await;
+
+ server_cx.update(|cx| {
+ release_channel::init(semver::Version::new(0, 0, 0), cx);
+ });
+
+ let (original_opts, server_session, _) = remote::RemoteClient::fake_server(cx, server_cx);
+
+ server_cx.update(remote_server::HeadlessProject::init);
+ let server_executor = server_cx.executor();
+ let _headless = server_cx.new(|cx| {
+ remote_server::HeadlessProject::new(
+ remote_server::HeadlessAppState {
+ session: server_session,
+ fs: server_fs.clone(),
+ http_client: Arc::new(http_client::BlockedHttpClient),
+ node_runtime: node_runtime::NodeRuntime::unavailable(),
+ languages: Arc::new(language::LanguageRegistry::new(server_executor.clone())),
+ extension_host_proxy: Arc::new(extension::ExtensionHostProxy::new()),
+ startup_time: std::time::Instant::now(),
+ },
+ false,
+ cx,
+ )
+ });
+
+ // Connect the client side and build a remote project.
+ let remote_client = remote::RemoteClient::connect_mock(original_opts.clone(), cx).await;
+ let project = cx.update(|cx| {
+ let project_client = client::Client::new(
+ Arc::new(clock::FakeSystemClock::new()),
+ http_client::FakeHttpClient::with_404_response(),
+ cx,
+ );
+ let user_store = cx.new(|cx| client::UserStore::new(project_client.clone(), cx));
+ project::Project::remote(
+ remote_client,
+ project_client,
+ node_runtime::NodeRuntime::unavailable(),
+ user_store,
+ app_state.languages.clone(),
+ app_state.fs.clone(),
+ false,
+ cx,
+ )
+ });
+
+ // Open the remote worktree.
+ project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree(Path::new("/project"), true, cx)
+ })
+ .await
+ .expect("should open remote worktree");
+ cx.run_until_parked();
+
+ // Verify the project is remote.
+ project.read_with(cx, |project, cx| {
+ assert!(!project.is_local(), "project should be remote");
+ assert!(
+ project.remote_connection_options(cx).is_some(),
+ "project should have remote connection options"
+ );
+ });
+
+ cx.update(|cx| <dyn fs::Fs>::set_global(app_state.fs.clone(), cx));
+
+ // Create MultiWorkspace with the remote project.
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ let sidebar = setup_sidebar(&multi_workspace, cx);
+
+ cx.run_until_parked();
+
+ // Save a thread for the main remote workspace (folder_paths match
+ // the open workspace, so it will be classified as Open).
+ let main_thread_id = acp::SessionId::new(Arc::from("main-thread"));
+ save_thread_metadata(
+ main_thread_id.clone(),
+ "Main Thread".into(),
+ chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(),
+ None,
+ &project,
+ cx,
+ );
+ cx.run_until_parked();
+
+ // Save a thread whose folder_paths point to a linked worktree path
+ // that doesn't have an open workspace ("/project-wt-1"), but whose
+ // main_worktree_paths match the project group key so it appears
+ // in the sidebar under the same remote group. This simulates a
+ // linked worktree workspace that was closed.
+ let remote_thread_id = acp::SessionId::new(Arc::from("remote-thread"));
+ let main_worktree_paths =
+ project.read_with(cx, |p, cx| p.project_group_key(cx).path_list().clone());
+ cx.update(|_window, cx| {
+ let metadata = ThreadMetadata {
+ session_id: remote_thread_id.clone(),
+ agent_id: agent::ZED_AGENT_ID.clone(),
+ title: "Worktree Thread".into(),
+ updated_at: chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 1).unwrap(),
+ created_at: None,
+ folder_paths: PathList::new(&[PathBuf::from("/project-wt-1")]),
+ main_worktree_paths,
+ archived: false,
+ };
+ ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx));
+ });
+ cx.run_until_parked();
+
+ focus_sidebar(&sidebar, cx);
+ sidebar.update_in(cx, |sidebar, _window, _cx| {
+ sidebar.selection = sidebar.contents.entries.iter().position(|entry| {
+ matches!(
+ entry,
+ ListEntry::Thread(thread) if thread.metadata.session_id == remote_thread_id
+ )
+ });
+ });
+
+ let saw_separate_project_header = Arc::new(std::sync::atomic::AtomicBool::new(false));
+ let saw_separate_project_header_for_observer = saw_separate_project_header.clone();
+
+ sidebar
+ .update(cx, |_, cx| {
+ cx.observe_self(move |sidebar, _cx| {
+ let mut project_headers = sidebar.contents.entries.iter().filter_map(|entry| {
+ if let ListEntry::ProjectHeader { label, .. } = entry {
+ Some(label.as_ref())
+ } else {
+ None
+ }
+ });
+
+ let Some(project_header) = project_headers.next() else {
+ saw_separate_project_header_for_observer
+ .store(true, std::sync::atomic::Ordering::SeqCst);
+ return;
+ };
+
+ if project_header != "project" || project_headers.next().is_some() {
+ saw_separate_project_header_for_observer
+ .store(true, std::sync::atomic::Ordering::SeqCst);
+ }
+ })
+ })
+ .detach();
+
+ multi_workspace.update(cx, |multi_workspace, cx| {
+ let workspace = multi_workspace.workspace().clone();
+ workspace.update(cx, |workspace: &mut Workspace, cx| {
+ let remote_client = workspace
+ .project()
+ .read(cx)
+ .remote_client()
+ .expect("main remote project should have a remote client");
+ remote_client.update(cx, |remote_client: &mut remote::RemoteClient, cx| {
+ remote_client.force_server_not_running(cx);
+ });
+ });
+ });
+ cx.run_until_parked();
+
+ let (server_session_2, connect_guard_2) =
+ remote::RemoteClient::fake_server_with_opts(&original_opts, cx, server_cx);
+ let _headless_2 = server_cx.new(|cx| {
+ remote_server::HeadlessProject::new(
+ remote_server::HeadlessAppState {
+ session: server_session_2,
+ fs: server_fs.clone(),
+ http_client: Arc::new(http_client::BlockedHttpClient),
+ node_runtime: node_runtime::NodeRuntime::unavailable(),
+ languages: Arc::new(language::LanguageRegistry::new(server_executor.clone())),
+ extension_host_proxy: Arc::new(extension::ExtensionHostProxy::new()),
+ startup_time: std::time::Instant::now(),
+ },
+ false,
+ cx,
+ )
+ });
+ drop(connect_guard_2);
+
+ let window = cx.windows()[0];
+ cx.update_window(window, |_, window, cx| {
+ window.dispatch_action(Confirm.boxed_clone(), cx);
+ })
+ .unwrap();
+
+ cx.run_until_parked();
+
+ let new_workspace = multi_workspace.read_with(cx, |mw, _| {
+ assert_eq!(
+ mw.workspaces().count(),
+ 2,
+ "confirming a closed remote thread should open a second workspace"
+ );
+ mw.workspaces()
+ .find(|workspace| workspace.entity_id() != mw.workspace().entity_id())
+ .unwrap()
+ .clone()
+ });
+
+ server_fs
+ .add_linked_worktree_for_repo(
+ Path::new("/project/.git"),
+ true,
+ git::repository::Worktree {
+ path: PathBuf::from("/project-wt-1"),
+ ref_name: Some("refs/heads/feature-wt".into()),
+ sha: "abc123".into(),
+ is_main: false,
+ },
+ )
+ .await;
+
+ server_cx.run_until_parked();
+ cx.run_until_parked();
+ server_cx.run_until_parked();
+ cx.run_until_parked();
+
+ let entries_after_update = visible_entries_as_strings(&sidebar, cx);
+ let group_after_update = new_workspace.read_with(cx, |workspace, cx| {
+ workspace.project().read(cx).project_group_key(cx)
+ });
+
+ assert_eq!(
+ group_after_update,
+ project.read_with(cx, |project, cx| project.project_group_key(cx)),
+ "expected the remote worktree workspace to be grouped under the main remote project after the real update; \
+ final sidebar entries: {:?}",
+ entries_after_update,
+ );
+
+ sidebar.update(cx, |sidebar, _cx| {
+ assert_remote_project_integration_sidebar_state(
+ sidebar,
+ &main_thread_id,
+ &remote_thread_id,
+ );
+ });
+
+ assert!(
+ !saw_separate_project_header.load(std::sync::atomic::Ordering::SeqCst),
+ "sidebar briefly rendered the remote worktree as a separate project during the real remote open/update sequence; \
+ final group: {:?}; final sidebar entries: {:?}",
+ group_after_update,
+ entries_after_update,
+ );
+}
@@ -54,6 +54,7 @@ pub struct ThreadItem {
project_paths: Option<Arc<[PathBuf]>>,
project_name: Option<SharedString>,
worktrees: Vec<ThreadItemWorktreeInfo>,
+ is_remote: bool,
on_click: Option<Box<dyn Fn(&ClickEvent, &mut Window, &mut App) + 'static>>,
on_hover: Box<dyn Fn(&bool, &mut Window, &mut App) + 'static>,
action_slot: Option<AnyElement>,
@@ -86,6 +87,7 @@ impl ThreadItem {
project_paths: None,
project_name: None,
worktrees: Vec::new(),
+ is_remote: false,
on_click: None,
on_hover: Box::new(|_, _, _| {}),
action_slot: None,
@@ -179,6 +181,11 @@ impl ThreadItem {
self
}
+ pub fn is_remote(mut self, is_remote: bool) -> Self {
+ self.is_remote = is_remote;
+ self
+ }
+
pub fn hovered(mut self, hovered: bool) -> Self {
self.hovered = hovered;
self
@@ -443,10 +450,11 @@ impl RenderOnce for ThreadItem {
.join("\n")
.into();
- let worktree_tooltip_title = if self.worktrees.len() > 1 {
- "Thread Running in Local Git Worktrees"
- } else {
- "Thread Running in a Local Git Worktree"
+ let worktree_tooltip_title = match (self.is_remote, self.worktrees.len() > 1) {
+ (true, true) => "Thread Running in Remote Git Worktrees",
+ (true, false) => "Thread Running in a Remote Git Worktree",
+ (false, true) => "Thread Running in Local Git Worktrees",
+ (false, false) => "Thread Running in a Local Git Worktree",
};
// Deduplicate chips by name — e.g. two paths both named
@@ -6,8 +6,10 @@ use gpui::{
actions, deferred, px,
};
use project::{DirectoryLister, DisableAiSettings, Project, ProjectGroupKey};
+use remote::RemoteConnectionOptions;
use settings::Settings;
pub use settings::SidebarSide;
+use std::collections::{HashMap, HashSet};
use std::future::Future;
use std::path::Path;
use std::path::PathBuf;
@@ -22,6 +24,7 @@ use ui::{ContextMenu, right_click_menu};
const SIDEBAR_RESIZE_HANDLE_SIZE: Pixels = px(6.0);
+use crate::open_remote_project_with_existing_connection;
use crate::{
CloseIntent, CloseWindow, DockPosition, Event as WorkspaceEvent, Item, ModalView, OpenMode,
Panel, Workspace, WorkspaceId, client_side_decorations,
@@ -299,6 +302,7 @@ pub struct MultiWorkspace {
workspaces: Vec<Entity<Workspace>>,
active_workspace: ActiveWorkspace,
project_group_keys: Vec<ProjectGroupKey>,
+ provisional_project_group_keys: HashMap<EntityId, ProjectGroupKey>,
sidebar: Option<Box<dyn SidebarHandle>>,
sidebar_open: bool,
sidebar_overlay: Option<AnyView>,
@@ -351,6 +355,7 @@ impl MultiWorkspace {
Self {
window_id: window.window_handle().window_id(),
project_group_keys: Vec::new(),
+ provisional_project_group_keys: HashMap::default(),
workspaces: Vec::new(),
active_workspace: ActiveWorkspace::Transient(workspace),
sidebar: None,
@@ -559,6 +564,16 @@ impl MultiWorkspace {
this.add_project_group_key(workspace.read(cx).project_group_key(cx));
}
}
+ project::Event::WorktreeUpdatedRootRepoCommonDir(_) => {
+ if let Some(workspace) = workspace.upgrade() {
+ this.maybe_clear_provisional_project_group_key(&workspace, cx);
+ this.add_project_group_key(
+ this.project_group_key_for_workspace(&workspace, cx),
+ );
+ this.remove_stale_project_group_keys(cx);
+ cx.notify();
+ }
+ }
_ => {}
}
})
@@ -583,6 +598,53 @@ impl MultiWorkspace {
self.project_group_keys.insert(0, project_group_key);
}
+ pub fn set_provisional_project_group_key(
+ &mut self,
+ workspace: &Entity<Workspace>,
+ project_group_key: ProjectGroupKey,
+ ) {
+ self.provisional_project_group_keys
+ .insert(workspace.entity_id(), project_group_key.clone());
+ self.add_project_group_key(project_group_key);
+ }
+
+ pub fn project_group_key_for_workspace(
+ &self,
+ workspace: &Entity<Workspace>,
+ cx: &App,
+ ) -> ProjectGroupKey {
+ self.provisional_project_group_keys
+ .get(&workspace.entity_id())
+ .cloned()
+ .unwrap_or_else(|| workspace.read(cx).project_group_key(cx))
+ }
+
+ fn maybe_clear_provisional_project_group_key(
+ &mut self,
+ workspace: &Entity<Workspace>,
+ cx: &App,
+ ) {
+ let live_key = workspace.read(cx).project_group_key(cx);
+ if self
+ .provisional_project_group_keys
+ .get(&workspace.entity_id())
+ .is_some_and(|key| *key == live_key)
+ {
+ self.provisional_project_group_keys
+ .remove(&workspace.entity_id());
+ }
+ }
+
+ fn remove_stale_project_group_keys(&mut self, cx: &App) {
+ let workspace_keys: HashSet<ProjectGroupKey> = self
+ .workspaces
+ .iter()
+ .map(|workspace| self.project_group_key_for_workspace(workspace, cx))
+ .collect();
+ self.project_group_keys
+ .retain(|key| workspace_keys.contains(key));
+ }
+
pub fn restore_project_group_keys(&mut self, keys: Vec<ProjectGroupKey>) {
let mut restored: Vec<ProjectGroupKey> = Vec::with_capacity(keys.len());
for key in keys {
@@ -616,7 +678,7 @@ impl MultiWorkspace {
.map(|key| (key.clone(), Vec::new()))
.collect::<Vec<_>>();
for workspace in &self.workspaces {
- let key = workspace.read(cx).project_group_key(cx);
+ let key = self.project_group_key_for_workspace(workspace, cx);
if let Some((_, workspaces)) = groups.iter_mut().find(|(k, _)| k == &key) {
workspaces.push(workspace.clone());
}
@@ -629,9 +691,9 @@ impl MultiWorkspace {
project_group_key: &ProjectGroupKey,
cx: &App,
) -> impl Iterator<Item = &Entity<Workspace>> {
- self.workspaces
- .iter()
- .filter(move |ws| ws.read(cx).project_group_key(cx) == *project_group_key)
+ self.workspaces.iter().filter(move |workspace| {
+ self.project_group_key_for_workspace(workspace, cx) == *project_group_key
+ })
}
pub fn remove_folder_from_project_group(
@@ -792,14 +854,104 @@ impl MultiWorkspace {
)
}
- /// Finds an existing workspace whose root paths exactly match the given path list.
- pub fn workspace_for_paths(&self, path_list: &PathList, cx: &App) -> Option<Entity<Workspace>> {
+ /// Finds an existing workspace whose root paths and host exactly match.
+ pub fn workspace_for_paths(
+ &self,
+ path_list: &PathList,
+ host: Option<&RemoteConnectionOptions>,
+ cx: &App,
+ ) -> Option<Entity<Workspace>> {
self.workspaces
.iter()
- .find(|ws| PathList::new(&ws.read(cx).root_paths(cx)) == *path_list)
+ .find(|ws| {
+ let key = ws.read(cx).project_group_key(cx);
+ key.host().as_ref() == host
+ && PathList::new(&ws.read(cx).root_paths(cx)) == *path_list
+ })
.cloned()
}
+ /// Finds an existing workspace whose paths match, or creates a new one.
+ ///
+ /// For local projects (`host` is `None`), this delegates to
+ /// [`Self::find_or_create_local_workspace`]. For remote projects, it
+ /// tries an exact path match and, if no existing workspace is found,
+ /// calls `connect_remote` to establish a connection and creates a new
+ /// remote workspace.
+ ///
+ /// The `connect_remote` closure is responsible for any user-facing
+ /// connection UI (e.g. password prompts). It receives the connection
+ /// options and should return a [`Task`] that resolves to the
+ /// [`RemoteClient`] session, or `None` if the connection was
+ /// cancelled.
+ pub fn find_or_create_workspace(
+ &mut self,
+ paths: PathList,
+ host: Option<RemoteConnectionOptions>,
+ provisional_project_group_key: Option<ProjectGroupKey>,
+ connect_remote: impl FnOnce(
+ RemoteConnectionOptions,
+ &mut Window,
+ &mut Context<Self>,
+ ) -> Task<Result<Option<Entity<remote::RemoteClient>>>>
+ + 'static,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Entity<Workspace>>> {
+ if let Some(workspace) = self.workspace_for_paths(&paths, host.as_ref(), cx) {
+ self.activate(workspace.clone(), window, cx);
+ return Task::ready(Ok(workspace));
+ }
+
+ let Some(connection_options) = host else {
+ return self.find_or_create_local_workspace(paths, window, cx);
+ };
+
+ let app_state = self.workspace().read(cx).app_state().clone();
+ let window_handle = window.window_handle().downcast::<MultiWorkspace>();
+ let connect_task = connect_remote(connection_options.clone(), window, cx);
+ let paths_vec = paths.paths().to_vec();
+
+ cx.spawn(async move |_this, cx| {
+ let session = connect_task
+ .await?
+ .ok_or_else(|| anyhow::anyhow!("Remote connection was cancelled"))?;
+
+ let new_project = cx.update(|cx| {
+ Project::remote(
+ session,
+ app_state.client.clone(),
+ app_state.node_runtime.clone(),
+ app_state.user_store.clone(),
+ app_state.languages.clone(),
+ app_state.fs.clone(),
+ true,
+ cx,
+ )
+ });
+
+ let window_handle =
+ window_handle.ok_or_else(|| anyhow::anyhow!("Window is not a MultiWorkspace"))?;
+
+ open_remote_project_with_existing_connection(
+ connection_options,
+ new_project,
+ paths_vec,
+ app_state,
+ window_handle,
+ provisional_project_group_key,
+ cx,
+ )
+ .await?;
+
+ window_handle.update(cx, |multi_workspace, window, cx| {
+ let workspace = multi_workspace.workspace().clone();
+ multi_workspace.add(workspace.clone(), window, cx);
+ workspace
+ })
+ })
+ }
+
/// Finds an existing workspace in this multi-workspace whose paths match,
/// or creates a new one (deserializing its saved state from the database).
/// Never searches other windows or matches workspaces with a superset of
@@ -810,7 +962,7 @@ impl MultiWorkspace {
window: &mut Window,
cx: &mut Context<Self>,
) -> Task<Result<Entity<Workspace>>> {
- if let Some(workspace) = self.workspace_for_paths(&path_list, cx) {
+ if let Some(workspace) = self.workspace_for_paths(&path_list, None, cx) {
self.activate(workspace.clone(), window, cx);
return Task::ready(Ok(workspace));
}
@@ -930,7 +1082,7 @@ impl MultiWorkspace {
/// Promotes a former transient workspace into the persistent list.
/// Returns the index of the newly inserted workspace.
fn promote_transient(&mut self, workspace: Entity<Workspace>, cx: &mut Context<Self>) -> usize {
- let project_group_key = workspace.read(cx).project().read(cx).project_group_key(cx);
+ let project_group_key = self.project_group_key_for_workspace(&workspace, cx);
self.add_project_group_key(project_group_key);
self.workspaces.push(workspace.clone());
cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace));
@@ -967,7 +1119,7 @@ impl MultiWorkspace {
if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) {
index
} else {
- let project_group_key = workspace.read(cx).project().read(cx).project_group_key(cx);
+ let project_group_key = self.project_group_key_for_workspace(&workspace, cx);
Self::subscribe_to_workspace(&workspace, window, cx);
self.sync_sidebar_to_workspace(&workspace, cx);
@@ -86,7 +86,7 @@ pub use persistence::{
WorkspaceDb, delete_unloaded_items,
model::{
DockStructure, ItemId, MultiWorkspaceState, SerializedMultiWorkspace,
- SerializedWorkspaceLocation, SessionWorkspace,
+ SerializedProjectGroupKey, SerializedWorkspaceLocation, SessionWorkspace,
},
read_serialized_multi_workspaces, resolve_worktree_workspaces,
};
@@ -8717,12 +8717,6 @@ pub async fn restore_multiworkspace(
active_workspace,
state,
} = multi_workspace;
- let MultiWorkspaceState {
- sidebar_open,
- project_group_keys,
- sidebar_state,
- ..
- } = state;
let workspace_result = if active_workspace.paths.is_empty() {
cx.update(|cx| {
@@ -8750,9 +8744,8 @@ pub async fn restore_multiworkspace(
Err(err) => {
log::error!("Failed to restore active workspace: {err:#}");
- // Try each project group's paths as a fallback.
let mut fallback_handle = None;
- for key in &project_group_keys {
+ for key in &state.project_group_keys {
let key: ProjectGroupKey = key.clone().into();
let paths = key.path_list().paths().to_vec();
match cx
@@ -8783,20 +8776,47 @@ pub async fn restore_multiworkspace(
}
};
- if !project_group_keys.is_empty() {
- let fs = app_state.fs.clone();
+ apply_restored_multiworkspace_state(window_handle, &state, app_state.fs.clone(), cx).await;
+
+ window_handle
+ .update(cx, |_, window, _cx| {
+ window.activate_window();
+ })
+ .ok();
+
+ Ok(window_handle)
+}
+pub async fn apply_restored_multiworkspace_state(
+ window_handle: WindowHandle<MultiWorkspace>,
+ state: &MultiWorkspaceState,
+ fs: Arc<dyn fs::Fs>,
+ cx: &mut AsyncApp,
+) {
+ let MultiWorkspaceState {
+ sidebar_open,
+ project_group_keys,
+ sidebar_state,
+ ..
+ } = state;
+
+ if !project_group_keys.is_empty() {
// Resolve linked worktree paths to their main repo paths so
// stale keys from previous sessions get normalized and deduped.
let mut resolved_keys: Vec<ProjectGroupKey> = Vec::new();
- for key in project_group_keys.into_iter().map(ProjectGroupKey::from) {
+ for key in project_group_keys
+ .iter()
+ .cloned()
+ .map(ProjectGroupKey::from)
+ {
if key.path_list().paths().is_empty() {
continue;
}
let mut resolved_paths = Vec::new();
for path in key.path_list().paths() {
- if let Some(common_dir) =
- project::discover_root_repo_common_dir(path, fs.as_ref()).await
+ if key.host().is_none()
+ && let Some(common_dir) =
+ project::discover_root_repo_common_dir(path, fs.as_ref()).await
{
let main_path = common_dir.parent().unwrap_or(&common_dir);
resolved_paths.push(main_path.to_path_buf());
@@ -8817,7 +8837,7 @@ pub async fn restore_multiworkspace(
.ok();
}
- if sidebar_open {
+ if *sidebar_open {
window_handle
.update(cx, |multi_workspace, _, cx| {
multi_workspace.open_sidebar(cx);
@@ -8829,20 +8849,12 @@ pub async fn restore_multiworkspace(
window_handle
.update(cx, |multi_workspace, window, cx| {
if let Some(sidebar) = multi_workspace.sidebar() {
- sidebar.restore_serialized_state(&sidebar_state, window, cx);
+ sidebar.restore_serialized_state(sidebar_state, window, cx);
}
multi_workspace.serialize(cx);
})
.ok();
}
-
- window_handle
- .update(cx, |_, window, _cx| {
- window.activate_window();
- })
- .ok();
-
- Ok(window_handle)
}
actions!(
@@ -9771,6 +9783,7 @@ pub fn open_remote_project_with_new_connection(
serialized_workspace,
app_state,
window,
+ None,
cx,
)
.await
@@ -9783,6 +9796,7 @@ pub fn open_remote_project_with_existing_connection(
paths: Vec<PathBuf>,
app_state: Arc<AppState>,
window: WindowHandle<MultiWorkspace>,
+ provisional_project_group_key: Option<ProjectGroupKey>,
cx: &mut AsyncApp,
) -> Task<Result<Vec<Option<Box<dyn ItemHandle>>>>> {
cx.spawn(async move |cx| {
@@ -9796,6 +9810,7 @@ pub fn open_remote_project_with_existing_connection(
serialized_workspace,
app_state,
window,
+ provisional_project_group_key,
cx,
)
.await
@@ -9809,6 +9824,7 @@ async fn open_remote_project_inner(
serialized_workspace: Option<SerializedWorkspace>,
app_state: Arc<AppState>,
window: WindowHandle<MultiWorkspace>,
+ provisional_project_group_key: Option<ProjectGroupKey>,
cx: &mut AsyncApp,
) -> Result<Vec<Option<Box<dyn ItemHandle>>>> {
let db = cx.update(|cx| WorkspaceDb::global(cx));
@@ -9869,6 +9885,9 @@ async fn open_remote_project_inner(
workspace
});
+ if let Some(project_group_key) = provisional_project_group_key.clone() {
+ multi_workspace.set_provisional_project_group_key(&new_workspace, project_group_key);
+ }
multi_workspace.activate(new_workspace.clone(), window, cx);
new_workspace
})?;
@@ -510,7 +510,7 @@ impl Worktree {
cx: &mut App,
) -> Entity<Self> {
cx.new(|cx: &mut Context<Self>| {
- let snapshot = Snapshot::new(
+ let mut snapshot = Snapshot::new(
WorktreeId::from_proto(worktree.id),
RelPath::from_proto(&worktree.root_name)
.unwrap_or_else(|_| RelPath::empty().into()),
@@ -518,6 +518,10 @@ impl Worktree {
path_style,
);
+ snapshot.root_repo_common_dir = worktree
+ .root_repo_common_dir
+ .map(|p| SanitizedPath::new_arc(Path::new(&p)));
+
let background_snapshot = Arc::new(Mutex::new((
snapshot.clone(),
Vec::<proto::UpdateWorktree>::new(),
@@ -676,6 +680,9 @@ impl Worktree {
root_name: self.root_name().to_proto(),
visible: self.is_visible(),
abs_path: self.abs_path().to_string_lossy().into_owned(),
+ root_repo_common_dir: self
+ .root_repo_common_dir()
+ .map(|p| p.to_string_lossy().into_owned()),
}
}
@@ -2430,9 +2437,12 @@ impl Snapshot {
self.entries_by_path.edit(entries_by_path_edits, ());
self.entries_by_id.edit(entries_by_id_edits, ());
- self.root_repo_common_dir = update
+ if let Some(dir) = update
.root_repo_common_dir
- .map(|p| SanitizedPath::new_arc(Path::new(&p)));
+ .map(|p| SanitizedPath::new_arc(Path::new(&p)))
+ {
+ self.root_repo_common_dir = Some(dir);
+ }
self.scan_id = update.scan_id as usize;
if update.is_last_update {
@@ -7,7 +7,7 @@ mod zed;
use agent::{SharedThread, ThreadStore};
use agent_client_protocol;
use agent_ui::AgentPanel;
-use anyhow::{Context as _, Error, Result};
+use anyhow::{Context as _, Result};
use clap::Parser;
use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
use client::{Client, ProxySettings, RefreshLlmTokenListener, UserStore, parse_zed_link};
@@ -1357,54 +1357,56 @@ pub(crate) async fn restore_or_create_workspace(
cx: &mut AsyncApp,
) -> Result<()> {
let kvp = cx.update(|cx| KeyValueStore::global(cx));
- if let Some((multi_workspaces, remote_workspaces)) = restorable_workspaces(cx, &app_state).await
- {
- let mut results: Vec<Result<(), Error>> = Vec::new();
- let mut tasks = Vec::new();
-
+ if let Some(multi_workspaces) = restorable_workspaces(cx, &app_state).await {
+ let mut error_count = 0;
for multi_workspace in multi_workspaces {
- if let Err(error) = restore_multiworkspace(multi_workspace, app_state.clone(), cx).await
- {
- log::error!("Failed to restore workspace: {error:#}");
- results.push(Err(error));
- }
- }
+ let result = match &multi_workspace.active_workspace.location {
+ SerializedWorkspaceLocation::Local => {
+ restore_multiworkspace(multi_workspace, app_state.clone(), cx)
+ .await
+ .map(|_| ())
+ }
+ SerializedWorkspaceLocation::Remote(connection_options) => {
+ let mut connection_options = connection_options.clone();
+ if let RemoteConnectionOptions::Ssh(options) = &mut connection_options {
+ cx.update(|cx| {
+ RemoteSettings::get_global(cx)
+ .fill_connection_options_from_settings(options)
+ });
+ }
- for session_workspace in remote_workspaces {
- let app_state = app_state.clone();
- let SerializedWorkspaceLocation::Remote(mut connection_options) =
- session_workspace.location
- else {
- continue;
+ let paths = multi_workspace
+ .active_workspace
+ .paths
+ .paths()
+ .iter()
+ .map(PathBuf::from)
+ .collect::<Vec<_>>();
+ let state = multi_workspace.state.clone();
+ async {
+ let window = open_remote_project(
+ connection_options,
+ paths,
+ app_state.clone(),
+ workspace::OpenOptions::default(),
+ cx,
+ )
+ .await?;
+ workspace::apply_restored_multiworkspace_state(
+ window,
+ &state,
+ app_state.fs.clone(),
+ cx,
+ )
+ .await;
+ Ok::<(), anyhow::Error>(())
+ }
+ .await
+ }
};
- let paths = session_workspace.paths;
- if let RemoteConnectionOptions::Ssh(options) = &mut connection_options {
- cx.update(|cx| {
- RemoteSettings::get_global(cx).fill_connection_options_from_settings(options)
- });
- }
- let task = cx.spawn(async move |cx| {
- recent_projects::open_remote_project(
- connection_options,
- paths.paths().iter().map(PathBuf::from).collect(),
- app_state,
- workspace::OpenOptions::default(),
- cx,
- )
- .await
- .map_err(|e| anyhow::anyhow!(e))
- });
- tasks.push(task);
- }
- // Wait for all window groups and remote workspaces to open concurrently
- results.extend(future::join_all(tasks).await);
-
- // Show notifications for any errors that occurred
- let mut error_count = 0;
- for result in results {
- if let Err(e) = result {
- log::error!("Failed to restore workspace: {}", e);
+ if let Err(error) = result {
+ log::error!("Failed to restore workspace: {error:#}");
error_count += 1;
}
}
@@ -1487,17 +1489,9 @@ pub(crate) async fn restore_or_create_workspace(
async fn restorable_workspaces(
cx: &mut AsyncApp,
app_state: &Arc<AppState>,
-) -> Option<(
- Vec<workspace::SerializedMultiWorkspace>,
- Vec<SessionWorkspace>,
-)> {
+) -> Option<Vec<workspace::SerializedMultiWorkspace>> {
let locations = restorable_workspace_locations(cx, app_state).await?;
- let (remote_workspaces, local_workspaces) = locations
- .into_iter()
- .partition(|sw| matches!(sw.location, SerializedWorkspaceLocation::Remote(_)));
- let multi_workspaces =
- cx.update(|cx| workspace::read_serialized_multi_workspaces(local_workspaces, cx));
- Some((multi_workspaces, remote_workspaces))
+ Some(cx.update(|cx| workspace::read_serialized_multi_workspaces(locations, cx)))
}
pub(crate) async fn restorable_workspace_locations(
@@ -2052,6 +2052,7 @@ pub fn open_new_ssh_project_from_project(
cx,
)
.await
+ .map(|_| ())
})
}