Detailed changes
@@ -391,6 +391,9 @@ impl Server {
.add_request_handler(forward_mutating_project_request::<proto::OpenContext>)
.add_request_handler(forward_mutating_project_request::<proto::CreateContext>)
.add_request_handler(forward_mutating_project_request::<proto::SynchronizeContexts>)
+ .add_request_handler(forward_mutating_project_request::<proto::Stage>)
+ .add_request_handler(forward_mutating_project_request::<proto::Unstage>)
+ .add_request_handler(forward_mutating_project_request::<proto::Commit>)
.add_message_handler(broadcast_project_message_from_host::<proto::AdvertiseContexts>)
.add_message_handler(update_context)
.add_request_handler({
@@ -1,6 +1,7 @@
use crate::worktree_store::{WorktreeStore, WorktreeStoreEvent};
use crate::{Project, ProjectPath};
-use anyhow::anyhow;
+use anyhow::{anyhow, Context as _};
+use client::ProjectId;
use futures::channel::mpsc;
use futures::{SinkExt as _, StreamExt as _};
use git::{
@@ -11,13 +12,16 @@ use gpui::{
App, AppContext as _, Context, Entity, EventEmitter, SharedString, Subscription, WeakEntity,
};
use language::{Buffer, LanguageRegistry};
+use rpc::{proto, AnyProtoClient};
use settings::WorktreeId;
use std::sync::Arc;
use text::Rope;
use util::maybe;
-use worktree::{RepositoryEntry, StatusEntry};
+use worktree::{ProjectEntryId, RepositoryEntry, StatusEntry};
pub struct GitState {
+ project_id: Option<ProjectId>,
+ client: Option<AnyProtoClient>,
repositories: Vec<RepositoryHandle>,
active_index: Option<usize>,
update_sender: mpsc::UnboundedSender<(Message, mpsc::Sender<anyhow::Error>)>,
@@ -28,13 +32,24 @@ pub struct GitState {
#[derive(Clone)]
pub struct RepositoryHandle {
git_state: WeakEntity<GitState>,
- worktree_id: WorktreeId,
- repository_entry: RepositoryEntry,
- git_repo: Option<Arc<dyn GitRepository>>,
+ pub worktree_id: WorktreeId,
+ pub repository_entry: RepositoryEntry,
+ git_repo: Option<GitRepo>,
commit_message: Entity<Buffer>,
update_sender: mpsc::UnboundedSender<(Message, mpsc::Sender<anyhow::Error>)>,
}
+#[derive(Clone)]
+enum GitRepo {
+ Local(Arc<dyn GitRepository>),
+ Remote {
+ project_id: ProjectId,
+ client: AnyProtoClient,
+ worktree_id: WorktreeId,
+ work_directory_id: ProjectEntryId,
+ },
+}
+
impl PartialEq<Self> for RepositoryHandle {
fn eq(&self, other: &Self) -> bool {
self.worktree_id == other.worktree_id
@@ -52,10 +67,10 @@ impl PartialEq<RepositoryEntry> for RepositoryHandle {
}
enum Message {
- StageAndCommit(Arc<dyn GitRepository>, Rope, Vec<RepoPath>),
- Commit(Arc<dyn GitRepository>, Rope),
- Stage(Arc<dyn GitRepository>, Vec<RepoPath>),
- Unstage(Arc<dyn GitRepository>, Vec<RepoPath>),
+ StageAndCommit(GitRepo, Rope, Vec<RepoPath>),
+ Commit(GitRepo, Rope),
+ Stage(GitRepo, Vec<RepoPath>),
+ Unstage(GitRepo, Vec<RepoPath>),
}
pub enum Event {
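
For readers following the diff, a minimal self-contained sketch of the queue-and-drain pattern `GitState` uses here may help: git mutations are enqueued as a `Message` together with a per-request error channel, and a single worker drains them, dispatching on whether the repository is local or reached over RPC. All types below are toy stand-ins (the real `GitRepo`, `Message`, and handlers are the ones shown in the hunks above), so treat this as an assumption-laden illustration rather than the actual implementation.

```rust
use futures::channel::mpsc;
use futures::{SinkExt as _, StreamExt as _};

enum GitRepo {
    Local,                       // stand-in for Arc<dyn GitRepository>
    Remote { project_id: u64 },  // stand-in for client + worktree/work-directory ids
}

enum Message {
    Stage(GitRepo, Vec<String>),
    Commit(GitRepo, String),
}

async fn handle(msg: Message) -> anyhow::Result<()> {
    match msg {
        Message::Stage(GitRepo::Local, paths) => {
            // would call repo.stage_paths(&paths) on the local repository
            let _ = paths;
            Ok(())
        }
        Message::Stage(GitRepo::Remote { project_id }, paths) => {
            // would send a proto::Stage request to the host and await the Ack
            let _ = (project_id, paths);
            Ok(())
        }
        Message::Commit(_, message) => {
            // local: repo.commit(&message); remote: proto::Commit request
            let _ = message;
            Ok(())
        }
    }
}

fn main() {
    futures::executor::block_on(async {
        let (update_sender, mut update_receiver) =
            mpsc::unbounded::<(Message, mpsc::Sender<anyhow::Error>)>();

        // Worker: drain operations one at a time; failures are reported back
        // through the per-request error channel rather than returned.
        let worker = async move {
            while let Some((msg, mut err_sender)) = update_receiver.next().await {
                if let Err(error) = handle(msg).await {
                    let _ = err_sender.send(error).await;
                }
            }
        };

        // Caller: enqueue an operation together with a bounded error channel.
        let (err_sender, mut err_receiver) = mpsc::channel(1);
        update_sender
            .unbounded_send((
                Message::Stage(GitRepo::Local, vec!["src/main.rs".into()]),
                err_sender,
            ))
            .expect("git worker dropped");
        drop(update_sender); // lets the worker loop terminate in this toy example

        futures::join!(worker, async move {
            if let Some(error) = err_receiver.next().await {
                eprintln!("git operation failed: {error:#}");
            }
        });
    });
}
```
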
@@ -68,6 +83,8 @@ impl GitState {
pub fn new(
worktree_store: &Entity<WorktreeStore>,
languages: Arc<LanguageRegistry>,
+ client: Option<AnyProtoClient>,
+ project_id: Option<ProjectId>,
cx: &mut Context<'_, Self>,
) -> Self {
let (update_sender, mut update_receiver) =
@@ -79,13 +96,117 @@ impl GitState {
.spawn(async move {
match msg {
Message::StageAndCommit(repo, message, paths) => {
- repo.stage_paths(&paths)?;
- repo.commit(&message.to_string())?;
+ match repo {
+ GitRepo::Local(repo) => {
+ repo.stage_paths(&paths)?;
+ repo.commit(&message.to_string())?;
+ }
+ GitRepo::Remote {
+ project_id,
+ client,
+ worktree_id,
+ work_directory_id,
+ } => {
+ client
+ .request(proto::Stage {
+ project_id: project_id.0,
+ worktree_id: worktree_id.to_proto(),
+ work_directory_id: work_directory_id.to_proto(),
+ paths: paths
+ .into_iter()
+ .map(|repo_path| repo_path.to_proto())
+ .collect(),
+ })
+ .await
+ .context("sending stage request")?;
+ client
+ .request(proto::Commit {
+ project_id: project_id.0,
+ worktree_id: worktree_id.to_proto(),
+ work_directory_id: work_directory_id.to_proto(),
+ message: message.to_string(),
+ })
+ .await
+ .context("sending commit request")?;
+ }
+ }
+
+ Ok(())
+ }
+ Message::Stage(repo, paths) => {
+ match repo {
+ GitRepo::Local(repo) => repo.stage_paths(&paths)?,
+ GitRepo::Remote {
+ project_id,
+ client,
+ worktree_id,
+ work_directory_id,
+ } => {
+ client
+ .request(proto::Stage {
+ project_id: project_id.0,
+ worktree_id: worktree_id.to_proto(),
+ work_directory_id: work_directory_id.to_proto(),
+ paths: paths
+ .into_iter()
+ .map(|repo_path| repo_path.to_proto())
+ .collect(),
+ })
+ .await
+ .context("sending stage request")?;
+ }
+ }
+ Ok(())
+ }
+ Message::Unstage(repo, paths) => {
+ match repo {
+ GitRepo::Local(repo) => repo.unstage_paths(&paths)?,
+ GitRepo::Remote {
+ project_id,
+ client,
+ worktree_id,
+ work_directory_id,
+ } => {
+ client
+ .request(proto::Unstage {
+ project_id: project_id.0,
+ worktree_id: worktree_id.to_proto(),
+ work_directory_id: work_directory_id.to_proto(),
+ paths: paths
+ .into_iter()
+ .map(|repo_path| repo_path.to_proto())
+ .collect(),
+ })
+ .await
+ .context("sending unstage request")?;
+ }
+ }
+ Ok(())
+ }
+ Message::Commit(repo, message) => {
+ match repo {
+ GitRepo::Local(repo) => repo.commit(&message.to_string())?,
+ GitRepo::Remote {
+ project_id,
+ client,
+ worktree_id,
+ work_directory_id,
+ } => {
+ client
+ .request(proto::Commit {
+ project_id: project_id.0,
+ worktree_id: worktree_id.to_proto(),
+ work_directory_id: work_directory_id.to_proto(),
+ // TODO implement collaborative commit message buffer instead and use it
+ // If it works, remove `commit_with_message` method.
+ message: message.to_string(),
+ })
+ .await
+ .context("sending commit request")?;
+ }
+ }
Ok(())
}
- Message::Stage(repo, paths) => repo.stage_paths(&paths),
- Message::Unstage(repo, paths) => repo.unstage_paths(&paths),
- Message::Commit(repo, message) => repo.commit(&message.to_string()),
}
})
.await;
@@ -99,7 +220,9 @@ impl GitState {
let _subscription = cx.subscribe(worktree_store, Self::on_worktree_store_event);
GitState {
+ project_id,
languages,
+ client,
repositories: Vec::new(),
active_index: None,
update_sender,
@@ -123,6 +246,8 @@ impl GitState {
let mut new_repositories = Vec::new();
let mut new_active_index = None;
let this = cx.weak_entity();
+ let client = self.client.clone();
+ let project_id = self.project_id;
worktree_store.update(cx, |worktree_store, cx| {
for worktree in worktree_store.worktrees() {
@@ -132,7 +257,18 @@ impl GitState {
let git_repo = worktree
.as_local()
.and_then(|local_worktree| local_worktree.get_local_repo(repo))
- .map(|local_repo| local_repo.repo().clone());
+ .map(|local_repo| local_repo.repo().clone())
+ .map(GitRepo::Local)
+ .or_else(|| {
+ let client = client.clone()?;
+ let project_id = project_id?;
+ Some(GitRepo::Remote {
+ project_id,
+ client,
+ worktree_id: worktree.id(),
+ work_directory_id: repo.work_directory_id(),
+ })
+ });
let existing = self
.repositories
.iter()
@@ -340,6 +476,21 @@ impl RepositoryHandle {
});
}
+ pub fn commit_with_message(
+ &self,
+ message: String,
+ err_sender: mpsc::Sender<anyhow::Error>,
+ ) -> anyhow::Result<()> {
+ let Some(git_repo) = self.git_repo.clone() else {
+ return Ok(());
+ };
+ let result = self
+ .update_sender
+ .unbounded_send((Message::Commit(git_repo, message.into()), err_sender));
+ anyhow::ensure!(result.is_ok(), "Failed to submit commit operation");
+ Ok(())
+ }
+
pub fn commit_all(&self, mut err_sender: mpsc::Sender<anyhow::Error>, cx: &mut App) {
let Some(git_repo) = self.git_repo.clone() else {
return;
@@ -30,7 +30,9 @@ mod yarn;
use crate::git::GitState;
use anyhow::{anyhow, Context as _, Result};
use buffer_store::{BufferChangeSet, BufferStore, BufferStoreEvent};
-use client::{proto, Client, Collaborator, PendingEntitySubscription, TypedEnvelope, UserStore};
+use client::{
+ proto, Client, Collaborator, PendingEntitySubscription, ProjectId, TypedEnvelope, UserStore,
+};
use clock::ReplicaId;
use collections::{BTreeSet, HashMap, HashSet};
use debounced_delay::DebouncedDelay;
@@ -45,7 +47,7 @@ use image_store::{ImageItemEvent, ImageStoreEvent};
use ::git::{
blame::Blame,
- repository::{Branch, GitRepository},
+ repository::{Branch, GitRepository, RepoPath},
status::FileStatus,
};
use gpui::{
@@ -606,6 +608,10 @@ impl Project {
client.add_model_request_handler(Self::handle_open_new_buffer);
client.add_model_message_handler(Self::handle_create_buffer_for_peer);
+ client.add_model_request_handler(Self::handle_stage);
+ client.add_model_request_handler(Self::handle_unstage);
+ client.add_model_request_handler(Self::handle_commit);
+
WorktreeStore::init(&client);
BufferStore::init(&client);
LspStore::init(&client);
@@ -695,8 +701,9 @@ impl Project {
)
});
- let git_state =
- Some(cx.new(|cx| GitState::new(&worktree_store, languages.clone(), cx)));
+ let git_state = Some(
+ cx.new(|cx| GitState::new(&worktree_store, languages.clone(), None, None, cx)),
+ );
cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
@@ -816,8 +823,15 @@ impl Project {
});
cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
- let git_state =
- Some(cx.new(|cx| GitState::new(&worktree_store, languages.clone(), cx)));
+ let git_state = Some(cx.new(|cx| {
+ GitState::new(
+ &worktree_store,
+ languages.clone(),
+ Some(ssh_proto.clone()),
+ Some(ProjectId(SSH_PROJECT_ID)),
+ cx,
+ )
+ }));
cx.subscribe(&ssh, Self::on_ssh_event).detach();
cx.observe(&ssh, |_, _, cx| cx.notify()).detach();
@@ -874,6 +888,7 @@ impl Project {
toolchain_store: Some(toolchain_store),
};
+ // ssh -> local machine handlers
let ssh = ssh.read(cx);
ssh.subscribe_to_entity(SSH_PROJECT_ID, &cx.entity());
ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.buffer_store);
@@ -1014,8 +1029,16 @@ impl Project {
SettingsObserver::new_remote(worktree_store.clone(), task_store.clone(), cx)
})?;
- let git_state =
- Some(cx.new(|cx| GitState::new(&worktree_store, languages.clone(), cx))).transpose()?;
+ let git_state = Some(cx.new(|cx| {
+ GitState::new(
+ &worktree_store,
+ languages.clone(),
+ Some(client.clone().into()),
+ Some(ProjectId(remote_id)),
+ cx,
+ )
+ }))
+ .transpose()?;
let this = cx.new(|cx| {
let replica_id = response.payload.replica_id as ReplicaId;
@@ -3946,6 +3969,123 @@ impl Project {
Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx)
}
+ async fn handle_stage(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::Stage>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::Ack> {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
+ let repository_handle = this.update(&mut cx, |project, cx| {
+ let repository_handle = project
+ .git_state()
+ .context("missing git state")?
+ .read(cx)
+ .all_repositories()
+ .into_iter()
+ .find(|repository_handle| {
+ repository_handle.worktree_id == worktree_id
+ && repository_handle.repository_entry.work_directory_id()
+ == work_directory_id
+ })
+ .context("missing repository handle")?;
+ anyhow::Ok(repository_handle)
+ })??;
+
+ let entries = envelope
+ .payload
+ .paths
+ .into_iter()
+ .map(PathBuf::from)
+ .map(RepoPath::new)
+ .collect();
+ let (err_sender, mut err_receiver) = mpsc::channel(1);
+ repository_handle
+ .stage_entries(entries, err_sender)
+ .context("staging entries")?;
+ if let Some(error) = err_receiver.next().await {
+ Err(error.context("error during staging"))
+ } else {
+ Ok(proto::Ack {})
+ }
+ }
+
+ async fn handle_unstage(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::Unstage>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::Ack> {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
+ let repository_handle = this.update(&mut cx, |project, cx| {
+ let repository_handle = project
+ .git_state()
+ .context("missing git state")?
+ .read(cx)
+ .all_repositories()
+ .into_iter()
+ .find(|repository_handle| {
+ repository_handle.worktree_id == worktree_id
+ && repository_handle.repository_entry.work_directory_id()
+ == work_directory_id
+ })
+ .context("missing repository handle")?;
+ anyhow::Ok(repository_handle)
+ })??;
+
+ let entries = envelope
+ .payload
+ .paths
+ .into_iter()
+ .map(PathBuf::from)
+ .map(RepoPath::new)
+ .collect();
+ let (err_sender, mut err_receiver) = mpsc::channel(1);
+ repository_handle
+ .unstage_entries(entries, err_sender)
+ .context("unstaging entries")?;
+ if let Some(error) = err_receiver.next().await {
+ Err(error.context("error during unstaging"))
+ } else {
+ Ok(proto::Ack {})
+ }
+ }
+
+ async fn handle_commit(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::Commit>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::Ack> {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
+ let repository_handle = this.update(&mut cx, |project, cx| {
+ let repository_handle = project
+ .git_state()
+ .context("missing git state")?
+ .read(cx)
+ .all_repositories()
+ .into_iter()
+ .find(|repository_handle| {
+ repository_handle.worktree_id == worktree_id
+ && repository_handle.repository_entry.work_directory_id()
+ == work_directory_id
+ })
+ .context("missing repository handle")?;
+ anyhow::Ok(repository_handle)
+ })??;
+
+ let commit_message = envelope.payload.message;
+ let (err_sender, mut err_receiver) = mpsc::channel(1);
+ repository_handle
+ .commit_with_message(commit_message, err_sender)
+ .context("unstaging entries")?;
+ if let Some(error) = err_receiver.next().await {
+ Err(error.context("error during commit"))
+ } else {
+ Ok(proto::Ack {})
+ }
+ }
+
fn respond_to_open_buffer_request(
this: Entity<Self>,
buffer: Entity<Buffer>,
@@ -308,6 +308,10 @@ message Envelope {
GetStagedTextResponse get_staged_text_response = 289;
RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290;
+
+ Stage stage = 293;
+ Unstage unstage = 294;
+ Commit commit = 295; // current max
}
reserved 87 to 88;
@@ -2633,3 +2637,24 @@ message RegisterBufferWithLanguageServers{
uint64 project_id = 1;
uint64 buffer_id = 2;
}
+
+message Stage {
+ uint64 project_id = 1;
+ uint64 worktree_id = 2;
+ uint64 work_directory_id = 3;
+ repeated string paths = 4;
+}
+
+message Unstage {
+ uint64 project_id = 1;
+ uint64 worktree_id = 2;
+ uint64 work_directory_id = 3;
+ repeated string paths = 4;
+}
+
+message Commit {
+ uint64 project_id = 1;
+ uint64 worktree_id = 2;
+ uint64 work_directory_id = 3;
+ string message = 4;
+}
@@ -156,6 +156,7 @@ messages!(
(CancelCall, Foreground),
(ChannelMessageSent, Foreground),
(ChannelMessageUpdate, Foreground),
+ (Commit, Background),
(ComputeEmbeddings, Background),
(ComputeEmbeddingsResponse, Background),
(CopyProjectEntry, Foreground),
@@ -288,6 +289,7 @@ messages!(
(ShareProject, Foreground),
(ShareProjectResponse, Foreground),
(ShowContacts, Foreground),
+ (Stage, Background),
(StartLanguageServer, Foreground),
(SubscribeToChannels, Foreground),
(SynchronizeBuffers, Foreground),
@@ -297,6 +299,7 @@ messages!(
(Test, Foreground),
(Unfollow, Foreground),
(UnshareProject, Foreground),
+ (Unstage, Background),
(UpdateBuffer, Foreground),
(UpdateBufferFile, Foreground),
(UpdateChannelBuffer, Foreground),
@@ -387,6 +390,7 @@ request_messages!(
),
(Call, Ack),
(CancelCall, Ack),
+ (Commit, Ack),
(CopyProjectEntry, ProjectEntryResponse),
(ComputeEmbeddings, ComputeEmbeddingsResponse),
(CreateChannel, CreateChannelResponse),
@@ -463,6 +467,7 @@ request_messages!(
(RespondToChannelInvite, Ack),
(RespondToContactRequest, Ack),
(SaveBuffer, BufferSaved),
+ (Stage, Ack),
(FindSearchCandidates, FindSearchCandidatesResponse),
(SendChannelMessage, SendChannelMessageResponse),
(SetChannelMemberRole, Ack),
@@ -471,6 +476,7 @@ request_messages!(
(SynchronizeBuffers, SynchronizeBuffersResponse),
(TaskContextForLocation, TaskContext),
(Test, Test),
+ (Unstage, Ack),
(UpdateBuffer, Ack),
(UpdateParticipantLocation, Ack),
(UpdateProject, Ack),
@@ -516,6 +522,7 @@ entity_messages!(
BufferReloaded,
BufferSaved,
CloseBuffer,
+ Commit,
CopyProjectEntry,
CreateBufferForPeer,
CreateProjectEntry,
@@ -556,10 +563,12 @@ entity_messages!(
ResolveCompletionDocumentation,
ResolveInlayHint,
SaveBuffer,
+ Stage,
StartLanguageServer,
SynchronizeBuffers,
TaskContextForLocation,
UnshareProject,
+ Unstage,
UpdateBuffer,
UpdateBufferFile,
UpdateDiagnosticSummary,
@@ -1,18 +1,22 @@
-use anyhow::{anyhow, Result};
+use anyhow::{anyhow, Context as _, Result};
use extension::ExtensionHostProxy;
use extension_host::headless_host::HeadlessExtensionStore;
use fs::Fs;
+use futures::channel::mpsc;
+use git::repository::RepoPath;
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel};
use http_client::HttpClient;
use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry};
use node_runtime::NodeRuntime;
use project::{
buffer_store::{BufferStore, BufferStoreEvent},
+ git::GitState,
project_settings::SettingsObserver,
search::SearchQuery,
task_store::TaskStore,
worktree_store::WorktreeStore,
- LspStore, LspStoreEvent, PrettierStore, ProjectPath, ToolchainStore, WorktreeId,
+ LspStore, LspStoreEvent, PrettierStore, ProjectEntryId, ProjectPath, ToolchainStore,
+ WorktreeId,
};
use remote::ssh_session::ChannelClient;
use rpc::{
@@ -40,6 +44,7 @@ pub struct HeadlessProject {
pub next_entry_id: Arc<AtomicUsize>,
pub languages: Arc<LanguageRegistry>,
pub extensions: Entity<HeadlessExtensionStore>,
+ pub git_state: Entity<GitState>,
}
pub struct HeadlessAppState {
@@ -77,6 +82,10 @@ impl HeadlessProject {
store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
store
});
+
+ let git_state =
+ cx.new(|cx| GitState::new(&worktree_store, languages.clone(), None, None, cx));
+
let buffer_store = cx.new(|cx| {
let mut buffer_store = BufferStore::local(worktree_store.clone(), cx);
buffer_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
@@ -164,6 +173,7 @@ impl HeadlessProject {
let client: AnyProtoClient = session.clone().into();
+ // local_machine -> ssh handlers
session.subscribe_to_entity(SSH_PROJECT_ID, &worktree_store);
session.subscribe_to_entity(SSH_PROJECT_ID, &buffer_store);
session.subscribe_to_entity(SSH_PROJECT_ID, &cx.entity());
@@ -188,6 +198,10 @@ impl HeadlessProject {
client.add_model_request_handler(BufferStore::handle_update_buffer);
client.add_model_message_handler(BufferStore::handle_close_buffer);
+ client.add_model_request_handler(Self::handle_stage);
+ client.add_model_request_handler(Self::handle_unstage);
+ client.add_model_request_handler(Self::handle_commit);
+
client.add_request_handler(
extensions.clone().downgrade(),
HeadlessExtensionStore::handle_sync_extensions,
@@ -215,6 +229,7 @@ impl HeadlessProject {
next_entry_id: Default::default(),
languages,
extensions,
+ git_state,
}
}
@@ -602,6 +617,120 @@ impl HeadlessProject {
log::debug!("Received ping from client");
Ok(proto::Ack {})
}
+
+ async fn handle_stage(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::Stage>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::Ack> {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
+ let repository_handle = this.update(&mut cx, |project, cx| {
+ let repository_handle = project
+ .git_state
+ .read(cx)
+ .all_repositories()
+ .into_iter()
+ .find(|repository_handle| {
+ repository_handle.worktree_id == worktree_id
+ && repository_handle.repository_entry.work_directory_id()
+ == work_directory_id
+ })
+ .context("missing repository handle")?;
+ anyhow::Ok(repository_handle)
+ })??;
+
+ let entries = envelope
+ .payload
+ .paths
+ .into_iter()
+ .map(PathBuf::from)
+ .map(RepoPath::new)
+ .collect();
+ let (err_sender, mut err_receiver) = mpsc::channel(1);
+ repository_handle
+ .stage_entries(entries, err_sender)
+ .context("staging entries")?;
+ if let Some(error) = err_receiver.next().await {
+ Err(error.context("error during staging"))
+ } else {
+ Ok(proto::Ack {})
+ }
+ }
+
+ async fn handle_unstage(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::Unstage>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::Ack> {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
+ let repository_handle = this.update(&mut cx, |project, cx| {
+ let repository_handle = project
+ .git_state
+ .read(cx)
+ .all_repositories()
+ .into_iter()
+ .find(|repository_handle| {
+ repository_handle.worktree_id == worktree_id
+ && repository_handle.repository_entry.work_directory_id()
+ == work_directory_id
+ })
+ .context("missing repository handle")?;
+ anyhow::Ok(repository_handle)
+ })??;
+
+ let entries = envelope
+ .payload
+ .paths
+ .into_iter()
+ .map(PathBuf::from)
+ .map(RepoPath::new)
+ .collect();
+ let (err_sender, mut err_receiver) = mpsc::channel(1);
+ repository_handle
+ .unstage_entries(entries, err_sender)
+ .context("unstaging entries")?;
+ if let Some(error) = err_receiver.next().await {
+ Err(error.context("error during unstaging"))
+ } else {
+ Ok(proto::Ack {})
+ }
+ }
+
+ async fn handle_commit(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::Commit>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::Ack> {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
+ let repository_handle = this.update(&mut cx, |project, cx| {
+ let repository_handle = project
+ .git_state
+ .read(cx)
+ .all_repositories()
+ .into_iter()
+ .find(|repository_handle| {
+ repository_handle.worktree_id == worktree_id
+ && repository_handle.repository_entry.work_directory_id()
+ == work_directory_id
+ })
+ .context("missing repository handle")?;
+ anyhow::Ok(repository_handle)
+ })??;
+
+ let commit_message = envelope.payload.message;
+ let (err_sender, mut err_receiver) = mpsc::channel(1);
+ repository_handle
+ .commit_with_message(commit_message, err_sender)
+ .context("unstaging entries")?;
+ if let Some(error) = err_receiver.next().await {
+ Err(error.context("error during commit"))
+ } else {
+ Ok(proto::Ack {})
+ }
+ }
}
fn prompt_to_proto(
@@ -1530,11 +1530,6 @@ impl LocalWorktree {
self.settings.clone()
}
- pub fn local_git_repo(&self, path: &Path) -> Option<Arc<dyn GitRepository>> {
- self.local_repo_for_path(path)
- .map(|local_repo| local_repo.repo_ptr.clone())
- }
-
pub fn get_local_repo(&self, repo: &RepositoryEntry) -> Option<&LocalRepositoryEntry> {
self.git_repositories.get(&repo.work_directory_id)
}