From 7b6ac4fe388a483615add00a91b86632f56eea90 Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Thu, 30 Oct 2025 08:04:19 +0100
Subject: [PATCH] git: Move PendingOperation into GitStore

---
 crates/git_ui/src/git_panel.rs  |  50 ++++++----
 crates/project/src/git_store.rs | 171 +++++++++++++++++---------------
 2 files changed, 125 insertions(+), 96 deletions(-)

diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs
index 9ff8602a18fd1a7eec5804deecee5c21921c6eee..937627a2355a5889bd54dfd12a6a1dfa3d6a29af 100644
--- a/crates/git_ui/src/git_panel.rs
+++ b/crates/git_ui/src/git_panel.rs
@@ -279,6 +279,7 @@ enum TargetStatus {
     Unchanged,
 }
 
+#[derive(Debug)]
 struct PendingOperation {
     finished: bool,
     target_status: TargetStatus,
@@ -1240,19 +1241,21 @@ impl GitPanel {
         };
         let (stage, repo_paths) = match entry {
             GitListEntry::Status(status_entry) => {
-                if status_entry.status.staging().is_fully_staged() {
+                let repo_paths = vec![status_entry.clone()];
+                let stage = if let Some(status) = self.entry_staging(&status_entry) {
+                    !status.is_fully_staged()
+                } else if status_entry.status.staging().is_fully_staged() {
                     if let Some(op) = self.bulk_staging.clone()
                         && op.anchor == status_entry.repo_path
                     {
                         self.bulk_staging = None;
                     }
-
-                    (false, vec![status_entry.clone()])
+                    false
                 } else {
                     self.set_bulk_staging_anchor(status_entry.repo_path.clone(), cx);
-
-                    (true, vec![status_entry.clone()])
-                }
+                    true
+                };
+                (stage, repo_paths)
             }
             GitListEntry::Header(section) => {
                 let goal_staged_state = !self.header_state(section.header).selected();
@@ -2797,7 +2800,7 @@ impl GitPanel {
             && let Some(index) = bulk_staging_anchor_new_index
             && let Some(entry) = self.entries.get(index)
             && let Some(entry) = entry.status_entry()
-            && self.entry_staging(entry) == StageStatus::Staged
+            && self.entry_staging(entry).unwrap_or(entry.staging) == StageStatus::Staged
         {
             self.bulk_staging = bulk_staging;
         }
@@ -2845,24 +2848,36 @@ impl GitPanel {
             self.entry_count += 1;
             if repo.had_conflict_on_last_merge_head_change(&status_entry.repo_path) {
                 self.conflicted_count += 1;
-                if self.entry_staging(status_entry).has_staged() {
+                if self
+                    .entry_staging(status_entry)
+                    .unwrap_or(status_entry.staging)
+                    .has_staged()
+                {
                     self.conflicted_staged_count += 1;
                 }
             } else if status_entry.status.is_created() {
                 self.new_count += 1;
-                if self.entry_staging(status_entry).has_staged() {
+                if self
+                    .entry_staging(status_entry)
+                    .unwrap_or(status_entry.staging)
+                    .has_staged()
+                {
                     self.new_staged_count += 1;
                 }
             } else {
                 self.tracked_count += 1;
-                if self.entry_staging(status_entry).has_staged() {
+                if self
+                    .entry_staging(status_entry)
+                    .unwrap_or(status_entry.staging)
+                    .has_staged()
+                {
                     self.tracked_staged_count += 1;
                 }
             }
         }
     }
 
-    fn entry_staging(&self, entry: &GitStatusEntry) -> StageStatus {
+    fn entry_staging(&self, entry: &GitStatusEntry) -> Option<StageStatus> {
         for pending in self.pending.iter().rev() {
             if pending
                 .entries
                 .iter()
                 .any(|pending_entry| pending_entry.repo_path == entry.repo_path)
             {
                 match pending.target_status {
-                    TargetStatus::Staged => return StageStatus::Staged,
-                    TargetStatus::Unstaged => return StageStatus::Unstaged,
+                    TargetStatus::Staged => return Some(StageStatus::Staged),
+                    TargetStatus::Unstaged => return Some(StageStatus::Unstaged),
                     TargetStatus::Reverted => continue,
                     TargetStatus::Unchanged => continue,
                 }
             }
         }
-        entry.staging
+        None
     }
 
     pub(crate) fn has_staged_changes(&self) -> bool {
@@ -3710,7 +3725,8 @@ impl GitPanel {
 
         let ix = self.entry_by_path(&repo_path, cx)?;
         let entry = self.entries.get(ix)?;
-        let entry_staging = self.entry_staging(entry.status_entry()?);
+        let status = entry.status_entry()?;
+        let entry_staging = self.entry_staging(status).unwrap_or(status.staging);
 
         let checkbox = Checkbox::new("stage-file", entry_staging.as_bool().into())
             .disabled(!self.has_write_access(cx))
@@ -4004,8 +4020,8 @@ impl GitPanel {
         let checkbox_id: ElementId =
             ElementId::Name(format!("entry_{}_{}_checkbox", display_name, ix).into());
 
-        let entry_staging = self.entry_staging(entry);
-        let mut is_staged: ToggleState = self.entry_staging(entry).as_bool().into();
+        let entry_staging = self.entry_staging(entry).unwrap_or(entry.staging);
+        let mut is_staged: ToggleState = entry_staging.as_bool().into();
         if self.show_placeholders && !self.has_staged_changes() && !entry.status.is_created() {
             is_staged = ToggleState::Selected;
         }
diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs
index 736c96f34e171c4fde83c2db032484456144ae5a..827f2eb51d1c239415aed9c2c3c90a90cef205f2 100644
--- a/crates/project/src/git_store.rs
+++ b/crates/project/src/git_store.rs
@@ -336,7 +336,7 @@ pub struct GitJob {
     key: Option<GitJobKey>,
 }
 
-#[derive(PartialEq, Eq)]
+#[derive(Debug, PartialEq, Eq)]
 enum GitJobKey {
     WriteIndex(RepoPath),
     ReloadBufferDiffBases,
@@ -3716,20 +3716,15 @@ impl Repository {
         Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
     }
 
-    pub fn stage_entries(
+    fn save_buffers<'a>(
        &self,
-        entries: Vec<RepoPath>,
+        entries: impl IntoIterator<Item = &'a RepoPath>,
         cx: &mut Context<Self>,
-    ) -> Task<anyhow::Result<()>> {
-        if entries.is_empty() {
-            return Task::ready(Ok(()));
-        }
-        let id = self.id;
-
+    ) -> Vec<Task<anyhow::Result<()>>> {
         let mut save_futures = Vec::new();
         if let Some(buffer_store) = self.buffer_store(cx) {
             buffer_store.update(cx, |buffer_store, cx| {
-                for path in &entries {
+                for path in entries {
                     let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                         continue;
                     };
@@ -3745,37 +3740,61 @@ impl Repository {
                 }
             })
         }
+        save_futures
+    }
+
+    pub fn stage_entries(
+        &self,
+        entries: Vec<RepoPath>,
+        cx: &mut Context<Self>,
+    ) -> Task<anyhow::Result<()>> {
+        if entries.is_empty() {
+            return Task::ready(Ok(()));
+        }
+
+        let id = self.id;
+        let save_tasks = self.save_buffers(&entries, cx);
+        let job_key = match entries.len() {
+            1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
+            _ => None,
+        };
+        let paths: Vec<_> = entries.iter().map(|p| p.as_unix_str()).collect();
+        let status = format!("git add {}", paths.join(" "));
 
         cx.spawn(async move |this, cx| {
-            for save_future in save_futures {
-                save_future.await?;
+            for save_task in save_tasks {
+                save_task.await?;
             }
 
             this.update(cx, |this, _| {
-                this.send_job(None, move |git_repo, _cx| async move {
-                    match git_repo {
-                        RepositoryState::Local {
-                            backend,
-                            environment,
-                            ..
-                        } => backend.stage_paths(entries, environment.clone()).await,
-                        RepositoryState::Remote { project_id, client } => {
-                            client
-                                .request(proto::Stage {
-                                    project_id: project_id.0,
-                                    repository_id: id.to_proto(),
-                                    paths: entries
-                                        .into_iter()
-                                        .map(|repo_path| repo_path.to_proto())
-                                        .collect(),
-                                })
-                                .await
-                                .context("sending stage request")?;
+                this.send_keyed_job(
+                    job_key,
+                    Some(status.into()),
+                    move |git_repo, _cx| async move {
+                        match git_repo {
+                            RepositoryState::Local {
+                                backend,
+                                environment,
+                                ..
+                            } => backend.stage_paths(entries, environment.clone()).await,
+                            RepositoryState::Remote { project_id, client } => {
+                                client
+                                    .request(proto::Stage {
+                                        project_id: project_id.0,
+                                        repository_id: id.to_proto(),
+                                        paths: entries
+                                            .into_iter()
+                                            .map(|repo_path| repo_path.to_proto())
+                                            .collect(),
+                                    })
+                                    .await
+                                    .context("sending stage request")?;
 
-                            Ok(())
+                                Ok(())
+                            }
                         }
-                    }
-                })
+                    },
+                )
             })?
             .await??;
@@ -3791,58 +3810,50 @@
         if entries.is_empty() {
             return Task::ready(Ok(()));
         }
-        let id = self.id;
-
-        let mut save_futures = Vec::new();
-        if let Some(buffer_store) = self.buffer_store(cx) {
-            buffer_store.update(cx, |buffer_store, cx| {
-                for path in &entries {
-                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
-                        continue;
-                    };
-                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
-                        && buffer
-                            .read(cx)
-                            .file()
-                            .is_some_and(|file| file.disk_state().exists())
-                        && buffer.read(cx).has_unsaved_edits()
-                    {
-                        save_futures.push(buffer_store.save_buffer(buffer, cx));
-                    }
-                }
-            })
-        }
+        let id = self.id;
+        let save_tasks = self.save_buffers(&entries, cx);
+        let job_key = match entries.len() {
+            1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
+            _ => None,
+        };
+        let paths: Vec<_> = entries.iter().map(|p| p.as_unix_str()).collect();
+        let status = format!("git reset {}", paths.join(" "));
 
         cx.spawn(async move |this, cx| {
-            for save_future in save_futures {
-                save_future.await?;
+            for save_task in save_tasks {
+                save_task.await?;
             }
 
             this.update(cx, |this, _| {
-                this.send_job(None, move |git_repo, _cx| async move {
-                    match git_repo {
-                        RepositoryState::Local {
-                            backend,
-                            environment,
-                            ..
-                        } => backend.unstage_paths(entries, environment).await,
-                        RepositoryState::Remote { project_id, client } => {
-                            client
-                                .request(proto::Unstage {
-                                    project_id: project_id.0,
-                                    repository_id: id.to_proto(),
-                                    paths: entries
-                                        .into_iter()
-                                        .map(|repo_path| repo_path.to_proto())
-                                        .collect(),
-                                })
-                                .await
-                                .context("sending unstage request")?;
+                this.send_keyed_job(
+                    job_key,
+                    Some(status.into()),
+                    move |git_repo, _cx| async move {
+                        match git_repo {
+                            RepositoryState::Local {
+                                backend,
+                                environment,
+                                ..
+                            } => backend.unstage_paths(entries, environment).await,
+                            RepositoryState::Remote { project_id, client } => {
+                                client
+                                    .request(proto::Unstage {
+                                        project_id: project_id.0,
+                                        repository_id: id.to_proto(),
+                                        paths: entries
+                                            .into_iter()
+                                            .map(|repo_path| repo_path.to_proto())
+                                            .collect(),
+                                    })
+                                    .await
+                                    .context("sending unstage request")?;
 
-                            Ok(())
+                                Ok(())
+                            }
                         }
-                    }
-                })
+                    },
+                )
             })?
             .await??;
@@ -3856,6 +3867,7 @@ impl Repository {
             .filter(|entry| !entry.status.staging().is_fully_staged())
             .map(|entry| entry.repo_path)
             .collect();
+        dbg!(&to_stage);
         self.stage_entries(to_stage, cx)
     }
 
@@ -3865,6 +3877,7 @@ impl Repository {
             .filter(|entry| entry.status.staging().has_staged())
             .map(|entry| entry.repo_path)
             .collect();
+        dbg!(&to_unstage);
         self.unstage_entries(to_unstage, cx)
     }
 
@@ -4830,7 +4843,7 @@ impl Repository {
                 {
                     continue;
                 }
-                (job.job)(state.clone(), cx).await;
+                (job.job)(state.clone(), cx).await; // We will block here...
             } else if let Some(job) = job_rx.next().await {
                 jobs.push_back(job);
             } else {
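
As a quick, self-contained illustration of the staging-state fallback this patch introduces on the panel side (the types below are simplified stand-ins, not the actual GitStatusEntry / PendingOperation definitions from git_panel.rs): entry_staging now reports only an optimistic, in-flight override and returns None otherwise, so each call site picks the fallback explicitly with unwrap_or(entry.staging).

// Hypothetical, trimmed-down types; only the Option-returning lookup mirrors the patch.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum StageStatus {
    Staged,
    Unstaged,
}

enum TargetStatus {
    Staged,
    Unstaged,
    Reverted,
    Unchanged,
}

struct PendingOperation {
    target_status: TargetStatus,
    repo_paths: Vec<String>,
}

struct GitStatusEntry {
    repo_path: String,
    staging: StageStatus,
}

// Some(..) only while a pending stage/unstage overrides the index snapshot;
// callers fall back to the snapshot value via `unwrap_or(entry.staging)`.
fn entry_staging(pending: &[PendingOperation], entry: &GitStatusEntry) -> Option<StageStatus> {
    for op in pending.iter().rev() {
        if op.repo_paths.iter().any(|p| p == &entry.repo_path) {
            match op.target_status {
                TargetStatus::Staged => return Some(StageStatus::Staged),
                TargetStatus::Unstaged => return Some(StageStatus::Unstaged),
                TargetStatus::Reverted | TargetStatus::Unchanged => continue,
            }
        }
    }
    None
}

fn main() {
    let entry = GitStatusEntry {
        repo_path: "src/main.rs".into(),
        staging: StageStatus::Unstaged,
    };
    let pending = vec![PendingOperation {
        target_status: TargetStatus::Staged,
        repo_paths: vec!["src/main.rs".into()],
    }];
    // While a `git add` job is still in flight, the optimistic override wins...
    assert_eq!(entry_staging(&pending, &entry), Some(StageStatus::Staged));
    // ...and once nothing is pending, the snapshot value is used again.
    assert_eq!(
        entry_staging(&[], &entry).unwrap_or(entry.staging),
        StageStatus::Unstaged
    );
}

Keeping the helper's return type as Option, rather than folding the fallback into it, is what the repeated unwrap_or(status_entry.staging) chains in the hunks above make explicit at each call site.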