Detailed changes
@@ -6482,6 +6482,7 @@ dependencies = [
"smol",
"tempfile",
"text",
+ "thiserror 2.0.17",
"time",
"trash",
"util",
@@ -13349,6 +13350,8 @@ dependencies = [
"editor",
"feature_flags",
"file_icons",
+ "fs",
+ "futures 0.3.32",
"git",
"git_ui",
"gpui",
@@ -571,7 +571,7 @@ encoding_rs = "0.8"
exec = "0.3.1"
fancy-regex = "0.17.0"
fork = "0.4.0"
-futures = "0.3"
+futures = "0.3.32"
futures-concurrency = "7.7.1"
futures-lite = "1.13"
gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "37f3c0575d379c218a9c455ee67585184e40d43f" }
@@ -936,6 +936,8 @@
"alt-ctrl-shift-c": "workspace::CopyRelativePath",
"undo": "project_panel::Undo",
"ctrl-z": "project_panel::Undo",
+ "redo": "project_panel::Redo",
+ "ctrl-shift-z": "project_panel::Redo",
"enter": "project_panel::Rename",
"f2": "project_panel::Rename",
"backspace": ["project_panel::Trash", { "skip_prompt": false }],
@@ -991,6 +991,7 @@
"cmd-alt-c": "workspace::CopyPath",
"alt-cmd-shift-c": "workspace::CopyRelativePath",
"cmd-z": "project_panel::Undo",
+ "cmd-shift-z": "project_panel::Redo",
"enter": "project_panel::Rename",
"f2": "project_panel::Rename",
"backspace": ["project_panel::Trash", { "skip_prompt": false }],
@@ -929,6 +929,7 @@
"shift-alt-c": "project_panel::CopyPath",
"ctrl-k ctrl-shift-c": "workspace::CopyRelativePath",
"ctrl-z": "project_panel::Undo",
+ "ctrl-shift-z": "project_panel::Redo",
"enter": "project_panel::Rename",
"f2": "project_panel::Rename",
"backspace": ["project_panel::Trash", { "skip_prompt": false }],
@@ -777,7 +777,7 @@ impl ActionLog {
initial_version == current_version && current_content == tracked_content;
if is_ai_only_content {
- buffer
+ let task = buffer
.read(cx)
.entry_id(cx)
.and_then(|entry_id| {
@@ -785,7 +785,12 @@ impl ActionLog {
project.delete_entry(entry_id, false, cx)
})
})
- .unwrap_or(Task::ready(Ok(())))
+ .unwrap_or_else(|| Task::ready(Ok(None)));
+
+ cx.background_spawn(async move {
+ task.await?;
+ Ok(())
+ })
} else {
// Not sure how to disentangle edits made by the user
// from edits made by the AI at this point.
@@ -63,6 +63,6 @@ impl FeatureFlag for ProjectPanelUndoRedoFeatureFlag {
const NAME: &'static str = "project-panel-undo-redo";
fn enabled_for_staff() -> bool {
- false
+ true
}
}
@@ -32,6 +32,7 @@ parking_lot.workspace = true
paths.workspace = true
rope.workspace = true
proto.workspace = true
+thiserror.workspace = true
serde.workspace = true
serde_json.workspace = true
smol.workspace = true
@@ -115,21 +115,16 @@ pub trait Fs: Send + Sync {
/// system trash.
async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()>;
- /// Moves a directory to the system trash.
+ /// Moves a file or directory to the system trash.
/// Returns a [`TrashedEntry`] that can be used to keep track of the
- /// location of the trashed directory in the system's trash.
- async fn trash_dir(&self, path: &Path) -> Result<TrashedEntry>;
+ /// location of the trashed item in the system's trash.
+ async fn trash(&self, path: &Path, options: RemoveOptions) -> Result<TrashedEntry>;
/// Removes a file from the filesystem.
/// There is no expectation that the file will be preserved in the system
/// trash.
async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()>;
- /// Moves a file to the system trash.
- /// Returns a [`TrashedEntry`] that can be used to keep track of the
- /// location of the trashed file in the system's trash.
- async fn trash_file(&self, path: &Path) -> Result<TrashedEntry>;
-
async fn open_handle(&self, path: &Path) -> Result<Arc<dyn FileHandle>>;
async fn open_sync(&self, path: &Path) -> Result<Box<dyn io::Read + Send + Sync>>;
async fn load(&self, path: &Path) -> Result<String> {
@@ -175,7 +170,7 @@ pub trait Fs: Send + Sync {
async fn restore(
&self,
trashed_entry: TrashedEntry,
- ) -> std::result::Result<(), TrashRestoreError>;
+ ) -> std::result::Result<PathBuf, TrashRestoreError>;
#[cfg(feature = "test-support")]
fn as_fake(&self) -> Arc<FakeFs> {
@@ -188,7 +183,7 @@ pub trait Fs: Send + Sync {
// tests from changes to that crate's API surface.
/// Represents a file or directory that has been moved to the system trash,
/// retaining enough information to restore it to its original location.
-#[derive(Clone, PartialEq)]
+#[derive(Clone, PartialEq, Debug)]
pub struct TrashedEntry {
/// Platform-specific identifier for the file/directory in the trash.
///
@@ -196,9 +191,9 @@ pub struct TrashedEntry {
/// * macOS & Windows โ Full path to the file/directory in the system's
/// trash.
pub id: OsString,
- /// Original name of the file/directory before it was moved to the trash.
+ /// Name of the file/directory at the time of trashing, including extension.
pub name: OsString,
- /// Original parent directory.
+ /// Absolute path to the parent directory at the time of trashing.
pub original_parent: PathBuf,
}
@@ -225,13 +220,13 @@ impl TrashedEntry {
}
}
-#[derive(Debug)]
+#[derive(Debug, thiserror::Error)]
pub enum TrashRestoreError {
- /// The specified `path` was not found in the system's trash.
+ #[error("The specified `path` ({}) was not found in the system's trash.", path.display())]
NotFound { path: PathBuf },
- /// A file or directory already exists at the restore destination.
+ #[error("File or directory ({}) already exists at the restore destination.", path.display())]
Collision { path: PathBuf },
- /// Any other platform-specific error.
+ #[error("Unknown error ({description})")]
Unknown { description: String },
}
@@ -801,12 +796,26 @@ impl Fs for RealFs {
}
}
- async fn trash_file(&self, path: &Path) -> Result<TrashedEntry> {
- Ok(trash::delete_with_info(path)?.into())
- }
+ async fn trash(&self, path: &Path, _options: RemoveOptions) -> Result<TrashedEntry> {
+ // We must make the path absolute or trash will make a weird abomination
+ // of the zed working directory (not usually the worktree) and whatever
+ // the path variable holds.
+ let path = self
+ .canonicalize(path)
+ .await
+ .context("Could not canonicalize the path of the file")?;
+
+ let (tx, rx) = futures::channel::oneshot::channel();
+ std::thread::Builder::new()
+ .name("trash file or dir".to_string())
+ .spawn(|| tx.send(trash::delete_with_info(path)))
        .expect("The OS can spawn threads");
- async fn trash_dir(&self, path: &Path) -> Result<TrashedEntry> {
- self.trash_file(path).await
+ Ok(rx
+ .await
        .context("Tx dropped or trash thread panicked")?
+ .context("Could not trash file or dir")?
+ .into())
}
async fn open_sync(&self, path: &Path) -> Result<Box<dyn io::Read + Send + Sync>> {
@@ -1258,8 +1267,19 @@ impl Fs for RealFs {
async fn restore(
&self,
trashed_entry: TrashedEntry,
- ) -> std::result::Result<(), TrashRestoreError> {
- trash::restore_all([trashed_entry.into_trash_item()]).map_err(Into::into)
+ ) -> std::result::Result<PathBuf, TrashRestoreError> {
+ let restored_item_path = trashed_entry.original_parent.join(&trashed_entry.name);
+
+ let (tx, rx) = futures::channel::oneshot::channel();
+ std::thread::Builder::new()
+ .name("restore trashed item".to_string())
+ .spawn(move || {
+ let res = trash::restore_all([trashed_entry.into_trash_item()]);
+ tx.send(res)
+ })
            .expect("The OS can spawn threads");
+ rx.await.expect("Restore all never panics")?;
+ Ok(restored_item_path)
}
}
@@ -2794,16 +2814,17 @@ impl Fs for FakeFs {
self.remove_dir_inner(path, options).await.map(|_| ())
}
- async fn trash_dir(&self, path: &Path) -> Result<TrashedEntry> {
+ async fn trash(&self, path: &Path, options: RemoveOptions) -> Result<TrashedEntry> {
let normalized_path = normalize_path(path);
let parent_path = normalized_path.parent().context("cannot remove the root")?;
let base_name = normalized_path.file_name().unwrap();
- let options = RemoveOptions {
- recursive: true,
- ..Default::default()
+ let result = if self.is_dir(path).await {
+ self.remove_dir_inner(path, options).await?
+ } else {
+ self.remove_file_inner(path, options).await?
};
- match self.remove_dir_inner(path, options).await? {
+ match result {
Some(fake_entry) => {
let trashed_entry = TrashedEntry {
id: base_name.to_str().unwrap().into(),
@@ -2823,27 +2844,6 @@ impl Fs for FakeFs {
self.remove_file_inner(path, options).await.map(|_| ())
}
- async fn trash_file(&self, path: &Path) -> Result<TrashedEntry> {
- let normalized_path = normalize_path(path);
- let parent_path = normalized_path.parent().context("cannot remove the root")?;
- let base_name = normalized_path.file_name().unwrap();
-
- match self.remove_file_inner(path, Default::default()).await? {
- Some(fake_entry) => {
- let trashed_entry = TrashedEntry {
- id: base_name.to_str().unwrap().into(),
- name: base_name.to_str().unwrap().into(),
- original_parent: parent_path.to_path_buf(),
- };
-
- let mut state = self.state.lock();
- state.trash.push((trashed_entry.clone(), fake_entry));
- Ok(trashed_entry)
- }
- None => anyhow::bail!("{normalized_path:?} does not exist"),
- }
- }
-
async fn open_sync(&self, path: &Path) -> Result<Box<dyn io::Read + Send + Sync>> {
let bytes = self.load_internal(path).await?;
Ok(Box::new(io::Cursor::new(bytes)))
@@ -3092,10 +3092,7 @@ impl Fs for FakeFs {
receiver
}
- async fn restore(
- &self,
- trashed_entry: TrashedEntry,
- ) -> std::result::Result<(), TrashRestoreError> {
+ async fn restore(&self, trashed_entry: TrashedEntry) -> Result<PathBuf, TrashRestoreError> {
let mut state = self.state.lock();
let Some((trashed_entry, fake_entry)) = state
@@ -3126,7 +3123,8 @@ impl Fs for FakeFs {
match result {
Ok(_) => {
state.trash.retain(|(entry, _)| *entry != trashed_entry);
- Ok(())
+ state.emit_event([(path.clone(), Some(PathEventKind::Created))]);
+ Ok(path)
}
Err(_) => {
// For now we'll just assume that this failed because it was a
@@ -628,64 +628,66 @@ async fn test_realfs_symlink_loop_metadata(executor: BackgroundExecutor) {
}
#[gpui::test]
-async fn test_fake_fs_trash_file(executor: BackgroundExecutor) {
+async fn test_fake_fs_trash(executor: BackgroundExecutor) {
let fs = FakeFs::new(executor.clone());
fs.insert_tree(
path!("/root"),
json!({
+ "src": {
+ "file_c.txt": "File C",
+ "file_d.txt": "File D"
+ },
"file_a.txt": "File A",
"file_b.txt": "File B",
}),
)
.await;
+ // Trashing a file.
let root_path = PathBuf::from(path!("/root"));
let path = path!("/root/file_a.txt").as_ref();
let trashed_entry = fs
- .trash_file(path)
+ .trash(path, Default::default())
.await
.expect("should be able to trash {path:?}");
assert_eq!(trashed_entry.name, "file_a.txt");
assert_eq!(trashed_entry.original_parent, root_path);
- assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_b.txt"))]);
+ assert_eq!(
+ fs.files(),
+ vec![
+ PathBuf::from(path!("/root/file_b.txt")),
+ PathBuf::from(path!("/root/src/file_c.txt")),
+ PathBuf::from(path!("/root/src/file_d.txt"))
+ ]
+ );
let trash_entries = fs.trash_entries();
assert_eq!(trash_entries.len(), 1);
assert_eq!(trash_entries[0].name, "file_a.txt");
assert_eq!(trash_entries[0].original_parent, root_path);
-}
-#[gpui::test]
-async fn test_fake_fs_trash_dir(executor: BackgroundExecutor) {
- let fs = FakeFs::new(executor.clone());
- fs.insert_tree(
- path!("/root"),
- json!({
- "src": {
- "file_a.txt": "File A",
- "file_b.txt": "File B",
- },
- "file_c.txt": "File C",
- }),
- )
- .await;
-
- let root_path = PathBuf::from(path!("/root"));
+ // Trashing a directory.
let path = path!("/root/src").as_ref();
let trashed_entry = fs
- .trash_dir(path)
+ .trash(
+ path,
+ RemoveOptions {
+ recursive: true,
+ ..Default::default()
+ },
+ )
.await
.expect("should be able to trash {path:?}");
assert_eq!(trashed_entry.name, "src");
assert_eq!(trashed_entry.original_parent, root_path);
- assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]);
+ assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_b.txt"))]);
let trash_entries = fs.trash_entries();
- assert_eq!(trash_entries.len(), 1);
- assert_eq!(trash_entries[0].name, "src");
- assert_eq!(trash_entries[0].original_parent, root_path);
+ assert_eq!(trash_entries.len(), 2);
+ assert_eq!(trash_entries[1].name, "src");
+ assert_eq!(trash_entries[1].original_parent, root_path);
}
#[gpui::test]
@@ -704,8 +706,8 @@ async fn test_fake_fs_restore(executor: BackgroundExecutor) {
.await;
// Providing a non-existent `TrashedEntry` should result in an error.
- let id: OsString = "/trash/file_c.txt".into();
- let name: OsString = "file_c.txt".into();
+ let id = OsString::from("/trash/file_c.txt");
+ let name = OsString::from("file_c.txt");
let original_parent = PathBuf::from(path!("/root"));
let trashed_entry = TrashedEntry {
id,
@@ -719,7 +721,7 @@ async fn test_fake_fs_restore(executor: BackgroundExecutor) {
// it as part of its list of files, restore it and verify that the list of
// files and trash has been updated accordingly.
let path = path!("/root/src/file_a.txt").as_ref();
- let trashed_entry = fs.trash_file(path).await.unwrap();
+ let trashed_entry = fs.trash(path, Default::default()).await.unwrap();
assert_eq!(fs.trash_entries().len(), 1);
assert_eq!(
@@ -745,8 +747,12 @@ async fn test_fake_fs_restore(executor: BackgroundExecutor) {
// Deleting and restoring a directory should also remove all of its files
// but create a single trashed entry, which should be removed after
// restoration.
+ let options = RemoveOptions {
+ recursive: true,
+ ..Default::default()
+ };
let path = path!("/root/src/").as_ref();
- let trashed_entry = fs.trash_dir(path).await.unwrap();
+ let trashed_entry = fs.trash(path, options).await.unwrap();
assert_eq!(fs.trash_entries().len(), 1);
assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]);
@@ -766,7 +772,7 @@ async fn test_fake_fs_restore(executor: BackgroundExecutor) {
// A collision error should be returned in case a file is being restored to
// a path where a file already exists.
let path = path!("/root/src/file_a.txt").as_ref();
- let trashed_entry = fs.trash_file(path).await.unwrap();
+ let trashed_entry = fs.trash(path, Default::default()).await.unwrap();
assert_eq!(fs.trash_entries().len(), 1);
assert_eq!(
@@ -798,8 +804,12 @@ async fn test_fake_fs_restore(executor: BackgroundExecutor) {
// A collision error should be returned in case a directory is being
// restored to a path where a directory already exists.
+ let options = RemoveOptions {
+ recursive: true,
+ ..Default::default()
+ };
let path = path!("/root/src/").as_ref();
- let trashed_entry = fs.trash_dir(path).await.unwrap();
+ let trashed_entry = fs.trash(path, options).await.unwrap();
assert_eq!(fs.trash_entries().len(), 2);
assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/file_c.txt"))]);
@@ -2566,7 +2566,7 @@ impl Project {
path: ProjectPath,
trash: bool,
cx: &mut Context<Self>,
- ) -> Option<Task<Result<()>>> {
+ ) -> Option<Task<Result<Option<TrashedEntry>>>> {
let entry = self.entry_for_path(&path, cx)?;
self.delete_entry(entry.id, trash, cx)
}
@@ -2577,7 +2577,7 @@ impl Project {
entry_id: ProjectEntryId,
trash: bool,
cx: &mut Context<Self>,
- ) -> Option<Task<Result<()>>> {
+ ) -> Option<Task<Result<Option<TrashedEntry>>>> {
let worktree = self.worktree_for_entry(entry_id, cx)?;
cx.emit(Event::DeletedEntry(worktree.read(cx).id(), entry_id));
worktree.update(cx, |worktree, cx| {
@@ -2585,6 +2585,27 @@ impl Project {
})
}
+ #[inline]
+ pub fn restore_entry(
+ &self,
+ worktree_id: WorktreeId,
+ trash_entry: TrashedEntry,
+ cx: &mut Context<'_, Self>,
+ ) -> Task<Result<ProjectPath>> {
+ let Some(worktree) = self.worktree_for_id(worktree_id, cx) else {
+ return Task::ready(Err(anyhow!("No worktree for id {worktree_id:?}")));
+ };
+
+ cx.spawn(async move |_, cx| {
+ Worktree::restore_entry(trash_entry, worktree, cx)
+ .await
+ .map(|rel_path_buf| ProjectPath {
+                    worktree_id,
+ path: Arc::from(rel_path_buf.as_rel_path()),
+ })
+ })
+ }
+
#[inline]
pub fn expand_entry(
&mut self,
@@ -22,6 +22,7 @@ collections.workspace = true
command_palette_hooks.workspace = true
editor.workspace = true
file_icons.workspace = true
+futures.workspace = true
git_ui.workspace = true
git.workspace = true
gpui.workspace = true
@@ -48,6 +49,7 @@ zed_actions.workspace = true
telemetry.workspace = true
notifications.workspace = true
feature_flags.workspace = true
+fs.workspace = true
[dev-dependencies]
client = { workspace = true, features = ["test-support"] }
@@ -47,16 +47,16 @@ use settings::{
update_settings_file,
};
use smallvec::SmallVec;
-use std::ops::Neg;
-use std::{any::TypeId, time::Instant};
use std::{
+ any::TypeId,
cell::OnceCell,
cmp,
collections::HashSet,
+ ops::Neg,
ops::Range,
path::{Path, PathBuf},
sync::Arc,
- time::Duration,
+ time::{Duration, Instant},
};
use theme_settings::ThemeSettings;
use ui::{
@@ -84,7 +84,7 @@ use zed_actions::{
use crate::{
project_panel_settings::ProjectPanelScrollbarProxy,
- undo::{ProjectPanelOperation, UndoManager},
+ undo::{Change, UndoManager},
};
const PROJECT_PANEL_KEY: &str = "ProjectPanel";
@@ -401,6 +401,8 @@ actions!(
CompareMarkedFiles,
/// Undoes the last file operation.
Undo,
+ /// Redoes the last undone file operation.
+ Redo,
]
);
@@ -861,6 +863,7 @@ impl ProjectPanel {
.detach();
let scroll_handle = UniformListScrollHandle::new();
+ let weak_project_panel = cx.weak_entity();
let mut this = Self {
project: project.clone(),
hover_scroll_task: None,
@@ -896,7 +899,7 @@ impl ProjectPanel {
unfolded_dir_ids: Default::default(),
},
update_visible_entries_task: Default::default(),
- undo_manager: UndoManager::new(workspace.weak_handle()),
+ undo_manager: UndoManager::new(workspace.weak_handle(), weak_project_panel, &cx),
};
this.update_visible_entries(None, false, false, window, cx);
@@ -1176,6 +1179,11 @@ impl ProjectPanel {
"Undo",
Box::new(Undo),
)
+ .action_disabled_when(
+ !self.undo_manager.can_redo(),
+ "Redo",
+ Box::new(Redo),
+ )
})
.when(is_remote, |menu| {
menu.separator()
@@ -1874,16 +1882,12 @@ impl ProjectPanel {
// Record the operation if the edit was applied
if new_entry.is_ok() {
let operation = if let Some(old_entry) = edited_entry {
- ProjectPanelOperation::Rename {
- old_path: (worktree_id, old_entry.path).into(),
- new_path: new_project_path,
- }
+ Change::Renamed((worktree_id, old_entry.path).into(), new_project_path)
} else {
- ProjectPanelOperation::Create {
- project_path: new_project_path,
- }
+ Change::Created(new_project_path)
};
- project_panel.undo_manager.record(operation);
+
+ project_panel.undo_manager.record([operation]).log_err();
}
cx.notify();
@@ -2136,9 +2140,12 @@ impl ProjectPanel {
}
}
- pub fn undo(&mut self, _: &Undo, _window: &mut Window, cx: &mut Context<Self>) {
- self.undo_manager.undo(cx);
- cx.notify();
+ pub fn undo(&mut self, _: &Undo, _window: &mut Window, _cx: &mut Context<Self>) {
+ self.undo_manager.undo().log_err();
+ }
+
+ pub fn redo(&mut self, _: &Redo, _window: &mut Window, _cx: &mut Context<Self>) {
+ self.undo_manager.redo().log_err();
}
fn rename_impl(
@@ -2331,6 +2338,7 @@ impl ProjectPanel {
Some((
selection.entry_id,
+ selection.worktree_id,
project_path.path.file_name()?.to_string(),
))
})
@@ -2346,7 +2354,7 @@ impl ProjectPanel {
"Are you sure you want to permanently delete"
};
let prompt = match file_paths.first() {
- Some((_, path)) if file_paths.len() == 1 => {
+ Some((_, _, path)) if file_paths.len() == 1 => {
let unsaved_warning = if dirty_buffers > 0 {
"\n\nIt has unsaved changes, which will be lost."
} else {
@@ -2361,7 +2369,7 @@ impl ProjectPanel {
let truncated_path_counts = file_paths.len() - CUTOFF_POINT;
let mut paths = file_paths
.iter()
- .map(|(_, path)| path.clone())
+ .map(|(_, _, path)| path.clone())
.take(CUTOFF_POINT)
.collect::<Vec<_>>();
paths.truncate(CUTOFF_POINT);
@@ -2372,7 +2380,7 @@ impl ProjectPanel {
}
paths
} else {
- file_paths.iter().map(|(_, path)| path.clone()).collect()
+ file_paths.iter().map(|(_, _, path)| path.clone()).collect()
};
let unsaved_warning = if dirty_buffers == 0 {
String::new()
@@ -2409,8 +2417,11 @@ impl ProjectPanel {
{
return anyhow::Ok(());
}
- for (entry_id, _) in file_paths {
- panel
+
+ let mut changes = Vec::new();
+
+ for (entry_id, worktree_id, _) in file_paths {
+ let trashed_entry = panel
.update(cx, |panel, cx| {
panel
.project
@@ -2418,8 +2429,19 @@ impl ProjectPanel {
.context("no such entry")
})??
.await?;
+
+ // Keep track of trashed change so that we can then record
+ // all of the changes at once, such that undoing and redoing
+ // restores or trashes all files in batch.
+ if trash && let Some(trashed_entry) = trashed_entry {
+ changes.push(Change::Trashed(worktree_id, trashed_entry));
+ }
}
panel.update_in(cx, |panel, window, cx| {
+ if trash {
+ panel.undo_manager.record(changes).log_err();
+ }
+
if let Some(next_selection) = next_selection {
panel.update_visible_entries(
Some((next_selection.worktree_id, next_selection.entry_id)),
@@ -3071,8 +3093,8 @@ impl ProjectPanel {
enum PasteTask {
Rename {
task: Task<Result<CreatedEntry>>,
- old_path: ProjectPath,
- new_path: ProjectPath,
+ from: ProjectPath,
+ to: ProjectPath,
},
Copy {
task: Task<Result<Option<Entry>>>,
@@ -3089,14 +3111,14 @@ impl ProjectPanel {
let clip_entry_id = clipboard_entry.entry_id;
let destination: ProjectPath = (worktree_id, new_path).into();
let task = if clipboard_entries.is_cut() {
- let old_path = self.project.read(cx).path_for_entry(clip_entry_id, cx)?;
+ let original_path = self.project.read(cx).path_for_entry(clip_entry_id, cx)?;
let task = self.project.update(cx, |project, cx| {
project.rename_entry(clip_entry_id, destination.clone(), cx)
});
PasteTask::Rename {
task,
- old_path,
- new_path: destination,
+ from: original_path,
+ to: destination,
}
} else {
let task = self.project.update(cx, |project, cx| {
@@ -3113,21 +3135,16 @@ impl ProjectPanel {
cx.spawn_in(window, async move |project_panel, mut cx| {
let mut last_succeed = None;
- let mut operations = Vec::new();
+ let mut changes = Vec::new();
for task in paste_tasks {
match task {
- PasteTask::Rename {
- task,
- old_path,
- new_path,
- } => {
+ PasteTask::Rename { task, from, to } => {
if let Some(CreatedEntry::Included(entry)) = task
.await
.notify_workspace_async_err(workspace.clone(), &mut cx)
{
- operations
- .push(ProjectPanelOperation::Rename { old_path, new_path });
+ changes.push(Change::Renamed(from, to));
last_succeed = Some(entry);
}
}
@@ -3136,9 +3153,7 @@ impl ProjectPanel {
.await
.notify_workspace_async_err(workspace.clone(), &mut cx)
{
- operations.push(ProjectPanelOperation::Create {
- project_path: destination,
- });
+ changes.push(Change::Created(destination));
last_succeed = Some(entry);
}
}
@@ -3147,7 +3162,7 @@ impl ProjectPanel {
project_panel
.update(cx, |this, _| {
- this.undo_manager.record_batch(operations);
+ this.undo_manager.record(changes).log_err();
})
.ok();
@@ -4371,6 +4386,20 @@ impl ProjectPanel {
this.marked_entries.clear();
this.update_visible_entries(new_selection, false, false, window, cx);
}
+
+ let changes: Vec<Change> = opened_entries
+ .iter()
+ .filter_map(|entry_id| {
+ worktree.read(cx).entry_for_id(*entry_id).map(|entry| {
+ Change::Created(ProjectPath {
+ worktree_id,
+ path: entry.path.clone(),
+ })
+ })
+ })
+ .collect();
+
+ this.undo_manager.record(changes).log_err();
})
}
.log_err()
@@ -4449,33 +4478,30 @@ impl ProjectPanel {
cx.spawn_in(window, async move |project_panel, cx| {
let mut last_succeed = None;
- let mut operations = Vec::new();
+ let mut changes = Vec::new();
for task in copy_tasks.into_iter() {
if let Some(Some(entry)) = task.await.log_err() {
last_succeed = Some(entry.id);
- operations.push(ProjectPanelOperation::Create {
- project_path: (worktree_id, entry.path).into(),
- });
+ changes.push(Change::Created((worktree_id, entry.path).into()));
}
}
// update selection
if let Some(entry_id) = last_succeed {
- project_panel
- .update_in(cx, |project_panel, window, cx| {
- project_panel.selection = Some(SelectedEntry {
- worktree_id,
- entry_id,
- });
-
- project_panel.undo_manager.record_batch(operations);
+ project_panel.update_in(cx, |project_panel, window, cx| {
+ project_panel.selection = Some(SelectedEntry {
+ worktree_id,
+ entry_id,
+ });
+ // if only one entry was dragged and it was disambiguated, open the rename editor
+ if item_count == 1 && disambiguation_range.is_some() {
+ project_panel.rename_impl(disambiguation_range, window, cx);
+ }
- // if only one entry was dragged and it was disambiguated, open the rename editor
- if item_count == 1 && disambiguation_range.is_some() {
- project_panel.rename_impl(disambiguation_range, window, cx);
- }
- })
- .ok();
+ project_panel.undo_manager.record(changes)
+ })??;
}
+
+ std::result::Result::Ok::<(), anyhow::Error>(())
})
.detach();
Some(())
@@ -4551,7 +4577,7 @@ impl ProjectPanel {
let workspace = self.workspace.clone();
if folded_selection_info.is_empty() {
cx.spawn_in(window, async move |project_panel, mut cx| {
- let mut operations = Vec::new();
+ let mut changes = Vec::new();
for (entry_id, task) in move_tasks {
if let Some(CreatedEntry::Included(new_entry)) = task
.await
@@ -4560,16 +4586,16 @@ impl ProjectPanel {
if let (Some(old_path), Some(worktree_id)) =
(old_paths.get(&entry_id), destination_worktree_id)
{
- operations.push(ProjectPanelOperation::Rename {
- old_path: old_path.clone(),
- new_path: (worktree_id, new_entry.path).into(),
- });
+ changes.push(Change::Renamed(
+ old_path.clone(),
+ (worktree_id, new_entry.path).into(),
+ ));
}
}
}
project_panel
.update(cx, |this, _| {
- this.undo_manager.record_batch(operations);
+ this.undo_manager.record(changes).log_err();
})
.ok();
})
@@ -4587,10 +4613,10 @@ impl ProjectPanel {
if let (Some(old_path), Some(worktree_id)) =
(old_paths.get(&entry_id), destination_worktree_id)
{
- operations.push(ProjectPanelOperation::Rename {
- old_path: old_path.clone(),
- new_path: (worktree_id, new_entry.path.clone()).into(),
- });
+ operations.push(Change::Renamed(
+ old_path.clone(),
+ (worktree_id, new_entry.path.clone()).into(),
+ ));
}
move_results.push((entry_id, new_entry));
}
@@ -4602,7 +4628,7 @@ impl ProjectPanel {
project_panel
.update(cx, |this, _| {
- this.undo_manager.record_batch(operations);
+ this.undo_manager.record(operations).log_err();
})
.ok();
@@ -6640,6 +6666,7 @@ impl Render for ProjectPanel {
.on_action(cx.listener(Self::compare_marked_files))
.when(cx.has_flag::<ProjectPanelUndoRedoFeatureFlag>(), |el| {
el.on_action(cx.listener(Self::undo))
+ .on_action(cx.listener(Self::redo))
})
.when(!project.is_read_only(cx), |el| {
el.on_action(cx.listener(Self::new_file))
@@ -7333,3 +7360,4 @@ fn git_status_indicator(git_status: GitSummary) -> Option<(&'static str, Color)>
#[cfg(test)]
mod project_panel_tests;
+mod tests;
@@ -1,4 +1,5 @@
use super::*;
+// use crate::undo::tests::{build_create_operation, build_rename_operation};
use collections::HashSet;
use editor::MultiBufferOffset;
use gpui::{Empty, Entity, TestAppContext, VisualTestContext};
@@ -1994,555 +1995,6 @@ async fn test_copy_paste_nested_and_root_entries(cx: &mut gpui::TestAppContext)
);
}
-#[gpui::test]
-async fn test_undo_rename(cx: &mut gpui::TestAppContext) {
- init_test(cx);
-
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "a.txt": "",
- "b.txt": "",
- }),
- )
- .await;
-
- let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
- let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
- let workspace = window
- .read_with(cx, |mw, _| mw.workspace().clone())
- .unwrap();
- let cx = &mut VisualTestContext::from_window(window.into(), cx);
- let panel = workspace.update_in(cx, ProjectPanel::new);
- cx.run_until_parked();
-
- select_path(&panel, "root/a.txt", cx);
- panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx));
- cx.run_until_parked();
-
- let confirm = panel.update_in(cx, |panel, window, cx| {
- panel
- .filename_editor
- .update(cx, |editor, cx| editor.set_text("renamed.txt", window, cx));
- panel.confirm_edit(true, window, cx).unwrap()
- });
- confirm.await.unwrap();
- cx.run_until_parked();
-
- assert!(
- find_project_entry(&panel, "root/renamed.txt", cx).is_some(),
- "File should be renamed to renamed.txt"
- );
- assert_eq!(
- find_project_entry(&panel, "root/a.txt", cx),
- None,
- "Original file should no longer exist"
- );
-
- panel.update_in(cx, |panel, window, cx| {
- panel.undo(&Undo, window, cx);
- });
- cx.run_until_parked();
-
- assert!(
- find_project_entry(&panel, "root/a.txt", cx).is_some(),
- "File should be restored to original name after undo"
- );
- assert_eq!(
- find_project_entry(&panel, "root/renamed.txt", cx),
- None,
- "Renamed file should no longer exist after undo"
- );
-}
-
-#[gpui::test]
-async fn test_undo_create_file(cx: &mut gpui::TestAppContext) {
- init_test(cx);
-
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "existing.txt": "",
- }),
- )
- .await;
-
- let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
- let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
- let workspace = window
- .read_with(cx, |mw, _| mw.workspace().clone())
- .unwrap();
- let cx = &mut VisualTestContext::from_window(window.into(), cx);
- let panel = workspace.update_in(cx, ProjectPanel::new);
- cx.run_until_parked();
-
- select_path(&panel, "root", cx);
- panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx));
- cx.run_until_parked();
-
- let confirm = panel.update_in(cx, |panel, window, cx| {
- panel
- .filename_editor
- .update(cx, |editor, cx| editor.set_text("new.txt", window, cx));
- panel.confirm_edit(true, window, cx).unwrap()
- });
- confirm.await.unwrap();
- cx.run_until_parked();
-
- assert!(
- find_project_entry(&panel, "root/new.txt", cx).is_some(),
- "New file should exist"
- );
-
- panel.update_in(cx, |panel, window, cx| {
- panel.undo(&Undo, window, cx);
- });
- cx.run_until_parked();
-
- assert_eq!(
- find_project_entry(&panel, "root/new.txt", cx),
- None,
- "New file should be removed after undo"
- );
- assert!(
- find_project_entry(&panel, "root/existing.txt", cx).is_some(),
- "Existing file should still be present"
- );
-}
-
-#[gpui::test]
-async fn test_undo_create_directory(cx: &mut gpui::TestAppContext) {
- init_test(cx);
-
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "existing.txt": "",
- }),
- )
- .await;
-
- let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
- let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
- let workspace = window
- .read_with(cx, |mw, _| mw.workspace().clone())
- .unwrap();
- let cx = &mut VisualTestContext::from_window(window.into(), cx);
- let panel = workspace.update_in(cx, ProjectPanel::new);
- cx.run_until_parked();
-
- select_path(&panel, "root", cx);
- panel.update_in(cx, |panel, window, cx| {
- panel.new_directory(&NewDirectory, window, cx)
- });
- cx.run_until_parked();
-
- let confirm = panel.update_in(cx, |panel, window, cx| {
- panel
- .filename_editor
- .update(cx, |editor, cx| editor.set_text("new_dir", window, cx));
- panel.confirm_edit(true, window, cx).unwrap()
- });
- confirm.await.unwrap();
- cx.run_until_parked();
-
- assert!(
- find_project_entry(&panel, "root/new_dir", cx).is_some(),
- "New directory should exist"
- );
-
- panel.update_in(cx, |panel, window, cx| {
- panel.undo(&Undo, window, cx);
- });
- cx.run_until_parked();
-
- assert_eq!(
- find_project_entry(&panel, "root/new_dir", cx),
- None,
- "New directory should be removed after undo"
- );
-}
-
-#[gpui::test]
-async fn test_undo_cut_paste(cx: &mut gpui::TestAppContext) {
- init_test(cx);
-
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "src": {
- "file.txt": "content",
- },
- "dst": {},
- }),
- )
- .await;
-
- let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
- let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
- let workspace = window
- .read_with(cx, |mw, _| mw.workspace().clone())
- .unwrap();
- let cx = &mut VisualTestContext::from_window(window.into(), cx);
- let panel = workspace.update_in(cx, ProjectPanel::new);
- cx.run_until_parked();
-
- toggle_expand_dir(&panel, "root/src", cx);
-
- select_path_with_mark(&panel, "root/src/file.txt", cx);
- panel.update_in(cx, |panel, window, cx| {
- panel.cut(&Default::default(), window, cx);
- });
-
- select_path(&panel, "root/dst", cx);
- panel.update_in(cx, |panel, window, cx| {
- panel.paste(&Default::default(), window, cx);
- });
- cx.run_until_parked();
-
- assert!(
- find_project_entry(&panel, "root/dst/file.txt", cx).is_some(),
- "File should be moved to dst"
- );
- assert_eq!(
- find_project_entry(&panel, "root/src/file.txt", cx),
- None,
- "File should no longer be in src"
- );
-
- panel.update_in(cx, |panel, window, cx| {
- panel.undo(&Undo, window, cx);
- });
- cx.run_until_parked();
-
- assert!(
- find_project_entry(&panel, "root/src/file.txt", cx).is_some(),
- "File should be back in src after undo"
- );
- assert_eq!(
- find_project_entry(&panel, "root/dst/file.txt", cx),
- None,
- "File should no longer be in dst after undo"
- );
-}
-
-#[gpui::test]
-async fn test_undo_drag_single_entry(cx: &mut gpui::TestAppContext) {
- init_test(cx);
-
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "src": {
- "main.rs": "",
- },
- "dst": {},
- }),
- )
- .await;
-
- let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
- let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
- let workspace = window
- .read_with(cx, |mw, _| mw.workspace().clone())
- .unwrap();
- let cx = &mut VisualTestContext::from_window(window.into(), cx);
- let panel = workspace.update_in(cx, ProjectPanel::new);
- cx.run_until_parked();
-
- toggle_expand_dir(&panel, "root/src", cx);
-
- panel.update(cx, |panel, _| panel.marked_entries.clear());
- select_path_with_mark(&panel, "root/src/main.rs", cx);
- drag_selection_to(&panel, "root/dst", false, cx);
-
- assert!(
- find_project_entry(&panel, "root/dst/main.rs", cx).is_some(),
- "File should be in dst after drag"
- );
- assert_eq!(
- find_project_entry(&panel, "root/src/main.rs", cx),
- None,
- "File should no longer be in src after drag"
- );
-
- panel.update_in(cx, |panel, window, cx| {
- panel.undo(&Undo, window, cx);
- });
- cx.run_until_parked();
-
- assert!(
- find_project_entry(&panel, "root/src/main.rs", cx).is_some(),
- "File should be back in src after undo"
- );
- assert_eq!(
- find_project_entry(&panel, "root/dst/main.rs", cx),
- None,
- "File should no longer be in dst after undo"
- );
-}
-
-#[gpui::test]
-async fn test_undo_drag_multiple_entries(cx: &mut gpui::TestAppContext) {
- init_test(cx);
-
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "src": {
- "alpha.txt": "",
- "beta.txt": "",
- },
- "dst": {},
- }),
- )
- .await;
-
- let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
- let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
- let workspace = window
- .read_with(cx, |mw, _| mw.workspace().clone())
- .unwrap();
- let cx = &mut VisualTestContext::from_window(window.into(), cx);
- let panel = workspace.update_in(cx, ProjectPanel::new);
- cx.run_until_parked();
-
- toggle_expand_dir(&panel, "root/src", cx);
-
- panel.update(cx, |panel, _| panel.marked_entries.clear());
- select_path_with_mark(&panel, "root/src/alpha.txt", cx);
- select_path_with_mark(&panel, "root/src/beta.txt", cx);
- drag_selection_to(&panel, "root/dst", false, cx);
-
- assert!(
- find_project_entry(&panel, "root/dst/alpha.txt", cx).is_some(),
- "alpha.txt should be in dst after drag"
- );
- assert!(
- find_project_entry(&panel, "root/dst/beta.txt", cx).is_some(),
- "beta.txt should be in dst after drag"
- );
-
- // A single undo should revert the entire batch
- panel.update_in(cx, |panel, window, cx| {
- panel.undo(&Undo, window, cx);
- });
- cx.run_until_parked();
-
- assert!(
- find_project_entry(&panel, "root/src/alpha.txt", cx).is_some(),
- "alpha.txt should be back in src after undo"
- );
- assert!(
- find_project_entry(&panel, "root/src/beta.txt", cx).is_some(),
- "beta.txt should be back in src after undo"
- );
- assert_eq!(
- find_project_entry(&panel, "root/dst/alpha.txt", cx),
- None,
- "alpha.txt should no longer be in dst after undo"
- );
- assert_eq!(
- find_project_entry(&panel, "root/dst/beta.txt", cx),
- None,
- "beta.txt should no longer be in dst after undo"
- );
-}
-
-#[gpui::test]
-async fn test_multiple_sequential_undos(cx: &mut gpui::TestAppContext) {
- init_test(cx);
-
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "a.txt": "",
- }),
- )
- .await;
-
- let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
- let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
- let workspace = window
- .read_with(cx, |mw, _| mw.workspace().clone())
- .unwrap();
- let cx = &mut VisualTestContext::from_window(window.into(), cx);
- let panel = workspace.update_in(cx, ProjectPanel::new);
- cx.run_until_parked();
-
- select_path(&panel, "root/a.txt", cx);
- panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx));
- cx.run_until_parked();
- let confirm = panel.update_in(cx, |panel, window, cx| {
- panel
- .filename_editor
- .update(cx, |editor, cx| editor.set_text("b.txt", window, cx));
- panel.confirm_edit(true, window, cx).unwrap()
- });
- confirm.await.unwrap();
- cx.run_until_parked();
-
- assert!(find_project_entry(&panel, "root/b.txt", cx).is_some());
-
- select_path(&panel, "root", cx);
- panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx));
- cx.run_until_parked();
- let confirm = panel.update_in(cx, |panel, window, cx| {
- panel
- .filename_editor
- .update(cx, |editor, cx| editor.set_text("c.txt", window, cx));
- panel.confirm_edit(true, window, cx).unwrap()
- });
- confirm.await.unwrap();
- cx.run_until_parked();
-
- assert!(find_project_entry(&panel, "root/b.txt", cx).is_some());
- assert!(find_project_entry(&panel, "root/c.txt", cx).is_some());
-
- panel.update_in(cx, |panel, window, cx| {
- panel.undo(&Undo, window, cx);
- });
- cx.run_until_parked();
-
- assert_eq!(
- find_project_entry(&panel, "root/c.txt", cx),
- None,
- "c.txt should be removed after first undo"
- );
- assert!(
- find_project_entry(&panel, "root/b.txt", cx).is_some(),
- "b.txt should still exist after first undo"
- );
-
- panel.update_in(cx, |panel, window, cx| {
- panel.undo(&Undo, window, cx);
- });
- cx.run_until_parked();
-
- assert!(
- find_project_entry(&panel, "root/a.txt", cx).is_some(),
- "a.txt should be restored after second undo"
- );
- assert_eq!(
- find_project_entry(&panel, "root/b.txt", cx),
- None,
- "b.txt should no longer exist after second undo"
- );
-}
-
-#[gpui::test]
-async fn test_undo_with_empty_stack(cx: &mut gpui::TestAppContext) {
- init_test(cx);
-
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "a.txt": "",
- }),
- )
- .await;
-
- let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
- let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
- let workspace = window
- .read_with(cx, |mw, _| mw.workspace().clone())
- .unwrap();
- let cx = &mut VisualTestContext::from_window(window.into(), cx);
- let panel = workspace.update_in(cx, ProjectPanel::new);
- cx.run_until_parked();
-
- panel.update_in(cx, |panel, window, cx| {
- panel.undo(&Undo, window, cx);
- });
- cx.run_until_parked();
-
- assert!(
- find_project_entry(&panel, "root/a.txt", cx).is_some(),
- "File tree should be unchanged after undo on empty stack"
- );
-}
-
-#[gpui::test]
-async fn test_undo_batch(cx: &mut gpui::TestAppContext) {
- init_test(cx);
-
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "src": {
- "main.rs": "// Code!"
- }
- }),
- )
- .await;
-
- let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
- let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
- let workspace = window
- .read_with(cx, |mw, _| mw.workspace().clone())
- .unwrap();
- let cx = &mut VisualTestContext::from_window(window.into(), cx);
- let panel = workspace.update_in(cx, ProjectPanel::new);
- let worktree_id = project.update(cx, |project, cx| {
- project.visible_worktrees(cx).next().unwrap().read(cx).id()
- });
- cx.run_until_parked();
-
- // Since there currently isn't a way to both create a folder and the file
- // within it as two separate operations batched under the same
- // `ProjectPanelOperation::Batch` operation, we'll simply record those
- // ourselves, knowing that the filesystem already has the folder and file
- // being provided in the operations.
- panel.update(cx, |panel, _cx| {
- panel.undo_manager.record_batch(vec![
- ProjectPanelOperation::Create {
- project_path: ProjectPath {
- worktree_id,
- path: Arc::from(rel_path("src/main.rs")),
- },
- },
- ProjectPanelOperation::Create {
- project_path: ProjectPath {
- worktree_id,
- path: Arc::from(rel_path("src/")),
- },
- },
- ]);
- });
-
- // Ensure that `src/main.rs` is present in the filesystem before proceeding,
- // otherwise this test is irrelevant.
- assert_eq!(fs.files(), vec![PathBuf::from(path!("/root/src/main.rs"))]);
- assert_eq!(
- fs.directories(false),
- vec![
- PathBuf::from(path!("/")),
- PathBuf::from(path!("/root/")),
- PathBuf::from(path!("/root/src/"))
- ]
- );
-
- panel.update_in(cx, |panel, window, cx| {
- panel.undo(&Undo, window, cx);
- });
- cx.run_until_parked();
-
- assert_eq!(fs.files().len(), 0);
- assert_eq!(
- fs.directories(false),
- vec![PathBuf::from(path!("/")), PathBuf::from(path!("/root/"))]
- );
-}
-
#[gpui::test]
async fn test_paste_external_paths(cx: &mut gpui::TestAppContext) {
init_test(cx);
@@ -7348,7 +6800,11 @@ async fn test_selection_fallback_to_next_highest_worktree(cx: &mut gpui::TestApp
);
}
-fn toggle_expand_dir(panel: &Entity<ProjectPanel>, path: &str, cx: &mut VisualTestContext) {
+pub(crate) fn toggle_expand_dir(
+ panel: &Entity<ProjectPanel>,
+ path: &str,
+ cx: &mut VisualTestContext,
+) {
let path = rel_path(path);
panel.update_in(cx, |panel, window, cx| {
for worktree in panel.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
@@ -9700,7 +9156,7 @@ async fn test_hide_hidden_entries(cx: &mut gpui::TestAppContext) {
);
}
-fn select_path(panel: &Entity<ProjectPanel>, path: &str, cx: &mut VisualTestContext) {
+pub(crate) fn select_path(panel: &Entity<ProjectPanel>, path: &str, cx: &mut VisualTestContext) {
let path = rel_path(path);
panel.update_in(cx, |panel, window, cx| {
for worktree in panel.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
@@ -9722,7 +9178,11 @@ fn select_path(panel: &Entity<ProjectPanel>, path: &str, cx: &mut VisualTestCont
cx.run_until_parked();
}
-fn select_path_with_mark(panel: &Entity<ProjectPanel>, path: &str, cx: &mut VisualTestContext) {
+pub(crate) fn select_path_with_mark(
+ panel: &Entity<ProjectPanel>,
+ path: &str,
+ cx: &mut VisualTestContext,
+) {
let path = rel_path(path);
panel.update(cx, |panel, cx| {
for worktree in panel.project.read(cx).worktrees(cx).collect::<Vec<_>>() {
@@ -9810,7 +9270,7 @@ fn set_folded_active_ancestor(
});
}
-fn drag_selection_to(
+pub(crate) fn drag_selection_to(
panel: &Entity<ProjectPanel>,
target_path: &str,
is_file: bool,
@@ -9835,7 +9295,7 @@ fn drag_selection_to(
cx.executor().run_until_parked();
}
-fn find_project_entry(
+pub(crate) fn find_project_entry(
panel: &Entity<ProjectPanel>,
path: &str,
cx: &mut VisualTestContext,
@@ -0,0 +1 @@
+pub(crate) mod undo;
@@ -0,0 +1,384 @@
+#![cfg(test)]
+
+use collections::HashSet;
+use fs::{FakeFs, Fs};
+use gpui::{Entity, VisualTestContext};
+use project::Project;
+use serde_json::{Value, json};
+use std::path::Path;
+use std::sync::Arc;
+use workspace::MultiWorkspace;
+
+use crate::project_panel_tests::{self, find_project_entry, select_path};
+use crate::{NewDirectory, NewFile, ProjectPanel, Redo, Rename, Trash, Undo};
+
+struct TestContext {
+ panel: Entity<ProjectPanel>,
+ fs: Arc<FakeFs>,
+ cx: VisualTestContext,
+}
+
+// Using the `util::path` macro requires a string literal, which would mean that
+// callers of, for example, `rename`, would now need to know about `/` and
+// use `path!` in tests.
+//
+// As such, we define it as a function here to make the helper methods more
+// ergonomic for our use case.
+fn path(path: impl AsRef<str>) -> String {
+ let path = path.as_ref();
+ #[cfg(target_os = "windows")]
+ {
+ let mut path = path.replace("/", "\\");
+ if path.starts_with("\\") {
+ path = format!("C:{}", &path);
+ }
+ path
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ {
+ path.to_string()
+ }
+}
+
+impl TestContext {
+ async fn undo(&mut self) {
+ self.panel.update_in(&mut self.cx, |panel, window, cx| {
+ panel.undo(&Undo, window, cx);
+ });
+ self.cx.run_until_parked();
+ }
+ async fn redo(&mut self) {
+ self.panel.update_in(&mut self.cx, |panel, window, cx| {
+ panel.redo(&Redo, window, cx);
+ });
+ self.cx.run_until_parked();
+ }
+
+ /// Note this only works when every file has an extension
+ fn assert_fs_state_is(&mut self, state: &[&str]) {
+ let state: HashSet<_> = state
+ .into_iter()
+ .map(|s| path(format!("/workspace/{s}")))
+ .chain([path("/workspace"), path("/")])
+ .map(|s| Path::new(&s).to_path_buf())
+ .collect();
+
+ let dirs: HashSet<_> = state
+ .iter()
+ .map(|p| match p.extension() {
+ Some(_) => p.parent().unwrap_or(Path::new(&path("/"))).to_owned(),
+ None => p.clone(),
+ })
+ .collect();
+
+ assert_eq!(
+ self.fs
+ .directories(true)
+ .into_iter()
+ .collect::<HashSet<_>>(),
+ dirs
+ );
+ assert_eq!(
+ self.fs.paths(true).into_iter().collect::<HashSet<_>>(),
+ state
+ );
+ }
+
+ fn assert_exists(&mut self, file: &str) {
+ assert!(
+ find_project_entry(&self.panel, &format!("workspace/{file}"), &mut self.cx).is_some(),
+ "{file} should exist"
+ );
+ }
+
+ fn assert_not_exists(&mut self, file: &str) {
+ assert_eq!(
+ find_project_entry(&self.panel, &format!("workspace/{file}"), &mut self.cx),
+ None,
+ "{file} should not exist"
+ );
+ }
+
+ async fn rename(&mut self, from: &str, to: &str) {
+ let from = format!("workspace/{from}");
+ let Self { panel, cx, .. } = self;
+ select_path(&panel, &from, cx);
+ panel.update_in(cx, |panel, window, cx| panel.rename(&Rename, window, cx));
+ cx.run_until_parked();
+
+ let confirm = panel.update_in(cx, |panel, window, cx| {
+ panel
+ .filename_editor
+ .update(cx, |editor, cx| editor.set_text(to, window, cx));
+ panel.confirm_edit(true, window, cx).unwrap()
+ });
+ confirm.await.unwrap();
+ cx.run_until_parked();
+ }
+
+ async fn create_file(&mut self, path: &str) {
+ let Self { panel, cx, .. } = self;
+ select_path(&panel, "workspace", cx);
+ panel.update_in(cx, |panel, window, cx| panel.new_file(&NewFile, window, cx));
+ cx.run_until_parked();
+
+ let confirm = panel.update_in(cx, |panel, window, cx| {
+ panel
+ .filename_editor
+ .update(cx, |editor, cx| editor.set_text(path, window, cx));
+ panel.confirm_edit(true, window, cx).unwrap()
+ });
+ confirm.await.unwrap();
+ cx.run_until_parked();
+ }
+
+ async fn create_directory(&mut self, path: &str) {
+ let Self { panel, cx, .. } = self;
+
+ select_path(&panel, "workspace", cx);
+ panel.update_in(cx, |panel, window, cx| {
+ panel.new_directory(&NewDirectory, window, cx)
+ });
+ cx.run_until_parked();
+
+ let confirm = panel.update_in(cx, |panel, window, cx| {
+ panel
+ .filename_editor
+ .update(cx, |editor, cx| editor.set_text(path, window, cx));
+ panel.confirm_edit(true, window, cx).unwrap()
+ });
+ confirm.await.unwrap();
+ cx.run_until_parked();
+ }
+
+ /// Drags the `files` to the provided `directory`.
+ fn drag(&mut self, files: &[&str], directory: &str) {
+ self.panel
+ .update(&mut self.cx, |panel, _| panel.marked_entries.clear());
+ files.into_iter().for_each(|file| {
+ project_panel_tests::select_path_with_mark(
+ &self.panel,
+ &format!("workspace/{file}"),
+ &mut self.cx,
+ )
+ });
+ project_panel_tests::drag_selection_to(
+ &self.panel,
+ &format!("workspace/{directory}"),
+ false,
+ &mut self.cx,
+ );
+ }
+
+ /// Only supports files in root (otherwise would need toggle_expand_dir).
+ /// For undo redo the paths themselves do not matter so this is fine
+ async fn cut(&mut self, file: &str) {
+ project_panel_tests::select_path_with_mark(
+ &self.panel,
+ &format!("workspace/{file}"),
+ &mut self.cx,
+ );
+ self.panel.update_in(&mut self.cx, |panel, window, cx| {
+ panel.cut(&Default::default(), window, cx);
+ });
+ }
+
+ /// Only supports files in root (otherwise would need toggle_expand_dir).
+ /// For undo redo the paths themselves do not matter so this is fine
+ async fn paste(&mut self, directory: &str) {
+ select_path(&self.panel, &format!("workspace/{directory}"), &mut self.cx);
+ self.panel.update_in(&mut self.cx, |panel, window, cx| {
+ panel.paste(&Default::default(), window, cx);
+ });
+ self.cx.run_until_parked();
+ }
+
+ async fn trash(&mut self, paths: &[&str]) {
+ paths.iter().for_each(|p| {
+ project_panel_tests::select_path_with_mark(
+ &self.panel,
+ &format!("workspace/{p}"),
+ &mut self.cx,
+ )
+ });
+
+ self.panel.update_in(&mut self.cx, |panel, window, cx| {
+ panel.trash(&Trash { skip_prompt: true }, window, cx);
+ });
+
+ self.cx.run_until_parked();
+ }
+
+ /// The test tree is:
+ /// ```txt
+ /// a.txt
+ /// b.txt
+ /// ```
+ /// a.txt and b.txt are both empty files.
+ async fn new(cx: &mut gpui::TestAppContext) -> TestContext {
+ Self::new_with_tree(
+ cx,
+ json!({
+ "a.txt": "",
+ "b.txt": "",
+ }),
+ )
+ .await
+ }
+
+ async fn new_with_tree(cx: &mut gpui::TestAppContext, tree: Value) -> TestContext {
+ project_panel_tests::init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree("/workspace", tree).await;
+ let project = Project::test(fs.clone(), ["/workspace".as_ref()], cx).await;
+ let window =
+ cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ let workspace = window
+ .read_with(cx, |mw, _| mw.workspace().clone())
+ .unwrap();
+ let mut cx = VisualTestContext::from_window(window.into(), cx);
+ let panel = workspace.update_in(&mut cx, ProjectPanel::new);
+ cx.run_until_parked();
+
+ TestContext { panel, fs, cx }
+ }
+}
+
+#[gpui::test]
+async fn rename_undo_redo(cx: &mut gpui::TestAppContext) {
+ let mut cx = TestContext::new(cx).await;
+
+ cx.rename("a.txt", "renamed.txt").await;
+ cx.assert_fs_state_is(&["b.txt", "renamed.txt"]);
+
+ cx.undo().await;
+ cx.assert_fs_state_is(&["a.txt", "b.txt"]);
+
+ cx.redo().await;
+ cx.assert_fs_state_is(&["b.txt", "renamed.txt"]);
+}
+
+#[gpui::test]
+async fn create_undo_redo(cx: &mut gpui::TestAppContext) {
+ let mut cx = TestContext::new(cx).await;
+ let path = path("/workspace/c.txt");
+
+ cx.create_file("c.txt").await;
+ cx.assert_exists("c.txt");
+
+ // We'll now insert some content into `c.txt` in order to ensure that, after
+ // undoing the trash operation, i.e., when the file is restored, the actual
+ // file's contents are preserved instead of a new one with the same path
+ // being created.
+ cx.fs.write(Path::new(&path), b"Hello!").await.unwrap();
+
+ cx.undo().await;
+ cx.assert_not_exists("c.txt");
+
+ cx.redo().await;
+ cx.assert_exists("c.txt");
+ assert_eq!(cx.fs.load(Path::new(&path)).await.unwrap(), "Hello!");
+}
+
+#[gpui::test]
+async fn create_dir_undo(cx: &mut gpui::TestAppContext) {
+ let mut cx = TestContext::new(cx).await;
+
+ cx.create_directory("new_dir").await;
+ cx.assert_exists("new_dir");
+ cx.undo().await;
+ cx.assert_not_exists("new_dir");
+}
+
+#[gpui::test]
+async fn cut_paste_undo(cx: &mut gpui::TestAppContext) {
+ let mut cx = TestContext::new(cx).await;
+
+ cx.create_directory("files").await;
+ cx.cut("a.txt").await;
+ cx.paste("files").await;
+ cx.assert_fs_state_is(&["b.txt", "files/", "files/a.txt"]);
+
+ cx.undo().await;
+ cx.assert_fs_state_is(&["a.txt", "b.txt", "files/"]);
+
+ cx.redo().await;
+ cx.assert_fs_state_is(&["b.txt", "files/", "files/a.txt"]);
+}
+
+#[gpui::test]
+async fn drag_undo_redo(cx: &mut gpui::TestAppContext) {
+ let mut cx = TestContext::new(cx).await;
+
+ cx.create_directory("src").await;
+ cx.create_file("src/a.rs").await;
+ cx.assert_fs_state_is(&["a.txt", "b.txt", "src/", "src/a.rs"]);
+
+ cx.drag(&["src/a.rs"], "");
+ cx.assert_fs_state_is(&["a.txt", "b.txt", "a.rs", "src/"]);
+
+ cx.undo().await;
+ cx.assert_fs_state_is(&["a.txt", "b.txt", "src/", "src/a.rs"]);
+
+ cx.redo().await;
+ cx.assert_fs_state_is(&["a.txt", "b.txt", "a.rs", "src/"]);
+}
+
+#[gpui::test]
+async fn drag_multiple_undo_redo(cx: &mut gpui::TestAppContext) {
+ let mut cx = TestContext::new(cx).await;
+
+ cx.create_directory("src").await;
+ cx.create_file("src/x.rs").await;
+ cx.create_file("src/y.rs").await;
+
+ cx.drag(&["src/x.rs", "src/y.rs"], "");
+ cx.assert_fs_state_is(&["a.txt", "b.txt", "x.rs", "y.rs", "src/"]);
+
+ cx.undo().await;
+ cx.assert_fs_state_is(&["a.txt", "b.txt", "src/", "src/x.rs", "src/y.rs"]);
+
+ cx.redo().await;
+ cx.assert_fs_state_is(&["a.txt", "b.txt", "x.rs", "y.rs", "src/"]);
+}
+
+#[gpui::test]
+async fn two_sequential_undos(cx: &mut gpui::TestAppContext) {
+ let mut cx = TestContext::new(cx).await;
+
+ cx.rename("a.txt", "x.txt").await;
+ cx.create_file("y.txt").await;
+ cx.assert_fs_state_is(&["b.txt", "x.txt", "y.txt"]);
+
+ cx.undo().await;
+ cx.assert_fs_state_is(&["b.txt", "x.txt"]);
+
+ cx.undo().await;
+ cx.assert_fs_state_is(&["a.txt", "b.txt"]);
+}
+
+#[gpui::test]
+async fn undo_without_history(cx: &mut gpui::TestAppContext) {
+ let mut cx = TestContext::new(cx).await;
+
+ // Undoing without any history should just result in the filesystem state
+ // remaining unchanged.
+ cx.undo().await;
+ cx.assert_fs_state_is(&["a.txt", "b.txt"])
+}
+
+#[gpui::test]
+async fn trash_undo_redo(cx: &mut gpui::TestAppContext) {
+ let mut cx = TestContext::new(cx).await;
+
+ cx.trash(&["a.txt", "b.txt"]).await;
+ cx.assert_fs_state_is(&[]);
+
+ cx.undo().await;
+ cx.assert_fs_state_is(&["a.txt", "b.txt"]);
+
+ cx.redo().await;
+ cx.assert_fs_state_is(&[]);
+}
@@ -1,286 +1,558 @@
-use anyhow::anyhow;
-use gpui::{AppContext, SharedString, Task, WeakEntity};
-use project::ProjectPath;
-use std::collections::VecDeque;
-use ui::{App, IntoElement, Label, ParentElement, Styled, v_flex};
+//! # Undo Manager
+//!
+//! ## Operations and Results
+//!
+//! Undo and Redo actions execute an operation against the filesystem, producing
+//! a result that is recorded back into the history in place of the original
+//! entry. Each result is the semantic inverse of its paired operation, so the
+//! cycle can repeat for continued undo and redo.
+//!
+//! Operations Results
+//! ───────────────────────────────── ──────────────────────────────────────
+//! Create(ProjectPath) → Created(ProjectPath)
+//! Trash(ProjectPath) → Trashed(TrashedEntry)
+//! Rename(ProjectPath, ProjectPath) → Renamed(ProjectPath, ProjectPath)
+//! Restore(TrashedEntry) → Restored(ProjectPath)
+//! Batch(Vec<Operation>) → Batch(Vec<Result>)
+//!
+//!
+//! ## History and Cursor
+//!
+//! The undo manager maintains an operation history with a cursor position (▲).
+//! Recording an operation appends it to the history and advances the cursor to
+//! the end. The cursor separates past entries (left of ▲) from future entries
+//! (right of ▲).
+//!
+//! • **Undo**: Takes the history entry just *before* ▲, executes its inverse,
+//! records the result back in its place, and moves ▲ one step to the left.
+//! • **Redo**: Takes the history entry just *at* ▲, executes its inverse,
+//! records the result back in its place, and advances ▲ one step to the right.
+//!
+//!
+//! ## Example
+//!
+//! User Operation Create(src/main.rs)
+//! History
+//! 0 Created(src/main.rs)
+//! 1 +++cursor+++
+//!
+//! User Operation Rename(README.md, readme.md)
+//! History
+//! 0 Created(src/main.rs)
+//! 1 Renamed(README.md, readme.md)
+//! 2 +++cursor+++
+//!
+//! User Operation Create(CONTRIBUTING.md)
+//! History
+//! 0 Created(src/main.rs)
+//! 1 Renamed(README.md, readme.md)
+//! 2 Created(CONTRIBUTING.md) ──┐
+//! 3 +++cursor+++ │ (just before the cursor)
+//! │
+//! ───────────────────────────────┴─────────────────────────────────────────────
+//! Undoing will take the result just before the cursor position, convert that
+//! into the operation that can revert that result, execute that operation and
+//! replace the result in the history with the new result, obtained from running
+//! the inverse operation, moving the cursor position one step back.
+//! ───────────────────────────────┬─────────────────────────────────────────────
+//! │
+//! │
+//! User Operation Undo v
+//! Execute Created(CONTRIBUTING.md) ────────> Trash(CONTRIBUTING.md)
+//! Record Trashed(TrashedEntry(1))
+//! History
+//! 0 Created(src/main.rs)
+//! 1 Renamed(README.md, readme.md) ─┐
+//! 2 +++cursor+++ │ (just before the cursor)
+//! 2 Trashed(TrashedEntry(1)) │
+//! │
+//! User Operation Undo v
+//! Execute Renamed(README.md, readme.md) ───> Rename(readme.md, README.md)
+//! Record Renamed(readme.md, README.md)
+//! History
+//! 0 Created(src/main.rs)
+//! 1 +++cursor+++
+//! 1 Renamed(readme.md, README.md) ─┐ (at the cursor)
+//! 2 Trashed(TrashedEntry(1)) │
+//! │
+//! ────────────────────────────────────┴────────────────────────────────────────
+//! Redoing will take the result at the cursor position, convert that into the
+//! operation that can revert that result, execute that operation and replace
+//! the result in the history with the new result, obtained from running the
+//! inverse operation, advancing the cursor position.
+//! ────────────────────────────────────┬────────────────────────────────────────
+//! │
+//! │
+//! User Operation Redo v
+//! Execute Renamed(readme.md, README.md) ───> Rename(README.md, readme.md)
+//! Record Renamed(README.md, readme.md)
+//! History
+//! 0 Created(src/main.rs)
+//! 1 Renamed(README.md, readme.md)
+//! 2 +++cursor+++
+//! 2 Trashed(TrashedEntry(1)) ──┐ (at the cursor)
+//! │
+//! User Operation Redo v
+//! Execute Trashed(TrashedEntry(1)) ────────> Restore(TrashedEntry(1))
+//! Record Restored(ProjectPath)
+//! History
+//! 0 Created(src/main.rs)
+//! 1 Renamed(README.md, readme.md)
+//! 2 Restored(ProjectPath)
+//! 2 +++cursor+++
+
+//!
+//! create A; A
+//! rename A -> B; B
+//! undo (rename B -> A) (takes 10s for some reason) B (still b cause it's hanging for 10s)
+//! remove B _
+//! create B B
+//! put important content in B B
+//! undo manager renames (does not hang) A
+//! remove A _
+//! user sad
+
+//!
+//! create A; A
+//! rename A -> B; B
+//! undo (rename B -> A) (takes 10s for some reason) B (still b cause it's hanging for 10s)
+//! create C B
+//! -- src/c.rs
+//! --
+
+//!
+//! create docs/files/ directory docs/files/
+//! create docs/files/a.txt docs/files/
+//! undo (rename B -> A) (takes 10s for some reason) B (still b cause it's hanging for 10s)
+//! create C B
+//! -- src/c.rs
+//! --
+
+//! List of "tainted files" that the user may not operate on
+
+use crate::ProjectPanel;
+use anyhow::{Context, Result, anyhow};
+use fs::TrashedEntry;
+use futures::channel::mpsc;
+use gpui::{AppContext, AsyncApp, SharedString, Task, WeakEntity};
+use project::{ProjectPath, WorktreeId};
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::{collections::VecDeque, sync::Arc};
+use ui::App;
use workspace::{
Workspace,
notifications::{NotificationId, simple_message_notification::MessageNotification},
};
+use worktree::CreatedEntry;
-const MAX_UNDO_OPERATIONS: usize = 10_000;
+enum Operation {
+ Trash(ProjectPath),
+ Rename(ProjectPath, ProjectPath),
+ Restore(WorktreeId, TrashedEntry),
+ Batch(Vec<Operation>),
+}
-#[derive(Clone)]
-pub enum ProjectPanelOperation {
- Batch(Vec<ProjectPanelOperation>),
- Create {
- project_path: ProjectPath,
- },
- Rename {
- old_path: ProjectPath,
- new_path: ProjectPath,
- },
+impl Operation {
+ async fn execute(self, undo_manager: &Inner, cx: &mut AsyncApp) -> Result<Change> {
+ Ok(match self {
+ Operation::Trash(project_path) => {
+ let trash_entry = undo_manager.trash(&project_path, cx).await?;
+ Change::Trashed(project_path.worktree_id, trash_entry)
+ }
+ Operation::Rename(from, to) => {
+ undo_manager.rename(&from, &to, cx).await?;
+ Change::Renamed(from, to)
+ }
+ Operation::Restore(worktree_id, trashed_entry) => {
+ let project_path = undo_manager.restore(worktree_id, trashed_entry, cx).await?;
+ Change::Restored(project_path)
+ }
+ Operation::Batch(operations) => {
+ let mut res = Vec::new();
+ for op in operations {
+ res.push(Box::pin(op.execute(undo_manager, cx)).await?);
+ }
+ Change::Batched(res)
+ }
+ })
+ }
}
-pub struct UndoManager {
+#[derive(Clone, Debug)]
+pub(crate) enum Change {
+ Created(ProjectPath),
+ Trashed(WorktreeId, TrashedEntry),
+ Renamed(ProjectPath, ProjectPath),
+ Restored(ProjectPath),
+ Batched(Vec<Change>),
+}
+
+impl Change {
+ fn to_inverse(self) -> Operation {
+ match self {
+ Change::Created(project_path) => Operation::Trash(project_path),
+ Change::Trashed(worktree_id, trashed_entry) => {
+ Operation::Restore(worktree_id, trashed_entry)
+ }
+ Change::Renamed(from, to) => Operation::Rename(to, from),
+ Change::Restored(project_path) => Operation::Trash(project_path),
+ // When inverting a batch of operations, we reverse the order of
+ // operations to handle dependencies between them. For example, if a
+ // batch contains the following order of operations:
+ //
+ // 1. Create `src/`
+ // 2. Create `src/main.rs`
+ //
+ // If we first tried to revert the directory creation, it would fail
+ // because there's still files inside the directory.
+ Change::Batched(changes) => {
+ Operation::Batch(changes.into_iter().rev().map(Change::to_inverse).collect())
+ }
+ }
+ }
+}
+
+// Upper bound on the number of tracked changes; replaying more undo steps than
+// this is unrealistic, so older entries are evicted from the front of the history.
+const MAX_UNDO_OPERATIONS: usize = 10_000;
+
+struct Inner {
workspace: WeakEntity<Workspace>,
- stack: VecDeque<ProjectPanelOperation>,
- /// Maximum number of operations to keep on the undo stack.
+ panel: WeakEntity<ProjectPanel>,
+ history: VecDeque<Change>,
+ cursor: usize,
+ /// Maximum number of operations to keep on the undo history.
limit: usize,
+ can_undo: Arc<AtomicBool>,
+ can_redo: Arc<AtomicBool>,
+ rx: mpsc::Receiver<UndoMessage>,
+}
+
+/// Cheaply cloneable handle to the undo/redo worker task: the channel sender
+/// and the atomic capability flags are reference-counted, so `Clone` is inexpensive.
+#[derive(Clone)]
+pub struct UndoManager {
+ tx: mpsc::Sender<UndoMessage>,
+ can_undo: Arc<AtomicBool>,
+ can_redo: Arc<AtomicBool>,
}
impl UndoManager {
- pub fn new(workspace: WeakEntity<Workspace>) -> Self {
- Self::new_with_limit(workspace, MAX_UNDO_OPERATIONS)
+ pub fn new(
+ workspace: WeakEntity<Workspace>,
+ panel: WeakEntity<ProjectPanel>,
+ cx: &App,
+ ) -> Self {
+ let (tx, rx) = mpsc::channel(1024);
+ let inner = Inner::new(workspace, panel, rx);
+
+ let this = Self {
+ tx,
+ can_undo: Arc::clone(&inner.can_undo),
+ can_redo: Arc::clone(&inner.can_redo),
+ };
+
+ cx.spawn(async move |cx| inner.manage_undo_and_redo(cx.clone()).await)
+ .detach();
+
+ this
}
- pub fn new_with_limit(workspace: WeakEntity<Workspace>, limit: usize) -> Self {
+ pub fn undo(&mut self) -> Result<()> {
+ self.tx
+ .try_send(UndoMessage::Undo)
+ .context("Undo and redo task can not keep up")
+ }
+ pub fn redo(&mut self) -> Result<()> {
+ self.tx
+ .try_send(UndoMessage::Redo)
+ .context("Undo and redo task can not keep up")
+ }
+ pub fn record(&mut self, changes: impl IntoIterator<Item = Change>) -> Result<()> {
+ self.tx
+ .try_send(UndoMessage::Changed(changes.into_iter().collect()))
+ .context("Undo and redo task can not keep up")
+ }
+ /// just for the UI, an undo may still fail if there are concurrent file
+ /// operations happening.
+ pub fn can_undo(&self) -> bool {
+ self.can_undo.load(Ordering::Relaxed)
+ }
+ /// just for the UI, an undo may still fail if there are concurrent file
+ /// operations happening.
+ pub fn can_redo(&self) -> bool {
+ self.can_redo.load(Ordering::Relaxed)
+ }
+}
+
+#[derive(Debug)]
+enum UndoMessage {
+ Changed(Vec<Change>),
+ Undo,
+ Redo,
+}
+
+impl UndoMessage {
+ fn error_title(&self) -> &'static str {
+ match self {
+ UndoMessage::Changed(_) => {
+ "this is a bug in the manage_undo_and_redo task please report"
+ }
+ UndoMessage::Undo => "Undo failed",
+ UndoMessage::Redo => "Redo failed",
+ }
+ }
+}
+
+impl Inner {
+ async fn manage_undo_and_redo(mut self, mut cx: AsyncApp) {
+ loop {
+ let Ok(new) = self.rx.recv().await else {
+ // project panel got closed
+ return;
+ };
+
+ let error_title = new.error_title();
+ let res = match new {
+ UndoMessage::Changed(changes) => {
+ self.record(changes);
+ Ok(())
+ }
+ UndoMessage::Undo => {
+ let res = self.undo(&mut cx).await;
+ let _ = self.panel.update(&mut cx, |_, cx| cx.notify());
+ res
+ }
+ UndoMessage::Redo => {
+ let res = self.redo(&mut cx).await;
+ let _ = self.panel.update(&mut cx, |_, cx| cx.notify());
+ res
+ }
+ };
+
+ if let Err(e) = res {
+ Self::show_error(error_title, self.workspace.clone(), e.to_string(), &mut cx);
+ }
+
+ self.can_undo.store(self.can_undo(), Ordering::Relaxed);
+ self.can_redo.store(self.can_redo(), Ordering::Relaxed);
+ }
+ }
+}
+
+impl Inner {
+ pub fn new(
+ workspace: WeakEntity<Workspace>,
+ panel: WeakEntity<ProjectPanel>,
+ rx: mpsc::Receiver<UndoMessage>,
+ ) -> Self {
+ Self::new_with_limit(workspace, panel, MAX_UNDO_OPERATIONS, rx)
+ }
+
+ pub fn new_with_limit(
+ workspace: WeakEntity<Workspace>,
+ panel: WeakEntity<ProjectPanel>,
+ limit: usize,
+ rx: mpsc::Receiver<UndoMessage>,
+ ) -> Self {
Self {
workspace,
+ panel,
+ history: VecDeque::new(),
+ cursor: 0usize,
limit,
- stack: VecDeque::new(),
+ can_undo: Arc::new(AtomicBool::new(false)),
+ can_redo: Arc::new(AtomicBool::new(false)),
+ rx,
}
}
pub fn can_undo(&self) -> bool {
- !self.stack.is_empty()
+ self.cursor > 0
}
- pub fn undo(&mut self, cx: &mut App) {
- if let Some(operation) = self.stack.pop_back() {
- let task = self.revert_operation(operation, cx);
- let workspace = self.workspace.clone();
-
- cx.spawn(async move |cx| {
- let errors = task.await;
- if !errors.is_empty() {
- cx.update(|cx| {
- let messages = errors
- .iter()
- .map(|err| SharedString::from(err.to_string()))
- .collect();
-
- Self::show_errors(workspace, messages, cx)
- })
- }
- })
- .detach();
+ pub fn can_redo(&self) -> bool {
+ self.cursor < self.history.len()
+ }
+
+ pub async fn undo(&mut self, cx: &mut AsyncApp) -> Result<()> {
+ if !self.can_undo() {
+ return Ok(());
}
+
+ // Undo failure:
+ //
+ // History
+ // 0 Created(src/main.rs)
+ // 1 Renamed(README.md, readme.md) ─┐
+ // 2 +++cursor+++ │(before the cursor)
+ // 2 Trashed(TrashedEntry(1)) │
+ // │
+ // User Operation Undo v
+ // Failed execute Renamed(README.md, readme.md) ───> Rename(readme.md, README.md)
+ // Record nothing
+ // History
+ // 0 Created(src/main.rs)
+ // 1 +++cursor+++
+ // 1 Trashed(TrashedEntry(1)) -----
+ // |(at the cursor)
+ // User Operation Redo v
+ // Execute Trashed(TrashedEntry(1)) ────────> Restore(TrashedEntry(1))
+ // Record Restored(ProjectPath)
+ // History
+ // 0 Created(src/main.rs)
+ // 1 Restored(ProjectPath)
+ // 1 +++cursor+++
+
+ // We always want to move the cursor back regardless of whether undoing
+ // succeeds or fails, otherwise the cursor could end up pointing to a
+ // position outside of the history, as we remove the change before the
+ // cursor, in case undo fails.
+ let before_cursor = self.cursor - 1; // see docs above
+ self.cursor -= 1; // take a step back into the past
+
+ // If undoing fails, the user would be in a stuck state from which
+ // manual intervention would likely be needed in order to undo. As such,
+ // we remove the change from the `history` even before attempting to
+ // execute its inversion.
+ let undo_change = self
+ .history
+ .remove(before_cursor)
+ .expect("we can undo")
+ .to_inverse()
+ .execute(self, cx)
+ .await?;
+ self.history.insert(before_cursor, undo_change);
+ Ok(())
}
- pub fn record(&mut self, operation: ProjectPanelOperation) {
- if self.stack.len() >= self.limit {
- self.stack.pop_front();
+ pub async fn redo(&mut self, cx: &mut AsyncApp) -> Result<()> {
+ if !self.can_redo() {
+ return Ok(());
}
- self.stack.push_back(operation);
+ // If redoing fails, the user would be in a stuck state from which
+ // manual intervention would likely be needed in order to redo. As such,
+ // we remove the change from the `history` even before attempting to
+ // execute its inversion.
+ let redo_change = self
+ .history
+ .remove(self.cursor)
+ .expect("we can redo")
+ .to_inverse()
+ .execute(self, cx)
+ .await?;
+ self.history.insert(self.cursor, redo_change);
+ self.cursor += 1;
+ Ok(())
}
- pub fn record_batch(&mut self, operations: impl IntoIterator<Item = ProjectPanelOperation>) {
- let mut operations = operations.into_iter().collect::<Vec<_>>();
- let operation = match operations.len() {
+ /// Passed in changes will always be performed as a single step
+ pub fn record(&mut self, mut changes: Vec<Change>) {
+ let change = match changes.len() {
0 => return,
- 1 => operations.pop().unwrap(),
- _ => ProjectPanelOperation::Batch(operations),
+ 1 => changes.remove(0),
+ _ => Change::Batched(changes),
};
- self.record(operation);
+ // When recording a new change, discard any changes that could still be
+ // redone.
+ if self.cursor < self.history.len() {
+ self.history.drain(self.cursor..);
+ }
+
+ // Ensure that the number of recorded changes does not exceed the
+ // maximum amount of tracked changes.
+ if self.history.len() >= self.limit {
+ self.history.pop_front();
+ } else {
+ self.cursor += 1;
+ }
+
+ self.history.push_back(change);
}
- /// Attempts to revert the provided `operation`, returning a vector of errors
- /// in case there was any failure while reverting the operation.
- ///
- /// For all operations other than [`crate::undo::ProjectPanelOperation::Batch`], a maximum
- /// of one error is returned.
- fn revert_operation(
+ async fn rename(
&self,
- operation: ProjectPanelOperation,
- cx: &mut App,
- ) -> Task<Vec<anyhow::Error>> {
- match operation {
- ProjectPanelOperation::Create { project_path } => {
- let Some(workspace) = self.workspace.upgrade() else {
- return Task::ready(vec![anyhow!("Failed to obtain workspace.")]);
- };
-
- let result = workspace.update(cx, |workspace, cx| {
- workspace.project().update(cx, |project, cx| {
- let entry_id = project
- .entry_for_path(&project_path, cx)
- .map(|entry| entry.id)
- .ok_or_else(|| anyhow!("No entry for path."))?;
-
- project
- .delete_entry(entry_id, true, cx)
- .ok_or_else(|| anyhow!("Failed to trash entry."))
- })
- });
-
- let task = match result {
- Ok(task) => task,
- Err(err) => return Task::ready(vec![err]),
- };
-
- cx.spawn(async move |_| match task.await {
- Ok(_) => vec![],
- Err(err) => vec![err],
- })
- }
- ProjectPanelOperation::Rename { old_path, new_path } => {
- let Some(workspace) = self.workspace.upgrade() else {
- return Task::ready(vec![anyhow!("Failed to obtain workspace.")]);
- };
-
- let result = workspace.update(cx, |workspace, cx| {
- workspace.project().update(cx, |project, cx| {
- let entry_id = project
- .entry_for_path(&new_path, cx)
- .map(|entry| entry.id)
- .ok_or_else(|| anyhow!("No entry for path."))?;
-
- Ok(project.rename_entry(entry_id, old_path.clone(), cx))
- })
- });
-
- let task = match result {
- Ok(task) => task,
- Err(err) => return Task::ready(vec![err]),
- };
-
- cx.spawn(async move |_| match task.await {
- Ok(_) => vec![],
- Err(err) => vec![err],
+ from: &ProjectPath,
+ to: &ProjectPath,
+ cx: &mut AsyncApp,
+ ) -> Result<CreatedEntry> {
+ let Some(workspace) = self.workspace.upgrade() else {
+ return Err(anyhow!("Failed to obtain workspace."));
+ };
+
+ let res: Result<Task<Result<CreatedEntry>>> = workspace.update(cx, |workspace, cx| {
+ workspace.project().update(cx, |project, cx| {
+ let entry_id = project
+ .entry_for_path(from, cx)
+ .map(|entry| entry.id)
+ .ok_or_else(|| anyhow!("No entry for path."))?;
+
+ Ok(project.rename_entry(entry_id, to.clone(), cx))
+ })
+ });
+
+ res?.await
+ }
+
+ async fn trash(&self, project_path: &ProjectPath, cx: &mut AsyncApp) -> Result<TrashedEntry> {
+ let Some(workspace) = self.workspace.upgrade() else {
+ return Err(anyhow!("Failed to obtain workspace."));
+ };
+
+ workspace
+ .update(cx, |workspace, cx| {
+ workspace.project().update(cx, |project, cx| {
+ let entry_id = project
+ .entry_for_path(&project_path, cx)
+ .map(|entry| entry.id)
+ .ok_or_else(|| anyhow!("No entry for path."))?;
+
+ project
+ .delete_entry(entry_id, true, cx)
+ .ok_or_else(|| anyhow!("Worktree entry should exist"))
})
- }
- ProjectPanelOperation::Batch(operations) => {
- // When reverting operations in a batch, we reverse the order of
- // operations to handle dependencies between them. For example,
- // if a batch contains the following order of operations:
- //
- // 1. Create `src/`
- // 2. Create `src/main.rs`
- //
- // If we first try to revert the directory creation, it would
- // fail because there's still files inside the directory.
- // Operations are also reverted sequentially in order to avoid
- // this same problem.
- let tasks: Vec<_> = operations
- .into_iter()
- .rev()
- .map(|operation| self.revert_operation(operation, cx))
- .collect();
-
- cx.spawn(async move |_| {
- let mut errors = Vec::new();
- for task in tasks {
- errors.extend(task.await);
- }
- errors
+ })?
+ .await
+ .and_then(|entry| {
+ entry.ok_or_else(|| anyhow!("When trashing we should always get a trashentry"))
+ })
+ }
+
+ async fn restore(
+ &self,
+ worktree_id: WorktreeId,
+ trashed_entry: TrashedEntry,
+ cx: &mut AsyncApp,
+ ) -> Result<ProjectPath> {
+ let Some(workspace) = self.workspace.upgrade() else {
+ return Err(anyhow!("Failed to obtain workspace."));
+ };
+
+ workspace
+ .update(cx, |workspace, cx| {
+ workspace.project().update(cx, |project, cx| {
+ project.restore_entry(worktree_id, trashed_entry, cx)
})
- }
- }
+ })
+ .await
}
- /// Displays a notification with the list of provided errors ensuring that,
- /// when more than one error is provided, which can be the case when dealing
- /// with undoing a [`crate::undo::ProjectPanelOperation::Batch`], a list is
- /// displayed with each of the errors, instead of a single message.
- fn show_errors(workspace: WeakEntity<Workspace>, messages: Vec<SharedString>, cx: &mut App) {
+ /// Displays a notification with the provided `title` and `error`.
+ fn show_error(
+ title: impl Into<SharedString>,
+ workspace: WeakEntity<Workspace>,
+ error: String,
+ cx: &mut AsyncApp,
+ ) {
workspace
.update(cx, move |workspace, cx| {
let notification_id =
NotificationId::Named(SharedString::new_static("project_panel_undo"));
workspace.show_notification(notification_id, cx, move |cx| {
- cx.new(|cx| {
- if let [err] = messages.as_slice() {
- MessageNotification::new(err.to_string(), cx)
- .with_title("Failed to undo Project Panel Operation")
- } else {
- MessageNotification::new_from_builder(cx, move |_, _| {
- v_flex()
- .gap_1()
- .children(
- messages
- .iter()
- .map(|message| Label::new(format!("- {message}"))),
- )
- .into_any_element()
- })
- .with_title("Failed to undo Project Panel Operations")
- }
- })
+ cx.new(|cx| MessageNotification::new(error, cx).with_title(title))
})
})
.ok();
}
}
-
-#[cfg(test)]
-mod test {
- use crate::{
- ProjectPanel, project_panel_tests,
- undo::{ProjectPanelOperation, UndoManager},
- };
- use gpui::{Entity, TestAppContext, VisualTestContext};
- use project::{FakeFs, Project, ProjectPath};
- use std::sync::Arc;
- use util::rel_path::rel_path;
- use workspace::MultiWorkspace;
-
- struct TestContext {
- project: Entity<Project>,
- panel: Entity<ProjectPanel>,
- }
-
- async fn init_test(cx: &mut TestAppContext) -> TestContext {
- project_panel_tests::init_test(cx);
-
- let fs = FakeFs::new(cx.executor());
- let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
- let window =
- cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
- let workspace = window
- .read_with(cx, |mw, _| mw.workspace().clone())
- .unwrap();
- let cx = &mut VisualTestContext::from_window(window.into(), cx);
- let panel = workspace.update_in(cx, ProjectPanel::new);
- cx.run_until_parked();
-
- TestContext { project, panel }
- }
-
- #[gpui::test]
- async fn test_limit(cx: &mut TestAppContext) {
- let test_context = init_test(cx).await;
- let worktree_id = test_context.project.update(cx, |project, cx| {
- project.visible_worktrees(cx).next().unwrap().read(cx).id()
- });
-
- let build_create_operation = |file_name: &str| ProjectPanelOperation::Create {
- project_path: ProjectPath {
- path: Arc::from(rel_path(file_name)),
- worktree_id,
- },
- };
-
- // Since we're updating the `ProjectPanel`'s undo manager with one whose
- // limit is 3 operations, we only need to create 4 operations which
- // we'll record, in order to confirm that the oldest operation is
- // evicted.
- let operation_a = build_create_operation("file_a.txt");
- let operation_b = build_create_operation("file_b.txt");
- let operation_c = build_create_operation("file_c.txt");
- let operation_d = build_create_operation("file_d.txt");
-
- test_context.panel.update(cx, move |panel, _cx| {
- panel.undo_manager = UndoManager::new_with_limit(panel.workspace.clone(), 3);
- panel.undo_manager.record(operation_a);
- panel.undo_manager.record(operation_b);
- panel.undo_manager.record(operation_c);
- panel.undo_manager.record(operation_d);
-
- assert_eq!(panel.undo_manager.stack.len(), 3);
- });
- }
-}
@@ -8,7 +8,8 @@ use clock::ReplicaId;
use collections::{HashMap, HashSet, VecDeque};
use encoding_rs::Encoding;
use fs::{
- Fs, MTime, PathEvent, PathEventKind, RemoveOptions, Watcher, copy_recursive, read_dir_items,
+ Fs, MTime, PathEvent, PathEventKind, RemoveOptions, TrashedEntry, Watcher, copy_recursive,
+ read_dir_items,
};
use futures::{
FutureExt as _, Stream, StreamExt,
@@ -70,7 +71,7 @@ use text::{LineEnding, Rope};
use util::{
ResultExt, maybe,
paths::{PathMatcher, PathStyle, SanitizedPath, home_dir},
- rel_path::RelPath,
+ rel_path::{RelPath, RelPathBuf},
};
pub use worktree_settings::WorktreeSettings;
@@ -848,7 +849,7 @@ impl Worktree {
entry_id: ProjectEntryId,
trash: bool,
cx: &mut Context<Worktree>,
- ) -> Option<Task<Result<()>>> {
+ ) -> Option<Task<Result<Option<TrashedEntry>>>> {
let task = match self {
Worktree::Local(this) => this.delete_entry(entry_id, trash, cx),
Worktree::Remote(this) => this.delete_entry(entry_id, trash, cx),
@@ -870,6 +871,20 @@ impl Worktree {
Some(task)
}
+ pub async fn restore_entry(
+ trash_entry: TrashedEntry,
+ worktree: Entity<Self>,
+ cx: &mut AsyncApp,
+ ) -> Result<RelPathBuf> {
+ let is_local = worktree.read_with(cx, |this, _| this.is_local());
+ if is_local {
+ LocalWorktree::restore_entry(trash_entry, worktree, cx).await
+ } else {
+ // TODO(dino): Add support for restoring entries in remote worktrees.
+ Err(anyhow!("Unsupported"))
+ }
+ }
+
fn get_children_ids_recursive(&self, path: &RelPath, ids: &mut Vec<ProjectEntryId>) {
let children_iter = self.child_entries(path);
for child in children_iter {
@@ -1685,35 +1700,46 @@ impl LocalWorktree {
entry_id: ProjectEntryId,
trash: bool,
cx: &Context<Worktree>,
- ) -> Option<Task<Result<()>>> {
+ ) -> Option<Task<Result<Option<TrashedEntry>>>> {
let entry = self.entry_for_id(entry_id)?.clone();
let abs_path = self.absolutize(&entry.path);
let fs = self.fs.clone();
let delete = cx.background_spawn(async move {
- if entry.is_file() {
- if trash {
- fs.trash_file(&abs_path).await?;
- } else {
+ let trashed_entry = match (entry.is_file(), trash) {
+ (true, true) => Some(fs.trash(&abs_path, Default::default()).await?),
+ (false, true) => Some(
+ fs.trash(
+ &abs_path,
+ RemoveOptions {
+ recursive: true,
+ ignore_if_not_exists: false,
+ },
+ )
+ .await?,
+ ),
+ (true, false) => {
fs.remove_file(&abs_path, Default::default()).await?;
+ None
}
- } else if trash {
- fs.trash_dir(&abs_path).await?;
- } else {
- fs.remove_dir(
- &abs_path,
- RemoveOptions {
- recursive: true,
- ignore_if_not_exists: false,
- },
- )
- .await?;
- }
- anyhow::Ok(entry.path)
+ (false, false) => {
+ fs.remove_dir(
+ &abs_path,
+ RemoveOptions {
+ recursive: true,
+ ignore_if_not_exists: false,
+ },
+ )
+ .await?;
+ None
+ }
+ };
+
+ anyhow::Ok((trashed_entry, entry.path))
});
Some(cx.spawn(async move |this, cx| {
- let path = delete.await?;
+ let (trashed_entry, path) = delete.await?;
this.update(cx, |this, _| {
this.as_local_mut()
.unwrap()
@@ -1721,10 +1747,39 @@ impl LocalWorktree {
})?
.recv()
.await;
- Ok(())
+
+ Ok(trashed_entry)
}))
}
+ pub async fn restore_entry(
+ trash_entry: TrashedEntry,
+ this: Entity<Worktree>,
+ cx: &mut AsyncApp,
+ ) -> Result<RelPathBuf> {
+ let Some((fs, worktree_abs_path, path_style)) = this.read_with(cx, |this, _cx| {
+ let local_worktree = match this {
+ Worktree::Local(local_worktree) => local_worktree,
+ Worktree::Remote(_) => return None,
+ };
+
+ let fs = local_worktree.fs.clone();
+ let path_style = local_worktree.path_style();
+ Some((fs, Arc::clone(local_worktree.abs_path()), path_style))
+ }) else {
+ return Err(anyhow!("Localworktree should not change into a remote one"));
+ };
+
+ let path_buf = fs.restore(trash_entry).await?;
+ let path = path_buf
+ .strip_prefix(worktree_abs_path)
+ .context("Could not strip prefix")?;
+ let path = RelPath::new(&path, path_style)?;
+ let path = path.into_owned();
+
+ Ok(path)
+ }
+
pub fn copy_external_entries(
&self,
target_directory: Arc<RelPath>,
@@ -2092,7 +2147,7 @@ impl RemoteWorktree {
entry_id: ProjectEntryId,
trash: bool,
cx: &Context<Worktree>,
- ) -> Option<Task<Result<()>>> {
+ ) -> Option<Task<Result<Option<TrashedEntry>>>> {
let response = self.client.request(proto::DeleteProjectEntry {
project_id: self.project_id,
entry_id: entry_id.to_proto(),
@@ -2112,6 +2167,12 @@ impl RemoteWorktree {
let snapshot = &mut this.background_snapshot.lock().0;
snapshot.delete_entry(entry_id);
this.snapshot = snapshot.clone();
+
+ // TODO: How can we actually track the deleted entry when
+ // working in remote? We likely only need to keep this
+ // information on the remote side in order to support restoring
+ // the trashed file.
+ None
})
}))
}
@@ -2578,15 +2639,14 @@ impl Snapshot {
}
pub fn entry_for_path(&self, path: &RelPath) -> Option<&Entry> {
- self.traverse_from_path(true, true, true, path)
- .entry()
- .and_then(|entry| {
- if entry.path.as_ref() == path {
- Some(entry)
- } else {
- None
- }
- })
+ let entry = self.traverse_from_path(true, true, true, path).entry();
+ entry.and_then(|entry| {
+ if entry.path.as_ref() == path {
+ Some(entry)
+ } else {
+ None
+ }
+ })
}
/// Resolves a path to an executable using the following heuristics:
@@ -2207,7 +2207,14 @@ fn randomly_mutate_worktree(
match rng.random_range(0_u32..100) {
0..=33 if entry.path.as_ref() != RelPath::empty() => {
log::info!("deleting entry {:?} ({})", entry.path, entry.id.to_usize());
- worktree.delete_entry(entry.id, false, cx).unwrap()
+ let task = worktree
+ .delete_entry(entry.id, false, cx)
+ .unwrap_or_else(|| Task::ready(Ok(None)));
+
+ cx.background_spawn(async move {
+ task.await?;
+ Ok(())
+ })
}
_ => {
if entry.is_dir() {