@@ -3,7 +3,7 @@ mod go_locator {
use dap::{DapLocator, adapters::DebugAdapterName};
use gpui::TestAppContext;
use project::debugger::locators::go::{DelveLaunchRequest, GoLocator};
- use task::{HideStrategy, RevealStrategy, RevealTarget, Shell, TaskTemplate};
+ use task::{HideStrategy, RevealStrategy, RevealTarget, SaveStrategy, Shell, TaskTemplate};
#[gpui::test]
async fn test_create_scenario_for_go_build(_: &mut TestAppContext) {
let locator = GoLocator;
@@ -22,6 +22,7 @@ mod go_locator {
tags: vec![],
show_summary: true,
show_command: true,
+ save: SaveStrategy::default(),
};
let scenario = locator
@@ -49,6 +50,7 @@ mod go_locator {
tags: vec![],
show_summary: true,
show_command: true,
+ save: SaveStrategy::default(),
};
let scenario = locator
@@ -187,6 +189,7 @@ mod go_locator {
tags: vec![],
show_summary: true,
show_command: true,
+ save: SaveStrategy::default(),
};
let scenario = locator
@@ -221,6 +224,7 @@ mod python_locator {
shell: task::Shell::System,
show_summary: false,
show_command: false,
+ save: task::SaveStrategy::default(),
};
let expected_scenario = DebugScenario {
@@ -72,6 +72,9 @@ pub struct TaskTemplate {
/// Whether to show the command line in the task output.
#[serde(default = "default_true")]
pub show_command: bool,
+ /// Which edited buffers to save before running the task.
+ #[serde(default)]
+ pub save: SaveStrategy,
}
#[derive(Deserialize, Eq, PartialEq, Clone, Debug)]
@@ -109,6 +112,19 @@ pub enum HideStrategy {
OnSuccess,
}
+/// Which edited buffers to save before running a task.
+#[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum SaveStrategy {
+ /// Save all edited buffers.
+ #[default]
+ All,
+ /// Save the current buffer.
+ Current,
+ /// Don't save any buffers.
+ None,
+}
+
/// A group of Tasks defined in a JSON file.
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
pub struct TaskTemplates(pub Vec<TaskTemplate>);
@@ -271,6 +287,7 @@ impl TaskTemplate {
show_summary: self.show_summary,
show_command: self.show_command,
show_rerun: true,
+ save: self.save,
},
})
}
@@ -1072,7 +1089,6 @@ mod tests {
command,
..TaskTemplate::default()
};
-
assert!(task.unknown_variables().is_empty());
}
}
@@ -6,11 +6,13 @@ use language::Buffer;
use project::{TaskSourceKind, WorktreeId};
use remote::ConnectionState;
use task::{
- DebugScenario, ResolvedTask, SharedTaskContext, SpawnInTerminal, TaskContext, TaskTemplate,
+ DebugScenario, ResolvedTask, SaveStrategy, SharedTaskContext, SpawnInTerminal, TaskContext,
+ TaskTemplate,
};
use ui::Window;
+use util::TryFutureExt;
-use crate::{Toast, Workspace, notifications::NotificationId};
+use crate::{SaveIntent, Toast, Workspace, notifications::NotificationId};
impl Workspace {
pub fn schedule_task(
@@ -73,28 +75,57 @@ impl Workspace {
});
}
- if let Some(terminal_provider) = self.terminal_provider.as_ref() {
- let task_status = terminal_provider.spawn(spawn_in_terminal, window, cx);
-
- let task = cx.spawn(async |w, cx| {
- let res = cx.background_spawn(task_status).await;
- match res {
- Some(Ok(status)) => {
- if status.success() {
- log::debug!("Task spawn succeeded");
- } else {
- log::debug!("Task spawn failed, code: {:?}", status.code());
- }
+ if self.terminal_provider.is_some() {
+ let task = cx.spawn_in(window, async move |workspace, cx| {
+ let save_action = match spawn_in_terminal.save {
+ SaveStrategy::All => {
+ let save_all = workspace.update_in(cx, |workspace, window, cx| {
+ let task = workspace.save_all_internal(SaveIntent::SaveAll, window, cx);
+ // Discard the returned bool so this arm's future type matches the other arms
+ cx.background_spawn(async { task.await.map(|_| ()) })
+ });
+ save_all.ok()
}
- Some(Err(e)) => {
- log::error!("Task spawn failed: {e:#}");
- _ = w.update(cx, |w, cx| {
- let id = NotificationId::unique::<ResolvedTask>();
- w.show_toast(Toast::new(id, format!("Task spawn failed: {e}")), cx);
- })
+ SaveStrategy::Current => {
+ let save_current = workspace.update_in(cx, |workspace, window, cx| {
+ workspace.save_active_item(SaveIntent::SaveAll, window, cx)
+ });
+ save_current.ok()
}
- None => log::debug!("Task spawn got cancelled"),
+ SaveStrategy::None => None,
};
+ if let Some(save_action) = save_action {
+ save_action.log_err().await;
+ }
+
+ let spawn_task = workspace.update_in(cx, |workspace, window, cx| {
+ workspace
+ .terminal_provider
+ .as_ref()
+ .map(|terminal_provider| {
+ terminal_provider.spawn(spawn_in_terminal, window, cx)
+ })
+ });
+ if let Some(spawn_task) = spawn_task.ok().flatten() {
+ let res = cx.background_spawn(spawn_task).await;
+ match res {
+ Some(Ok(status)) => {
+ if status.success() {
+ log::debug!("Task spawn succeeded");
+ } else {
+ log::debug!("Task spawn failed, code: {:?}", status.code());
+ }
+ }
+ Some(Err(e)) => {
+ log::error!("Task spawn failed: {e:#}");
+ _ = workspace.update(cx, |w, cx| {
+ let id = NotificationId::unique::<ResolvedTask>();
+ w.show_toast(Toast::new(id, format!("Task spawn failed: {e}")), cx);
+ })
+ }
+ None => log::debug!("Task spawn got cancelled"),
+ };
+ }
});
self.scheduled_tasks.push(task);
}
@@ -134,3 +165,166 @@ impl Workspace {
}
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::{
+ TerminalProvider,
+ item::test::{TestItem, TestProjectItem},
+ register_serializable_item,
+ };
+ use gpui::{App, TestAppContext};
+ use parking_lot::Mutex;
+ use project::{FakeFs, Project, TaskSourceKind};
+ use serde_json::json;
+ use std::sync::Arc;
+ use task::TaskTemplate;
+
+ struct Fixture {
+ workspace: Entity<Workspace>,
+ item: Entity<TestItem>,
+ task: ResolvedTask,
+ dirty_before_spawn: Arc<Mutex<Option<bool>>>,
+ }
+
+ #[gpui::test]
+ async fn test_schedule_resolved_task_save_all(cx: &mut TestAppContext) {
+ let (fixture, cx) = create_fixture(cx, SaveStrategy::All).await;
+ fixture.workspace.update_in(cx, |workspace, window, cx| {
+ workspace.schedule_resolved_task(
+ TaskSourceKind::UserInput,
+ fixture.task,
+ false,
+ window,
+ cx,
+ );
+ });
+ cx.executor().run_until_parked();
+
+ assert_eq!(*fixture.dirty_before_spawn.lock(), Some(false));
+ assert!(cx.read(|cx| !fixture.item.read(cx).is_dirty));
+ }
+
+ #[gpui::test]
+ async fn test_schedule_resolved_task_save_current(cx: &mut TestAppContext) {
+ let (fixture, cx) = create_fixture(cx, SaveStrategy::Current).await;
+ // Add a second inactive dirty item
+ let inactive = add_test_item(&fixture.workspace, "file2.txt", false, cx);
+ fixture.workspace.update_in(cx, |workspace, window, cx| {
+ workspace.schedule_resolved_task(
+ TaskSourceKind::UserInput,
+ fixture.task,
+ false,
+ window,
+ cx,
+ );
+ });
+ cx.executor().run_until_parked();
+
+ // The active item (fixture.item) should be saved
+ assert_eq!(*fixture.dirty_before_spawn.lock(), Some(false));
+ assert!(cx.read(|cx| !fixture.item.read(cx).is_dirty));
+ // The inactive item should not be saved
+ assert!(cx.read(|cx| inactive.read(cx).is_dirty));
+ }
+
+ #[gpui::test]
+ async fn test_schedule_resolved_task_save_none(cx: &mut TestAppContext) {
+ let (fixture, cx) = create_fixture(cx, SaveStrategy::None).await;
+ fixture.workspace.update_in(cx, |workspace, window, cx| {
+ workspace.schedule_resolved_task(
+ TaskSourceKind::UserInput,
+ fixture.task,
+ false,
+ window,
+ cx,
+ );
+ });
+ cx.executor().run_until_parked();
+
+ assert_eq!(*fixture.dirty_before_spawn.lock(), Some(true));
+ assert!(cx.read(|cx| fixture.item.read(cx).is_dirty));
+ }
+
+ async fn create_fixture(
+ cx: &mut TestAppContext,
+ save_strategy: SaveStrategy,
+ ) -> (Fixture, &mut gpui::VisualTestContext) {
+ cx.update(|cx| {
+ let settings_store = settings::SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ theme::init(theme::LoadThemes::JustBase, cx);
+ register_serializable_item::<TestItem>(cx);
+ });
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree("/root", json!({ "file.txt": "dirty" }))
+ .await;
+ let project = Project::test(fs.clone(), ["/root".as_ref()], cx).await;
+ let (workspace, cx) =
+ cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
+
+ // Add a dirty item to the workspace
+ let item = add_test_item(&workspace, "file.txt", true, cx);
+
+ let template = TaskTemplate {
+ label: "test".to_string(),
+ command: "echo".to_string(),
+ save: save_strategy,
+ ..Default::default()
+ };
+ let task = template
+ .resolve_task("test", &task::TaskContext::default())
+ .unwrap();
+ let dirty_before_spawn: Arc<Mutex<Option<bool>>> = Arc::default();
+ let terminal_provider = Box::new(TestTerminalProvider {
+ item: item.clone(),
+ dirty_before_spawn: dirty_before_spawn.clone(),
+ });
+ workspace.update(cx, |workspace, _| {
+ workspace.terminal_provider = Some(terminal_provider);
+ });
+ let fixture = Fixture {
+ workspace,
+ item,
+ task,
+ dirty_before_spawn,
+ };
+ (fixture, cx)
+ }
+
+ fn add_test_item(
+ workspace: &Entity<Workspace>,
+ name: &str,
+ active: bool,
+ cx: &mut gpui::VisualTestContext,
+ ) -> Entity<TestItem> {
+ let item = cx.new(|cx| {
+ TestItem::new(cx)
+ .with_dirty(true)
+ .with_project_items(&[TestProjectItem::new(1, name, cx)])
+ });
+ workspace.update_in(cx, |workspace, window, cx| {
+ let pane = workspace.active_pane().clone();
+ workspace.add_item(pane, Box::new(item.clone()), None, true, active, window, cx);
+ });
+ item
+ }
+
+ struct TestTerminalProvider {
+ item: Entity<TestItem>,
+ dirty_before_spawn: Arc<Mutex<Option<bool>>>,
+ }
+
+ impl TerminalProvider for TestTerminalProvider {
+ fn spawn(
+ &self,
+ _task: task::SpawnInTerminal,
+ _window: &mut ui::Window,
+ cx: &mut App,
+ ) -> Task<Option<Result<ExitStatus>>> {
+ *self.dirty_before_spawn.lock() = Some(cx.read_entity(&self.item, |e, _| e.is_dirty));
+ Task::ready(Some(Ok(ExitStatus::default())))
+ }
+ }
+}