Created by Kirill Bulatov and Conrad Irwin
Release Notes:
- Enabled Zed tasks on remote projects with an SSH connection string specified
---------
Co-authored-by: Conrad Irwin <conrad@zed.dev>
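
For orientation: task context collection is now asynchronous so it can round-trip through the dev server. A hedged sketch of how a workspace action consumes the new API (names are taken from the hunks below; the actual scheduling call is elided):

// Sketch only: task_context now returns a gpui Task instead of a plain TaskContext.
// On local projects it resolves immediately; on SSH-backed dev server projects the
// context is computed on the dev server and forwarded back through collab.
let context_task = task_context(workspace, cx);
cx.spawn(|workspace, mut cx| async move {
    let task_context = context_task.await;
    workspace
        .update(&mut cx, |_workspace, _cx| {
            // schedule_task(_workspace, task_source_kind, &original_task, &task_context, false, _cx)
        })
        .ok();
})
.detach();
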
Cargo.lock | 1
crates/collab/src/db/queries/projects.rs | 30 +
crates/collab/src/rpc.rs | 31 +
crates/editor/src/editor.rs | 193 ++++++-----
crates/editor/src/tasks.rs | 89 +----
crates/project/src/project.rs | 405 +++++++++++++++++++++++++
crates/project/src/project_tests.rs | 277 ++++++++--------
crates/project/src/task_inventory.rs | 266 +++++++++-------
crates/rpc/proto/zed.proto | 75 ++++
crates/rpc/src/proto.rs | 10
crates/task/src/lib.rs | 45 ++
crates/tasks_ui/Cargo.toml | 1
crates/tasks_ui/src/lib.rs | 251 +++++++++------
crates/tasks_ui/src/modal.rs | 131 +++++--
crates/workspace/src/tasks.rs | 39 --
crates/zed/src/zed.rs | 6
16 files changed, 1,250 insertions(+), 600 deletions(-)
@@ -10161,6 +10161,7 @@ dependencies = [
name = "tasks_ui"
version = "0.1.0"
dependencies = [
+ "anyhow",
"editor",
"file_icons",
"fuzzy",
@@ -1101,6 +1101,36 @@ impl Database {
.map(|guard| guard.into_inner())
}
+ /// Returns the host connection for a request to join a shared project.
+ pub async fn host_for_owner_project_request(
+ &self,
+ project_id: ProjectId,
+ _connection_id: ConnectionId,
+ user_id: UserId,
+ ) -> Result<ConnectionId> {
+ self.project_transaction(project_id, |tx| async move {
+ let (project, dev_server_project) = project::Entity::find_by_id(project_id)
+ .find_also_related(dev_server_project::Entity)
+ .one(&*tx)
+ .await?
+ .ok_or_else(|| anyhow!("no such project"))?;
+
+ let Some(dev_server_project) = dev_server_project else {
+ return Err(anyhow!("not a dev server project"))?;
+ };
+ let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id)
+ .one(&*tx)
+ .await?
+ .ok_or_else(|| anyhow!("no such dev server"))?;
+ if dev_server.user_id != user_id {
+ return Err(anyhow!("not your project"))?;
+ }
+ project.host_connection()
+ })
+ .await
+ .map(|guard| guard.into_inner())
+ }
+
pub async fn connections_for_buffer_update(
&self,
project_id: ProjectId,
@@ -446,6 +446,12 @@ impl Server {
.add_message_handler(update_language_server)
.add_message_handler(update_diagnostic_summary)
.add_message_handler(update_worktree_settings)
+ .add_request_handler(user_handler(
+ forward_project_request_for_owner::<proto::TaskContextForLocation>,
+ ))
+ .add_request_handler(user_handler(
+ forward_project_request_for_owner::<proto::TaskTemplates>,
+ ))
.add_request_handler(user_handler(
forward_read_only_project_request::<proto::GetHover>,
))
@@ -2889,6 +2895,31 @@ where
Ok(())
}
+/// forward a project request to the dev server. Only allowed
+/// if it's your dev server.
+async fn forward_project_request_for_owner<T>(
+ request: T,
+ response: Response<T>,
+ session: UserSession,
+) -> Result<()>
+where
+ T: EntityMessage + RequestMessage,
+{
+ let project_id = ProjectId::from_proto(request.remote_entity_id());
+
+ let host_connection_id = session
+ .db()
+ .await
+ .host_for_owner_project_request(project_id, session.connection_id, session.user_id())
+ .await?;
+ let payload = session
+ .peer
+ .forward_request(session.connection_id, host_connection_id, request)
+ .await?;
+ response.send(payload)?;
+ Ok(())
+}
+
/// forward a project request to the host. These requests are disallowed
/// for guests.
async fn forward_mutating_project_request<T>(
@@ -4011,28 +4011,29 @@ impl Editor {
let deployed_from_indicator = action.deployed_from_indicator;
let mut task = self.code_actions_task.take();
let action = action.clone();
- cx.spawn(|this, mut cx| async move {
+ cx.spawn(|editor, mut cx| async move {
while let Some(prev_task) = task {
prev_task.await;
- task = this.update(&mut cx, |this, _| this.code_actions_task.take())?;
+ task = editor.update(&mut cx, |this, _| this.code_actions_task.take())?;
}
- let spawned_test_task = this.update(&mut cx, |this, cx| {
- if this.focus_handle.is_focused(cx) {
+ let spawned_test_task = editor.update(&mut cx, |editor, cx| {
+ if editor.focus_handle.is_focused(cx) {
let multibuffer_point = action
.deployed_from_indicator
.map(|row| DisplayPoint::new(row, 0).to_point(&snapshot))
- .unwrap_or_else(|| this.selections.newest::<Point>(cx).head());
+ .unwrap_or_else(|| editor.selections.newest::<Point>(cx).head());
let (buffer, buffer_row) = snapshot
.buffer_snapshot
.buffer_line_for_row(MultiBufferRow(multibuffer_point.row))
.and_then(|(buffer_snapshot, range)| {
- this.buffer
+ editor
+ .buffer
.read(cx)
.buffer(buffer_snapshot.remote_id())
.map(|buffer| (buffer, range.start.row))
})?;
- let (_, code_actions) = this
+ let (_, code_actions) = editor
.available_code_actions
.clone()
.and_then(|(location, code_actions)| {
@@ -4047,7 +4048,7 @@ impl Editor {
})
.unzip();
let buffer_id = buffer.read(cx).remote_id();
- let tasks = this
+ let tasks = editor
.tasks
.get(&(buffer_id, buffer_row))
.map(|t| Arc::new(t.to_owned()));
@@ -4055,81 +4056,100 @@ impl Editor {
return None;
}
- this.completion_tasks.clear();
- this.discard_inline_completion(false, cx);
- let tasks = tasks.as_ref().zip(this.workspace.clone()).and_then(
- |(tasks, (workspace, _))| {
- let position = Point::new(buffer_row, tasks.column);
- let range_start = buffer.read(cx).anchor_at(position, Bias::Right);
- let location = Location {
- buffer: buffer.clone(),
- range: range_start..range_start,
- };
- // Fill in the environmental variables from the tree-sitter captures
- let mut captured_task_variables = TaskVariables::default();
- for (capture_name, value) in tasks.extra_variables.clone() {
- captured_task_variables.insert(
- task::VariableName::Custom(capture_name.into()),
- value.clone(),
- );
- }
-
- workspace
- .update(cx, |workspace, cx| {
- tasks::task_context_for_location(
+ editor.completion_tasks.clear();
+ editor.discard_inline_completion(false, cx);
+ let task_context =
+ tasks
+ .as_ref()
+ .zip(editor.project.clone())
+ .map(|(tasks, project)| {
+ let position = Point::new(buffer_row, tasks.column);
+ let range_start = buffer.read(cx).anchor_at(position, Bias::Right);
+ let location = Location {
+ buffer: buffer.clone(),
+ range: range_start..range_start,
+ };
+ // Fill in the environmental variables from the tree-sitter captures
+ let mut captured_task_variables = TaskVariables::default();
+ for (capture_name, value) in tasks.extra_variables.clone() {
+ captured_task_variables.insert(
+ task::VariableName::Custom(capture_name.into()),
+ value.clone(),
+ );
+ }
+ project.update(cx, |project, cx| {
+ project.task_context_for_location(
captured_task_variables,
- workspace,
location,
cx,
)
})
- .ok()
- .flatten()
- .map(|task_context| {
- Arc::new(ResolvedTasks {
- templates: tasks
- .templates
- .iter()
- .filter_map(|(kind, template)| {
- template
- .resolve_task(&kind.to_id_base(), &task_context)
- .map(|task| (kind.clone(), task))
- })
- .collect(),
- position: snapshot.buffer_snapshot.anchor_before(
- Point::new(multibuffer_point.row, tasks.column),
- ),
- })
+ });
+
+ Some(cx.spawn(|editor, mut cx| async move {
+ let task_context = match task_context {
+ Some(task_context) => task_context.await,
+ None => None,
+ };
+ let resolved_tasks =
+ tasks.zip(task_context).map(|(tasks, task_context)| {
+ Arc::new(ResolvedTasks {
+ templates: tasks
+ .templates
+ .iter()
+ .filter_map(|(kind, template)| {
+ template
+ .resolve_task(&kind.to_id_base(), &task_context)
+ .map(|task| (kind.clone(), task))
+ })
+ .collect(),
+ position: snapshot.buffer_snapshot.anchor_before(Point::new(
+ multibuffer_point.row,
+ tasks.column,
+ )),
})
- },
- );
- let spawn_straight_away = tasks
- .as_ref()
- .map_or(false, |tasks| tasks.templates.len() == 1)
- && code_actions
+ });
+ let spawn_straight_away = resolved_tasks
.as_ref()
- .map_or(true, |actions| actions.is_empty());
- *this.context_menu.write() = Some(ContextMenu::CodeActions(CodeActionsMenu {
- buffer,
- actions: CodeActionContents {
- tasks,
- actions: code_actions,
- },
- selected_item: Default::default(),
- scroll_handle: UniformListScrollHandle::default(),
- deployed_from_indicator,
- }));
- if spawn_straight_away {
- if let Some(task) =
- this.confirm_code_action(&ConfirmCodeAction { item_ix: Some(0) }, cx)
+ .map_or(false, |tasks| tasks.templates.len() == 1)
+ && code_actions
+ .as_ref()
+ .map_or(true, |actions| actions.is_empty());
+ if let Some(task) = editor
+ .update(&mut cx, |editor, cx| {
+ *editor.context_menu.write() =
+ Some(ContextMenu::CodeActions(CodeActionsMenu {
+ buffer,
+ actions: CodeActionContents {
+ tasks: resolved_tasks,
+ actions: code_actions,
+ },
+ selected_item: Default::default(),
+ scroll_handle: UniformListScrollHandle::default(),
+ deployed_from_indicator,
+ }));
+ if spawn_straight_away {
+ if let Some(task) = editor.confirm_code_action(
+ &ConfirmCodeAction { item_ix: Some(0) },
+ cx,
+ ) {
+ cx.notify();
+ return task;
+ }
+ }
+ cx.notify();
+ Task::ready(Ok(()))
+ })
+ .ok()
{
- cx.notify();
- return Some(task);
+ task.await
+ } else {
+ Ok(())
}
- }
- cx.notify();
+ }))
+ } else {
+ Some(Task::ready(Ok(())))
}
- Some(Task::ready(Ok(())))
})?;
if let Some(task) = spawned_test_task {
task.await?;
@@ -7897,11 +7917,14 @@ impl Editor {
let Some(project) = project else {
return;
};
- if project
- .update(&mut cx, |this, _| this.is_remote())
- .unwrap_or(true)
- {
- // Do not display any test indicators in remote projects.
+
+ let hide_runnables = project
+ .update(&mut cx, |project, cx| {
+ // Do not display any test indicators in non-dev server remote projects.
+ project.is_remote() && project.ssh_connection_string(cx).is_none()
+ })
+ .unwrap_or(true);
+ if hide_runnables {
return;
}
let new_rows =
@@ -7940,10 +7963,8 @@ impl Editor {
runnable_ranges
.into_iter()
.filter_map(|mut runnable| {
- let (tasks, _) = cx
- .update(|cx| {
- Self::resolve_runnable(project.clone(), &mut runnable.runnable, cx)
- })
+ let tasks = cx
+ .update(|cx| Self::templates_with_tags(&project, &mut runnable.runnable, cx))
.ok()?;
if tasks.is_empty() {
return None;
@@ -7974,11 +7995,11 @@ impl Editor {
.collect()
}
- fn resolve_runnable(
- project: Model<Project>,
+ fn templates_with_tags(
+ project: &Model<Project>,
runnable: &mut Runnable,
cx: &WindowContext<'_>,
- ) -> (Vec<(TaskSourceKind, TaskTemplate)>, Option<WorktreeId>) {
+ ) -> Vec<(TaskSourceKind, TaskTemplate)> {
let (inventory, worktree_id) = project.read_with(cx, |project, cx| {
let worktree_id = project
.buffer_for_id(runnable.buffer)
@@ -8015,7 +8036,7 @@ impl Editor {
}
}
- (tags, worktree_id)
+ tags
}
pub fn move_to_enclosing_bracket(
@@ -1,58 +1,34 @@
use crate::Editor;
-use anyhow::Context;
-use gpui::{Model, WindowContext};
-use language::ContextProvider;
-use project::{BasicContextProvider, Location, Project};
+use gpui::{Task as AsyncTask, WindowContext};
+use project::Location;
use task::{TaskContext, TaskVariables, VariableName};
use text::{Point, ToOffset, ToPoint};
-use util::ResultExt;
use workspace::Workspace;
-pub(crate) fn task_context_for_location(
- captured_variables: TaskVariables,
- workspace: &Workspace,
- location: Location,
- cx: &mut WindowContext<'_>,
-) -> Option<TaskContext> {
- let cwd = workspace::tasks::task_cwd(workspace, cx)
- .log_err()
- .flatten();
-
- let mut task_variables = combine_task_variables(
- captured_variables,
- location,
- workspace.project().clone(),
- cx,
- )
- .log_err()?;
- // Remove all custom entries starting with _, as they're not intended for use by the end user.
- task_variables.sweep();
-
- Some(TaskContext {
- cwd,
- task_variables,
- })
-}
-
fn task_context_with_editor(
- workspace: &Workspace,
editor: &mut Editor,
cx: &mut WindowContext<'_>,
-) -> Option<TaskContext> {
+) -> AsyncTask<Option<TaskContext>> {
+ let Some(project) = editor.project.clone() else {
+ return AsyncTask::ready(None);
+ };
let (selection, buffer, editor_snapshot) = {
let mut selection = editor.selections.newest::<Point>(cx);
if editor.selections.line_mode {
selection.start = Point::new(selection.start.row, 0);
selection.end = Point::new(selection.end.row + 1, 0);
}
- let (buffer, _, _) = editor
+ let Some((buffer, _, _)) = editor
.buffer()
.read(cx)
- .point_to_buffer_offset(selection.start, cx)?;
+ .point_to_buffer_offset(selection.start, cx)
+ else {
+ return AsyncTask::ready(None);
+ };
let snapshot = editor.snapshot(cx);
- Some((selection, buffer, snapshot))
- }?;
+ (selection, buffer, snapshot)
+ };
let selection_range = selection.range();
let start = editor_snapshot
.display_snapshot
@@ -94,42 +70,23 @@ fn task_context_with_editor(
}
variables
};
- task_context_for_location(captured_variables, workspace, location.clone(), cx)
+
+ let context_task = project.update(cx, |project, cx| {
+ project.task_context_for_location(captured_variables, location.clone(), cx)
+ });
+ cx.spawn(|_| context_task)
}
-pub fn task_context(workspace: &Workspace, cx: &mut WindowContext<'_>) -> TaskContext {
+pub fn task_context(workspace: &Workspace, cx: &mut WindowContext<'_>) -> AsyncTask<TaskContext> {
let Some(editor) = workspace
.active_item(cx)
.and_then(|item| item.act_as::<Editor>(cx))
else {
- return Default::default();
+ return AsyncTask::ready(TaskContext::default());
};
editor.update(cx, |editor, cx| {
- task_context_with_editor(workspace, editor, cx).unwrap_or_default()
+ let context_task = task_context_with_editor(editor, cx);
+ cx.background_executor()
+ .spawn(async move { context_task.await.unwrap_or_default() })
})
}
-
-fn combine_task_variables(
- mut captured_variables: TaskVariables,
- location: Location,
- project: Model<Project>,
- cx: &mut WindowContext<'_>,
-) -> anyhow::Result<TaskVariables> {
- let language_context_provider = location
- .buffer
- .read(cx)
- .language()
- .and_then(|language| language.context_provider());
- let baseline = BasicContextProvider::new(project)
- .build_context(&captured_variables, &location, cx)
- .context("building basic default context")?;
- captured_variables.extend(baseline);
- if let Some(provider) = language_context_provider {
- captured_variables.extend(
- provider
- .build_context(&captured_variables, &location, cx)
- .context("building provider context ")?,
- );
- }
- Ok(captured_variables)
-}
@@ -36,7 +36,7 @@ use git::{blame::Blame, repository::GitRepository};
use globset::{Glob, GlobSet, GlobSetBuilder};
use gpui::{
AnyModel, AppContext, AsyncAppContext, BackgroundExecutor, BorrowAppContext, Context, Entity,
- EventEmitter, Model, ModelContext, PromptLevel, Task, WeakModel,
+ EventEmitter, Model, ModelContext, PromptLevel, SharedString, Task, WeakModel,
};
use itertools::Itertools;
use language::{
@@ -47,10 +47,10 @@ use language::{
serialize_version, split_operations,
},
range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, Capability, CodeLabel,
- Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation, Event as BufferEvent,
- File as _, Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate,
- Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset,
- ToPointUtf16, Transaction, Unclipped,
+ ContextProvider, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation,
+ Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName, LocalFile,
+ LspAdapterDelegate, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
+ ToOffset, ToPointUtf16, Transaction, Unclipped,
};
use log::error;
use lsp::{
@@ -80,6 +80,7 @@ use similar::{ChangeTag, TextDiff};
use smol::channel::{Receiver, Sender};
use smol::lock::Semaphore;
use std::{
+ borrow::Cow,
cmp::{self, Ordering},
convert::TryInto,
env,
@@ -97,7 +98,10 @@ use std::{
},
time::{Duration, Instant},
};
-use task::static_source::{StaticSource, TrackedFile};
+use task::{
+ static_source::{StaticSource, TrackedFile},
+ RevealStrategy, TaskContext, TaskTemplate, TaskVariables, VariableName,
+};
use terminals::Terminals;
use text::{Anchor, BufferId, LineEnding};
use util::{
@@ -676,6 +680,8 @@ impl Project {
client.add_model_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
client.add_model_request_handler(Self::handle_blame_buffer);
client.add_model_request_handler(Self::handle_multi_lsp_query);
+ client.add_model_request_handler(Self::handle_task_context_for_location);
+ client.add_model_request_handler(Self::handle_task_templates);
}
pub fn local(
@@ -1257,6 +1263,19 @@ impl Project {
self.dev_server_project_id
}
+ pub fn ssh_connection_string(&self, cx: &ModelContext<Self>) -> Option<SharedString> {
+ if self.is_local() {
+ return None;
+ }
+
+ let dev_server_id = self.dev_server_project_id()?;
+ dev_server_projects::Store::global(cx)
+ .read(cx)
+ .dev_server_for_project(dev_server_id)?
+ .ssh_connection_string
+ .clone()
+ }
+
pub fn replica_id(&self) -> ReplicaId {
match self.client_state {
ProjectClientState::Remote { replica_id, .. } => replica_id,
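
A hedged usage sketch: this is the gate the editor and the tasks modal (see the hunks below) apply to decide whether tasks are available at all, since a remote project only supports them when its dev server advertises an SSH connection string:

// Sketch only, assuming a Model<Project> and a mutable context are in hand.
let tasks_supported = project.update(cx, |project, cx| {
    project.is_local() || project.ssh_connection_string(cx).is_some()
});
if !tasks_supported {
    // Hide runnable indicators and skip the tasks modal for this project.
}
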
@@ -7892,7 +7911,7 @@ impl Project {
TaskSourceKind::Worktree {
id: remote_worktree_id,
abs_path,
- id_base: "local_tasks_for_worktree",
+ id_base: "local_tasks_for_worktree".into(),
},
|tx, cx| StaticSource::new(TrackedFile::new(tasks_file_rx, tx, cx)),
cx,
@@ -7912,7 +7931,7 @@ impl Project {
TaskSourceKind::Worktree {
id: remote_worktree_id,
abs_path,
- id_base: "local_vscode_tasks_for_worktree",
+ id_base: "local_vscode_tasks_for_worktree".into(),
},
|tx, cx| {
StaticSource::new(TrackedFile::new_convertible::<
@@ -9424,6 +9443,122 @@ impl Project {
})
}
+ async fn handle_task_context_for_location(
+ project: Model<Self>,
+ envelope: TypedEnvelope<proto::TaskContextForLocation>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::TaskContext> {
+ let location = envelope
+ .payload
+ .location
+ .context("no location given for task context handling")?;
+ let location = cx
+ .update(|cx| deserialize_location(&project, location, cx))?
+ .await?;
+ let context_task = project.update(&mut cx, |project, cx| {
+ let captured_variables = {
+ let mut variables = TaskVariables::default();
+ for range in location
+ .buffer
+ .read(cx)
+ .snapshot()
+ .runnable_ranges(location.range.clone())
+ {
+ for (capture_name, value) in range.extra_captures {
+ variables.insert(VariableName::Custom(capture_name.into()), value);
+ }
+ }
+ variables
+ };
+ project.task_context_for_location(captured_variables, location, cx)
+ })?;
+ let task_context = context_task.await.unwrap_or_default();
+ Ok(proto::TaskContext {
+ cwd: task_context
+ .cwd
+ .map(|cwd| cwd.to_string_lossy().to_string()),
+ task_variables: task_context
+ .task_variables
+ .into_iter()
+ .map(|(variable_name, variable_value)| (variable_name.to_string(), variable_value))
+ .collect(),
+ })
+ }
+
+ async fn handle_task_templates(
+ project: Model<Self>,
+ envelope: TypedEnvelope<proto::TaskTemplates>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::TaskTemplatesResponse> {
+ let worktree = envelope.payload.worktree_id.map(WorktreeId::from_proto);
+ let location = match envelope.payload.location {
+ Some(location) => Some(
+ cx.update(|cx| deserialize_location(&project, location, cx))?
+ .await
+ .context("task templates request location deserializing")?,
+ ),
+ None => None,
+ };
+
+ let templates = project
+ .update(&mut cx, |project, cx| {
+ project.task_templates(worktree, location, cx)
+ })?
+ .await
+ .context("receiving task templates")?
+ .into_iter()
+ .map(|(kind, template)| {
+ let kind = Some(match kind {
+ TaskSourceKind::UserInput => proto::task_source_kind::Kind::UserInput(
+ proto::task_source_kind::UserInput {},
+ ),
+ TaskSourceKind::Worktree {
+ id,
+ abs_path,
+ id_base,
+ } => {
+ proto::task_source_kind::Kind::Worktree(proto::task_source_kind::Worktree {
+ id: id.to_proto(),
+ abs_path: abs_path.to_string_lossy().to_string(),
+ id_base: id_base.to_string(),
+ })
+ }
+ TaskSourceKind::AbsPath { id_base, abs_path } => {
+ proto::task_source_kind::Kind::AbsPath(proto::task_source_kind::AbsPath {
+ abs_path: abs_path.to_string_lossy().to_string(),
+ id_base: id_base.to_string(),
+ })
+ }
+ TaskSourceKind::Language { name } => {
+ proto::task_source_kind::Kind::Language(proto::task_source_kind::Language {
+ name: name.to_string(),
+ })
+ }
+ });
+ let kind = Some(proto::TaskSourceKind { kind });
+ let template = Some(proto::TaskTemplate {
+ label: template.label,
+ command: template.command,
+ args: template.args,
+ env: template.env.into_iter().collect(),
+ cwd: template.cwd,
+ use_new_terminal: template.use_new_terminal,
+ allow_concurrent_runs: template.allow_concurrent_runs,
+ reveal: match template.reveal {
+ RevealStrategy::Always => proto::RevealStrategy::Always as i32,
+ RevealStrategy::Never => proto::RevealStrategy::Never as i32,
+ },
+ tags: template.tags,
+ });
+ proto::TemplatePair { kind, template }
+ })
+ .collect();
+
+ Ok(proto::TaskTemplatesResponse { templates })
+ }
+
async fn try_resolve_code_action(
lang_server: &LanguageServer,
action: &mut CodeAction,
@@ -10410,6 +10545,223 @@ impl Project {
Vec::new()
}
}
+
+ pub fn task_context_for_location(
+ &self,
+ captured_variables: TaskVariables,
+ location: Location,
+ cx: &mut ModelContext<'_, Project>,
+ ) -> Task<Option<TaskContext>> {
+ if self.is_local() {
+ let cwd = self.task_cwd(cx).log_err().flatten();
+
+ cx.spawn(|project, cx| async move {
+ let mut task_variables = cx
+ .update(|cx| {
+ combine_task_variables(
+ captured_variables,
+ location,
+ BasicContextProvider::new(project.upgrade()?),
+ cx,
+ )
+ .log_err()
+ })
+ .ok()
+ .flatten()?;
+ // Remove all custom entries starting with _, as they're not intended for use by the end user.
+ task_variables.sweep();
+ Some(TaskContext {
+ cwd,
+ task_variables,
+ })
+ })
+ } else if let Some(project_id) = self
+ .remote_id()
+ .filter(|_| self.ssh_connection_string(cx).is_some())
+ {
+ let task_context = self.client().request(proto::TaskContextForLocation {
+ project_id,
+ location: Some(proto::Location {
+ buffer_id: location.buffer.read(cx).remote_id().into(),
+ start: Some(serialize_anchor(&location.range.start)),
+ end: Some(serialize_anchor(&location.range.end)),
+ }),
+ });
+ cx.background_executor().spawn(async move {
+ let task_context = task_context.await.log_err()?;
+ Some(TaskContext {
+ cwd: task_context.cwd.map(PathBuf::from),
+ task_variables: task_context
+ .task_variables
+ .into_iter()
+ .filter_map(
+ |(variable_name, variable_value)| match variable_name.parse() {
+ Ok(variable_name) => Some((variable_name, variable_value)),
+ Err(()) => {
+ log::error!("Unknown variable name: {variable_name}");
+ None
+ }
+ },
+ )
+ .collect(),
+ })
+ })
+ } else {
+ Task::ready(None)
+ }
+ }
+
+ pub fn task_templates(
+ &self,
+ worktree: Option<WorktreeId>,
+ location: Option<Location>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Vec<(TaskSourceKind, TaskTemplate)>>> {
+ if self.is_local() {
+ let language = location
+ .and_then(|location| location.buffer.read(cx).language_at(location.range.start));
+ Task::ready(Ok(self
+ .task_inventory()
+ .read(cx)
+ .list_tasks(language, worktree)))
+ } else if let Some(project_id) = self
+ .remote_id()
+ .filter(|_| self.ssh_connection_string(cx).is_some())
+ {
+ let remote_templates =
+ self.query_remote_task_templates(project_id, worktree, location.as_ref(), cx);
+ cx.background_executor().spawn(remote_templates)
+ } else {
+ Task::ready(Ok(Vec::new()))
+ }
+ }
+
+ pub fn query_remote_task_templates(
+ &self,
+ project_id: u64,
+ worktree: Option<WorktreeId>,
+ location: Option<&Location>,
+ cx: &AppContext,
+ ) -> Task<Result<Vec<(TaskSourceKind, TaskTemplate)>>> {
+ let client = self.client();
+ let location = location.map(|location| serialize_location(location, cx));
+ cx.spawn(|_| async move {
+ let response = client
+ .request(proto::TaskTemplates {
+ project_id,
+ worktree_id: worktree.map(|id| id.to_proto()),
+ location,
+ })
+ .await?;
+
+ Ok(response
+ .templates
+ .into_iter()
+ .filter_map(|template_pair| {
+ let task_source_kind = match template_pair.kind?.kind? {
+ proto::task_source_kind::Kind::UserInput(_) => TaskSourceKind::UserInput,
+ proto::task_source_kind::Kind::Worktree(worktree) => {
+ TaskSourceKind::Worktree {
+ id: WorktreeId::from_proto(worktree.id),
+ abs_path: PathBuf::from(worktree.abs_path),
+ id_base: Cow::Owned(worktree.id_base),
+ }
+ }
+ proto::task_source_kind::Kind::AbsPath(abs_path) => {
+ TaskSourceKind::AbsPath {
+ id_base: Cow::Owned(abs_path.id_base),
+ abs_path: PathBuf::from(abs_path.abs_path),
+ }
+ }
+ proto::task_source_kind::Kind::Language(language) => {
+ TaskSourceKind::Language {
+ name: language.name.into(),
+ }
+ }
+ };
+
+ let proto_template = template_pair.template?;
+ let reveal = match proto::RevealStrategy::from_i32(proto_template.reveal)
+ .unwrap_or(proto::RevealStrategy::Always)
+ {
+ proto::RevealStrategy::Always => RevealStrategy::Always,
+ proto::RevealStrategy::Never => RevealStrategy::Never,
+ };
+ let task_template = TaskTemplate {
+ label: proto_template.label,
+ command: proto_template.command,
+ args: proto_template.args,
+ env: proto_template.env.into_iter().collect(),
+ cwd: proto_template.cwd,
+ use_new_terminal: proto_template.use_new_terminal,
+ allow_concurrent_runs: proto_template.allow_concurrent_runs,
+ reveal,
+ tags: proto_template.tags,
+ };
+ Some((task_source_kind, task_template))
+ })
+ .collect())
+ })
+ }
+
+ fn task_cwd(&self, cx: &AppContext) -> anyhow::Result<Option<PathBuf>> {
+ let available_worktrees = self
+ .worktrees()
+ .filter(|worktree| {
+ let worktree = worktree.read(cx);
+ worktree.is_visible()
+ && worktree.is_local()
+ && worktree.root_entry().map_or(false, |e| e.is_dir())
+ })
+ .collect::<Vec<_>>();
+ let cwd = match available_worktrees.len() {
+ 0 => None,
+ 1 => Some(available_worktrees[0].read(cx).abs_path()),
+ _ => {
+ let cwd_for_active_entry = self.active_entry().and_then(|entry_id| {
+ available_worktrees.into_iter().find_map(|worktree| {
+ let worktree = worktree.read(cx);
+ if worktree.contains_entry(entry_id) {
+ Some(worktree.abs_path())
+ } else {
+ None
+ }
+ })
+ });
+ anyhow::ensure!(
+ cwd_for_active_entry.is_some(),
+ "Cannot determine task cwd for multiple worktrees"
+ );
+ cwd_for_active_entry
+ }
+ };
+ Ok(cwd.map(|path| path.to_path_buf()))
+ }
+}
+
+fn combine_task_variables(
+ mut captured_variables: TaskVariables,
+ location: Location,
+ baseline: BasicContextProvider,
+ cx: &mut AppContext,
+) -> anyhow::Result<TaskVariables> {
+ let language_context_provider = location
+ .buffer
+ .read(cx)
+ .language()
+ .and_then(|language| language.context_provider());
+ let baseline = baseline
+ .build_context(&captured_variables, &location, cx)
+ .context("building basic default context")?;
+ captured_variables.extend(baseline);
+ if let Some(provider) = language_context_provider {
+ captured_variables.extend(
+ provider
+ .build_context(&captured_variables, &location, cx)
+ .context("building provider context")?,
+ );
+ }
+ Ok(captured_variables)
}
async fn populate_labels_for_symbols(
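
A hedged sketch of driving the two new Project methods from the app side (everything other than the Project API itself is an assumption):

// Sketch only: both methods return gpui Tasks. Locally they resolve from the task
// inventory and worktrees; on SSH dev server projects they issue the new proto
// requests (TaskContextForLocation, TaskTemplates) shown above.
let context_task = project.update(cx, |project, cx| {
    project.task_context_for_location(TaskVariables::default(), location, cx)
});
let templates_task = project.update(cx, |project, cx| {
    project.task_templates(Some(worktree_id), None, cx)
});
cx.background_executor()
    .spawn(async move {
        let context = context_task.await.unwrap_or_default();
        for (kind, template) in templates_task.await.unwrap_or_default() {
            // Resolve each template against the computed context, as the code
            // actions menu and the tasks modal do.
            let _resolved = template.resolve_task(&kind.to_id_base(), &context);
        }
    })
    .detach();
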
@@ -11238,3 +11590,40 @@ impl std::fmt::Display for NoRepositoryError {
}
impl std::error::Error for NoRepositoryError {}
+
+fn serialize_location(location: &Location, cx: &AppContext) -> proto::Location {
+ proto::Location {
+ buffer_id: location.buffer.read(cx).remote_id().into(),
+ start: Some(serialize_anchor(&location.range.start)),
+ end: Some(serialize_anchor(&location.range.end)),
+ }
+}
+
+fn deserialize_location(
+ project: &Model<Project>,
+ location: proto::Location,
+ cx: &mut AppContext,
+) -> Task<Result<Location>> {
+ let buffer_id = match BufferId::new(location.buffer_id) {
+ Ok(id) => id,
+ Err(e) => return Task::ready(Err(e)),
+ };
+ let buffer_task = project.update(cx, |project, cx| {
+ project.wait_for_remote_buffer(buffer_id, cx)
+ });
+ cx.spawn(|_| async move {
+ let buffer = buffer_task.await?;
+ let start = location
+ .start
+ .and_then(deserialize_anchor)
+ .context("missing task context location start")?;
+ let end = location
+ .end
+ .and_then(deserialize_anchor)
+ .context("missing task context location end")?;
+ Ok(Location {
+ buffer,
+ range: start..end,
+ })
+ })
+}
@@ -14,7 +14,7 @@ use serde_json::json;
#[cfg(not(windows))]
use std::os;
use std::task::Poll;
-use task::{TaskContext, TaskTemplate, TaskTemplates};
+use task::{ResolvedTask, TaskContext, TaskTemplate, TaskTemplates};
use unindent::Unindent as _;
use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
use worktree::WorktreeModelHandle as _;
@@ -129,101 +129,91 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
let task_context = TaskContext::default();
cx.executor().run_until_parked();
- let workree_id = cx.update(|cx| {
+ let worktree_id = cx.update(|cx| {
project.update(cx, |project, cx| {
project.worktrees().next().unwrap().read(cx).id()
})
});
let global_task_source_kind = TaskSourceKind::Worktree {
- id: workree_id,
+ id: worktree_id,
abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
- id_base: "local_tasks_for_worktree",
+ id_base: "local_tasks_for_worktree".into(),
};
- cx.update(|cx| {
- let tree = worktree.read(cx);
- let settings_a = language_settings(
- None,
- Some(
- &(File::for_entry(
- tree.entry_for_path("a/a.rs").unwrap().clone(),
- worktree.clone(),
- ) as _),
- ),
- cx,
- );
- let settings_b = language_settings(
- None,
- Some(
- &(File::for_entry(
- tree.entry_for_path("b/b.rs").unwrap().clone(),
- worktree.clone(),
- ) as _),
- ),
- cx,
- );
+ let all_tasks = cx
+ .update(|cx| {
+ let tree = worktree.read(cx);
- assert_eq!(settings_a.tab_size.get(), 8);
- assert_eq!(settings_b.tab_size.get(), 2);
-
- let all_tasks = project
- .update(cx, |project, cx| {
- project.task_inventory().update(cx, |inventory, _| {
- let (mut old, new) = inventory.used_and_current_resolved_tasks(
- None,
- Some(workree_id),
- &task_context,
- );
- old.extend(new);
- old
- })
- })
- .into_iter()
- .map(|(source_kind, task)| {
- let resolved = task.resolved.unwrap();
- (
- source_kind,
- task.resolved_label,
- resolved.args,
- resolved.env,
- )
- })
- .collect::<Vec<_>>();
- assert_eq!(
- all_tasks,
- vec![
- (
- global_task_source_kind.clone(),
- "cargo check".to_string(),
- vec!["check".to_string(), "--all".to_string()],
- HashMap::default(),
+ let settings_a = language_settings(
+ None,
+ Some(
+ &(File::for_entry(
+ tree.entry_for_path("a/a.rs").unwrap().clone(),
+ worktree.clone(),
+ ) as _),
),
- (
- TaskSourceKind::Worktree {
- id: workree_id,
- abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
- id_base: "local_tasks_for_worktree",
- },
- "cargo check".to_string(),
- vec!["check".to_string()],
- HashMap::default(),
+ cx,
+ );
+ let settings_b = language_settings(
+ None,
+ Some(
+ &(File::for_entry(
+ tree.entry_for_path("b/b.rs").unwrap().clone(),
+ worktree.clone(),
+ ) as _),
),
- ]
- );
- });
+ cx,
+ );
+
+ assert_eq!(settings_a.tab_size.get(), 8);
+ assert_eq!(settings_b.tab_size.get(), 2);
+
+ get_all_tasks(&project, Some(worktree_id), &task_context, cx)
+ })
+ .await
+ .into_iter()
+ .map(|(source_kind, task)| {
+ let resolved = task.resolved.unwrap();
+ (
+ source_kind,
+ task.resolved_label,
+ resolved.args,
+ resolved.env,
+ )
+ })
+ .collect::<Vec<_>>();
+ assert_eq!(
+ all_tasks,
+ vec![
+ (
+ global_task_source_kind.clone(),
+ "cargo check".to_string(),
+ vec!["check".to_string(), "--all".to_string()],
+ HashMap::default(),
+ ),
+ (
+ TaskSourceKind::Worktree {
+ id: worktree_id,
+ abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
+ id_base: "local_tasks_for_worktree".into(),
+ },
+ "cargo check".to_string(),
+ vec!["check".to_string()],
+ HashMap::default(),
+ ),
+ ]
+ );
+ let (_, resolved_task) = cx
+ .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
+ .await
+ .into_iter()
+ .find(|(source_kind, _)| source_kind == &global_task_source_kind)
+ .expect("should have one global task");
project.update(cx, |project, cx| {
- let inventory = project.task_inventory();
- inventory.update(cx, |inventory, _| {
- let (mut old, new) =
- inventory.used_and_current_resolved_tasks(None, Some(workree_id), &task_context);
- old.extend(new);
- let (_, resolved_task) = old
- .into_iter()
- .find(|(source_kind, _)| source_kind == &global_task_source_kind)
- .expect("should have one global task");
+ project.task_inventory().update(cx, |inventory, _| {
inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
- })
+ });
});
let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
@@ -257,63 +247,52 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
tx.unbounded_send(tasks).unwrap();
cx.run_until_parked();
- cx.update(|cx| {
- let all_tasks = project
- .update(cx, |project, cx| {
- project.task_inventory().update(cx, |inventory, _| {
- let (mut old, new) = inventory.used_and_current_resolved_tasks(
- None,
- Some(workree_id),
- &task_context,
- );
- old.extend(new);
- old
- })
- })
- .into_iter()
- .map(|(source_kind, task)| {
- let resolved = task.resolved.unwrap();
- (
- source_kind,
- task.resolved_label,
- resolved.args,
- resolved.env,
- )
- })
- .collect::<Vec<_>>();
- assert_eq!(
- all_tasks,
- vec![
- (
- TaskSourceKind::Worktree {
- id: workree_id,
- abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
- id_base: "local_tasks_for_worktree",
- },
- "cargo check".to_string(),
- vec![
- "check".to_string(),
- "--all".to_string(),
- "--all-targets".to_string()
- ],
- HashMap::from_iter(Some((
- "RUSTFLAGS".to_string(),
- "-Zunstable-options".to_string()
- ))),
- ),
- (
- TaskSourceKind::Worktree {
- id: workree_id,
- abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
- id_base: "local_tasks_for_worktree",
- },
- "cargo check".to_string(),
- vec!["check".to_string()],
- HashMap::default(),
- ),
- ]
- );
- });
+ let all_tasks = cx
+ .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
+ .await
+ .into_iter()
+ .map(|(source_kind, task)| {
+ let resolved = task.resolved.unwrap();
+ (
+ source_kind,
+ task.resolved_label,
+ resolved.args,
+ resolved.env,
+ )
+ })
+ .collect::<Vec<_>>();
+ assert_eq!(
+ all_tasks,
+ vec![
+ (
+ TaskSourceKind::Worktree {
+ id: worktree_id,
+ abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
+ id_base: "local_tasks_for_worktree".into(),
+ },
+ "cargo check".to_string(),
+ vec![
+ "check".to_string(),
+ "--all".to_string(),
+ "--all-targets".to_string()
+ ],
+ HashMap::from_iter(Some((
+ "RUSTFLAGS".to_string(),
+ "-Zunstable-options".to_string()
+ ))),
+ ),
+ (
+ TaskSourceKind::Worktree {
+ id: worktree_id,
+ abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
+ id_base: "local_tasks_for_worktree".into(),
+ },
+ "cargo check".to_string(),
+ vec!["check".to_string()],
+ HashMap::default(),
+ ),
+ ]
+ );
}
#[gpui::test]
@@ -5225,3 +5204,23 @@ fn tsx_lang() -> Arc<Language> {
Some(tree_sitter_typescript::language_tsx()),
))
}
+
+fn get_all_tasks(
+ project: &Model<Project>,
+ worktree_id: Option<WorktreeId>,
+ task_context: &TaskContext,
+ cx: &mut AppContext,
+) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
+ let resolved_tasks = project.update(cx, |project, cx| {
+ project
+ .task_inventory()
+ .read(cx)
+ .used_and_current_resolved_tasks(None, worktree_id, None, task_context, cx)
+ });
+
+ cx.spawn(|_| async move {
+ let (mut old, new) = resolved_tasks.await;
+ old.extend(new);
+ old
+ })
+}
@@ -1,6 +1,7 @@
//! Project-wide storage of the tasks available, capable of updating itself from the sources set.
use std::{
+ borrow::Cow,
cmp::{self, Reverse},
path::{Path, PathBuf},
sync::Arc,
@@ -20,7 +21,7 @@ use task::{
TaskVariables, VariableName,
};
use text::{Point, ToPoint};
-use util::{post_inc, NumericPrefixWithSuffix};
+use util::{post_inc, NumericPrefixWithSuffix, ResultExt};
use worktree::WorktreeId;
use crate::Project;
@@ -47,11 +48,11 @@ pub enum TaskSourceKind {
Worktree {
id: WorktreeId,
abs_path: PathBuf,
- id_base: &'static str,
+ id_base: Cow<'static, str>,
},
/// ~/.config/zed/task.json - like global files with task definitions, applicable to any path
AbsPath {
- id_base: &'static str,
+ id_base: Cow<'static, str>,
abs_path: PathBuf,
},
/// Languages-specific tasks coming from extensions.
@@ -191,13 +192,18 @@ impl Inventory {
/// Deduplicates the tasks by their labels and splits the ordered list into two: used tasks and the rest, newly resolved tasks.
pub fn used_and_current_resolved_tasks(
&self,
- language: Option<Arc<Language>>,
+ remote_templates_task: Option<Task<Result<Vec<(TaskSourceKind, TaskTemplate)>>>>,
worktree: Option<WorktreeId>,
+ location: Option<Location>,
task_context: &TaskContext,
- ) -> (
+ cx: &AppContext,
+ ) -> Task<(
Vec<(TaskSourceKind, ResolvedTask)>,
Vec<(TaskSourceKind, ResolvedTask)>,
- ) {
+ )> {
+ let language = location
+ .as_ref()
+ .and_then(|location| location.buffer.read(cx).language_at(location.range.start));
let task_source_kind = language.as_ref().map(|language| TaskSourceKind::Language {
name: language.name(),
});
@@ -229,7 +235,7 @@ impl Inventory {
},
);
let not_used_score = post_inc(&mut lru_score);
- let currently_resolved_tasks = self
+ let mut currently_resolved_tasks = self
.sources
.iter()
.filter(|source| {
@@ -244,7 +250,7 @@ impl Inventory {
.into_iter()
.map(|task| (&source.kind, task))
})
- .chain(language_tasks)
+ .chain(language_tasks.filter(|_| remote_templates_task.is_none()))
.filter_map(|(kind, task)| {
let id_base = kind.to_id_base();
Some((kind, task.resolve_task(&id_base, task_context)?))
@@ -259,66 +265,87 @@ impl Inventory {
.collect::<Vec<_>>();
let previously_spawned_tasks = task_usage
.into_iter()
- .map(|(_, (kind, task, lru_score))| (kind.clone(), task.clone(), lru_score));
-
- let mut tasks_by_label = BTreeMap::default();
- tasks_by_label = previously_spawned_tasks.into_iter().fold(
- tasks_by_label,
- |mut tasks_by_label, (source, task, lru_score)| {
- match tasks_by_label.entry((source, task.resolved_label.clone())) {
- btree_map::Entry::Occupied(mut o) => {
- let (_, previous_lru_score) = o.get();
- if previous_lru_score >= &lru_score {
- o.insert((task, lru_score));
+ .map(|(_, (kind, task, lru_score))| (kind.clone(), task.clone(), lru_score))
+ .collect::<Vec<_>>();
+
+ let task_context = task_context.clone();
+ cx.spawn(move |_| async move {
+ let remote_templates = match remote_templates_task {
+ Some(task) => match task.await.log_err() {
+ Some(remote_templates) => remote_templates,
+ None => return (Vec::new(), Vec::new()),
+ },
+ None => Vec::new(),
+ };
+ let remote_tasks = remote_templates.into_iter().filter_map(|(kind, task)| {
+ let id_base = kind.to_id_base();
+ Some((
+ kind,
+ task.resolve_task(&id_base, &task_context)?,
+ not_used_score,
+ ))
+ });
+ currently_resolved_tasks.extend(remote_tasks);
+
+ let mut tasks_by_label = BTreeMap::default();
+ tasks_by_label = previously_spawned_tasks.into_iter().fold(
+ tasks_by_label,
+ |mut tasks_by_label, (source, task, lru_score)| {
+ match tasks_by_label.entry((source, task.resolved_label.clone())) {
+ btree_map::Entry::Occupied(mut o) => {
+ let (_, previous_lru_score) = o.get();
+ if previous_lru_score >= &lru_score {
+ o.insert((task, lru_score));
+ }
}
- }
- btree_map::Entry::Vacant(v) => {
- v.insert((task, lru_score));
- }
- }
- tasks_by_label
- },
- );
- tasks_by_label = currently_resolved_tasks.iter().fold(
- tasks_by_label,
- |mut tasks_by_label, (source, task, lru_score)| {
- match tasks_by_label.entry((source.clone(), task.resolved_label.clone())) {
- btree_map::Entry::Occupied(mut o) => {
- let (previous_task, _) = o.get();
- let new_template = task.original_task();
- if new_template != previous_task.original_task() {
- o.insert((task.clone(), *lru_score));
+ btree_map::Entry::Vacant(v) => {
+ v.insert((task, lru_score));
}
}
- btree_map::Entry::Vacant(v) => {
- v.insert((task.clone(), *lru_score));
+ tasks_by_label
+ },
+ );
+ tasks_by_label = currently_resolved_tasks.iter().fold(
+ tasks_by_label,
+ |mut tasks_by_label, (source, task, lru_score)| {
+ match tasks_by_label.entry((source.clone(), task.resolved_label.clone())) {
+ btree_map::Entry::Occupied(mut o) => {
+ let (previous_task, _) = o.get();
+ let new_template = task.original_task();
+ if new_template != previous_task.original_task() {
+ o.insert((task.clone(), *lru_score));
+ }
+ }
+ btree_map::Entry::Vacant(v) => {
+ v.insert((task.clone(), *lru_score));
+ }
}
- }
- tasks_by_label
- },
- );
-
- let resolved = tasks_by_label
- .into_iter()
- .map(|((kind, _), (task, lru_score))| (kind, task, lru_score))
- .sorted_by(task_lru_comparator)
- .filter_map(|(kind, task, lru_score)| {
- if lru_score < not_used_score {
- Some((kind, task))
- } else {
- None
- }
- })
- .collect();
+ tasks_by_label
+ },
+ );
- (
- resolved,
- currently_resolved_tasks
+ let resolved = tasks_by_label
.into_iter()
- .sorted_unstable_by(task_lru_comparator)
- .map(|(kind, task, _)| (kind, task))
- .collect(),
- )
+ .map(|((kind, _), (task, lru_score))| (kind, task, lru_score))
+ .sorted_by(task_lru_comparator)
+ .filter_map(|(kind, task, lru_score)| {
+ if lru_score < not_used_score {
+ Some((kind, task))
+ } else {
+ None
+ }
+ })
+ .collect::<Vec<_>>();
+
+ (
+ resolved,
+ currently_resolved_tasks
+ .into_iter()
+ .sorted_unstable_by(task_lru_comparator)
+ .map(|(kind, task, _)| (kind, task))
+ .collect(),
+ )
+ })
}
/// Returns the last scheduled task, if any of the sources contains one with the matching id.
@@ -443,21 +470,6 @@ mod test_inventory {
})
}
- pub(super) fn resolved_task_names(
- inventory: &Model<Inventory>,
- worktree: Option<WorktreeId>,
- cx: &mut TestAppContext,
- ) -> Vec<String> {
- inventory.update(cx, |inventory, _| {
- let (used, current) =
- inventory.used_and_current_resolved_tasks(None, worktree, &TaskContext::default());
- used.into_iter()
- .chain(current)
- .map(|(_, task)| task.original_task().label.clone())
- .collect()
- })
- }
-
pub(super) fn register_task_used(
inventory: &Model<Inventory>,
task_name: &str,
@@ -478,21 +490,28 @@ mod test_inventory {
});
}
- pub(super) fn list_tasks(
+ pub(super) async fn list_tasks(
inventory: &Model<Inventory>,
worktree: Option<WorktreeId>,
cx: &mut TestAppContext,
) -> Vec<(TaskSourceKind, String)> {
- inventory.update(cx, |inventory, _| {
- let (used, current) =
- inventory.used_and_current_resolved_tasks(None, worktree, &TaskContext::default());
- let mut all = used;
- all.extend(current);
- all.into_iter()
- .map(|(source_kind, task)| (source_kind, task.resolved_label))
- .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
- .collect()
- })
+ let (used, current) = inventory
+ .update(cx, |inventory, cx| {
+ inventory.used_and_current_resolved_tasks(
+ None,
+ worktree,
+ None,
+ &TaskContext::default(),
+ cx,
+ )
+ })
+ .await;
+ let mut all = used;
+ all.extend(current);
+ all.into_iter()
+ .map(|(source_kind, task)| (source_kind, task.resolved_label))
+ .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
+ .collect()
}
}
@@ -622,9 +641,9 @@ mod tests {
use super::*;
#[gpui::test]
- fn test_task_list_sorting(cx: &mut TestAppContext) {
+ async fn test_task_list_sorting(cx: &mut TestAppContext) {
let inventory = cx.update(Inventory::new);
- let initial_tasks = resolved_task_names(&inventory, None, cx);
+ let initial_tasks = resolved_task_names(&inventory, None, cx).await;
assert!(
initial_tasks.is_empty(),
"No tasks expected for empty inventory, but got {initial_tasks:?}"
@@ -671,7 +690,7 @@ mod tests {
&expected_initial_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx),
+ resolved_task_names(&inventory, None, cx).await,
&expected_initial_state,
"Tasks with equal amount of usages should be sorted alphanumerically"
);
@@ -682,7 +701,7 @@ mod tests {
&expected_initial_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx),
+ resolved_task_names(&inventory, None, cx).await,
vec![
"2_task".to_string(),
"2_task".to_string(),
@@ -701,7 +720,7 @@ mod tests {
&expected_initial_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx),
+ resolved_task_names(&inventory, None, cx).await,
vec![
"3_task".to_string(),
"1_task".to_string(),
@@ -736,7 +755,7 @@ mod tests {
&expected_updated_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx),
+ resolved_task_names(&inventory, None, cx).await,
vec![
"3_task".to_string(),
"1_task".to_string(),
@@ -756,7 +775,7 @@ mod tests {
&expected_updated_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx),
+ resolved_task_names(&inventory, None, cx).await,
vec![
"11_hello".to_string(),
"3_task".to_string(),
@@ -773,7 +792,7 @@ mod tests {
}
#[gpui::test]
- fn test_inventory_static_task_filters(cx: &mut TestAppContext) {
+ async fn test_inventory_static_task_filters(cx: &mut TestAppContext) {
let inventory_with_statics = cx.update(Inventory::new);
let common_name = "common_task_name";
let path_1 = Path::new("path_1");
@@ -797,7 +816,7 @@ mod tests {
);
inventory.add_source(
TaskSourceKind::AbsPath {
- id_base: "test source",
+ id_base: "test source".into(),
abs_path: path_1.to_path_buf(),
},
|tx, cx| {
@@ -811,7 +830,7 @@ mod tests {
);
inventory.add_source(
TaskSourceKind::AbsPath {
- id_base: "test source",
+ id_base: "test source".into(),
abs_path: path_2.to_path_buf(),
},
|tx, cx| {
@@ -827,7 +846,7 @@ mod tests {
TaskSourceKind::Worktree {
id: worktree_1,
abs_path: worktree_path_1.to_path_buf(),
- id_base: "test_source",
+ id_base: "test_source".into(),
},
|tx, cx| {
static_test_source(
@@ -842,7 +861,7 @@ mod tests {
TaskSourceKind::Worktree {
id: worktree_2,
abs_path: worktree_path_2.to_path_buf(),
- id_base: "test_source",
+ id_base: "test_source".into(),
},
|tx, cx| {
static_test_source(
@@ -858,28 +877,28 @@ mod tests {
let worktree_independent_tasks = vec![
(
TaskSourceKind::AbsPath {
- id_base: "test source",
+ id_base: "test source".into(),
abs_path: path_1.to_path_buf(),
},
"static_source_1".to_string(),
),
(
TaskSourceKind::AbsPath {
- id_base: "test source",
+ id_base: "test source".into(),
abs_path: path_1.to_path_buf(),
},
common_name.to_string(),
),
(
TaskSourceKind::AbsPath {
- id_base: "test source",
+ id_base: "test source".into(),
abs_path: path_2.to_path_buf(),
},
common_name.to_string(),
),
(
TaskSourceKind::AbsPath {
- id_base: "test source",
+ id_base: "test source".into(),
abs_path: path_2.to_path_buf(),
},
"static_source_2".to_string(),
@@ -892,7 +911,7 @@ mod tests {
TaskSourceKind::Worktree {
id: worktree_1,
abs_path: worktree_path_1.to_path_buf(),
- id_base: "test_source",
+ id_base: "test_source".into(),
},
common_name.to_string(),
),
@@ -900,7 +919,7 @@ mod tests {
TaskSourceKind::Worktree {
id: worktree_1,
abs_path: worktree_path_1.to_path_buf(),
- id_base: "test_source",
+ id_base: "test_source".into(),
},
"worktree_1".to_string(),
),
@@ -910,7 +929,7 @@ mod tests {
TaskSourceKind::Worktree {
id: worktree_2,
abs_path: worktree_path_2.to_path_buf(),
- id_base: "test_source",
+ id_base: "test_source".into(),
},
common_name.to_string(),
),
@@ -918,7 +937,7 @@ mod tests {
TaskSourceKind::Worktree {
id: worktree_2,
abs_path: worktree_path_2.to_path_buf(),
- id_base: "test_source",
+ id_base: "test_source".into(),
},
"worktree_2".to_string(),
),
@@ -933,9 +952,12 @@ mod tests {
.sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
.collect::<Vec<_>>();
- assert_eq!(list_tasks(&inventory_with_statics, None, cx), all_tasks);
assert_eq!(
- list_tasks(&inventory_with_statics, Some(worktree_1), cx),
+ list_tasks(&inventory_with_statics, None, cx).await,
+ all_tasks
+ );
+ assert_eq!(
+ list_tasks(&inventory_with_statics, Some(worktree_1), cx).await,
worktree_1_tasks
.iter()
.chain(worktree_independent_tasks.iter())
@@ -944,7 +966,7 @@ mod tests {
.collect::<Vec<_>>(),
);
assert_eq!(
- list_tasks(&inventory_with_statics, Some(worktree_2), cx),
+ list_tasks(&inventory_with_statics, Some(worktree_2), cx).await,
worktree_2_tasks
.iter()
.chain(worktree_independent_tasks.iter())
@@ -953,4 +975,26 @@ mod tests {
.collect::<Vec<_>>(),
);
}
+
+ pub(super) async fn resolved_task_names(
+ inventory: &Model<Inventory>,
+ worktree: Option<WorktreeId>,
+ cx: &mut TestAppContext,
+ ) -> Vec<String> {
+ let (used, current) = inventory
+ .update(cx, |inventory, cx| {
+ inventory.used_and_current_resolved_tasks(
+ None,
+ worktree,
+ None,
+ &TaskContext::default(),
+ cx,
+ )
+ })
+ .await;
+ used.into_iter()
+ .chain(current)
+ .map(|(_, task)| task.original_task().label.clone())
+ .collect()
+ }
}
@@ -245,7 +245,12 @@ message Envelope {
RegenerateDevServerToken regenerate_dev_server_token = 200;
RegenerateDevServerTokenResponse regenerate_dev_server_token_response = 201;
- RenameDevServer rename_dev_server = 202; // Current max
+ RenameDevServer rename_dev_server = 202;
+
+ TaskContextForLocation task_context_for_location = 203;
+ TaskContext task_context = 204;
+ TaskTemplatesResponse task_templates_response = 205;
+ TaskTemplates task_templates = 206; // Current max
}
reserved 158 to 161;
@@ -2118,3 +2123,71 @@ message GetSupermavenApiKey {}
message GetSupermavenApiKeyResponse {
string api_key = 1;
}
+
+message TaskContextForLocation {
+ uint64 project_id = 1;
+ Location location = 2;
+}
+
+message TaskContext {
+ optional string cwd = 1;
+ map<string, string> task_variables = 2;
+}
+
+message TaskTemplates {
+ uint64 project_id = 1;
+ optional Location location = 2;
+ optional uint64 worktree_id = 3;
+}
+
+message TaskTemplatesResponse {
+ repeated TemplatePair templates = 1;
+}
+
+message TemplatePair {
+ TaskSourceKind kind = 1;
+ TaskTemplate template = 2;
+}
+
+message TaskTemplate {
+ string label = 1;
+ string command = 2;
+ repeated string args = 3;
+ map<string, string> env = 4;
+ optional string cwd = 5;
+ bool use_new_terminal = 6;
+ bool allow_concurrent_runs = 7;
+ RevealStrategy reveal = 8;
+ repeated string tags = 9;
+}
+
+enum RevealStrategy {
+ Always = 0;
+ Never = 1;
+}
+
+message TaskSourceKind {
+ oneof kind {
+ UserInput user_input = 1;
+ Worktree worktree = 2;
+ AbsPath abs_path = 3;
+ Language language = 4;
+ }
+
+ message UserInput {}
+
+ message Worktree {
+ uint64 id = 1;
+ string abs_path = 2;
+ string id_base = 3;
+ }
+
+ message AbsPath {
+ string id_base = 1;
+ string abs_path = 2;
+ }
+
+ message Language {
+ string name = 1;
+ }
+}
@@ -279,6 +279,10 @@ messages!(
(StartLanguageServer, Foreground),
(SynchronizeBuffers, Foreground),
(SynchronizeBuffersResponse, Foreground),
+ (TaskContextForLocation, Background),
+ (TaskContext, Background),
+ (TaskTemplates, Background),
+ (TaskTemplatesResponse, Background),
(Test, Foreground),
(Unfollow, Foreground),
(UnshareProject, Foreground),
@@ -326,7 +330,7 @@ messages!(
(RegenerateDevServerToken, Foreground),
(RegenerateDevServerTokenResponse, Foreground),
(RenameDevServer, Foreground),
- (OpenNewBuffer, Foreground)
+ (OpenNewBuffer, Foreground),
);
request_messages!(
@@ -414,6 +418,8 @@ request_messages!(
(SetChannelVisibility, Ack),
(ShareProject, ShareProjectResponse),
(SynchronizeBuffers, SynchronizeBuffersResponse),
+ (TaskContextForLocation, TaskContext),
+ (TaskTemplates, TaskTemplatesResponse),
(Test, Test),
(UpdateBuffer, Ack),
(UpdateParticipantLocation, Ack),
@@ -481,6 +487,8 @@ entity_messages!(
SearchProject,
StartLanguageServer,
SynchronizeBuffers,
+ TaskContextForLocation,
+ TaskTemplates,
UnshareProject,
UpdateBuffer,
UpdateBufferFile,
@@ -5,10 +5,11 @@ pub mod static_source;
mod task_template;
mod vscode_format;
-use collections::{HashMap, HashSet};
+use collections::{hash_map, HashMap, HashSet};
use gpui::SharedString;
use serde::Serialize;
use std::path::PathBuf;
+use std::str::FromStr;
use std::{borrow::Cow, path::Path};
pub use task_template::{RevealStrategy, TaskTemplate, TaskTemplates};
@@ -161,8 +162,35 @@ impl VariableName {
}
}
+impl FromStr for VariableName {
+ type Err = ();
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let without_prefix = s.strip_prefix(ZED_VARIABLE_NAME_PREFIX).ok_or(())?;
+ let value = match without_prefix {
+ "FILE" => Self::File,
+ "WORKTREE_ROOT" => Self::WorktreeRoot,
+ "SYMBOL" => Self::Symbol,
+ "SELECTED_TEXT" => Self::SelectedText,
+ "ROW" => Self::Row,
+ "COLUMN" => Self::Column,
+ _ => {
+ if let Some(custom_name) =
+ without_prefix.strip_prefix(ZED_CUSTOM_VARIABLE_NAME_PREFIX)
+ {
+ Self::Custom(Cow::Owned(custom_name.to_owned()))
+ } else {
+ return Err(());
+ }
+ }
+ };
+ Ok(value)
+ }
+}
+
/// A prefix that all [`VariableName`] variants are prefixed with when used in environment variables and similar template contexts.
pub const ZED_VARIABLE_NAME_PREFIX: &str = "ZED_";
+const ZED_CUSTOM_VARIABLE_NAME_PREFIX: &str = "CUSTOM_";
impl std::fmt::Display for VariableName {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
@@ -178,7 +206,10 @@ impl std::fmt::Display for VariableName {
Self::Column => write!(f, "{ZED_VARIABLE_NAME_PREFIX}COLUMN"),
Self::SelectedText => write!(f, "{ZED_VARIABLE_NAME_PREFIX}SELECTED_TEXT"),
Self::RunnableSymbol => write!(f, "{ZED_VARIABLE_NAME_PREFIX}RUNNABLE_SYMBOL"),
- Self::Custom(s) => write!(f, "{ZED_VARIABLE_NAME_PREFIX}CUSTOM_{s}"),
+ Self::Custom(s) => write!(
+ f,
+ "{ZED_VARIABLE_NAME_PREFIX}{ZED_CUSTOM_VARIABLE_NAME_PREFIX}{s}"
+ ),
}
}
}
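
A hedged round-trip sketch of the new parsing (the prefixes come from the constants above; the asserts are assumptions about intended behavior, not tests from this patch):

use std::str::FromStr;

// Custom variables serialize as "ZED_CUSTOM_<name>" and parse back by stripping the
// "ZED_" prefix first, then the "CUSTOM_" prefix.
let name = VariableName::Custom("gtest_name".into());
let serialized = name.to_string(); // "ZED_CUSTOM_gtest_name"
assert_eq!(VariableName::from_str(&serialized), Ok(name));

// Well-known variables map to their fixed names; unrelated strings are rejected.
assert_eq!("ZED_ROW".parse(), Ok(VariableName::Row));
assert!(VariableName::from_str("PATH").is_err());
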
@@ -219,6 +250,16 @@ impl FromIterator<(VariableName, String)> for TaskVariables {
}
}
+impl IntoIterator for TaskVariables {
+ type Item = (VariableName, String);
+
+ type IntoIter = hash_map::IntoIter<VariableName, String>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.0.into_iter()
+ }
+}
+
/// Keeps track of the file associated with a task and context of tasks execution (i.e. current file or current function).
/// Keeps all Zed-related state inside, used to produce a resolved task out of its template.
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize)]
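
And a hedged sketch of the new IntoIterator impl at work, turning TaskVariables into the flat string map that proto::TaskContext carries over the wire (the concrete values are made up):

use std::collections::HashMap;

let variables = TaskVariables::from_iter([(VariableName::Row, "1".to_string())]);
let wire: HashMap<String, String> = variables
    .into_iter()
    .map(|(name, value)| (name.to_string(), value))
    .collect();
assert_eq!(wire.get("ZED_ROW"), Some(&"1".to_string()));
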
@@ -9,6 +9,7 @@ license = "GPL-3.0-or-later"
workspace = true
[dependencies]
+anyhow.workspace = true
editor.workspace = true
file_icons.workspace = true
fuzzy.workspace = true
@@ -1,11 +1,8 @@
-use std::sync::Arc;
-
use ::settings::Settings;
use editor::{tasks::task_context, Editor};
-use gpui::{AppContext, ViewContext, WindowContext};
-use language::Language;
+use gpui::{AppContext, Task as AsyncTask, ViewContext, WindowContext};
use modal::TasksModal;
-use project::WorktreeId;
+use project::{Location, WorktreeId};
use workspace::tasks::schedule_task;
use workspace::{tasks::schedule_resolved_task, Workspace};
@@ -34,15 +31,23 @@ pub fn init(cx: &mut AppContext) {
if let Some(use_new_terminal) = action.use_new_terminal {
original_task.use_new_terminal = use_new_terminal;
}
- let task_context = task_context(workspace, cx);
- schedule_task(
- workspace,
- task_source_kind,
- &original_task,
- &task_context,
- false,
- cx,
- )
+ let context_task = task_context(workspace, cx);
+ cx.spawn(|workspace, mut cx| async move {
+ let task_context = context_task.await;
+ workspace
+ .update(&mut cx, |workspace, cx| {
+ schedule_task(
+ workspace,
+ task_source_kind,
+ &original_task,
+ &task_context,
+ false,
+ cx,
+ )
+ })
+ .ok()
+ })
+ .detach()
} else {
if let Some(resolved) = last_scheduled_task.resolved.as_mut() {
if let Some(allow_concurrent_runs) = action.allow_concurrent_runs {
@@ -62,7 +67,7 @@ pub fn init(cx: &mut AppContext) {
);
}
} else {
- toggle_modal(workspace, cx);
+ toggle_modal(workspace, cx).detach();
};
});
},
@@ -72,33 +77,52 @@ pub fn init(cx: &mut AppContext) {
fn spawn_task_or_modal(workspace: &mut Workspace, action: &Spawn, cx: &mut ViewContext<Workspace>) {
match &action.task_name {
- Some(name) => spawn_task_with_name(name.clone(), cx),
- None => toggle_modal(workspace, cx),
+ Some(name) => spawn_task_with_name(name.clone(), cx).detach_and_log_err(cx),
+ None => toggle_modal(workspace, cx).detach(),
}
}
-fn toggle_modal(workspace: &mut Workspace, cx: &mut ViewContext<'_, Workspace>) {
- let inventory = workspace.project().read(cx).task_inventory().clone();
+fn toggle_modal(workspace: &mut Workspace, cx: &mut ViewContext<'_, Workspace>) -> AsyncTask<()> {
+ let project = workspace.project().clone();
let workspace_handle = workspace.weak_handle();
- let task_context = task_context(workspace, cx);
- workspace.toggle_modal(cx, |cx| {
- TasksModal::new(inventory, task_context, workspace_handle, cx)
+ let context_task = task_context(workspace, cx);
+ cx.spawn(|workspace, mut cx| async move {
+ let task_context = context_task.await;
+ workspace
+ .update(&mut cx, |workspace, cx| {
+ if workspace.project().update(cx, |project, cx| {
+ project.is_local() || project.ssh_connection_string(cx).is_some()
+ }) {
+ workspace.toggle_modal(cx, |cx| {
+ TasksModal::new(project, task_context, workspace_handle, cx)
+ })
+ }
+ })
+ .ok();
})
}
-fn spawn_task_with_name(name: String, cx: &mut ViewContext<Workspace>) {
+fn spawn_task_with_name(
+ name: String,
+ cx: &mut ViewContext<Workspace>,
+) -> AsyncTask<anyhow::Result<()>> {
cx.spawn(|workspace, mut cx| async move {
+ let context_task =
+ workspace.update(&mut cx, |workspace, cx| task_context(workspace, cx))?;
+ let task_context = context_task.await;
+ let tasks = workspace
+ .update(&mut cx, |workspace, cx| {
+ let (worktree, location) = active_item_selection_properties(workspace, cx);
+ workspace.project().update(cx, |project, cx| {
+ project.task_templates(worktree, location, cx)
+ })
+ })?
+ .await?;
+
let did_spawn = workspace
.update(&mut cx, |workspace, cx| {
- let (worktree, language) = active_item_selection_properties(workspace, cx);
- let tasks = workspace.project().update(cx, |project, cx| {
- project
- .task_inventory()
- .update(cx, |inventory, _| inventory.list_tasks(language, worktree))
- });
let (task_source_kind, target_task) =
tasks.into_iter().find(|(_, task)| task.label == name)?;
- let task_context = task_context(workspace, cx);
schedule_task(
workspace,
task_source_kind,
@@ -108,9 +132,7 @@ fn spawn_task_with_name(name: String, cx: &mut ViewContext<Workspace>) {
cx,
);
Some(())
- })
- .ok()
- .flatten()
+ })?
.is_some();
if !did_spawn {
workspace
@@ -119,32 +141,38 @@ fn spawn_task_with_name(name: String, cx: &mut ViewContext<Workspace>) {
})
.ok();
}
+
+ Ok(())
})
- .detach();
}
fn active_item_selection_properties(
workspace: &Workspace,
cx: &mut WindowContext,
-) -> (Option<WorktreeId>, Option<Arc<Language>>) {
+) -> (Option<WorktreeId>, Option<Location>) {
let active_item = workspace.active_item(cx);
let worktree_id = active_item
.as_ref()
.and_then(|item| item.project_path(cx))
.map(|path| path.worktree_id);
- let language = active_item
+ let location = active_item
.and_then(|active_item| active_item.act_as::<Editor>(cx))
.and_then(|editor| {
editor.update(cx, |editor, cx| {
- let selection = editor.selections.newest::<usize>(cx);
- let (buffer, buffer_position, _) = editor
- .buffer()
- .read(cx)
- .point_to_buffer_offset(selection.start, cx)?;
- buffer.read(cx).language_at(buffer_position)
+ let selection = editor.selections.newest_anchor();
+ let multi_buffer = editor.buffer().clone();
+ let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx);
+ let (buffer_snapshot, buffer_offset) =
+ multi_buffer_snapshot.point_to_buffer_offset(selection.head())?;
+ let buffer_anchor = buffer_snapshot.anchor_before(buffer_offset);
+ let buffer = multi_buffer.read(cx).buffer(buffer_snapshot.remote_id())?;
+ Some(Location {
+ buffer,
+ range: buffer_anchor..buffer_anchor,
+ })
})
});
- (worktree_id, language)
+ (worktree_id, location)
}
#[cfg(test)]
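With this hunk, active_item_selection_properties reports a project::Location (the buffer plus an empty anchor range at the newest selection head) instead of only the buffer's Language, so the host side can resolve position-dependent task variables itself. The shape being constructed, inferred from the hunk (the actual definition lives in the project crate and is not part of this excerpt):

    // Inferred shape only; see crates/project for the real definition.
    pub struct Location {
        pub buffer: Model<Buffer>,
        pub range: Range<Anchor>,
    }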
@@ -250,69 +278,84 @@ mod tests {
.unwrap();
buffer2.update(cx, |this, cx| this.set_language(Some(rust_language), cx));
let editor2 = cx.new_view(|cx| Editor::for_buffer(buffer2, Some(project), cx));
- workspace.update(cx, |this, cx| {
- this.add_item_to_center(Box::new(editor1.clone()), cx);
- this.add_item_to_center(Box::new(editor2.clone()), cx);
- assert_eq!(this.active_item(cx).unwrap().item_id(), editor2.entity_id());
- assert_eq!(
- task_context(this, cx),
- TaskContext {
- cwd: Some("/dir".into()),
- task_variables: TaskVariables::from_iter([
- (VariableName::File, "/dir/rust/b.rs".into()),
- (VariableName::Filename, "b.rs".into()),
- (VariableName::RelativeFile, "rust/b.rs".into()),
- (VariableName::Dirname, "/dir/rust".into()),
- (VariableName::Stem, "b".into()),
- (VariableName::WorktreeRoot, "/dir".into()),
- (VariableName::Row, "1".into()),
- (VariableName::Column, "1".into()),
- ])
- }
- );
- // And now, let's select an identifier.
- editor2.update(cx, |this, cx| {
- this.change_selections(None, cx, |selections| selections.select_ranges([14..18]))
- });
- assert_eq!(
- task_context(this, cx),
- TaskContext {
- cwd: Some("/dir".into()),
- task_variables: TaskVariables::from_iter([
- (VariableName::File, "/dir/rust/b.rs".into()),
- (VariableName::Filename, "b.rs".into()),
- (VariableName::RelativeFile, "rust/b.rs".into()),
- (VariableName::Dirname, "/dir/rust".into()),
- (VariableName::Stem, "b".into()),
- (VariableName::WorktreeRoot, "/dir".into()),
- (VariableName::Row, "1".into()),
- (VariableName::Column, "15".into()),
- (VariableName::SelectedText, "is_i".into()),
- (VariableName::Symbol, "this_is_a_rust_file".into()),
- ])
- }
- );
- // Now, let's switch the active item to .ts file.
- this.activate_item(&editor1, cx);
- assert_eq!(
- task_context(this, cx),
- TaskContext {
- cwd: Some("/dir".into()),
- task_variables: TaskVariables::from_iter([
- (VariableName::File, "/dir/a.ts".into()),
- (VariableName::Filename, "a.ts".into()),
- (VariableName::RelativeFile, "a.ts".into()),
- (VariableName::Dirname, "/dir".into()),
- (VariableName::Stem, "a".into()),
- (VariableName::WorktreeRoot, "/dir".into()),
- (VariableName::Row, "1".into()),
- (VariableName::Column, "1".into()),
- (VariableName::Symbol, "this_is_a_test".into()),
- ])
- }
- );
+ let first_context = workspace
+ .update(cx, |workspace, cx| {
+ workspace.add_item_to_center(Box::new(editor1.clone()), cx);
+ workspace.add_item_to_center(Box::new(editor2.clone()), cx);
+ assert_eq!(
+ workspace.active_item(cx).unwrap().item_id(),
+ editor2.entity_id()
+ );
+ task_context(workspace, cx)
+ })
+ .await;
+ assert_eq!(
+ first_context,
+ TaskContext {
+ cwd: Some("/dir".into()),
+ task_variables: TaskVariables::from_iter([
+ (VariableName::File, "/dir/rust/b.rs".into()),
+ (VariableName::Filename, "b.rs".into()),
+ (VariableName::RelativeFile, "rust/b.rs".into()),
+ (VariableName::Dirname, "/dir/rust".into()),
+ (VariableName::Stem, "b".into()),
+ (VariableName::WorktreeRoot, "/dir".into()),
+ (VariableName::Row, "1".into()),
+ (VariableName::Column, "1".into()),
+ ])
+ }
+ );
+
+ // And now, let's select an identifier.
+ editor2.update(cx, |editor, cx| {
+ editor.change_selections(None, cx, |selections| selections.select_ranges([14..18]))
});
+
+ assert_eq!(
+ workspace
+ .update(cx, |workspace, cx| { task_context(workspace, cx) })
+ .await,
+ TaskContext {
+ cwd: Some("/dir".into()),
+ task_variables: TaskVariables::from_iter([
+ (VariableName::File, "/dir/rust/b.rs".into()),
+ (VariableName::Filename, "b.rs".into()),
+ (VariableName::RelativeFile, "rust/b.rs".into()),
+ (VariableName::Dirname, "/dir/rust".into()),
+ (VariableName::Stem, "b".into()),
+ (VariableName::WorktreeRoot, "/dir".into()),
+ (VariableName::Row, "1".into()),
+ (VariableName::Column, "15".into()),
+ (VariableName::SelectedText, "is_i".into()),
+ (VariableName::Symbol, "this_is_a_rust_file".into()),
+ ])
+ }
+ );
+
+ assert_eq!(
+ workspace
+ .update(cx, |workspace, cx| {
+ // Now, let's switch the active item to .ts file.
+ workspace.activate_item(&editor1, cx);
+ task_context(workspace, cx)
+ })
+ .await,
+ TaskContext {
+ cwd: Some("/dir".into()),
+ task_variables: TaskVariables::from_iter([
+ (VariableName::File, "/dir/a.ts".into()),
+ (VariableName::Filename, "a.ts".into()),
+ (VariableName::RelativeFile, "a.ts".into()),
+ (VariableName::Dirname, "/dir".into()),
+ (VariableName::Stem, "a".into()),
+ (VariableName::WorktreeRoot, "/dir".into()),
+ (VariableName::Row, "1".into()),
+ (VariableName::Column, "1".into()),
+ (VariableName::Symbol, "this_is_a_test".into()),
+ ])
+ }
+ );
}
pub(crate) fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
@@ -4,11 +4,11 @@ use crate::active_item_selection_properties;
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
impl_actions, rems, Action, AnyElement, AppContext, DismissEvent, EventEmitter, FocusableView,
- InteractiveElement, Model, ParentElement, Render, SharedString, Styled, Subscription, View,
- ViewContext, VisualContext, WeakView,
+ InteractiveElement, Model, ParentElement, Render, SharedString, Styled, Subscription, Task,
+ View, ViewContext, VisualContext, WeakView,
};
use picker::{highlighted_match_with_paths::HighlightedText, Picker, PickerDelegate};
-use project::{Inventory, TaskSourceKind};
+use project::{Project, TaskSourceKind};
use task::{ResolvedTask, TaskContext, TaskTemplate};
use ui::{
div, h_flex, v_flex, ActiveTheme, Button, ButtonCommon, ButtonSize, Clickable, Color,
@@ -60,7 +60,7 @@ impl_actions!(task, [Rerun, Spawn]);
/// A modal used to spawn new tasks.
pub(crate) struct TasksModalDelegate {
- inventory: Model<Inventory>,
+ project: Model<Project>,
candidates: Option<Vec<(TaskSourceKind, ResolvedTask)>>,
last_used_candidate_index: Option<usize>,
divider_index: Option<usize>,
@@ -74,12 +74,12 @@ pub(crate) struct TasksModalDelegate {
impl TasksModalDelegate {
fn new(
- inventory: Model<Inventory>,
+ project: Model<Project>,
task_context: TaskContext,
workspace: WeakView<Workspace>,
) -> Self {
Self {
- inventory,
+ project,
workspace,
candidates: None,
matches: Vec::new(),
@@ -121,8 +121,10 @@ impl TasksModalDelegate {
// it doesn't make sense to requery the inventory for new candidates, as that's potentially costly and more often than not it should just return back
// the original list without a removed entry.
candidates.remove(ix);
- self.inventory.update(cx, |inventory, _| {
- inventory.delete_previously_used(&task.id);
+ self.project.update(cx, |project, cx| {
+ project.task_inventory().update(cx, |inventory, _| {
+ inventory.delete_previously_used(&task.id);
+ })
});
}
}
@@ -134,14 +136,14 @@ pub(crate) struct TasksModal {
impl TasksModal {
pub(crate) fn new(
- inventory: Model<Inventory>,
+ project: Model<Project>,
task_context: TaskContext,
workspace: WeakView<Workspace>,
cx: &mut ViewContext<Self>,
) -> Self {
let picker = cx.new_view(|cx| {
Picker::uniform_list(
- TasksModalDelegate::new(inventory, task_context, workspace),
+ TasksModalDelegate::new(project, task_context, workspace),
cx,
)
});
@@ -197,53 +199,85 @@ impl PickerDelegate for TasksModalDelegate {
&mut self,
query: String,
cx: &mut ViewContext<picker::Picker<Self>>,
- ) -> gpui::Task<()> {
+ ) -> Task<()> {
cx.spawn(move |picker, mut cx| async move {
- let Some(candidates) = picker
+ let Some(candidates_task) = picker
.update(&mut cx, |picker, cx| {
- let candidates = match &mut picker.delegate.candidates {
- Some(candidates) => candidates,
+ match &mut picker.delegate.candidates {
+ Some(candidates) => {
+ Task::ready(Ok(string_match_candidates(candidates.iter())))
+ }
None => {
- let Ok((worktree, language)) =
+ let Ok((worktree, location)) =
picker.delegate.workspace.update(cx, |workspace, cx| {
active_item_selection_properties(workspace, cx)
})
else {
- return Vec::new();
- };
- let (used, current) =
- picker.delegate.inventory.update(cx, |inventory, _| {
- inventory.used_and_current_resolved_tasks(
- language,
- worktree,
- &picker.delegate.task_context,
- )
- });
- picker.delegate.last_used_candidate_index = if used.is_empty() {
- None
- } else {
- Some(used.len() - 1)
+ return Task::ready(Ok(Vec::new()));
};
- let mut new_candidates = used;
- new_candidates.extend(current);
- picker.delegate.candidates.insert(new_candidates)
+ let resolved_task =
+ picker.delegate.project.update(cx, |project, cx| {
+ let ssh_connection_string = project.ssh_connection_string(cx);
+ if project.is_remote() && ssh_connection_string.is_none() {
+ Task::ready((Vec::new(), Vec::new()))
+ } else {
+ let remote_templates = if project.is_local() {
+ None
+ } else {
+ project
+ .remote_id()
+ .filter(|_| ssh_connection_string.is_some())
+ .map(|project_id| {
+ project.query_remote_task_templates(
+ project_id,
+ worktree,
+ location.as_ref(),
+ cx,
+ )
+ })
+ };
+ project
+ .task_inventory()
+ .read(cx)
+ .used_and_current_resolved_tasks(
+ remote_templates,
+ worktree,
+ location,
+ &picker.delegate.task_context,
+ cx,
+ )
+ }
+ });
+ cx.spawn(|picker, mut cx| async move {
+ let (used, current) = resolved_task.await;
+ picker.update(&mut cx, |picker, _| {
+ picker.delegate.last_used_candidate_index = if used.is_empty() {
+ None
+ } else {
+ Some(used.len() - 1)
+ };
+
+ let mut new_candidates = used;
+ new_candidates.extend(current);
+ let match_candidates =
+ string_match_candidates(new_candidates.iter());
+ let _ = picker.delegate.candidates.insert(new_candidates);
+ match_candidates
+ })
+ })
}
- };
- candidates
- .iter()
- .enumerate()
- .map(|(index, (_, candidate))| StringMatchCandidate {
- id: index,
- char_bag: candidate.resolved_label.chars().collect(),
- string: candidate.display_label().to_owned(),
- })
- .collect::<Vec<_>>()
+ }
})
.ok()
else {
return;
};
+ let Some(candidates): Option<Vec<StringMatchCandidate>> =
+ candidates_task.await.log_err()
+ else {
+ return;
+ };
let matches = fuzzy::match_strings(
&candidates,
&query,
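In the update_matches hunk above, candidate resolution becomes asynchronous and branches on the project kind: a remote project with no ssh connection string yields no tasks, a local project resolves from the local inventory alone, and a remote project with an ssh connection string additionally queries the dev server via query_remote_task_templates. The gating rule, restated as a standalone predicate (illustrative only, not code from this diff):

    // Illustrative predicate: should the picker ask the host for remote task templates?
    fn wants_remote_templates(is_local: bool, has_ssh_connection_string: bool) -> bool {
        // Mirrors the `is_remote() && ssh_connection_string.is_none()` early-out above:
        // local projects never query remotely; remote ones only do once ssh is configured.
        !is_local && has_ssh_connection_string
    }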
@@ -534,6 +568,19 @@ impl PickerDelegate for TasksModalDelegate {
}
}
+fn string_match_candidates<'a>(
+ candidates: impl Iterator<Item = &'a (TaskSourceKind, ResolvedTask)> + 'a,
+) -> Vec<StringMatchCandidate> {
+ candidates
+ .enumerate()
+ .map(|(index, (_, candidate))| StringMatchCandidate {
+ id: index,
+ char_bag: candidate.resolved_label.chars().collect(),
+ string: candidate.display_label().to_owned(),
+ })
+ .collect()
+}
+
#[cfg(test)]
mod tests {
use std::{path::PathBuf, sync::Arc};
@@ -1,46 +1,9 @@
-use std::path::PathBuf;
-
use project::TaskSourceKind;
use task::{ResolvedTask, TaskContext, TaskTemplate};
-use ui::{ViewContext, WindowContext};
+use ui::ViewContext;
use crate::Workspace;
-pub fn task_cwd(workspace: &Workspace, cx: &mut WindowContext) -> anyhow::Result<Option<PathBuf>> {
- let project = workspace.project().read(cx);
- let available_worktrees = project
- .worktrees()
- .filter(|worktree| {
- let worktree = worktree.read(cx);
- worktree.is_visible()
- && worktree.is_local()
- && worktree.root_entry().map_or(false, |e| e.is_dir())
- })
- .collect::<Vec<_>>();
- let cwd = match available_worktrees.len() {
- 0 => None,
- 1 => Some(available_worktrees[0].read(cx).abs_path()),
- _ => {
- let cwd_for_active_entry = project.active_entry().and_then(|entry_id| {
- available_worktrees.into_iter().find_map(|worktree| {
- let worktree = worktree.read(cx);
- if worktree.contains_entry(entry_id) {
- Some(worktree.abs_path())
- } else {
- None
- }
- })
- });
- anyhow::ensure!(
- cwd_for_active_entry.is_some(),
- "Cannot determine task cwd for multiple worktrees"
- );
- cwd_for_active_entry
- }
- };
- Ok(cwd.map(|path| path.to_path_buf()))
-}
-
pub fn schedule_task(
workspace: &Workspace,
task_source_kind: TaskSourceKind,
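The task_cwd helper removed above used to pick a working directory in the workspace crate: the single visible local worktree, or, when there were several, the one containing the active entry. Judging by the diffstat, that responsibility presumably moves behind the project while the context is built, and callers read the directory off the resolved TaskContext instead, as the tests earlier in this excerpt show. A consumption sketch (the cwd field's PathBuf-like type is inferred from those tests):

    // Sketch: consume the working directory from TaskContext rather than recomputing it.
    if let Some(cwd) = task_context.cwd.as_ref() {
        println!("task will run in {}", cwd.display());
    }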
@@ -163,7 +163,9 @@ pub fn initialize_workspace(app_state: Arc<AppState>, cx: &mut AppContext) {
});
let project = workspace.project().clone();
- if project.read(cx).is_local() {
+ if project.update(cx, |project, cx| {
+ project.is_local() || project.ssh_connection_string(cx).is_some()
+ }) {
project.update(cx, |project, cx| {
let fs = app_state.fs.clone();
project.task_inventory().update(cx, |inventory, cx| {
@@ -171,7 +173,7 @@ pub fn initialize_workspace(app_state: Arc<AppState>, cx: &mut AppContext) {
watch_config_file(&cx.background_executor(), fs, paths::TASKS.clone());
inventory.add_source(
TaskSourceKind::AbsPath {
- id_base: "global_tasks",
+ id_base: "global_tasks".into(),
abs_path: paths::TASKS.clone(),
},
|tx, cx| StaticSource::new(TrackedFile::new(tasks_file_rx, tx, cx)),
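The final hunks wire this up in crates/zed/src/zed.rs: global task sources are now initialized when the project is local or when a remote project exposes an ssh connection string, and TaskSourceKind::AbsPath takes an owned id_base (hence the added .into()), presumably so identifiers no longer have to be 'static string literals. A sketch of what the owned field permits (the runtime-built identifier is hypothetical, not from this diff):

    // Hypothetical: an id_base built at runtime now also works, since the field
    // accepts anything convertible into the owned string type it stores.
    let kind = TaskSourceKind::AbsPath {
        id_base: format!("global_tasks_{profile_name}").into(), // `profile_name` is a placeholder
        abs_path: paths::TASKS.clone(),
    };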