Detailed changes
@@ -194,11 +194,12 @@ impl NewProcessModal {
return Ok(());
};
- let (used_tasks, current_resolved_tasks) =
- task_inventory.update(cx, |task_inventory, cx| {
+ let (used_tasks, current_resolved_tasks) = task_inventory
+ .update(cx, |task_inventory, cx| {
task_inventory
- .used_and_current_resolved_tasks(&task_contexts, cx)
- })?;
+ .used_and_current_resolved_tasks(task_contexts.clone(), cx)
+ })?
+ .await;
debug_picker
.update_in(cx, |picker, window, cx| {
@@ -824,14 +824,14 @@ impl RunningState {
let config_is_valid = request_type.is_ok();
let build_output = if let Some(build) = build {
- let (task, locator_name) = match build {
+ let (task_template, locator_name) = match build {
BuildTaskDefinition::Template {
task_template,
locator_name,
} => (task_template, locator_name),
BuildTaskDefinition::ByName(ref label) => {
- let Some(task) = task_store.update(cx, |this, cx| {
- this.task_inventory().and_then(|inventory| {
+ let task = task_store.update(cx, |this, cx| {
+ this.task_inventory().map(|inventory| {
inventory.read(cx).task_template_by_label(
buffer,
worktree_id,
@@ -839,14 +839,15 @@ impl RunningState {
cx,
)
})
- })?
- else {
- anyhow::bail!("Couldn't find task template for {:?}", build)
- };
+ })?;
+ let task = match task {
+ Some(task) => task.await,
+ None => None,
+ }.with_context(|| format!("Couldn't find task template for {build:?}"))?;
(task, None)
}
};
- let Some(task) = task.resolve_task("debug-build-task", &task_context) else {
+ let Some(task) = task_template.resolve_task("debug-build-task", &task_context) else {
anyhow::bail!("Could not resolve task variables within a debug scenario");
};
@@ -14038,7 +14038,8 @@ impl Editor {
prefer_lsp && !lsp_tasks_by_rows.is_empty(),
new_rows,
cx.clone(),
- );
+ )
+ .await;
editor
.update(cx, |editor, _| {
editor.clear_tasks();
@@ -14068,35 +14069,40 @@ impl Editor {
snapshot: DisplaySnapshot,
prefer_lsp: bool,
runnable_ranges: Vec<RunnableRange>,
- mut cx: AsyncWindowContext,
- ) -> Vec<((BufferId, BufferRow), RunnableTasks)> {
- runnable_ranges
- .into_iter()
- .filter_map(|mut runnable| {
- let mut tasks = cx
+ cx: AsyncWindowContext,
+ ) -> Task<Vec<((BufferId, BufferRow), RunnableTasks)>> {
+ cx.spawn(async move |cx| {
+ let mut runnable_rows = Vec::with_capacity(runnable_ranges.len());
+ for mut runnable in runnable_ranges {
+ let Some(tasks) = cx
.update(|_, cx| Self::templates_with_tags(&project, &mut runnable.runnable, cx))
- .ok()?;
+ .ok()
+ else {
+ continue;
+ };
+ let mut tasks = tasks.await;
+
if prefer_lsp {
tasks.retain(|(task_kind, _)| {
!matches!(task_kind, TaskSourceKind::Language { .. })
});
}
if tasks.is_empty() {
- return None;
+ continue;
}
let point = runnable.run_range.start.to_point(&snapshot.buffer_snapshot);
-
- let row = snapshot
+ let Some(row) = snapshot
.buffer_snapshot
- .buffer_line_for_row(MultiBufferRow(point.row))?
- .1
- .start
- .row;
+ .buffer_line_for_row(MultiBufferRow(point.row))
+ .map(|(_, range)| range.start.row)
+ else {
+ continue;
+ };
let context_range =
BufferOffset(runnable.full_range.start)..BufferOffset(runnable.full_range.end);
- Some((
+ runnable_rows.push((
(runnable.buffer_id, row),
RunnableTasks {
templates: tasks,
@@ -14107,16 +14113,17 @@ impl Editor {
column: point.column,
extra_variables: runnable.extra_captures,
},
- ))
- })
- .collect()
+ ));
+ }
+ runnable_rows
+ })
}
fn templates_with_tags(
project: &Entity<Project>,
runnable: &mut Runnable,
cx: &mut App,
- ) -> Vec<(TaskSourceKind, TaskTemplate)> {
+ ) -> Task<Vec<(TaskSourceKind, TaskTemplate)>> {
let (inventory, worktree_id, file) = project.read_with(cx, |project, cx| {
let (worktree_id, file) = project
.buffer_for_id(runnable.buffer, cx)
@@ -14131,39 +14138,40 @@ impl Editor {
)
});
- let mut templates_with_tags = mem::take(&mut runnable.tags)
- .into_iter()
- .flat_map(|RunnableTag(tag)| {
- inventory
- .as_ref()
- .into_iter()
- .flat_map(|inventory| {
- inventory.read(cx).list_tasks(
- file.clone(),
- Some(runnable.language.clone()),
- worktree_id,
- cx,
- )
- })
- .filter(move |(_, template)| {
- template.tags.iter().any(|source_tag| source_tag == &tag)
- })
- })
- .sorted_by_key(|(kind, _)| kind.to_owned())
- .collect::<Vec<_>>();
- if let Some((leading_tag_source, _)) = templates_with_tags.first() {
- // Strongest source wins; if we have worktree tag binding, prefer that to
- // global and language bindings;
- // if we have a global binding, prefer that to language binding.
- let first_mismatch = templates_with_tags
- .iter()
- .position(|(tag_source, _)| tag_source != leading_tag_source);
- if let Some(index) = first_mismatch {
- templates_with_tags.truncate(index);
+ let tags = mem::take(&mut runnable.tags);
+ let language = runnable.language.clone();
+ cx.spawn(async move |cx| {
+ let mut templates_with_tags = Vec::new();
+ if let Some(inventory) = inventory {
+ for RunnableTag(tag) in tags {
+ let Ok(new_tasks) = inventory.update(cx, |inventory, cx| {
+ inventory.list_tasks(file.clone(), Some(language.clone()), worktree_id, cx)
+ }) else {
+ return templates_with_tags;
+ };
+ templates_with_tags.extend(new_tasks.await.into_iter().filter(
+ move |(_, template)| {
+ template.tags.iter().any(|source_tag| source_tag == &tag)
+ },
+ ));
+ }
}
- }
+ templates_with_tags.sort_by_key(|(kind, _)| kind.to_owned());
- templates_with_tags
+ if let Some((leading_tag_source, _)) = templates_with_tags.first() {
+ // Strongest source wins; if we have worktree tag binding, prefer that to
+ // global and language bindings;
+ // if we have a global binding, prefer that to language binding.
+ let first_mismatch = templates_with_tags
+ .iter()
+ .position(|(tag_source, _)| tag_source != leading_tag_source);
+ if let Some(index) = first_mismatch {
+ templates_with_tags.truncate(index);
+ }
+ }
+
+ templates_with_tags
+ })
}
pub fn move_to_enclosing_bracket(
@@ -1,6 +1,6 @@
use std::{ops::Range, path::PathBuf, sync::Arc};
-use crate::{LanguageToolchainStore, Location, Runnable};
+use crate::{File, LanguageToolchainStore, Location, Runnable};
use anyhow::Result;
use collections::HashMap;
@@ -39,10 +39,11 @@ pub trait ContextProvider: Send + Sync {
/// Provides all tasks, associated with the current language.
fn associated_tasks(
&self,
- _: Option<Arc<dyn crate::File>>,
- _cx: &App,
- ) -> Option<TaskTemplates> {
- None
+ _: Arc<dyn Fs>,
+ _: Option<Arc<dyn File>>,
+ _: &App,
+ ) -> Task<Option<TaskTemplates>> {
+ Task::ready(None)
}
/// A language server name, that can return tasks using LSP (ext) for this language.
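With this trait change, associated_tasks receives the Fs handle and returns a gpui Task, so providers that need to read the file system (like the TypeScript provider further down) can do so asynchronously, while purely static providers simply wrap their templates in Task::ready. A minimal sketch of a synchronous provider under the new signature (MyLangContextProvider and its template are hypothetical, not part of this patch):

impl ContextProvider for MyLangContextProvider {
    fn associated_tasks(
        &self,
        _fs: Arc<dyn Fs>,
        _file: Option<Arc<dyn File>>,
        _cx: &App,
    ) -> Task<Option<TaskTemplates>> {
        // No file-system access needed, so the templates can be returned immediately.
        Task::ready(Some(TaskTemplates(vec![TaskTemplate {
            label: "my-lang: build".to_owned(),
            command: "my-lang".to_owned(),
            args: vec!["build".to_owned()],
            ..TaskTemplate::default()
        }])))
    }
}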
@@ -510,9 +510,10 @@ impl ContextProvider for GoContextProvider {
fn associated_tasks(
&self,
- _: Option<Arc<dyn language::File>>,
+ _: Arc<dyn Fs>,
+ _: Option<Arc<dyn File>>,
_: &App,
- ) -> Option<TaskTemplates> {
+ ) -> Task<Option<TaskTemplates>> {
let package_cwd = if GO_PACKAGE_TASK_VARIABLE.template_value() == "." {
None
} else {
@@ -520,7 +521,7 @@ impl ContextProvider for GoContextProvider {
};
let module_cwd = Some(GO_MODULE_ROOT_TASK_VARIABLE.template_value());
- Some(TaskTemplates(vec![
+ Task::ready(Some(TaskTemplates(vec![
TaskTemplate {
label: format!(
"go test {} -run {}",
@@ -631,7 +632,7 @@ impl ContextProvider for GoContextProvider {
cwd: module_cwd.clone(),
..TaskTemplate::default()
},
- ]))
+ ])))
}
}
@@ -481,9 +481,10 @@ impl ContextProvider for PythonContextProvider {
fn associated_tasks(
&self,
+ _: Arc<dyn Fs>,
file: Option<Arc<dyn language::File>>,
cx: &App,
- ) -> Option<TaskTemplates> {
+ ) -> Task<Option<TaskTemplates>> {
let test_runner = selected_test_runner(file.as_ref(), cx);
let mut tasks = vec![
@@ -587,7 +588,7 @@ impl ContextProvider for PythonContextProvider {
}
});
- Some(TaskTemplates(tasks))
+ Task::ready(Some(TaskTemplates(tasks)))
}
}
@@ -8,6 +8,7 @@ use http_client::github::AssetKind;
use http_client::github::{GitHubLspBinaryVersion, latest_github_release};
pub use language::*;
use lsp::{InitializeParams, LanguageServerBinary};
+use project::Fs;
use project::lsp_store::rust_analyzer_ext::CARGO_DIAGNOSTICS_SOURCE_NAME;
use project::project_settings::ProjectSettings;
use regex::Regex;
@@ -628,9 +629,10 @@ impl ContextProvider for RustContextProvider {
fn associated_tasks(
&self,
+ _: Arc<dyn Fs>,
file: Option<Arc<dyn language::File>>,
cx: &App,
- ) -> Option<TaskTemplates> {
+ ) -> Task<Option<TaskTemplates>> {
const DEFAULT_RUN_NAME_STR: &str = "RUST_DEFAULT_PACKAGE_RUN";
const CUSTOM_TARGET_DIR: &str = "RUST_TARGET_DIR";
@@ -798,7 +800,7 @@ impl ContextProvider for RustContextProvider {
.collect();
}
- Some(TaskTemplates(task_templates))
+ Task::ready(Some(TaskTemplates(task_templates)))
}
fn lsp_task_source(&self) -> Option<LanguageServerName> {
@@ -4,10 +4,12 @@ use async_tar::Archive;
use async_trait::async_trait;
use chrono::{DateTime, Local};
use collections::HashMap;
+use futures::future::join_all;
use gpui::{App, AppContext, AsyncApp, Task};
use http_client::github::{AssetKind, GitHubLspBinaryVersion, build_asset_url};
use language::{
- ContextLocation, ContextProvider, File, LanguageToolchainStore, LspAdapter, LspAdapterDelegate,
+ ContextLocation, ContextProvider, File, LanguageToolchainStore, LocalFile, LspAdapter,
+ LspAdapterDelegate,
};
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerName};
use node_runtime::NodeRuntime;
@@ -17,11 +19,12 @@ use smol::{fs, io::BufReader, lock::RwLock, stream::StreamExt};
use std::{
any::Any,
borrow::Cow,
+ collections::BTreeSet,
ffi::OsString,
path::{Path, PathBuf},
sync::Arc,
};
-use task::{TaskTemplate, TaskTemplates, TaskVariables, VariableName};
+use task::{TaskTemplate, TaskTemplates, VariableName};
use util::archive::extract_zip;
use util::merge_json_value_into;
use util::{ResultExt, fs::remove_matching, maybe};
@@ -32,23 +35,12 @@ pub(crate) struct TypeScriptContextProvider {
const TYPESCRIPT_RUNNER_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("TYPESCRIPT_RUNNER"));
-const TYPESCRIPT_JEST_TASK_VARIABLE: VariableName =
- VariableName::Custom(Cow::Borrowed("TYPESCRIPT_JEST"));
+
const TYPESCRIPT_JEST_TEST_NAME_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("TYPESCRIPT_JEST_TEST_NAME"));
-const TYPESCRIPT_MOCHA_TASK_VARIABLE: VariableName =
- VariableName::Custom(Cow::Borrowed("TYPESCRIPT_MOCHA"));
-const TYPESCRIPT_VITEST_TASK_VARIABLE: VariableName =
- VariableName::Custom(Cow::Borrowed("TYPESCRIPT_VITEST"));
const TYPESCRIPT_VITEST_TEST_NAME_VARIABLE: VariableName =
VariableName::Custom(Cow::Borrowed("TYPESCRIPT_VITEST_TEST_NAME"));
-const TYPESCRIPT_JASMINE_TASK_VARIABLE: VariableName =
- VariableName::Custom(Cow::Borrowed("TYPESCRIPT_JASMINE"));
-const TYPESCRIPT_BUILD_SCRIPT_TASK_VARIABLE: VariableName =
- VariableName::Custom(Cow::Borrowed("TYPESCRIPT_BUILD_SCRIPT"));
-const TYPESCRIPT_TEST_SCRIPT_TASK_VARIABLE: VariableName =
- VariableName::Custom(Cow::Borrowed("TYPESCRIPT_TEST_SCRIPT"));
#[derive(Clone, Default)]
struct PackageJsonContents(Arc<RwLock<HashMap<PathBuf, PackageJson>>>);
@@ -58,36 +50,21 @@ struct PackageJson {
data: PackageJsonData,
}
-#[derive(Clone, Copy, Default)]
+#[derive(Clone, Default)]
struct PackageJsonData {
jest: bool,
mocha: bool,
vitest: bool,
jasmine: bool,
- build_script: bool,
- test_script: bool,
- runner: Runner,
-}
-
-#[derive(Clone, Copy, Default)]
-enum Runner {
- #[default]
- Npm,
- Yarn,
- Pnpm,
+ scripts: BTreeSet<String>,
+ package_manager: Option<&'static str>,
}
impl PackageJsonData {
- async fn new(
- package_json: HashMap<String, Value>,
- worktree_root: PathBuf,
- fs: Arc<dyn Fs>,
- ) -> Self {
- let mut build_script = false;
- let mut test_script = false;
- if let Some(serde_json::Value::Object(scripts)) = package_json.get("scripts") {
- build_script |= scripts.contains_key("build");
- test_script |= scripts.contains_key("test");
+ fn new(package_json: HashMap<String, Value>) -> Self {
+ let mut scripts = BTreeSet::new();
+ if let Some(serde_json::Value::Object(package_json_scripts)) = package_json.get("scripts") {
+ scripts.extend(package_json_scripts.keys().cloned());
}
let mut jest = false;
@@ -108,262 +85,351 @@ impl PackageJsonData {
jasmine |= dev_dependencies.contains_key("jasmine");
}
- let mut runner = package_json
+ let package_manager = package_json
.get("packageManager")
.and_then(|value| value.as_str())
.and_then(|value| {
if value.starts_with("pnpm") {
- Some(Runner::Pnpm)
+ Some("pnpm")
} else if value.starts_with("yarn") {
- Some(Runner::Yarn)
+ Some("yarn")
} else if value.starts_with("npm") {
- Some(Runner::Npm)
+ Some("npm")
} else {
None
}
});
- if runner.is_none() {
- let detected_runner = detect_package_manager(&fs, &worktree_root).await;
- runner = Some(detected_runner);
- }
-
Self {
jest,
mocha,
vitest,
jasmine,
- build_script,
- test_script,
- runner: runner.unwrap(),
+ scripts,
+ package_manager,
}
}
- fn fill_variables(&self, variables: &mut TaskVariables) {
- let runner = match self.runner {
- Runner::Npm => "npm",
- Runner::Pnpm => "pnpm",
- Runner::Yarn => "yarn",
- };
- variables.insert(TYPESCRIPT_RUNNER_VARIABLE, runner.to_owned());
+ fn merge(&mut self, other: Self) {
+ self.jest |= other.jest;
+ self.mocha |= other.mocha;
+ self.vitest |= other.vitest;
+ self.jasmine |= other.jasmine;
+ self.scripts.extend(other.scripts);
+ }
+ fn fill_task_templates(&self, task_templates: &mut TaskTemplates) {
if self.jest {
- variables.insert(TYPESCRIPT_JEST_TASK_VARIABLE, "jest".to_owned());
- }
- if self.mocha {
- variables.insert(TYPESCRIPT_MOCHA_TASK_VARIABLE, "mocha".to_owned());
+ task_templates.0.push(TaskTemplate {
+ label: "jest file test".to_owned(),
+ command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
+ args: vec![
+ "jest".to_owned(),
+ VariableName::RelativeFile.template_value(),
+ ],
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
+ ..TaskTemplate::default()
+ });
+ task_templates.0.push(TaskTemplate {
+ label: format!("jest test {}", VariableName::Symbol.template_value()),
+ command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
+ args: vec![
+ "jest".to_owned(),
+ "--testNamePattern".to_owned(),
+ format!(
+ "\"{}\"",
+ TYPESCRIPT_JEST_TEST_NAME_VARIABLE.template_value()
+ ),
+ VariableName::RelativeFile.template_value(),
+ ],
+ tags: vec![
+ "ts-test".to_owned(),
+ "js-test".to_owned(),
+ "tsx-test".to_owned(),
+ ],
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
+ ..TaskTemplate::default()
+ });
}
+
if self.vitest {
- variables.insert(TYPESCRIPT_VITEST_TASK_VARIABLE, "vitest".to_owned());
+ task_templates.0.push(TaskTemplate {
+ label: format!("{} file test", "vitest".to_owned()),
+ command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
+ args: vec![
+ "vitest".to_owned(),
+ "run".to_owned(),
+ VariableName::RelativeFile.template_value(),
+ ],
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
+ ..TaskTemplate::default()
+ });
+ task_templates.0.push(TaskTemplate {
+ label: format!(
+ "{} test {}",
+ "vitest".to_owned(),
+ VariableName::Symbol.template_value(),
+ ),
+ command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
+ args: vec![
+ "vitest".to_owned(),
+ "run".to_owned(),
+ "--testNamePattern".to_owned(),
+ format!("\"{}\"", "vitest".to_owned()),
+ VariableName::RelativeFile.template_value(),
+ ],
+ tags: vec![
+ "ts-test".to_owned(),
+ "js-test".to_owned(),
+ "tsx-test".to_owned(),
+ ],
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
+ ..TaskTemplate::default()
+ });
}
- if self.jasmine {
- variables.insert(TYPESCRIPT_JASMINE_TASK_VARIABLE, "jasmine".to_owned());
+
+ if self.mocha {
+ task_templates.0.push(TaskTemplate {
+ label: format!("{} file test", "mocha".to_owned()),
+ command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
+ args: vec![
+ "mocha".to_owned(),
+ VariableName::RelativeFile.template_value(),
+ ],
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
+ ..TaskTemplate::default()
+ });
+ task_templates.0.push(TaskTemplate {
+ label: format!(
+ "{} test {}",
+ "mocha".to_owned(),
+ VariableName::Symbol.template_value(),
+ ),
+ command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
+ args: vec![
+ "mocha".to_owned(),
+ "--grep".to_owned(),
+ format!("\"{}\"", VariableName::Symbol.template_value()),
+ VariableName::RelativeFile.template_value(),
+ ],
+ tags: vec![
+ "ts-test".to_owned(),
+ "js-test".to_owned(),
+ "tsx-test".to_owned(),
+ ],
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
+ ..TaskTemplate::default()
+ });
}
- if self.build_script {
- variables.insert(TYPESCRIPT_BUILD_SCRIPT_TASK_VARIABLE, "build".to_owned());
+
+ if self.jasmine {
+ task_templates.0.push(TaskTemplate {
+ label: format!("{} file test", "jasmine".to_owned()),
+ command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
+ args: vec![
+ "jasmine".to_owned(),
+ VariableName::RelativeFile.template_value(),
+ ],
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
+ ..TaskTemplate::default()
+ });
+ task_templates.0.push(TaskTemplate {
+ label: format!(
+ "{} test {}",
+ "jasmine".to_owned(),
+ VariableName::Symbol.template_value(),
+ ),
+ command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
+ args: vec![
+ "jasmine".to_owned(),
+ format!("--filter={}", VariableName::Symbol.template_value()),
+ VariableName::RelativeFile.template_value(),
+ ],
+ tags: vec![
+ "ts-test".to_owned(),
+ "js-test".to_owned(),
+ "tsx-test".to_owned(),
+ ],
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
+ ..TaskTemplate::default()
+ });
}
- if self.test_script {
- variables.insert(TYPESCRIPT_TEST_SCRIPT_TASK_VARIABLE, "test".to_owned());
+
+ for script in &self.scripts {
+ task_templates.0.push(TaskTemplate {
+ label: format!("package.json > {script}",),
+ command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
+ args: vec![
+ "--prefix".to_owned(),
+ VariableName::WorktreeRoot.template_value(),
+ "run".to_owned(),
+ script.to_owned(),
+ ],
+ tags: vec!["package-script".into()],
+ cwd: Some(VariableName::WorktreeRoot.template_value()),
+ ..TaskTemplate::default()
+ });
}
}
}
impl TypeScriptContextProvider {
pub fn new() -> Self {
- TypeScriptContextProvider {
+ Self {
last_package_json: PackageJsonContents::default(),
}
}
-}
-impl ContextProvider for TypeScriptContextProvider {
- fn associated_tasks(&self, _: Option<Arc<dyn File>>, _: &App) -> Option<TaskTemplates> {
- let mut task_templates = TaskTemplates(Vec::new());
-
- // Jest tasks
- task_templates.0.push(TaskTemplate {
- label: format!(
- "{} file test",
- TYPESCRIPT_JEST_TASK_VARIABLE.template_value()
- ),
- command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
- args: vec![
- TYPESCRIPT_JEST_TASK_VARIABLE.template_value(),
- VariableName::RelativeFile.template_value(),
- ],
- cwd: Some(VariableName::WorktreeRoot.template_value()),
- ..TaskTemplate::default()
- });
- task_templates.0.push(TaskTemplate {
- label: format!(
- "{} test {}",
- TYPESCRIPT_JEST_TASK_VARIABLE.template_value(),
- VariableName::Symbol.template_value(),
- ),
- command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
- args: vec![
- TYPESCRIPT_JEST_TASK_VARIABLE.template_value(),
- "--testNamePattern".to_owned(),
- format!(
- "\"{}\"",
- TYPESCRIPT_JEST_TEST_NAME_VARIABLE.template_value()
- ),
- VariableName::RelativeFile.template_value(),
- ],
- tags: vec![
- "ts-test".to_owned(),
- "js-test".to_owned(),
- "tsx-test".to_owned(),
- ],
- cwd: Some(VariableName::WorktreeRoot.template_value()),
- ..TaskTemplate::default()
- });
+ fn combined_package_json_data(
+ &self,
+ fs: Arc<dyn Fs>,
+ worktree_root: &Path,
+ file_abs_path: &Path,
+ cx: &App,
+ ) -> Task<anyhow::Result<PackageJsonData>> {
+ let Some(file_relative_path) = file_abs_path.strip_prefix(&worktree_root).ok() else {
+ log::debug!("No package json data for off-worktree files");
+ return Task::ready(Ok(PackageJsonData::default()));
+ };
+ let new_json_data = file_relative_path
+ .ancestors()
+ .map(|path| worktree_root.join(path))
+ .map(|parent_path| {
+ self.package_json_data(&parent_path, self.last_package_json.clone(), fs.clone(), cx)
+ })
+ .collect::<Vec<_>>();
- // Vitest tasks
- task_templates.0.push(TaskTemplate {
- label: format!(
- "{} file test",
- TYPESCRIPT_VITEST_TASK_VARIABLE.template_value()
- ),
- command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
- args: vec![
- TYPESCRIPT_VITEST_TASK_VARIABLE.template_value(),
- "run".to_owned(),
- VariableName::RelativeFile.template_value(),
- ],
- cwd: Some(VariableName::WorktreeRoot.template_value()),
- ..TaskTemplate::default()
- });
- task_templates.0.push(TaskTemplate {
- label: format!(
- "{} test {}",
- TYPESCRIPT_VITEST_TASK_VARIABLE.template_value(),
- VariableName::Symbol.template_value(),
- ),
- command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
- args: vec![
- TYPESCRIPT_VITEST_TASK_VARIABLE.template_value(),
- "run".to_owned(),
- "--testNamePattern".to_owned(),
- format!("\"{}\"", TYPESCRIPT_VITEST_TASK_VARIABLE.template_value()),
- VariableName::RelativeFile.template_value(),
- ],
- tags: vec![
- "ts-test".to_owned(),
- "js-test".to_owned(),
- "tsx-test".to_owned(),
- ],
- cwd: Some(VariableName::WorktreeRoot.template_value()),
- ..TaskTemplate::default()
- });
+ cx.background_spawn(async move {
+ let mut package_json_data = PackageJsonData::default();
+ for new_data in join_all(new_json_data).await.into_iter().flatten() {
+ package_json_data.merge(new_data);
+ }
+ Ok(package_json_data)
+ })
+ }
- // Mocha tasks
- task_templates.0.push(TaskTemplate {
- label: format!(
- "{} file test",
- TYPESCRIPT_MOCHA_TASK_VARIABLE.template_value()
- ),
- command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
- args: vec![
- TYPESCRIPT_MOCHA_TASK_VARIABLE.template_value(),
- VariableName::RelativeFile.template_value(),
- ],
- cwd: Some(VariableName::WorktreeRoot.template_value()),
- ..TaskTemplate::default()
- });
- task_templates.0.push(TaskTemplate {
- label: format!(
- "{} test {}",
- TYPESCRIPT_MOCHA_TASK_VARIABLE.template_value(),
- VariableName::Symbol.template_value(),
- ),
- command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
- args: vec![
- TYPESCRIPT_MOCHA_TASK_VARIABLE.template_value(),
- "--grep".to_owned(),
- format!("\"{}\"", VariableName::Symbol.template_value()),
- VariableName::RelativeFile.template_value(),
- ],
- tags: vec![
- "ts-test".to_owned(),
- "js-test".to_owned(),
- "tsx-test".to_owned(),
- ],
- cwd: Some(VariableName::WorktreeRoot.template_value()),
- ..TaskTemplate::default()
- });
+ fn package_json_data(
+ &self,
+ directory_path: &Path,
+ existing_package_json: PackageJsonContents,
+ fs: Arc<dyn Fs>,
+ cx: &App,
+ ) -> Task<anyhow::Result<PackageJsonData>> {
+ let package_json_path = directory_path.join("package.json");
+ let metadata_check_fs = fs.clone();
+ cx.background_spawn(async move {
+ let metadata = metadata_check_fs
+ .metadata(&package_json_path)
+ .await
+ .with_context(|| format!("getting metadata for {package_json_path:?}"))?
+ .with_context(|| format!("missing FS metadata for {package_json_path:?}"))?;
+ let mtime = DateTime::<Local>::from(metadata.mtime.timestamp_for_user());
+ let existing_data = {
+ let contents = existing_package_json.0.read().await;
+ contents
+ .get(&package_json_path)
+ .filter(|package_json| package_json.mtime == mtime)
+ .map(|package_json| package_json.data.clone())
+ };
+ match existing_data {
+ Some(existing_data) => Ok(existing_data),
+ None => {
+ let package_json_string =
+ fs.load(&package_json_path).await.with_context(|| {
+ format!("loading package.json from {package_json_path:?}")
+ })?;
+ let package_json: HashMap<String, serde_json::Value> =
+ serde_json::from_str(&package_json_string).with_context(|| {
+ format!("parsing package.json from {package_json_path:?}")
+ })?;
+ let new_data = PackageJsonData::new(package_json);
+ {
+ let mut contents = existing_package_json.0.write().await;
+ contents.insert(
+ package_json_path,
+ PackageJson {
+ mtime,
+ data: new_data.clone(),
+ },
+ );
+ }
+ Ok(new_data)
+ }
+ }
+ })
+ }
- // Jasmine tasks
- task_templates.0.push(TaskTemplate {
- label: format!(
- "{} file test",
- TYPESCRIPT_JASMINE_TASK_VARIABLE.template_value()
- ),
- command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
- args: vec![
- TYPESCRIPT_JASMINE_TASK_VARIABLE.template_value(),
- VariableName::RelativeFile.template_value(),
- ],
- cwd: Some(VariableName::WorktreeRoot.template_value()),
- ..TaskTemplate::default()
- });
- task_templates.0.push(TaskTemplate {
- label: format!(
- "{} test {}",
- TYPESCRIPT_JASMINE_TASK_VARIABLE.template_value(),
- VariableName::Symbol.template_value(),
- ),
- command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
- args: vec![
- TYPESCRIPT_JASMINE_TASK_VARIABLE.template_value(),
- format!("--filter={}", VariableName::Symbol.template_value()),
- VariableName::RelativeFile.template_value(),
- ],
- tags: vec![
- "ts-test".to_owned(),
- "js-test".to_owned(),
- "tsx-test".to_owned(),
- ],
- cwd: Some(VariableName::WorktreeRoot.template_value()),
- ..TaskTemplate::default()
- });
+ fn detect_package_manager(
+ &self,
+ worktree_root: PathBuf,
+ fs: Arc<dyn Fs>,
+ cx: &App,
+ ) -> Task<&'static str> {
+ let last_package_json = self.last_package_json.clone();
+ let package_json_data =
+ self.package_json_data(&worktree_root, last_package_json, fs.clone(), cx);
+ cx.background_spawn(async move {
+ if let Ok(package_json_data) = package_json_data.await {
+ if let Some(package_manager) = package_json_data.package_manager {
+ return package_manager;
+ }
+ }
+ if fs.is_file(&worktree_root.join("pnpm-lock.yaml")).await {
+ return "pnpm";
+ }
+ if fs.is_file(&worktree_root.join("yarn.lock")).await {
+ return "yarn";
+ }
+ "npm"
+ })
+ }
+}
+
+impl ContextProvider for TypeScriptContextProvider {
+ fn associated_tasks(
+ &self,
+ fs: Arc<dyn Fs>,
+ file: Option<Arc<dyn File>>,
+ cx: &App,
+ ) -> Task<Option<TaskTemplates>> {
+ let Some(file) = project::File::from_dyn(file.as_ref()).cloned() else {
+ return Task::ready(None);
+ };
+ let Some(worktree_root) = file.worktree.read(cx).root_dir() else {
+ return Task::ready(None);
+ };
+ let file_abs_path = file.abs_path(cx);
+ let package_json_data =
+ self.combined_package_json_data(fs.clone(), &worktree_root, &file_abs_path, cx);
- for package_json_script in [
- TYPESCRIPT_TEST_SCRIPT_TASK_VARIABLE,
- TYPESCRIPT_BUILD_SCRIPT_TASK_VARIABLE,
- ] {
+ cx.background_spawn(async move {
+ let mut task_templates = TaskTemplates(Vec::new());
task_templates.0.push(TaskTemplate {
label: format!(
- "package.json script {}",
- package_json_script.template_value()
+ "execute selection {}",
+ VariableName::SelectedText.template_value()
),
- command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
+ command: "node".to_owned(),
args: vec![
- "--prefix".to_owned(),
- VariableName::WorktreeRoot.template_value(),
- "run".to_owned(),
- package_json_script.template_value(),
+ "-e".to_owned(),
+ format!("\"{}\"", VariableName::SelectedText.template_value()),
],
- tags: vec!["package-script".into()],
- cwd: Some(VariableName::WorktreeRoot.template_value()),
..TaskTemplate::default()
});
- }
- task_templates.0.push(TaskTemplate {
- label: format!(
- "execute selection {}",
- VariableName::SelectedText.template_value()
- ),
- command: "node".to_owned(),
- args: vec![
- "-e".to_owned(),
- format!("\"{}\"", VariableName::SelectedText.template_value()),
- ],
- ..TaskTemplate::default()
- });
+ match package_json_data.await {
+ Ok(package_json) => {
+ package_json.fill_task_templates(&mut task_templates);
+ }
+ Err(e) => {
+ log::error!(
+ "Failed to read package.json for worktree {file_abs_path:?}: {e:#}"
+ );
+ }
+ }
- Some(task_templates)
+ Some(task_templates)
+ })
}
fn build_context(
@@ -387,91 +453,19 @@ impl ContextProvider for TypeScriptContextProvider {
);
}
- let Some((fs, worktree_root)) = location.fs.zip(location.worktree_root) else {
- return Task::ready(Ok(vars));
- };
-
- let package_json_contents = self.last_package_json.clone();
+ let task = location
+ .worktree_root
+ .zip(location.fs)
+ .map(|(worktree_root, fs)| self.detect_package_manager(worktree_root, fs, cx));
cx.background_spawn(async move {
- let variables = package_json_variables(fs, worktree_root, package_json_contents)
- .await
- .context("package.json context retrieval")
- .log_err()
- .unwrap_or_else(task::TaskVariables::default);
-
- vars.extend(variables);
-
+ if let Some(task) = task {
+ vars.insert(TYPESCRIPT_RUNNER_VARIABLE, task.await.to_owned());
+ }
Ok(vars)
})
}
}
-async fn package_json_variables(
- fs: Arc<dyn Fs>,
- worktree_root: PathBuf,
- package_json_contents: PackageJsonContents,
-) -> anyhow::Result<task::TaskVariables> {
- let package_json_path = worktree_root.join("package.json");
- let metadata = fs
- .metadata(&package_json_path)
- .await
- .with_context(|| format!("getting metadata for {package_json_path:?}"))?
- .with_context(|| format!("missing FS metadata for {package_json_path:?}"))?;
- let mtime = DateTime::<Local>::from(metadata.mtime.timestamp_for_user());
- let existing_data = {
- let contents = package_json_contents.0.read().await;
- contents
- .get(&package_json_path)
- .filter(|package_json| package_json.mtime == mtime)
- .map(|package_json| package_json.data)
- };
-
- let mut variables = TaskVariables::default();
- if let Some(existing_data) = existing_data {
- existing_data.fill_variables(&mut variables);
- } else {
- let package_json_string = fs
- .load(&package_json_path)
- .await
- .with_context(|| format!("loading package.json from {package_json_path:?}"))?;
- let package_json: HashMap<String, serde_json::Value> =
- serde_json::from_str(&package_json_string)
- .with_context(|| format!("parsing package.json from {package_json_path:?}"))?;
-
- let new_data = PackageJsonData::new(package_json, worktree_root, fs).await;
- new_data.fill_variables(&mut variables);
- {
- let mut contents = package_json_contents.0.write().await;
- contents.insert(
- package_json_path,
- PackageJson {
- mtime,
- data: new_data,
- },
- );
- }
- }
-
- Ok(variables)
-}
-
-async fn detect_package_manager(fs: &Arc<dyn Fs>, worktree_root: &PathBuf) -> Runner {
- // Check for pnpm-lock.yaml first (pnpm)
- if fs
- .metadata(&worktree_root.join("pnpm-lock.yaml"))
- .await
- .is_ok()
- {
- return Runner::Pnpm;
- }
-
- if fs.metadata(&worktree_root.join("yarn.lock")).await.is_ok() {
- return Runner::Yarn;
- }
-
- Runner::Npm
-}
-
fn typescript_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
vec![server_path.into(), "--stdio".into()]
}
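Net effect of the TypeScript provider rewrite: rather than exporting a fixed set of TYPESCRIPT_*_TASK variables, every entry under "scripts" in each package.json found between the current file and the worktree root now becomes its own task template, and the runner variable is filled in later via detect_package_manager. Roughly, a hypothetical "build" script expands to something like the following (illustrative sketch, not part of the patch):

TaskTemplate {
    label: "package.json > build".to_owned(),
    // TYPESCRIPT_RUNNER resolves to npm, yarn, or pnpm when the task context is built.
    command: TYPESCRIPT_RUNNER_VARIABLE.template_value(),
    args: vec![
        "--prefix".to_owned(),
        VariableName::WorktreeRoot.template_value(),
        "run".to_owned(),
        "build".to_owned(),
    ],
    tags: vec!["package-script".into()],
    cwd: Some(VariableName::WorktreeRoot.template_value()),
    ..TaskTemplate::default()
}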
@@ -997,6 +997,7 @@ impl Project {
let task_store = cx.new(|cx| {
TaskStore::local(
+ fs.clone(),
buffer_store.downgrade(),
worktree_store.clone(),
toolchain_store.read(cx).as_language_toolchain_store(),
@@ -1136,6 +1137,7 @@ impl Project {
.new(|cx| ToolchainStore::remote(SSH_PROJECT_ID, ssh.read(cx).proto_client(), cx));
let task_store = cx.new(|cx| {
TaskStore::remote(
+ fs.clone(),
buffer_store.downgrade(),
worktree_store.clone(),
toolchain_store.read(cx).as_language_toolchain_store(),
@@ -1396,6 +1398,7 @@ impl Project {
let task_store = cx.new(|cx| {
if run_tasks {
TaskStore::remote(
+ fs.clone(),
buffer_store.downgrade(),
worktree_store.clone(),
Arc::new(EmptyToolchainStore),
@@ -329,6 +329,7 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
let mut task_contexts = TaskContexts::default();
task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
+ let task_contexts = Arc::new(task_contexts);
let topmost_local_task_source_kind = TaskSourceKind::Worktree {
id: worktree_id,
@@ -354,8 +355,9 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
assert_eq!(settings_a.tab_size.get(), 8);
assert_eq!(settings_b.tab_size.get(), 2);
- get_all_tasks(&project, &task_contexts, cx)
+ get_all_tasks(&project, task_contexts.clone(), cx)
})
+ .await
.into_iter()
.map(|(source_kind, task)| {
let resolved = task.resolved;
@@ -394,7 +396,8 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
);
let (_, resolved_task) = cx
- .update(|cx| get_all_tasks(&project, &task_contexts, cx))
+ .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
+ .await
.into_iter()
.find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
.expect("should have one global task");
@@ -432,7 +435,8 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
cx.run_until_parked();
let all_tasks = cx
- .update(|cx| get_all_tasks(&project, &task_contexts, cx))
+ .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
+ .await
.into_iter()
.map(|(source_kind, task)| {
let resolved = task.resolved;
@@ -519,43 +523,47 @@ async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
})
});
- let active_non_worktree_item_tasks = cx.update(|cx| {
- get_all_tasks(
- &project,
- &TaskContexts {
- active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
- active_worktree_context: None,
- other_worktree_contexts: Vec::new(),
- lsp_task_sources: HashMap::default(),
- latest_selection: None,
- },
- cx,
- )
- });
+ let active_non_worktree_item_tasks = cx
+ .update(|cx| {
+ get_all_tasks(
+ &project,
+ Arc::new(TaskContexts {
+ active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
+ active_worktree_context: None,
+ other_worktree_contexts: Vec::new(),
+ lsp_task_sources: HashMap::default(),
+ latest_selection: None,
+ }),
+ cx,
+ )
+ })
+ .await;
assert!(
active_non_worktree_item_tasks.is_empty(),
"A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
);
- let active_worktree_tasks = cx.update(|cx| {
- get_all_tasks(
- &project,
- &TaskContexts {
- active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
- active_worktree_context: Some((worktree_id, {
- let mut worktree_context = TaskContext::default();
- worktree_context
- .task_variables
- .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
- worktree_context
- })),
- other_worktree_contexts: Vec::new(),
- lsp_task_sources: HashMap::default(),
- latest_selection: None,
- },
- cx,
- )
- });
+ let active_worktree_tasks = cx
+ .update(|cx| {
+ get_all_tasks(
+ &project,
+ Arc::new(TaskContexts {
+ active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
+ active_worktree_context: Some((worktree_id, {
+ let mut worktree_context = TaskContext::default();
+ worktree_context
+ .task_variables
+ .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
+ worktree_context
+ })),
+ other_worktree_contexts: Vec::new(),
+ lsp_task_sources: HashMap::default(),
+ latest_selection: None,
+ }),
+ cx,
+ )
+ })
+ .await;
assert_eq!(
active_worktree_tasks
.into_iter()
@@ -8851,20 +8859,22 @@ fn tsx_lang() -> Arc<Language> {
fn get_all_tasks(
project: &Entity<Project>,
- task_contexts: &TaskContexts,
+ task_contexts: Arc<TaskContexts>,
cx: &mut App,
-) -> Vec<(TaskSourceKind, ResolvedTask)> {
- let (mut old, new) = project.update(cx, |project, cx| {
- project
- .task_store
- .read(cx)
- .task_inventory()
- .unwrap()
- .read(cx)
- .used_and_current_resolved_tasks(task_contexts, cx)
+) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
+ let new_tasks = project.update(cx, |project, cx| {
+ project.task_store.update(cx, |task_store, cx| {
+ task_store.task_inventory().unwrap().update(cx, |this, cx| {
+ this.used_and_current_resolved_tasks(task_contexts, cx)
+ })
+ })
});
- old.extend(new);
- old
+
+ cx.background_spawn(async move {
+ let (mut old, new) = new_tasks.await;
+ old.extend(new);
+ old
+ })
}
#[track_caller]
@@ -11,7 +11,8 @@ use std::{
use anyhow::Result;
use collections::{HashMap, HashSet, VecDeque};
use dap::DapRegistry;
-use gpui::{App, AppContext as _, Entity, SharedString, Task};
+use fs::Fs;
+use gpui::{App, AppContext as _, Context, Entity, SharedString, Task};
use itertools::Itertools;
use language::{
Buffer, ContextLocation, ContextProvider, File, Language, LanguageToolchainStore, Location,
@@ -31,14 +32,25 @@ use worktree::WorktreeId;
use crate::{task_store::TaskSettingsLocation, worktree_store::WorktreeStore};
/// Inventory tracks available tasks for a given project.
-#[derive(Debug, Default)]
pub struct Inventory {
+ fs: Arc<dyn Fs>,
last_scheduled_tasks: VecDeque<(TaskSourceKind, ResolvedTask)>,
last_scheduled_scenarios: VecDeque<DebugScenario>,
templates_from_settings: InventoryFor<TaskTemplate>,
scenarios_from_settings: InventoryFor<DebugScenario>,
}
+impl std::fmt::Debug for Inventory {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("Inventory")
+ .field("last_scheduled_tasks", &self.last_scheduled_tasks)
+ .field("last_scheduled_scenarios", &self.last_scheduled_scenarios)
+ .field("templates_from_settings", &self.templates_from_settings)
+ .field("scenarios_from_settings", &self.scenarios_from_settings)
+ .finish()
+ }
+}
+
// Helper trait for better error messages in [InventoryFor]
trait InventoryContents: Clone {
const GLOBAL_SOURCE_FILE: &'static str;
@@ -223,8 +235,14 @@ impl TaskSourceKind {
}
impl Inventory {
- pub fn new(cx: &mut App) -> Entity<Self> {
- cx.new(|_| Self::default())
+ pub fn new(fs: Arc<dyn Fs>, cx: &mut App) -> Entity<Self> {
+ cx.new(|_| Self {
+ fs,
+ last_scheduled_tasks: VecDeque::default(),
+ last_scheduled_scenarios: VecDeque::default(),
+ templates_from_settings: InventoryFor::default(),
+ scenarios_from_settings: InventoryFor::default(),
+ })
}
pub fn scenario_scheduled(&mut self, scenario: DebugScenario) {
@@ -311,7 +329,7 @@ impl Inventory {
worktree_id: Option<WorktreeId>,
label: &str,
cx: &App,
- ) -> Option<TaskTemplate> {
+ ) -> Task<Option<TaskTemplate>> {
let (buffer_worktree_id, file, language) = buffer
.map(|buffer| {
let buffer = buffer.read(cx);
@@ -324,10 +342,15 @@ impl Inventory {
})
.unwrap_or((None, None, None));
- self.list_tasks(file, language, worktree_id.or(buffer_worktree_id), cx)
- .into_iter()
- .find(|(_, template)| template.label == label)
- .map(|val| val.1)
+ let tasks = self.list_tasks(file, language, worktree_id.or(buffer_worktree_id), cx);
+ let label = label.to_owned();
+ cx.background_spawn(async move {
+ tasks
+ .await
+ .into_iter()
+ .find(|(_, template)| template.label == label)
+ .map(|val| val.1)
+ })
}
/// Pulls its task sources relevant to the worktree and the language given,
@@ -339,11 +362,13 @@ impl Inventory {
language: Option<Arc<Language>>,
worktree: Option<WorktreeId>,
cx: &App,
- ) -> Vec<(TaskSourceKind, TaskTemplate)> {
- let global_tasks = self.global_templates_from_settings();
- let worktree_tasks = worktree
+ ) -> Task<Vec<(TaskSourceKind, TaskTemplate)>> {
+ let global_tasks = self.global_templates_from_settings().collect::<Vec<_>>();
+ let fs = self.fs.clone();
+ let mut worktree_tasks = worktree
.into_iter()
- .flat_map(|worktree| self.worktree_templates_from_settings(worktree));
+ .flat_map(|worktree| self.worktree_templates_from_settings(worktree))
+ .collect::<Vec<_>>();
let task_source_kind = language.as_ref().map(|language| TaskSourceKind::Language {
name: language.name().into(),
});
@@ -353,29 +378,38 @@ impl Inventory {
.tasks
.enabled
})
- .and_then(|language| language.context_provider()?.associated_tasks(file, cx))
- .into_iter()
- .flat_map(|tasks| tasks.0.into_iter())
- .flat_map(|task| Some((task_source_kind.clone()?, task)));
-
- worktree_tasks
- .chain(language_tasks)
- .chain(global_tasks)
- .collect()
+ .and_then(|language| {
+ language
+ .context_provider()
+ .map(|provider| provider.associated_tasks(fs, file, cx))
+ });
+ cx.background_spawn(async move {
+ if let Some(t) = language_tasks {
+ worktree_tasks.extend(t.await.into_iter().flat_map(|tasks| {
+ tasks
+ .0
+ .into_iter()
+ .filter_map(|task| Some((task_source_kind.clone()?, task)))
+ }));
+ }
+ worktree_tasks.extend(global_tasks);
+ worktree_tasks
+ })
}
/// Pulls its task sources relevant to the worktree and the language given and resolves them with the [`TaskContexts`] given.
/// Joins the new resolutions with the resolved tasks that were used (spawned) before,
/// orders them so that the most recently used come first, all equally used ones are ordered so that the most specific tasks come first.
/// Deduplicates the tasks by their labels and context and splits the ordered list into two: used tasks and the rest, newly resolved tasks.
- pub fn used_and_current_resolved_tasks<'a>(
- &'a self,
- task_contexts: &'a TaskContexts,
- cx: &'a App,
- ) -> (
+ pub fn used_and_current_resolved_tasks(
+ &self,
+ task_contexts: Arc<TaskContexts>,
+ cx: &mut Context<Self>,
+ ) -> Task<(
Vec<(TaskSourceKind, ResolvedTask)>,
Vec<(TaskSourceKind, ResolvedTask)>,
- ) {
+ )> {
+ let fs = self.fs.clone();
let worktree = task_contexts.worktree();
let location = task_contexts.location();
let language = location
@@ -423,85 +457,103 @@ impl Inventory {
.collect::<Vec<_>>();
let not_used_score = post_inc(&mut lru_score);
- let global_tasks = self.global_templates_from_settings();
-
- let language_tasks = language
+ let global_tasks = self.global_templates_from_settings().collect::<Vec<_>>();
+ let associated_tasks = language
.filter(|language| {
language_settings(Some(language.name()), file.as_ref(), cx)
.tasks
.enabled
})
- .and_then(|language| language.context_provider()?.associated_tasks(file, cx))
- .into_iter()
- .flat_map(|tasks| tasks.0.into_iter())
- .flat_map(|task| Some((task_source_kind.clone()?, task)));
+ .and_then(|language| {
+ language
+ .context_provider()
+ .map(|provider| provider.associated_tasks(fs, file, cx))
+ });
let worktree_tasks = worktree
.into_iter()
.flat_map(|worktree| self.worktree_templates_from_settings(worktree))
- .chain(language_tasks)
- .chain(global_tasks);
-
- let new_resolved_tasks = worktree_tasks
- .flat_map(|(kind, task)| {
- let id_base = kind.to_id_base();
- if let TaskSourceKind::Worktree { id, .. } = &kind {
- None.or_else(|| {
- let (_, _, item_context) = task_contexts
- .active_item_context
- .as_ref()
- .filter(|(worktree_id, _, _)| Some(id) == worktree_id.as_ref())?;
- task.resolve_task(&id_base, item_context)
- })
- .or_else(|| {
- let (_, worktree_context) = task_contexts
- .active_worktree_context
- .as_ref()
- .filter(|(worktree_id, _)| id == worktree_id)?;
- task.resolve_task(&id_base, worktree_context)
- })
- .or_else(|| {
- if let TaskSourceKind::Worktree { id, .. } = &kind {
- let worktree_context = task_contexts
- .other_worktree_contexts
- .iter()
- .find(|(worktree_id, _)| worktree_id == id)
- .map(|(_, context)| context)?;
+ .collect::<Vec<_>>();
+ let task_contexts = task_contexts.clone();
+ cx.background_spawn(async move {
+ let language_tasks = if let Some(task) = associated_tasks {
+ task.await.map(|templates| {
+ templates
+ .0
+ .into_iter()
+ .flat_map(|task| Some((task_source_kind.clone()?, task)))
+ })
+ } else {
+ None
+ };
+
+ let worktree_tasks = worktree_tasks
+ .into_iter()
+ .chain(language_tasks.into_iter().flatten())
+ .chain(global_tasks);
+
+ let new_resolved_tasks = worktree_tasks
+ .flat_map(|(kind, task)| {
+ let id_base = kind.to_id_base();
+ if let TaskSourceKind::Worktree { id, .. } = &kind {
+ None.or_else(|| {
+ let (_, _, item_context) =
+ task_contexts.active_item_context.as_ref().filter(
+ |(worktree_id, _, _)| Some(id) == worktree_id.as_ref(),
+ )?;
+ task.resolve_task(&id_base, item_context)
+ })
+ .or_else(|| {
+ let (_, worktree_context) = task_contexts
+ .active_worktree_context
+ .as_ref()
+ .filter(|(worktree_id, _)| id == worktree_id)?;
task.resolve_task(&id_base, worktree_context)
- } else {
- None
- }
- })
- } else {
- None.or_else(|| {
- let (_, _, item_context) = task_contexts.active_item_context.as_ref()?;
- task.resolve_task(&id_base, item_context)
- })
- .or_else(|| {
- let (_, worktree_context) =
- task_contexts.active_worktree_context.as_ref()?;
- task.resolve_task(&id_base, worktree_context)
- })
- }
- .or_else(|| task.resolve_task(&id_base, &TaskContext::default()))
- .map(move |resolved_task| (kind.clone(), resolved_task, not_used_score))
- })
- .filter(|(_, resolved_task, _)| {
- match task_labels_to_ids.entry(resolved_task.resolved_label.clone()) {
- hash_map::Entry::Occupied(mut o) => {
- // Allow new tasks with the same label, if their context is different
- o.get_mut().insert(resolved_task.id.clone())
+ })
+ .or_else(|| {
+ if let TaskSourceKind::Worktree { id, .. } = &kind {
+ let worktree_context = task_contexts
+ .other_worktree_contexts
+ .iter()
+ .find(|(worktree_id, _)| worktree_id == id)
+ .map(|(_, context)| context)?;
+ task.resolve_task(&id_base, worktree_context)
+ } else {
+ None
+ }
+ })
+ } else {
+ None.or_else(|| {
+ let (_, _, item_context) =
+ task_contexts.active_item_context.as_ref()?;
+ task.resolve_task(&id_base, item_context)
+ })
+ .or_else(|| {
+ let (_, worktree_context) =
+ task_contexts.active_worktree_context.as_ref()?;
+ task.resolve_task(&id_base, worktree_context)
+ })
}
- hash_map::Entry::Vacant(v) => {
- v.insert(HashSet::from_iter(Some(resolved_task.id.clone())));
- true
+ .or_else(|| task.resolve_task(&id_base, &TaskContext::default()))
+ .map(move |resolved_task| (kind.clone(), resolved_task, not_used_score))
+ })
+ .filter(|(_, resolved_task, _)| {
+ match task_labels_to_ids.entry(resolved_task.resolved_label.clone()) {
+ hash_map::Entry::Occupied(mut o) => {
+ // Allow new tasks with the same label, if their context is different
+ o.get_mut().insert(resolved_task.id.clone())
+ }
+ hash_map::Entry::Vacant(v) => {
+ v.insert(HashSet::from_iter(Some(resolved_task.id.clone())));
+ true
+ }
}
- }
- })
- .sorted_unstable_by(task_lru_comparator)
- .map(|(kind, task, _)| (kind, task))
- .collect::<Vec<_>>();
+ })
+ .sorted_unstable_by(task_lru_comparator)
+ .map(|(kind, task, _)| (kind, task))
+ .collect::<Vec<_>>();
- (previously_spawned_tasks, new_resolved_tasks)
+ (previously_spawned_tasks, new_resolved_tasks)
+ })
}
/// Returns the last scheduled task by task_id if provided.
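Callers of used_and_current_resolved_tasks now receive a Task and resolve it off the main thread; the updated call sites (tasks modal, project tests) follow roughly this pattern (sketch, not part of the diff):

let tasks = inventory.update(cx, |inventory, cx| {
    inventory.used_and_current_resolved_tasks(task_contexts.clone(), cx)
});
cx.background_spawn(async move {
    // Previously spawned tasks come first, then the freshly resolved ones.
    let (used, current) = tasks.await;
    used.into_iter().chain(current).collect::<Vec<_>>()
})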
@@ -746,7 +798,7 @@ fn task_variables_preference(task: &ResolvedTask) -> Reverse<usize> {
#[cfg(test)]
mod test_inventory {
- use gpui::{Entity, TestAppContext};
+ use gpui::{AppContext as _, Entity, Task, TestAppContext};
use itertools::Itertools;
use task::TaskContext;
use worktree::WorktreeId;
@@ -759,10 +811,13 @@ mod test_inventory {
inventory: &Entity<Inventory>,
worktree: Option<WorktreeId>,
cx: &mut TestAppContext,
- ) -> Vec<String> {
- inventory.update(cx, |inventory, cx| {
- inventory
- .list_tasks(None, None, worktree, cx)
+ ) -> Task<Vec<String>> {
+ let new_tasks = inventory.update(cx, |inventory, cx| {
+ inventory.list_tasks(None, None, worktree, cx)
+ });
+ cx.background_spawn(async move {
+ new_tasks
+ .await
.into_iter()
.map(|(_, task)| task.label)
.sorted()
@@ -774,20 +829,33 @@ mod test_inventory {
inventory: &Entity<Inventory>,
task_name: &str,
cx: &mut TestAppContext,
- ) {
- inventory.update(cx, |inventory, cx| {
- let (task_source_kind, task) = inventory
- .list_tasks(None, None, None, cx)
+ ) -> Task<()> {
+ let tasks = inventory.update(cx, |inventory, cx| {
+ inventory.list_tasks(None, None, None, cx)
+ });
+
+ let task_name = task_name.to_owned();
+ let inventory = inventory.clone();
+ cx.spawn(|mut cx| async move {
+ let (task_source_kind, task) = tasks
+ .await
.into_iter()
.find(|(_, task)| task.label == task_name)
.unwrap_or_else(|| panic!("Failed to find task with name {task_name}"));
+
let id_base = task_source_kind.to_id_base();
- inventory.task_scheduled(
- task_source_kind.clone(),
- task.resolve_task(&id_base, &TaskContext::default())
- .unwrap_or_else(|| panic!("Failed to resolve task with name {task_name}")),
- );
- });
+ inventory
+ .update(&mut cx, |inventory, _| {
+ inventory.task_scheduled(
+ task_source_kind.clone(),
+ task.resolve_task(&id_base, &TaskContext::default())
+ .unwrap_or_else(|| {
+ panic!("Failed to resolve task with name {task_name}")
+ }),
+ )
+ })
+ .unwrap();
+ })
}
pub(super) fn register_worktree_task_used(
@@ -795,20 +863,32 @@ mod test_inventory {
worktree_id: WorktreeId,
task_name: &str,
cx: &mut TestAppContext,
- ) {
- inventory.update(cx, |inventory, cx| {
- let (task_source_kind, task) = inventory
- .list_tasks(None, None, Some(worktree_id), cx)
+ ) -> Task<()> {
+ let tasks = inventory.update(cx, |inventory, cx| {
+ inventory.list_tasks(None, None, Some(worktree_id), cx)
+ });
+
+ let inventory = inventory.clone();
+ let task_name = task_name.to_owned();
+ cx.spawn(|mut cx| async move {
+ let (task_source_kind, task) = tasks
+ .await
.into_iter()
.find(|(_, task)| task.label == task_name)
.unwrap_or_else(|| panic!("Failed to find task with name {task_name}"));
let id_base = task_source_kind.to_id_base();
- inventory.task_scheduled(
- task_source_kind.clone(),
- task.resolve_task(&id_base, &TaskContext::default())
- .unwrap_or_else(|| panic!("Failed to resolve task with name {task_name}")),
- );
- });
+ inventory
+ .update(&mut cx, |inventory, _| {
+ inventory.task_scheduled(
+ task_source_kind.clone(),
+ task.resolve_task(&id_base, &TaskContext::default())
+ .unwrap_or_else(|| {
+ panic!("Failed to resolve task with name {task_name}")
+ }),
+ );
+ })
+ .unwrap();
+ })
}
pub(super) async fn list_tasks(
@@ -816,18 +896,19 @@ mod test_inventory {
worktree: Option<WorktreeId>,
cx: &mut TestAppContext,
) -> Vec<(TaskSourceKind, String)> {
- inventory.update(cx, |inventory, cx| {
- let task_context = &TaskContext::default();
- inventory
- .list_tasks(None, None, worktree, cx)
- .into_iter()
- .filter_map(|(source_kind, task)| {
- let id_base = source_kind.to_id_base();
- Some((source_kind, task.resolve_task(&id_base, task_context)?))
- })
- .map(|(source_kind, resolved_task)| (source_kind, resolved_task.resolved_label))
- .collect()
- })
+ let task_context = &TaskContext::default();
+ inventory
+ .update(cx, |inventory, cx| {
+ inventory.list_tasks(None, None, worktree, cx)
+ })
+ .await
+ .into_iter()
+ .filter_map(|(source_kind, task)| {
+ let id_base = source_kind.to_id_base();
+ Some((source_kind, task.resolve_task(&id_base, task_context)?))
+ })
+ .map(|(source_kind, resolved_task)| (source_kind, resolved_task.resolved_label))
+ .collect()
}
}
@@ -959,15 +1040,17 @@ impl ContextProviderWithTasks {
impl ContextProvider for ContextProviderWithTasks {
fn associated_tasks(
&self,
- _: Option<Arc<dyn language::File>>,
+ _: Arc<dyn Fs>,
+ _: Option<Arc<dyn File>>,
_: &App,
- ) -> Option<TaskTemplates> {
- Some(self.templates.clone())
+ ) -> Task<Option<TaskTemplates>> {
+ Task::ready(Some(self.templates.clone()))
}
}
#[cfg(test)]
mod tests {
+ use fs::FakeFs;
use gpui::TestAppContext;
use paths::tasks_file;
use pretty_assertions::assert_eq;
@@ -982,13 +1065,14 @@ mod tests {
#[gpui::test]
async fn test_task_list_sorting(cx: &mut TestAppContext) {
init_test(cx);
- let inventory = cx.update(Inventory::new);
- let initial_tasks = resolved_task_names(&inventory, None, cx);
+ let fs = FakeFs::new(cx.executor());
+ let inventory = cx.update(|cx| Inventory::new(fs, cx));
+ let initial_tasks = resolved_task_names(&inventory, None, cx).await;
assert!(
initial_tasks.is_empty(),
"No tasks expected for empty inventory, but got {initial_tasks:?}"
);
- let initial_tasks = task_template_names(&inventory, None, cx);
+ let initial_tasks = task_template_names(&inventory, None, cx).await;
assert!(
initial_tasks.is_empty(),
"No tasks expected for empty inventory, but got {initial_tasks:?}"
@@ -1012,22 +1096,22 @@ mod tests {
.unwrap();
});
assert_eq!(
- task_template_names(&inventory, None, cx),
+ task_template_names(&inventory, None, cx).await,
&expected_initial_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx),
+ resolved_task_names(&inventory, None, cx).await,
&expected_initial_state,
"Tasks with equal amount of usages should be sorted alphanumerically"
);
- register_task_used(&inventory, "2_task", cx);
+ register_task_used(&inventory, "2_task", cx).await;
assert_eq!(
- task_template_names(&inventory, None, cx),
+ task_template_names(&inventory, None, cx).await,
&expected_initial_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx),
+ resolved_task_names(&inventory, None, cx).await,
vec![
"2_task".to_string(),
"1_a_task".to_string(),
@@ -1036,16 +1120,16 @@ mod tests {
],
);
- register_task_used(&inventory, "1_task", cx);
- register_task_used(&inventory, "1_task", cx);
- register_task_used(&inventory, "1_task", cx);
- register_task_used(&inventory, "3_task", cx);
+ register_task_used(&inventory, "1_task", cx).await;
+ register_task_used(&inventory, "1_task", cx).await;
+ register_task_used(&inventory, "1_task", cx).await;
+ register_task_used(&inventory, "3_task", cx).await;
assert_eq!(
- task_template_names(&inventory, None, cx),
+ task_template_names(&inventory, None, cx).await,
&expected_initial_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx),
+ resolved_task_names(&inventory, None, cx).await,
vec![
"3_task".to_string(),
"1_task".to_string(),
@@ -1069,7 +1153,7 @@ mod tests {
.unwrap();
});
assert_eq!(
- resolved_task_names(&inventory, None, cx),
+ resolved_task_names(&inventory, None, cx).await,
vec![
"3_task".to_string(),
"1_task".to_string(),
@@ -1079,7 +1163,7 @@ mod tests {
"Most recently used task should be at the top"
);
assert_eq!(
- resolved_task_names(&inventory, Some(worktree_id), cx),
+ resolved_task_names(&inventory, Some(worktree_id), cx).await,
vec![
"3_task".to_string(),
"1_task".to_string(),
@@ -1088,9 +1172,9 @@ mod tests {
"1_a_task".to_string(),
],
);
- register_worktree_task_used(&inventory, worktree_id, "worktree_task_1", cx);
+ register_worktree_task_used(&inventory, worktree_id, "worktree_task_1", cx).await;
assert_eq!(
- resolved_task_names(&inventory, Some(worktree_id), cx),
+ resolved_task_names(&inventory, Some(worktree_id), cx).await,
vec![
"worktree_task_1".to_string(),
"3_task".to_string(),
@@ -1123,11 +1207,11 @@ mod tests {
"3_task".to_string(),
];
assert_eq!(
- task_template_names(&inventory, None, cx),
+ task_template_names(&inventory, None, cx).await,
&expected_updated_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx),
+ resolved_task_names(&inventory, None, cx).await,
vec![
"worktree_task_1".to_string(),
"1_a_task".to_string(),
@@ -1140,13 +1224,13 @@ mod tests {
"After global tasks update, worktree task usage is not erased and it's the first still; global task is back to regular order as its file was updated"
);
- register_task_used(&inventory, "11_hello", cx);
+ register_task_used(&inventory, "11_hello", cx).await;
assert_eq!(
- task_template_names(&inventory, None, cx),
+ task_template_names(&inventory, None, cx).await,
&expected_updated_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx),
+ resolved_task_names(&inventory, None, cx).await,
vec![
"11_hello".to_string(),
"worktree_task_1".to_string(),
@@ -1162,7 +1246,8 @@ mod tests {
#[gpui::test]
async fn test_inventory_static_task_filters(cx: &mut TestAppContext) {
init_test(cx);
- let inventory = cx.update(Inventory::new);
+ let fs = FakeFs::new(cx.executor());
+ let inventory = cx.update(|cx| Inventory::new(fs, cx));
let common_name = "common_task_name";
let worktree_1 = WorktreeId::from_usize(1);
let worktree_2 = WorktreeId::from_usize(2);
@@ -1319,12 +1404,17 @@ mod tests {
inventory: &Entity<Inventory>,
worktree: Option<WorktreeId>,
cx: &mut TestAppContext,
- ) -> Vec<String> {
- inventory.update(cx, |inventory, cx| {
+ ) -> Task<Vec<String>> {
+ let tasks = inventory.update(cx, |inventory, cx| {
let mut task_contexts = TaskContexts::default();
task_contexts.active_worktree_context =
worktree.map(|worktree| (worktree, TaskContext::default()));
- let (used, current) = inventory.used_and_current_resolved_tasks(&task_contexts, cx);
+
+ inventory.used_and_current_resolved_tasks(Arc::new(task_contexts), cx)
+ });
+
+ cx.background_spawn(async move {
+ let (used, current) = tasks.await;
used.into_iter()
.chain(current)
.map(|(_, task)| task.original_task().label.clone())
@@ -1353,17 +1443,20 @@ mod tests {
worktree: Option<WorktreeId>,
cx: &mut TestAppContext,
) -> Vec<(TaskSourceKind, String)> {
- inventory.update(cx, |inventory, cx| {
- let mut task_contexts = TaskContexts::default();
- task_contexts.active_worktree_context =
- worktree.map(|worktree| (worktree, TaskContext::default()));
- let (used, current) = inventory.used_and_current_resolved_tasks(&task_contexts, cx);
- let mut all = used;
- all.extend(current);
- all.into_iter()
- .map(|(source_kind, task)| (source_kind, task.resolved_label))
- .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
- .collect()
- })
+ let (used, current) = inventory
+ .update(cx, |inventory, cx| {
+ let mut task_contexts = TaskContexts::default();
+ task_contexts.active_worktree_context =
+ worktree.map(|worktree| (worktree, TaskContext::default()));
+
+ inventory.used_and_current_resolved_tasks(Arc::new(task_contexts), cx)
+ })
+ .await;
+ let mut all = used;
+ all.extend(current);
+ all.into_iter()
+ .map(|(source_kind, task)| (source_kind, task.resolved_label))
+ .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
+ .collect()
}
}
@@ -159,6 +159,7 @@ impl TaskStore {
}
pub fn local(
+ fs: Arc<dyn Fs>,
buffer_store: WeakEntity<BufferStore>,
worktree_store: Entity<WorktreeStore>,
toolchain_store: Arc<dyn LanguageToolchainStore>,
@@ -170,7 +171,7 @@ impl TaskStore {
downstream_client: None,
environment,
},
- task_inventory: Inventory::new(cx),
+ task_inventory: Inventory::new(fs, cx),
buffer_store,
toolchain_store,
worktree_store,
@@ -178,6 +179,7 @@ impl TaskStore {
}
pub fn remote(
+ fs: Arc<dyn Fs>,
buffer_store: WeakEntity<BufferStore>,
worktree_store: Entity<WorktreeStore>,
toolchain_store: Arc<dyn LanguageToolchainStore>,
@@ -190,7 +192,7 @@ impl TaskStore {
upstream_client,
project_id,
},
- task_inventory: Inventory::new(cx),
+ task_inventory: Inventory::new(fs, cx),
buffer_store,
toolchain_store,
worktree_store,
@@ -146,6 +146,7 @@ impl HeadlessProject {
let task_store = cx.new(|cx| {
let mut task_store = TaskStore::local(
+ fs.clone(),
buffer_store.downgrade(),
worktree_store.clone(),
toolchain_store.read(cx).as_language_toolchain_store(),
@@ -260,13 +260,14 @@ impl PickerDelegate for TasksModalDelegate {
Some(candidates) => Task::ready(string_match_candidates(candidates)),
None => {
if let Some(task_inventory) = self.task_store.read(cx).task_inventory().cloned() {
- let (used, current) = task_inventory
- .read(cx)
- .used_and_current_resolved_tasks(&self.task_contexts, cx);
+ let task_list = task_inventory.update(cx, |this, cx| {
+ this.used_and_current_resolved_tasks(self.task_contexts.clone(), cx)
+ });
let workspace = self.workspace.clone();
let lsp_task_sources = self.task_contexts.lsp_task_sources.clone();
let task_position = self.task_contexts.latest_selection;
cx.spawn(async move |picker, cx| {
+ let (used, current) = task_list.await;
let Ok((lsp_tasks, prefer_lsp)) = workspace.update(cx, |workspace, cx| {
let lsp_tasks = editor::lsp_tasks(
workspace.project().clone(),
@@ -192,31 +192,33 @@ where
task_contexts(workspace, window, cx)
})?;
let task_contexts = task_contexts.await;
- let mut tasks = workspace.update(cx, |workspace, cx| {
- let Some(task_inventory) = workspace
- .project()
- .read(cx)
- .task_store()
- .read(cx)
- .task_inventory()
- .cloned()
- else {
- return Vec::new();
- };
- let (file, language) = task_contexts
- .location()
- .map(|location| {
- let buffer = location.buffer.read(cx);
- (
- buffer.file().cloned(),
- buffer.language_at(location.range.start),
- )
- })
- .unwrap_or_default();
- task_inventory
- .read(cx)
- .list_tasks(file, language, task_contexts.worktree(), cx)
- })?;
+ let mut tasks = workspace
+ .update(cx, |workspace, cx| {
+ let Some(task_inventory) = workspace
+ .project()
+ .read(cx)
+ .task_store()
+ .read(cx)
+ .task_inventory()
+ .cloned()
+ else {
+ return Task::ready(Vec::new());
+ };
+ let (file, language) = task_contexts
+ .location()
+ .map(|location| {
+ let buffer = location.buffer.read(cx);
+ (
+ buffer.file().cloned(),
+ buffer.language_at(location.range.start),
+ )
+ })
+ .unwrap_or_default();
+ task_inventory
+ .read(cx)
+ .list_tasks(file, language, task_contexts.worktree(), cx)
+ })?
+ .await;
let did_spawn = workspace
.update_in(cx, |workspace, window, cx| {