Detailed changes
@@ -14127,12 +14127,14 @@ name = "tasks_ui"
version = "0.1.0"
dependencies = [
"anyhow",
+ "collections",
"debugger_ui",
"editor",
"feature_flags",
"file_icons",
"fuzzy",
"gpui",
+ "itertools 0.14.0",
"language",
"menu",
"picker",
@@ -318,6 +318,7 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::OpenUncommittedDiff>)
.add_request_handler(forward_read_only_project_request::<proto::LspExtExpandMacro>)
.add_request_handler(forward_read_only_project_request::<proto::LspExtOpenDocs>)
+ .add_request_handler(forward_mutating_project_request::<proto::LspExtRunnables>)
.add_request_handler(
forward_read_only_project_request::<proto::LspExtSwitchSourceHeader>,
)
@@ -131,7 +131,7 @@ pub use proposed_changes_editor::{
};
use smallvec::smallvec;
use std::{cell::OnceCell, iter::Peekable};
-use task::{ResolvedTask, TaskTemplate, TaskVariables};
+use task::{ResolvedTask, RunnableTag, TaskTemplate, TaskVariables};
pub use lsp::CompletionContext;
use lsp::{
@@ -140,6 +140,7 @@ use lsp::{
};
use language::BufferSnapshot;
+pub use lsp_ext::lsp_tasks;
use movement::TextLayoutDetails;
pub use multi_buffer::{
Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, RowInfo,
@@ -12449,12 +12450,13 @@ impl Editor {
return Task::ready(());
}
let project = self.project.as_ref().map(Entity::downgrade);
- cx.spawn_in(window, async move |this, cx| {
+ let task_sources = self.lsp_task_sources(cx);
+ cx.spawn_in(window, async move |editor, cx| {
cx.background_executor().timer(UPDATE_DEBOUNCE).await;
let Some(project) = project.and_then(|p| p.upgrade()) else {
return;
};
- let Ok(display_snapshot) = this.update(cx, |this, cx| {
+ let Ok(display_snapshot) = editor.update(cx, |this, cx| {
this.display_map.update(cx, |map, cx| map.snapshot(cx))
}) else {
return;
@@ -12477,15 +12479,77 @@ impl Editor {
}
})
.await;
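+            // Fetch runnables reported by language servers (the `experimental/runnables`
+            // LSP extension) for the buffers tracked in `task_sources`.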
+ let Ok(lsp_tasks) =
+ cx.update(|_, cx| crate::lsp_tasks(project.clone(), &task_sources, None, cx))
+ else {
+ return;
+ };
+ let lsp_tasks = lsp_tasks.await;
+
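+            // Group the LSP runnables by (buffer id, row), anchoring each one in the current
+            // display snapshot so it can be merged with the tree-sitter runnable rows below.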
+ let Ok(mut lsp_tasks_by_rows) = cx.update(|_, cx| {
+ lsp_tasks
+ .into_iter()
+ .flat_map(|(kind, tasks)| {
+ tasks.into_iter().filter_map(move |(location, task)| {
+ Some((kind.clone(), location?, task))
+ })
+ })
+ .fold(HashMap::default(), |mut acc, (kind, location, task)| {
+ let buffer = location.target.buffer;
+ let buffer_snapshot = buffer.read(cx).snapshot();
+ let offset = display_snapshot.buffer_snapshot.excerpts().find_map(
+ |(excerpt_id, snapshot, _)| {
+ if snapshot.remote_id() == buffer_snapshot.remote_id() {
+ display_snapshot
+ .buffer_snapshot
+ .anchor_in_excerpt(excerpt_id, location.target.range.start)
+ } else {
+ None
+ }
+ },
+ );
+ if let Some(offset) = offset {
+ let task_buffer_range =
+ location.target.range.to_point(&buffer_snapshot);
+ let context_buffer_range =
+ task_buffer_range.to_offset(&buffer_snapshot);
+ let context_range = BufferOffset(context_buffer_range.start)
+ ..BufferOffset(context_buffer_range.end);
+
+ acc.entry((buffer_snapshot.remote_id(), task_buffer_range.start.row))
+ .or_insert_with(|| RunnableTasks {
+ templates: Vec::new(),
+ offset,
+ column: task_buffer_range.start.column,
+ extra_variables: HashMap::default(),
+ context_range,
+ })
+ .templates
+ .push((kind, task.original_task().clone()));
+ }
+
+ acc
+ })
+ }) else {
+ return;
+ };
let rows = Self::runnable_rows(project, display_snapshot, new_rows, cx.clone());
- this.update(cx, |this, _| {
- this.clear_tasks();
- for (key, value) in rows {
- this.insert_tasks(key, value);
- }
- })
- .ok();
+ editor
+ .update(cx, |editor, _| {
+ editor.clear_tasks();
+ for (key, mut value) in rows {
+ if let Some(lsp_tasks) = lsp_tasks_by_rows.remove(&key) {
+ value.templates.extend(lsp_tasks.templates);
+ }
+
+ editor.insert_tasks(key, value);
+ }
+ for (key, value) in lsp_tasks_by_rows {
+ editor.insert_tasks(key, value);
+ }
+ })
+ .ok();
})
}
fn fetch_runnable_ranges(
@@ -12500,7 +12564,7 @@ impl Editor {
snapshot: DisplaySnapshot,
runnable_ranges: Vec<RunnableRange>,
mut cx: AsyncWindowContext,
- ) -> Vec<((BufferId, u32), RunnableTasks)> {
+ ) -> Vec<((BufferId, BufferRow), RunnableTasks)> {
runnable_ranges
.into_iter()
.filter_map(|mut runnable| {
@@ -12557,11 +12621,9 @@ impl Editor {
)
});
- let tags = mem::take(&mut runnable.tags);
- let mut tags: Vec<_> = tags
+ let mut templates_with_tags = mem::take(&mut runnable.tags)
.into_iter()
- .flat_map(|tag| {
- let tag = tag.0.clone();
+ .flat_map(|RunnableTag(tag)| {
inventory
.as_ref()
.into_iter()
@@ -12578,20 +12640,20 @@ impl Editor {
})
})
.sorted_by_key(|(kind, _)| kind.to_owned())
- .collect();
- if let Some((leading_tag_source, _)) = tags.first() {
+ .collect::<Vec<_>>();
+ if let Some((leading_tag_source, _)) = templates_with_tags.first() {
// Strongest source wins; if we have worktree tag binding, prefer that to
// global and language bindings;
// if we have a global binding, prefer that to language binding.
- let first_mismatch = tags
+ let first_mismatch = templates_with_tags
.iter()
.position(|(tag_source, _)| tag_source != leading_tag_source);
if let Some(index) = first_mismatch {
- tags.truncate(index);
+ templates_with_tags.truncate(index);
}
}
- tags
+ templates_with_tags
}
pub fn move_to_enclosing_bracket(
@@ -12539,6 +12539,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
initialization_options: Some(json!({
"some other init value": false
})),
+ enable_lsp_tasks: false,
},
);
});
@@ -12558,6 +12559,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
initialization_options: Some(json!({
"anotherInitValue": false
})),
+ enable_lsp_tasks: false,
},
);
});
@@ -12577,6 +12579,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
initialization_options: Some(json!({
"anotherInitValue": false
})),
+ enable_lsp_tasks: false,
},
);
});
@@ -12594,6 +12597,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
binary: None,
settings: None,
initialization_options: None,
+ enable_lsp_tasks: false,
},
);
});
@@ -1,12 +1,25 @@
use std::sync::Arc;
use crate::Editor;
+use collections::HashMap;
+use futures::stream::FuturesUnordered;
use gpui::{App, AppContext as _, Entity, Task};
use itertools::Itertools;
use language::Buffer;
use language::Language;
use lsp::LanguageServerId;
+use lsp::LanguageServerName;
use multi_buffer::Anchor;
+use project::LanguageServerToQuery;
+use project::LocationLink;
+use project::Project;
+use project::TaskSourceKind;
+use project::lsp_store::lsp_ext_command::GetLspRunnables;
+use smol::stream::StreamExt;
+use task::ResolvedTask;
+use task::TaskContext;
+use text::BufferId;
+use util::ResultExt as _;
pub(crate) fn find_specific_language_server_in_selection<F>(
editor: &Editor,
@@ -60,3 +73,83 @@ where
None
})
}
+
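+/// Queries each language server named in `task_sources` for its runnables via the
+/// `experimental/runnables` LSP extension, optionally scoped to `for_position`, and
+/// resolves them into tasks grouped under their `TaskSourceKind::Lsp` source.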
+pub fn lsp_tasks(
+ project: Entity<Project>,
+ task_sources: &HashMap<LanguageServerName, Vec<BufferId>>,
+ for_position: Option<text::Anchor>,
+ cx: &mut App,
+) -> Task<Vec<(TaskSourceKind, Vec<(Option<LocationLink>, ResolvedTask)>)>> {
+ let mut lsp_task_sources = task_sources
+ .iter()
+ .map(|(name, buffer_ids)| {
+ let buffers = buffer_ids
+ .iter()
+ .filter_map(|&buffer_id| project.read(cx).buffer_for_id(buffer_id, cx))
+ .collect::<Vec<_>>();
+ language_server_for_buffers(project.clone(), name.clone(), buffers, cx)
+ })
+ .collect::<FuturesUnordered<_>>();
+
+ cx.spawn(async move |cx| {
+ let mut lsp_tasks = Vec::new();
+ let lsp_task_context = TaskContext::default();
+ while let Some(server_to_query) = lsp_task_sources.next().await {
+ if let Some((server_id, buffers)) = server_to_query {
+ let source_kind = TaskSourceKind::Lsp(server_id);
+ let id_base = source_kind.to_id_base();
+ let mut new_lsp_tasks = Vec::new();
+ for buffer in buffers {
+ if let Ok(runnables_task) = project.update(cx, |project, cx| {
+ let buffer_id = buffer.read(cx).remote_id();
+ project.request_lsp(
+ buffer,
+ LanguageServerToQuery::Other(server_id),
+ GetLspRunnables {
+ buffer_id,
+ position: for_position,
+ },
+ cx,
+ )
+ }) {
+ if let Some(new_runnables) = runnables_task.await.log_err() {
+ new_lsp_tasks.extend(new_runnables.runnables.into_iter().filter_map(
+ |(location, runnable)| {
+ let resolved_task =
+ runnable.resolve_task(&id_base, &lsp_task_context)?;
+ Some((location, resolved_task))
+ },
+ ));
+ }
+ }
+ }
+ lsp_tasks.push((source_kind, new_lsp_tasks));
+ }
+ }
+ lsp_tasks
+ })
+}
+
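+/// Finds a language server with the given name among the candidate buffers, returning the
+/// first matching server id together with all candidate buffers.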
+fn language_server_for_buffers(
+ project: Entity<Project>,
+ name: LanguageServerName,
+ candidates: Vec<Entity<Buffer>>,
+ cx: &mut App,
+) -> Task<Option<(LanguageServerId, Vec<Entity<Buffer>>)>> {
+ cx.spawn(async move |cx| {
+ for buffer in &candidates {
+ let server_id = buffer
+ .update(cx, |buffer, cx| {
+ project.update(cx, |project, cx| {
+ project.language_server_id_for_name(buffer, &name.0, cx)
+ })
+ })
+ .ok()?
+ .await;
+ if let Some(server_id) = server_id {
+ return Some((server_id, candidates));
+ }
+ }
+ None
+ })
+}
@@ -1,9 +1,12 @@
use crate::Editor;
+use collections::HashMap;
use gpui::{App, Task, Window};
-use project::Location;
+use lsp::LanguageServerName;
+use project::{Location, project_settings::ProjectSettings};
+use settings::Settings as _;
use task::{TaskContext, TaskVariables, VariableName};
-use text::{ToOffset, ToPoint};
+use text::{BufferId, ToOffset, ToPoint};
impl Editor {
pub fn task_context(&self, window: &mut Window, cx: &mut App) -> Task<Option<TaskContext>> {
@@ -70,4 +73,38 @@ impl Editor {
})
})
}
+
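+    /// Collects the language servers that may provide LSP tasks for this editor's buffers,
+    /// mapping each server name to the buffers it should be queried for. Servers with
+    /// `enable_lsp_tasks` disabled in the LSP settings are skipped.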
+ pub fn lsp_task_sources(&self, cx: &App) -> HashMap<LanguageServerName, Vec<BufferId>> {
+ let lsp_settings = &ProjectSettings::get_global(cx).lsp;
+
+ self.buffer()
+ .read(cx)
+ .all_buffers()
+ .into_iter()
+ .filter_map(|buffer| {
+ let lsp_tasks_source = buffer
+ .read(cx)
+ .language()?
+ .context_provider()?
+ .lsp_task_source()?;
+ if lsp_settings
+ .get(&lsp_tasks_source)
+ .map_or(true, |s| s.enable_lsp_tasks)
+ {
+ let buffer_id = buffer.read(cx).remote_id();
+ Some((lsp_tasks_source, buffer_id))
+ } else {
+ None
+ }
+ })
+ .fold(
+ HashMap::default(),
+ |mut acc, (lsp_task_source, buffer_id)| {
+ acc.entry(lsp_task_source)
+ .or_insert_with(Vec::new)
+ .push(buffer_id);
+ acc
+ },
+ )
+ }
}
@@ -572,7 +572,11 @@ pub trait LspAdapter: 'static + Send + Sync {
}
/// Support custom initialize params.
- fn prepare_initialize_params(&self, original: InitializeParams) -> Result<InitializeParams> {
+ fn prepare_initialize_params(
+ &self,
+ original: InitializeParams,
+ _: &App,
+ ) -> Result<InitializeParams> {
Ok(original)
}
@@ -1029,7 +1029,10 @@ fn scroll_debounce_ms() -> u64 {
#[derive(Debug, Clone, Deserialize, PartialEq, Serialize, JsonSchema)]
pub struct LanguageTaskConfig {
/// Extra task variables to set for a particular language.
+ #[serde(default)]
pub variables: HashMap<String, String>,
+ #[serde(default = "default_true")]
+ pub enabled: bool,
}
impl InlayHintSettings {
@@ -5,6 +5,7 @@ use crate::{LanguageToolchainStore, Location, Runnable};
use anyhow::Result;
use collections::HashMap;
use gpui::{App, Task};
+use lsp::LanguageServerName;
use task::{TaskTemplates, TaskVariables};
use text::BufferId;
@@ -15,6 +16,7 @@ pub struct RunnableRange {
pub runnable: Runnable,
pub extra_captures: HashMap<String, String>,
}
+
/// Language Contexts are used by Zed tasks to extract information about the source file where the tasks are supposed to be scheduled from.
/// Multiple context providers may be used together: by default, Zed provides a base [`BasicContextProvider`] context that fills all non-custom [`VariableName`] variants.
///
@@ -40,4 +42,9 @@ pub trait ContextProvider: Send + Sync {
) -> Option<TaskTemplates> {
None
}
+
+    /// The name of the language server that can provide tasks for this language via LSP extensions.
+ fn lsp_task_source(&self) -> Option<LanguageServerName> {
+ None
+ }
}
@@ -1,7 +1,7 @@
use anyhow::{Context, Result, anyhow, bail};
use async_trait::async_trait;
use futures::StreamExt;
-use gpui::AsyncApp;
+use gpui::{App, AsyncApp};
use http_client::github::{GitHubLspBinaryVersion, latest_github_release};
pub use language::*;
use lsp::{DiagnosticTag, InitializeParams, LanguageServerBinary, LanguageServerName};
@@ -273,6 +273,7 @@ impl super::LspAdapter for CLspAdapter {
fn prepare_initialize_params(
&self,
mut original: InitializeParams,
+ _: &App,
) -> Result<InitializeParams> {
let experimental = json!({
"textDocument": {
@@ -7,8 +7,11 @@ use gpui::{App, AsyncApp, SharedString, Task};
use http_client::github::AssetKind;
use http_client::github::{GitHubLspBinaryVersion, latest_github_release};
pub use language::*;
-use lsp::LanguageServerBinary;
+use lsp::{InitializeParams, LanguageServerBinary};
+use project::project_settings::ProjectSettings;
use regex::Regex;
+use serde_json::json;
+use settings::Settings as _;
use smol::fs::{self};
use std::fmt::Display;
use std::{
@@ -18,6 +21,7 @@ use std::{
sync::{Arc, LazyLock},
};
use task::{TaskTemplate, TaskTemplates, TaskType, TaskVariables, VariableName};
+use util::merge_json_value_into;
use util::{ResultExt, fs::remove_matching, maybe};
use crate::language_settings::language_settings;
@@ -48,9 +52,9 @@ impl RustLspAdapter {
const ARCH_SERVER_NAME: &str = "pc-windows-msvc";
}
-impl RustLspAdapter {
- const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("rust-analyzer");
+const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("rust-analyzer");
+impl RustLspAdapter {
fn build_asset_name() -> String {
let extension = match Self::GITHUB_ASSET_KIND {
AssetKind::TarGz => "tar.gz",
@@ -60,7 +64,7 @@ impl RustLspAdapter {
format!(
"{}-{}-{}.{}",
- Self::SERVER_NAME,
+ SERVER_NAME,
std::env::consts::ARCH,
Self::ARCH_SERVER_NAME,
extension
@@ -98,7 +102,7 @@ impl ManifestProvider for CargoManifestProvider {
#[async_trait(?Send)]
impl LspAdapter for RustLspAdapter {
fn name(&self) -> LanguageServerName {
- Self::SERVER_NAME.clone()
+ SERVER_NAME.clone()
}
fn manifest_name(&self) -> Option<ManifestName> {
@@ -473,6 +477,30 @@ impl LspAdapter for RustLspAdapter {
filter_range,
})
}
+
+ fn prepare_initialize_params(
+ &self,
+ mut original: InitializeParams,
+ cx: &App,
+ ) -> Result<InitializeParams> {
+ let enable_lsp_tasks = ProjectSettings::get_global(cx)
+ .lsp
+ .get(&SERVER_NAME)
+ .map_or(false, |s| s.enable_lsp_tasks);
+ if enable_lsp_tasks {
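+            // Advertise the experimental runnables capability to rust-analyzer, accepting
+            // both "cargo" and "shell" runnable kinds.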
+ let experimental = json!({
+ "runnables": {
+ "kinds": [ "cargo", "shell" ],
+ },
+ });
+ if let Some(ref mut original_experimental) = original.capabilities.experimental {
+ merge_json_value_into(experimental, original_experimental);
+ } else {
+ original.capabilities.experimental = Some(experimental);
+ }
+ }
+ Ok(original)
+ }
}
pub(crate) struct RustContextProvider;
@@ -776,6 +804,10 @@ impl ContextProvider for RustContextProvider {
Some(TaskTemplates(task_templates))
}
+
+ fn lsp_task_source(&self) -> Option<LanguageServerName> {
+ Some(SERVER_NAME)
+ }
}
/// Part of the data structure of Cargo metadata
@@ -977,62 +977,69 @@ async fn location_links_from_proto(
let mut links = Vec::new();
for link in proto_links {
- let origin = match link.origin {
- Some(origin) => {
- let buffer_id = BufferId::new(origin.buffer_id)?;
- let buffer = lsp_store
- .update(&mut cx, |lsp_store, cx| {
- lsp_store.wait_for_remote_buffer(buffer_id, cx)
- })?
- .await?;
- let start = origin
- .start
- .and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("missing origin start"))?;
- let end = origin
- .end
- .and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("missing origin end"))?;
- buffer
- .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
- .await?;
- Some(Location {
- buffer,
- range: start..end,
- })
- }
- None => None,
- };
-
- let target = link.target.ok_or_else(|| anyhow!("missing target"))?;
- let buffer_id = BufferId::new(target.buffer_id)?;
- let buffer = lsp_store
- .update(&mut cx, |lsp_store, cx| {
- lsp_store.wait_for_remote_buffer(buffer_id, cx)
- })?
- .await?;
- let start = target
- .start
- .and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("missing target start"))?;
- let end = target
- .end
- .and_then(deserialize_anchor)
- .ok_or_else(|| anyhow!("missing target end"))?;
- buffer
- .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
- .await?;
- let target = Location {
- buffer,
- range: start..end,
- };
-
- links.push(LocationLink { origin, target })
+ links.push(location_link_from_proto(link, &lsp_store, &mut cx).await?)
}
Ok(links)
}
+pub async fn location_link_from_proto(
+ link: proto::LocationLink,
+ lsp_store: &Entity<LspStore>,
+ cx: &mut AsyncApp,
+) -> Result<LocationLink> {
+ let origin = match link.origin {
+ Some(origin) => {
+ let buffer_id = BufferId::new(origin.buffer_id)?;
+ let buffer = lsp_store
+ .update(cx, |lsp_store, cx| {
+ lsp_store.wait_for_remote_buffer(buffer_id, cx)
+ })?
+ .await?;
+ let start = origin
+ .start
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing origin start"))?;
+ let end = origin
+ .end
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing origin end"))?;
+ buffer
+ .update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
+ .await?;
+ Some(Location {
+ buffer,
+ range: start..end,
+ })
+ }
+ None => None,
+ };
+
+ let target = link.target.ok_or_else(|| anyhow!("missing target"))?;
+ let buffer_id = BufferId::new(target.buffer_id)?;
+ let buffer = lsp_store
+ .update(cx, |lsp_store, cx| {
+ lsp_store.wait_for_remote_buffer(buffer_id, cx)
+ })?
+ .await?;
+ let start = target
+ .start
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing target start"))?;
+ let end = target
+ .end
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing target end"))?;
+ buffer
+ .update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
+ .await?;
+ let target = Location {
+ buffer,
+ range: start..end,
+ };
+ Ok(LocationLink { origin, target })
+}
+
async fn location_links_from_lsp(
message: Option<lsp::GotoDefinitionResponse>,
lsp_store: Entity<LspStore>,
@@ -1115,6 +1122,65 @@ async fn location_links_from_lsp(
Ok(definitions)
}
+pub async fn location_link_from_lsp(
+ link: lsp::LocationLink,
+ lsp_store: &Entity<LspStore>,
+ buffer: &Entity<Buffer>,
+ server_id: LanguageServerId,
+ cx: &mut AsyncApp,
+) -> Result<LocationLink> {
+ let (lsp_adapter, language_server) =
+ language_server_for_buffer(&lsp_store, &buffer, server_id, cx)?;
+
+ let (origin_range, target_uri, target_range) = (
+ link.origin_selection_range,
+ link.target_uri,
+ link.target_selection_range,
+ );
+
+ let target_buffer_handle = lsp_store
+ .update(cx, |lsp_store, cx| {
+ lsp_store.open_local_buffer_via_lsp(
+ target_uri,
+ language_server.server_id(),
+ lsp_adapter.name.clone(),
+ cx,
+ )
+ })?
+ .await?;
+
+ cx.update(|cx| {
+ let origin_location = origin_range.map(|origin_range| {
+ let origin_buffer = buffer.read(cx);
+ let origin_start =
+ origin_buffer.clip_point_utf16(point_from_lsp(origin_range.start), Bias::Left);
+ let origin_end =
+ origin_buffer.clip_point_utf16(point_from_lsp(origin_range.end), Bias::Left);
+ Location {
+ buffer: buffer.clone(),
+ range: origin_buffer.anchor_after(origin_start)
+ ..origin_buffer.anchor_before(origin_end),
+ }
+ });
+
+ let target_buffer = target_buffer_handle.read(cx);
+ let target_start =
+ target_buffer.clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
+ let target_end =
+ target_buffer.clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
+ let target_location = Location {
+ buffer: target_buffer_handle,
+ range: target_buffer.anchor_after(target_start)
+ ..target_buffer.anchor_before(target_end),
+ };
+
+ LocationLink {
+ origin: origin_location,
+ target: target_location,
+ }
+ })
+}
+
fn location_links_to_proto(
links: Vec<LocationLink>,
lsp_store: &mut LspStore,
@@ -1123,43 +1189,50 @@ fn location_links_to_proto(
) -> Vec<proto::LocationLink> {
links
.into_iter()
- .map(|definition| {
- let origin = definition.origin.map(|origin| {
- lsp_store
- .buffer_store()
- .update(cx, |buffer_store, cx| {
- buffer_store.create_buffer_for_peer(&origin.buffer, peer_id, cx)
- })
- .detach_and_log_err(cx);
-
- let buffer_id = origin.buffer.read(cx).remote_id().into();
- proto::Location {
- start: Some(serialize_anchor(&origin.range.start)),
- end: Some(serialize_anchor(&origin.range.end)),
- buffer_id,
- }
- });
+ .map(|definition| location_link_to_proto(definition, lsp_store, peer_id, cx))
+ .collect()
+}
- lsp_store
- .buffer_store()
- .update(cx, |buffer_store, cx| {
- buffer_store.create_buffer_for_peer(&definition.target.buffer, peer_id, cx)
- })
- .detach_and_log_err(cx);
+pub fn location_link_to_proto(
+ location: LocationLink,
+ lsp_store: &mut LspStore,
+ peer_id: PeerId,
+ cx: &mut App,
+) -> proto::LocationLink {
+ let origin = location.origin.map(|origin| {
+ lsp_store
+ .buffer_store()
+ .update(cx, |buffer_store, cx| {
+ buffer_store.create_buffer_for_peer(&origin.buffer, peer_id, cx)
+ })
+ .detach_and_log_err(cx);
- let buffer_id = definition.target.buffer.read(cx).remote_id().into();
- let target = proto::Location {
- start: Some(serialize_anchor(&definition.target.range.start)),
- end: Some(serialize_anchor(&definition.target.range.end)),
- buffer_id,
- };
+ let buffer_id = origin.buffer.read(cx).remote_id().into();
+ proto::Location {
+ start: Some(serialize_anchor(&origin.range.start)),
+ end: Some(serialize_anchor(&origin.range.end)),
+ buffer_id,
+ }
+ });
- proto::LocationLink {
- origin,
- target: Some(target),
- }
+ lsp_store
+ .buffer_store()
+ .update(cx, |buffer_store, cx| {
+ buffer_store.create_buffer_for_peer(&location.target.buffer, peer_id, cx)
})
- .collect()
+ .detach_and_log_err(cx);
+
+ let buffer_id = location.target.buffer.read(cx).remote_id().into();
+ let target = proto::Location {
+ start: Some(serialize_anchor(&location.target.range.start)),
+ end: Some(serialize_anchor(&location.target.range.end)),
+ buffer_id,
+ };
+
+ proto::LocationLink {
+ origin,
+ target: Some(target),
+ }
}
#[async_trait(?Send)]
@@ -280,7 +280,7 @@ impl LocalLspStore {
let initialization_params = cx.update(|cx| {
let mut params = language_server.default_initialize_params(cx);
params.initialization_options = initialization_options;
- adapter.adapter.prepare_initialize_params(params)
+ adapter.adapter.prepare_initialize_params(params, cx)
})??;
Self::setup_lsp_messages(
@@ -3428,6 +3428,9 @@ impl LspStore {
client.add_entity_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
client.add_entity_request_handler(Self::handle_lsp_command::<lsp_ext_command::OpenDocs>);
+ client.add_entity_request_handler(
+ Self::handle_lsp_command::<lsp_ext_command::GetLspRunnables>,
+ );
client.add_entity_request_handler(
Self::handle_lsp_command::<lsp_ext_command::SwitchSourceHeader>,
);
@@ -8368,7 +8371,6 @@ impl LspStore {
self.buffer_store.update(cx, |buffer_store, cx| {
for buffer in buffer_store.buffers() {
buffer.update(cx, |buffer, cx| {
- // TODO kb clean inlays
buffer.update_diagnostics(server_id, DiagnosticSet::new([], buffer), cx);
buffer.set_completion_triggers(server_id, Default::default(), cx);
});
@@ -1,12 +1,27 @@
-use crate::{lsp_command::LspCommand, lsp_store::LspStore, make_text_document_identifier};
+use crate::{
+ LocationLink,
+ lsp_command::{
+ LspCommand, location_link_from_lsp, location_link_from_proto, location_link_to_proto,
+ },
+ lsp_store::LspStore,
+ make_text_document_identifier,
+};
use anyhow::{Context as _, Result};
use async_trait::async_trait;
+use collections::HashMap;
use gpui::{App, AsyncApp, Entity};
-use language::{Buffer, point_to_lsp, proto::deserialize_anchor};
+use language::{
+ Buffer, point_to_lsp,
+ proto::{deserialize_anchor, serialize_anchor},
+};
use lsp::{LanguageServer, LanguageServerId};
use rpc::proto::{self, PeerId};
use serde::{Deserialize, Serialize};
-use std::{path::Path, sync::Arc};
+use std::{
+ path::{Path, PathBuf},
+ sync::Arc,
+};
+use task::TaskTemplate;
use text::{BufferId, PointUtf16, ToPointUtf16};
pub enum LspExpandMacro {}
@@ -363,3 +378,245 @@ impl LspCommand for SwitchSourceHeader {
BufferId::new(message.buffer_id)
}
}
+
+// https://rust-analyzer.github.io/book/contributing/lsp-extensions.html#runnables
+// Taken from https://github.com/rust-lang/rust-analyzer/blob/a73a37a757a58b43a796d3eb86a1f7dfd0036659/crates/rust-analyzer/src/lsp/ext.rs#L425-L489
+pub enum Runnables {}
+
+impl lsp::request::Request for Runnables {
+ type Params = RunnablesParams;
+ type Result = Vec<Runnable>;
+ const METHOD: &'static str = "experimental/runnables";
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct RunnablesParams {
+ pub text_document: lsp::TextDocumentIdentifier,
+ pub position: Option<lsp::Position>,
+}
+
+#[derive(Deserialize, Serialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct Runnable {
+ pub label: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub location: Option<lsp::LocationLink>,
+ pub kind: RunnableKind,
+ pub args: RunnableArgs,
+}
+
+#[derive(Deserialize, Serialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+#[serde(untagged)]
+pub enum RunnableArgs {
+ Cargo(CargoRunnableArgs),
+ Shell(ShellRunnableArgs),
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone)]
+#[serde(rename_all = "lowercase")]
+pub enum RunnableKind {
+ Cargo,
+ Shell,
+}
+
+#[derive(Deserialize, Serialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct CargoRunnableArgs {
+ #[serde(skip_serializing_if = "HashMap::is_empty")]
+ pub environment: HashMap<String, String>,
+ pub cwd: PathBuf,
+ /// Command to be executed instead of cargo
+ pub override_cargo: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub workspace_root: Option<PathBuf>,
+ // command, --package and --lib stuff
+ pub cargo_args: Vec<String>,
+ // stuff after --
+ pub executable_args: Vec<String>,
+}
+
+#[derive(Deserialize, Serialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct ShellRunnableArgs {
+ #[serde(skip_serializing_if = "HashMap::is_empty")]
+ pub environment: HashMap<String, String>,
+ pub cwd: PathBuf,
+ pub program: String,
+ pub args: Vec<String>,
+}
+
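+/// Request for the `experimental/runnables` LSP extension, scoped to a single buffer and
+/// an optional position within it.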
+#[derive(Debug)]
+pub struct GetLspRunnables {
+ pub buffer_id: BufferId,
+ pub position: Option<text::Anchor>,
+}
+
+#[derive(Debug, Default)]
+pub struct LspRunnables {
+ pub runnables: Vec<(Option<LocationLink>, TaskTemplate)>,
+}
+
+#[async_trait(?Send)]
+impl LspCommand for GetLspRunnables {
+ type Response = LspRunnables;
+ type LspRequest = Runnables;
+ type ProtoRequest = proto::LspExtRunnables;
+
+ fn display_name(&self) -> &str {
+ "LSP Runnables"
+ }
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ buffer: &Buffer,
+ _: &Arc<LanguageServer>,
+ _: &App,
+ ) -> Result<RunnablesParams> {
+ let url = match lsp::Url::from_file_path(path) {
+ Ok(url) => url,
+ Err(()) => anyhow::bail!("Failed to parse path {path:?} as lsp::Url"),
+ };
+ Ok(RunnablesParams {
+ text_document: lsp::TextDocumentIdentifier::new(url),
+ position: self
+ .position
+ .map(|anchor| point_to_lsp(anchor.to_point_utf16(&buffer.snapshot()))),
+ })
+ }
+
+ async fn response_from_lsp(
+ self,
+ lsp_runnables: Vec<Runnable>,
+ lsp_store: Entity<LspStore>,
+ buffer: Entity<Buffer>,
+ server_id: LanguageServerId,
+ mut cx: AsyncApp,
+ ) -> Result<LspRunnables> {
+ let mut runnables = Vec::with_capacity(lsp_runnables.len());
+
+ for runnable in lsp_runnables {
+ let location = match runnable.location {
+ Some(location) => Some(
+ location_link_from_lsp(location, &lsp_store, &buffer, server_id, &mut cx)
+ .await?,
+ ),
+ None => None,
+ };
+ let mut task_template = TaskTemplate::default();
+ task_template.label = runnable.label;
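+            // Translate the LSP runnable into a Zed task template: cargo runnables run
+            // `cargo` (or the overridden command), shell runnables run the given program.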
+ match runnable.args {
+ RunnableArgs::Cargo(cargo) => {
+ match cargo.override_cargo {
+ Some(override_cargo) => {
+ let mut override_parts =
+ override_cargo.split(" ").map(|s| s.to_string());
+ task_template.command = override_parts
+ .next()
+ .unwrap_or_else(|| override_cargo.clone());
+ task_template.args.extend(override_parts);
+ }
+ None => task_template.command = "cargo".to_string(),
+ };
+ task_template.env = cargo.environment;
+ task_template.cwd = Some(
+ cargo
+ .workspace_root
+ .unwrap_or(cargo.cwd)
+ .to_string_lossy()
+ .to_string(),
+ );
+ task_template.args.extend(cargo.cargo_args);
+ if !cargo.executable_args.is_empty() {
+ task_template.args.push("--".to_string());
+ task_template.args.extend(cargo.executable_args);
+ }
+ }
+ RunnableArgs::Shell(shell) => {
+ task_template.command = shell.program;
+ task_template.args = shell.args;
+ task_template.env = shell.environment;
+ task_template.cwd = Some(shell.cwd.to_string_lossy().to_string());
+ }
+ }
+
+ runnables.push((location, task_template));
+ }
+
+ Ok(LspRunnables { runnables })
+ }
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::LspExtRunnables {
+ proto::LspExtRunnables {
+ project_id,
+ buffer_id: buffer.remote_id().to_proto(),
+ position: self.position.as_ref().map(serialize_anchor),
+ }
+ }
+
+ async fn from_proto(
+ message: proto::LspExtRunnables,
+ _: Entity<LspStore>,
+ _: Entity<Buffer>,
+ _: AsyncApp,
+ ) -> Result<Self> {
+ let buffer_id = Self::buffer_id_from_proto(&message)?;
+ let position = message.position.and_then(deserialize_anchor);
+ Ok(Self {
+ buffer_id,
+ position,
+ })
+ }
+
+ fn response_to_proto(
+ response: LspRunnables,
+ lsp_store: &mut LspStore,
+ peer_id: PeerId,
+ _: &clock::Global,
+ cx: &mut App,
+ ) -> proto::LspExtRunnablesResponse {
+ proto::LspExtRunnablesResponse {
+ runnables: response
+ .runnables
+ .into_iter()
+ .map(|(location, task_template)| proto::LspRunnable {
+ location: location
+ .map(|location| location_link_to_proto(location, lsp_store, peer_id, cx)),
+ task_template: serde_json::to_vec(&task_template).unwrap(),
+ })
+ .collect(),
+ }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::LspExtRunnablesResponse,
+ lsp_store: Entity<LspStore>,
+ _: Entity<Buffer>,
+ mut cx: AsyncApp,
+ ) -> Result<LspRunnables> {
+ let mut runnables = LspRunnables {
+ runnables: Vec::new(),
+ };
+
+ for lsp_runnable in message.runnables {
+ let location = match lsp_runnable.location {
+ Some(location) => {
+ Some(location_link_from_proto(location, &lsp_store, &mut cx).await?)
+ }
+ None => None,
+ };
+ let task_template = serde_json::from_slice(&lsp_runnable.task_template)
+ .context("deserializing task template from proto")?;
+ runnables.runnables.push((location, task_template));
+ }
+
+ Ok(runnables)
+ }
+
+ fn buffer_id_from_proto(message: &proto::LspExtRunnables) -> Result<BufferId> {
+ BufferId::new(message.buffer_id)
+ }
+}
@@ -8,7 +8,7 @@ pub const RUST_ANALYZER_NAME: &str = "rust-analyzer";
/// Experimental: Informs the end user about the state of the server
///
-/// [Rust Analyzer Specification](https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/lsp-extensions.md#server-status)
+/// [Rust Analyzer Specification](https://rust-analyzer.github.io/book/contributing/lsp-extensions.html#server-status)
#[derive(Debug)]
enum ServerStatus {}
@@ -38,13 +38,10 @@ pub fn register_notifications(lsp_store: WeakEntity<LspStore>, language_server:
let name = language_server.name();
let server_id = language_server.server_id();
- let this = lsp_store;
-
language_server
.on_notification::<ServerStatus, _>({
let name = name.to_string();
move |params, cx| {
- let this = this.clone();
let name = name.to_string();
if let Some(ref message) = params.message {
let message = message.trim();
@@ -53,10 +50,10 @@ pub fn register_notifications(lsp_store: WeakEntity<LspStore>, language_server:
"Language server {name} (id {server_id}) status update: {message}"
);
match params.health {
- ServerHealthStatus::Ok => log::info!("{}", formatted_message),
- ServerHealthStatus::Warning => log::warn!("{}", formatted_message),
+ ServerHealthStatus::Ok => log::info!("{formatted_message}"),
+ ServerHealthStatus::Warning => log::warn!("{formatted_message}"),
ServerHealthStatus::Error => {
- log::error!("{}", formatted_message);
+ log::error!("{formatted_message}");
let (tx, _rx) = smol::channel::bounded(1);
let request = LanguageServerPromptRequest {
level: PromptLevel::Critical,
@@ -65,7 +62,7 @@ pub fn register_notifications(lsp_store: WeakEntity<LspStore>, language_server:
response_channel: tx,
lsp_name: name.clone(),
};
- let _ = this
+ lsp_store
.update(cx, |_, cx| {
cx.emit(LspStoreEvent::LanguageServerPrompt(request));
})
@@ -25,7 +25,7 @@ use std::{
time::Duration,
};
use task::{TaskTemplates, VsCodeTaskFile};
-use util::ResultExt;
+use util::{ResultExt, serde::default_true};
use worktree::{PathChange, UpdatedEntriesSet, Worktree, WorktreeId};
use crate::{
@@ -278,12 +278,28 @@ pub struct BinarySettings {
pub ignore_system_version: Option<bool>,
}
-#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub struct LspSettings {
pub binary: Option<BinarySettings>,
pub initialization_options: Option<serde_json::Value>,
pub settings: Option<serde_json::Value>,
+ /// If the server supports sending tasks over LSP extensions,
+ /// this setting can be used to enable or disable them in Zed.
+ /// Default: true
+ #[serde(default = "default_true")]
+ pub enable_lsp_tasks: bool,
+}
+
+impl Default for LspSettings {
+ fn default() -> Self {
+ Self {
+ binary: None,
+ initialization_options: None,
+ settings: None,
+ enable_lsp_tasks: true,
+ }
+ }
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema)]
@@ -459,6 +459,8 @@ async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
active_worktree_context: None,
other_worktree_contexts: Vec::new(),
+ lsp_task_sources: HashMap::default(),
+ latest_selection: None,
},
cx,
)
@@ -481,6 +483,8 @@ async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
worktree_context
})),
other_worktree_contexts: Vec::new(),
+ lsp_task_sources: HashMap::default(),
+ latest_selection: None,
},
cx,
)
@@ -797,7 +801,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
.receive_notification::<lsp::notification::DidCloseTextDocument>()
.await
.text_document,
- lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
+ lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
);
assert_eq!(
fake_json_server
@@ -12,13 +12,17 @@ use anyhow::Result;
use collections::{HashMap, HashSet, VecDeque};
use gpui::{App, AppContext as _, Entity, SharedString, Task};
use itertools::Itertools;
-use language::{ContextProvider, File, Language, LanguageToolchainStore, Location};
+use language::{
+ ContextProvider, File, Language, LanguageToolchainStore, Location,
+ language_settings::language_settings,
+};
+use lsp::{LanguageServerId, LanguageServerName};
use settings::{InvalidSettingsError, TaskKind, parse_json_with_comments};
use task::{
DebugTaskDefinition, ResolvedTask, TaskContext, TaskId, TaskTemplate, TaskTemplates,
TaskVariables, VariableName,
};
-use text::{Point, ToPoint};
+use text::{BufferId, Point, ToPoint};
use util::{NumericPrefixWithSuffix, ResultExt as _, paths::PathExt as _, post_inc};
use worktree::WorktreeId;
@@ -55,6 +59,8 @@ pub enum TaskSourceKind {
},
/// Languages-specific tasks coming from extensions.
Language { name: SharedString },
+ /// Language-specific tasks coming from LSP servers.
+ Lsp(LanguageServerId),
}
/// A collection of task contexts, derived from the current state of the workspace.
@@ -68,6 +74,8 @@ pub struct TaskContexts {
pub active_worktree_context: Option<(WorktreeId, TaskContext)>,
/// If there are multiple worktrees in the workspace, all non-active ones are included here.
pub other_worktree_contexts: Vec<(WorktreeId, TaskContext)>,
+ pub lsp_task_sources: HashMap<LanguageServerName, Vec<BufferId>>,
+ pub latest_selection: Option<text::Anchor>,
}
impl TaskContexts {
@@ -104,18 +112,19 @@ impl TaskContexts {
impl TaskSourceKind {
pub fn to_id_base(&self) -> String {
match self {
- TaskSourceKind::UserInput => "oneshot".to_string(),
- TaskSourceKind::AbsPath { id_base, abs_path } => {
+ Self::UserInput => "oneshot".to_string(),
+ Self::AbsPath { id_base, abs_path } => {
format!("{id_base}_{}", abs_path.display())
}
- TaskSourceKind::Worktree {
+ Self::Worktree {
id,
id_base,
directory_in_worktree,
} => {
format!("{id_base}_{id}_{}", directory_in_worktree.display())
}
- TaskSourceKind::Language { name } => format!("language_{name}"),
+ Self::Language { name } => format!("language_{name}"),
+ Self::Lsp(server_id) => format!("lsp_{server_id}"),
}
}
}
@@ -156,6 +165,11 @@ impl Inventory {
});
let global_tasks = self.global_templates_from_settings();
let language_tasks = language
+ .filter(|language| {
+ language_settings(Some(language.name()), file.as_ref(), cx)
+ .tasks
+ .enabled
+ })
.and_then(|language| language.context_provider()?.associated_tasks(file, cx))
.into_iter()
.flat_map(|tasks| tasks.0.into_iter())
@@ -171,10 +185,10 @@ impl Inventory {
/// Joins the new resolutions with the resolved tasks that were used (spawned) before,
/// orders them so that the most recently used come first, all equally used ones are ordered so that the most specific tasks come first.
/// Deduplicates the tasks by their labels and context and splits the ordered list into two: used tasks and the rest, newly resolved tasks.
- pub fn used_and_current_resolved_tasks(
- &self,
- task_contexts: &TaskContexts,
- cx: &App,
+ pub fn used_and_current_resolved_tasks<'a>(
+ &'a self,
+ task_contexts: &'a TaskContexts,
+ cx: &'a App,
) -> (
Vec<(TaskSourceKind, ResolvedTask)>,
Vec<(TaskSourceKind, ResolvedTask)>,
@@ -227,7 +241,13 @@ impl Inventory {
let not_used_score = post_inc(&mut lru_score);
let global_tasks = self.global_templates_from_settings();
+
let language_tasks = language
+ .filter(|language| {
+ language_settings(Some(language.name()), file.as_ref(), cx)
+ .tasks
+ .enabled
+ })
.and_then(|language| language.context_provider()?.associated_tasks(file, cx))
.into_iter()
.flat_map(|tasks| tasks.0.into_iter())
@@ -475,6 +495,7 @@ fn task_lru_comparator(
fn task_source_kind_preference(kind: &TaskSourceKind) -> u32 {
match kind {
+ TaskSourceKind::Lsp(..) => 0,
TaskSourceKind::Language { .. } => 1,
TaskSourceKind::UserInput => 2,
TaskSourceKind::Worktree { .. } => 3,
@@ -698,7 +719,7 @@ mod tests {
async fn test_task_list_sorting(cx: &mut TestAppContext) {
init_test(cx);
let inventory = cx.update(Inventory::new);
- let initial_tasks = resolved_task_names(&inventory, None, cx).await;
+ let initial_tasks = resolved_task_names(&inventory, None, cx);
assert!(
initial_tasks.is_empty(),
"No tasks expected for empty inventory, but got {initial_tasks:?}"
@@ -732,7 +753,7 @@ mod tests {
&expected_initial_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx).await,
+ resolved_task_names(&inventory, None, cx),
&expected_initial_state,
"Tasks with equal amount of usages should be sorted alphanumerically"
);
@@ -743,7 +764,7 @@ mod tests {
&expected_initial_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx).await,
+ resolved_task_names(&inventory, None, cx),
vec![
"2_task".to_string(),
"1_a_task".to_string(),
@@ -761,7 +782,7 @@ mod tests {
&expected_initial_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx).await,
+ resolved_task_names(&inventory, None, cx),
vec![
"3_task".to_string(),
"1_task".to_string(),
@@ -797,7 +818,7 @@ mod tests {
&expected_updated_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx).await,
+ resolved_task_names(&inventory, None, cx),
vec![
"3_task".to_string(),
"1_task".to_string(),
@@ -814,7 +835,7 @@ mod tests {
&expected_updated_state,
);
assert_eq!(
- resolved_task_names(&inventory, None, cx).await,
+ resolved_task_names(&inventory, None, cx),
vec![
"11_hello".to_string(),
"3_task".to_string(),
@@ -987,21 +1008,21 @@ mod tests {
TaskStore::init(None);
}
- async fn resolved_task_names(
+ fn resolved_task_names(
inventory: &Entity<Inventory>,
worktree: Option<WorktreeId>,
cx: &mut TestAppContext,
) -> Vec<String> {
- let (used, current) = inventory.update(cx, |inventory, cx| {
+ inventory.update(cx, |inventory, cx| {
let mut task_contexts = TaskContexts::default();
task_contexts.active_worktree_context =
worktree.map(|worktree| (worktree, TaskContext::default()));
- inventory.used_and_current_resolved_tasks(&task_contexts, cx)
- });
- used.into_iter()
- .chain(current)
- .map(|(_, task)| task.original_task().label.clone())
- .collect()
+ let (used, current) = inventory.used_and_current_resolved_tasks(&task_contexts, cx);
+ used.into_iter()
+ .chain(current)
+ .map(|(_, task)| task.original_task().label.clone())
+ .collect()
+ })
}
fn mock_tasks_from_names<'a>(task_names: impl Iterator<Item = &'a str> + 'a) -> String {
@@ -1024,17 +1045,17 @@ mod tests {
worktree: Option<WorktreeId>,
cx: &mut TestAppContext,
) -> Vec<(TaskSourceKind, String)> {
- let (used, current) = inventory.update(cx, |inventory, cx| {
+ inventory.update(cx, |inventory, cx| {
let mut task_contexts = TaskContexts::default();
task_contexts.active_worktree_context =
worktree.map(|worktree| (worktree, TaskContext::default()));
- inventory.used_and_current_resolved_tasks(&task_contexts, cx)
- });
- let mut all = used;
- all.extend(current);
- all.into_iter()
- .map(|(source_kind, task)| (source_kind, task.resolved_label))
- .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
- .collect()
+ let (used, current) = inventory.used_and_current_resolved_tasks(&task_contexts, cx);
+ let mut all = used;
+ all.extend(current);
+ all.into_iter()
+ .map(|(source_kind, task)| (source_kind, task.resolved_label))
+ .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
+ .collect()
+ })
}
}
@@ -699,3 +699,18 @@ message LanguageServerIdForName {
message LanguageServerIdForNameResponse {
optional uint64 server_id = 1;
}
+
+message LspExtRunnables {
+ uint64 project_id = 1;
+ uint64 buffer_id = 2;
+ optional Anchor position = 3;
+}
+
+message LspExtRunnablesResponse {
+ repeated LspRunnable runnables = 1;
+}
+
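+// A runnable provided by a language server: a JSON-serialized task template plus an
+// optional source location it was reported for.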
+message LspRunnable {
+ bytes task_template = 1;
+ optional LocationLink location = 2;
+}
@@ -372,12 +372,15 @@ message Envelope {
GetDocumentSymbolsResponse get_document_symbols_response = 331;
LanguageServerIdForName language_server_id_for_name = 332;
- LanguageServerIdForNameResponse language_server_id_for_name_response = 333; // current max
+ LanguageServerIdForNameResponse language_server_id_for_name_response = 333;
LoadCommitDiff load_commit_diff = 334;
LoadCommitDiffResponse load_commit_diff_response = 335;
- StopLanguageServers stop_language_servers = 336; // current max
+ StopLanguageServers stop_language_servers = 336;
+
+ LspExtRunnables lsp_ext_runnables = 337;
+ LspExtRunnablesResponse lsp_ext_runnables_response = 338; // current max
}
reserved 87 to 88;
@@ -171,6 +171,8 @@ messages!(
(LspExtExpandMacroResponse, Background),
(LspExtOpenDocs, Background),
(LspExtOpenDocsResponse, Background),
+ (LspExtRunnables, Background),
+ (LspExtRunnablesResponse, Background),
(LspExtSwitchSourceHeader, Background),
(LspExtSwitchSourceHeaderResponse, Background),
(MarkNotificationRead, Foreground),
@@ -414,6 +416,7 @@ request_messages!(
(LanguageServerIdForName, LanguageServerIdForNameResponse),
(LspExtExpandMacro, LspExtExpandMacroResponse),
(LspExtOpenDocs, LspExtOpenDocsResponse),
+ (LspExtRunnables, LspExtRunnablesResponse),
(SetRoomParticipantRole, Ack),
(BlameBuffer, BlameBufferResponse),
(RejoinRemoteProjects, RejoinRemoteProjectsResponse),
@@ -537,6 +540,7 @@ entity_messages!(
UpdateWorktreeSettings,
LspExtExpandMacro,
LspExtOpenDocs,
+ LspExtRunnables,
AdvertiseContexts,
OpenContext,
CreateContext,
@@ -13,11 +13,13 @@ path = "src/tasks_ui.rs"
[dependencies]
anyhow.workspace = true
+collections.workspace = true
debugger_ui.workspace = true
editor.workspace = true
file_icons.workspace = true
fuzzy.workspace = true
feature_flags.workspace = true
+itertools.workspace = true
gpui.workspace = true
menu.workspace = true
picker.workspace = true
@@ -7,6 +7,7 @@ use gpui::{
Focusable, InteractiveElement, ParentElement, Render, SharedString, Styled, Subscription, Task,
WeakEntity, Window, rems,
};
+use itertools::Itertools;
use picker::{Picker, PickerDelegate, highlighted_match_with_paths::HighlightedMatch};
use project::{TaskSourceKind, task_store::TaskStore};
use task::{
@@ -221,42 +222,66 @@ impl PickerDelegate for TasksModalDelegate {
cx: &mut Context<picker::Picker<Self>>,
) -> Task<()> {
let task_type = self.task_modal_type.clone();
- cx.spawn_in(window, async move |picker, cx| {
- let Some(candidates) = picker
- .update(cx, |picker, cx| match &mut picker.delegate.candidates {
- Some(candidates) => string_match_candidates(candidates.iter(), task_type),
- None => {
- let Some(task_inventory) = picker
- .delegate
- .task_store
- .read(cx)
- .task_inventory()
- .cloned()
- else {
+ let candidates = match &self.candidates {
+ Some(candidates) => Task::ready(string_match_candidates(candidates, task_type)),
+ None => {
+ if let Some(task_inventory) = self.task_store.read(cx).task_inventory().cloned() {
+ let (used, current) = task_inventory
+ .read(cx)
+ .used_and_current_resolved_tasks(&self.task_contexts, cx);
+ let workspace = self.workspace.clone();
+ let lsp_task_sources = self.task_contexts.lsp_task_sources.clone();
+ let task_position = self.task_contexts.latest_selection;
+
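+                    // Fetch LSP-provided tasks asynchronously, then splice them in between the
+                    // previously used tasks and the rest of the resolved ones, located tasks first.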
+ cx.spawn(async move |picker, cx| {
+ let Ok(lsp_tasks) = workspace.update(cx, |workspace, cx| {
+ editor::lsp_tasks(
+ workspace.project().clone(),
+ &lsp_task_sources,
+ task_position,
+ cx,
+ )
+ }) else {
return Vec::new();
};
- let (used, current) = task_inventory
- .read(cx)
- .used_and_current_resolved_tasks(&picker.delegate.task_contexts, cx);
- picker.delegate.last_used_candidate_index = if used.is_empty() {
- None
- } else {
- Some(used.len() - 1)
- };
+ let lsp_tasks = lsp_tasks.await;
+ picker
+ .update(cx, |picker, _| {
+ picker.delegate.last_used_candidate_index = if used.is_empty() {
+ None
+ } else {
+ Some(used.len() - 1)
+ };
+
+ let mut new_candidates = used;
+ new_candidates.extend(lsp_tasks.into_iter().flat_map(
+ |(kind, tasks_with_locations)| {
+ tasks_with_locations
+ .into_iter()
+ .sorted_by_key(|(location, task)| {
+ (location.is_none(), task.resolved_label.clone())
+ })
+ .map(move |(_, task)| (kind.clone(), task))
+ },
+ ));
+ new_candidates.extend(current);
+ let match_candidates =
+ string_match_candidates(&new_candidates, task_type);
+ let _ = picker.delegate.candidates.insert(new_candidates);
+ match_candidates
+ })
+ .ok()
+ .unwrap_or_default()
+ })
+ } else {
+ Task::ready(Vec::new())
+ }
+ }
+ };
- let mut new_candidates = used;
- new_candidates.extend(current);
- let match_candidates =
- string_match_candidates(new_candidates.iter(), task_type);
- let _ = picker.delegate.candidates.insert(new_candidates);
- match_candidates
- }
- })
- .ok()
- else {
- return;
- };
+ cx.spawn_in(window, async move |picker, cx| {
+ let candidates = candidates.await;
let matches = fuzzy::match_strings(
&candidates,
&query,
@@ -426,6 +451,7 @@ impl PickerDelegate for TasksModalDelegate {
color: Color::Default,
};
let icon = match source_kind {
+ TaskSourceKind::Lsp(..) => Some(Icon::new(IconName::Bolt)),
TaskSourceKind::UserInput => Some(Icon::new(IconName::Terminal)),
TaskSourceKind::AbsPath { .. } => Some(Icon::new(IconName::Settings)),
TaskSourceKind::Worktree { .. } => Some(Icon::new(IconName::FileTree)),
@@ -697,10 +723,11 @@ impl PickerDelegate for TasksModalDelegate {
}
fn string_match_candidates<'a>(
- candidates: impl Iterator<Item = &'a (TaskSourceKind, ResolvedTask)> + 'a,
+ candidates: impl IntoIterator<Item = &'a (TaskSourceKind, ResolvedTask)> + 'a,
task_modal_type: TaskModal,
) -> Vec<StringMatchCandidate> {
candidates
+ .into_iter()
.enumerate()
.filter(|(_, (_, candidate))| match candidate.task_type() {
TaskType::Script => task_modal_type == TaskModal::ScriptModal,
@@ -1,6 +1,6 @@
-use std::collections::HashMap;
use std::path::Path;
+use collections::HashMap;
use debugger_ui::Start;
use editor::Editor;
use feature_flags::{Debugger, FeatureFlagViewExt};
@@ -313,6 +313,17 @@ fn task_contexts(workspace: &Workspace, window: &mut Window, cx: &mut App) -> Ta
})
});
+ let lsp_task_sources = active_editor
+ .as_ref()
+ .map(|active_editor| active_editor.update(cx, |editor, cx| editor.lsp_task_sources(cx)))
+ .unwrap_or_default();
+
+ let latest_selection = active_editor.as_ref().map(|active_editor| {
+ active_editor.update(cx, |editor, _| {
+ editor.selections.newest_anchor().head().text_anchor
+ })
+ });
+
let mut worktree_abs_paths = workspace
.worktrees(cx)
.filter(|worktree| is_visible_directory(worktree, cx))
@@ -325,6 +336,9 @@ fn task_contexts(workspace: &Workspace, window: &mut Window, cx: &mut App) -> Ta
cx.background_spawn(async move {
let mut task_contexts = TaskContexts::default();
+ task_contexts.lsp_task_sources = lsp_task_sources;
+ task_contexts.latest_selection = latest_selection;
+
if let Some(editor_context_task) = editor_context_task {
if let Some(editor_context) = editor_context_task.await {
task_contexts.active_item_context =