@@ -1699,10 +1699,7 @@ impl Editor {
editor.tasks_update_task =
Some(editor.refresh_runnables(window, cx));
}
- editor.pull_diagnostics(window, cx);
- }
- project::Event::PullWorkspaceDiagnostics => {
- editor.pull_diagnostics(window, cx);
+ editor.pull_diagnostics(None, window, cx);
}
project::Event::SnippetEdit(id, snippet_edits) => {
if let Some(buffer) = editor.buffer.read(cx).buffer(*id) {
@@ -2105,7 +2102,7 @@ impl Editor {
editor.minimap =
editor.create_minimap(EditorSettings::get_global(cx).minimap, window, cx);
- editor.pull_diagnostics(window, cx);
+ editor.pull_diagnostics(None, window, cx);
}
editor.report_editor_event("Editor Opened", None, cx);
@@ -15974,7 +15971,12 @@ impl Editor {
});
}
- fn pull_diagnostics(&mut self, window: &Window, cx: &mut Context<Self>) -> Option<()> {
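+ /// Pulls document diagnostics for `buffer_id`, or for every buffer in the
+ /// multi-buffer when `buffer_id` is `None`. Debounced per the project's
+ /// pull diagnostics settings.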
+ fn pull_diagnostics(
+ &mut self,
+ buffer_id: Option<BufferId>,
+ window: &Window,
+ cx: &mut Context<Self>,
+ ) -> Option<()> {
let project = self.project.as_ref()?.downgrade();
let pull_diagnostics_settings = ProjectSettings::get_global(cx)
.diagnostics
@@ -15983,7 +15985,10 @@ impl Editor {
return None;
}
let debounce = Duration::from_millis(pull_diagnostics_settings.debounce_ms);
- let buffers = self.buffer.read(cx).all_buffers();
+ let mut buffers = self.buffer.read(cx).all_buffers();
+ if let Some(buffer_id) = buffer_id {
+ buffers.retain(|buffer| buffer.read(cx).remote_id() == buffer_id);
+ }
self.pull_diagnostics_task = cx.spawn_in(window, async move |editor, cx| {
cx.background_executor().timer(debounce).await;
@@ -18744,27 +18749,23 @@ impl Editor {
self.update_visible_inline_completion(window, cx);
}
if let Some(project) = self.project.as_ref() {
- project.update(cx, |project, cx| {
- if edited_buffer
- .as_ref()
- .is_some_and(|buffer| buffer.read(cx).file().is_some())
- {
- // Diagnostics are not local: an edit within one file (`pub mod foo()` -> `pub mod bar()`), may cause errors in another files with `foo()`.
- // Hence, emit a project-wide event to pull for every buffer's diagnostics that has an open editor.
- // TODO: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#diagnostic_refresh explains the flow how
- // diagnostics should be pulled: instead of pulling every open editor's buffer's diagnostics (which happens effectively due to emitting this event),
- // we should only pull for the current buffer's diagnostics and get the rest via the workspace diagnostics LSP request — this is not implemented yet.
- cx.emit(project::Event::PullWorkspaceDiagnostics);
- }
-
- if let Some(buffer) = edited_buffer {
+ if let Some(edited_buffer) = edited_buffer {
+ project.update(cx, |project, cx| {
self.registered_buffers
- .entry(buffer.read(cx).remote_id())
+ .entry(edited_buffer.read(cx).remote_id())
.or_insert_with(|| {
- project.register_buffer_with_language_servers(&buffer, cx)
+ project
+ .register_buffer_with_language_servers(&edited_buffer, cx)
});
+ });
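+ // Only the edited buffer's document diagnostics are pulled here; the
+ // remaining open buffers are covered by the workspace diagnostics pull.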
+ if edited_buffer.read(cx).file().is_some() {
+ self.pull_diagnostics(
+ Some(edited_buffer.read(cx).remote_id()),
+ window,
+ cx,
+ );
}
- });
+ }
}
cx.emit(EditorEvent::BufferEdited);
cx.emit(SearchEvent::MatchesInvalidated);
@@ -260,7 +260,9 @@ pub(crate) struct LinkedEditingRange {
}
#[derive(Clone, Debug)]
-pub(crate) struct GetDocumentDiagnostics {}
+pub(crate) struct GetDocumentDiagnostics {
+ pub previous_result_id: Option<String>,
+}
#[async_trait(?Send)]
impl LspCommand for PrepareRename {
@@ -3810,6 +3812,109 @@ impl GetDocumentDiagnostics {
data: diagnostic.data.as_ref().map(|data| data.to_string()),
})
}
+
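+ /// Flattens an LSP workspace diagnostic report (full or partial result)
+ /// into one pull result per document URI, tagged with `server_id`.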
+ pub fn deserialize_workspace_diagnostics_report(
+ report: lsp::WorkspaceDiagnosticReportResult,
+ server_id: LanguageServerId,
+ ) -> Vec<WorkspaceLspPullDiagnostics> {
+ let mut pulled_diagnostics = HashMap::default();
+ let items = match report {
+ lsp::WorkspaceDiagnosticReportResult::Report(report) => report.items,
+ lsp::WorkspaceDiagnosticReportResult::Partial(report) => report.items,
+ };
+ for report in items {
+ match report {
+ lsp::WorkspaceDocumentDiagnosticReport::Full(report) => {
+ process_full_workspace_diagnostics_report(
+ &mut pulled_diagnostics,
+ server_id,
+ report,
+ )
+ }
+ lsp::WorkspaceDocumentDiagnosticReport::Unchanged(report) => {
+ process_unchanged_workspace_diagnostics_report(
+ &mut pulled_diagnostics,
+ server_id,
+ report,
+ )
+ }
+ }
+ }
+ pulled_diagnostics.into_values().collect()
+ }
+}
+
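+ /// Diagnostics pulled for a single document via a workspace diagnostics
+ /// request, along with the document version the server reported, if any.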
+pub struct WorkspaceLspPullDiagnostics {
+ pub version: Option<i32>,
+ pub diagnostics: LspPullDiagnostics,
+}
+
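+ /// Converts a full per-document report from a workspace pull, reusing the
+ /// document-level conversion and attaching the reported version.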
+fn process_full_workspace_diagnostics_report(
+ diagnostics: &mut HashMap<lsp::Url, WorkspaceLspPullDiagnostics>,
+ server_id: LanguageServerId,
+ report: lsp::WorkspaceFullDocumentDiagnosticReport,
+) {
+ let mut new_diagnostics = HashMap::default();
+ process_full_diagnostics_report(
+ &mut new_diagnostics,
+ server_id,
+ report.uri,
+ report.full_document_diagnostic_report,
+ );
+ diagnostics.extend(new_diagnostics.into_iter().map(|(uri, diagnostics)| {
+ (
+ uri,
+ WorkspaceLspPullDiagnostics {
+ version: report.version.map(|v| v as i32),
+ diagnostics,
+ },
+ )
+ }));
+}
+
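+ /// Converts an unchanged per-document report from a workspace pull, which
+ /// only carries a `resultId` confirming the previously sent diagnostics.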
+fn process_unchanged_workspace_diagnostics_report(
+ diagnostics: &mut HashMap<lsp::Url, WorkspaceLspPullDiagnostics>,
+ server_id: LanguageServerId,
+ report: lsp::WorkspaceUnchangedDocumentDiagnosticReport,
+) {
+ let mut new_diagnostics = HashMap::default();
+ process_unchanged_diagnostics_report(
+ &mut new_diagnostics,
+ server_id,
+ report.uri,
+ report.unchanged_document_diagnostic_report,
+ );
+ diagnostics.extend(new_diagnostics.into_iter().map(|(uri, diagnostics)| {
+ (
+ uri,
+ WorkspaceLspPullDiagnostics {
+ version: report.version.map(|v| v as i32),
+ diagnostics,
+ },
+ )
+ }));
}
#[async_trait(?Send)]
@@ -3832,7 +3937,7 @@ impl LspCommand for GetDocumentDiagnostics {
fn to_lsp(
&self,
path: &Path,
- buffer: &Buffer,
+ _: &Buffer,
language_server: &Arc<LanguageServer>,
_: &App,
) -> Result<lsp::DocumentDiagnosticParams> {
@@ -3849,7 +3954,7 @@ impl LspCommand for GetDocumentDiagnostics {
uri: file_path_to_lsp_url(path)?,
},
identifier,
- previous_result_id: buffer.result_id(),
+ previous_result_id: self.previous_result_id.clone(),
partial_result_params: Default::default(),
work_done_progress_params: Default::default(),
})
@@ -3933,7 +4038,7 @@ impl LspCommand for GetDocumentDiagnostics {
async fn from_proto(
message: proto::GetDocumentDiagnostics,
- _: Entity<LspStore>,
+ lsp_store: Entity<LspStore>,
buffer: Entity<Buffer>,
mut cx: AsyncApp,
) -> Result<Self> {
@@ -3942,7 +4047,11 @@ impl LspCommand for GetDocumentDiagnostics {
buffer.wait_for_version(deserialize_version(&message.version))
})?
.await?;
- Ok(Self {})
+ let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
+ Ok(Self {
+ previous_result_id: lsp_store
+ .update(&mut cx, |lsp_store, _| lsp_store.result_id(buffer_id))?,
+ })
}
fn response_to_proto(
@@ -4,8 +4,8 @@ pub mod rust_analyzer_ext;
use crate::{
CodeAction, Completion, CompletionResponse, CompletionSource, CoreCompletion, Hover, InlayHint,
- LspAction, LspPullDiagnostics, ProjectItem, ProjectPath, ProjectTransaction, ResolveState,
- Symbol, ToolchainStore,
+ LspAction, LspPullDiagnostics, ProjectItem, ProjectPath, ProjectTransaction, PulledDiagnostics,
+ ResolveState, Symbol, ToolchainStore,
buffer_store::{BufferStore, BufferStoreEvent},
environment::ProjectEnvironment,
lsp_command::{self, *},
@@ -61,7 +61,7 @@ use lsp::{
};
use node_runtime::read_package_installed_version;
use parking_lot::Mutex;
-use postage::watch;
+use postage::{mpsc, sink::Sink, stream::Stream, watch};
use rand::prelude::*;
use rpc::{
@@ -90,7 +90,7 @@ use std::{
use text::{Anchor, BufferId, LineEnding, OffsetRangeExt};
use url::Url;
use util::{
- ResultExt as _, debug_panic, defer, maybe, merge_json_value_into,
+ ConnectionResult, ResultExt as _, debug_panic, defer, maybe, merge_json_value_into,
paths::{PathExt, SanitizedPath},
post_inc,
};
@@ -166,6 +166,7 @@ pub struct LocalLspStore {
_subscription: gpui::Subscription,
lsp_tree: Entity<LanguageServerTree>,
registered_buffers: HashMap<BufferId, usize>,
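+ /// The `resultId` from the most recent diagnostics pull for each buffer, if any.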
+ buffer_pull_diagnostics_result_ids: HashMap<BufferId, Option<String>>,
}
impl LocalLspStore {
@@ -871,13 +872,17 @@ impl LocalLspStore {
let this = this.clone();
let mut cx = cx.clone();
async move {
- this.update(&mut cx, |this, cx| {
- cx.emit(LspStoreEvent::PullWorkspaceDiagnostics);
- this.downstream_client.as_ref().map(|(client, project_id)| {
- client.send(proto::PullWorkspaceDiagnostics {
- project_id: *project_id,
+ this.update(&mut cx, |lsp_store, _| {
+ lsp_store.pull_workspace_diagnostics(server_id);
+ lsp_store
+ .downstream_client
+ .as_ref()
+ .map(|(client, project_id)| {
+ client.send(proto::PullWorkspaceDiagnostics {
+ project_id: *project_id,
+ server_id: server_id.to_proto(),
+ })
})
- })
})?
.transpose()?;
Ok(())
@@ -2290,9 +2295,11 @@ impl LocalLspStore {
let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
buffer.update(cx, |buffer, cx| {
- buffer.set_result_id(result_id);
+ self.buffer_pull_diagnostics_result_ids
+ .insert(buffer.remote_id(), result_id);
buffer.update_diagnostics(server_id, set, cx)
});
+
Ok(())
}
@@ -3497,7 +3504,6 @@ pub enum LspStoreEvent {
edits: Vec<(lsp::Range, Snippet)>,
most_recent_edit: clock::Lamport,
},
- PullWorkspaceDiagnostics,
}
#[derive(Clone, Debug, Serialize)]
@@ -3680,7 +3686,8 @@ impl LspStore {
this.as_local_mut().unwrap().shutdown_language_servers(cx)
}),
lsp_tree: LanguageServerTree::new(manifest_tree, languages.clone(), cx),
- registered_buffers: Default::default(),
+ registered_buffers: HashMap::default(),
+ buffer_pull_diagnostics_result_ids: HashMap::default(),
}),
last_formatting_failure: None,
downstream_client: None,
@@ -3784,6 +3791,11 @@ impl LspStore {
}
}
}
+ BufferStoreEvent::BufferDropped(buffer_id) => {
+ if let Some(local) = self.as_local_mut() {
+ local.buffer_pull_diagnostics_result_ids.remove(buffer_id);
+ }
+ }
_ => {}
}
}
@@ -5733,6 +5745,7 @@ impl LspStore {
) -> Task<Result<Vec<LspPullDiagnostics>>> {
let buffer = buffer_handle.read(cx);
let buffer_id = buffer.remote_id();
+ let result_id = self.result_id(buffer_id);
if let Some((client, upstream_project_id)) = self.upstream_client() {
let request_task = client.request(proto::MultiLspQuery {
@@ -5743,7 +5756,10 @@ impl LspStore {
proto::AllLanguageServers {},
)),
request: Some(proto::multi_lsp_query::Request::GetDocumentDiagnostics(
- GetDocumentDiagnostics {}.to_proto(upstream_project_id, buffer_handle.read(cx)),
+ GetDocumentDiagnostics {
+ previous_result_id: result_id.clone(),
+ }
+ .to_proto(upstream_project_id, buffer_handle.read(cx)),
)),
});
let buffer = buffer_handle.clone();
@@ -5765,7 +5781,10 @@ impl LspStore {
}
})
.map(|diagnostics_response| {
- GetDocumentDiagnostics {}.response_from_proto(
+ GetDocumentDiagnostics {
+ previous_result_id: result_id.clone(),
+ }
+ .response_from_proto(
diagnostics_response,
project.clone(),
buffer.clone(),
@@ -5786,7 +5805,9 @@ impl LspStore {
let all_actions_task = self.request_multiple_lsp_locally(
&buffer_handle,
None::<PointUtf16>,
- GetDocumentDiagnostics {},
+ GetDocumentDiagnostics {
+ previous_result_id: result_id,
+ },
cx,
);
cx.spawn(async move |_, _| Ok(all_actions_task.await.into_iter().flatten().collect()))
@@ -6323,6 +6344,7 @@ impl LspStore {
},
)
.ok();
+ self.pull_workspace_diagnostics(language_server.server_id());
}
None
@@ -8172,12 +8194,13 @@ impl LspStore {
}
async fn handle_pull_workspace_diagnostics(
- this: Entity<Self>,
- _: TypedEnvelope<proto::PullWorkspaceDiagnostics>,
+ lsp_store: Entity<Self>,
+ envelope: TypedEnvelope<proto::PullWorkspaceDiagnostics>,
mut cx: AsyncApp,
) -> Result<proto::Ack> {
- this.update(&mut cx, |_, cx| {
- cx.emit(LspStoreEvent::PullWorkspaceDiagnostics);
+ let server_id = LanguageServerId::from_proto(envelope.payload.server_id);
+ lsp_store.update(&mut cx, |lsp_store, _| {
+ lsp_store.pull_workspace_diagnostics(server_id);
})?;
Ok(proto::Ack {})
}
@@ -9097,14 +9120,19 @@ impl LspStore {
// Update language_servers collection with Running variant of LanguageServerState
// indicating that the server is up and running and ready
let workspace_folders = workspace_folders.lock().clone();
+ language_server.set_workspace_folders(workspace_folders);
+
local.language_servers.insert(
server_id,
- LanguageServerState::running(
- workspace_folders,
- adapter.clone(),
- language_server.clone(),
- None,
- ),
+ LanguageServerState::Running {
+ workspace_refresh_task: lsp_workspace_diagnostics_refresh(
+ language_server.clone(),
+ cx,
+ ),
+ adapter: adapter.clone(),
+ server: language_server.clone(),
+ simulate_disk_based_diagnostics_completion: None,
+ },
);
if let Some(file_ops_caps) = language_server
.capabilities()
@@ -9675,6 +9703,229 @@ impl LspStore {
}
}
}
+
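+ /// Returns the `resultId` from the last diagnostics pull for this buffer,
+ /// so the next request can report it as `previousResultId` to the server.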
+ pub fn result_id(&self, buffer_id: BufferId) -> Option<String> {
+ self.as_local()?
+ .buffer_pull_diagnostics_result_ids
+ .get(&buffer_id)
+ .cloned()
+ .flatten()
+ }
+
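+ /// Returns the known `resultId`s for all buffers, used as the
+ /// `previousResultIds` of a workspace diagnostics request.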
+ pub fn all_result_ids(&self) -> HashMap<BufferId, String> {
+ let Some(local) = self.as_local() else {
+ return HashMap::default();
+ };
+ local
+ .buffer_pull_diagnostics_result_ids
+ .iter()
+ .filter_map(|(buffer_id, result_id)| Some((*buffer_id, result_id.clone()?)))
+ .collect()
+ }
+
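+ /// Requests a workspace diagnostics pull from the given server's refresh task.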
+ pub fn pull_workspace_diagnostics(&mut self, server_id: LanguageServerId) {
+ if let Some(LanguageServerState::Running {
+ workspace_refresh_task: Some((tx, _)),
+ ..
+ }) = self
+ .as_local_mut()
+ .and_then(|local| local.language_servers.get_mut(&server_id))
+ {
+ tx.try_send(()).ok();
+ }
+ }
+
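+ /// Requests a workspace diagnostics pull from every language server
+ /// attached to the given buffer.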
+ pub fn pull_workspace_diagnostics_for_buffer(&mut self, buffer_id: BufferId, cx: &mut App) {
+ let Some(buffer) = self.buffer_store().read(cx).get_existing(buffer_id).ok() else {
+ return;
+ };
+ let Some(local) = self.as_local_mut() else {
+ return;
+ };
+
+ for server_id in buffer.update(cx, |buffer, cx| {
+ local.language_server_ids_for_buffer(buffer, cx)
+ }) {
+ if let Some(LanguageServerState::Running {
+ workspace_refresh_task: Some((tx, _)),
+ ..
+ }) = local.language_servers.get_mut(&server_id)
+ {
+ tx.try_send(()).ok();
+ }
+ }
+ }
+}
+
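+ /// Spawns a task that pulls workspace diagnostics from `server` whenever a
+ /// message arrives on the returned channel, retrying closed or timed-out
+ /// requests with capped exponential backoff. Returns `None` when the server
+ /// does not advertise workspace diagnostics support.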
+fn lsp_workspace_diagnostics_refresh(
+ server: Arc<LanguageServer>,
+ cx: &mut Context<'_, LspStore>,
+) -> Option<(mpsc::Sender<()>, Task<()>)> {
+ let identifier = match server.capabilities().diagnostic_provider? {
+ lsp::DiagnosticServerCapabilities::Options(diagnostic_options) => {
+ if !diagnostic_options.workspace_diagnostics {
+ return None;
+ }
+ diagnostic_options.identifier
+ }
+ lsp::DiagnosticServerCapabilities::RegistrationOptions(registration_options) => {
+ let diagnostic_options = registration_options.diagnostic_options;
+ if !diagnostic_options.workspace_diagnostics {
+ return None;
+ }
+ diagnostic_options.identifier
+ }
+ };
+
+ let (mut tx, mut rx) = mpsc::channel(1);
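+ // Prime the channel so the task performs an initial pull as soon as it starts.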
+ tx.try_send(()).ok();
+
+ let workspace_query_language_server = cx.spawn(async move |lsp_store, cx| {
+ let mut attempts = 0;
+ let max_attempts = 50;
+
+ loop {
+ let Some(()) = rx.recv().await else {
+ return;
+ };
+
+ 'request: loop {
+ if attempts > max_attempts {
+ log::error!(
+ "Failed to pull workspace diagnostics {max_attempts} times, aborting"
+ );
+ return;
+ }
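+ // Back off exponentially between attempts, capped at one second.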
+ let backoff_millis = (50 * (1 << attempts)).clamp(30, 1000);
+ cx.background_executor()
+ .timer(Duration::from_millis(backoff_millis))
+ .await;
+ attempts += 1;
+
+ let Ok(previous_result_ids) = lsp_store.update(cx, |lsp_store, cx| {
+ lsp_store
+ .all_result_ids()
+ .into_iter()
+ .filter_map(|(buffer_id, result_id)| {
+ let buffer = lsp_store
+ .buffer_store()
+ .read(cx)
+ .get_existing(buffer_id)
+ .ok()?;
+ let abs_path = buffer.read(cx).file()?.as_local()?.abs_path(cx);
+ let uri = file_path_to_lsp_url(&abs_path).ok()?;
+ Some(lsp::PreviousResultId {
+ uri,
+ value: result_id,
+ })
+ })
+ .collect()
+ }) else {
+ return;
+ };
+
+ let response_result = server
+ .request::<lsp::WorkspaceDiagnosticRequest>(lsp::WorkspaceDiagnosticParams {
+ previous_result_ids,
+ identifier: identifier.clone(),
+ work_done_progress_params: Default::default(),
+ partial_result_params: Default::default(),
+ })
+ .await;
+ // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#diagnostic_refresh
+ // > If a server closes a workspace diagnostic pull request the client should re-trigger the request.
+ match response_result {
+ ConnectionResult::Timeout => {
+ log::error!("Timeout during workspace diagnostics pull");
+ continue 'request;
+ }
+ ConnectionResult::ConnectionReset => {
+ log::error!("Server closed a workspace diagnostics pull request");
+ continue 'request;
+ }
+ ConnectionResult::Result(Err(e)) => {
+ log::error!("Error during workspace diagnostics pull: {e:#}");
+ break 'request;
+ }
+ ConnectionResult::Result(Ok(pulled_diagnostics)) => {
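+ // A successful response resets the backoff counter.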
+ attempts = 0;
+ if lsp_store
+ .update(cx, |lsp_store, cx| {
+ let workspace_diagnostics =
+ GetDocumentDiagnostics::deserialize_workspace_diagnostics_report(
+ pulled_diagnostics,
+ server.server_id(),
+ );
+ for workspace_diagnostics in workspace_diagnostics {
+ let LspPullDiagnostics::Response {
+ server_id,
+ uri,
+ diagnostics,
+ } = workspace_diagnostics.diagnostics
+ else {
+ continue;
+ };
+
+ let adapter = lsp_store.language_server_adapter_for_id(server_id);
+ let disk_based_sources = adapter
+ .as_ref()
+ .map(|adapter| adapter.disk_based_diagnostic_sources.as_slice())
+ .unwrap_or(&[]);
+
+ match diagnostics {
+ PulledDiagnostics::Unchanged { result_id } => {
+ lsp_store
+ .merge_diagnostics(
+ server_id,
+ lsp::PublishDiagnosticsParams {
+ uri: uri.clone(),
+ diagnostics: Vec::new(),
+ version: None,
+ },
+ Some(result_id),
+ DiagnosticSourceKind::Pulled,
+ disk_based_sources,
+ |_, _| true,
+ cx,
+ )
+ .log_err();
+ }
+ PulledDiagnostics::Changed {
+ diagnostics,
+ result_id,
+ } => {
+ lsp_store
+ .merge_diagnostics(
+ server_id,
+ lsp::PublishDiagnosticsParams {
+ uri: uri.clone(),
+ diagnostics,
+ version: workspace_diagnostics.version,
+ },
+ result_id,
+ DiagnosticSourceKind::Pulled,
+ disk_based_sources,
+ |old_diagnostic, _| match old_diagnostic.source_kind {
+ DiagnosticSourceKind::Pulled => false,
+ DiagnosticSourceKind::Other
+ | DiagnosticSourceKind::Pushed => true,
+ },
+ cx,
+ )
+ .log_err();
+ }
+ }
+ }
+ })
+ .is_err()
+ {
+ return;
+ }
+ break 'request;
+ }
+ }
+ }
+ }
+ });
+
+ Some((tx, workspace_query_language_server))
}
fn resolve_word_completion(snapshot: &BufferSnapshot, completion: &mut Completion) {
@@ -10055,6 +10306,7 @@ pub enum LanguageServerState {
adapter: Arc<CachedLspAdapter>,
server: Arc<LanguageServer>,
simulate_disk_based_diagnostics_completion: Option<Task<()>>,
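+ /// Sender used to request a workspace diagnostics pull, plus the task serving those requests.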
+ workspace_refresh_task: Option<(mpsc::Sender<()>, Task<()>)>,
},
}
@@ -10083,19 +10335,6 @@ impl LanguageServerState {
LanguageServerState::Running { server, .. } => server.remove_workspace_folder(uri),
}
}
- fn running(
- workspace_folders: BTreeSet<Url>,
- adapter: Arc<CachedLspAdapter>,
- server: Arc<LanguageServer>,
- simulate_disk_based_diagnostics_completion: Option<Task<()>>,
- ) -> Self {
- server.set_workspace_folders(workspace_folders);
- Self::Running {
- adapter,
- server,
- simulate_disk_based_diagnostics_completion,
- }
- }
}
impl std::fmt::Debug for LanguageServerState {