Detailed changes
@@ -25,6 +25,7 @@ use gpui::{
use indoc::indoc;
use language::FakeLspAdapter;
use lsp::LSP_REQUEST_TIMEOUT;
+use pretty_assertions::assert_eq;
use project::{
ProgressToken, ProjectPath, SERVER_PROGRESS_THROTTLE_TIMEOUT,
lsp_store::lsp_ext_command::{ExpandedMacro, LspExtExpandMacro},
@@ -3192,13 +3193,12 @@ async fn test_lsp_pull_diagnostics(
.collect::<Vec<_>>();
let expected_messages = [
expected_pull_diagnostic_lib_message,
- // TODO bug: the pushed diagnostics are not being sent to the client when they open the corresponding buffer.
- // expected_push_diagnostic_lib_message,
+ expected_push_diagnostic_lib_message,
];
assert_eq!(
all_diagnostics.len(),
- 1,
- "Expected pull diagnostics, but got: {all_diagnostics:?}"
+ 2,
+ "Expected pull and push diagnostics, but got: {all_diagnostics:?}"
);
for diagnostic in all_diagnostics {
assert!(
@@ -3258,14 +3258,15 @@ async fn test_lsp_pull_diagnostics(
.diagnostics_in_range(MultiBufferOffset(0)..snapshot.len())
.collect::<Vec<_>>();
let expected_messages = [
- expected_workspace_pull_diagnostics_lib_message,
- // TODO bug: the pushed diagnostics are not being sent to the client when they open the corresponding buffer.
- // expected_push_diagnostic_lib_message,
+        // Despite workspace diagnostics being provided,
+        // the currently open file's diagnostics should be preferred, as the LSP spec suggests.
+ expected_pull_diagnostic_lib_message,
+ expected_push_diagnostic_lib_message,
];
assert_eq!(
all_diagnostics.len(),
- 1,
- "Expected pull diagnostics, but got: {all_diagnostics:?}"
+ 2,
+ "Expected pull and push diagnostics, but got: {all_diagnostics:?}"
);
for diagnostic in all_diagnostics {
assert!(
@@ -3378,8 +3379,9 @@ async fn test_lsp_pull_diagnostics(
"Another workspace diagnostics pull should happen after the diagnostics refresh server request"
);
{
- assert!(
- diagnostics_pulls_result_ids.lock().await.len() == diagnostic_pulls_result_ids,
+ assert_eq!(
+ diagnostics_pulls_result_ids.lock().await.len(),
+ diagnostic_pulls_result_ids,
"Pulls should not happen hence no extra ids should appear"
);
assert!(
@@ -3397,7 +3399,7 @@ async fn test_lsp_pull_diagnostics(
expected_pull_diagnostic_lib_message,
expected_push_diagnostic_lib_message,
];
- assert_eq!(all_diagnostics.len(), 1);
+ assert_eq!(all_diagnostics.len(), 2);
for diagnostic in &all_diagnostics {
assert!(
expected_messages.contains(&diagnostic.diagnostic.message.as_str()),
@@ -1172,6 +1172,7 @@ pub struct Editor {
gutter_breakpoint_indicator: (Option<PhantomBreakpointIndicator>, Option<Task<()>>),
hovered_diff_hunk_row: Option<DisplayRow>,
pull_diagnostics_task: Task<()>,
+ pull_diagnostics_background_task: Task<()>,
in_project_search: bool,
previous_search_ranges: Option<Arc<[Range<Anchor>]>>,
breadcrumb_header: Option<String>,
@@ -2316,6 +2317,7 @@ impl Editor {
.unwrap_or_default(),
tasks_update_task: None,
pull_diagnostics_task: Task::ready(()),
+ pull_diagnostics_background_task: Task::ready(()),
colors: None,
refresh_colors_task: Task::ready(()),
inlay_hints: None,
@@ -2492,7 +2494,6 @@ impl Editor {
if let Some(buffer) = multi_buffer.read(cx).as_singleton() {
editor.register_buffer(buffer.read(cx).remote_id(), cx);
}
- editor.update_lsp_data(None, window, cx);
editor.report_editor_event(ReportEditorEvent::EditorOpened, None, cx);
}
@@ -18400,54 +18401,101 @@ impl Editor {
return None;
}
let project = self.project()?.downgrade();
- let debounce = Duration::from_millis(pull_diagnostics_settings.debounce_ms);
- let mut buffers = self.buffer.read(cx).all_buffers();
- buffers.retain(|buffer| {
- let buffer_id_to_retain = buffer.read(cx).remote_id();
- buffer_id.is_none_or(|buffer_id| buffer_id == buffer_id_to_retain)
- && self.registered_buffers.contains_key(&buffer_id_to_retain)
- });
- if buffers.is_empty() {
+
+ let mut edited_buffer_ids = HashSet::default();
+ let mut edited_worktree_ids = HashSet::default();
+ let edited_buffers = match buffer_id {
+ Some(buffer_id) => {
+ let buffer = self.buffer().read(cx).buffer(buffer_id)?;
+ let worktree_id = buffer.read(cx).file().map(|f| f.worktree_id(cx))?;
+ edited_buffer_ids.insert(buffer.read(cx).remote_id());
+ edited_worktree_ids.insert(worktree_id);
+ vec![buffer]
+ }
+ None => self
+ .buffer()
+ .read(cx)
+ .all_buffers()
+ .into_iter()
+ .filter(|buffer| {
+ let buffer = buffer.read(cx);
+ match buffer.file().map(|f| f.worktree_id(cx)) {
+ Some(worktree_id) => {
+ edited_buffer_ids.insert(buffer.remote_id());
+ edited_worktree_ids.insert(worktree_id);
+ true
+ }
+ None => false,
+ }
+ })
+ .collect::<Vec<_>>(),
+ };
+
+ if edited_buffers.is_empty() {
self.pull_diagnostics_task = Task::ready(());
+ self.pull_diagnostics_background_task = Task::ready(());
return None;
}
- self.pull_diagnostics_task = cx.spawn_in(window, async move |editor, cx| {
- cx.background_executor().timer(debounce).await;
+ let mut already_used_buffers = HashSet::default();
+ let related_open_buffers = self
+ .workspace
+ .as_ref()
+ .and_then(|(workspace, _)| workspace.upgrade())
+ .into_iter()
+ .flat_map(|workspace| workspace.read(cx).panes())
+ .flat_map(|pane| pane.read(cx).items_of_type::<Editor>())
+ .filter(|editor| editor != &cx.entity())
+ .flat_map(|editor| editor.read(cx).buffer().read(cx).all_buffers())
+ .filter(|buffer| {
+ let buffer = buffer.read(cx);
+ let buffer_id = buffer.remote_id();
+ if already_used_buffers.insert(buffer_id) {
+ if let Some(worktree_id) = buffer.file().map(|f| f.worktree_id(cx)) {
+ return !edited_buffer_ids.contains(&buffer_id)
+ && !edited_worktree_ids.contains(&worktree_id);
+ }
+ }
+ false
+ })
+ .collect::<Vec<_>>();
+
+ let debounce = Duration::from_millis(pull_diagnostics_settings.debounce_ms);
+ let make_spawn = |buffers: Vec<Entity<Buffer>>, delay: Duration| {
+ if buffers.is_empty() {
+ return Task::ready(());
+ }
+ let project_weak = project.clone();
+ cx.spawn_in(window, async move |_, cx| {
+ cx.background_executor().timer(delay).await;
- let Ok(mut pull_diagnostics_tasks) = cx.update(|_, cx| {
- buffers
- .into_iter()
- .filter_map(|buffer| {
- project
- .update(cx, |project, cx| {
- project.lsp_store().update(cx, |lsp_store, cx| {
- lsp_store.pull_diagnostics_for_buffer(buffer, cx)
+ let Ok(mut pull_diagnostics_tasks) = cx.update(|_, cx| {
+ buffers
+ .into_iter()
+ .filter_map(|buffer| {
+ project_weak
+ .update(cx, |project, cx| {
+ project.lsp_store().update(cx, |lsp_store, cx| {
+ lsp_store.pull_diagnostics_for_buffer(buffer, cx)
+ })
})
- })
- .ok()
- })
- .collect::<FuturesUnordered<_>>()
- }) else {
- return;
- };
+ .ok()
+ })
+ .collect::<FuturesUnordered<_>>()
+ }) else {
+ return;
+ };
- while let Some(pull_task) = pull_diagnostics_tasks.next().await {
- match pull_task {
- Ok(()) => {
- if editor
- .update_in(cx, |editor, window, cx| {
- editor.update_diagnostics_state(window, cx);
- })
- .is_err()
- {
- return;
- }
+ while let Some(pull_task) = pull_diagnostics_tasks.next().await {
+ if let Err(e) = pull_task {
+ log::error!("Failed to update project diagnostics: {e:#}");
}
- Err(e) => log::error!("Failed to update project diagnostics: {e:#}"),
}
- }
- });
+ })
+ };
+
+ self.pull_diagnostics_task = make_spawn(edited_buffers, debounce);
+ self.pull_diagnostics_background_task = make_spawn(related_open_buffers, debounce * 2);
Some(())
}
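
A condensed sketch of the split introduced above, using plain std types as hypothetical stand-ins for the real buffer and worktree ids: buffers that were just edited are pulled after the configured debounce, while the remaining open buffers outside the edited worktrees are left to the new background task with twice the debounce.

```rust
use std::collections::HashSet;
use std::time::Duration;

// Hypothetical stand-ins for the real BufferId / WorktreeId types.
type BufferId = u64;
type WorktreeId = u64;

struct OpenBuffer {
    id: BufferId,
    worktree: Option<WorktreeId>,
}

/// Split open buffers into the edited set (pulled after `debounce`) and the
/// related set (other open buffers outside the edited worktrees, pulled by the
/// background task after `debounce * 2`).
fn split_pull_targets(
    edited: &[OpenBuffer],
    all_open: &[OpenBuffer],
    debounce: Duration,
) -> ((Vec<BufferId>, Duration), (Vec<BufferId>, Duration)) {
    let edited_ids: HashSet<BufferId> = edited.iter().map(|b| b.id).collect();
    let edited_worktrees: HashSet<WorktreeId> =
        edited.iter().filter_map(|b| b.worktree).collect();
    let related: Vec<BufferId> = all_open
        .iter()
        .filter(|b| {
            !edited_ids.contains(&b.id)
                && b.worktree.is_some_and(|w| !edited_worktrees.contains(&w))
        })
        .map(|b| b.id)
        .collect();
    (
        (edited.iter().map(|b| b.id).collect(), debounce),
        (related, debounce * 2),
    )
}
```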
@@ -26589,7 +26589,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) {
}
});
- let ensure_result_id = |expected: Option<String>, cx: &mut TestAppContext| {
+ let ensure_result_id = |expected: Option<SharedString>, cx: &mut TestAppContext| {
project.update(cx, |project, cx| {
let buffer_id = editor
.read(cx)
@@ -26602,7 +26602,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) {
let buffer_result_id = project
.lsp_store()
.read(cx)
- .result_id(server_id, buffer_id, cx);
+ .result_id_for_buffer_pull(server_id, buffer_id, &None, cx);
assert_eq!(expected, buffer_result_id);
});
};
@@ -26619,7 +26619,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) {
.next()
.await
.expect("should have sent the first diagnostics pull request");
- ensure_result_id(Some("1".to_string()), cx);
+ ensure_result_id(Some(SharedString::new("1")), cx);
// Editing should trigger diagnostics
editor.update_in(cx, |editor, window, cx| {
@@ -26632,7 +26632,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) {
2,
"Editing should trigger diagnostic request"
);
- ensure_result_id(Some("2".to_string()), cx);
+ ensure_result_id(Some(SharedString::new("2")), cx);
// Moving cursor should not trigger diagnostic request
editor.update_in(cx, |editor, window, cx| {
@@ -26647,7 +26647,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) {
2,
"Cursor movement should not trigger diagnostic request"
);
- ensure_result_id(Some("2".to_string()), cx);
+ ensure_result_id(Some(SharedString::new("2")), cx);
// Multiple rapid edits should be debounced
for _ in 0..5 {
editor.update_in(cx, |editor, window, cx| {
@@ -26662,7 +26662,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) {
final_requests <= 4,
"Multiple rapid edits should be debounced (got {final_requests} requests)",
);
- ensure_result_id(Some(final_requests.to_string()), cx);
+ ensure_result_id(Some(SharedString::new(final_requests.to_string())), cx);
}
#[gpui::test]
@@ -237,6 +237,8 @@ struct SelectionSet {
pub struct Diagnostic {
/// The name of the service that produced this diagnostic.
pub source: Option<String>,
+ /// The ID provided by the dynamic registration that produced this diagnostic.
+ pub registration_id: Option<SharedString>,
/// A machine-readable code that identifies this diagnostic.
pub code: Option<NumberOrString>,
pub code_description: Option<lsp::Uri>,
@@ -5390,6 +5392,7 @@ impl Default for Diagnostic {
is_unnecessary: false,
underline: true,
data: None,
+ registration_id: None,
}
}
}
@@ -3,6 +3,7 @@
use crate::{CursorShape, Diagnostic, DiagnosticSourceKind, diagnostic_set::DiagnosticEntry};
use anyhow::{Context as _, Result};
use clock::ReplicaId;
+use gpui::SharedString;
use lsp::{DiagnosticSeverity, LanguageServerId};
use rpc::proto;
use serde_json::Value;
@@ -239,6 +240,11 @@ pub fn serialize_diagnostics<'a>(
is_disk_based: entry.diagnostic.is_disk_based,
is_unnecessary: entry.diagnostic.is_unnecessary,
data: entry.diagnostic.data.as_ref().map(|data| data.to_string()),
+ registration_id: entry
+ .diagnostic
+ .registration_id
+ .as_ref()
+ .map(ToString::to_string),
})
.collect()
}
@@ -457,6 +463,7 @@ pub fn deserialize_diagnostics(
is_disk_based: diagnostic.is_disk_based,
is_unnecessary: diagnostic.is_unnecessary,
underline: diagnostic.underline,
+ registration_id: diagnostic.registration_id.map(SharedString::from),
source_kind: match proto::diagnostic::SourceKind::from_i32(
diagnostic.source_kind,
)? {
@@ -2283,6 +2283,7 @@ impl MultiBuffer {
cx: &mut Context<Self>,
) {
use language::BufferEvent;
+ let buffer_id = buffer.read(cx).remote_id();
cx.emit(match event {
BufferEvent::Edited => Event::Edited {
edited_buffer: Some(buffer),
@@ -2291,8 +2292,8 @@ impl MultiBuffer {
BufferEvent::Saved => Event::Saved,
BufferEvent::FileHandleChanged => Event::FileHandleChanged,
BufferEvent::Reloaded => Event::Reloaded,
- BufferEvent::LanguageChanged => Event::LanguageChanged(buffer.read(cx).remote_id()),
- BufferEvent::Reparsed => Event::Reparsed(buffer.read(cx).remote_id()),
+ BufferEvent::LanguageChanged => Event::LanguageChanged(buffer_id),
+ BufferEvent::Reparsed => Event::Reparsed(buffer_id),
BufferEvent::DiagnosticsUpdated => Event::DiagnosticsUpdated,
BufferEvent::CapabilityChanged => {
self.capability = buffer.read(cx).capability();
@@ -14,7 +14,7 @@ use client::proto::{self, PeerId};
use clock::Global;
use collections::{HashMap, HashSet};
use futures::future;
-use gpui::{App, AsyncApp, Entity, Task};
+use gpui::{App, AsyncApp, Entity, SharedString, Task};
use language::{
Anchor, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CharKind, CharScopeContext,
OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction, Unclipped,
@@ -26,8 +26,8 @@ use language::{
use lsp::{
AdapterServerCapabilities, CodeActionKind, CodeActionOptions, CodeDescription,
CompletionContext, CompletionListItemDefaultsEditRange, CompletionTriggerKind,
- DiagnosticServerCapabilities, DocumentHighlightKind, LanguageServer, LanguageServerId,
- LinkedEditingRangeServerCapabilities, OneOf, RenameOptions, ServerCapabilities,
+ DocumentHighlightKind, LanguageServer, LanguageServerId, LinkedEditingRangeServerCapabilities,
+ OneOf, RenameOptions, ServerCapabilities,
};
use serde_json::Value;
use signature_help::{lsp_to_proto_signature, proto_to_lsp_signature};
@@ -265,8 +265,9 @@ pub(crate) struct LinkedEditingRange {
pub(crate) struct GetDocumentDiagnostics {
     /// We cannot blindly rely on the server's capabilities.diagnostic_provider, as it is a single field, whereas
     /// a server can register multiple diagnostic providers at any point after startup.
- pub dynamic_caps: DiagnosticServerCapabilities,
- pub previous_result_id: Option<String>,
+ pub registration_id: Option<SharedString>,
+ pub identifier: Option<String>,
+ pub previous_result_id: Option<SharedString>,
}
#[async_trait(?Send)]
@@ -3755,15 +3756,16 @@ impl GetDocumentDiagnostics {
.into_iter()
.filter_map(|diagnostics| {
Some(LspPullDiagnostics::Response {
+ registration_id: diagnostics.registration_id.map(SharedString::from),
server_id: LanguageServerId::from_proto(diagnostics.server_id),
uri: lsp::Uri::from_str(diagnostics.uri.as_str()).log_err()?,
diagnostics: if diagnostics.changed {
PulledDiagnostics::Unchanged {
- result_id: diagnostics.result_id?,
+ result_id: SharedString::new(diagnostics.result_id?),
}
} else {
PulledDiagnostics::Changed {
- result_id: diagnostics.result_id,
+ result_id: diagnostics.result_id.map(SharedString::new),
diagnostics: diagnostics
.diagnostics
.into_iter()
@@ -3927,6 +3929,7 @@ impl GetDocumentDiagnostics {
pub fn deserialize_workspace_diagnostics_report(
report: lsp::WorkspaceDiagnosticReportResult,
server_id: LanguageServerId,
+ registration_id: Option<SharedString>,
) -> Vec<WorkspaceLspPullDiagnostics> {
let mut pulled_diagnostics = HashMap::default();
match report {
@@ -3938,6 +3941,7 @@ impl GetDocumentDiagnostics {
&mut pulled_diagnostics,
server_id,
report,
+ registration_id.clone(),
)
}
lsp::WorkspaceDocumentDiagnosticReport::Unchanged(report) => {
@@ -3945,6 +3949,7 @@ impl GetDocumentDiagnostics {
&mut pulled_diagnostics,
server_id,
report,
+ registration_id.clone(),
)
}
}
@@ -3960,6 +3965,7 @@ impl GetDocumentDiagnostics {
&mut pulled_diagnostics,
server_id,
report,
+ registration_id.clone(),
)
}
lsp::WorkspaceDocumentDiagnosticReport::Unchanged(report) => {
@@ -3967,6 +3973,7 @@ impl GetDocumentDiagnostics {
&mut pulled_diagnostics,
server_id,
report,
+ registration_id.clone(),
)
}
}
@@ -3987,6 +3994,7 @@ fn process_full_workspace_diagnostics_report(
diagnostics: &mut HashMap<lsp::Uri, WorkspaceLspPullDiagnostics>,
server_id: LanguageServerId,
report: lsp::WorkspaceFullDocumentDiagnosticReport,
+ registration_id: Option<SharedString>,
) {
let mut new_diagnostics = HashMap::default();
process_full_diagnostics_report(
@@ -3994,6 +4002,7 @@ fn process_full_workspace_diagnostics_report(
server_id,
report.uri,
report.full_document_diagnostic_report,
+ registration_id,
);
diagnostics.extend(new_diagnostics.into_iter().map(|(uri, diagnostics)| {
(
@@ -4010,6 +4019,7 @@ fn process_unchanged_workspace_diagnostics_report(
diagnostics: &mut HashMap<lsp::Uri, WorkspaceLspPullDiagnostics>,
server_id: LanguageServerId,
report: lsp::WorkspaceUnchangedDocumentDiagnosticReport,
+ registration_id: Option<SharedString>,
) {
let mut new_diagnostics = HashMap::default();
process_unchanged_diagnostics_report(
@@ -4017,6 +4027,7 @@ fn process_unchanged_workspace_diagnostics_report(
server_id,
report.uri,
report.unchanged_document_diagnostic_report,
+ registration_id,
);
diagnostics.extend(new_diagnostics.into_iter().map(|(uri, diagnostics)| {
(
@@ -4050,19 +4061,12 @@ impl LspCommand for GetDocumentDiagnostics {
_: &Arc<LanguageServer>,
_: &App,
) -> Result<lsp::DocumentDiagnosticParams> {
- let identifier = match &self.dynamic_caps {
- lsp::DiagnosticServerCapabilities::Options(options) => options.identifier.clone(),
- lsp::DiagnosticServerCapabilities::RegistrationOptions(options) => {
- options.diagnostic_options.identifier.clone()
- }
- };
-
Ok(lsp::DocumentDiagnosticParams {
text_document: lsp::TextDocumentIdentifier {
uri: file_path_to_lsp_url(path)?,
},
- identifier,
- previous_result_id: self.previous_result_id.clone(),
+ identifier: self.identifier.clone(),
+ previous_result_id: self.previous_result_id.clone().map(|id| id.to_string()),
partial_result_params: Default::default(),
work_done_progress_params: Default::default(),
})
@@ -4097,6 +4101,7 @@ impl LspCommand for GetDocumentDiagnostics {
&mut pulled_diagnostics,
server_id,
related_documents,
+ self.registration_id.clone(),
);
}
process_full_diagnostics_report(
@@ -4104,6 +4109,7 @@ impl LspCommand for GetDocumentDiagnostics {
server_id,
url,
report.full_document_diagnostic_report,
+ self.registration_id,
);
}
lsp::DocumentDiagnosticReport::Unchanged(report) => {
@@ -4112,6 +4118,7 @@ impl LspCommand for GetDocumentDiagnostics {
&mut pulled_diagnostics,
server_id,
related_documents,
+ self.registration_id.clone(),
);
}
process_unchanged_diagnostics_report(
@@ -4119,6 +4126,7 @@ impl LspCommand for GetDocumentDiagnostics {
server_id,
url,
report.unchanged_document_diagnostic_report,
+ self.registration_id,
);
}
},
@@ -4128,6 +4136,7 @@ impl LspCommand for GetDocumentDiagnostics {
&mut pulled_diagnostics,
server_id,
related_documents,
+ self.registration_id,
);
}
}
@@ -4170,6 +4179,7 @@ impl LspCommand for GetDocumentDiagnostics {
server_id,
uri,
diagnostics,
+ registration_id,
} => {
let mut changed = false;
let (diagnostics, result_id) = match diagnostics {
@@ -4184,7 +4194,7 @@ impl LspCommand for GetDocumentDiagnostics {
};
Some(proto::PulledDiagnostics {
changed,
- result_id,
+ result_id: result_id.map(|id| id.to_string()),
uri: uri.to_string(),
server_id: server_id.to_proto(),
diagnostics: diagnostics
@@ -4195,6 +4205,7 @@ impl LspCommand for GetDocumentDiagnostics {
.log_err()
})
.collect(),
+ registration_id: registration_id.as_ref().map(ToString::to_string),
})
}
})
@@ -4365,14 +4376,25 @@ fn process_related_documents(
diagnostics: &mut HashMap<lsp::Uri, LspPullDiagnostics>,
server_id: LanguageServerId,
documents: impl IntoIterator<Item = (lsp::Uri, lsp::DocumentDiagnosticReportKind)>,
+ registration_id: Option<SharedString>,
) {
for (url, report_kind) in documents {
match report_kind {
- lsp::DocumentDiagnosticReportKind::Full(report) => {
- process_full_diagnostics_report(diagnostics, server_id, url, report)
- }
+ lsp::DocumentDiagnosticReportKind::Full(report) => process_full_diagnostics_report(
+ diagnostics,
+ server_id,
+ url,
+ report,
+ registration_id.clone(),
+ ),
lsp::DocumentDiagnosticReportKind::Unchanged(report) => {
- process_unchanged_diagnostics_report(diagnostics, server_id, url, report)
+ process_unchanged_diagnostics_report(
+ diagnostics,
+ server_id,
+ url,
+ report,
+ registration_id.clone(),
+ )
}
}
}
@@ -4383,8 +4405,9 @@ fn process_unchanged_diagnostics_report(
server_id: LanguageServerId,
uri: lsp::Uri,
report: lsp::UnchangedDocumentDiagnosticReport,
+ registration_id: Option<SharedString>,
) {
- let result_id = report.result_id;
+ let result_id = SharedString::new(report.result_id);
match diagnostics.entry(uri.clone()) {
hash_map::Entry::Occupied(mut o) => match o.get_mut() {
LspPullDiagnostics::Default => {
@@ -4392,12 +4415,14 @@ fn process_unchanged_diagnostics_report(
server_id,
uri,
diagnostics: PulledDiagnostics::Unchanged { result_id },
+ registration_id,
});
}
LspPullDiagnostics::Response {
server_id: existing_server_id,
uri: existing_uri,
diagnostics: existing_diagnostics,
+ ..
} => {
if server_id != *existing_server_id || &uri != existing_uri {
debug_panic!(
@@ -4417,6 +4442,7 @@ fn process_unchanged_diagnostics_report(
server_id,
uri,
diagnostics: PulledDiagnostics::Unchanged { result_id },
+ registration_id,
});
}
}
@@ -4427,8 +4453,9 @@ fn process_full_diagnostics_report(
server_id: LanguageServerId,
uri: lsp::Uri,
report: lsp::FullDocumentDiagnosticReport,
+ registration_id: Option<SharedString>,
) {
- let result_id = report.result_id;
+ let result_id = report.result_id.map(SharedString::new);
match diagnostics.entry(uri.clone()) {
hash_map::Entry::Occupied(mut o) => match o.get_mut() {
LspPullDiagnostics::Default => {
@@ -4439,12 +4466,14 @@ fn process_full_diagnostics_report(
result_id,
diagnostics: report.items,
},
+ registration_id,
});
}
LspPullDiagnostics::Response {
server_id: existing_server_id,
uri: existing_uri,
diagnostics: existing_diagnostics,
+ ..
} => {
if server_id != *existing_server_id || &uri != existing_uri {
debug_panic!(
@@ -4478,6 +4507,7 @@ fn process_full_diagnostics_report(
result_id,
diagnostics: report.items,
},
+ registration_id,
});
}
}
@@ -116,6 +116,7 @@ use std::{
atomic::{self, AtomicUsize},
},
time::{Duration, Instant},
+ vec,
};
use sum_tree::Dimensions;
use text::{Anchor, BufferId, LineEnding, OffsetRangeExt, ToPoint as _};
@@ -229,7 +230,8 @@ struct LanguageServerSeed {
#[derive(Debug)]
pub struct DocumentDiagnosticsUpdate<'a, D> {
pub diagnostics: D,
- pub result_id: Option<String>,
+ pub result_id: Option<SharedString>,
+ pub registration_id: Option<SharedString>,
pub server_id: LanguageServerId,
pub disk_based_sources: Cow<'a, [String]>,
}
@@ -283,7 +285,14 @@ pub struct LocalLspStore {
lsp_tree: LanguageServerTree,
registered_buffers: HashMap<BufferId, usize>,
buffers_opened_in_servers: HashMap<BufferId, HashSet<LanguageServerId>>,
- buffer_pull_diagnostics_result_ids: HashMap<LanguageServerId, HashMap<PathBuf, Option<String>>>,
+ buffer_pull_diagnostics_result_ids: HashMap<
+ LanguageServerId,
+ HashMap<Option<SharedString>, HashMap<PathBuf, Option<SharedString>>>,
+ >,
+ workspace_pull_diagnostics_result_ids: HashMap<
+ LanguageServerId,
+ HashMap<Option<SharedString>, HashMap<PathBuf, Option<SharedString>>>,
+ >,
}
impl LocalLspStore {
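
The nested maps above are the heart of the change: result ids are now keyed per server, per dynamic registration, per file path, so two diagnostic providers registered by the same server no longer overwrite each other's `result_id`. A minimal sketch of that shape, with `ServerId`/`RegistrationId` as simplified stand-ins for `LanguageServerId` and `Option<SharedString>`:

```rust
use std::collections::HashMap;
use std::path::{Path, PathBuf};

// Simplified stand-ins for LanguageServerId and Option<SharedString>.
type ServerId = u64;
type RegistrationId = Option<String>;

/// Server -> dynamic registration -> file path -> last result_id.
type ResultIds = HashMap<ServerId, HashMap<RegistrationId, HashMap<PathBuf, Option<String>>>>;

fn record(
    ids: &mut ResultIds,
    server: ServerId,
    registration: RegistrationId,
    path: PathBuf,
    result_id: Option<String>,
) {
    ids.entry(server)
        .or_default()
        .entry(registration)
        .or_default()
        .insert(path, result_id);
}

fn lookup(
    ids: &ResultIds,
    server: ServerId,
    registration: &RegistrationId,
    path: &Path,
) -> Option<String> {
    ids.get(&server)?.get(registration)?.get(path)?.clone()
}
```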
@@ -685,6 +694,7 @@ impl LocalLspStore {
disk_based_sources: Cow::Borrowed(
&adapter.disk_based_diagnostic_sources,
),
+ registration_id: None,
}],
|_, diagnostic, cx| match diagnostic.source_kind {
DiagnosticSourceKind::Other | DiagnosticSourceKind::Pushed => {
@@ -2256,8 +2266,9 @@ impl LocalLspStore {
server_id,
None,
None,
- diagnostics,
+ None,
Vec::new(),
+ diagnostics,
cx,
)
.log_err();
@@ -2335,7 +2346,8 @@ impl LocalLspStore {
&mut self,
buffer: &Entity<Buffer>,
server_id: LanguageServerId,
- result_id: Option<String>,
+ registration_id: Option<Option<SharedString>>,
+ result_id: Option<SharedString>,
version: Option<i32>,
new_diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
reused_diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
@@ -2408,11 +2420,15 @@ impl LocalLspStore {
let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
buffer.update(cx, |buffer, cx| {
- if let Some(abs_path) = File::from_dyn(buffer.file()).map(|f| f.abs_path(cx)) {
- self.buffer_pull_diagnostics_result_ids
- .entry(server_id)
- .or_default()
- .insert(abs_path, result_id);
+ if let Some(registration_id) = registration_id {
+ if let Some(abs_path) = File::from_dyn(buffer.file()).map(|f| f.abs_path(cx)) {
+ self.buffer_pull_diagnostics_result_ids
+ .entry(server_id)
+ .or_default()
+ .entry(registration_id)
+ .or_default()
+ .insert(abs_path, result_id);
+ }
}
buffer.update_diagnostics(server_id, set, cx)
@@ -3266,6 +3282,8 @@ impl LocalLspStore {
self.language_servers.remove(server_id_to_remove);
self.buffer_pull_diagnostics_result_ids
.remove(server_id_to_remove);
+ self.workspace_pull_diagnostics_result_ids
+ .remove(server_id_to_remove);
for buffer_servers in self.buffers_opened_in_servers.values_mut() {
buffer_servers.remove(server_id_to_remove);
}
@@ -3952,6 +3970,7 @@ impl LspStore {
registered_buffers: HashMap::default(),
buffers_opened_in_servers: HashMap::default(),
buffer_pull_diagnostics_result_ids: HashMap::default(),
+ workspace_pull_diagnostics_result_ids: HashMap::default(),
watched_manifest_filenames: ManifestProvidersStore::global(cx)
.manifest_file_names(),
}),
@@ -4225,9 +4244,50 @@ impl LspStore {
lsp_store.lsp_data.remove(&buffer_id);
let local = lsp_store.as_local_mut().unwrap();
local.registered_buffers.remove(&buffer_id);
+
local.buffers_opened_in_servers.remove(&buffer_id);
if let Some(file) = File::from_dyn(buffer.read(cx).file()).cloned() {
local.unregister_old_buffer_from_language_servers(buffer, &file, cx);
+
+ let buffer_abs_path = file.abs_path(cx);
+ for (_, buffer_pull_diagnostics_result_ids) in
+ &mut local.buffer_pull_diagnostics_result_ids
+ {
+ buffer_pull_diagnostics_result_ids.retain(
+ |_, buffer_result_ids| {
+ buffer_result_ids.remove(&buffer_abs_path);
+ !buffer_result_ids.is_empty()
+ },
+ );
+ }
+
+ let diagnostic_updates = local
+ .language_servers
+ .keys()
+ .cloned()
+ .map(|server_id| DocumentDiagnosticsUpdate {
+ diagnostics: DocumentDiagnostics {
+ document_abs_path: buffer_abs_path.clone(),
+ version: None,
+ diagnostics: Vec::new(),
+ },
+ result_id: None,
+ registration_id: None,
+                            server_id,
+ disk_based_sources: Cow::Borrowed(&[]),
+ })
+ .collect::<Vec<_>>();
+
+ lsp_store
+ .merge_diagnostic_entries(
+ diagnostic_updates,
+ |_, diagnostic, _| {
+ diagnostic.source_kind != DiagnosticSourceKind::Pulled
+ },
+ cx,
+ )
+ .context("Clearing diagnostics for the closed buffer")
+ .log_err();
}
}
})
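
A small sketch of the clean-up above, with plain std types: when a buffer is closed, its path is dropped from every registration's result-id map for each server, and registrations left with no remembered paths are pruned.

```rust
use std::collections::HashMap;
use std::path::{Path, PathBuf};

type ResultIdsForServer = HashMap<Option<String>, HashMap<PathBuf, Option<String>>>;

/// Forget a closed buffer's result ids for one server, pruning registrations
/// that end up with no remembered paths.
fn forget_closed_buffer(ids: &mut ResultIdsForServer, closed_path: &Path) {
    ids.retain(|_registration, per_path| {
        per_path.remove(closed_path);
        !per_path.is_empty()
    });
}
```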
@@ -6700,9 +6760,11 @@ impl LspStore {
};
assert!(any_server_has_diagnostics_provider);
+ let identifier = buffer_diagnostic_identifier(&dynamic_caps);
let request = GetDocumentDiagnostics {
previous_result_id: None,
- dynamic_caps,
+ identifier,
+ registration_id: None,
};
let request_task = client.request_lsp(
upstream_project_id,
@@ -6735,19 +6797,27 @@ impl LspStore {
.language_server_dynamic_registrations
.get(&server_id)
.into_iter()
- .flat_map(|registrations| registrations.diagnostics.values().cloned())
+ .flat_map(|registrations| registrations.diagnostics.clone())
.collect::<Vec<_>>();
Some(
providers_with_identifiers
.into_iter()
- .map(|dynamic_caps| {
- let result_id = self.result_id(server_id, buffer_id, cx);
+ .map(|(registration_id, dynamic_caps)| {
+ let identifier = buffer_diagnostic_identifier(&dynamic_caps);
+ let registration_id = registration_id.map(SharedString::from);
+ let result_id = self.result_id_for_buffer_pull(
+ server_id,
+ buffer_id,
+ ®istration_id,
+ cx,
+ );
self.request_lsp(
buffer.clone(),
LanguageServerToQuery::Other(server_id),
GetDocumentDiagnostics {
previous_result_id: result_id,
- dynamic_caps,
+ registration_id,
+ identifier,
},
cx,
)
@@ -7112,8 +7182,7 @@ impl LspStore {
return;
}
- let mut unchanged_buffers = HashSet::default();
- let mut changed_buffers = HashSet::default();
+ let mut unchanged_buffers = HashMap::default();
let server_diagnostics_updates = diagnostics
.into_iter()
.filter_map(|diagnostics_set| match diagnostics_set {
@@ -7121,24 +7190,25 @@ impl LspStore {
server_id,
uri,
diagnostics,
- } => Some((server_id, uri, diagnostics)),
+ registration_id,
+ } => Some((server_id, uri, diagnostics, registration_id)),
LspPullDiagnostics::Default => None,
})
.fold(
HashMap::default(),
- |mut acc, (server_id, uri, diagnostics)| {
+ |mut acc, (server_id, uri, diagnostics, new_registration_id)| {
let (result_id, diagnostics) = match diagnostics {
PulledDiagnostics::Unchanged { result_id } => {
- unchanged_buffers.insert(uri.clone());
+ unchanged_buffers
+ .entry(new_registration_id.clone())
+ .or_insert_with(HashSet::default)
+ .insert(uri.clone());
(Some(result_id), Vec::new())
}
PulledDiagnostics::Changed {
result_id,
diagnostics,
- } => {
- changed_buffers.insert(uri.clone());
- (result_id, diagnostics)
- }
+ } => (result_id, diagnostics),
};
let disk_based_sources = Cow::Owned(
lsp_store
@@ -7148,8 +7218,11 @@ impl LspStore {
.unwrap_or(&[])
.to_vec(),
);
- acc.entry(server_id).or_insert_with(Vec::new).push(
- DocumentDiagnosticsUpdate {
+ acc.entry(server_id)
+ .or_insert_with(HashMap::default)
+ .entry(new_registration_id.clone())
+ .or_insert_with(Vec::new)
+ .push(DocumentDiagnosticsUpdate {
server_id,
diagnostics: lsp::PublishDiagnosticsParams {
uri,
@@ -7158,37 +7231,35 @@ impl LspStore {
},
result_id,
disk_based_sources,
- },
- );
+ registration_id: new_registration_id,
+ });
acc
},
);
for diagnostic_updates in server_diagnostics_updates.into_values() {
- lsp_store
- .merge_lsp_diagnostics(
- DiagnosticSourceKind::Pulled,
- diagnostic_updates,
- |buffer, old_diagnostic, cx| {
- File::from_dyn(buffer.file())
- .and_then(|file| {
- let abs_path = file.as_local()?.abs_path(cx);
- lsp::Uri::from_file_path(abs_path).ok()
- })
- .is_none_or(|buffer_uri| {
- unchanged_buffers.contains(&buffer_uri)
- || match old_diagnostic.source_kind {
- DiagnosticSourceKind::Pulled => {
- !changed_buffers.contains(&buffer_uri)
- }
- DiagnosticSourceKind::Other
- | DiagnosticSourceKind::Pushed => true,
- }
- })
- },
- cx,
- )
- .log_err();
+ for (registration_id, diagnostic_updates) in diagnostic_updates {
+ lsp_store
+ .merge_lsp_diagnostics(
+ DiagnosticSourceKind::Pulled,
+ diagnostic_updates,
+ |document_uri, old_diagnostic, _| match old_diagnostic.source_kind {
+ DiagnosticSourceKind::Pulled => {
+ old_diagnostic.registration_id != registration_id
+ || unchanged_buffers
+ .get(&old_diagnostic.registration_id)
+ .is_some_and(|unchanged_buffers| {
+ unchanged_buffers.contains(&document_uri)
+ })
+ }
+ DiagnosticSourceKind::Other | DiagnosticSourceKind::Pushed => {
+ true
+ }
+ },
+ cx,
+ )
+ .log_err();
+ }
}
})
})
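
The merge closure above boils down to a single predicate over the old diagnostic. A simplified, self-contained restatement (hand-rolled types, not the project's own) of when an existing diagnostic survives a pull for a given registration:

```rust
use std::collections::{HashMap, HashSet};

#[derive(PartialEq, Eq, Hash)]
struct Uri(String);

enum SourceKind {
    Pulled,
    Pushed,
    Other,
}

struct ExistingDiagnostic {
    source_kind: SourceKind,
    registration_id: Option<String>,
}

/// Pushed/other diagnostics always survive a pull; pulled ones survive only if
/// they came from a different registration, or the server reported their
/// document as "unchanged" for that registration in this batch.
fn keep_existing(
    document_uri: &Uri,
    existing: &ExistingDiagnostic,
    pulled_registration: &Option<String>,
    unchanged: &HashMap<Option<String>, HashSet<Uri>>,
) -> bool {
    match existing.source_kind {
        SourceKind::Pulled => {
            existing.registration_id != *pulled_registration
                || unchanged
                    .get(&existing.registration_id)
                    .is_some_and(|uris| uris.contains(document_uri))
        }
        SourceKind::Pushed | SourceKind::Other => true,
    }
}
```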
@@ -8195,7 +8266,7 @@ impl LspStore {
&mut self,
server_id: LanguageServerId,
abs_path: PathBuf,
- result_id: Option<String>,
+ result_id: Option<SharedString>,
version: Option<i32>,
diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
cx: &mut Context<Self>,
@@ -8210,6 +8281,7 @@ impl LspStore {
result_id,
server_id,
disk_based_sources: Cow::Borrowed(&[]),
+ registration_id: None,
}],
|_, _, _| false,
cx,
@@ -8220,7 +8292,7 @@ impl LspStore {
pub fn merge_diagnostic_entries<'a>(
&mut self,
diagnostic_updates: Vec<DocumentDiagnosticsUpdate<'a, DocumentDiagnostics>>,
- merge: impl Fn(&Buffer, &Diagnostic, &App) -> bool + Clone,
+ merge: impl Fn(&lsp::Uri, &Diagnostic, &App) -> bool + Clone,
cx: &mut Context<Self>,
) -> anyhow::Result<()> {
let mut diagnostics_summary = None::<proto::UpdateDiagnosticSummary>;
@@ -8241,13 +8313,15 @@ impl LspStore {
path: relative_path,
};
+ let document_uri = lsp::Uri::from_file_path(abs_path)
+ .map_err(|()| anyhow!("Failed to convert buffer path {abs_path:?} to lsp Uri"))?;
if let Some(buffer_handle) = self.buffer_store.read(cx).get_by_path(&project_path) {
let snapshot = buffer_handle.read(cx).snapshot();
let buffer = buffer_handle.read(cx);
let reused_diagnostics = buffer
.buffer_diagnostics(Some(server_id))
.iter()
- .filter(|v| merge(buffer, &v.diagnostic, cx))
+ .filter(|v| merge(&document_uri, &v.diagnostic, cx))
.map(|v| {
let start = Unclipped(v.range.start.to_point_utf16(&snapshot));
let end = Unclipped(v.range.end.to_point_utf16(&snapshot));
@@ -8263,6 +8337,7 @@ impl LspStore {
.update_buffer_diagnostics(
&buffer_handle,
server_id,
+ Some(update.registration_id),
update.result_id,
update.diagnostics.version,
update.diagnostics.diagnostics.clone(),
@@ -8271,6 +8346,25 @@ impl LspStore {
)?;
update.diagnostics.diagnostics.extend(reused_diagnostics);
+ } else if let Some(local) = self.as_local() {
+ let reused_diagnostics = local
+ .diagnostics
+ .get(&worktree_id)
+ .and_then(|diagnostics_for_tree| diagnostics_for_tree.get(&project_path.path))
+ .and_then(|diagnostics_by_server_id| {
+ diagnostics_by_server_id
+ .binary_search_by_key(&server_id, |e| e.0)
+ .ok()
+ .map(|ix| &diagnostics_by_server_id[ix].1)
+ })
+ .into_iter()
+ .flatten()
+ .filter(|v| merge(&document_uri, &v.diagnostic, cx));
+
+ update
+ .diagnostics
+ .diagnostics
+ .extend(reused_diagnostics.cloned());
}
let updated = worktree.update(cx, |worktree, cx| {
@@ -8355,7 +8449,7 @@ impl LspStore {
.unwrap_or_default();
let new_summary = DiagnosticSummary::new(&diagnostics);
- if new_summary.is_empty() {
+ if diagnostics.is_empty() {
if let Some(diagnostics_by_server_id) = diagnostics_for_tree.get_mut(&path_in_worktree)
{
if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
@@ -9665,7 +9759,7 @@ impl LspStore {
);
}
lsp::ProgressParamsValue::WorkspaceDiagnostic(report) => {
- let identifier = match progress_params.token {
+ let registration_id = match progress_params.token {
lsp::NumberOrString::Number(_) => None,
lsp::NumberOrString::String(token) => token
.split_once(WORKSPACE_DIAGNOSTICS_TOKEN_START)
@@ -9678,10 +9772,15 @@ impl LspStore {
.as_local_mut()
.and_then(|local| local.language_servers.get_mut(&language_server_id))
&& let Some(workspace_diagnostics) =
- workspace_diagnostics_refresh_tasks.get_mut(&identifier)
+ workspace_diagnostics_refresh_tasks.get_mut(®istration_id)
{
workspace_diagnostics.progress_tx.try_send(()).ok();
- self.apply_workspace_diagnostic_report(language_server_id, report, cx)
+ self.apply_workspace_diagnostic_report(
+ language_server_id,
+ report,
+ registration_id.map(SharedString::from),
+ cx,
+ )
}
}
}
@@ -10941,7 +11040,7 @@ impl LspStore {
&mut self,
server_id: LanguageServerId,
diagnostics: lsp::PublishDiagnosticsParams,
- result_id: Option<String>,
+ result_id: Option<SharedString>,
source_kind: DiagnosticSourceKind,
disk_based_sources: &[String],
cx: &mut Context<Self>,
@@ -10953,6 +11052,7 @@ impl LspStore {
result_id,
server_id,
disk_based_sources: Cow::Borrowed(disk_based_sources),
+ registration_id: None,
}],
|_, _, _| false,
cx,
@@ -10963,7 +11063,7 @@ impl LspStore {
&mut self,
source_kind: DiagnosticSourceKind,
lsp_diagnostics: Vec<DocumentDiagnosticsUpdate<lsp::PublishDiagnosticsParams>>,
- merge: impl Fn(&Buffer, &Diagnostic, &App) -> bool + Clone,
+ merge: impl Fn(&lsp::Uri, &Diagnostic, &App) -> bool + Clone,
cx: &mut Context<Self>,
) -> Result<()> {
anyhow::ensure!(self.mode.is_local(), "called update_diagnostics on remote");
@@ -10978,10 +11078,12 @@ impl LspStore {
update.server_id,
update.diagnostics,
&update.disk_based_sources,
+ update.registration_id.clone(),
),
result_id: update.result_id,
server_id: update.server_id,
disk_based_sources: update.disk_based_sources,
+ registration_id: update.registration_id,
})
})
.collect();
@@ -10996,6 +11098,7 @@ impl LspStore {
server_id: LanguageServerId,
mut lsp_diagnostics: lsp::PublishDiagnosticsParams,
disk_based_sources: &[String],
+ registration_id: Option<SharedString>,
) -> DocumentDiagnostics {
let mut diagnostics = Vec::default();
let mut primary_diagnostic_group_ids = HashMap::default();
@@ -11069,6 +11172,7 @@ impl LspStore {
is_unnecessary,
underline,
data: diagnostic.data.clone(),
+ registration_id: registration_id.clone(),
},
});
if let Some(infos) = &diagnostic.related_information {
@@ -11096,6 +11200,7 @@ impl LspStore {
is_unnecessary: false,
underline,
data: diagnostic.data.clone(),
+ registration_id: registration_id.clone(),
},
});
}
@@ -11845,18 +11950,22 @@ impl LspStore {
}
if let Some(local) = self.as_local_mut() {
local.buffer_pull_diagnostics_result_ids.remove(&for_server);
+ local
+ .workspace_pull_diagnostics_result_ids
+ .remove(&for_server);
for buffer_servers in local.buffers_opened_in_servers.values_mut() {
buffer_servers.remove(&for_server);
}
}
}
- pub fn result_id(
+ pub fn result_id_for_buffer_pull(
&self,
server_id: LanguageServerId,
buffer_id: BufferId,
+ registration_id: &Option<SharedString>,
cx: &App,
- ) -> Option<String> {
+ ) -> Option<SharedString> {
let abs_path = self
.buffer_store
.read(cx)
@@ -11866,20 +11975,40 @@ impl LspStore {
self.as_local()?
.buffer_pull_diagnostics_result_ids
.get(&server_id)?
+ .get(registration_id)?
.get(&abs_path)?
.clone()
}
- pub fn all_result_ids(&self, server_id: LanguageServerId) -> HashMap<PathBuf, String> {
+ /// Gets all result_ids for a workspace diagnostics pull request.
+    /// First, it tries to find the buffer's result_id retrieved via the diagnostics pull; if that fails, it falls back to the workspace diagnostics pull result_id.
+ /// The latter is supposed to be of lower priority as we keep on pulling diagnostics for open buffers eagerly.
+ pub fn result_ids_for_workspace_refresh(
+ &self,
+ server_id: LanguageServerId,
+ registration_id: &Option<SharedString>,
+ ) -> HashMap<PathBuf, SharedString> {
let Some(local) = self.as_local() else {
return HashMap::default();
};
local
- .buffer_pull_diagnostics_result_ids
+ .workspace_pull_diagnostics_result_ids
.get(&server_id)
.into_iter()
+ .filter_map(|diagnostics| diagnostics.get(registration_id))
.flatten()
- .filter_map(|(abs_path, result_id)| Some((abs_path.clone(), result_id.clone()?)))
+ .filter_map(|(abs_path, result_id)| {
+ let result_id = local
+ .buffer_pull_diagnostics_result_ids
+ .get(&server_id)
+ .and_then(|buffer_ids_result_ids| {
+ buffer_ids_result_ids.get(registration_id)?.get(abs_path)
+ })
+ .cloned()
+ .flatten()
+ .or_else(|| result_id.clone())?;
+ Some((abs_path.clone(), result_id))
+ })
.collect()
}
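
The fallback described in the doc comment above, restated as a standalone function over plain std maps: the per-buffer pull result_id wins, the workspace pull's own result_id is the fallback, and paths with neither are skipped.

```rust
use std::collections::HashMap;
use std::path::PathBuf;

fn previous_result_ids(
    workspace_ids: &HashMap<PathBuf, Option<String>>,
    buffer_ids: &HashMap<PathBuf, Option<String>>,
) -> HashMap<PathBuf, String> {
    workspace_ids
        .iter()
        .filter_map(|(path, workspace_id)| {
            let id = buffer_ids
                .get(path)
                .cloned()
                .flatten()
                .or_else(|| workspace_id.clone())?;
            Some((path.clone(), id))
        })
        .collect()
}
```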
@@ -11924,12 +12053,16 @@ impl LspStore {
&mut self,
server_id: LanguageServerId,
report: lsp::WorkspaceDiagnosticReportResult,
+ registration_id: Option<SharedString>,
cx: &mut Context<Self>,
) {
let workspace_diagnostics =
- GetDocumentDiagnostics::deserialize_workspace_diagnostics_report(report, server_id);
- let mut unchanged_buffers = HashSet::default();
- let mut changed_buffers = HashSet::default();
+ GetDocumentDiagnostics::deserialize_workspace_diagnostics_report(
+ report,
+ server_id,
+ registration_id,
+ );
+ let mut unchanged_buffers = HashMap::default();
let workspace_diagnostics_updates = workspace_diagnostics
.into_iter()
.filter_map(
@@ -11938,25 +12071,32 @@ impl LspStore {
server_id,
uri,
diagnostics,
- } => Some((server_id, uri, diagnostics, workspace_diagnostics.version)),
+ registration_id,
+ } => Some((
+ server_id,
+ uri,
+ diagnostics,
+ workspace_diagnostics.version,
+ registration_id,
+ )),
LspPullDiagnostics::Default => None,
},
)
.fold(
HashMap::default(),
- |mut acc, (server_id, uri, diagnostics, version)| {
+ |mut acc, (server_id, uri, diagnostics, version, new_registration_id)| {
let (result_id, diagnostics) = match diagnostics {
PulledDiagnostics::Unchanged { result_id } => {
- unchanged_buffers.insert(uri.clone());
+ unchanged_buffers
+ .entry(new_registration_id.clone())
+ .or_insert_with(HashSet::default)
+ .insert(uri.clone());
(Some(result_id), Vec::new())
}
PulledDiagnostics::Changed {
result_id,
diagnostics,
- } => {
- changed_buffers.insert(uri.clone());
- (result_id, diagnostics)
- }
+ } => (result_id, diagnostics),
};
let disk_based_sources = Cow::Owned(
self.language_server_adapter_for_id(server_id)
@@ -11965,47 +12105,68 @@ impl LspStore {
.unwrap_or(&[])
.to_vec(),
);
- acc.entry(server_id)
- .or_insert_with(Vec::new)
- .push(DocumentDiagnosticsUpdate {
- server_id,
- diagnostics: lsp::PublishDiagnosticsParams {
- uri,
- diagnostics,
- version,
- },
- result_id,
- disk_based_sources,
- });
+
+ let Some(abs_path) = uri.to_file_path().ok() else {
+ return acc;
+ };
+ let Some((worktree, relative_path)) =
+ self.worktree_store.read(cx).find_worktree(abs_path.clone(), cx)
+ else {
+ log::warn!("skipping workspace diagnostics update, no worktree found for path {abs_path:?}");
+ return acc;
+ };
+ let worktree_id = worktree.read(cx).id();
+ let project_path = ProjectPath {
+ worktree_id,
+ path: relative_path,
+ };
+ if let Some(local_lsp_store) = self.as_local_mut() {
+ local_lsp_store.workspace_pull_diagnostics_result_ids.entry(server_id)
+ .or_default().entry(new_registration_id.clone()).or_default().insert(abs_path, result_id.clone());
+ }
+                    // The LSP spec recommends that "diagnostics from a document pull should win over diagnostics from a workspace pull."
+                    // Since we actively pull diagnostics for documents with open buffers, we ignore the contents of workspace pulls for these documents.
+ if self.buffer_store.read(cx).get_by_path(&project_path).is_none() {
+ acc.entry(server_id)
+ .or_insert_with(HashMap::default)
+ .entry(new_registration_id.clone())
+ .or_insert_with(Vec::new)
+ .push(DocumentDiagnosticsUpdate {
+ server_id,
+ diagnostics: lsp::PublishDiagnosticsParams {
+ uri,
+ diagnostics,
+ version,
+ },
+ result_id,
+ disk_based_sources,
+ registration_id: new_registration_id,
+ });
+ }
acc
},
);
for diagnostic_updates in workspace_diagnostics_updates.into_values() {
- self.merge_lsp_diagnostics(
- DiagnosticSourceKind::Pulled,
- diagnostic_updates,
- |buffer, old_diagnostic, cx| {
- File::from_dyn(buffer.file())
- .and_then(|file| {
- let abs_path = file.as_local()?.abs_path(cx);
- lsp::Uri::from_file_path(abs_path).ok()
- })
- .is_none_or(|buffer_uri| {
- unchanged_buffers.contains(&buffer_uri)
- || match old_diagnostic.source_kind {
- DiagnosticSourceKind::Pulled => {
- !changed_buffers.contains(&buffer_uri)
- }
- DiagnosticSourceKind::Other | DiagnosticSourceKind::Pushed => {
- true
- }
- }
- })
- },
- cx,
- )
- .log_err();
+ for (registration_id, diagnostic_updates) in diagnostic_updates {
+ self.merge_lsp_diagnostics(
+ DiagnosticSourceKind::Pulled,
+ diagnostic_updates,
+ |document_uri, old_diagnostic, _| match old_diagnostic.source_kind {
+ DiagnosticSourceKind::Pulled => {
+ old_diagnostic.registration_id != registration_id
+ || unchanged_buffers
+ .get(&old_diagnostic.registration_id)
+ .is_some_and(|unchanged_buffers| {
+ unchanged_buffers.contains(&document_uri)
+ })
+ }
+ DiagnosticSourceKind::Other | DiagnosticSourceKind::Pushed => true,
+ },
+ cx,
+ )
+ .log_err();
+ }
}
}
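
Condensed restatement of the two per-entry decisions above (hypothetical helper, not the project's API): the workspace pull's result_id is always remembered for the next refresh, but its diagnostics are only applied when the file has no open buffer, since open buffers already get fresher data from document pulls.

```rust
use std::collections::HashMap;
use std::path::PathBuf;

/// Returns true when the workspace-pull diagnostics for `path` should be
/// applied; the result_id is remembered either way.
fn handle_workspace_entry(
    remembered_result_ids: &mut HashMap<PathBuf, Option<String>>,
    path: PathBuf,
    result_id: Option<String>,
    has_open_buffer: bool,
) -> bool {
    remembered_result_ids.insert(path, result_id);
    !has_open_buffer
}
```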
@@ -12284,54 +12445,41 @@ impl LspStore {
.diagnostics
.insert(Some(reg.id.clone()), caps.clone());
- if let LanguageServerState::Running {
- workspace_diagnostics_refresh_tasks,
- ..
- } = state
- && let Some(task) = lsp_workspace_diagnostics_refresh(
- Some(reg.id.clone()),
- caps.clone(),
- server.clone(),
- cx,
- )
- {
- workspace_diagnostics_refresh_tasks.insert(Some(reg.id), task);
+ let supports_workspace_diagnostics =
+ |capabilities: &DiagnosticServerCapabilities| match capabilities {
+ DiagnosticServerCapabilities::Options(diagnostic_options) => {
+ diagnostic_options.workspace_diagnostics
+ }
+ DiagnosticServerCapabilities::RegistrationOptions(
+ diagnostic_registration_options,
+ ) => {
+ diagnostic_registration_options
+ .diagnostic_options
+ .workspace_diagnostics
+ }
+ };
+
+ if supports_workspace_diagnostics(&caps) {
+ if let LanguageServerState::Running {
+ workspace_diagnostics_refresh_tasks,
+ ..
+ } = state
+ && let Some(task) = lsp_workspace_diagnostics_refresh(
+ Some(reg.id.clone()),
+ caps.clone(),
+ server.clone(),
+ cx,
+ )
+ {
+ workspace_diagnostics_refresh_tasks.insert(Some(reg.id), task);
+ }
}
- let mut did_update_caps = false;
server.update_capabilities(|capabilities| {
- if capabilities.diagnostic_provider.as_ref().is_none_or(
- |current_caps| {
- let supports_workspace_diagnostics =
- |capabilities: &DiagnosticServerCapabilities| {
- match capabilities {
- DiagnosticServerCapabilities::Options(
- diagnostic_options,
- ) => diagnostic_options.workspace_diagnostics,
- DiagnosticServerCapabilities::RegistrationOptions(
- diagnostic_registration_options,
- ) => {
- diagnostic_registration_options
- .diagnostic_options
- .workspace_diagnostics
- }
- }
- };
- // We don't actually care about capabilities.diagnostic_provider, but it IS relevant for the remote peer
- // to know that there's at least one provider. Otherwise, it will never ask us to issue documentdiagnostic calls on their behalf,
- // as it'll think that they're not supported.
- // If we did not support any workspace diagnostics up to this point but now do, let's update.
- !supports_workspace_diagnostics(current_caps)
- & supports_workspace_diagnostics(&caps)
- },
- ) {
- did_update_caps = true;
- capabilities.diagnostic_provider = Some(caps);
- }
+ capabilities.diagnostic_provider = Some(caps);
});
- if did_update_caps {
- notify_server_capabilities_updated(&server, cx);
- }
+
+ notify_server_capabilities_updated(&server, cx);
}
}
"textDocument/documentColor" => {
@@ -12499,7 +12647,7 @@ impl LspStore {
.language_servers
.get_mut(&server_id)
.context("Could not obtain Language Servers state")?;
- let options = local
+ local
.language_server_dynamic_registrations
.get_mut(&server_id)
.with_context(|| {
@@ -12512,13 +12660,12 @@ impl LspStore {
)?;
let mut has_any_diagnostic_providers_still = true;
- if let Some(identifier) = diagnostic_identifier(&options)
- && let LanguageServerState::Running {
- workspace_diagnostics_refresh_tasks,
- ..
- } = state
+ if let LanguageServerState::Running {
+ workspace_diagnostics_refresh_tasks,
+ ..
+ } = state
{
- workspace_diagnostics_refresh_tasks.remove(&identifier);
+ workspace_diagnostics_refresh_tasks.remove(&Some(unreg.id.clone()));
has_any_diagnostic_providers_still =
!workspace_diagnostics_refresh_tasks.is_empty();
}
@@ -12822,7 +12969,8 @@ fn lsp_workspace_diagnostics_refresh(
server: Arc<LanguageServer>,
cx: &mut Context<'_, LspStore>,
) -> Option<WorkspaceRefreshTask> {
- let identifier = diagnostic_identifier(&options)?;
+ let identifier = workspace_diagnostic_identifier(&options)?;
+ let registration_id_shared = registration_id.as_ref().map(SharedString::from);
let (progress_tx, mut progress_rx) = mpsc::channel(1);
let (mut refresh_tx, mut refresh_rx) = mpsc::channel(1);
@@ -12854,13 +13002,13 @@ fn lsp_workspace_diagnostics_refresh(
let Ok(previous_result_ids) = lsp_store.update(cx, |lsp_store, _| {
lsp_store
- .all_result_ids(server.server_id())
+ .result_ids_for_workspace_refresh(server.server_id(), ®istration_id_shared)
.into_iter()
.filter_map(|(abs_path, result_id)| {
let uri = file_path_to_lsp_url(&abs_path).ok()?;
Some(lsp::PreviousResultId {
uri,
- value: result_id,
+ value: result_id.to_string(),
})
})
.collect()
@@ -12868,9 +13016,9 @@ fn lsp_workspace_diagnostics_refresh(
return;
};
- let token = if let Some(identifier) = ®istration_id {
+ let token = if let Some(registration_id) = ®istration_id {
format!(
- "workspace/diagnostic/{}/{requests}/{WORKSPACE_DIAGNOSTICS_TOKEN_START}{identifier}",
+ "workspace/diagnostic/{}/{requests}/{WORKSPACE_DIAGNOSTICS_TOKEN_START}{registration_id}",
server.server_id(),
)
} else {
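
The registration id travels inside the progress token and is recovered with `split_once` in the progress handler further up. A rough round-trip sketch; the value of `WORKSPACE_DIAGNOSTICS_TOKEN_START` below is a placeholder, the real constant lives in the LSP store.

```rust
// Placeholder value; the real constant is defined elsewhere in the LSP store.
const WORKSPACE_DIAGNOSTICS_TOKEN_START: &str = "registration=";

fn build_token(server_id: u64, request: usize, registration_id: Option<&str>) -> String {
    match registration_id {
        Some(id) => format!(
            "workspace/diagnostic/{server_id}/{request}/{WORKSPACE_DIAGNOSTICS_TOKEN_START}{id}"
        ),
        None => format!("workspace/diagnostic/{server_id}/{request}"),
    }
}

fn registration_id_from_token(token: &str) -> Option<&str> {
    token
        .split_once(WORKSPACE_DIAGNOSTICS_TOKEN_START)
        .map(|(_, registration_id)| registration_id)
}
```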
@@ -12920,6 +13068,7 @@ fn lsp_workspace_diagnostics_refresh(
lsp_store.apply_workspace_diagnostic_report(
server.server_id(),
pulled_diagnostics,
+ registration_id_shared.clone(),
cx,
)
})
@@ -12941,7 +13090,21 @@ fn lsp_workspace_diagnostics_refresh(
})
}
-fn diagnostic_identifier(options: &DiagnosticServerCapabilities) -> Option<Option<String>> {
+fn buffer_diagnostic_identifier(options: &DiagnosticServerCapabilities) -> Option<String> {
+ match &options {
+ lsp::DiagnosticServerCapabilities::Options(diagnostic_options) => {
+ diagnostic_options.identifier.clone()
+ }
+ lsp::DiagnosticServerCapabilities::RegistrationOptions(registration_options) => {
+ let diagnostic_options = ®istration_options.diagnostic_options;
+ diagnostic_options.identifier.clone()
+ }
+ }
+}
+
+fn workspace_diagnostic_identifier(
+ options: &DiagnosticServerCapabilities,
+) -> Option<Option<String>> {
match &options {
lsp::DiagnosticServerCapabilities::Options(diagnostic_options) => {
if !diagnostic_options.workspace_diagnostics {
@@ -90,6 +90,7 @@ pub fn register_notifications(
disk_based_sources: Cow::Borrowed(
&adapter.disk_based_diagnostic_sources,
),
+ registration_id: None,
}],
|_, diag, _| !is_inactive_region(diag),
cx,
@@ -984,6 +984,8 @@ pub enum LspPullDiagnostics {
server_id: LanguageServerId,
/// URI of the resource,
uri: lsp::Uri,
+ /// The ID provided by the dynamic registration that produced diagnostics.
+ registration_id: Option<SharedString>,
/// The diagnostics produced by this language server.
diagnostics: PulledDiagnostics,
},
@@ -994,10 +996,10 @@ pub enum PulledDiagnostics {
Unchanged {
         /// An ID of the current pulled batch for this file.
/// If given, can be used to query workspace diagnostics partially.
- result_id: String,
+ result_id: SharedString,
},
Changed {
- result_id: Option<String>,
+ result_id: Option<SharedString>,
diagnostics: Vec<lsp::Diagnostic>,
},
}
@@ -2750,11 +2750,13 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
);
let fs = FakeFs::new(cx.executor());
- fs.insert_tree("/dir", json!({ "a.rs": text })).await;
+ fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
let buffer = project
- .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/dir/a.rs"), cx)
+ })
.await
.unwrap();
@@ -2763,7 +2765,7 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
lsp_store
.update_diagnostic_entries(
LanguageServerId(0),
- PathBuf::from("/dir/a.rs"),
+ PathBuf::from(path!("/dir/a.rs")),
None,
None,
vec![
@@ -2820,17 +2822,17 @@ async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppC
init_test(cx);
let fs = FakeFs::new(cx.executor());
- fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
+ fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
.await;
- let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
lsp_store.update(cx, |lsp_store, cx| {
lsp_store
.update_diagnostic_entries(
LanguageServerId(0),
- Path::new("/dir/a.rs").to_owned(),
+ Path::new(path!("/dir/a.rs")).to_owned(),
None,
None,
vec![DiagnosticEntry {
@@ -2849,7 +2851,7 @@ async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppC
lsp_store
.update_diagnostic_entries(
LanguageServerId(1),
- Path::new("/dir/a.rs").to_owned(),
+ Path::new(path!("/dir/a.rs")).to_owned(),
None,
None,
vec![DiagnosticEntry {
@@ -258,6 +258,7 @@ message Diagnostic {
Anchor start = 1;
Anchor end = 2;
optional string source = 3;
+ optional string registration_id = 17;
enum SourceKind {
Pulled = 0;
@@ -949,6 +949,7 @@ message PulledDiagnostics {
optional string result_id = 3;
bool changed = 4;
repeated LspDiagnostic diagnostics = 5;
+ optional string registration_id = 6;
}
message PullWorkspaceDiagnostics {