Detailed changes
@@ -731,6 +731,8 @@ mod tests {
// Create some diagnostics
worktree.update(&mut cx, |worktree, cx| {
worktree
+ .as_local_mut()
+ .unwrap()
.update_diagnostic_entries(
Arc::from("/test/main.rs".as_ref()),
None,
@@ -882,6 +884,8 @@ mod tests {
// Diagnostics are added for another earlier path.
worktree.update(&mut cx, |worktree, cx| {
worktree
+ .as_local_mut()
+ .unwrap()
.update_diagnostic_entries(
Arc::from("/test/consts.rs".as_ref()),
None,
@@ -980,6 +984,8 @@ mod tests {
// Diagnostics are added to the first path
worktree.update(&mut cx, |worktree, cx| {
worktree
+ .as_local_mut()
+ .unwrap()
.update_diagnostic_entries(
Arc::from("/test/consts.rs".as_ref()),
None,
@@ -840,7 +840,7 @@ mod tests {
("mod.body".to_string(), Color::red().into()),
("fn.name".to_string(), Color::blue().into()),
]);
- let lang = Arc::new(
+ let language = Arc::new(
Language::new(
LanguageConfig {
name: "Test".to_string(),
@@ -857,10 +857,9 @@ mod tests {
)
.unwrap(),
);
- lang.set_theme(&theme);
+ language.set_theme(&theme);
- let buffer =
- cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Some(lang), None, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
buffer.condition(&cx, |buf, _| !buf.is_parsing()).await;
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
@@ -928,7 +927,7 @@ mod tests {
("mod.body".to_string(), Color::red().into()),
("fn.name".to_string(), Color::blue().into()),
]);
- let lang = Arc::new(
+ let language = Arc::new(
Language::new(
LanguageConfig {
name: "Test".to_string(),
@@ -945,10 +944,9 @@ mod tests {
)
.unwrap(),
);
- lang.set_theme(&theme);
+ language.set_theme(&theme);
- let buffer =
- cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Some(lang), None, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
buffer.condition(&cx, |buf, _| !buf.is_parsing()).await;
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
@@ -533,9 +533,8 @@ impl Editor {
_: &workspace::OpenNew,
cx: &mut ViewContext<Workspace>,
) {
- let buffer = cx.add_model(|cx| {
- Buffer::new(0, "", cx).with_language(Some(language::PLAIN_TEXT.clone()), None, cx)
- });
+ let buffer = cx
+ .add_model(|cx| Buffer::new(0, "", cx).with_language(language::PLAIN_TEXT.clone(), cx));
workspace.open_item(BufferItemHandle(buffer), cx);
}
@@ -5746,10 +5745,10 @@ mod tests {
#[gpui::test]
async fn test_select_larger_smaller_syntax_node(mut cx: gpui::TestAppContext) {
let settings = cx.read(EditorSettings::test);
- let language = Some(Arc::new(Language::new(
+ let language = Arc::new(Language::new(
LanguageConfig::default(),
Some(tree_sitter_rust::language()),
- )));
+ ));
let text = r#"
use mod1::mod2::{mod3, mod4};
@@ -5760,7 +5759,7 @@ mod tests {
"#
.unindent();
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx));
view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
@@ -5887,7 +5886,7 @@ mod tests {
#[gpui::test]
async fn test_autoindent_selections(mut cx: gpui::TestAppContext) {
let settings = cx.read(EditorSettings::test);
- let language = Some(Arc::new(
+ let language = Arc::new(
Language::new(
LanguageConfig {
brackets: vec![
@@ -5915,11 +5914,11 @@ mod tests {
"#,
)
.unwrap(),
- ));
+ );
let text = "fn a() {}";
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let (_, editor) = cx.add_window(|cx| build_editor(buffer, settings, cx));
editor
@@ -5944,7 +5943,7 @@ mod tests {
#[gpui::test]
async fn test_autoclose_pairs(mut cx: gpui::TestAppContext) {
let settings = cx.read(EditorSettings::test);
- let language = Some(Arc::new(Language::new(
+ let language = Arc::new(Language::new(
LanguageConfig {
brackets: vec![
BracketPair {
@@ -5963,7 +5962,7 @@ mod tests {
..Default::default()
},
Some(tree_sitter_rust::language()),
- )));
+ ));
let text = r#"
a
@@ -5973,7 +5972,7 @@ mod tests {
"#
.unindent();
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx));
view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
@@ -6055,13 +6054,13 @@ mod tests {
#[gpui::test]
async fn test_toggle_comment(mut cx: gpui::TestAppContext) {
let settings = cx.read(EditorSettings::test);
- let language = Some(Arc::new(Language::new(
+ let language = Arc::new(Language::new(
LanguageConfig {
line_comment: Some("// ".to_string()),
..Default::default()
},
Some(tree_sitter_rust::language()),
- )));
+ ));
let text = "
fn a() {
@@ -6072,7 +6071,7 @@ mod tests {
"
.unindent();
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx));
@@ -6323,7 +6322,7 @@ mod tests {
#[gpui::test]
async fn test_extra_newline_insertion(mut cx: gpui::TestAppContext) {
let settings = cx.read(EditorSettings::test);
- let language = Some(Arc::new(Language::new(
+ let language = Arc::new(Language::new(
LanguageConfig {
brackets: vec![
BracketPair {
@@ -6342,7 +6341,7 @@ mod tests {
..Default::default()
},
Some(tree_sitter_rust::language()),
- )));
+ ));
let text = concat!(
"{ }\n", // Suppress rustfmt
@@ -6352,7 +6351,7 @@ mod tests {
"{{} }\n", //
);
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx));
view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
@@ -34,7 +34,7 @@ impl PathOpener for BufferOpener {
) -> Option<Task<Result<Box<dyn ItemHandle>>>> {
let buffer = worktree.open_buffer(project_path.path, cx);
let task = cx.spawn(|_, _| async move {
- let buffer = buffer.await?;
+ let buffer = buffer.await?.0;
Ok(Box::new(BufferItemHandle(buffer)) as Box<dyn ItemHandle>)
});
Some(task)
@@ -385,13 +385,17 @@ impl Buffer {
}
}
- pub fn with_language(
+ pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
+ self.set_language(Some(language), cx);
+ self
+ }
+
+ pub fn with_language_server(
mut self,
- language: Option<Arc<Language>>,
- language_server: Option<Arc<LanguageServer>>,
+ server: Arc<LanguageServer>,
cx: &mut ModelContext<Self>,
) -> Self {
- self.set_language(language, language_server, cx);
+ self.set_language_server(Some(server), cx);
self
}
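
The previous combined builder is split into `with_language` and `with_language_server`. A minimal sketch of the resulting call shape, mirroring the updated call sites later in this diff; `text`, `language`, and `language_server` are placeholders supplied by the caller, and `cx` is the usual gpui model context:

```rust
// Hypothetical call site: chain the two builders only when a server exists.
let buffer = cx.add_model(|cx| {
    Buffer::new(0, text, cx)
        .with_language(language, cx)
        .with_language_server(language_server, cx)
});
```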
@@ -523,13 +527,16 @@ impl Buffer {
}))
}
- pub fn set_language(
+ pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
+ self.language = language;
+ self.reparse(cx);
+ }
+
+ pub fn set_language_server(
&mut self,
- language: Option<Arc<Language>>,
language_server: Option<Arc<lsp::LanguageServer>>,
cx: &mut ModelContext<Self>,
) {
- self.language = language;
self.language_server = if let Some(server) = language_server {
let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
Some(LanguageServerState {
@@ -611,7 +618,6 @@ impl Buffer {
None
};
- self.reparse(cx);
self.update_language_server();
}
@@ -145,9 +145,8 @@ async fn test_apply_diff(mut cx: gpui::TestAppContext) {
#[gpui::test]
async fn test_reparse(mut cx: gpui::TestAppContext) {
let text = "fn a() {}";
- let buffer = cx.add_model(|cx| {
- Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx)
- });
+ let buffer =
+ cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));
// Wait for the initial text to parse
buffer
@@ -280,7 +279,7 @@ async fn test_reparse(mut cx: gpui::TestAppContext) {
#[gpui::test]
async fn test_outline(mut cx: gpui::TestAppContext) {
- let language = Some(Arc::new(
+ let language = Arc::new(
rust_lang()
.with_outline_query(
r#"
@@ -308,7 +307,7 @@ async fn test_outline(mut cx: gpui::TestAppContext) {
"#,
)
.unwrap(),
- ));
+ );
let text = r#"
struct Person {
@@ -337,7 +336,7 @@ async fn test_outline(mut cx: gpui::TestAppContext) {
"#
.unindent();
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
let outline = buffer
.read_with(&cx, |buffer, _| buffer.snapshot().outline(None))
.unwrap();
@@ -422,7 +421,7 @@ fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
}
"
.unindent();
- Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx)
+ Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)
});
let buffer = buffer.read(cx);
assert_eq!(
@@ -452,8 +451,7 @@ fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
cx.add_model(|cx| {
let text = "fn a() {}";
- let mut buffer =
- Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);
+ let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
buffer.edit_with_autoindent([8..8], "\n\n", cx);
assert_eq!(buffer.text(), "fn a() {\n \n}");
@@ -479,8 +477,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta
"
.unindent();
- let mut buffer =
- Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);
+ let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
// Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
// their indentation is not adjusted.
@@ -529,8 +526,7 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppConte
"
.unindent();
- let mut buffer =
- Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);
+ let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);
buffer.edit_with_autoindent([5..5], "\nb", cx);
assert_eq!(
@@ -575,7 +571,9 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
.unindent();
let buffer = cx.add_model(|cx| {
- Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang)), Some(language_server), cx)
+ Buffer::new(0, text, cx)
+ .with_language(Arc::new(rust_lang), cx)
+ .with_language_server(language_server, cx)
});
let open_notification = fake
@@ -849,7 +847,7 @@ async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
);
let mut buffer = Buffer::new(0, text, cx);
- buffer.set_language(Some(Arc::new(rust_lang())), None, cx);
+ buffer.set_language(Some(Arc::new(rust_lang())), cx);
buffer
.update_diagnostics(
None,
@@ -5,7 +5,7 @@ pub mod worktree;
use anyhow::{anyhow, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
-use collections::HashMap;
+use collections::{hash_map, HashMap, HashSet};
use futures::Future;
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
@@ -27,7 +27,7 @@ pub struct Project {
worktrees: Vec<ModelHandle<Worktree>>,
active_entry: Option<ProjectEntry>,
languages: Arc<LanguageRegistry>,
- language_servers: HashMap<(Arc<Path>, String), Arc<LanguageServer>>,
+ language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
client: Arc<client::Client>,
user_store: ModelHandle<UserStore>,
fs: Arc<dyn Fs>,
@@ -63,6 +63,7 @@ pub enum Event {
ActiveEntryChanged(Option<ProjectEntry>),
WorktreeRemoved(WorktreeId),
DiskBasedDiagnosticsStarted,
+ DiskBasedDiagnosticsUpdated { worktree_id: WorktreeId },
DiskBasedDiagnosticsFinished,
DiagnosticsUpdated(ProjectPath),
}
@@ -223,7 +224,6 @@ impl Project {
worktree,
client.clone(),
user_store.clone(),
- languages.clone(),
cx,
)
.await?,
@@ -463,39 +463,21 @@ impl Project {
path: ProjectPath,
cx: &mut ModelContext<Self>,
) -> Task<Result<ModelHandle<Buffer>>> {
- if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
- let language_server = worktree
- .read(cx)
- .as_local()
- .map(|w| w.abs_path().clone())
- .and_then(|worktree_abs_path| {
- let abs_path = worktree.abs_path().join(&path.path);
- let language = self.languages.select_language(&abs_path).cloned();
- if let Some(language) = language {
- if let Some(language_server) =
- self.register_language_server(worktree.abs_path(), &language, cx)
- {
- worktree.register_language(&language, &language_server);
- }
- }
- });
- worktree.update(cx, |worktree, cx| {
- if let Some(worktree) = worktree.as_local_mut() {
- let abs_path = worktree.abs_path().join(&path.path);
- let language = self.languages.select_language(&abs_path).cloned();
- if let Some(language) = language {
- if let Some(language_server) =
- self.register_language_server(worktree.abs_path(), &language, cx)
- {
- worktree.register_language(&language, &language_server);
- }
- }
- }
- worktree.open_buffer(path.path, cx)
- })
+ let worktree = if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
+ worktree
} else {
- cx.spawn(|_, _| async move { Err(anyhow!("no such worktree")) })
- }
+ return cx.spawn(|_, _| async move { Err(anyhow!("no such worktree")) });
+ };
+ let buffer_task = worktree.update(cx, |worktree, cx| worktree.open_buffer(path.path, cx));
+ cx.spawn(|this, mut cx| async move {
+ let (buffer, buffer_is_new) = buffer_task.await?;
+ if buffer_is_new {
+ this.update(&mut cx, |this, cx| {
+ this.buffer_added(worktree, buffer.clone(), cx)
+ });
+ }
+ Ok(buffer)
+ })
}
pub fn save_buffer_as(
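
`Worktree::open_buffer` now resolves to a `(buffer, is_new)` pair, while `Project::open_buffer` runs `buffer_added` for newly loaded buffers and still hands back just the buffer handle. A sketch of the worktree-level call site, mirroring the test updates later in this diff (`worktree`, `cx`, and `"a.rs"` are placeholders):

```rust
// Illustrative only: callers that don't care whether the buffer was newly
// loaded simply discard the boolean, as the updated tests below do.
let (buffer, _was_newly_loaded) = worktree
    .update(&mut cx, |tree, cx| tree.open_buffer("a.rs", cx))
    .await
    .unwrap();
```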
@@ -504,189 +486,213 @@ impl Project {
abs_path: PathBuf,
cx: &mut ModelContext<Project>,
) -> Task<Result<()>> {
- let result = self.worktree_for_abs_path(&abs_path, cx);
- let languages = self.languages.clone();
+ let worktree_task = self.worktree_for_abs_path(&abs_path, cx);
cx.spawn(|this, mut cx| async move {
- let (worktree, path) = result.await?;
+ let (worktree, path) = worktree_task.await?;
worktree
.update(&mut cx, |worktree, cx| {
- let worktree = worktree.as_local_mut().unwrap();
- let language = languages.select_language(&abs_path);
- if let Some(language) = language {
- if let Some(language_server) = this.update(cx, |this, cx| {
- this.register_language_server(worktree.abs_path(), language, cx)
- }) {
- worktree.register_language(&language, &language_server);
- }
- }
-
- worktree.save_buffer_as(buffer.clone(), path, cx)
+ worktree
+ .as_local_mut()
+ .unwrap()
+ .save_buffer_as(buffer.clone(), path, cx)
})
.await?;
+ this.update(&mut cx, |this, cx| this.buffer_added(worktree, buffer, cx));
Ok(())
})
}
- fn register_language_server(
+ fn buffer_added(
&mut self,
- worktree_abs_path: Arc<Path>,
- language: &Arc<Language>,
+ worktree: ModelHandle<Worktree>,
+ buffer: ModelHandle<Buffer>,
cx: &mut ModelContext<Self>,
- ) -> Option<Arc<LanguageServer>> {
- if let Some(server) = self
+ ) -> Option<()> {
+ // Set the buffer's language
+ let full_path = buffer.read(cx).file()?.full_path();
+ let language = self.languages.select_language(&full_path)?.clone();
+ buffer.update(cx, |buffer, cx| {
+ buffer.set_language(Some(language.clone()), cx);
+ });
+
+ // For local worktrees, start a language server if needed.
+ let worktree = worktree.read(cx);
+ let worktree_id = worktree.id();
+ let worktree_abs_path = worktree.as_local()?.abs_path().clone();
+ let language_server = match self
.language_servers
- .get(&(worktree_abs_path.clone(), language.name().to_string()))
+ .entry((worktree_id, language.name().to_string()))
{
- return Some(server.clone());
- }
+ hash_map::Entry::Occupied(e) => Some(e.get().clone()),
+ hash_map::Entry::Vacant(e) => Self::start_language_server(
+ self.client.clone(),
+ language,
+ worktree_id,
+ &worktree_abs_path,
+ cx,
+ )
+ .map(|server| e.insert(server).clone()),
+ };
+
+ buffer.update(cx, |buffer, cx| {
+ buffer.set_language_server(language_server, cx)
+ });
+
+ None
+ }
- if let Some(language_server) = language
- .start_server(&worktree_abs_path, cx)
+ fn start_language_server(
+ rpc: Arc<Client>,
+ language: Arc<Language>,
+ worktree_id: WorktreeId,
+ worktree_path: &Path,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<Arc<LanguageServer>> {
+ let language_server = language
+ .start_server(worktree_path, cx)
.log_err()
- .flatten()
- {
- enum DiagnosticProgress {
- Updating,
- Updated,
- }
+ .flatten()?;
- let disk_based_sources = language
- .disk_based_diagnostic_sources()
- .cloned()
- .unwrap_or_default();
- let disk_based_diagnostics_progress_token =
- language.disk_based_diagnostics_progress_token().cloned();
- let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
- let (disk_based_diagnostics_done_tx, disk_based_diagnostics_done_rx) =
- smol::channel::unbounded();
- language_server
- .on_notification::<lsp::notification::PublishDiagnostics, _>(move |params| {
- smol::block_on(diagnostics_tx.send(params)).ok();
- })
- .detach();
- cx.spawn_weak(|this, mut cx| {
- let has_disk_based_diagnostic_progress_token =
- disk_based_diagnostics_progress_token.is_some();
- let disk_based_diagnostics_done_tx = disk_based_diagnostics_done_tx.clone();
- async move {
- while let Ok(diagnostics) = diagnostics_rx.recv().await {
- if let Some(handle) = cx.read(|cx| this.upgrade(cx)) {
- handle.update(&mut cx, |this, cx| {
- if !has_disk_based_diagnostic_progress_token {
+ enum DiagnosticProgress {
+ Updating,
+ Publish(lsp::PublishDiagnosticsParams),
+ Updated,
+ }
+
+ let disk_based_sources = language
+ .disk_based_diagnostic_sources()
+ .cloned()
+ .unwrap_or_default();
+ let disk_based_diagnostics_progress_token =
+ language.disk_based_diagnostics_progress_token().cloned();
+ let has_disk_based_diagnostic_progress_token =
+ disk_based_diagnostics_progress_token.is_some();
+ let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
+
+ language_server
+ .on_notification::<lsp::notification::PublishDiagnostics, _>({
+ let diagnostics_tx = diagnostics_tx.clone();
+ move |params| {
+ if !has_disk_based_diagnostic_progress_token {
+ smol::block_on(diagnostics_tx.send(DiagnosticProgress::Updating)).ok();
+ }
+ smol::block_on(diagnostics_tx.send(DiagnosticProgress::Publish(params))).ok();
+ if !has_disk_based_diagnostic_progress_token {
+ smol::block_on(diagnostics_tx.send(DiagnosticProgress::Updated)).ok();
+ }
+ }
+ })
+ .detach();
+
+ let mut pending_disk_based_diagnostics: i32 = 0;
+ language_server
+ .on_notification::<lsp::notification::Progress, _>(move |params| {
+ let token = match params.token {
+ lsp::NumberOrString::Number(_) => None,
+ lsp::NumberOrString::String(token) => Some(token),
+ };
+
+ if token == disk_based_diagnostics_progress_token {
+ match params.value {
+ lsp::ProgressParamsValue::WorkDone(progress) => match progress {
+ lsp::WorkDoneProgress::Begin(_) => {
+ if pending_disk_based_diagnostics == 0 {
smol::block_on(
- disk_based_diagnostics_done_tx
- .send(DiagnosticProgress::Updating),
+ diagnostics_tx.send(DiagnosticProgress::Updating),
)
.ok();
}
- this.update_diagnostics(diagnostics, &disk_based_sources, cx)
- .log_err();
- if !has_disk_based_diagnostic_progress_token {
+ pending_disk_based_diagnostics += 1;
+ }
+ lsp::WorkDoneProgress::End(_) => {
+ pending_disk_based_diagnostics -= 1;
+ if pending_disk_based_diagnostics == 0 {
smol::block_on(
- disk_based_diagnostics_done_tx
- .send(DiagnosticProgress::Updated),
+ diagnostics_tx.send(DiagnosticProgress::Updated),
)
.ok();
}
- })
- } else {
- break;
- }
+ }
+ _ => {}
+ },
}
}
})
.detach();
- let mut pending_disk_based_diagnostics: i32 = 0;
- language_server
- .on_notification::<lsp::notification::Progress, _>(move |params| {
- let token = match params.token {
- lsp::NumberOrString::Number(_) => None,
- lsp::NumberOrString::String(token) => Some(token),
- };
-
- if token == disk_based_diagnostics_progress_token {
- match params.value {
- lsp::ProgressParamsValue::WorkDone(progress) => match progress {
- lsp::WorkDoneProgress::Begin(_) => {
- if pending_disk_based_diagnostics == 0 {
- smol::block_on(
- disk_based_diagnostics_done_tx
- .send(DiagnosticProgress::Updating),
- )
- .ok();
- }
- pending_disk_based_diagnostics += 1;
- }
- lsp::WorkDoneProgress::End(_) => {
- pending_disk_based_diagnostics -= 1;
- if pending_disk_based_diagnostics == 0 {
- smol::block_on(
- disk_based_diagnostics_done_tx
- .send(DiagnosticProgress::Updated),
- )
- .ok();
- }
- }
- _ => {}
- },
+ cx.spawn_weak(|this, mut cx| async move {
+ while let Ok(message) = diagnostics_rx.recv().await {
+ let this = cx.read(|cx| this.upgrade(cx))?;
+ match message {
+ DiagnosticProgress::Updating => {
+ let project_id = this.update(&mut cx, |this, cx| {
+ cx.emit(Event::DiskBasedDiagnosticsStarted);
+ this.remote_id()
+ });
+ if let Some(project_id) = project_id {
+ rpc.send(proto::DiskBasedDiagnosticsUpdating {
+ project_id,
+ worktree_id: worktree_id.to_proto(),
+ })
+ .await
+ .log_err();
}
}
- })
- .detach();
- let rpc = self.client.clone();
- cx.spawn_weak(|this, mut cx| async move {
- while let Ok(progress) = disk_based_diagnostics_done_rx.recv().await {
- if let Some(handle) = cx.read(|cx| this.upgrade(cx)) {
- match progress {
- DiagnosticProgress::Updating => {
- let message = handle.update(&mut cx, |this, cx| {
- cx.emit(Event::DiskBasedDiagnosticsUpdating);
- let this = this.as_local().unwrap();
- this.share.as_ref().map(|share| {
- proto::DiskBasedDiagnosticsUpdating {
- project_id: share.project_id,
- worktree_id: this.id().to_proto(),
- }
- })
- });
-
- if let Some(message) = message {
- rpc.send(message).await.log_err();
- }
- }
- DiagnosticProgress::Updated => {
- let message = handle.update(&mut cx, |this, cx| {
- cx.emit(Event::DiskBasedDiagnosticsUpdated);
- let this = this.as_local().unwrap();
- this.share.as_ref().map(|share| {
- proto::DiskBasedDiagnosticsUpdated {
- project_id: share.project_id,
- worktree_id: this.id().to_proto(),
- }
- })
- });
-
- if let Some(message) = message {
- rpc.send(message).await.log_err();
- }
- }
+ DiagnosticProgress::Publish(params) => {
+ this.update(&mut cx, |this, cx| {
+ this.update_diagnostics(params, &disk_based_sources, cx)
+ .log_err();
+ });
+ }
+ DiagnosticProgress::Updated => {
+ let project_id = this.update(&mut cx, |this, cx| {
+ cx.emit(Event::DiskBasedDiagnosticsFinished);
+ this.remote_id()
+ });
+ if let Some(project_id) = project_id {
+ rpc.send(proto::DiskBasedDiagnosticsUpdated {
+ project_id,
+ worktree_id: worktree_id.to_proto(),
+ })
+ .await
+ .log_err();
}
- } else {
- break;
}
}
- })
- .detach();
+ }
+ Some(())
+ })
+ .detach();
- self.language_servers.insert(
- (worktree_abs_path.clone(), language.name().to_string()),
- language_server.clone(),
- );
- Some(language_server)
- } else {
- None
+ Some(language_server)
+ }
+
+ fn update_diagnostics(
+ &mut self,
+ diagnostics: lsp::PublishDiagnosticsParams,
+ disk_based_sources: &HashSet<String>,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ let path = diagnostics
+ .uri
+ .to_file_path()
+ .map_err(|_| anyhow!("URI is not a file"))?;
+ for tree in &self.worktrees {
+ let relative_path = tree.update(cx, |tree, _| {
+ path.strip_prefix(tree.as_local()?.abs_path()).ok()
+ });
+ if let Some(relative_path) = relative_path {
+ return tree.update(cx, |tree, cx| {
+ tree.as_local_mut().unwrap().update_diagnostics(
+ relative_path.into(),
+ diagnostics,
+ disk_based_sources,
+ cx,
+ )
+ });
+ }
}
+ todo!()
}
pub fn worktree_for_abs_path(
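
`buffer_added` caches one language server per `(WorktreeId, language name)` key via `hash_map::Entry`, starting a server only on a vacant entry and only caching it if startup succeeds. A self-contained sketch of that caching pattern, with a stand-in `start_server` in place of `Language::start_server` and plain `String`/`u64` types in place of the real handles:

```rust
use std::collections::{hash_map, HashMap};

// Stand-in for starting a server; the real code calls Language::start_server
// and may fail, hence the Option.
fn start_server() -> Option<String> {
    Some("fake-rust-analyzer".to_string())
}

fn main() {
    // One server per (worktree id, language name): reuse an existing entry,
    // otherwise try to start one and cache it only if startup succeeds.
    let mut servers: HashMap<(u64, String), String> = HashMap::new();
    let key = (1u64, "Rust".to_string());
    let server = match servers.entry(key) {
        hash_map::Entry::Occupied(e) => Some(e.get().clone()),
        hash_map::Entry::Vacant(e) => start_server().map(|s| e.insert(s).clone()),
    };
    assert_eq!(server.as_deref(), Some("fake-rust-analyzer"));
}
```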
@@ -726,12 +732,10 @@ impl Project {
let fs = self.fs.clone();
let client = self.client.clone();
let user_store = self.user_store.clone();
- let languages = self.languages.clone();
let path = Arc::from(abs_path.as_ref());
cx.spawn(|project, mut cx| async move {
let worktree =
- Worktree::open_local(client.clone(), user_store, path, fs, languages, &mut cx)
- .await?;
+ Worktree::open_local(client.clone(), user_store, path, fs, &mut cx).await?;
let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
project.add_worktree(worktree.clone(), cx);
@@ -936,13 +940,11 @@ impl Project {
.worktree
.ok_or_else(|| anyhow!("invalid worktree"))?;
let user_store = self.user_store.clone();
- let languages = self.languages.clone();
cx.spawn(|this, mut cx| {
async move {
- let worktree = Worktree::remote(
- remote_id, replica_id, worktree, client, user_store, languages, &mut cx,
- )
- .await?;
+ let worktree =
+ Worktree::remote(remote_id, replica_id, worktree, client, user_store, &mut cx)
+ .await?;
this.update(&mut cx, |this, cx| this.add_worktree(worktree, cx));
Ok(())
}
@@ -1248,6 +1250,25 @@ impl Entity for Project {
}
}
}
+
+ fn app_will_quit(
+ &mut self,
+ _: &mut MutableAppContext,
+ ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
+ use futures::FutureExt;
+
+ let shutdown_futures = self
+ .language_servers
+ .drain()
+ .filter_map(|(_, server)| server.shutdown())
+ .collect::<Vec<_>>();
+ Some(
+ async move {
+ futures::future::join_all(shutdown_futures).await;
+ }
+ .boxed(),
+ )
+ }
}
impl Collaborator {
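
With the servers now owned by `Project`, `app_will_quit` drains the map and awaits all shutdown futures before the app exits. A self-contained sketch of that join pattern; plain async blocks stand in for `LanguageServer::shutdown()`:

```rust
use futures::future::join_all;

fn main() {
    // The real code drains self.language_servers and filter_maps each entry
    // through LanguageServer::shutdown(); here simple async blocks stand in.
    futures::executor::block_on(async {
        let shutdown_futures: Vec<_> = (0..3)
            .map(|id| async move { println!("server {} shut down", id) })
            .collect();
        join_all(shutdown_futures).await;
    });
}
```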
@@ -1275,8 +1296,12 @@ mod tests {
use super::*;
use client::test::FakeHttpClient;
use fs::RealFs;
- use gpui::TestAppContext;
- use language::LanguageRegistry;
+ use futures::StreamExt;
+ use gpui::{test::subscribe, TestAppContext};
+ use language::{
+ tree_sitter_rust, Diagnostic, LanguageConfig, LanguageRegistry, LanguageServerConfig, Point,
+ };
+ use lsp::Url;
use serde_json::json;
use std::{os::unix, path::PathBuf};
use util::test::temp_tree;
@@ -1344,6 +1369,114 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
+ let (language_server_config, mut fake_server) =
+ LanguageServerConfig::fake(cx.background()).await;
+ let progress_token = language_server_config
+ .disk_based_diagnostics_progress_token
+ .clone()
+ .unwrap();
+ let mut languages = LanguageRegistry::new();
+ languages.add(Arc::new(Language::new(
+ LanguageConfig {
+ name: "Rust".to_string(),
+ path_suffixes: vec!["rs".to_string()],
+ language_server: Some(language_server_config),
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ )));
+
+ let dir = temp_tree(json!({
+ "a.rs": "fn a() { A }",
+ "b.rs": "const y: i32 = 1",
+ }));
+
+ let http_client = FakeHttpClient::with_404_response();
+ let client = Client::new(http_client.clone());
+ let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
+
+ let tree = Worktree::open_local(
+ client,
+ user_store,
+ dir.path(),
+ Arc::new(RealFs),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ // Cause worktree to start the fake language server
+ let _buffer = tree
+ .update(&mut cx, |tree, cx| tree.open_buffer("b.rs", cx))
+ .await
+ .unwrap();
+
+ let mut events = subscribe(&tree, &mut cx);
+
+ fake_server.start_progress(&progress_token).await;
+ assert_eq!(
+ events.next().await.unwrap(),
+ Event::DiskBasedDiagnosticsUpdating
+ );
+
+ fake_server.start_progress(&progress_token).await;
+ fake_server.end_progress(&progress_token).await;
+ fake_server.start_progress(&progress_token).await;
+
+ fake_server
+ .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
+ uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
+ version: None,
+ diagnostics: vec![lsp::Diagnostic {
+ range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
+ severity: Some(lsp::DiagnosticSeverity::ERROR),
+ message: "undefined variable 'A'".to_string(),
+ ..Default::default()
+ }],
+ })
+ .await;
+ assert_eq!(
+ events.next().await.unwrap(),
+ Event::DiagnosticsUpdated(Arc::from(Path::new("a.rs")))
+ );
+
+ fake_server.end_progress(&progress_token).await;
+ fake_server.end_progress(&progress_token).await;
+ assert_eq!(
+ events.next().await.unwrap(),
+ Event::DiskBasedDiagnosticsUpdated
+ );
+
+ let (buffer, _) = tree
+ .update(&mut cx, |tree, cx| tree.open_buffer("a.rs", cx))
+ .await
+ .unwrap();
+
+ buffer.read_with(&cx, |buffer, _| {
+ let snapshot = buffer.snapshot();
+ let diagnostics = snapshot
+ .diagnostics_in_range::<_, Point>(0..buffer.len())
+ .collect::<Vec<_>>();
+ assert_eq!(
+ diagnostics,
+ &[DiagnosticEntry {
+ range: Point::new(0, 9)..Point::new(0, 10),
+ diagnostic: Diagnostic {
+ severity: lsp::DiagnosticSeverity::ERROR,
+ message: "undefined variable 'A'".to_string(),
+ group_id: 0,
+ is_primary: true,
+ ..Default::default()
+ }
+ }]
+ )
+ });
+ }
+
#[gpui::test]
async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
let dir = temp_tree(json!({
@@ -4,7 +4,7 @@ use super::{
DiagnosticSummary, ProjectEntry,
};
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
-use anyhow::{anyhow, Context, Result};
+use anyhow::{anyhow, Result};
use client::{proto, Client, PeerId, TypedEnvelope, UserStore};
use clock::ReplicaId;
use collections::{hash_map, HashMap, HashSet};
@@ -15,11 +15,10 @@ use gpui::{
Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
- range_from_lsp, Buffer, Diagnostic, DiagnosticEntry, DiagnosticSeverity, File as _, Language,
- LanguageRegistry, Operation, PointUtf16, Rope,
+ range_from_lsp, Buffer, Diagnostic, DiagnosticEntry, DiagnosticSeverity, File as _, Operation,
+ PointUtf16, Rope,
};
use lazy_static::lazy_static;
-use lsp::LanguageServer;
use parking_lot::Mutex;
use postage::{
prelude::{Sink as _, Stream as _},
@@ -37,7 +36,7 @@ use std::{
ops::Deref,
path::{Path, PathBuf},
sync::{
- atomic::{AtomicUsize, Ordering::SeqCst},
+ atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
Arc,
},
time::{Duration, SystemTime},
@@ -74,29 +73,6 @@ pub enum Event {
impl Entity for Worktree {
type Event = Event;
-
- fn app_will_quit(
- &mut self,
- _: &mut MutableAppContext,
- ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
- use futures::FutureExt;
-
- if let Self::Local(worktree) = self {
- let shutdown_futures = worktree
- .language_servers
- .drain()
- .filter_map(|(_, server)| server.shutdown())
- .collect::<Vec<_>>();
- Some(
- async move {
- futures::future::join_all(shutdown_futures).await;
- }
- .boxed(),
- )
- } else {
- None
- }
- }
}
impl Worktree {
@@ -105,11 +81,10 @@ impl Worktree {
user_store: ModelHandle<UserStore>,
path: impl Into<Arc<Path>>,
fs: Arc<dyn Fs>,
- languages: Arc<LanguageRegistry>,
cx: &mut AsyncAppContext,
) -> Result<ModelHandle<Self>> {
let (tree, scan_states_tx) =
- LocalWorktree::new(client, user_store, path, fs.clone(), languages, cx).await?;
+ LocalWorktree::new(client, user_store, path, fs.clone(), cx).await?;
tree.update(cx, |tree, cx| {
let tree = tree.as_local_mut().unwrap();
let abs_path = tree.snapshot.abs_path.clone();
@@ -131,7 +106,6 @@ impl Worktree {
worktree: proto::Worktree,
client: Arc<Client>,
user_store: ModelHandle<UserStore>,
- languages: Arc<LanguageRegistry>,
cx: &mut AsyncAppContext,
) -> Result<ModelHandle<Self>> {
let remote_id = worktree.id;
@@ -238,7 +212,6 @@ impl Worktree {
loading_buffers: Default::default(),
open_buffers: Default::default(),
queued_operations: Default::default(),
- languages,
user_store,
diagnostic_summaries,
})
@@ -306,13 +279,6 @@ impl Worktree {
}
}
- pub fn languages(&self) -> &Arc<LanguageRegistry> {
- match self {
- Worktree::Local(worktree) => &worktree.language_registry,
- Worktree::Remote(worktree) => &worktree.languages,
- }
- }
-
pub fn user_store(&self) -> &ModelHandle<UserStore> {
match self {
Worktree::Local(worktree) => &worktree.user_store,
@@ -379,7 +345,7 @@ impl Worktree {
&mut self,
path: impl AsRef<Path>,
cx: &mut ModelContext<Self>,
- ) -> Task<Result<ModelHandle<Buffer>>> {
+ ) -> Task<Result<(ModelHandle<Buffer>, bool)>> {
let path = path.as_ref();
// If there is already a buffer for the given path, then return it.
@@ -388,9 +354,10 @@ impl Worktree {
Worktree::Remote(worktree) => worktree.get_open_buffer(path, cx),
};
if let Some(existing_buffer) = existing_buffer {
- return cx.spawn(move |_, _| async move { Ok(existing_buffer) });
+ return cx.spawn(move |_, _| async move { Ok((existing_buffer, false)) });
}
+ let is_new = Arc::new(AtomicBool::new(true));
let path: Arc<Path> = Arc::from(path);
let mut loading_watch = match self.loading_buffers().entry(path.clone()) {
// If the given path is already being loaded, then wait for that existing
@@ -412,7 +379,10 @@ impl Worktree {
// After the buffer loads, record the fact that it is no longer
// loading.
this.update(&mut cx, |this, _| this.loading_buffers().remove(&path));
- *tx.borrow_mut() = Some(result.map_err(|e| Arc::new(e)));
+ *tx.borrow_mut() = Some(match result {
+ Ok(buffer) => Ok((buffer, is_new)),
+ Err(error) => Err(Arc::new(error)),
+ });
})
.detach();
rx
@@ -422,7 +392,10 @@ impl Worktree {
cx.spawn(|_, _| async move {
loop {
if let Some(result) = loading_watch.borrow().as_ref() {
- return result.clone().map_err(|e| anyhow!("{}", e));
+ return match result {
+ Ok((buf, is_new)) => Ok((buf.clone(), is_new.fetch_and(false, SeqCst))),
+ Err(error) => Err(anyhow!("{}", error)),
+ };
}
loading_watch.recv().await;
}
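
The `is_new` flag shared through the loading watch uses `AtomicBool::fetch_and(false, SeqCst)` so that only the first waiter observes `true` and triggers the `buffer_added` path. A self-contained sketch of that flag behavior:

```rust
use std::sync::atomic::{AtomicBool, Ordering::SeqCst};
use std::sync::Arc;

fn main() {
    // The flag starts as true; fetch_and(false) returns the previous value and
    // clears it, so exactly one caller sees `is_new == true`.
    let is_new = Arc::new(AtomicBool::new(true));
    assert!(is_new.fetch_and(false, SeqCst)); // first caller: buffer is new
    assert!(!is_new.fetch_and(false, SeqCst)); // later callers: already seen
}
```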
@@ -731,179 +704,6 @@ impl Worktree {
}
}
- pub fn update_diagnostics(
- &mut self,
- params: lsp::PublishDiagnosticsParams,
- disk_based_sources: &HashSet<String>,
- cx: &mut ModelContext<Worktree>,
- ) -> Result<()> {
- let this = self.as_local_mut().ok_or_else(|| anyhow!("not local"))?;
- let abs_path = params
- .uri
- .to_file_path()
- .map_err(|_| anyhow!("URI is not a file"))?;
- let worktree_path = Arc::from(
- abs_path
- .strip_prefix(&this.abs_path)
- .context("path is not within worktree")?,
- );
-
- let mut next_group_id = 0;
- let mut diagnostics = Vec::default();
- let mut primary_diagnostic_group_ids = HashMap::default();
- let mut sources_by_group_id = HashMap::default();
- let mut supporting_diagnostic_severities = HashMap::default();
- for diagnostic in &params.diagnostics {
- let source = diagnostic.source.as_ref();
- let code = diagnostic.code.as_ref().map(|code| match code {
- lsp::NumberOrString::Number(code) => code.to_string(),
- lsp::NumberOrString::String(code) => code.clone(),
- });
- let range = range_from_lsp(diagnostic.range);
- let is_supporting = diagnostic
- .related_information
- .as_ref()
- .map_or(false, |infos| {
- infos.iter().any(|info| {
- primary_diagnostic_group_ids.contains_key(&(
- source,
- code.clone(),
- range_from_lsp(info.location.range),
- ))
- })
- });
-
- if is_supporting {
- if let Some(severity) = diagnostic.severity {
- supporting_diagnostic_severities
- .insert((source, code.clone(), range), severity);
- }
- } else {
- let group_id = post_inc(&mut next_group_id);
- let is_disk_based =
- source.map_or(false, |source| disk_based_sources.contains(source));
-
- sources_by_group_id.insert(group_id, source);
- primary_diagnostic_group_ids
- .insert((source, code.clone(), range.clone()), group_id);
-
- diagnostics.push(DiagnosticEntry {
- range,
- diagnostic: Diagnostic {
- code: code.clone(),
- severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
- message: diagnostic.message.clone(),
- group_id,
- is_primary: true,
- is_valid: true,
- is_disk_based,
- },
- });
- if let Some(infos) = &diagnostic.related_information {
- for info in infos {
- if info.location.uri == params.uri {
- let range = range_from_lsp(info.location.range);
- diagnostics.push(DiagnosticEntry {
- range,
- diagnostic: Diagnostic {
- code: code.clone(),
- severity: DiagnosticSeverity::INFORMATION,
- message: info.message.clone(),
- group_id,
- is_primary: false,
- is_valid: true,
- is_disk_based,
- },
- });
- }
- }
- }
- }
- }
-
- for entry in &mut diagnostics {
- let diagnostic = &mut entry.diagnostic;
- if !diagnostic.is_primary {
- let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
- if let Some(&severity) = supporting_diagnostic_severities.get(&(
- source,
- diagnostic.code.clone(),
- entry.range.clone(),
- )) {
- diagnostic.severity = severity;
- }
- }
- }
-
- self.update_diagnostic_entries(worktree_path, params.version, diagnostics, cx)?;
- Ok(())
- }
-
- pub fn update_diagnostic_entries(
- &mut self,
- worktree_path: Arc<Path>,
- version: Option<i32>,
- diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
- cx: &mut ModelContext<Self>,
- ) -> Result<()> {
- let this = self.as_local_mut().unwrap();
- for buffer in this.open_buffers.values() {
- if let Some(buffer) = buffer.upgrade(cx) {
- if buffer
- .read(cx)
- .file()
- .map_or(false, |file| *file.path() == worktree_path)
- {
- let (remote_id, operation) = buffer.update(cx, |buffer, cx| {
- (
- buffer.remote_id(),
- buffer.update_diagnostics(version, diagnostics.clone(), cx),
- )
- });
- self.send_buffer_update(remote_id, operation?, cx);
- break;
- }
- }
- }
-
- let this = self.as_local_mut().unwrap();
- let summary = DiagnosticSummary::new(&diagnostics);
- this.diagnostic_summaries
- .insert(PathKey(worktree_path.clone()), summary.clone());
- this.diagnostics.insert(worktree_path.clone(), diagnostics);
-
- cx.emit(Event::DiagnosticsUpdated(worktree_path.clone()));
-
- if let Some(share) = this.share.as_ref() {
- cx.foreground()
- .spawn({
- let client = this.client.clone();
- let project_id = share.project_id;
- let worktree_id = this.id().to_proto();
- let path = worktree_path.to_string_lossy().to_string();
- async move {
- client
- .send(proto::UpdateDiagnosticSummary {
- project_id,
- worktree_id,
- summary: Some(proto::DiagnosticSummary {
- path,
- error_count: summary.error_count as u32,
- warning_count: summary.warning_count as u32,
- info_count: summary.info_count as u32,
- hint_count: summary.hint_count as u32,
- }),
- })
- .await
- .log_err()
- }
- })
- .detach();
- }
-
- Ok(())
- }
-
fn send_buffer_update(
&mut self,
buffer_id: u64,
@@ -998,11 +798,9 @@ pub struct LocalWorktree {
diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<PointUtf16>>>,
diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
queued_operations: Vec<(u64, Operation)>,
- language_registry: Arc<LanguageRegistry>,
client: Arc<Client>,
user_store: ModelHandle<UserStore>,
fs: Arc<dyn Fs>,
- language_servers: HashMap<String, Arc<LanguageServer>>,
}
struct ShareState {
@@ -1020,7 +818,6 @@ pub struct RemoteWorktree {
replica_id: ReplicaId,
loading_buffers: LoadingBuffers,
open_buffers: HashMap<usize, RemoteBuffer>,
- languages: Arc<LanguageRegistry>,
user_store: ModelHandle<UserStore>,
queued_operations: Vec<(u64, Operation)>,
diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
@@ -1028,7 +825,9 @@ pub struct RemoteWorktree {
type LoadingBuffers = HashMap<
Arc<Path>,
- postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
+ postage::watch::Receiver<
+ Option<Result<(ModelHandle<Buffer>, Arc<AtomicBool>), Arc<anyhow::Error>>>,
+ >,
>;
#[derive(Default, Deserialize)]
@@ -1042,7 +841,6 @@ impl LocalWorktree {
user_store: ModelHandle<UserStore>,
path: impl Into<Arc<Path>>,
fs: Arc<dyn Fs>,
- languages: Arc<LanguageRegistry>,
cx: &mut AsyncAppContext,
) -> Result<(ModelHandle<Worktree>, Sender<ScanState>)> {
let abs_path = path.into();
@@ -1105,11 +903,9 @@ impl LocalWorktree {
diagnostics: Default::default(),
diagnostic_summaries: Default::default(),
queued_operations: Default::default(),
- language_registry: languages,
client,
user_store,
fs,
- language_servers: Default::default(),
};
cx.spawn_weak(|this, mut cx| async move {
@@ -1149,19 +945,6 @@ impl LocalWorktree {
self.config.collaborators.clone()
}
- pub fn language_registry(&self) -> &LanguageRegistry {
- &self.language_registry
- }
-
- pub fn register_language(
- &mut self,
- language: &Arc<Language>,
- language_server: &Arc<LanguageServer>,
- ) {
- self.language_servers
- .insert(language.name().to_string(), language_server.clone());
- }
-
fn get_open_buffer(
&mut self,
path: &Path,
@@ -1195,23 +978,13 @@ impl LocalWorktree {
.update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
.await?;
- let (diagnostics, language, language_server) = this.update(&mut cx, |this, _| {
- let this = this.as_local_mut().unwrap();
- let diagnostics = this.diagnostics.get(&path).cloned();
- let language = this
- .language_registry
- .select_language(file.full_path())
- .cloned();
- let server = language
- .as_ref()
- .and_then(|language| this.language_servers.get(language.name()).cloned());
- (diagnostics, language, server)
+ let diagnostics = this.update(&mut cx, |this, _| {
+ this.as_local_mut().unwrap().diagnostics.get(&path).cloned()
});
let mut buffer_operations = Vec::new();
let buffer = cx.add_model(|cx| {
let mut buffer = Buffer::from_file(0, contents, Box::new(file), cx);
- buffer.set_language(language, language_server, cx);
if let Some(diagnostics) = diagnostics {
let op = buffer.update_diagnostics(None, diagnostics, cx).unwrap();
buffer_operations.push(op);
@@ -1239,7 +1012,7 @@ impl LocalWorktree {
cx.spawn(|this, mut cx| async move {
let peer_id = envelope.original_sender_id();
let path = Path::new(&envelope.payload.path);
- let buffer = this
+ let (buffer, _) = this
.update(&mut cx, |this, cx| this.open_buffer(path, cx))
.await?;
this.update(&mut cx, |this, cx| {
@@ -1285,6 +1058,201 @@ impl LocalWorktree {
cx.notify();
}
+ pub fn update_diagnostics(
+ &mut self,
+ worktree_path: Arc<Path>,
+ params: lsp::PublishDiagnosticsParams,
+ disk_based_sources: &HashSet<String>,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Result<()> {
+ let mut next_group_id = 0;
+ let mut diagnostics = Vec::default();
+ let mut primary_diagnostic_group_ids = HashMap::default();
+ let mut sources_by_group_id = HashMap::default();
+ let mut supporting_diagnostic_severities = HashMap::default();
+ for diagnostic in &params.diagnostics {
+ let source = diagnostic.source.as_ref();
+ let code = diagnostic.code.as_ref().map(|code| match code {
+ lsp::NumberOrString::Number(code) => code.to_string(),
+ lsp::NumberOrString::String(code) => code.clone(),
+ });
+ let range = range_from_lsp(diagnostic.range);
+ let is_supporting = diagnostic
+ .related_information
+ .as_ref()
+ .map_or(false, |infos| {
+ infos.iter().any(|info| {
+ primary_diagnostic_group_ids.contains_key(&(
+ source,
+ code.clone(),
+ range_from_lsp(info.location.range),
+ ))
+ })
+ });
+
+ if is_supporting {
+ if let Some(severity) = diagnostic.severity {
+ supporting_diagnostic_severities
+ .insert((source, code.clone(), range), severity);
+ }
+ } else {
+ let group_id = post_inc(&mut next_group_id);
+ let is_disk_based =
+ source.map_or(false, |source| disk_based_sources.contains(source));
+
+ sources_by_group_id.insert(group_id, source);
+ primary_diagnostic_group_ids
+ .insert((source, code.clone(), range.clone()), group_id);
+
+ diagnostics.push(DiagnosticEntry {
+ range,
+ diagnostic: Diagnostic {
+ code: code.clone(),
+ severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
+ message: diagnostic.message.clone(),
+ group_id,
+ is_primary: true,
+ is_valid: true,
+ is_disk_based,
+ },
+ });
+ if let Some(infos) = &diagnostic.related_information {
+ for info in infos {
+ if info.location.uri == params.uri {
+ let range = range_from_lsp(info.location.range);
+ diagnostics.push(DiagnosticEntry {
+ range,
+ diagnostic: Diagnostic {
+ code: code.clone(),
+ severity: DiagnosticSeverity::INFORMATION,
+ message: info.message.clone(),
+ group_id,
+ is_primary: false,
+ is_valid: true,
+ is_disk_based,
+ },
+ });
+ }
+ }
+ }
+ }
+ }
+
+ for entry in &mut diagnostics {
+ let diagnostic = &mut entry.diagnostic;
+ if !diagnostic.is_primary {
+ let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
+ if let Some(&severity) = supporting_diagnostic_severities.get(&(
+ source,
+ diagnostic.code.clone(),
+ entry.range.clone(),
+ )) {
+ diagnostic.severity = severity;
+ }
+ }
+ }
+
+ self.update_diagnostic_entries(worktree_path, params.version, diagnostics, cx)?;
+ Ok(())
+ }
+
+ pub fn update_diagnostic_entries(
+ &mut self,
+ worktree_path: Arc<Path>,
+ version: Option<i32>,
+ diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Result<()> {
+ for buffer in self.open_buffers.values() {
+ if let Some(buffer) = buffer.upgrade(cx) {
+ if buffer
+ .read(cx)
+ .file()
+ .map_or(false, |file| *file.path() == worktree_path)
+ {
+ let (remote_id, operation) = buffer.update(cx, |buffer, cx| {
+ (
+ buffer.remote_id(),
+ buffer.update_diagnostics(version, diagnostics.clone(), cx),
+ )
+ });
+ self.send_buffer_update(remote_id, operation?, cx);
+ break;
+ }
+ }
+ }
+
+ let summary = DiagnosticSummary::new(&diagnostics);
+ self.diagnostic_summaries
+ .insert(PathKey(worktree_path.clone()), summary.clone());
+ self.diagnostics.insert(worktree_path.clone(), diagnostics);
+
+ cx.emit(Event::DiagnosticsUpdated(worktree_path.clone()));
+
+ if let Some(share) = self.share.as_ref() {
+ cx.foreground()
+ .spawn({
+ let client = self.client.clone();
+ let project_id = share.project_id;
+ let worktree_id = self.id().to_proto();
+ let path = worktree_path.to_string_lossy().to_string();
+ async move {
+ client
+ .send(proto::UpdateDiagnosticSummary {
+ project_id,
+ worktree_id,
+ summary: Some(proto::DiagnosticSummary {
+ path,
+ error_count: summary.error_count as u32,
+ warning_count: summary.warning_count as u32,
+ info_count: summary.info_count as u32,
+ hint_count: summary.hint_count as u32,
+ }),
+ })
+ .await
+ .log_err()
+ }
+ })
+ .detach();
+ }
+
+ Ok(())
+ }
+
+ fn send_buffer_update(
+ &mut self,
+ buffer_id: u64,
+ operation: Operation,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Option<()> {
+ let share = self.share.as_ref()?;
+ let project_id = share.project_id;
+ let worktree_id = self.id();
+ let rpc = self.client.clone();
+ cx.spawn(|worktree, mut cx| async move {
+ if let Err(error) = rpc
+ .request(proto::UpdateBuffer {
+ project_id,
+ worktree_id: worktree_id.0 as u64,
+ buffer_id,
+ operations: vec![language::proto::serialize_operation(&operation)],
+ })
+ .await
+ {
+ worktree.update(&mut cx, |worktree, _| {
+ log::error!("error sending buffer operation: {}", error);
+ worktree
+ .as_local_mut()
+ .unwrap()
+ .queued_operations
+ .push((buffer_id, operation));
+ });
+ }
+ })
+ .detach();
+ None
+ }
+
pub fn scan_complete(&self) -> impl Future<Output = ()> {
let mut scan_state_rx = self.last_scan_state_rx.clone();
async move {
@@ -1375,21 +1343,8 @@ impl LocalWorktree {
}
});
- let (language, language_server) = this.update(&mut cx, |worktree, _| {
- let worktree = worktree.as_local_mut().unwrap();
- let language = worktree
- .language_registry()
- .select_language(file.full_path())
- .cloned();
- let language_server = language
- .as_ref()
- .and_then(|language| worktree.language_servers.get(language.name()).cloned());
- (language, language_server.clone())
- });
-
buffer_handle.update(&mut cx, |buffer, cx| {
buffer.did_save(version, file.mtime, Some(Box::new(file)), cx);
- buffer.set_language(language, language_server, cx);
});
Ok(())
@@ -1578,16 +1533,10 @@ impl RemoteWorktree {
mtime: entry.mtime,
is_local: false,
};
- let language = this.read_with(&cx, |this, _| {
- use language::File;
- this.languages().select_language(file.full_path()).cloned()
- });
let remote_buffer = response.buffer.ok_or_else(|| anyhow!("empty buffer"))?;
let buffer_id = remote_buffer.id as usize;
let buffer = cx.add_model(|cx| {
- Buffer::from_proto(replica_id, remote_buffer, Some(Box::new(file)), cx)
- .unwrap()
- .with_language(language, None, cx)
+ Buffer::from_proto(replica_id, remote_buffer, Some(Box::new(file)), cx).unwrap()
});
this.update(&mut cx, move |this, cx| {
let this = this.as_remote_mut().unwrap();
@@ -3129,9 +3078,7 @@ mod tests {
use anyhow::Result;
use client::test::{FakeHttpClient, FakeServer};
use fs::RealFs;
- use gpui::test::subscribe;
- use language::{tree_sitter_rust, DiagnosticEntry, Language, LanguageServerConfig};
- use language::{Diagnostic, LanguageConfig};
+ use language::{Diagnostic, DiagnosticEntry};
use lsp::Url;
use rand::prelude::*;
use serde_json::json;
@@ -3169,7 +3116,6 @@ mod tests {
user_store,
Arc::from(Path::new("/root")),
Arc::new(fs),
- Default::default(),
&mut cx.to_async(),
)
.await
@@ -3207,12 +3153,11 @@ mod tests {
user_store,
dir.path(),
Arc::new(RealFs),
- Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
- let buffer = tree
+ let (buffer, _) = tree
.update(&mut cx, |tree, cx| tree.open_buffer("file1", cx))
.await
.unwrap();
@@ -3242,7 +3187,6 @@ mod tests {
user_store,
file_path.clone(),
Arc::new(RealFs),
- Default::default(),
&mut cx.to_async(),
)
.await
@@ -3251,7 +3195,7 @@ mod tests {
.await;
cx.read(|cx| assert_eq!(tree.read(cx).file_count(), 1));
- let buffer = tree
+ let (buffer, _) = tree
.update(&mut cx, |tree, cx| tree.open_buffer("", cx))
.await
.unwrap();
@@ -3291,7 +3235,6 @@ mod tests {
user_store.clone(),
dir.path(),
Arc::new(RealFs),
- Default::default(),
&mut cx.to_async(),
)
.await
@@ -3299,7 +3242,7 @@ mod tests {
let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
let buffer = tree.update(cx, |tree, cx| tree.open_buffer(path, cx));
- async move { buffer.await.unwrap() }
+ async move { buffer.await.unwrap().0 }
};
let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
tree.read_with(cx, |tree, _| {
@@ -3330,7 +3273,6 @@ mod tests {
initial_snapshot.to_proto(&Default::default()),
Client::new(http_client.clone()),
user_store,
- Default::default(),
&mut cx.to_async(),
)
.await
@@ -3443,7 +3385,6 @@ mod tests {
user_store,
dir.path(),
Arc::new(RealFs),
- Default::default(),
&mut cx.to_async(),
)
.await
@@ -3496,7 +3437,6 @@ mod tests {
user_store,
"/the-dir".as_ref(),
fs,
- Default::default(),
&mut cx.to_async(),
)
.await
@@ -3511,9 +3451,9 @@ mod tests {
)
});
- let buffer_a_1 = buffer_a_1.await.unwrap();
- let buffer_a_2 = buffer_a_2.await.unwrap();
- let buffer_b = buffer_b.await.unwrap();
+ let buffer_a_1 = buffer_a_1.await.unwrap().0;
+ let buffer_a_2 = buffer_a_2.await.unwrap().0;
+ let buffer_b = buffer_b.await.unwrap().0;
assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents");
assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents");
@@ -3526,7 +3466,8 @@ mod tests {
let buffer_a_3 = worktree
.update(&mut cx, |worktree, cx| worktree.open_buffer("a.txt", cx))
.await
- .unwrap();
+ .unwrap()
+ .0;
// There's still only one buffer per path.
assert_eq!(buffer_a_3.id(), buffer_a_id);
@@ -3550,7 +3491,6 @@ mod tests {
user_store,
dir.path(),
Arc::new(RealFs),
- Default::default(),
&mut cx.to_async(),
)
.await
@@ -3559,7 +3499,7 @@ mod tests {
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
- let buffer1 = tree
+ let (buffer1, _) = tree
.update(&mut cx, |tree, cx| tree.open_buffer("file1", cx))
.await
.unwrap();
@@ -3626,7 +3566,7 @@ mod tests {
// When a file is deleted, the buffer is considered dirty.
let events = Rc::new(RefCell::new(Vec::new()));
- let buffer2 = tree
+ let (buffer2, _) = tree
.update(&mut cx, |tree, cx| tree.open_buffer("file2", cx))
.await
.unwrap();
@@ -3647,7 +3587,7 @@ mod tests {
// When a file is already dirty when deleted, we don't emit a Dirtied event.
let events = Rc::new(RefCell::new(Vec::new()));
- let buffer3 = tree
+ let (buffer3, _) = tree
.update(&mut cx, |tree, cx| tree.open_buffer("file3", cx))
.await
.unwrap();
@@ -3687,7 +3627,6 @@ mod tests {
user_store,
dir.path(),
Arc::new(RealFs),
- Default::default(),
&mut cx.to_async(),
)
.await
@@ -3696,7 +3635,7 @@ mod tests {
.await;
let abs_path = dir.path().join("the-file");
- let buffer = tree
+ let (buffer, _) = tree
.update(&mut cx, |tree, cx| {
tree.open_buffer(Path::new("the-file"), cx)
})
@@ -3775,115 +3714,6 @@ mod tests {
.await;
}
- #[gpui::test]
- async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) {
- let (language_server_config, mut fake_server) =
- LanguageServerConfig::fake(cx.background()).await;
- let progress_token = language_server_config
- .disk_based_diagnostics_progress_token
- .clone()
- .unwrap();
- let mut languages = LanguageRegistry::new();
- languages.add(Arc::new(Language::new(
- LanguageConfig {
- name: "Rust".to_string(),
- path_suffixes: vec!["rs".to_string()],
- language_server: Some(language_server_config),
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- )));
-
- let dir = temp_tree(json!({
- "a.rs": "fn a() { A }",
- "b.rs": "const y: i32 = 1",
- }));
-
- let http_client = FakeHttpClient::with_404_response();
- let client = Client::new(http_client.clone());
- let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
-
- let tree = Worktree::open_local(
- client,
- user_store,
- dir.path(),
- Arc::new(RealFs),
- Arc::new(languages),
- &mut cx.to_async(),
- )
- .await
- .unwrap();
- cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
- .await;
-
- // Cause worktree to start the fake language server
- let _buffer = tree
- .update(&mut cx, |tree, cx| tree.open_buffer("b.rs", cx))
- .await
- .unwrap();
-
- let mut events = subscribe(&tree, &mut cx);
-
- fake_server.start_progress(&progress_token).await;
- assert_eq!(
- events.next().await.unwrap(),
- Event::DiskBasedDiagnosticsUpdating
- );
-
- fake_server.start_progress(&progress_token).await;
- fake_server.end_progress(&progress_token).await;
- fake_server.start_progress(&progress_token).await;
-
- fake_server
- .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
- uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(),
- version: None,
- diagnostics: vec![lsp::Diagnostic {
- range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
- severity: Some(lsp::DiagnosticSeverity::ERROR),
- message: "undefined variable 'A'".to_string(),
- ..Default::default()
- }],
- })
- .await;
- assert_eq!(
- events.next().await.unwrap(),
- Event::DiagnosticsUpdated(Arc::from(Path::new("a.rs")))
- );
-
- fake_server.end_progress(&progress_token).await;
- fake_server.end_progress(&progress_token).await;
- assert_eq!(
- events.next().await.unwrap(),
- Event::DiskBasedDiagnosticsUpdated
- );
-
- let buffer = tree
- .update(&mut cx, |tree, cx| tree.open_buffer("a.rs", cx))
- .await
- .unwrap();
-
- buffer.read_with(&cx, |buffer, _| {
- let snapshot = buffer.snapshot();
- let diagnostics = snapshot
- .diagnostics_in_range::<_, Point>(0..buffer.len())
- .collect::<Vec<_>>();
- assert_eq!(
- diagnostics,
- &[DiagnosticEntry {
- range: Point::new(0, 9)..Point::new(0, 10),
- diagnostic: Diagnostic {
- severity: lsp::DiagnosticSeverity::ERROR,
- message: "undefined variable 'A'".to_string(),
- group_id: 0,
- is_primary: true,
- ..Default::default()
- }
- }]
- )
- });
- }
-
#[gpui::test]
async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
let fs = Arc::new(FakeFs::new());
@@ -3911,13 +3741,12 @@ mod tests {
user_store,
"/the-dir".as_ref(),
fs,
- Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
- let buffer = worktree
+ let (buffer, _) = worktree
.update(&mut cx, |tree, cx| tree.open_buffer("a.rs", cx))
.await
.unwrap();
@@ -4024,7 +3853,12 @@ mod tests {
worktree
.update(&mut cx, |tree, cx| {
- tree.update_diagnostics(message, &Default::default(), cx)
+ tree.as_local_mut().unwrap().update_diagnostics(
+ Arc::from("a.rs".as_ref()),
+ message,
+ &Default::default(),
+ cx,
+ )
})
.unwrap();
let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
@@ -1213,7 +1213,8 @@ mod tests {
let buffer_b = worktree_b
.update(&mut cx_b, |worktree, cx| worktree.open_buffer("b.txt", cx))
.await
- .unwrap();
+ .unwrap()
+ .0;
let buffer_b = cx_b.add_model(|cx| MultiBuffer::singleton(buffer_b, cx));
buffer_b.read_with(&cx_b, |buf, cx| {
assert_eq!(buf.read(cx).text(), "b-contents")
@@ -1222,7 +1223,8 @@ mod tests {
let buffer_a = worktree_a
.update(&mut cx_a, |tree, cx| tree.open_buffer("b.txt", cx))
.await
- .unwrap();
+ .unwrap()
+ .0;
let editor_b = cx_b.add_view(window_b, |cx| {
Editor::for_buffer(buffer_b, Arc::new(|cx| EditorSettings::test(cx)), cx)
@@ -1436,11 +1438,13 @@ mod tests {
let buffer_b = worktree_b
.update(&mut cx_b, |tree, cx| tree.open_buffer("file1", cx))
.await
- .unwrap();
+ .unwrap()
+ .0;
let buffer_c = worktree_c
.update(&mut cx_c, |tree, cx| tree.open_buffer("file1", cx))
.await
- .unwrap();
+ .unwrap()
+ .0;
buffer_b.update(&mut cx_b, |buf, cx| buf.edit([0..0], "i-am-b, ", cx));
buffer_c.update(&mut cx_c, |buf, cx| buf.edit([0..0], "i-am-c, ", cx));
@@ -1448,7 +1452,8 @@ mod tests {
let buffer_a = worktree_a
.update(&mut cx_a, |tree, cx| tree.open_buffer("file1", cx))
.await
- .unwrap();
+ .unwrap()
+ .0;
buffer_a
.condition(&mut cx_a, |buf, _| buf.text() == "i-am-c, i-am-b, ")
@@ -1574,7 +1579,8 @@ mod tests {
let buffer_b = worktree_b
.update(&mut cx_b, |worktree, cx| worktree.open_buffer("a.txt", cx))
.await
- .unwrap();
+ .unwrap()
+ .0;
let mtime = buffer_b.read_with(&cx_b, |buf, _| buf.file().unwrap().mtime());
buffer_b.update(&mut cx_b, |buf, cx| buf.edit([0..0], "world ", cx));
@@ -1669,7 +1675,8 @@ mod tests {
let buffer_a = worktree_a
.update(&mut cx_a, |tree, cx| tree.open_buffer("a.txt", cx))
.await
- .unwrap();
+ .unwrap()
+ .0;
// Start opening the same buffer as client B
let buffer_b = cx_b
@@ -1681,7 +1688,7 @@ mod tests {
buffer_a.update(&mut cx_a, |buf, cx| buf.edit([0..0], "z", cx));
let text = buffer_a.read_with(&cx_a, |buf, _| buf.text());
- let buffer_b = buffer_b.await.unwrap();
+ let buffer_b = buffer_b.await.unwrap().0;
buffer_b.condition(&cx_b, |buf, _| buf.text() == text).await;
}
@@ -2016,7 +2023,8 @@ mod tests {
.background()
.spawn(worktree_b.update(&mut cx_b, |worktree, cx| worktree.open_buffer("a.rs", cx)))
.await
- .unwrap();
+ .unwrap()
+ .0;
buffer_b.read_with(&cx_b, |buffer, _| {
assert_eq!(
@@ -2128,7 +2136,8 @@ mod tests {
.background()
.spawn(worktree_b.update(&mut cx_b, |worktree, cx| worktree.open_buffer("a.rs", cx)))
.await
- .unwrap();
+ .unwrap()
+ .0;
let format = buffer_b.update(&mut cx_b, |buffer, cx| buffer.format(cx));
let (request_id, _) = fake_language_server