Detailed changes
@@ -1192,7 +1192,7 @@ dependencies = [
[[package]]
name = "collab"
-version = "0.8.3"
+version = "0.9.0"
dependencies = [
"anyhow",
"async-tungstenite",
@@ -1834,6 +1834,7 @@ dependencies = [
"editor",
"gpui",
"language",
+ "lsp",
"postage",
"project",
"serde_json",
@@ -8522,7 +8523,7 @@ checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
[[package]]
name = "zed"
-version = "0.83.0"
+version = "0.84.0"
dependencies = [
"activity_indicator",
"anyhow",
@@ -76,6 +76,7 @@ serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] }
rand = { version = "0.8" }
postage = { version = "0.5", features = ["futures-traits"] }
+smallvec = { version = "1.6", features = ["union"] }
[patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "c51896d32dcc11a38e41f36e3deb1a6a9c4f4b14" }
@@ -197,7 +197,20 @@
// environment. Use `:` to separate multiple values.
"env": {
// "KEY": "value1:value2"
- }
+ },
+ // Set the terminal's line height.
+ // May take 3 values:
+ // 1. Use a line height that's comfortable for reading, 1.618
+      //     "line_height": "comfortable",
+ // 2. Use a standard line height, 1.3. This option is useful for TUIs,
+ // particularly if they use box characters
+ // "line_height": "standard",
+ // 3. Use a custom line height.
+ // "line_height": {
+ // "custom": 2
+ // },
+ //
+ "line_height": "comfortable"
// Set the terminal's font size. If this option is not included,
// the terminal will default to matching the buffer's font size.
// "font_size": "15"
@@ -18,4 +18,4 @@ settings = { path = "../settings" }
util = { path = "../util" }
workspace = { path = "../workspace" }
futures = "0.3"
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
@@ -9,4 +9,4 @@ path = "src/clock.rs"
doctest = false
[dependencies]
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
default-run = "collab"
edition = "2021"
name = "collab"
-version = "0.8.3"
+version = "0.9.0"
publish = false
[[bin]]
@@ -22,6 +22,7 @@ use language::{
LanguageConfig, OffsetRangeExt, Point, Rope,
};
use live_kit_client::MacOSDisplay;
+use lsp::LanguageServerId;
use project::{search::SearchQuery, DiagnosticSummary, Project, ProjectPath};
use rand::prelude::*;
use serde_json::json;
@@ -3475,6 +3476,7 @@ async fn test_collaborating_with_diagnostics(
worktree_id,
path: Arc::from(Path::new("a.rs")),
},
+ LanguageServerId(0),
DiagnosticSummary {
error_count: 1,
warning_count: 0,
@@ -3510,6 +3512,7 @@ async fn test_collaborating_with_diagnostics(
worktree_id,
path: Arc::from(Path::new("a.rs")),
},
+ LanguageServerId(0),
DiagnosticSummary {
error_count: 1,
warning_count: 0,
@@ -3550,10 +3553,10 @@ async fn test_collaborating_with_diagnostics(
worktree_id,
path: Arc::from(Path::new("a.rs")),
},
+ LanguageServerId(0),
DiagnosticSummary {
error_count: 1,
warning_count: 1,
- ..Default::default()
},
)]
);
@@ -3566,10 +3569,10 @@ async fn test_collaborating_with_diagnostics(
worktree_id,
path: Arc::from(Path::new("a.rs")),
},
+ LanguageServerId(0),
DiagnosticSummary {
error_count: 1,
warning_count: 1,
- ..Default::default()
},
)]
);
@@ -13,4 +13,4 @@ gpui = { path = "../gpui" }
menu = { path = "../menu" }
settings = { path = "../settings" }
theme = { path = "../theme" }
-smallvec = "1.6"
+smallvec = { workspace = true }
@@ -14,7 +14,7 @@ use language::{
ToPointUtf16,
};
use log::{debug, error};
-use lsp::LanguageServer;
+use lsp::{LanguageServer, LanguageServerId};
use node_runtime::NodeRuntime;
use request::{LogMessage, StatusNotification};
use settings::Settings;
@@ -27,8 +27,7 @@ use std::{
sync::Arc,
};
use util::{
- channel::ReleaseChannel, fs::remove_matching, github::latest_github_release, http::HttpClient,
- paths, ResultExt,
+ fs::remove_matching, github::latest_github_release, http::HttpClient, paths, ResultExt,
};
const COPILOT_AUTH_NAMESPACE: &'static str = "copilot_auth";
@@ -41,15 +40,6 @@ actions!(
);
pub fn init(http: Arc<dyn HttpClient>, node_runtime: Arc<NodeRuntime>, cx: &mut AppContext) {
- // Disable Copilot for stable releases.
- if *cx.global::<ReleaseChannel>() == ReleaseChannel::Stable {
- cx.update_global::<collections::CommandPaletteFilter, _, _>(|filter, _cx| {
- filter.filtered_namespaces.insert(COPILOT_NAMESPACE);
- filter.filtered_namespaces.insert(COPILOT_AUTH_NAMESPACE);
- });
- return;
- }
-
let copilot = cx.add_model({
let node_runtime = node_runtime.clone();
move |cx| Copilot::start(http, node_runtime, cx)
@@ -380,7 +370,7 @@ impl Copilot {
let node_path = node_runtime.binary_path().await?;
let arguments: &[OsString] = &[server_path.into(), "--stdio".into()];
let server = LanguageServer::new(
- 0,
+ LanguageServerId(0),
&node_path,
arguments,
Path::new("/"),
@@ -24,55 +24,57 @@ struct OpenGithub;
const COPILOT_SIGN_UP_URL: &'static str = "https://github.com/features/copilot";
pub fn init(cx: &mut AppContext) {
- let copilot = Copilot::global(cx).unwrap();
+ if let Some(copilot) = Copilot::global(cx) {
+ let mut code_verification: Option<ViewHandle<CopilotCodeVerification>> = None;
+ cx.observe(&copilot, move |copilot, cx| {
+ let status = copilot.read(cx).status();
- let mut code_verification: Option<ViewHandle<CopilotCodeVerification>> = None;
- cx.observe(&copilot, move |copilot, cx| {
- let status = copilot.read(cx).status();
-
- match &status {
- crate::Status::SigningIn { prompt } => {
- if let Some(code_verification) = code_verification.as_mut() {
- if cx.has_window(code_verification.window_id()) {
- cx.update_window(code_verification.window_id(), |cx| {
- code_verification.update(cx, |code_verification_view, cx| {
- code_verification_view.set_status(status, cx);
+ match &status {
+ crate::Status::SigningIn { prompt } => {
+ if let Some(code_verification_handle) = code_verification.as_mut() {
+ let window_id = code_verification_handle.window_id();
+ if cx.has_window(window_id) {
+ cx.update_window(window_id, |cx| {
+ code_verification_handle.update(cx, |code_verification, cx| {
+ code_verification.set_status(status, cx)
+ });
cx.activate_window();
});
- });
- } else {
- *code_verification = create_copilot_auth_window(cx, &status);
+ } else {
+ code_verification = Some(create_copilot_auth_window(cx, &status));
+ }
+ } else if let Some(_prompt) = prompt {
+ code_verification = Some(create_copilot_auth_window(cx, &status));
}
- } else if let Some(_prompt) = prompt {
- code_verification = Some(create_copilot_auth_window(cx, &status));
}
- }
- Status::Authorized | Status::Unauthorized => {
- if let Some(code_verification) = code_verification.as_ref() {
- cx.update_window(code_verification.window_id(), |cx| {
- code_verification.update(cx, |code_verification, cx| {
- code_verification.set_status(status, cx);
+ Status::Authorized | Status::Unauthorized => {
+ if let Some(code_verification) = code_verification.as_ref() {
+ let window_id = code_verification.window_id();
+ cx.update_window(window_id, |cx| {
+ code_verification.update(cx, |code_verification, cx| {
+ code_verification.set_status(status, cx)
+ });
+
+ cx.platform().activate(true);
cx.activate_window();
});
-
- cx.platform().activate(true);
- });
+ }
}
- }
- _ => {
- if let Some(code_verification) = code_verification.take() {
- cx.remove_window(code_verification.window_id());
+ _ => {
+ if let Some(code_verification) = code_verification.take() {
+ cx.remove_window(code_verification.window_id());
+ }
}
}
- }
- })
- .detach();
+ })
+ .detach();
- cx.add_action(
- |code_verification: &mut CopilotCodeVerification, _: &ClickedConnect, _| {
- code_verification.connect_clicked = true;
- },
- );
+ cx.add_action(
+ |code_verification: &mut CopilotCodeVerification, _: &ClickedConnect, _| {
+ code_verification.connect_clicked = true;
+ },
+ );
+ }
}
fn create_copilot_auth_window(
@@ -10,10 +10,11 @@ doctest = false
[dependencies]
anyhow = "1.0"
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
collections = { path = "../collections" }
editor = { path = "../editor" }
language = { path = "../language" }
+lsp = { path = "../lsp" }
gpui = { path = "../gpui" }
project = { path = "../project" }
settings = { path = "../settings" }
@@ -27,6 +28,7 @@ unindent = "0.1"
client = { path = "../client", features = ["test-support"] }
editor = { path = "../editor", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
+lsp = { path = "../lsp", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
workspace = { path = "../workspace", features = ["test-support"] }
serde_json = { workspace = true }
@@ -1,7 +1,7 @@
pub mod items;
use anyhow::Result;
-use collections::{BTreeMap, HashSet};
+use collections::{BTreeSet, HashSet};
use editor::{
diagnostic_block_renderer,
display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock},
@@ -17,6 +17,7 @@ use language::{
Anchor, Bias, Buffer, Diagnostic, DiagnosticEntry, DiagnosticSeverity, Point, Selection,
SelectionGoal,
};
+use lsp::LanguageServerId;
use project::{DiagnosticSummary, Project, ProjectPath};
use serde_json::json;
use settings::Settings;
@@ -55,7 +56,7 @@ struct ProjectDiagnosticsEditor {
summary: DiagnosticSummary,
excerpts: ModelHandle<MultiBuffer>,
path_states: Vec<PathState>,
- paths_to_update: BTreeMap<ProjectPath, usize>,
+ paths_to_update: BTreeSet<(ProjectPath, LanguageServerId)>,
}
struct PathState {
@@ -71,6 +72,7 @@ struct Jump {
}
struct DiagnosticGroupState {
+ language_server_id: LanguageServerId,
primary_diagnostic: DiagnosticEntry<language::Anchor>,
primary_excerpt_ix: usize,
excerpts: Vec<ExcerptId>,
@@ -115,7 +117,7 @@ impl View for ProjectDiagnosticsEditor {
}),
"summary": self.summary,
"paths_to_update": self.paths_to_update.iter().map(|(path, server_id)|
- (path.path.to_string_lossy(), server_id)
+ (path.path.to_string_lossy(), server_id.0)
).collect::<Vec<_>>(),
"paths_states": self.path_states.iter().map(|state|
json!({
@@ -148,7 +150,7 @@ impl ProjectDiagnosticsEditor {
path,
} => {
this.paths_to_update
- .insert(path.clone(), *language_server_id);
+ .insert((path.clone(), *language_server_id));
}
_ => {}
})
@@ -167,7 +169,7 @@ impl ProjectDiagnosticsEditor {
let project = project_handle.read(cx);
let paths_to_update = project
.diagnostic_summaries(cx)
- .map(|e| (e.0, e.1.language_server_id))
+ .map(|(path, server_id, _)| (path, server_id))
.collect();
let summary = project.diagnostic_summary(cx);
let mut this = Self {
@@ -195,9 +197,13 @@ impl ProjectDiagnosticsEditor {
}
}
- fn update_excerpts(&mut self, language_server_id: Option<usize>, cx: &mut ViewContext<Self>) {
+ fn update_excerpts(
+ &mut self,
+ language_server_id: Option<LanguageServerId>,
+ cx: &mut ViewContext<Self>,
+ ) {
let mut paths = Vec::new();
- self.paths_to_update.retain(|path, server_id| {
+ self.paths_to_update.retain(|(path, server_id)| {
if language_server_id
.map_or(true, |language_server_id| language_server_id == *server_id)
{
@@ -214,7 +220,9 @@ impl ProjectDiagnosticsEditor {
let buffer = project
.update(&mut cx, |project, cx| project.open_buffer(path.clone(), cx))
.await?;
- this.update(&mut cx, |this, cx| this.populate_excerpts(path, buffer, cx))?;
+ this.update(&mut cx, |this, cx| {
+ this.populate_excerpts(path, language_server_id, buffer, cx)
+ })?;
}
Result::<_, anyhow::Error>::Ok(())
}
@@ -226,6 +234,7 @@ impl ProjectDiagnosticsEditor {
fn populate_excerpts(
&mut self,
path: ProjectPath,
+ language_server_id: Option<LanguageServerId>,
buffer: ModelHandle<Buffer>,
cx: &mut ViewContext<Self>,
) {
@@ -264,9 +273,9 @@ impl ProjectDiagnosticsEditor {
let excerpts_snapshot = self.excerpts.update(cx, |excerpts, excerpts_cx| {
let mut old_groups = path_state.diagnostic_groups.iter().enumerate().peekable();
let mut new_groups = snapshot
- .diagnostic_groups()
+ .diagnostic_groups(language_server_id)
.into_iter()
- .filter(|group| {
+ .filter(|(_, group)| {
group.entries[group.primary_ix].diagnostic.severity
<= DiagnosticSeverity::WARNING
})
@@ -278,12 +287,27 @@ impl ProjectDiagnosticsEditor {
match (old_groups.peek(), new_groups.peek()) {
(None, None) => break,
(None, Some(_)) => to_insert = new_groups.next(),
- (Some(_), None) => to_remove = old_groups.next(),
- (Some((_, old_group)), Some(new_group)) => {
+ (Some((_, old_group)), None) => {
+ if language_server_id.map_or(true, |id| id == old_group.language_server_id)
+ {
+ to_remove = old_groups.next();
+ } else {
+ to_keep = old_groups.next();
+ }
+ }
+ (Some((_, old_group)), Some((_, new_group))) => {
let old_primary = &old_group.primary_diagnostic;
let new_primary = &new_group.entries[new_group.primary_ix];
match compare_diagnostics(old_primary, new_primary, &snapshot) {
- Ordering::Less => to_remove = old_groups.next(),
+ Ordering::Less => {
+ if language_server_id
+ .map_or(true, |id| id == old_group.language_server_id)
+ {
+ to_remove = old_groups.next();
+ } else {
+ to_keep = old_groups.next();
+ }
+ }
Ordering::Equal => {
to_keep = old_groups.next();
new_groups.next();
@@ -293,8 +317,9 @@ impl ProjectDiagnosticsEditor {
}
}
- if let Some(group) = to_insert {
+ if let Some((language_server_id, group)) = to_insert {
let mut group_state = DiagnosticGroupState {
+ language_server_id,
primary_diagnostic: group.entries[group.primary_ix].clone(),
primary_excerpt_ix: 0,
excerpts: Default::default(),
@@ -772,26 +797,24 @@ mod tests {
};
use gpui::{TestAppContext, WindowContext};
use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16, Unclipped};
+ use project::FakeFs;
use serde_json::json;
use unindent::Unindent as _;
- use workspace::AppState;
#[gpui::test]
async fn test_diagnostics(cx: &mut TestAppContext) {
- let app_state = cx.update(AppState::test);
- app_state
- .fs
- .as_fake()
- .insert_tree(
- "/test",
- json!({
- "consts.rs": "
+ Settings::test_async(cx);
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/test",
+ json!({
+ "consts.rs": "
const a: i32 = 'a';
const b: i32 = c;
"
- .unindent(),
+ .unindent(),
- "main.rs": "
+ "main.rs": "
fn main() {
let x = vec![];
let y = vec![];
@@ -803,19 +826,20 @@ mod tests {
d(x);
}
"
- .unindent(),
- }),
- )
- .await;
+ .unindent(),
+ }),
+ )
+ .await;
- let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
+ let language_server_id = LanguageServerId(0);
+ let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
// Create some diagnostics
project.update(cx, |project, cx| {
project
.update_diagnostic_entries(
- 0,
+ language_server_id,
PathBuf::from("/test/main.rs"),
None,
vec![
@@ -964,10 +988,10 @@ mod tests {
// Diagnostics are added for another earlier path.
project.update(cx, |project, cx| {
- project.disk_based_diagnostics_started(0, cx);
+ project.disk_based_diagnostics_started(language_server_id, cx);
project
.update_diagnostic_entries(
- 0,
+ language_server_id,
PathBuf::from("/test/consts.rs"),
None,
vec![DiagnosticEntry {
@@ -984,7 +1008,7 @@ mod tests {
cx,
)
.unwrap();
- project.disk_based_diagnostics_finished(0, cx);
+ project.disk_based_diagnostics_finished(language_server_id, cx);
});
view.next_notification(cx).await;
@@ -1064,10 +1088,10 @@ mod tests {
// Diagnostics are added to the first path
project.update(cx, |project, cx| {
- project.disk_based_diagnostics_started(0, cx);
+ project.disk_based_diagnostics_started(language_server_id, cx);
project
.update_diagnostic_entries(
- 0,
+ language_server_id,
PathBuf::from("/test/consts.rs"),
None,
vec![
@@ -1100,7 +1124,7 @@ mod tests {
cx,
)
.unwrap();
- project.disk_based_diagnostics_finished(0, cx);
+ project.disk_based_diagnostics_finished(language_server_id, cx);
});
view.next_notification(cx).await;
@@ -1180,6 +1204,272 @@ mod tests {
});
}
+ #[gpui::test]
+ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
+ Settings::test_async(cx);
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/test",
+ json!({
+ "main.js": "
+ a();
+ b();
+ c();
+ d();
+ e();
+ ".unindent()
+ }),
+ )
+ .await;
+
+ let server_id_1 = LanguageServerId(100);
+ let server_id_2 = LanguageServerId(101);
+ let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
+ let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
+
+ let view = cx.add_view(&workspace, |cx| {
+ ProjectDiagnosticsEditor::new(project.clone(), workspace.downgrade(), cx)
+ });
+
+ // Two language servers start updating diagnostics
+ project.update(cx, |project, cx| {
+ project.disk_based_diagnostics_started(server_id_1, cx);
+ project.disk_based_diagnostics_started(server_id_2, cx);
+ project
+ .update_diagnostic_entries(
+ server_id_1,
+ PathBuf::from("/test/main.js"),
+ None,
+ vec![DiagnosticEntry {
+ range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 1)),
+ diagnostic: Diagnostic {
+ message: "error 1".to_string(),
+ severity: DiagnosticSeverity::WARNING,
+ is_primary: true,
+ is_disk_based: true,
+ group_id: 1,
+ ..Default::default()
+ },
+ }],
+ cx,
+ )
+ .unwrap();
+ project
+ .update_diagnostic_entries(
+ server_id_2,
+ PathBuf::from("/test/main.js"),
+ None,
+ vec![DiagnosticEntry {
+ range: Unclipped(PointUtf16::new(1, 0))..Unclipped(PointUtf16::new(1, 1)),
+ diagnostic: Diagnostic {
+ message: "warning 1".to_string(),
+ severity: DiagnosticSeverity::ERROR,
+ is_primary: true,
+ is_disk_based: true,
+ group_id: 2,
+ ..Default::default()
+ },
+ }],
+ cx,
+ )
+ .unwrap();
+ });
+
+ // The first language server finishes
+ project.update(cx, |project, cx| {
+ project.disk_based_diagnostics_finished(server_id_1, cx);
+ });
+
+ // Only the first language server's diagnostics are shown.
+ cx.foreground().run_until_parked();
+ view.update(cx, |view, cx| {
+ assert_eq!(
+ editor_blocks(&view.editor, cx),
+ [
+ (0, "path header block".into()),
+ (2, "diagnostic header".into()),
+ ]
+ );
+ assert_eq!(
+ view.editor.update(cx, |editor, cx| editor.display_text(cx)),
+ concat!(
+ "\n", // filename
+ "\n", // padding
+ // diagnostic group 1
+ "\n", // primary message
+ "\n", // padding
+ "a();\n", //
+ "b();",
+ )
+ );
+ });
+
+ // The second language server finishes
+ project.update(cx, |project, cx| {
+ project.disk_based_diagnostics_finished(server_id_2, cx);
+ });
+
+ // Both language server's diagnostics are shown.
+ cx.foreground().run_until_parked();
+ view.update(cx, |view, cx| {
+ assert_eq!(
+ editor_blocks(&view.editor, cx),
+ [
+ (0, "path header block".into()),
+ (2, "diagnostic header".into()),
+ (6, "collapsed context".into()),
+ (7, "diagnostic header".into()),
+ ]
+ );
+ assert_eq!(
+ view.editor.update(cx, |editor, cx| editor.display_text(cx)),
+ concat!(
+ "\n", // filename
+ "\n", // padding
+ // diagnostic group 1
+ "\n", // primary message
+ "\n", // padding
+ "a();\n", // location
+ "b();\n", //
+ "\n", // collapsed context
+ // diagnostic group 2
+ "\n", // primary message
+ "\n", // padding
+ "a();\n", // context
+ "b();\n", //
+ "c();", // context
+ )
+ );
+ });
+
+ // Both language servers start updating diagnostics, and the first server finishes.
+ project.update(cx, |project, cx| {
+ project.disk_based_diagnostics_started(server_id_1, cx);
+ project.disk_based_diagnostics_started(server_id_2, cx);
+ project
+ .update_diagnostic_entries(
+ server_id_1,
+ PathBuf::from("/test/main.js"),
+ None,
+ vec![DiagnosticEntry {
+ range: Unclipped(PointUtf16::new(2, 0))..Unclipped(PointUtf16::new(2, 1)),
+ diagnostic: Diagnostic {
+ message: "warning 2".to_string(),
+ severity: DiagnosticSeverity::WARNING,
+ is_primary: true,
+ is_disk_based: true,
+ group_id: 1,
+ ..Default::default()
+ },
+ }],
+ cx,
+ )
+ .unwrap();
+ project
+ .update_diagnostic_entries(
+ server_id_2,
+ PathBuf::from("/test/main.rs"),
+ None,
+ vec![],
+ cx,
+ )
+ .unwrap();
+ project.disk_based_diagnostics_finished(server_id_1, cx);
+ });
+
+ // Only the first language server's diagnostics are updated.
+ cx.foreground().run_until_parked();
+ view.update(cx, |view, cx| {
+ assert_eq!(
+ editor_blocks(&view.editor, cx),
+ [
+ (0, "path header block".into()),
+ (2, "diagnostic header".into()),
+ (7, "collapsed context".into()),
+ (8, "diagnostic header".into()),
+ ]
+ );
+ assert_eq!(
+ view.editor.update(cx, |editor, cx| editor.display_text(cx)),
+ concat!(
+ "\n", // filename
+ "\n", // padding
+ // diagnostic group 1
+ "\n", // primary message
+ "\n", // padding
+ "a();\n", // location
+ "b();\n", //
+ "c();\n", // context
+ "\n", // collapsed context
+ // diagnostic group 2
+ "\n", // primary message
+ "\n", // padding
+ "b();\n", // context
+ "c();\n", //
+ "d();", // context
+ )
+ );
+ });
+
+ // The second language server finishes.
+ project.update(cx, |project, cx| {
+ project
+ .update_diagnostic_entries(
+ server_id_2,
+ PathBuf::from("/test/main.js"),
+ None,
+ vec![DiagnosticEntry {
+ range: Unclipped(PointUtf16::new(3, 0))..Unclipped(PointUtf16::new(3, 1)),
+ diagnostic: Diagnostic {
+ message: "warning 2".to_string(),
+ severity: DiagnosticSeverity::WARNING,
+ is_primary: true,
+ is_disk_based: true,
+ group_id: 1,
+ ..Default::default()
+ },
+ }],
+ cx,
+ )
+ .unwrap();
+ project.disk_based_diagnostics_finished(server_id_2, cx);
+ });
+
+ // Both language servers' diagnostics are updated.
+ cx.foreground().run_until_parked();
+ view.update(cx, |view, cx| {
+ assert_eq!(
+ editor_blocks(&view.editor, cx),
+ [
+ (0, "path header block".into()),
+ (2, "diagnostic header".into()),
+ (7, "collapsed context".into()),
+ (8, "diagnostic header".into()),
+ ]
+ );
+ assert_eq!(
+ view.editor.update(cx, |editor, cx| editor.display_text(cx)),
+ concat!(
+ "\n", // filename
+ "\n", // padding
+ // diagnostic group 1
+ "\n", // primary message
+ "\n", // padding
+ "b();\n", // location
+ "c();\n", //
+ "d();\n", // context
+ "\n", // collapsed context
+ // diagnostic group 2
+ "\n", // primary message
+ "\n", // padding
+ "c();\n", // context
+ "d();\n", //
+ "e();", // context
+ )
+ );
+ });
+ }
+
fn editor_blocks(editor: &ViewHandle<Editor>, cx: &mut WindowContext) -> Vec<(u32, String)> {
editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
@@ -7,6 +7,7 @@ use gpui::{
WeakViewHandle,
};
use language::Diagnostic;
+use lsp::LanguageServerId;
use project::Project;
use settings::Settings;
use workspace::{item::ItemHandle, StatusItemView};
@@ -15,7 +16,7 @@ pub struct DiagnosticIndicator {
summary: project::DiagnosticSummary,
active_editor: Option<WeakViewHandle<Editor>>,
current_diagnostic: Option<Diagnostic>,
- in_progress_checks: HashSet<usize>,
+ in_progress_checks: HashSet<LanguageServerId>,
_observe_active_editor: Option<Subscription>,
}
@@ -58,7 +58,7 @@ postage = { workspace = true }
rand = { version = "0.8.3", optional = true }
serde = { workspace = true }
serde_derive = { workspace = true }
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
smol = "1.2"
tree-sitter-rust = { version = "*", optional = true }
tree-sitter-html = { version = "*", optional = true }
@@ -4411,7 +4411,7 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut gpui::TestAppContext)
cx.set_state(
&[
"one ", //
- "twoˇ", //
+ "twoˇ", //
"three ", //
"four", //
]
@@ -4486,7 +4486,7 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut gpui::TestAppContext)
&[
"one", //
"", //
- "twoˇ", //
+ "twoˇ", //
"", //
"three", //
"four", //
@@ -4501,7 +4501,7 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut gpui::TestAppContext)
cx.assert_editor_state(
&[
"one ", //
- "twoˇ", //
+ "twoˇ", //
"three ", //
"four", //
]
@@ -436,6 +436,7 @@ mod tests {
use indoc::indoc;
use language::{Diagnostic, DiagnosticSet};
+ use lsp::LanguageServerId;
use project::HoverBlock;
use smol::stream::StreamExt;
@@ -620,7 +621,7 @@ mod tests {
}],
&snapshot,
);
- buffer.update_diagnostics(set, cx);
+ buffer.update_diagnostics(LanguageServerId(0), set, cx);
});
// Hover pops diagnostic immediately
@@ -2764,6 +2764,15 @@ impl MultiBufferSnapshot {
.and_then(|(buffer, offset)| buffer.language_scope_at(offset))
}
+ pub fn language_indent_size_at<T: ToOffset>(
+ &self,
+ position: T,
+ cx: &AppContext,
+ ) -> Option<IndentSize> {
+ let (buffer_snapshot, offset) = self.point_to_buffer_offset(position)?;
+ Some(buffer_snapshot.language_indent_size_at(offset, cx))
+ }
+
pub fn is_dirty(&self) -> bool {
self.is_dirty
}
@@ -2791,7 +2800,7 @@ impl MultiBufferSnapshot {
) -> impl Iterator<Item = DiagnosticEntry<O>> + 'a
where
T: 'a + ToOffset,
- O: 'a + text::FromAnchor,
+ O: 'a + text::FromAnchor + Ord,
{
self.as_singleton()
.into_iter()
@@ -44,7 +44,7 @@ seahash = "4.1"
serde = { workspace = true }
serde_derive = { workspace = true }
serde_json = { workspace = true }
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
smol = "1.2"
time = { version = "0.3", features = ["serde", "serde-well-known"] }
tiny-skia = "0.5"
@@ -50,7 +50,7 @@ serde = { workspace = true }
serde_derive = { workspace = true }
serde_json = { workspace = true }
similar = "1.3"
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
smol = "1.2"
tree-sitter = "0.20"
tree-sitter-rust = { version = "*", optional = true }
@@ -16,9 +16,11 @@ use clock::ReplicaId;
use fs::LineEnding;
use futures::FutureExt as _;
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task};
+use lsp::LanguageServerId;
use parking_lot::Mutex;
use settings::Settings;
use similar::{ChangeTag, TextDiff};
+use smallvec::SmallVec;
use smol::future::yield_now;
use std::{
any::Any,
@@ -71,7 +73,7 @@ pub struct Buffer {
syntax_map: Mutex<SyntaxMap>,
parsing_in_background: bool,
parse_count: usize,
- diagnostics: DiagnosticSet,
+ diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
remote_selections: TreeMap<ReplicaId, SelectionSet>,
selections_update_count: usize,
diagnostics_update_count: usize,
@@ -88,7 +90,7 @@ pub struct BufferSnapshot {
pub git_diff: git::diff::BufferDiff,
pub(crate) syntax: SyntaxSnapshot,
file: Option<Arc<dyn File>>,
- diagnostics: DiagnosticSet,
+ diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
diagnostics_update_count: usize,
file_update_count: usize,
git_diff_update_count: usize,
@@ -156,6 +158,7 @@ pub struct Completion {
#[derive(Clone, Debug)]
pub struct CodeAction {
+ pub server_id: LanguageServerId,
pub range: Range<Anchor>,
pub lsp_action: lsp::CodeAction,
}
@@ -163,16 +166,20 @@ pub struct CodeAction {
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Operation {
Buffer(text::Operation),
+
UpdateDiagnostics {
+ server_id: LanguageServerId,
diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
lamport_timestamp: clock::Lamport,
},
+
UpdateSelections {
selections: Arc<[Selection<Anchor>]>,
lamport_timestamp: clock::Lamport,
line_mode: bool,
cursor_shape: CursorShape,
},
+
UpdateCompletionTriggers {
triggers: Vec<String>,
lamport_timestamp: clock::Lamport,
@@ -408,6 +415,7 @@ impl Buffer {
) -> Task<Vec<proto::Operation>> {
let mut operations = Vec::new();
operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
+
operations.extend(self.remote_selections.iter().map(|(_, set)| {
proto::serialize_operation(&Operation::UpdateSelections {
selections: set.selections.clone(),
@@ -416,10 +424,15 @@ impl Buffer {
cursor_shape: set.cursor_shape,
})
}));
- operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
- diagnostics: self.diagnostics.iter().cloned().collect(),
- lamport_timestamp: self.diagnostics_timestamp,
- }));
+
+ for (server_id, diagnostics) in &self.diagnostics {
+ operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
+ lamport_timestamp: self.diagnostics_timestamp,
+ server_id: *server_id,
+ diagnostics: diagnostics.iter().cloned().collect(),
+ }));
+ }
+
operations.push(proto::serialize_operation(
&Operation::UpdateCompletionTriggers {
triggers: self.completion_triggers.clone(),
@@ -865,13 +878,19 @@ impl Buffer {
cx.notify();
}
- pub fn update_diagnostics(&mut self, diagnostics: DiagnosticSet, cx: &mut ModelContext<Self>) {
+ pub fn update_diagnostics(
+ &mut self,
+ server_id: LanguageServerId,
+ diagnostics: DiagnosticSet,
+ cx: &mut ModelContext<Self>,
+ ) {
let lamport_timestamp = self.text.lamport_clock.tick();
let op = Operation::UpdateDiagnostics {
+ server_id,
diagnostics: diagnostics.iter().cloned().collect(),
lamport_timestamp,
};
- self.apply_diagnostic_update(diagnostics, lamport_timestamp, cx);
+ self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
self.send_operation(op, cx);
}
@@ -1579,11 +1598,13 @@ impl Buffer {
unreachable!("buffer operations should never be applied at this layer")
}
Operation::UpdateDiagnostics {
+ server_id,
diagnostics: diagnostic_set,
lamport_timestamp,
} => {
let snapshot = self.snapshot();
self.apply_diagnostic_update(
+ server_id,
DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
lamport_timestamp,
cx,
@@ -1625,12 +1646,16 @@ impl Buffer {
fn apply_diagnostic_update(
&mut self,
+ server_id: LanguageServerId,
diagnostics: DiagnosticSet,
lamport_timestamp: clock::Lamport,
cx: &mut ModelContext<Self>,
) {
if lamport_timestamp > self.diagnostics_timestamp {
- self.diagnostics = diagnostics;
+ match self.diagnostics.binary_search_by_key(&server_id, |e| e.0) {
+ Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
+ Ok(ix) => self.diagnostics[ix].1 = diagnostics,
+ };
self.diagnostics_timestamp = lamport_timestamp;
self.diagnostics_update_count += 1;
self.text.lamport_clock.observe(lamport_timestamp);
@@ -2504,14 +2529,55 @@ impl BufferSnapshot {
) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
where
T: 'a + Clone + ToOffset,
- O: 'a + FromAnchor,
+ O: 'a + FromAnchor + Ord,
{
- self.diagnostics.range(search_range, self, true, reversed)
+ let mut iterators: Vec<_> = self
+ .diagnostics
+ .iter()
+ .map(|(_, collection)| {
+ collection
+ .range::<T, O>(search_range.clone(), self, true, reversed)
+ .peekable()
+ })
+ .collect();
+
+ std::iter::from_fn(move || {
+ let (next_ix, _) = iterators
+ .iter_mut()
+ .enumerate()
+ .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
+ .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?;
+ iterators[next_ix].next()
+ })
}
- pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
+ pub fn diagnostic_groups(
+ &self,
+ language_server_id: Option<LanguageServerId>,
+ ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
let mut groups = Vec::new();
- self.diagnostics.groups(&mut groups, self);
+
+ if let Some(language_server_id) = language_server_id {
+ if let Ok(ix) = self
+ .diagnostics
+ .binary_search_by_key(&language_server_id, |e| e.0)
+ {
+ self.diagnostics[ix]
+ .1
+ .groups(language_server_id, &mut groups, self);
+ }
+ } else {
+ for (language_server_id, diagnostics) in self.diagnostics.iter() {
+ diagnostics.groups(*language_server_id, &mut groups, self);
+ }
+ }
+
+ groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
+ let a_start = &group_a.entries[group_a.primary_ix].range.start;
+ let b_start = &group_b.entries[group_b.primary_ix].range.start;
+ a_start.cmp(b_start, self).then_with(|| id_a.cmp(&id_b))
+ });
+
groups
}
@@ -2522,7 +2588,9 @@ impl BufferSnapshot {
where
O: 'a + FromAnchor,
{
- self.diagnostics.group(group_id, self)
+ self.diagnostics
+ .iter()
+ .flat_map(move |(_, set)| set.group(group_id, self))
}
pub fn diagnostics_update_count(&self) -> usize {
@@ -1866,7 +1866,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
buffer,
);
log::info!("peer {} setting diagnostics: {:?}", replica_id, diagnostics);
- buffer.update_diagnostics(diagnostics, cx);
+ buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx);
});
mutation_count -= 1;
}
@@ -1,5 +1,6 @@
use crate::Diagnostic;
use collections::HashMap;
+use lsp::LanguageServerId;
use std::{
cmp::{Ordering, Reverse},
iter,
@@ -129,7 +130,12 @@ impl DiagnosticSet {
})
}
- pub fn groups(&self, output: &mut Vec<DiagnosticGroup<Anchor>>, buffer: &text::BufferSnapshot) {
+ pub fn groups(
+ &self,
+ language_server_id: LanguageServerId,
+ output: &mut Vec<(LanguageServerId, DiagnosticGroup<Anchor>)>,
+ buffer: &text::BufferSnapshot,
+ ) {
let mut groups = HashMap::default();
for entry in self.diagnostics.iter() {
groups
@@ -144,16 +150,22 @@ impl DiagnosticSet {
entries
.iter()
.position(|entry| entry.diagnostic.is_primary)
- .map(|primary_ix| DiagnosticGroup {
- entries,
- primary_ix,
+ .map(|primary_ix| {
+ (
+ language_server_id,
+ DiagnosticGroup {
+ entries,
+ primary_ix,
+ },
+ )
})
}));
- output[start_ix..].sort_unstable_by(|a, b| {
- a.entries[a.primary_ix]
+ output[start_ix..].sort_unstable_by(|(id_a, group_a), (id_b, group_b)| {
+ group_a.entries[group_a.primary_ix]
.range
.start
- .cmp(&b.entries[b.primary_ix].range.start, buffer)
+ .cmp(&group_b.entries[group_b.primary_ix].range.start, buffer)
+ .then_with(|| id_a.cmp(&id_b))
});
}
@@ -54,6 +54,7 @@ use futures::channel::mpsc;
pub use buffer::Operation;
pub use buffer::*;
pub use diagnostic_set::DiagnosticEntry;
+pub use lsp::LanguageServerId;
pub use outline::{Outline, OutlineItem};
pub use tree_sitter::{Parser, Tree};
@@ -414,7 +415,7 @@ pub struct BracketPair {
pub struct Language {
pub(crate) config: LanguageConfig,
pub(crate) grammar: Option<Arc<Grammar>>,
- pub(crate) adapter: Option<Arc<CachedLspAdapter>>,
+ pub(crate) adapters: Vec<Arc<CachedLspAdapter>>,
#[cfg(any(test, feature = "test-support"))]
fake_adapter: Option<(
@@ -492,7 +493,7 @@ struct AvailableLanguage {
path: &'static str,
config: LanguageConfig,
grammar: tree_sitter::Language,
- lsp_adapter: Option<Arc<dyn LspAdapter>>,
+ lsp_adapters: Vec<Arc<dyn LspAdapter>>,
get_queries: fn(&str) -> LanguageQueries,
}
@@ -513,6 +514,7 @@ pub struct LanguageRegistry {
}
struct LanguageRegistryState {
+ next_language_server_id: usize,
languages: Vec<Arc<Language>>,
available_languages: Vec<AvailableLanguage>,
next_available_language_id: AvailableLanguageId,
@@ -522,11 +524,17 @@ struct LanguageRegistryState {
version: usize,
}
+pub struct PendingLanguageServer {
+ pub server_id: LanguageServerId,
+ pub task: Task<Result<lsp::LanguageServer>>,
+}
+
impl LanguageRegistry {
pub fn new(login_shell_env_loaded: Task<()>) -> Self {
let (lsp_binary_statuses_tx, lsp_binary_statuses_rx) = async_broadcast::broadcast(16);
Self {
state: RwLock::new(LanguageRegistryState {
+ next_language_server_id: 0,
languages: vec![PLAIN_TEXT.clone()],
available_languages: Default::default(),
next_available_language_id: 0,
@@ -558,7 +566,7 @@ impl LanguageRegistry {
path: &'static str,
config: LanguageConfig,
grammar: tree_sitter::Language,
- lsp_adapter: Option<Arc<dyn LspAdapter>>,
+ lsp_adapters: Vec<Arc<dyn LspAdapter>>,
get_queries: fn(&str) -> LanguageQueries,
) {
let state = &mut *self.state.write();
@@ -567,7 +575,7 @@ impl LanguageRegistry {
path,
config,
grammar,
- lsp_adapter,
+ lsp_adapters,
get_queries,
});
}
@@ -590,12 +598,13 @@ impl LanguageRegistry {
state
.available_languages
.iter()
- .filter_map(|l| l.lsp_adapter.clone())
+ .flat_map(|l| l.lsp_adapters.clone())
.chain(
state
.languages
.iter()
- .filter_map(|l| l.adapter.as_ref().map(|a| a.adapter.clone())),
+ .flat_map(|language| &language.adapters)
+ .map(|adapter| adapter.adapter.clone()),
)
.collect::<Vec<_>>()
};
@@ -721,7 +730,7 @@ impl LanguageRegistry {
let queries = (language.get_queries)(&language.path);
let language =
Language::new(language.config, Some(language.grammar))
- .with_lsp_adapter(language.lsp_adapter)
+ .with_lsp_adapters(language.lsp_adapters)
.await;
let name = language.name();
match language.with_queries(queries) {
@@ -776,16 +785,15 @@ impl LanguageRegistry {
pub fn start_language_server(
self: &Arc<Self>,
- server_id: usize,
language: Arc<Language>,
+ adapter: Arc<CachedLspAdapter>,
root_path: Arc<Path>,
http_client: Arc<dyn HttpClient>,
cx: &mut AppContext,
- ) -> Option<Task<Result<lsp::LanguageServer>>> {
+ ) -> Option<PendingLanguageServer> {
#[cfg(any(test, feature = "test-support"))]
if language.fake_adapter.is_some() {
- let language = language;
- return Some(cx.spawn(|cx| async move {
+ let task = cx.spawn(|cx| async move {
let (servers_tx, fake_adapter) = language.fake_adapter.as_ref().unwrap();
let (server, mut fake_server) = lsp::LanguageServer::fake(
fake_adapter.name.to_string(),
@@ -810,7 +818,10 @@ impl LanguageRegistry {
})
.detach();
Ok(server)
- }));
+ });
+
+ let server_id = self.state.write().next_language_server_id();
+ return Some(PendingLanguageServer { server_id, task });
}
let download_dir = self
@@ -820,11 +831,16 @@ impl LanguageRegistry {
.log_err()?;
let this = self.clone();
- let adapter = language.adapter.clone()?;
+ let language = language.clone();
+ let http_client = http_client.clone();
+ let download_dir = download_dir.clone();
+ let root_path = root_path.clone();
+ let adapter = adapter.clone();
let lsp_binary_statuses = self.lsp_binary_statuses_tx.clone();
let login_shell_env_loaded = self.login_shell_env_loaded.clone();
+ let server_id = self.state.write().next_language_server_id();
- Some(cx.spawn(|cx| async move {
+ let task = cx.spawn(|cx| async move {
login_shell_env_loaded.await;
let mut lock = this.lsp_binary_paths.lock();
@@ -856,7 +872,9 @@ impl LanguageRegistry {
)?;
Ok(server)
- }))
+ });
+
+ Some(PendingLanguageServer { server_id, task })
}
pub fn language_server_binary_statuses(
@@ -867,6 +885,10 @@ impl LanguageRegistry {
}
impl LanguageRegistryState {
+ fn next_language_server_id(&mut self) -> LanguageServerId {
+ LanguageServerId(post_inc(&mut self.next_language_server_id))
+ }
+
fn add(&mut self, language: Arc<Language>) {
if let Some(theme) = self.theme.as_ref() {
language.set_theme(&theme.editor.syntax);
@@ -974,15 +996,15 @@ impl Language {
highlight_map: Default::default(),
})
}),
- adapter: None,
+ adapters: Vec::new(),
#[cfg(any(test, feature = "test-support"))]
fake_adapter: None,
}
}
- pub fn lsp_adapter(&self) -> Option<Arc<CachedLspAdapter>> {
- self.adapter.clone()
+ pub fn lsp_adapters(&self) -> &[Arc<CachedLspAdapter>] {
+ &self.adapters
}
pub fn id(&self) -> Option<usize> {
@@ -1209,9 +1231,9 @@ impl Language {
Arc::get_mut(self.grammar.as_mut().unwrap()).unwrap()
}
- pub async fn with_lsp_adapter(mut self, lsp_adapter: Option<Arc<dyn LspAdapter>>) -> Self {
- if let Some(adapter) = lsp_adapter {
- self.adapter = Some(CachedLspAdapter::new(adapter).await);
+ pub async fn with_lsp_adapters(mut self, lsp_adapters: Vec<Arc<dyn LspAdapter>>) -> Self {
+ for adapter in lsp_adapters {
+ self.adapters.push(CachedLspAdapter::new(adapter).await);
}
self
}
@@ -1224,7 +1246,7 @@ impl Language {
let (servers_tx, servers_rx) = mpsc::unbounded();
self.fake_adapter = Some((servers_tx, fake_lsp_adapter.clone()));
let adapter = CachedLspAdapter::new(Arc::new(fake_lsp_adapter)).await;
- self.adapter = Some(adapter);
+ self.adapters = vec![adapter];
servers_rx
}
@@ -1233,28 +1255,31 @@ impl Language {
}
pub async fn disk_based_diagnostic_sources(&self) -> &[String] {
- match self.adapter.as_ref() {
+ match self.adapters.first().as_ref() {
Some(adapter) => &adapter.disk_based_diagnostic_sources,
None => &[],
}
}
pub async fn disk_based_diagnostics_progress_token(&self) -> Option<&str> {
- if let Some(adapter) = self.adapter.as_ref() {
- adapter.disk_based_diagnostics_progress_token.as_deref()
- } else {
- None
+ for adapter in &self.adapters {
+ let token = adapter.disk_based_diagnostics_progress_token.as_deref();
+ if token.is_some() {
+ return token;
+ }
}
+
+ None
}
pub async fn process_diagnostics(&self, diagnostics: &mut lsp::PublishDiagnosticsParams) {
- if let Some(processor) = self.adapter.as_ref() {
- processor.process_diagnostics(diagnostics).await;
+ for adapter in &self.adapters {
+ adapter.process_diagnostics(diagnostics).await;
}
}
pub async fn process_completion(self: &Arc<Self>, completion: &mut lsp::CompletionItem) {
- if let Some(adapter) = self.adapter.as_ref() {
+ for adapter in &self.adapters {
adapter.process_completion(completion).await;
}
}
@@ -1263,7 +1288,8 @@ impl Language {
self: &Arc<Self>,
completion: &lsp::CompletionItem,
) -> Option<CodeLabel> {
- self.adapter
+ self.adapters
+ .first()
.as_ref()?
.label_for_completion(completion, self)
.await
@@ -1274,7 +1300,8 @@ impl Language {
name: &str,
kind: lsp::SymbolKind,
) -> Option<CodeLabel> {
- self.adapter
+ self.adapters
+ .first()
.as_ref()?
.label_for_symbol(name, kind, self)
.await
@@ -1559,7 +1586,7 @@ mod tests {
..Default::default()
},
tree_sitter_javascript::language(),
- None,
+ vec![],
|_| Default::default(),
);
@@ -1595,7 +1622,7 @@ mod tests {
..Default::default()
},
tree_sitter_json::language(),
- None,
+ vec![],
|_| Default::default(),
);
languages.register(
@@ -1606,7 +1633,7 @@ mod tests {
..Default::default()
},
tree_sitter_rust::language(),
- None,
+ vec![],
|_| Default::default(),
);
assert_eq!(
@@ -4,7 +4,7 @@ use crate::{
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
-use lsp::DiagnosticSeverity;
+use lsp::{DiagnosticSeverity, LanguageServerId};
use rpc::proto;
use std::{ops::Range, sync::Arc};
use text::*;
@@ -40,6 +40,7 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation {
crate::Operation::Buffer(text::Operation::Edit(edit)) => {
proto::operation::Variant::Edit(serialize_edit_operation(edit))
}
+
crate::Operation::Buffer(text::Operation::Undo {
undo,
lamport_timestamp,
@@ -58,6 +59,7 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation {
})
.collect(),
}),
+
crate::Operation::UpdateSelections {
selections,
line_mode,
@@ -70,14 +72,18 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation {
line_mode: *line_mode,
cursor_shape: serialize_cursor_shape(cursor_shape) as i32,
}),
+
crate::Operation::UpdateDiagnostics {
- diagnostics,
lamport_timestamp,
+ server_id,
+ diagnostics,
} => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics {
replica_id: lamport_timestamp.replica_id as u32,
lamport_timestamp: lamport_timestamp.value,
+ server_id: server_id.0 as u64,
diagnostics: serialize_diagnostics(diagnostics.iter()),
}),
+
crate::Operation::UpdateCompletionTriggers {
triggers,
lamport_timestamp,
@@ -267,11 +273,12 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operati
}
proto::operation::Variant::UpdateDiagnostics(message) => {
crate::Operation::UpdateDiagnostics {
- diagnostics: deserialize_diagnostics(message.diagnostics),
lamport_timestamp: clock::Lamport {
replica_id: message.replica_id as ReplicaId,
value: message.lamport_timestamp,
},
+ server_id: LanguageServerId(message.server_id as usize),
+ diagnostics: deserialize_diagnostics(message.diagnostics),
}
}
proto::operation::Variant::UpdateCompletionTriggers(message) => {
@@ -462,6 +469,7 @@ pub async fn deserialize_completion(
pub fn serialize_code_action(action: &CodeAction) -> proto::CodeAction {
proto::CodeAction {
+ server_id: action.server_id.0 as u64,
start: Some(serialize_anchor(&action.range.start)),
end: Some(serialize_anchor(&action.range.end)),
lsp_action: serde_json::to_vec(&action.lsp_action).unwrap(),
@@ -479,6 +487,7 @@ pub fn deserialize_code_action(action: proto::CodeAction) -> Result<CodeAction>
.ok_or_else(|| anyhow!("invalid end"))?;
let lsp_action = serde_json::from_slice(&action.lsp_action)?;
Ok(CodeAction {
+ server_id: LanguageServerId(action.server_id as usize),
range: start..end,
lsp_action,
})
@@ -16,6 +16,7 @@ use smol::{
process::{self, Child},
};
use std::{
+ fmt,
future::Future,
io::Write,
path::PathBuf,
@@ -35,7 +36,7 @@ type NotificationHandler = Box<dyn Send + FnMut(Option<usize>, &str, AsyncAppCon
type ResponseHandler = Box<dyn Send + FnOnce(Result<&str, Error>)>;
pub struct LanguageServer {
- server_id: usize,
+ server_id: LanguageServerId,
next_id: AtomicUsize,
outbound_tx: channel::Sender<Vec<u8>>,
name: String,
@@ -51,6 +52,10 @@ pub struct LanguageServer {
_server: Option<Child>,
}
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[repr(transparent)]
+pub struct LanguageServerId(pub usize);
+
pub struct Subscription {
method: &'static str,
notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
@@ -107,7 +112,7 @@ struct Error {
impl LanguageServer {
pub fn new<T: AsRef<std::ffi::OsStr>>(
- server_id: usize,
+ server_id: LanguageServerId,
binary_path: &Path,
arguments: &[T],
root_path: &Path,
@@ -158,7 +163,7 @@ impl LanguageServer {
}
fn new_internal<Stdin, Stdout, F>(
- server_id: usize,
+ server_id: LanguageServerId,
stdin: Stdin,
stdout: Stdout,
server: Option<Child>,
@@ -581,7 +586,7 @@ impl LanguageServer {
&self.capabilities
}
- pub fn server_id(&self) -> usize {
+ pub fn server_id(&self) -> LanguageServerId {
self.server_id
}
@@ -685,6 +690,12 @@ impl Subscription {
}
}
+impl fmt::Display for LanguageServerId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
impl Drop for Subscription {
fn drop(&mut self) {
self.notification_handlers.lock().remove(self.method);
@@ -720,7 +731,7 @@ impl LanguageServer {
let (notifications_tx, notifications_rx) = channel::unbounded();
let server = Self::new_internal(
- 0,
+ LanguageServerId(0),
stdin_writer,
stdout_reader,
None,
@@ -731,7 +742,7 @@ impl LanguageServer {
);
let fake = FakeLanguageServer {
server: Arc::new(Self::new_internal(
- 0,
+ LanguageServerId(0),
stdout_writer,
stdin_reader,
None,
@@ -12,7 +12,7 @@ use language::{
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CharKind, CodeAction,
Completion, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Unclipped,
};
-use lsp::{DocumentHighlightKind, LanguageServer, ServerCapabilities};
+use lsp::{DocumentHighlightKind, LanguageServer, LanguageServerId, ServerCapabilities};
use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};
use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc};
@@ -33,21 +33,25 @@ pub(crate) trait LspCommand: 'static + Sized {
language_server: &Arc<LanguageServer>,
cx: &AppContext,
) -> <Self::LspRequest as lsp::request::Request>::Params;
+
async fn response_from_lsp(
self,
message: <Self::LspRequest as lsp::request::Request>::Result,
project: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
+ server_id: LanguageServerId,
cx: AsyncAppContext,
) -> Result<Self::Response>;
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> Self::ProtoRequest;
+
async fn from_proto(
message: Self::ProtoRequest,
project: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
cx: AsyncAppContext,
) -> Result<Self>;
+
fn response_to_proto(
response: Self::Response,
project: &mut Project,
@@ -55,6 +59,7 @@ pub(crate) trait LspCommand: 'static + Sized {
buffer_version: &clock::Global,
cx: &mut AppContext,
) -> <Self::ProtoRequest as proto::RequestMessage>::Response;
+
async fn response_from_proto(
self,
message: <Self::ProtoRequest as proto::RequestMessage>::Response,
@@ -62,6 +67,7 @@ pub(crate) trait LspCommand: 'static + Sized {
buffer: ModelHandle<Buffer>,
cx: AsyncAppContext,
) -> Result<Self::Response>;
+
fn buffer_id_from_proto(message: &Self::ProtoRequest) -> u64;
}
@@ -137,6 +143,7 @@ impl LspCommand for PrepareRename {
message: Option<lsp::PrepareRenameResponse>,
_: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
+ _: LanguageServerId,
cx: AsyncAppContext,
) -> Result<Option<Range<Anchor>>> {
buffer.read_with(&cx, |buffer, _| {
@@ -263,10 +270,12 @@ impl LspCommand for PerformRename {
message: Option<lsp::WorkspaceEdit>,
project: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
+ server_id: LanguageServerId,
mut cx: AsyncAppContext,
) -> Result<ProjectTransaction> {
if let Some(edit) = message {
- let (lsp_adapter, lsp_server) = language_server_for_buffer(&project, &buffer, &mut cx)?;
+ let (lsp_adapter, lsp_server) =
+ language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
Project::deserialize_workspace_edit(
project,
edit,
@@ -380,9 +389,10 @@ impl LspCommand for GetDefinition {
message: Option<lsp::GotoDefinitionResponse>,
project: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
+ server_id: LanguageServerId,
cx: AsyncAppContext,
) -> Result<Vec<LocationLink>> {
- location_links_from_lsp(message, project, buffer, cx).await
+ location_links_from_lsp(message, project, buffer, server_id, cx).await
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetDefinition {
@@ -472,9 +482,10 @@ impl LspCommand for GetTypeDefinition {
message: Option<lsp::GotoTypeDefinitionResponse>,
project: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
+ server_id: LanguageServerId,
cx: AsyncAppContext,
) -> Result<Vec<LocationLink>> {
- location_links_from_lsp(message, project, buffer, cx).await
+ location_links_from_lsp(message, project, buffer, server_id, cx).await
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetTypeDefinition {
@@ -537,12 +548,13 @@ impl LspCommand for GetTypeDefinition {
fn language_server_for_buffer(
project: &ModelHandle<Project>,
buffer: &ModelHandle<Buffer>,
+ server_id: LanguageServerId,
cx: &mut AsyncAppContext,
) -> Result<(Arc<CachedLspAdapter>, Arc<LanguageServer>)> {
project
.read_with(cx, |project, cx| {
project
- .language_server_for_buffer(buffer.read(cx), cx)
+ .language_server_for_buffer(buffer.read(cx), server_id, cx)
.map(|(adapter, server)| (adapter.clone(), server.clone()))
})
.ok_or_else(|| anyhow!("no language server found for buffer"))
@@ -614,6 +626,7 @@ async fn location_links_from_lsp(
message: Option<lsp::GotoDefinitionResponse>,
project: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
+ server_id: LanguageServerId,
mut cx: AsyncAppContext,
) -> Result<Vec<LocationLink>> {
let message = match message {
@@ -642,7 +655,8 @@ async fn location_links_from_lsp(
}
}
- let (lsp_adapter, language_server) = language_server_for_buffer(&project, &buffer, &mut cx)?;
+ let (lsp_adapter, language_server) =
+ language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
let mut definitions = Vec::new();
for (origin_range, target_uri, target_range) in unresolved_links {
let target_buffer_handle = project
@@ -756,11 +770,12 @@ impl LspCommand for GetReferences {
locations: Option<Vec<lsp::Location>>,
project: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
+ server_id: LanguageServerId,
mut cx: AsyncAppContext,
) -> Result<Vec<Location>> {
let mut references = Vec::new();
let (lsp_adapter, language_server) =
- language_server_for_buffer(&project, &buffer, &mut cx)?;
+ language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
if let Some(locations) = locations {
for lsp_location in locations {
@@ -917,6 +932,7 @@ impl LspCommand for GetDocumentHighlights {
lsp_highlights: Option<Vec<lsp::DocumentHighlight>>,
_: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
+ _: LanguageServerId,
cx: AsyncAppContext,
) -> Result<Vec<DocumentHighlight>> {
buffer.read_with(&cx, |buffer, _| {
@@ -1062,6 +1078,7 @@ impl LspCommand for GetHover {
message: Option<lsp::Hover>,
_: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
+ _: LanguageServerId,
cx: AsyncAppContext,
) -> Result<Self::Response> {
Ok(message.and_then(|hover| {
@@ -1283,6 +1300,7 @@ impl LspCommand for GetCompletions {
completions: Option<lsp::CompletionResponse>,
_: ModelHandle<Project>,
buffer: ModelHandle<Buffer>,
+ _: LanguageServerId,
cx: AsyncAppContext,
) -> Result<Vec<Completion>> {
let completions = if let Some(completions) = completions {
@@ -1502,6 +1520,7 @@ impl LspCommand for GetCodeActions {
actions: Option<lsp::CodeActionResponse>,
_: ModelHandle<Project>,
_: ModelHandle<Buffer>,
+ server_id: LanguageServerId,
_: AsyncAppContext,
) -> Result<Vec<CodeAction>> {
Ok(actions
@@ -1510,6 +1529,7 @@ impl LspCommand for GetCodeActions {
.filter_map(|entry| {
if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
Some(CodeAction {
+ server_id,
range: self.range.clone(),
lsp_action,
})
@@ -31,12 +31,12 @@ use language::{
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel,
Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _,
Language, LanguageRegistry, LanguageServerName, LocalFile, OffsetRangeExt, Operation, Patch,
- PointUtf16, RopeFingerprint, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
- Unclipped,
+ PendingLanguageServer, PointUtf16, RopeFingerprint, TextBufferSnapshot, ToOffset, ToPointUtf16,
+ Transaction, Unclipped,
};
use lsp::{
DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
- DocumentHighlightKind, LanguageServer, LanguageString, MarkedString,
+ DocumentHighlightKind, LanguageServer, LanguageServerId, LanguageString, MarkedString,
};
use lsp_command::*;
use lsp_glob_set::LspGlobSet;
@@ -95,11 +95,10 @@ pub struct Project {
active_entry: Option<ProjectEntryId>,
buffer_changes_tx: mpsc::UnboundedSender<BufferMessage>,
languages: Arc<LanguageRegistry>,
- language_servers: HashMap<usize, LanguageServerState>,
- language_server_ids: HashMap<(WorktreeId, LanguageServerName), usize>,
- language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
- last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
- next_language_server_id: usize,
+ language_servers: HashMap<LanguageServerId, LanguageServerState>,
+ language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
+ language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
+ last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
client: Arc<client::Client>,
next_entry_id: Arc<AtomicUsize>,
join_project_response_message_id: u32,
@@ -124,7 +123,7 @@ pub struct Project {
/// A mapping from a buffer ID to None means that we've started waiting for an ID but haven't finished loading it.
/// Used for re-issuing buffer requests when peers temporarily disconnect
incomplete_remote_buffers: HashMap<u64, Option<ModelHandle<Buffer>>>,
- buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
+ buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
buffers_being_formatted: HashSet<usize>,
nonce: u128,
_maintain_buffer_languages: Task<()>,
@@ -133,6 +132,11 @@ pub struct Project {
copilot_enabled: bool,
}
+struct LspBufferSnapshot {
+ version: i32,
+ snapshot: TextBufferSnapshot,
+}
+
enum BufferMessage {
Operation {
buffer_id: u64,
@@ -185,14 +189,14 @@ pub enum Event {
WorktreeAdded,
WorktreeRemoved(WorktreeId),
DiskBasedDiagnosticsStarted {
- language_server_id: usize,
+ language_server_id: LanguageServerId,
},
DiskBasedDiagnosticsFinished {
- language_server_id: usize,
+ language_server_id: LanguageServerId,
},
DiagnosticsUpdated {
path: ProjectPath,
- language_server_id: usize,
+ language_server_id: LanguageServerId,
},
RemoteIdChanged(Option<u64>),
DisconnectedFromHost,
@@ -239,7 +243,6 @@ pub struct ProjectPath {
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
- pub language_server_id: usize,
pub error_count: usize,
pub warning_count: usize,
}
@@ -310,12 +313,8 @@ pub struct Hover {
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
impl DiagnosticSummary {
- fn new<'a, T: 'a>(
- language_server_id: usize,
- diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
- ) -> Self {
+ fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
let mut this = Self {
- language_server_id,
error_count: 0,
warning_count: 0,
};
@@ -337,10 +336,14 @@ impl DiagnosticSummary {
self.error_count == 0 && self.warning_count == 0
}
- pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
+ pub fn to_proto(
+ &self,
+ language_server_id: LanguageServerId,
+ path: &Path,
+ ) -> proto::DiagnosticSummary {
proto::DiagnosticSummary {
path: path.to_string_lossy().to_string(),
- language_server_id: self.language_server_id as u64,
+ language_server_id: language_server_id.0 as u64,
error_count: self.error_count as u32,
warning_count: self.warning_count as u32,
}
@@ -469,7 +472,6 @@ impl Project {
language_server_statuses: Default::default(),
last_workspace_edits_by_language_server: Default::default(),
buffers_being_formatted: Default::default(),
- next_language_server_id: 0,
nonce: StdRng::from_entropy().gen(),
terminals: Terminals {
local_handles: Vec::new(),
@@ -543,7 +545,7 @@ impl Project {
.into_iter()
.map(|server| {
(
- server.id as usize,
+ LanguageServerId(server.id as usize),
LanguageServerStatus {
name: server.name,
pending_work: Default::default(),
@@ -554,7 +556,6 @@ impl Project {
})
.collect(),
last_workspace_edits_by_language_server: Default::default(),
- next_language_server_id: 0,
opened_buffers: Default::default(),
buffers_being_formatted: Default::default(),
buffer_snapshots: Default::default(),
@@ -645,7 +646,7 @@ impl Project {
let mut language_servers_to_stop = Vec::new();
for language in self.languages.to_vec() {
- if let Some(lsp_adapter) = language.lsp_adapter() {
+ for lsp_adapter in language.lsp_adapters() {
if !settings.enable_language_server(Some(&language.name())) {
let lsp_name = &lsp_adapter.name;
for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
@@ -665,7 +666,7 @@ impl Project {
// Start all the newly-enabled language servers.
for (worktree_id, worktree_path, language) in language_servers_to_start {
- self.start_language_server(worktree_id, worktree_path, language, cx);
+ self.start_language_servers(worktree_id, worktree_path, language, cx);
}
if !self.copilot_enabled && Copilot::global(cx).is_some() {
@@ -1028,7 +1029,7 @@ impl Project {
.send(proto::StartLanguageServer {
project_id,
server: Some(proto::LanguageServer {
- id: *server_id as u64,
+ id: server_id.0 as u64,
name: status.name.clone(),
}),
})
@@ -1155,7 +1156,7 @@ impl Project {
.into_iter()
.map(|server| {
(
- server.id as usize,
+ LanguageServerId(server.id as usize),
LanguageServerStatus {
name: server.name,
pending_work: Default::default(),
@@ -1447,7 +1448,7 @@ impl Project {
fn open_local_buffer_via_lsp(
&mut self,
abs_path: lsp::Url,
- language_server_id: usize,
+ language_server_id: LanguageServerId,
language_server_name: LanguageServerName,
cx: &mut ModelContext<Self>,
) -> Task<Result<ModelHandle<Buffer>>> {
@@ -1545,12 +1546,13 @@ impl Project {
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
- let old_path =
- File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
+ let old_file = File::from_dyn(buffer.read(cx).file())
+ .filter(|f| f.is_local())
+ .cloned();
cx.spawn(|this, mut cx| async move {
- if let Some(old_path) = old_path {
+ if let Some(old_file) = &old_file {
this.update(&mut cx, |this, cx| {
- this.unregister_buffer_from_language_server(&buffer, old_path, cx);
+ this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
});
}
let (worktree, path) = worktree_task.await?;
@@ -1564,7 +1566,7 @@ impl Project {
.await?;
this.update(&mut cx, |this, cx| {
this.detect_language_for_buffer(&buffer, cx);
- this.register_buffer_with_language_server(&buffer, cx);
+ this.register_buffer_with_language_servers(&buffer, cx);
});
Ok(())
})
@@ -1628,17 +1630,18 @@ impl Project {
.detach();
self.detect_language_for_buffer(buffer, cx);
- self.register_buffer_with_language_server(buffer, cx);
+ self.register_buffer_with_language_servers(buffer, cx);
self.register_buffer_with_copilot(buffer, cx);
cx.observe_release(buffer, |this, buffer, cx| {
if let Some(file) = File::from_dyn(buffer.file()) {
if file.is_local() {
let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
- if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
+ for server in this.language_servers_for_buffer(buffer, cx) {
server
+ .1
.notify::<lsp::notification::DidCloseTextDocument>(
lsp::DidCloseTextDocumentParams {
- text_document: lsp::TextDocumentIdentifier::new(uri),
+ text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
},
)
.log_err();
@@ -1652,51 +1655,55 @@ impl Project {
Ok(())
}
- fn register_buffer_with_language_server(
+ fn register_buffer_with_language_servers(
&mut self,
buffer_handle: &ModelHandle<Buffer>,
cx: &mut ModelContext<Self>,
) {
let buffer = buffer_handle.read(cx);
let buffer_id = buffer.remote_id();
+
if let Some(file) = File::from_dyn(buffer.file()) {
- if file.is_local() {
- let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
- let initial_snapshot = buffer.text_snapshot();
-
- let mut language_server = None;
- let mut language_id = None;
- if let Some(language) = buffer.language() {
- let worktree_id = file.worktree_id(cx);
- if let Some(adapter) = language.lsp_adapter() {
- language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
- language_server = self
- .language_server_ids
- .get(&(worktree_id, adapter.name.clone()))
- .and_then(|id| self.language_servers.get(id))
- .and_then(|server_state| {
- if let LanguageServerState::Running { server, .. } = server_state {
- Some(server.clone())
- } else {
- None
- }
- });
- }
- }
+ if !file.is_local() {
+ return;
+ }
- if let Some(local_worktree) = file.worktree.read(cx).as_local() {
- if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
- self.update_buffer_diagnostics(buffer_handle, diagnostics, None, cx)
- .log_err();
- }
+ let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
+ let initial_snapshot = buffer.text_snapshot();
+ let language = buffer.language().cloned();
+ let worktree_id = file.worktree_id(cx);
+
+ if let Some(local_worktree) = file.worktree.read(cx).as_local() {
+ for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
+ self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
+ .log_err();
}
+ }
+
+ if let Some(language) = language {
+ for adapter in language.lsp_adapters() {
+ let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
+ let server = self
+ .language_server_ids
+ .get(&(worktree_id, adapter.name.clone()))
+ .and_then(|id| self.language_servers.get(id))
+ .and_then(|server_state| {
+ if let LanguageServerState::Running { server, .. } = server_state {
+ Some(server.clone())
+ } else {
+ None
+ }
+ });
+ let server = match server {
+ Some(server) => server,
+ None => continue,
+ };
- if let Some(server) = language_server {
server
.notify::<lsp::notification::DidOpenTextDocument>(
lsp::DidOpenTextDocumentParams {
text_document: lsp::TextDocumentItem::new(
- uri,
+ uri.clone(),
language_id.unwrap_or_default(),
0,
initial_snapshot.text(),
@@ -1704,6 +1711,7 @@ impl Project {
},
)
.log_err();
+
buffer_handle.update(cx, |buffer, cx| {
buffer.set_completion_triggers(
server
@@ -1713,31 +1721,50 @@ impl Project {
.and_then(|provider| provider.trigger_characters.clone())
.unwrap_or_default(),
cx,
- )
+ );
});
+
+ let snapshot = LspBufferSnapshot {
+ version: 0,
+ snapshot: initial_snapshot.clone(),
+ };
self.buffer_snapshots
- .insert(buffer_id, vec![(0, initial_snapshot)]);
+ .entry(buffer_id)
+ .or_default()
+ .insert(server.server_id(), vec![snapshot]);
}
}
}
}
- fn unregister_buffer_from_language_server(
+ fn unregister_buffer_from_language_servers(
&mut self,
buffer: &ModelHandle<Buffer>,
- old_path: PathBuf,
+ old_file: &File,
cx: &mut ModelContext<Self>,
) {
+ let old_path = match old_file.as_local() {
+ Some(local) => local.abs_path(cx),
+ None => return,
+ };
+
buffer.update(cx, |buffer, cx| {
- buffer.update_diagnostics(Default::default(), cx);
+ let worktree_id = old_file.worktree_id(cx);
+ let ids = &self.language_server_ids;
+
+ let language = buffer.language().cloned();
+ let adapters = language.iter().flat_map(|language| language.lsp_adapters());
+ for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
+ buffer.update_diagnostics(server_id, Default::default(), cx);
+ }
+
self.buffer_snapshots.remove(&buffer.remote_id());
- if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
+ let file_url = lsp::Url::from_file_path(old_path).unwrap();
+ for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
language_server
.notify::<lsp::notification::DidCloseTextDocument>(
lsp::DidCloseTextDocumentParams {
- text_document: lsp::TextDocumentIdentifier::new(
- lsp::Url::from_file_path(old_path).unwrap(),
- ),
+ text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
},
)
.log_err();
@@ -1833,52 +1860,67 @@ impl Project {
})
.ok();
}
+
BufferEvent::Edited { .. } => {
- let language_server = self
- .language_server_for_buffer(buffer.read(cx), cx)
- .map(|(_, server)| server.clone())?;
let buffer = buffer.read(cx);
let file = File::from_dyn(buffer.file())?;
let abs_path = file.as_local()?.abs_path(cx);
let uri = lsp::Url::from_file_path(abs_path).unwrap();
- let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
- let (version, prev_snapshot) = buffer_snapshots.last()?;
let next_snapshot = buffer.text_snapshot();
- let next_version = version + 1;
-
- let content_changes = buffer
- .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
- .map(|edit| {
- let edit_start = edit.new.start.0;
- let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
- let new_text = next_snapshot
- .text_for_range(edit.new.start.1..edit.new.end.1)
- .collect();
- lsp::TextDocumentContentChangeEvent {
- range: Some(lsp::Range::new(
- point_to_lsp(edit_start),
- point_to_lsp(edit_end),
- )),
- range_length: None,
- text: new_text,
- }
- })
+
+ let language_servers: Vec<_> = self
+ .language_servers_iter_for_buffer(buffer, cx)
+ .map(|i| i.1.clone())
.collect();
- buffer_snapshots.push((next_version, next_snapshot));
+ for language_server in language_servers {
+ let language_server = language_server.clone();
+
+ let buffer_snapshots = self
+ .buffer_snapshots
+ .get_mut(&buffer.remote_id())
+ .and_then(|m| m.get_mut(&language_server.server_id()))?;
+ let previous_snapshot = buffer_snapshots.last()?;
+ let next_version = previous_snapshot.version + 1;
+
+ let content_changes = buffer
+ .edits_since::<(PointUtf16, usize)>(previous_snapshot.snapshot.version())
+ .map(|edit| {
+ let edit_start = edit.new.start.0;
+ let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
+ let new_text = next_snapshot
+ .text_for_range(edit.new.start.1..edit.new.end.1)
+ .collect();
+ lsp::TextDocumentContentChangeEvent {
+ range: Some(lsp::Range::new(
+ point_to_lsp(edit_start),
+ point_to_lsp(edit_end),
+ )),
+ range_length: None,
+ text: new_text,
+ }
+ })
+ .collect();
- language_server
- .notify::<lsp::notification::DidChangeTextDocument>(
- lsp::DidChangeTextDocumentParams {
- text_document: lsp::VersionedTextDocumentIdentifier::new(
- uri,
- next_version,
- ),
- content_changes,
- },
- )
- .log_err();
+ buffer_snapshots.push(LspBufferSnapshot {
+ version: next_version,
+ snapshot: next_snapshot.clone(),
+ });
+
+ language_server
+ .notify::<lsp::notification::DidChangeTextDocument>(
+ lsp::DidChangeTextDocumentParams {
+ text_document: lsp::VersionedTextDocumentIdentifier::new(
+ uri.clone(),
+ next_version,
+ ),
+ content_changes,
+ },
+ )
+ .log_err();
+ }
}
+
BufferEvent::Saved => {
let file = File::from_dyn(buffer.read(cx).file())?;
let worktree_id = file.worktree_id(cx);
@@ -1898,24 +1940,26 @@ impl Project {
.log_err();
}
- let language_server_id = self.language_server_id_for_buffer(buffer.read(cx), cx)?;
- if let Some(LanguageServerState::Running {
- adapter,
- simulate_disk_based_diagnostics_completion,
- ..
- }) = self.language_servers.get_mut(&language_server_id)
- {
- // After saving a buffer using a language server that doesn't provide
- // a disk-based progress token, kick off a timer that will reset every
- // time the buffer is saved. If the timer eventually fires, simulate
- // disk-based diagnostics being finished so that other pieces of UI
- // (e.g., project diagnostics view, diagnostic status bar) can update.
- // We don't emit an event right away because the language server might take
- // some time to publish diagnostics.
- if adapter.disk_based_diagnostics_progress_token.is_none() {
- const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration = Duration::from_secs(1);
-
- let task = cx.spawn_weak(|this, mut cx| async move {
+ let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
+ for language_server_id in language_server_ids {
+ if let Some(LanguageServerState::Running {
+ adapter,
+ simulate_disk_based_diagnostics_completion,
+ ..
+ }) = self.language_servers.get_mut(&language_server_id)
+ {
+ // After saving a buffer using a language server that doesn't provide
+ // a disk-based progress token, kick off a timer that will reset every
+ // time the buffer is saved. If the timer eventually fires, simulate
+ // disk-based diagnostics being finished so that other pieces of UI
+ // (e.g., project diagnostics view, diagnostic status bar) can update.
+ // We don't emit an event right away because the language server might take
+ // some time to publish diagnostics.
+ if adapter.disk_based_diagnostics_progress_token.is_none() {
+ const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
+ Duration::from_secs(1);
+
+ let task = cx.spawn_weak(|this, mut cx| async move {
cx.background().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx | {
@@ -1929,10 +1973,12 @@ impl Project {
});
}
});
- *simulate_disk_based_diagnostics_completion = Some(task);
+ *simulate_disk_based_diagnostics_completion = Some(task);
+ }
}
}
}
+
_ => {}
}
@@ -1987,7 +2033,7 @@ impl Project {
for buffer in plain_text_buffers {
project.detect_language_for_buffer(&buffer, cx);
- project.register_buffer_with_language_server(&buffer, cx);
+ project.register_buffer_with_language_servers(&buffer, cx);
}
for buffer in buffers_with_unknown_injections {
@@ -2071,12 +2117,12 @@ impl Project {
if let Some(worktree) = file.worktree.read(cx).as_local() {
let worktree_id = worktree.id();
let worktree_abs_path = worktree.abs_path().clone();
- self.start_language_server(worktree_id, worktree_abs_path, new_language, cx);
+ self.start_language_servers(worktree_id, worktree_abs_path, new_language, cx);
}
}
}
- fn start_language_server(
+ fn start_language_servers(
&mut self,
worktree_id: WorktreeId,
worktree_path: Arc<Path>,
@@ -2090,313 +2136,332 @@ impl Project {
return;
}
- let adapter = if let Some(adapter) = language.lsp_adapter() {
- adapter
- } else {
- return;
- };
- let key = (worktree_id, adapter.name.clone());
+ for adapter in language.lsp_adapters() {
+ let key = (worktree_id, adapter.name.clone());
+ if self.language_server_ids.contains_key(&key) {
+ continue;
+ }
- let mut initialization_options = adapter.initialization_options.clone();
+ let pending_server = match self.languages.start_language_server(
+ language.clone(),
+ adapter.clone(),
+ worktree_path.clone(),
+ self.client.http_client(),
+ cx,
+ ) {
+ Some(pending_server) => pending_server,
+ None => continue,
+ };
- let lsp = &cx.global::<Settings>().lsp.get(&adapter.name.0);
- let override_options = lsp.map(|s| s.initialization_options.clone()).flatten();
- match (&mut initialization_options, override_options) {
- (Some(initialization_options), Some(override_options)) => {
- merge_json_value_into(override_options, initialization_options);
- }
- (None, override_options) => initialization_options = override_options,
- _ => {}
- }
+ let lsp = &cx.global::<Settings>().lsp.get(&adapter.name.0);
+ let override_options = lsp.map(|s| s.initialization_options.clone()).flatten();
- self.language_server_ids
- .entry(key.clone())
- .or_insert_with(|| {
- let languages = self.languages.clone();
- let server_id = post_inc(&mut self.next_language_server_id);
- let language_server = self.languages.start_language_server(
- server_id,
- language.clone(),
- worktree_path,
- self.client.http_client(),
- cx,
- );
- self.language_servers.insert(
- server_id,
- LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move {
- let workspace_config =
- cx.update(|cx| languages.workspace_configuration(cx)).await;
- let language_server = language_server?.await.log_err()?;
- let language_server = language_server
- .initialize(initialization_options)
- .await
- .log_err()?;
- let this = this.upgrade(&cx)?;
+ let mut initialization_options = adapter.initialization_options.clone();
+ match (&mut initialization_options, override_options) {
+ (Some(initialization_options), Some(override_options)) => {
+ merge_json_value_into(override_options, initialization_options);
+ }
+ (None, override_options) => initialization_options = override_options,
+ _ => {}
+ }
- language_server
- .on_notification::<lsp::notification::PublishDiagnostics, _>({
- let this = this.downgrade();
- let adapter = adapter.clone();
- move |mut params, cx| {
- let this = this;
- let adapter = adapter.clone();
- cx.spawn(|mut cx| async move {
- adapter.process_diagnostics(&mut params).await;
- if let Some(this) = this.upgrade(&cx) {
- this.update(&mut cx, |this, cx| {
- this.update_diagnostics(
- server_id,
- params,
- &adapter.disk_based_diagnostic_sources,
- cx,
- )
- .log_err();
- });
- }
- })
- .detach();
- }
- })
- .detach();
+ let server_id = pending_server.server_id;
+ let state = self.setup_pending_language_server(
+ initialization_options,
+ pending_server,
+ adapter.clone(),
+ language.clone(),
+ key.clone(),
+ cx,
+ );
+ self.language_servers.insert(server_id, state);
+ self.language_server_ids.insert(key.clone(), server_id);
+ }
+ }
- language_server
- .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
- let languages = languages.clone();
- move |params, mut cx| {
- let languages = languages.clone();
- async move {
- let workspace_config = cx
- .update(|cx| languages.workspace_configuration(cx))
- .await;
- Ok(params
- .items
- .into_iter()
- .map(|item| {
- if let Some(section) = &item.section {
- workspace_config
- .get(section)
- .cloned()
- .unwrap_or(serde_json::Value::Null)
- } else {
- workspace_config.clone()
- }
- })
- .collect())
- }
- }
- })
- .detach();
+ fn setup_pending_language_server(
+ &mut self,
+ initialization_options: Option<serde_json::Value>,
+ pending_server: PendingLanguageServer,
+ adapter: Arc<CachedLspAdapter>,
+ language: Arc<Language>,
+ key: (WorktreeId, LanguageServerName),
+ cx: &mut ModelContext<Project>,
+ ) -> LanguageServerState {
+ let server_id = pending_server.server_id;
+ let languages = self.languages.clone();
- // Even though we don't have handling for these requests, respond to them to
- // avoid stalling any language server like `gopls` which waits for a response
- // to these requests when initializing.
- language_server
- .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
- let this = this.downgrade();
- move |params, mut cx| async move {
- if let Some(this) = this.upgrade(&cx) {
- this.update(&mut cx, |this, _| {
- if let Some(status) =
- this.language_server_statuses.get_mut(&server_id)
- {
- if let lsp::NumberOrString::String(token) =
- params.token
- {
- status.progress_tokens.insert(token);
- }
- }
- });
- }
- Ok(())
- }
- })
- .detach();
- language_server
- .on_request::<lsp::request::RegisterCapability, _, _>({
- let this = this.downgrade();
- move |params, mut cx| async move {
- let this = this
- .upgrade(&cx)
- .ok_or_else(|| anyhow!("project dropped"))?;
- for reg in params.registrations {
- if reg.method == "workspace/didChangeWatchedFiles" {
- if let Some(options) = reg.register_options {
- let options = serde_json::from_value(options)?;
- this.update(&mut cx, |this, cx| {
- this.on_lsp_did_change_watched_files(
- server_id, options, cx,
- );
- });
- }
- }
- }
- Ok(())
- }
- })
- .detach();
+ LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move {
+ let workspace_config = cx.update(|cx| languages.workspace_configuration(cx)).await;
+ let language_server = pending_server.task.await.log_err()?;
+ let language_server = language_server
+ .initialize(initialization_options)
+ .await
+ .log_err()?;
+ let this = this.upgrade(&cx)?;
- language_server
- .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
- let this = this.downgrade();
- let adapter = adapter.clone();
- let language_server = language_server.clone();
- move |params, cx| {
- Self::on_lsp_workspace_edit(
- this,
- params,
+ language_server
+ .on_notification::<lsp::notification::PublishDiagnostics, _>({
+ let this = this.downgrade();
+ let adapter = adapter.clone();
+ move |mut params, cx| {
+ let this = this;
+ let adapter = adapter.clone();
+ cx.spawn(|mut cx| async move {
+ adapter.process_diagnostics(&mut params).await;
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, cx| {
+ this.update_diagnostics(
server_id,
- adapter.clone(),
- language_server.clone(),
+ params,
+ &adapter.disk_based_diagnostic_sources,
cx,
)
- }
- })
- .detach();
-
- let disk_based_diagnostics_progress_token =
- adapter.disk_based_diagnostics_progress_token.clone();
-
- language_server
- .on_notification::<lsp::notification::Progress, _>({
- let this = this.downgrade();
- move |params, mut cx| {
- if let Some(this) = this.upgrade(&cx) {
- this.update(&mut cx, |this, cx| {
- this.on_lsp_progress(
- params,
- server_id,
- disk_based_diagnostics_progress_token.clone(),
- cx,
- );
- });
- }
- }
- })
- .detach();
-
- language_server
- .notify::<lsp::notification::DidChangeConfiguration>(
- lsp::DidChangeConfigurationParams {
- settings: workspace_config,
- },
- )
- .ok();
-
- this.update(&mut cx, |this, cx| {
- // If the language server for this key doesn't match the server id, don't store the
- // server. Which will cause it to be dropped, killing the process
- if this
- .language_server_ids
- .get(&key)
- .map(|id| id != &server_id)
- .unwrap_or(false)
- {
- return None;
- }
-
- // Update language_servers collection with Running variant of LanguageServerState
- // indicating that the server is up and running and ready
- this.language_servers.insert(
- server_id,
- LanguageServerState::Running {
- adapter: adapter.clone(),
- language,
- watched_paths: Default::default(),
- server: language_server.clone(),
- simulate_disk_based_diagnostics_completion: None,
- },
- );
- this.language_server_statuses.insert(
- server_id,
- LanguageServerStatus {
- name: language_server.name().to_string(),
- pending_work: Default::default(),
- has_pending_diagnostic_updates: false,
- progress_tokens: Default::default(),
- },
- );
-
- if let Some(project_id) = this.remote_id() {
- this.client
- .send(proto::StartLanguageServer {
- project_id,
- server: Some(proto::LanguageServer {
- id: server_id as u64,
- name: language_server.name().to_string(),
- }),
- })
.log_err();
+ });
}
+ })
+ .detach();
+ }
+ })
+ .detach();
- // Tell the language server about every open buffer in the worktree that matches the language.
- for buffer in this.opened_buffers.values() {
- if let Some(buffer_handle) = buffer.upgrade(cx) {
- let buffer = buffer_handle.read(cx);
- let file = if let Some(file) = File::from_dyn(buffer.file()) {
- file
- } else {
- continue;
- };
- let language = if let Some(language) = buffer.language() {
- language
+ language_server
+ .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
+ let languages = languages.clone();
+ move |params, mut cx| {
+ let languages = languages.clone();
+ async move {
+ let workspace_config =
+ cx.update(|cx| languages.workspace_configuration(cx)).await;
+ Ok(params
+ .items
+ .into_iter()
+ .map(|item| {
+ if let Some(section) = &item.section {
+ workspace_config
+ .get(section)
+ .cloned()
+ .unwrap_or(serde_json::Value::Null)
} else {
- continue;
- };
- if file.worktree.read(cx).id() != key.0
- || language.lsp_adapter().map(|a| a.name.clone())
- != Some(key.1.clone())
- {
- continue;
+ workspace_config.clone()
}
+ })
+ .collect())
+ }
+ }
+ })
+ .detach();
- let file = file.as_local()?;
- let versions = this
- .buffer_snapshots
- .entry(buffer.remote_id())
- .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
-
- let (version, initial_snapshot) = versions.last().unwrap();
- let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
- language_server
- .notify::<lsp::notification::DidOpenTextDocument>(
- lsp::DidOpenTextDocumentParams {
- text_document: lsp::TextDocumentItem::new(
- uri,
- adapter
- .language_ids
- .get(language.name().as_ref())
- .cloned()
- .unwrap_or_default(),
- *version,
- initial_snapshot.text(),
- ),
- },
- )
- .log_err()?;
- buffer_handle.update(cx, |buffer, cx| {
- buffer.set_completion_triggers(
- language_server
- .capabilities()
- .completion_provider
- .as_ref()
- .and_then(|provider| {
- provider.trigger_characters.clone()
- })
- .unwrap_or_default(),
- cx,
- )
+ // Even though we don't have handling for these requests, respond to them to
+ // avoid stalling any language server like `gopls` which waits for a response
+ // to these requests when initializing.
+ language_server
+ .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
+ let this = this.downgrade();
+ move |params, mut cx| async move {
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, _| {
+ if let Some(status) =
+ this.language_server_statuses.get_mut(&server_id)
+ {
+ if let lsp::NumberOrString::String(token) = params.token {
+ status.progress_tokens.insert(token);
+ }
+ }
+ });
+ }
+ Ok(())
+ }
+ })
+ .detach();
+ language_server
+ .on_request::<lsp::request::RegisterCapability, _, _>({
+ let this = this.downgrade();
+ move |params, mut cx| async move {
+ let this = this
+ .upgrade(&cx)
+ .ok_or_else(|| anyhow!("project dropped"))?;
+ for reg in params.registrations {
+ if reg.method == "workspace/didChangeWatchedFiles" {
+ if let Some(options) = reg.register_options {
+ let options = serde_json::from_value(options)?;
+ this.update(&mut cx, |this, cx| {
+ this.on_lsp_did_change_watched_files(
+ server_id, options, cx,
+ );
});
}
}
+ }
+ Ok(())
+ }
+ })
+ .detach();
- cx.notify();
- Some(language_server)
- })
- })),
+ language_server
+ .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
+ let this = this.downgrade();
+ let adapter = adapter.clone();
+ let language_server = language_server.clone();
+ move |params, cx| {
+ Self::on_lsp_workspace_edit(
+ this,
+ params,
+ server_id,
+ adapter.clone(),
+ language_server.clone(),
+ cx,
+ )
+ }
+ })
+ .detach();
+
+ let disk_based_diagnostics_progress_token =
+ adapter.disk_based_diagnostics_progress_token.clone();
+
+ language_server
+ .on_notification::<lsp::notification::Progress, _>({
+ let this = this.downgrade();
+ move |params, mut cx| {
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, cx| {
+ this.on_lsp_progress(
+ params,
+ server_id,
+ disk_based_diagnostics_progress_token.clone(),
+ cx,
+ );
+ });
+ }
+ }
+ })
+ .detach();
+
+ language_server
+ .notify::<lsp::notification::DidChangeConfiguration>(
+ lsp::DidChangeConfigurationParams {
+ settings: workspace_config,
+ },
+ )
+ .ok();
+
+ this.update(&mut cx, |this, cx| {
+ // If the language server for this key doesn't match the server id, don't store the
+ // server, which will cause it to be dropped, killing the process
+ if this
+ .language_server_ids
+ .get(&key)
+ .map(|id| id != &server_id)
+ .unwrap_or(false)
+ {
+ return None;
+ }
+
+ // Update language_servers collection with Running variant of LanguageServerState
+ // indicating that the server is up and running and ready
+ this.language_servers.insert(
+ server_id,
+ LanguageServerState::Running {
+ adapter: adapter.clone(),
+ language: language.clone(),
+ watched_paths: Default::default(),
+ server: language_server.clone(),
+ simulate_disk_based_diagnostics_completion: None,
+ },
+ );
+ this.language_server_statuses.insert(
+ server_id,
+ LanguageServerStatus {
+ name: language_server.name().to_string(),
+ pending_work: Default::default(),
+ has_pending_diagnostic_updates: false,
+ progress_tokens: Default::default(),
+ },
);
- server_id
- });
+ if let Some(project_id) = this.remote_id() {
+ this.client
+ .send(proto::StartLanguageServer {
+ project_id,
+ server: Some(proto::LanguageServer {
+ id: server_id.0 as u64,
+ name: language_server.name().to_string(),
+ }),
+ })
+ .log_err();
+ }
+
+ // Tell the language server about every open buffer in the worktree that matches the language.
+ for buffer in this.opened_buffers.values() {
+ if let Some(buffer_handle) = buffer.upgrade(cx) {
+ let buffer = buffer_handle.read(cx);
+ let file = match File::from_dyn(buffer.file()) {
+ Some(file) => file,
+ None => continue,
+ };
+ let language = match buffer.language() {
+ Some(language) => language,
+ None => continue,
+ };
+
+ if file.worktree.read(cx).id() != key.0
+ || !language.lsp_adapters().iter().any(|a| a.name == key.1)
+ {
+ continue;
+ }
+
+ let file = file.as_local()?;
+ let versions = this
+ .buffer_snapshots
+ .entry(buffer.remote_id())
+ .or_default()
+ .entry(server_id)
+ .or_insert_with(|| {
+ vec![LspBufferSnapshot {
+ version: 0,
+ snapshot: buffer.text_snapshot(),
+ }]
+ });
+
+ let snapshot = versions.last().unwrap();
+ let version = snapshot.version;
+ let initial_snapshot = &snapshot.snapshot;
+ let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
+ language_server
+ .notify::<lsp::notification::DidOpenTextDocument>(
+ lsp::DidOpenTextDocumentParams {
+ text_document: lsp::TextDocumentItem::new(
+ uri,
+ adapter
+ .language_ids
+ .get(language.name().as_ref())
+ .cloned()
+ .unwrap_or_default(),
+ version,
+ initial_snapshot.text(),
+ ),
+ },
+ )
+ .log_err()?;
+ buffer_handle.update(cx, |buffer, cx| {
+ buffer.set_completion_triggers(
+ language_server
+ .capabilities()
+ .completion_provider
+ .as_ref()
+ .and_then(|provider| provider.trigger_characters.clone())
+ .unwrap_or_default(),
+ cx,
+ )
+ });
+ }
+ }
+
+ cx.notify();
+ Some(language_server)
+ })
+ }))
}
// Returns a list of all of the worktrees which no longer have a language server and the root path
@@ -303,6 +303,7 @@ async fn test_managing_language_servers(
rust_buffer2.update(cx, |buffer, cx| {
buffer.update_diagnostics(
+ LanguageServerId(0),
DiagnosticSet::from_sorted_entries(
vec![DiagnosticEntry {
diagnostic: Default::default(),
@@ -399,7 +400,7 @@ async fn test_managing_language_servers(
.text_document,
lsp::TextDocumentItem {
uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
- version: 1,
+ version: 0,
text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
language_id: Default::default()
}
@@ -426,7 +427,7 @@ async fn test_managing_language_servers(
},
lsp::TextDocumentItem {
uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
- version: 1,
+ version: 0,
text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
language_id: Default::default()
}
@@ -581,7 +582,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
project.update(cx, |project, cx| {
project
.update_diagnostics(
- 0,
+ LanguageServerId(0),
lsp::PublishDiagnosticsParams {
uri: Url::from_file_path("/dir/a.rs").unwrap(),
version: None,
@@ -598,7 +599,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
.unwrap();
project
.update_diagnostics(
- 0,
+ LanguageServerId(0),
lsp::PublishDiagnosticsParams {
uri: Url::from_file_path("/dir/b.rs").unwrap(),
version: None,
@@ -674,7 +675,7 @@ async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
project.update(cx, |project, cx| {
project
.update_diagnostics(
- 0,
+ LanguageServerId(0),
lsp::PublishDiagnosticsParams {
uri: Url::from_file_path("/root/other.rs").unwrap(),
version: None,
@@ -766,7 +767,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
assert_eq!(
events.next().await.unwrap(),
Event::DiskBasedDiagnosticsStarted {
- language_server_id: 0,
+ language_server_id: LanguageServerId(0),
}
);
@@ -783,7 +784,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
assert_eq!(
events.next().await.unwrap(),
Event::DiagnosticsUpdated {
- language_server_id: 0,
+ language_server_id: LanguageServerId(0),
path: (worktree_id, Path::new("a.rs")).into()
}
);
@@ -792,7 +793,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
assert_eq!(
events.next().await.unwrap(),
Event::DiskBasedDiagnosticsFinished {
- language_server_id: 0
+ language_server_id: LanguageServerId(0)
}
);
@@ -830,7 +831,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
assert_eq!(
events.next().await.unwrap(),
Event::DiagnosticsUpdated {
- language_server_id: 0,
+ language_server_id: LanguageServerId(0),
path: (worktree_id, Path::new("a.rs")).into()
}
);
@@ -891,7 +892,7 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
assert_eq!(
events.next().await.unwrap(),
Event::DiskBasedDiagnosticsStarted {
- language_server_id: 1
+ language_server_id: LanguageServerId(1)
}
);
project.read_with(cx, |project, _| {
@@ -899,7 +900,7 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
project
.language_servers_running_disk_based_diagnostics()
.collect::<Vec<_>>(),
- [1]
+ [LanguageServerId(1)]
);
});
@@ -909,7 +910,7 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
assert_eq!(
events.next().await.unwrap(),
Event::DiskBasedDiagnosticsFinished {
- language_server_id: 1
+ language_server_id: LanguageServerId(1)
}
);
project.read_with(cx, |project, _| {
@@ -917,7 +918,7 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
project
.language_servers_running_disk_based_diagnostics()
.collect::<Vec<_>>(),
- [0; 0]
+ [LanguageServerId(0); 0]
);
});
}
@@ -1402,6 +1403,8 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
project
.update_buffer_diagnostics(
&buffer,
+ LanguageServerId(0),
+ None,
vec![
DiagnosticEntry {
range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
@@ -1420,7 +1423,6 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
},
},
],
- None,
cx,
)
.unwrap();
@@ -1447,6 +1449,64 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
});
}
+#[gpui::test]
+async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
+ println!("hello from stdout");
+ eprintln!("hello from stderr");
+ cx.foreground().forbid_parking();
+
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
+ .await;
+
+ let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+
+ project.update(cx, |project, cx| {
+ project
+ .update_diagnostic_entries(
+ LanguageServerId(0),
+ Path::new("/dir/a.rs").to_owned(),
+ None,
+ vec![DiagnosticEntry {
+ range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
+ diagnostic: Diagnostic {
+ severity: DiagnosticSeverity::ERROR,
+ is_primary: true,
+ message: "syntax error a1".to_string(),
+ ..Default::default()
+ },
+ }],
+ cx,
+ )
+ .unwrap();
+ project
+ .update_diagnostic_entries(
+ LanguageServerId(1),
+ Path::new("/dir/a.rs").to_owned(),
+ None,
+ vec![DiagnosticEntry {
+ range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
+ diagnostic: Diagnostic {
+ severity: DiagnosticSeverity::ERROR,
+ is_primary: true,
+ message: "syntax error b1".to_string(),
+ ..Default::default()
+ },
+ }],
+ cx,
+ )
+ .unwrap();
+
+ assert_eq!(
+ project.diagnostic_summary(cx),
+ DiagnosticSummary {
+ error_count: 2,
+ warning_count: 0,
+ }
+ );
+ });
+}
+
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
cx.foreground().forbid_parking();
@@ -1573,6 +1633,7 @@ async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
new_text: "".into(),
},
],
+ LanguageServerId(0),
Some(lsp_document_version),
cx,
)
@@ -1667,6 +1728,7 @@ async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestApp
new_text: "".into(),
},
],
+ LanguageServerId(0),
None,
cx,
)
@@ -1770,6 +1832,7 @@ async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
.unindent(),
},
],
+ LanguageServerId(0),
None,
cx,
)
@@ -2258,7 +2321,7 @@ async fn test_save_as(cx: &mut gpui::TestAppContext) {
..Default::default()
},
tree_sitter_rust::language(),
- None,
+ vec![],
|_| Default::default(),
);
@@ -2948,7 +3011,9 @@ async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
};
project
- .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
+ .update(cx, |p, cx| {
+ p.update_diagnostics(LanguageServerId(0), message, &[], cx)
+ })
.unwrap();
let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
@@ -26,6 +26,7 @@ use language::{
},
Buffer, DiagnosticEntry, File as _, PointUtf16, Rope, RopeFingerprint, Unclipped,
};
+use lsp::LanguageServerId;
use parking_lot::Mutex;
use postage::{
barrier,
@@ -50,7 +51,7 @@ use std::{
},
time::{Duration, SystemTime},
};
-use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
+use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeSet};
use util::{paths::HOME, ResultExt, TryFutureExt};
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
@@ -67,8 +68,14 @@ pub struct LocalWorktree {
is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
_background_scanner_task: Task<()>,
share: Option<ShareState>,
- diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<Unclipped<PointUtf16>>>>,
- diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
+ diagnostics: HashMap<
+ Arc<Path>,
+ Vec<(
+ LanguageServerId,
+ Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
+ )>,
+ >,
+ diagnostic_summaries: HashMap<Arc<Path>, HashMap<LanguageServerId, DiagnosticSummary>>,
client: Arc<Client>,
fs: Arc<dyn Fs>,
visible: bool,
@@ -82,7 +89,7 @@ pub struct RemoteWorktree {
updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
replica_id: ReplicaId,
- diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
+ diagnostic_summaries: HashMap<Arc<Path>, HashMap<LanguageServerId, DiagnosticSummary>>,
visible: bool,
disconnected: bool,
}
@@ -463,13 +470,17 @@ impl Worktree {
pub fn diagnostic_summaries(
&self,
- ) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + '_ {
+ ) -> impl Iterator<Item = (Arc<Path>, LanguageServerId, DiagnosticSummary)> + '_ {
match self {
Worktree::Local(worktree) => &worktree.diagnostic_summaries,
Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
}
.iter()
- .map(|(path, summary)| (path.0.clone(), *summary))
+ .flat_map(|(path, summaries)| {
+ summaries
+ .iter()
+ .map(move |(&server_id, &summary)| (path.clone(), server_id, summary))
+ })
}
pub fn abs_path(&self) -> Arc<Path> {
@@ -514,31 +525,54 @@ impl LocalWorktree {
pub fn diagnostics_for_path(
&self,
path: &Path,
- ) -> Option<Vec<DiagnosticEntry<Unclipped<PointUtf16>>>> {
- self.diagnostics.get(path).cloned()
+ ) -> Vec<(
+ LanguageServerId,
+ Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
+ )> {
+ self.diagnostics.get(path).cloned().unwrap_or_default()
}
pub fn update_diagnostics(
&mut self,
- language_server_id: usize,
+ server_id: LanguageServerId,
worktree_path: Arc<Path>,
diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
_: &mut ModelContext<Worktree>,
) -> Result<bool> {
- self.diagnostics.remove(&worktree_path);
- let old_summary = self
+ let summaries_by_server_id = self
.diagnostic_summaries
- .remove(&PathKey(worktree_path.clone()))
+ .entry(worktree_path.clone())
+ .or_default();
+
+ let old_summary = summaries_by_server_id
+ .remove(&server_id)
.unwrap_or_default();
- let new_summary = DiagnosticSummary::new(language_server_id, &diagnostics);
- if !new_summary.is_empty() {
- self.diagnostic_summaries
- .insert(PathKey(worktree_path.clone()), new_summary);
- self.diagnostics.insert(worktree_path.clone(), diagnostics);
+
+ let new_summary = DiagnosticSummary::new(&diagnostics);
+ if new_summary.is_empty() {
+ if let Some(diagnostics_by_server_id) = self.diagnostics.get_mut(&worktree_path) {
+ if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
+ diagnostics_by_server_id.remove(ix);
+ }
+ if diagnostics_by_server_id.is_empty() {
+ self.diagnostics.remove(&worktree_path);
+ }
+ }
+ } else {
+ summaries_by_server_id.insert(server_id, new_summary);
+ let diagnostics_by_server_id =
+ self.diagnostics.entry(worktree_path.clone()).or_default();
+ match diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
+ Ok(ix) => {
+ diagnostics_by_server_id[ix] = (server_id, diagnostics);
+ }
+ Err(ix) => {
+ diagnostics_by_server_id.insert(ix, (server_id, diagnostics));
+ }
+ }
}
- let updated = !old_summary.is_empty() || !new_summary.is_empty();
- if updated {
+ if !old_summary.is_empty() || !new_summary.is_empty() {
if let Some(share) = self.share.as_ref() {
self.client
.send(proto::UpdateDiagnosticSummary {
@@ -546,7 +580,7 @@ impl LocalWorktree {
worktree_id: self.id().to_proto(),
summary: Some(proto::DiagnosticSummary {
path: worktree_path.to_string_lossy().to_string(),
- language_server_id: language_server_id as u64,
+ language_server_id: server_id.0 as u64,
error_count: new_summary.error_count as u32,
warning_count: new_summary.warning_count as u32,
}),
@@ -555,7 +589,7 @@ impl LocalWorktree {
}
}
- Ok(updated)
+ Ok(!old_summary.is_empty() || !new_summary.is_empty())
}
fn set_snapshot(&mut self, new_snapshot: LocalSnapshot, cx: &mut ModelContext<Worktree>) {
@@ -945,13 +979,15 @@ impl LocalWorktree {
let (resume_updates_tx, mut resume_updates_rx) = watch::channel();
let worktree_id = cx.model_id() as u64;
- for (path, summary) in self.diagnostic_summaries.iter() {
- if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
- project_id,
- worktree_id,
- summary: Some(summary.to_proto(&path.0)),
- }) {
- return Task::ready(Err(e));
+ for (path, summaries) in &self.diagnostic_summaries {
+ for (&server_id, summary) in summaries {
+ if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
+ project_id,
+ worktree_id,
+ summary: Some(summary.to_proto(server_id, &path)),
+ }) {
+ return Task::ready(Err(e));
+ }
}
}
@@ -1109,15 +1145,24 @@ impl RemoteWorktree {
path: Arc<Path>,
summary: &proto::DiagnosticSummary,
) {
+ let server_id = LanguageServerId(summary.language_server_id as usize);
let summary = DiagnosticSummary {
- language_server_id: summary.language_server_id as usize,
error_count: summary.error_count as usize,
warning_count: summary.warning_count as usize,
};
+
if summary.is_empty() {
- self.diagnostic_summaries.remove(&PathKey(path));
+ if let Some(summaries) = self.diagnostic_summaries.get_mut(&path) {
+ summaries.remove(&server_id);
+ if summaries.is_empty() {
+ self.diagnostic_summaries.remove(&path);
+ }
+ }
} else {
- self.diagnostic_summaries.insert(PathKey(path), summary);
+ self.diagnostic_summaries
+ .entry(path)
+ .or_default()
+ .insert(server_id, summary);
}
}
@@ -9,7 +9,7 @@ path = "src/rope.rs"
[dependencies]
bromberg_sl2 = { git = "https://github.com/zed-industries/bromberg_sl2", rev = "950bc5482c216c395049ae33ae4501e08975f17f" }
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
sum_tree = { path = "../sum_tree" }
arrayvec = "0.7.1"
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
@@ -684,9 +684,10 @@ message SearchProjectResponse {
}
message CodeAction {
- Anchor start = 1;
- Anchor end = 2;
- bytes lsp_action = 3;
+ uint64 server_id = 1;
+ Anchor start = 2;
+ Anchor end = 3;
+ bytes lsp_action = 4;
}
message ProjectTransaction {
@@ -860,7 +861,8 @@ message IncomingContactRequest {
message UpdateDiagnostics {
uint32 replica_id = 1;
uint32 lamport_timestamp = 2;
- repeated Diagnostic diagnostics = 3;
+ uint64 server_id = 3;
+ repeated Diagnostic diagnostics = 4;
}
message Follow {
@@ -6,4 +6,4 @@ pub use conn::Connection;
pub use peer::*;
mod macros;
-pub const PROTOCOL_VERSION: u32 = 51;
+pub const PROTOCOL_VERSION: u32 = 52;
@@ -25,7 +25,7 @@ log = { version = "0.4.16", features = ["kv_unstable_serde"] }
postage = { workspace = true }
serde = { workspace = true }
serde_derive = { workspace = true }
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
smol = "1.2"
[dev-dependencies]
@@ -252,6 +252,7 @@ pub struct TerminalSettings {
pub working_directory: Option<WorkingDirectory>,
pub font_size: Option<f32>,
pub font_family: Option<String>,
+ pub line_height: Option<TerminalLineHeight>,
pub font_features: Option<fonts::Features>,
pub env: Option<HashMap<String, String>>,
pub blinking: Option<TerminalBlink>,
@@ -260,6 +261,25 @@ pub struct TerminalSettings {
pub copy_on_select: Option<bool>,
}
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, Default)]
+#[serde(rename_all = "snake_case")]
+pub enum TerminalLineHeight {
+ #[default]
+ Comfortable,
+ Standard,
+ Custom(f32),
+}
+
+impl TerminalLineHeight {
+ fn value(&self) -> f32 {
+ match self {
+ TerminalLineHeight::Comfortable => 1.618,
+ TerminalLineHeight::Standard => 1.3,
+ TerminalLineHeight::Custom(line_height) => *line_height,
+ }
+ }
+}
+
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum TerminalBlink {
@@ -316,6 +336,14 @@ impl Default for WorkingDirectory {
}
}
+impl TerminalSettings {
+ fn line_height(&self) -> Option<f32> {
+ self.line_height
+ .to_owned()
+ .map(|line_height| line_height.value())
+ }
+}
+
#[derive(PartialEq, Eq, Debug, Default, Copy, Clone, Hash, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum DockAnchor {
@@ -640,16 +668,6 @@ impl Settings {
})
}
- fn terminal_setting<F, R: Default + Clone>(&self, f: F) -> R
- where
- F: Fn(&TerminalSettings) -> Option<&R>,
- {
- f(&self.terminal_overrides)
- .or_else(|| f(&self.terminal_defaults))
- .cloned()
- .unwrap_or_else(|| R::default())
- }
-
pub fn telemetry(&self) -> TelemetrySettings {
TelemetrySettings {
diagnostics: Some(self.telemetry_diagnostics()),
@@ -671,20 +689,33 @@ impl Settings {
.expect("missing default")
}
+ fn terminal_setting<F, R>(&self, f: F) -> R
+ where
+ F: Fn(&TerminalSettings) -> Option<R>,
+ {
+ None.or_else(|| f(&self.terminal_overrides))
+ .or_else(|| f(&self.terminal_defaults))
+ .expect("missing default")
+ }
+
+ pub fn terminal_line_height(&self) -> f32 {
+ self.terminal_setting(|terminal_setting| terminal_setting.line_height())
+ }
+
pub fn terminal_scroll(&self) -> AlternateScroll {
- self.terminal_setting(|terminal_setting| terminal_setting.alternate_scroll.as_ref())
+ self.terminal_setting(|terminal_setting| terminal_setting.alternate_scroll.to_owned())
}
pub fn terminal_shell(&self) -> Shell {
- self.terminal_setting(|terminal_setting| terminal_setting.shell.as_ref())
+ self.terminal_setting(|terminal_setting| terminal_setting.shell.to_owned())
}
pub fn terminal_env(&self) -> HashMap<String, String> {
- self.terminal_setting(|terminal_setting| terminal_setting.env.as_ref())
+ self.terminal_setting(|terminal_setting| terminal_setting.env.to_owned())
}
pub fn terminal_strategy(&self) -> WorkingDirectory {
- self.terminal_setting(|terminal_setting| terminal_setting.working_directory.as_ref())
+ self.terminal_setting(|terminal_setting| terminal_setting.working_directory.to_owned())
}
#[cfg(any(test, feature = "test-support"))]
@@ -10,4 +10,4 @@ doctest = false
[dependencies]
anyhow = "1.0"
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
@@ -17,7 +17,7 @@ theme = { path = "../theme" }
util = { path = "../util" }
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "a51dbe25d67e84d6ed4261e640d3954fbdd9be45" }
procinfo = { git = "https://github.com/zed-industries/wezterm", rev = "5cd757e5f2eb039ed0c6bb6512223e69d5efc64d", default-features = false }
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
smol = "1.2.5"
mio-extras = "2.0.6"
futures = "0.3"
@@ -21,7 +21,7 @@ workspace = { path = "../workspace" }
db = { path = "../db" }
procinfo = { git = "https://github.com/zed-industries/wezterm", rev = "5cd757e5f2eb039ed0c6bb6512223e69d5efc64d", default-features = false }
terminal = { path = "../terminal" }
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
smol = "1.2.5"
mio-extras = "2.0.6"
futures = "0.3"
@@ -577,7 +577,7 @@ impl Drawable<TerminalView> for TerminalElement {
let selection_color = settings.theme.editor.selection.selection;
let match_color = settings.theme.search.match_background;
let dimensions = {
- let line_height = font_cache.line_height(text_style.font_size);
+ let line_height = text_style.font_size * settings.terminal_line_height();
let cell_width = font_cache.em_advance(text_style.font_id, text_style.font_size);
TerminalSize::new(line_height, cell_width, constraint.max)
};
@@ -24,7 +24,7 @@ log = { version = "0.4.16", features = ["kv_unstable_serde"] }
parking_lot = "0.11"
postage = { workspace = true }
rand = { version = "0.8.3", optional = true }
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
util = { path = "../util" }
regex = "1.5"
@@ -16,4 +16,4 @@ settings = { path = "../settings" }
workspace = { path = "../workspace" }
project = { path = "../project" }
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
@@ -47,7 +47,7 @@ postage = { workspace = true }
serde = { workspace = true }
serde_derive = { workspace = true }
serde_json = { workspace = true }
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
indoc = "1.0.4"
uuid = { version = "1.1.2", features = ["v4"] }
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor."
edition = "2021"
name = "zed"
-version = "0.83.0"
+version = "0.84.0"
publish = false
[lib]
@@ -96,7 +96,7 @@ serde_derive = { workspace = true }
serde_json = { workspace = true }
serde_path_to_error = "0.1.4"
simplelog = "0.9"
-smallvec = { version = "1.6", features = ["union"] }
+smallvec = { workspace = true }
smol = "1.2.5"
tempdir = { version = "0.3.7" }
thiserror = "1.0.29"
@@ -37,121 +37,107 @@ pub fn init(
themes: Arc<ThemeRegistry>,
node_runtime: Arc<NodeRuntime>,
) {
- for (name, grammar, lsp_adapter) in [
+ fn adapter_arc(adapter: impl LspAdapter) -> Arc<dyn LspAdapter> {
+ Arc::new(adapter)
+ }
+
+ let languages_list = [
(
"c",
tree_sitter_c::language(),
- Some(Arc::new(c::CLspAdapter) as Arc<dyn LspAdapter>),
+ vec![adapter_arc(c::CLspAdapter)],
),
(
"cpp",
tree_sitter_cpp::language(),
- Some(Arc::new(c::CLspAdapter)),
- ),
- (
- "css",
- tree_sitter_css::language(),
- None, //
+ vec![adapter_arc(c::CLspAdapter)],
),
+ ("css", tree_sitter_css::language(), vec![]),
(
"elixir",
tree_sitter_elixir::language(),
- Some(Arc::new(elixir::ElixirLspAdapter)),
+ vec![adapter_arc(elixir::ElixirLspAdapter)],
),
(
"go",
tree_sitter_go::language(),
- Some(Arc::new(go::GoLspAdapter)),
+ vec![adapter_arc(go::GoLspAdapter)],
),
(
"json",
tree_sitter_json::language(),
- Some(Arc::new(json::JsonLspAdapter::new(
+ vec![adapter_arc(json::JsonLspAdapter::new(
node_runtime.clone(),
languages.clone(),
themes.clone(),
- ))),
- ),
- (
- "markdown",
- tree_sitter_markdown::language(),
- None, //
+ ))],
),
+ ("markdown", tree_sitter_markdown::language(), vec![]),
(
"python",
tree_sitter_python::language(),
- Some(Arc::new(python::PythonLspAdapter::new(
+ vec![adapter_arc(python::PythonLspAdapter::new(
node_runtime.clone(),
- ))),
+ ))],
),
(
"rust",
tree_sitter_rust::language(),
- Some(Arc::new(rust::RustLspAdapter)),
- ),
- (
- "toml",
- tree_sitter_toml::language(),
- None, //
+ vec![adapter_arc(rust::RustLspAdapter)],
),
+ ("toml", tree_sitter_toml::language(), vec![]),
(
"tsx",
tree_sitter_typescript::language_tsx(),
- Some(Arc::new(typescript::TypeScriptLspAdapter::new(
+ vec![adapter_arc(typescript::TypeScriptLspAdapter::new(
node_runtime.clone(),
- ))),
+ ))],
),
(
"typescript",
tree_sitter_typescript::language_typescript(),
- Some(Arc::new(typescript::TypeScriptLspAdapter::new(
+ vec![adapter_arc(typescript::TypeScriptLspAdapter::new(
node_runtime.clone(),
- ))),
+ ))],
),
(
"javascript",
tree_sitter_typescript::language_tsx(),
- Some(Arc::new(typescript::TypeScriptLspAdapter::new(
+ vec![adapter_arc(typescript::TypeScriptLspAdapter::new(
node_runtime.clone(),
- ))),
+ ))],
),
(
"html",
tree_sitter_html::language(),
- Some(Arc::new(html::HtmlLspAdapter::new(node_runtime.clone()))),
+ vec![adapter_arc(html::HtmlLspAdapter::new(node_runtime.clone()))],
),
(
"ruby",
tree_sitter_ruby::language(),
- Some(Arc::new(ruby::RubyLanguageServer)),
+ vec![adapter_arc(ruby::RubyLanguageServer)],
),
(
"erb",
tree_sitter_embedded_template::language(),
- Some(Arc::new(ruby::RubyLanguageServer)),
- ),
- (
- "scheme",
- tree_sitter_scheme::language(),
- None, //
- ),
- (
- "racket",
- tree_sitter_racket::language(),
- None, //
+ vec![adapter_arc(ruby::RubyLanguageServer)],
),
+ ("scheme", tree_sitter_scheme::language(), vec![]),
+ ("racket", tree_sitter_racket::language(), vec![]),
(
"lua",
tree_sitter_lua::language(),
- Some(Arc::new(lua::LuaLspAdapter)),
+ vec![adapter_arc(lua::LuaLspAdapter)],
),
(
"yaml",
tree_sitter_yaml::language(),
- Some(Arc::new(yaml::YamlLspAdapter::new(node_runtime.clone()))),
+ vec![adapter_arc(yaml::YamlLspAdapter::new(node_runtime.clone()))],
),
- ] {
- languages.register(name, load_config(name), grammar, lsp_adapter, load_queries);
+ ];
+
+ for (name, grammar, lsp_adapters) in languages_list {
+ languages.register(name, load_config(name), grammar, lsp_adapters, load_queries);
}
}
@@ -163,7 +149,7 @@ pub async fn language(
) -> Arc<Language> {
Arc::new(
Language::new(load_config(name), Some(grammar))
- .with_lsp_adapter(lsp_adapter)
+ .with_lsp_adapters(lsp_adapter.into_iter().collect())
.await
.with_queries(load_queries(name))
.unwrap(),
@@ -1,21 +1,23 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
-use futures::StreamExt;
+use futures::{future::BoxFuture, FutureExt};
+use gpui::AppContext;
use language::{LanguageServerBinary, LanguageServerName, LspAdapter};
use lsp::CodeActionKind;
use node_runtime::NodeRuntime;
-use serde_json::json;
+use serde_json::{json, Value};
use smol::fs;
use std::{
any::Any,
ffi::OsString,
+ future,
path::{Path, PathBuf},
sync::Arc,
};
use util::http::HttpClient;
use util::ResultExt;
-fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
+fn typescript_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
vec![
server_path.into(),
"--stdio".into(),
@@ -24,6 +26,10 @@ fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
]
}
+fn eslint_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
+ vec![server_path.into(), "--stdio".into()]
+}
+
pub struct TypeScriptLspAdapter {
node: Arc<NodeRuntime>,
}
@@ -37,7 +43,7 @@ impl TypeScriptLspAdapter {
}
}
-struct Versions {
+struct TypeScriptVersions {
typescript_version: String,
server_version: String,
}
@@ -52,7 +58,7 @@ impl LspAdapter for TypeScriptLspAdapter {
&self,
_: Arc<dyn HttpClient>,
) -> Result<Box<dyn 'static + Send + Any>> {
- Ok(Box::new(Versions {
+ Ok(Box::new(TypeScriptVersions {
typescript_version: self.node.npm_package_latest_version("typescript").await?,
server_version: self
.node
@@ -67,7 +73,7 @@ impl LspAdapter for TypeScriptLspAdapter {
_: Arc<dyn HttpClient>,
container_dir: PathBuf,
) -> Result<LanguageServerBinary> {
- let versions = versions.downcast::<Versions>().unwrap();
+ let versions = versions.downcast::<TypeScriptVersions>().unwrap();
let server_path = container_dir.join(Self::NEW_SERVER_PATH);
if fs::metadata(&server_path).await.is_err() {
@@ -87,37 +93,28 @@ impl LspAdapter for TypeScriptLspAdapter {
Ok(LanguageServerBinary {
path: self.node.binary_path().await?,
- arguments: server_binary_arguments(&server_path),
+ arguments: typescript_server_binary_arguments(&server_path),
})
}
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
(|| async move {
- let mut last_version_dir = None;
- let mut entries = fs::read_dir(&container_dir).await?;
- while let Some(entry) = entries.next().await {
- let entry = entry?;
- if entry.file_type().await?.is_dir() {
- last_version_dir = Some(entry.path());
- }
- }
- let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
- let old_server_path = last_version_dir.join(Self::OLD_SERVER_PATH);
- let new_server_path = last_version_dir.join(Self::NEW_SERVER_PATH);
+ let old_server_path = container_dir.join(Self::OLD_SERVER_PATH);
+ let new_server_path = container_dir.join(Self::NEW_SERVER_PATH);
if new_server_path.exists() {
Ok(LanguageServerBinary {
path: self.node.binary_path().await?,
- arguments: server_binary_arguments(&new_server_path),
+ arguments: typescript_server_binary_arguments(&new_server_path),
})
} else if old_server_path.exists() {
Ok(LanguageServerBinary {
path: self.node.binary_path().await?,
- arguments: server_binary_arguments(&old_server_path),
+ arguments: typescript_server_binary_arguments(&old_server_path),
})
} else {
Err(anyhow!(
"missing executable in directory {:?}",
- last_version_dir
+ container_dir
))
}
})()
@@ -170,6 +167,136 @@ impl LspAdapter for TypeScriptLspAdapter {
}
}
+pub struct EsLintLspAdapter {
+ node: Arc<NodeRuntime>,
+}
+
+impl EsLintLspAdapter {
+ const SERVER_PATH: &'static str =
+ "node_modules/vscode-langservers-extracted/lib/eslint-language-server/eslintServer.js";
+
+ #[allow(unused)]
+ pub fn new(node: Arc<NodeRuntime>) -> Self {
+ EsLintLspAdapter { node }
+ }
+}
+
+#[async_trait]
+impl LspAdapter for EsLintLspAdapter {
+ fn workspace_configuration(&self, _: &mut AppContext) -> Option<BoxFuture<'static, Value>> {
+ Some(
+ future::ready(json!({
+ "": {
+ "validate": "on",
+ "packageManager": "npm",
+ "useESLintClass": false,
+ "experimental": {
+ "useFlatConfig": false
+ },
+ "codeActionOnSave": {
+ "mode": "all"
+ },
+ "format": false,
+ "quiet": false,
+ "onIgnoredFiles": "off",
+ "options": {},
+ "rulesCustomizations": [],
+ "run": "onType",
+ "problems": {
+ "shortenToSingleLine": false
+ },
+ "nodePath": null,
+ "workspaceFolder": {
+ "name": "testing_ts",
+ "uri": "file:///Users/julia/Stuff/testing_ts"
+ },
+ "codeAction": {
+ "disableRuleComment": {
+ "enable": true,
+ "location": "separateLine",
+ "commentStyle": "line"
+ },
+ "showDocumentation": {
+ "enable": true
+ }
+ }
+ }
+ }))
+ .boxed(),
+ )
+ }
+
+ async fn name(&self) -> LanguageServerName {
+ LanguageServerName("eslint".into())
+ }
+
+ async fn fetch_latest_server_version(
+ &self,
+ _: Arc<dyn HttpClient>,
+ ) -> Result<Box<dyn 'static + Send + Any>> {
+ Ok(Box::new(
+ self.node
+ .npm_package_latest_version("vscode-langservers-extracted")
+ .await?,
+ ))
+ }
+
+ async fn fetch_server_binary(
+ &self,
+ versions: Box<dyn 'static + Send + Any>,
+ _: Arc<dyn HttpClient>,
+ container_dir: PathBuf,
+ ) -> Result<LanguageServerBinary> {
+ let version = versions.downcast::<String>().unwrap();
+ let server_path = container_dir.join(Self::SERVER_PATH);
+
+ if fs::metadata(&server_path).await.is_err() {
+ self.node
+ .npm_install_packages(
+ [("vscode-langservers-extracted", version.as_str())],
+ &container_dir,
+ )
+ .await?;
+ }
+
+ Ok(LanguageServerBinary {
+ path: self.node.binary_path().await?,
+ arguments: eslint_server_binary_arguments(&server_path),
+ })
+ }
+
+ async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ (|| async move {
+ let server_path = container_dir.join(Self::SERVER_PATH);
+ if server_path.exists() {
+ Ok(LanguageServerBinary {
+ path: self.node.binary_path().await?,
+ arguments: eslint_server_binary_arguments(&server_path),
+ })
+ } else {
+ Err(anyhow!(
+ "missing executable in directory {:?}",
+ container_dir
+ ))
+ }
+ })()
+ .await
+ .log_err()
+ }
+
+ async fn label_for_completion(
+ &self,
+ _item: &lsp::CompletionItem,
+ _language: &Arc<language::Language>,
+ ) -> Option<language::CodeLabel> {
+ None
+ }
+
+ async fn initialization_options(&self) -> Option<serde_json::Value> {
+ None
+ }
+}
+
#[cfg(test)]
mod tests {
use gpui::TestAppContext;
@@ -21,7 +21,7 @@ use log::LevelFilter;
use node_runtime::NodeRuntime;
use parking_lot::Mutex;
use project::Fs;
-use serde_json::json;
+use serde::{Deserialize, Serialize};
use settings::{
self, settings_file::SettingsFile, KeymapFileContent, Settings, SettingsFileContent,
WorkingDirectory,
@@ -317,6 +317,30 @@ fn init_logger() {
}
}
+#[derive(Serialize, Deserialize)]
+struct LocationData {
+ file: String,
+ line: u32,
+}
+
+#[derive(Serialize, Deserialize)]
+struct Panic {
+ thread: String,
+ payload: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ location_data: Option<LocationData>,
+ backtrace: Vec<String>,
+ // TODO
+ // stripped_backtrace: String,
+}
+
+#[derive(Serialize)]
+struct PanicRequest {
+ panic: Panic,
+ version: String,
+ token: String,
+}
+
fn init_panic_hook(app_version: String) {
let is_pty = stdout_is_a_pty();
panic::set_hook(Box::new(move |info| {
@@ -333,39 +357,38 @@ fn init_panic_hook(app_version: String) {
},
};
- let message = match info.location() {
- Some(location) => {
- format!(
- "thread '{}' panicked at '{}'\n{}:{}\n{:?}",
- thread,
- payload,
- location.file(),
- location.line(),
- backtrace
- )
- }
- None => format!(
- "thread '{}' panicked at '{}'\n{:?}",
- thread, payload, backtrace
- ),
+ let panic_data = Panic {
+ thread: thread.into(),
+ payload: payload.into(),
+ location_data: info.location().map(|location| LocationData {
+ file: location.file().into(),
+ line: location.line(),
+ }),
+ backtrace: format!("{:?}", backtrace)
+ .split("\n")
+ .map(|line| line.to_string())
+ .collect(),
+            // stripped_backtrace: None,
};
- if is_pty {
- eprintln!("{}", message);
- return;
- }
+ if let Some(panic_data_json) = serde_json::to_string_pretty(&panic_data).log_err() {
+ if is_pty {
+ eprintln!("{}", panic_data_json);
+ return;
+ }
- let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
- let panic_file_path =
- paths::LOGS_DIR.join(format!("zed-{}-{}.panic", app_version, timestamp));
- let panic_file = std::fs::OpenOptions::new()
- .append(true)
- .create(true)
- .open(&panic_file_path)
- .log_err();
- if let Some(mut panic_file) = panic_file {
- write!(&mut panic_file, "{}", message).log_err();
- panic_file.flush().log_err();
+ let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
+ let panic_file_path =
+ paths::LOGS_DIR.join(format!("zed-{}-{}.panic", app_version, timestamp));
+ let panic_file = std::fs::OpenOptions::new()
+ .append(true)
+ .create(true)
+ .open(&panic_file_path)
+ .log_err();
+ if let Some(mut panic_file) = panic_file {
+ write!(&mut panic_file, "{}", panic_data_json).log_err();
+ panic_file.flush().log_err();
+ }
}
}));
}
@@ -402,15 +425,17 @@ fn upload_previous_panics(http: Arc<dyn HttpClient>, cx: &mut AppContext) {
};
if diagnostics_telemetry {
- let text = smol::fs::read_to_string(&child_path)
+ let panic_data_text = smol::fs::read_to_string(&child_path)
.await
.context("error reading panic file")?;
- let body = serde_json::to_string(&json!({
- "text": text,
- "version": version,
- "token": ZED_SECRET_CLIENT_TOKEN,
- }))
+
+ let body = serde_json::to_string(&PanicRequest {
+ panic: serde_json::from_str(&panic_data_text)?,
+ version: version.to_string(),
+ token: ZED_SECRET_CLIENT_TOKEN.into(),
+ })
.unwrap();
+
let request = Request::post(&panic_report_url)
.redirect_policy(isahc::config::RedirectPolicy::Follow)
.header("Content-Type", "application/json")