From 2c6aeaed7c70289ca3b9898fd1cb1a8c5b80d374 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 21 Oct 2021 16:26:37 +0200 Subject: [PATCH 01/61] Start on integrating rust-analyzer Co-Authored-By: Nathan Sobo Co-Authored-By: Max Brunsfeld --- Cargo.lock | 40 +++++++ crates/gpui/src/executor.rs | 1 + crates/lsp/Cargo.toml | 15 +++ crates/lsp/src/lib.rs | 201 ++++++++++++++++++++++++++++++++ crates/project/Cargo.toml | 7 +- crates/project/src/lib.rs | 18 ++- crates/project_panel/src/lib.rs | 3 +- crates/workspace/src/lib.rs | 3 +- 8 files changed, 279 insertions(+), 9 deletions(-) create mode 100644 crates/lsp/Cargo.toml create mode 100644 crates/lsp/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index d751e25284bb178e6533652617633b83b9820eb6..c514dbfe1180ba9c2b6c5cf3cc6265837678024f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2949,6 +2949,34 @@ dependencies = [ "scoped-tls", ] +[[package]] +name = "lsp" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures", + "gpui", + "lsp-types", + "parking_lot", + "serde 1.0.125", + "serde_json 1.0.64", + "smol", + "util", +] + +[[package]] +name = "lsp-types" +version = "0.91.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be7801b458592d0998af808d97f6a85a6057af3aaf2a2a5c3c677702bbeb4ed7" +dependencies = [ + "bitflags 1.2.1", + "serde 1.0.125", + "serde_json 1.0.64", + "serde_repr", + "url", +] + [[package]] name = "lzw" version = "0.10.0" @@ -3762,6 +3790,7 @@ dependencies = [ "lazy_static", "libc", "log", + "lsp", "parking_lot", "postage", "rand 0.8.3", @@ -4571,6 +4600,17 @@ dependencies = [ "thiserror", ] +[[package]] +name = "serde_repr" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98d0516900518c29efa217c298fa1f4e6c6ffc85ae29fd7f4ee48f176e1a9ed5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "serde_urlencoded" version = "0.7.0" diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index 01338c8a0ac5f5d5a6e50b67500e088b12c52d7a..32ee8fc87ffc15fe2d6617ef774a0d740ea26e40 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -50,6 +50,7 @@ type AnyFuture = Pin>; type AnyLocalTask = async_task::Task>; +#[must_use] pub enum Task { Local { any_task: AnyLocalTask, diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..9d4c4850e99ca7ad6279230e8dd7f121199b2103 --- /dev/null +++ b/crates/lsp/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "lsp" +version = "0.1.0" +edition = "2018" + +[dependencies] +gpui = { path = "../gpui" } +util = { path = "../util" } +anyhow = "1.0" +futures = "0.3" +lsp-types = "0.91" +parking_lot = "0.11" +serde = { version = "1.0", features = ["derive"] } +serde_json = { version = "1.0", features = ["raw_value"] } +smol = "1.2" diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs new file mode 100644 index 0000000000000000000000000000000000000000..e05b435ba4ca716cebbcfd877748800e63ee00ba --- /dev/null +++ b/crates/lsp/src/lib.rs @@ -0,0 +1,201 @@ +use anyhow::{anyhow, Context, Result}; +use gpui::{executor, Task}; +use parking_lot::Mutex; +use serde::{Deserialize, Serialize}; +use serde_json::value::RawValue; +use smol::{ + channel, + io::{AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader}, + process::Command, +}; +use std::{ + collections::HashMap, + future::Future, + io::Write, + sync::{ + atomic::{AtomicUsize, Ordering::SeqCst}, + Arc, + }, +}; +use std::{path::Path, 
process::Stdio}; +use util::TryFutureExt; + +const JSON_RPC_VERSION: &'static str = "2.0"; +const CONTENT_LEN_HEADER: &'static str = "Content-Length: "; + +pub struct LanguageServer { + next_id: AtomicUsize, + outbound_tx: channel::Sender>, + response_handlers: Arc>>, + _input_task: Task>, + _output_task: Task>, +} + +type ResponseHandler = Box)>; + +#[derive(Serialize)] +struct Request { + jsonrpc: &'static str, + id: usize, + method: &'static str, + params: T, +} + +#[derive(Deserialize)] +struct Error { + message: String, +} + +#[derive(Deserialize)] +struct Notification<'a> { + method: String, + #[serde(borrow)] + params: &'a RawValue, +} + +#[derive(Deserialize)] +struct Response<'a> { + id: usize, + #[serde(default)] + error: Option, + #[serde(default, borrow)] + result: Option<&'a RawValue>, +} + +impl LanguageServer { + pub fn new(path: &Path, background: &executor::Background) -> Result> { + let mut server = Command::new(path) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::inherit()) + .spawn()?; + let mut stdin = server.stdin.take().unwrap(); + let mut stdout = BufReader::new(server.stdout.take().unwrap()); + let (outbound_tx, outbound_rx) = channel::unbounded::>(); + let response_handlers = Arc::new(Mutex::new(HashMap::::new())); + let _input_task = background.spawn( + { + let response_handlers = response_handlers.clone(); + async move { + let mut buffer = Vec::new(); + loop { + buffer.clear(); + + stdout.read_until(b'\n', &mut buffer).await?; + stdout.read_until(b'\n', &mut buffer).await?; + let message_len: usize = std::str::from_utf8(&buffer)? + .strip_prefix(CONTENT_LEN_HEADER) + .ok_or_else(|| anyhow!("invalid header"))? + .trim_end() + .parse()?; + + buffer.resize(message_len, 0); + stdout.read_exact(&mut buffer).await?; + if let Ok(Notification { .. }) = serde_json::from_slice(&buffer) { + } else if let Ok(Response { id, error, result }) = + serde_json::from_slice(&buffer) + { + if let Some(handler) = response_handlers.lock().remove(&id) { + if let Some(result) = result { + handler(Ok(result.get())); + } else if let Some(error) = error { + handler(Err(error)); + } + } + } else { + return Err(anyhow!( + "failed to deserialize message:\n{}", + std::str::from_utf8(&buffer)? 
+ )); + } + } + } + } + .log_err(), + ); + let _output_task = background.spawn( + async move { + let mut content_len_buffer = Vec::new(); + loop { + let message = outbound_rx.recv().await?; + write!(content_len_buffer, "{}", message.len()).unwrap(); + stdin.write_all(CONTENT_LEN_HEADER.as_bytes()).await?; + stdin.write_all(&content_len_buffer).await?; + stdin.write_all("\r\n\r\n".as_bytes()).await?; + stdin.write_all(&message).await?; + } + } + .log_err(), + ); + + let this = Arc::new(Self { + response_handlers, + next_id: Default::default(), + outbound_tx, + _input_task, + _output_task, + }); + let init = this.clone().init(); + background + .spawn(async move { + init.log_err().await; + }) + .detach(); + + Ok(this) + } + + async fn init(self: Arc) -> Result<()> { + let init_response = self + .request::(lsp_types::InitializeParams { + process_id: Default::default(), + root_path: Default::default(), + root_uri: Default::default(), + initialization_options: Default::default(), + capabilities: Default::default(), + trace: Default::default(), + workspace_folders: Default::default(), + client_info: Default::default(), + locale: Default::default(), + }) + .await?; + Ok(()) + } + + pub fn request( + self: &Arc, + params: T::Params, + ) -> impl Future> + where + T::Result: 'static + Send, + { + let id = self.next_id.fetch_add(1, SeqCst); + let message = serde_json::to_vec(&Request { + jsonrpc: JSON_RPC_VERSION, + id, + method: T::METHOD, + params, + }) + .unwrap(); + let mut response_handlers = self.response_handlers.lock(); + let (tx, rx) = smol::channel::bounded(1); + response_handlers.insert( + id, + Box::new(move |result| { + let response = match result { + Ok(response) => { + serde_json::from_str(response).context("failed to deserialize response") + } + Err(error) => Err(anyhow!("{}", error.message)), + }; + let _ = smol::block_on(tx.send(response)); + }), + ); + + let outbound_tx = self.outbound_tx.clone(); + async move { + outbound_tx.send(message).await?; + rx.recv().await? 
+ } + } +} diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index f7d87a4625299dc0164bcc5931f456c239e6e626..73959da5fcfa1005db686676a8e81a0059430cba 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -8,15 +8,15 @@ test-support = [] [dependencies] buffer = { path = "../buffer" } +client = { path = "../client" } clock = { path = "../clock" } fsevent = { path = "../fsevent" } fuzzy = { path = "../fuzzy" } gpui = { path = "../gpui" } -client = { path = "../client" } +lsp = { path = "../lsp" } +rpc = { path = "../rpc" } sum_tree = { path = "../sum_tree" } util = { path = "../util" } -rpc = { path = "../rpc" } - anyhow = "1.0.38" async-trait = "0.1" futures = "0.3" @@ -35,6 +35,5 @@ toml = "0.5" client = { path = "../client", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } rpc = { path = "../rpc", features = ["test-support"] } - rand = "0.8.3" tempdir = { version = "0.3.7" } diff --git a/crates/project/src/lib.rs b/crates/project/src/lib.rs index 184dfd4d9cc54fa3a9f9bf1397ccd917aa1e17fa..d4e41b4f28fc1af38398545e39a25b6546b46a29 100644 --- a/crates/project/src/lib.rs +++ b/crates/project/src/lib.rs @@ -7,7 +7,8 @@ use buffer::LanguageRegistry; use client::Client; use futures::Future; use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; -use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; +use gpui::{executor, AppContext, Entity, ModelContext, ModelHandle, Task}; +use lsp::LanguageServer; use std::{ path::Path, sync::{atomic::AtomicBool, Arc}, @@ -23,6 +24,7 @@ pub struct Project { languages: Arc, client: Arc, fs: Arc, + language_server: Arc, } pub enum Event { @@ -43,13 +45,23 @@ pub struct ProjectEntry { } impl Project { - pub fn new(languages: Arc, rpc: Arc, fs: Arc) -> Self { + pub fn new( + languages: Arc, + rpc: Arc, + fs: Arc, + background: &executor::Background, + ) -> Self { Self { worktrees: Default::default(), active_entry: None, languages, client: rpc, fs, + language_server: LanguageServer::new( + Path::new("/Users/as-cii/Downloads/rust-analyzer-x86_64-apple-darwin"), + background, + ) + .unwrap(), } } @@ -408,6 +420,6 @@ mod tests { let languages = Arc::new(LanguageRegistry::new()); let fs = Arc::new(RealFs); let rpc = client::Client::new(); - cx.add_model(|_| Project::new(languages, rpc, fs)) + cx.add_model(|cx| Project::new(languages, rpc, fs, cx.background())) } } diff --git a/crates/project_panel/src/lib.rs b/crates/project_panel/src/lib.rs index 385b7dbca2de231d2e9417483da055df442ac2a5..422484e74d8809b52913fee24e9f052ca539d6c4 100644 --- a/crates/project_panel/src/lib.rs +++ b/crates/project_panel/src/lib.rs @@ -617,11 +617,12 @@ mod tests { ) .await; - let project = cx.add_model(|_| { + let project = cx.add_model(|cx| { Project::new( params.languages.clone(), params.client.clone(), params.fs.clone(), + cx.background(), ) }); let root1 = project diff --git a/crates/workspace/src/lib.rs b/crates/workspace/src/lib.rs index c227ee61bd892ea4dbb2f1a0894469abe69a0178..9fafd433bbe7b9fff78ce90543c047e6a8799331 100644 --- a/crates/workspace/src/lib.rs +++ b/crates/workspace/src/lib.rs @@ -322,11 +322,12 @@ pub struct Workspace { impl Workspace { pub fn new(params: &WorkspaceParams, cx: &mut ViewContext) -> Self { - let project = cx.add_model(|_| { + let project = cx.add_model(|cx| { Project::new( params.languages.clone(), params.client.clone(), params.fs.clone(), + cx.background(), ) }); cx.observe(&project, |_, _, cx| cx.notify()).detach(); From 
715faaacebe80f600698ab8bf7a3fdf5e2408b4d Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 21 Oct 2021 19:27:10 +0200 Subject: [PATCH 02/61] WIP --- crates/lsp/build.rs | 36 +++++++++++++++++++ crates/lsp/src/lib.rs | 81 ++++++++++++++++++++++++++++++------------- 2 files changed, 93 insertions(+), 24 deletions(-) create mode 100644 crates/lsp/build.rs diff --git a/crates/lsp/build.rs b/crates/lsp/build.rs new file mode 100644 index 0000000000000000000000000000000000000000..a29aa4b016bcff06ca57bb6d58f8c829ae3f2389 --- /dev/null +++ b/crates/lsp/build.rs @@ -0,0 +1,36 @@ +use std::{ + env, + fs::{self, Permissions}, + os::unix::prelude::PermissionsExt, + process::Command, +}; + +fn main() { + let target = env::var("TARGET").unwrap(); + let rust_analyzer_filename = format!("rust-analyzer-{}", target); + let rust_analyzer_url = format!( + "https://github.com/rust-analyzer/rust-analyzer/releases/download/2021-10-18/{}.gz", + rust_analyzer_filename + ); + println!( + "cargo:rustc-env=RUST_ANALYZER_FILENAME={}", + rust_analyzer_filename + ); + + let target_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); + let rust_analyzer_target_path = format!("{}/{}", target_dir, rust_analyzer_filename); + assert!( + Command::new("/bin/sh") + .arg("-c") + .arg(format!( + "curl -L {} | gunzip > {}", + rust_analyzer_url, rust_analyzer_target_path + )) + .status() + .unwrap() + .success(), + "failed to download rust-analyzer" + ); + fs::set_permissions(rust_analyzer_target_path, Permissions::from_mode(0x755)) + .expect("failed to make rust-analyzer executable"); +} diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index e05b435ba4ca716cebbcfd877748800e63ee00ba..53e85a373a0cc32f31bcd47582e2f0eeaafbe4fb 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -42,24 +42,32 @@ struct Request { } #[derive(Deserialize)] -struct Error { - message: String, +struct Response<'a> { + id: usize, + #[serde(default)] + error: Option, + #[serde(default, borrow)] + result: Option<&'a RawValue>, +} + +#[derive(Serialize)] +struct OutboundNotification { + jsonrpc: &'static str, + method: &'static str, + params: T, } #[derive(Deserialize)] -struct Notification<'a> { - method: String, +struct InboundNotification<'a> { + #[serde(borrow)] + method: &'a str, #[serde(borrow)] params: &'a RawValue, } #[derive(Deserialize)] -struct Response<'a> { - id: usize, - #[serde(default)] - error: Option, - #[serde(default, borrow)] - result: Option<&'a RawValue>, +struct Error { + message: String, } impl LanguageServer { @@ -91,7 +99,7 @@ impl LanguageServer { buffer.resize(message_len, 0); stdout.read_exact(&mut buffer).await?; - if let Ok(Notification { .. }) = serde_json::from_slice(&buffer) { + if let Ok(InboundNotification { .. 
}) = serde_json::from_slice(&buffer) { } else if let Ok(Response { id, error, result }) = serde_json::from_slice(&buffer) { @@ -146,19 +154,19 @@ impl LanguageServer { } async fn init(self: Arc) -> Result<()> { - let init_response = self - .request::(lsp_types::InitializeParams { - process_id: Default::default(), - root_path: Default::default(), - root_uri: Default::default(), - initialization_options: Default::default(), - capabilities: Default::default(), - trace: Default::default(), - workspace_folders: Default::default(), - client_info: Default::default(), - locale: Default::default(), - }) - .await?; + self.request::(lsp_types::InitializeParams { + process_id: Default::default(), + root_path: Default::default(), + root_uri: Default::default(), + initialization_options: Default::default(), + capabilities: Default::default(), + trace: Default::default(), + workspace_folders: Default::default(), + client_info: Default::default(), + locale: Default::default(), + }) + .await?; + self.notify::(lsp_types::InitializedParams {})?; Ok(()) } @@ -198,4 +206,29 @@ impl LanguageServer { rx.recv().await? } } + + pub fn notify( + &self, + params: T::Params, + ) -> Result<()> { + let message = serde_json::to_vec(&OutboundNotification { + jsonrpc: JSON_RPC_VERSION, + method: T::METHOD, + params, + }) + .unwrap(); + smol::block_on(self.outbound_tx.send(message))?; + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::TestAppContext; + + #[gpui::test] + async fn test_basic(cx: TestAppContext) { + let server = LanguageServer::new(); + } } From 59ed535cdf8e839940669ba067b424adef37500a Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 25 Oct 2021 11:02:35 +0200 Subject: [PATCH 03/61] Implement a more robust way of locating rust-analyzer When bundled, we will retrieve it out of the `Resources` folder. Locally, we're expected to run `script/download-rust-analyzer` and put `vendor/bin` in our $PATH. 
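For local development that looks roughly like this (illustrative only; the `export` line is just one way to get `vendor/bin` onto $PATH — the CI workflow below does the equivalent by appending to $GITHUB_PATH):

    script/download-rust-analyzer
    export PATH="$PWD/vendor/bin:$PATH"

When running from a bundle, `LanguageServer::rust` instead resolves the binary out of the app's Resources directory via the new `path_for_resource` platform method.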
--- .github/workflows/ci.yml | 8 ++++++ .gitignore | 1 + crates/gpui/src/platform.rs | 2 ++ crates/gpui/src/platform/mac/platform.rs | 28 +++++++++++++++++- crates/gpui/src/platform/test.rs | 6 +++- crates/lsp/Cargo.toml | 3 ++ crates/lsp/build.rs | 36 ++++-------------------- crates/lsp/src/lib.rs | 26 +++++++++++------ crates/project/src/lib.rs | 10 ++----- crates/project_panel/src/lib.rs | 2 +- crates/workspace/src/lib.rs | 2 +- script/bundle | 10 +++++-- script/download-rust-analyzer | 15 ++++++++++ 13 files changed, 97 insertions(+), 52 deletions(-) create mode 100755 script/download-rust-analyzer diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cfbdc2ca02f89119c8953c0f9733daa2b60402ee..fd92792714e338f74c66a4cec7822686644bac89 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -32,6 +32,11 @@ jobs: with: clean: false + - name: Download rust-analyzer + run: | + script/download-rust-analyzer + echo "$PWD/vendor/bin" >> $GITHUB_PATH + - name: Run tests run: cargo test --workspace --no-fail-fast @@ -63,6 +68,9 @@ jobs: with: clean: false + - name: Download rust-analyzer + run: script/download-rust-analyzer + - name: Create app bundle run: script/bundle diff --git a/.gitignore b/.gitignore index d096dc01da8a69d51c1a00fc44a85b5f67ebcbd8..379d197a5cf394aa9a2f89aae8a12472e631ba57 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,4 @@ /script/node_modules /server/.env.toml /server/static/styles.css +/vendor/bin diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index cd972021a57c0084c38145b7222813be515d8fa2..6f776524c261eb0891d5093fb00edd7748bc3850 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -53,6 +53,8 @@ pub trait Platform: Send + Sync { fn set_cursor_style(&self, style: CursorStyle); fn local_timezone(&self) -> UtcOffset; + + fn path_for_resource(&self, name: Option<&str>, extension: Option<&str>) -> Result; } pub(crate) trait ForegroundPlatform { diff --git a/crates/gpui/src/platform/mac/platform.rs b/crates/gpui/src/platform/mac/platform.rs index c956a199989ea35cfc62a678caac03240d44775d..8a4dc8cdf9184a222f9673ffa0c3aee993c184af 100644 --- a/crates/gpui/src/platform/mac/platform.rs +++ b/crates/gpui/src/platform/mac/platform.rs @@ -14,7 +14,9 @@ use cocoa::{ NSPasteboardTypeString, NSSavePanel, NSWindow, }, base::{id, nil, selector, YES}, - foundation::{NSArray, NSAutoreleasePool, NSData, NSInteger, NSString, NSURL}, + foundation::{ + NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSString, NSUInteger, NSURL, + }, }; use core_foundation::{ base::{CFType, CFTypeRef, OSStatus, TCFType as _}, @@ -45,6 +47,9 @@ use std::{ }; use time::UtcOffset; +#[allow(non_upper_case_globals)] +const NSUTF8StringEncoding: NSUInteger = 4; + const MAC_PLATFORM_IVAR: &'static str = "platform"; static mut APP_CLASS: *const Class = ptr::null(); static mut APP_DELEGATE_CLASS: *const Class = ptr::null(); @@ -588,6 +593,27 @@ impl platform::Platform for MacPlatform { UtcOffset::from_whole_seconds(seconds_from_gmt.try_into().unwrap()).unwrap() } } + + fn path_for_resource(&self, name: Option<&str>, extension: Option<&str>) -> Result { + unsafe { + let bundle: id = NSBundle::mainBundle(); + if bundle.is_null() { + Err(anyhow!("app is not running inside a bundle")) + } else { + let name = name.map_or(nil, |name| ns_string(name)); + let extension = extension.map_or(nil, |extension| ns_string(extension)); + let path: id = msg_send![bundle, pathForResource: name ofType: extension]; + if path.is_null() { + 
Err(anyhow!("resource could not be found")) + } else { + let len = msg_send![path, lengthOfBytesUsingEncoding: NSUTF8StringEncoding]; + let bytes = path.UTF8String() as *const u8; + let path = str::from_utf8(slice::from_raw_parts(bytes, len)).unwrap(); + Ok(PathBuf::from(path)) + } + } + } + } } unsafe fn get_foreground_platform(object: &mut Object) -> &MacForegroundPlatform { diff --git a/crates/gpui/src/platform/test.rs b/crates/gpui/src/platform/test.rs index d705a277e54f6d278d5d8fb02ad2c9ccf28014fb..c866a5d23fc68d6ba3e61247256f56381e841254 100644 --- a/crates/gpui/src/platform/test.rs +++ b/crates/gpui/src/platform/test.rs @@ -1,6 +1,6 @@ use super::CursorStyle; use crate::{AnyAction, ClipboardItem}; -use anyhow::Result; +use anyhow::{anyhow, Result}; use parking_lot::Mutex; use pathfinder_geometry::vector::Vector2F; use std::{ @@ -148,6 +148,10 @@ impl super::Platform for Platform { fn local_timezone(&self) -> UtcOffset { UtcOffset::UTC } + + fn path_for_resource(&self, _name: Option<&str>, _extension: Option<&str>) -> Result { + Err(anyhow!("app not running inside a bundle")) + } } impl Window { diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index 9d4c4850e99ca7ad6279230e8dd7f121199b2103..897c8c8224fa3d95e8f6cfd7ef6e1c8974eda5af 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -13,3 +13,6 @@ parking_lot = "0.11" serde = { version = "1.0", features = ["derive"] } serde_json = { version = "1.0", features = ["raw_value"] } smol = "1.2" + +[dev-dependencies] +gpui = { path = "../gpui", features = ["test-support"] } diff --git a/crates/lsp/build.rs b/crates/lsp/build.rs index a29aa4b016bcff06ca57bb6d58f8c829ae3f2389..0aedf6d1d28e7bba9cd055cb80c86bd6ef5247af 100644 --- a/crates/lsp/build.rs +++ b/crates/lsp/build.rs @@ -1,36 +1,10 @@ -use std::{ - env, - fs::{self, Permissions}, - os::unix::prelude::PermissionsExt, - process::Command, -}; +use std::env; fn main() { let target = env::var("TARGET").unwrap(); - let rust_analyzer_filename = format!("rust-analyzer-{}", target); - let rust_analyzer_url = format!( - "https://github.com/rust-analyzer/rust-analyzer/releases/download/2021-10-18/{}.gz", - rust_analyzer_filename - ); - println!( - "cargo:rustc-env=RUST_ANALYZER_FILENAME={}", - rust_analyzer_filename - ); + println!("cargo:rustc-env=TARGET={}", target); - let target_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); - let rust_analyzer_target_path = format!("{}/{}", target_dir, rust_analyzer_filename); - assert!( - Command::new("/bin/sh") - .arg("-c") - .arg(format!( - "curl -L {} | gunzip > {}", - rust_analyzer_url, rust_analyzer_target_path - )) - .status() - .unwrap() - .success(), - "failed to download rust-analyzer" - ); - fs::set_permissions(rust_analyzer_target_path, Permissions::from_mode(0x755)) - .expect("failed to make rust-analyzer executable"); + if let Ok(bundled) = env::var("BUNDLE") { + println!("cargo:rustc-env=BUNDLE={}", bundled); + } } diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index 53e85a373a0cc32f31bcd47582e2f0eeaafbe4fb..c2fdf751829b4f93006f6100e7d378a4f663218b 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -1,5 +1,5 @@ use anyhow::{anyhow, Context, Result}; -use gpui::{executor, Task}; +use gpui::{executor, AppContext, Task}; use parking_lot::Mutex; use serde::{Deserialize, Serialize}; use serde_json::value::RawValue; @@ -71,6 +71,21 @@ struct Error { } impl LanguageServer { + pub fn rust(cx: &AppContext) -> Result> { + const BUNDLE: Option<&'static str> = option_env!("BUNDLE"); + const TARGET: 
&'static str = env!("TARGET"); + + let rust_analyzer_name = format!("rust-analyzer-{}", TARGET); + if BUNDLE.map_or(Ok(false), |b| b.parse())? { + let rust_analyzer_path = cx + .platform() + .path_for_resource(Some(&rust_analyzer_name), None)?; + Self::new(&rust_analyzer_path, cx.background()) + } else { + Self::new(Path::new(&rust_analyzer_name), cx.background()) + } + } + pub fn new(path: &Path, background: &executor::Background) -> Result> { let mut server = Command::new(path) .stdin(Stdio::piped()) @@ -143,12 +158,7 @@ impl LanguageServer { _input_task, _output_task, }); - let init = this.clone().init(); - background - .spawn(async move { - init.log_err().await; - }) - .detach(); + background.spawn(this.clone().init().log_err()).detach(); Ok(this) } @@ -229,6 +239,6 @@ mod tests { #[gpui::test] async fn test_basic(cx: TestAppContext) { - let server = LanguageServer::new(); + let server = cx.read(|cx| LanguageServer::rust(cx).unwrap()); } } diff --git a/crates/project/src/lib.rs b/crates/project/src/lib.rs index d4e41b4f28fc1af38398545e39a25b6546b46a29..f9926e0416b42b6b4509de3b1ee8f4149d48c086 100644 --- a/crates/project/src/lib.rs +++ b/crates/project/src/lib.rs @@ -49,7 +49,7 @@ impl Project { languages: Arc, rpc: Arc, fs: Arc, - background: &executor::Background, + cx: &AppContext, ) -> Self { Self { worktrees: Default::default(), @@ -57,11 +57,7 @@ impl Project { languages, client: rpc, fs, - language_server: LanguageServer::new( - Path::new("/Users/as-cii/Downloads/rust-analyzer-x86_64-apple-darwin"), - background, - ) - .unwrap(), + language_server: LanguageServer::rust(cx).unwrap(), } } @@ -420,6 +416,6 @@ mod tests { let languages = Arc::new(LanguageRegistry::new()); let fs = Arc::new(RealFs); let rpc = client::Client::new(); - cx.add_model(|cx| Project::new(languages, rpc, fs, cx.background())) + cx.add_model(|cx| Project::new(languages, rpc, fs, cx)) } } diff --git a/crates/project_panel/src/lib.rs b/crates/project_panel/src/lib.rs index 422484e74d8809b52913fee24e9f052ca539d6c4..0a8c6e6fbb99194f32d5bddef6a18fc364fd2825 100644 --- a/crates/project_panel/src/lib.rs +++ b/crates/project_panel/src/lib.rs @@ -622,7 +622,7 @@ mod tests { params.languages.clone(), params.client.clone(), params.fs.clone(), - cx.background(), + cx, ) }); let root1 = project diff --git a/crates/workspace/src/lib.rs b/crates/workspace/src/lib.rs index 9fafd433bbe7b9fff78ce90543c047e6a8799331..1b53d15862c3cd26ae14350736f49d0488566736 100644 --- a/crates/workspace/src/lib.rs +++ b/crates/workspace/src/lib.rs @@ -327,7 +327,7 @@ impl Workspace { params.languages.clone(), params.client.clone(), params.fs.clone(), - cx.background(), + cx, ) }); cx.observe(&project, |_, _, cx| cx.notify()).detach(); diff --git a/script/bundle b/script/bundle index e86f80755ec06e01abf781cf8d7b2ce3bfb42d4d..e77f5407dd5acbf0407914069c1a84d9c1f20a8f 100755 --- a/script/bundle +++ b/script/bundle @@ -2,6 +2,8 @@ set -e +export BUNDLE=true + # Install cargo-bundle 0.5.0 if it's not already installed cargo install cargo-bundle --version 0.5.0 @@ -11,10 +13,14 @@ cargo bundle --release --target x86_64-apple-darwin popd > /dev/null # Build the binary for aarch64 (Apple M1) -cargo build --release --target aarch64-apple-darwin +# cargo build --release --target aarch64-apple-darwin # Replace the bundle's binary with a "fat binary" that combines the two architecture-specific binaries -lipo -create target/x86_64-apple-darwin/release/Zed target/aarch64-apple-darwin/release/Zed -output 
target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/MacOS/zed +# lipo -create target/x86_64-apple-darwin/release/Zed target/aarch64-apple-darwin/release/Zed -output target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/MacOS/zed + +# Bundle rust-analyzer +cp vendor/bin/rust-analyzer-x86_64-apple-darwin target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/Resources/ +cp vendor/bin/rust-analyzer-aarch64-apple-darwin target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/Resources/ # Sign the app bundle with an ad-hoc signature so it runs on the M1. We need a real certificate but this works for now. if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTARIZATION_USERNAME && -n $APPLE_NOTARIZATION_PASSWORD ]]; then diff --git a/script/download-rust-analyzer b/script/download-rust-analyzer new file mode 100755 index 0000000000000000000000000000000000000000..9a64f9ed6983e975e5bdbe328fed5a8b988fed19 --- /dev/null +++ b/script/download-rust-analyzer @@ -0,0 +1,15 @@ +#!/bin/bash + +set -e + +export RUST_ANALYZER_URL="https://github.com/rust-analyzer/rust-analyzer/releases/download/2021-10-18/" + +function download { + local filename="rust-analyzer-$1" + curl -L $RUST_ANALYZER_URL/$filename.gz | gunzip > vendor/bin/$filename + chmod +x vendor/bin/$filename +} + +mkdir -p vendor/bin +download "x86_64-apple-darwin" +download "aarch64-apple-darwin" From 7105589904d09835be4edee16587c8c1f448af52 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 25 Oct 2021 12:29:28 +0200 Subject: [PATCH 04/61] Don't send notifications or requests until LSP is initialized --- Cargo.lock | 1 + crates/lsp/Cargo.toml | 1 + crates/lsp/src/lib.rs | 88 +++++++++++++++++++++++++++++++++---------- 3 files changed, 71 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c514dbfe1180ba9c2b6c5cf3cc6265837678024f..a9e631b17595cddd878230f52820e87f17d56ddd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2958,6 +2958,7 @@ dependencies = [ "gpui", "lsp-types", "parking_lot", + "postage", "serde 1.0.125", "serde_json 1.0.64", "smol", diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index 897c8c8224fa3d95e8f6cfd7ef6e1c8974eda5af..3c1f08bb5f39215107b4d3b7ff529a7bf7f9c617 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -10,6 +10,7 @@ anyhow = "1.0" futures = "0.3" lsp-types = "0.91" parking_lot = "0.11" +postage = { version = "0.4.1", features = ["futures-traits"] } serde = { version = "1.0", features = ["derive"] } serde_json = { version = "1.0", features = ["raw_value"] } smol = "1.2" diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index c2fdf751829b4f93006f6100e7d378a4f663218b..2cc2426d2f69b96b097276e60b0cda0a32d9abb4 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -1,6 +1,7 @@ use anyhow::{anyhow, Context, Result}; use gpui::{executor, AppContext, Task}; use parking_lot::Mutex; +use postage::{barrier, prelude::Stream}; use serde::{Deserialize, Serialize}; use serde_json::value::RawValue; use smol::{ @@ -29,6 +30,7 @@ pub struct LanguageServer { response_handlers: Arc>>, _input_task: Task>, _output_task: Task>, + initialized: barrier::Receiver, } type ResponseHandler = Box)>; @@ -151,32 +153,51 @@ impl LanguageServer { .log_err(), ); + let (initialized_tx, initialized_rx) = barrier::channel(); let this = Arc::new(Self { response_handlers, next_id: Default::default(), outbound_tx, _input_task, _output_task, + initialized: initialized_rx, }); - 
background.spawn(this.clone().init().log_err()).detach(); + + background + .spawn({ + let this = this.clone(); + async move { + this.init().log_err().await; + drop(initialized_tx); + } + }) + .detach(); Ok(this) } async fn init(self: Arc) -> Result<()> { - self.request::(lsp_types::InitializeParams { - process_id: Default::default(), - root_path: Default::default(), - root_uri: Default::default(), - initialization_options: Default::default(), - capabilities: Default::default(), - trace: Default::default(), - workspace_folders: Default::default(), - client_info: Default::default(), - locale: Default::default(), - }) + let res = self + .request_internal::( + lsp_types::InitializeParams { + process_id: Default::default(), + root_path: Default::default(), + root_uri: Default::default(), + initialization_options: Default::default(), + capabilities: Default::default(), + trace: Default::default(), + workspace_folders: Default::default(), + client_info: Default::default(), + locale: Default::default(), + }, + false, + ) + .await?; + self.notify_internal::( + lsp_types::InitializedParams {}, + false, + ) .await?; - self.notify::(lsp_types::InitializedParams {})?; Ok(()) } @@ -184,6 +205,17 @@ impl LanguageServer { self: &Arc, params: T::Params, ) -> impl Future> + where + T::Result: 'static + Send, + { + self.request_internal::(params, true) + } + + fn request_internal( + self: &Arc, + params: T::Params, + wait_for_initialization: bool, + ) -> impl Future> where T::Result: 'static + Send, { @@ -210,25 +242,43 @@ impl LanguageServer { }), ); - let outbound_tx = self.outbound_tx.clone(); + let this = self.clone(); async move { - outbound_tx.send(message).await?; + if wait_for_initialization { + this.initialized.clone().recv().await; + } + this.outbound_tx.send(message).await?; rx.recv().await? 
} } pub fn notify( - &self, + self: &Arc, + params: T::Params, + ) -> impl Future> { + self.notify_internal::(params, true) + } + + fn notify_internal( + self: &Arc, params: T::Params, - ) -> Result<()> { + wait_for_initialization: bool, + ) -> impl Future> { let message = serde_json::to_vec(&OutboundNotification { jsonrpc: JSON_RPC_VERSION, method: T::METHOD, params, }) .unwrap(); - smol::block_on(self.outbound_tx.send(message))?; - Ok(()) + + let this = self.clone(); + async move { + if wait_for_initialization { + this.initialized.clone().recv().await; + } + this.outbound_tx.send(message).await?; + Ok(()) + } } } From 9759f9e947ccaf50d428349219a9e0ebc7600901 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 25 Oct 2021 17:35:45 +0200 Subject: [PATCH 05/61] Uncomment script/bundle lines Co-Authored-By: Nathan Sobo --- script/bundle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/script/bundle b/script/bundle index e77f5407dd5acbf0407914069c1a84d9c1f20a8f..6b60ffbdf205623fb9fd1dc0c63bc45776b67a02 100755 --- a/script/bundle +++ b/script/bundle @@ -13,10 +13,10 @@ cargo bundle --release --target x86_64-apple-darwin popd > /dev/null # Build the binary for aarch64 (Apple M1) -# cargo build --release --target aarch64-apple-darwin +cargo build --release --target aarch64-apple-darwin # Replace the bundle's binary with a "fat binary" that combines the two architecture-specific binaries -# lipo -create target/x86_64-apple-darwin/release/Zed target/aarch64-apple-darwin/release/Zed -output target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/MacOS/zed +lipo -create target/x86_64-apple-darwin/release/Zed target/aarch64-apple-darwin/release/Zed -output target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/MacOS/zed # Bundle rust-analyzer cp vendor/bin/rust-analyzer-x86_64-apple-darwin target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/Resources/ From 580bad2042447ffae0afa1fbbae6c6702692bfcc Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 25 Oct 2021 18:11:52 +0200 Subject: [PATCH 06/61] Get a basic end-to-end test for rust-analyzer integration working Co-Authored-By: Nathan Sobo Co-Authored-By: Max Brunsfeld --- Cargo.lock | 2 + crates/lsp/Cargo.toml | 3 + crates/lsp/build.rs | 6 +- crates/lsp/src/lib.rs | 231 ++++++++++++++++++++++++-------- crates/project/src/lib.rs | 14 +- crates/project/src/worktree.rs | 4 + crates/project_panel/src/lib.rs | 3 +- crates/workspace/src/lib.rs | 3 +- script/bundle | 2 +- 9 files changed, 194 insertions(+), 74 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a9e631b17595cddd878230f52820e87f17d56ddd..08d40212d0c14b5d3638182511b1556daef84bf8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2956,12 +2956,14 @@ dependencies = [ "anyhow", "futures", "gpui", + "log", "lsp-types", "parking_lot", "postage", "serde 1.0.125", "serde_json 1.0.64", "smol", + "unindent", "util", ] diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index 3c1f08bb5f39215107b4d3b7ff529a7bf7f9c617..22cde9631d588e0f0c61d70d08d2beaa485cf6df 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -8,6 +8,7 @@ gpui = { path = "../gpui" } util = { path = "../util" } anyhow = "1.0" futures = "0.3" +log = "0.4" lsp-types = "0.91" parking_lot = "0.11" postage = { version = "0.4.1", features = ["futures-traits"] } @@ -17,3 +18,5 @@ smol = "1.2" [dev-dependencies] gpui = { path = "../gpui", features = ["test-support"] } +util = { path = "../util", features = ["test-support"] } +unindent = "0.1.7" diff --git 
a/crates/lsp/build.rs b/crates/lsp/build.rs index 0aedf6d1d28e7bba9cd055cb80c86bd6ef5247af..703d9ccd3c6b54bbfe393d40244291b201e9343e 100644 --- a/crates/lsp/build.rs +++ b/crates/lsp/build.rs @@ -2,9 +2,9 @@ use std::env; fn main() { let target = env::var("TARGET").unwrap(); - println!("cargo:rustc-env=TARGET={}", target); + println!("cargo:rustc-env=ZED_TARGET={}", target); - if let Ok(bundled) = env::var("BUNDLE") { - println!("cargo:rustc-env=BUNDLE={}", bundled); + if let Ok(bundled) = env::var("ZED_BUNDLE") { + println!("cargo:rustc-env=ZED_BUNDLE={}", bundled); } } diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index 2cc2426d2f69b96b097276e60b0cda0a32d9abb4..104ab2677e8148eb74e6774f163258b7ec92da54 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -1,9 +1,9 @@ use anyhow::{anyhow, Context, Result}; use gpui::{executor, AppContext, Task}; -use parking_lot::Mutex; -use postage::{barrier, prelude::Stream}; +use parking_lot::{Mutex, RwLock}; +use postage::{barrier, oneshot, prelude::Stream, sink::Sink}; use serde::{Deserialize, Serialize}; -use serde_json::value::RawValue; +use serde_json::{json, value::RawValue}; use smol::{ channel, io::{AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader}, @@ -24,16 +24,23 @@ use util::TryFutureExt; const JSON_RPC_VERSION: &'static str = "2.0"; const CONTENT_LEN_HEADER: &'static str = "Content-Length: "; +type NotificationHandler = Box; +type ResponseHandler = Box)>; + pub struct LanguageServer { next_id: AtomicUsize, outbound_tx: channel::Sender>, + notification_handlers: Arc>>, response_handlers: Arc>>, _input_task: Task>, _output_task: Task>, initialized: barrier::Receiver, } -type ResponseHandler = Box)>; +pub struct Subscription { + method: &'static str, + notification_handlers: Arc>>, +} #[derive(Serialize)] struct Request { @@ -48,8 +55,8 @@ struct Response<'a> { id: usize, #[serde(default)] error: Option, - #[serde(default, borrow)] - result: Option<&'a RawValue>, + #[serde(borrow)] + result: &'a RawValue, } #[derive(Serialize)] @@ -67,29 +74,33 @@ struct InboundNotification<'a> { params: &'a RawValue, } -#[derive(Deserialize)] +#[derive(Debug, Deserialize)] struct Error { message: String, } impl LanguageServer { - pub fn rust(cx: &AppContext) -> Result> { - const BUNDLE: Option<&'static str> = option_env!("BUNDLE"); - const TARGET: &'static str = env!("TARGET"); + pub fn rust(root_path: &Path, cx: &AppContext) -> Result> { + const ZED_BUNDLE: Option<&'static str> = option_env!("ZED_BUNDLE"); + const ZED_TARGET: &'static str = env!("ZED_TARGET"); - let rust_analyzer_name = format!("rust-analyzer-{}", TARGET); - if BUNDLE.map_or(Ok(false), |b| b.parse())? { + let rust_analyzer_name = format!("rust-analyzer-{}", ZED_TARGET); + if ZED_BUNDLE.map_or(Ok(false), |b| b.parse())? 
{ let rust_analyzer_path = cx .platform() .path_for_resource(Some(&rust_analyzer_name), None)?; - Self::new(&rust_analyzer_path, cx.background()) + Self::new(root_path, &rust_analyzer_path, cx.background()) } else { - Self::new(Path::new(&rust_analyzer_name), cx.background()) + Self::new(root_path, Path::new(&rust_analyzer_name), cx.background()) } } - pub fn new(path: &Path, background: &executor::Background) -> Result> { - let mut server = Command::new(path) + pub fn new( + root_path: &Path, + server_path: &Path, + background: &executor::Background, + ) -> Result> { + let mut server = Command::new(server_path) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::inherit()) @@ -97,9 +108,11 @@ impl LanguageServer { let mut stdin = server.stdin.take().unwrap(); let mut stdout = BufReader::new(server.stdout.take().unwrap()); let (outbound_tx, outbound_rx) = channel::unbounded::>(); - let response_handlers = Arc::new(Mutex::new(HashMap::::new())); + let notification_handlers = Arc::new(RwLock::new(HashMap::<_, NotificationHandler>::new())); + let response_handlers = Arc::new(Mutex::new(HashMap::<_, ResponseHandler>::new())); let _input_task = background.spawn( { + let notification_handlers = notification_handlers.clone(); let response_handlers = response_handlers.clone(); async move { let mut buffer = Vec::new(); @@ -116,15 +129,21 @@ impl LanguageServer { buffer.resize(message_len, 0); stdout.read_exact(&mut buffer).await?; - if let Ok(InboundNotification { .. }) = serde_json::from_slice(&buffer) { + + if let Ok(InboundNotification { method, params }) = + serde_json::from_slice(&buffer) + { + if let Some(handler) = notification_handlers.read().get(method) { + handler(params.get()); + } } else if let Ok(Response { id, error, result }) = serde_json::from_slice(&buffer) { if let Some(handler) = response_handlers.lock().remove(&id) { - if let Some(result) = result { - handler(Ok(result.get())); - } else if let Some(error) = error { + if let Some(error) = error { handler(Err(error)); + } else { + handler(Ok(result.get())); } } } else { @@ -142,6 +161,8 @@ impl LanguageServer { async move { let mut content_len_buffer = Vec::new(); loop { + content_len_buffer.clear(); + let message = outbound_rx.recv().await?; write!(content_len_buffer, "{}", message.len()).unwrap(); stdin.write_all(CONTENT_LEN_HEADER.as_bytes()).await?; @@ -155,6 +176,7 @@ impl LanguageServer { let (initialized_tx, initialized_rx) = barrier::channel(); let this = Arc::new(Self { + notification_handlers, response_handlers, next_id: Default::default(), outbound_tx, @@ -163,11 +185,13 @@ impl LanguageServer { initialized: initialized_rx, }); + let root_uri = + lsp_types::Url::from_file_path(root_path).map_err(|_| anyhow!("invalid root path"))?; background .spawn({ let this = this.clone(); async move { - this.init().log_err().await; + this.init(root_uri).log_err().await; drop(initialized_tx); } }) @@ -176,45 +200,74 @@ impl LanguageServer { Ok(this) } - async fn init(self: Arc) -> Result<()> { - let res = self - .request_internal::( - lsp_types::InitializeParams { - process_id: Default::default(), - root_path: Default::default(), - root_uri: Default::default(), - initialization_options: Default::default(), - capabilities: Default::default(), - trace: Default::default(), - workspace_folders: Default::default(), - client_info: Default::default(), - locale: Default::default(), - }, - false, - ) - .await?; + async fn init(self: Arc, root_uri: lsp_types::Url) -> Result<()> { + self.request_internal::(lsp_types::InitializeParams { 
+ process_id: Default::default(), + root_path: Default::default(), + root_uri: Some(root_uri), + initialization_options: Default::default(), + capabilities: lsp_types::ClientCapabilities { + experimental: Some(json!({ + "serverStatusNotification": true, + })), + ..Default::default() + }, + trace: Default::default(), + workspace_folders: Default::default(), + client_info: Default::default(), + locale: Default::default(), + }) + .await?; self.notify_internal::( lsp_types::InitializedParams {}, - false, ) .await?; Ok(()) } + pub fn on_notification(&self, f: F) -> Subscription + where + T: lsp_types::notification::Notification, + F: 'static + Send + Sync + Fn(T::Params), + { + let prev_handler = self.notification_handlers.write().insert( + T::METHOD, + Box::new( + move |notification| match serde_json::from_str(notification) { + Ok(notification) => f(notification), + Err(err) => log::error!("error parsing notification {}: {}", T::METHOD, err), + }, + ), + ); + + assert!( + prev_handler.is_none(), + "registered multiple handlers for the same notification" + ); + + Subscription { + method: T::METHOD, + notification_handlers: self.notification_handlers.clone(), + } + } + pub fn request( - self: &Arc, + self: Arc, params: T::Params, ) -> impl Future> where T::Result: 'static + Send, { - self.request_internal::(params, true) + let this = self.clone(); + async move { + this.initialized.clone().recv().await; + this.request_internal::(params).await + } } fn request_internal( self: &Arc, params: T::Params, - wait_for_initialization: bool, ) -> impl Future> where T::Result: 'static + Send, @@ -228,7 +281,7 @@ impl LanguageServer { }) .unwrap(); let mut response_handlers = self.response_handlers.lock(); - let (tx, rx) = smol::channel::bounded(1); + let (mut tx, mut rx) = oneshot::channel(); response_handlers.insert( id, Box::new(move |result| { @@ -238,17 +291,14 @@ impl LanguageServer { } Err(error) => Err(anyhow!("{}", error.message)), }; - let _ = smol::block_on(tx.send(response)); + let _ = tx.try_send(response); }), ); let this = self.clone(); async move { - if wait_for_initialization { - this.initialized.clone().recv().await; - } this.outbound_tx.send(message).await?; - rx.recv().await? 
+ rx.recv().await.unwrap() } } @@ -256,13 +306,16 @@ impl LanguageServer { self: &Arc, params: T::Params, ) -> impl Future> { - self.notify_internal::(params, true) + let this = self.clone(); + async move { + this.initialized.clone().recv().await; + this.notify_internal::(params).await + } } fn notify_internal( self: &Arc, params: T::Params, - wait_for_initialization: bool, ) -> impl Future> { let message = serde_json::to_vec(&OutboundNotification { jsonrpc: JSON_RPC_VERSION, @@ -273,22 +326,90 @@ impl LanguageServer { let this = self.clone(); async move { - if wait_for_initialization { - this.initialized.clone().recv().await; - } this.outbound_tx.send(message).await?; Ok(()) } } } +impl Drop for Subscription { + fn drop(&mut self) { + self.notification_handlers.write().remove(self.method); + } +} + #[cfg(test)] mod tests { use super::*; use gpui::TestAppContext; + use unindent::Unindent; + use util::test::temp_tree; #[gpui::test] async fn test_basic(cx: TestAppContext) { - let server = cx.read(|cx| LanguageServer::rust(cx).unwrap()); + let root_dir = temp_tree(json!({ + "Cargo.toml": r#" + [package] + name = "temp" + version = "0.1.0" + edition = "2018" + "#.unindent(), + "src": { + "lib.rs": r#" + fn fun() { + let hello = "world"; + } + "#.unindent() + } + })); + + let server = cx.read(|cx| LanguageServer::rust(root_dir.path(), cx).unwrap()); + server.next_idle_notification().await; + + let hover = server + .request::(lsp_types::HoverParams { + text_document_position_params: lsp_types::TextDocumentPositionParams { + text_document: lsp_types::TextDocumentIdentifier::new( + lsp_types::Url::from_file_path(root_dir.path().join("src/lib.rs")).unwrap(), + ), + position: lsp_types::Position::new(1, 21), + }, + work_done_progress_params: Default::default(), + }) + .await + .unwrap() + .unwrap(); + assert_eq!( + hover.contents, + lsp_types::HoverContents::Markup(lsp_types::MarkupContent { + kind: lsp_types::MarkupKind::Markdown, + value: "&str".to_string() + }) + ); + } + + impl LanguageServer { + async fn next_idle_notification(self: &Arc) { + let (tx, rx) = channel::unbounded(); + let _subscription = + self.on_notification::(move |params| { + if params.quiescent { + tx.try_send(()).unwrap(); + } + }); + let _ = rx.recv().await; + } + } + + pub enum ServerStatusNotification {} + + impl lsp_types::notification::Notification for ServerStatusNotification { + type Params = ServerStatusParams; + const METHOD: &'static str = "experimental/serverStatus"; + } + + #[derive(Deserialize, Serialize, PartialEq, Eq, Clone)] + pub struct ServerStatusParams { + pub quiescent: bool, } } diff --git a/crates/project/src/lib.rs b/crates/project/src/lib.rs index f9926e0416b42b6b4509de3b1ee8f4149d48c086..184dfd4d9cc54fa3a9f9bf1397ccd917aa1e17fa 100644 --- a/crates/project/src/lib.rs +++ b/crates/project/src/lib.rs @@ -7,8 +7,7 @@ use buffer::LanguageRegistry; use client::Client; use futures::Future; use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; -use gpui::{executor, AppContext, Entity, ModelContext, ModelHandle, Task}; -use lsp::LanguageServer; +use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; use std::{ path::Path, sync::{atomic::AtomicBool, Arc}, @@ -24,7 +23,6 @@ pub struct Project { languages: Arc, client: Arc, fs: Arc, - language_server: Arc, } pub enum Event { @@ -45,19 +43,13 @@ pub struct ProjectEntry { } impl Project { - pub fn new( - languages: Arc, - rpc: Arc, - fs: Arc, - cx: &AppContext, - ) -> Self { + pub fn new(languages: Arc, rpc: Arc, fs: Arc) -> Self { Self { 
worktrees: Default::default(), active_entry: None, languages, client: rpc, fs, - language_server: LanguageServer::rust(cx).unwrap(), } } @@ -416,6 +408,6 @@ mod tests { let languages = Arc::new(LanguageRegistry::new()); let fs = Arc::new(RealFs); let rpc = client::Client::new(); - cx.add_model(|cx| Project::new(languages, rpc, fs, cx)) + cx.add_model(|_| Project::new(languages, rpc, fs)) } } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 3f27b0ed3f5b8213ec66f20db40cb448610612ba..9b98ed9eebdb66d2c78732b7e623b8cdc07d88b0 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -14,6 +14,7 @@ use gpui::{ Task, UpgradeModelHandle, WeakModelHandle, }; use lazy_static::lazy_static; +use lsp::LanguageServer; use parking_lot::Mutex; use postage::{ prelude::{Sink as _, Stream as _}, @@ -684,6 +685,7 @@ pub struct LocalWorktree { queued_operations: Vec<(u64, Operation)>, rpc: Arc, fs: Arc, + language_server: Arc, } #[derive(Default, Deserialize)] @@ -721,6 +723,7 @@ impl LocalWorktree { let (scan_states_tx, scan_states_rx) = smol::channel::unbounded(); let (mut last_scan_state_tx, last_scan_state_rx) = watch::channel_with(ScanState::Scanning); let tree = cx.add_model(move |cx: &mut ModelContext| { + let language_server = LanguageServer::rust(&abs_path, cx).unwrap(); let mut snapshot = Snapshot { id: cx.model_id(), scan_id: 0, @@ -796,6 +799,7 @@ impl LocalWorktree { languages, rpc, fs, + language_server, }; cx.spawn_weak(|this, mut cx| async move { diff --git a/crates/project_panel/src/lib.rs b/crates/project_panel/src/lib.rs index 0a8c6e6fbb99194f32d5bddef6a18fc364fd2825..385b7dbca2de231d2e9417483da055df442ac2a5 100644 --- a/crates/project_panel/src/lib.rs +++ b/crates/project_panel/src/lib.rs @@ -617,12 +617,11 @@ mod tests { ) .await; - let project = cx.add_model(|cx| { + let project = cx.add_model(|_| { Project::new( params.languages.clone(), params.client.clone(), params.fs.clone(), - cx, ) }); let root1 = project diff --git a/crates/workspace/src/lib.rs b/crates/workspace/src/lib.rs index 1b53d15862c3cd26ae14350736f49d0488566736..c227ee61bd892ea4dbb2f1a0894469abe69a0178 100644 --- a/crates/workspace/src/lib.rs +++ b/crates/workspace/src/lib.rs @@ -322,12 +322,11 @@ pub struct Workspace { impl Workspace { pub fn new(params: &WorkspaceParams, cx: &mut ViewContext) -> Self { - let project = cx.add_model(|cx| { + let project = cx.add_model(|_| { Project::new( params.languages.clone(), params.client.clone(), params.fs.clone(), - cx, ) }); cx.observe(&project, |_, _, cx| cx.notify()).detach(); diff --git a/script/bundle b/script/bundle index 6b60ffbdf205623fb9fd1dc0c63bc45776b67a02..7b1929d112b4fc22f41bee86160cab0a8977cca8 100755 --- a/script/bundle +++ b/script/bundle @@ -2,7 +2,7 @@ set -e -export BUNDLE=true +export ZED_BUNDLE=true # Install cargo-bundle 0.5.0 if it's not already installed cargo install cargo-bundle --version 0.5.0 From 2d6d10f920d95d7e8a61c4521522bdede37a4ef9 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 25 Oct 2021 19:46:33 +0200 Subject: [PATCH 07/61] Log unhandled notifications in `LanguageServer` Co-Authored-By: Nathan Sobo Co-Authored-By: Max Brunsfeld --- crates/lsp/src/lib.rs | 44 ++++++++++++++++++++++++++++++++++--------- 1 file changed, 35 insertions(+), 9 deletions(-) diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index 104ab2677e8148eb74e6774f163258b7ec92da54..e80ad55947b5a14e4c5eda5c32bd5f4cce60494a 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -3,7 +3,7 @@ 
use gpui::{executor, AppContext, Task}; use parking_lot::{Mutex, RwLock}; use postage::{barrier, oneshot, prelude::Stream, sink::Sink}; use serde::{Deserialize, Serialize}; -use serde_json::{json, value::RawValue}; +use serde_json::{json, value::RawValue, Value}; use smol::{ channel, io::{AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader}, @@ -13,6 +13,7 @@ use std::{ collections::HashMap, future::Future, io::Write, + str::FromStr, sync::{ atomic::{AtomicUsize, Ordering::SeqCst}, Arc, @@ -135,6 +136,15 @@ impl LanguageServer { { if let Some(handler) = notification_handlers.read().get(method) { handler(params.get()); + } else { + log::info!( + "unhandled notification {}:\n{}", + method, + serde_json::to_string_pretty( + &Value::from_str(params.get()).unwrap() + ) + .unwrap() + ); } } else if let Ok(Response { id, error, result }) = serde_json::from_slice(&buffer) @@ -347,6 +357,12 @@ mod tests { #[gpui::test] async fn test_basic(cx: TestAppContext) { + let lib_source = r#" + fn fun() { + let hello = "world"; + } + "# + .unindent(); let root_dir = temp_tree(json!({ "Cargo.toml": r#" [package] @@ -355,23 +371,33 @@ mod tests { edition = "2018" "#.unindent(), "src": { - "lib.rs": r#" - fn fun() { - let hello = "world"; - } - "#.unindent() + "lib.rs": &lib_source } })); + let lib_file_uri = + lsp_types::Url::from_file_path(root_dir.path().join("src/lib.rs")).unwrap(); let server = cx.read(|cx| LanguageServer::rust(root_dir.path(), cx).unwrap()); server.next_idle_notification().await; + server + .notify::( + lsp_types::DidOpenTextDocumentParams { + text_document: lsp_types::TextDocumentItem::new( + lib_file_uri.clone(), + "rust".to_string(), + 0, + lib_source, + ), + }, + ) + .await + .unwrap(); + let hover = server .request::(lsp_types::HoverParams { text_document_position_params: lsp_types::TextDocumentPositionParams { - text_document: lsp_types::TextDocumentIdentifier::new( - lsp_types::Url::from_file_path(root_dir.path().join("src/lib.rs")).unwrap(), - ), + text_document: lsp_types::TextDocumentIdentifier::new(lib_file_uri), position: lsp_types::Position::new(1, 21), }, work_done_progress_params: Default::default(), From b49a268031ad955429b56511ce2d79d09f594261 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 25 Oct 2021 15:28:40 -0700 Subject: [PATCH 08/61] Add a fake lsp server Co-Authored-By: Nathan Sobo --- Cargo.lock | 11 ++ crates/lsp/Cargo.toml | 6 + crates/lsp/src/lib.rs | 259 ++++++++++++++++++++++++++++++++++---- crates/project/Cargo.toml | 2 + 4 files changed, 255 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 08d40212d0c14b5d3638182511b1556daef84bf8..4932886028f6a4792a0f8b2eb86177df57c9022f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -328,6 +328,15 @@ dependencies = [ "futures-lite", ] +[[package]] +name = "async-pipe" +version = "0.1.3" +source = "git+https://github.com/routerify/async-pipe-rs?rev=feeb77e83142a9ff837d0767652ae41bfc5d8e47#feeb77e83142a9ff837d0767652ae41bfc5d8e47" +dependencies = [ + "futures", + "log", +] + [[package]] name = "async-process" version = "1.0.2" @@ -2954,6 +2963,7 @@ name = "lsp" version = "0.1.0" dependencies = [ "anyhow", + "async-pipe", "futures", "gpui", "log", @@ -2962,6 +2972,7 @@ dependencies = [ "postage", "serde 1.0.125", "serde_json 1.0.64", + "simplelog", "smol", "unindent", "util", diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index 22cde9631d588e0f0c61d70d08d2beaa485cf6df..263eed76fb9d515e0194835a94bcf9c79c08d909 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -3,10 
+3,14 @@ name = "lsp" version = "0.1.0" edition = "2018" +[features] +test-support = ["async-pipe"] + [dependencies] gpui = { path = "../gpui" } util = { path = "../util" } anyhow = "1.0" +async-pipe = { git = "https://github.com/routerify/async-pipe-rs", rev = "feeb77e83142a9ff837d0767652ae41bfc5d8e47", optional = true } futures = "0.3" log = "0.4" lsp-types = "0.91" @@ -19,4 +23,6 @@ smol = "1.2" [dev-dependencies] gpui = { path = "../gpui", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } +async-pipe = { git = "https://github.com/routerify/async-pipe-rs", rev = "feeb77e83142a9ff837d0767652ae41bfc5d8e47" } +simplelog = "0.9" unindent = "0.1.7" diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index e80ad55947b5a14e4c5eda5c32bd5f4cce60494a..ea770c74222f09fec602a22bd1b41d1557911e1d 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -1,4 +1,5 @@ use anyhow::{anyhow, Context, Result}; +use futures::{io::BufWriter, AsyncRead, AsyncWrite}; use gpui::{executor, AppContext, Task}; use parking_lot::{Mutex, RwLock}; use postage::{barrier, oneshot, prelude::Stream, sink::Sink}; @@ -13,6 +14,7 @@ use std::{ collections::HashMap, future::Future, io::Write, + marker::PhantomData, str::FromStr, sync::{ atomic::{AtomicUsize, Ordering::SeqCst}, @@ -22,6 +24,8 @@ use std::{ use std::{path::Path, process::Stdio}; use util::TryFutureExt; +pub use lsp_types::*; + const JSON_RPC_VERSION: &'static str = "2.0"; const CONTENT_LEN_HEADER: &'static str = "Content-Length: "; @@ -43,16 +47,16 @@ pub struct Subscription { notification_handlers: Arc>>, } -#[derive(Serialize)] -struct Request { - jsonrpc: &'static str, +#[derive(Serialize, Deserialize)] +struct Request<'a, T> { + jsonrpc: &'a str, id: usize, - method: &'static str, + method: &'a str, params: T, } -#[derive(Deserialize)] -struct Response<'a> { +#[derive(Serialize, Deserialize)] +struct AnyResponse<'a> { id: usize, #[serde(default)] error: Option, @@ -60,22 +64,24 @@ struct Response<'a> { result: &'a RawValue, } -#[derive(Serialize)] -struct OutboundNotification { - jsonrpc: &'static str, - method: &'static str, +#[derive(Serialize, Deserialize)] +struct Notification<'a, T> { + #[serde(borrow)] + jsonrpc: &'a str, + #[serde(borrow)] + method: &'a str, params: T, } #[derive(Deserialize)] -struct InboundNotification<'a> { +struct AnyNotification<'a> { #[serde(borrow)] method: &'a str, #[serde(borrow)] params: &'a RawValue, } -#[derive(Debug, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] struct Error { message: String, } @@ -90,24 +96,46 @@ impl LanguageServer { let rust_analyzer_path = cx .platform() .path_for_resource(Some(&rust_analyzer_name), None)?; - Self::new(root_path, &rust_analyzer_path, cx.background()) + Self::new(root_path, &rust_analyzer_path, &[], cx.background()) } else { - Self::new(root_path, Path::new(&rust_analyzer_name), cx.background()) + Self::new( + root_path, + Path::new(&rust_analyzer_name), + &[], + cx.background(), + ) } } pub fn new( root_path: &Path, server_path: &Path, + server_args: &[&str], background: &executor::Background, ) -> Result> { let mut server = Command::new(server_path) + .args(server_args) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::inherit()) .spawn()?; - let mut stdin = server.stdin.take().unwrap(); - let mut stdout = BufReader::new(server.stdout.take().unwrap()); + let stdin = server.stdin.take().unwrap(); + let stdout = server.stdout.take().unwrap(); + Self::new_internal(root_path, stdin, stdout, background) + } + + fn 
new_internal( + root_path: &Path, + stdin: Stdin, + stdout: Stdout, + background: &executor::Background, + ) -> Result> + where + Stdin: AsyncWrite + Unpin + Send + 'static, + Stdout: AsyncRead + Unpin + Send + 'static, + { + let mut stdin = BufWriter::new(stdin); + let mut stdout = BufReader::new(stdout); let (outbound_tx, outbound_rx) = channel::unbounded::>(); let notification_handlers = Arc::new(RwLock::new(HashMap::<_, NotificationHandler>::new())); let response_handlers = Arc::new(Mutex::new(HashMap::<_, ResponseHandler>::new())); @@ -119,7 +147,6 @@ impl LanguageServer { let mut buffer = Vec::new(); loop { buffer.clear(); - stdout.read_until(b'\n', &mut buffer).await?; stdout.read_until(b'\n', &mut buffer).await?; let message_len: usize = std::str::from_utf8(&buffer)? @@ -131,7 +158,7 @@ impl LanguageServer { buffer.resize(message_len, 0); stdout.read_exact(&mut buffer).await?; - if let Ok(InboundNotification { method, params }) = + if let Ok(AnyNotification { method, params }) = serde_json::from_slice(&buffer) { if let Some(handler) = notification_handlers.read().get(method) { @@ -146,7 +173,7 @@ impl LanguageServer { .unwrap() ); } - } else if let Ok(Response { id, error, result }) = + } else if let Ok(AnyResponse { id, error, result }) = serde_json::from_slice(&buffer) { if let Some(handler) = response_handlers.lock().remove(&id) { @@ -179,6 +206,7 @@ impl LanguageServer { stdin.write_all(&content_len_buffer).await?; stdin.write_all("\r\n\r\n".as_bytes()).await?; stdin.write_all(&message).await?; + stdin.flush().await?; } } .log_err(), @@ -211,7 +239,8 @@ impl LanguageServer { } async fn init(self: Arc, root_uri: lsp_types::Url) -> Result<()> { - self.request_internal::(lsp_types::InitializeParams { + #[allow(deprecated)] + let params = lsp_types::InitializeParams { process_id: Default::default(), root_path: Default::default(), root_uri: Some(root_uri), @@ -226,8 +255,10 @@ impl LanguageServer { workspace_folders: Default::default(), client_info: Default::default(), locale: Default::default(), - }) - .await?; + }; + + self.request_internal::(params) + .await?; self.notify_internal::( lsp_types::InitializedParams {}, ) @@ -327,7 +358,7 @@ impl LanguageServer { self: &Arc, params: T::Params, ) -> impl Future> { - let message = serde_json::to_vec(&OutboundNotification { + let message = serde_json::to_vec(&Notification { jsonrpc: JSON_RPC_VERSION, method: T::METHOD, params, @@ -342,16 +373,136 @@ impl LanguageServer { } } +impl Subscription { + pub fn detach(mut self) { + self.method = ""; + } +} + impl Drop for Subscription { fn drop(&mut self) { self.notification_handlers.write().remove(self.method); } } +#[cfg(any(test, feature = "test-support"))] +pub struct FakeLanguageServer { + buffer: Vec, + stdin: smol::io::BufReader, + stdout: smol::io::BufWriter, +} + +#[cfg(any(test, feature = "test-support"))] +pub struct RequestId { + id: usize, + _type: std::marker::PhantomData, +} + +#[cfg(any(test, feature = "test-support"))] +impl LanguageServer { + pub async fn fake(executor: &executor::Background) -> (Arc, FakeLanguageServer) { + let stdin = async_pipe::pipe(); + let stdout = async_pipe::pipe(); + ( + Self::new_internal(Path::new("/"), stdin.0, stdout.1, executor).unwrap(), + FakeLanguageServer { + stdin: smol::io::BufReader::new(stdin.1), + stdout: smol::io::BufWriter::new(stdout.0), + buffer: Vec::new(), + }, + ) + } +} + +#[cfg(any(test, feature = "test-support"))] +impl FakeLanguageServer { + pub async fn notify(&mut self, params: T::Params) { + let message = 
serde_json::to_vec(&Notification { + jsonrpc: JSON_RPC_VERSION, + method: T::METHOD, + params, + }) + .unwrap(); + self.send(message).await; + } + + pub async fn respond<'a, T: request::Request>( + &mut self, + request_id: RequestId, + result: T::Result, + ) { + let result = serde_json::to_string(&result).unwrap(); + let message = serde_json::to_vec(&AnyResponse { + id: request_id.id, + error: None, + result: &RawValue::from_string(result).unwrap(), + }) + .unwrap(); + self.send(message).await; + } + + pub async fn receive_request(&mut self) -> (RequestId, T::Params) { + self.receive().await; + let request = serde_json::from_slice::>(&self.buffer).unwrap(); + assert_eq!(request.method, T::METHOD); + assert_eq!(request.jsonrpc, JSON_RPC_VERSION); + ( + RequestId { + id: request.id, + _type: PhantomData, + }, + request.params, + ) + } + + pub async fn receive_notification(&mut self) -> T::Params { + self.receive().await; + let notification = serde_json::from_slice::>(&self.buffer).unwrap(); + assert_eq!(notification.method, T::METHOD); + notification.params + } + + async fn send(&mut self, message: Vec) { + self.stdout + .write_all(CONTENT_LEN_HEADER.as_bytes()) + .await + .unwrap(); + self.stdout + .write_all((format!("{}", message.len())).as_bytes()) + .await + .unwrap(); + self.stdout.write_all("\r\n\r\n".as_bytes()).await.unwrap(); + self.stdout.write_all(&message).await.unwrap(); + self.stdout.flush().await.unwrap(); + } + + async fn receive(&mut self) { + self.buffer.clear(); + self.stdin + .read_until(b'\n', &mut self.buffer) + .await + .unwrap(); + self.stdin + .read_until(b'\n', &mut self.buffer) + .await + .unwrap(); + let message_len: usize = std::str::from_utf8(&self.buffer) + .unwrap() + .strip_prefix(CONTENT_LEN_HEADER) + .unwrap() + .trim_end() + .parse() + .unwrap(); + self.buffer.resize(message_len, 0); + self.stdin.read_exact(&mut self.buffer).await.unwrap(); + } +} + #[cfg(test)] mod tests { use super::*; use gpui::TestAppContext; + use simplelog::SimpleLogger; use unindent::Unindent; use util::test::temp_tree; @@ -414,6 +565,68 @@ mod tests { ); } + #[gpui::test] + async fn test_fake(cx: TestAppContext) { + SimpleLogger::init(log::LevelFilter::Info, Default::default()).unwrap(); + + let (server, mut fake) = LanguageServer::fake(&cx.background()).await; + + let (message_tx, message_rx) = channel::unbounded(); + let (diagnostics_tx, diagnostics_rx) = channel::unbounded(); + server + .on_notification::(move |params| { + message_tx.try_send(params).unwrap() + }) + .detach(); + server + .on_notification::(move |params| { + diagnostics_tx.try_send(params).unwrap() + }) + .detach(); + + let (init_id, _) = fake.receive_request::().await; + fake.respond(init_id, InitializeResult::default()).await; + fake.receive_notification::() + .await; + + server + .notify::(DidOpenTextDocumentParams { + text_document: TextDocumentItem::new( + Url::from_str("file://a/b").unwrap(), + "rust".to_string(), + 0, + "".to_string(), + ), + }) + .await + .unwrap(); + assert_eq!( + fake.receive_notification::() + .await + .text_document + .uri + .as_str(), + "file://a/b" + ); + + fake.notify::(ShowMessageParams { + typ: MessageType::ERROR, + message: "ok".to_string(), + }) + .await; + fake.notify::(PublishDiagnosticsParams { + uri: Url::from_str("file://b/c").unwrap(), + version: Some(5), + diagnostics: vec![], + }) + .await; + assert_eq!(message_rx.recv().await.unwrap().message, "ok"); + assert_eq!( + diagnostics_rx.recv().await.unwrap().uri.as_str(), + "file://b/c" + ); + } + impl LanguageServer { 
async fn next_idle_notification(self: &Arc) { let (tx, rx) = channel::unbounded(); diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index 73959da5fcfa1005db686676a8e81a0059430cba..a6d69ad954c300aba34d2d7ab93af1d276e4b991 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -33,6 +33,8 @@ toml = "0.5" [dev-dependencies] client = { path = "../client", features = ["test-support"] } +gpui = { path = "../gpui", features = ["test-support"] } +lsp = { path = "../lsp", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } rpc = { path = "../rpc", features = ["test-support"] } rand = "0.8.3" From c3ff489fee393e4d9a0cd82a6971101e19db14ee Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 25 Oct 2021 18:04:27 -0700 Subject: [PATCH 09/61] Handle initialize request internally in fake lsp server Co-Authored-By: Nathan Sobo --- crates/lsp/src/lib.rs | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index ea770c74222f09fec602a22bd1b41d1557911e1d..60e5ba906577198d2f82148f606398bbfdc507f6 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -14,7 +14,6 @@ use std::{ collections::HashMap, future::Future, io::Write, - marker::PhantomData, str::FromStr, sync::{ atomic::{AtomicUsize, Ordering::SeqCst}, @@ -403,14 +402,20 @@ impl LanguageServer { pub async fn fake(executor: &executor::Background) -> (Arc, FakeLanguageServer) { let stdin = async_pipe::pipe(); let stdout = async_pipe::pipe(); - ( - Self::new_internal(Path::new("/"), stdin.0, stdout.1, executor).unwrap(), - FakeLanguageServer { - stdin: smol::io::BufReader::new(stdin.1), - stdout: smol::io::BufWriter::new(stdout.0), - buffer: Vec::new(), - }, - ) + let mut fake = FakeLanguageServer { + stdin: smol::io::BufReader::new(stdin.1), + stdout: smol::io::BufWriter::new(stdout.0), + buffer: Vec::new(), + }; + + let server = Self::new_internal(Path::new("/"), stdin.0, stdout.1, executor).unwrap(); + + let (init_id, _) = fake.receive_request::().await; + fake.respond(init_id, InitializeResult::default()).await; + fake.receive_notification::() + .await; + + (server, fake) } } @@ -449,7 +454,7 @@ impl FakeLanguageServer { ( RequestId { id: request.id, - _type: PhantomData, + _type: std::marker::PhantomData, }, request.params, ) @@ -584,11 +589,6 @@ mod tests { }) .detach(); - let (init_id, _) = fake.receive_request::().await; - fake.respond(init_id, InitializeResult::default()).await; - fake.receive_notification::() - .await; - server .notify::(DidOpenTextDocumentParams { text_document: TextDocumentItem::new( From 50c77daa0b1f9769e01e4d0e0307a785f1a61c17 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 25 Oct 2021 21:42:30 -0700 Subject: [PATCH 10/61] Start work on a test for worktree handling LSP diagnostics --- Cargo.lock | 1 + crates/project/Cargo.toml | 1 + crates/project/src/lib.rs | 6 +- crates/project/src/worktree.rs | 103 ++++++++++++++++++++++++++++++++- 4 files changed, 107 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0504fafb62f3f0eaec8875ad1a0cd0b158f712e2..3a9736c56484dc1019898edfc53e64b69e28665a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3833,6 +3833,7 @@ dependencies = [ "sum_tree", "tempdir", "toml 0.5.8", + "unindent", "util", ] diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index a0ca5e6e7503d7564fd096ca2176b695e6309e37..bb00812251b95651c67874d46940b6d673a103fd 100644 --- a/crates/project/Cargo.toml +++ 
b/crates/project/Cargo.toml @@ -41,3 +41,4 @@ util = { path = "../util", features = ["test-support"] } rpc = { path = "../rpc", features = ["test-support"] } rand = "0.8.3" tempdir = { version = "0.3.7" } +unindent = "0.1.7" diff --git a/crates/project/src/lib.rs b/crates/project/src/lib.rs index 3e129c8fb8a1d67feb7b7abdebb1bc89c0c5c1fe..a98b89db5ac2fcfa787ffc9276f5be6d0335b4fe 100644 --- a/crates/project/src/lib.rs +++ b/crates/project/src/lib.rs @@ -8,6 +8,7 @@ use futures::Future; use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; use language::LanguageRegistry; +use lsp::LanguageServer; use std::{ path::Path, sync::{atomic::AtomicBool, Arc}, @@ -73,8 +74,11 @@ impl Project { let rpc = self.client.clone(); let languages = self.languages.clone(); let path = Arc::from(abs_path); + let language_server = LanguageServer::rust(&path, cx); cx.spawn(|this, mut cx| async move { - let worktree = Worktree::open_local(rpc, path, fs, languages, &mut cx).await?; + let worktree = + Worktree::open_local(rpc, path, fs, languages, Some(language_server?), &mut cx) + .await?; this.update(&mut cx, |this, cx| { this.add_worktree(worktree.clone(), cx); }); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 500e9d502f2ad8d70f13a16a7058afd4d06df582..426115bfd4ccf91717f1384c558dd8e206fce17f 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -98,10 +98,11 @@ impl Worktree { path: impl Into>, fs: Arc, languages: Arc, + language_server: Option>, cx: &mut AsyncAppContext, ) -> Result> { let (tree, scan_states_tx) = - LocalWorktree::new(rpc, path, fs.clone(), languages, cx).await?; + LocalWorktree::new(rpc, path, fs.clone(), languages, language_server, cx).await?; tree.update(cx, |tree, cx| { let tree = tree.as_local_mut().unwrap(); let abs_path = tree.snapshot.abs_path.clone(); @@ -671,7 +672,7 @@ pub struct LocalWorktree { queued_operations: Vec<(u64, Operation)>, rpc: Arc, fs: Arc, - language_server: Arc, + language_server: Option>, } #[derive(Default, Deserialize)] @@ -685,6 +686,7 @@ impl LocalWorktree { path: impl Into>, fs: Arc, languages: Arc, + language_server: Option>, cx: &mut AsyncAppContext, ) -> Result<(ModelHandle, Sender)> { let abs_path = path.into(); @@ -709,7 +711,6 @@ impl LocalWorktree { let (scan_states_tx, scan_states_rx) = smol::channel::unbounded(); let (mut last_scan_state_tx, last_scan_state_rx) = watch::channel_with(ScanState::Scanning); let tree = cx.add_model(move |cx: &mut ModelContext| { - let language_server = LanguageServer::rust(&abs_path, cx).unwrap(); let mut snapshot = Snapshot { id: cx.model_id(), scan_id: 0, @@ -815,6 +816,28 @@ impl LocalWorktree { }) .detach(); + if let Some(language_server) = &tree.language_server { + let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded(); + language_server + .on_notification::(move |params| { + smol::block_on(diagnostics_tx.send(params)).ok(); + }) + .detach(); + cx.spawn_weak(|this, mut cx| async move { + while let Ok(diagnostics) = diagnostics_rx.recv().await { + if let Some(handle) = cx.read(|cx| this.upgrade(cx)) { + handle.update(&mut cx, |this, cx| { + let this = this.as_local_mut().unwrap(); + this.update_diagnostics(diagnostics, cx); + }); + } else { + break; + } + } + }) + .detach(); + } + Worktree::Local(tree) }); @@ -1161,6 +1184,14 @@ impl LocalWorktree { }) }) } + + fn update_diagnostics( + &mut self, + diagnostics: lsp::PublishDiagnosticsParams, + cx: &mut ModelContext, + 
) { + // + } } fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result { @@ -2804,6 +2835,8 @@ mod tests { use anyhow::Result; use client::test::FakeServer; use fs::RealFs; + use language::Point; + use lsp::Url; use rand::prelude::*; use serde_json::json; use std::{cell::RefCell, rc::Rc}; @@ -2812,6 +2845,7 @@ mod tests { fmt::Write, time::{SystemTime, UNIX_EPOCH}, }; + use unindent::Unindent as _; use util::test::temp_tree; #[gpui::test] @@ -2834,6 +2868,7 @@ mod tests { Arc::from(Path::new("/root")), Arc::new(fs), Default::default(), + None, &mut cx.to_async(), ) .await @@ -2866,6 +2901,7 @@ mod tests { dir.path(), Arc::new(RealFs), Default::default(), + None, &mut cx.to_async(), ) .await @@ -2896,6 +2932,7 @@ mod tests { file_path.clone(), Arc::new(RealFs), Default::default(), + None, &mut cx.to_async(), ) .await @@ -2942,6 +2979,7 @@ mod tests { dir.path(), Arc::new(RealFs), Default::default(), + None, &mut cx.to_async(), ) .await @@ -3102,6 +3140,7 @@ mod tests { dir.path(), Arc::new(RealFs), Default::default(), + None, &mut cx.to_async(), ) .await @@ -3156,6 +3195,7 @@ mod tests { "/path/to/the-dir".as_ref(), fs, Default::default(), + None, &mut cx.to_async(), ) .await @@ -3204,6 +3244,7 @@ mod tests { dir.path(), Arc::new(RealFs), Default::default(), + None, &mut cx.to_async(), ) .await @@ -3337,6 +3378,7 @@ mod tests { dir.path(), Arc::new(RealFs), Default::default(), + None, &mut cx.to_async(), ) .await @@ -3425,6 +3467,61 @@ mod tests { .await; } + #[gpui::test] + async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) { + let (language_server, mut fake_lsp) = LanguageServer::fake(&cx.background()).await; + let dir = temp_tree(json!({ + "a.rs": " + fn a() { A } + fn b() { BB } + ".unindent(), + "b.rs": " + const y: i32 = 1 + ".unindent(), + })); + + let tree = Worktree::open_local( + Client::new(), + dir.path(), + Arc::new(RealFs), + Default::default(), + Some(language_server), + &mut cx.to_async(), + ) + .await + .unwrap(); + cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + fake_lsp + .notify::(lsp::PublishDiagnosticsParams { + uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(), + version: None, + diagnostics: vec![ + lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), + severity: Some(lsp::DiagnosticSeverity::ERROR), + message: "undefined variable 'A'".to_string(), + ..Default::default() + }, + lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(2, 11)), + severity: Some(lsp::DiagnosticSeverity::ERROR), + message: "undefined variable 'BB'".to_string(), + ..Default::default() + }, + ], + }) + .await; + + let buffer = tree + .update(&mut cx, |tree, cx| tree.open_buffer("a.rs", cx)) + .await + .unwrap(); + + // Check buffer's diagnostics + } + #[gpui::test(iterations = 100)] fn test_random(mut rng: StdRng) { let operations = env::var("OPERATIONS") From 64445c7d1c05cc8cb74c9f44bbc73a78f580f17c Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 25 Oct 2021 18:05:40 -0700 Subject: [PATCH 11/61] Start work on AnchorRangeMultimap Co-Authored-By: Nathan Sobo --- crates/buffer/src/anchor.rs | 163 ++++++++++++++++++++++++++++++++++-- crates/buffer/src/lib.rs | 42 +++++++--- 2 files changed, 186 insertions(+), 19 deletions(-) diff --git a/crates/buffer/src/anchor.rs b/crates/buffer/src/anchor.rs index 1ac82727df7485bb6d098a66b251ecb465cc1cc6..c8a7eaed24db47698fb970345206201341c5d76a 100644 --- a/crates/buffer/src/anchor.rs +++ 
b/crates/buffer/src/anchor.rs @@ -1,13 +1,13 @@ -use crate::Point; +use crate::{Point, ToOffset}; use super::{Buffer, Content}; use anyhow::Result; use std::{cmp::Ordering, ops::Range}; -use sum_tree::Bias; +use sum_tree::{Bias, SumTree}; #[derive(Clone, Eq, PartialEq, Debug, Hash)] pub struct Anchor { - pub offset: usize, + pub full_offset: usize, pub bias: Bias, pub version: clock::Global, } @@ -30,10 +30,38 @@ pub struct AnchorRangeMap { #[derive(Clone)] pub struct AnchorRangeSet(pub(crate) AnchorRangeMap<()>); +pub struct AnchorRangeMultimap { + entries: SumTree>, + pub(crate) version: clock::Global, + pub(crate) start_bias: Bias, + pub(crate) end_bias: Bias, +} + +#[derive(Clone)] +struct AnchorRangeMultimapEntry { + range: FullOffsetRange, + value: T, +} + +#[derive(Clone, Debug)] +struct FullOffsetRange { + start: usize, + end: usize, +} + +#[derive(Clone, Debug)] +struct AnchorRangeMultimapSummary { + start: usize, + end: usize, + min_start: usize, + max_end: usize, + count: usize, +} + impl Anchor { pub fn min() -> Self { Self { - offset: 0, + full_offset: 0, bias: Bias::Left, version: Default::default(), } @@ -41,7 +69,7 @@ impl Anchor { pub fn max() -> Self { Self { - offset: usize::MAX, + full_offset: usize::MAX, bias: Bias::Right, version: Default::default(), } @@ -55,7 +83,7 @@ impl Anchor { } let offset_comparison = if self.version == other.version { - self.offset.cmp(&other.offset) + self.full_offset.cmp(&other.full_offset) } else { buffer .full_offset_for_anchor(self) @@ -136,6 +164,129 @@ impl AnchorRangeSet { } } +impl AnchorRangeMultimap { + fn intersecting_point_ranges<'a, O: ToOffset>( + &'a self, + range: Range, + content: impl Into>, + inclusive: bool, + ) -> impl Iterator, &T)> + 'a { + use super::ToPoint as _; + + let content = content.into(); + let start = range.start.to_full_offset(&content, self.start_bias); + let end = range.end.to_full_offset(&content, self.end_bias); + let mut cursor = self.entries.filter::<_, usize>( + move |summary: &AnchorRangeMultimapSummary| { + if inclusive { + start <= summary.max_end && end >= summary.min_start + } else { + start < summary.max_end && end > summary.min_start + } + }, + &(), + ); + let mut anchor = Anchor { + full_offset: 0, + bias: Bias::Left, + version: self.version.clone(), + }; + std::iter::from_fn(move || { + if let Some(item) = cursor.item() { + let ix = *cursor.start(); + anchor.full_offset = item.range.start; + anchor.bias = self.start_bias; + let start = anchor.to_point(&content); + anchor.full_offset = item.range.end; + anchor.bias = self.end_bias; + let end = anchor.to_point(&content); + let value = &item.value; + cursor.next(&()); + Some((ix, start..end, value)) + } else { + None + } + }) + } +} + +impl sum_tree::Item for AnchorRangeMultimapEntry { + type Summary = AnchorRangeMultimapSummary; + + fn summary(&self) -> Self::Summary { + AnchorRangeMultimapSummary { + start: self.range.start, + end: self.range.end, + min_start: self.range.start, + max_end: self.range.end, + count: 1, + } + } +} + +impl Default for AnchorRangeMultimapSummary { + fn default() -> Self { + Self { + start: 0, + end: usize::MAX, + min_start: usize::MAX, + max_end: 0, + count: 0, + } + } +} + +impl sum_tree::Summary for AnchorRangeMultimapSummary { + type Context = (); + + fn add_summary(&mut self, other: &Self, _: &Self::Context) { + self.min_start = self.min_start.min(other.min_start); + self.max_end = self.max_end.max(other.max_end); + + #[cfg(debug_assertions)] + { + let start_comparison = self.start.cmp(&other.start); + 
assert!(start_comparison <= Ordering::Equal); + if start_comparison == Ordering::Equal { + assert!(self.end.cmp(&other.end) >= Ordering::Equal); + } + } + + self.start = other.start; + self.end = other.end; + self.count += other.count; + } +} + +impl Default for FullOffsetRange { + fn default() -> Self { + Self { + start: 0, + end: usize::MAX, + } + } +} + +impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for usize { + fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) { + *self += summary.count; + } +} + +impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for FullOffsetRange { + fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) { + self.start = summary.start; + self.end = summary.end; + } +} + +impl<'a> sum_tree::SeekTarget<'a, AnchorRangeMultimapSummary, FullOffsetRange> for FullOffsetRange { + fn cmp(&self, cursor_location: &FullOffsetRange, _: &()) -> Ordering { + Ord::cmp(&self.start, &cursor_location.start) + .then_with(|| Ord::cmp(&cursor_location.end, &self.end)) + } +} + pub trait AnchorRangeExt { fn cmp<'a>(&self, b: &Range, buffer: impl Into>) -> Result; } diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index a5771ad4c0f55508bad6a93d73b674230a627e29..7203e0b1d71edb4b1daaffbafc740a7539f1dd30 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -1696,9 +1696,13 @@ impl<'a> Content<'a> { fn summary_for_anchor(&self, anchor: &Anchor) -> TextSummary { let cx = Some(anchor.version.clone()); let mut cursor = self.fragments.cursor::<(VersionedOffset, usize)>(); - cursor.seek(&VersionedOffset::Offset(anchor.offset), anchor.bias, &cx); + cursor.seek( + &VersionedOffset::Offset(anchor.full_offset), + anchor.bias, + &cx, + ); let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - anchor.offset - cursor.start().0.offset() + anchor.full_offset - cursor.start().0.offset() } else { 0 }; @@ -1766,13 +1770,8 @@ impl<'a> Content<'a> { } fn anchor_at(&self, position: T, bias: Bias) -> Anchor { - let offset = position.to_offset(self); - let max_offset = self.len(); - assert!(offset <= max_offset, "offset is out of range"); - let mut cursor = self.fragments.cursor::(); - cursor.seek(&offset, bias, &None); Anchor { - offset: offset + cursor.start().deleted, + full_offset: position.to_full_offset(self, bias), bias, version: self.version.clone(), } @@ -1842,9 +1841,13 @@ impl<'a> Content<'a> { let mut cursor = self .fragments .cursor::<(VersionedOffset, FragmentTextSummary)>(); - cursor.seek(&VersionedOffset::Offset(anchor.offset), anchor.bias, &cx); + cursor.seek( + &VersionedOffset::Offset(anchor.full_offset), + anchor.bias, + &cx, + ); let overshoot = if cursor.item().is_some() { - anchor.offset - cursor.start().0.offset() + anchor.full_offset - cursor.start().0.offset() } else { 0 }; @@ -2239,7 +2242,7 @@ impl<'a> Into for &'a Anchor { fn into(self) -> proto::Anchor { proto::Anchor { version: (&self.version).into(), - offset: self.offset as u64, + offset: self.full_offset as u64, bias: match self.bias { Bias::Left => proto::anchor::Bias::Left as i32, Bias::Right => proto::anchor::Bias::Right as i32, @@ -2373,7 +2376,7 @@ impl TryFrom for Anchor { } Ok(Self { - offset: message.offset as usize, + full_offset: message.offset as usize, bias: if message.bias == proto::anchor::Bias::Left as i32 { Bias::Left } else if message.bias == proto::anchor::Bias::Right as i32 { @@ -2408,6 +2411,14 @@ impl TryFrom for Selection { pub trait ToOffset { fn to_offset<'a>(&self, content: 
impl Into>) -> usize; + + fn to_full_offset<'a>(&self, content: impl Into>, bias: Bias) -> usize { + let content = content.into(); + let offset = self.to_offset(&content); + let mut cursor = content.fragments.cursor::(); + cursor.seek(&offset, bias, &None); + offset + cursor.start().deleted + } } impl ToOffset for Point { @@ -2417,7 +2428,8 @@ impl ToOffset for Point { } impl ToOffset for usize { - fn to_offset<'a>(&self, _: impl Into>) -> usize { + fn to_offset<'a>(&self, content: impl Into>) -> usize { + assert!(*self <= content.into().len(), "offset is out of range"); *self } } @@ -2426,6 +2438,10 @@ impl ToOffset for Anchor { fn to_offset<'a>(&self, content: impl Into>) -> usize { content.into().summary_for_anchor(self).bytes } + + fn to_full_offset<'a>(&self, _: impl Into>, _: Bias) -> usize { + self.full_offset + } } impl<'a> ToOffset for &'a Anchor { From 5dc47c625e87af95f603c2aaeb541e30b697360f Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 26 Oct 2021 14:27:00 +0200 Subject: [PATCH 12/61] Fix compilation errors --- crates/server/src/rpc.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 073bf5bc7ca47f5850af7d706ce1787f1817bb6c..81c2d2af275f5a4c7dc0942ab804f1c042f54217 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -1016,6 +1016,7 @@ mod tests { "/a".as_ref(), fs, lang_registry.clone(), + None, &mut cx_a.to_async(), ) .await @@ -1124,6 +1125,7 @@ mod tests { "/a".as_ref(), fs, lang_registry.clone(), + None, &mut cx_a.to_async(), ) .await @@ -1216,6 +1218,7 @@ mod tests { "/a".as_ref(), fs.clone(), lang_registry.clone(), + None, &mut cx_a.to_async(), ) .await @@ -1352,6 +1355,7 @@ mod tests { "/dir".as_ref(), fs, lang_registry.clone(), + None, &mut cx_a.to_async(), ) .await @@ -1436,6 +1440,7 @@ mod tests { "/dir".as_ref(), fs, lang_registry.clone(), + None, &mut cx_a.to_async(), ) .await @@ -1502,6 +1507,7 @@ mod tests { "/dir".as_ref(), fs, lang_registry.clone(), + None, &mut cx_a.to_async(), ) .await @@ -1563,6 +1569,7 @@ mod tests { "/a".as_ref(), fs, lang_registry.clone(), + None, &mut cx_a.to_async(), ) .await @@ -2033,6 +2040,7 @@ mod tests { "/a".as_ref(), fs.clone(), lang_registry.clone(), + None, &mut cx_a.to_async(), ) .await From e8a2885721efbd7b15ccb990d77d0d5b9f261b80 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 26 Oct 2021 14:27:19 +0200 Subject: [PATCH 13/61] Introduce `Content::anchor_range_multimap` --- crates/buffer/src/anchor.rs | 91 +++++++++++++---------- crates/buffer/src/lib.rs | 55 +++++++++++++- crates/editor/src/display_map/fold_map.rs | 2 +- crates/project/src/worktree.rs | 1 - crates/sum_tree/src/cursor.rs | 14 ++-- crates/sum_tree/src/lib.rs | 2 +- 6 files changed, 115 insertions(+), 50 deletions(-) diff --git a/crates/buffer/src/anchor.rs b/crates/buffer/src/anchor.rs index c8a7eaed24db47698fb970345206201341c5d76a..442e69406e32c7d70d188562fda06abd056cd859 100644 --- a/crates/buffer/src/anchor.rs +++ b/crates/buffer/src/anchor.rs @@ -31,26 +31,26 @@ pub struct AnchorRangeMap { pub struct AnchorRangeSet(pub(crate) AnchorRangeMap<()>); pub struct AnchorRangeMultimap { - entries: SumTree>, + pub(crate) entries: SumTree>, pub(crate) version: clock::Global, pub(crate) start_bias: Bias, pub(crate) end_bias: Bias, } #[derive(Clone)] -struct AnchorRangeMultimapEntry { - range: FullOffsetRange, - value: T, +pub(crate) struct AnchorRangeMultimapEntry { + pub(crate) range: FullOffsetRange, + pub(crate) value: T, } #[derive(Clone, Debug)] -struct 
FullOffsetRange { - start: usize, - end: usize, +pub(crate) struct FullOffsetRange { + pub(crate) start: usize, + pub(crate) end: usize, } #[derive(Clone, Debug)] -struct AnchorRangeMultimapSummary { +pub(crate) struct AnchorRangeMultimapSummary { start: usize, end: usize, min_start: usize, @@ -165,46 +165,61 @@ impl AnchorRangeSet { } impl AnchorRangeMultimap { - fn intersecting_point_ranges<'a, O: ToOffset>( + fn intersecting_point_ranges<'a>( &'a self, - range: Range, - content: impl Into>, + range: Range, + content: &'a Content<'a>, inclusive: bool, ) -> impl Iterator, &T)> + 'a { use super::ToPoint as _; - let content = content.into(); - let start = range.start.to_full_offset(&content, self.start_bias); - let end = range.end.to_full_offset(&content, self.end_bias); let mut cursor = self.entries.filter::<_, usize>( - move |summary: &AnchorRangeMultimapSummary| { - if inclusive { - start <= summary.max_end && end >= summary.min_start - } else { - start < summary.max_end && end > summary.min_start + { + let mut endpoint = Anchor { + full_offset: 0, + bias: Bias::Right, + version: self.version.clone(), + }; + move |summary: &AnchorRangeMultimapSummary| { + endpoint.full_offset = summary.max_end; + endpoint.bias = self.end_bias; + let start_cmp = range.start.cmp(&endpoint, content).unwrap(); + + endpoint.full_offset = summary.min_start; + endpoint.bias = self.start_bias; + let end_cmp = range.end.cmp(&endpoint, content).unwrap(); + + if inclusive { + start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal + } else { + start_cmp == Ordering::Less && end_cmp == Ordering::Greater + } } }, &(), ); - let mut anchor = Anchor { - full_offset: 0, - bias: Bias::Left, - version: self.version.clone(), - }; - std::iter::from_fn(move || { - if let Some(item) = cursor.item() { - let ix = *cursor.start(); - anchor.full_offset = item.range.start; - anchor.bias = self.start_bias; - let start = anchor.to_point(&content); - anchor.full_offset = item.range.end; - anchor.bias = self.end_bias; - let end = anchor.to_point(&content); - let value = &item.value; - cursor.next(&()); - Some((ix, start..end, value)) - } else { - None + + std::iter::from_fn({ + let mut endpoint = Anchor { + full_offset: 0, + bias: Bias::Left, + version: self.version.clone(), + }; + move || { + if let Some(item) = cursor.item() { + let ix = *cursor.start(); + endpoint.full_offset = item.range.start; + endpoint.bias = self.start_bias; + let start = endpoint.to_point(content); + endpoint.full_offset = item.range.end; + endpoint.bias = self.end_bias; + let end = endpoint.to_point(content); + let value = &item.value; + cursor.next(&()); + Some((ix, start..end, value)) + } else { + None + } } }) } diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index 7203e0b1d71edb4b1daaffbafc740a7539f1dd30..c037c386970a51ac9d55097e83ef514101378362 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -20,6 +20,7 @@ use rpc::proto; pub use selection::*; use std::{ cmp, + collections::{BTreeMap, BTreeSet}, convert::{TryFrom, TryInto}, iter::Iterator, ops::Range, @@ -315,7 +316,7 @@ impl UndoMap { } } -struct Edits<'a, F: Fn(&FragmentSummary) -> bool> { +struct Edits<'a, F: FnMut(&FragmentSummary) -> bool> { visible_text: &'a Rope, deleted_text: &'a Rope, cursor: Option>, @@ -1836,6 +1837,56 @@ impl<'a> Content<'a> { AnchorRangeSet(self.anchor_range_map(entries.into_iter().map(|range| (range, ())))) } + pub fn anchor_range_multimap( + &self, + start_bias: Bias, + end_bias: Bias, + entries: E, + ) -> AnchorRangeMultimap + 
where + T: Clone, + E: IntoIterator, T)>, + O: ToOffset, + { + let mut items = Vec::new(); + let mut endpoints = BTreeMap::new(); + for (ix, (range, value)) in entries.into_iter().enumerate() { + items.push(AnchorRangeMultimapEntry { + range: FullOffsetRange { start: 0, end: 0 }, + value, + }); + endpoints + .entry((range.start.to_offset(self), start_bias)) + .or_insert(Vec::new()) + .push((ix, true)); + endpoints + .entry((range.end.to_offset(self), end_bias)) + .or_insert(Vec::new()) + .push((ix, false)); + } + + let mut cursor = self.fragments.cursor::(); + for ((endpoint, bias), item_ixs) in endpoints { + cursor.seek_forward(&endpoint, bias, &None); + let full_offset = cursor.start().deleted + endpoint; + for (item_ix, is_start) in item_ixs { + if is_start { + items[item_ix].range.start = full_offset; + } else { + items[item_ix].range.end = full_offset; + } + } + } + items.sort_unstable_by_key(|i| (i.range.start, i.range.end)); + + AnchorRangeMultimap { + entries: SumTree::from_iter(items, &()), + version: self.version.clone(), + start_bias, + end_bias, + } + } + fn full_offset_for_anchor(&self, anchor: &Anchor) -> usize { let cx = Some(anchor.version.clone()); let mut cursor = self @@ -1917,7 +1968,7 @@ impl<'a> RopeBuilder<'a> { } } -impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { +impl<'a, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { type Item = Edit; fn next(&mut self) -> Option { diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 73e032e7f37b103b1e5753cd18fa91cedfb5a4de..7a230aa165e74266dedf7636cf05cecbf773ca7d 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -720,7 +720,7 @@ fn intersecting_folds<'a, T>( folds: &'a SumTree, range: Range, inclusive: bool, -) -> FilterCursor<'a, impl 'a + Fn(&FoldSummary) -> bool, Fold, usize> +) -> FilterCursor<'a, impl 'a + FnMut(&FoldSummary) -> bool, Fold, usize> where T: ToOffset, { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 426115bfd4ccf91717f1384c558dd8e206fce17f..642bc596afe547345a47709e40f4299a91fa77e8 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1190,7 +1190,6 @@ impl LocalWorktree { diagnostics: lsp::PublishDiagnosticsParams, cx: &mut ModelContext, ) { - // } } diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 21855933288e0b84b62b95f09210443eca265923..11c2847a7ffe022fac6fb7be4220a2fcf6457cb5 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -184,9 +184,9 @@ where self.next_internal(|_| true, cx) } - fn next_internal(&mut self, filter_node: F, cx: &::Context) + fn next_internal(&mut self, mut filter_node: F, cx: &::Context) where - F: Fn(&T::Summary) -> bool, + F: FnMut(&T::Summary) -> bool, { let mut descend = false; @@ -509,24 +509,24 @@ where } } -pub struct FilterCursor<'a, F: Fn(&T::Summary) -> bool, T: Item, D> { +pub struct FilterCursor<'a, F: FnMut(&T::Summary) -> bool, T: Item, D> { cursor: Cursor<'a, T, D>, filter_node: F, } impl<'a, F, T, D> FilterCursor<'a, F, T, D> where - F: Fn(&T::Summary) -> bool, + F: FnMut(&T::Summary) -> bool, T: Item, D: Dimension<'a, T::Summary>, { pub fn new( tree: &'a SumTree, - filter_node: F, + mut filter_node: F, cx: &::Context, ) -> Self { let mut cursor = tree.cursor::(); - cursor.next_internal(&filter_node, cx); + cursor.next_internal(&mut filter_node, cx); Self { cursor, filter_node, @@ -542,7 +542,7 @@ 
where } pub fn next(&mut self, cx: &::Context) { - self.cursor.next_internal(&self.filter_node, cx); + self.cursor.next_internal(&mut self.filter_node, cx); } } diff --git a/crates/sum_tree/src/lib.rs b/crates/sum_tree/src/lib.rs index 2bbb567ba16824074bf3f34990d835828bb030ec..eeef9563249b0af2dbb67c6da699927004d577bc 100644 --- a/crates/sum_tree/src/lib.rs +++ b/crates/sum_tree/src/lib.rs @@ -163,7 +163,7 @@ impl SumTree { cx: &::Context, ) -> FilterCursor where - F: Fn(&T::Summary) -> bool, + F: FnMut(&T::Summary) -> bool, U: Dimension<'a, T::Summary>, { FilterCursor::new(self, filter_node, cx) From 60abc5f09032668a02903a1e8dbebda2241b4ba5 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 26 Oct 2021 17:04:12 +0200 Subject: [PATCH 14/61] Take `ToOffset` instead of anchors in `intersecting_point_ranges` Co-Authored-By: Nathan Sobo --- crates/buffer/src/anchor.rs | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/crates/buffer/src/anchor.rs b/crates/buffer/src/anchor.rs index 442e69406e32c7d70d188562fda06abd056cd859..bb8186ed5fb07299b0a13bb34a0ed9d55444044e 100644 --- a/crates/buffer/src/anchor.rs +++ b/crates/buffer/src/anchor.rs @@ -165,14 +165,20 @@ impl AnchorRangeSet { } impl AnchorRangeMultimap { - fn intersecting_point_ranges<'a>( + fn intersecting_point_ranges<'a, O>( &'a self, - range: Range, + range: Range, content: &'a Content<'a>, inclusive: bool, - ) -> impl Iterator, &T)> + 'a { + ) -> impl Iterator, &T)> + 'a + where + O: ToOffset, + { use super::ToPoint as _; + let end_bias = if inclusive { Bias::Right } else { Bias::Left }; + let range = range.start.to_full_offset(content, Bias::Left) + ..range.end.to_full_offset(content, end_bias); let mut cursor = self.entries.filter::<_, usize>( { let mut endpoint = Anchor { @@ -183,11 +189,13 @@ impl AnchorRangeMultimap { move |summary: &AnchorRangeMultimapSummary| { endpoint.full_offset = summary.max_end; endpoint.bias = self.end_bias; - let start_cmp = range.start.cmp(&endpoint, content).unwrap(); + let max_end = endpoint.to_full_offset(content, self.end_bias); + let start_cmp = range.start.cmp(&max_end); endpoint.full_offset = summary.min_start; endpoint.bias = self.start_bias; - let end_cmp = range.end.cmp(&endpoint, content).unwrap(); + let min_start = endpoint.to_full_offset(content, self.start_bias); + let end_cmp = range.end.cmp(&min_start); if inclusive { start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal From 0674e768648b4ceb42f51cff3e38b6c70afcb8de Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 26 Oct 2021 19:42:40 +0200 Subject: [PATCH 15/61] WIP --- Cargo.lock | 2 + crates/buffer/src/anchor.rs | 16 ++- crates/buffer/src/lib.rs | 114 ++++++++------- crates/editor/src/display_map.rs | 4 +- crates/editor/src/lib.rs | 6 +- crates/language/Cargo.toml | 2 + crates/language/src/lib.rs | 233 ++++++++++++++++++++++++++----- crates/language/src/tests.rs | 14 +- crates/lsp/src/lib.rs | 2 + crates/project/src/worktree.rs | 48 ++++++- crates/workspace/src/items.rs | 11 +- 11 files changed, 341 insertions(+), 111 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3a9736c56484dc1019898edfc53e64b69e28665a..accd0a093900ee80ef30d0cebf80b1b008b36b06 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2828,7 +2828,9 @@ dependencies = [ "gpui", "lazy_static", "log", + "lsp", "parking_lot", + "postage", "rand 0.8.3", "rpc", "serde 1.0.125", diff --git a/crates/buffer/src/anchor.rs b/crates/buffer/src/anchor.rs index 
bb8186ed5fb07299b0a13bb34a0ed9d55444044e..9cafd673a9d5fd45bfa7c70873a93f5487e08ba6 100644 --- a/crates/buffer/src/anchor.rs +++ b/crates/buffer/src/anchor.rs @@ -1,6 +1,4 @@ -use crate::{Point, ToOffset}; - -use super::{Buffer, Content}; +use super::{Buffer, Content, Point, ToOffset}; use anyhow::Result; use std::{cmp::Ordering, ops::Range}; use sum_tree::{Bias, SumTree}; @@ -30,6 +28,7 @@ pub struct AnchorRangeMap { #[derive(Clone)] pub struct AnchorRangeSet(pub(crate) AnchorRangeMap<()>); +#[derive(Clone)] pub struct AnchorRangeMultimap { pub(crate) entries: SumTree>, pub(crate) version: clock::Global, @@ -164,6 +163,17 @@ impl AnchorRangeSet { } } +impl Default for AnchorRangeMultimap { + fn default() -> Self { + Self { + entries: Default::default(), + version: Default::default(), + start_bias: Bias::Left, + end_bias: Bias::Left, + } + } +} + impl AnchorRangeMultimap { fn intersecting_point_ranges<'a, O>( &'a self, diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index c037c386970a51ac9d55097e83ef514101378362..4fac433781afd0e262114583a1dbcf81ef2a55b1 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -19,8 +19,7 @@ pub use rope::{Chunks, Rope, TextSummary}; use rpc::proto; pub use selection::*; use std::{ - cmp, - collections::{BTreeMap, BTreeSet}, + cmp::{self, Reverse}, convert::{TryFrom, TryInto}, iter::Iterator, ops::Range, @@ -534,6 +533,8 @@ impl Buffer { pub fn snapshot(&self) -> Snapshot { Snapshot { visible_text: self.visible_text.clone(), + deleted_text: self.deleted_text.clone(), + undo_map: self.undo_map.clone(), fragments: self.fragments.clone(), version: self.version.clone(), } @@ -1344,27 +1345,7 @@ impl Buffer { } pub fn edits_since<'a>(&'a self, since: clock::Global) -> impl 'a + Iterator { - let since_2 = since.clone(); - let cursor = if since == self.version { - None - } else { - Some(self.fragments.filter( - move |summary| summary.max_version.changed_since(&since_2), - &None, - )) - }; - - Edits { - visible_text: &self.visible_text, - deleted_text: &self.deleted_text, - cursor, - undos: &self.undo_map, - since, - old_offset: 0, - new_offset: 0, - old_point: Point::zero(), - new_point: Point::zero(), - } + self.content().edits_since(since) } } @@ -1522,6 +1503,8 @@ impl Buffer { #[derive(Clone)] pub struct Snapshot { visible_text: Rope, + deleted_text: Rope, + undo_map: UndoMap, fragments: SumTree, version: clock::Global, } @@ -1596,6 +1579,14 @@ impl Snapshot { self.content().anchor_at(position, Bias::Right) } + pub fn edits_since<'a>(&'a self, since: clock::Global) -> impl 'a + Iterator { + self.content().edits_since(since) + } + + pub fn version(&self) -> &clock::Global { + &self.version + } + pub fn content(&self) -> Content { self.into() } @@ -1603,6 +1594,8 @@ impl Snapshot { pub struct Content<'a> { visible_text: &'a Rope, + deleted_text: &'a Rope, + undo_map: &'a UndoMap, fragments: &'a SumTree, version: &'a clock::Global, } @@ -1611,6 +1604,8 @@ impl<'a> From<&'a Snapshot> for Content<'a> { fn from(snapshot: &'a Snapshot) -> Self { Self { visible_text: &snapshot.visible_text, + deleted_text: &snapshot.deleted_text, + undo_map: &snapshot.undo_map, fragments: &snapshot.fragments, version: &snapshot.version, } @@ -1621,6 +1616,8 @@ impl<'a> From<&'a Buffer> for Content<'a> { fn from(buffer: &'a Buffer) -> Self { Self { visible_text: &buffer.visible_text, + deleted_text: &buffer.deleted_text, + undo_map: &buffer.undo_map, fragments: &buffer.fragments, version: &buffer.version, } @@ -1631,6 +1628,8 @@ impl<'a> From<&'a mut 
Buffer> for Content<'a> { fn from(buffer: &'a mut Buffer) -> Self { Self { visible_text: &buffer.visible_text, + deleted_text: &buffer.deleted_text, + undo_map: &buffer.undo_map, fragments: &buffer.fragments, version: &buffer.version, } @@ -1641,6 +1640,8 @@ impl<'a> From<&'a Content<'a>> for Content<'a> { fn from(content: &'a Content) -> Self { Self { visible_text: &content.visible_text, + deleted_text: &content.deleted_text, + undo_map: &content.undo_map, fragments: &content.fragments, version: &content.version, } @@ -1848,39 +1849,19 @@ impl<'a> Content<'a> { E: IntoIterator, T)>, O: ToOffset, { - let mut items = Vec::new(); - let mut endpoints = BTreeMap::new(); - for (ix, (range, value)) in entries.into_iter().enumerate() { - items.push(AnchorRangeMultimapEntry { - range: FullOffsetRange { start: 0, end: 0 }, + let mut entries = entries + .into_iter() + .map(|(range, value)| AnchorRangeMultimapEntry { + range: FullOffsetRange { + start: range.start.to_full_offset(self, start_bias), + end: range.end.to_full_offset(self, end_bias), + }, value, - }); - endpoints - .entry((range.start.to_offset(self), start_bias)) - .or_insert(Vec::new()) - .push((ix, true)); - endpoints - .entry((range.end.to_offset(self), end_bias)) - .or_insert(Vec::new()) - .push((ix, false)); - } - - let mut cursor = self.fragments.cursor::(); - for ((endpoint, bias), item_ixs) in endpoints { - cursor.seek_forward(&endpoint, bias, &None); - let full_offset = cursor.start().deleted + endpoint; - for (item_ix, is_start) in item_ixs { - if is_start { - items[item_ix].range.start = full_offset; - } else { - items[item_ix].range.end = full_offset; - } - } - } - items.sort_unstable_by_key(|i| (i.range.start, i.range.end)); - + }) + .collect::>(); + entries.sort_unstable_by_key(|i| (i.range.start, Reverse(i.range.end))); AnchorRangeMultimap { - entries: SumTree::from_iter(items, &()), + entries: SumTree::from_iter(entries, &()), version: self.version.clone(), start_bias, end_bias, @@ -1913,6 +1894,31 @@ impl<'a> Content<'a> { Err(anyhow!("offset out of bounds")) } } + + // TODO: take a reference to clock::Global. 
+ pub fn edits_since(&self, since: clock::Global) -> impl 'a + Iterator { + let since_2 = since.clone(); + let cursor = if since == *self.version { + None + } else { + Some(self.fragments.filter( + move |summary| summary.max_version.changed_since(&since_2), + &None, + )) + }; + + Edits { + visible_text: &self.visible_text, + deleted_text: &self.deleted_text, + cursor, + undos: &self.undo_map, + since, + old_offset: 0, + new_offset: 0, + old_point: Point::zero(), + new_point: Point::zero(), + } + } } struct RopeBuilder<'a> { diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 30a506ea920b717c45b4146e106edf942a31eb34..f343442392363eaf0b70fc8061f22137ba6a96bd 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -702,7 +702,7 @@ mod tests { lang.set_theme(&theme); let buffer = cx.add_model(|cx| { - Buffer::from_history(0, History::new(text.into()), None, Some(lang), cx) + Buffer::from_history(0, History::new(text.into()), None, Some(lang), None, cx) }); buffer.condition(&cx, |buf, _| !buf.is_parsing()).await; @@ -790,7 +790,7 @@ mod tests { lang.set_theme(&theme); let buffer = cx.add_model(|cx| { - Buffer::from_history(0, History::new(text.into()), None, Some(lang), cx) + Buffer::from_history(0, History::new(text.into()), None, Some(lang), None, cx) }); buffer.condition(&cx, |buf, _| !buf.is_parsing()).await; diff --git a/crates/editor/src/lib.rs b/crates/editor/src/lib.rs index 61414d5dc7ea9864a4ea5ee53bd744f6e7266b20..b2a97982b70c86c50375c21b2f1e2a59cbf78917 100644 --- a/crates/editor/src/lib.rs +++ b/crates/editor/src/lib.rs @@ -4422,7 +4422,7 @@ mod tests { let buffer = cx.add_model(|cx| { let history = History::new(text.into()); - Buffer::from_history(0, history, None, Some(language), cx) + Buffer::from_history(0, history, None, Some(language), None, cx) }); let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx)); view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing()) @@ -4581,7 +4581,7 @@ mod tests { let buffer = cx.add_model(|cx| { let history = History::new(text.into()); - Buffer::from_history(0, history, None, Some(language), cx) + Buffer::from_history(0, history, None, Some(language), None, cx) }); let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx)); view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing()) @@ -4696,7 +4696,7 @@ mod tests { let buffer = cx.add_model(|cx| { let history = History::new(text.into()); - Buffer::from_history(0, history, None, Some(language), cx) + Buffer::from_history(0, history, None, Some(language), None, cx) }); let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx)); view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing()) diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 3cbfb3ae1253074092f5de0066822f3d5cd050c2..4aba95f97a8a58ddb26b470ec6bb4a8c90207fd7 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -10,6 +10,7 @@ test-support = ["rand", "buffer/test-support"] buffer = { path = "../buffer" } clock = { path = "../clock" } gpui = { path = "../gpui" } +lsp = { path = "../lsp" } rpc = { path = "../rpc" } theme = { path = "../theme" } util = { path = "../util" } @@ -18,6 +19,7 @@ futures = "0.3" lazy_static = "1.4" log = "0.4" parking_lot = "0.11.1" +postage = { version = "0.4.1", features = ["futures-traits"] } rand = { version = "0.8.3", optional = true } serde = { version = "1", features = ["derive"] } similar = "1.3" diff --git 
a/crates/language/src/lib.rs b/crates/language/src/lib.rs index b80eed7e331d95b01592c8ae32459d9c7e14da5b..6249b613c97bbc57b29d407edd5e39ccb44e8ba4 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -14,6 +14,7 @@ use futures::FutureExt as _; use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task}; use lazy_static::lazy_static; use parking_lot::Mutex; +use postage::{prelude::Stream, sink::Sink, watch}; use rpc::proto; use similar::{ChangeTag, TextDiff}; use smol::future::yield_now; @@ -32,7 +33,7 @@ use std::{ time::{Duration, Instant, SystemTime, UNIX_EPOCH}, }; use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; -use util::TryFutureExt as _; +use util::{post_inc, TryFutureExt as _}; thread_local! { static PARSER: RefCell = RefCell::new(Parser::new()); @@ -57,6 +58,8 @@ pub struct Buffer { syntax_tree: Mutex>, parsing_in_background: bool, parse_count: usize, + diagnostics: AnchorRangeMultimap<()>, + language_server: Option, #[cfg(test)] operations: Vec, } @@ -69,6 +72,20 @@ pub struct Snapshot { query_cursor: QueryCursorHandle, } +struct LanguageServerState { + latest_snapshot: watch::Sender>, + pending_snapshots: BTreeMap, + next_version: usize, + _maintain_server: Task>, +} + +#[derive(Clone)] +struct LanguageServerSnapshot { + buffer_snapshot: buffer::Snapshot, + version: usize, + path: Arc, +} + #[derive(Clone, Debug, Eq, PartialEq)] pub enum Event { Edited, @@ -87,8 +104,14 @@ pub trait File { fn mtime(&self) -> SystemTime; + /// Returns the path of this file relative to the worktree's root directory. fn path(&self) -> &Arc; + /// Returns the absolute path of this file. + fn abs_path(&self, cx: &AppContext) -> Option; + + /// Returns the path of this file relative to the worktree's parent directory (this means it + /// includes the name of the worktree's root folder). fn full_path(&self, cx: &AppContext) -> PathBuf; /// Returns the last component of this handle's absolute path. 
If this handle refers to the root @@ -173,6 +196,7 @@ impl Buffer { ), None, None, + None, cx, ) } @@ -182,12 +206,14 @@ impl Buffer { history: History, file: Option>, language: Option>, + language_server: Option>, cx: &mut ModelContext, ) -> Self { Self::build( TextBuffer::new(replica_id, cx.model_id() as u64, history), file, language, + language_server, cx, ) } @@ -203,6 +229,7 @@ impl Buffer { TextBuffer::from_proto(replica_id, message)?, file, language, + None, cx, )) } @@ -211,6 +238,7 @@ impl Buffer { buffer: TextBuffer, file: Option>, language: Option>, + language_server: Option>, cx: &mut ModelContext, ) -> Self { let saved_mtime; @@ -231,12 +259,13 @@ impl Buffer { sync_parse_timeout: Duration::from_millis(1), autoindent_requests: Default::default(), pending_autoindent: Default::default(), - language, - + language: None, + diagnostics: Default::default(), + language_server: None, #[cfg(test)] operations: Default::default(), }; - result.reparse(cx); + result.set_language(language, language_server, cx); result } @@ -274,9 +303,90 @@ impl Buffer { })) } - pub fn set_language(&mut self, language: Option>, cx: &mut ModelContext) { + pub fn set_language( + &mut self, + language: Option>, + language_server: Option>, + cx: &mut ModelContext, + ) { self.language = language; + self.language_server = if let Some(server) = language_server { + let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel(); + Some(LanguageServerState { + latest_snapshot: latest_snapshot_tx, + pending_snapshots: Default::default(), + next_version: 0, + _maintain_server: cx.background().spawn( + async move { + let mut prev_snapshot: Option = None; + while let Some(snapshot) = latest_snapshot_rx.recv().await { + if let Some(snapshot) = snapshot { + let uri = lsp::Url::from_file_path(&snapshot.path).unwrap(); + if let Some(prev_snapshot) = prev_snapshot { + let changes = lsp::DidChangeTextDocumentParams { + text_document: lsp::VersionedTextDocumentIdentifier::new( + uri, + snapshot.version as i32, + ), + content_changes: snapshot + .buffer_snapshot + .edits_since( + prev_snapshot.buffer_snapshot.version().clone(), + ) + .map(|edit| { + lsp::TextDocumentContentChangeEvent { + // TODO: Use UTF-16 positions. + range: Some(lsp::Range::new( + lsp::Position::new( + edit.old_lines.start.row, + edit.old_lines.start.column, + ), + lsp::Position::new( + edit.old_lines.end.row, + edit.old_lines.end.column, + ), + )), + range_length: None, + text: snapshot + .buffer_snapshot + .text_for_range(edit.new_bytes) + .collect(), + } + }) + .collect(), + }; + server + .notify::(changes) + .await?; + } else { + server + .notify::( + lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( + uri, + Default::default(), + snapshot.version as i32, + snapshot.buffer_snapshot.text().into(), + ), + }, + ) + .await?; + } + + prev_snapshot = Some(snapshot); + } + } + Ok(()) + } + .log_err(), + ), + }) + } else { + None + }; + self.reparse(cx); + self.update_language_server(cx); } pub fn did_save( @@ -486,6 +596,45 @@ impl Buffer { cx.notify(); } + pub fn update_diagnostics( + &mut self, + params: lsp::PublishDiagnosticsParams, + cx: &mut ModelContext, + ) -> Result<()> { + dbg!(¶ms); + let language_server = self.language_server.as_mut().unwrap(); + let version = params.version.ok_or_else(|| anyhow!("missing version"))? 
as usize; + let snapshot = language_server + .pending_snapshots + .get(&version) + .ok_or_else(|| anyhow!("missing snapshot"))?; + self.diagnostics = snapshot.buffer_snapshot.content().anchor_range_multimap( + Bias::Left, + Bias::Right, + params.diagnostics.into_iter().map(|diagnostic| { + // TODO: Use UTF-16 positions. + let start = Point::new( + diagnostic.range.start.line, + diagnostic.range.start.character, + ); + let end = Point::new(diagnostic.range.end.line, diagnostic.range.end.character); + (start..end, ()) + }), + ); + + let versions_to_delete = language_server + .pending_snapshots + .range(..version) + .map(|(v, _)| *v) + .collect::>(); + for version in versions_to_delete { + language_server.pending_snapshots.remove(&version); + } + + cx.notify(); + Ok(()) + } + fn request_autoindent(&mut self, cx: &mut ModelContext) { if let Some(indent_columns) = self.compute_autoindents() { let indent_columns = cx.background().spawn(indent_columns); @@ -811,17 +960,38 @@ impl Buffer { cx: &mut ModelContext, ) -> Result<()> { if let Some(start_version) = self.text.end_transaction_at(selection_set_ids, now) { - cx.notify(); let was_dirty = start_version != self.saved_version; - let edited = self.edits_since(start_version).next().is_some(); - if edited { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } + self.did_edit(start_version, was_dirty, cx); } Ok(()) } + fn update_language_server(&mut self, cx: &AppContext) { + let language_server = if let Some(language_server) = self.language_server.as_mut() { + language_server + } else { + return; + }; + let file = if let Some(file) = self.file.as_ref() { + file + } else { + return; + }; + + let version = post_inc(&mut language_server.next_version); + let snapshot = LanguageServerSnapshot { + buffer_snapshot: self.text.snapshot(), + version, + path: Arc::from(file.abs_path(cx).unwrap()), + }; + language_server + .pending_snapshots + .insert(version, snapshot.clone()); + let _ = language_server + .latest_snapshot + .blocking_send(Some(snapshot)); + } + pub fn edit(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext) where I: IntoIterator>, @@ -929,11 +1099,24 @@ impl Buffer { self.send_operation(Operation::Edit(edit), cx); } - fn did_edit(&self, was_dirty: bool, cx: &mut ModelContext) { + fn did_edit( + &mut self, + old_version: clock::Global, + was_dirty: bool, + cx: &mut ModelContext, + ) { + if self.edits_since(old_version).next().is_none() { + return; + } + + self.reparse(cx); + self.update_language_server(cx); + cx.emit(Event::Edited); if !was_dirty { cx.emit(Event::Dirtied); } + cx.notify(); } pub fn add_selection_set( @@ -991,18 +1174,10 @@ impl Buffer { cx: &mut ModelContext, ) -> Result<()> { self.pending_autoindent.take(); - let was_dirty = self.is_dirty(); let old_version = self.version.clone(); - self.text.apply_ops(ops)?; - - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } - + self.did_edit(old_version, was_dirty, cx); Ok(()) } @@ -1031,11 +1206,7 @@ impl Buffer { self.send_operation(operation, cx); } - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } + self.did_edit(old_version, was_dirty, cx); } pub fn redo(&mut self, cx: &mut ModelContext) { @@ -1046,11 +1217,7 @@ impl Buffer { self.send_operation(operation, cx); } - cx.notify(); - if self.edits_since(old_version).next().is_some() { - self.did_edit(was_dirty, cx); - self.reparse(cx); - } + self.did_edit(old_version, 
was_dirty, cx); } } @@ -1081,6 +1248,7 @@ impl Entity for Buffer { } } +// TODO: Do we need to clone a buffer? impl Clone for Buffer { fn clone(&self) -> Self { Self { @@ -1095,7 +1263,8 @@ impl Clone for Buffer { parse_count: self.parse_count, autoindent_requests: Default::default(), pending_autoindent: Default::default(), - + diagnostics: self.diagnostics.clone(), + language_server: None, #[cfg(test)] operations: self.operations.clone(), } diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 23cdced4c70c09fbc8dc6bfa696fa76eebe51d1d..cc1382a0988586b5e75b25cce76bdb57ba8c481d 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -80,7 +80,7 @@ async fn test_apply_diff(mut cx: gpui::TestAppContext) { async fn test_reparse(mut cx: gpui::TestAppContext) { let buffer = cx.add_model(|cx| { let text = "fn a() {}".into(); - Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx) + Buffer::from_history(0, History::new(text), None, Some(rust_lang()), None, cx) }); // Wait for the initial text to parse @@ -224,7 +224,7 @@ fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) { " .unindent() .into(); - Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx) + Buffer::from_history(0, History::new(text), None, Some(rust_lang()), None, cx) }); let buffer = buffer.read(cx); assert_eq!( @@ -254,7 +254,8 @@ fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) { fn test_edit_with_autoindent(cx: &mut MutableAppContext) { cx.add_model(|cx| { let text = "fn a() {}".into(); - let mut buffer = Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx); + let mut buffer = + Buffer::from_history(0, History::new(text), None, Some(rust_lang()), None, cx); buffer.edit_with_autoindent([8..8], "\n\n", cx); assert_eq!(buffer.text(), "fn a() {\n \n}"); @@ -273,7 +274,7 @@ fn test_edit_with_autoindent(cx: &mut MutableAppContext) { fn test_autoindent_moves_selections(cx: &mut MutableAppContext) { cx.add_model(|cx| { let text = History::new("fn a() {}".into()); - let mut buffer = Buffer::from_history(0, text, None, Some(rust_lang()), cx); + let mut buffer = Buffer::from_history(0, text, None, Some(rust_lang()), None, cx); let selection_set_id = buffer.add_selection_set(Vec::new(), cx); buffer.start_transaction(Some(selection_set_id)).unwrap(); @@ -332,7 +333,8 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta " .unindent() .into(); - let mut buffer = Buffer::from_history(0, History::new(text), None, Some(rust_lang()), cx); + let mut buffer = + Buffer::from_history(0, History::new(text), None, Some(rust_lang()), None, cx); // Lines 2 and 3 don't match the indentation suggestion. When editing these lines, // their indentation is not adjusted. 
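The buffer changes in this patch tag every state sent to the language server with an increasing version number and keep the corresponding text snapshot in a map keyed by that version, so diagnostics that arrive for an older version can still be anchored against the text they were computed from, and older snapshots are pruned once a newer version has been handled. The following is a minimal, self-contained sketch of that bookkeeping only — the names (Versioned, push, take, Snap) are made up for illustration and are not the crate's API; the real code stores LanguageServerSnapshot values and prunes with the same range(..version) pattern seen in the diff.

use std::collections::BTreeMap;

// Illustrative sketch only; `Snap` stands in for the buffer snapshot type.
struct Versioned<Snap> {
    next_version: usize,
    pending: BTreeMap<usize, Snap>,
}

impl<Snap: Clone> Versioned<Snap> {
    // Record the snapshot that was just sent to the server; returns its version.
    fn push(&mut self, snapshot: Snap) -> usize {
        let version = self.next_version;
        self.next_version += 1;
        self.pending.insert(version, snapshot);
        version
    }

    // Diagnostics come back tagged with a version: fetch that snapshot so the
    // reported ranges can be anchored against it, then drop anything older.
    fn take(&mut self, version: usize) -> Option<Snap> {
        let snapshot = self.pending.get(&version).cloned()?;
        let stale: Vec<usize> = self.pending.range(..version).map(|(v, _)| *v).collect();
        for v in stale {
            self.pending.remove(&v);
        }
        Some(snapshot)
    }
}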
@@ -383,7 +385,7 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppConte .unindent() .into(), ); - let mut buffer = Buffer::from_history(0, text, None, Some(rust_lang()), cx); + let mut buffer = Buffer::from_history(0, text, None, Some(rust_lang()), None, cx); buffer.edit_with_autoindent([5..5], "\nb", cx); assert_eq!( diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index 60e5ba906577198d2f82148f606398bbfdc507f6..1aba87e37591fa804804a8511c216b3c8271bd7d 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -157,6 +157,7 @@ impl LanguageServer { buffer.resize(message_len, 0); stdout.read_exact(&mut buffer).await?; + println!("{}", std::str::from_utf8(&buffer).unwrap()); if let Ok(AnyNotification { method, params }) = serde_json::from_slice(&buffer) { @@ -200,6 +201,7 @@ impl LanguageServer { content_len_buffer.clear(); let message = outbound_rx.recv().await?; + println!("{}", std::str::from_utf8(&message).unwrap()); write!(content_len_buffer, "{}", message.len()).unwrap(); stdin.write_all(CONTENT_LEN_HEADER.as_bytes()).await?; stdin.write_all(&content_len_buffer).await?; diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 642bc596afe547345a47709e40f4299a91fa77e8..47e400aa0992bc7a6be35253cdbd92dffcbb7266 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -40,7 +40,7 @@ use std::{ }; use sum_tree::Bias; use sum_tree::{Edit, SeekTarget, SumTree}; -use util::TryFutureExt; +use util::{ResultExt, TryFutureExt}; lazy_static! { static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore"); @@ -295,6 +295,13 @@ impl Worktree { } } + pub fn language_server(&self) -> Option<&Arc> { + match self { + Worktree::Local(worktree) => worktree.language_server.as_ref(), + Worktree::Remote(_) => None, + } + } + pub fn handle_add_peer( &mut self, envelope: TypedEnvelope, @@ -667,9 +674,10 @@ pub struct LocalWorktree { share: Option, open_buffers: HashMap>, shared_buffers: HashMap>>, + diagnostics: HashMap>, peers: HashMap, - languages: Arc, queued_operations: Vec<(u64, Operation)>, + languages: Arc, rpc: Arc, fs: Arc, language_server: Option>, @@ -781,6 +789,7 @@ impl LocalWorktree { poll_task: None, open_buffers: Default::default(), shared_buffers: Default::default(), + diagnostics: Default::default(), queued_operations: Default::default(), peers: Default::default(), languages, @@ -828,7 +837,7 @@ impl LocalWorktree { if let Some(handle) = cx.read(|cx| this.upgrade(cx)) { handle.update(&mut cx, |this, cx| { let this = this.as_local_mut().unwrap(); - this.update_diagnostics(diagnostics, cx); + this.update_diagnostics(diagnostics, cx).log_err(); }); } else { break; @@ -867,6 +876,7 @@ impl LocalWorktree { }); let path = Arc::from(path); + let language_server = self.language_server.clone(); cx.spawn(|this, mut cx| async move { if let Some(existing_buffer) = existing_buffer { Ok(existing_buffer) @@ -887,6 +897,7 @@ impl LocalWorktree { History::new(contents.into()), Some(Box::new(file)), language, + language_server, cx, ) }); @@ -1187,9 +1198,29 @@ impl LocalWorktree { fn update_diagnostics( &mut self, - diagnostics: lsp::PublishDiagnosticsParams, + params: lsp::PublishDiagnosticsParams, cx: &mut ModelContext, - ) { + ) -> Result<()> { + let file_path = params + .uri + .to_file_path() + .map_err(|_| anyhow!("URI is not a file"))?; + + for buffer in self.open_buffers.values() { + if let Some(buffer) = buffer.upgrade(cx) { + if buffer + .read(cx) + .file() + .map_or(false, |file| file.path().as_ref() 
== file_path) + { + buffer.update(cx, |buffer, cx| buffer.update_diagnostics(params, cx))?; + return Ok(()); + } + } + } + + self.diagnostics.insert(file_path, params.diagnostics); + Ok(()) } } @@ -1809,6 +1840,13 @@ impl language::File for File { &self.path } + fn abs_path(&self, cx: &AppContext) -> Option { + let worktree = self.worktree.read(cx); + worktree + .as_local() + .map(|worktree| worktree.absolutize(&self.path)) + } + fn full_path(&self, cx: &AppContext) -> PathBuf { let worktree = self.worktree.read(cx); let mut full_path = PathBuf::new(); diff --git a/crates/workspace/src/items.rs b/crates/workspace/src/items.rs index 07c511602c1e015e2468f7131e2831aa4dc616f9..9370ac7f85e3b4226140bcc2c124b2db670db967 100644 --- a/crates/workspace/src/items.rs +++ b/crates/workspace/src/items.rs @@ -127,16 +127,15 @@ impl ItemView for Editor { cx.spawn(|buffer, mut cx| async move { save_as.await.map(|new_file| { - let language = worktree.read_with(&cx, |worktree, cx| { - worktree - .languages() - .select_language(new_file.full_path(cx)) - .cloned() + let (language, language_server) = worktree.read_with(&cx, |worktree, cx| { + let language = worktree.languages().select_language(new_file.full_path(cx)); + let language_server = worktree.language_server(); + (language.cloned(), language_server.cloned()) }); buffer.update(&mut cx, |buffer, cx| { buffer.did_save(version, new_file.mtime, Some(Box::new(new_file)), cx); - buffer.set_language(language, cx); + buffer.set_language(language, language_server, cx); }); }) }) From de8218314c3d74c676a209fa97cce47c779a8a29 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 26 Oct 2021 11:45:59 -0700 Subject: [PATCH 16/61] Notify language server when saving a buffer Co-Authored-By: Nathan Sobo --- crates/language/src/lib.rs | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 6249b613c97bbc57b29d407edd5e39ccb44e8ba4..15dcbf0df63c5ac81f1a31f42a8faade5726d338 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -13,6 +13,7 @@ use clock::ReplicaId; use futures::FutureExt as _; use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task}; use lazy_static::lazy_static; +use lsp::LanguageServer; use parking_lot::Mutex; use postage::{prelude::Stream, sink::Sink, watch}; use rpc::proto; @@ -73,6 +74,7 @@ pub struct Snapshot { } struct LanguageServerState { + server: Arc, latest_snapshot: watch::Sender>, pending_snapshots: BTreeMap, next_version: usize, @@ -316,6 +318,7 @@ impl Buffer { latest_snapshot: latest_snapshot_tx, pending_snapshots: Default::default(), next_version: 0, + server: server.clone(), _maintain_server: cx.background().spawn( async move { let mut prev_snapshot: Option = None; @@ -401,6 +404,25 @@ impl Buffer { if let Some(new_file) = new_file { self.file = Some(new_file); } + if let Some(state) = &self.language_server { + cx.background() + .spawn( + state + .server + .notify::( + lsp::DidSaveTextDocumentParams { + text_document: lsp::TextDocumentIdentifier { + uri: lsp::Url::from_file_path( + self.file.as_ref().unwrap().abs_path(cx).unwrap(), + ) + .unwrap(), + }, + text: None, + }, + ), + ) + .detach() + } cx.emit(Event::Saved); } From 7d5425e1423e5bb1cbaef268f0969aab0c4da556 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 26 Oct 2021 12:17:51 -0700 Subject: [PATCH 17/61] Move lsp configuration into language crate Co-Authored-By: Nathan Sobo --- crates/language/build.rs | 6 ++++ crates/language/src/language.rs | 40 
+++++++++++++++++++++++++++ crates/lsp/build.rs | 10 ------- crates/lsp/src/lib.rs | 39 +++++++------------------- crates/project/src/lib.rs | 20 ++++++++++---- crates/project/src/worktree.rs | 3 +- crates/zed/languages/rust/config.toml | 4 +++ script/download-rust-analyzer | 4 +++ 8 files changed, 79 insertions(+), 47 deletions(-) create mode 100644 crates/language/build.rs delete mode 100644 crates/lsp/build.rs diff --git a/crates/language/build.rs b/crates/language/build.rs new file mode 100644 index 0000000000000000000000000000000000000000..d69cce4d1d847ee3041611307c87fc96762236d8 --- /dev/null +++ b/crates/language/build.rs @@ -0,0 +1,6 @@ +fn main() { + if let Ok(bundled) = std::env::var("ZED_BUNDLE") { + println!("cargo:rustc-env=ZED_BUNDLE={}", bundled); + } +} + diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 22609905663428d0473372591113d982068fed57..b6ff132881c0df34cc4543617b88f834332d3012 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1,5 +1,6 @@ use crate::HighlightMap; use anyhow::Result; +use gpui::AppContext; use parking_lot::Mutex; use serde::Deserialize; use std::{path::Path, str, sync::Arc}; @@ -12,6 +13,13 @@ pub struct LanguageConfig { pub name: String, pub path_suffixes: Vec, pub brackets: Vec, + pub language_server: Option, +} + +#[derive(Deserialize)] +pub struct LanguageServerConfig { + pub binary: String, + pub disk_based_diagnostic_sources: Vec, } #[derive(Clone, Debug, Deserialize)] @@ -51,6 +59,12 @@ impl LanguageRegistry { } } + pub fn get_language(&self, name: &str) -> Option<&Arc> { + self.languages + .iter() + .find(|language| language.name() == name) + } + pub fn select_language(&self, path: impl AsRef) -> Option<&Arc> { let path = path.as_ref(); let filename = path.file_name().and_then(|name| name.to_str()); @@ -97,6 +111,32 @@ impl Language { self.config.name.as_str() } + pub fn start_server( + &self, + root_path: &Path, + cx: &AppContext, + ) -> Result>> { + if let Some(config) = &self.config.language_server { + const ZED_BUNDLE: Option<&'static str> = option_env!("ZED_BUNDLE"); + let binary_path = if ZED_BUNDLE.map_or(Ok(false), |b| b.parse())? { + cx.platform() + .path_for_resource(Some(&config.binary), None)? 
+ } else { + Path::new(&config.binary).to_path_buf() + }; + lsp::LanguageServer::new(&binary_path, root_path, cx.background()).map(Some) + } else { + Ok(None) + } + } + + pub fn disk_based_diagnostic_sources(&self) -> &[String] { + self.config + .language_server + .as_ref() + .map_or(&[], |config| &config.disk_based_diagnostic_sources) + } + pub fn brackets(&self) -> &[BracketPair] { &self.config.brackets } diff --git a/crates/lsp/build.rs b/crates/lsp/build.rs deleted file mode 100644 index 703d9ccd3c6b54bbfe393d40244291b201e9343e..0000000000000000000000000000000000000000 --- a/crates/lsp/build.rs +++ /dev/null @@ -1,10 +0,0 @@ -use std::env; - -fn main() { - let target = env::var("TARGET").unwrap(); - println!("cargo:rustc-env=ZED_TARGET={}", target); - - if let Ok(bundled) = env::var("ZED_BUNDLE") { - println!("cargo:rustc-env=ZED_BUNDLE={}", bundled); - } -} diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index 1aba87e37591fa804804a8511c216b3c8271bd7d..88fd5a4bc747ed903b25c8410d1f7a2ed713b136 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -1,6 +1,6 @@ use anyhow::{anyhow, Context, Result}; use futures::{io::BufWriter, AsyncRead, AsyncWrite}; -use gpui::{executor, AppContext, Task}; +use gpui::{executor, Task}; use parking_lot::{Mutex, RwLock}; use postage::{barrier, oneshot, prelude::Stream, sink::Sink}; use serde::{Deserialize, Serialize}; @@ -86,47 +86,25 @@ struct Error { } impl LanguageServer { - pub fn rust(root_path: &Path, cx: &AppContext) -> Result> { - const ZED_BUNDLE: Option<&'static str> = option_env!("ZED_BUNDLE"); - const ZED_TARGET: &'static str = env!("ZED_TARGET"); - - let rust_analyzer_name = format!("rust-analyzer-{}", ZED_TARGET); - if ZED_BUNDLE.map_or(Ok(false), |b| b.parse())? { - let rust_analyzer_path = cx - .platform() - .path_for_resource(Some(&rust_analyzer_name), None)?; - Self::new(root_path, &rust_analyzer_path, &[], cx.background()) - } else { - Self::new( - root_path, - Path::new(&rust_analyzer_name), - &[], - cx.background(), - ) - } - } - pub fn new( + binary_path: &Path, root_path: &Path, - server_path: &Path, - server_args: &[&str], background: &executor::Background, ) -> Result> { - let mut server = Command::new(server_path) - .args(server_args) + let mut server = Command::new(binary_path) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::inherit()) .spawn()?; let stdin = server.stdin.take().unwrap(); let stdout = server.stdout.take().unwrap(); - Self::new_internal(root_path, stdin, stdout, background) + Self::new_internal(stdin, stdout, root_path, background) } fn new_internal( - root_path: &Path, stdin: Stdin, stdout: Stdout, + root_path: &Path, background: &executor::Background, ) -> Result> where @@ -410,7 +388,7 @@ impl LanguageServer { buffer: Vec::new(), }; - let server = Self::new_internal(Path::new("/"), stdin.0, stdout.1, executor).unwrap(); + let server = Self::new_internal(stdin.0, stdout.1, Path::new("/"), executor).unwrap(); let (init_id, _) = fake.receive_request::().await; fake.respond(init_id, InitializeResult::default()).await; @@ -535,7 +513,10 @@ mod tests { let lib_file_uri = lsp_types::Url::from_file_path(root_dir.path().join("src/lib.rs")).unwrap(); - let server = cx.read(|cx| LanguageServer::rust(root_dir.path(), cx).unwrap()); + let server = cx.read(|cx| { + LanguageServer::new(Path::new("rust-analyzer"), root_dir.path(), cx.background()) + .unwrap() + }); server.next_idle_notification().await; server diff --git a/crates/project/src/lib.rs b/crates/project/src/lib.rs index 
a98b89db5ac2fcfa787ffc9276f5be6d0335b4fe..552974bc69538935bdefb04d0daa3418999ef316 100644 --- a/crates/project/src/lib.rs +++ b/crates/project/src/lib.rs @@ -8,12 +8,11 @@ use futures::Future; use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; use language::LanguageRegistry; -use lsp::LanguageServer; use std::{ path::Path, sync::{atomic::AtomicBool, Arc}, }; -use util::TryFutureExt as _; +use util::{ResultExt, TryFutureExt as _}; pub use fs::*; pub use worktree::*; @@ -74,11 +73,20 @@ impl Project { let rpc = self.client.clone(); let languages = self.languages.clone(); let path = Arc::from(abs_path); - let language_server = LanguageServer::rust(&path, cx); + let language_server = languages + .get_language("Rust") + .unwrap() + .start_server(&path, cx); cx.spawn(|this, mut cx| async move { - let worktree = - Worktree::open_local(rpc, path, fs, languages, Some(language_server?), &mut cx) - .await?; + let worktree = Worktree::open_local( + rpc, + path, + fs, + languages, + language_server.log_err().flatten(), + &mut cx, + ) + .await?; this.update(&mut cx, |this, cx| { this.add_worktree(worktree.clone(), cx); }); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 47e400aa0992bc7a6be35253cdbd92dffcbb7266..c05f121accab5ab4ce1008cbb39a523b2fc0c340 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -295,7 +295,7 @@ impl Worktree { } } - pub fn language_server(&self) -> Option<&Arc> { + pub fn language_server(&self) -> Option<&Arc> { match self { Worktree::Local(worktree) => worktree.language_server.as_ref(), Worktree::Remote(_) => None, @@ -2872,7 +2872,6 @@ mod tests { use anyhow::Result; use client::test::FakeServer; use fs::RealFs; - use language::Point; use lsp::Url; use rand::prelude::*; use serde_json::json; diff --git a/crates/zed/languages/rust/config.toml b/crates/zed/languages/rust/config.toml index 11b273d137df9bbd8134c3a55e49d02459c76537..571916400878048ffaa2df958bfd1b0fe17d7d51 100644 --- a/crates/zed/languages/rust/config.toml +++ b/crates/zed/languages/rust/config.toml @@ -8,3 +8,7 @@ brackets = [ { start = "\"", end = "\"", close = true, newline = false }, { start = "/*", end = " */", close = true, newline = false }, ] + +[language_server] +binary = "rust-analyzer" +disk_based_diagnostic_sources = ["rustc"] diff --git a/script/download-rust-analyzer b/script/download-rust-analyzer index 9a64f9ed6983e975e5bdbe328fed5a8b988fed19..8cc0c00e6f468320bedc76baa194ab73ae8303f1 100755 --- a/script/download-rust-analyzer +++ b/script/download-rust-analyzer @@ -13,3 +13,7 @@ function download { mkdir -p vendor/bin download "x86_64-apple-darwin" download "aarch64-apple-darwin" + +cd vendor/bin +lipo -create rust-analyzer-* -output rust-analyzer +rm rust-analyzer-* From 4069db49599434afdc3c0cbe6d4f1469a29d92b1 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 26 Oct 2021 12:46:46 -0700 Subject: [PATCH 18/61] Allow underlines to have different color than the text Co-Authored-By: Nathan Sobo --- crates/editor/src/element.rs | 8 +++--- crates/editor/src/lib.rs | 2 +- crates/gpui/examples/text.rs | 4 +-- crates/gpui/src/elements/label.rs | 4 +-- crates/gpui/src/fonts.rs | 39 ++++++++++++++++++++------- crates/gpui/src/platform/mac/fonts.rs | 12 ++++----- crates/gpui/src/text_layout.rs | 34 +++++++++++------------ crates/workspace/src/items.rs | 2 +- 8 files changed, 63 insertions(+), 42 deletions(-) diff --git a/crates/editor/src/element.rs 
b/crates/editor/src/element.rs index cf0a101b0feaa8490888ca1aef1aec71ff8513a5..b18e72b2d77199e263e46b2f04cfee5e2acc9d55 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -394,7 +394,7 @@ impl EditorElement { RunStyle { font_id: style.text.font_id, color: Color::black(), - underline: false, + underline: None, }, )], ) @@ -435,7 +435,7 @@ impl EditorElement { RunStyle { font_id: style.text.font_id, color, - underline: false, + underline: None, }, )], ))); @@ -476,7 +476,7 @@ impl EditorElement { RunStyle { font_id: placeholder_style.font_id, color: placeholder_style.color, - underline: false, + underline: None, }, )], ) @@ -859,7 +859,7 @@ impl LayoutState { RunStyle { font_id: self.style.text.font_id, color: Color::black(), - underline: false, + underline: None, }, )], ) diff --git a/crates/editor/src/lib.rs b/crates/editor/src/lib.rs index b2a97982b70c86c50375c21b2f1e2a59cbf78917..cb813350a0c2a009fc070e56ea05c214e47e3ed5 100644 --- a/crates/editor/src/lib.rs +++ b/crates/editor/src/lib.rs @@ -2763,7 +2763,7 @@ impl EditorSettings { font_size: 14., color: Color::from_u32(0xff0000ff), font_properties, - underline: false, + underline: None, }, placeholder_text: None, background: Default::default(), diff --git a/crates/gpui/examples/text.rs b/crates/gpui/examples/text.rs index 6c82b2d88a3df891ce9ae9b120d0e24496f790fb..fb4772d11ccd61a620f49dadd79586d89346b8e7 100644 --- a/crates/gpui/examples/text.rs +++ b/crates/gpui/examples/text.rs @@ -62,7 +62,7 @@ impl gpui::Element for TextElement { .select_font(family, &Default::default()) .unwrap(), color: Color::default(), - underline: false, + underline: None, }; let bold = RunStyle { font_id: cx @@ -76,7 +76,7 @@ impl gpui::Element for TextElement { ) .unwrap(), color: Color::default(), - underline: false, + underline: None, }; let text = "Hello world!"; diff --git a/crates/gpui/src/elements/label.rs b/crates/gpui/src/elements/label.rs index 33274ffaeb8663e08e0ecccebb92ce5faa34dd2f..f78e3973e9670884594d6c82201c379638c2d3d3 100644 --- a/crates/gpui/src/elements/label.rs +++ b/crates/gpui/src/elements/label.rs @@ -207,7 +207,7 @@ mod tests { "Menlo", 12., Default::default(), - false, + None, Color::black(), cx.font_cache(), ) @@ -216,7 +216,7 @@ mod tests { "Menlo", 12., *FontProperties::new().weight(Weight::BOLD), - false, + None, Color::new(255, 0, 0, 255), cx.font_cache(), ) diff --git a/crates/gpui/src/fonts.rs b/crates/gpui/src/fonts.rs index 4ac7a92bc4c0b382c1835612d8dca5af767c36ec..b1aae4c9be323aa54bbd23b4ff4aa463e4a0882e 100644 --- a/crates/gpui/src/fonts.rs +++ b/crates/gpui/src/fonts.rs @@ -27,14 +27,14 @@ pub struct TextStyle { pub font_id: FontId, pub font_size: f32, pub font_properties: Properties, - pub underline: bool, + pub underline: Option, } #[derive(Clone, Debug, Default)] pub struct HighlightStyle { pub color: Color, pub font_properties: Properties, - pub underline: bool, + pub underline: Option, } #[allow(non_camel_case_types)] @@ -64,7 +64,7 @@ struct TextStyleJson { #[serde(default)] italic: bool, #[serde(default)] - underline: bool, + underline: UnderlineStyleJson, } #[derive(Deserialize)] @@ -74,7 +74,14 @@ struct HighlightStyleJson { #[serde(default)] italic: bool, #[serde(default)] - underline: bool, + underline: UnderlineStyleJson, +} + +#[derive(Deserialize)] +#[serde(untagged)] +enum UnderlineStyleJson { + Underlined(bool), + UnderlinedWithColor(Color), } impl TextStyle { @@ -82,7 +89,7 @@ impl TextStyle { font_family_name: impl Into>, font_size: f32, font_properties: Properties, - underline: 
bool, + underline: Option, color: Color, font_cache: &FontCache, ) -> anyhow::Result { @@ -116,7 +123,7 @@ impl TextStyle { json.family, json.size, font_properties, - json.underline, + underline_from_json(json.underline, json.color), json.color, font_cache, ) @@ -167,6 +174,12 @@ impl From for HighlightStyle { } } +impl Default for UnderlineStyleJson { + fn default() -> Self { + Self::Underlined(false) + } +} + impl Default for TextStyle { fn default() -> Self { FONT_CACHE.with(|font_cache| { @@ -199,7 +212,7 @@ impl HighlightStyle { Self { color: json.color, font_properties, - underline: json.underline, + underline: underline_from_json(json.underline, json.color), } } } @@ -209,7 +222,7 @@ impl From for HighlightStyle { Self { color, font_properties: Default::default(), - underline: false, + underline: None, } } } @@ -248,12 +261,20 @@ impl<'de> Deserialize<'de> for HighlightStyle { Ok(Self { color: serde_json::from_value(json).map_err(de::Error::custom)?, font_properties: Properties::new(), - underline: false, + underline: None, }) } } } +fn underline_from_json(json: UnderlineStyleJson, text_color: Color) -> Option { + match json { + UnderlineStyleJson::Underlined(false) => None, + UnderlineStyleJson::Underlined(true) => Some(text_color), + UnderlineStyleJson::UnderlinedWithColor(color) => Some(color), + } +} + fn properties_from_json(weight: Option, italic: bool) -> Properties { let weight = match weight.unwrap_or(WeightJson::normal) { WeightJson::thin => Weight::THIN, diff --git a/crates/gpui/src/platform/mac/fonts.rs b/crates/gpui/src/platform/mac/fonts.rs index c01700ce22817b341dca6caf634490a3a0b24666..c7f03689ee677bb017bcd42224e182c84f9b2bf2 100644 --- a/crates/gpui/src/platform/mac/fonts.rs +++ b/crates/gpui/src/platform/mac/fonts.rs @@ -417,21 +417,21 @@ mod tests { let menlo_regular = RunStyle { font_id: fonts.select_font(&menlo, &Properties::new()).unwrap(), color: Default::default(), - underline: false, + underline: None, }; let menlo_italic = RunStyle { font_id: fonts .select_font(&menlo, &Properties::new().style(Style::Italic)) .unwrap(), color: Default::default(), - underline: false, + underline: None, }; let menlo_bold = RunStyle { font_id: fonts .select_font(&menlo, &Properties::new().weight(Weight::BOLD)) .unwrap(), color: Default::default(), - underline: false, + underline: None, }; assert_ne!(menlo_regular, menlo_italic); assert_ne!(menlo_regular, menlo_bold); @@ -458,13 +458,13 @@ mod tests { let zapfino_regular = RunStyle { font_id: fonts.select_font(&zapfino, &Properties::new())?, color: Default::default(), - underline: false, + underline: None, }; let menlo = fonts.load_family("Menlo")?; let menlo_regular = RunStyle { font_id: fonts.select_font(&menlo, &Properties::new())?, color: Default::default(), - underline: false, + underline: None, }; let text = "This is, m𐍈re 𐍈r less, Zapfino!𐍈"; @@ -543,7 +543,7 @@ mod tests { let style = RunStyle { font_id: fonts.select_font(&font_ids, &Default::default()).unwrap(), color: Default::default(), - underline: false, + underline: None, }; let line = "\u{feff}"; diff --git a/crates/gpui/src/text_layout.rs b/crates/gpui/src/text_layout.rs index a7b976d72c746fe010ccd3a3ad0ebccdd8884917..105dae7c9279923f114133b438e0a51b03cce674 100644 --- a/crates/gpui/src/text_layout.rs +++ b/crates/gpui/src/text_layout.rs @@ -28,7 +28,7 @@ pub struct TextLayoutCache { pub struct RunStyle { pub color: Color, pub font_id: FontId, - pub underline: bool, + pub underline: Option, } impl TextLayoutCache { @@ -167,7 +167,7 @@ impl<'a> Hash for 
CacheKeyRef<'a> { #[derive(Default, Debug)] pub struct Line { layout: Arc, - style_runs: SmallVec<[(u32, Color, bool); 32]>, + style_runs: SmallVec<[(u32, Color, Option); 32]>, } #[derive(Default, Debug)] @@ -249,7 +249,7 @@ impl Line { let mut style_runs = self.style_runs.iter(); let mut run_end = 0; let mut color = Color::black(); - let mut underline_start = None; + let mut underline = None; for run in &self.layout.runs { let max_glyph_width = cx @@ -268,24 +268,24 @@ impl Line { } if glyph.index >= run_end { - if let Some((run_len, run_color, run_underlined)) = style_runs.next() { - if let Some(underline_origin) = underline_start { - if !*run_underlined || *run_color != color { + if let Some((run_len, run_color, run_underline_color)) = style_runs.next() { + if let Some((underline_origin, underline_color)) = underline { + if *run_underline_color != Some(underline_color) { cx.scene.push_underline(scene::Quad { bounds: RectF::from_points( underline_origin, glyph_origin + vec2f(0., 1.), ), - background: Some(color), + background: Some(underline_color), border: Default::default(), corner_radius: 0., }); - underline_start = None; + underline = None; } } - if *run_underlined { - underline_start.get_or_insert(glyph_origin); + if let Some(run_underline_color) = run_underline_color { + underline.get_or_insert((glyph_origin, *run_underline_color)); } run_end += *run_len as usize; @@ -293,13 +293,13 @@ impl Line { } else { run_end = self.layout.len; color = Color::black(); - if let Some(underline_origin) = underline_start.take() { + if let Some((underline_origin, underline_color)) = underline.take() { cx.scene.push_underline(scene::Quad { bounds: RectF::from_points( underline_origin, glyph_origin + vec2f(0., 1.), ), - background: Some(color), + background: Some(underline_color), border: Default::default(), corner_radius: 0., }); @@ -317,12 +317,12 @@ impl Line { } } - if let Some(underline_start) = underline_start.take() { + if let Some((underline_start, underline_color)) = underline.take() { let line_end = origin + baseline_offset + vec2f(self.layout.width, 0.); cx.scene.push_underline(scene::Quad { bounds: RectF::from_points(underline_start, line_end + vec2f(0., 1.)), - background: Some(color), + background: Some(underline_color), border: Default::default(), corner_radius: 0., }); @@ -597,7 +597,7 @@ impl LineWrapper { RunStyle { font_id: self.font_id, color: Default::default(), - underline: false, + underline: None, }, )], ) @@ -681,7 +681,7 @@ mod tests { let normal = RunStyle { font_id, color: Default::default(), - underline: false, + underline: None, }; let bold = RunStyle { font_id: font_cache @@ -694,7 +694,7 @@ mod tests { ) .unwrap(), color: Default::default(), - underline: false, + underline: None, }; let text = "aa bbb cccc ddddd eeee"; diff --git a/crates/workspace/src/items.rs b/crates/workspace/src/items.rs index 9370ac7f85e3b4226140bcc2c124b2db670db967..d1275aee7c5f60204785c087bad23ae8a835eacf 100644 --- a/crates/workspace/src/items.rs +++ b/crates/workspace/src/items.rs @@ -37,7 +37,7 @@ impl Item for Buffer { font_id, font_size, font_properties, - underline: false, + underline: None, }; EditorSettings { tab_size: settings.tab_size, From 5bfbeb55c06c4a9d7da8efc4585682ca56943749 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 26 Oct 2021 14:26:15 -0700 Subject: [PATCH 19/61] Simplify buffer constructor methods Don't expose the `buffer::History` to callers of `language::Buffer` --- crates/editor/src/display_map.rs | 12 ++++---- crates/editor/src/lib.rs | 29 
+++++++------------ crates/language/src/lib.rs | 45 +++++++++++++----------------- crates/language/src/tests.rs | 48 ++++++++++++++------------------ crates/project/src/worktree.rs | 18 ++++-------- 5 files changed, 60 insertions(+), 92 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index f343442392363eaf0b70fc8061f22137ba6a96bd..a87f11374386cd3d1a234572add4132141622458 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -359,7 +359,7 @@ mod tests { use super::*; use crate::{movement, test::*}; use gpui::{color::Color, MutableAppContext}; - use language::{History, Language, LanguageConfig, RandomCharIter, SelectionGoal}; + use language::{Language, LanguageConfig, RandomCharIter, SelectionGoal}; use rand::{prelude::StdRng, Rng}; use std::{env, sync::Arc}; use theme::SyntaxTheme; @@ -701,9 +701,8 @@ mod tests { ); lang.set_theme(&theme); - let buffer = cx.add_model(|cx| { - Buffer::from_history(0, History::new(text.into()), None, Some(lang), None, cx) - }); + let buffer = + cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Some(lang), None, cx)); buffer.condition(&cx, |buf, _| !buf.is_parsing()).await; let tab_size = 2; @@ -789,9 +788,8 @@ mod tests { ); lang.set_theme(&theme); - let buffer = cx.add_model(|cx| { - Buffer::from_history(0, History::new(text.into()), None, Some(lang), None, cx) - }); + let buffer = + cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Some(lang), None, cx)); buffer.condition(&cx, |buf, _| !buf.is_parsing()).await; let font_cache = cx.font_cache(); diff --git a/crates/editor/src/lib.rs b/crates/editor/src/lib.rs index cb813350a0c2a009fc070e56ea05c214e47e3ed5..05eb9fa7e5008bc6926502af5402313412c6a16d 100644 --- a/crates/editor/src/lib.rs +++ b/crates/editor/src/lib.rs @@ -2903,7 +2903,7 @@ impl SelectionExt for Selection { mod tests { use super::*; use crate::test::sample_text; - use buffer::{History, Point}; + use buffer::Point; use unindent::Unindent; #[gpui::test] @@ -4406,10 +4406,10 @@ mod tests { #[gpui::test] async fn test_select_larger_smaller_syntax_node(mut cx: gpui::TestAppContext) { let settings = cx.read(EditorSettings::test); - let language = Arc::new(Language::new( + let language = Some(Arc::new(Language::new( LanguageConfig::default(), tree_sitter_rust::language(), - )); + ))); let text = r#" use mod1::mod2::{mod3, mod4}; @@ -4420,10 +4420,7 @@ mod tests { "# .unindent(); - let buffer = cx.add_model(|cx| { - let history = History::new(text.into()); - Buffer::from_history(0, history, None, Some(language), None, cx) - }); + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx)); let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx)); view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing()) .await; @@ -4550,7 +4547,7 @@ mod tests { #[gpui::test] async fn test_autoclose_pairs(mut cx: gpui::TestAppContext) { let settings = cx.read(EditorSettings::test); - let language = Arc::new(Language::new( + let language = Some(Arc::new(Language::new( LanguageConfig { brackets: vec![ BracketPair { @@ -4569,7 +4566,7 @@ mod tests { ..Default::default() }, tree_sitter_rust::language(), - )); + ))); let text = r#" a @@ -4579,10 +4576,7 @@ mod tests { "# .unindent(); - let buffer = cx.add_model(|cx| { - let history = History::new(text.into()); - Buffer::from_history(0, history, None, Some(language), None, cx) - }); + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx)); let (_, 
view) = cx.add_window(|cx| build_editor(buffer, settings, cx)); view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing()) .await; @@ -4665,7 +4659,7 @@ mod tests { #[gpui::test] async fn test_extra_newline_insertion(mut cx: gpui::TestAppContext) { let settings = cx.read(EditorSettings::test); - let language = Arc::new(Language::new( + let language = Some(Arc::new(Language::new( LanguageConfig { brackets: vec![ BracketPair { @@ -4684,7 +4678,7 @@ mod tests { ..Default::default() }, tree_sitter_rust::language(), - )); + ))); let text = concat!( "{ }\n", // Suppress rustfmt @@ -4694,10 +4688,7 @@ mod tests { "{{} }\n", // ); - let buffer = cx.add_model(|cx| { - let history = History::new(text.into()); - Buffer::from_history(0, history, None, Some(language), None, cx) - }); + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx)); let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx)); view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing()) .await; diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 15dcbf0df63c5ac81f1a31f42a8faade5726d338..cee5db9441f62a6df1b94e39f1214b76c5380eed 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -197,26 +197,22 @@ impl Buffer { History::new(base_text.into()), ), None, - None, - None, - cx, ) } - pub fn from_history( + pub fn from_file>>( replica_id: ReplicaId, - history: History, - file: Option>, - language: Option>, - language_server: Option>, + base_text: T, + file: Box, cx: &mut ModelContext, ) -> Self { Self::build( - TextBuffer::new(replica_id, cx.model_id() as u64, history), - file, - language, - language_server, - cx, + TextBuffer::new( + replica_id, + cx.model_id() as u64, + History::new(base_text.into()), + ), + Some(file), ) } @@ -224,25 +220,24 @@ impl Buffer { replica_id: ReplicaId, message: proto::Buffer, file: Option>, - language: Option>, - cx: &mut ModelContext, ) -> Result { Ok(Self::build( TextBuffer::from_proto(replica_id, message)?, file, - language, - None, - cx, )) } - fn build( - buffer: TextBuffer, - file: Option>, + pub fn with_language( + mut self, language: Option>, - language_server: Option>, + language_server: Option>, cx: &mut ModelContext, ) -> Self { + self.set_language(language, language_server, cx); + self + } + + fn build(buffer: TextBuffer, file: Option>) -> Self { let saved_mtime; if let Some(file) = file.as_ref() { saved_mtime = file.mtime(); @@ -250,7 +245,7 @@ impl Buffer { saved_mtime = UNIX_EPOCH; } - let mut result = Self { + Self { text: buffer, saved_mtime, saved_version: clock::Global::new(), @@ -266,9 +261,7 @@ impl Buffer { language_server: None, #[cfg(test)] operations: Default::default(), - }; - result.set_language(language, language_server, cx); - result + } } pub fn snapshot(&self) -> Snapshot { diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index cc1382a0988586b5e75b25cce76bdb57ba8c481d..5d8f2ebc3e8b70c70334f66c352cd736c5873da2 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -78,10 +78,8 @@ async fn test_apply_diff(mut cx: gpui::TestAppContext) { #[gpui::test] async fn test_reparse(mut cx: gpui::TestAppContext) { - let buffer = cx.add_model(|cx| { - let text = "fn a() {}".into(); - Buffer::from_history(0, History::new(text), None, Some(rust_lang()), None, cx) - }); + let text = "fn a() {}"; + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(rust_lang(), None, cx)); // Wait for the initial text to parse buffer 
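After this simplification every call site follows the same builder shape: with_language takes the buffer by value and hands it back, so it chains onto whichever constructor applies (new, from_file, or from_proto). The snippet below is a hypothetical test-style usage sketch, not code from the series itself; it assumes the surrounding gpui test harness (cx) and the rust_lang() helper used by these tests, and passes None for the language server.

// Plain in-memory buffer with no language attached.
let plain = cx.add_model(|cx| Buffer::new(0, "fn a() {}", cx));

// Same constructor, with a language (and, optionally, a language server) chained on.
let rust = cx.add_model(|cx| {
    Buffer::new(0, "fn a() {}", cx).with_language(rust_lang(), None, cx)
});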
@@ -222,9 +220,8 @@ fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) { } } " - .unindent() - .into(); - Buffer::from_history(0, History::new(text), None, Some(rust_lang()), None, cx) + .unindent(); + Buffer::new(0, text, cx).with_language(rust_lang(), None, cx) }); let buffer = buffer.read(cx); assert_eq!( @@ -253,9 +250,8 @@ fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) { #[gpui::test] fn test_edit_with_autoindent(cx: &mut MutableAppContext) { cx.add_model(|cx| { - let text = "fn a() {}".into(); - let mut buffer = - Buffer::from_history(0, History::new(text), None, Some(rust_lang()), None, cx); + let text = "fn a() {}"; + let mut buffer = Buffer::new(0, text, cx).with_language(rust_lang(), None, cx); buffer.edit_with_autoindent([8..8], "\n\n", cx); assert_eq!(buffer.text(), "fn a() {\n \n}"); @@ -273,8 +269,9 @@ fn test_edit_with_autoindent(cx: &mut MutableAppContext) { #[gpui::test] fn test_autoindent_moves_selections(cx: &mut MutableAppContext) { cx.add_model(|cx| { - let text = History::new("fn a() {}".into()); - let mut buffer = Buffer::from_history(0, text, None, Some(rust_lang()), None, cx); + let text = "fn a() {}"; + + let mut buffer = Buffer::new(0, text, cx).with_language(rust_lang(), None, cx); let selection_set_id = buffer.add_selection_set(Vec::new(), cx); buffer.start_transaction(Some(selection_set_id)).unwrap(); @@ -331,10 +328,9 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta d; } " - .unindent() - .into(); - let mut buffer = - Buffer::from_history(0, History::new(text), None, Some(rust_lang()), None, cx); + .unindent(); + + let mut buffer = Buffer::new(0, text, cx).with_language(rust_lang(), None, cx); // Lines 2 and 3 don't match the indentation suggestion. When editing these lines, // their indentation is not adjusted. 
@@ -378,14 +374,12 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta #[gpui::test] fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) { cx.add_model(|cx| { - let text = History::new( - " - fn a() {} - " - .unindent() - .into(), - ); - let mut buffer = Buffer::from_history(0, text, None, Some(rust_lang()), None, cx); + let text = " + fn a() {} + " + .unindent(); + + let mut buffer = Buffer::new(0, text, cx).with_language(rust_lang(), None, cx); buffer.edit_with_autoindent([5..5], "\nb", cx); assert_eq!( @@ -440,8 +434,8 @@ impl Buffer { } } -fn rust_lang() -> Arc { - Arc::new( +fn rust_lang() -> Option> { + Some(Arc::new( Language::new( LanguageConfig { name: "Rust".to_string(), @@ -461,7 +455,7 @@ fn rust_lang() -> Arc { .unwrap() .with_brackets_query(r#" ("{" @open "}" @close) "#) .unwrap(), - ) + )) } fn empty(point: Point) -> Range { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index c05f121accab5ab4ce1008cbb39a523b2fc0c340..5b62402eed22db38ad72e3d6f5632ea1c940750a 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -12,7 +12,7 @@ use gpui::{ executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle, }; -use language::{Buffer, History, LanguageRegistry, Operation, Rope}; +use language::{Buffer, LanguageRegistry, Operation, Rope}; use lazy_static::lazy_static; use lsp::LanguageServer; use parking_lot::Mutex; @@ -892,10 +892,7 @@ impl LocalWorktree { .cloned() }); let buffer = cx.add_model(|cx| { - Buffer::from_history( - 0, - History::new(contents.into()), - Some(Box::new(file)), + Buffer::from_file(0, contents, Box::new(file), cx).with_language( language, language_server, cx, @@ -1321,14 +1318,9 @@ impl RemoteWorktree { let remote_buffer = response.buffer.ok_or_else(|| anyhow!("empty buffer"))?; let buffer_id = remote_buffer.id as usize; let buffer = cx.add_model(|cx| { - Buffer::from_proto( - replica_id, - remote_buffer, - Some(Box::new(file)), - language, - cx, - ) - .unwrap() + Buffer::from_proto(replica_id, remote_buffer, Some(Box::new(file))) + .unwrap() + .with_language(language, None, cx) }); this.update(&mut cx, |this, cx| { let this = this.as_remote_mut().unwrap(); From ef4fc42d936cb942271e4378d6d043e21b1e2b89 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 26 Oct 2021 15:46:08 -0700 Subject: [PATCH 20/61] Allow retrieving a buffer's diagnostics --- crates/buffer/src/anchor.rs | 17 ++++---- crates/buffer/src/lib.rs | 1 + crates/language/Cargo.toml | 3 +- crates/language/src/lib.rs | 82 ++++++++++++++++++++++++------------ crates/language/src/tests.rs | 72 +++++++++++++++++++++++++++++++ 5 files changed, 140 insertions(+), 35 deletions(-) diff --git a/crates/buffer/src/anchor.rs b/crates/buffer/src/anchor.rs index 9cafd673a9d5fd45bfa7c70873a93f5487e08ba6..e1e6eaa2191eddf883620852ac2497722489666c 100644 --- a/crates/buffer/src/anchor.rs +++ b/crates/buffer/src/anchor.rs @@ -175,10 +175,10 @@ impl Default for AnchorRangeMultimap { } impl AnchorRangeMultimap { - fn intersecting_point_ranges<'a, O>( + pub fn intersecting_point_ranges<'a, O>( &'a self, range: Range, - content: &'a Content<'a>, + content: Content<'a>, inclusive: bool, ) -> impl Iterator, &T)> + 'a where @@ -187,10 +187,11 @@ impl AnchorRangeMultimap { use super::ToPoint as _; let end_bias = if inclusive { Bias::Right } else { Bias::Left }; - let range = range.start.to_full_offset(content, Bias::Left) - 
..range.end.to_full_offset(content, end_bias); + let range = range.start.to_full_offset(&content, Bias::Left) + ..range.end.to_full_offset(&content, end_bias); let mut cursor = self.entries.filter::<_, usize>( { + let content = content.clone(); let mut endpoint = Anchor { full_offset: 0, bias: Bias::Right, @@ -199,12 +200,12 @@ impl AnchorRangeMultimap { move |summary: &AnchorRangeMultimapSummary| { endpoint.full_offset = summary.max_end; endpoint.bias = self.end_bias; - let max_end = endpoint.to_full_offset(content, self.end_bias); + let max_end = endpoint.to_full_offset(&content, self.end_bias); let start_cmp = range.start.cmp(&max_end); endpoint.full_offset = summary.min_start; endpoint.bias = self.start_bias; - let min_start = endpoint.to_full_offset(content, self.start_bias); + let min_start = endpoint.to_full_offset(&content, self.start_bias); let end_cmp = range.end.cmp(&min_start); if inclusive { @@ -228,10 +229,10 @@ impl AnchorRangeMultimap { let ix = *cursor.start(); endpoint.full_offset = item.range.start; endpoint.bias = self.start_bias; - let start = endpoint.to_point(content); + let start = endpoint.to_point(&content); endpoint.full_offset = item.range.end; endpoint.bias = self.end_bias; - let end = endpoint.to_point(content); + let end = endpoint.to_point(&content); let value = &item.value; cursor.next(&()); Some((ix, start..end, value)) diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index 4fac433781afd0e262114583a1dbcf81ef2a55b1..37b66f5f62d96e161c4350cf3369b1944d16e036 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -1592,6 +1592,7 @@ impl Snapshot { } } +#[derive(Clone)] pub struct Content<'a> { visible_text: &'a Rope, deleted_text: &'a Rope, diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 4aba95f97a8a58ddb26b470ec6bb4a8c90207fd7..b0a384e16a7667323bbbac702956b0b5992b7011 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = "2018" [features] -test-support = ["rand", "buffer/test-support"] +test-support = ["rand", "buffer/test-support", "lsp/test-support"] [dependencies] buffer = { path = "../buffer" } @@ -29,6 +29,7 @@ tree-sitter = "0.19.5" [dev-dependencies] buffer = { path = "../buffer", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } +lsp = { path = "../lsp", features = ["test-support"] } rand = "0.8.3" tree-sitter-rust = "0.19.0" unindent = "0.1.7" diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index cee5db9441f62a6df1b94e39f1214b76c5380eed..86714def0f427ff9064893d391e77596401aa839 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -13,7 +13,7 @@ use clock::ReplicaId; use futures::FutureExt as _; use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task}; use lazy_static::lazy_static; -use lsp::LanguageServer; +use lsp::{DiagnosticSeverity, LanguageServer}; use parking_lot::Mutex; use postage::{prelude::Stream, sink::Sink, watch}; use rpc::proto; @@ -59,7 +59,7 @@ pub struct Buffer { syntax_tree: Mutex>, parsing_in_background: bool, parse_count: usize, - diagnostics: AnchorRangeMultimap<()>, + diagnostics: AnchorRangeMultimap<(DiagnosticSeverity, String)>, language_server: Option, #[cfg(test)] operations: Vec, @@ -73,6 +73,13 @@ pub struct Snapshot { query_cursor: QueryCursorHandle, } +#[derive(Debug, PartialEq, Eq)] +pub struct Diagnostic { + pub range: Range, + pub severity: DiagnosticSeverity, + pub message: String, +} + struct 
LanguageServerState { server: Arc, latest_snapshot: watch::Sender>, @@ -613,43 +620,67 @@ impl Buffer { pub fn update_diagnostics( &mut self, - params: lsp::PublishDiagnosticsParams, + version: Option, + diagnostics: Vec, cx: &mut ModelContext, ) -> Result<()> { - dbg!(¶ms); - let language_server = self.language_server.as_mut().unwrap(); - let version = params.version.ok_or_else(|| anyhow!("missing version"))? as usize; - let snapshot = language_server - .pending_snapshots - .get(&version) - .ok_or_else(|| anyhow!("missing snapshot"))?; - self.diagnostics = snapshot.buffer_snapshot.content().anchor_range_multimap( + let version = version.map(|version| version as usize); + let content = if let Some(version) = version { + let language_server = self.language_server.as_mut().unwrap(); + let snapshot = language_server + .pending_snapshots + .get(&version) + .ok_or_else(|| anyhow!("missing snapshot"))?; + snapshot.buffer_snapshot.content() + } else { + self.content() + }; + self.diagnostics = content.anchor_range_multimap( Bias::Left, Bias::Right, - params.diagnostics.into_iter().map(|diagnostic| { + diagnostics.into_iter().map(|diagnostic| { // TODO: Use UTF-16 positions. let start = Point::new( diagnostic.range.start.line, diagnostic.range.start.character, ); let end = Point::new(diagnostic.range.end.line, diagnostic.range.end.character); - (start..end, ()) + let severity = diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR); + (start..end, (severity, diagnostic.message)) }), ); - let versions_to_delete = language_server - .pending_snapshots - .range(..version) - .map(|(v, _)| *v) - .collect::>(); - for version in versions_to_delete { - language_server.pending_snapshots.remove(&version); + if let Some(version) = version { + let language_server = self.language_server.as_mut().unwrap(); + let versions_to_delete = language_server + .pending_snapshots + .range(..version) + .map(|(v, _)| *v) + .collect::>(); + for version in versions_to_delete { + language_server.pending_snapshots.remove(&version); + } } cx.notify(); Ok(()) } + pub fn diagnostics_in_range<'a, T: ToOffset>( + &'a self, + range: Range, + ) -> impl Iterator + 'a { + let content = self.content(); + let range = range.start.to_offset(&content)..range.end.to_offset(&content); + self.diagnostics + .intersecting_point_ranges(range, content, true) + .map(move |(_, range, (severity, message))| Diagnostic { + range, + severity: *severity, + message: message.clone(), + }) + } + fn request_autoindent(&mut self, cx: &mut ModelContext) { if let Some(indent_columns) = self.compute_autoindents() { let indent_columns = cx.background().spawn(indent_columns); @@ -987,17 +1018,16 @@ impl Buffer { } else { return; }; - let file = if let Some(file) = self.file.as_ref() { - file - } else { - return; - }; + let abs_path = self + .file + .as_ref() + .map_or(PathBuf::new(), |file| file.abs_path(cx).unwrap()); let version = post_inc(&mut language_server.next_version); let snapshot = LanguageServerSnapshot { buffer_snapshot: self.text.snapshot(), version, - path: Arc::from(file.abs_path(cx).unwrap()), + path: Arc::from(abs_path), }; language_server .pending_snapshots diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 5d8f2ebc3e8b70c70334f66c352cd736c5873da2..988b1d0c058e8994ff2179197dcde369f7817658 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -407,6 +407,78 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppConte }); } +#[gpui::test] +async fn test_diagnostics(mut cx: 
gpui::TestAppContext) { + let (language_server, mut fake) = lsp::LanguageServer::fake(&cx.background()).await; + + let text = " + fn a() { A } + fn b() { BB } + fn c() { CCC } + " + .unindent(); + + let buffer = cx.add_model(|cx| { + Buffer::new(0, text, cx).with_language(rust_lang(), Some(language_server), cx) + }); + + let open_notification = fake + .receive_notification::() + .await; + + buffer.update(&mut cx, |buffer, cx| { + // Edit the buffer, moving the content down + buffer.edit([0..0], "\n\n", cx); + + // Receive diagnostics for an earlier version of the buffer. + buffer + .update_diagnostics( + Some(open_notification.text_document.version), + vec![ + lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), + severity: Some(lsp::DiagnosticSeverity::ERROR), + message: "undefined variable 'A'".to_string(), + ..Default::default() + }, + lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)), + severity: Some(lsp::DiagnosticSeverity::ERROR), + message: "undefined variable 'BB'".to_string(), + ..Default::default() + }, + lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)), + severity: Some(lsp::DiagnosticSeverity::ERROR), + message: "undefined variable 'CCC'".to_string(), + ..Default::default() + }, + ], + cx, + ) + .unwrap(); + + // The diagnostics have moved down since they were created. + assert_eq!( + buffer + .diagnostics_in_range(Point::new(3, 0)..Point::new(5, 0)) + .collect::>(), + &[ + Diagnostic { + range: Point::new(3, 9)..Point::new(3, 11), + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'BB'".to_string() + }, + Diagnostic { + range: Point::new(4, 9)..Point::new(4, 12), + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'CCC'".to_string() + } + ] + ) + }); +} + #[test] fn test_contiguous_ranges() { assert_eq!( From 79ba217485474a3536207bfd787bf1ffafc7b5f8 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 26 Oct 2021 15:46:39 -0700 Subject: [PATCH 21/61] Fix routing of diagnostics to buffers in worktree --- crates/project/src/worktree.rs | 75 ++++++++++++++++++++-------------- 1 file changed, 44 insertions(+), 31 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 5b62402eed22db38ad72e3d6f5632ea1c940750a..2fa709c0a70bae4e31eb62d2187154167b164f92 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3,7 +3,7 @@ use super::{ ignore::IgnoreStack, }; use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; -use anyhow::{anyhow, Result}; +use anyhow::{anyhow, Context, Result}; use client::{proto, Client, PeerId, TypedEnvelope}; use clock::ReplicaId; use futures::{Stream, StreamExt}; @@ -891,17 +891,25 @@ impl LocalWorktree { .select_language(file.full_path(cx)) .cloned() }); + let diagnostics = this.update(&mut cx, |this, _| { + this.as_local_mut() + .unwrap() + .diagnostics + .remove(path.as_ref()) + }); let buffer = cx.add_model(|cx| { - Buffer::from_file(0, contents, Box::new(file), cx).with_language( - language, - language_server, - cx, - ) + let mut buffer = Buffer::from_file(0, contents, Box::new(file), cx); + buffer.set_language(language, language_server, cx); + if let Some(diagnostics) = diagnostics { + buffer.update_diagnostics(None, diagnostics, cx).unwrap(); + } + buffer }); this.update(&mut cx, |this, _| { let this = this .as_local_mut() .ok_or_else(|| anyhow!("must be a local worktree"))?; + 
this.open_buffers.insert(buffer.id(), buffer.downgrade()); Ok(buffer) }) @@ -1201,7 +1209,10 @@ impl LocalWorktree { let file_path = params .uri .to_file_path() - .map_err(|_| anyhow!("URI is not a file"))?; + .map_err(|_| anyhow!("URI is not a file"))? + .strip_prefix(&self.abs_path) + .context("path is not within worktree")? + .to_owned(); for buffer in self.open_buffers.values() { if let Some(buffer) = buffer.upgrade(cx) { @@ -1210,7 +1221,9 @@ impl LocalWorktree { .file() .map_or(false, |file| file.path().as_ref() == file_path) { - buffer.update(cx, |buffer, cx| buffer.update_diagnostics(params, cx))?; + buffer.update(cx, |buffer, cx| { + buffer.update_diagnostics(params.version, params.diagnostics, cx) + })?; return Ok(()); } } @@ -2862,8 +2875,10 @@ mod tests { use super::*; use crate::fs::FakeFs; use anyhow::Result; + use buffer::Point; use client::test::FakeServer; use fs::RealFs; + use language::Diagnostic; use lsp::Url; use rand::prelude::*; use serde_json::json; @@ -2873,7 +2888,6 @@ mod tests { fmt::Write, time::{SystemTime, UNIX_EPOCH}, }; - use unindent::Unindent as _; use util::test::temp_tree; #[gpui::test] @@ -3499,13 +3513,8 @@ mod tests { async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) { let (language_server, mut fake_lsp) = LanguageServer::fake(&cx.background()).await; let dir = temp_tree(json!({ - "a.rs": " - fn a() { A } - fn b() { BB } - ".unindent(), - "b.rs": " - const y: i32 = 1 - ".unindent(), + "a.rs": "fn a() { A }", + "b.rs": "const y: i32 = 1", })); let tree = Worktree::open_local( @@ -3525,20 +3534,12 @@ mod tests { .notify::(lsp::PublishDiagnosticsParams { uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(), version: None, - diagnostics: vec![ - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), - severity: Some(lsp::DiagnosticSeverity::ERROR), - message: "undefined variable 'A'".to_string(), - ..Default::default() - }, - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(2, 11)), - severity: Some(lsp::DiagnosticSeverity::ERROR), - message: "undefined variable 'BB'".to_string(), - ..Default::default() - }, - ], + diagnostics: vec![lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), + severity: Some(lsp::DiagnosticSeverity::ERROR), + message: "undefined variable 'A'".to_string(), + ..Default::default() + }], }) .await; @@ -3547,7 +3548,19 @@ mod tests { .await .unwrap(); - // Check buffer's diagnostics + buffer.read_with(&cx, |buffer, _| { + let diagnostics = buffer + .diagnostics_in_range(0..buffer.len()) + .collect::>(); + assert_eq!( + diagnostics, + &[Diagnostic { + range: Point::new(0, 9)..Point::new(0, 10), + severity: lsp::DiagnosticSeverity::ERROR, + message: "undefined variable 'A'".to_string() + }] + ) + }); } #[gpui::test(iterations = 100)] From f1db618be28a9fe7015bf94ff6b18db17361c238 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 26 Oct 2021 17:56:39 -0700 Subject: [PATCH 22/61] Generalize AnchorRangeMultimap's 'intersecting ranges' API Co-Authored-By: Nathan Sobo --- crates/buffer/src/anchor.rs | 17 ++++++++--------- crates/buffer/src/lib.rs | 16 ++++++++++++++++ 2 files changed, 24 insertions(+), 9 deletions(-) diff --git a/crates/buffer/src/anchor.rs b/crates/buffer/src/anchor.rs index e1e6eaa2191eddf883620852ac2497722489666c..479b31fe6c602b5e4a9761823960cc93184a3630 100644 --- a/crates/buffer/src/anchor.rs +++ b/crates/buffer/src/anchor.rs @@ -1,4 +1,4 @@ -use super::{Buffer, 
Content, Point, ToOffset}; +use super::{Buffer, Content, FromAnchor, Point, ToOffset}; use anyhow::Result; use std::{cmp::Ordering, ops::Range}; use sum_tree::{Bias, SumTree}; @@ -175,17 +175,16 @@ impl Default for AnchorRangeMultimap { } impl AnchorRangeMultimap { - pub fn intersecting_point_ranges<'a, O>( + pub fn intersecting_ranges<'a, I, O>( &'a self, - range: Range, + range: Range, content: Content<'a>, inclusive: bool, - ) -> impl Iterator, &T)> + 'a + ) -> impl Iterator, &T)> + 'a where - O: ToOffset, + I: ToOffset, + O: FromAnchor, { - use super::ToPoint as _; - let end_bias = if inclusive { Bias::Right } else { Bias::Left }; let range = range.start.to_full_offset(&content, Bias::Left) ..range.end.to_full_offset(&content, end_bias); @@ -229,10 +228,10 @@ impl AnchorRangeMultimap { let ix = *cursor.start(); endpoint.full_offset = item.range.start; endpoint.bias = self.start_bias; - let start = endpoint.to_point(&content); + let start = O::from_anchor(&endpoint, &content); endpoint.full_offset = item.range.end; endpoint.bias = self.end_bias; - let end = endpoint.to_point(&content); + let end = O::from_anchor(&endpoint, &content); let value = &item.value; cursor.next(&()); Some((ix, start..end, value)) diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index 37b66f5f62d96e161c4350cf3369b1944d16e036..093f4691f33c6a58bbd5111f1d45b9713937445f 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -2523,3 +2523,19 @@ impl ToPoint for usize { content.into().visible_text.to_point(*self) } } + +pub trait FromAnchor { + fn from_anchor<'a>(anchor: &Anchor, content: &Content<'a>) -> Self; +} + +impl FromAnchor for Point { + fn from_anchor<'a>(anchor: &Anchor, content: &Content<'a>) -> Self { + anchor.to_point(content) + } +} + +impl FromAnchor for usize { + fn from_anchor<'a>(anchor: &Anchor, content: &Content<'a>) -> Self { + anchor.to_offset(content) + } +} From c539069cbb9043df27538fc2f5bd5591b248508f Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 26 Oct 2021 17:57:50 -0700 Subject: [PATCH 23/61] Include diagnostic info in HighlightedChunks iterator Co-Authored-By: Nathan Sobo --- crates/editor/src/display_map.rs | 10 +- crates/editor/src/display_map/fold_map.rs | 31 +++-- crates/editor/src/display_map/tab_map.rs | 40 +++--- crates/editor/src/display_map/wrap_map.rs | 34 ++--- crates/editor/src/element.rs | 27 +++- crates/editor/src/lib.rs | 4 + crates/language/src/lib.rs | 159 ++++++++++++++++++---- crates/language/src/tests.rs | 7 +- crates/theme/src/lib.rs | 10 ++ crates/zed/assets/themes/black.toml | 4 + crates/zed/assets/themes/dark.toml | 4 + crates/zed/assets/themes/light.toml | 6 +- 12 files changed, 247 insertions(+), 89 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index a87f11374386cd3d1a234572add4132141622458..596dc9507f66eb4130247ea0992b15267d55d984 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -972,16 +972,16 @@ mod tests { ) -> Vec<(String, Option<&'a str>)> { let mut snapshot = map.update(cx, |map, cx| map.snapshot(cx)); let mut chunks: Vec<(String, Option<&str>)> = Vec::new(); - for (chunk, style_id) in snapshot.highlighted_chunks_for_rows(rows) { - let style_name = style_id.name(theme); + for chunk in snapshot.highlighted_chunks_for_rows(rows) { + let style_name = chunk.highlight_id.name(theme); if let Some((last_chunk, last_style_name)) = chunks.last_mut() { if style_name == *last_style_name { - last_chunk.push_str(chunk); + 
last_chunk.push_str(chunk.text); } else { - chunks.push((chunk.to_string(), style_name)); + chunks.push((chunk.text.to_string(), style_name)); } } else { - chunks.push((chunk.to_string(), style_name)); + chunks.push((chunk.text.to_string(), style_name)); } } chunks diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 7a230aa165e74266dedf7636cf05cecbf773ca7d..efccbbbf5dae1e0acf99c67bdb7f39d1c7b7ad6e 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1,5 +1,7 @@ use gpui::{AppContext, ModelHandle}; -use language::{Anchor, AnchorRangeExt, Buffer, HighlightId, Point, TextSummary, ToOffset}; +use language::{ + Anchor, AnchorRangeExt, Buffer, HighlightId, HighlightedChunk, Point, TextSummary, ToOffset, +}; use parking_lot::Mutex; use std::{ cmp::{self, Ordering}, @@ -995,12 +997,12 @@ impl<'a> Iterator for Chunks<'a> { pub struct HighlightedChunks<'a> { transform_cursor: Cursor<'a, Transform, (FoldOffset, usize)>, buffer_chunks: language::HighlightedChunks<'a>, - buffer_chunk: Option<(usize, &'a str, HighlightId)>, + buffer_chunk: Option<(usize, HighlightedChunk<'a>)>, buffer_offset: usize, } impl<'a> Iterator for HighlightedChunks<'a> { - type Item = (&'a str, HighlightId); + type Item = HighlightedChunk<'a>; fn next(&mut self) -> Option { let transform = if let Some(item) = self.transform_cursor.item() { @@ -1022,34 +1024,35 @@ impl<'a> Iterator for HighlightedChunks<'a> { self.transform_cursor.next(&()); } - return Some((output_text, HighlightId::default())); + return Some(HighlightedChunk { + text: output_text, + highlight_id: HighlightId::default(), + diagnostic: None, + }); } // Retrieve a chunk from the current location in the buffer. if self.buffer_chunk.is_none() { let chunk_offset = self.buffer_chunks.offset(); - self.buffer_chunk = self - .buffer_chunks - .next() - .map(|(chunk, capture_ix)| (chunk_offset, chunk, capture_ix)); + self.buffer_chunk = self.buffer_chunks.next().map(|chunk| (chunk_offset, chunk)); } // Otherwise, take a chunk from the buffer's text. - if let Some((chunk_offset, mut chunk, capture_ix)) = self.buffer_chunk { + if let Some((chunk_offset, mut chunk)) = self.buffer_chunk { let offset_in_chunk = self.buffer_offset - chunk_offset; - chunk = &chunk[offset_in_chunk..]; + chunk.text = &chunk.text[offset_in_chunk..]; // Truncate the chunk so that it ends at the next fold. 
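                // (`region_end` is the number of buffer bytes remaining in the current transform;
                // once the chunk reaches it, the transform cursor advances to the next transform.)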
let region_end = self.transform_cursor.end(&()).1 - self.buffer_offset; - if chunk.len() >= region_end { - chunk = &chunk[0..region_end]; + if chunk.text.len() >= region_end { + chunk.text = &chunk.text[0..region_end]; self.transform_cursor.next(&()); } else { self.buffer_chunk.take(); } - self.buffer_offset += chunk.len(); - return Some((chunk, capture_ix)); + self.buffer_offset += chunk.text.len(); + return Some(chunk); } None diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index cfab4fd941921fef6410f64ab2104db3a7ee8873..93fae6d6b2c5ea443574afd5f82dd0c636a131d8 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -1,5 +1,5 @@ use super::fold_map::{self, FoldEdit, FoldPoint, Snapshot as FoldSnapshot}; -use language::{rope, HighlightId}; +use language::{rope, HighlightedChunk}; use parking_lot::Mutex; use std::{mem, ops::Range}; use sum_tree::Bias; @@ -173,9 +173,11 @@ impl Snapshot { .highlighted_chunks(input_start..input_end), column: expanded_char_column, tab_size: self.tab_size, - chunk: &SPACES[0..to_next_stop], + chunk: HighlightedChunk { + text: &SPACES[0..to_next_stop], + ..Default::default() + }, skip_leading_tab: to_next_stop > 0, - style_id: Default::default(), } } @@ -415,23 +417,21 @@ impl<'a> Iterator for Chunks<'a> { pub struct HighlightedChunks<'a> { fold_chunks: fold_map::HighlightedChunks<'a>, - chunk: &'a str, - style_id: HighlightId, + chunk: HighlightedChunk<'a>, column: usize, tab_size: usize, skip_leading_tab: bool, } impl<'a> Iterator for HighlightedChunks<'a> { - type Item = (&'a str, HighlightId); + type Item = HighlightedChunk<'a>; fn next(&mut self) -> Option { - if self.chunk.is_empty() { - if let Some((chunk, style_id)) = self.fold_chunks.next() { + if self.chunk.text.is_empty() { + if let Some(chunk) = self.fold_chunks.next() { self.chunk = chunk; - self.style_id = style_id; if self.skip_leading_tab { - self.chunk = &self.chunk[1..]; + self.chunk.text = &self.chunk.text[1..]; self.skip_leading_tab = false; } } else { @@ -439,18 +439,24 @@ impl<'a> Iterator for HighlightedChunks<'a> { } } - for (ix, c) in self.chunk.char_indices() { + for (ix, c) in self.chunk.text.char_indices() { match c { '\t' => { if ix > 0 { - let (prefix, suffix) = self.chunk.split_at(ix); - self.chunk = suffix; - return Some((prefix, self.style_id)); + let (prefix, suffix) = self.chunk.text.split_at(ix); + self.chunk.text = suffix; + return Some(HighlightedChunk { + text: prefix, + ..self.chunk + }); } else { - self.chunk = &self.chunk[1..]; + self.chunk.text = &self.chunk.text[1..]; let len = self.tab_size - self.column % self.tab_size; self.column += len; - return Some((&SPACES[0..len], self.style_id)); + return Some(HighlightedChunk { + text: &SPACES[0..len], + ..self.chunk + }); } } '\n' => self.column = 0, @@ -458,7 +464,7 @@ impl<'a> Iterator for HighlightedChunks<'a> { } } - Some((mem::take(&mut self.chunk), mem::take(&mut self.style_id))) + Some(mem::take(&mut self.chunk)) } } diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 897dfa01b9cde891f3d6720694edc0046dbd3f18..a62c67dbce5b4d3654015f7d060645462b383b47 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -3,7 +3,7 @@ use super::{ tab_map::{self, Edit as TabEdit, Snapshot as TabSnapshot, TabPoint, TextSummary}, }; use gpui::{fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, Task}; -use 
language::{HighlightId, Point}; +use language::{HighlightedChunk, Point}; use lazy_static::lazy_static; use smol::future::yield_now; use std::{collections::VecDeque, ops::Range, time::Duration}; @@ -52,8 +52,7 @@ pub struct Chunks<'a> { pub struct HighlightedChunks<'a> { input_chunks: tab_map::HighlightedChunks<'a>, - input_chunk: &'a str, - style_id: HighlightId, + input_chunk: HighlightedChunk<'a>, output_position: WrapPoint, max_output_row: u32, transforms: Cursor<'a, Transform, (WrapPoint, TabPoint)>, @@ -490,8 +489,7 @@ impl Snapshot { .min(self.tab_snapshot.max_point()); HighlightedChunks { input_chunks: self.tab_snapshot.highlighted_chunks(input_start..input_end), - input_chunk: "", - style_id: HighlightId::default(), + input_chunk: Default::default(), output_position: output_start, max_output_row: rows.end, transforms, @@ -674,7 +672,7 @@ impl<'a> Iterator for Chunks<'a> { } impl<'a> Iterator for HighlightedChunks<'a> { - type Item = (&'a str, HighlightId); + type Item = HighlightedChunk<'a>; fn next(&mut self) -> Option { if self.output_position.row() >= self.max_output_row { @@ -699,18 +697,19 @@ impl<'a> Iterator for HighlightedChunks<'a> { self.output_position.0 += summary; self.transforms.next(&()); - return Some((&display_text[start_ix..end_ix], self.style_id)); + return Some(HighlightedChunk { + text: &display_text[start_ix..end_ix], + ..self.input_chunk + }); } - if self.input_chunk.is_empty() { - let (chunk, style_id) = self.input_chunks.next().unwrap(); - self.input_chunk = chunk; - self.style_id = style_id; + if self.input_chunk.text.is_empty() { + self.input_chunk = self.input_chunks.next().unwrap(); } let mut input_len = 0; let transform_end = self.transforms.end(&()).0; - for c in self.input_chunk.chars() { + for c in self.input_chunk.text.chars() { let char_len = c.len_utf8(); input_len += char_len; if c == '\n' { @@ -726,9 +725,12 @@ impl<'a> Iterator for HighlightedChunks<'a> { } } - let (prefix, suffix) = self.input_chunk.split_at(input_len); - self.input_chunk = suffix; - Some((prefix, self.style_id)) + let (prefix, suffix) = self.input_chunk.text.split_at(input_len); + self.input_chunk.text = suffix; + Some(HighlightedChunk { + text: prefix, + ..self.input_chunk + }) } } @@ -1090,7 +1092,7 @@ mod tests { let actual_text = self .highlighted_chunks_for_rows(start_row..end_row) - .map(|c| c.0) + .map(|c| c.text) .collect::(); assert_eq!( expected_text, diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index b18e72b2d77199e263e46b2f04cfee5e2acc9d55..f538f3f4cb4c477e239899b89408a49e0a5ef450 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -17,7 +17,7 @@ use gpui::{ MutableAppContext, PaintContext, Quad, Scene, SizeConstraint, ViewContext, WeakViewHandle, }; use json::json; -use language::HighlightId; +use language::{DiagnosticSeverity, HighlightedChunk}; use smallvec::SmallVec; use std::{ cmp::{self, Ordering}, @@ -495,8 +495,12 @@ impl EditorElement { let mut line_exceeded_max_len = false; let chunks = snapshot.highlighted_chunks_for_rows(rows.clone()); - 'outer: for (chunk, style_ix) in chunks.chain(Some(("\n", HighlightId::default()))) { - for (ix, mut line_chunk) in chunk.split('\n').enumerate() { + let newline_chunk = HighlightedChunk { + text: "\n", + ..Default::default() + }; + 'outer: for chunk in chunks.chain([newline_chunk]) { + for (ix, mut line_chunk) in chunk.text.split('\n').enumerate() { if ix > 0 { layouts.push(cx.text_layout_cache.layout_str( &line, @@ -513,7 +517,8 @@ impl EditorElement { } if 
!line_chunk.is_empty() && !line_exceeded_max_len { - let highlight_style = style_ix + let highlight_style = chunk + .highlight_id .style(&style.syntax) .unwrap_or(style.text.clone().into()); // Avoid a lookup if the font properties match the previous ones. @@ -537,13 +542,25 @@ impl EditorElement { line_exceeded_max_len = true; } + let underline = if let Some(severity) = chunk.diagnostic { + match severity { + DiagnosticSeverity::ERROR => Some(style.error_underline), + DiagnosticSeverity::WARNING => Some(style.warning_underline), + DiagnosticSeverity::INFORMATION => Some(style.information_underline), + DiagnosticSeverity::HINT => Some(style.hint_underline), + _ => highlight_style.underline, + } + } else { + highlight_style.underline + }; + line.push_str(line_chunk); styles.push(( line_chunk.len(), RunStyle { font_id, color: highlight_style.color, - underline: highlight_style.underline, + underline, }, )); prev_font_id = font_id; diff --git a/crates/editor/src/lib.rs b/crates/editor/src/lib.rs index 05eb9fa7e5008bc6926502af5402313412c6a16d..fc28e04729f01d534f105effd16ad58ec7298a78 100644 --- a/crates/editor/src/lib.rs +++ b/crates/editor/src/lib.rs @@ -2774,6 +2774,10 @@ impl EditorSettings { selection: Default::default(), guest_selections: Default::default(), syntax: Default::default(), + error_underline: Default::default(), + warning_underline: Default::default(), + information_underline: Default::default(), + hint_underline: Default::default(), } }, } diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 86714def0f427ff9064893d391e77596401aa839..056661bb3fd3bb70e74a4a186037603e335defcb 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -13,7 +13,7 @@ use clock::ReplicaId; use futures::FutureExt as _; use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task}; use lazy_static::lazy_static; -use lsp::{DiagnosticSeverity, LanguageServer}; +use lsp::LanguageServer; use parking_lot::Mutex; use postage::{prelude::Stream, sink::Sink, watch}; use rpc::proto; @@ -26,16 +26,19 @@ use std::{ collections::{BTreeMap, HashMap, HashSet}, ffi::OsString, future::Future, - iter::Iterator, + iter::{Iterator, Peekable}, ops::{Deref, DerefMut, Range}, path::{Path, PathBuf}, str, sync::Arc, time::{Duration, Instant, SystemTime, UNIX_EPOCH}, + vec, }; use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; use util::{post_inc, TryFutureExt as _}; +pub use lsp::DiagnosticSeverity; + thread_local! 
{ static PARSER: RefCell = RefCell::new(Parser::new()); } @@ -68,6 +71,7 @@ pub struct Buffer { pub struct Snapshot { text: buffer::Snapshot, tree: Option, + diagnostics: AnchorRangeMultimap<(DiagnosticSeverity, String)>, is_parsing: bool, language: Option>, query_cursor: QueryCursorHandle, @@ -182,15 +186,34 @@ struct Highlights<'a> { pub struct HighlightedChunks<'a> { range: Range, chunks: Chunks<'a>, + diagnostic_endpoints: Peekable>, + error_depth: usize, + warning_depth: usize, + information_depth: usize, + hint_depth: usize, highlights: Option>, } +#[derive(Clone, Copy, Debug, Default)] +pub struct HighlightedChunk<'a> { + pub text: &'a str, + pub highlight_id: HighlightId, + pub diagnostic: Option, +} + struct Diff { base_version: clock::Global, new_text: Arc, changes: Vec<(ChangeTag, usize)>, } +#[derive(Clone, Copy)] +struct DiagnosticEndpoint { + offset: usize, + is_start: bool, + severity: DiagnosticSeverity, +} + impl Buffer { pub fn new>>( replica_id: ReplicaId, @@ -275,6 +298,7 @@ impl Buffer { Snapshot { text: self.text.snapshot(), tree: self.syntax_tree(), + diagnostics: self.diagnostics.clone(), is_parsing: self.parsing_in_background, language: self.language.clone(), query_cursor: QueryCursorHandle::new(), @@ -673,7 +697,7 @@ impl Buffer { let content = self.content(); let range = range.start.to_offset(&content)..range.end.to_offset(&content); self.diagnostics - .intersecting_point_ranges(range, content, true) + .intersecting_ranges(range, content, true) .map(move |(_, range, (severity, message))| Diagnostic { range, severity: *severity, @@ -1021,7 +1045,9 @@ impl Buffer { let abs_path = self .file .as_ref() - .map_or(PathBuf::new(), |file| file.abs_path(cx).unwrap()); + .map_or(Path::new("/").to_path_buf(), |file| { + file.abs_path(cx).unwrap() + }); let version = post_inc(&mut language_server.next_version); let snapshot = LanguageServerSnapshot { @@ -1462,30 +1488,54 @@ impl Snapshot { range: Range, ) -> HighlightedChunks { let range = range.start.to_offset(&*self)..range.end.to_offset(&*self); + + let mut diagnostic_endpoints = Vec::::new(); + for (_, range, (severity, _)) in + self.diagnostics + .intersecting_ranges(range.clone(), self.content(), true) + { + diagnostic_endpoints.push(DiagnosticEndpoint { + offset: range.start, + is_start: true, + severity: *severity, + }); + diagnostic_endpoints.push(DiagnosticEndpoint { + offset: range.end, + is_start: false, + severity: *severity, + }); + } + diagnostic_endpoints.sort_unstable_by_key(|endpoint| endpoint.offset); + let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable(); + let chunks = self.text.as_rope().chunks_in_range(range.clone()); - if let Some((language, tree)) = self.language.as_ref().zip(self.tree.as_ref()) { - let captures = self.query_cursor.set_byte_range(range.clone()).captures( - &language.highlights_query, - tree.root_node(), - TextProvider(self.text.as_rope()), - ); + let highlights = + if let Some((language, tree)) = self.language.as_ref().zip(self.tree.as_ref()) { + let captures = self.query_cursor.set_byte_range(range.clone()).captures( + &language.highlights_query, + tree.root_node(), + TextProvider(self.text.as_rope()), + ); - HighlightedChunks { - range, - chunks, - highlights: Some(Highlights { + Some(Highlights { captures, next_capture: None, stack: Default::default(), highlight_map: language.highlight_map(), - }), - } - } else { - HighlightedChunks { - range, - chunks, - highlights: None, - } + }) + } else { + None + }; + + HighlightedChunks { + range, + chunks, + 
diagnostic_endpoints, + error_depth: 0, + warning_depth: 0, + information_depth: 0, + hint_depth: 0, + highlights, } } } @@ -1495,6 +1545,7 @@ impl Clone for Snapshot { Self { text: self.text.clone(), tree: self.tree.clone(), + diagnostics: self.diagnostics.clone(), is_parsing: self.is_parsing, language: self.language.clone(), query_cursor: QueryCursorHandle::new(), @@ -1556,13 +1607,43 @@ impl<'a> HighlightedChunks<'a> { pub fn offset(&self) -> usize { self.range.start } + + fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) { + let depth = match endpoint.severity { + DiagnosticSeverity::ERROR => &mut self.error_depth, + DiagnosticSeverity::WARNING => &mut self.warning_depth, + DiagnosticSeverity::INFORMATION => &mut self.information_depth, + DiagnosticSeverity::HINT => &mut self.hint_depth, + _ => return, + }; + if endpoint.is_start { + *depth += 1; + } else { + *depth -= 1; + } + } + + fn current_diagnostic_severity(&mut self) -> Option { + if self.error_depth > 0 { + Some(DiagnosticSeverity::ERROR) + } else if self.warning_depth > 0 { + Some(DiagnosticSeverity::WARNING) + } else if self.information_depth > 0 { + Some(DiagnosticSeverity::INFORMATION) + } else if self.hint_depth > 0 { + Some(DiagnosticSeverity::HINT) + } else { + None + } + } } impl<'a> Iterator for HighlightedChunks<'a> { - type Item = (&'a str, HighlightId); + type Item = HighlightedChunk<'a>; fn next(&mut self) -> Option { let mut next_capture_start = usize::MAX; + let mut next_diagnostic_endpoint = usize::MAX; if let Some(highlights) = self.highlights.as_mut() { while let Some((parent_capture_end, _)) = highlights.stack.last() { @@ -1583,22 +1664,36 @@ impl<'a> Iterator for HighlightedChunks<'a> { next_capture_start = capture.node.start_byte(); break; } else { - let style_id = highlights.highlight_map.get(capture.index); - highlights.stack.push((capture.node.end_byte(), style_id)); + let highlight_id = highlights.highlight_map.get(capture.index); + highlights + .stack + .push((capture.node.end_byte(), highlight_id)); highlights.next_capture = highlights.captures.next(); } } } + while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() { + if endpoint.offset <= self.range.start { + self.update_diagnostic_depths(endpoint); + self.diagnostic_endpoints.next(); + } else { + next_diagnostic_endpoint = endpoint.offset; + break; + } + } + if let Some(chunk) = self.chunks.peek() { let chunk_start = self.range.start; - let mut chunk_end = (self.chunks.offset() + chunk.len()).min(next_capture_start); - let mut style_id = HighlightId::default(); - if let Some((parent_capture_end, parent_style_id)) = + let mut chunk_end = (self.chunks.offset() + chunk.len()) + .min(next_capture_start) + .min(next_diagnostic_endpoint); + let mut highlight_id = HighlightId::default(); + if let Some((parent_capture_end, parent_highlight_id)) = self.highlights.as_ref().and_then(|h| h.stack.last()) { chunk_end = chunk_end.min(*parent_capture_end); - style_id = *parent_style_id; + highlight_id = *parent_highlight_id; } let slice = @@ -1608,7 +1703,11 @@ impl<'a> Iterator for HighlightedChunks<'a> { self.chunks.next().unwrap(); } - Some((slice, style_id)) + Some(HighlightedChunk { + text: slice, + highlight_id, + diagnostic: self.current_diagnostic_severity(), + }) } else { None } diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 988b1d0c058e8994ff2179197dcde369f7817658..1dbf9700ee61345c7a149b15732bfe09c8879d29 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -475,7 
+475,12 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { message: "undefined variable 'CCC'".to_string() } ] - ) + ); + + dbg!(buffer + .snapshot() + .highlighted_text_for_range(0..buffer.len()) + .collect::>()); }); } diff --git a/crates/theme/src/lib.rs b/crates/theme/src/lib.rs index 46b3f4a7507fe56b65ad47a4a2475ec7dfb53ed3..2a0abc4395011c5a954c1af1a827981a5800af39 100644 --- a/crates/theme/src/lib.rs +++ b/crates/theme/src/lib.rs @@ -214,6 +214,12 @@ pub struct EditorStyle { pub line_number_active: Color, pub guest_selections: Vec, pub syntax: Arc, + pub error_underline: Color, + pub warning_underline: Color, + #[serde(default)] + pub information_underline: Color, + #[serde(default)] + pub hint_underline: Color, } #[derive(Clone, Copy, Default, Deserialize)] @@ -254,6 +260,10 @@ impl InputEditorStyle { line_number_active: Default::default(), guest_selections: Default::default(), syntax: Default::default(), + error_underline: Default::default(), + warning_underline: Default::default(), + information_underline: Default::default(), + hint_underline: Default::default(), } } } diff --git a/crates/zed/assets/themes/black.toml b/crates/zed/assets/themes/black.toml index d37b7905be476c8499a46e43e7f30b3e551e2385..e99629c5ce68b35c337484f0d24c55f8453e0774 100644 --- a/crates/zed/assets/themes/black.toml +++ b/crates/zed/assets/themes/black.toml @@ -39,6 +39,10 @@ bad = "#b7372e" active_line = "#00000033" hover = "#00000033" +[editor] +error_underline = "#ff0000" +warning_underline = "#00ffff" + [editor.syntax] keyword = { color = "#0086c0", weight = "bold" } function = "#dcdcaa" diff --git a/crates/zed/assets/themes/dark.toml b/crates/zed/assets/themes/dark.toml index 694e3469111890d317c1528c04143b5e36a8d37f..ce64e3c3f0002462100c6e1d11dea11d620d76f5 100644 --- a/crates/zed/assets/themes/dark.toml +++ b/crates/zed/assets/themes/dark.toml @@ -39,6 +39,10 @@ bad = "#b7372e" active_line = "#00000022" hover = "#00000033" +[editor] +error_underline = "#ff0000" +warning_underline = "#00ffff" + [editor.syntax] keyword = { color = "#0086c0", weight = "bold" } function = "#dcdcaa" diff --git a/crates/zed/assets/themes/light.toml b/crates/zed/assets/themes/light.toml index 677a9fd6f6f7b31ca49c58b7e34c7a2c4929e39e..13803c11a8060395db10d7ef19aa64032522d198 100644 --- a/crates/zed/assets/themes/light.toml +++ b/crates/zed/assets/themes/light.toml @@ -26,7 +26,7 @@ guests = [ { selection = "#EE823133", cursor = "#EE8231" }, { selection = "#5A2B9233", cursor = "#5A2B92" }, { selection = "#FDF35133", cursor = "#FDF351" }, - { selection = "#4EACAD33", cursor = "#4EACAD" } + { selection = "#4EACAD33", cursor = "#4EACAD" }, ] [status] @@ -39,6 +39,10 @@ bad = "#b7372e" active_line = "#00000008" hover = "#0000000D" +[editor] +error_underline = "#ff0000" +warning_underline = "#00ffff" + [editor.syntax] keyword = { color = "#0000fa", weight = "bold" } function = "#795e26" From a7a73a5b0bd3e43c42543b83cf10365c2f5e7fb8 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 27 Oct 2021 11:56:04 +0200 Subject: [PATCH 24/61] Fix bug in `to_full_offset` when `Anchor::version` != `Content::version` --- crates/buffer/src/lib.rs | 4 -- crates/language/src/tests.rs | 96 ++++++++++++++++++++++++++++++++++-- 2 files changed, 92 insertions(+), 8 deletions(-) diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index 093f4691f33c6a58bbd5111f1d45b9713937445f..3fc70c437d7cb15d53f983bf5a165f60e180acb6 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -2496,10 +2496,6 @@ impl ToOffset 
for Anchor { fn to_offset<'a>(&self, content: impl Into>) -> usize { content.into().summary_for_anchor(self).bytes } - - fn to_full_offset<'a>(&self, _: impl Into>, _: Bias) -> usize { - self.full_offset - } } impl<'a> ToOffset for &'a Anchor { diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 1dbf9700ee61345c7a149b15732bfe09c8879d29..d44e1f85b523e44423e5bfa072554ff88eb946c8 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -476,11 +476,99 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { } ] ); + assert_eq!( + chunks_with_diagnostics(buffer, 0..buffer.len()), + [ + ("\n\nfn a() { ".to_string(), None), + ("A".to_string(), Some(DiagnosticSeverity::ERROR)), + (" }\nfn b() { ".to_string(), None), + ("BB".to_string(), Some(DiagnosticSeverity::ERROR)), + (" }\nfn c() { ".to_string(), None), + ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)), + (" }\n".to_string(), None), + ] + ); + assert_eq!( + chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)), + [ + ("B".to_string(), Some(DiagnosticSeverity::ERROR)), + (" }\nfn c() { ".to_string(), None), + ("CC".to_string(), Some(DiagnosticSeverity::ERROR)), + ] + ); - dbg!(buffer - .snapshot() - .highlighted_text_for_range(0..buffer.len()) - .collect::>()); + // Ensure overlapping diagnostics are highlighted correctly. + buffer + .update_diagnostics( + Some(open_notification.text_document.version), + vec![ + lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), + severity: Some(lsp::DiagnosticSeverity::ERROR), + message: "undefined variable 'A'".to_string(), + ..Default::default() + }, + lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)), + severity: Some(lsp::DiagnosticSeverity::WARNING), + message: "unreachable statement".to_string(), + ..Default::default() + }, + ], + cx, + ) + .unwrap(); + assert_eq!( + buffer + .diagnostics_in_range(Point::new(2, 0)..Point::new(3, 0)) + .collect::>(), + &[ + Diagnostic { + range: Point::new(2, 9)..Point::new(2, 12), + severity: DiagnosticSeverity::WARNING, + message: "unreachable statement".to_string() + }, + Diagnostic { + range: Point::new(2, 9)..Point::new(2, 10), + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'A'".to_string() + }, + ] + ); + assert_eq!( + chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)), + [ + ("fn a() { ".to_string(), None), + ("A".to_string(), Some(DiagnosticSeverity::ERROR)), + (" }".to_string(), Some(DiagnosticSeverity::WARNING)), + ("\n".to_string(), None), + ] + ); + assert_eq!( + chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)), + [ + (" }".to_string(), Some(DiagnosticSeverity::WARNING)), + ("\n".to_string(), None), + ] + ); + + fn chunks_with_diagnostics( + buffer: &Buffer, + range: Range, + ) -> Vec<(String, Option)> { + let mut chunks: Vec<(String, Option)> = Vec::new(); + for chunk in buffer.snapshot().highlighted_text_for_range(range) { + if chunks + .last() + .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic) + { + chunks.last_mut().unwrap().0.push_str(chunk.text); + } else { + chunks.push((chunk.text.to_string(), chunk.diagnostic)); + } + } + chunks + } }); } From bc076c1cc14ef219610011d212a6a0b057475b01 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 27 Oct 2021 12:39:11 +0200 Subject: [PATCH 25/61] Update display map snapshots when diagnostics are updated This is similar to what we do when we receive new parse trees from 
tree-sitter. --- crates/editor/src/display_map/fold_map.rs | 7 ++++++- crates/language/src/lib.rs | 8 ++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index efccbbbf5dae1e0acf99c67bdb7f39d1c7b7ad6e..a2e6b150e7f81857860529f9ba19c241260aaf25 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -204,6 +204,7 @@ pub struct FoldMap { struct SyncState { version: clock::Global, parse_count: usize, + diagnostics_update_count: usize, } impl FoldMap { @@ -225,6 +226,7 @@ impl FoldMap { last_sync: Mutex::new(SyncState { version: buffer.version(), parse_count: buffer.parse_count(), + diagnostics_update_count: buffer.diagnostics_update_count(), }), version: AtomicUsize::new(0), }; @@ -256,6 +258,7 @@ impl FoldMap { SyncState { version: buffer.version(), parse_count: buffer.parse_count(), + diagnostics_update_count: buffer.diagnostics_update_count(), }, ); let edits = buffer @@ -263,7 +266,9 @@ impl FoldMap { .map(Into::into) .collect::>(); if edits.is_empty() { - if last_sync.parse_count != buffer.parse_count() { + if last_sync.parse_count != buffer.parse_count() + || last_sync.diagnostics_update_count != buffer.diagnostics_update_count() + { self.version.fetch_add(1, SeqCst); } Vec::new() diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 056661bb3fd3bb70e74a4a186037603e335defcb..866e182af3411749608a98ba6cf6716f16e3d2a3 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -63,6 +63,7 @@ pub struct Buffer { parsing_in_background: bool, parse_count: usize, diagnostics: AnchorRangeMultimap<(DiagnosticSeverity, String)>, + diagnostics_update_count: usize, language_server: Option, #[cfg(test)] operations: Vec, @@ -288,6 +289,7 @@ impl Buffer { pending_autoindent: Default::default(), language: None, diagnostics: Default::default(), + diagnostics_update_count: 0, language_server: None, #[cfg(test)] operations: Default::default(), @@ -686,6 +688,7 @@ impl Buffer { } } + self.diagnostics_update_count += 1; cx.notify(); Ok(()) } @@ -705,6 +708,10 @@ impl Buffer { }) } + pub fn diagnostics_update_count(&self) -> usize { + self.diagnostics_update_count + } + fn request_autoindent(&mut self, cx: &mut ModelContext) { if let Some(indent_columns) = self.compute_autoindents() { let indent_columns = cx.background().spawn(indent_columns); @@ -1335,6 +1342,7 @@ impl Clone for Buffer { autoindent_requests: Default::default(), pending_autoindent: Default::default(), diagnostics: self.diagnostics.clone(), + diagnostics_update_count: self.diagnostics_update_count, language_server: None, #[cfg(test)] operations: self.operations.clone(), From 0c10d6c82d331c9f912823d0c6aafafea73a8ff3 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 27 Oct 2021 11:02:12 -0700 Subject: [PATCH 26/61] Introduce FullOffset type Co-Authored-By: Nathan Sobo Co-Authored-By: Antonio Scandurra --- crates/buffer/src/anchor.rs | 40 +++---- crates/buffer/src/lib.rs | 212 ++++++++++++++++++++++-------------- 2 files changed, 154 insertions(+), 98 deletions(-) diff --git a/crates/buffer/src/anchor.rs b/crates/buffer/src/anchor.rs index 479b31fe6c602b5e4a9761823960cc93184a3630..c1789a933bd456daa4fd1db2da3ee56bebdc2b4f 100644 --- a/crates/buffer/src/anchor.rs +++ b/crates/buffer/src/anchor.rs @@ -1,3 +1,5 @@ +use crate::FullOffset; + use super::{Buffer, Content, FromAnchor, Point, ToOffset}; use anyhow::Result; use std::{cmp::Ordering, ops::Range}; 
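// Note: `FullOffset` (introduced below in crates/buffer/src/lib.rs) is a newtype over `usize`
// that measures a position through both visible and deleted text, which is what anchors record.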
@@ -5,7 +7,7 @@ use sum_tree::{Bias, SumTree}; #[derive(Clone, Eq, PartialEq, Debug, Hash)] pub struct Anchor { - pub full_offset: usize, + pub full_offset: FullOffset, pub bias: Bias, pub version: clock::Global, } @@ -13,7 +15,7 @@ pub struct Anchor { #[derive(Clone)] pub struct AnchorMap { pub(crate) version: clock::Global, - pub(crate) entries: Vec<((usize, Bias), T)>, + pub(crate) entries: Vec<((FullOffset, Bias), T)>, } #[derive(Clone)] @@ -22,7 +24,7 @@ pub struct AnchorSet(pub(crate) AnchorMap<()>); #[derive(Clone)] pub struct AnchorRangeMap { pub(crate) version: clock::Global, - pub(crate) entries: Vec<(Range<(usize, Bias)>, T)>, + pub(crate) entries: Vec<(Range<(FullOffset, Bias)>, T)>, } #[derive(Clone)] @@ -44,23 +46,23 @@ pub(crate) struct AnchorRangeMultimapEntry { #[derive(Clone, Debug)] pub(crate) struct FullOffsetRange { - pub(crate) start: usize, - pub(crate) end: usize, + pub(crate) start: FullOffset, + pub(crate) end: FullOffset, } #[derive(Clone, Debug)] pub(crate) struct AnchorRangeMultimapSummary { - start: usize, - end: usize, - min_start: usize, - max_end: usize, + start: FullOffset, + end: FullOffset, + min_start: FullOffset, + max_end: FullOffset, count: usize, } impl Anchor { pub fn min() -> Self { Self { - full_offset: 0, + full_offset: FullOffset(0), bias: Bias::Left, version: Default::default(), } @@ -68,7 +70,7 @@ impl Anchor { pub fn max() -> Self { Self { - full_offset: usize::MAX, + full_offset: FullOffset::MAX, bias: Bias::Right, version: Default::default(), } @@ -192,7 +194,7 @@ impl AnchorRangeMultimap { { let content = content.clone(); let mut endpoint = Anchor { - full_offset: 0, + full_offset: FullOffset(0), bias: Bias::Right, version: self.version.clone(), }; @@ -219,7 +221,7 @@ impl AnchorRangeMultimap { std::iter::from_fn({ let mut endpoint = Anchor { - full_offset: 0, + full_offset: FullOffset(0), bias: Bias::Left, version: self.version.clone(), }; @@ -260,10 +262,10 @@ impl sum_tree::Item for AnchorRangeMultimapEntry { impl Default for AnchorRangeMultimapSummary { fn default() -> Self { Self { - start: 0, - end: usize::MAX, - min_start: usize::MAX, - max_end: 0, + start: FullOffset(0), + end: FullOffset::MAX, + min_start: FullOffset::MAX, + max_end: FullOffset(0), count: 0, } } @@ -294,8 +296,8 @@ impl sum_tree::Summary for AnchorRangeMultimapSummary { impl Default for FullOffsetRange { fn default() -> Self { Self { - start: 0, - end: usize::MAX, + start: FullOffset(0), + end: FullOffset::MAX, } } } diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index 3fc70c437d7cb15d53f983bf5a165f60e180acb6..00fe10ad337747db222ef1007eb06b32155e2ea0 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -22,7 +22,7 @@ use std::{ cmp::{self, Reverse}, convert::{TryFrom, TryInto}, iter::Iterator, - ops::Range, + ops::{self, Range}, str, sync::Arc, time::{Duration, Instant}, @@ -84,7 +84,7 @@ pub struct Transaction { start: clock::Global, end: clock::Global, edits: Vec, - ranges: Vec>, + ranges: Vec>, selections_before: HashMap>, selections_after: HashMap>, first_edit_at: Instant, @@ -101,7 +101,7 @@ impl Transaction { self.end.observe(edit.timestamp.local()); let mut other_ranges = edit.ranges.iter().peekable(); - let mut new_ranges: Vec> = Vec::new(); + let mut new_ranges = Vec::new(); let insertion_len = edit.new_text.as_ref().map_or(0, |t| t.len()); let mut delta = 0; @@ -429,7 +429,7 @@ pub enum Operation { pub struct EditOperation { timestamp: InsertionTimestamp, version: clock::Global, - ranges: Vec>, + ranges: Vec>, 
new_text: Option, } @@ -437,7 +437,7 @@ pub struct EditOperation { pub struct UndoOperation { id: clock::Local, counts: HashMap, - ranges: Vec>, + ranges: Vec>, version: clock::Global, } @@ -735,7 +735,7 @@ impl Buffer { fragment_start = old_fragments.start().visible; } - let full_range_start = range.start + old_fragments.start().deleted; + let full_range_start = FullOffset(range.start + old_fragments.start().deleted); // Preserve any portion of the current fragment that precedes this range. if fragment_start < range.start { @@ -783,7 +783,7 @@ impl Buffer { } } - let full_range_end = range.end + old_fragments.start().deleted; + let full_range_end = FullOffset(range.end + old_fragments.start().deleted); edit.ranges.push(full_range_start..full_range_end); } @@ -898,7 +898,7 @@ impl Buffer { fn apply_remote_edit( &mut self, version: &clock::Global, - ranges: &[Range], + ranges: &[Range], new_text: Option<&str>, timestamp: InsertionTimestamp, ) { @@ -909,24 +909,27 @@ impl Buffer { let cx = Some(version.clone()); let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); - let mut old_fragments = self.fragments.cursor::(); - let mut new_fragments = - old_fragments.slice(&VersionedOffset::Offset(ranges[0].start), Bias::Left, &cx); + let mut old_fragments = self.fragments.cursor::(); + let mut new_fragments = old_fragments.slice( + &VersionedFullOffset::Offset(ranges[0].start), + Bias::Left, + &cx, + ); new_ropes.push_tree(new_fragments.summary().text); - let mut fragment_start = old_fragments.start().offset(); + let mut fragment_start = old_fragments.start().full_offset(); for range in ranges { - let fragment_end = old_fragments.end(&cx).offset(); + let fragment_end = old_fragments.end(&cx).full_offset(); // If the current fragment ends before this range, then jump ahead to the first fragment // that extends past the start of this range, reusing any intervening fragments. if fragment_end < range.start { // If the current fragment has been partially consumed, then consume the rest of it // and advance to the next fragment before slicing. - if fragment_start > old_fragments.start().offset() { + if fragment_start > old_fragments.start().full_offset() { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; + suffix.len = fragment_end.0 - fragment_start.0; new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } @@ -934,21 +937,21 @@ impl Buffer { } let slice = - old_fragments.slice(&VersionedOffset::Offset(range.start), Bias::Left, &cx); + old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx); new_ropes.push_tree(slice.summary().text); new_fragments.push_tree(slice, &None); - fragment_start = old_fragments.start().offset(); + fragment_start = old_fragments.start().full_offset(); } // If we are at the end of a non-concurrent fragment, advance to the next one. 
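            // (`full_offset()` positions include deleted text; fragment lengths are recovered
            // by subtracting the underlying `usize` values, hence the `.0` field accesses.)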
- let fragment_end = old_fragments.end(&cx).offset(); + let fragment_end = old_fragments.end(&cx).full_offset(); if fragment_end == range.start && fragment_end > fragment_start { let mut fragment = old_fragments.item().unwrap().clone(); - fragment.len = fragment_end - fragment_start; + fragment.len = fragment_end.0 - fragment_start.0; new_ropes.push_fragment(&fragment, fragment.visible); new_fragments.push(fragment, &None); old_fragments.next(&cx); - fragment_start = old_fragments.start().offset(); + fragment_start = old_fragments.start().full_offset(); } // Skip over insertions that are concurrent to this edit, but have a lower lamport @@ -970,7 +973,7 @@ impl Buffer { // Preserve any portion of the current fragment that precedes this range. if fragment_start < range.start { let mut prefix = old_fragments.item().unwrap().clone(); - prefix.len = range.start - fragment_start; + prefix.len = range.start.0 - fragment_start.0; fragment_start = range.start; new_ropes.push_fragment(&prefix, prefix.visible); new_fragments.push(prefix, &None); @@ -995,11 +998,11 @@ impl Buffer { // portions as deleted. while fragment_start < range.end { let fragment = old_fragments.item().unwrap(); - let fragment_end = old_fragments.end(&cx).offset(); + let fragment_end = old_fragments.end(&cx).full_offset(); let mut intersection = fragment.clone(); let intersection_end = cmp::min(range.end, fragment_end); if fragment.was_visible(version, &self.undo_map) { - intersection.len = intersection_end - fragment_start; + intersection.len = intersection_end.0 - fragment_start.0; intersection.deletions.insert(timestamp.local()); intersection.visible = false; } @@ -1016,11 +1019,11 @@ impl Buffer { // If the current fragment has been partially consumed, then consume the rest of it // and advance to the next fragment before slicing. 
- if fragment_start > old_fragments.start().offset() { - let fragment_end = old_fragments.end(&cx).offset(); + if fragment_start > old_fragments.start().full_offset() { + let fragment_end = old_fragments.end(&cx).full_offset(); if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; + suffix.len = fragment_end.0 - fragment_start.0; new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } @@ -1049,9 +1052,9 @@ impl Buffer { } let cx = Some(cx); - let mut old_fragments = self.fragments.cursor::(); + let mut old_fragments = self.fragments.cursor::(); let mut new_fragments = old_fragments.slice( - &VersionedOffset::Offset(undo.ranges[0].start), + &VersionedFullOffset::Offset(undo.ranges[0].start), Bias::Right, &cx, ); @@ -1060,11 +1063,14 @@ impl Buffer { new_ropes.push_tree(new_fragments.summary().text); for range in &undo.ranges { - let mut end_offset = old_fragments.end(&cx).offset(); + let mut end_offset = old_fragments.end(&cx).full_offset(); if end_offset < range.start { - let preceding_fragments = - old_fragments.slice(&VersionedOffset::Offset(range.start), Bias::Right, &cx); + let preceding_fragments = old_fragments.slice( + &VersionedFullOffset::Offset(range.start), + Bias::Right, + &cx, + ); new_ropes.push_tree(preceding_fragments.summary().text); new_fragments.push_tree(preceding_fragments, &None); } @@ -1084,16 +1090,16 @@ impl Buffer { new_fragments.push(fragment, &None); old_fragments.next(&cx); - if end_offset == old_fragments.end(&cx).offset() { + if end_offset == old_fragments.end(&cx).full_offset() { let unseen_fragments = old_fragments.slice( - &VersionedOffset::Offset(end_offset), + &VersionedFullOffset::Offset(end_offset), Bias::Right, &cx, ); new_ropes.push_tree(unseen_fragments.summary().text); new_fragments.push_tree(unseen_fragments, &None); } - end_offset = old_fragments.end(&cx).offset(); + end_offset = old_fragments.end(&cx).full_offset(); } else { break; } @@ -1698,14 +1704,14 @@ impl<'a> Content<'a> { fn summary_for_anchor(&self, anchor: &Anchor) -> TextSummary { let cx = Some(anchor.version.clone()); - let mut cursor = self.fragments.cursor::<(VersionedOffset, usize)>(); + let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); cursor.seek( - &VersionedOffset::Offset(anchor.full_offset), + &VersionedFullOffset::Offset(anchor.full_offset), anchor.bias, &cx, ); let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - anchor.full_offset - cursor.start().0.offset() + anchor.full_offset - cursor.start().0.full_offset() } else { 0 }; @@ -1723,11 +1729,11 @@ impl<'a> Content<'a> { let cx = Some(map.version.clone()); let mut summary = TextSummary::default(); let mut rope_cursor = self.visible_text.cursor(0); - let mut cursor = self.fragments.cursor::<(VersionedOffset, usize)>(); + let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); map.entries.iter().map(move |((offset, bias), value)| { - cursor.seek_forward(&VersionedOffset::Offset(*offset), *bias, &cx); + cursor.seek_forward(&VersionedFullOffset::Offset(*offset), *bias, &cx); let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - offset - cursor.start().0.offset() + *offset - cursor.start().0.full_offset() } else { 0 }; @@ -1743,25 +1749,29 @@ impl<'a> Content<'a> { let cx = Some(map.version.clone()); let mut summary = TextSummary::default(); let mut rope_cursor = self.visible_text.cursor(0); - let mut cursor = 
self.fragments.cursor::<(VersionedOffset, usize)>(); + let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); map.entries.iter().map(move |(range, value)| { let Range { start: (start_offset, start_bias), end: (end_offset, end_bias), } = range; - cursor.seek_forward(&VersionedOffset::Offset(*start_offset), *start_bias, &cx); + cursor.seek_forward( + &VersionedFullOffset::Offset(*start_offset), + *start_bias, + &cx, + ); let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - start_offset - cursor.start().0.offset() + *start_offset - cursor.start().0.full_offset() } else { 0 }; summary += rope_cursor.summary(cursor.start().1 + overshoot); let start_summary = summary.clone(); - cursor.seek_forward(&VersionedOffset::Offset(*end_offset), *end_bias, &cx); + cursor.seek_forward(&VersionedFullOffset::Offset(*end_offset), *end_bias, &cx); let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - end_offset - cursor.start().0.offset() + *end_offset - cursor.start().0.full_offset() } else { 0 }; @@ -1790,7 +1800,7 @@ impl<'a> Content<'a> { .into_iter() .map(|((offset, bias), value)| { cursor.seek_forward(&offset, bias, &None); - let full_offset = cursor.start().deleted + offset; + let full_offset = FullOffset(cursor.start().deleted + offset); ((full_offset, bias), value) }) .collect(); @@ -1812,9 +1822,9 @@ impl<'a> Content<'a> { end: (end_offset, end_bias), } = range; cursor.seek_forward(&start_offset, start_bias, &None); - let full_start_offset = cursor.start().deleted + start_offset; + let full_start_offset = FullOffset(cursor.start().deleted + start_offset); cursor.seek_forward(&end_offset, end_bias, &None); - let full_end_offset = cursor.start().deleted + end_offset; + let full_end_offset = FullOffset(cursor.start().deleted + end_offset); ( (full_start_offset, start_bias)..(full_end_offset, end_bias), value, @@ -1869,23 +1879,23 @@ impl<'a> Content<'a> { } } - fn full_offset_for_anchor(&self, anchor: &Anchor) -> usize { + fn full_offset_for_anchor(&self, anchor: &Anchor) -> FullOffset { let cx = Some(anchor.version.clone()); let mut cursor = self .fragments - .cursor::<(VersionedOffset, FragmentTextSummary)>(); + .cursor::<(VersionedFullOffset, FragmentTextSummary)>(); cursor.seek( - &VersionedOffset::Offset(anchor.full_offset), + &VersionedFullOffset::Offset(anchor.full_offset), anchor.bias, &cx, ); let overshoot = if cursor.item().is_some() { - anchor.full_offset - cursor.start().0.offset() + anchor.full_offset - cursor.start().0.full_offset() } else { 0 }; let summary = cursor.start().1; - summary.visible + summary.deleted + overshoot + FullOffset(summary.visible + summary.deleted + overshoot) } fn point_for_offset(&self, offset: usize) -> Result { @@ -2118,12 +2128,56 @@ impl Default for FragmentSummary { } } +#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct FullOffset(usize); + +impl FullOffset { + const MAX: Self = FullOffset(usize::MAX); + + fn to_proto(self) -> u64 { + self.0 as u64 + } + + fn from_proto(value: u64) -> Self { + Self(value as usize) + } +} + +impl ops::AddAssign for FullOffset { + fn add_assign(&mut self, rhs: usize) { + self.0 += rhs; + } +} + +impl ops::Add for FullOffset { + type Output = Self; + + fn add(mut self, rhs: usize) -> Self::Output { + self += rhs; + self + } +} + +impl ops::Sub for FullOffset { + type Output = usize; + + fn sub(self, rhs: Self) -> Self::Output { + self.0 - rhs.0 + } +} + impl<'a> sum_tree::Dimension<'a, FragmentSummary> for usize { fn 
add_summary(&mut self, summary: &FragmentSummary, _: &Option) { *self += summary.text.visible; } } +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset { + fn add_summary(&mut self, summary: &FragmentSummary, _: &Option) { + self.0 += summary.text.visible + summary.text.deleted; + } +} + impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, FragmentTextSummary> for usize { fn cmp( &self, @@ -2135,28 +2189,28 @@ impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, FragmentTextSummary> for usiz } #[derive(Copy, Clone, Debug, Eq, PartialEq)] -enum VersionedOffset { - Offset(usize), - InvalidVersion, +enum VersionedFullOffset { + Offset(FullOffset), + Invalid, } -impl VersionedOffset { - fn offset(&self) -> usize { - if let Self::Offset(offset) = self { - *offset +impl VersionedFullOffset { + fn full_offset(&self) -> FullOffset { + if let Self::Offset(position) = self { + *position } else { panic!("invalid version") } } } -impl Default for VersionedOffset { +impl Default for VersionedFullOffset { fn default() -> Self { - Self::Offset(0) + Self::Offset(Default::default()) } } -impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedOffset { +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset { fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { if let Self::Offset(offset) = self { let version = cx.as_ref().unwrap(); @@ -2167,18 +2221,18 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedOffset { .iter() .all(|t| !version.observed(*t)) { - *self = Self::InvalidVersion; + *self = Self::Invalid; } } } } -impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, Self> for VersionedOffset { - fn cmp(&self, other: &Self, _: &Option) -> cmp::Ordering { - match (self, other) { +impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, Self> for VersionedFullOffset { + fn cmp(&self, cursor_position: &Self, _: &Option) -> cmp::Ordering { + match (self, cursor_position) { (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b), - (Self::Offset(_), Self::InvalidVersion) => cmp::Ordering::Less, - (Self::InvalidVersion, _) => unreachable!(), + (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less, + (Self::Invalid, _) => unreachable!(), } } } @@ -2229,8 +2283,8 @@ impl<'a> Into for &'a Operation { .ranges .iter() .map(|r| proto::Range { - start: r.start as u64, - end: r.end as u64, + start: r.start.to_proto(), + end: r.end.to_proto(), }) .collect(), counts: undo @@ -2281,8 +2335,8 @@ impl<'a> Into for &'a EditOperation { .ranges .iter() .map(|range| proto::Range { - start: range.start as u64, - end: range.end as u64, + start: range.start.to_proto(), + end: range.end.to_proto(), }) .collect(); proto::operation::Edit { @@ -2300,7 +2354,7 @@ impl<'a> Into for &'a Anchor { fn into(self) -> proto::Anchor { proto::Anchor { version: (&self.version).into(), - offset: self.full_offset as u64, + offset: self.full_offset.to_proto(), bias: match self.bias { Bias::Left => proto::anchor::Bias::Left as i32, Bias::Right => proto::anchor::Bias::Right as i32, @@ -2356,7 +2410,7 @@ impl TryFrom for Operation { ranges: undo .ranges .into_iter() - .map(|r| r.start as usize..r.end as usize) + .map(|r| FullOffset::from_proto(r.start)..FullOffset::from_proto(r.end)) .collect(), version: undo.version.into(), }, @@ -2406,7 +2460,7 @@ impl From for EditOperation { let ranges = edit .ranges .into_iter() - .map(|range| range.start as usize..range.end as usize) + .map(|range| FullOffset::from_proto(range.start)..FullOffset::from_proto(range.end)) .collect(); EditOperation { 
timestamp: InsertionTimestamp { @@ -2434,7 +2488,7 @@ impl TryFrom for Anchor { } Ok(Self { - full_offset: message.offset as usize, + full_offset: FullOffset::from_proto(message.offset), bias: if message.bias == proto::anchor::Bias::Left as i32 { Bias::Left } else if message.bias == proto::anchor::Bias::Right as i32 { @@ -2470,12 +2524,12 @@ impl TryFrom for Selection { pub trait ToOffset { fn to_offset<'a>(&self, content: impl Into>) -> usize; - fn to_full_offset<'a>(&self, content: impl Into>, bias: Bias) -> usize { + fn to_full_offset<'a>(&self, content: impl Into>, bias: Bias) -> FullOffset { let content = content.into(); let offset = self.to_offset(&content); let mut cursor = content.fragments.cursor::(); cursor.seek(&offset, bias, &None); - offset + cursor.start().deleted + FullOffset(offset + cursor.start().deleted) } } From 99772489263c18b973452c41b80078983cbc8ca4 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Wed, 27 Oct 2021 18:58:07 -0600 Subject: [PATCH 27/61] Adjust disk-based diagnostics based on edits since the last save Still need to add tests... not sure if this is right yet. Co-Authored-By: Max Brunsfeld --- crates/buffer/src/lib.rs | 3 +++ crates/language/src/language.rs | 8 +++---- crates/language/src/lib.rs | 42 +++++++++++++++++++++++++++++---- 3 files changed, 45 insertions(+), 8 deletions(-) diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index 00fe10ad337747db222ef1007eb06b32155e2ea0..40610fcbc3fd1293937c0097679503666d2a3e54 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -332,6 +332,7 @@ pub struct Edit { pub old_bytes: Range, pub new_bytes: Range, pub old_lines: Range, + pub new_lines: Range, } impl Edit { @@ -2014,6 +2015,7 @@ impl<'a, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { old_bytes: self.old_offset..self.old_offset, new_bytes: self.new_offset..self.new_offset + fragment.len, old_lines: self.old_point..self.old_point, + new_lines: self.new_point..self.new_point + fragment_lines, }); } @@ -2035,6 +2037,7 @@ impl<'a, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { old_bytes: self.old_offset..self.old_offset + fragment.len, new_bytes: self.new_offset..self.new_offset, old_lines: self.old_point..self.old_point + &fragment_lines, + new_lines: self.new_point..self.new_point, }); } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index b6ff132881c0df34cc4543617b88f834332d3012..8517f3c2a0adec661e18ad50d2efd612e0d31d96 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -3,7 +3,7 @@ use anyhow::Result; use gpui::AppContext; use parking_lot::Mutex; use serde::Deserialize; -use std::{path::Path, str, sync::Arc}; +use std::{collections::HashSet, path::Path, str, sync::Arc}; use theme::SyntaxTheme; use tree_sitter::{Language as Grammar, Query}; pub use tree_sitter::{Parser, Tree}; @@ -19,7 +19,7 @@ pub struct LanguageConfig { #[derive(Deserialize)] pub struct LanguageServerConfig { pub binary: String, - pub disk_based_diagnostic_sources: Vec, + pub disk_based_diagnostic_sources: HashSet, } #[derive(Clone, Debug, Deserialize)] @@ -130,11 +130,11 @@ impl Language { } } - pub fn disk_based_diagnostic_sources(&self) -> &[String] { + pub fn disk_based_diagnostic_sources(&self) -> Option<&HashSet> { self.config .language_server .as_ref() - .map_or(&[], |config| &config.disk_based_diagnostic_sources) + .map(|config| &config.disk_based_diagnostic_sources) } pub fn brackets(&self) -> &[BracketPair] { diff --git 
a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 866e182af3411749608a98ba6cf6716f16e3d2a3..cae05b983b5f339821b3076a5b7c052116c0db62 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -661,18 +661,52 @@ impl Buffer { } else { self.content() }; + + let empty_set = HashSet::new(); + let disk_based_sources = self + .language + .as_ref() + .and_then(|language| language.disk_based_diagnostic_sources()) + .unwrap_or(&empty_set); + + let mut edits_since_save = self.text.edits_since(self.saved_version.clone()).peekable(); + let mut last_edit_old_end = Point::zero(); + let mut last_edit_new_end = Point::zero(); + self.diagnostics = content.anchor_range_multimap( Bias::Left, Bias::Right, - diagnostics.into_iter().map(|diagnostic| { + diagnostics.into_iter().filter_map(|diagnostic| { // TODO: Use UTF-16 positions. - let start = Point::new( + let mut start = Point::new( diagnostic.range.start.line, diagnostic.range.start.character, ); - let end = Point::new(diagnostic.range.end.line, diagnostic.range.end.character); + let mut end = Point::new(diagnostic.range.end.line, diagnostic.range.end.character); let severity = diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR); - (start..end, (severity, diagnostic.message)) + + if diagnostic + .source + .as_ref() + .map_or(false, |source| disk_based_sources.contains(source)) + { + while let Some(edit) = edits_since_save.peek() { + if edit.old_lines.end <= start { + last_edit_old_end = edit.old_lines.end; + last_edit_new_end = edit.new_lines.end; + edits_since_save.next(); + } else if edit.old_lines.start <= end && edit.old_lines.end >= start { + return None; + } else { + break; + } + } + + start = last_edit_new_end + (start - last_edit_old_end); + end = last_edit_new_end + (end - last_edit_old_end); + } + + Some((start..end, (severity, diagnostic.message))) }), ); From fcb217b9e8a7c2bd147dc0aeea8f2d41361d783d Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 28 Oct 2021 11:32:10 +0200 Subject: [PATCH 28/61] Report `new_lines` extent correctly when coalescing edits --- crates/buffer/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index 40610fcbc3fd1293937c0097679503666d2a3e54..0ea890003774a33644e61b59904f07bcadb21634 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -2007,6 +2007,7 @@ impl<'a, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { if let Some(ref mut change) = change { if change.new_bytes.end == self.new_offset { change.new_bytes.end += fragment.len; + change.new_lines.end += fragment_lines; } else { break; } From ac76706aa7e58b0781cdc073668a8c0df13d579d Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 28 Oct 2021 11:35:33 +0200 Subject: [PATCH 29/61] Sort LSP diagnostics by (start, end) --- crates/language/src/lib.rs | 80 ++++++++++++++++++++------------------ 1 file changed, 42 insertions(+), 38 deletions(-) diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index cae05b983b5f339821b3076a5b7c052116c0db62..3547c65a3fe35000d89828cd8ed7d4840162a734 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -647,7 +647,7 @@ impl Buffer { pub fn update_diagnostics( &mut self, version: Option, - diagnostics: Vec, + mut diagnostics: Vec, cx: &mut ModelContext, ) -> Result<()> { let version = version.map(|version| version as usize); @@ -669,46 +669,50 @@ impl Buffer { .and_then(|language| language.disk_based_diagnostic_sources()) .unwrap_or(&empty_set); - let 
mut edits_since_save = self.text.edits_since(self.saved_version.clone()).peekable(); - let mut last_edit_old_end = Point::zero(); - let mut last_edit_new_end = Point::zero(); - - self.diagnostics = content.anchor_range_multimap( - Bias::Left, - Bias::Right, - diagnostics.into_iter().filter_map(|diagnostic| { - // TODO: Use UTF-16 positions. - let mut start = Point::new( - diagnostic.range.start.line, - diagnostic.range.start.character, - ); - let mut end = Point::new(diagnostic.range.end.line, diagnostic.range.end.character); - let severity = diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR); - - if diagnostic - .source - .as_ref() - .map_or(false, |source| disk_based_sources.contains(source)) - { - while let Some(edit) = edits_since_save.peek() { - if edit.old_lines.end <= start { - last_edit_old_end = edit.old_lines.end; - last_edit_new_end = edit.new_lines.end; - edits_since_save.next(); - } else if edit.old_lines.start <= end && edit.old_lines.end >= start { - return None; - } else { - break; + diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end)); + self.diagnostics = { + let mut edits_since_save = content.edits_since(self.saved_version.clone()).peekable(); + let mut last_edit_old_end = Point::zero(); + let mut last_edit_new_end = Point::zero(); + + content.anchor_range_multimap( + Bias::Left, + Bias::Right, + diagnostics.into_iter().filter_map(|diagnostic| { + // TODO: Use UTF-16 positions. + let mut start = Point::new( + diagnostic.range.start.line, + diagnostic.range.start.character, + ); + let mut end = + Point::new(diagnostic.range.end.line, diagnostic.range.end.character); + let severity = diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR); + + if diagnostic + .source + .as_ref() + .map_or(false, |source| disk_based_sources.contains(source)) + { + while let Some(edit) = edits_since_save.peek() { + if edit.old_lines.end <= start { + last_edit_old_end = edit.old_lines.end; + last_edit_new_end = edit.new_lines.end; + edits_since_save.next(); + } else if edit.old_lines.start <= end && edit.old_lines.end >= start { + return None; + } else { + break; + } } - } - start = last_edit_new_end + (start - last_edit_old_end); - end = last_edit_new_end + (end - last_edit_old_end); - } + start = last_edit_new_end + (start - last_edit_old_end); + end = last_edit_new_end + (end - last_edit_old_end); + } - Some((start..end, (severity, diagnostic.message))) - }), - ); + Some((start..end, (severity, diagnostic.message))) + }), + ) + }; if let Some(version) = version { let language_server = self.language_server.as_mut().unwrap(); From 7091e0c567ebf8c4c6dd6f7f3204434bad9a9957 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 28 Oct 2021 11:37:24 +0200 Subject: [PATCH 30/61] Add a unit test for disk-based diagnostics --- crates/language/src/tests.rs | 104 ++++++++++++++++++++++++++++------- 1 file changed, 84 insertions(+), 20 deletions(-) diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index d44e1f85b523e44423e5bfa072554ff88eb946c8..1798db9d0f50a2aa042b9f63241fa5d53b0ca0ee 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -1,6 +1,7 @@ use super::*; +use crate::language::LanguageServerConfig; use gpui::{ModelHandle, MutableAppContext}; -use std::rc::Rc; +use std::{iter::FromIterator, rc::Rc}; use unindent::Unindent as _; #[gpui::test] @@ -426,10 +427,14 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .receive_notification::() .await; - buffer.update(&mut cx, |buffer, cx| { - // Edit the buffer, 
moving the content down - buffer.edit([0..0], "\n\n", cx); + // Edit the buffer, moving the content down + buffer.update(&mut cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx)); + let change_notification_1 = fake + .receive_notification::() + .await; + assert!(change_notification_1.text_document.version > open_notification.text_document.version); + buffer.update(&mut cx, |buffer, cx| { // Receive diagnostics for an earlier version of the buffer. buffer .update_diagnostics( @@ -551,25 +556,80 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { ("\n".to_string(), None), ] ); + }); - fn chunks_with_diagnostics( - buffer: &Buffer, - range: Range, - ) -> Vec<(String, Option)> { - let mut chunks: Vec<(String, Option)> = Vec::new(); - for chunk in buffer.snapshot().highlighted_text_for_range(range) { - if chunks - .last() - .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic) - { - chunks.last_mut().unwrap().0.push_str(chunk.text); - } else { - chunks.push((chunk.text.to_string(), chunk.diagnostic)); - } + // Keep editing the buffer and ensure disk-based diagnostics get translated according to the + // changes since the last save. + buffer.update(&mut cx, |buffer, cx| { + buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx); + buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx); + }); + let change_notification_2 = fake + .receive_notification::() + .await; + assert!( + change_notification_2.text_document.version > change_notification_1.text_document.version + ); + + buffer.update(&mut cx, |buffer, cx| { + buffer + .update_diagnostics( + Some(change_notification_2.text_document.version), + vec![ + lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)), + severity: Some(lsp::DiagnosticSeverity::ERROR), + message: "undefined variable 'BB'".to_string(), + source: Some("rustc".to_string()), + ..Default::default() + }, + lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), + severity: Some(lsp::DiagnosticSeverity::ERROR), + message: "undefined variable 'A'".to_string(), + source: Some("rustc".to_string()), + ..Default::default() + }, + ], + cx, + ) + .unwrap(); + assert_eq!( + buffer + .diagnostics_in_range(0..buffer.len()) + .collect::>(), + &[ + Diagnostic { + range: Point::new(2, 21)..Point::new(2, 22), + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'A'".to_string() + }, + Diagnostic { + range: Point::new(3, 9)..Point::new(3, 11), + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'BB'".to_string() + }, + ] + ); + }); + + fn chunks_with_diagnostics( + buffer: &Buffer, + range: Range, + ) -> Vec<(String, Option)> { + let mut chunks: Vec<(String, Option)> = Vec::new(); + for chunk in buffer.snapshot().highlighted_text_for_range(range) { + if chunks + .last() + .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic) + { + chunks.last_mut().unwrap().0.push_str(chunk.text); + } else { + chunks.push((chunk.text.to_string(), chunk.diagnostic)); } - chunks } - }); + chunks + } } #[test] @@ -605,6 +665,10 @@ fn rust_lang() -> Option> { LanguageConfig { name: "Rust".to_string(), path_suffixes: vec!["rs".to_string()], + language_server: Some(LanguageServerConfig { + binary: "rust-analyzer".to_string(), + disk_based_diagnostic_sources: HashSet::from_iter(vec!["rustc".to_string()]), + }), ..Default::default() }, tree_sitter_rust::language(), From ee78d6f17b1eb679eb9dd667a44dea1e1b53f987 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra 
Date: Thu, 28 Oct 2021 13:40:08 +0200 Subject: [PATCH 31/61] Express multi-cursor edits using the new coordinate space The language server expects that ranges further in the list of edits account for the impact of prior changes in the edit list. --- crates/language/src/lib.rs | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 3547c65a3fe35000d89828cd8ed7d4840162a734..5e5a27e35a7bbc476f681a7b68875b7d29b9c71b 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -363,16 +363,18 @@ impl Buffer { prev_snapshot.buffer_snapshot.version().clone(), ) .map(|edit| { + // TODO: Use UTF-16 positions. + let edit_start = edit.new_lines.start; + let edit_end = edit_start + edit.deleted_lines(); lsp::TextDocumentContentChangeEvent { - // TODO: Use UTF-16 positions. range: Some(lsp::Range::new( lsp::Position::new( - edit.old_lines.start.row, - edit.old_lines.start.column, + edit_start.row, + edit_start.column, ), lsp::Position::new( - edit.old_lines.end.row, - edit.old_lines.end.column, + edit_end.row, + edit_end.column, ), )), range_length: None, From 50afb2d65f7f1208fb75395f74ac5a76fd751d2c Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 28 Oct 2021 14:07:14 +0200 Subject: [PATCH 32/61] Remove stray println! statements in the LSP I/O code --- crates/lsp/src/lib.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index 88fd5a4bc747ed903b25c8410d1f7a2ed713b136..642595c5adb0255e9a1193b5ad841ebaaa5d8007 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -135,7 +135,6 @@ impl LanguageServer { buffer.resize(message_len, 0); stdout.read_exact(&mut buffer).await?; - println!("{}", std::str::from_utf8(&buffer).unwrap()); if let Ok(AnyNotification { method, params }) = serde_json::from_slice(&buffer) { @@ -179,7 +178,6 @@ impl LanguageServer { content_len_buffer.clear(); let message = outbound_rx.recv().await?; - println!("{}", std::str::from_utf8(&message).unwrap()); write!(content_len_buffer, "{}", message.len()).unwrap(); stdin.write_all(CONTENT_LEN_HEADER.as_bytes()).await?; stdin.write_all(&content_len_buffer).await?; From d12387b7539c0d47adbca3927ae9ed6b3fb1aef7 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 28 Oct 2021 18:26:32 +0200 Subject: [PATCH 33/61] Ensure start endpoints always come before end endpoints Co-Authored-By: Max Brunsfeld --- crates/language/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 5e5a27e35a7bbc476f681a7b68875b7d29b9c71b..351d6840b327a3df77bce95a363530dec4ab29d9 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -1553,7 +1553,7 @@ impl Snapshot { severity: *severity, }); } - diagnostic_endpoints.sort_unstable_by_key(|endpoint| endpoint.offset); + diagnostic_endpoints.sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start)); let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable(); let chunks = self.text.as_rope().chunks_in_range(range.clone()); From ce8741977b775c5a85e2bfeb032887083690147f Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 28 Oct 2021 19:02:26 +0200 Subject: [PATCH 34/61] Clip points coming from language server This avoids panicking in Zed if the points they give us are invalid. 
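A minimal sketch of the clipping idea, assuming a plain &str instead of Zed's Rope/Content types (the real clip_point in the diff below also honors Bias and UTF-8 character boundaries):

    // Hypothetical helper, not the actual Content::clip_point: clamp an
    // LSP-style (row, column) position to the text we actually have.
    fn clip_point(text: &str, row: u32, column: u32) -> (u32, u32) {
        let lines: Vec<&str> = text.split('\n').collect();
        let row = (row as usize).min(lines.len().saturating_sub(1));
        let column = (column as usize).min(lines[row].len());
        (row as u32, column as u32)
    }

    fn main() {
        // A diagnostic that points past the end of the buffer becomes the
        // closest valid position instead of tripping an assertion later on.
        assert_eq!(clip_point("fn main() {}\n", 9, 42), (1, 0));
    }
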
Co-Authored-By: Max Brunsfeld --- crates/buffer/src/lib.rs | 6 +++++- crates/language/src/lib.rs | 4 +++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index 0ea890003774a33644e61b59904f07bcadb21634..2fbd050bc8e47e1a093e3cb9e89d1283532cac4a 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -570,7 +570,7 @@ impl Buffer { } pub fn clip_point(&self, point: Point, bias: Bias) -> Point { - self.visible_text.clip_point(point, bias) + self.content().clip_point(point, bias) } pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { @@ -1899,6 +1899,10 @@ impl<'a> Content<'a> { FullOffset(summary.visible + summary.deleted + overshoot) } + pub fn clip_point(&self, point: Point, bias: Bias) -> Point { + self.visible_text.clip_point(point, bias) + } + fn point_for_offset(&self, offset: usize) -> Result { if offset <= self.len() { Ok(self.text_summary_for_range(0..offset).lines) diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 351d6840b327a3df77bce95a363530dec4ab29d9..57540982d82cd811f184733d4b4dc4073807774a 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -711,7 +711,9 @@ impl Buffer { end = last_edit_new_end + (end - last_edit_old_end); } - Some((start..end, (severity, diagnostic.message))) + let range = + content.clip_point(start, Bias::Left)..content.clip_point(end, Bias::Right); + Some((range, (severity, diagnostic.message))) }), ) }; From 2cbb313467743cc78bcd062af355a4cfcce90d67 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 28 Oct 2021 17:12:17 -0700 Subject: [PATCH 35/61] Avoid panic when rust language isn't present (project unit tests) --- crates/project/src/lib.rs | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/crates/project/src/lib.rs b/crates/project/src/lib.rs index 552974bc69538935bdefb04d0daa3418999ef316..458e7bf3637e342218a276ebf5839d73e64ff7b8 100644 --- a/crates/project/src/lib.rs +++ b/crates/project/src/lib.rs @@ -75,18 +75,11 @@ impl Project { let path = Arc::from(abs_path); let language_server = languages .get_language("Rust") - .unwrap() - .start_server(&path, cx); + .map(|language| language.start_server(&path, cx)); cx.spawn(|this, mut cx| async move { - let worktree = Worktree::open_local( - rpc, - path, - fs, - languages, - language_server.log_err().flatten(), - &mut cx, - ) - .await?; + let language_server = language_server.and_then(|language| language.log_err().flatten()); + let worktree = + Worktree::open_local(rpc, path, fs, languages, language_server, &mut cx).await?; this.update(&mut cx, |this, cx| { this.add_worktree(worktree.clone(), cx); }); From a82a12fd1498d0d850ca9c5ce76e9a11eb0f5671 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 29 Oct 2021 11:41:07 +0200 Subject: [PATCH 36/61] Bundle fat-binary for rust-analyzer --- script/bundle | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/script/bundle b/script/bundle index 7b1929d112b4fc22f41bee86160cab0a8977cca8..540c280cee2c1754e149f3639e1978bd29959d45 100755 --- a/script/bundle +++ b/script/bundle @@ -19,8 +19,7 @@ cargo build --release --target aarch64-apple-darwin lipo -create target/x86_64-apple-darwin/release/Zed target/aarch64-apple-darwin/release/Zed -output target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/MacOS/zed # Bundle rust-analyzer -cp vendor/bin/rust-analyzer-x86_64-apple-darwin target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/Resources/ -cp 
vendor/bin/rust-analyzer-aarch64-apple-darwin target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/Resources/ +cp vendor/bin/rust-analyzer target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/Resources/ # Sign the app bundle with an ad-hoc signature so it runs on the M1. We need a real certificate but this works for now. if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTARIZATION_USERNAME && -n $APPLE_NOTARIZATION_PASSWORD ]]; then From 78c158e1a446a09cf62408dea12370821a61329d Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 29 Oct 2021 12:02:42 +0200 Subject: [PATCH 37/61] Support only UTF-8 for now when communicating with language server --- crates/lsp/Cargo.toml | 2 +- crates/lsp/src/lib.rs | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index 263eed76fb9d515e0194835a94bcf9c79c08d909..08c48b7ec35ca951f9cceac1bdbe8e593fcb0f18 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -13,7 +13,7 @@ anyhow = "1.0" async-pipe = { git = "https://github.com/routerify/async-pipe-rs", rev = "feeb77e83142a9ff837d0767652ae41bfc5d8e47", optional = true } futures = "0.3" log = "0.4" -lsp-types = "0.91" +lsp-types = { version = "0.91", features = ["proposed"] } parking_lot = "0.11" postage = { version = "0.4.1", features = ["futures-traits"] } serde = { version = "1.0", features = ["derive"] } diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index 642595c5adb0255e9a1193b5ad841ebaaa5d8007..1d6ed4439460fdb99840ffed825cf9cf966776fa 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -226,6 +226,7 @@ impl LanguageServer { experimental: Some(json!({ "serverStatusNotification": true, })), + offset_encoding: Some(vec!["utf-8".to_string()]), ..Default::default() }, trace: Default::default(), From 9c1b01521a8edc83088083b4dba141f27b64f1e0 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 29 Oct 2021 14:41:41 +0200 Subject: [PATCH 38/61] Avoid unnecessary conversion to offsets in `diagnostics_in_range` --- crates/language/src/lib.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 520358cd1721ee72e441cba29a3abf11e4d52898..96ca701fb90083779a60481bf33fc7789037c582 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -735,12 +735,11 @@ impl Buffer { Ok(()) } - pub fn diagnostics_in_range<'a, T: ToOffset>( + pub fn diagnostics_in_range<'a, T: 'a + ToOffset>( &'a self, range: Range, ) -> impl Iterator + 'a { let content = self.content(); - let range = range.start.to_offset(&content)..range.end.to_offset(&content); self.diagnostics .intersecting_ranges(range, content, true) .map(move |(_, range, (severity, message))| Diagnostic { From 6212ebad9b980f46fc38b688dcea38fb308d1378 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 29 Oct 2021 18:31:21 +0200 Subject: [PATCH 39/61] Communicate with language servers in terms of UTF-16 coordinates This required indexing UTF-16 positions in `Rope`. We tried opting into the UTF-8 experimental support but it didn't seem to work correctly and the standard is UTF-16 anyway. 
Co-Authored-By: Nathan Sobo --- crates/buffer/Cargo.toml | 2 +- crates/buffer/src/lib.rs | 184 +++++++-------- crates/buffer/src/point.rs | 28 +-- crates/buffer/src/point_utf16.rs | 111 +++++++++ crates/buffer/src/rope.rs | 261 ++++++++++++++++++++-- crates/buffer/src/tests.rs | 17 +- crates/editor/src/display_map/fold_map.rs | 104 ++++----- crates/language/src/lib.rs | 67 +++--- crates/lsp/Cargo.toml | 2 +- crates/lsp/src/lib.rs | 1 - crates/sum_tree/src/cursor.rs | 4 + 11 files changed, 562 insertions(+), 219 deletions(-) create mode 100644 crates/buffer/src/point_utf16.rs diff --git a/crates/buffer/Cargo.toml b/crates/buffer/Cargo.toml index e4112c20d5a4c8ecf95d697ecdc2412a92d4b5d6..f6d949c05f47fbc0305d1ab8790f311b06f253ca 100644 --- a/crates/buffer/Cargo.toml +++ b/crates/buffer/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "buffer" version = "0.1.0" -edition = "2018" +edition = "2021" [features] test-support = ["rand", "seahash"] diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index a8d4981ed755ce64108e22d54889bb244b02addd..301cc1478c649401c761f990e743024a6387100f 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -1,6 +1,7 @@ mod anchor; mod operation_queue; mod point; +mod point_utf16; #[cfg(any(test, feature = "test-support"))] pub mod random_char_iter; pub mod rope; @@ -13,8 +14,10 @@ use anyhow::{anyhow, Result}; use clock::ReplicaId; use operation_queue::OperationQueue; pub use point::*; +pub use point_utf16::*; #[cfg(any(test, feature = "test-support"))] pub use random_char_iter::*; +use rope::TextDimension; pub use rope::{Chunks, Rope, TextSummary}; use rpc::proto; pub use selection::*; @@ -309,41 +312,34 @@ impl UndoMap { } } -struct Edits<'a, F: FnMut(&FragmentSummary) -> bool> { - visible_text: &'a Rope, - deleted_text: &'a Rope, - cursor: Option>, +struct Edits<'a, D: TextDimension<'a>, F: FnMut(&FragmentSummary) -> bool> { + visible_cursor: rope::Cursor<'a>, + deleted_cursor: rope::Cursor<'a>, + fragments_cursor: Option>, undos: &'a UndoMap, since: clock::Global, - old_offset: usize, - new_offset: usize, - old_point: Point, - new_point: Point, + old_end: D, + new_end: D, } #[derive(Clone, Debug, Default, Eq, PartialEq)] -pub struct Edit { - pub old_bytes: Range, - pub new_bytes: Range, - pub old_lines: Range, - pub new_lines: Range, +pub struct Edit { + pub old: Range, + pub new: Range, } -impl Edit { - pub fn delta(&self) -> isize { - self.inserted_bytes() as isize - self.deleted_bytes() as isize - } - - pub fn deleted_bytes(&self) -> usize { - self.old_bytes.end - self.old_bytes.start - } - - pub fn inserted_bytes(&self) -> usize { - self.new_bytes.end - self.new_bytes.start - } - - pub fn deleted_lines(&self) -> Point { - self.old_lines.end - self.old_lines.start +impl Edit<(D1, D2)> { + pub fn flatten(self) -> (Edit, Edit) { + ( + Edit { + old: self.old.start.0..self.old.end.0, + new: self.new.start.0..self.new.end.0, + }, + Edit { + old: self.old.start.1..self.old.end.1, + new: self.new.start.1..self.new.end.1, + }, + ) } } @@ -1369,7 +1365,10 @@ impl Buffer { }) } - pub fn edits_since<'a>(&'a self, since: clock::Global) -> impl 'a + Iterator { + pub fn edits_since<'a, D>(&'a self, since: clock::Global) -> impl 'a + Iterator> + where + D: 'a + TextDimension<'a> + Ord, + { self.content().edits_since(since) } } @@ -1589,11 +1588,11 @@ impl Snapshot { } pub fn to_offset(&self, point: Point) -> usize { - self.visible_text.to_offset(point) + self.visible_text.point_to_offset(point) } pub fn to_point(&self, offset: usize) -> Point { - 
self.visible_text.to_point(offset) + self.visible_text.offset_to_point(offset) } pub fn anchor_before(&self, position: T) -> Anchor { @@ -1604,7 +1603,10 @@ impl Snapshot { self.content().anchor_at(position, Bias::Right) } - pub fn edits_since<'a>(&'a self, since: clock::Global) -> impl 'a + Iterator { + pub fn edits_since<'a, D>(&'a self, since: clock::Global) -> impl 'a + Iterator> + where + D: 'a + TextDimension<'a> + Ord, + { self.content().edits_since(since) } @@ -1756,7 +1758,7 @@ impl<'a> Content<'a> { } else { 0 }; - summary += rope_cursor.summary(cursor.start().1 + overshoot); + summary += rope_cursor.summary::(cursor.start().1 + overshoot); (summary.clone(), value) }) } @@ -1785,7 +1787,7 @@ impl<'a> Content<'a> { } else { 0 }; - summary += rope_cursor.summary(cursor.start().1 + overshoot); + summary += rope_cursor.summary::(cursor.start().1 + overshoot); let start_summary = summary.clone(); cursor.seek_forward(&VersionedFullOffset::Offset(*end_offset), *end_bias, &cx); @@ -1794,7 +1796,7 @@ impl<'a> Content<'a> { } else { 0 }; - summary += rope_cursor.summary(cursor.start().1 + overshoot); + summary += rope_cursor.summary::(cursor.start().1 + overshoot); let end_summary = summary.clone(); (start_summary..end_summary, value) @@ -1921,6 +1923,10 @@ impl<'a> Content<'a> { self.visible_text.clip_point(point, bias) } + pub fn clip_point_utf16(&self, point: PointUtf16, bias: Bias) -> PointUtf16 { + self.visible_text.clip_point_utf16(point, bias) + } + fn point_for_offset(&self, offset: usize) -> Result { if offset <= self.len() { Ok(self.text_summary_for_range(0..offset).lines) @@ -1930,9 +1936,12 @@ impl<'a> Content<'a> { } // TODO: take a reference to clock::Global. - pub fn edits_since(&self, since: clock::Global) -> impl 'a + Iterator { + pub fn edits_since(&self, since: clock::Global) -> impl 'a + Iterator> + where + D: 'a + TextDimension<'a> + Ord, + { let since_2 = since.clone(); - let cursor = if since == *self.version { + let fragments_cursor = if since == *self.version { None } else { Some(self.fragments.filter( @@ -1942,15 +1951,13 @@ impl<'a> Content<'a> { }; Edits { - visible_text: &self.visible_text, - deleted_text: &self.deleted_text, - cursor, + visible_cursor: self.visible_text.cursor(0), + deleted_cursor: self.deleted_text.cursor(0), + fragments_cursor, undos: &self.undo_map, since, - old_offset: 0, - new_offset: 0, - old_point: Point::zero(), - new_point: Point::zero(), + old_end: Default::default(), + new_end: Default::default(), } } } @@ -2008,70 +2015,61 @@ impl<'a> RopeBuilder<'a> { } } -impl<'a, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { - type Item = Edit; +impl<'a, D: TextDimension<'a> + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator + for Edits<'a, D, F> +{ + type Item = Edit; fn next(&mut self) -> Option { - let mut change: Option = None; - let cursor = self.cursor.as_mut()?; + let mut pending_edit: Option> = None; + let cursor = self.fragments_cursor.as_mut()?; while let Some(fragment) = cursor.item() { - let bytes = cursor.start().visible - self.new_offset; - let lines = self.visible_text.to_point(cursor.start().visible) - self.new_point; - self.old_offset += bytes; - self.old_point += &lines; - self.new_offset += bytes; - self.new_point += &lines; + let summary = self.visible_cursor.summary(cursor.start().visible); + self.old_end.add_assign(&summary); + self.new_end.add_assign(&summary); + if pending_edit + .as_ref() + .map_or(false, |change| change.new.end < self.new_end) + { + break; + } if !fragment.was_visible(&self.since, 
&self.undos) && fragment.visible { - let fragment_lines = - self.visible_text.to_point(self.new_offset + fragment.len) - self.new_point; - if let Some(ref mut change) = change { - if change.new_bytes.end == self.new_offset { - change.new_bytes.end += fragment.len; - change.new_lines.end += fragment_lines; - } else { - break; - } + let fragment_summary = self.visible_cursor.summary(cursor.end(&None).visible); + let mut new_end = self.new_end.clone(); + new_end.add_assign(&fragment_summary); + if let Some(pending_edit) = pending_edit.as_mut() { + pending_edit.new.end = new_end.clone(); } else { - change = Some(Edit { - old_bytes: self.old_offset..self.old_offset, - new_bytes: self.new_offset..self.new_offset + fragment.len, - old_lines: self.old_point..self.old_point, - new_lines: self.new_point..self.new_point + fragment_lines, + pending_edit = Some(Edit { + old: self.old_end.clone()..self.old_end.clone(), + new: self.new_end.clone()..new_end.clone(), }); } - self.new_offset += fragment.len; - self.new_point += &fragment_lines; + self.new_end = new_end; } else if fragment.was_visible(&self.since, &self.undos) && !fragment.visible { - let deleted_start = cursor.start().deleted; - let fragment_lines = self.deleted_text.to_point(deleted_start + fragment.len) - - self.deleted_text.to_point(deleted_start); - if let Some(ref mut change) = change { - if change.new_bytes.end == self.new_offset { - change.old_bytes.end += fragment.len; - change.old_lines.end += &fragment_lines; - } else { - break; - } + self.deleted_cursor.seek_forward(cursor.start().deleted); + let fragment_summary = self.deleted_cursor.summary(cursor.end(&None).deleted); + let mut old_end = self.old_end.clone(); + old_end.add_assign(&fragment_summary); + if let Some(pending_edit) = pending_edit.as_mut() { + pending_edit.old.end = old_end.clone(); } else { - change = Some(Edit { - old_bytes: self.old_offset..self.old_offset + fragment.len, - new_bytes: self.new_offset..self.new_offset, - old_lines: self.old_point..self.old_point + &fragment_lines, - new_lines: self.new_point..self.new_point, + pending_edit = Some(Edit { + old: self.old_end.clone()..old_end.clone(), + new: self.new_end.clone()..self.new_end.clone(), }); } - self.old_offset += fragment.len; - self.old_point += &fragment_lines; + self.old_end = old_end; } cursor.next(&None); } - change + pending_edit } } @@ -2531,7 +2529,13 @@ pub trait ToOffset { impl ToOffset for Point { fn to_offset<'a>(&self, content: impl Into>) -> usize { - content.into().visible_text.to_offset(*self) + content.into().visible_text.point_to_offset(*self) + } +} + +impl ToOffset for PointUtf16 { + fn to_offset<'a>(&self, content: impl Into>) -> usize { + content.into().visible_text.point_utf16_to_offset(*self) } } @@ -2566,7 +2570,7 @@ impl ToPoint for Anchor { impl ToPoint for usize { fn to_point<'a>(&self, content: impl Into>) -> Point { - content.into().visible_text.to_point(*self) + content.into().visible_text.offset_to_point(*self) } } diff --git a/crates/buffer/src/point.rs b/crates/buffer/src/point.rs index a2da4e4f6ce245a1cf7198f7fa1bae0f1d622fe6..5e62176956cfb378089b465e6778425cc40ec183 100644 --- a/crates/buffer/src/point.rs +++ b/crates/buffer/src/point.rs @@ -32,11 +32,7 @@ impl<'a> Add<&'a Self> for Point { type Output = Point; fn add(self, other: &'a Self) -> Self::Output { - if other.row == 0 { - Point::new(self.row, self.column + other.column) - } else { - Point::new(self.row + other.row, other.column) - } + self + *other } } @@ -44,7 +40,11 @@ impl Add for Point { type Output = 
Point; fn add(self, other: Self) -> Self::Output { - self + &other + if other.row == 0 { + Point::new(self.row, self.column + other.column) + } else { + Point::new(self.row + other.row, other.column) + } } } @@ -52,13 +52,7 @@ impl<'a> Sub<&'a Self> for Point { type Output = Point; fn sub(self, other: &'a Self) -> Self::Output { - debug_assert!(*other <= self); - - if self.row == other.row { - Point::new(0, self.column - other.column) - } else { - Point::new(self.row - other.row, self.column) - } + self - *other } } @@ -66,7 +60,13 @@ impl Sub for Point { type Output = Point; fn sub(self, other: Self) -> Self::Output { - self - &other + debug_assert!(other <= self); + + if self.row == other.row { + Point::new(0, self.column - other.column) + } else { + Point::new(self.row - other.row, self.column) + } } } diff --git a/crates/buffer/src/point_utf16.rs b/crates/buffer/src/point_utf16.rs new file mode 100644 index 0000000000000000000000000000000000000000..22b895a2c009b0d38ee8b82c9d1e5f1401578b8d --- /dev/null +++ b/crates/buffer/src/point_utf16.rs @@ -0,0 +1,111 @@ +use std::{ + cmp::Ordering, + ops::{Add, AddAssign, Sub}, +}; + +#[derive(Clone, Copy, Default, Eq, PartialEq, Debug, Hash)] +pub struct PointUtf16 { + pub row: u32, + pub column: u32, +} + +impl PointUtf16 { + pub const MAX: Self = Self { + row: u32::MAX, + column: u32::MAX, + }; + + pub fn new(row: u32, column: u32) -> Self { + PointUtf16 { row, column } + } + + pub fn zero() -> Self { + PointUtf16::new(0, 0) + } + + pub fn is_zero(&self) -> bool { + self.row == 0 && self.column == 0 + } +} + +impl<'a> Add<&'a Self> for PointUtf16 { + type Output = PointUtf16; + + fn add(self, other: &'a Self) -> Self::Output { + self + *other + } +} + +impl Add for PointUtf16 { + type Output = PointUtf16; + + fn add(self, other: Self) -> Self::Output { + if other.row == 0 { + PointUtf16::new(self.row, self.column + other.column) + } else { + PointUtf16::new(self.row + other.row, other.column) + } + } +} + +impl<'a> Sub<&'a Self> for PointUtf16 { + type Output = PointUtf16; + + fn sub(self, other: &'a Self) -> Self::Output { + self - *other + } +} + +impl Sub for PointUtf16 { + type Output = PointUtf16; + + fn sub(self, other: Self) -> Self::Output { + debug_assert!(other <= self); + + if self.row == other.row { + PointUtf16::new(0, self.column - other.column) + } else { + PointUtf16::new(self.row - other.row, self.column) + } + } +} + +impl<'a> AddAssign<&'a Self> for PointUtf16 { + fn add_assign(&mut self, other: &'a Self) { + *self += *other; + } +} + +impl AddAssign for PointUtf16 { + fn add_assign(&mut self, other: Self) { + if other.row == 0 { + self.column += other.column; + } else { + self.row += other.row; + self.column = other.column; + } + } +} + +impl PartialOrd for PointUtf16 { + fn partial_cmp(&self, other: &PointUtf16) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for PointUtf16 { + #[cfg(target_pointer_width = "64")] + fn cmp(&self, other: &PointUtf16) -> Ordering { + let a = (self.row as usize) << 32 | self.column as usize; + let b = (other.row as usize) << 32 | other.column as usize; + a.cmp(&b) + } + + #[cfg(target_pointer_width = "32")] + fn cmp(&self, other: &PointUtf16) -> Ordering { + match self.row.cmp(&other.row) { + Ordering::Equal => self.column.cmp(&other.column), + comparison @ _ => comparison, + } + } +} diff --git a/crates/buffer/src/rope.rs b/crates/buffer/src/rope.rs index a1c57140025c0d8465a908118f8be0c168d85100..3cf43bd16025f408ad16dfc79181ad64dbc49a89 100644 --- a/crates/buffer/src/rope.rs +++ 
b/crates/buffer/src/rope.rs @@ -1,8 +1,10 @@ +use crate::PointUtf16; + use super::Point; use arrayvec::ArrayString; use smallvec::SmallVec; use std::{cmp, ops::Range, str}; -use sum_tree::{Bias, SumTree}; +use sum_tree::{Bias, Dimension, SumTree}; #[cfg(test)] const CHUNK_BASE: usize = 6; @@ -136,7 +138,7 @@ impl Rope { Chunks::new(self, range, true) } - pub fn to_point(&self, offset: usize) -> Point { + pub fn offset_to_point(&self, offset: usize) -> Point { assert!(offset <= self.summary().bytes); let mut cursor = self.chunks.cursor::<(usize, Point)>(); cursor.seek(&offset, Bias::Left, &()); @@ -144,15 +146,40 @@ impl Rope { cursor.start().1 + cursor .item() - .map_or(Point::zero(), |chunk| chunk.to_point(overshoot)) + .map_or(Point::zero(), |chunk| chunk.offset_to_point(overshoot)) + } + + pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 { + assert!(offset <= self.summary().bytes); + let mut cursor = self.chunks.cursor::<(usize, PointUtf16)>(); + cursor.seek(&offset, Bias::Left, &()); + let overshoot = offset - cursor.start().0; + cursor.start().1 + + cursor.item().map_or(PointUtf16::zero(), |chunk| { + chunk.offset_to_point_utf16(overshoot) + }) } - pub fn to_offset(&self, point: Point) -> usize { + pub fn point_to_offset(&self, point: Point) -> usize { assert!(point <= self.summary().lines); let mut cursor = self.chunks.cursor::<(Point, usize)>(); cursor.seek(&point, Bias::Left, &()); let overshoot = point - cursor.start().0; - cursor.start().1 + cursor.item().map_or(0, |chunk| chunk.to_offset(overshoot)) + cursor.start().1 + + cursor + .item() + .map_or(0, |chunk| chunk.point_to_offset(overshoot)) + } + + pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize { + assert!(point <= self.summary().lines_utf16); + let mut cursor = self.chunks.cursor::<(PointUtf16, usize)>(); + cursor.seek(&point, Bias::Left, &()); + let overshoot = point - cursor.start().0; + cursor.start().1 + + cursor + .item() + .map_or(0, |chunk| chunk.point_utf16_to_offset(overshoot)) } pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize { @@ -188,6 +215,17 @@ impl Rope { self.summary().lines } } + + pub fn clip_point_utf16(&self, point: PointUtf16, bias: Bias) -> PointUtf16 { + let mut cursor = self.chunks.cursor::(); + cursor.seek(&point, Bias::Right, &()); + if let Some(chunk) = cursor.item() { + let overshoot = point - cursor.start(); + *cursor.start() + chunk.clip_point_utf16(overshoot, bias) + } else { + self.summary().lines_utf16 + } + } } impl<'a> From<&'a str> for Rope { @@ -258,22 +296,24 @@ impl<'a> Cursor<'a> { slice } - pub fn summary(&mut self, end_offset: usize) -> TextSummary { + pub fn summary>(&mut self, end_offset: usize) -> D { debug_assert!(end_offset >= self.offset); - let mut summary = TextSummary::default(); + let mut summary = D::default(); if let Some(start_chunk) = self.chunks.item() { let start_ix = self.offset - self.chunks.start(); let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); - summary = TextSummary::from(&start_chunk.0[start_ix..end_ix]); + summary.add_assign(&D::from_summary(&TextSummary::from( + &start_chunk.0[start_ix..end_ix], + ))); } if end_offset > self.chunks.end(&()) { self.chunks.next(&()); - summary += &self.chunks.summary(&end_offset, Bias::Right, &()); + summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right, &())); if let Some(end_chunk) = self.chunks.item() { let end_ix = end_offset - self.chunks.start(); - summary += TextSummary::from(&end_chunk.0[..end_ix]); + 
summary.add_assign(&D::from_summary(&TextSummary::from(&end_chunk.0[..end_ix]))); } } @@ -375,7 +415,7 @@ impl<'a> Iterator for Chunks<'a> { struct Chunk(ArrayString<{ 2 * CHUNK_BASE }>); impl Chunk { - fn to_point(&self, target: usize) -> Point { + fn offset_to_point(&self, target: usize) -> Point { let mut offset = 0; let mut point = Point::new(0, 0); for ch in self.0.chars() { @@ -394,7 +434,26 @@ impl Chunk { point } - fn to_offset(&self, target: Point) -> usize { + fn offset_to_point_utf16(&self, target: usize) -> PointUtf16 { + let mut offset = 0; + let mut point = PointUtf16::new(0, 0); + for ch in self.0.chars() { + if offset >= target { + break; + } + + if ch == '\n' { + point.row += 1; + point.column = 0; + } else { + point.column += ch.len_utf16() as u32; + } + offset += ch.len_utf8(); + } + point + } + + fn point_to_offset(&self, target: Point) -> usize { let mut offset = 0; let mut point = Point::new(0, 0); for ch in self.0.chars() { @@ -416,6 +475,28 @@ impl Chunk { offset } + fn point_utf16_to_offset(&self, target: PointUtf16) -> usize { + let mut offset = 0; + let mut point = PointUtf16::new(0, 0); + for ch in self.0.chars() { + if point >= target { + if point > target { + panic!("point {:?} is inside of character {:?}", target, ch); + } + break; + } + + if ch == '\n' { + point.row += 1; + point.column = 0; + } else { + point.column += ch.len_utf16() as u32; + } + offset += ch.len_utf8(); + } + offset + } + fn clip_point(&self, target: Point, bias: Bias) -> Point { for (row, line) in self.0.split('\n').enumerate() { if row == target.row as usize { @@ -431,6 +512,23 @@ impl Chunk { } unreachable!() } + + fn clip_point_utf16(&self, target: PointUtf16, bias: Bias) -> PointUtf16 { + for (row, line) in self.0.split('\n').enumerate() { + if row == target.row as usize { + let mut code_units = line.encode_utf16(); + let mut column = code_units.by_ref().take(target.column as usize).count(); + if char::decode_utf16(code_units).next().transpose().is_err() { + match bias { + Bias::Left => column -= 1, + Bias::Right => column += 1, + } + } + return PointUtf16::new(row as u32, column as u32); + } + } + unreachable!() + } } impl sum_tree::Item for Chunk { @@ -445,6 +543,7 @@ impl sum_tree::Item for Chunk { pub struct TextSummary { pub bytes: usize, pub lines: Point, + pub lines_utf16: PointUtf16, pub first_line_chars: u32, pub last_line_chars: u32, pub longest_row: u32, @@ -454,17 +553,19 @@ pub struct TextSummary { impl<'a> From<&'a str> for TextSummary { fn from(text: &'a str) -> Self { let mut lines = Point::new(0, 0); + let mut lines_utf16 = PointUtf16::new(0, 0); let mut first_line_chars = 0; let mut last_line_chars = 0; let mut longest_row = 0; let mut longest_row_chars = 0; for c in text.chars() { if c == '\n' { - lines.row += 1; - lines.column = 0; + lines += Point::new(1, 0); + lines_utf16 += PointUtf16::new(1, 0); last_line_chars = 0; } else { lines.column += c.len_utf8() as u32; + lines_utf16.column += c.len_utf16() as u32; last_line_chars += 1; } @@ -481,6 +582,7 @@ impl<'a> From<&'a str> for TextSummary { TextSummary { bytes: text.len(), lines, + lines_utf16, first_line_chars, last_line_chars, longest_row, @@ -520,7 +622,8 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary { } self.bytes += other.bytes; - self.lines += &other.lines; + self.lines += other.lines; + self.lines_utf16 += other.lines_utf16; } } @@ -530,15 +633,77 @@ impl std::ops::AddAssign for TextSummary { } } +pub trait TextDimension<'a>: Dimension<'a, TextSummary> { + fn from_summary(summary: 
&TextSummary) -> Self; + fn add_assign(&mut self, other: &Self); +} + +impl<'a, D1: TextDimension<'a>, D2: TextDimension<'a>> TextDimension<'a> for (D1, D2) { + fn from_summary(summary: &TextSummary) -> Self { + (D1::from_summary(summary), D2::from_summary(summary)) + } + + fn add_assign(&mut self, other: &Self) { + self.0.add_assign(&other.0); + self.1.add_assign(&other.1); + } +} + +impl<'a> TextDimension<'a> for TextSummary { + fn from_summary(summary: &TextSummary) -> Self { + summary.clone() + } + + fn add_assign(&mut self, other: &Self) { + *self += other; + } +} + impl<'a> sum_tree::Dimension<'a, TextSummary> for usize { fn add_summary(&mut self, summary: &'a TextSummary, _: &()) { *self += summary.bytes; } } +impl<'a> TextDimension<'a> for usize { + fn from_summary(summary: &TextSummary) -> Self { + summary.bytes + } + + fn add_assign(&mut self, other: &Self) { + *self += other; + } +} + impl<'a> sum_tree::Dimension<'a, TextSummary> for Point { fn add_summary(&mut self, summary: &'a TextSummary, _: &()) { - *self += &summary.lines; + *self += summary.lines; + } +} + +impl<'a> TextDimension<'a> for Point { + fn from_summary(summary: &TextSummary) -> Self { + summary.lines + } + + fn add_assign(&mut self, other: &Self) { + *self += other; + } +} + +impl<'a> sum_tree::Dimension<'a, TextSummary> for PointUtf16 { + fn add_summary(&mut self, summary: &'a TextSummary, _: &()) { + *self += summary.lines_utf16; + } +} + +impl<'a> TextDimension<'a> for PointUtf16 { + fn from_summary(summary: &TextSummary) -> Self { + summary.lines_utf16 + } + + fn add_assign(&mut self, other: &Self) { + *self += other; } } @@ -577,6 +742,41 @@ mod tests { assert_eq!(rope.text(), text); } + #[test] + fn test_clip() { + let rope = Rope::from("🧘"); + + assert_eq!(rope.clip_offset(1, Bias::Left), 0); + assert_eq!(rope.clip_offset(1, Bias::Right), 4); + assert_eq!(rope.clip_offset(5, Bias::Right), 4); + + assert_eq!( + rope.clip_point(Point::new(0, 1), Bias::Left), + Point::new(0, 0) + ); + assert_eq!( + rope.clip_point(Point::new(0, 1), Bias::Right), + Point::new(0, 4) + ); + assert_eq!( + rope.clip_point(Point::new(0, 5), Bias::Right), + Point::new(0, 4) + ); + + assert_eq!( + rope.clip_point_utf16(PointUtf16::new(0, 1), Bias::Left), + PointUtf16::new(0, 0) + ); + assert_eq!( + rope.clip_point_utf16(PointUtf16::new(0, 1), Bias::Right), + PointUtf16::new(0, 2) + ); + assert_eq!( + rope.clip_point_utf16(PointUtf16::new(0, 3), Bias::Right), + PointUtf16::new(0, 2) + ); + } + #[gpui::test(iterations = 100)] fn test_random(mut rng: StdRng) { let operations = env::var("OPERATIONS") @@ -624,14 +824,33 @@ mod tests { } let mut point = Point::new(0, 0); + let mut point_utf16 = PointUtf16::new(0, 0); for (ix, ch) in expected.char_indices().chain(Some((expected.len(), '\0'))) { - assert_eq!(actual.to_point(ix), point, "to_point({})", ix); - assert_eq!(actual.to_offset(point), ix, "to_offset({:?})", point); + assert_eq!(actual.offset_to_point(ix), point, "offset_to_point({})", ix); + assert_eq!( + actual.offset_to_point_utf16(ix), + point_utf16, + "offset_to_point_utf16({})", + ix + ); + assert_eq!( + actual.point_to_offset(point), + ix, + "point_to_offset({:?})", + point + ); + assert_eq!( + actual.point_utf16_to_offset(point_utf16), + ix, + "point_utf16_to_offset({:?})", + point_utf16 + ); if ch == '\n' { - point.row += 1; - point.column = 0 + point += Point::new(1, 0); + point_utf16 += PointUtf16::new(1, 0); } else { point.column += ch.len_utf8() as u32; + point_utf16.column += ch.len_utf16() as u32; } } @@ -639,7 
+858,7 @@ mod tests { let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right); let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left); assert_eq!( - actual.cursor(start_ix).summary(end_ix), + actual.cursor(start_ix).summary::(end_ix), TextSummary::from(&expected[start_ix..end_ix]) ); } diff --git a/crates/buffer/src/tests.rs b/crates/buffer/src/tests.rs index bce08ebf738925a31b19541186669cc0b6ac6f8f..5cbc36a8f51901a11be89c7514410d57eaad6a6c 100644 --- a/crates/buffer/src/tests.rs +++ b/crates/buffer/src/tests.rs @@ -78,7 +78,7 @@ fn test_random_edits(mut rng: StdRng) { for mut old_buffer in buffer_versions { let edits = buffer - .edits_since(old_buffer.version.clone()) + .edits_since::(old_buffer.version.clone()) .collect::>(); log::info!( @@ -88,12 +88,12 @@ fn test_random_edits(mut rng: StdRng) { edits, ); - let mut delta = 0_isize; for edit in edits { - let old_start = (edit.old_bytes.start as isize + delta) as usize; - let new_text: String = buffer.text_for_range(edit.new_bytes.clone()).collect(); - old_buffer.edit(Some(old_start..old_start + edit.deleted_bytes()), new_text); - delta += edit.delta(); + let new_text: String = buffer.text_for_range(edit.new.clone()).collect(); + old_buffer.edit( + Some(edit.new.start..edit.new.start + edit.old.len()), + new_text, + ); } assert_eq!(old_buffer.text(), buffer.text()); } @@ -123,6 +123,7 @@ fn test_text_summary_for_range() { TextSummary { bytes: 2, lines: Point::new(1, 0), + lines_utf16: PointUtf16::new(1, 0), first_line_chars: 1, last_line_chars: 0, longest_row: 0, @@ -134,6 +135,7 @@ fn test_text_summary_for_range() { TextSummary { bytes: 11, lines: Point::new(3, 0), + lines_utf16: PointUtf16::new(3, 0), first_line_chars: 1, last_line_chars: 0, longest_row: 2, @@ -145,6 +147,7 @@ fn test_text_summary_for_range() { TextSummary { bytes: 20, lines: Point::new(4, 1), + lines_utf16: PointUtf16::new(4, 1), first_line_chars: 2, last_line_chars: 1, longest_row: 3, @@ -156,6 +159,7 @@ fn test_text_summary_for_range() { TextSummary { bytes: 22, lines: Point::new(4, 3), + lines_utf16: PointUtf16::new(4, 3), first_line_chars: 2, last_line_chars: 3, longest_row: 3, @@ -167,6 +171,7 @@ fn test_text_summary_for_range() { TextSummary { bytes: 15, lines: Point::new(2, 3), + lines_utf16: PointUtf16::new(2, 3), first_line_chars: 4, last_line_chars: 3, longest_row: 1, diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index a2e6b150e7f81857860529f9ba19c241260aaf25..5ff2d3db6b35ea22943c44d12faf907624707bfd 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1,6 +1,7 @@ use gpui::{AppContext, ModelHandle}; use language::{ - Anchor, AnchorRangeExt, Buffer, HighlightId, HighlightedChunk, Point, TextSummary, ToOffset, + Anchor, AnchorRangeExt, Buffer, HighlightId, HighlightedChunk, Point, PointUtf16, TextSummary, + ToOffset, }; use parking_lot::Mutex; use std::{ @@ -112,9 +113,8 @@ impl<'a> FoldMapWriter<'a> { let fold = Fold(buffer.anchor_after(range.start)..buffer.anchor_before(range.end)); folds.push(fold); edits.push(buffer::Edit { - old_bytes: range.clone(), - new_bytes: range.clone(), - ..Default::default() + old: range.clone(), + new: range, }); } } @@ -157,9 +157,8 @@ impl<'a> FoldMapWriter<'a> { while let Some(fold) = folds_cursor.item() { let offset_range = fold.0.start.to_offset(&buffer)..fold.0.end.to_offset(&buffer); edits.push(buffer::Edit { - old_bytes: offset_range.clone(), - new_bytes: offset_range, - 
..Default::default() + old: offset_range.clone(), + new: offset_range, }); fold_ixs_to_delete.push(*folds_cursor.start()); folds_cursor.next(&buffer); @@ -288,7 +287,11 @@ impl FoldMap { } } - fn apply_edits(&self, buffer_edits: Vec, cx: &AppContext) -> Vec { + fn apply_edits( + &self, + buffer_edits: Vec>, + cx: &AppContext, + ) -> Vec { let buffer = self.buffer.read(cx).snapshot(); let mut buffer_edits_iter = buffer_edits.iter().cloned().peekable(); @@ -298,28 +301,28 @@ impl FoldMap { cursor.seek(&0, Bias::Right, &()); while let Some(mut edit) = buffer_edits_iter.next() { - new_transforms.push_tree(cursor.slice(&edit.old_bytes.start, Bias::Left, &()), &()); - edit.new_bytes.start -= edit.old_bytes.start - cursor.start(); - edit.old_bytes.start = *cursor.start(); + new_transforms.push_tree(cursor.slice(&edit.old.start, Bias::Left, &()), &()); + edit.new.start -= edit.old.start - cursor.start(); + edit.old.start = *cursor.start(); - cursor.seek(&edit.old_bytes.end, Bias::Right, &()); + cursor.seek(&edit.old.end, Bias::Right, &()); cursor.next(&()); - let mut delta = edit.delta(); + let mut delta = edit.new.len() as isize - edit.old.len() as isize; loop { - edit.old_bytes.end = *cursor.start(); + edit.old.end = *cursor.start(); if let Some(next_edit) = buffer_edits_iter.peek() { - if next_edit.old_bytes.start > edit.old_bytes.end { + if next_edit.old.start > edit.old.end { break; } let next_edit = buffer_edits_iter.next().unwrap(); - delta += next_edit.delta(); + delta += next_edit.new.len() as isize - next_edit.old.len() as isize; - if next_edit.old_bytes.end >= edit.old_bytes.end { - edit.old_bytes.end = next_edit.old_bytes.end; - cursor.seek(&edit.old_bytes.end, Bias::Right, &()); + if next_edit.old.end >= edit.old.end { + edit.old.end = next_edit.old.end; + cursor.seek(&edit.old.end, Bias::Right, &()); cursor.next(&()); } } else { @@ -327,10 +330,9 @@ impl FoldMap { } } - edit.new_bytes.end = - ((edit.new_bytes.start + edit.deleted_bytes()) as isize + delta) as usize; + edit.new.end = ((edit.new.start + edit.old.len()) as isize + delta) as usize; - let anchor = buffer.anchor_before(edit.new_bytes.start); + let anchor = buffer.anchor_before(edit.new.start); let mut folds_cursor = self.folds.cursor::(); folds_cursor.seek(&Fold(anchor..Anchor::max()), Bias::Left, &buffer); @@ -346,10 +348,7 @@ impl FoldMap { }) .peekable(); - while folds - .peek() - .map_or(false, |fold| fold.start < edit.new_bytes.end) - { + while folds.peek().map_or(false, |fold| fold.start < edit.new.end) { let mut fold = folds.next().unwrap(); let sum = new_transforms.summary(); @@ -382,13 +381,15 @@ impl FoldMap { if fold.end > fold.start { let output_text = "…"; let chars = output_text.chars().count() as u32; - let lines = super::Point::new(0, output_text.len() as u32); + let lines = Point::new(0, output_text.len() as u32); + let lines_utf16 = PointUtf16::new(0, output_text.encode_utf16().count() as u32); new_transforms.push( Transform { summary: TransformSummary { output: TextSummary { bytes: output_text.len(), lines, + lines_utf16, first_line_chars: chars, last_line_chars: chars, longest_row: 0, @@ -404,9 +405,8 @@ impl FoldMap { } let sum = new_transforms.summary(); - if sum.input.bytes < edit.new_bytes.end { - let text_summary = - buffer.text_summary_for_range(sum.input.bytes..edit.new_bytes.end); + if sum.input.bytes < edit.new.end { + let text_summary = buffer.text_summary_for_range(sum.input.bytes..edit.new.end); new_transforms.push( Transform { summary: TransformSummary { @@ -443,35 +443,35 @@ impl FoldMap { 
let mut new_transforms = new_transforms.cursor::<(usize, FoldOffset)>(); for mut edit in buffer_edits { - old_transforms.seek(&edit.old_bytes.start, Bias::Left, &()); + old_transforms.seek(&edit.old.start, Bias::Left, &()); if old_transforms.item().map_or(false, |t| t.is_fold()) { - edit.old_bytes.start = old_transforms.start().0; + edit.old.start = old_transforms.start().0; } let old_start = - old_transforms.start().1 .0 + (edit.old_bytes.start - old_transforms.start().0); + old_transforms.start().1 .0 + (edit.old.start - old_transforms.start().0); - old_transforms.seek_forward(&edit.old_bytes.end, Bias::Right, &()); + old_transforms.seek_forward(&edit.old.end, Bias::Right, &()); if old_transforms.item().map_or(false, |t| t.is_fold()) { old_transforms.next(&()); - edit.old_bytes.end = old_transforms.start().0; + edit.old.end = old_transforms.start().0; } let old_end = - old_transforms.start().1 .0 + (edit.old_bytes.end - old_transforms.start().0); + old_transforms.start().1 .0 + (edit.old.end - old_transforms.start().0); - new_transforms.seek(&edit.new_bytes.start, Bias::Left, &()); + new_transforms.seek(&edit.new.start, Bias::Left, &()); if new_transforms.item().map_or(false, |t| t.is_fold()) { - edit.new_bytes.start = new_transforms.start().0; + edit.new.start = new_transforms.start().0; } let new_start = - new_transforms.start().1 .0 + (edit.new_bytes.start - new_transforms.start().0); + new_transforms.start().1 .0 + (edit.new.start - new_transforms.start().0); - new_transforms.seek_forward(&edit.new_bytes.end, Bias::Right, &()); + new_transforms.seek_forward(&edit.new.end, Bias::Right, &()); if new_transforms.item().map_or(false, |t| t.is_fold()) { new_transforms.next(&()); - edit.new_bytes.end = new_transforms.start().0; + edit.new.end = new_transforms.start().0; } let new_end = - new_transforms.start().1 .0 + (edit.new_bytes.end - new_transforms.start().0); + new_transforms.start().1 .0 + (edit.new.end - new_transforms.start().0); fold_edits.push(FoldEdit { old_bytes: FoldOffset(old_start)..FoldOffset(old_end), @@ -748,22 +748,22 @@ where ) } -fn consolidate_buffer_edits(edits: &mut Vec) { +fn consolidate_buffer_edits(edits: &mut Vec>) { edits.sort_unstable_by(|a, b| { - a.old_bytes + a.old .start - .cmp(&b.old_bytes.start) - .then_with(|| b.old_bytes.end.cmp(&a.old_bytes.end)) + .cmp(&b.old.start) + .then_with(|| b.old.end.cmp(&a.old.end)) }); let mut i = 1; while i < edits.len() { let edit = edits[i].clone(); let prev_edit = &mut edits[i - 1]; - if prev_edit.old_bytes.end >= edit.old_bytes.start { - prev_edit.old_bytes.end = prev_edit.old_bytes.end.max(edit.old_bytes.end); - prev_edit.new_bytes.start = prev_edit.new_bytes.start.min(edit.new_bytes.start); - prev_edit.new_bytes.end = prev_edit.new_bytes.end.max(edit.new_bytes.end); + if prev_edit.old.end >= edit.old.start { + prev_edit.old.end = prev_edit.old.end.max(edit.old.end); + prev_edit.new.start = prev_edit.new.start.min(edit.new.start); + prev_edit.new.end = prev_edit.new.end.max(edit.new.end); edits.remove(i); continue; } @@ -1343,7 +1343,9 @@ mod tests { let start_version = buffer.version.clone(); let edit_count = rng.gen_range(1..=5); buffer.randomly_edit(&mut rng, edit_count); - buffer.edits_since(start_version).collect::>() + buffer + .edits_since::(start_version) + .collect::>() }); log::info!("editing {:?}", edits); } diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 96ca701fb90083779a60481bf33fc7789037c582..0b48b698a4f6e18f722ecc822319abec95fafe97 100644 --- a/crates/language/src/lib.rs +++ 
b/crates/language/src/lib.rs @@ -359,13 +359,19 @@ impl Buffer { ), content_changes: snapshot .buffer_snapshot - .edits_since( + .edits_since::<(PointUtf16, usize)>( prev_snapshot.buffer_snapshot.version().clone(), ) .map(|edit| { - // TODO: Use UTF-16 positions. - let edit_start = edit.new_lines.start; - let edit_end = edit_start + edit.deleted_lines(); + let edit_start = edit.new.start.0; + let edit_end = edit_start + + (edit.old.end.0 - edit.old.start.0); + let new_text = snapshot + .buffer_snapshot + .text_for_range( + edit.new.start.1..edit.new.end.1, + ) + .collect(); lsp::TextDocumentContentChangeEvent { range: Some(lsp::Range::new( lsp::Position::new( @@ -378,10 +384,7 @@ impl Buffer { ), )), range_length: None, - text: snapshot - .buffer_snapshot - .text_for_range(edit.new_bytes) - .collect(), + text: new_text, } }) .collect(), @@ -613,22 +616,17 @@ impl Buffer { } fn interpolate_tree(&self, tree: &mut SyntaxTree) { - let mut delta = 0_isize; - for edit in self.edits_since(tree.version.clone()) { - let start_offset = (edit.old_bytes.start as isize + delta) as usize; - let start_point = self.as_rope().to_point(start_offset); + for edit in self.edits_since::<(usize, Point)>(tree.version.clone()) { + let (bytes, lines) = edit.flatten(); tree.tree.edit(&InputEdit { - start_byte: start_offset, - old_end_byte: start_offset + edit.deleted_bytes(), - new_end_byte: start_offset + edit.inserted_bytes(), - start_position: start_point.to_ts_point(), - old_end_position: (start_point + edit.deleted_lines()).to_ts_point(), - new_end_position: self - .as_rope() - .to_point(start_offset + edit.inserted_bytes()) + start_byte: bytes.new.start, + old_end_byte: bytes.new.start + bytes.old.len(), + new_end_byte: bytes.new.end, + start_position: lines.new.start.to_ts_point(), + old_end_position: (lines.new.start + (lines.old.end - lines.old.start)) .to_ts_point(), + new_end_position: lines.new.end.to_ts_point(), }); - delta += edit.inserted_bytes() as isize - edit.deleted_bytes() as isize; } tree.version = self.version(); } @@ -673,21 +671,22 @@ impl Buffer { diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end)); self.diagnostics = { - let mut edits_since_save = content.edits_since(self.saved_version.clone()).peekable(); - let mut last_edit_old_end = Point::zero(); - let mut last_edit_new_end = Point::zero(); + let mut edits_since_save = content + .edits_since::(self.saved_version.clone()) + .peekable(); + let mut last_edit_old_end = PointUtf16::zero(); + let mut last_edit_new_end = PointUtf16::zero(); content.anchor_range_multimap( Bias::Left, Bias::Right, diagnostics.into_iter().filter_map(|diagnostic| { - // TODO: Use UTF-16 positions. 
- let mut start = Point::new( + let mut start = PointUtf16::new( diagnostic.range.start.line, diagnostic.range.start.character, ); let mut end = - Point::new(diagnostic.range.end.line, diagnostic.range.end.character); + PointUtf16::new(diagnostic.range.end.line, diagnostic.range.end.character); let severity = diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR); if diagnostic @@ -696,11 +695,11 @@ impl Buffer { .map_or(false, |source| disk_based_sources.contains(source)) { while let Some(edit) = edits_since_save.peek() { - if edit.old_lines.end <= start { - last_edit_old_end = edit.old_lines.end; - last_edit_new_end = edit.new_lines.end; + if edit.old.end <= start { + last_edit_old_end = edit.old.end; + last_edit_new_end = edit.new.end; edits_since_save.next(); - } else if edit.old_lines.start <= end && edit.old_lines.end >= start { + } else if edit.old.start <= end && edit.old.end >= start { return None; } else { break; @@ -711,8 +710,8 @@ impl Buffer { end = last_edit_new_end + (end - last_edit_old_end); } - let range = - content.clip_point(start, Bias::Left)..content.clip_point(end, Bias::Right); + let range = content.clip_point_utf16(start, Bias::Left) + ..content.clip_point_utf16(end, Bias::Right); Some((range, (severity, diagnostic.message))) }), ) @@ -1223,7 +1222,7 @@ impl Buffer { was_dirty: bool, cx: &mut ModelContext, ) { - if self.edits_since(old_version).next().is_none() { + if self.edits_since::(old_version).next().is_none() { return; } diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index 08c48b7ec35ca951f9cceac1bdbe8e593fcb0f18..263eed76fb9d515e0194835a94bcf9c79c08d909 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -13,7 +13,7 @@ anyhow = "1.0" async-pipe = { git = "https://github.com/routerify/async-pipe-rs", rev = "feeb77e83142a9ff837d0767652ae41bfc5d8e47", optional = true } futures = "0.3" log = "0.4" -lsp-types = { version = "0.91", features = ["proposed"] } +lsp-types = "0.91" parking_lot = "0.11" postage = { version = "0.4.1", features = ["futures-traits"] } serde = { version = "1.0", features = ["derive"] } diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index 1d6ed4439460fdb99840ffed825cf9cf966776fa..642595c5adb0255e9a1193b5ad841ebaaa5d8007 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -226,7 +226,6 @@ impl LanguageServer { experimental: Some(json!({ "serverStatusNotification": true, })), - offset_encoding: Some(vec!["utf-8".to_string()]), ..Default::default() }, trace: Default::default(), diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 324cf0eadb5773c6a8597f30b13bedcefddb202f..7799bb2ff004f65c168a56505fdaac5b40492221 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -537,6 +537,10 @@ where self.cursor.start() } + pub fn end(&self, cx: &::Context) -> D { + self.cursor.end(cx) + } + pub fn item(&self) -> Option<&'a T> { self.cursor.item() } From b571eae4f3051974326f5d587b5a9d2fd6869fb8 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 29 Oct 2021 19:09:19 +0200 Subject: [PATCH 40/61] Extend empty diagnostic ranges by one character Co-Authored-By: Nathan Sobo --- crates/language/src/lib.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 0b48b698a4f6e18f722ecc822319abec95fafe97..fd7f8d44b112fc112a1a43b35774cb60668aa10d 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -710,8 +710,12 @@ impl Buffer { end = last_edit_new_end 
+ (end - last_edit_old_end); } - let range = content.clip_point_utf16(start, Bias::Left) + let mut range = content.clip_point_utf16(start, Bias::Left) ..content.clip_point_utf16(end, Bias::Right); + if range.start == range.end { + range.end.column += 1; + range.end = content.clip_point_utf16(range.end, Bias::Right); + } Some((range, (severity, diagnostic.message))) }), ) From 3228a553298e5ed11fd4586ca2bce9b74a9d61e3 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Sat, 30 Oct 2021 07:40:20 -0600 Subject: [PATCH 41/61] Fix test-support feature propagation for editor and workspace --- crates/zed/Cargo.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 53718d5a69ba7c71428e02498e6242da2852d22e..1a536ef73f5b7db5df85ffb2cad53e32e52ae7a6 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -17,11 +17,13 @@ path = "src/main.rs" test-support = [ "buffer/test-support", "client/test-support", + "editor/test-support", "gpui/test-support", "language/test-support", "project/test-support", "rpc/test-support", "tempdir", + "workspace/test-support", ] [dependencies] From f59be5fecfa252800fd725fad7ad2580170365f0 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 1 Nov 2021 10:03:33 +0100 Subject: [PATCH 42/61] Always notify when receiving buffer operations We had changed it to only emit a notification when the buffer was actually edited, but we also want to notify when we receive non-edit operations, such as a selection update. --- crates/language/src/lib.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index fd7f8d44b112fc112a1a43b35774cb60668aa10d..02744ceb792cd555f8bd6b208d50fa5e5a740b98 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -1299,6 +1299,9 @@ impl Buffer { let old_version = self.version.clone(); self.text.apply_ops(ops)?; self.did_edit(old_version, was_dirty, cx); + // Notify independently of whether the buffer was edited as the operations could include a + // selection update. + cx.notify(); Ok(()) } From 2919cbe9cbb2de632e507da58f23b90105ae80f4 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 1 Nov 2021 10:30:10 +0100 Subject: [PATCH 43/61] Try signing rust-analyzer in order to embed it in the app bundle --- script/bundle | 1 + 1 file changed, 1 insertion(+) diff --git a/script/bundle b/script/bundle index 540c280cee2c1754e149f3639e1978bd29959d45..7b02b063059f7974d84c5acc8f3aa1242c48854f 100755 --- a/script/bundle +++ b/script/bundle @@ -31,6 +31,7 @@ if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTAR security import /tmp/zed-certificate.p12 -k zed.keychain -P $MACOS_CERTIFICATE_PASSWORD -T /usr/bin/codesign rm /tmp/zed-certificate.p12 security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k $MACOS_CERTIFICATE_PASSWORD zed.keychain + /usr/bin/codesign --force --deep --timestamp --options runtime --sign "Zed Industries, Inc." target/x86_64-apple-darwin/release/bundle/osx/Zed.app/Contents/Resources/rust-analyzer -v /usr/bin/codesign --force --deep --timestamp --options runtime --sign "Zed Industries, Inc." 
target/x86_64-apple-darwin/release/bundle/osx/Zed.app -v security default-keychain -s login.keychain else From 6e5ec2a00d5ab8953c14382e04be0249f2918ed6 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 1 Nov 2021 10:48:20 +0100 Subject: [PATCH 44/61] Take a `&clock::Global` instead of cloning in `edits_since` --- crates/buffer/src/lib.rs | 20 ++++++++++++-------- crates/buffer/src/tests.rs | 2 +- crates/editor/src/display_map/fold_map.rs | 4 ++-- crates/language/src/lib.rs | 16 ++++++++-------- 4 files changed, 23 insertions(+), 19 deletions(-) diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index 301cc1478c649401c761f990e743024a6387100f..fad83e901ae5645717d7a1f8d11a314765e47053 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -317,7 +317,7 @@ struct Edits<'a, D: TextDimension<'a>, F: FnMut(&FragmentSummary) -> bool> { deleted_cursor: rope::Cursor<'a>, fragments_cursor: Option>, undos: &'a UndoMap, - since: clock::Global, + since: &'a clock::Global, old_end: D, new_end: D, } @@ -1365,7 +1365,10 @@ impl Buffer { }) } - pub fn edits_since<'a, D>(&'a self, since: clock::Global) -> impl 'a + Iterator> + pub fn edits_since<'a, D>( + &'a self, + since: &'a clock::Global, + ) -> impl 'a + Iterator> where D: 'a + TextDimension<'a> + Ord, { @@ -1603,7 +1606,10 @@ impl Snapshot { self.content().anchor_at(position, Bias::Right) } - pub fn edits_since<'a, D>(&'a self, since: clock::Global) -> impl 'a + Iterator> + pub fn edits_since<'a, D>( + &'a self, + since: &'a clock::Global, + ) -> impl 'a + Iterator> where D: 'a + TextDimension<'a> + Ord, { @@ -1935,17 +1941,15 @@ impl<'a> Content<'a> { } } - // TODO: take a reference to clock::Global. - pub fn edits_since(&self, since: clock::Global) -> impl 'a + Iterator> + pub fn edits_since(&self, since: &'a clock::Global) -> impl 'a + Iterator> where D: 'a + TextDimension<'a> + Ord, { - let since_2 = since.clone(); - let fragments_cursor = if since == *self.version { + let fragments_cursor = if since == self.version { None } else { Some(self.fragments.filter( - move |summary| summary.max_version.changed_since(&since_2), + move |summary| summary.max_version.changed_since(since), &None, )) }; diff --git a/crates/buffer/src/tests.rs b/crates/buffer/src/tests.rs index 5cbc36a8f51901a11be89c7514410d57eaad6a6c..68d6e6aa355a735f040ee3ce47a9c6190f29af21 100644 --- a/crates/buffer/src/tests.rs +++ b/crates/buffer/src/tests.rs @@ -78,7 +78,7 @@ fn test_random_edits(mut rng: StdRng) { for mut old_buffer in buffer_versions { let edits = buffer - .edits_since::(old_buffer.version.clone()) + .edits_since::(&old_buffer.version) .collect::>(); log::info!( diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 5ff2d3db6b35ea22943c44d12faf907624707bfd..8a5b4c55846e638f8326269d17f291723f5bb162 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -261,7 +261,7 @@ impl FoldMap { }, ); let edits = buffer - .edits_since(last_sync.version) + .edits_since(&last_sync.version) .map(Into::into) .collect::>(); if edits.is_empty() { @@ -1344,7 +1344,7 @@ mod tests { let edit_count = rng.gen_range(1..=5); buffer.randomly_edit(&mut rng, edit_count); buffer - .edits_since::(start_version) + .edits_since::(&start_version) .collect::>() }); log::info!("editing {:?}", edits); diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 02744ceb792cd555f8bd6b208d50fa5e5a740b98..2bbea109671ecb3d50581dd8f5b770c8a86534b1 100644 --- 
a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -360,7 +360,7 @@ impl Buffer { content_changes: snapshot .buffer_snapshot .edits_since::<(PointUtf16, usize)>( - prev_snapshot.buffer_snapshot.version().clone(), + prev_snapshot.buffer_snapshot.version(), ) .map(|edit| { let edit_start = edit.new.start.0; @@ -616,7 +616,7 @@ impl Buffer { } fn interpolate_tree(&self, tree: &mut SyntaxTree) { - for edit in self.edits_since::<(usize, Point)>(tree.version.clone()) { + for edit in self.edits_since::<(usize, Point)>(&tree.version) { let (bytes, lines) = edit.flatten(); tree.tree.edit(&InputEdit { start_byte: bytes.new.start, @@ -672,7 +672,7 @@ impl Buffer { diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end)); self.diagnostics = { let mut edits_since_save = content - .edits_since::(self.saved_version.clone()) + .edits_since::(&self.saved_version) .peekable(); let mut last_edit_old_end = PointUtf16::zero(); let mut last_edit_new_end = PointUtf16::zero(); @@ -1081,7 +1081,7 @@ impl Buffer { ) -> Result<()> { if let Some(start_version) = self.text.end_transaction_at(selection_set_ids, now) { let was_dirty = start_version != self.saved_version; - self.did_edit(start_version, was_dirty, cx); + self.did_edit(&start_version, was_dirty, cx); } Ok(()) } @@ -1222,7 +1222,7 @@ impl Buffer { fn did_edit( &mut self, - old_version: clock::Global, + old_version: &clock::Global, was_dirty: bool, cx: &mut ModelContext, ) { @@ -1298,7 +1298,7 @@ impl Buffer { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); self.text.apply_ops(ops)?; - self.did_edit(old_version, was_dirty, cx); + self.did_edit(&old_version, was_dirty, cx); // Notify independently of whether the buffer was edited as the operations could include a // selection update. 
cx.notify(); @@ -1330,7 +1330,7 @@ impl Buffer { self.send_operation(operation, cx); } - self.did_edit(old_version, was_dirty, cx); + self.did_edit(&old_version, was_dirty, cx); } pub fn redo(&mut self, cx: &mut ModelContext) { @@ -1341,7 +1341,7 @@ impl Buffer { self.send_operation(operation, cx); } - self.did_edit(old_version, was_dirty, cx); + self.did_edit(&old_version, was_dirty, cx); } } From b8994c2a897a4ff4f7f7e946fc9f550bba04adb8 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 1 Nov 2021 11:56:49 -0700 Subject: [PATCH 45/61] Add a facility for delaying quit until critical tasks finish Co-Authored-By: Antonio Scandurra --- Cargo.lock | 1 + crates/gpui/Cargo.toml | 1 + crates/gpui/src/app.rs | 12 +++++++ crates/gpui/src/executor.rs | 41 +++++++++++++++++++++--- crates/gpui/src/platform.rs | 1 + crates/gpui/src/platform/mac/platform.rs | 16 +++++++++ crates/gpui/src/platform/test.rs | 2 ++ 7 files changed, 70 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index accd0a093900ee80ef30d0cebf80b1b008b36b06..0e01793175a4ce5a3442c5b7471c09f0af8cbfde 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2309,6 +2309,7 @@ dependencies = [ "etagere", "font-kit", "foreign-types", + "futures", "gpui_macros", "image 0.23.14", "lazy_static", diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 5378e05b4cfe48579bb91a8c48338aa904d5b9c5..dc1edc654741f69e26d7a69e4957c5dcdd6a874f 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -15,6 +15,7 @@ backtrace = "0.3" ctor = "0.1" env_logger = { version = "0.8", optional = true } etagere = "0.2" +futures = "0.3" image = "0.23" lazy_static = "1.4.0" log = "0.4" diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 0cf10697387a905c8adc91cfc3b03038b4e72fd5..ed2759aa0ccb88939cfc63ea3f7e4a6a960be289 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -265,6 +265,18 @@ impl App { self } + pub fn on_quit(self, mut callback: F) -> Self + where + F: 'static + FnMut(&mut MutableAppContext), + { + let cx = self.0.clone(); + self.0 + .borrow_mut() + .foreground_platform + .on_quit(Box::new(move || callback(&mut *cx.borrow_mut()))); + self + } + pub fn on_event(self, mut callback: F) -> Self where F: 'static + FnMut(Event, &mut MutableAppContext) -> bool, diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index 32ee8fc87ffc15fe2d6617ef774a0d740ea26e40..5a2f244aa197fcec0f8eec756c2d9458cb0252f0 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -38,9 +38,13 @@ pub enum Foreground { } pub enum Background { - Deterministic(Arc), + Deterministic { + executor: Arc, + critical_tasks: Mutex>>, + }, Production { executor: Arc>, + critical_tasks: Mutex>>, _stop: channel::Sender<()>, }, } @@ -500,6 +504,7 @@ impl Background { Self::Production { executor, + critical_tasks: Default::default(), _stop: stop.0, } } @@ -516,11 +521,36 @@ impl Background { let future = any_future(future); let any_task = match self { Self::Production { executor, .. } => executor.spawn(future), - Self::Deterministic(executor) => executor.spawn(future), + Self::Deterministic { executor, .. } => executor.spawn(future), }; Task::send(any_task) } + pub fn spawn_critical(&self, future: F) + where + T: 'static + Send, + F: Send + Future + 'static, + { + let task = self.spawn(async move { + future.await; + }); + match self { + Self::Production { critical_tasks, .. } + | Self::Deterministic { critical_tasks, .. 
} => critical_tasks.lock().push(task), + } + } + + pub fn block_on_critical_tasks(&self, timeout: Duration) -> bool { + match self { + Background::Production { critical_tasks, .. } + | Self::Deterministic { critical_tasks, .. } => { + let tasks = mem::take(&mut *critical_tasks.lock()); + self.block_with_timeout(timeout, futures::future::join_all(tasks)) + .is_ok() + } + } + } + pub fn block_with_timeout( &self, timeout: Duration, @@ -534,7 +564,7 @@ impl Background { if !timeout.is_zero() { let output = match self { Self::Production { .. } => smol::block_on(util::timeout(timeout, &mut future)).ok(), - Self::Deterministic(executor) => executor.block_on(&mut future), + Self::Deterministic { executor, .. } => executor.block_on(&mut future), }; if let Some(output) = output { return Ok(*output.downcast().unwrap()); @@ -587,7 +617,10 @@ pub fn deterministic(seed: u64) -> (Rc, Arc) { let executor = Arc::new(Deterministic::new(seed)); ( Rc::new(Foreground::Deterministic(executor.clone())), - Arc::new(Background::Deterministic(executor)), + Arc::new(Background::Deterministic { + executor, + critical_tasks: Default::default(), + }), ) } diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index 6f776524c261eb0891d5093fb00edd7748bc3850..c0229102a00fe7f8588f2d2219aff5472438b6d6 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -60,6 +60,7 @@ pub trait Platform: Send + Sync { pub(crate) trait ForegroundPlatform { fn on_become_active(&self, callback: Box); fn on_resign_active(&self, callback: Box); + fn on_quit(&self, callback: Box); fn on_event(&self, callback: Box bool>); fn on_open_files(&self, callback: Box)>); fn run(&self, on_finish_launching: Box ()>); diff --git a/crates/gpui/src/platform/mac/platform.rs b/crates/gpui/src/platform/mac/platform.rs index 8a4dc8cdf9184a222f9673ffa0c3aee993c184af..9aec0b5c04ff88888388c23605ac6e9132138843 100644 --- a/crates/gpui/src/platform/mac/platform.rs +++ b/crates/gpui/src/platform/mac/platform.rs @@ -81,6 +81,10 @@ unsafe fn build_classes() { sel!(applicationDidResignActive:), did_resign_active as extern "C" fn(&mut Object, Sel, id), ); + decl.add_method( + sel!(applicationWillTerminate:), + will_terminate as extern "C" fn(&mut Object, Sel, id), + ); decl.add_method( sel!(handleGPUIMenuItem:), handle_menu_item as extern "C" fn(&mut Object, Sel, id), @@ -100,6 +104,7 @@ pub struct MacForegroundPlatform(RefCell); pub struct MacForegroundPlatformState { become_active: Option>, resign_active: Option>, + quit: Option>, event: Option bool>>, menu_command: Option>, open_files: Option)>>, @@ -196,6 +201,10 @@ impl platform::ForegroundPlatform for MacForegroundPlatform { self.0.borrow_mut().resign_active = Some(callback); } + fn on_quit(&self, callback: Box) { + self.0.borrow_mut().quit = Some(callback); + } + fn on_event(&self, callback: Box bool>) { self.0.borrow_mut().event = Some(callback); } @@ -664,6 +673,13 @@ extern "C" fn did_resign_active(this: &mut Object, _: Sel, _: id) { } } +extern "C" fn will_terminate(this: &mut Object, _: Sel, _: id) { + let platform = unsafe { get_foreground_platform(this) }; + if let Some(callback) = platform.0.borrow_mut().quit.as_mut() { + callback(); + } +} + extern "C" fn open_files(this: &mut Object, _: Sel, _: id, paths: id) { let paths = unsafe { (0..paths.count()) diff --git a/crates/gpui/src/platform/test.rs b/crates/gpui/src/platform/test.rs index c866a5d23fc68d6ba3e61247256f56381e841254..eda430bc5163c72b20cdcc48ff9fd08736d95cf4 100644 --- a/crates/gpui/src/platform/test.rs +++ 
b/crates/gpui/src/platform/test.rs @@ -58,6 +58,8 @@ impl super::ForegroundPlatform for ForegroundPlatform { fn on_resign_active(&self, _: Box) {} + fn on_quit(&self, _: Box) {} + fn on_event(&self, _: Box bool>) {} fn on_open_files(&self, _: Box)>) {} From 1aee7bdb1dae097c0ce08773aa02cd017bf76aa9 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 1 Nov 2021 11:57:21 -0700 Subject: [PATCH 46/61] Delay quit until language servers are gracefully shut down Co-Authored-By: Antonio Scandurra Co-Authored-By: Nathan Sobo --- crates/language/src/language.rs | 2 +- crates/language/src/tests.rs | 2 +- crates/lsp/src/lib.rs | 119 +++++++++++++++++++++++--------- crates/project/src/worktree.rs | 2 +- crates/zed/src/main.rs | 12 +++- 5 files changed, 98 insertions(+), 39 deletions(-) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 8517f3c2a0adec661e18ad50d2efd612e0d31d96..7b42f5dcbc041dadf3e5907f87c406bc177a690d 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -124,7 +124,7 @@ impl Language { } else { Path::new(&config.binary).to_path_buf() }; - lsp::LanguageServer::new(&binary_path, root_path, cx.background()).map(Some) + lsp::LanguageServer::new(&binary_path, root_path, cx.background().clone()).map(Some) } else { Ok(None) } diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index a6af0cd015688fa2bc060a5026c3534675278f2f..58d5965b3c5341ddc5e41281125c5f20e0be1361 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -409,7 +409,7 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppConte #[gpui::test] async fn test_diagnostics(mut cx: gpui::TestAppContext) { - let (language_server, mut fake) = lsp::LanguageServer::fake(&cx.background()).await; + let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background()).await; let text = " fn a() { A } diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index 642595c5adb0255e9a1193b5ad841ebaaa5d8007..38562e581b55baa0b711592d0d96b30cd4da4423 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -36,9 +36,10 @@ pub struct LanguageServer { outbound_tx: channel::Sender>, notification_handlers: Arc>>, response_handlers: Arc>>, - _input_task: Task>, - _output_task: Task>, + executor: Arc, + io_tasks: Option<(Task>, Task>)>, initialized: barrier::Receiver, + output_done_rx: Option, } pub struct Subscription { @@ -89,7 +90,7 @@ impl LanguageServer { pub fn new( binary_path: &Path, root_path: &Path, - background: &executor::Background, + background: Arc, ) -> Result> { let mut server = Command::new(binary_path) .stdin(Stdio::piped()) @@ -105,7 +106,7 @@ impl LanguageServer { stdin: Stdin, stdout: Stdout, root_path: &Path, - background: &executor::Background, + executor: Arc, ) -> Result> where Stdin: AsyncWrite + Unpin + Send + 'static, @@ -116,7 +117,7 @@ impl LanguageServer { let (outbound_tx, outbound_rx) = channel::unbounded::>(); let notification_handlers = Arc::new(RwLock::new(HashMap::<_, NotificationHandler>::new())); let response_handlers = Arc::new(Mutex::new(HashMap::<_, ResponseHandler>::new())); - let _input_task = background.spawn( + let input_task = executor.spawn( { let notification_handlers = notification_handlers.clone(); let response_handlers = response_handlers.clone(); @@ -171,13 +172,12 @@ impl LanguageServer { } .log_err(), ); - let _output_task = background.spawn( + let (output_done_tx, output_done_rx) = barrier::channel(); + let output_task = executor.spawn( async 
move { let mut content_len_buffer = Vec::new(); - loop { + while let Ok(message) = outbound_rx.recv().await { content_len_buffer.clear(); - - let message = outbound_rx.recv().await?; write!(content_len_buffer, "{}", message.len()).unwrap(); stdin.write_all(CONTENT_LEN_HEADER.as_bytes()).await?; stdin.write_all(&content_len_buffer).await?; @@ -185,6 +185,8 @@ impl LanguageServer { stdin.write_all(&message).await?; stdin.flush().await?; } + drop(output_done_tx); + Ok(()) } .log_err(), ); @@ -195,14 +197,15 @@ impl LanguageServer { response_handlers, next_id: Default::default(), outbound_tx, - _input_task, - _output_task, + executor: executor.clone(), + io_tasks: Some((input_task, output_task)), initialized: initialized_rx, + output_done_rx: Some(output_done_rx), }); let root_uri = lsp_types::Url::from_file_path(root_path).map_err(|_| anyhow!("invalid root path"))?; - background + executor .spawn({ let this = this.clone(); async move { @@ -234,12 +237,18 @@ impl LanguageServer { locale: Default::default(), }; - self.request_internal::(params) - .await?; - self.notify_internal::( - lsp_types::InitializedParams {}, + let this = self.clone(); + Self::request_internal::( + &this.next_id, + &this.response_handlers, + &this.outbound_tx, + params, ) .await?; + Self::notify_internal::( + &this.outbound_tx, + lsp_types::InitializedParams {}, + )?; Ok(()) } @@ -279,18 +288,26 @@ impl LanguageServer { let this = self.clone(); async move { this.initialized.clone().recv().await; - this.request_internal::(params).await + Self::request_internal::( + &this.next_id, + &this.response_handlers, + &this.outbound_tx, + params, + ) + .await } } fn request_internal( - self: &Arc, + next_id: &AtomicUsize, + response_handlers: &Mutex>, + outbound_tx: &channel::Sender>, params: T::Params, ) -> impl Future> where T::Result: 'static + Send, { - let id = self.next_id.fetch_add(1, SeqCst); + let id = next_id.fetch_add(1, SeqCst); let message = serde_json::to_vec(&Request { jsonrpc: JSON_RPC_VERSION, id, @@ -298,7 +315,7 @@ impl LanguageServer { params, }) .unwrap(); - let mut response_handlers = self.response_handlers.lock(); + let mut response_handlers = response_handlers.lock(); let (mut tx, mut rx) = oneshot::channel(); response_handlers.insert( id, @@ -313,9 +330,9 @@ impl LanguageServer { }), ); - let this = self.clone(); + let send = outbound_tx.try_send(message); async move { - this.outbound_tx.send(message).await?; + send?; rx.recv().await.unwrap() } } @@ -327,26 +344,50 @@ impl LanguageServer { let this = self.clone(); async move { this.initialized.clone().recv().await; - this.notify_internal::(params).await + Self::notify_internal::(&this.outbound_tx, params)?; + Ok(()) } } fn notify_internal( - self: &Arc, + outbound_tx: &channel::Sender>, params: T::Params, - ) -> impl Future> { + ) -> Result<()> { let message = serde_json::to_vec(&Notification { jsonrpc: JSON_RPC_VERSION, method: T::METHOD, params, }) .unwrap(); + outbound_tx.try_send(message)?; + Ok(()) + } +} - let this = self.clone(); - async move { - this.outbound_tx.send(message).await?; - Ok(()) - } +impl Drop for LanguageServer { + fn drop(&mut self) { + let tasks = self.io_tasks.take(); + let response_handlers = self.response_handlers.clone(); + let outbound_tx = self.outbound_tx.clone(); + let next_id = AtomicUsize::new(self.next_id.load(SeqCst)); + let mut output_done = self.output_done_rx.take().unwrap(); + self.executor.spawn_critical( + async move { + Self::request_internal::( + &next_id, + &response_handlers, + &outbound_tx, + (), + ) + .await?; 
+ Self::notify_internal::(&outbound_tx, ())?; + drop(outbound_tx); + output_done.recv().await; + drop(tasks); + Ok(()) + } + .log_err(), + ) } } @@ -377,7 +418,7 @@ pub struct RequestId { #[cfg(any(test, feature = "test-support"))] impl LanguageServer { - pub async fn fake(executor: &executor::Background) -> (Arc, FakeLanguageServer) { + pub async fn fake(executor: Arc) -> (Arc, FakeLanguageServer) { let stdin = async_pipe::pipe(); let stdout = async_pipe::pipe(); let mut fake = FakeLanguageServer { @@ -512,8 +553,12 @@ mod tests { lsp_types::Url::from_file_path(root_dir.path().join("src/lib.rs")).unwrap(); let server = cx.read(|cx| { - LanguageServer::new(Path::new("rust-analyzer"), root_dir.path(), cx.background()) - .unwrap() + LanguageServer::new( + Path::new("rust-analyzer"), + root_dir.path(), + cx.background().clone(), + ) + .unwrap() }); server.next_idle_notification().await; @@ -555,7 +600,7 @@ mod tests { async fn test_fake(cx: TestAppContext) { SimpleLogger::init(log::LevelFilter::Info, Default::default()).unwrap(); - let (server, mut fake) = LanguageServer::fake(&cx.background()).await; + let (server, mut fake) = LanguageServer::fake(cx.background()).await; let (message_tx, message_rx) = channel::unbounded(); let (diagnostics_tx, diagnostics_rx) = channel::unbounded(); @@ -606,6 +651,12 @@ mod tests { diagnostics_rx.recv().await.unwrap().uri.as_str(), "file://b/c" ); + + drop(server); + let (shutdown_request, _) = fake.receive_request::().await; + fake.respond(shutdown_request, ()).await; + fake.receive_notification::() + .await; } impl LanguageServer { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index a4eb69db8ff5c43283d18809579d2d281cf472f3..5dbcd2ec8553aa665f02230dff4a14a046172af3 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3508,7 +3508,7 @@ mod tests { #[gpui::test] async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) { - let (language_server, mut fake_lsp) = LanguageServer::fake(&cx.background()).await; + let (language_server, mut fake_lsp) = LanguageServer::fake(cx.background()).await; let dir = temp_tree(json!({ "a.rs": "fn a() { A }", "b.rs": "const y: i32 = 1", diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index d2357ea3c37ea2a0476957918eee27f001888972..2107a3660fcffe716eca3089d8b9cc42b6ec7150 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -7,7 +7,7 @@ use gpui::AssetSource; use log::LevelFilter; use parking_lot::Mutex; use simplelog::SimpleLogger; -use std::{fs, path::PathBuf, sync::Arc}; +use std::{fs, path::PathBuf, sync::Arc, time::Duration}; use theme::ThemeRegistry; use workspace::{self, settings, OpenNew}; use zed::{self, assets::Assets, fs::RealFs, language, menus, AppState, OpenParams, OpenPaths}; @@ -29,7 +29,15 @@ fn main() { let languages = Arc::new(language::build_language_registry()); languages.set_theme(&settings.borrow().theme.editor.syntax); - app.run(move |cx| { + app.on_quit(|cx| { + let did_finish = cx + .background() + .block_on_critical_tasks(Duration::from_millis(100)); + if !did_finish { + log::error!("timed out on quit before critical tasks finished"); + } + }) + .run(move |cx| { let client = client::Client::new(); let http = http::client(); let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx)); From 78d97a3db24d72e7b47a260b3099b98001bd6769 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 1 Nov 2021 12:59:01 -0700 Subject: [PATCH 47/61] Use Diagnostic struct in buffer's 
diagnostics multimap --- crates/language/src/lib.rs | 31 +++++++-------- crates/language/src/tests.rs | 72 ++++++++++++++++++++-------------- crates/project/src/worktree.rs | 12 +++--- 3 files changed, 64 insertions(+), 51 deletions(-) diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 2bbea109671ecb3d50581dd8f5b770c8a86534b1..911f8ea1fd81f740962f0a0b4704145881006b68 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -62,7 +62,7 @@ pub struct Buffer { syntax_tree: Mutex>, parsing_in_background: bool, parse_count: usize, - diagnostics: AnchorRangeMultimap<(DiagnosticSeverity, String)>, + diagnostics: AnchorRangeMultimap, diagnostics_update_count: usize, language_server: Option, #[cfg(test)] @@ -72,15 +72,14 @@ pub struct Buffer { pub struct Snapshot { text: buffer::Snapshot, tree: Option, - diagnostics: AnchorRangeMultimap<(DiagnosticSeverity, String)>, + diagnostics: AnchorRangeMultimap, is_parsing: bool, language: Option>, query_cursor: QueryCursorHandle, } -#[derive(Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub struct Diagnostic { - pub range: Range, pub severity: DiagnosticSeverity, pub message: String, } @@ -687,8 +686,6 @@ impl Buffer { ); let mut end = PointUtf16::new(diagnostic.range.end.line, diagnostic.range.end.character); - let severity = diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR); - if diagnostic .source .as_ref() @@ -716,7 +713,13 @@ impl Buffer { range.end.column += 1; range.end = content.clip_point_utf16(range.end, Bias::Right); } - Some((range, (severity, diagnostic.message))) + Some(( + range, + Diagnostic { + severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), + message: diagnostic.message, + }, + )) }), ) }; @@ -741,15 +744,11 @@ impl Buffer { pub fn diagnostics_in_range<'a, T: 'a + ToOffset>( &'a self, range: Range, - ) -> impl Iterator + 'a { + ) -> impl Iterator, &Diagnostic)> + 'a { let content = self.content(); self.diagnostics .intersecting_ranges(range, content, true) - .map(move |(_, range, (severity, message))| Diagnostic { - range, - severity: *severity, - message: message.clone(), - }) + .map(move |(_, range, diagnostic)| (range, diagnostic)) } pub fn diagnostics_update_count(&self) -> usize { @@ -1544,19 +1543,19 @@ impl Snapshot { let range = range.start.to_offset(&*self)..range.end.to_offset(&*self); let mut diagnostic_endpoints = Vec::::new(); - for (_, range, (severity, _)) in + for (_, range, diagnostic) in self.diagnostics .intersecting_ranges(range.clone(), self.content(), true) { diagnostic_endpoints.push(DiagnosticEndpoint { offset: range.start, is_start: true, - severity: *severity, + severity: diagnostic.severity, }); diagnostic_endpoints.push(DiagnosticEndpoint { offset: range.end, is_start: false, - severity: *severity, + severity: diagnostic.severity, }); } diagnostic_endpoints.sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start)); diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 58d5965b3c5341ddc5e41281125c5f20e0be1361..5ab67362995437dc0e9c6e53c2ec97e25d7231de 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -468,16 +468,20 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .diagnostics_in_range(Point::new(3, 0)..Point::new(5, 0)) .collect::>(), &[ - Diagnostic { - range: Point::new(3, 9)..Point::new(3, 11), - severity: DiagnosticSeverity::ERROR, - message: "undefined variable 'BB'".to_string() - }, - Diagnostic { - range: Point::new(4, 9)..Point::new(4, 12), 
- severity: DiagnosticSeverity::ERROR, - message: "undefined variable 'CCC'".to_string() - } + ( + Point::new(3, 9)..Point::new(3, 11), + &Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'BB'".to_string() + }, + ), + ( + Point::new(4, 9)..Point::new(4, 12), + &Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'CCC'".to_string() + } + ) ] ); assert_eq!( @@ -527,16 +531,20 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .diagnostics_in_range(Point::new(2, 0)..Point::new(3, 0)) .collect::>(), &[ - Diagnostic { - range: Point::new(2, 9)..Point::new(2, 12), - severity: DiagnosticSeverity::WARNING, - message: "unreachable statement".to_string() - }, - Diagnostic { - range: Point::new(2, 9)..Point::new(2, 10), - severity: DiagnosticSeverity::ERROR, - message: "undefined variable 'A'".to_string() - }, + ( + Point::new(2, 9)..Point::new(2, 12), + &Diagnostic { + severity: DiagnosticSeverity::WARNING, + message: "unreachable statement".to_string() + } + ), + ( + Point::new(2, 9)..Point::new(2, 10), + &Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'A'".to_string() + }, + ) ] ); assert_eq!( @@ -598,16 +606,20 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .diagnostics_in_range(0..buffer.len()) .collect::>(), &[ - Diagnostic { - range: Point::new(2, 21)..Point::new(2, 22), - severity: DiagnosticSeverity::ERROR, - message: "undefined variable 'A'".to_string() - }, - Diagnostic { - range: Point::new(3, 9)..Point::new(3, 11), - severity: DiagnosticSeverity::ERROR, - message: "undefined variable 'BB'".to_string() - }, + ( + Point::new(2, 21)..Point::new(2, 22), + &Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'A'".to_string() + } + ), + ( + Point::new(3, 9)..Point::new(3, 11), + &Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'BB'".to_string() + }, + ) ] ); }); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 5dbcd2ec8553aa665f02230dff4a14a046172af3..7c9273a8cf9f26a9780524f966fc9f429815a061 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3551,11 +3551,13 @@ mod tests { .collect::>(); assert_eq!( diagnostics, - &[Diagnostic { - range: Point::new(0, 9)..Point::new(0, 10), - severity: lsp::DiagnosticSeverity::ERROR, - message: "undefined variable 'A'".to_string() - }] + &[( + Point::new(0, 9)..Point::new(0, 10), + &Diagnostic { + severity: lsp::DiagnosticSeverity::ERROR, + message: "undefined variable 'A'".to_string() + } + )] ) }); } From 40c861c249dd4290a73a647f03d8902c64a890c2 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 1 Nov 2021 14:05:19 -0700 Subject: [PATCH 48/61] Move protobuf logic from buffer crate to language crate This will enable us to add operations that only pertain to the language crate. 
Co-Authored-By: Nathan Sobo --- Cargo.lock | 1 - crates/buffer/Cargo.toml | 1 - crates/buffer/src/lib.rs | 299 +++------------------------------ crates/buffer/src/selection.rs | 54 +----- crates/language/src/lib.rs | 38 ++++- crates/language/src/proto.rs | 261 ++++++++++++++++++++++++++++ crates/project/src/worktree.rs | 6 +- 7 files changed, 322 insertions(+), 338 deletions(-) create mode 100644 crates/language/src/proto.rs diff --git a/Cargo.lock b/Cargo.lock index 0e01793175a4ce5a3442c5b7471c09f0af8cbfde..b0a7d6988c03a12cb637cd843338051343fe4ac8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -761,7 +761,6 @@ dependencies = [ "gpui", "log", "rand 0.8.3", - "rpc", "seahash", "smallvec", "sum_tree", diff --git a/crates/buffer/Cargo.toml b/crates/buffer/Cargo.toml index f6d949c05f47fbc0305d1ab8790f311b06f253ca..3d2cc8eec0eb8377a97b8201d25a26957b7dc77f 100644 --- a/crates/buffer/Cargo.toml +++ b/crates/buffer/Cargo.toml @@ -8,7 +8,6 @@ test-support = ["rand", "seahash"] [dependencies] clock = { path = "../clock" } -rpc = { path = "../rpc" } sum_tree = { path = "../sum_tree" } anyhow = "1.0.38" arrayvec = "0.7.1" diff --git a/crates/buffer/src/lib.rs b/crates/buffer/src/lib.rs index fad83e901ae5645717d7a1f8d11a314765e47053..bf97574d34a33116530ba85917463499579a78f2 100644 --- a/crates/buffer/src/lib.rs +++ b/crates/buffer/src/lib.rs @@ -19,11 +19,9 @@ pub use point_utf16::*; pub use random_char_iter::*; use rope::TextDimension; pub use rope::{Chunks, Rope, TextSummary}; -use rpc::proto; pub use selection::*; use std::{ cmp::{self, Reverse}, - convert::TryFrom, iter::Iterator, ops::{self, Range}, str, @@ -35,7 +33,7 @@ use sum_tree::{FilterCursor, SumTree}; #[cfg(any(test, feature = "test-support"))] #[derive(Clone, Default)] -struct DeterministicState; +pub struct DeterministicState; #[cfg(any(test, feature = "test-support"))] impl std::hash::BuildHasher for DeterministicState { @@ -344,10 +342,10 @@ impl Edit<(D1, D2)> { } #[derive(Copy, Clone, Debug, Default, Eq, PartialEq)] -struct InsertionTimestamp { - replica_id: ReplicaId, - local: clock::Seq, - lamport: clock::Seq, +pub struct InsertionTimestamp { + pub replica_id: ReplicaId, + pub local: clock::Seq, + pub lamport: clock::Seq, } impl InsertionTimestamp { @@ -422,18 +420,18 @@ pub enum Operation { #[derive(Clone, Debug, Eq, PartialEq)] pub struct EditOperation { - timestamp: InsertionTimestamp, - version: clock::Global, - ranges: Vec>, - new_text: Option, + pub timestamp: InsertionTimestamp, + pub version: clock::Global, + pub ranges: Vec>, + pub new_text: Option, } #[derive(Clone, Debug, Eq, PartialEq)] pub struct UndoOperation { - id: clock::Local, - counts: HashMap, - ranges: Vec>, - version: clock::Global, + pub id: clock::Local, + pub counts: HashMap, + pub ranges: Vec>, + pub version: clock::Global, } impl Buffer { @@ -472,34 +470,6 @@ impl Buffer { } } - pub fn from_proto(replica_id: u16, message: proto::Buffer) -> Result { - let mut buffer = Buffer::new(replica_id, message.id, History::new(message.content.into())); - let ops = message - .history - .into_iter() - .map(|op| Operation::Edit(op.into())); - buffer.apply_ops(ops)?; - buffer.selections = message - .selections - .into_iter() - .map(|set| { - let set = SelectionSet::try_from(set)?; - Result::<_, anyhow::Error>::Ok((set.id, set)) - }) - .collect::>()?; - Ok(buffer) - } - - pub fn to_proto(&self) -> proto::Buffer { - let ops = self.history.ops.values().map(Into::into).collect(); - proto::Buffer { - id: self.remote_id, - content: self.history.base_text.to_string(), - history: 
ops, - selections: self.selections.iter().map(|(_, set)| set.into()).collect(), - } - } - pub fn version(&self) -> clock::Global { self.version.clone() } @@ -1203,6 +1173,14 @@ impl Buffer { .retain(|set_id, _| set_id.replica_id != replica_id) } + pub fn base_text(&self) -> &Arc { + &self.history.base_text + } + + pub fn history(&self) -> impl Iterator { + self.history.ops.values() + } + pub fn undo(&mut self) -> Vec { let mut ops = Vec::new(); if let Some(transaction) = self.history.pop_undo().cloned() { @@ -1331,6 +1309,10 @@ impl Buffer { } } + pub fn add_raw_selection_set(&mut self, id: SelectionSetId, selections: SelectionSet) { + self.selections.insert(id, selections); + } + pub fn set_active_selection_set( &mut self, set_id: Option, @@ -2157,18 +2139,10 @@ impl Default for FragmentSummary { } #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct FullOffset(usize); +pub struct FullOffset(pub usize); impl FullOffset { const MAX: Self = FullOffset(usize::MAX); - - fn to_proto(self) -> u64 { - self.0 as u64 - } - - fn from_proto(value: u64) -> Self { - Self(value as usize) - } } impl ops::AddAssign for FullOffset { @@ -2298,227 +2272,6 @@ impl Operation { } } -impl<'a> Into for &'a Operation { - fn into(self) -> proto::Operation { - proto::Operation { - variant: Some(match self { - Operation::Edit(edit) => proto::operation::Variant::Edit(edit.into()), - Operation::Undo { - undo, - lamport_timestamp, - } => proto::operation::Variant::Undo(proto::operation::Undo { - replica_id: undo.id.replica_id as u32, - local_timestamp: undo.id.value, - lamport_timestamp: lamport_timestamp.value, - ranges: undo - .ranges - .iter() - .map(|r| proto::Range { - start: r.start.to_proto(), - end: r.end.to_proto(), - }) - .collect(), - counts: undo - .counts - .iter() - .map(|(edit_id, count)| proto::operation::UndoCount { - replica_id: edit_id.replica_id as u32, - local_timestamp: edit_id.value, - count: *count, - }) - .collect(), - version: From::from(&undo.version), - }), - Operation::UpdateSelections { - set_id, - selections, - lamport_timestamp, - } => proto::operation::Variant::UpdateSelections( - proto::operation::UpdateSelections { - replica_id: set_id.replica_id as u32, - local_timestamp: set_id.value, - lamport_timestamp: lamport_timestamp.value, - version: selections.version().into(), - selections: selections - .raw_entries() - .iter() - .map(|(range, state)| proto::Selection { - id: state.id as u64, - start: range.start.0.to_proto(), - end: range.end.0.to_proto(), - reversed: state.reversed, - }) - .collect(), - }, - ), - Operation::RemoveSelections { - set_id, - lamport_timestamp, - } => proto::operation::Variant::RemoveSelections( - proto::operation::RemoveSelections { - replica_id: set_id.replica_id as u32, - local_timestamp: set_id.value, - lamport_timestamp: lamport_timestamp.value, - }, - ), - Operation::SetActiveSelections { - set_id, - lamport_timestamp, - } => proto::operation::Variant::SetActiveSelections( - proto::operation::SetActiveSelections { - replica_id: lamport_timestamp.replica_id as u32, - local_timestamp: set_id.map(|set_id| set_id.value), - lamport_timestamp: lamport_timestamp.value, - }, - ), - #[cfg(test)] - Operation::Test(_) => unimplemented!(), - }), - } - } -} - -impl<'a> Into for &'a EditOperation { - fn into(self) -> proto::operation::Edit { - let ranges = self - .ranges - .iter() - .map(|range| proto::Range { - start: range.start.to_proto(), - end: range.end.to_proto(), - }) - .collect(); - proto::operation::Edit { - replica_id: 
self.timestamp.replica_id as u32, - local_timestamp: self.timestamp.local, - lamport_timestamp: self.timestamp.lamport, - version: From::from(&self.version), - ranges, - new_text: self.new_text.clone(), - } - } -} - -impl TryFrom for Operation { - type Error = anyhow::Error; - - fn try_from(message: proto::Operation) -> Result { - Ok( - match message - .variant - .ok_or_else(|| anyhow!("missing operation variant"))? - { - proto::operation::Variant::Edit(edit) => Operation::Edit(edit.into()), - proto::operation::Variant::Undo(undo) => Operation::Undo { - lamport_timestamp: clock::Lamport { - replica_id: undo.replica_id as ReplicaId, - value: undo.lamport_timestamp, - }, - undo: UndoOperation { - id: clock::Local { - replica_id: undo.replica_id as ReplicaId, - value: undo.local_timestamp, - }, - counts: undo - .counts - .into_iter() - .map(|c| { - ( - clock::Local { - replica_id: c.replica_id as ReplicaId, - value: c.local_timestamp, - }, - c.count, - ) - }) - .collect(), - ranges: undo - .ranges - .into_iter() - .map(|r| FullOffset::from_proto(r.start)..FullOffset::from_proto(r.end)) - .collect(), - version: undo.version.into(), - }, - }, - proto::operation::Variant::UpdateSelections(message) => { - let version = message.version.into(); - let entries = message - .selections - .iter() - .map(|selection| { - let range = (FullOffset::from_proto(selection.start), Bias::Left) - ..(FullOffset::from_proto(selection.end), Bias::Right); - let state = SelectionState { - id: selection.id as usize, - reversed: selection.reversed, - goal: SelectionGoal::None, - }; - (range, state) - }) - .collect(); - let selections = AnchorRangeMap::from_raw(version, entries); - - Operation::UpdateSelections { - set_id: clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value: message.local_timestamp, - }, - lamport_timestamp: clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value: message.lamport_timestamp, - }, - selections: Arc::from(selections), - } - } - proto::operation::Variant::RemoveSelections(message) => { - Operation::RemoveSelections { - set_id: clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value: message.local_timestamp, - }, - lamport_timestamp: clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value: message.lamport_timestamp, - }, - } - } - proto::operation::Variant::SetActiveSelections(message) => { - Operation::SetActiveSelections { - set_id: message.local_timestamp.map(|value| clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value, - }), - lamport_timestamp: clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value: message.lamport_timestamp, - }, - } - } - }, - ) - } -} - -impl From for EditOperation { - fn from(edit: proto::operation::Edit) -> Self { - let ranges = edit - .ranges - .into_iter() - .map(|range| FullOffset::from_proto(range.start)..FullOffset::from_proto(range.end)) - .collect(); - EditOperation { - timestamp: InsertionTimestamp { - replica_id: edit.replica_id as ReplicaId, - local: edit.local_timestamp, - lamport: edit.lamport_timestamp, - }, - version: edit.version.into(), - ranges, - new_text: edit.new_text, - } - } -} - pub trait ToOffset { fn to_offset<'a>(&self, content: impl Into>) -> usize; diff --git a/crates/buffer/src/selection.rs b/crates/buffer/src/selection.rs index 3dc84f66471ed4e1d20af63fa8327ad2c95e1d25..c55a5f423ba62309445fc75b0df771d1a15ff748 100644 --- a/crates/buffer/src/selection.rs +++ b/crates/buffer/src/selection.rs @@ -1,7 +1,5 @@ -use super::{AnchorRangeMap, 
Buffer, Content, FullOffset, Point, ToOffset, ToPoint}; -use rpc::proto; +use super::{AnchorRangeMap, Buffer, Content, Point, ToOffset, ToPoint}; use std::{cmp::Ordering, ops::Range, sync::Arc}; -use sum_tree::Bias; pub type SelectionSetId = clock::Lamport; pub type SelectionsVersion = usize; @@ -129,53 +127,3 @@ impl SelectionSet { }) } } - -impl<'a> Into for &'a SelectionSet { - fn into(self) -> proto::SelectionSet { - let version = self.selections.version(); - let entries = self.selections.raw_entries(); - proto::SelectionSet { - replica_id: self.id.replica_id as u32, - lamport_timestamp: self.id.value as u32, - is_active: self.active, - version: version.into(), - selections: entries - .iter() - .map(|(range, state)| proto::Selection { - id: state.id as u64, - start: range.start.0.to_proto(), - end: range.end.0.to_proto(), - reversed: state.reversed, - }) - .collect(), - } - } -} - -impl From for SelectionSet { - fn from(set: proto::SelectionSet) -> Self { - Self { - id: clock::Lamport { - replica_id: set.replica_id as u16, - value: set.lamport_timestamp, - }, - active: set.is_active, - selections: Arc::new(AnchorRangeMap::from_raw( - set.version.into(), - set.selections - .into_iter() - .map(|selection| { - let range = (FullOffset::from_proto(selection.start), Bias::Left) - ..(FullOffset::from_proto(selection.end), Bias::Right); - let state = SelectionState { - id: selection.id as usize, - reversed: selection.reversed, - goal: SelectionGoal::None, - }; - (range, state) - }) - .collect(), - )), - } - } -} diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 911f8ea1fd81f740962f0a0b4704145881006b68..726afd761734585eb154eefe279e9a47a7897681 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -1,5 +1,6 @@ mod highlight_map; mod language; +pub mod proto; #[cfg(test)] mod tests; @@ -16,7 +17,6 @@ use lazy_static::lazy_static; use lsp::LanguageServer; use parking_lot::Mutex; use postage::{prelude::Stream, sink::Sink, watch}; -use rpc::proto; use similar::{ChangeTag, TextDiff}; use smol::future::yield_now; use std::{ @@ -251,10 +251,34 @@ impl Buffer { message: proto::Buffer, file: Option>, ) -> Result { - Ok(Self::build( - TextBuffer::from_proto(replica_id, message)?, - file, - )) + let mut buffer = + buffer::Buffer::new(replica_id, message.id, History::new(message.content.into())); + let ops = message + .history + .into_iter() + .map(|op| Operation::Edit(proto::deserialize_edit_operation(op))); + buffer.apply_ops(ops)?; + for set in message.selections { + let set = proto::deserialize_selection_set(set); + buffer.add_raw_selection_set(set.id, set); + } + Ok(Self::build(buffer, file)) + } + + pub fn to_proto(&self) -> proto::Buffer { + proto::Buffer { + id: self.remote_id(), + content: self.text.base_text().to_string(), + history: self + .text + .history() + .map(proto::serialize_edit_operation) + .collect(), + selections: self + .selection_sets() + .map(|(_, set)| proto::serialize_selection_set(set)) + .collect(), + } } pub fn with_language( @@ -319,7 +343,7 @@ impl Buffer { .as_ref() .ok_or_else(|| anyhow!("buffer has no file"))?; let text = self.as_rope().clone(); - let version = self.version.clone(); + let version = self.version(); let save = file.save(self.remote_id(), text, version, cx.as_mut()); Ok(cx.spawn(|this, mut cx| async move { let (version, mtime) = save.await?; @@ -494,7 +518,7 @@ impl Buffer { .await; this.update(&mut cx, |this, cx| { if this.apply_diff(diff, cx) { - this.saved_version = this.version.clone(); + this.saved_version = 
this.version(); this.saved_mtime = new_mtime; cx.emit(Event::Reloaded); } diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs new file mode 100644 index 0000000000000000000000000000000000000000..769373b8c86cac5388a22b8a8d600a236fef8480 --- /dev/null +++ b/crates/language/src/proto.rs @@ -0,0 +1,261 @@ +use std::sync::Arc; + +use anyhow::{anyhow, Result}; +use buffer::*; +use clock::ReplicaId; +use rpc::proto; + +pub use proto::Buffer; + +pub fn serialize_operation(operation: &Operation) -> proto::Operation { + proto::Operation { + variant: Some(match operation { + Operation::Edit(edit) => { + proto::operation::Variant::Edit(serialize_edit_operation(edit)) + } + Operation::Undo { + undo, + lamport_timestamp, + } => proto::operation::Variant::Undo(proto::operation::Undo { + replica_id: undo.id.replica_id as u32, + local_timestamp: undo.id.value, + lamport_timestamp: lamport_timestamp.value, + ranges: undo + .ranges + .iter() + .map(|r| proto::Range { + start: r.start.0 as u64, + end: r.end.0 as u64, + }) + .collect(), + counts: undo + .counts + .iter() + .map(|(edit_id, count)| proto::operation::UndoCount { + replica_id: edit_id.replica_id as u32, + local_timestamp: edit_id.value, + count: *count, + }) + .collect(), + version: From::from(&undo.version), + }), + Operation::UpdateSelections { + set_id, + selections, + lamport_timestamp, + } => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections { + replica_id: set_id.replica_id as u32, + local_timestamp: set_id.value, + lamport_timestamp: lamport_timestamp.value, + version: selections.version().into(), + selections: selections + .raw_entries() + .iter() + .map(|(range, state)| proto::Selection { + id: state.id as u64, + start: range.start.0 .0 as u64, + end: range.end.0 .0 as u64, + reversed: state.reversed, + }) + .collect(), + }), + Operation::RemoveSelections { + set_id, + lamport_timestamp, + } => proto::operation::Variant::RemoveSelections(proto::operation::RemoveSelections { + replica_id: set_id.replica_id as u32, + local_timestamp: set_id.value, + lamport_timestamp: lamport_timestamp.value, + }), + Operation::SetActiveSelections { + set_id, + lamport_timestamp, + } => proto::operation::Variant::SetActiveSelections( + proto::operation::SetActiveSelections { + replica_id: lamport_timestamp.replica_id as u32, + local_timestamp: set_id.map(|set_id| set_id.value), + lamport_timestamp: lamport_timestamp.value, + }, + ), + }), + } +} + +pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::Edit { + let ranges = operation + .ranges + .iter() + .map(|range| proto::Range { + start: range.start.0 as u64, + end: range.end.0 as u64, + }) + .collect(); + proto::operation::Edit { + replica_id: operation.timestamp.replica_id as u32, + local_timestamp: operation.timestamp.local, + lamport_timestamp: operation.timestamp.lamport, + version: From::from(&operation.version), + ranges, + new_text: operation.new_text.clone(), + } +} + +pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet { + let version = set.selections.version(); + let entries = set.selections.raw_entries(); + proto::SelectionSet { + replica_id: set.id.replica_id as u32, + lamport_timestamp: set.id.value as u32, + is_active: set.active, + version: version.into(), + selections: entries + .iter() + .map(|(range, state)| proto::Selection { + id: state.id as u64, + start: range.start.0 .0 as u64, + end: range.end.0 .0 as u64, + reversed: state.reversed, + }) + .collect(), + } +} + +pub fn 
deserialize_operation(message: proto::Operation) -> Result { + Ok( + match message + .variant + .ok_or_else(|| anyhow!("missing operation variant"))? + { + proto::operation::Variant::Edit(edit) => { + Operation::Edit(deserialize_edit_operation(edit)) + } + proto::operation::Variant::Undo(undo) => Operation::Undo { + lamport_timestamp: clock::Lamport { + replica_id: undo.replica_id as ReplicaId, + value: undo.lamport_timestamp, + }, + undo: UndoOperation { + id: clock::Local { + replica_id: undo.replica_id as ReplicaId, + value: undo.local_timestamp, + }, + counts: undo + .counts + .into_iter() + .map(|c| { + ( + clock::Local { + replica_id: c.replica_id as ReplicaId, + value: c.local_timestamp, + }, + c.count, + ) + }) + .collect(), + ranges: undo + .ranges + .into_iter() + .map(|r| FullOffset(r.start as usize)..FullOffset(r.end as usize)) + .collect(), + version: undo.version.into(), + }, + }, + proto::operation::Variant::UpdateSelections(message) => { + let version = message.version.into(); + let entries = message + .selections + .iter() + .map(|selection| { + let range = (FullOffset(selection.start as usize), Bias::Left) + ..(FullOffset(selection.end as usize), Bias::Right); + let state = SelectionState { + id: selection.id as usize, + reversed: selection.reversed, + goal: SelectionGoal::None, + }; + (range, state) + }) + .collect(); + let selections = AnchorRangeMap::from_raw(version, entries); + + Operation::UpdateSelections { + set_id: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: message.local_timestamp, + }, + lamport_timestamp: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: message.lamport_timestamp, + }, + selections: Arc::from(selections), + } + } + proto::operation::Variant::RemoveSelections(message) => Operation::RemoveSelections { + set_id: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: message.local_timestamp, + }, + lamport_timestamp: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: message.lamport_timestamp, + }, + }, + proto::operation::Variant::SetActiveSelections(message) => { + Operation::SetActiveSelections { + set_id: message.local_timestamp.map(|value| clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value, + }), + lamport_timestamp: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: message.lamport_timestamp, + }, + } + } + }, + ) +} + +pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation { + let ranges = edit + .ranges + .into_iter() + .map(|range| FullOffset(range.start as usize)..FullOffset(range.end as usize)) + .collect(); + EditOperation { + timestamp: InsertionTimestamp { + replica_id: edit.replica_id as ReplicaId, + local: edit.local_timestamp, + lamport: edit.lamport_timestamp, + }, + version: edit.version.into(), + ranges, + new_text: edit.new_text, + } +} + +pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet { + SelectionSet { + id: clock::Lamport { + replica_id: set.replica_id as u16, + value: set.lamport_timestamp, + }, + active: set.is_active, + selections: Arc::new(AnchorRangeMap::from_raw( + set.version.into(), + set.selections + .into_iter() + .map(|selection| { + let range = (FullOffset(selection.start as usize), Bias::Left) + ..(FullOffset(selection.end as usize), Bias::Right); + let state = SelectionState { + id: selection.id as usize, + reversed: selection.reversed, + goal: SelectionGoal::None, + }; + (range, state) + }) + .collect(), + )), + } +} diff 
--git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 7c9273a8cf9f26a9780524f966fc9f429815a061..166913b1c01c5ff4cfef889a858082af236d5672 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -430,8 +430,8 @@ impl Worktree { let ops = payload .operations .into_iter() - .map(|op| op.try_into()) - .collect::>>()?; + .map(|op| language::proto::deserialize_operation(op)) + .collect::, _>>()?; match self { Worktree::Local(worktree) => { @@ -1944,7 +1944,7 @@ impl language::File for File { .request(proto::UpdateBuffer { worktree_id: remote_id, buffer_id, - operations: vec![(&operation).into()], + operations: vec![language::proto::serialize_operation(&operation)], }) .await { From 0e62ddbb654d0a3b3716b8fcbdccd6f19e949af0 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 1 Nov 2021 15:28:37 -0700 Subject: [PATCH 49/61] Replicate diagnostics to remote buffers Co-Authored-By: Nathan Sobo --- crates/buffer/src/anchor.rs | 44 +++++++++++- crates/language/src/lib.rs | 49 +++++++++++--- crates/language/src/proto.rs | 126 +++++++++++++++++++++++++---------- crates/rpc/proto/zed.proto | 23 +++++++ crates/rpc/src/peer.rs | 4 ++ script/server | 2 +- 6 files changed, 197 insertions(+), 51 deletions(-) diff --git a/crates/buffer/src/anchor.rs b/crates/buffer/src/anchor.rs index b4f01909bfdd1f7cf7fcf327e141c8e0d802e956..d1577230cc49ba4a28c57e358ca57fbe9eb38c26 100644 --- a/crates/buffer/src/anchor.rs +++ b/crates/buffer/src/anchor.rs @@ -176,12 +176,17 @@ impl AnchorRangeMap { self.entries.len() } - pub fn from_raw(version: clock::Global, entries: Vec<(Range<(FullOffset, Bias)>, T)>) -> Self { + pub fn from_full_offset_ranges( + version: clock::Global, + entries: Vec<(Range<(FullOffset, Bias)>, T)>, + ) -> Self { Self { version, entries } } - pub fn raw_entries(&self) -> &[(Range<(FullOffset, Bias)>, T)] { - &self.entries + pub fn full_offset_ranges(&self) -> impl Iterator, &T)> { + self.entries + .iter() + .map(|(range, value)| (range.start.0..range.end.0, value)) } pub fn point_ranges<'a>( @@ -270,6 +275,10 @@ impl Default for AnchorRangeMultimap { } impl AnchorRangeMultimap { + pub fn version(&self) -> &clock::Global { + &self.version + } + pub fn intersecting_ranges<'a, I, O>( &'a self, range: Range, @@ -336,6 +345,35 @@ impl AnchorRangeMultimap { } }) } + + pub fn from_full_offset_ranges( + version: clock::Global, + start_bias: Bias, + end_bias: Bias, + entries: impl Iterator, T)>, + ) -> Self { + Self { + version, + start_bias, + end_bias, + entries: SumTree::from_iter( + entries.map(|(range, value)| AnchorRangeMultimapEntry { + range: FullOffsetRange { + start: range.start, + end: range.end, + }, + value, + }), + &(), + ), + } + } + + pub fn full_offset_ranges(&self) -> impl Iterator, &T)> { + self.entries + .cursor::<()>() + .map(|entry| (entry.range.start..entry.range.end, &entry.value)) + } } impl sum_tree::Item for AnchorRangeMultimapEntry { diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 726afd761734585eb154eefe279e9a47a7897681..7fa27b154d31be67940add0a9ce8b309ee8ccd72 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -9,7 +9,7 @@ pub use self::{ language::{BracketPair, Language, LanguageConfig, LanguageRegistry}, }; use anyhow::{anyhow, Result}; -pub use buffer::{Buffer as TextBuffer, *}; +pub use buffer::{Buffer as TextBuffer, Operation as _, *}; use clock::ReplicaId; use futures::FutureExt as _; use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task}; @@ -99,6 +99,12 @@ 
struct LanguageServerSnapshot { path: Arc, } +#[derive(Clone)] +pub enum Operation { + Buffer(buffer::Operation), + UpdateDiagnostics(AnchorRangeMultimap), +} + #[derive(Clone, Debug, Eq, PartialEq)] pub enum Event { Edited, @@ -256,7 +262,7 @@ impl Buffer { let ops = message .history .into_iter() - .map(|op| Operation::Edit(proto::deserialize_edit_operation(op))); + .map(|op| buffer::Operation::Edit(proto::deserialize_edit_operation(op))); buffer.apply_ops(ops)?; for set in message.selections { let set = proto::deserialize_selection_set(set); @@ -278,6 +284,7 @@ impl Buffer { .selection_sets() .map(|(_, set)| proto::serialize_selection_set(set)) .collect(), + diagnostics: Some(proto::serialize_diagnostics(&self.diagnostics)), } } @@ -761,6 +768,7 @@ impl Buffer { } self.diagnostics_update_count += 1; + self.send_operation(Operation::UpdateDiagnostics(self.diagnostics.clone()), cx); cx.notify(); Ok(()) } @@ -1240,7 +1248,7 @@ impl Buffer { } self.end_transaction(None, cx).unwrap(); - self.send_operation(Operation::Edit(edit), cx); + self.send_operation(Operation::Buffer(buffer::Operation::Edit(edit)), cx); } fn did_edit( @@ -1269,10 +1277,10 @@ impl Buffer { cx: &mut ModelContext, ) -> SelectionSetId { let operation = self.text.add_selection_set(selections); - if let Operation::UpdateSelections { set_id, .. } = &operation { + if let buffer::Operation::UpdateSelections { set_id, .. } = &operation { let set_id = *set_id; cx.notify(); - self.send_operation(operation, cx); + self.send_operation(Operation::Buffer(operation), cx); set_id } else { unreachable!() @@ -1287,7 +1295,7 @@ impl Buffer { ) -> Result<()> { let operation = self.text.update_selection_set(set_id, selections)?; cx.notify(); - self.send_operation(operation, cx); + self.send_operation(Operation::Buffer(operation), cx); Ok(()) } @@ -1297,7 +1305,7 @@ impl Buffer { cx: &mut ModelContext, ) -> Result<()> { let operation = self.text.set_active_selection_set(set_id)?; - self.send_operation(operation, cx); + self.send_operation(Operation::Buffer(operation), cx); Ok(()) } @@ -1308,7 +1316,7 @@ impl Buffer { ) -> Result<()> { let operation = self.text.remove_selection_set(set_id)?; cx.notify(); - self.send_operation(operation, cx); + self.send_operation(Operation::Buffer(operation), cx); Ok(()) } @@ -1320,7 +1328,17 @@ impl Buffer { self.pending_autoindent.take(); let was_dirty = self.is_dirty(); let old_version = self.version.clone(); - self.text.apply_ops(ops)?; + let buffer_ops = ops + .into_iter() + .filter_map(|op| match op { + Operation::Buffer(op) => Some(op), + Operation::UpdateDiagnostics(diagnostics) => { + self.apply_diagnostic_update(diagnostics, cx); + None + } + }) + .collect::>(); + self.text.apply_ops(buffer_ops)?; self.did_edit(&old_version, was_dirty, cx); // Notify independently of whether the buffer was edited as the operations could include a // selection update. 
@@ -1328,6 +1346,15 @@ impl Buffer { Ok(()) } + fn apply_diagnostic_update( + &mut self, + diagnostics: AnchorRangeMultimap, + cx: &mut ModelContext, + ) { + self.diagnostics = diagnostics; + cx.notify(); + } + #[cfg(not(test))] pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext) { if let Some(file) = &self.file { @@ -1350,7 +1377,7 @@ impl Buffer { let old_version = self.version.clone(); for operation in self.text.undo() { - self.send_operation(operation, cx); + self.send_operation(Operation::Buffer(operation), cx); } self.did_edit(&old_version, was_dirty, cx); @@ -1361,7 +1388,7 @@ impl Buffer { let old_version = self.version.clone(); for operation in self.text.redo() { - self.send_operation(operation, cx); + self.send_operation(Operation::Buffer(operation), cx); } self.did_edit(&old_version, was_dirty, cx); diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 769373b8c86cac5388a22b8a8d600a236fef8480..4e6a8316c2391034cdd7e66b1594de0d0c1c2417 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -1,8 +1,12 @@ use std::sync::Arc; +use crate::Diagnostic; + +use super::Operation; use anyhow::{anyhow, Result}; use buffer::*; use clock::ReplicaId; +use lsp::DiagnosticSeverity; use rpc::proto; pub use proto::Buffer; @@ -10,13 +14,13 @@ pub use proto::Buffer; pub fn serialize_operation(operation: &Operation) -> proto::Operation { proto::Operation { variant: Some(match operation { - Operation::Edit(edit) => { + Operation::Buffer(buffer::Operation::Edit(edit)) => { proto::operation::Variant::Edit(serialize_edit_operation(edit)) } - Operation::Undo { + Operation::Buffer(buffer::Operation::Undo { undo, lamport_timestamp, - } => proto::operation::Variant::Undo(proto::operation::Undo { + }) => proto::operation::Variant::Undo(proto::operation::Undo { replica_id: undo.id.replica_id as u32, local_timestamp: undo.id.value, lamport_timestamp: lamport_timestamp.value, @@ -39,44 +43,46 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { .collect(), version: From::from(&undo.version), }), - Operation::UpdateSelections { + Operation::Buffer(buffer::Operation::UpdateSelections { set_id, selections, lamport_timestamp, - } => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections { + }) => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections { replica_id: set_id.replica_id as u32, local_timestamp: set_id.value, lamport_timestamp: lamport_timestamp.value, version: selections.version().into(), selections: selections - .raw_entries() - .iter() + .full_offset_ranges() .map(|(range, state)| proto::Selection { id: state.id as u64, - start: range.start.0 .0 as u64, - end: range.end.0 .0 as u64, + start: range.start.0 as u64, + end: range.end.0 as u64, reversed: state.reversed, }) .collect(), }), - Operation::RemoveSelections { + Operation::Buffer(buffer::Operation::RemoveSelections { set_id, lamport_timestamp, - } => proto::operation::Variant::RemoveSelections(proto::operation::RemoveSelections { + }) => proto::operation::Variant::RemoveSelections(proto::operation::RemoveSelections { replica_id: set_id.replica_id as u32, local_timestamp: set_id.value, lamport_timestamp: lamport_timestamp.value, }), - Operation::SetActiveSelections { + Operation::Buffer(buffer::Operation::SetActiveSelections { set_id, lamport_timestamp, - } => proto::operation::Variant::SetActiveSelections( + }) => proto::operation::Variant::SetActiveSelections( proto::operation::SetActiveSelections { 
replica_id: lamport_timestamp.replica_id as u32, local_timestamp: set_id.map(|set_id| set_id.value), lamport_timestamp: lamport_timestamp.value, }, ), + Operation::UpdateDiagnostics(diagnostic_set) => { + proto::operation::Variant::UpdateDiagnostics(serialize_diagnostics(diagnostic_set)) + } }), } } @@ -102,24 +108,44 @@ pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation:: pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet { let version = set.selections.version(); - let entries = set.selections.raw_entries(); + let entries = set.selections.full_offset_ranges(); proto::SelectionSet { replica_id: set.id.replica_id as u32, lamport_timestamp: set.id.value as u32, is_active: set.active, version: version.into(), selections: entries - .iter() .map(|(range, state)| proto::Selection { id: state.id as u64, - start: range.start.0 .0 as u64, - end: range.end.0 .0 as u64, + start: range.start.0 as u64, + end: range.end.0 as u64, reversed: state.reversed, }) .collect(), } } +pub fn serialize_diagnostics(map: &AnchorRangeMultimap) -> proto::DiagnosticSet { + proto::DiagnosticSet { + version: map.version().into(), + diagnostics: map + .full_offset_ranges() + .map(|(range, diagnostic)| proto::Diagnostic { + start: range.start.0 as u64, + end: range.end.0 as u64, + message: diagnostic.message.clone(), + severity: match diagnostic.severity { + DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error, + DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning, + DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information, + DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint, + _ => proto::diagnostic::Severity::None, + } as i32, + }) + .collect(), + } +} + pub fn deserialize_operation(message: proto::Operation) -> Result { Ok( match message @@ -127,9 +153,9 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { .ok_or_else(|| anyhow!("missing operation variant"))? 
{ proto::operation::Variant::Edit(edit) => { - Operation::Edit(deserialize_edit_operation(edit)) + Operation::Buffer(buffer::Operation::Edit(deserialize_edit_operation(edit))) } - proto::operation::Variant::Undo(undo) => Operation::Undo { + proto::operation::Variant::Undo(undo) => Operation::Buffer(buffer::Operation::Undo { lamport_timestamp: clock::Lamport { replica_id: undo.replica_id as ReplicaId, value: undo.lamport_timestamp, @@ -159,7 +185,7 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { .collect(), version: undo.version.into(), }, - }, + }), proto::operation::Variant::UpdateSelections(message) => { let version = message.version.into(); let entries = message @@ -176,9 +202,9 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { (range, state) }) .collect(); - let selections = AnchorRangeMap::from_raw(version, entries); + let selections = AnchorRangeMap::from_full_offset_ranges(version, entries); - Operation::UpdateSelections { + Operation::Buffer(buffer::Operation::UpdateSelections { set_id: clock::Lamport { replica_id: message.replica_id as ReplicaId, value: message.local_timestamp, @@ -188,20 +214,22 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { value: message.lamport_timestamp, }, selections: Arc::from(selections), - } + }) + } + proto::operation::Variant::RemoveSelections(message) => { + Operation::Buffer(buffer::Operation::RemoveSelections { + set_id: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: message.local_timestamp, + }, + lamport_timestamp: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: message.lamport_timestamp, + }, + }) } - proto::operation::Variant::RemoveSelections(message) => Operation::RemoveSelections { - set_id: clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value: message.local_timestamp, - }, - lamport_timestamp: clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value: message.lamport_timestamp, - }, - }, proto::operation::Variant::SetActiveSelections(message) => { - Operation::SetActiveSelections { + Operation::Buffer(buffer::Operation::SetActiveSelections { set_id: message.local_timestamp.map(|value| clock::Lamport { replica_id: message.replica_id as ReplicaId, value, @@ -210,7 +238,10 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { replica_id: message.replica_id as ReplicaId, value: message.lamport_timestamp, }, - } + }) + } + proto::operation::Variant::UpdateDiagnostics(message) => { + Operation::UpdateDiagnostics(deserialize_diagnostics(message)) } }, ) @@ -241,7 +272,7 @@ pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet { value: set.lamport_timestamp, }, active: set.is_active, - selections: Arc::new(AnchorRangeMap::from_raw( + selections: Arc::new(AnchorRangeMap::from_full_offset_ranges( set.version.into(), set.selections .into_iter() @@ -259,3 +290,26 @@ pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet { )), } } + +pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMultimap { + AnchorRangeMultimap::from_full_offset_ranges( + message.version.into(), + Bias::Left, + Bias::Right, + message.diagnostics.into_iter().filter_map(|diagnostic| { + Some(( + FullOffset(diagnostic.start as usize)..FullOffset(diagnostic.end as usize), + Diagnostic { + severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? 
{ + proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR, + proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING, + proto::diagnostic::Severity::Information => DiagnosticSeverity::INFORMATION, + proto::diagnostic::Severity::Hint => DiagnosticSeverity::HINT, + proto::diagnostic::Severity::None => return None, + }, + message: diagnostic.message, + }, + )) + }), + ) +} diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 2986ab9451d0c33d130f5bc688ec412df0934fde..8753f27dbac619f372257da92c1d1aab7eabc5da 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -228,6 +228,7 @@ message Buffer { string content = 2; repeated Operation.Edit history = 3; repeated SelectionSet selections = 4; + DiagnosticSet diagnostics = 5; } message SelectionSet { @@ -245,6 +246,27 @@ message Selection { bool reversed = 4; } +message DiagnosticSet { + repeated VectorClockEntry version = 1; + repeated Diagnostic diagnostics = 2; +} + +message Diagnostic { + uint64 start = 1; + uint64 end = 2; + Severity severity = 3; + string message = 4; + enum Severity { + None = 0; + Error = 1; + Warning = 2; + Information = 3; + Hint = 4; + } +} + + + message Operation { oneof variant { Edit edit = 1; @@ -252,6 +274,7 @@ message Operation { UpdateSelections update_selections = 3; RemoveSelections remove_selections = 4; SetActiveSelections set_active_selections = 5; + DiagnosticSet update_diagnostics = 6; } message Edit { diff --git a/crates/rpc/src/peer.rs b/crates/rpc/src/peer.rs index 251ffb5bb512e2a603b57922b9097edbd408fecc..1a407e512f798c4d3a05e9d508cae89231656682 100644 --- a/crates/rpc/src/peer.rs +++ b/crates/rpc/src/peer.rs @@ -398,6 +398,7 @@ mod tests { content: "path/one content".to_string(), history: vec![], selections: vec![], + diagnostics: None, }), } ); @@ -419,6 +420,7 @@ mod tests { content: "path/two content".to_string(), history: vec![], selections: vec![], + diagnostics: None, }), } ); @@ -449,6 +451,7 @@ mod tests { content: "path/one content".to_string(), history: vec![], selections: vec![], + diagnostics: None, }), } } @@ -460,6 +463,7 @@ mod tests { content: "path/two content".to_string(), history: vec![], selections: vec![], + diagnostics: None, }), } } diff --git a/script/server b/script/server index 491932c9525276d78dbc70ab58986f7850ecec12..f85ab348e156b8e567b079ab07657457aedf2f59 100755 --- a/script/server +++ b/script/server @@ -2,5 +2,5 @@ set -e -cd server +cd crates/server cargo run $@ From f0353d6abadd24e1bf1f4938dbf8e025b8687031 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 1 Nov 2021 17:11:40 -0700 Subject: [PATCH 50/61] Add note in README about rustc bug on macOS monterey --- README.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/README.md b/README.md index dc576d604201dc6c1a2176d31d8801dba45bec68..eaa9ea50abc63a2e40a9423de50df21953943d36 100644 --- a/README.md +++ b/README.md @@ -6,6 +6,14 @@ Welcome to Zed, a lightning-fast, collaborative code editor that makes your drea ## Development tips +### Compiling on macOS Monterey + +The Zed server uses libcurl, which currently triggers [a bug](https://github.com/rust-lang/rust/issues/90342) in `rustc`. To work around this bug, export the following environment variable: + +``` +export MACOSX_DEPLOYMENT_TARGET=10.7 +``` + ### Dump element JSON If you trigger `cmd-shift-i`, Zed will copy a JSON representation of the current window contents to the clipboard. 
You can paste this in a tool like [DJSON](https://chrome.google.com/webstore/detail/djson-json-viewer-formatt/chaeijjekipecdajnijdldjjipaegdjc?hl=en) to navigate the state of on-screen elements in a structured way. From 61e06487b760b5a7d0af17b4596fa6cc17a961e9 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 1 Nov 2021 17:14:22 -0700 Subject: [PATCH 51/61] Avoid circular model update when sending diagnostics operations --- crates/language/src/lib.rs | 5 +- crates/project/src/worktree.rs | 135 ++++++++++++++++++--------------- 2 files changed, 76 insertions(+), 64 deletions(-) diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 7fa27b154d31be67940add0a9ce8b309ee8ccd72..306f08f3b8641ce600ccab9482b10843a96e99b9 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -679,7 +679,7 @@ impl Buffer { version: Option, mut diagnostics: Vec, cx: &mut ModelContext, - ) -> Result<()> { + ) -> Result { let version = version.map(|version| version as usize); let content = if let Some(version) = version { let language_server = self.language_server.as_mut().unwrap(); @@ -768,9 +768,8 @@ impl Buffer { } self.diagnostics_update_count += 1; - self.send_operation(Operation::UpdateDiagnostics(self.diagnostics.clone()), cx); cx.notify(); - Ok(()) + Ok(Operation::UpdateDiagnostics(self.diagnostics.clone())) } pub fn diagnostics_in_range<'a, T: 'a + ToOffset>( diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 166913b1c01c5ff4cfef889a858082af236d5672..07b291c594e8824f0761d5299c211574ea70122d 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -649,6 +649,79 @@ impl Worktree { } } } + + fn update_diagnostics( + &mut self, + params: lsp::PublishDiagnosticsParams, + cx: &mut ModelContext, + ) -> Result<()> { + let this = self.as_local_mut().ok_or_else(|| anyhow!("not local"))?; + let file_path = params + .uri + .to_file_path() + .map_err(|_| anyhow!("URI is not a file"))? + .strip_prefix(&this.abs_path) + .context("path is not within worktree")? 
+ .to_owned(); + + for buffer in this.open_buffers.values() { + if let Some(buffer) = buffer.upgrade(cx) { + if buffer + .read(cx) + .file() + .map_or(false, |file| file.path().as_ref() == file_path) + { + let (remote_id, operation) = buffer.update(cx, |buffer, cx| { + ( + buffer.remote_id(), + buffer.update_diagnostics(params.version, params.diagnostics, cx), + ) + }); + self.send_buffer_update(remote_id, operation?, cx); + return Ok(()); + } + } + } + + this.diagnostics.insert(file_path, params.diagnostics); + Ok(()) + } + + fn send_buffer_update( + &mut self, + buffer_id: u64, + operation: Operation, + cx: &mut ModelContext, + ) { + if let Some((rpc, remote_id)) = match self { + Worktree::Local(worktree) => worktree + .remote_id + .borrow() + .map(|id| (worktree.rpc.clone(), id)), + Worktree::Remote(worktree) => Some((worktree.client.clone(), worktree.remote_id)), + } { + cx.spawn(|worktree, mut cx| async move { + if let Err(error) = rpc + .request(proto::UpdateBuffer { + worktree_id: remote_id, + buffer_id, + operations: vec![language::proto::serialize_operation(&operation)], + }) + .await + { + worktree.update(&mut cx, |worktree, _| { + log::error!("error sending buffer operation: {}", error); + match worktree { + Worktree::Local(t) => &mut t.queued_operations, + Worktree::Remote(t) => &mut t.queued_operations, + } + .push((buffer_id, operation)); + }); + } + }) + .detach(); + } + } } impl Deref for Worktree { @@ -836,7 +909,6 @@ impl LocalWorktree { while let Ok(diagnostics) = diagnostics_rx.recv().await { if let Some(handle) = cx.read(|cx| this.upgrade(cx)) { handle.update(&mut cx, |this, cx| { - let this = this.as_local_mut().unwrap(); this.update_diagnostics(diagnostics, cx).log_err(); }); } else { @@ -1200,38 +1272,6 @@ impl LocalWorktree { }) }) } - - fn update_diagnostics( - &mut self, - params: lsp::PublishDiagnosticsParams, - cx: &mut ModelContext, - ) -> Result<()> { - let file_path = params - .uri - .to_file_path() - .map_err(|_| anyhow!("URI is not a file"))? - .strip_prefix(&self.abs_path) - .context("path is not within worktree")? 
- .to_owned(); - - for buffer in self.open_buffers.values() { - if let Some(buffer) = buffer.upgrade(cx) { - if buffer - .read(cx) - .file() - .map_or(false, |file| file.path().as_ref() == file_path) - { - buffer.update(cx, |buffer, cx| { - buffer.update_diagnostics(params.version, params.diagnostics, cx) - })?; - return Ok(()); - } - } - } - - self.diagnostics.insert(file_path, params.diagnostics); - Ok(()) - } } fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result { @@ -1932,34 +1972,7 @@ impl language::File for File { fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext) { self.worktree.update(cx, |worktree, cx| { - if let Some((rpc, remote_id)) = match worktree { - Worktree::Local(worktree) => worktree - .remote_id - .borrow() - .map(|id| (worktree.rpc.clone(), id)), - Worktree::Remote(worktree) => Some((worktree.client.clone(), worktree.remote_id)), - } { - cx.spawn(|worktree, mut cx| async move { - if let Err(error) = rpc - .request(proto::UpdateBuffer { - worktree_id: remote_id, - buffer_id, - operations: vec![language::proto::serialize_operation(&operation)], - }) - .await - { - worktree.update(&mut cx, |worktree, _| { - log::error!("error sending buffer operation: {}", error); - match worktree { - Worktree::Local(t) => &mut t.queued_operations, - Worktree::Remote(t) => &mut t.queued_operations, - } - .push((buffer_id, operation)); - }); - } - }) - .detach(); - } + worktree.send_buffer_update(buffer_id, operation, cx); }); } From f5aa07aac9bd2999582c2e501f6921bafbdfe69c Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 2 Nov 2021 14:55:31 +0100 Subject: [PATCH 52/61] Remove all windows before quitting This gives all entities a chance of running `Drop::drop` which, in turn, could cause them to spawn a critical task. For example, we use this capability when a language server is dropped and we need to asynchronously send a shutdown message. 
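A minimal sketch of the ordering this commit relies on, with stand-in types rather than gpui's real executor and window handles: dropping an entity queues one last piece of work, so windows must be dropped while something can still drain that queue.

```rust
use std::sync::mpsc;
use std::thread;

// Hypothetical handle: dropping it queues an asynchronous shutdown message.
struct LanguageServerHandle {
    shutdown_tx: mpsc::Sender<&'static str>,
}

impl Drop for LanguageServerHandle {
    fn drop(&mut self) {
        // Queue the LSP shutdown instead of sending it inline.
        let _ = self.shutdown_tx.send("shutdown");
    }
}

fn main() {
    let (tx, rx) = mpsc::channel();
    // Stand-in for the background executor that drains queued messages.
    let executor = thread::spawn(move || {
        while let Ok(msg) = rx.recv() {
            println!("sending `{msg}` request to the language server");
        }
    });

    // "Remove all windows": dropping the entities runs Drop::drop, which
    // queues the shutdown while the executor is still alive.
    let windows = vec![LanguageServerHandle { shutdown_tx: tx }];
    drop(windows);

    // The app then waits (with a short timeout in the real code) for the
    // queued work to finish before exiting.
    executor.join().unwrap();
}
```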
--- crates/gpui/src/app.rs | 7 +++++++ crates/zed/src/main.rs | 1 + 2 files changed, 8 insertions(+) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index ed2759aa0ccb88939cfc63ea3f7e4a6a960be289..a7675470f5b151cea17d63b30a94e8284dd34970 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -1230,6 +1230,13 @@ impl MutableAppContext { self.remove_dropped_entities(); } + pub fn remove_all_windows(&mut self) { + for (window_id, _) in self.cx.windows.drain() { + self.presenters_and_platform_windows.remove(&window_id); + } + self.remove_dropped_entities(); + } + fn open_platform_window(&mut self, window_id: usize, window_options: WindowOptions) { let mut window = self.cx diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 2107a3660fcffe716eca3089d8b9cc42b6ec7150..0fb111693ae2927221487efe291e0214554b8a48 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -30,6 +30,7 @@ fn main() { languages.set_theme(&settings.borrow().theme.editor.syntax); app.on_quit(|cx| { + cx.remove_all_windows(); let did_finish = cx .background() .block_on_critical_tasks(Duration::from_millis(100)); From 882c8ce69654561981c7efc4cb94e2013f553151 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 2 Nov 2021 14:58:00 +0100 Subject: [PATCH 53/61] Avoid error during deserialization if the `result` field isn't there --- crates/lsp/src/lib.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index 38562e581b55baa0b711592d0d96b30cd4da4423..aa8df7b71dddeec0d248da9714484cce35d2534a 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -61,7 +61,7 @@ struct AnyResponse<'a> { #[serde(default)] error: Option, #[serde(borrow)] - result: &'a RawValue, + result: Option<&'a RawValue>, } #[derive(Serialize, Deserialize)] @@ -157,8 +157,10 @@ impl LanguageServer { if let Some(handler) = response_handlers.lock().remove(&id) { if let Some(error) = error { handler(Err(error)); - } else { + } else if let Some(result) = result { handler(Ok(result.get())); + } else { + handler(Ok("null")); } } } else { @@ -459,7 +461,7 @@ impl FakeLanguageServer { let message = serde_json::to_vec(&AnyResponse { id: request_id.id, error: None, - result: &RawValue::from_string(result).unwrap(), + result: Some(&RawValue::from_string(result).unwrap()), }) .unwrap(); self.send(message).await; From 2c57703ad6701335e2746499e1028f54c306029d Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Tue, 2 Nov 2021 13:16:25 -0600 Subject: [PATCH 54/61] Explicitly shut down language servers when quitting the app Co-Authored-By: Max Brunsfeld Co-Authored-By: Antonio Scandurra --- crates/gpui/src/app.rs | 102 ++++++++++++++++++++++++++------- crates/gpui/src/executor.rs | 33 +---------- crates/lsp/src/lib.rs | 95 +++++++++++++++++------------- crates/project/src/worktree.rs | 20 +++++++ crates/zed/src/main.rs | 13 +---- 5 files changed, 161 insertions(+), 102 deletions(-) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index a7675470f5b151cea17d63b30a94e8284dd34970..dc7e4d19b584a1650bd97a4da20b7844444ff927 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -23,6 +23,7 @@ use std::{ mem, ops::{Deref, DerefMut}, path::{Path, PathBuf}, + pin::Pin, rc::{self, Rc}, sync::{ atomic::{AtomicUsize, Ordering::SeqCst}, @@ -35,6 +36,12 @@ pub trait Entity: 'static { type Event; fn release(&mut self, _: &mut MutableAppContext) {} + fn app_will_quit( + &mut self, + _: &mut MutableAppContext, + ) -> Option>>> { + None + } 
} pub trait View: Entity + Sized { @@ -198,8 +205,6 @@ pub struct App(Rc>); #[derive(Clone)] pub struct AsyncAppContext(Rc>); -pub struct BackgroundAppContext(*const RefCell); - #[derive(Clone)] pub struct TestAppContext { cx: Rc>, @@ -220,20 +225,29 @@ impl App { asset_source, )))); - let cx = app.0.clone(); - foreground_platform.on_menu_command(Box::new(move |action| { - let mut cx = cx.borrow_mut(); - if let Some(key_window_id) = cx.cx.platform.key_window_id() { - if let Some((presenter, _)) = cx.presenters_and_platform_windows.get(&key_window_id) - { - let presenter = presenter.clone(); - let path = presenter.borrow().dispatch_path(cx.as_ref()); - cx.dispatch_action_any(key_window_id, &path, action); + foreground_platform.on_quit(Box::new({ + let cx = app.0.clone(); + move || { + cx.borrow_mut().quit(); + } + })); + foreground_platform.on_menu_command(Box::new({ + let cx = app.0.clone(); + move |action| { + let mut cx = cx.borrow_mut(); + if let Some(key_window_id) = cx.cx.platform.key_window_id() { + if let Some((presenter, _)) = + cx.presenters_and_platform_windows.get(&key_window_id) + { + let presenter = presenter.clone(); + let path = presenter.borrow().dispatch_path(cx.as_ref()); + cx.dispatch_action_any(key_window_id, &path, action); + } else { + cx.dispatch_global_action_any(action); + } } else { cx.dispatch_global_action_any(action); } - } else { - cx.dispatch_global_action_any(action); } })); @@ -751,6 +765,39 @@ impl MutableAppContext { App(self.weak_self.as_ref().unwrap().upgrade().unwrap()) } + pub fn quit(&mut self) { + let mut futures = Vec::new(); + for model_id in self.cx.models.keys().copied().collect::>() { + let mut model = self.cx.models.remove(&model_id).unwrap(); + futures.extend(model.app_will_quit(self)); + self.cx.models.insert(model_id, model); + } + + for view_id in self.cx.views.keys().copied().collect::>() { + let mut view = self.cx.views.remove(&view_id).unwrap(); + futures.extend(view.app_will_quit(self)); + self.cx.views.insert(view_id, view); + } + + self.remove_all_windows(); + + let futures = futures::future::join_all(futures); + if self + .background + .block_with_timeout(Duration::from_millis(100), futures) + .is_err() + { + log::error!("timed out waiting on app_will_quit"); + } + } + + fn remove_all_windows(&mut self) { + for (window_id, _) in self.cx.windows.drain() { + self.presenters_and_platform_windows.remove(&window_id); + } + self.remove_dropped_entities(); + } + pub fn platform(&self) -> Arc { self.cx.platform.clone() } @@ -1230,13 +1277,6 @@ impl MutableAppContext { self.remove_dropped_entities(); } - pub fn remove_all_windows(&mut self) { - for (window_id, _) in self.cx.windows.drain() { - self.presenters_and_platform_windows.remove(&window_id); - } - self.remove_dropped_entities(); - } - fn open_platform_window(&mut self, window_id: usize, window_options: WindowOptions) { let mut window = self.cx @@ -1898,6 +1938,10 @@ pub trait AnyModel { fn as_any(&self) -> &dyn Any; fn as_any_mut(&mut self) -> &mut dyn Any; fn release(&mut self, cx: &mut MutableAppContext); + fn app_will_quit( + &mut self, + cx: &mut MutableAppContext, + ) -> Option>>>; } impl AnyModel for T @@ -1915,12 +1959,23 @@ where fn release(&mut self, cx: &mut MutableAppContext) { self.release(cx); } + + fn app_will_quit( + &mut self, + cx: &mut MutableAppContext, + ) -> Option>>> { + self.app_will_quit(cx) + } } pub trait AnyView { fn as_any(&self) -> &dyn Any; fn as_any_mut(&mut self) -> &mut dyn Any; fn release(&mut self, cx: &mut MutableAppContext); + fn app_will_quit( + 
&mut self, + cx: &mut MutableAppContext, + ) -> Option>>>; fn ui_name(&self) -> &'static str; fn render<'a>( &mut self, @@ -1951,6 +2006,13 @@ where self.release(cx); } + fn app_will_quit( + &mut self, + cx: &mut MutableAppContext, + ) -> Option>>> { + self.app_will_quit(cx) + } + fn ui_name(&self) -> &'static str { T::ui_name() } diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index 5a2f244aa197fcec0f8eec756c2d9458cb0252f0..c5f976e6f53363143348f56871df13a0bd67672a 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -40,11 +40,9 @@ pub enum Foreground { pub enum Background { Deterministic { executor: Arc, - critical_tasks: Mutex>>, }, Production { executor: Arc>, - critical_tasks: Mutex>>, _stop: channel::Sender<()>, }, } @@ -504,7 +502,6 @@ impl Background { Self::Production { executor, - critical_tasks: Default::default(), _stop: stop.0, } } @@ -526,31 +523,6 @@ impl Background { Task::send(any_task) } - pub fn spawn_critical(&self, future: F) - where - T: 'static + Send, - F: Send + Future + 'static, - { - let task = self.spawn(async move { - future.await; - }); - match self { - Self::Production { critical_tasks, .. } - | Self::Deterministic { critical_tasks, .. } => critical_tasks.lock().push(task), - } - } - - pub fn block_on_critical_tasks(&self, timeout: Duration) -> bool { - match self { - Background::Production { critical_tasks, .. } - | Self::Deterministic { critical_tasks, .. } => { - let tasks = mem::take(&mut *critical_tasks.lock()); - self.block_with_timeout(timeout, futures::future::join_all(tasks)) - .is_ok() - } - } - } - pub fn block_with_timeout( &self, timeout: Duration, @@ -617,10 +589,7 @@ pub fn deterministic(seed: u64) -> (Rc, Arc) { let executor = Arc::new(Deterministic::new(seed)); ( Rc::new(Foreground::Deterministic(executor.clone())), - Arc::new(Background::Deterministic { - executor, - critical_tasks: Default::default(), - }), + Arc::new(Background::Deterministic { executor }), ) } diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index aa8df7b71dddeec0d248da9714484cce35d2534a..81c9431093f447cd35aa26a558e53a8707a9bc32 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -33,13 +33,13 @@ type ResponseHandler = Box)>; pub struct LanguageServer { next_id: AtomicUsize, - outbound_tx: channel::Sender>, + outbound_tx: RwLock>>>, notification_handlers: Arc>>, response_handlers: Arc>>, executor: Arc, - io_tasks: Option<(Task>, Task>)>, + io_tasks: Mutex>, Task>)>>, initialized: barrier::Receiver, - output_done_rx: Option, + output_done_rx: Mutex>, } pub struct Subscription { @@ -198,11 +198,11 @@ impl LanguageServer { notification_handlers, response_handlers, next_id: Default::default(), - outbound_tx, + outbound_tx: RwLock::new(Some(outbound_tx)), executor: executor.clone(), - io_tasks: Some((input_task, output_task)), + io_tasks: Mutex::new(Some((input_task, output_task))), initialized: initialized_rx, - output_done_rx: Some(output_done_rx), + output_done_rx: Mutex::new(Some(output_done_rx)), }); let root_uri = @@ -240,20 +240,45 @@ impl LanguageServer { }; let this = self.clone(); - Self::request_internal::( + let request = Self::request_internal::( &this.next_id, &this.response_handlers, - &this.outbound_tx, + this.outbound_tx.read().as_ref(), params, - ) - .await?; + ); + request.await?; Self::notify_internal::( - &this.outbound_tx, + this.outbound_tx.read().as_ref(), lsp_types::InitializedParams {}, )?; Ok(()) } + pub fn shutdown(&self) -> Option>> { + if let Some(tasks) = 
self.io_tasks.lock().take() { + let response_handlers = self.response_handlers.clone(); + let outbound_tx = self.outbound_tx.write().take(); + let next_id = AtomicUsize::new(self.next_id.load(SeqCst)); + let mut output_done = self.output_done_rx.lock().take().unwrap(); + Some(async move { + Self::request_internal::( + &next_id, + &response_handlers, + outbound_tx.as_ref(), + (), + ) + .await?; + Self::notify_internal::(outbound_tx.as_ref(), ())?; + drop(outbound_tx); + output_done.recv().await; + drop(tasks); + Ok(()) + }) + } else { + None + } + } + pub fn on_notification(&self, f: F) -> Subscription where T: lsp_types::notification::Notification, @@ -293,7 +318,7 @@ impl LanguageServer { Self::request_internal::( &this.next_id, &this.response_handlers, - &this.outbound_tx, + this.outbound_tx.read().as_ref(), params, ) .await @@ -303,9 +328,9 @@ impl LanguageServer { fn request_internal( next_id: &AtomicUsize, response_handlers: &Mutex>, - outbound_tx: &channel::Sender>, + outbound_tx: Option<&channel::Sender>>, params: T::Params, - ) -> impl Future> + ) -> impl 'static + Future> where T::Result: 'static + Send, { @@ -332,7 +357,15 @@ impl LanguageServer { }), ); - let send = outbound_tx.try_send(message); + let send = outbound_tx + .as_ref() + .ok_or_else(|| { + anyhow!("tried to send a request to a language server that has been shut down") + }) + .and_then(|outbound_tx| { + outbound_tx.try_send(message)?; + Ok(()) + }); async move { send?; rx.recv().await.unwrap() @@ -346,13 +379,13 @@ impl LanguageServer { let this = self.clone(); async move { this.initialized.clone().recv().await; - Self::notify_internal::(&this.outbound_tx, params)?; + Self::notify_internal::(this.outbound_tx.read().as_ref(), params)?; Ok(()) } } fn notify_internal( - outbound_tx: &channel::Sender>, + outbound_tx: Option<&channel::Sender>>, params: T::Params, ) -> Result<()> { let message = serde_json::to_vec(&Notification { @@ -361,6 +394,9 @@ impl LanguageServer { params, }) .unwrap(); + let outbound_tx = outbound_tx + .as_ref() + .ok_or_else(|| anyhow!("tried to notify a language server that has been shut down"))?; outbound_tx.try_send(message)?; Ok(()) } @@ -368,28 +404,9 @@ impl LanguageServer { impl Drop for LanguageServer { fn drop(&mut self) { - let tasks = self.io_tasks.take(); - let response_handlers = self.response_handlers.clone(); - let outbound_tx = self.outbound_tx.clone(); - let next_id = AtomicUsize::new(self.next_id.load(SeqCst)); - let mut output_done = self.output_done_rx.take().unwrap(); - self.executor.spawn_critical( - async move { - Self::request_internal::( - &next_id, - &response_handlers, - &outbound_tx, - (), - ) - .await?; - Self::notify_internal::(&outbound_tx, ())?; - drop(outbound_tx); - output_done.recv().await; - drop(tasks); - Ok(()) - } - .log_err(), - ) + if let Some(shutdown) = self.shutdown() { + self.executor.spawn(shutdown).detach(); + } } } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 07b291c594e8824f0761d5299c211574ea70122d..54f76a0d7086f70327d205a02a8f0cbb1ede0521 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -20,6 +20,7 @@ use postage::{ prelude::{Sink as _, Stream as _}, watch, }; + use serde::Deserialize; use smol::channel::{self, Sender}; use std::{ @@ -90,6 +91,25 @@ impl Entity for Worktree { } } } + + fn app_will_quit( + &mut self, + _: &mut MutableAppContext, + ) -> Option>>> { + use futures::FutureExt; + + if let Some(server) = self.language_server() { + if let Some(shutdown) = 
server.shutdown() { + return Some( + async move { + shutdown.await.log_err(); + } + .boxed(), + ); + } + } + None + } } impl Worktree { diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 0fb111693ae2927221487efe291e0214554b8a48..d2357ea3c37ea2a0476957918eee27f001888972 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -7,7 +7,7 @@ use gpui::AssetSource; use log::LevelFilter; use parking_lot::Mutex; use simplelog::SimpleLogger; -use std::{fs, path::PathBuf, sync::Arc, time::Duration}; +use std::{fs, path::PathBuf, sync::Arc}; use theme::ThemeRegistry; use workspace::{self, settings, OpenNew}; use zed::{self, assets::Assets, fs::RealFs, language, menus, AppState, OpenParams, OpenPaths}; @@ -29,16 +29,7 @@ fn main() { let languages = Arc::new(language::build_language_registry()); languages.set_theme(&settings.borrow().theme.editor.syntax); - app.on_quit(|cx| { - cx.remove_all_windows(); - let did_finish = cx - .background() - .block_on_critical_tasks(Duration::from_millis(100)); - if !did_finish { - log::error!("timed out on quit before critical tasks finished"); - } - }) - .run(move |cx| { + app.run(move |cx| { let client = client::Client::new(); let http = http::client(); let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx)); From 1995bd89a6ddd038cf15191612bf68877418bea5 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 2 Nov 2021 12:29:03 -0700 Subject: [PATCH 55/61] Deserialize buffer's diagnostics Co-Authored-By: Nathan Sobo --- crates/language/src/lib.rs | 8 +++++++- crates/project/src/worktree.rs | 2 +- script/sqlx | 2 +- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 306f08f3b8641ce600ccab9482b10843a96e99b9..dad8af83df27e93c68cd979485eb485d13fb5250 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -256,6 +256,7 @@ impl Buffer { replica_id: ReplicaId, message: proto::Buffer, file: Option>, + cx: &mut ModelContext, ) -> Result { let mut buffer = buffer::Buffer::new(replica_id, message.id, History::new(message.content.into())); @@ -268,7 +269,11 @@ impl Buffer { let set = proto::deserialize_selection_set(set); buffer.add_raw_selection_set(set.id, set); } - Ok(Self::build(buffer, file)) + let mut this = Self::build(buffer, file); + if let Some(diagnostics) = message.diagnostics { + this.apply_diagnostic_update(proto::deserialize_diagnostics(diagnostics), cx); + } + Ok(this) } pub fn to_proto(&self) -> proto::Buffer { @@ -1351,6 +1356,7 @@ impl Buffer { cx: &mut ModelContext, ) { self.diagnostics = diagnostics; + self.diagnostics_update_count += 1; cx.notify(); } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 54f76a0d7086f70327d205a02a8f0cbb1ede0521..1862a342aa80b6ffa41d20eee5d894f2ef093694 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1391,7 +1391,7 @@ impl RemoteWorktree { let remote_buffer = response.buffer.ok_or_else(|| anyhow!("empty buffer"))?; let buffer_id = remote_buffer.id as usize; let buffer = cx.add_model(|cx| { - Buffer::from_proto(replica_id, remote_buffer, Some(Box::new(file))) + Buffer::from_proto(replica_id, remote_buffer, Some(Box::new(file)), cx) .unwrap() .with_language(language, None, cx) }); diff --git a/script/sqlx b/script/sqlx index 590aad67ebeb79734884d70096a42688b8d0555d..3d3ea00cc44260d2fa8b693105a65c0e45f22cf2 100755 --- a/script/sqlx +++ b/script/sqlx @@ -5,7 +5,7 @@ set -e # Install sqlx-cli if needed [[ "$(sqlx 
--version)" == "sqlx-cli 0.5.7" ]] || cargo install sqlx-cli --version 0.5.7 -cd server +cd crates/server # Export contents of .env.toml eval "$(cargo run --bin dotenv)" From 89392cd23d5f2573a8087c8ae219e7cd4cdb05fe Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 2 Nov 2021 14:33:55 -0700 Subject: [PATCH 56/61] Avoid using worktree handle in File's path methods This avoids a circular model update that was happening when trying to retrieve the absolute path from a buffer's file while applying remote operations. --- crates/language/src/lib.rs | 16 +++--- crates/project/src/worktree.rs | 100 +++++++++++++++++++-------------- crates/workspace/src/items.rs | 6 +- 3 files changed, 68 insertions(+), 54 deletions(-) diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index dad8af83df27e93c68cd979485eb485d13fb5250..735bd0f3f080000411e3f2dd26b44e024374013b 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -127,15 +127,15 @@ pub trait File { fn path(&self) -> &Arc; /// Returns the absolute path of this file. - fn abs_path(&self, cx: &AppContext) -> Option; + fn abs_path(&self) -> Option; /// Returns the path of this file relative to the worktree's parent directory (this means it /// includes the name of the worktree's root folder). - fn full_path(&self, cx: &AppContext) -> PathBuf; + fn full_path(&self) -> PathBuf; /// Returns the last component of this handle's absolute path. If this handle refers to the root /// of its worktree, then this method will return the name of the worktree itself. - fn file_name<'a>(&'a self, cx: &'a AppContext) -> Option; + fn file_name(&self) -> Option; fn is_deleted(&self) -> bool; @@ -455,7 +455,7 @@ impl Buffer { }; self.reparse(cx); - self.update_language_server(cx); + self.update_language_server(); } pub fn did_save( @@ -479,7 +479,7 @@ impl Buffer { lsp::DidSaveTextDocumentParams { text_document: lsp::TextDocumentIdentifier { uri: lsp::Url::from_file_path( - self.file.as_ref().unwrap().abs_path(cx).unwrap(), + self.file.as_ref().unwrap().abs_path().unwrap(), ) .unwrap(), }, @@ -1121,7 +1121,7 @@ impl Buffer { Ok(()) } - fn update_language_server(&mut self, cx: &AppContext) { + fn update_language_server(&mut self) { let language_server = if let Some(language_server) = self.language_server.as_mut() { language_server } else { @@ -1131,7 +1131,7 @@ impl Buffer { .file .as_ref() .map_or(Path::new("/").to_path_buf(), |file| { - file.abs_path(cx).unwrap() + file.abs_path().unwrap() }); let version = post_inc(&mut language_server.next_version); @@ -1266,7 +1266,7 @@ impl Buffer { } self.reparse(cx); - self.update_language_server(cx); + self.update_language_server(); cx.emit(Event::Edited); if !was_dirty { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 1862a342aa80b6ffa41d20eee5d894f2ef093694..d8ae0e3aa86cd8832a74e56a221f08703354181a 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -616,6 +616,8 @@ impl Worktree { } }; + let local = self.as_local().is_some(); + let worktree_path = self.abs_path.clone(); let worktree_handle = cx.handle(); let mut buffers_to_delete = Vec::new(); for (buffer_id, buffer) in open_buffers { @@ -627,6 +629,8 @@ impl Worktree { .and_then(|entry_id| self.entry_for_id(entry_id)) { File { + is_local: local, + worktree_path: worktree_path.clone(), entry_id: Some(entry.id), mtime: entry.mtime, path: entry.path.clone(), @@ -634,6 +638,8 @@ impl Worktree { } } else if let Some(entry) = self.entry_for_path(old_file.path().as_ref()) { 
File { + is_local: local, + worktree_path: worktree_path.clone(), entry_id: Some(entry.id), mtime: entry.mtime, path: entry.path.clone(), @@ -641,6 +647,8 @@ impl Worktree { } } else { File { + is_local: local, + worktree_path: worktree_path.clone(), entry_id: None, path: old_file.path().clone(), mtime: old_file.mtime(), @@ -976,12 +984,9 @@ impl LocalWorktree { let (file, contents) = this .update(&mut cx, |this, cx| this.as_local().unwrap().load(&path, cx)) .await?; - let language = this.read_with(&cx, |this, cx| { + let language = this.read_with(&cx, |this, _| { use language::File; - - this.languages() - .select_language(file.full_path(cx)) - .cloned() + this.languages().select_language(file.full_path()).cloned() }); let diagnostics = this.update(&mut cx, |this, _| { this.as_local_mut() @@ -1144,6 +1149,7 @@ impl LocalWorktree { fn load(&self, path: &Path, cx: &mut ModelContext) -> Task> { let handle = cx.handle(); let path = Arc::from(path); + let worktree_path = self.abs_path.clone(); let abs_path = self.absolutize(&path); let background_snapshot = self.background_snapshot.clone(); let fs = self.fs.clone(); @@ -1152,7 +1158,17 @@ impl LocalWorktree { // Eagerly populate the snapshot with an updated entry for the loaded file let entry = refresh_entry(fs.as_ref(), &background_snapshot, path, &abs_path).await?; this.update(&mut cx, |this, cx| this.poll_snapshot(cx)); - Ok((File::new(entry.id, handle, entry.path, entry.mtime), text)) + Ok(( + File { + entry_id: Some(entry.id), + worktree: handle, + worktree_path, + path: entry.path, + mtime: entry.mtime, + is_local: true, + }, + text, + )) }) } @@ -1167,11 +1183,16 @@ impl LocalWorktree { cx.spawn(|this, mut cx| async move { let entry = save.await?; this.update(&mut cx, |this, cx| { - this.as_local_mut() - .unwrap() - .open_buffers - .insert(buffer.id(), buffer.downgrade()); - Ok(File::new(entry.id, cx.handle(), entry.path, entry.mtime)) + let this = this.as_local_mut().unwrap(); + this.open_buffers.insert(buffer.id(), buffer.downgrade()); + Ok(File { + entry_id: Some(entry.id), + worktree: cx.handle(), + worktree_path: this.abs_path.clone(), + path: entry.path, + mtime: entry.mtime, + is_local: true, + }) }) }) } @@ -1360,6 +1381,7 @@ impl RemoteWorktree { let rpc = self.client.clone(); let replica_id = self.replica_id; let remote_worktree_id = self.remote_id; + let root_path = self.snapshot.abs_path.clone(); let path = path.to_string_lossy().to_string(); cx.spawn_weak(|this, mut cx| async move { if let Some(existing_buffer) = existing_buffer { @@ -1380,13 +1402,17 @@ impl RemoteWorktree { let this = this .upgrade(&cx) .ok_or_else(|| anyhow!("worktree was closed"))?; - let file = File::new(entry.id, this.clone(), entry.path, entry.mtime); - let language = this.read_with(&cx, |this, cx| { + let file = File { + entry_id: Some(entry.id), + worktree: this.clone(), + worktree_path: root_path, + path: entry.path, + mtime: entry.mtime, + is_local: false, + }; + let language = this.read_with(&cx, |this, _| { use language::File; - - this.languages() - .select_language(file.full_path(cx)) - .cloned() + this.languages().select_language(file.full_path()).cloned() }); let remote_buffer = response.buffer.ok_or_else(|| anyhow!("empty buffer"))?; let buffer_id = remote_buffer.id as usize; @@ -1868,24 +1894,10 @@ impl fmt::Debug for Snapshot { pub struct File { entry_id: Option, worktree: ModelHandle, + worktree_path: Arc, pub path: Arc, pub mtime: SystemTime, -} - -impl File { - pub fn new( - entry_id: usize, - worktree: ModelHandle, - path: Arc, - 
mtime: SystemTime, - ) -> Self { - Self { - entry_id: Some(entry_id), - worktree, - path, - mtime, - } - } + is_local: bool, } impl language::File for File { @@ -1905,27 +1917,29 @@ impl language::File for File { &self.path } - fn abs_path(&self, cx: &AppContext) -> Option { - let worktree = self.worktree.read(cx); - worktree - .as_local() - .map(|worktree| worktree.absolutize(&self.path)) + fn abs_path(&self) -> Option { + if self.is_local { + Some(self.worktree_path.join(&self.path)) + } else { + None + } } - fn full_path(&self, cx: &AppContext) -> PathBuf { - let worktree = self.worktree.read(cx); + fn full_path(&self) -> PathBuf { let mut full_path = PathBuf::new(); - full_path.push(worktree.root_name()); + if let Some(worktree_name) = self.worktree_path.file_name() { + full_path.push(worktree_name); + } full_path.push(&self.path); full_path } /// Returns the last component of this handle's absolute path. If this handle refers to the root /// of its worktree, then this method will return the name of the worktree itself. - fn file_name<'a>(&'a self, cx: &'a AppContext) -> Option { + fn file_name<'a>(&'a self) -> Option { self.path .file_name() - .or_else(|| Some(OsStr::new(self.worktree.read(cx).root_name()))) + .or_else(|| self.worktree_path.file_name()) .map(Into::into) } diff --git a/crates/workspace/src/items.rs b/crates/workspace/src/items.rs index d1275aee7c5f60204785c087bad23ae8a835eacf..e9411e93cc1a93fd5e42ba20e21c8a8aa57210c8 100644 --- a/crates/workspace/src/items.rs +++ b/crates/workspace/src/items.rs @@ -77,7 +77,7 @@ impl ItemView for Editor { .buffer() .read(cx) .file() - .and_then(|file| file.file_name(cx)); + .and_then(|file| file.file_name()); if let Some(name) = filename { name.to_string_lossy().into() } else { @@ -127,8 +127,8 @@ impl ItemView for Editor { cx.spawn(|buffer, mut cx| async move { save_as.await.map(|new_file| { - let (language, language_server) = worktree.read_with(&cx, |worktree, cx| { - let language = worktree.languages().select_language(new_file.full_path(cx)); + let (language, language_server) = worktree.read_with(&cx, |worktree, _| { + let language = worktree.languages().select_language(new_file.full_path()); let language_server = worktree.language_server(); (language.cloned(), language_server.cloned()) }); From a66b81d60a5cadb4131f6078ad4657ef5248159c Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 2 Nov 2021 14:57:24 -0700 Subject: [PATCH 57/61] Add an integration test for replicating buffer's diagnostics --- Cargo.lock | 1 + crates/server/src/rpc.rs | 111 ++++++++++++++++++++++++++++++++++++- crates/zed/Cargo.toml | 3 + crates/zed/src/language.rs | 2 +- crates/zed/src/lib.rs | 1 + 5 files changed, 116 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b0a7d6988c03a12cb637cd843338051343fe4ac8..530a2aff6e69a21cebd6d6dc3e7fef96062d86ee 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6256,6 +6256,7 @@ dependencies = [ "libc", "log", "log-panics", + "lsp", "num_cpus", "parking_lot", "people_panel", diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 81c2d2af275f5a4c7dc0942ab804f1c042f54217..2139b5560ebde1b37a47490437f4c1885c5da947 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -982,7 +982,8 @@ mod tests { }, editor::{Editor, EditorSettings, Input}, fs::{FakeFs, Fs as _}, - language::LanguageRegistry, + language::{Diagnostic, LanguageRegistry, Point}, + lsp, people_panel::JoinWorktree, project::{ProjectPath, Worktree}, workspace::{Workspace, WorkspaceParams}, @@ -1602,6 +1603,114 @@ 
mod tests { .await; } + #[gpui::test] + async fn test_collaborating_with_diagnostics( + mut cx_a: TestAppContext, + mut cx_b: TestAppContext, + ) { + cx_a.foreground().forbid_parking(); + let lang_registry = Arc::new(LanguageRegistry::new()); + + let (language_server, mut fake_lsp) = lsp::LanguageServer::fake(cx_a.background()).await; + + // Connect to a server as 2 clients. + let mut server = TestServer::start().await; + let (client_a, _) = server.create_client(&mut cx_a, "user_a").await; + let (client_b, _) = server.create_client(&mut cx_a, "user_b").await; + + // Share a local worktree as client A + let fs = Arc::new(FakeFs::new()); + fs.insert_tree( + "/a", + json!({ + ".zed.toml": r#"collaborators = ["user_b"]"#, + "a.txt": "one two three", + "b.txt": "b-contents", + }), + ) + .await; + let worktree_a = Worktree::open_local( + client_a.clone(), + "/a".as_ref(), + fs, + lang_registry.clone(), + Some(language_server), + &mut cx_a.to_async(), + ) + .await + .unwrap(); + worktree_a + .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + let worktree_id = worktree_a + .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx)) + .await + .unwrap(); + + // Simulate a language server reporting errors for a file. + fake_lsp + .notify::(lsp::PublishDiagnosticsParams { + uri: lsp::Url::from_file_path("/a/a.txt").unwrap(), + version: None, + diagnostics: vec![ + lsp::Diagnostic { + severity: Some(lsp::DiagnosticSeverity::ERROR), + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 3)), + message: "message 1".to_string(), + ..Default::default() + }, + lsp::Diagnostic { + severity: Some(lsp::DiagnosticSeverity::WARNING), + range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 13)), + message: "message 2".to_string(), + ..Default::default() + }, + ], + }) + .await; + + // Join the worktree as client B. + let worktree_b = Worktree::open_remote( + client_b.clone(), + worktree_id, + lang_registry.clone(), + &mut cx_b.to_async(), + ) + .await + .unwrap(); + + // Open the file with the errors. 
+ let buffer_b = cx_b + .background() + .spawn(worktree_b.update(&mut cx_b, |worktree, cx| worktree.open_buffer("a.txt", cx))) + .await + .unwrap(); + + buffer_b.read_with(&cx_b, |buffer, _| { + assert_eq!( + buffer + .diagnostics_in_range(0..buffer.len()) + .collect::>(), + &[ + ( + Point::new(0, 0)..Point::new(0, 3), + &Diagnostic { + message: "message 1".to_string(), + severity: lsp::DiagnosticSeverity::ERROR, + } + ), + ( + Point { row: 0, column: 8 }..Point { row: 0, column: 13 }, + &Diagnostic { + severity: lsp::DiagnosticSeverity::WARNING, + message: "message 2".to_string() + } + ) + ] + ); + }); + } + #[gpui::test] async fn test_basic_chat(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { cx_a.foreground().forbid_parking(); diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 1a536ef73f5b7db5df85ffb2cad53e32e52ae7a6..3d454c89a706a57d4e94dd7d35e6915c8bbf828c 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -20,6 +20,7 @@ test-support = [ "editor/test-support", "gpui/test-support", "language/test-support", + "lsp/test-support", "project/test-support", "rpc/test-support", "tempdir", @@ -37,6 +38,7 @@ editor = { path = "../editor" } file_finder = { path = "../file_finder" } gpui = { path = "../gpui" } language = { path = "../language" } +lsp = { path = "../lsp" } people_panel = { path = "../people_panel" } project = { path = "../project" } project_panel = { path = "../project_panel" } @@ -90,6 +92,7 @@ buffer = { path = "../buffer", features = ["test-support"] } editor = { path = "../editor", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } language = { path = "../language", features = ["test-support"] } +lsp = { path = "../lsp", features = ["test-support"] } project = { path = "../project", features = ["test-support"] } rpc = { path = "../rpc", features = ["test-support"] } client = { path = "../client", features = ["test-support"] } diff --git a/crates/zed/src/language.rs b/crates/zed/src/language.rs index a82f7a2cbb4c5c681b1fb9d57490fd53fc92afb7..2c60ddd92c81d458ce7267a76844a97de131c41a 100644 --- a/crates/zed/src/language.rs +++ b/crates/zed/src/language.rs @@ -1,4 +1,4 @@ -pub use language::{Language, LanguageRegistry}; +pub use language::{Buffer, Diagnostic, Language, LanguageRegistry, Point}; use rust_embed::RustEmbed; use std::borrow::Cow; use std::{str, sync::Arc}; diff --git a/crates/zed/src/lib.rs b/crates/zed/src/lib.rs index cec9e29aa817b51b2c9de9c2d67d3069b69560be..5f5a4b17b13003dcefe37f7f6a4a67ddb025abfa 100644 --- a/crates/zed/src/lib.rs +++ b/crates/zed/src/lib.rs @@ -15,6 +15,7 @@ use gpui::{ platform::WindowOptions, ModelHandle, MutableAppContext, PathPromptOptions, Task, ViewContext, }; +pub use lsp; use parking_lot::Mutex; pub use people_panel; use people_panel::PeoplePanel; From 8d3f42de52753d8d317d4c0fa5dd4959e223e055 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 2 Nov 2021 17:41:01 -0700 Subject: [PATCH 58/61] Start language servers based on buffers' languages Co-Authored-By: Nathan Sobo --- Cargo.lock | 1 + crates/language/Cargo.toml | 8 +- crates/language/src/language.rs | 31 ++++++- crates/language/src/lib.rs | 5 +- crates/language/src/tests.rs | 8 +- crates/lsp/src/lib.rs | 7 +- crates/project/Cargo.toml | 1 + crates/project/src/lib.rs | 9 +- crates/project/src/worktree.rs | 159 +++++++++++++++++++------------- crates/server/src/rpc.rs | 59 +++++++----- crates/workspace/src/items.rs | 14 ++- crates/zed/src/language.rs | 2 +- 12 files changed, 197 insertions(+), 107 deletions(-) 
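A rough sketch of the idea behind this patch, using hypothetical simplified types instead of the real `LanguageRegistry` and `lsp::LanguageServer`: pick the language from the buffer's path suffix, then start and cache at most one server per language in the worktree.

```rust
use std::collections::HashMap;

#[derive(Clone, PartialEq, Eq, Hash)]
struct LanguageName(String);

// Hypothetical stand-in for a spawned lsp::LanguageServer.
struct LanguageServer {
    binary: String,
}

#[derive(Default)]
struct Worktree {
    language_servers: HashMap<LanguageName, LanguageServer>,
}

impl Worktree {
    // Stand-in for LanguageRegistry::select_language: map a path suffix to a language.
    fn language_for_path(path: &str) -> Option<(LanguageName, &'static str)> {
        if path.ends_with(".rs") {
            Some((LanguageName("Rust".into()), "rust-analyzer"))
        } else {
            None
        }
    }

    // Start (or reuse) the server for the buffer's language when a buffer is opened.
    fn ensure_language_server(&mut self, path: &str) -> Option<&LanguageServer> {
        let (name, binary) = Self::language_for_path(path)?;
        let server = self.language_servers.entry(name).or_insert_with(|| {
            // The real code spawns the binary; this sketch only records it.
            LanguageServer {
                binary: binary.to_string(),
            }
        });
        Some(&*server)
    }
}

fn main() {
    let mut worktree = Worktree::default();
    worktree.ensure_language_server("src/main.rs");
    worktree.ensure_language_server("src/lib.rs");
    // Two Rust buffers share a single rust-analyzer instance.
    assert_eq!(worktree.language_servers.len(), 1);
}
```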
diff --git a/Cargo.lock b/Cargo.lock index 530a2aff6e69a21cebd6d6dc3e7fef96062d86ee..3d70d53b05eddc194203581b3bbbcb28d5ab4531 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3831,6 +3831,7 @@ dependencies = [ "rpc", "serde 1.0.125", "serde_json 1.0.64", + "simplelog", "smol", "sum_tree", "tempdir", diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index b0a384e16a7667323bbbac702956b0b5992b7011..39423268e7b9489c12f319c746434a5e7f22bb8f 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -4,7 +4,12 @@ version = "0.1.0" edition = "2018" [features] -test-support = ["rand", "buffer/test-support", "lsp/test-support"] +test-support = [ + "rand", + "buffer/test-support", + "lsp/test-support", + "tree-sitter-rust", +] [dependencies] buffer = { path = "../buffer" } @@ -25,6 +30,7 @@ serde = { version = "1", features = ["derive"] } similar = "1.3" smol = "1.2" tree-sitter = "0.19.5" +tree-sitter-rust = { version = "0.19.0", optional = true } [dev-dependencies] buffer = { path = "../buffer", features = ["test-support"] } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 7b42f5dcbc041dadf3e5907f87c406bc177a690d..1f949961237627d5d0809513e734375908318de8 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1,6 +1,7 @@ use crate::HighlightMap; use anyhow::Result; -use gpui::AppContext; +use gpui::{executor::Background, AppContext}; +use lsp::LanguageServer; use parking_lot::Mutex; use serde::Deserialize; use std::{collections::HashSet, path::Path, str, sync::Arc}; @@ -16,10 +17,13 @@ pub struct LanguageConfig { pub language_server: Option, } -#[derive(Deserialize)] +#[derive(Default, Deserialize)] pub struct LanguageServerConfig { pub binary: String, pub disk_based_diagnostic_sources: HashSet, + #[cfg(any(test, feature = "test-support"))] + #[serde(skip)] + pub fake_server: Option<(Arc, Arc)>, } #[derive(Clone, Debug, Deserialize)] @@ -117,6 +121,12 @@ impl Language { cx: &AppContext, ) -> Result>> { if let Some(config) = &self.config.language_server { + #[cfg(any(test, feature = "test-support"))] + if let Some((server, started)) = &config.fake_server { + started.store(true, std::sync::atomic::Ordering::SeqCst); + return Ok(Some(server.clone())); + } + const ZED_BUNDLE: Option<&'static str> = option_env!("ZED_BUNDLE"); let binary_path = if ZED_BUNDLE.map_or(Ok(false), |b| b.parse())? 
{ cx.platform() @@ -151,6 +161,23 @@ impl Language { } } +#[cfg(any(test, feature = "test-support"))] +impl LanguageServerConfig { + pub async fn fake(executor: Arc) -> (Self, lsp::FakeLanguageServer) { + let (server, fake) = lsp::LanguageServer::fake(executor).await; + fake.started + .store(false, std::sync::atomic::Ordering::SeqCst); + let started = fake.started.clone(); + ( + Self { + fake_server: Some((server, started)), + ..Default::default() + }, + fake, + ) + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/language/src/lib.rs b/crates/language/src/lib.rs index 735bd0f3f080000411e3f2dd26b44e024374013b..893dc164a66d21f5b980f4b0f4c1075528a59d6c 100644 --- a/crates/language/src/lib.rs +++ b/crates/language/src/lib.rs @@ -6,7 +6,7 @@ mod tests; pub use self::{ highlight_map::{HighlightId, HighlightMap}, - language::{BracketPair, Language, LanguageConfig, LanguageRegistry}, + language::{BracketPair, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig}, }; use anyhow::{anyhow, Result}; pub use buffer::{Buffer as TextBuffer, Operation as _, *}; @@ -37,6 +37,9 @@ use std::{ use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; use util::{post_inc, TryFutureExt as _}; +#[cfg(any(test, feature = "test-support"))] +pub use tree_sitter_rust; + pub use lsp::DiagnosticSeverity; thread_local! { diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 5ab67362995437dc0e9c6e53c2ec97e25d7231de..8ee3beebf48d5f577136bea1317ce7017ed9f0e3 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -1,7 +1,6 @@ use super::*; -use crate::language::LanguageServerConfig; use gpui::{ModelHandle, MutableAppContext}; -use std::{iter::FromIterator, rc::Rc}; +use std::rc::Rc; use unindent::Unindent as _; #[gpui::test] @@ -676,10 +675,7 @@ fn rust_lang() -> Option> { LanguageConfig { name: "Rust".to_string(), path_suffixes: vec!["rs".to_string()], - language_server: Some(LanguageServerConfig { - binary: "rust-analyzer".to_string(), - disk_based_diagnostic_sources: HashSet::from_iter(vec!["rustc".to_string()]), - }), + language_server: None, ..Default::default() }, tree_sitter_rust::language(), diff --git a/crates/lsp/src/lib.rs b/crates/lsp/src/lib.rs index 81c9431093f447cd35aa26a558e53a8707a9bc32..ef5435d80c59491f7c271311f6e8a3847a53bab6 100644 --- a/crates/lsp/src/lib.rs +++ b/crates/lsp/src/lib.rs @@ -16,7 +16,7 @@ use std::{ io::Write, str::FromStr, sync::{ - atomic::{AtomicUsize, Ordering::SeqCst}, + atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst}, Arc, }, }; @@ -427,6 +427,7 @@ pub struct FakeLanguageServer { buffer: Vec, stdin: smol::io::BufReader, stdout: smol::io::BufWriter, + pub started: Arc, } #[cfg(any(test, feature = "test-support"))] @@ -444,6 +445,7 @@ impl LanguageServer { stdin: smol::io::BufReader::new(stdin.1), stdout: smol::io::BufWriter::new(stdout.0), buffer: Vec::new(), + started: Arc::new(AtomicBool::new(true)), }; let server = Self::new_internal(stdin.0, stdout.1, Path::new("/"), executor).unwrap(); @@ -460,6 +462,9 @@ impl LanguageServer { #[cfg(any(test, feature = "test-support"))] impl FakeLanguageServer { pub async fn notify(&mut self, params: T::Params) { + if !self.started.load(std::sync::atomic::Ordering::SeqCst) { + panic!("can't simulate an LSP notification before the server has been started"); + } let message = serde_json::to_vec(&Notification { jsonrpc: JSON_RPC_VERSION, method: T::METHOD, diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index 
bb00812251b95651c67874d46940b6d673a103fd..b19516055e0e8fda8f39fa8c9f28ffee78320603 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -40,5 +40,6 @@ lsp = { path = "../lsp", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } rpc = { path = "../rpc", features = ["test-support"] } rand = "0.8.3" +simplelog = "0.9" tempdir = { version = "0.3.7" } unindent = "0.1.7" diff --git a/crates/project/src/lib.rs b/crates/project/src/lib.rs index 458e7bf3637e342218a276ebf5839d73e64ff7b8..3e129c8fb8a1d67feb7b7abdebb1bc89c0c5c1fe 100644 --- a/crates/project/src/lib.rs +++ b/crates/project/src/lib.rs @@ -12,7 +12,7 @@ use std::{ path::Path, sync::{atomic::AtomicBool, Arc}, }; -use util::{ResultExt, TryFutureExt as _}; +use util::TryFutureExt as _; pub use fs::*; pub use worktree::*; @@ -73,13 +73,8 @@ impl Project { let rpc = self.client.clone(); let languages = self.languages.clone(); let path = Arc::from(abs_path); - let language_server = languages - .get_language("Rust") - .map(|language| language.start_server(&path, cx)); cx.spawn(|this, mut cx| async move { - let language_server = language_server.and_then(|language| language.log_err().flatten()); - let worktree = - Worktree::open_local(rpc, path, fs, languages, language_server, &mut cx).await?; + let worktree = Worktree::open_local(rpc, path, fs, languages, &mut cx).await?; this.update(&mut cx, |this, cx| { this.add_worktree(worktree.clone(), cx); }); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index d8ae0e3aa86cd8832a74e56a221f08703354181a..13f33bfdb14e0d84d7950ebf72f6ee9f22d542ac 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -12,7 +12,7 @@ use gpui::{ executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle, }; -use language::{Buffer, LanguageRegistry, Operation, Rope}; +use language::{Buffer, Language, LanguageRegistry, Operation, Rope}; use lazy_static::lazy_static; use lsp::LanguageServer; use parking_lot::Mutex; @@ -98,17 +98,21 @@ impl Entity for Worktree { ) -> Option>>> { use futures::FutureExt; - if let Some(server) = self.language_server() { - if let Some(shutdown) = server.shutdown() { - return Some( - async move { - shutdown.await.log_err(); - } - .boxed(), - ); - } + if let Self::Local(worktree) = self { + let shutdown_futures = worktree + .language_servers + .drain() + .filter_map(|(_, server)| server.shutdown()) + .collect::>(); + Some( + async move { + futures::future::join_all(shutdown_futures).await; + } + .boxed(), + ) + } else { + None } - None } } @@ -118,11 +122,10 @@ impl Worktree { path: impl Into>, fs: Arc, languages: Arc, - language_server: Option>, cx: &mut AsyncAppContext, ) -> Result> { let (tree, scan_states_tx) = - LocalWorktree::new(rpc, path, fs.clone(), languages, language_server, cx).await?; + LocalWorktree::new(rpc, path, fs.clone(), languages, cx).await?; tree.update(cx, |tree, cx| { let tree = tree.as_local_mut().unwrap(); let abs_path = tree.snapshot.abs_path.clone(); @@ -315,13 +318,6 @@ impl Worktree { } } - pub fn language_server(&self) -> Option<&Arc> { - match self { - Worktree::Local(worktree) => worktree.language_server.as_ref(), - Worktree::Remote(_) => None, - } - } - pub fn handle_add_peer( &mut self, envelope: TypedEnvelope, @@ -781,7 +777,7 @@ pub struct LocalWorktree { languages: Arc, rpc: Arc, fs: Arc, - language_server: Option>, + language_servers: HashMap>, } #[derive(Default, Deserialize)] 
@@ -795,7 +791,6 @@ impl LocalWorktree { path: impl Into>, fs: Arc, languages: Arc, - language_server: Option>, cx: &mut AsyncAppContext, ) -> Result<(ModelHandle, Sender)> { let abs_path = path.into(); @@ -896,7 +891,7 @@ impl LocalWorktree { languages, rpc, fs, - language_server, + language_servers: Default::default(), }; cx.spawn_weak(|this, mut cx| async move { @@ -926,33 +921,57 @@ impl LocalWorktree { }) .detach(); - if let Some(language_server) = &tree.language_server { - let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded(); - language_server - .on_notification::(move |params| { - smol::block_on(diagnostics_tx.send(params)).ok(); - }) - .detach(); - cx.spawn_weak(|this, mut cx| async move { - while let Ok(diagnostics) = diagnostics_rx.recv().await { - if let Some(handle) = cx.read(|cx| this.upgrade(cx)) { - handle.update(&mut cx, |this, cx| { - this.update_diagnostics(diagnostics, cx).log_err(); - }); - } else { - break; - } - } - }) - .detach(); - } - Worktree::Local(tree) }); Ok((tree, scan_states_tx)) } + pub fn languages(&self) -> &LanguageRegistry { + &self.languages + } + + pub fn ensure_language_server( + &mut self, + language: &Language, + cx: &mut ModelContext, + ) -> Option> { + if let Some(server) = self.language_servers.get(language.name()) { + return Some(server.clone()); + } + + if let Some(language_server) = language + .start_server(self.abs_path(), cx) + .log_err() + .flatten() + { + let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded(); + language_server + .on_notification::(move |params| { + smol::block_on(diagnostics_tx.send(params)).ok(); + }) + .detach(); + cx.spawn_weak(|this, mut cx| async move { + while let Ok(diagnostics) = diagnostics_rx.recv().await { + if let Some(handle) = cx.read(|cx| this.upgrade(cx)) { + handle.update(&mut cx, |this, cx| { + this.update_diagnostics(diagnostics, cx).log_err(); + }); + } else { + break; + } + } + }) + .detach(); + + self.language_servers + .insert(language.name().to_string(), language_server.clone()); + Some(language_server.clone()) + } else { + None + } + } + pub fn open_buffer( &mut self, path: &Path, @@ -976,7 +995,6 @@ impl LocalWorktree { }); let path = Arc::from(path); - let language_server = self.language_server.clone(); cx.spawn(|this, mut cx| async move { if let Some(existing_buffer) = existing_buffer { Ok(existing_buffer) @@ -988,11 +1006,14 @@ impl LocalWorktree { use language::File; this.languages().select_language(file.full_path()).cloned() }); - let diagnostics = this.update(&mut cx, |this, _| { - this.as_local_mut() - .unwrap() - .diagnostics - .remove(path.as_ref()) + let (diagnostics, language_server) = this.update(&mut cx, |this, cx| { + let this = this.as_local_mut().unwrap(); + ( + this.diagnostics.remove(path.as_ref()), + language + .as_ref() + .and_then(|language| this.ensure_language_server(language, cx)), + ) }); let buffer = cx.add_model(|cx| { let mut buffer = Buffer::from_file(0, contents, Box::new(file), cx); @@ -2925,7 +2946,8 @@ mod tests { use buffer::Point; use client::test::FakeServer; use fs::RealFs; - use language::Diagnostic; + use language::{tree_sitter_rust, LanguageServerConfig}; + use language::{Diagnostic, LanguageConfig}; use lsp::Url; use rand::prelude::*; use serde_json::json; @@ -2957,7 +2979,6 @@ mod tests { Arc::from(Path::new("/root")), Arc::new(fs), Default::default(), - None, &mut cx.to_async(), ) .await @@ -2990,7 +3011,6 @@ mod tests { dir.path(), Arc::new(RealFs), Default::default(), - None, &mut cx.to_async(), ) .await @@ -3021,7 +3041,6 @@ 
mod tests { file_path.clone(), Arc::new(RealFs), Default::default(), - None, &mut cx.to_async(), ) .await @@ -3068,7 +3087,6 @@ mod tests { dir.path(), Arc::new(RealFs), Default::default(), - None, &mut cx.to_async(), ) .await @@ -3229,7 +3247,6 @@ mod tests { dir.path(), Arc::new(RealFs), Default::default(), - None, &mut cx.to_async(), ) .await @@ -3284,7 +3301,6 @@ mod tests { "/path/to/the-dir".as_ref(), fs, Default::default(), - None, &mut cx.to_async(), ) .await @@ -3333,7 +3349,6 @@ mod tests { dir.path(), Arc::new(RealFs), Default::default(), - None, &mut cx.to_async(), ) .await @@ -3467,7 +3482,6 @@ mod tests { dir.path(), Arc::new(RealFs), Default::default(), - None, &mut cx.to_async(), ) .await @@ -3555,7 +3569,21 @@ mod tests { #[gpui::test] async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) { - let (language_server, mut fake_lsp) = LanguageServer::fake(cx.background()).await; + simplelog::SimpleLogger::init(log::LevelFilter::Info, Default::default()).unwrap(); + + let (language_server_config, mut fake_server) = + LanguageServerConfig::fake(cx.background()).await; + let mut languages = LanguageRegistry::new(); + languages.add(Arc::new(Language::new( + LanguageConfig { + name: "Rust".to_string(), + path_suffixes: vec!["rs".to_string()], + language_server: Some(language_server_config), + ..Default::default() + }, + tree_sitter_rust::language(), + ))); + let dir = temp_tree(json!({ "a.rs": "fn a() { A }", "b.rs": "const y: i32 = 1", @@ -3565,8 +3593,7 @@ mod tests { Client::new(), dir.path(), Arc::new(RealFs), - Default::default(), - Some(language_server), + Arc::new(languages), &mut cx.to_async(), ) .await @@ -3574,7 +3601,13 @@ mod tests { cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; - fake_lsp + // Cause worktree to start the fake language server + let _buffer = tree + .update(&mut cx, |tree, cx| tree.open_buffer("b.rs", cx)) + .await + .unwrap(); + + fake_server .notify::(lsp::PublishDiagnosticsParams { uri: Url::from_file_path(dir.path().join("a.rs")).unwrap(), version: None, diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 2139b5560ebde1b37a47490437f4c1885c5da947..aebebc589177910c7dd31a7992f3d7c084b39bfb 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -982,7 +982,10 @@ mod tests { }, editor::{Editor, EditorSettings, Input}, fs::{FakeFs, Fs as _}, - language::{Diagnostic, LanguageRegistry, Point}, + language::{ + tree_sitter_rust, Diagnostic, Language, LanguageConfig, LanguageRegistry, + LanguageServerConfig, Point, + }, lsp, people_panel::JoinWorktree, project::{ProjectPath, Worktree}, @@ -1017,7 +1020,6 @@ mod tests { "/a".as_ref(), fs, lang_registry.clone(), - None, &mut cx_a.to_async(), ) .await @@ -1126,7 +1128,6 @@ mod tests { "/a".as_ref(), fs, lang_registry.clone(), - None, &mut cx_a.to_async(), ) .await @@ -1219,7 +1220,6 @@ mod tests { "/a".as_ref(), fs.clone(), lang_registry.clone(), - None, &mut cx_a.to_async(), ) .await @@ -1356,7 +1356,6 @@ mod tests { "/dir".as_ref(), fs, lang_registry.clone(), - None, &mut cx_a.to_async(), ) .await @@ -1441,7 +1440,6 @@ mod tests { "/dir".as_ref(), fs, lang_registry.clone(), - None, &mut cx_a.to_async(), ) .await @@ -1508,7 +1506,6 @@ mod tests { "/dir".as_ref(), fs, lang_registry.clone(), - None, &mut cx_a.to_async(), ) .await @@ -1570,7 +1567,6 @@ mod tests { "/a".as_ref(), fs, lang_registry.clone(), - None, &mut cx_a.to_async(), ) .await @@ -1609,9 +1605,20 @@ mod tests { mut cx_b: TestAppContext, ) { 
cx_a.foreground().forbid_parking(); - let lang_registry = Arc::new(LanguageRegistry::new()); + let (language_server_config, mut fake_language_server) = + LanguageServerConfig::fake(cx_a.background()).await; + let mut lang_registry = LanguageRegistry::new(); + lang_registry.add(Arc::new(Language::new( + LanguageConfig { + name: "Rust".to_string(), + path_suffixes: vec!["rs".to_string()], + language_server: Some(language_server_config), + ..Default::default() + }, + tree_sitter_rust::language(), + ))); - let (language_server, mut fake_lsp) = lsp::LanguageServer::fake(cx_a.background()).await; + let lang_registry = Arc::new(lang_registry); // Connect to a server as 2 clients. let mut server = TestServer::start().await; @@ -1624,8 +1631,8 @@ mod tests { "/a", json!({ ".zed.toml": r#"collaborators = ["user_b"]"#, - "a.txt": "one two three", - "b.txt": "b-contents", + "a.rs": "let one = two", + "other.rs": "", }), ) .await; @@ -1634,7 +1641,6 @@ mod tests { "/a".as_ref(), fs, lang_registry.clone(), - Some(language_server), &mut cx_a.to_async(), ) .await @@ -1647,21 +1653,33 @@ mod tests { .await .unwrap(); + // Cause language server to start. + let _ = cx_a + .background() + .spawn(worktree_a.update(&mut cx_a, |worktree, cx| { + worktree.open_buffer("other.rs", cx) + })) + .await + .unwrap(); + // Simulate a language server reporting errors for a file. - fake_lsp + fake_language_server .notify::(lsp::PublishDiagnosticsParams { - uri: lsp::Url::from_file_path("/a/a.txt").unwrap(), + uri: lsp::Url::from_file_path("/a/a.rs").unwrap(), version: None, diagnostics: vec![ lsp::Diagnostic { severity: Some(lsp::DiagnosticSeverity::ERROR), - range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 3)), + range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 7)), message: "message 1".to_string(), ..Default::default() }, lsp::Diagnostic { severity: Some(lsp::DiagnosticSeverity::WARNING), - range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 13)), + range: lsp::Range::new( + lsp::Position::new(0, 10), + lsp::Position::new(0, 13), + ), message: "message 2".to_string(), ..Default::default() }, @@ -1682,7 +1700,7 @@ mod tests { // Open the file with the errors. 
let buffer_b = cx_b .background() - .spawn(worktree_b.update(&mut cx_b, |worktree, cx| worktree.open_buffer("a.txt", cx))) + .spawn(worktree_b.update(&mut cx_b, |worktree, cx| worktree.open_buffer("a.rs", cx))) .await .unwrap(); @@ -1693,14 +1711,14 @@ mod tests { .collect::>(), &[ ( - Point::new(0, 0)..Point::new(0, 3), + Point::new(0, 4)..Point::new(0, 7), &Diagnostic { message: "message 1".to_string(), severity: lsp::DiagnosticSeverity::ERROR, } ), ( - Point { row: 0, column: 8 }..Point { row: 0, column: 13 }, + Point::new(0, 10)..Point::new(0, 13), &Diagnostic { severity: lsp::DiagnosticSeverity::WARNING, message: "message 2".to_string() @@ -2149,7 +2167,6 @@ mod tests { "/a".as_ref(), fs.clone(), lang_registry.clone(), - None, &mut cx_a.to_async(), ) .await diff --git a/crates/workspace/src/items.rs b/crates/workspace/src/items.rs index e9411e93cc1a93fd5e42ba20e21c8a8aa57210c8..0b4b5f0d51719843d6ff61fc6ca5720784e8f196 100644 --- a/crates/workspace/src/items.rs +++ b/crates/workspace/src/items.rs @@ -127,10 +127,16 @@ impl ItemView for Editor { cx.spawn(|buffer, mut cx| async move { save_as.await.map(|new_file| { - let (language, language_server) = worktree.read_with(&cx, |worktree, _| { - let language = worktree.languages().select_language(new_file.full_path()); - let language_server = worktree.language_server(); - (language.cloned(), language_server.cloned()) + let (language, language_server) = worktree.update(&mut cx, |worktree, cx| { + let worktree = worktree.as_local_mut().unwrap(); + let language = worktree + .languages() + .select_language(new_file.full_path()) + .cloned(); + let language_server = language + .as_ref() + .and_then(|language| worktree.ensure_language_server(language, cx)); + (language, language_server.clone()) }); buffer.update(&mut cx, |buffer, cx| { diff --git a/crates/zed/src/language.rs b/crates/zed/src/language.rs index 2c60ddd92c81d458ce7267a76844a97de131c41a..3b77a0cf3ad82dff5d14de8b167174669b072068 100644 --- a/crates/zed/src/language.rs +++ b/crates/zed/src/language.rs @@ -1,4 +1,4 @@ -pub use language::{Buffer, Diagnostic, Language, LanguageRegistry, Point}; +pub use language::*; use rust_embed::RustEmbed; use std::borrow::Cow; use std::{str, sync::Arc}; From 9a7ecfbc4f793089cec7077445472f4c7c0d586b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 2 Nov 2021 17:51:18 -0700 Subject: [PATCH 59/61] Use status colors for diagnostic underlines --- crates/zed/assets/themes/_base.toml | 5 +++++ crates/zed/assets/themes/black.toml | 4 ---- crates/zed/assets/themes/dark.toml | 4 ---- crates/zed/assets/themes/light.toml | 4 ---- 4 files changed, 5 insertions(+), 12 deletions(-) diff --git a/crates/zed/assets/themes/_base.toml b/crates/zed/assets/themes/_base.toml index d74780281c232c5a5ab806e34815c060f5fcdedf..d384b85d68c93cb7c7bcce4fc477f73970926fd1 100644 --- a/crates/zed/assets/themes/_base.toml +++ b/crates/zed/assets/themes/_base.toml @@ -226,3 +226,8 @@ line_number = "$text.2.color" line_number_active = "$text.0.color" selection = "$selection.host" guest_selections = "$selection.guests" + +error_underline = "$status.bad" +warning_underline = "$status.warn" +info_underline = "$status.info" +hint_underline = "$status.info" diff --git a/crates/zed/assets/themes/black.toml b/crates/zed/assets/themes/black.toml index e99629c5ce68b35c337484f0d24c55f8453e0774..d37b7905be476c8499a46e43e7f30b3e551e2385 100644 --- a/crates/zed/assets/themes/black.toml +++ b/crates/zed/assets/themes/black.toml @@ -39,10 +39,6 @@ bad = "#b7372e" active_line = "#00000033" hover = 
"#00000033" -[editor] -error_underline = "#ff0000" -warning_underline = "#00ffff" - [editor.syntax] keyword = { color = "#0086c0", weight = "bold" } function = "#dcdcaa" diff --git a/crates/zed/assets/themes/dark.toml b/crates/zed/assets/themes/dark.toml index ce64e3c3f0002462100c6e1d11dea11d620d76f5..694e3469111890d317c1528c04143b5e36a8d37f 100644 --- a/crates/zed/assets/themes/dark.toml +++ b/crates/zed/assets/themes/dark.toml @@ -39,10 +39,6 @@ bad = "#b7372e" active_line = "#00000022" hover = "#00000033" -[editor] -error_underline = "#ff0000" -warning_underline = "#00ffff" - [editor.syntax] keyword = { color = "#0086c0", weight = "bold" } function = "#dcdcaa" diff --git a/crates/zed/assets/themes/light.toml b/crates/zed/assets/themes/light.toml index 13803c11a8060395db10d7ef19aa64032522d198..e2bfbfb650e5c704ac306283cee949547a1a67eb 100644 --- a/crates/zed/assets/themes/light.toml +++ b/crates/zed/assets/themes/light.toml @@ -39,10 +39,6 @@ bad = "#b7372e" active_line = "#00000008" hover = "#0000000D" -[editor] -error_underline = "#ff0000" -warning_underline = "#00ffff" - [editor.syntax] keyword = { color = "#0000fa", weight = "bold" } function = "#795e26" From 20a77f4c5e97ccb572064b1aceb53177598e8647 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 3 Nov 2021 09:39:15 +0100 Subject: [PATCH 60/61] Fix test for disk-based diagnostics --- crates/language/src/tests.rs | 65 ++++++++++++++++++++---------------- 1 file changed, 37 insertions(+), 28 deletions(-) diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 8ee3beebf48d5f577136bea1317ce7017ed9f0e3..0bc81b08086d5e4c309cb9a2c7c94391ff2f59ba 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -1,6 +1,6 @@ use super::*; use gpui::{ModelHandle, MutableAppContext}; -use std::rc::Rc; +use std::{iter::FromIterator, rc::Rc}; use unindent::Unindent as _; #[gpui::test] @@ -79,7 +79,9 @@ async fn test_apply_diff(mut cx: gpui::TestAppContext) { #[gpui::test] async fn test_reparse(mut cx: gpui::TestAppContext) { let text = "fn a() {}"; - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(rust_lang(), None, cx)); + let buffer = cx.add_model(|cx| { + Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx) + }); // Wait for the initial text to parse buffer @@ -221,7 +223,7 @@ fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) { } " .unindent(); - Buffer::new(0, text, cx).with_language(rust_lang(), None, cx) + Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx) }); let buffer = buffer.read(cx); assert_eq!( @@ -251,7 +253,8 @@ fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) { fn test_edit_with_autoindent(cx: &mut MutableAppContext) { cx.add_model(|cx| { let text = "fn a() {}"; - let mut buffer = Buffer::new(0, text, cx).with_language(rust_lang(), None, cx); + let mut buffer = + Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx); buffer.edit_with_autoindent([8..8], "\n\n", cx); assert_eq!(buffer.text(), "fn a() {\n \n}"); @@ -271,7 +274,8 @@ fn test_autoindent_moves_selections(cx: &mut MutableAppContext) { cx.add_model(|cx| { let text = "fn a() {}"; - let mut buffer = Buffer::new(0, text, cx).with_language(rust_lang(), None, cx); + let mut buffer = + Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx); let selection_set_id = buffer.add_selection_set::(&[], cx); buffer.start_transaction(Some(selection_set_id)).unwrap(); @@ -329,7 +333,8 @@ fn 
test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut Muta " .unindent(); - let mut buffer = Buffer::new(0, text, cx).with_language(rust_lang(), None, cx); + let mut buffer = + Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx); // Lines 2 and 3 don't match the indentation suggestion. When editing these lines, // their indentation is not adjusted. @@ -378,7 +383,8 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppConte " .unindent(); - let mut buffer = Buffer::new(0, text, cx).with_language(rust_lang(), None, cx); + let mut buffer = + Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx); buffer.edit_with_autoindent([5..5], "\nb", cx); assert_eq!( @@ -409,6 +415,11 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppConte #[gpui::test] async fn test_diagnostics(mut cx: gpui::TestAppContext) { let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background()).await; + let mut rust_lang = rust_lang(); + rust_lang.config.language_server = Some(LanguageServerConfig { + disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]), + ..Default::default() + }); let text = " fn a() { A } @@ -418,7 +429,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .unindent(); let buffer = cx.add_model(|cx| { - Buffer::new(0, text, cx).with_language(rust_lang(), Some(language_server), cx) + Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang)), Some(language_server), cx) }); let open_notification = fake @@ -586,14 +597,14 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)), severity: Some(lsp::DiagnosticSeverity::ERROR), message: "undefined variable 'BB'".to_string(), - source: Some("rustc".to_string()), + source: Some("disk".to_string()), ..Default::default() }, lsp::Diagnostic { range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), severity: Some(lsp::DiagnosticSeverity::ERROR), message: "undefined variable 'A'".to_string(), - source: Some("rustc".to_string()), + source: Some("disk".to_string()), ..Default::default() }, ], @@ -669,29 +680,27 @@ impl Buffer { } } -fn rust_lang() -> Option> { - Some(Arc::new( - Language::new( - LanguageConfig { - name: "Rust".to_string(), - path_suffixes: vec!["rs".to_string()], - language_server: None, - ..Default::default() - }, - tree_sitter_rust::language(), - ) - .with_indents_query( - r#" +fn rust_lang() -> Language { + Language::new( + LanguageConfig { + name: "Rust".to_string(), + path_suffixes: vec!["rs".to_string()], + language_server: None, + ..Default::default() + }, + tree_sitter_rust::language(), + ) + .with_indents_query( + r#" (call_expression) @indent (field_expression) @indent (_ "(" ")" @end) @indent (_ "{" "}" @end) @indent "#, - ) - .unwrap() - .with_brackets_query(r#" ("{" @open "}" @close) "#) - .unwrap(), - )) + ) + .unwrap() + .with_brackets_query(r#" ("{" @open "}" @close) "#) + .unwrap() } fn empty(point: Point) -> Range { From 258b89bb7077035bea1fec94ad6f9b8f165dc575 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 3 Nov 2021 09:44:23 +0100 Subject: [PATCH 61/61] Request autoscroll when undoing/redoing --- crates/editor/src/lib.rs | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/crates/editor/src/lib.rs b/crates/editor/src/lib.rs index d41fbf81cbb5c1d91543e2b8b64729b75a566ce6..56f93eb5af88bc827b576936103aa178fa88903b 100644 --- 
a/crates/editor/src/lib.rs
+++ b/crates/editor/src/lib.rs
@@ -1527,10 +1527,12 @@ impl Editor {
 
     pub fn undo(&mut self, _: &Undo, cx: &mut ViewContext<Self>) {
         self.buffer.update(cx, |buffer, cx| buffer.undo(cx));
+        self.request_autoscroll(cx);
     }
 
     pub fn redo(&mut self, _: &Redo, cx: &mut ViewContext<Self>) {
         self.buffer.update(cx, |buffer, cx| buffer.redo(cx));
+        self.request_autoscroll(cx);
     }
 
     pub fn move_left(&mut self, _: &MoveLeft, cx: &mut ViewContext<Self>) {
@@ -2344,10 +2346,8 @@ impl Editor {
         }
 
         if autoscroll {
-            self.autoscroll_requested = true;
-            cx.notify();
+            self.request_autoscroll(cx);
         }
-
         self.pause_cursor_blinking(cx);
 
         self.buffer.update(cx, |buffer, cx| {
@@ -2357,6 +2357,11 @@ impl Editor {
         });
     }
 
+    fn request_autoscroll(&mut self, cx: &mut ViewContext<Self>) {
+        self.autoscroll_requested = true;
+        cx.notify();
+    }
+
     fn start_transaction(&self, cx: &mut ViewContext<Self>) {
         self.buffer.update(cx, |buffer, _| {
            buffer