Antonio Scandurra created
Notify all language servers when a buffer gets saved
Cargo.lock | 1
crates/client/src/user.rs | 2
crates/editor/src/editor.rs | 75 +-
crates/editor/src/multi_buffer.rs | 6
crates/gpui/src/app.rs | 83 +-
crates/language/src/buffer.rs | 559 +-------------------
crates/language/src/diagnostic_set.rs | 4
crates/language/src/language.rs | 56 +
crates/language/src/tests.rs | 708 ++-----------------------
crates/lsp/src/lsp.rs | 161 +----
crates/project/Cargo.toml | 1
crates/project/src/lsp_command.rs | 54 +
crates/project/src/project.rs | 792 +++++++++++++++++++++-------
crates/project/src/worktree.rs | 62 --
crates/rpc/proto/zed.proto | 1
crates/rpc/src/proto.rs | 2
crates/search/src/buffer_search.rs | 2
crates/server/src/rpc.rs | 125 ++--
crates/text/src/anchor.rs | 42
19 files changed, 1,005 insertions(+), 1,731 deletions(-)
@@ -3591,6 +3591,7 @@ dependencies = [
"serde",
"serde_json",
"sha2 0.10.2",
+ "similar",
"smol",
"sum_tree",
"tempdir",
@@ -186,7 +186,7 @@ impl UserStore {
cx: &mut ModelContext<Self>,
) -> Task<Result<Arc<User>>> {
if let Some(user) = self.users.get(&user_id).cloned() {
- return cx.spawn_weak(|_, _| async move { Ok(user) });
+ return cx.foreground().spawn(async move { Ok(user) });
}
let load_users = self.load_users(vec![user_id], cx);
@@ -32,8 +32,8 @@ use items::{BufferItemHandle, MultiBufferItemHandle};
use itertools::Itertools as _;
pub use language::{char_kind, CharKind};
use language::{
- AnchorRangeExt as _, BracketPair, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
- DiagnosticSeverity, Language, Point, Selection, SelectionGoal, TransactionId,
+ BracketPair, Buffer, CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticSeverity,
+ Language, OffsetRangeExt, Point, Selection, SelectionGoal, TransactionId,
};
use multi_buffer::MultiBufferChunks;
pub use multi_buffer::{
@@ -5912,9 +5912,9 @@ pub fn styled_runs_for_code_label<'a>(
#[cfg(test)]
mod tests {
use super::*;
- use language::LanguageConfig;
+ use language::{LanguageConfig, LanguageServerConfig};
use lsp::FakeLanguageServer;
- use project::{FakeFs, ProjectPath};
+ use project::FakeFs;
use smol::stream::StreamExt;
use std::{cell::RefCell, rc::Rc, time::Instant};
use text::Point;
@@ -8196,18 +8196,24 @@ mod tests {
#[gpui::test]
async fn test_completion(cx: &mut gpui::TestAppContext) {
let settings = cx.read(Settings::test);
- let (language_server, mut fake) = cx.update(|cx| {
- lsp::LanguageServer::fake_with_capabilities(
- lsp::ServerCapabilities {
- completion_provider: Some(lsp::CompletionOptions {
- trigger_characters: Some(vec![".".to_string(), ":".to_string()]),
- ..Default::default()
- }),
- ..Default::default()
- },
- cx,
- )
+
+ let (mut language_server_config, mut fake_servers) = LanguageServerConfig::fake();
+ language_server_config.set_fake_capabilities(lsp::ServerCapabilities {
+ completion_provider: Some(lsp::CompletionOptions {
+ trigger_characters: Some(vec![".".to_string(), ":".to_string()]),
+ ..Default::default()
+ }),
+ ..Default::default()
});
+ let language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ path_suffixes: vec!["rs".to_string()],
+ language_server: Some(language_server_config),
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ ));
let text = "
one
@@ -8217,31 +8223,26 @@ mod tests {
.unindent();
let fs = FakeFs::new(cx.background().clone());
- fs.insert_file("/file", text).await;
+ fs.insert_file("/file.rs", text).await;
let project = Project::test(fs, cx);
+ project.update(cx, |project, _| project.languages().add(language));
- let (worktree, relative_path) = project
+ let worktree_id = project
.update(cx, |project, cx| {
- project.find_or_create_local_worktree("/file", true, cx)
+ project.find_or_create_local_worktree("/file.rs", true, cx)
})
.await
- .unwrap();
- let project_path = ProjectPath {
- worktree_id: worktree.read_with(cx, |worktree, _| worktree.id()),
- path: relative_path.into(),
- };
+ .unwrap()
+ .0
+ .read_with(cx, |tree, _| tree.id());
let buffer = project
- .update(cx, |project, cx| project.open_buffer(project_path, cx))
+ .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
.await
.unwrap();
- buffer.update(cx, |buffer, cx| {
- buffer.set_language_server(Some(language_server), cx);
- });
+ let mut fake_server = fake_servers.next().await.unwrap();
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
- buffer.next_notification(&cx).await;
-
let (_, editor) = cx.add_window(|cx| build_editor(buffer, settings, cx));
editor.update(cx, |editor, cx| {
@@ -8251,8 +8252,8 @@ mod tests {
});
handle_completion_request(
- &mut fake,
- "/file",
+ &mut fake_server,
+ "/file.rs",
Point::new(0, 4),
vec![
(Point::new(0, 4)..Point::new(0, 4), "first_completion"),
@@ -8282,7 +8283,7 @@ mod tests {
});
handle_resolve_completion_request(
- &mut fake,
+ &mut fake_server,
Some((Point::new(2, 5)..Point::new(2, 5), "\nadditional edit")),
)
.await;
@@ -8315,8 +8316,8 @@ mod tests {
});
handle_completion_request(
- &mut fake,
- "/file",
+ &mut fake_server,
+ "/file.rs",
Point::new(2, 7),
vec![
(Point::new(2, 6)..Point::new(2, 7), "fourth_completion"),
@@ -8334,8 +8335,8 @@ mod tests {
});
handle_completion_request(
- &mut fake,
- "/file",
+ &mut fake_server,
+ "/file.rs",
Point::new(2, 8),
vec![
(Point::new(2, 6)..Point::new(2, 8), "fourth_completion"),
@@ -8364,7 +8365,7 @@ mod tests {
);
apply_additional_edits
});
- handle_resolve_completion_request(&mut fake, None).await;
+ handle_resolve_completion_request(&mut fake_server, None).await;
apply_additional_edits.await.unwrap();
async fn handle_completion_request(
@@ -8,8 +8,8 @@ use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
pub use language::Completion;
use language::{
char_kind, Buffer, BufferChunks, BufferSnapshot, CharKind, Chunk, DiagnosticEntry, Event, File,
- Language, Outline, OutlineItem, Selection, ToOffset as _, ToPoint as _, ToPointUtf16 as _,
- TransactionId,
+ Language, OffsetRangeExt, Outline, OutlineItem, Selection, ToOffset as _, ToPoint as _,
+ ToPointUtf16 as _, TransactionId,
};
use std::{
cell::{Ref, RefCell},
@@ -25,7 +25,7 @@ use text::{
locator::Locator,
rope::TextDimension,
subscription::{Subscription, Topic},
- AnchorRangeExt as _, Edit, Point, PointUtf16, TextSummary,
+ Edit, Point, PointUtf16, TextSummary,
};
use theme::SyntaxTheme;
@@ -741,7 +741,7 @@ type GlobalActionCallback = dyn FnMut(&dyn AnyAction, &mut MutableAppContext);
type SubscriptionCallback = Box<dyn FnMut(&dyn Any, &mut MutableAppContext) -> bool>;
type ObservationCallback = Box<dyn FnMut(&mut MutableAppContext) -> bool>;
-type ReleaseObservationCallback = Box<dyn FnMut(&mut MutableAppContext)>;
+type ReleaseObservationCallback = Box<dyn FnMut(&dyn Any, &mut MutableAppContext)>;
pub struct MutableAppContext {
weak_self: Option<rc::Weak<RefCell<Self>>>,
@@ -1154,14 +1154,20 @@ impl MutableAppContext {
E: Entity,
E::Event: 'static,
H: Handle<E>,
- F: 'static + FnMut(&mut Self),
+ F: 'static + FnMut(&E, &mut Self),
{
let id = post_inc(&mut self.next_subscription_id);
self.release_observations
.lock()
.entry(handle.id())
.or_default()
- .insert(id, Box::new(move |cx| callback(cx)));
+ .insert(
+ id,
+ Box::new(move |released, cx| {
+ let released = released.downcast_ref().unwrap();
+ callback(released, cx)
+ }),
+ );
Subscription::ReleaseObservation {
id,
entity_id: handle.id(),
@@ -1520,9 +1526,8 @@ impl MutableAppContext {
self.observations.lock().remove(&model_id);
let mut model = self.cx.models.remove(&model_id).unwrap();
model.release(self);
- self.pending_effects.push_back(Effect::Release {
- entity_id: model_id,
- });
+ self.pending_effects
+ .push_back(Effect::ModelRelease { model_id, model });
}
for (window_id, view_id) in dropped_views {
@@ -1548,7 +1553,7 @@ impl MutableAppContext {
}
self.pending_effects
- .push_back(Effect::Release { entity_id: view_id });
+ .push_back(Effect::ViewRelease { view_id, view });
}
for key in dropped_element_states {
@@ -1575,7 +1580,12 @@ impl MutableAppContext {
self.notify_view_observers(window_id, view_id)
}
Effect::Deferred(callback) => callback(self),
- Effect::Release { entity_id } => self.notify_release_observers(entity_id),
+ Effect::ModelRelease { model_id, model } => {
+ self.notify_release_observers(model_id, model.as_any())
+ }
+ Effect::ViewRelease { view_id, view } => {
+ self.notify_release_observers(view_id, view.as_any())
+ }
Effect::Focus { window_id, view_id } => {
self.focus(window_id, view_id);
}
@@ -1738,11 +1748,11 @@ impl MutableAppContext {
}
}
- fn notify_release_observers(&mut self, entity_id: usize) {
+ fn notify_release_observers(&mut self, entity_id: usize, entity: &dyn Any) {
let callbacks = self.release_observations.lock().remove(&entity_id);
if let Some(callbacks) = callbacks {
for (_, mut callback) in callbacks {
- callback(self);
+ callback(entity, self);
}
}
}
@@ -2069,8 +2079,13 @@ pub enum Effect {
view_id: usize,
},
Deferred(Box<dyn FnOnce(&mut MutableAppContext)>),
- Release {
- entity_id: usize,
+ ModelRelease {
+ model_id: usize,
+ model: Box<dyn AnyModel>,
+ },
+ ViewRelease {
+ view_id: usize,
+ view: Box<dyn AnyView>,
},
Focus {
window_id: usize,
@@ -2099,9 +2114,13 @@ impl Debug for Effect {
.field("view_id", view_id)
.finish(),
Effect::Deferred(_) => f.debug_struct("Effect::Deferred").finish(),
- Effect::Release { entity_id } => f
- .debug_struct("Effect::Release")
- .field("entity_id", entity_id)
+ Effect::ModelRelease { model_id, .. } => f
+ .debug_struct("Effect::ModelRelease")
+ .field("model_id", model_id)
+ .finish(),
+ Effect::ViewRelease { view_id, .. } => f
+ .debug_struct("Effect::ViewRelease")
+ .field("view_id", view_id)
.finish(),
Effect::Focus { window_id, view_id } => f
.debug_struct("Effect::Focus")
@@ -2332,13 +2351,13 @@ impl<'a, T: Entity> ModelContext<'a, T> {
) -> Subscription
where
S: Entity,
- F: 'static + FnMut(&mut T, &mut ModelContext<T>),
+ F: 'static + FnMut(&mut T, &S, &mut ModelContext<T>),
{
let observer = self.weak_handle();
- self.app.observe_release(handle, move |cx| {
+ self.app.observe_release(handle, move |released, cx| {
if let Some(observer) = observer.upgrade(cx) {
observer.update(cx, |observer, cx| {
- callback(observer, cx);
+ callback(observer, released, cx);
});
}
})
@@ -2594,13 +2613,13 @@ impl<'a, T: View> ViewContext<'a, T> {
where
E: Entity,
H: Handle<E>,
- F: 'static + FnMut(&mut T, &mut ViewContext<T>),
+ F: 'static + FnMut(&mut T, &E, &mut ViewContext<T>),
{
let observer = self.weak_handle();
- self.app.observe_release(handle, move |cx| {
+ self.app.observe_release(handle, move |released, cx| {
if let Some(observer) = observer.upgrade(cx) {
observer.update(cx, |observer, cx| {
- callback(observer, cx);
+ callback(observer, released, cx);
});
}
})
@@ -4061,7 +4080,7 @@ mod tests {
}
#[crate::test(self)]
- fn test_subscribe_and_emit_from_model(cx: &mut MutableAppContext) {
+ fn test_model_events(cx: &mut MutableAppContext) {
#[derive(Default)]
struct Model {
events: Vec<usize>,
@@ -4073,11 +4092,11 @@ mod tests {
let handle_1 = cx.add_model(|_| Model::default());
let handle_2 = cx.add_model(|_| Model::default());
- handle_1.update(cx, |_, c| {
- c.subscribe(&handle_2, move |model: &mut Model, emitter, event, c| {
+ handle_1.update(cx, |_, cx| {
+ cx.subscribe(&handle_2, move |model: &mut Model, emitter, event, cx| {
model.events.push(*event);
- c.subscribe(&emitter, |model, _, event, _| {
+ cx.subscribe(&emitter, |model, _, event, _| {
model.events.push(*event * 2);
})
.detach();
@@ -4294,12 +4313,12 @@ mod tests {
cx.observe_release(&model, {
let model_release_observed = model_release_observed.clone();
- move |_| model_release_observed.set(true)
+ move |_, _| model_release_observed.set(true)
})
.detach();
cx.observe_release(&view, {
let view_release_observed = view_release_observed.clone();
- move |_| view_release_observed.set(true)
+ move |_, _| view_release_observed.set(true)
})
.detach();
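
Note on the API shape above: release observers now receive the released entity itself, so its final state can be inspected at drop time instead of only learning that "something was dropped". A minimal sketch against the new ModelContext::observe_release signature, using hypothetical Tracker and Worker models (the names and fields are illustrative, not part of this commit):

    struct Worker {
        completed: usize,
    }

    impl Entity for Worker {
        type Event = ();
    }

    struct Tracker {
        finished_jobs: usize,
    }

    impl Entity for Tracker {
        type Event = ();
    }

    impl Tracker {
        fn track(&mut self, worker: &ModelHandle<Worker>, cx: &mut ModelContext<Self>) {
            // The callback now gets `&Worker` (the dropped model's final state)
            // in addition to the observer and the context.
            cx.observe_release(worker, |this, released: &Worker, cx| {
                this.finished_jobs += released.completed;
                cx.notify();
            })
            .detach();
        }
    }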
@@ -4316,7 +4335,7 @@ mod tests {
}
#[crate::test(self)]
- fn test_subscribe_and_emit_from_view(cx: &mut MutableAppContext) {
+ fn test_view_events(cx: &mut MutableAppContext) {
#[derive(Default)]
struct View {
events: Vec<usize>,
@@ -4346,18 +4365,18 @@ mod tests {
let handle_2 = cx.add_view(window_id, |_| View::default());
let handle_3 = cx.add_model(|_| Model);
- handle_1.update(cx, |_, c| {
- c.subscribe(&handle_2, move |me, emitter, event, c| {
+ handle_1.update(cx, |_, cx| {
+ cx.subscribe(&handle_2, move |me, emitter, event, cx| {
me.events.push(*event);
- c.subscribe(&emitter, |me, _, event, _| {
+ cx.subscribe(&emitter, |me, _, event, _| {
me.events.push(*event * 2);
})
.detach();
})
.detach();
- c.subscribe(&handle_3, |me, _, event, _| {
+ cx.subscribe(&handle_3, |me, _, event, _| {
me.events.push(*event);
})
.detach();
@@ -7,16 +7,14 @@ pub use crate::{
use crate::{
diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
outline::OutlineItem,
- range_from_lsp, CodeLabel, Outline, ToLspPosition,
+ CodeLabel, Outline,
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use futures::FutureExt as _;
use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task};
use lazy_static::lazy_static;
-use lsp::LanguageServer;
use parking_lot::Mutex;
-use postage::{prelude::Stream, sink::Sink, watch};
use similar::{ChangeTag, TextDiff};
use smol::future::yield_now;
use std::{
@@ -26,7 +24,7 @@ use std::{
ffi::OsString,
future::Future,
iter::{Iterator, Peekable},
- ops::{Deref, DerefMut, Range, Sub},
+ ops::{Deref, DerefMut, Range},
path::{Path, PathBuf},
str,
sync::Arc,
@@ -34,11 +32,11 @@ use std::{
vec,
};
use sum_tree::TreeMap;
-use text::{operation_queue::OperationQueue, rope::TextDimension};
-pub use text::{Buffer as TextBuffer, Operation as _, *};
+use text::operation_queue::OperationQueue;
+pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, Operation as _, *};
use theme::SyntaxTheme;
use tree_sitter::{InputEdit, QueryCursor, Tree};
-use util::{post_inc, TryFutureExt as _};
+use util::TryFutureExt as _;
#[cfg(any(test, feature = "test-support"))]
pub use tree_sitter_rust;
@@ -70,11 +68,8 @@ pub struct Buffer {
diagnostics_update_count: usize,
diagnostics_timestamp: clock::Lamport,
file_update_count: usize,
- language_server: Option<LanguageServerState>,
completion_triggers: Vec<String>,
deferred_ops: OperationQueue<Operation>,
- #[cfg(test)]
- pub(crate) operations: Vec<Operation>,
}
pub struct BufferSnapshot {
@@ -128,22 +123,7 @@ pub struct CodeAction {
pub lsp_action: lsp::CodeAction,
}
-struct LanguageServerState {
- server: Arc<LanguageServer>,
- latest_snapshot: watch::Sender<LanguageServerSnapshot>,
- pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
- next_version: usize,
- _maintain_server: Task<Option<()>>,
-}
-
-#[derive(Clone)]
-struct LanguageServerSnapshot {
- buffer_snapshot: text::BufferSnapshot,
- version: usize,
- path: Arc<Path>,
-}
-
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Operation {
Buffer(text::Operation),
UpdateDiagnostics {
@@ -160,8 +140,9 @@ pub enum Operation {
},
}
-#[derive(Clone, Debug, Eq, PartialEq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
+ Operation(Operation),
Edited,
Dirtied,
Saved,
@@ -202,10 +183,6 @@ pub trait File {
cx: &mut MutableAppContext,
) -> Task<Result<(clock::Global, SystemTime)>>;
- fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);
-
- fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);
-
fn as_any(&self) -> &dyn Any;
fn to_proto(&self) -> rpc::proto::File;
@@ -226,83 +203,6 @@ pub trait LocalFile: File {
);
}
-#[cfg(any(test, feature = "test-support"))]
-pub struct FakeFile {
- pub path: Arc<Path>,
-}
-
-#[cfg(any(test, feature = "test-support"))]
-impl FakeFile {
- pub fn new(path: impl AsRef<Path>) -> Self {
- Self {
- path: path.as_ref().into(),
- }
- }
-}
-
-#[cfg(any(test, feature = "test-support"))]
-impl File for FakeFile {
- fn as_local(&self) -> Option<&dyn LocalFile> {
- Some(self)
- }
-
- fn mtime(&self) -> SystemTime {
- SystemTime::UNIX_EPOCH
- }
-
- fn path(&self) -> &Arc<Path> {
- &self.path
- }
-
- fn full_path(&self, _: &AppContext) -> PathBuf {
- self.path.to_path_buf()
- }
-
- fn file_name(&self, _: &AppContext) -> OsString {
- self.path.file_name().unwrap().to_os_string()
- }
-
- fn is_deleted(&self) -> bool {
- false
- }
-
- fn save(
- &self,
- _: u64,
- _: Rope,
- _: clock::Global,
- cx: &mut MutableAppContext,
- ) -> Task<Result<(clock::Global, SystemTime)>> {
- cx.spawn(|_| async move { Ok((Default::default(), SystemTime::UNIX_EPOCH)) })
- }
-
- fn buffer_updated(&self, _: u64, _: Operation, _: &mut MutableAppContext) {}
-
- fn buffer_removed(&self, _: u64, _: &mut MutableAppContext) {}
-
- fn as_any(&self) -> &dyn Any {
- self
- }
-
- fn to_proto(&self) -> rpc::proto::File {
- unimplemented!()
- }
-}
-
-#[cfg(any(test, feature = "test-support"))]
-impl LocalFile for FakeFile {
- fn abs_path(&self, _: &AppContext) -> PathBuf {
- self.path.to_path_buf()
- }
-
- fn load(&self, cx: &AppContext) -> Task<Result<String>> {
- cx.background().spawn(async move { Ok(Default::default()) })
- }
-
- fn buffer_reloaded(&self, _: u64, _: &clock::Global, _: SystemTime, _: &mut MutableAppContext) {
- }
-}
-
pub(crate) struct QueryCursorHandle(Option<QueryCursor>);
#[derive(Clone)]
@@ -488,15 +388,6 @@ impl Buffer {
self
}
- pub fn with_language_server(
- mut self,
- server: Arc<LanguageServer>,
- cx: &mut ModelContext<Self>,
- ) -> Self {
- self.set_language_server(Some(server), cx);
- self
- }
-
fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
let saved_mtime;
if let Some(file) = file.as_ref() {
@@ -523,11 +414,8 @@ impl Buffer {
diagnostics_update_count: 0,
diagnostics_timestamp: Default::default(),
file_update_count: 0,
- language_server: None,
completion_triggers: Default::default(),
deferred_ops: OperationQueue::new(),
- #[cfg(test)]
- operations: Default::default(),
}
}
@@ -547,6 +435,14 @@ impl Buffer {
}
}
+ pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
+ &self.text
+ }
+
+ pub fn text_snapshot(&self) -> text::BufferSnapshot {
+ self.text.snapshot()
+ }
+
pub fn file(&self) -> Option<&dyn File> {
self.file.as_deref()
}
@@ -572,123 +468,15 @@ impl Buffer {
})
}
+ pub fn saved_version(&self) -> &clock::Global {
+ &self.saved_version
+ }
+
pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
self.language = language;
self.reparse(cx);
}
- pub fn set_language_server(
- &mut self,
- language_server: Option<Arc<lsp::LanguageServer>>,
- cx: &mut ModelContext<Self>,
- ) {
- self.language_server = if let Some((server, file)) =
- language_server.zip(self.file.as_ref().and_then(|f| f.as_local()))
- {
- let initial_snapshot = LanguageServerSnapshot {
- buffer_snapshot: self.text.snapshot(),
- version: 0,
- path: file.abs_path(cx).into(),
- };
- let (latest_snapshot_tx, mut latest_snapshot_rx) =
- watch::channel_with::<LanguageServerSnapshot>(initial_snapshot.clone());
-
- Some(LanguageServerState {
- latest_snapshot: latest_snapshot_tx,
- pending_snapshots: BTreeMap::from_iter([(0, initial_snapshot)]),
- next_version: 1,
- server: server.clone(),
- _maintain_server: cx.spawn_weak(|this, mut cx| async move {
- let capabilities = server.capabilities().await.or_else(|| {
- log::info!("language server exited");
- if let Some(this) = this.upgrade(&cx) {
- this.update(&mut cx, |this, _| this.language_server = None);
- }
- None
- })?;
-
- let triggers = capabilities
- .completion_provider
- .and_then(|c| c.trigger_characters)
- .unwrap_or_default();
- this.upgrade(&cx)?.update(&mut cx, |this, cx| {
- let lamport_timestamp = this.text.lamport_clock.tick();
- this.completion_triggers = triggers.clone();
- this.send_operation(
- Operation::UpdateCompletionTriggers {
- triggers,
- lamport_timestamp,
- },
- cx,
- );
- cx.notify();
- });
-
- let maintain_changes = cx.background().spawn(async move {
- let initial_snapshot =
- latest_snapshot_rx.recv().await.ok_or_else(|| {
- anyhow!("buffer dropped before sending DidOpenTextDocument")
- })?;
- server
- .notify::<lsp::notification::DidOpenTextDocument>(
- lsp::DidOpenTextDocumentParams {
- text_document: lsp::TextDocumentItem::new(
- lsp::Url::from_file_path(initial_snapshot.path).unwrap(),
- Default::default(),
- initial_snapshot.version as i32,
- initial_snapshot.buffer_snapshot.text(),
- ),
- },
- )
- .await?;
-
- let mut prev_version = initial_snapshot.buffer_snapshot.version().clone();
- while let Some(snapshot) = latest_snapshot_rx.recv().await {
- let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
- let buffer_snapshot = snapshot.buffer_snapshot.clone();
- let content_changes = buffer_snapshot
- .edits_since::<(PointUtf16, usize)>(&prev_version)
- .map(|edit| {
- let edit_start = edit.new.start.0;
- let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
- let new_text = buffer_snapshot
- .text_for_range(edit.new.start.1..edit.new.end.1)
- .collect();
- lsp::TextDocumentContentChangeEvent {
- range: Some(lsp::Range::new(
- edit_start.to_lsp_position(),
- edit_end.to_lsp_position(),
- )),
- range_length: None,
- text: new_text,
- }
- })
- .collect();
- let changes = lsp::DidChangeTextDocumentParams {
- text_document: lsp::VersionedTextDocumentIdentifier::new(
- uri,
- snapshot.version as i32,
- ),
- content_changes,
- };
- server
- .notify::<lsp::notification::DidChangeTextDocument>(changes)
- .await?;
-
- prev_version = snapshot.buffer_snapshot.version().clone();
- }
-
- Ok::<_, anyhow::Error>(())
- });
-
- maintain_changes.log_err().await
- }),
- })
- } else {
- None
- };
- }
-
pub fn did_save(
&mut self,
version: clock::Global,
@@ -702,26 +490,6 @@ impl Buffer {
self.file = Some(new_file);
self.file_update_count += 1;
}
- if let Some((state, local_file)) = &self
- .language_server
- .as_ref()
- .zip(self.file.as_ref().and_then(|f| f.as_local()))
- {
- cx.background()
- .spawn(
- state
- .server
- .notify::<lsp::notification::DidSaveTextDocument>(
- lsp::DidSaveTextDocumentParams {
- text_document: lsp::TextDocumentIdentifier {
- uri: lsp::Url::from_file_path(local_file.abs_path(cx)).unwrap(),
- },
- text: None,
- },
- ),
- )
- .detach()
- }
cx.emit(Event::Saved);
cx.notify();
}
@@ -815,10 +583,6 @@ impl Buffer {
self.language.as_ref()
}
- pub fn language_server(&self) -> Option<&Arc<LanguageServer>> {
- self.language_server.as_ref().map(|state| &state.server)
- }
-
pub fn parse_count(&self) -> usize {
self.parse_count
}
@@ -930,100 +694,14 @@ impl Buffer {
cx.notify();
}
- pub fn update_diagnostics<T>(
- &mut self,
- mut diagnostics: Vec<DiagnosticEntry<T>>,
- version: Option<i32>,
- cx: &mut ModelContext<Self>,
- ) -> Result<()>
- where
- T: Copy + Ord + TextDimension + Sub<Output = T> + Clip + ToPoint,
- {
- fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
- Ordering::Equal
- .then_with(|| b.is_primary.cmp(&a.is_primary))
- .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
- .then_with(|| a.severity.cmp(&b.severity))
- .then_with(|| a.message.cmp(&b.message))
- }
-
- let version = version.map(|version| version as usize);
- let content =
- if let Some((version, language_server)) = version.zip(self.language_server.as_mut()) {
- language_server.snapshot_for_version(version)?
- } else {
- self.deref()
- };
-
- diagnostics.sort_unstable_by(|a, b| {
- Ordering::Equal
- .then_with(|| a.range.start.cmp(&b.range.start))
- .then_with(|| b.range.end.cmp(&a.range.end))
- .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
- });
-
- let mut sanitized_diagnostics = Vec::new();
- let mut edits_since_save = content.edits_since::<T>(&self.saved_version).peekable();
- let mut last_edit_old_end = T::default();
- let mut last_edit_new_end = T::default();
- 'outer: for entry in diagnostics {
- let mut start = entry.range.start;
- let mut end = entry.range.end;
-
- // Some diagnostics are based on files on disk instead of buffers'
- // current contents. Adjust these diagnostics' ranges to reflect
- // any unsaved edits.
- if entry.diagnostic.is_disk_based {
- while let Some(edit) = edits_since_save.peek() {
- if edit.old.end <= start {
- last_edit_old_end = edit.old.end;
- last_edit_new_end = edit.new.end;
- edits_since_save.next();
- } else if edit.old.start <= end && edit.old.end >= start {
- continue 'outer;
- } else {
- break;
- }
- }
-
- let start_overshoot = start - last_edit_old_end;
- start = last_edit_new_end;
- start.add_assign(&start_overshoot);
-
- let end_overshoot = end - last_edit_old_end;
- end = last_edit_new_end;
- end.add_assign(&end_overshoot);
- }
-
- let range = start.clip(Bias::Left, content)..end.clip(Bias::Right, content);
- let mut range = range.start.to_point(content)..range.end.to_point(content);
- // Expand empty ranges by one character
- if range.start == range.end {
- range.end.column += 1;
- range.end = content.clip_point(range.end, Bias::Right);
- if range.start == range.end && range.end.column > 0 {
- range.start.column -= 1;
- range.start = content.clip_point(range.start, Bias::Left);
- }
- }
-
- sanitized_diagnostics.push(DiagnosticEntry {
- range,
- diagnostic: entry.diagnostic,
- });
- }
- drop(edits_since_save);
-
- let set = DiagnosticSet::new(sanitized_diagnostics, content);
+ pub fn update_diagnostics(&mut self, diagnostics: DiagnosticSet, cx: &mut ModelContext<Self>) {
let lamport_timestamp = self.text.lamport_clock.tick();
- self.apply_diagnostic_update(set.clone(), lamport_timestamp, cx);
-
let op = Operation::UpdateDiagnostics {
- diagnostics: set.iter().cloned().collect(),
+ diagnostics: diagnostics.iter().cloned().collect(),
lamport_timestamp,
};
+ self.apply_diagnostic_update(diagnostics, lamport_timestamp, cx);
self.send_operation(op, cx);
- Ok(())
}
fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
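
With the new signature above, callers build the DiagnosticSet themselves (entries in PointUtf16 coordinates, matching the diagnostic_set.rs change further down) and the buffer only applies and broadcasts it. A rough usage sketch with placeholder diagnostic content, assuming the usual imports from this crate:

    buffer.update(cx, |buffer, cx| {
        // Ranges are PointUtf16 now, resolved against the buffer's current text.
        let set = DiagnosticSet::new(
            vec![DiagnosticEntry {
                range: PointUtf16::new(0, 0)..PointUtf16::new(0, 1),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "example diagnostic".to_string(), // placeholder
                    ..Default::default()
                },
            }],
            buffer,
        );
        buffer.update_diagnostics(set, cx);
    });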
@@ -1336,30 +1014,6 @@ impl Buffer {
self.set_active_selections(Arc::from([]), cx);
}
- fn update_language_server(&mut self, cx: &AppContext) {
- let language_server = if let Some(language_server) = self.language_server.as_mut() {
- language_server
- } else {
- return;
- };
- let file = if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
- file
- } else {
- return;
- };
-
- let version = post_inc(&mut language_server.next_version);
- let snapshot = LanguageServerSnapshot {
- buffer_snapshot: self.text.snapshot(),
- version,
- path: Arc::from(file.abs_path(cx)),
- };
- language_server
- .pending_snapshots
- .insert(version, snapshot.clone());
- let _ = language_server.latest_snapshot.blocking_send(snapshot);
- }
-
pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Local>
where
T: Into<String>,
@@ -1486,115 +1140,6 @@ impl Buffer {
Some(edit_id)
}
- pub fn edits_from_lsp(
- &mut self,
- lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
- version: Option<i32>,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
- let snapshot = if let Some((version, state)) = version.zip(self.language_server.as_mut()) {
- state
- .snapshot_for_version(version as usize)
- .map(Clone::clone)
- } else {
- Ok(TextBuffer::deref(self).clone())
- };
-
- cx.background().spawn(async move {
- let snapshot = snapshot?;
- let mut lsp_edits = lsp_edits
- .into_iter()
- .map(|edit| (range_from_lsp(edit.range), edit.new_text))
- .peekable();
-
- let mut edits = Vec::new();
- while let Some((mut range, mut new_text)) = lsp_edits.next() {
- // Combine any LSP edits that are adjacent.
- //
- // Also, combine LSP edits that are separated from each other by only
- // a newline. This is important because for some code actions,
- // Rust-analyzer rewrites the entire buffer via a series of edits that
- // are separated by unchanged newline characters.
- //
- // In order for the diffing logic below to work properly, any edits that
- // cancel each other out must be combined into one.
- while let Some((next_range, next_text)) = lsp_edits.peek() {
- if next_range.start > range.end {
- if next_range.start.row > range.end.row + 1
- || next_range.start.column > 0
- || snapshot.clip_point_utf16(
- PointUtf16::new(range.end.row, u32::MAX),
- Bias::Left,
- ) > range.end
- {
- break;
- }
- new_text.push('\n');
- }
- range.end = next_range.end;
- new_text.push_str(&next_text);
- lsp_edits.next();
- }
-
- if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
- || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
- {
- return Err(anyhow!("invalid edits received from language server"));
- }
-
- // For multiline edits, perform a diff of the old and new text so that
- // we can identify the changes more precisely, preserving the locations
- // of any anchors positioned in the unchanged regions.
- if range.end.row > range.start.row {
- let mut offset = range.start.to_offset(&snapshot);
- let old_text = snapshot.text_for_range(range).collect::<String>();
-
- let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
- let mut moved_since_edit = true;
- for change in diff.iter_all_changes() {
- let tag = change.tag();
- let value = change.value();
- match tag {
- ChangeTag::Equal => {
- offset += value.len();
- moved_since_edit = true;
- }
- ChangeTag::Delete => {
- let start = snapshot.anchor_after(offset);
- let end = snapshot.anchor_before(offset + value.len());
- if moved_since_edit {
- edits.push((start..end, String::new()));
- } else {
- edits.last_mut().unwrap().0.end = end;
- }
- offset += value.len();
- moved_since_edit = false;
- }
- ChangeTag::Insert => {
- if moved_since_edit {
- let anchor = snapshot.anchor_after(offset);
- edits.push((anchor.clone()..anchor, value.to_string()));
- } else {
- edits.last_mut().unwrap().1.push_str(value);
- }
- moved_since_edit = false;
- }
- }
- }
- } else if range.end == range.start {
- let anchor = snapshot.anchor_after(range.start);
- edits.push((anchor.clone()..anchor, new_text));
- } else {
- let edit_start = snapshot.anchor_after(range.start);
- let edit_end = snapshot.anchor_before(range.end);
- edits.push((edit_start..edit_end, new_text));
- }
- }
-
- Ok(edits)
- })
- }
-
fn did_edit(
&mut self,
old_version: &clock::Global,
@@ -1606,7 +1151,6 @@ impl Buffer {
}
self.reparse(cx);
- self.update_language_server(cx);
cx.emit(Event::Edited);
if !was_dirty {
@@ -1745,16 +1289,8 @@ impl Buffer {
}
}
- #[cfg(not(test))]
- pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
- if let Some(file) = &self.file {
- file.buffer_updated(self.remote_id(), operation, cx.as_mut());
- }
- }
-
- #[cfg(test)]
- pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
- self.operations.push(operation);
+ fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
+ cx.emit(Event::Operation(operation));
}
pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
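
Since send_operation now just emits Event::Operation, anything that previously relied on File::buffer_updated can subscribe to the buffer instead. A condensed sketch from another model's context, mirroring the updated collaboration test below (buffer_handle is a placeholder name):

    cx.subscribe(&buffer_handle, |_observer, _buffer, event, _cx| {
        if let Event::Operation(operation) = event {
            // Forward the operation to other replicas, e.g. over the network.
            let _message = proto::serialize_operation(operation);
        }
    })
    .detach();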
@@ -1826,6 +1362,19 @@ impl Buffer {
redone
}
+ pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
+ self.completion_triggers = triggers.clone();
+ let lamport_timestamp = self.text.lamport_clock.tick();
+ self.send_operation(
+ Operation::UpdateCompletionTriggers {
+ triggers,
+ lamport_timestamp,
+ },
+ cx,
+ );
+ cx.notify();
+ }
+
pub fn completion_triggers(&self) -> &[String] {
&self.completion_triggers
}
@@ -1882,24 +1431,6 @@ impl Buffer {
impl Entity for Buffer {
type Event = Event;
-
- fn release(&mut self, cx: &mut gpui::MutableAppContext) {
- if let Some(file) = self.file.as_ref() {
- file.buffer_removed(self.remote_id(), cx);
- if let Some((lang_server, file)) = self.language_server.as_ref().zip(file.as_local()) {
- let request = lang_server
- .server
- .notify::<lsp::notification::DidCloseTextDocument>(
- lsp::DidCloseTextDocumentParams {
- text_document: lsp::TextDocumentIdentifier::new(
- lsp::Url::from_file_path(file.abs_path(cx)).unwrap(),
- ),
- },
- );
- cx.foreground().spawn(request).detach_and_log_err(cx);
- }
- }
- }
}
impl Deref for Buffer {
@@ -2632,20 +2163,6 @@ impl operation_queue::Operation for Operation {
}
}
-impl LanguageServerState {
- fn snapshot_for_version(&mut self, version: usize) -> Result<&text::BufferSnapshot> {
- const OLD_VERSIONS_TO_RETAIN: usize = 10;
-
- self.pending_snapshots
- .retain(|&v, _| v + OLD_VERSIONS_TO_RETAIN >= version);
- let snapshot = self
- .pending_snapshots
- .get(&version)
- .ok_or_else(|| anyhow!("missing snapshot"))?;
- Ok(&snapshot.buffer_snapshot)
- }
-}
-
impl Default for Diagnostic {
fn default() -> Self {
Self {
@@ -6,7 +6,7 @@ use std::{
ops::Range,
};
use sum_tree::{self, Bias, SumTree};
-use text::{Anchor, FromAnchor, Point, ToOffset};
+use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
#[derive(Clone, Debug)]
pub struct DiagnosticSet {
@@ -46,7 +46,7 @@ impl DiagnosticSet {
pub fn new<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
where
- I: IntoIterator<Item = DiagnosticEntry<Point>>,
+ I: IntoIterator<Item = DiagnosticEntry<PointUtf16>>,
{
let mut entries = iter.into_iter().collect::<Vec<_>>();
entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
@@ -245,31 +245,46 @@ impl LanguageRegistry {
root_path: Arc<Path>,
http_client: Arc<dyn HttpClient>,
cx: &mut MutableAppContext,
- ) -> Option<Task<Result<Arc<lsp::LanguageServer>>>> {
+ ) -> Option<Task<Result<lsp::LanguageServer>>> {
#[cfg(any(test, feature = "test-support"))]
- if let Some(config) = &language.config.language_server {
- if let Some(fake_config) = &config.fake_config {
- let (server, mut fake_server) = lsp::LanguageServer::fake_with_capabilities(
- fake_config.capabilities.clone(),
- cx,
- );
-
- if let Some(initalizer) = &fake_config.initializer {
- initalizer(&mut fake_server);
+ if language
+ .config
+ .language_server
+ .as_ref()
+ .and_then(|config| config.fake_config.as_ref())
+ .is_some()
+ {
+ let language = language.clone();
+ return Some(cx.spawn(|mut cx| async move {
+ let fake_config = language
+ .config
+ .language_server
+ .as_ref()
+ .unwrap()
+ .fake_config
+ .as_ref()
+ .unwrap();
+ let (server, mut fake_server) = cx.update(|cx| {
+ lsp::LanguageServer::fake_with_capabilities(
+ fake_config.capabilities.clone(),
+ cx,
+ )
+ });
+ if let Some(initializer) = &fake_config.initializer {
+ initializer(&mut fake_server);
}
let servers_tx = fake_config.servers_tx.clone();
- let initialized = server.capabilities();
cx.background()
.spawn(async move {
- if initialized.await.is_some() {
- servers_tx.unbounded_send(fake_server).ok();
- }
+ fake_server
+ .receive_notification::<lsp::notification::Initialized>()
+ .await;
+ servers_tx.unbounded_send(fake_server).ok();
})
.detach();
-
- return Some(Task::ready(Ok(server.clone())));
- }
+ Ok(server)
+ }));
}
let download_dir = self
@@ -304,14 +319,13 @@ impl LanguageRegistry {
let server_binary_path = server_binary_path.await?;
let server_args = adapter.server_args();
- let server = lsp::LanguageServer::new(
+ lsp::LanguageServer::new(
&server_binary_path,
server_args,
- adapter.initialization_options(),
&root_path,
+ adapter.initialization_options(),
background,
- )?;
- Ok(server)
+ )
}))
}
@@ -6,7 +6,6 @@ use rand::prelude::*;
use std::{
cell::RefCell,
env,
- iter::FromIterator,
ops::Range,
rc::Rc,
time::{Duration, Instant},
@@ -76,43 +75,48 @@ fn test_edit_events(cx: &mut gpui::MutableAppContext) {
let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
- let buffer_ops = buffer1.update(cx, |buffer, cx| {
- let buffer_1_events = buffer_1_events.clone();
- cx.subscribe(&buffer1, move |_, _, event, _| {
- buffer_1_events.borrow_mut().push(event.clone())
- })
- .detach();
- let buffer_2_events = buffer_2_events.clone();
- cx.subscribe(&buffer2, move |_, _, event, _| {
- buffer_2_events.borrow_mut().push(event.clone())
- })
- .detach();
-
- // An edit emits an edited event, followed by a dirtied event,
- // since the buffer was previously in a clean state.
- buffer.edit(Some(2..4), "XYZ", cx);
+ let buffer1_ops = Rc::new(RefCell::new(Vec::new()));
+ buffer1.update(cx, {
+ let buffer1_ops = buffer1_ops.clone();
+ |buffer, cx| {
+ let buffer_1_events = buffer_1_events.clone();
+ cx.subscribe(&buffer1, move |_, _, event, _| match event.clone() {
+ Event::Operation(op) => buffer1_ops.borrow_mut().push(op),
+ event @ _ => buffer_1_events.borrow_mut().push(event),
+ })
+ .detach();
+ let buffer_2_events = buffer_2_events.clone();
+ cx.subscribe(&buffer2, move |_, _, event, _| {
+ buffer_2_events.borrow_mut().push(event.clone())
+ })
+ .detach();
- // An empty transaction does not emit any events.
- buffer.start_transaction();
- buffer.end_transaction(cx);
+ // An edit emits an edited event, followed by a dirtied event,
+ // since the buffer was previously in a clean state.
+ buffer.edit(Some(2..4), "XYZ", cx);
- // A transaction containing two edits emits one edited event.
- now += Duration::from_secs(1);
- buffer.start_transaction_at(now);
- buffer.edit(Some(5..5), "u", cx);
- buffer.edit(Some(6..6), "w", cx);
- buffer.end_transaction_at(now, cx);
+ // An empty transaction does not emit any events.
+ buffer.start_transaction();
+ buffer.end_transaction(cx);
- // Undoing a transaction emits one edited event.
- buffer.undo(cx);
+ // A transaction containing two edits emits one edited event.
+ now += Duration::from_secs(1);
+ buffer.start_transaction_at(now);
+ buffer.edit(Some(5..5), "u", cx);
+ buffer.edit(Some(6..6), "w", cx);
+ buffer.end_transaction_at(now, cx);
- buffer.operations.clone()
+ // Undoing a transaction emits one edited event.
+ buffer.undo(cx);
+ }
});
// Incorporating a set of remote ops emits a single edited event,
// followed by a dirtied event.
buffer2.update(cx, |buffer, cx| {
- buffer.apply_ops(buffer_ops, cx).unwrap();
+ buffer
+ .apply_ops(buffer1_ops.borrow_mut().drain(..), cx)
+ .unwrap();
});
let buffer_1_events = buffer_1_events.borrow();
@@ -553,584 +557,6 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppConte
});
}
-#[gpui::test]
-async fn test_diagnostics(cx: &mut gpui::TestAppContext) {
- let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);
- let mut rust_lang = rust_lang();
- rust_lang.config.language_server = Some(LanguageServerConfig {
- disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
- ..Default::default()
- });
-
- let text = "
- fn a() { A }
- fn b() { BB }
- fn c() { CCC }
- "
- .unindent();
-
- let buffer = cx.add_model(|cx| {
- Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
- .with_language(Arc::new(rust_lang), cx)
- .with_language_server(language_server, cx)
- });
-
- let open_notification = fake
- .receive_notification::<lsp::notification::DidOpenTextDocument>()
- .await;
-
- // Edit the buffer, moving the content down
- buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
- let change_notification_1 = fake
- .receive_notification::<lsp::notification::DidChangeTextDocument>()
- .await;
- assert!(change_notification_1.text_document.version > open_notification.text_document.version);
-
- buffer.update(cx, |buffer, cx| {
- // Receive diagnostics for an earlier version of the buffer.
- buffer
- .update_diagnostics(
- vec![
- DiagnosticEntry {
- range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'A'".to_string(),
- is_disk_based: true,
- group_id: 0,
- is_primary: true,
- ..Default::default()
- },
- },
- DiagnosticEntry {
- range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'BB'".to_string(),
- is_disk_based: true,
- group_id: 1,
- is_primary: true,
- ..Default::default()
- },
- },
- DiagnosticEntry {
- range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- is_disk_based: true,
- message: "undefined variable 'CCC'".to_string(),
- group_id: 2,
- is_primary: true,
- ..Default::default()
- },
- },
- ],
- Some(open_notification.text_document.version),
- cx,
- )
- .unwrap();
-
- // The diagnostics have moved down since they were created.
- assert_eq!(
- buffer
- .snapshot()
- .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
- .collect::<Vec<_>>(),
- &[
- DiagnosticEntry {
- range: Point::new(3, 9)..Point::new(3, 11),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'BB'".to_string(),
- is_disk_based: true,
- group_id: 1,
- is_primary: true,
- ..Default::default()
- },
- },
- DiagnosticEntry {
- range: Point::new(4, 9)..Point::new(4, 12),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'CCC'".to_string(),
- is_disk_based: true,
- group_id: 2,
- is_primary: true,
- ..Default::default()
- }
- }
- ]
- );
- assert_eq!(
- chunks_with_diagnostics(buffer, 0..buffer.len()),
- [
- ("\n\nfn a() { ".to_string(), None),
- ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
- (" }\nfn b() { ".to_string(), None),
- ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
- (" }\nfn c() { ".to_string(), None),
- ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
- (" }\n".to_string(), None),
- ]
- );
- assert_eq!(
- chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
- [
- ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
- (" }\nfn c() { ".to_string(), None),
- ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
- ]
- );
-
- // Ensure overlapping diagnostics are highlighted correctly.
- buffer
- .update_diagnostics(
- vec![
- DiagnosticEntry {
- range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'A'".to_string(),
- is_disk_based: true,
- group_id: 0,
- is_primary: true,
- ..Default::default()
- },
- },
- DiagnosticEntry {
- range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::WARNING,
- message: "unreachable statement".to_string(),
- group_id: 1,
- is_primary: true,
- ..Default::default()
- },
- },
- ],
- Some(open_notification.text_document.version),
- cx,
- )
- .unwrap();
- assert_eq!(
- buffer
- .snapshot()
- .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
- .collect::<Vec<_>>(),
- &[
- DiagnosticEntry {
- range: Point::new(2, 9)..Point::new(2, 12),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::WARNING,
- message: "unreachable statement".to_string(),
- group_id: 1,
- is_primary: true,
- ..Default::default()
- }
- },
- DiagnosticEntry {
- range: Point::new(2, 9)..Point::new(2, 10),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'A'".to_string(),
- is_disk_based: true,
- group_id: 0,
- is_primary: true,
- ..Default::default()
- },
- }
- ]
- );
- assert_eq!(
- chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
- [
- ("fn a() { ".to_string(), None),
- ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
- (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
- ("\n".to_string(), None),
- ]
- );
- assert_eq!(
- chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
- [
- (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
- ("\n".to_string(), None),
- ]
- );
- });
-
- // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
- // changes since the last save.
- buffer.update(cx, |buffer, cx| {
- buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
- buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
- });
- let change_notification_2 = fake
- .receive_notification::<lsp::notification::DidChangeTextDocument>()
- .await;
- assert!(
- change_notification_2.text_document.version > change_notification_1.text_document.version
- );
-
- buffer.update(cx, |buffer, cx| {
- buffer
- .update_diagnostics(
- vec![
- DiagnosticEntry {
- range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'BB'".to_string(),
- is_disk_based: true,
- group_id: 1,
- is_primary: true,
- ..Default::default()
- },
- },
- DiagnosticEntry {
- range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'A'".to_string(),
- is_disk_based: true,
- group_id: 0,
- is_primary: true,
- ..Default::default()
- },
- },
- ],
- Some(change_notification_2.text_document.version),
- cx,
- )
- .unwrap();
- assert_eq!(
- buffer
- .snapshot()
- .diagnostics_in_range::<_, Point>(0..buffer.len())
- .collect::<Vec<_>>(),
- &[
- DiagnosticEntry {
- range: Point::new(2, 21)..Point::new(2, 22),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'A'".to_string(),
- is_disk_based: true,
- group_id: 0,
- is_primary: true,
- ..Default::default()
- }
- },
- DiagnosticEntry {
- range: Point::new(3, 9)..Point::new(3, 11),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'BB'".to_string(),
- is_disk_based: true,
- group_id: 1,
- is_primary: true,
- ..Default::default()
- },
- }
- ]
- );
- });
-}
-
-#[gpui::test]
-async fn test_language_server_has_exited(cx: &mut gpui::TestAppContext) {
- let (language_server, fake) = cx.update(lsp::LanguageServer::fake);
-
- // Simulate the language server failing to start up.
- drop(fake);
-
- let buffer = cx.add_model(|cx| {
- Buffer::from_file(0, "", Box::new(FakeFile::new("/some/path")), cx)
- .with_language(Arc::new(rust_lang()), cx)
- .with_language_server(language_server, cx)
- });
-
- // Run the buffer's task that retrieves the server's capabilities.
- cx.foreground().advance_clock(Duration::from_millis(1));
-
- buffer.read_with(cx, |buffer, _| {
- assert!(buffer.language_server().is_none());
- });
-}
-
-#[gpui::test]
-async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
- let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);
-
- let text = "
- fn a() {
- f1();
- }
- fn b() {
- f2();
- }
- fn c() {
- f3();
- }
- "
- .unindent();
-
- let buffer = cx.add_model(|cx| {
- Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
- .with_language(Arc::new(rust_lang()), cx)
- .with_language_server(language_server, cx)
- });
-
- let lsp_document_version = fake
- .receive_notification::<lsp::notification::DidOpenTextDocument>()
- .await
- .text_document
- .version;
-
- // Simulate editing the buffer after the language server computes some edits.
- buffer.update(cx, |buffer, cx| {
- buffer.edit(
- [Point::new(0, 0)..Point::new(0, 0)],
- "// above first function\n",
- cx,
- );
- buffer.edit(
- [Point::new(2, 0)..Point::new(2, 0)],
- " // inside first function\n",
- cx,
- );
- buffer.edit(
- [Point::new(6, 4)..Point::new(6, 4)],
- "// inside second function ",
- cx,
- );
-
- assert_eq!(
- buffer.text(),
- "
- // above first function
- fn a() {
- // inside first function
- f1();
- }
- fn b() {
- // inside second function f2();
- }
- fn c() {
- f3();
- }
- "
- .unindent()
- );
- });
-
- let edits = buffer
- .update(cx, |buffer, cx| {
- buffer.edits_from_lsp(
- vec![
- // replace body of first function
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
- new_text: "
- fn a() {
- f10();
- }
- "
- .unindent(),
- },
- // edit inside second function
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
- new_text: "00".into(),
- },
- // edit inside third function via two distinct edits
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
- new_text: "4000".into(),
- },
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
- new_text: "".into(),
- },
- ],
- Some(lsp_document_version),
- cx,
- )
- })
- .await
- .unwrap();
-
- buffer.update(cx, |buffer, cx| {
- for (range, new_text) in edits {
- buffer.edit([range], new_text, cx);
- }
- assert_eq!(
- buffer.text(),
- "
- // above first function
- fn a() {
- // inside first function
- f10();
- }
- fn b() {
- // inside second function f200();
- }
- fn c() {
- f4000();
- }
- "
- .unindent()
- );
- });
-}
-
-#[gpui::test]
-async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
- let text = "
- use a::b;
- use a::c;
-
- fn f() {
- b();
- c();
- }
- "
- .unindent();
-
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
-
- // Simulate the language server sending us a small edit in the form of a very large diff.
- // Rust-analyzer does this when performing a merge-imports code action.
- let edits = buffer
- .update(cx, |buffer, cx| {
- buffer.edits_from_lsp(
- [
- // Replace the first use statement without editing the semicolon.
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
- new_text: "a::{b, c}".into(),
- },
- // Reinsert the remainder of the file between the semicolon and the final
- // newline of the file.
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
- new_text: "\n\n".into(),
- },
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
- new_text: "
- fn f() {
- b();
- c();
- }"
- .unindent(),
- },
- // Delete everything after the first newline of the file.
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
- new_text: "".into(),
- },
- ],
- None,
- cx,
- )
- })
- .await
- .unwrap();
-
- buffer.update(cx, |buffer, cx| {
- let edits = edits
- .into_iter()
- .map(|(range, text)| {
- (
- range.start.to_point(&buffer)..range.end.to_point(&buffer),
- text,
- )
- })
- .collect::<Vec<_>>();
-
- assert_eq!(
- edits,
- [
- (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
- (Point::new(1, 0)..Point::new(2, 0), "".into())
- ]
- );
-
- for (range, new_text) in edits {
- buffer.edit([range], new_text, cx);
- }
- assert_eq!(
- buffer.text(),
- "
- use a::{b, c};
-
- fn f() {
- b();
- c();
- }
- "
- .unindent()
- );
- });
-}
-
-#[gpui::test]
-async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
- cx.add_model(|cx| {
- let text = concat!(
- "let one = ;\n", //
- "let two = \n",
- "let three = 3;\n",
- );
-
- let mut buffer = Buffer::new(0, text, cx);
- buffer.set_language(Some(Arc::new(rust_lang())), cx);
- buffer
- .update_diagnostics(
- vec![
- DiagnosticEntry {
- range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "syntax error 1".to_string(),
- ..Default::default()
- },
- },
- DiagnosticEntry {
- range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "syntax error 2".to_string(),
- ..Default::default()
- },
- },
- ],
- None,
- cx,
- )
- .unwrap();
-
- // An empty range is extended forward to include the following character.
- // At the end of a line, an empty range is extended backward to include
- // the preceding character.
- let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
- assert_eq!(
- chunks
- .iter()
- .map(|(s, d)| (s.as_str(), *d))
- .collect::<Vec<_>>(),
- &[
- ("let one = ", None),
- (";", Some(DiagnosticSeverity::ERROR)),
- ("\nlet two =", None),
- (" ", Some(DiagnosticSeverity::ERROR)),
- ("\nlet three = 3;\n", None)
- ]
- );
- buffer
- });
-}
-
#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
let mut now = Instant::now();
@@ -1177,17 +603,26 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
.collect::<String>();
let mut replica_ids = Vec::new();
let mut buffers = Vec::new();
- let mut network = Network::new(rng.clone());
+ let network = Rc::new(RefCell::new(Network::new(rng.clone())));
for i in 0..rng.gen_range(min_peers..=max_peers) {
let buffer = cx.add_model(|cx| {
let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
+ let network = network.clone();
+ cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
+ if let Event::Operation(op) = event {
+ network
+ .borrow_mut()
+ .broadcast(buffer.replica_id(), vec![proto::serialize_operation(&op)]);
+ }
+ })
+ .detach();
buffer
});
buffers.push(buffer);
replica_ids.push(i as ReplicaId);
- network.add_peer(i as ReplicaId);
+ network.borrow_mut().add_peer(i as ReplicaId);
log::info!("Adding initial peer with replica id {}", i);
}
@@ -1239,9 +674,10 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
40..=49 if mutation_count != 0 && replica_id == 0 => {
let entry_count = rng.gen_range(1..=5);
buffer.update(cx, |buffer, cx| {
- let diagnostics = (0..entry_count)
- .map(|_| {
+ let diagnostics = DiagnosticSet::new(
+ (0..entry_count).map(|_| {
let range = buffer.random_byte_range(0, &mut rng);
+ let range = range.to_point_utf16(buffer);
DiagnosticEntry {
range,
diagnostic: Diagnostic {
@@ -1249,10 +685,11 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
..Default::default()
},
}
- })
- .collect();
+ }),
+ buffer,
+ );
log::info!("peer {} setting diagnostics: {:?}", replica_id, diagnostics);
- buffer.update_diagnostics(diagnostics, None, cx).unwrap();
+ buffer.update_diagnostics(diagnostics, cx);
});
mutation_count -= 1;
}
@@ -1268,10 +705,20 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
let mut new_buffer =
Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
+ let network = network.clone();
+ cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
+ if let Event::Operation(op) = event {
+ network.borrow_mut().broadcast(
+ buffer.replica_id(),
+ vec![proto::serialize_operation(&op)],
+ );
+ }
+ })
+ .detach();
new_buffer
}));
replica_ids.push(new_replica_id);
- network.replicate(replica_id, new_replica_id);
+ network.borrow_mut().replicate(replica_id, new_replica_id);
}
60..=69 if mutation_count != 0 => {
buffer.update(cx, |buffer, cx| {
@@ -1280,8 +727,9 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
});
mutation_count -= 1;
}
- _ if network.has_unreceived(replica_id) => {
+ _ if network.borrow().has_unreceived(replica_id) => {
let ops = network
+ .borrow_mut()
.receive(replica_id)
.into_iter()
.map(|op| proto::deserialize_operation(op).unwrap());
@@ -1297,14 +745,6 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
_ => {}
}
- buffer.update(cx, |buffer, _| {
- let ops = buffer
- .operations
- .drain(..)
- .map(|op| proto::serialize_operation(&op))
- .collect();
- network.broadcast(buffer.replica_id(), ops);
- });
now += Duration::from_millis(rng.gen_range(0..=200));
buffers.extend(new_buffer);
@@ -1312,7 +752,7 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
buffer.read(cx).check_invariants();
}
- if mutation_count == 0 && network.is_idle() {
+ if mutation_count == 0 && network.borrow().is_idle() {
break;
}
}
@@ -1353,24 +793,6 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
}
}
-fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
- buffer: &Buffer,
- range: Range<T>,
-) -> Vec<(String, Option<DiagnosticSeverity>)> {
- let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
- for chunk in buffer.snapshot().chunks(range, true) {
- if chunks
- .last()
- .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
- {
- chunks.last_mut().unwrap().0.push_str(chunk.text);
- } else {
- chunks.push((chunk.text.to_string(), chunk.diagnostic));
- }
- }
- chunks
-}
-
#[test]
fn test_contiguous_ranges() {
assert_eq!(
@@ -3,7 +3,7 @@ use collections::HashMap;
use futures::{channel::oneshot, io::BufWriter, AsyncRead, AsyncWrite};
use gpui::{executor, Task};
use parking_lot::{Mutex, RwLock};
-use postage::{barrier, prelude::Stream, watch};
+use postage::{barrier, prelude::Stream};
use serde::{Deserialize, Serialize};
use serde_json::{json, value::RawValue, Value};
use smol::{
@@ -14,6 +14,7 @@ use smol::{
use std::{
future::Future,
io::Write,
+ path::PathBuf,
str::FromStr,
sync::{
atomic::{AtomicUsize, Ordering::SeqCst},
@@ -34,13 +35,14 @@ type ResponseHandler = Box<dyn Send + FnOnce(Result<&str, Error>)>;
pub struct LanguageServer {
next_id: AtomicUsize,
outbound_tx: channel::Sender<Vec<u8>>,
- capabilities: watch::Receiver<Option<ServerCapabilities>>,
+ capabilities: ServerCapabilities,
notification_handlers: Arc<RwLock<HashMap<&'static str, NotificationHandler>>>,
response_handlers: Arc<Mutex<HashMap<usize, ResponseHandler>>>,
executor: Arc<executor::Background>,
io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
- initialized: barrier::Receiver,
output_done_rx: Mutex<Option<barrier::Receiver>>,
+ root_path: PathBuf,
+ options: Option<Value>,
}
pub struct Subscription {
@@ -103,10 +105,10 @@ impl LanguageServer {
pub fn new(
binary_path: &Path,
args: &[&str],
- options: Option<Value>,
root_path: &Path,
+ options: Option<Value>,
background: Arc<executor::Background>,
- ) -> Result<Arc<Self>> {
+ ) -> Result<Self> {
let mut server = Command::new(binary_path)
.current_dir(root_path)
.args(args)
@@ -116,7 +118,9 @@ impl LanguageServer {
.spawn()?;
let stdin = server.stdin.take().unwrap();
let stdout = server.stdout.take().unwrap();
- Self::new_internal(stdin, stdout, root_path, options, background)
+ Ok(Self::new_internal(
+ stdin, stdout, root_path, options, background,
+ ))
}
fn new_internal<Stdin, Stdout>(
@@ -125,7 +129,7 @@ impl LanguageServer {
root_path: &Path,
options: Option<Value>,
executor: Arc<executor::Background>,
- ) -> Result<Arc<Self>>
+ ) -> Self
where
Stdin: AsyncWrite + Unpin + Send + 'static,
Stdout: AsyncRead + Unpin + Send + 'static,
@@ -215,42 +219,24 @@ impl LanguageServer {
.log_err()
});
- let (initialized_tx, initialized_rx) = barrier::channel();
- let (mut capabilities_tx, capabilities_rx) = watch::channel();
- let this = Arc::new(Self {
+ Self {
notification_handlers,
response_handlers,
- capabilities: capabilities_rx,
+ capabilities: Default::default(),
next_id: Default::default(),
outbound_tx,
executor: executor.clone(),
io_tasks: Mutex::new(Some((input_task, output_task))),
- initialized: initialized_rx,
output_done_rx: Mutex::new(Some(output_done_rx)),
- });
-
- let root_uri = Url::from_file_path(root_path).map_err(|_| anyhow!("invalid root path"))?;
- executor
- .spawn({
- let this = this.clone();
- async move {
- if let Some(capabilities) = this.init(root_uri, options).log_err().await {
- *capabilities_tx.borrow_mut() = Some(capabilities);
- }
-
- drop(initialized_tx);
- }
- })
- .detach();
-
- Ok(this)
+ root_path: root_path.to_path_buf(),
+ options,
+ }
}
- async fn init(
- self: Arc<Self>,
- root_uri: Url,
- options: Option<Value>,
- ) -> Result<ServerCapabilities> {
+ pub async fn initialize(mut self) -> Result<Arc<Self>> {
+ let options = self.options.take();
+ let mut this = Arc::new(self);
+ let root_uri = Url::from_file_path(&this.root_path).unwrap();
#[allow(deprecated)]
let params = InitializeParams {
process_id: Default::default(),
@@ -305,19 +291,10 @@ impl LanguageServer {
locale: Default::default(),
};
- let this = self.clone();
- let request = Self::request_internal::<request::Initialize>(
- &this.next_id,
- &this.response_handlers,
- &this.outbound_tx,
- params,
- );
- let response = request.await?;
- Self::notify_internal::<notification::Initialized>(
- &this.outbound_tx,
- InitializedParams {},
- )?;
- Ok(response.capabilities)
+ let response = this.request::<request::Initialize>(params).await?;
+ Arc::get_mut(&mut this).unwrap().capabilities = response.capabilities;
+ this.notify::<notification::Initialized>(InitializedParams {})?;
+ Ok(this)
}
pub fn shutdown(&self) -> Option<impl 'static + Send + Future<Output = Option<()>>> {
@@ -352,7 +329,7 @@ impl LanguageServer {
}
}
- pub fn on_notification<T, F>(&self, mut f: F) -> Subscription
+ pub fn on_notification<T, F>(&mut self, mut f: F) -> Subscription
where
T: notification::Notification,
F: 'static + Send + Sync + FnMut(T::Params),
@@ -378,16 +355,8 @@ impl LanguageServer {
}
}
- pub fn capabilities(&self) -> impl 'static + Future<Output = Option<ServerCapabilities>> {
- let mut rx = self.capabilities.clone();
- async move {
- loop {
- let value = rx.recv().await?;
- if value.is_some() {
- return value;
- }
- }
- }
+ pub fn capabilities<'a>(self: &'a Arc<Self>) -> &'a ServerCapabilities {
+ &self.capabilities
}
pub fn request<T: request::Request>(
@@ -397,17 +366,12 @@ impl LanguageServer {
where
T::Result: 'static + Send,
{
- let this = self.clone();
- async move {
- this.initialized.clone().recv().await;
- Self::request_internal::<T>(
- &this.next_id,
- &this.response_handlers,
- &this.outbound_tx,
- params,
- )
- .await
- }
+ Self::request_internal::<T>(
+ &self.next_id,
+ &self.response_handlers,
+ &self.outbound_tx,
+ params,
+ )
}
fn request_internal<T: request::Request>(
@@ -452,16 +416,8 @@ impl LanguageServer {
}
}
- pub fn notify<T: notification::Notification>(
- self: &Arc<Self>,
- params: T::Params,
- ) -> impl Future<Output = Result<()>> {
- let this = self.clone();
- async move {
- this.initialized.clone().recv().await;
- Self::notify_internal::<T>(&this.outbound_tx, params)?;
- Ok(())
- }
+ pub fn notify<T: notification::Notification>(&self, params: T::Params) -> Result<()> {
+ Self::notify_internal::<T>(&self.outbound_tx, params)
}
fn notify_internal<T: notification::Notification>(
@@ -530,14 +486,14 @@ impl LanguageServer {
}
}
- pub fn fake(cx: &mut gpui::MutableAppContext) -> (Arc<Self>, FakeLanguageServer) {
+ pub fn fake(cx: &mut gpui::MutableAppContext) -> (Self, FakeLanguageServer) {
Self::fake_with_capabilities(Self::full_capabilities(), cx)
}
pub fn fake_with_capabilities(
capabilities: ServerCapabilities,
cx: &mut gpui::MutableAppContext,
- ) -> (Arc<Self>, FakeLanguageServer) {
+ ) -> (Self, FakeLanguageServer) {
let (stdin_writer, stdin_reader) = async_pipe::pipe();
let (stdout_writer, stdout_reader) = async_pipe::pipe();
@@ -550,15 +506,9 @@ impl LanguageServer {
}
});
- let server = Self::new_internal(
- stdin_writer,
- stdout_reader,
- Path::new("/"),
- None,
- cx.background().clone(),
- )
- .unwrap();
-
+ let executor = cx.background().clone();
+ let server =
+ Self::new_internal(stdin_writer, stdout_reader, Path::new("/"), None, executor);
(server, fake)
}
}
@@ -584,6 +534,7 @@ impl FakeLanguageServer {
let mut stdin = smol::io::BufReader::new(stdin);
while Self::receive(&mut stdin, &mut buffer).await.is_ok() {
cx.background().simulate_random_delay().await;
+
if let Ok(request) = serde_json::from_slice::<AnyRequest>(&buffer) {
assert_eq!(request.jsonrpc, JSON_RPC_VERSION);
@@ -639,7 +590,7 @@ impl FakeLanguageServer {
}
}
- pub async fn notify<T: notification::Notification>(&mut self, params: T::Params) {
+ pub fn notify<T: notification::Notification>(&mut self, params: T::Params) {
let message = serde_json::to_vec(&Notification {
jsonrpc: JSON_RPC_VERSION,
method: T::METHOD,
@@ -704,16 +655,14 @@ impl FakeLanguageServer {
self.notify::<notification::Progress>(ProgressParams {
token: NumberOrString::String(token.into()),
value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(Default::default())),
- })
- .await;
+ });
}
pub async fn end_progress(&mut self, token: impl Into<String>) {
self.notify::<notification::Progress>(ProgressParams {
token: NumberOrString::String(token.into()),
value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(Default::default())),
- })
- .await;
+ });
}
async fn receive(
@@ -758,7 +707,7 @@ mod tests {
#[gpui::test]
async fn test_fake(cx: &mut TestAppContext) {
- let (server, mut fake) = cx.update(LanguageServer::fake);
+ let (mut server, mut fake) = cx.update(LanguageServer::fake);
let (message_tx, message_rx) = channel::unbounded();
let (diagnostics_tx, diagnostics_rx) = channel::unbounded();
@@ -773,6 +722,7 @@ mod tests {
})
.detach();
+ let server = server.initialize().await.unwrap();
server
.notify::<notification::DidOpenTextDocument>(DidOpenTextDocumentParams {
text_document: TextDocumentItem::new(
@@ -782,7 +732,6 @@ mod tests {
"".to_string(),
),
})
- .await
.unwrap();
assert_eq!(
fake.receive_notification::<notification::DidOpenTextDocument>()
@@ -796,14 +745,12 @@ mod tests {
fake.notify::<notification::ShowMessage>(ShowMessageParams {
typ: MessageType::ERROR,
message: "ok".to_string(),
- })
- .await;
+ });
fake.notify::<notification::PublishDiagnostics>(PublishDiagnosticsParams {
uri: Url::from_str("file://b/c").unwrap(),
version: Some(5),
diagnostics: vec![],
- })
- .await;
+ });
assert_eq!(message_rx.recv().await.unwrap().message, "ok");
assert_eq!(
diagnostics_rx.recv().await.unwrap().uri.as_str(),
@@ -815,16 +762,4 @@ mod tests {
drop(server);
fake.receive_notification::<notification::Exit>().await;
}
-
- pub enum ServerStatusNotification {}
-
- impl notification::Notification for ServerStatusNotification {
- type Params = ServerStatusParams;
- const METHOD: &'static str = "experimental/serverStatus";
- }
-
- #[derive(Deserialize, Serialize, PartialEq, Eq, Clone)]
- pub struct ServerStatusParams {
- pub quiescent: bool,
- }
}
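With this change the caller builds a LanguageServer synchronously and then drives the LSP initialize handshake explicitly, getting back an Arc with the capabilities already resolved; the watch channel and the implicit background init task go away. A toy sketch of the two-phase pattern under those assumptions (the JSON-RPC round trip is stubbed out; any executor works, smol::block_on is used only because smol is already a dependency):

    use std::sync::Arc;

    #[derive(Default, Debug)]
    struct ServerCapabilities {
        completion: bool,
    }

    struct Server {
        capabilities: ServerCapabilities,
        options: Option<String>,
    }

    impl Server {
        // Phase one: cheap, synchronous construction. Nothing has been sent yet.
        fn new(options: Option<String>) -> Self {
            Self { capabilities: ServerCapabilities::default(), options }
        }

        // Phase two: run the handshake, then freeze the result behind an Arc.
        async fn initialize(mut self) -> Arc<Self> {
            let _options = self.options.take();
            let mut this = Arc::new(self);
            // Stand-in for the real `initialize` request/response round trip.
            let capabilities = ServerCapabilities { completion: true };
            // Safe to mutate: no other clones of the Arc exist yet.
            Arc::get_mut(&mut this).unwrap().capabilities = capabilities;
            this
        }

        // After initialization, capabilities are plain data - no awaiting needed.
        fn capabilities(&self) -> &ServerCapabilities {
            &self.capabilities
        }
    }

    fn main() {
        let server = smol::block_on(Server::new(Some("init options".into())).initialize());
        assert!(server.capabilities().completion);
    }
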
@@ -42,6 +42,7 @@ regex = "1.5"
serde = { version = "1", features = ["derive"] }
serde_json = { version = "1.0.64", features = ["preserve_order"] }
sha2 = "0.10"
+similar = "1.3"
smol = "1.2.5"
toml = "0.5"
@@ -223,21 +223,21 @@ impl LspCommand for PerformRename {
mut cx: AsyncAppContext,
) -> Result<ProjectTransaction> {
if let Some(edit) = message {
- let (language_name, language_server) = buffer.read_with(&cx, |buffer, _| {
- let language = buffer
- .language()
- .ok_or_else(|| anyhow!("buffer's language was removed"))?;
- let language_server = buffer
- .language_server()
- .cloned()
- .ok_or_else(|| anyhow!("buffer's language server was removed"))?;
- Ok::<_, anyhow::Error>((language.name().to_string(), language_server))
- })?;
+ let language_server = project
+ .read_with(&cx, |project, cx| {
+ project
+ .language_server_for_buffer(buffer.read(cx), cx)
+ .cloned()
+ })
+ .ok_or_else(|| anyhow!("no language server found for buffer"))?;
+ let language = buffer
+ .read_with(&cx, |buffer, _| buffer.language().cloned())
+ .ok_or_else(|| anyhow!("no language for buffer"))?;
Project::deserialize_workspace_edit(
project,
edit,
self.push_to_history,
- language_name,
+ language.name(),
language_server,
&mut cx,
)
@@ -343,14 +343,16 @@ impl LspCommand for GetDefinition {
mut cx: AsyncAppContext,
) -> Result<Vec<Location>> {
let mut definitions = Vec::new();
- let (language, language_server) = buffer
- .read_with(&cx, |buffer, _| {
- buffer
- .language()
+ let language_server = project
+ .read_with(&cx, |project, cx| {
+ project
+ .language_server_for_buffer(buffer.read(cx), cx)
.cloned()
- .zip(buffer.language_server().cloned())
})
- .ok_or_else(|| anyhow!("buffer no longer has language server"))?;
+ .ok_or_else(|| anyhow!("no language server found for buffer"))?;
+ let language = buffer
+ .read_with(&cx, |buffer, _| buffer.language().cloned())
+ .ok_or_else(|| anyhow!("no language for buffer"))?;
if let Some(message) = message {
let mut unresolved_locations = Vec::new();
@@ -375,7 +377,7 @@ impl LspCommand for GetDefinition {
.update(&mut cx, |this, cx| {
this.open_local_buffer_via_lsp(
target_uri,
- language.name().to_string(),
+ language.name(),
language_server.clone(),
cx,
)
@@ -519,14 +521,16 @@ impl LspCommand for GetReferences {
mut cx: AsyncAppContext,
) -> Result<Vec<Location>> {
let mut references = Vec::new();
- let (language, language_server) = buffer
- .read_with(&cx, |buffer, _| {
- buffer
- .language()
+ let language_server = project
+ .read_with(&cx, |project, cx| {
+ project
+ .language_server_for_buffer(buffer.read(cx), cx)
.cloned()
- .zip(buffer.language_server().cloned())
})
- .ok_or_else(|| anyhow!("buffer no longer has language server"))?;
+ .ok_or_else(|| anyhow!("no language server found for buffer"))?;
+ let language = buffer
+ .read_with(&cx, |buffer, _| buffer.language().cloned())
+ .ok_or_else(|| anyhow!("no language for buffer"))?;
if let Some(locations) = locations {
for lsp_location in locations {
@@ -534,7 +538,7 @@ impl LspCommand for GetReferences {
.update(&mut cx, |this, cx| {
this.open_local_buffer_via_lsp(
lsp_location.uri,
- language.name().to_string(),
+ language.name(),
language_server.clone(),
cx,
)
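All three LSP commands above now resolve the server through the project's map keyed by (WorktreeId, Arc<str>) rather than asking the buffer for its server. A reduced sketch of that lookup, with std types standing in for the real worktree and buffer handles:

    use std::{collections::HashMap, sync::Arc};

    type WorktreeId = u64;

    struct LanguageServer {
        name: &'static str,
    }

    struct Buffer {
        worktree_id: WorktreeId,
        language_name: Option<Arc<str>>,
    }

    struct Project {
        language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
    }

    impl Project {
        // Mirrors language_server_for_buffer: both a worktree and a language are required.
        fn language_server_for_buffer(&self, buffer: &Buffer) -> Option<&Arc<LanguageServer>> {
            let language_name = buffer.language_name.clone()?;
            self.language_servers.get(&(buffer.worktree_id, language_name))
        }
    }

    fn main() {
        let mut language_servers = HashMap::new();
        language_servers.insert(
            (1, Arc::from("Rust")),
            Arc::new(LanguageServer { name: "rust-analyzer" }),
        );
        let project = Project { language_servers };

        let buffer = Buffer { worktree_id: 1, language_name: Some(Arc::from("Rust")) };
        assert_eq!(project.language_server_for_buffer(&buffer).unwrap().name, "rust-analyzer");

        let plain_text = Buffer { worktree_id: 1, language_name: None };
        assert!(project.language_server_for_buffer(&plain_text).is_none());
    }
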
@@ -16,9 +16,10 @@ use gpui::{
};
use language::{
proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
- range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
- Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
- ToLspPosition, ToOffset, ToPointUtf16, Transaction,
+ range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
+ DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
+ LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
+ ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
@@ -26,10 +27,11 @@ use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use sha2::{Digest, Sha256};
+use similar::{ChangeTag, TextDiff};
use smol::block_on;
use std::{
cell::RefCell,
- cmp,
+ cmp::{self, Ordering},
convert::TryInto,
hash::Hash,
mem,
@@ -48,9 +50,8 @@ pub struct Project {
worktrees: Vec<WorktreeHandle>,
active_entry: Option<ProjectEntry>,
languages: Arc<LanguageRegistry>,
- language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
- started_language_servers:
- HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
+ language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
+ started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
client: Arc<client::Client>,
user_store: ModelHandle<UserStore>,
fs: Arc<dyn Fs>,
@@ -67,6 +68,7 @@ pub struct Project {
loading_local_worktrees:
HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
opened_buffers: HashMap<u64, OpenBuffer>,
+ buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
nonce: u128,
}
@@ -201,7 +203,6 @@ impl Project {
client.add_entity_message_handler(Self::handle_add_collaborator);
client.add_entity_message_handler(Self::handle_buffer_reloaded);
client.add_entity_message_handler(Self::handle_buffer_saved);
- client.add_entity_message_handler(Self::handle_close_buffer);
client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
client.add_entity_message_handler(Self::handle_remove_collaborator);
@@ -286,6 +287,7 @@ impl Project {
shared_buffers: Default::default(),
loading_buffers: Default::default(),
loading_local_worktrees: Default::default(),
+ buffer_snapshots: Default::default(),
client_state: ProjectClientState::Local {
is_shared: false,
remote_id_tx,
@@ -372,6 +374,7 @@ impl Project {
language_servers: Default::default(),
started_language_servers: Default::default(),
opened_buffers: Default::default(),
+ buffer_snapshots: Default::default(),
nonce: StdRng::from_entropy().gen(),
};
for worktree in worktrees {
@@ -723,7 +726,7 @@ impl Project {
let buffer = cx.add_model(|cx| {
Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
});
- self.register_buffer(&buffer, None, cx)?;
+ self.register_buffer(&buffer, cx)?;
Ok(buffer)
}
@@ -798,15 +801,9 @@ impl Project {
let worktree = worktree.as_local_mut().unwrap();
worktree.load_buffer(path, cx)
});
- let worktree = worktree.downgrade();
cx.spawn(|this, mut cx| async move {
let buffer = load_buffer.await?;
- let worktree = worktree
- .upgrade(&cx)
- .ok_or_else(|| anyhow!("worktree was removed"))?;
- this.update(&mut cx, |this, cx| {
- this.register_buffer(&buffer, Some(&worktree), cx)
- })?;
+ this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
Ok(buffer)
})
}
@@ -839,7 +836,7 @@ impl Project {
fn open_local_buffer_via_lsp(
&mut self,
abs_path: lsp::Url,
- lang_name: String,
+ lang_name: Arc<str>,
lang_server: Arc<LanguageServer>,
cx: &mut ModelContext<Self>,
) -> Task<Result<ModelHandle<Buffer>>> {
@@ -891,7 +888,8 @@ impl Project {
})
.await?;
this.update(&mut cx, |this, cx| {
- this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
+ this.assign_language_to_buffer(&buffer, cx);
+ this.register_buffer_with_language_server(&buffer, cx);
});
Ok(())
})
@@ -917,7 +915,6 @@ impl Project {
fn register_buffer(
&mut self,
buffer: &ModelHandle<Buffer>,
- worktree: Option<&ModelHandle<Worktree>>,
cx: &mut ModelContext<Self>,
) -> Result<()> {
let remote_id = buffer.read(cx).remote_id();
@@ -945,74 +942,226 @@ impl Project {
remote_id
))?,
}
- self.assign_language_to_buffer(&buffer, worktree, cx);
+ cx.subscribe(buffer, |this, buffer, event, cx| {
+ this.on_buffer_event(buffer, event, cx);
+ })
+ .detach();
+
+ self.assign_language_to_buffer(buffer, cx);
+ self.register_buffer_with_language_server(buffer, cx);
+
Ok(())
}
- fn assign_language_to_buffer(
+ fn register_buffer_with_language_server(
&mut self,
- buffer: &ModelHandle<Buffer>,
- worktree: Option<&ModelHandle<Worktree>>,
+ buffer_handle: &ModelHandle<Buffer>,
cx: &mut ModelContext<Self>,
- ) -> Option<()> {
- let (path, full_path) = {
- let file = buffer.read(cx).file()?;
- (file.path().clone(), file.full_path(cx))
- };
+ ) {
+ let buffer = buffer_handle.read(cx);
+ let buffer_id = buffer.remote_id();
+ if let Some(file) = File::from_dyn(buffer.file()) {
+ if file.is_local() {
+ let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
+ let initial_snapshot = buffer.text_snapshot();
+ let language_server = self.language_server_for_buffer(buffer, cx).cloned();
+
+ if let Some(local_worktree) = file.worktree.read(cx).as_local() {
+ if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
+ self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
+ .log_err();
+ }
+ }
- // If the buffer has a language, set it and start/assign the language server
- if let Some(language) = self.languages.select_language(&full_path) {
- buffer.update(cx, |buffer, cx| {
- buffer.set_language(Some(language.clone()), cx);
- });
+ if let Some(server) = language_server {
+ server
+ .notify::<lsp::notification::DidOpenTextDocument>(
+ lsp::DidOpenTextDocumentParams {
+ text_document: lsp::TextDocumentItem::new(
+ uri,
+ Default::default(),
+ 0,
+ initial_snapshot.text(),
+ ),
+ }
+ .clone(),
+ )
+ .log_err();
+ buffer_handle.update(cx, |buffer, cx| {
+ buffer.set_completion_triggers(
+ server
+ .capabilities()
+ .completion_provider
+ .as_ref()
+ .and_then(|provider| provider.trigger_characters.clone())
+ .unwrap_or(Vec::new()),
+ cx,
+ )
+ });
+ self.buffer_snapshots
+ .insert(buffer_id, vec![(0, initial_snapshot)]);
+ }
- // For local worktrees, start a language server if needed.
- // Also assign the language server and any previously stored diagnostics to the buffer.
- if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
- let worktree_id = local_worktree.id();
- let worktree_abs_path = local_worktree.abs_path().clone();
- let buffer = buffer.downgrade();
- let language_server =
- self.start_language_server(worktree_id, worktree_abs_path, language, cx);
-
- cx.spawn_weak(|_, mut cx| async move {
- if let Some(language_server) = language_server.await {
- if let Some(buffer) = buffer.upgrade(&cx) {
- buffer.update(&mut cx, |buffer, cx| {
- buffer.set_language_server(Some(language_server), cx);
- });
+ cx.observe_release(buffer_handle, |this, buffer, cx| {
+ if let Some(file) = File::from_dyn(buffer.file()) {
+ if file.is_local() {
+ let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
+ if let Some(server) = this.language_server_for_buffer(buffer, cx) {
+ server
+ .notify::<lsp::notification::DidCloseTextDocument>(
+ lsp::DidCloseTextDocumentParams {
+ text_document: lsp::TextDocumentIdentifier::new(
+ uri.clone(),
+ ),
+ },
+ )
+ .log_err();
+ }
}
}
})
.detach();
}
}
+ }
- if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
- if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
- buffer.update(cx, |buffer, cx| {
- buffer.update_diagnostics(diagnostics, None, cx).log_err();
+ fn on_buffer_event(
+ &mut self,
+ buffer: ModelHandle<Buffer>,
+ event: &BufferEvent,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<()> {
+ match event {
+ BufferEvent::Operation(operation) => {
+ let project_id = self.remote_id()?;
+ let request = self.client.request(proto::UpdateBuffer {
+ project_id,
+ buffer_id: buffer.read(cx).remote_id(),
+ operations: vec![language::proto::serialize_operation(&operation)],
});
+ cx.background().spawn(request).detach_and_log_err(cx);
+ }
+ BufferEvent::Edited => {
+ let language_server = self
+ .language_server_for_buffer(buffer.read(cx), cx)?
+ .clone();
+ let buffer = buffer.read(cx);
+ let file = File::from_dyn(buffer.file())?;
+ let abs_path = file.as_local()?.abs_path(cx);
+ let uri = lsp::Url::from_file_path(abs_path).unwrap();
+ let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
+ let (version, prev_snapshot) = buffer_snapshots.last()?;
+ let next_snapshot = buffer.text_snapshot();
+ let next_version = version + 1;
+
+ let content_changes = buffer
+ .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
+ .map(|edit| {
+ let edit_start = edit.new.start.0;
+ let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
+ let new_text = next_snapshot
+ .text_for_range(edit.new.start.1..edit.new.end.1)
+ .collect();
+ lsp::TextDocumentContentChangeEvent {
+ range: Some(lsp::Range::new(
+ edit_start.to_lsp_position(),
+ edit_end.to_lsp_position(),
+ )),
+ range_length: None,
+ text: new_text,
+ }
+ })
+ .collect();
+
+ buffer_snapshots.push((next_version, next_snapshot));
+
+ language_server
+ .notify::<lsp::notification::DidChangeTextDocument>(
+ lsp::DidChangeTextDocumentParams {
+ text_document: lsp::VersionedTextDocumentIdentifier::new(
+ uri,
+ next_version,
+ ),
+ content_changes,
+ },
+ )
+ .log_err();
+ }
+ BufferEvent::Saved => {
+ let file = File::from_dyn(buffer.read(cx).file())?;
+ let worktree_id = file.worktree_id(cx);
+ let abs_path = file.as_local()?.abs_path(cx);
+ let text_document = lsp::TextDocumentIdentifier {
+ uri: lsp::Url::from_file_path(abs_path).unwrap(),
+ };
+
+ for (_, server) in self.language_servers_for_worktree(worktree_id) {
+ server
+ .notify::<lsp::notification::DidSaveTextDocument>(
+ lsp::DidSaveTextDocumentParams {
+ text_document: text_document.clone(),
+ text: None,
+ },
+ )
+ .log_err();
+ }
}
+ _ => {}
}
None
}
+ fn language_servers_for_worktree(
+ &self,
+ worktree_id: WorktreeId,
+ ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
+ self.language_servers.iter().filter_map(
+ move |((language_server_worktree_id, language_name), server)| {
+ if *language_server_worktree_id == worktree_id {
+ Some((language_name.as_ref(), server))
+ } else {
+ None
+ }
+ },
+ )
+ }
+
+ fn assign_language_to_buffer(
+ &mut self,
+ buffer: &ModelHandle<Buffer>,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<()> {
+ // If the buffer has a language, set it and start the language server if we haven't already.
+ let full_path = buffer.read(cx).file()?.full_path(cx);
+ let language = self.languages.select_language(&full_path)?;
+ buffer.update(cx, |buffer, cx| {
+ buffer.set_language(Some(language.clone()), cx);
+ });
+
+ let file = File::from_dyn(buffer.read(cx).file())?;
+ let worktree = file.worktree.read(cx).as_local()?;
+ let worktree_id = worktree.id();
+ let worktree_abs_path = worktree.abs_path().clone();
+ self.start_language_server(worktree_id, worktree_abs_path, language, cx);
+
+ None
+ }
+
fn start_language_server(
&mut self,
worktree_id: WorktreeId,
worktree_path: Arc<Path>,
language: Arc<Language>,
cx: &mut ModelContext<Self>,
- ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
+ ) {
enum LspEvent {
DiagnosticsStart,
DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
DiagnosticsFinish,
}
- let key = (worktree_id, language.name().to_string());
+ let key = (worktree_id, language.name());
self.started_language_servers
.entry(key.clone())
.or_insert_with(|| {
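register_buffer_with_language_server seeds buffer_snapshots with version 0, and every Edited event pushes the next numbered snapshot so a didChange can be computed against the version the server last acknowledged. A stand-alone sketch of that bookkeeping, with a String standing in for TextBufferSnapshot and the same ten-version retention window used by buffer_snapshot_for_lsp_version further down:

    use std::collections::HashMap;

    type BufferId = u64;
    // A String stands in for the real TextBufferSnapshot.
    type Snapshot = String;

    const OLD_VERSIONS_TO_RETAIN: i32 = 10;

    #[derive(Default)]
    struct SnapshotStore {
        buffer_snapshots: HashMap<BufferId, Vec<(i32, Snapshot)>>,
    }

    impl SnapshotStore {
        // Called when a buffer is first reported to the language server.
        fn open(&mut self, buffer_id: BufferId, initial: Snapshot) {
            self.buffer_snapshots.insert(buffer_id, vec![(0, initial)]);
        }

        // Called on every edit: the version sent in didChange is the previous one plus one.
        fn record_edit(&mut self, buffer_id: BufferId, next: Snapshot) -> i32 {
            let versions = self.buffer_snapshots.entry(buffer_id).or_default();
            let next_version = versions.last().map_or(0, |(version, _)| version + 1);
            versions.push((next_version, next));
            next_version
        }

        // Called when the server replies with edits against a specific document version.
        fn snapshot_for_version(&mut self, buffer_id: BufferId, version: i32) -> Option<Snapshot> {
            let versions = self.buffer_snapshots.get_mut(&buffer_id)?;
            let mut found = None;
            versions.retain(|(snapshot_version, snapshot)| {
                if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
                    false // too old to ever be referenced again
                } else {
                    if *snapshot_version == version {
                        found = Some(snapshot.clone());
                    }
                    true
                }
            });
            found
        }
    }

    fn main() {
        let mut store = SnapshotStore::default();
        store.open(1, "fn main() {}".into());
        let v1 = store.record_edit(1, "fn main() { }".into());
        assert_eq!(v1, 1);
        assert_eq!(store.snapshot_for_version(1, 1).as_deref(), Some("fn main() { }"));
        assert!(store.snapshot_for_version(1, 99).is_none());
    }
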
@@ -1024,12 +1173,8 @@ impl Project {
);
let rpc = self.client.clone();
cx.spawn_weak(|this, mut cx| async move {
- let language_server = language_server?.await.log_err()?;
- if let Some(this) = this.upgrade(&cx) {
- this.update(&mut cx, |this, _| {
- this.language_servers.insert(key, language_server.clone());
- });
- }
+ let mut language_server = language_server?.await.log_err()?;
+ let this = this.upgrade(&cx)?;
let disk_based_sources = language
.disk_based_diagnostic_sources()
@@ -1101,50 +1246,116 @@ impl Project {
.detach();
// Process all the LSP events.
- cx.spawn(|mut cx| async move {
- while let Ok(message) = diagnostics_rx.recv().await {
- let this = this.upgrade(&cx)?;
- match message {
- LspEvent::DiagnosticsStart => {
- this.update(&mut cx, |this, cx| {
- this.disk_based_diagnostics_started(cx);
- if let Some(project_id) = this.remote_id() {
- rpc.send(proto::DiskBasedDiagnosticsUpdating {
- project_id,
- })
+ cx.spawn(|mut cx| {
+ let this = this.downgrade();
+ async move {
+ while let Ok(message) = diagnostics_rx.recv().await {
+ let this = this.upgrade(&cx)?;
+ match message {
+ LspEvent::DiagnosticsStart => {
+ this.update(&mut cx, |this, cx| {
+ this.disk_based_diagnostics_started(cx);
+ if let Some(project_id) = this.remote_id() {
+ rpc.send(proto::DiskBasedDiagnosticsUpdating {
+ project_id,
+ })
+ .log_err();
+ }
+ });
+ }
+ LspEvent::DiagnosticsUpdate(mut params) => {
+ language.process_diagnostics(&mut params);
+ this.update(&mut cx, |this, cx| {
+ this.update_diagnostics(
+ params,
+ &disk_based_sources,
+ cx,
+ )
.log_err();
- }
- });
+ });
+ }
+ LspEvent::DiagnosticsFinish => {
+ this.update(&mut cx, |this, cx| {
+ this.disk_based_diagnostics_finished(cx);
+ if let Some(project_id) = this.remote_id() {
+ rpc.send(proto::DiskBasedDiagnosticsUpdated {
+ project_id,
+ })
+ .log_err();
+ }
+ });
+ }
}
- LspEvent::DiagnosticsUpdate(mut params) => {
- language.process_diagnostics(&mut params);
- this.update(&mut cx, |this, cx| {
- this.update_diagnostics(params, &disk_based_sources, cx)
- .log_err();
- });
+ }
+ Some(())
+ }
+ })
+ .detach();
+
+ let language_server = language_server.initialize().await.log_err()?;
+ this.update(&mut cx, |this, cx| {
+ this.language_servers
+ .insert(key.clone(), language_server.clone());
+
+ // Tell the language server about every open buffer in the worktree that matches the language.
+ for buffer in this.opened_buffers.values() {
+ if let Some(buffer_handle) = buffer.upgrade(cx) {
+ let buffer = buffer_handle.read(cx);
+ let file = if let Some(file) = File::from_dyn(buffer.file()) {
+ file
+ } else {
+ continue;
+ };
+ let language = if let Some(language) = buffer.language() {
+ language
+ } else {
+ continue;
+ };
+ if (file.worktree.read(cx).id(), language.name()) != key {
+ continue;
}
- LspEvent::DiagnosticsFinish => {
- this.update(&mut cx, |this, cx| {
- this.disk_based_diagnostics_finished(cx);
- if let Some(project_id) = this.remote_id() {
- rpc.send(proto::DiskBasedDiagnosticsUpdated {
- project_id,
+
+ let file = file.as_local()?;
+ let versions = this
+ .buffer_snapshots
+ .entry(buffer.remote_id())
+ .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
+ let (version, initial_snapshot) = versions.last().unwrap();
+ let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
+ language_server
+ .notify::<lsp::notification::DidOpenTextDocument>(
+ lsp::DidOpenTextDocumentParams {
+ text_document: lsp::TextDocumentItem::new(
+ uri,
+ Default::default(),
+ *version,
+ initial_snapshot.text(),
+ ),
+ },
+ )
+ .log_err()?;
+ buffer_handle.update(cx, |buffer, cx| {
+ buffer.set_completion_triggers(
+ language_server
+ .capabilities()
+ .completion_provider
+ .as_ref()
+ .and_then(|provider| {
+ provider.trigger_characters.clone()
})
- .log_err();
- }
- });
- }
+ .unwrap_or(Vec::new()),
+ cx,
+ )
+ });
}
}
+
Some(())
- })
- .detach();
+ });
Some(language_server)
})
- .shared()
- })
- .clone()
+ });
}
pub fn update_diagnostics(
@@ -1274,9 +1485,7 @@ impl Project {
.file()
.map_or(false, |file| *file.path() == project_path.path)
{
- buffer.update(cx, |buffer, cx| {
- buffer.update_diagnostics(diagnostics.clone(), version, cx)
- })?;
+ self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
break;
}
}
@@ -1291,6 +1500,90 @@ impl Project {
Ok(())
}
+ fn update_buffer_diagnostics(
+ &mut self,
+ buffer: &ModelHandle<Buffer>,
+ mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
+ version: Option<i32>,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
+ Ordering::Equal
+ .then_with(|| b.is_primary.cmp(&a.is_primary))
+ .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
+ .then_with(|| a.severity.cmp(&b.severity))
+ .then_with(|| a.message.cmp(&b.message))
+ }
+
+ let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
+
+ diagnostics.sort_unstable_by(|a, b| {
+ Ordering::Equal
+ .then_with(|| a.range.start.cmp(&b.range.start))
+ .then_with(|| b.range.end.cmp(&a.range.end))
+ .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
+ });
+
+ let mut sanitized_diagnostics = Vec::new();
+ let mut edits_since_save = snapshot
+ .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
+ .peekable();
+ let mut last_edit_old_end = PointUtf16::zero();
+ let mut last_edit_new_end = PointUtf16::zero();
+ 'outer: for entry in diagnostics {
+ let mut start = entry.range.start;
+ let mut end = entry.range.end;
+
+ // Some diagnostics are based on files on disk instead of buffers'
+ // current contents. Adjust these diagnostics' ranges to reflect
+ // any unsaved edits.
+ if entry.diagnostic.is_disk_based {
+ while let Some(edit) = edits_since_save.peek() {
+ if edit.old.end <= start {
+ last_edit_old_end = edit.old.end;
+ last_edit_new_end = edit.new.end;
+ edits_since_save.next();
+ } else if edit.old.start <= end && edit.old.end >= start {
+ continue 'outer;
+ } else {
+ break;
+ }
+ }
+
+ let start_overshoot = start - last_edit_old_end;
+ start = last_edit_new_end;
+ start += start_overshoot;
+
+ let end_overshoot = end - last_edit_old_end;
+ end = last_edit_new_end;
+ end += end_overshoot;
+ }
+
+ let mut range = snapshot.clip_point_utf16(start, Bias::Left)
+ ..snapshot.clip_point_utf16(end, Bias::Right);
+
+ // Expand empty ranges by one character
+ if range.start == range.end {
+ range.end.column += 1;
+ range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
+ if range.start == range.end && range.end.column > 0 {
+ range.start.column -= 1;
+ range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
+ }
+ }
+
+ sanitized_diagnostics.push(DiagnosticEntry {
+ range,
+ diagnostic: entry.diagnostic,
+ });
+ }
+ drop(edits_since_save);
+
+ let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
+ buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
+ Ok(())
+ }
+
pub fn format(
&self,
buffers: HashSet<ModelHandle<Buffer>>,
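Before sanitizing ranges, update_buffer_diagnostics sorts the incoming entries: start ascending, then end descending, then primary entries ahead of their related ones. A small runnable sketch of that comparator chain over simplified entries (the real compare_diagnostics also breaks ties on is_disk_based and severity):

    use std::cmp::Ordering;

    #[derive(Debug)]
    struct DiagnosticEntry {
        // (row, column) pairs stand in for PointUtf16.
        range: std::ops::Range<(u32, u32)>,
        is_primary: bool,
        message: String,
    }

    fn main() {
        let mut diagnostics = vec![
            DiagnosticEntry { range: (1, 0)..(1, 4), is_primary: false, message: "related".into() },
            DiagnosticEntry { range: (0, 2)..(0, 9), is_primary: true, message: "unused variable".into() },
            DiagnosticEntry { range: (0, 2)..(0, 5), is_primary: false, message: "declared here".into() },
        ];

        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                // Earlier ranges first.
                .then_with(|| a.range.start.cmp(&b.range.start))
                // Among equal starts, wider (later-ending) ranges first.
                .then_with(|| b.range.end.cmp(&a.range.end))
                // Primary diagnostics sort ahead of their related entries.
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.message.cmp(&b.message))
        });

        assert_eq!(diagnostics[0].message, "unused variable");
        assert_eq!(diagnostics[2].message, "related");
    }
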
@@ -1301,25 +1594,11 @@ impl Project {
let mut remote_buffers = None;
for buffer_handle in buffers {
let buffer = buffer_handle.read(cx);
- let worktree;
if let Some(file) = File::from_dyn(buffer.file()) {
- worktree = file.worktree.clone();
if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
- let lang_server;
- if let Some(lang) = buffer.language() {
- if let Some(server) = self
- .language_servers
- .get(&(worktree.read(cx).id(), lang.name().to_string()))
- {
- lang_server = server.clone();
- } else {
- return Task::ready(Ok(Default::default()));
- };
- } else {
- return Task::ready(Ok(Default::default()));
+ if let Some(server) = self.language_server_for_buffer(buffer, cx) {
+ local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
}
-
- local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
} else {
remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
}
@@ -1353,21 +1632,17 @@ impl Project {
.await?;
}
- for (buffer, buffer_abs_path, lang_server) in local_buffers {
- let capabilities = if let Some(capabilities) = lang_server.capabilities().await {
- capabilities
- } else {
- continue;
- };
-
+ for (buffer, buffer_abs_path, language_server) in local_buffers {
let text_document = lsp::TextDocumentIdentifier::new(
lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
);
+ let capabilities = &language_server.capabilities();
let lsp_edits = if capabilities
.document_formatting_provider
- .map_or(false, |provider| provider != lsp::OneOf::Left(false))
+ .as_ref()
+ .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
{
- lang_server
+ language_server
.request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
text_document,
options: Default::default(),
@@ -1376,13 +1651,14 @@ impl Project {
.await?
} else if capabilities
.document_range_formatting_provider
- .map_or(false, |provider| provider != lsp::OneOf::Left(false))
+ .as_ref()
+ .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
{
let buffer_start = lsp::Position::new(0, 0);
let buffer_end = buffer
.read_with(&cx, |buffer, _| buffer.max_point_utf16())
.to_lsp_position();
- lang_server
+ language_server
.request::<lsp::request::RangeFormatting>(
lsp::DocumentRangeFormattingParams {
text_document,
@@ -1397,9 +1673,9 @@ impl Project {
};
if let Some(lsp_edits) = lsp_edits {
- let edits = buffer
- .update(&mut cx, |buffer, cx| {
- buffer.edits_from_lsp(lsp_edits, None, cx)
+ let edits = this
+ .update(&mut cx, |this, cx| {
+ this.edits_from_lsp(&buffer, lsp_edits, None, cx)
})
.await?;
buffer.update(&mut cx, |buffer, cx| {
@@ -1564,10 +1840,10 @@ impl Project {
cx: &mut ModelContext<Self>,
) -> Task<Result<ModelHandle<Buffer>>> {
if self.is_local() {
- let language_server = if let Some(server) = self
- .language_servers
- .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
- {
+ let language_server = if let Some(server) = self.language_servers.get(&(
+ symbol.source_worktree_id,
+ Arc::from(symbol.language_name.as_str()),
+ )) {
server.clone()
} else {
return Task::ready(Err(anyhow!(
@@ -1593,7 +1869,7 @@ impl Project {
self.open_local_buffer_via_lsp(
symbol_uri,
- symbol.language_name.clone(),
+ Arc::from(symbol.language_name.as_str()),
language_server,
cx,
)
@@ -1637,11 +1913,12 @@ impl Project {
if worktree.read(cx).as_local().is_some() {
let buffer_abs_path = buffer_abs_path.unwrap();
- let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
- server
- } else {
- return Task::ready(Ok(Default::default()));
- };
+ let lang_server =
+ if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
+ server.clone()
+ } else {
+ return Task::ready(Ok(Default::default()));
+ };
cx.spawn(|_, cx| async move {
let completions = lang_server
@@ -1748,19 +2025,21 @@ impl Project {
let buffer_id = buffer.remote_id();
if self.is_local() {
- let lang_server = if let Some(language_server) = buffer.language_server() {
- language_server.clone()
+ let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
+ server.clone()
} else {
- return Task::ready(Err(anyhow!("buffer does not have a language server")));
+ return Task::ready(Ok(Default::default()));
};
- cx.spawn(|_, mut cx| async move {
+ cx.spawn(|this, mut cx| async move {
let resolved_completion = lang_server
.request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
.await?;
if let Some(edits) = resolved_completion.additional_text_edits {
- let edits = buffer_handle
- .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
+ let edits = this
+ .update(&mut cx, |this, cx| {
+ this.edits_from_lsp(&buffer_handle, edits, None, cx)
+ })
.await?;
buffer_handle.update(&mut cx, |buffer, cx| {
buffer.finalize_last_transaction();
@@ -1837,34 +2116,18 @@ impl Project {
if worktree.read(cx).as_local().is_some() {
let buffer_abs_path = buffer_abs_path.unwrap();
- let lang_name;
- let lang_server;
- if let Some(lang) = buffer.language() {
- lang_name = lang.name().to_string();
- if let Some(server) = self
- .language_servers
- .get(&(worktree.read(cx).id(), lang_name.clone()))
- {
- lang_server = server.clone();
- } else {
- return Task::ready(Ok(Default::default()));
- };
+ let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
+ server.clone()
} else {
return Task::ready(Ok(Default::default()));
- }
+ };
let lsp_range = lsp::Range::new(
range.start.to_point_utf16(buffer).to_lsp_position(),
range.end.to_point_utf16(buffer).to_lsp_position(),
);
cx.foreground().spawn(async move {
- if !lang_server
- .capabilities()
- .await
- .map_or(false, |capabilities| {
- capabilities.code_action_provider.is_some()
- })
- {
+ if !lang_server.capabilities().code_action_provider.is_some() {
return Ok(Default::default());
}
@@ -1941,14 +2204,14 @@ impl Project {
if self.is_local() {
let buffer = buffer_handle.read(cx);
let lang_name = if let Some(lang) = buffer.language() {
- lang.name().to_string()
+ lang.name()
} else {
return Task::ready(Ok(Default::default()));
};
- let lang_server = if let Some(language_server) = buffer.language_server() {
- language_server.clone()
+ let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
+ server.clone()
} else {
- return Task::ready(Err(anyhow!("buffer does not have a language server")));
+ return Task::ready(Ok(Default::default()));
};
let range = action.range.to_point_utf16(buffer);
@@ -2022,7 +2285,7 @@ impl Project {
this: ModelHandle<Self>,
edit: lsp::WorkspaceEdit,
push_to_history: bool,
- language_name: String,
+ language_name: Arc<str>,
language_server: Arc<LanguageServer>,
cx: &mut AsyncAppContext,
) -> Result<ProjectTransaction> {
@@ -2106,13 +2369,18 @@ impl Project {
})
.await?;
- let edits = buffer_to_edit
- .update(cx, |buffer, cx| {
+ let edits = this
+ .update(cx, |this, cx| {
let edits = op.edits.into_iter().map(|edit| match edit {
lsp::OneOf::Left(edit) => edit,
lsp::OneOf::Right(edit) => edit.text_edit,
});
- buffer.edits_from_lsp(edits, op.text_document.version, cx)
+ this.edits_from_lsp(
+ &buffer_to_edit,
+ edits,
+ op.text_document.version,
+ cx,
+ )
})
.await?;
@@ -2389,16 +2657,12 @@ impl Project {
let buffer = buffer_handle.read(cx);
if self.is_local() {
let file = File::from_dyn(buffer.file()).and_then(File::as_local);
- if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
+ if let Some((file, language_server)) =
+ file.zip(self.language_server_for_buffer(buffer, cx).cloned())
+ {
let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
return cx.spawn(|this, cx| async move {
- if !language_server
- .capabilities()
- .await
- .map_or(false, |capabilities| {
- request.check_capabilities(&capabilities)
- })
- {
+ if !request.check_capabilities(&language_server.capabilities()) {
return Ok(Default::default());
}
@@ -2550,7 +2814,7 @@ impl Project {
self.worktrees
.push(WorktreeHandle::Strong(worktree.clone()));
} else {
- cx.observe_release(&worktree, |this, cx| {
+ cx.observe_release(&worktree, |this, _, cx| {
this.worktrees
.retain(|worktree| worktree.upgrade(cx).is_some());
cx.notify();
@@ -3389,9 +3653,7 @@ impl Project {
Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
});
- this.update(&mut cx, |this, cx| {
- this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
- })?;
+ this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
*opened_buffer_tx.borrow_mut().borrow_mut() = ();
Ok(buffer)
@@ -3429,16 +3691,6 @@ impl Project {
})
}
- async fn handle_close_buffer(
- _: ModelHandle<Self>,
- _: TypedEnvelope<proto::CloseBuffer>,
- _: Arc<Client>,
- _: AsyncAppContext,
- ) -> Result<()> {
- // TODO: use this for following
- Ok(())
- }
-
async fn handle_buffer_saved(
this: ModelHandle<Self>,
envelope: TypedEnvelope<proto::BufferSaved>,
@@ -3528,6 +3780,160 @@ impl Project {
.await
}
}
+
+ fn edits_from_lsp(
+ &mut self,
+ buffer: &ModelHandle<Buffer>,
+ lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
+ version: Option<i32>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
+ let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
+ cx.background().spawn(async move {
+ let snapshot = snapshot?;
+ let mut lsp_edits = lsp_edits
+ .into_iter()
+ .map(|edit| (range_from_lsp(edit.range), edit.new_text))
+ .peekable();
+
+ let mut edits = Vec::new();
+ while let Some((mut range, mut new_text)) = lsp_edits.next() {
+ // Combine any LSP edits that are adjacent.
+ //
+ // Also, combine LSP edits that are separated from each other by only
+ // a newline. This is important because for some code actions,
+ // Rust-analyzer rewrites the entire buffer via a series of edits that
+ // are separated by unchanged newline characters.
+ //
+ // In order for the diffing logic below to work properly, any edits that
+ // cancel each other out must be combined into one.
+ while let Some((next_range, next_text)) = lsp_edits.peek() {
+ if next_range.start > range.end {
+ if next_range.start.row > range.end.row + 1
+ || next_range.start.column > 0
+ || snapshot.clip_point_utf16(
+ PointUtf16::new(range.end.row, u32::MAX),
+ Bias::Left,
+ ) > range.end
+ {
+ break;
+ }
+ new_text.push('\n');
+ }
+ range.end = next_range.end;
+ new_text.push_str(&next_text);
+ lsp_edits.next();
+ }
+
+ if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
+ || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
+ {
+ return Err(anyhow!("invalid edits received from language server"));
+ }
+
+ // For multiline edits, perform a diff of the old and new text so that
+ // we can identify the changes more precisely, preserving the locations
+ // of any anchors positioned in the unchanged regions.
+ if range.end.row > range.start.row {
+ let mut offset = range.start.to_offset(&snapshot);
+ let old_text = snapshot.text_for_range(range).collect::<String>();
+
+ let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
+ let mut moved_since_edit = true;
+ for change in diff.iter_all_changes() {
+ let tag = change.tag();
+ let value = change.value();
+ match tag {
+ ChangeTag::Equal => {
+ offset += value.len();
+ moved_since_edit = true;
+ }
+ ChangeTag::Delete => {
+ let start = snapshot.anchor_after(offset);
+ let end = snapshot.anchor_before(offset + value.len());
+ if moved_since_edit {
+ edits.push((start..end, String::new()));
+ } else {
+ edits.last_mut().unwrap().0.end = end;
+ }
+ offset += value.len();
+ moved_since_edit = false;
+ }
+ ChangeTag::Insert => {
+ if moved_since_edit {
+ let anchor = snapshot.anchor_after(offset);
+ edits.push((anchor.clone()..anchor, value.to_string()));
+ } else {
+ edits.last_mut().unwrap().1.push_str(value);
+ }
+ moved_since_edit = false;
+ }
+ }
+ }
+ } else if range.end == range.start {
+ let anchor = snapshot.anchor_after(range.start);
+ edits.push((anchor.clone()..anchor, new_text));
+ } else {
+ let edit_start = snapshot.anchor_after(range.start);
+ let edit_end = snapshot.anchor_before(range.end);
+ edits.push((edit_start..edit_end, new_text));
+ }
+ }
+
+ Ok(edits)
+ })
+ }
+
+ fn buffer_snapshot_for_lsp_version(
+ &mut self,
+ buffer: &ModelHandle<Buffer>,
+ version: Option<i32>,
+ cx: &AppContext,
+ ) -> Result<TextBufferSnapshot> {
+ const OLD_VERSIONS_TO_RETAIN: i32 = 10;
+
+ if let Some(version) = version {
+ let buffer_id = buffer.read(cx).remote_id();
+ let snapshots = self
+ .buffer_snapshots
+ .get_mut(&buffer_id)
+ .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
+ let mut found_snapshot = None;
+ snapshots.retain(|(snapshot_version, snapshot)| {
+ if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
+ false
+ } else {
+ if *snapshot_version == version {
+ found_snapshot = Some(snapshot.clone());
+ }
+ true
+ }
+ });
+
+ found_snapshot.ok_or_else(|| {
+ anyhow!(
+ "snapshot not found for buffer {} at version {}",
+ buffer_id,
+ version
+ )
+ })
+ } else {
+ Ok((buffer.read(cx)).text_snapshot())
+ }
+ }
+
+ fn language_server_for_buffer(
+ &self,
+ buffer: &Buffer,
+ cx: &AppContext,
+ ) -> Option<&Arc<LanguageServer>> {
+ if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
+ let worktree_id = file.worktree_id(cx);
+ self.language_servers.get(&(worktree_id, language.name()))
+ } else {
+ None
+ }
+ }
}
impl WorktreeHandle {
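This is what the new similar dependency is for: edits_from_lsp diffs the old and new text of a multiline LSP edit line by line, so only the lines that actually changed become buffer edits and anchors in the untouched lines survive. A reduced sketch of that diffing step, returning (offset, deleted_len, inserted_text) triples instead of anchored edits:

    use similar::{ChangeTag, TextDiff};

    // Turn a whole-region replacement into minimal line-level edits:
    // (start_offset, deleted_len, inserted_text), offsets into `old_text`.
    fn minimal_edits(old_text: &str, new_text: &str) -> Vec<(usize, usize, String)> {
        let mut edits: Vec<(usize, usize, String)> = Vec::new();
        let mut offset = 0;
        let mut moved_since_edit = true;
        let diff = TextDiff::from_lines(old_text, new_text);
        for change in diff.iter_all_changes() {
            let value = change.value();
            match change.tag() {
                ChangeTag::Equal => {
                    offset += value.len();
                    moved_since_edit = true;
                }
                ChangeTag::Delete => {
                    if moved_since_edit {
                        edits.push((offset, value.len(), String::new()));
                    } else {
                        edits.last_mut().unwrap().1 += value.len();
                    }
                    offset += value.len();
                    moved_since_edit = false;
                }
                ChangeTag::Insert => {
                    if moved_since_edit {
                        edits.push((offset, 0, value.to_string()));
                    } else {
                        edits.last_mut().unwrap().2.push_str(value);
                    }
                    moved_since_edit = false;
                }
            }
        }
        edits
    }

    fn main() {
        let old = "fn main() {\n    println!(\"hello\");\n}\n";
        let new = "fn main() {\n    println!(\"goodbye\");\n}\n";
        let edits = minimal_edits(old, new);
        // Only the middle line is touched; the surrounding lines stay untouched.
        assert_eq!(edits.len(), 1);
        assert_eq!(edits[0].0, "fn main() {\n".len());
        assert_eq!(edits[0].1, "    println!(\"hello\");\n".len());
        assert_eq!(edits[0].2, "    println!(\"goodbye\");\n");
    }
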
@@ -19,7 +19,7 @@ use gpui::{
};
use language::{
proto::{deserialize_version, serialize_version},
- Buffer, DiagnosticEntry, Operation, PointUtf16, Rope,
+ Buffer, DiagnosticEntry, PointUtf16, Rope,
};
use lazy_static::lazy_static;
use parking_lot::Mutex;
@@ -71,7 +71,6 @@ pub struct LocalWorktree {
share: Option<ShareState>,
diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<PointUtf16>>>,
diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
- queued_operations: Vec<(u64, Operation)>,
client: Arc<Client>,
fs: Arc<dyn Fs>,
visible: bool,
@@ -84,7 +83,6 @@ pub struct RemoteWorktree {
client: Arc<Client>,
updates_tx: UnboundedSender<proto::UpdateWorktree>,
replica_id: ReplicaId,
- queued_operations: Vec<(u64, Operation)>,
diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
visible: bool,
}
@@ -226,7 +224,6 @@ impl Worktree {
snapshot_rx: snapshot_rx.clone(),
updates_tx,
client: client.clone(),
- queued_operations: Default::default(),
diagnostic_summaries: TreeMap::from_ordered_entries(
worktree.diagnostic_summaries.into_iter().map(|summary| {
(
@@ -420,42 +417,6 @@ impl Worktree {
cx.notify();
}
-
- fn send_buffer_update(
- &mut self,
- buffer_id: u64,
- operation: Operation,
- cx: &mut ModelContext<Self>,
- ) {
- if let Some((project_id, rpc)) = match self {
- Worktree::Local(worktree) => worktree
- .share
- .as_ref()
- .map(|share| (share.project_id, worktree.client.clone())),
- Worktree::Remote(worktree) => Some((worktree.project_id, worktree.client.clone())),
- } {
- cx.spawn(|worktree, mut cx| async move {
- if let Err(error) = rpc
- .request(proto::UpdateBuffer {
- project_id,
- buffer_id,
- operations: vec![language::proto::serialize_operation(&operation)],
- })
- .await
- {
- worktree.update(&mut cx, |worktree, _| {
- log::error!("error sending buffer operation: {}", error);
- match worktree {
- Worktree::Local(t) => &mut t.queued_operations,
- Worktree::Remote(t) => &mut t.queued_operations,
- }
- .push((buffer_id, operation));
- });
- }
- })
- .detach();
- }
- }
}
impl LocalWorktree {
@@ -526,7 +487,6 @@ impl LocalWorktree {
poll_task: None,
diagnostics: Default::default(),
diagnostic_summaries: Default::default(),
- queued_operations: Default::default(),
client,
fs,
visible,
@@ -1455,26 +1415,6 @@ impl language::File for File {
})
}
- fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext) {
- self.worktree.update(cx, |worktree, cx| {
- worktree.send_buffer_update(buffer_id, operation, cx);
- });
- }
-
- fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext) {
- self.worktree.update(cx, |worktree, _| {
- if let Worktree::Remote(worktree) = worktree {
- worktree
- .client
- .send(proto::CloseBuffer {
- project_id: worktree.project_id,
- buffer_id,
- })
- .log_err();
- }
- });
- }
-
fn as_any(&self) -> &dyn Any {
self
}
@@ -42,7 +42,6 @@ message Envelope {
OpenBuffer open_buffer = 35;
OpenBufferResponse open_buffer_response = 36;
- CloseBuffer close_buffer = 37;
UpdateBuffer update_buffer = 38;
UpdateBufferFile update_buffer_file = 39;
SaveBuffer save_buffer = 40;
@@ -146,7 +146,6 @@ messages!(
(BufferReloaded, Foreground),
(BufferSaved, Foreground),
(ChannelMessageSent, Foreground),
- (CloseBuffer, Foreground),
(DiskBasedDiagnosticsUpdated, Background),
(DiskBasedDiagnosticsUpdating, Background),
(Error, Foreground),
@@ -247,7 +246,6 @@ entity_messages!(
ApplyCompletionAdditionalEdits,
BufferReloaded,
BufferSaved,
- CloseBuffer,
DiskBasedDiagnosticsUpdated,
DiskBasedDiagnosticsUpdating,
FormatBuffers,
@@ -5,7 +5,7 @@ use gpui::{
action, elements::*, keymap::Binding, platform::CursorStyle, Entity, MutableAppContext,
RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
};
-use language::AnchorRangeExt;
+use language::OffsetRangeExt;
use postage::watch;
use project::search::SearchQuery;
use std::ops::Range;
@@ -102,7 +102,6 @@ impl Server {
.add_request_handler(Server::forward_project_request::<proto::PrepareRename>)
.add_request_handler(Server::forward_project_request::<proto::PerformRename>)
.add_request_handler(Server::forward_project_request::<proto::FormatBuffers>)
- .add_message_handler(Server::close_buffer)
.add_request_handler(Server::update_buffer)
.add_message_handler(Server::update_buffer_file)
.add_message_handler(Server::buffer_reloaded)
@@ -581,19 +580,6 @@ impl Server {
.await?)
}
- async fn close_buffer(
- self: Arc<Server>,
- request: TypedEnvelope<proto::CloseBuffer>,
- ) -> tide::Result<()> {
- let host_connection_id = self
- .state()
- .read_project(request.payload.project_id, request.sender_id)?
- .host_connection_id;
- self.peer
- .forward_send(request.sender_id, host_connection_id, request.payload)?;
- Ok(())
- }
-
async fn save_buffer(
self: Arc<Server>,
request: TypedEnvelope<proto::SaveBuffer>,
@@ -1025,8 +1011,8 @@ mod tests {
};
use gpui::{executor, ModelHandle, TestAppContext};
use language::{
- tree_sitter_rust, AnchorRangeExt, Diagnostic, DiagnosticEntry, Language, LanguageConfig,
- LanguageRegistry, LanguageServerConfig, Point, ToLspPosition,
+ tree_sitter_rust, Diagnostic, DiagnosticEntry, Language, LanguageConfig, LanguageRegistry,
+ LanguageServerConfig, OffsetRangeExt, Point, ToLspPosition,
};
use lsp;
use parking_lot::Mutex;
@@ -1034,6 +1020,7 @@ mod tests {
use project::{
fs::{FakeFs, Fs as _},
search::SearchQuery,
+ worktree::WorktreeHandle,
DiagnosticSummary, Project, ProjectPath,
};
use rand::prelude::*;
@@ -1411,6 +1398,8 @@ mod tests {
buffer_b.read_with(cx_b, |buf, _| assert!(!buf.is_dirty()));
buffer_c.condition(cx_c, |buf, _| !buf.is_dirty()).await;
+ worktree_a.flush_fs_events(cx_a).await;
+
// Make changes on host's file system, see those changes on guest worktrees.
fs.rename(
"/a/file1".as_ref(),
@@ -1844,13 +1833,13 @@ mod tests {
// Client A sees that a guest has joined.
project_a
- .condition(&cx_a, |p, _| p.collaborators().len() == 1)
+ .condition(cx_a, |p, _| p.collaborators().len() == 1)
.await;
// Drop client B's connection and ensure client A observes client B leaving the project.
client_b.disconnect(&cx_b.to_async()).unwrap();
project_a
- .condition(&cx_a, |p, _| p.collaborators().len() == 0)
+ .condition(cx_a, |p, _| p.collaborators().len() == 0)
.await;
// Rejoin the project as client B
@@ -1867,14 +1856,15 @@ mod tests {
// Client A sees that a guest has re-joined.
project_a
- .condition(&cx_a, |p, _| p.collaborators().len() == 1)
+ .condition(cx_a, |p, _| p.collaborators().len() == 1)
.await;
// Simulate connection loss for client B and ensure client A observes client B leaving the project.
+ client_b.wait_for_current_user(cx_b).await;
server.disconnect_client(client_b.current_user_id(cx_b));
cx_a.foreground().advance_clock(Duration::from_secs(3));
project_a
- .condition(&cx_a, |p, _| p.collaborators().len() == 0)
+ .condition(cx_a, |p, _| p.collaborators().len() == 0)
.await;
}
@@ -1956,7 +1946,10 @@ mod tests {
// Simulate a language server reporting errors for a file.
let mut fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server
- .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
+ .receive_notification::<lsp::notification::DidOpenTextDocument>()
+ .await;
+ fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
+ lsp::PublishDiagnosticsParams {
uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
@@ -1965,8 +1958,8 @@ mod tests {
message: "message 1".to_string(),
..Default::default()
}],
- })
- .await;
+ },
+ );
// Wait for server to see the diagnostics update.
server
@@ -2015,8 +2008,8 @@ mod tests {
});
// Simulate a language server reporting more errors for a file.
- fake_language_server
- .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
+ fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
+ lsp::PublishDiagnosticsParams {
uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
version: None,
diagnostics: vec![
@@ -2036,8 +2029,8 @@ mod tests {
..Default::default()
},
],
- })
- .await;
+ },
+ );
// Client b gets the updated summaries
project_b
@@ -2381,10 +2374,6 @@ mod tests {
.await
.unwrap();
- let format = project_b.update(cx_b, |project, cx| {
- project.format(HashSet::from_iter([buffer_b.clone()]), true, cx)
- });
-
let mut fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.handle_request::<lsp::request::Formatting, _>(|_, _| {
Some(vec![
@@ -2399,7 +2388,12 @@ mod tests {
])
});
- format.await.unwrap();
+ project_b
+ .update(cx_b, |project, cx| {
+ project.format(HashSet::from_iter([buffer_b.clone()]), true, cx)
+ })
+ .await
+ .unwrap();
assert_eq!(
buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
"let honey = two"
@@ -2489,8 +2483,6 @@ mod tests {
.unwrap();
// Request the definition of a symbol as the guest.
- let definitions_1 = project_b.update(cx_b, |p, cx| p.definition(&buffer_b, 23, cx));
-
let mut fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.handle_request::<lsp::request::GotoDefinition, _>(|_, _| {
Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
@@ -2499,7 +2491,10 @@ mod tests {
)))
});
- let definitions_1 = definitions_1.await.unwrap();
+ let definitions_1 = project_b
+ .update(cx_b, |p, cx| p.definition(&buffer_b, 23, cx))
+ .await
+ .unwrap();
cx_b.read(|cx| {
assert_eq!(definitions_1.len(), 1);
assert_eq!(project_b.read(cx).worktrees(cx).count(), 2);
@@ -2516,7 +2511,6 @@ mod tests {
// Try getting more definitions for the same buffer, ensuring the buffer gets reused from
// the previous call to `definition`.
- let definitions_2 = project_b.update(cx_b, |p, cx| p.definition(&buffer_b, 33, cx));
fake_language_server.handle_request::<lsp::request::GotoDefinition, _>(|_, _| {
Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
lsp::Url::from_file_path("/root-2/b.rs").unwrap(),
@@ -2524,7 +2518,10 @@ mod tests {
)))
});
- let definitions_2 = definitions_2.await.unwrap();
+ let definitions_2 = project_b
+ .update(cx_b, |p, cx| p.definition(&buffer_b, 33, cx))
+ .await
+ .unwrap();
cx_b.read(|cx| {
assert_eq!(definitions_2.len(), 1);
assert_eq!(project_b.read(cx).worktrees(cx).count(), 2);
@@ -2625,8 +2622,6 @@ mod tests {
.unwrap();
// Request references to a symbol as the guest.
- let references = project_b.update(cx_b, |p, cx| p.references(&buffer_b, 7, cx));
-
let mut fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.handle_request::<lsp::request::References, _>(|params, _| {
assert_eq!(
@@ -2649,7 +2644,10 @@ mod tests {
])
});
- let references = references.await.unwrap();
+ let references = project_b
+ .update(cx_b, |p, cx| p.references(&buffer_b, 7, cx))
+ .await
+ .unwrap();
cx_b.read(|cx| {
assert_eq!(references.len(), 3);
assert_eq!(project_b.read(cx).worktrees(cx).count(), 2);
@@ -2853,8 +2851,6 @@ mod tests {
.unwrap();
// Request document highlights as the guest.
- let highlights = project_b.update(cx_b, |p, cx| p.document_highlights(&buffer_b, 34, cx));
-
let mut fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.handle_request::<lsp::request::DocumentHighlightRequest, _>(
|params, _| {
@@ -2896,7 +2892,10 @@ mod tests {
},
);
- let highlights = highlights.await.unwrap();
+ let highlights = project_b
+ .update(cx_b, |p, cx| p.document_highlights(&buffer_b, 34, cx))
+ .await
+ .unwrap();
buffer_b.read_with(cx_b, |buffer, _| {
let snapshot = buffer.snapshot();
@@ -2998,8 +2997,6 @@ mod tests {
.await
.unwrap();
- // Request the definition of a symbol as the guest.
- let symbols = project_b.update(cx_b, |p, cx| p.symbols("two", cx));
let mut fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.handle_request::<lsp::request::WorkspaceSymbol, _>(|_, _| {
#[allow(deprecated)]
@@ -3016,7 +3013,11 @@ mod tests {
}])
});
- let symbols = symbols.await.unwrap();
+ // Request the definition of a symbol as the guest.
+ let symbols = project_b
+ .update(cx_b, |p, cx| p.symbols("two", cx))
+ .await
+ .unwrap();
assert_eq!(symbols.len(), 1);
assert_eq!(symbols[0].name, "TWO");
@@ -3127,6 +3128,14 @@ mod tests {
.await
.unwrap();
+ let mut fake_language_server = fake_language_servers.next().await.unwrap();
+ fake_language_server.handle_request::<lsp::request::GotoDefinition, _>(|_, _| {
+ Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
+ lsp::Url::from_file_path("/root/b.rs").unwrap(),
+ lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
+ )))
+ });
+
let definitions;
let buffer_b2;
if rng.gen() {
@@ -3137,14 +3146,6 @@ mod tests {
definitions = project_b.update(cx_b, |p, cx| p.definition(&buffer_b1, 23, cx));
}
- let mut fake_language_server = fake_language_servers.next().await.unwrap();
- fake_language_server.handle_request::<lsp::request::GotoDefinition, _>(|_, _| {
- Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
- lsp::Url::from_file_path("/root/b.rs").unwrap(),
- lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
- )))
- });
-
let buffer_b2 = buffer_b2.await.unwrap();
let definitions = definitions.await.unwrap();
assert_eq!(definitions.len(), 1);
@@ -4478,17 +4479,16 @@ mod tests {
let peer_id = PeerId(connection_id_rx.next().await.unwrap().0);
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http, cx));
- let mut authed_user =
- user_store.read_with(cx, |user_store, _| user_store.watch_current_user());
- while authed_user.next().await.unwrap().is_none() {}
- TestClient {
+ let client = TestClient {
client,
peer_id,
user_store,
project: Default::default(),
buffers: Default::default(),
- }
+ };
+ client.wait_for_current_user(cx).await;
+ client
}
fn disconnect_client(&self, user_id: UserId) {
@@ -4568,6 +4568,13 @@ mod tests {
)
}
+ async fn wait_for_current_user(&self, cx: &TestAppContext) {
+ let mut authed_user = self
+ .user_store
+ .read_with(cx, |user_store, _| user_store.watch_current_user());
+ while authed_user.next().await.unwrap().is_none() {}
+ }
+
fn simulate_host(
mut self,
project: ModelHandle<Project>,
@@ -1,5 +1,5 @@
use super::{Point, ToOffset};
-use crate::{rope::TextDimension, BufferSnapshot, PointUtf16, ToPointUtf16};
+use crate::{rope::TextDimension, BufferSnapshot, PointUtf16, ToPoint, ToPointUtf16};
use anyhow::Result;
use std::{cmp::Ordering, fmt::Debug, ops::Range};
use sum_tree::Bias;
@@ -74,11 +74,33 @@ impl Anchor {
}
}
+pub trait OffsetRangeExt {
+ fn to_offset(&self, snapshot: &BufferSnapshot) -> Range<usize>;
+ fn to_point(&self, snapshot: &BufferSnapshot) -> Range<Point>;
+ fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> Range<PointUtf16>;
+}
+
+impl<T> OffsetRangeExt for Range<T>
+where
+ T: ToOffset,
+{
+ fn to_offset(&self, snapshot: &BufferSnapshot) -> Range<usize> {
+ self.start.to_offset(snapshot)..self.end.to_offset(&snapshot)
+ }
+
+ fn to_point(&self, snapshot: &BufferSnapshot) -> Range<Point> {
+ self.start.to_offset(snapshot).to_point(snapshot)
+ ..self.end.to_offset(snapshot).to_point(snapshot)
+ }
+
+ fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> Range<PointUtf16> {
+ self.start.to_offset(snapshot).to_point_utf16(snapshot)
+ ..self.end.to_offset(snapshot).to_point_utf16(snapshot)
+ }
+}
+
pub trait AnchorRangeExt {
fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering>;
- fn to_offset(&self, content: &BufferSnapshot) -> Range<usize>;
- fn to_point(&self, content: &BufferSnapshot) -> Range<Point>;
- fn to_point_utf16(&self, content: &BufferSnapshot) -> Range<PointUtf16>;
}
impl AnchorRangeExt for Range<Anchor> {
@@ -88,16 +110,4 @@ impl AnchorRangeExt for Range<Anchor> {
ord @ _ => ord,
})
}
-
- fn to_offset(&self, content: &BufferSnapshot) -> Range<usize> {
- self.start.to_offset(&content)..self.end.to_offset(&content)
- }
-
- fn to_point(&self, content: &BufferSnapshot) -> Range<Point> {
- self.start.summary::<Point>(&content)..self.end.summary::<Point>(&content)
- }
-
- fn to_point_utf16(&self, content: &BufferSnapshot) -> Range<PointUtf16> {
- self.start.to_point_utf16(content)..self.end.to_point_utf16(content)
- }
}
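The range conversions move out of AnchorRangeExt into OffsetRangeExt, a blanket impl over any Range<T: ToOffset>, so anchor ranges, offset ranges, and point ranges all share one conversion surface. A self-contained illustration of the blanket-impl pattern with toy Snapshot and Point types, not the zed text crate:

    use std::ops::Range;

    // Toy snapshot: the "buffer" is just a string of ASCII lines.
    struct Snapshot {
        text: String,
    }

    // Anything that can resolve itself to a byte offset in the snapshot.
    trait ToOffset {
        fn to_offset(&self, snapshot: &Snapshot) -> usize;
    }

    impl ToOffset for usize {
        fn to_offset(&self, _snapshot: &Snapshot) -> usize {
            *self
        }
    }

    // A (row, column) point, resolved by walking line starts.
    struct Point {
        row: usize,
        column: usize,
    }

    impl ToOffset for Point {
        fn to_offset(&self, snapshot: &Snapshot) -> usize {
            let line_start: usize = snapshot
                .text
                .split_inclusive('\n')
                .take(self.row)
                .map(str::len)
                .sum();
            line_start + self.column
        }
    }

    // The blanket impl: any Range whose endpoints resolve to offsets gets the conversions.
    trait OffsetRangeExt {
        fn to_offset(&self, snapshot: &Snapshot) -> Range<usize>;
    }

    impl<T: ToOffset> OffsetRangeExt for Range<T> {
        fn to_offset(&self, snapshot: &Snapshot) -> Range<usize> {
            self.start.to_offset(snapshot)..self.end.to_offset(snapshot)
        }
    }

    fn main() {
        let snapshot = Snapshot { text: "hello\nworld\n".into() };
        let by_offset = (0usize..5).to_offset(&snapshot);
        let by_point = (Point { row: 1, column: 0 }..Point { row: 1, column: 5 }).to_offset(&snapshot);
        assert_eq!(by_offset, 0..5);
        assert_eq!(by_point, 6..11);
    }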