Antonio Scandurra, Nathan Sobo, and Keith Simmons created
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Antonio Scandurra <antonio@zed.dev>
Co-Authored-By: Keith Simmons <keith@zed.dev>
Cargo.lock | 1
crates/editor/src/editor.rs | 7
crates/editor/src/multi_buffer.rs | 6
crates/gpui/src/app.rs | 65 +
crates/language/src/buffer.rs | 425 ------------
crates/language/src/diagnostic_set.rs | 4
crates/language/src/tests.rs | 609 ------------------
crates/project/Cargo.toml | 1
crates/project/src/lsp_command.rs | 52
crates/project/src/project.rs | 955 +++++++++++++++++++++++-----
crates/project/src/worktree.rs | 1
crates/search/src/buffer_search.rs | 2
crates/server/src/rpc.rs | 4
crates/text/src/anchor.rs | 42
14 files changed, 915 insertions(+), 1259 deletions(-)
@@ -3591,6 +3591,7 @@ dependencies = [
"serde",
"serde_json",
"sha2 0.10.2",
+ "similar",
"smol",
"sum_tree",
"tempdir",
@@ -32,8 +32,8 @@ use items::{BufferItemHandle, MultiBufferItemHandle};
use itertools::Itertools as _;
pub use language::{char_kind, CharKind};
use language::{
- AnchorRangeExt as _, BracketPair, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
- DiagnosticSeverity, Language, Point, Selection, SelectionGoal, TransactionId,
+ BracketPair, Buffer, CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticSeverity,
+ Language, OffsetRangeExt, Point, Selection, SelectionGoal, TransactionId,
};
use multi_buffer::MultiBufferChunks;
pub use multi_buffer::{
@@ -8235,9 +8235,6 @@ mod tests {
.update(cx, |project, cx| project.open_buffer(project_path, cx))
.await
.unwrap();
- buffer.update(cx, |buffer, cx| {
- buffer.set_language_server(Some(language_server), cx);
- });
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
buffer.next_notification(&cx).await;
@@ -8,8 +8,8 @@ use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
pub use language::Completion;
use language::{
char_kind, Buffer, BufferChunks, BufferSnapshot, CharKind, Chunk, DiagnosticEntry, Event, File,
- Language, Outline, OutlineItem, Selection, ToOffset as _, ToPoint as _, ToPointUtf16 as _,
- TransactionId,
+ Language, OffsetRangeExt, Outline, OutlineItem, Selection, ToOffset as _, ToPoint as _,
+ ToPointUtf16 as _, TransactionId,
};
use std::{
cell::{Ref, RefCell},
@@ -25,7 +25,7 @@ use text::{
locator::Locator,
rope::TextDimension,
subscription::{Subscription, Topic},
- AnchorRangeExt as _, Edit, Point, PointUtf16, TextSummary,
+ Edit, Point, PointUtf16, TextSummary,
};
use theme::SyntaxTheme;
@@ -742,7 +742,7 @@ type GlobalActionCallback = dyn FnMut(&dyn AnyAction, &mut MutableAppContext);
type SubscriptionCallback = Box<dyn FnMut(&dyn Any, &mut MutableAppContext) -> bool>;
type DelegationCallback = Box<dyn FnMut(Box<dyn Any>, &mut MutableAppContext) -> bool>;
type ObservationCallback = Box<dyn FnMut(&mut MutableAppContext) -> bool>;
-type ReleaseObservationCallback = Box<dyn FnMut(&mut MutableAppContext)>;
+type ReleaseObservationCallback = Box<dyn FnMut(&dyn Any, &mut MutableAppContext)>;
pub struct MutableAppContext {
weak_self: Option<rc::Weak<RefCell<Self>>>,
@@ -1186,14 +1186,20 @@ impl MutableAppContext {
E: Entity,
E::Event: 'static,
H: Handle<E>,
- F: 'static + FnMut(&mut Self),
+ F: 'static + FnMut(&E, &mut Self),
{
let id = post_inc(&mut self.next_subscription_id);
self.release_observations
.lock()
.entry(handle.id())
.or_default()
- .insert(id, Box::new(move |cx| callback(cx)));
+ .insert(
+ id,
+ Box::new(move |released, cx| {
+ let released = released.downcast_ref().unwrap();
+ callback(released, cx)
+ }),
+ );
Subscription::ReleaseObservation {
id,
entity_id: handle.id(),
@@ -1552,9 +1558,8 @@ impl MutableAppContext {
self.observations.lock().remove(&model_id);
let mut model = self.cx.models.remove(&model_id).unwrap();
model.release(self);
- self.pending_effects.push_back(Effect::Release {
- entity_id: model_id,
- });
+ self.pending_effects
+ .push_back(Effect::ModelRelease { model_id, model });
}
for (window_id, view_id) in dropped_views {
@@ -1580,7 +1585,7 @@ impl MutableAppContext {
}
self.pending_effects
- .push_back(Effect::Release { entity_id: view_id });
+ .push_back(Effect::ViewRelease { view_id, view });
}
for key in dropped_element_states {
@@ -1607,7 +1612,12 @@ impl MutableAppContext {
self.notify_view_observers(window_id, view_id)
}
Effect::Deferred(callback) => callback(self),
- Effect::Release { entity_id } => self.notify_release_observers(entity_id),
+ Effect::ModelRelease { model_id, model } => {
+ self.notify_release_observers(model_id, model.as_any())
+ }
+ Effect::ViewRelease { view_id, view } => {
+ self.notify_release_observers(view_id, view.as_any())
+ }
Effect::Focus { window_id, view_id } => {
self.focus(window_id, view_id);
}
@@ -1781,11 +1791,11 @@ impl MutableAppContext {
}
}
- fn notify_release_observers(&mut self, entity_id: usize) {
+ fn notify_release_observers(&mut self, entity_id: usize, entity: &dyn Any) {
let callbacks = self.release_observations.lock().remove(&entity_id);
if let Some(callbacks) = callbacks {
for (_, mut callback) in callbacks {
- callback(self);
+ callback(entity, self);
}
}
}
@@ -2112,8 +2122,13 @@ pub enum Effect {
view_id: usize,
},
Deferred(Box<dyn FnOnce(&mut MutableAppContext)>),
- Release {
- entity_id: usize,
+ ModelRelease {
+ model_id: usize,
+ model: Box<dyn AnyModel>,
+ },
+ ViewRelease {
+ view_id: usize,
+ view: Box<dyn AnyView>,
},
Focus {
window_id: usize,
@@ -2142,9 +2157,13 @@ impl Debug for Effect {
.field("view_id", view_id)
.finish(),
Effect::Deferred(_) => f.debug_struct("Effect::Deferred").finish(),
- Effect::Release { entity_id } => f
- .debug_struct("Effect::Release")
- .field("entity_id", entity_id)
+ Effect::ModelRelease { model_id, .. } => f
+ .debug_struct("Effect::ModelRelease")
+ .field("model_id", model_id)
+ .finish(),
+ Effect::ViewRelease { view_id, .. } => f
+ .debug_struct("Effect::ViewRelease")
+ .field("view_id", view_id)
.finish(),
Effect::Focus { window_id, view_id } => f
.debug_struct("Effect::Focus")
@@ -2395,13 +2414,13 @@ impl<'a, T: Entity> ModelContext<'a, T> {
) -> Subscription
where
S: Entity,
- F: 'static + FnMut(&mut T, &mut ModelContext<T>),
+ F: 'static + FnMut(&mut T, &S, &mut ModelContext<T>),
{
let observer = self.weak_handle();
- self.app.observe_release(handle, move |cx| {
+ self.app.observe_release(handle, move |released, cx| {
if let Some(observer) = observer.upgrade(cx) {
observer.update(cx, |observer, cx| {
- callback(observer, cx);
+ callback(observer, released, cx);
});
}
})
@@ -2677,13 +2696,13 @@ impl<'a, T: View> ViewContext<'a, T> {
where
E: Entity,
H: Handle<E>,
- F: 'static + FnMut(&mut T, &mut ViewContext<T>),
+ F: 'static + FnMut(&mut T, &E, &mut ViewContext<T>),
{
let observer = self.weak_handle();
- self.app.observe_release(handle, move |cx| {
+ self.app.observe_release(handle, move |released, cx| {
if let Some(observer) = observer.upgrade(cx) {
observer.update(cx, |observer, cx| {
- callback(observer, cx);
+ callback(observer, released, cx);
});
}
})
@@ -4403,12 +4422,12 @@ mod tests {
cx.observe_release(&model, {
let model_release_observed = model_release_observed.clone();
- move |_| model_release_observed.set(true)
+ move |_, _| model_release_observed.set(true)
})
.detach();
cx.observe_release(&view, {
let view_release_observed = view_release_observed.clone();
- move |_| view_release_observed.set(true)
+ move |_, _| view_release_observed.set(true)
})
.detach();
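With this change, release observers receive a reference to the released entity itself, carried through the new Effect::ModelRelease / Effect::ViewRelease variants. A minimal sketch of the reshaped callback, not code from this commit, assuming a Buffer handle observed from inside a ModelContext<Project>:

cx.observe_release(&buffer, |_this, released: &Buffer, _cx| {
    // `released` is the entity's final state, boxed into the release effect.
    log::info!("buffer {} released", released.remote_id());
})
.detach();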
@@ -7,16 +7,14 @@ pub use crate::{
use crate::{
diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
outline::OutlineItem,
- range_from_lsp, CodeLabel, Outline, ToLspPosition,
+ CodeLabel, Outline,
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use futures::FutureExt as _;
use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task};
use lazy_static::lazy_static;
-use lsp::LanguageServer;
use parking_lot::Mutex;
-use postage::{prelude::Stream, sink::Sink, watch};
use similar::{ChangeTag, TextDiff};
use smol::future::yield_now;
use std::{
@@ -26,7 +24,7 @@ use std::{
ffi::OsString,
future::Future,
iter::{Iterator, Peekable},
- ops::{Deref, DerefMut, Range, Sub},
+ ops::{Deref, DerefMut, Range},
path::{Path, PathBuf},
str,
sync::Arc,
@@ -34,11 +32,11 @@ use std::{
vec,
};
use sum_tree::TreeMap;
-use text::{operation_queue::OperationQueue, rope::TextDimension};
-pub use text::{Buffer as TextBuffer, Operation as _, *};
+use text::operation_queue::OperationQueue;
+pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, Operation as _, *};
use theme::SyntaxTheme;
use tree_sitter::{InputEdit, QueryCursor, Tree};
-use util::{post_inc, TryFutureExt as _};
+use util::TryFutureExt as _;
#[cfg(any(test, feature = "test-support"))]
pub use tree_sitter_rust;
@@ -70,7 +68,6 @@ pub struct Buffer {
diagnostics_update_count: usize,
diagnostics_timestamp: clock::Lamport,
file_update_count: usize,
- language_server: Option<LanguageServerState>,
completion_triggers: Vec<String>,
deferred_ops: OperationQueue<Operation>,
}
@@ -126,21 +123,6 @@ pub struct CodeAction {
pub lsp_action: lsp::CodeAction,
}
-struct LanguageServerState {
- server: Arc<LanguageServer>,
- latest_snapshot: watch::Sender<LanguageServerSnapshot>,
- pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
- next_version: usize,
- _maintain_server: Task<Option<()>>,
-}
-
-#[derive(Clone)]
-struct LanguageServerSnapshot {
- buffer_snapshot: text::BufferSnapshot,
- version: usize,
- path: Arc<Path>,
-}
-
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Operation {
Buffer(text::Operation),
@@ -479,15 +461,6 @@ impl Buffer {
self
}
- pub fn with_language_server(
- mut self,
- server: Arc<LanguageServer>,
- cx: &mut ModelContext<Self>,
- ) -> Self {
- self.set_language_server(Some(server), cx);
- self
- }
-
fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
let saved_mtime;
if let Some(file) = file.as_ref() {
@@ -514,7 +487,6 @@ impl Buffer {
diagnostics_update_count: 0,
diagnostics_timestamp: Default::default(),
file_update_count: 0,
- language_server: None,
completion_triggers: Default::default(),
deferred_ops: OperationQueue::new(),
}
@@ -536,6 +508,14 @@ impl Buffer {
}
}
+ pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
+ &self.text
+ }
+
+ pub fn text_snapshot(&self) -> text::BufferSnapshot {
+ self.text.snapshot()
+ }
+
pub fn file(&self) -> Option<&dyn File> {
self.file.as_deref()
}
@@ -561,123 +541,15 @@ impl Buffer {
})
}
+ pub fn saved_version(&self) -> &clock::Global {
+ &self.saved_version
+ }
+
pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
self.language = language;
self.reparse(cx);
}
- pub fn set_language_server(
- &mut self,
- language_server: Option<Arc<lsp::LanguageServer>>,
- cx: &mut ModelContext<Self>,
- ) {
- self.language_server = if let Some((server, file)) =
- language_server.zip(self.file.as_ref().and_then(|f| f.as_local()))
- {
- let initial_snapshot = LanguageServerSnapshot {
- buffer_snapshot: self.text.snapshot(),
- version: 0,
- path: file.abs_path(cx).into(),
- };
- let (latest_snapshot_tx, mut latest_snapshot_rx) =
- watch::channel_with::<LanguageServerSnapshot>(initial_snapshot.clone());
-
- Some(LanguageServerState {
- latest_snapshot: latest_snapshot_tx,
- pending_snapshots: BTreeMap::from_iter([(0, initial_snapshot)]),
- next_version: 1,
- server: server.clone(),
- _maintain_server: cx.spawn_weak(|this, mut cx| async move {
- let capabilities = server.capabilities().await.or_else(|| {
- log::info!("language server exited");
- if let Some(this) = this.upgrade(&cx) {
- this.update(&mut cx, |this, _| this.language_server = None);
- }
- None
- })?;
-
- let triggers = capabilities
- .completion_provider
- .and_then(|c| c.trigger_characters)
- .unwrap_or_default();
- this.upgrade(&cx)?.update(&mut cx, |this, cx| {
- let lamport_timestamp = this.text.lamport_clock.tick();
- this.completion_triggers = triggers.clone();
- this.send_operation(
- Operation::UpdateCompletionTriggers {
- triggers,
- lamport_timestamp,
- },
- cx,
- );
- cx.notify();
- });
-
- let maintain_changes = cx.background().spawn(async move {
- let initial_snapshot =
- latest_snapshot_rx.recv().await.ok_or_else(|| {
- anyhow!("buffer dropped before sending DidOpenTextDocument")
- })?;
- server
- .notify::<lsp::notification::DidOpenTextDocument>(
- lsp::DidOpenTextDocumentParams {
- text_document: lsp::TextDocumentItem::new(
- lsp::Url::from_file_path(initial_snapshot.path).unwrap(),
- Default::default(),
- initial_snapshot.version as i32,
- initial_snapshot.buffer_snapshot.text(),
- ),
- },
- )
- .await?;
-
- let mut prev_version = initial_snapshot.buffer_snapshot.version().clone();
- while let Some(snapshot) = latest_snapshot_rx.recv().await {
- let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
- let buffer_snapshot = snapshot.buffer_snapshot.clone();
- let content_changes = buffer_snapshot
- .edits_since::<(PointUtf16, usize)>(&prev_version)
- .map(|edit| {
- let edit_start = edit.new.start.0;
- let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
- let new_text = buffer_snapshot
- .text_for_range(edit.new.start.1..edit.new.end.1)
- .collect();
- lsp::TextDocumentContentChangeEvent {
- range: Some(lsp::Range::new(
- edit_start.to_lsp_position(),
- edit_end.to_lsp_position(),
- )),
- range_length: None,
- text: new_text,
- }
- })
- .collect();
- let changes = lsp::DidChangeTextDocumentParams {
- text_document: lsp::VersionedTextDocumentIdentifier::new(
- uri,
- snapshot.version as i32,
- ),
- content_changes,
- };
- server
- .notify::<lsp::notification::DidChangeTextDocument>(changes)
- .await?;
-
- prev_version = snapshot.buffer_snapshot.version().clone();
- }
-
- Ok::<_, anyhow::Error>(())
- });
-
- maintain_changes.log_err().await
- }),
- })
- } else {
- None
- };
- }
-
pub fn did_save(
&mut self,
version: clock::Global,
@@ -784,10 +656,6 @@ impl Buffer {
self.language.as_ref()
}
- pub fn language_server(&self) -> Option<&Arc<LanguageServer>> {
- self.language_server.as_ref().map(|state| &state.server)
- }
-
pub fn parse_count(&self) -> usize {
self.parse_count
}
@@ -899,100 +767,14 @@ impl Buffer {
cx.notify();
}
- pub fn update_diagnostics<T>(
- &mut self,
- mut diagnostics: Vec<DiagnosticEntry<T>>,
- version: Option<i32>,
- cx: &mut ModelContext<Self>,
- ) -> Result<()>
- where
- T: Copy + Ord + TextDimension + Sub<Output = T> + Clip + ToPoint,
- {
- fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
- Ordering::Equal
- .then_with(|| b.is_primary.cmp(&a.is_primary))
- .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
- .then_with(|| a.severity.cmp(&b.severity))
- .then_with(|| a.message.cmp(&b.message))
- }
-
- let version = version.map(|version| version as usize);
- let content =
- if let Some((version, language_server)) = version.zip(self.language_server.as_mut()) {
- language_server.snapshot_for_version(version)?
- } else {
- self.deref()
- };
-
- diagnostics.sort_unstable_by(|a, b| {
- Ordering::Equal
- .then_with(|| a.range.start.cmp(&b.range.start))
- .then_with(|| b.range.end.cmp(&a.range.end))
- .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
- });
-
- let mut sanitized_diagnostics = Vec::new();
- let mut edits_since_save = content.edits_since::<T>(&self.saved_version).peekable();
- let mut last_edit_old_end = T::default();
- let mut last_edit_new_end = T::default();
- 'outer: for entry in diagnostics {
- let mut start = entry.range.start;
- let mut end = entry.range.end;
-
- // Some diagnostics are based on files on disk instead of buffers'
- // current contents. Adjust these diagnostics' ranges to reflect
- // any unsaved edits.
- if entry.diagnostic.is_disk_based {
- while let Some(edit) = edits_since_save.peek() {
- if edit.old.end <= start {
- last_edit_old_end = edit.old.end;
- last_edit_new_end = edit.new.end;
- edits_since_save.next();
- } else if edit.old.start <= end && edit.old.end >= start {
- continue 'outer;
- } else {
- break;
- }
- }
-
- let start_overshoot = start - last_edit_old_end;
- start = last_edit_new_end;
- start.add_assign(&start_overshoot);
-
- let end_overshoot = end - last_edit_old_end;
- end = last_edit_new_end;
- end.add_assign(&end_overshoot);
- }
-
- let range = start.clip(Bias::Left, content)..end.clip(Bias::Right, content);
- let mut range = range.start.to_point(content)..range.end.to_point(content);
- // Expand empty ranges by one character
- if range.start == range.end {
- range.end.column += 1;
- range.end = content.clip_point(range.end, Bias::Right);
- if range.start == range.end && range.end.column > 0 {
- range.start.column -= 1;
- range.start = content.clip_point(range.start, Bias::Left);
- }
- }
-
- sanitized_diagnostics.push(DiagnosticEntry {
- range,
- diagnostic: entry.diagnostic,
- });
- }
- drop(edits_since_save);
-
- let set = DiagnosticSet::new(sanitized_diagnostics, content);
+ pub fn update_diagnostics(&mut self, diagnostics: DiagnosticSet, cx: &mut ModelContext<Self>) {
let lamport_timestamp = self.text.lamport_clock.tick();
- self.apply_diagnostic_update(set.clone(), lamport_timestamp, cx);
-
let op = Operation::UpdateDiagnostics {
- diagnostics: set.iter().cloned().collect(),
+ diagnostics: diagnostics.iter().cloned().collect(),
lamport_timestamp,
};
+ self.apply_diagnostic_update(diagnostics, lamport_timestamp, cx);
self.send_operation(op, cx);
- Ok(())
}
fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
@@ -1305,30 +1087,6 @@ impl Buffer {
self.set_active_selections(Arc::from([]), cx);
}
- fn update_language_server(&mut self, cx: &AppContext) {
- let language_server = if let Some(language_server) = self.language_server.as_mut() {
- language_server
- } else {
- return;
- };
- let file = if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
- file
- } else {
- return;
- };
-
- let version = post_inc(&mut language_server.next_version);
- let snapshot = LanguageServerSnapshot {
- buffer_snapshot: self.text.snapshot(),
- version,
- path: Arc::from(file.abs_path(cx)),
- };
- language_server
- .pending_snapshots
- .insert(version, snapshot.clone());
- let _ = language_server.latest_snapshot.blocking_send(snapshot);
- }
-
pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Local>
where
T: Into<String>,
@@ -1455,115 +1213,6 @@ impl Buffer {
Some(edit_id)
}
- pub fn edits_from_lsp(
- &mut self,
- lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
- version: Option<i32>,
- cx: &mut ModelContext<Self>,
- ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
- let snapshot = if let Some((version, state)) = version.zip(self.language_server.as_mut()) {
- state
- .snapshot_for_version(version as usize)
- .map(Clone::clone)
- } else {
- Ok(TextBuffer::deref(self).clone())
- };
-
- cx.background().spawn(async move {
- let snapshot = snapshot?;
- let mut lsp_edits = lsp_edits
- .into_iter()
- .map(|edit| (range_from_lsp(edit.range), edit.new_text))
- .peekable();
-
- let mut edits = Vec::new();
- while let Some((mut range, mut new_text)) = lsp_edits.next() {
- // Combine any LSP edits that are adjacent.
- //
- // Also, combine LSP edits that are separated from each other by only
- // a newline. This is important because for some code actions,
- // Rust-analyzer rewrites the entire buffer via a series of edits that
- // are separated by unchanged newline characters.
- //
- // In order for the diffing logic below to work properly, any edits that
- // cancel each other out must be combined into one.
- while let Some((next_range, next_text)) = lsp_edits.peek() {
- if next_range.start > range.end {
- if next_range.start.row > range.end.row + 1
- || next_range.start.column > 0
- || snapshot.clip_point_utf16(
- PointUtf16::new(range.end.row, u32::MAX),
- Bias::Left,
- ) > range.end
- {
- break;
- }
- new_text.push('\n');
- }
- range.end = next_range.end;
- new_text.push_str(&next_text);
- lsp_edits.next();
- }
-
- if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
- || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
- {
- return Err(anyhow!("invalid edits received from language server"));
- }
-
- // For multiline edits, perform a diff of the old and new text so that
- // we can identify the changes more precisely, preserving the locations
- // of any anchors positioned in the unchanged regions.
- if range.end.row > range.start.row {
- let mut offset = range.start.to_offset(&snapshot);
- let old_text = snapshot.text_for_range(range).collect::<String>();
-
- let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
- let mut moved_since_edit = true;
- for change in diff.iter_all_changes() {
- let tag = change.tag();
- let value = change.value();
- match tag {
- ChangeTag::Equal => {
- offset += value.len();
- moved_since_edit = true;
- }
- ChangeTag::Delete => {
- let start = snapshot.anchor_after(offset);
- let end = snapshot.anchor_before(offset + value.len());
- if moved_since_edit {
- edits.push((start..end, String::new()));
- } else {
- edits.last_mut().unwrap().0.end = end;
- }
- offset += value.len();
- moved_since_edit = false;
- }
- ChangeTag::Insert => {
- if moved_since_edit {
- let anchor = snapshot.anchor_after(offset);
- edits.push((anchor.clone()..anchor, value.to_string()));
- } else {
- edits.last_mut().unwrap().1.push_str(value);
- }
- moved_since_edit = false;
- }
- }
- }
- } else if range.end == range.start {
- let anchor = snapshot.anchor_after(range.start);
- edits.push((anchor.clone()..anchor, new_text));
- } else {
- let edit_start = snapshot.anchor_after(range.start);
- let edit_end = snapshot.anchor_before(range.end);
- edits.push((edit_start..edit_end, new_text));
- }
- }
-
- Ok(edits)
- })
- }
-
fn did_edit(
&mut self,
old_version: &clock::Global,
@@ -1575,7 +1224,6 @@ impl Buffer {
}
self.reparse(cx);
- self.update_language_server(cx);
cx.emit(Event::Edited);
if !was_dirty {
@@ -1788,7 +1436,7 @@ impl Buffer {
}
pub fn completion_triggers(&self) -> &[String] {
- &self.completion_triggers
+ todo!()
}
}
@@ -1843,23 +1491,6 @@ impl Buffer {
impl Entity for Buffer {
type Event = Event;
-
- fn release(&mut self, cx: &mut gpui::MutableAppContext) {
- if let Some(file) = self.file.as_ref() {
- if let Some((lang_server, file)) = self.language_server.as_ref().zip(file.as_local()) {
- let request = lang_server
- .server
- .notify::<lsp::notification::DidCloseTextDocument>(
- lsp::DidCloseTextDocumentParams {
- text_document: lsp::TextDocumentIdentifier::new(
- lsp::Url::from_file_path(file.abs_path(cx)).unwrap(),
- ),
- },
- );
- cx.foreground().spawn(request).detach_and_log_err(cx);
- }
- }
- }
}
impl Deref for Buffer {
@@ -2592,20 +2223,6 @@ impl operation_queue::Operation for Operation {
}
}
-impl LanguageServerState {
- fn snapshot_for_version(&mut self, version: usize) -> Result<&text::BufferSnapshot> {
- const OLD_VERSIONS_TO_RETAIN: usize = 10;
-
- self.pending_snapshots
- .retain(|&v, _| v + OLD_VERSIONS_TO_RETAIN >= version);
- let snapshot = self
- .pending_snapshots
- .get(&version)
- .ok_or_else(|| anyhow!("missing snapshot"))?;
- Ok(&snapshot.buffer_snapshot)
- }
-}
-
impl Default for Diagnostic {
fn default() -> Self {
Self {
@@ -6,7 +6,7 @@ use std::{
ops::Range,
};
use sum_tree::{self, Bias, SumTree};
-use text::{Anchor, FromAnchor, Point, ToOffset};
+use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
#[derive(Clone, Debug)]
pub struct DiagnosticSet {
@@ -46,7 +46,7 @@ impl DiagnosticSet {
pub fn new<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
where
- I: IntoIterator<Item = DiagnosticEntry<Point>>,
+ I: IntoIterator<Item = DiagnosticEntry<PointUtf16>>,
{
let mut entries = iter.into_iter().collect::<Vec<_>>();
entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
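DiagnosticSet entries are now keyed by PointUtf16 rather than Point, matching the coordinates received from language servers. A hedged sketch of the resulting flow, assuming a &mut Buffer and its ModelContext, with made-up diagnostic values; the set is built against the buffer's text snapshot and handed to the new Buffer::update_diagnostics:

let set = DiagnosticSet::new(
    vec![DiagnosticEntry {
        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
        diagnostic: Diagnostic {
            severity: DiagnosticSeverity::ERROR,
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        },
    }],
    buffer.as_text_snapshot(),
);
buffer.update_diagnostics(set, cx);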
@@ -6,7 +6,6 @@ use rand::prelude::*;
use std::{
cell::RefCell,
env,
- iter::FromIterator,
ops::Range,
rc::Rc,
time::{Duration, Instant},
@@ -558,584 +557,6 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppConte
});
}
-#[gpui::test]
-async fn test_diagnostics(cx: &mut gpui::TestAppContext) {
- let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);
- let mut rust_lang = rust_lang();
- rust_lang.config.language_server = Some(LanguageServerConfig {
- disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
- ..Default::default()
- });
-
- let text = "
- fn a() { A }
- fn b() { BB }
- fn c() { CCC }
- "
- .unindent();
-
- let buffer = cx.add_model(|cx| {
- Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
- .with_language(Arc::new(rust_lang), cx)
- .with_language_server(language_server, cx)
- });
-
- let open_notification = fake
- .receive_notification::<lsp::notification::DidOpenTextDocument>()
- .await;
-
- // Edit the buffer, moving the content down
- buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
- let change_notification_1 = fake
- .receive_notification::<lsp::notification::DidChangeTextDocument>()
- .await;
- assert!(change_notification_1.text_document.version > open_notification.text_document.version);
-
- buffer.update(cx, |buffer, cx| {
- // Receive diagnostics for an earlier version of the buffer.
- buffer
- .update_diagnostics(
- vec![
- DiagnosticEntry {
- range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'A'".to_string(),
- is_disk_based: true,
- group_id: 0,
- is_primary: true,
- ..Default::default()
- },
- },
- DiagnosticEntry {
- range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'BB'".to_string(),
- is_disk_based: true,
- group_id: 1,
- is_primary: true,
- ..Default::default()
- },
- },
- DiagnosticEntry {
- range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- is_disk_based: true,
- message: "undefined variable 'CCC'".to_string(),
- group_id: 2,
- is_primary: true,
- ..Default::default()
- },
- },
- ],
- Some(open_notification.text_document.version),
- cx,
- )
- .unwrap();
-
- // The diagnostics have moved down since they were created.
- assert_eq!(
- buffer
- .snapshot()
- .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
- .collect::<Vec<_>>(),
- &[
- DiagnosticEntry {
- range: Point::new(3, 9)..Point::new(3, 11),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'BB'".to_string(),
- is_disk_based: true,
- group_id: 1,
- is_primary: true,
- ..Default::default()
- },
- },
- DiagnosticEntry {
- range: Point::new(4, 9)..Point::new(4, 12),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'CCC'".to_string(),
- is_disk_based: true,
- group_id: 2,
- is_primary: true,
- ..Default::default()
- }
- }
- ]
- );
- assert_eq!(
- chunks_with_diagnostics(buffer, 0..buffer.len()),
- [
- ("\n\nfn a() { ".to_string(), None),
- ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
- (" }\nfn b() { ".to_string(), None),
- ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
- (" }\nfn c() { ".to_string(), None),
- ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
- (" }\n".to_string(), None),
- ]
- );
- assert_eq!(
- chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
- [
- ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
- (" }\nfn c() { ".to_string(), None),
- ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
- ]
- );
-
- // Ensure overlapping diagnostics are highlighted correctly.
- buffer
- .update_diagnostics(
- vec![
- DiagnosticEntry {
- range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'A'".to_string(),
- is_disk_based: true,
- group_id: 0,
- is_primary: true,
- ..Default::default()
- },
- },
- DiagnosticEntry {
- range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::WARNING,
- message: "unreachable statement".to_string(),
- group_id: 1,
- is_primary: true,
- ..Default::default()
- },
- },
- ],
- Some(open_notification.text_document.version),
- cx,
- )
- .unwrap();
- assert_eq!(
- buffer
- .snapshot()
- .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
- .collect::<Vec<_>>(),
- &[
- DiagnosticEntry {
- range: Point::new(2, 9)..Point::new(2, 12),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::WARNING,
- message: "unreachable statement".to_string(),
- group_id: 1,
- is_primary: true,
- ..Default::default()
- }
- },
- DiagnosticEntry {
- range: Point::new(2, 9)..Point::new(2, 10),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'A'".to_string(),
- is_disk_based: true,
- group_id: 0,
- is_primary: true,
- ..Default::default()
- },
- }
- ]
- );
- assert_eq!(
- chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
- [
- ("fn a() { ".to_string(), None),
- ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
- (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
- ("\n".to_string(), None),
- ]
- );
- assert_eq!(
- chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
- [
- (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
- ("\n".to_string(), None),
- ]
- );
- });
-
- // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
- // changes since the last save.
- buffer.update(cx, |buffer, cx| {
- buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
- buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
- });
- let change_notification_2 = fake
- .receive_notification::<lsp::notification::DidChangeTextDocument>()
- .await;
- assert!(
- change_notification_2.text_document.version > change_notification_1.text_document.version
- );
-
- buffer.update(cx, |buffer, cx| {
- buffer
- .update_diagnostics(
- vec![
- DiagnosticEntry {
- range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'BB'".to_string(),
- is_disk_based: true,
- group_id: 1,
- is_primary: true,
- ..Default::default()
- },
- },
- DiagnosticEntry {
- range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'A'".to_string(),
- is_disk_based: true,
- group_id: 0,
- is_primary: true,
- ..Default::default()
- },
- },
- ],
- Some(change_notification_2.text_document.version),
- cx,
- )
- .unwrap();
- assert_eq!(
- buffer
- .snapshot()
- .diagnostics_in_range::<_, Point>(0..buffer.len())
- .collect::<Vec<_>>(),
- &[
- DiagnosticEntry {
- range: Point::new(2, 21)..Point::new(2, 22),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'A'".to_string(),
- is_disk_based: true,
- group_id: 0,
- is_primary: true,
- ..Default::default()
- }
- },
- DiagnosticEntry {
- range: Point::new(3, 9)..Point::new(3, 11),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "undefined variable 'BB'".to_string(),
- is_disk_based: true,
- group_id: 1,
- is_primary: true,
- ..Default::default()
- },
- }
- ]
- );
- });
-}
-
-#[gpui::test]
-async fn test_language_server_has_exited(cx: &mut gpui::TestAppContext) {
- let (language_server, fake) = cx.update(lsp::LanguageServer::fake);
-
- // Simulate the language server failing to start up.
- drop(fake);
-
- let buffer = cx.add_model(|cx| {
- Buffer::from_file(0, "", Box::new(FakeFile::new("/some/path")), cx)
- .with_language(Arc::new(rust_lang()), cx)
- .with_language_server(language_server, cx)
- });
-
- // Run the buffer's task that retrieves the server's capabilities.
- cx.foreground().advance_clock(Duration::from_millis(1));
-
- buffer.read_with(cx, |buffer, _| {
- assert!(buffer.language_server().is_none());
- });
-}
-
-#[gpui::test]
-async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
- let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);
-
- let text = "
- fn a() {
- f1();
- }
- fn b() {
- f2();
- }
- fn c() {
- f3();
- }
- "
- .unindent();
-
- let buffer = cx.add_model(|cx| {
- Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
- .with_language(Arc::new(rust_lang()), cx)
- .with_language_server(language_server, cx)
- });
-
- let lsp_document_version = fake
- .receive_notification::<lsp::notification::DidOpenTextDocument>()
- .await
- .text_document
- .version;
-
- // Simulate editing the buffer after the language server computes some edits.
- buffer.update(cx, |buffer, cx| {
- buffer.edit(
- [Point::new(0, 0)..Point::new(0, 0)],
- "// above first function\n",
- cx,
- );
- buffer.edit(
- [Point::new(2, 0)..Point::new(2, 0)],
- " // inside first function\n",
- cx,
- );
- buffer.edit(
- [Point::new(6, 4)..Point::new(6, 4)],
- "// inside second function ",
- cx,
- );
-
- assert_eq!(
- buffer.text(),
- "
- // above first function
- fn a() {
- // inside first function
- f1();
- }
- fn b() {
- // inside second function f2();
- }
- fn c() {
- f3();
- }
- "
- .unindent()
- );
- });
-
- let edits = buffer
- .update(cx, |buffer, cx| {
- buffer.edits_from_lsp(
- vec![
- // replace body of first function
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
- new_text: "
- fn a() {
- f10();
- }
- "
- .unindent(),
- },
- // edit inside second function
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
- new_text: "00".into(),
- },
- // edit inside third function via two distinct edits
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
- new_text: "4000".into(),
- },
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
- new_text: "".into(),
- },
- ],
- Some(lsp_document_version),
- cx,
- )
- })
- .await
- .unwrap();
-
- buffer.update(cx, |buffer, cx| {
- for (range, new_text) in edits {
- buffer.edit([range], new_text, cx);
- }
- assert_eq!(
- buffer.text(),
- "
- // above first function
- fn a() {
- // inside first function
- f10();
- }
- fn b() {
- // inside second function f200();
- }
- fn c() {
- f4000();
- }
- "
- .unindent()
- );
- });
-}
-
-#[gpui::test]
-async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
- let text = "
- use a::b;
- use a::c;
-
- fn f() {
- b();
- c();
- }
- "
- .unindent();
-
- let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
-
- // Simulate the language server sending us a small edit in the form of a very large diff.
- // Rust-analyzer does this when performing a merge-imports code action.
- let edits = buffer
- .update(cx, |buffer, cx| {
- buffer.edits_from_lsp(
- [
- // Replace the first use statement without editing the semicolon.
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
- new_text: "a::{b, c}".into(),
- },
- // Reinsert the remainder of the file between the semicolon and the final
- // newline of the file.
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
- new_text: "\n\n".into(),
- },
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
- new_text: "
- fn f() {
- b();
- c();
- }"
- .unindent(),
- },
- // Delete everything after the first newline of the file.
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
- new_text: "".into(),
- },
- ],
- None,
- cx,
- )
- })
- .await
- .unwrap();
-
- buffer.update(cx, |buffer, cx| {
- let edits = edits
- .into_iter()
- .map(|(range, text)| {
- (
- range.start.to_point(&buffer)..range.end.to_point(&buffer),
- text,
- )
- })
- .collect::<Vec<_>>();
-
- assert_eq!(
- edits,
- [
- (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
- (Point::new(1, 0)..Point::new(2, 0), "".into())
- ]
- );
-
- for (range, new_text) in edits {
- buffer.edit([range], new_text, cx);
- }
- assert_eq!(
- buffer.text(),
- "
- use a::{b, c};
-
- fn f() {
- b();
- c();
- }
- "
- .unindent()
- );
- });
-}
-
-#[gpui::test]
-async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
- cx.add_model(|cx| {
- let text = concat!(
- "let one = ;\n", //
- "let two = \n",
- "let three = 3;\n",
- );
-
- let mut buffer = Buffer::new(0, text, cx);
- buffer.set_language(Some(Arc::new(rust_lang())), cx);
- buffer
- .update_diagnostics(
- vec![
- DiagnosticEntry {
- range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "syntax error 1".to_string(),
- ..Default::default()
- },
- },
- DiagnosticEntry {
- range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
- diagnostic: Diagnostic {
- severity: DiagnosticSeverity::ERROR,
- message: "syntax error 2".to_string(),
- ..Default::default()
- },
- },
- ],
- None,
- cx,
- )
- .unwrap();
-
- // An empty range is extended forward to include the following character.
- // At the end of a line, an empty range is extended backward to include
- // the preceding character.
- let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
- assert_eq!(
- chunks
- .iter()
- .map(|(s, d)| (s.as_str(), *d))
- .collect::<Vec<_>>(),
- &[
- ("let one = ", None),
- (";", Some(DiagnosticSeverity::ERROR)),
- ("\nlet two =", None),
- (" ", Some(DiagnosticSeverity::ERROR)),
- ("\nlet three = 3;\n", None)
- ]
- );
- buffer
- });
-}
-
#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
let mut now = Instant::now();
@@ -1253,9 +674,10 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
40..=49 if mutation_count != 0 && replica_id == 0 => {
let entry_count = rng.gen_range(1..=5);
buffer.update(cx, |buffer, cx| {
- let diagnostics = (0..entry_count)
- .map(|_| {
+ let diagnostics = DiagnosticSet::new(
+ (0..entry_count).map(|_| {
let range = buffer.random_byte_range(0, &mut rng);
+ let range = range.to_point_utf16(buffer);
DiagnosticEntry {
range,
diagnostic: Diagnostic {
@@ -1263,10 +685,11 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
..Default::default()
},
}
- })
- .collect();
+ }),
+ buffer,
+ );
log::info!("peer {} setting diagnostics: {:?}", replica_id, diagnostics);
- buffer.update_diagnostics(diagnostics, None, cx).unwrap();
+ buffer.update_diagnostics(diagnostics, cx);
});
mutation_count -= 1;
}
@@ -1370,24 +793,6 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
}
}
-fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
- buffer: &Buffer,
- range: Range<T>,
-) -> Vec<(String, Option<DiagnosticSeverity>)> {
- let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
- for chunk in buffer.snapshot().chunks(range, true) {
- if chunks
- .last()
- .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
- {
- chunks.last_mut().unwrap().0.push_str(chunk.text);
- } else {
- chunks.push((chunk.text.to_string(), chunk.diagnostic));
- }
- }
- chunks
-}
-
#[test]
fn test_contiguous_ranges() {
assert_eq!(
@@ -42,6 +42,7 @@ regex = "1.5"
serde = { version = "1", features = ["derive"] }
serde_json = { version = "1.0.64", features = ["preserve_order"] }
sha2 = "0.10"
+similar = "1.3"
smol = "1.2.5"
toml = "0.5"
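The project crate now depends on similar for diffing multiline LSP edits, the logic that previously lived in buffer.rs. A rough sketch of the crate's API as used by that code path, with placeholder old_text and new_text strings:

use similar::{ChangeTag, TextDiff};

let diff = TextDiff::from_lines(old_text, new_text);
for change in diff.iter_all_changes() {
    match change.tag() {
        ChangeTag::Equal => { /* text unchanged; keep anchors in this region */ }
        ChangeTag::Delete => { /* turn change.value() into a deletion edit */ }
        ChangeTag::Insert => { /* turn change.value() into an insertion edit */ }
    }
}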
@@ -223,21 +223,19 @@ impl LspCommand for PerformRename {
mut cx: AsyncAppContext,
) -> Result<ProjectTransaction> {
if let Some(edit) = message {
- let (language_name, language_server) = buffer.read_with(&cx, |buffer, _| {
- let language = buffer
- .language()
- .ok_or_else(|| anyhow!("buffer's language was removed"))?;
- let language_server = buffer
- .language_server()
- .cloned()
- .ok_or_else(|| anyhow!("buffer's language server was removed"))?;
- Ok::<_, anyhow::Error>((language.name().to_string(), language_server))
- })?;
+ let language_server = project
+ .read_with(&cx, |project, cx| {
+ project.language_server_for_buffer(&buffer, cx).cloned()
+ })
+ .ok_or_else(|| anyhow!("no language server found for buffer"))?;
+ let language = buffer
+ .read_with(&cx, |buffer, _| buffer.language().cloned())
+ .ok_or_else(|| anyhow!("no language for buffer"))?;
Project::deserialize_workspace_edit(
project,
edit,
self.push_to_history,
- language_name,
+ language.name(),
language_server,
&mut cx,
)
@@ -343,14 +341,14 @@ impl LspCommand for GetDefinition {
mut cx: AsyncAppContext,
) -> Result<Vec<Location>> {
let mut definitions = Vec::new();
- let (language, language_server) = buffer
- .read_with(&cx, |buffer, _| {
- buffer
- .language()
- .cloned()
- .zip(buffer.language_server().cloned())
+ let language_server = project
+ .read_with(&cx, |project, cx| {
+ project.language_server_for_buffer(&buffer, cx).cloned()
})
- .ok_or_else(|| anyhow!("buffer no longer has language server"))?;
+ .ok_or_else(|| anyhow!("no language server found for buffer"))?;
+ let language = buffer
+ .read_with(&cx, |buffer, _| buffer.language().cloned())
+ .ok_or_else(|| anyhow!("no language for buffer"))?;
if let Some(message) = message {
let mut unresolved_locations = Vec::new();
@@ -375,7 +373,7 @@ impl LspCommand for GetDefinition {
.update(&mut cx, |this, cx| {
this.open_local_buffer_via_lsp(
target_uri,
- language.name().to_string(),
+ language.name(),
language_server.clone(),
cx,
)
@@ -519,14 +517,14 @@ impl LspCommand for GetReferences {
mut cx: AsyncAppContext,
) -> Result<Vec<Location>> {
let mut references = Vec::new();
- let (language, language_server) = buffer
- .read_with(&cx, |buffer, _| {
- buffer
- .language()
- .cloned()
- .zip(buffer.language_server().cloned())
+ let language_server = project
+ .read_with(&cx, |project, cx| {
+ project.language_server_for_buffer(&buffer, cx).cloned()
})
- .ok_or_else(|| anyhow!("buffer no longer has language server"))?;
+ .ok_or_else(|| anyhow!("no language server found for buffer"))?;
+ let language = buffer
+ .read_with(&cx, |buffer, _| buffer.language().cloned())
+ .ok_or_else(|| anyhow!("no language for buffer"))?;
if let Some(locations) = locations {
for lsp_location in locations {
@@ -534,7 +532,7 @@ impl LspCommand for GetReferences {
.update(&mut cx, |this, cx| {
this.open_local_buffer_via_lsp(
lsp_location.uri,
- language.name().to_string(),
+ language.name(),
language_server.clone(),
cx,
)
@@ -16,9 +16,10 @@ use gpui::{
};
use language::{
proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
- range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
- Diagnostic, DiagnosticEntry, Event as BufferEvent, File as _, Language, LanguageRegistry,
- Operation, PointUtf16, ToLspPosition, ToOffset, ToPointUtf16, Transaction,
+ range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
+ DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
+ LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
+ ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
@@ -26,10 +27,11 @@ use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use sha2::{Digest, Sha256};
+use similar::{ChangeTag, TextDiff};
use smol::block_on;
use std::{
cell::RefCell,
- cmp,
+ cmp::{self, Ordering},
convert::TryInto,
hash::Hash,
mem,
@@ -48,9 +50,8 @@ pub struct Project {
worktrees: Vec<WorktreeHandle>,
active_entry: Option<ProjectEntry>,
languages: Arc<LanguageRegistry>,
- language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
- started_language_servers:
- HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
+ language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
+ started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
client: Arc<client::Client>,
user_store: ModelHandle<UserStore>,
fs: Arc<dyn Fs>,
@@ -67,6 +68,7 @@ pub struct Project {
loading_local_worktrees:
HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
opened_buffers: HashMap<u64, OpenBuffer>,
+ buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
nonce: u128,
}
@@ -285,6 +287,7 @@ impl Project {
shared_buffers: Default::default(),
loading_buffers: Default::default(),
loading_local_worktrees: Default::default(),
+ buffer_snapshots: Default::default(),
client_state: ProjectClientState::Local {
is_shared: false,
remote_id_tx,
@@ -371,6 +374,7 @@ impl Project {
language_servers: Default::default(),
started_language_servers: Default::default(),
opened_buffers: Default::default(),
+ buffer_snapshots: Default::default(),
nonce: StdRng::from_entropy().gen(),
};
for worktree in worktrees {
@@ -722,7 +726,7 @@ impl Project {
let buffer = cx.add_model(|cx| {
Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
});
- self.register_buffer(&buffer, None, cx)?;
+ self.register_buffer(&buffer, cx)?;
Ok(buffer)
}
@@ -797,15 +801,9 @@ impl Project {
let worktree = worktree.as_local_mut().unwrap();
worktree.load_buffer(path, cx)
});
- let worktree = worktree.downgrade();
cx.spawn(|this, mut cx| async move {
let buffer = load_buffer.await?;
- let worktree = worktree
- .upgrade(&cx)
- .ok_or_else(|| anyhow!("worktree was removed"))?;
- this.update(&mut cx, |this, cx| {
- this.register_buffer(&buffer, Some(&worktree), cx)
- })?;
+ this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
Ok(buffer)
})
}
@@ -838,7 +836,7 @@ impl Project {
fn open_local_buffer_via_lsp(
&mut self,
abs_path: lsp::Url,
- lang_name: String,
+ lang_name: Arc<str>,
lang_server: Arc<LanguageServer>,
cx: &mut ModelContext<Self>,
) -> Task<Result<ModelHandle<Buffer>>> {
@@ -890,7 +888,8 @@ impl Project {
})
.await?;
this.update(&mut cx, |this, cx| {
- this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
+ this.assign_language_to_buffer(&buffer, cx);
+ this.register_buffer_with_language_servers(&buffer, cx);
});
Ok(())
})
@@ -916,7 +915,6 @@ impl Project {
fn register_buffer(
&mut self,
buffer: &ModelHandle<Buffer>,
- worktree: Option<&ModelHandle<Worktree>>,
cx: &mut ModelContext<Self>,
) -> Result<()> {
let remote_id = buffer.read(cx).remote_id();
@@ -944,109 +942,215 @@ impl Project {
remote_id
))?,
}
- cx.become_delegate(buffer, Self::on_buffer_event).detach();
- self.assign_language_to_buffer(buffer, worktree, cx);
+ cx.become_delegate(buffer, |this, buffer, event, cx| {
+ this.on_buffer_event(buffer, event, cx);
+ })
+ .detach();
+
+ self.assign_language_to_buffer(buffer, cx);
+ self.register_buffer_with_language_servers(buffer, cx);
Ok(())
}
+ fn register_buffer_with_language_servers(
+ &mut self,
+ buffer_handle: &ModelHandle<Buffer>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let buffer = buffer_handle.read(cx);
+ if let Some(file) = File::from_dyn(buffer.file()) {
+ let worktree_id = file.worktree_id(cx);
+ if file.is_local() {
+ let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
+ let initial_snapshot = buffer.as_text_snapshot();
+ self.buffer_snapshots
+ .insert(buffer.remote_id(), vec![(0, initial_snapshot.clone())]);
+
+ let mut notifications = Vec::new();
+ let did_open_text_document = lsp::DidOpenTextDocumentParams {
+ text_document: lsp::TextDocumentItem::new(
+ uri,
+ Default::default(),
+ 0,
+ initial_snapshot.text(),
+ ),
+ };
+
+ for lang_server in self.language_servers_for_worktree(worktree_id) {
+ notifications.push(
+ lang_server.notify::<lsp::notification::DidOpenTextDocument>(
+ did_open_text_document.clone(),
+ ),
+ );
+ }
+
+ if let Some(local_worktree) = file.worktree.read(cx).as_local() {
+ if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
+ self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
+ .log_err();
+ }
+ }
+
+ cx.observe_release(buffer_handle, |this, buffer, cx| {
+ if let Some(file) = File::from_dyn(buffer.file()) {
+ let worktree_id = file.worktree_id(cx);
+ if file.is_local() {
+ let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
+ let mut notifications = Vec::new();
+ for lang_server in this.language_servers_for_worktree(worktree_id) {
+ notifications.push(
+ lang_server.notify::<lsp::notification::DidCloseTextDocument>(
+ lsp::DidCloseTextDocumentParams {
+ text_document: lsp::TextDocumentIdentifier::new(
+ uri.clone(),
+ ),
+ },
+ ),
+ );
+ }
+ cx.background()
+ .spawn(futures::future::try_join_all(notifications))
+ .detach_and_log_err(cx);
+ }
+ }
+ })
+ .detach();
+
+ cx.background()
+ .spawn(futures::future::try_join_all(notifications))
+ .detach_and_log_err(cx);
+ }
+ }
+ }
+
fn on_buffer_event(
&mut self,
buffer: ModelHandle<Buffer>,
event: BufferEvent,
cx: &mut ModelContext<Self>,
- ) {
+ ) -> Option<()> {
match event {
BufferEvent::Operation(operation) => {
- if let Some(project_id) = self.remote_id() {
- let request = self.client.request(proto::UpdateBuffer {
- project_id,
- buffer_id: buffer.read(cx).remote_id(),
- operations: vec![language::proto::serialize_operation(&operation)],
- });
- cx.background().spawn(request).detach_and_log_err(cx);
- }
+ let project_id = self.remote_id()?;
+ let request = self.client.request(proto::UpdateBuffer {
+ project_id,
+ buffer_id: buffer.read(cx).remote_id(),
+ operations: vec![language::proto::serialize_operation(&operation)],
+ });
+ cx.background().spawn(request).detach_and_log_err(cx);
}
- BufferEvent::Saved => {
- if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
- let worktree_id = file.worktree_id(cx);
- if let Some(abs_path) = file.as_local().map(|file| file.abs_path(cx)) {
- let text_document = lsp::TextDocumentIdentifier {
- uri: lsp::Url::from_file_path(abs_path).unwrap(),
- };
+ BufferEvent::Edited => {
+ let buffer = buffer.read(cx);
+ let file = File::from_dyn(buffer.file())?;
+ let worktree_id = file.worktree_id(cx);
+ let abs_path = file.as_local()?.abs_path(cx);
+ let uri = lsp::Url::from_file_path(abs_path).unwrap();
+ let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
+ let (version, prev_snapshot) = buffer_snapshots.last()?;
+ let next_snapshot = buffer.text_snapshot();
+ let next_version = version + 1;
+
+ let content_changes = buffer
+ .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
+ .map(|edit| {
+ let edit_start = edit.new.start.0;
+ let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
+ let new_text = next_snapshot
+ .text_for_range(edit.new.start.1..edit.new.end.1)
+ .collect();
+ lsp::TextDocumentContentChangeEvent {
+ range: Some(lsp::Range::new(
+ edit_start.to_lsp_position(),
+ edit_end.to_lsp_position(),
+ )),
+ range_length: None,
+ text: new_text,
+ }
+ })
+ .collect();
- let mut notifications = Vec::new();
- for ((lang_server_worktree_id, _), lang_server) in &self.language_servers {
- if *lang_server_worktree_id != worktree_id {
- continue;
- }
+ let changes = lsp::DidChangeTextDocumentParams {
+ text_document: lsp::VersionedTextDocumentIdentifier::new(uri, next_version),
+ content_changes,
+ };
- notifications.push(
- lang_server.notify::<lsp::notification::DidSaveTextDocument>(
- lsp::DidSaveTextDocumentParams {
- text_document: text_document.clone(),
- text: None,
- },
- ),
- );
- }
+ buffer_snapshots.push((next_version, next_snapshot));
- cx.background()
- .spawn(futures::future::try_join_all(notifications))
- .detach_and_log_err(cx);
- }
+ let mut notifications = Vec::new();
+ for lang_server in self.language_servers_for_worktree(worktree_id) {
+ notifications.push(
+ lang_server
+ .notify::<lsp::notification::DidChangeTextDocument>(changes.clone()),
+ );
+ }
+
+ cx.background()
+ .spawn(futures::future::try_join_all(notifications))
+ .detach_and_log_err(cx);
+ }
+ BufferEvent::Saved => {
+ let file = File::from_dyn(buffer.read(cx).file())?;
+ let worktree_id = file.worktree_id(cx);
+ let abs_path = file.as_local()?.abs_path(cx);
+ let text_document = lsp::TextDocumentIdentifier {
+ uri: lsp::Url::from_file_path(abs_path).unwrap(),
+ };
+
+ let mut notifications = Vec::new();
+ for lang_server in self.language_servers_for_worktree(worktree_id) {
+ notifications.push(
+ lang_server.notify::<lsp::notification::DidSaveTextDocument>(
+ lsp::DidSaveTextDocumentParams {
+ text_document: text_document.clone(),
+ text: None,
+ },
+ ),
+ );
}
+
+ cx.background()
+ .spawn(futures::future::try_join_all(notifications))
+ .detach_and_log_err(cx);
}
_ => {}
}
+
+ None
+ }
+
+ fn language_servers_for_worktree(
+ &self,
+ worktree_id: WorktreeId,
+ ) -> impl Iterator<Item = &Arc<LanguageServer>> {
+ self.language_servers.iter().filter_map(
+ move |((lang_server_worktree_id, _), lang_server)| {
+ if *lang_server_worktree_id == worktree_id {
+ Some(lang_server)
+ } else {
+ None
+ }
+ },
+ )
}
fn assign_language_to_buffer(
&mut self,
buffer: &ModelHandle<Buffer>,
- worktree: Option<&ModelHandle<Worktree>>,
cx: &mut ModelContext<Self>,
) -> Option<()> {
- let (path, full_path) = {
- let file = buffer.read(cx).file()?;
- (file.path().clone(), file.full_path(cx))
- };
-
- // If the buffer has a language, set it and start/assign the language server
- if let Some(language) = self.languages.select_language(&full_path) {
- buffer.update(cx, |buffer, cx| {
- buffer.set_language(Some(language.clone()), cx);
- });
-
- // For local worktrees, start a language server if needed.
- // Also assign the language server and any previously stored diagnostics to the buffer.
- if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
- let worktree_id = local_worktree.id();
- let worktree_abs_path = local_worktree.abs_path().clone();
- let buffer = buffer.downgrade();
- let language_server =
- self.start_language_server(worktree_id, worktree_abs_path, language, cx);
-
- cx.spawn_weak(|_, mut cx| async move {
- if let Some(language_server) = language_server.await {
- if let Some(buffer) = buffer.upgrade(&cx) {
- buffer.update(&mut cx, |buffer, cx| {
- buffer.set_language_server(Some(language_server), cx);
- });
- }
- }
- })
- .detach();
- }
- }
+ // If the buffer has a language, set it and start the language server if we haven't already.
+ let full_path = buffer.read(cx).file()?.full_path(cx);
+ let language = self.languages.select_language(&full_path)?;
+ buffer.update(cx, |buffer, cx| {
+ buffer.set_language(Some(language.clone()), cx);
+ });
- if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
- if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
- buffer.update(cx, |buffer, cx| {
- buffer.update_diagnostics(diagnostics, None, cx).log_err();
- });
- }
- }
+ let file = File::from_dyn(buffer.read(cx).file())?;
+ let worktree = file.worktree.read(cx).as_local()?;
+ let worktree_id = worktree.id();
+ let worktree_abs_path = worktree.abs_path().clone();
+ self.start_language_server(worktree_id, worktree_abs_path, language, cx);
None
}
@@ -1057,14 +1161,14 @@ impl Project {
worktree_path: Arc<Path>,
language: Arc<Language>,
cx: &mut ModelContext<Self>,
- ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
+ ) {
enum LspEvent {
DiagnosticsStart,
DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
DiagnosticsFinish,
}
- let key = (worktree_id, language.name().to_string());
+ let key = (worktree_id, language.name());
self.started_language_servers
.entry(key.clone())
.or_insert_with(|| {
@@ -1077,11 +1181,44 @@ impl Project {
let rpc = self.client.clone();
cx.spawn_weak(|this, mut cx| async move {
let language_server = language_server?.await.log_err()?;
- if let Some(this) = this.upgrade(&cx) {
- this.update(&mut cx, |this, _| {
- this.language_servers.insert(key, language_server.clone());
- });
- }
+ let this = this.upgrade(&cx)?;
+ let mut open_notifications = Vec::new();
+ this.update(&mut cx, |this, cx| {
+ this.language_servers.insert(key, language_server.clone());
+ for buffer in this.opened_buffers.values() {
+ if let Some(buffer) = buffer.upgrade(cx) {
+ let buffer = buffer.read(cx);
+ if let Some(file) = File::from_dyn(buffer.file()) {
+ if let Some(file) = file.as_local() {
+ let versions = this
+ .buffer_snapshots
+ .entry(buffer.remote_id())
+ .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
+ let (version, initial_snapshot) = versions.last().unwrap();
+ let uri =
+ lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
+ open_notifications.push(
+ language_server
+ .notify::<lsp::notification::DidOpenTextDocument>(
+ lsp::DidOpenTextDocumentParams {
+ text_document: lsp::TextDocumentItem::new(
+ uri,
+ Default::default(),
+ *version,
+ initial_snapshot.text(),
+ ),
+ },
+ ),
+ );
+ }
+ }
+ }
+ }
+ });
+
+ futures::future::try_join_all(open_notifications)
+ .await
+ .log_err();
let disk_based_sources = language
.disk_based_diagnostic_sources()
@@ -1153,6 +1290,7 @@ impl Project {
.detach();
// Process all the LSP events.
+ let this = this.downgrade();
cx.spawn(|mut cx| async move {
while let Ok(message) = diagnostics_rx.recv().await {
let this = this.upgrade(&cx)?;
@@ -1194,9 +1332,7 @@ impl Project {
Some(language_server)
})
- .shared()
- })
- .clone()
+ });
}
pub fn update_diagnostics(
@@ -1326,9 +1462,7 @@ impl Project {
.file()
.map_or(false, |file| *file.path() == project_path.path)
{
- buffer.update(cx, |buffer, cx| {
- buffer.update_diagnostics(diagnostics.clone(), version, cx)
- })?;
+ self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
break;
}
}
@@ -1343,6 +1477,90 @@ impl Project {
Ok(())
}
+ fn update_buffer_diagnostics(
+ &mut self,
+ buffer: &ModelHandle<Buffer>,
+ mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
+ version: Option<i32>,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
+ Ordering::Equal
+ .then_with(|| b.is_primary.cmp(&a.is_primary))
+ .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
+ .then_with(|| a.severity.cmp(&b.severity))
+ .then_with(|| a.message.cmp(&b.message))
+ }
+
+ let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
+
+ diagnostics.sort_unstable_by(|a, b| {
+ Ordering::Equal
+ .then_with(|| a.range.start.cmp(&b.range.start))
+ .then_with(|| b.range.end.cmp(&a.range.end))
+ .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
+ });
+
+ let mut sanitized_diagnostics = Vec::new();
+ let mut edits_since_save = snapshot
+ .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
+ .peekable();
+ let mut last_edit_old_end = PointUtf16::zero();
+ let mut last_edit_new_end = PointUtf16::zero();
+ 'outer: for entry in diagnostics {
+ let mut start = entry.range.start;
+ let mut end = entry.range.end;
+
+ // Some diagnostics are based on files on disk instead of buffers'
+ // current contents. Adjust these diagnostics' ranges to reflect
+ // any unsaved edits.
+ if entry.diagnostic.is_disk_based {
+ while let Some(edit) = edits_since_save.peek() {
+ if edit.old.end <= start {
+ last_edit_old_end = edit.old.end;
+ last_edit_new_end = edit.new.end;
+ edits_since_save.next();
+ } else if edit.old.start <= end && edit.old.end >= start {
+ continue 'outer;
+ } else {
+ break;
+ }
+ }
+
+ let start_overshoot = start - last_edit_old_end;
+ start = last_edit_new_end;
+ start += start_overshoot;
+
+ let end_overshoot = end - last_edit_old_end;
+ end = last_edit_new_end;
+ end += end_overshoot;
+ }
+
+ let mut range = snapshot.clip_point_utf16(start, Bias::Left)
+ ..snapshot.clip_point_utf16(end, Bias::Right);
+
+ // Expand empty ranges by one character
+ if range.start == range.end {
+ range.end.column += 1;
+ range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
+ if range.start == range.end && range.end.column > 0 {
+ range.start.column -= 1;
+ range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
+ }
+ }
+
+ sanitized_diagnostics.push(DiagnosticEntry {
+ range,
+ diagnostic: entry.diagnostic,
+ });
+ }
+ drop(edits_since_save);
+
+ let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
+ buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
+ Ok(())
+ }
+
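To make the overshoot arithmetic above concrete, here is a standalone sketch using a toy position type in place of PointUtf16. A diagnostic reported against the saved file starts below an unsaved multi-line edit, so its start is shifted by the difference between the edit's old and new end points:

#[derive(Clone, Copy, Debug, PartialEq)]
struct Pos {
    row: u32,
    column: u32,
}

// Point-style subtraction: when the rows differ, the later point's column is
// kept as-is (columns are only comparable within the same row).
fn sub(a: Pos, b: Pos) -> Pos {
    if a.row > b.row {
        Pos { row: a.row - b.row, column: a.column }
    } else {
        Pos { row: 0, column: a.column - b.column }
    }
}

// Point-style addition: a same-row delta only bumps the column.
fn add(a: Pos, b: Pos) -> Pos {
    if b.row == 0 {
        Pos { row: a.row, column: a.column + b.column }
    } else {
        Pos { row: a.row + b.row, column: b.column }
    }
}

fn main() {
    // The saved file reports a diagnostic starting at (10, 4). The last
    // unsaved edit above it ended at (3, 0) in the saved text and at (7, 0)
    // in the current buffer, i.e. the buffer gained four rows.
    let last_edit_old_end = Pos { row: 3, column: 0 };
    let last_edit_new_end = Pos { row: 7, column: 0 };
    let start = Pos { row: 10, column: 4 };

    // start = last_edit_new_end + (start - last_edit_old_end)
    let overshoot = sub(start, last_edit_old_end);
    let adjusted = add(last_edit_new_end, overshoot);

    assert_eq!(adjusted, Pos { row: 14, column: 4 });
    println!("{adjusted:?}");
}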
pub fn format(
&self,
buffers: HashSet<ModelHandle<Buffer>>,
@@ -1361,7 +1579,7 @@ impl Project {
if let Some(lang) = buffer.language() {
if let Some(server) = self
.language_servers
- .get(&(worktree.read(cx).id(), lang.name().to_string()))
+ .get(&(worktree.read(cx).id(), lang.name()))
{
lang_server = server.clone();
} else {
@@ -1449,9 +1667,9 @@ impl Project {
};
if let Some(lsp_edits) = lsp_edits {
- let edits = buffer
- .update(&mut cx, |buffer, cx| {
- buffer.edits_from_lsp(lsp_edits, None, cx)
+ let edits = this
+ .update(&mut cx, |this, cx| {
+ this.edits_from_lsp(&buffer, lsp_edits, None, cx)
})
.await?;
buffer.update(&mut cx, |buffer, cx| {
@@ -1616,10 +1834,10 @@ impl Project {
cx: &mut ModelContext<Self>,
) -> Task<Result<ModelHandle<Buffer>>> {
if self.is_local() {
- let language_server = if let Some(server) = self
- .language_servers
- .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
- {
+ let language_server = if let Some(server) = self.language_servers.get(&(
+ symbol.source_worktree_id,
+ Arc::from(symbol.language_name.as_str()),
+ )) {
server.clone()
} else {
return Task::ready(Err(anyhow!(
@@ -1645,7 +1863,7 @@ impl Project {
self.open_local_buffer_via_lsp(
symbol_uri,
- symbol.language_name.clone(),
+ Arc::from(symbol.language_name.as_str()),
language_server,
cx,
)
@@ -1689,11 +1907,12 @@ impl Project {
if worktree.read(cx).as_local().is_some() {
let buffer_abs_path = buffer_abs_path.unwrap();
- let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
- server
- } else {
- return Task::ready(Ok(Default::default()));
- };
+ let lang_server =
+ if let Some(server) = self.language_server_for_buffer(&source_buffer_handle, cx) {
+ server.clone()
+ } else {
+ return Task::ready(Ok(Default::default()));
+ };
cx.spawn(|_, cx| async move {
let completions = lang_server
@@ -1800,19 +2019,22 @@ impl Project {
let buffer_id = buffer.remote_id();
if self.is_local() {
- let lang_server = if let Some(language_server) = buffer.language_server() {
- language_server.clone()
- } else {
- return Task::ready(Err(anyhow!("buffer does not have a language server")));
- };
+ let lang_server =
+ if let Some(server) = self.language_server_for_buffer(&buffer_handle, cx) {
+ server.clone()
+ } else {
+ return Task::ready(Ok(Default::default()));
+ };
- cx.spawn(|_, mut cx| async move {
+ cx.spawn(|this, mut cx| async move {
let resolved_completion = lang_server
.request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
.await?;
if let Some(edits) = resolved_completion.additional_text_edits {
- let edits = buffer_handle
- .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
+ let edits = this
+ .update(&mut cx, |this, cx| {
+ this.edits_from_lsp(&buffer_handle, edits, None, cx)
+ })
.await?;
buffer_handle.update(&mut cx, |buffer, cx| {
buffer.finalize_last_transaction();
@@ -1892,7 +2114,7 @@ impl Project {
let lang_name;
let lang_server;
if let Some(lang) = buffer.language() {
- lang_name = lang.name().to_string();
+ lang_name = lang.name();
if let Some(server) = self
.language_servers
.get(&(worktree.read(cx).id(), lang_name.clone()))
@@ -1993,15 +2215,16 @@ impl Project {
if self.is_local() {
let buffer = buffer_handle.read(cx);
let lang_name = if let Some(lang) = buffer.language() {
- lang.name().to_string()
+ lang.name()
} else {
return Task::ready(Ok(Default::default()));
};
- let lang_server = if let Some(language_server) = buffer.language_server() {
- language_server.clone()
- } else {
- return Task::ready(Err(anyhow!("buffer does not have a language server")));
- };
+ let lang_server =
+ if let Some(server) = self.language_server_for_buffer(&buffer_handle, cx) {
+ server.clone()
+ } else {
+ return Task::ready(Ok(Default::default()));
+ };
let range = action.range.to_point_utf16(buffer);
cx.spawn(|this, mut cx| async move {
@@ -2074,7 +2297,7 @@ impl Project {
this: ModelHandle<Self>,
edit: lsp::WorkspaceEdit,
push_to_history: bool,
- language_name: String,
+ language_name: Arc<str>,
language_server: Arc<LanguageServer>,
cx: &mut AsyncAppContext,
) -> Result<ProjectTransaction> {
@@ -2158,13 +2381,18 @@ impl Project {
})
.await?;
- let edits = buffer_to_edit
- .update(cx, |buffer, cx| {
+ let edits = this
+ .update(cx, |this, cx| {
let edits = op.edits.into_iter().map(|edit| match edit {
lsp::OneOf::Left(edit) => edit,
lsp::OneOf::Right(edit) => edit.text_edit,
});
- buffer.edits_from_lsp(edits, op.text_document.version, cx)
+ this.edits_from_lsp(
+ &buffer_to_edit,
+ edits,
+ op.text_document.version,
+ cx,
+ )
})
.await?;
@@ -2441,7 +2669,9 @@ impl Project {
let buffer = buffer_handle.read(cx);
if self.is_local() {
let file = File::from_dyn(buffer.file()).and_then(File::as_local);
- if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
+ if let Some((file, language_server)) =
+ file.zip(self.language_server_for_buffer(&buffer_handle, cx).cloned())
+ {
let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
return cx.spawn(|this, cx| async move {
if !language_server
@@ -2602,7 +2832,7 @@ impl Project {
self.worktrees
.push(WorktreeHandle::Strong(worktree.clone()));
} else {
- cx.observe_release(&worktree, |this, cx| {
+ cx.observe_release(&worktree, |this, _, cx| {
this.worktrees
.retain(|worktree| worktree.upgrade(cx).is_some());
cx.notify();
@@ -3441,9 +3671,7 @@ impl Project {
Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
});
- this.update(&mut cx, |this, cx| {
- this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
- })?;
+ this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
*opened_buffer_tx.borrow_mut().borrow_mut() = ();
Ok(buffer)
@@ -3570,6 +3798,161 @@ impl Project {
.await
}
}
+
+ fn edits_from_lsp(
+ &mut self,
+ buffer: &ModelHandle<Buffer>,
+ lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
+ version: Option<i32>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
+ let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
+ cx.background().spawn(async move {
+ let snapshot = snapshot?;
+ let mut lsp_edits = lsp_edits
+ .into_iter()
+ .map(|edit| (range_from_lsp(edit.range), edit.new_text))
+ .peekable();
+
+ let mut edits = Vec::new();
+ while let Some((mut range, mut new_text)) = lsp_edits.next() {
+ // Combine any LSP edits that are adjacent.
+ //
+ // Also, combine LSP edits that are separated from each other by only
+ // a newline. This is important because for some code actions,
+ // rust-analyzer rewrites the entire buffer via a series of edits that
+ // are separated by unchanged newline characters.
+ //
+ // In order for the diffing logic below to work properly, any edits that
+ // cancel each other out must be combined into one.
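+ //
+ // As a hypothetical illustration: if the server rewrites line 0 with an
+ // edit ending at that line's last character and rewrites line 1 with an
+ // edit starting at column 0, only the newline between them is unchanged,
+ // so the two are merged into one edit whose new text re-inserts that
+ // newline between the two replacement strings.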
+ while let Some((next_range, next_text)) = lsp_edits.peek() {
+ if next_range.start > range.end {
+ if next_range.start.row > range.end.row + 1
+ || next_range.start.column > 0
+ || snapshot.clip_point_utf16(
+ PointUtf16::new(range.end.row, u32::MAX),
+ Bias::Left,
+ ) > range.end
+ {
+ break;
+ }
+ new_text.push('\n');
+ }
+ range.end = next_range.end;
+ new_text.push_str(&next_text);
+ lsp_edits.next();
+ }
+
+ if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
+ || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
+ {
+ return Err(anyhow!("invalid edits received from language server"));
+ }
+
+ // For multiline edits, perform a diff of the old and new text so that
+ // we can identify the changes more precisely, preserving the locations
+ // of any anchors positioned in the unchanged regions.
+ if range.end.row > range.start.row {
+ let mut offset = range.start.to_offset(&snapshot);
+ let old_text = snapshot.text_for_range(range).collect::<String>();
+
+ let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
+ let mut moved_since_edit = true;
+ for change in diff.iter_all_changes() {
+ let tag = change.tag();
+ let value = change.value();
+ match tag {
+ ChangeTag::Equal => {
+ offset += value.len();
+ moved_since_edit = true;
+ }
+ ChangeTag::Delete => {
+ let start = snapshot.anchor_after(offset);
+ let end = snapshot.anchor_before(offset + value.len());
+ if moved_since_edit {
+ edits.push((start..end, String::new()));
+ } else {
+ edits.last_mut().unwrap().0.end = end;
+ }
+ offset += value.len();
+ moved_since_edit = false;
+ }
+ ChangeTag::Insert => {
+ if moved_since_edit {
+ let anchor = snapshot.anchor_after(offset);
+ edits.push((anchor.clone()..anchor, value.to_string()));
+ } else {
+ edits.last_mut().unwrap().1.push_str(value);
+ }
+ moved_since_edit = false;
+ }
+ }
+ }
+ } else if range.end == range.start {
+ let anchor = snapshot.anchor_after(range.start);
+ edits.push((anchor.clone()..anchor, new_text));
+ } else {
+ let edit_start = snapshot.anchor_after(range.start);
+ let edit_end = snapshot.anchor_before(range.end);
+ edits.push((edit_start..edit_end, new_text));
+ }
+ }
+
+ Ok(edits)
+ })
+ }
+
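A standalone sketch of the line-level diffing used above, built on the similar crate this change adds to Cargo.lock. It is simplified (it does not coalesce an adjacent delete and insert into one edit the way the code above does), but it shows the key property: the offset into the old text only advances through Equal and Delete chunks, so anchors in unchanged regions are left alone:

use similar::{ChangeTag, TextDiff};

fn main() {
    let old_text = "fn a() {\n    1\n}\n";
    let new_text = "fn a() {\n    2\n}\n";
    let diff = TextDiff::from_lines(old_text, new_text);

    let mut offset = 0;
    let mut edits: Vec<(std::ops::Range<usize>, String)> = Vec::new();
    for change in diff.iter_all_changes() {
        let value = change.value();
        match change.tag() {
            // Unchanged text: just move past it.
            ChangeTag::Equal => offset += value.len(),
            // Deleted text: emit a deletion covering it in old coordinates.
            ChangeTag::Delete => {
                edits.push((offset..offset + value.len(), String::new()));
                offset += value.len();
            }
            // Inserted text: emit an insertion at the current old offset.
            ChangeTag::Insert => {
                edits.push((offset..offset, value.to_string()));
            }
        }
    }

    // Only the middle line is touched; the first and last lines stay put.
    assert_eq!(
        edits,
        vec![(9_usize..15, String::new()), (15..15, "    2\n".to_string())]
    );
    println!("{edits:?}");
}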
+ fn buffer_snapshot_for_lsp_version(
+ &mut self,
+ buffer: &ModelHandle<Buffer>,
+ version: Option<i32>,
+ cx: &AppContext,
+ ) -> Result<TextBufferSnapshot> {
+ const OLD_VERSIONS_TO_RETAIN: i32 = 10;
+
+ if let Some(version) = version {
+ let buffer_id = buffer.read(cx).remote_id();
+ let snapshots = self
+ .buffer_snapshots
+ .get_mut(&buffer_id)
+ .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
+ let mut found_snapshot = None;
+ snapshots.retain(|(snapshot_version, snapshot)| {
+ if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
+ false
+ } else {
+ if *snapshot_version == version {
+ found_snapshot = Some(snapshot.clone());
+ }
+ true
+ }
+ });
+
+ found_snapshot.ok_or_else(|| {
+ anyhow!(
+ "snapshot not found for buffer {} at version {}",
+ buffer_id,
+ version
+ )
+ })
+ } else {
+ Ok((**buffer.read(cx)).clone())
+ }
+ }
+
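The version bookkeeping above exists because language servers report edits against a document version the buffer may already have moved past. A small self-contained sketch of the retention policy, with plain strings standing in for buffer snapshots:

const OLD_VERSIONS_TO_RETAIN: i32 = 10;

fn snapshot_for_version(
    snapshots: &mut Vec<(i32, String)>,
    version: i32,
) -> Option<String> {
    let mut found = None;
    snapshots.retain(|(snapshot_version, snapshot)| {
        if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
            false // too old to ever be referenced again
        } else {
            if *snapshot_version == version {
                found = Some(snapshot.clone());
            }
            true
        }
    });
    found
}

fn main() {
    let mut snapshots = (0..=20)
        .map(|v| (v, format!("text at v{v}")))
        .collect::<Vec<_>>();

    let snapshot = snapshot_for_version(&mut snapshots, 15);
    assert_eq!(snapshot.as_deref(), Some("text at v15"));
    // Versions more than 10 behind the requested one were pruned.
    assert_eq!(snapshots.first().unwrap().0, 5);
    println!("{snapshot:?}, {} snapshots retained", snapshots.len());
}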
+ fn language_server_for_buffer(
+ &self,
+ buffer: &ModelHandle<Buffer>,
+ cx: &AppContext,
+ ) -> Option<&Arc<LanguageServer>> {
+ let buffer = buffer.read(cx);
+ if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
+ let worktree_id = file.worktree_id(cx);
+ self.language_servers.get(&(worktree_id, language.name()))
+ } else {
+ None
+ }
+ }
}
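Several of the changes above re-key the server registry from (WorktreeId, String) to (WorktreeId, Arc<str>), so looking up or re-registering a server clones a reference-counted name instead of copying the string. A minimal sketch of that keying, with stand-ins for WorktreeId and LanguageServer:

use std::{collections::HashMap, sync::Arc};

type WorktreeId = usize;

struct FakeLanguageServer {
    language_name: Arc<str>,
}

fn main() {
    let mut language_servers: HashMap<(WorktreeId, Arc<str>), Arc<FakeLanguageServer>> =
        HashMap::new();

    let rust: Arc<str> = Arc::from("Rust");
    language_servers.insert(
        (1, rust.clone()),
        Arc::new(FakeLanguageServer { language_name: rust.clone() }),
    );

    // Cloning the Arc<str> for the lookup key is a reference-count bump,
    // not a fresh String allocation.
    let server = language_servers
        .get(&(1, rust.clone()))
        .expect("server should be registered for (worktree 1, Rust)");
    println!("found server for {}", server.language_name);
}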
impl WorktreeHandle {
@@ -3802,7 +4185,8 @@ mod tests {
use futures::StreamExt;
use gpui::test::subscribe;
use language::{
- tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
+ tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
+ ToPoint,
};
use lsp::Url;
use serde_json::json;
@@ -3875,66 +4259,289 @@ mod tests {
}
#[gpui::test]
- async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
- let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
- let progress_token = language_server_config
- .disk_based_diagnostics_progress_token
- .clone()
- .unwrap();
+ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
+ cx.foreground().forbid_parking();
- let language = Arc::new(Language::new(
+ let (lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
+ let rust_language = Arc::new(Language::new(
LanguageConfig {
name: "Rust".into(),
path_suffixes: vec!["rs".to_string()],
- language_server: Some(language_server_config),
+ language_server: Some(lsp_config),
..Default::default()
},
Some(tree_sitter_rust::language()),
));
+ let (json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
+ let json_language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "JSON".into(),
+ path_suffixes: vec!["json".to_string()],
+ language_server: Some(json_lsp_config),
+ ..Default::default()
+ },
+ None,
+ ));
+
let fs = FakeFs::new(cx.background());
fs.insert_tree(
- "/dir",
+ "/the-root",
json!({
- "a.rs": "fn a() { A }",
- "b.rs": "const y: i32 = 1",
+ "test.rs": "const A: i32 = 1;",
+ "Cargo.toml": "a = 1",
+ "package.json": "{\"a\": 1}",
}),
)
.await;
let project = Project::test(fs, cx);
project.update(cx, |project, _| {
- Arc::get_mut(&mut project.languages).unwrap().add(language);
+ project.languages.add(rust_language);
+ project.languages.add(json_language);
});
- let (tree, _) = project
+ let worktree_id = project
.update(cx, |project, cx| {
- project.find_or_create_local_worktree("/dir", true, cx)
+ project.find_or_create_local_worktree("/the-root", true, cx)
})
.await
- .unwrap();
- let worktree_id = tree.read_with(cx, |tree, _| tree.id());
-
- cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
- .await;
+ .unwrap()
+ .0
+ .read_with(cx, |tree, _| tree.id());
- // Cause worktree to start the fake language server
- let _buffer = project
+ // Open a buffer without an associated language server.
+ let toml_buffer = project
.update(cx, |project, cx| {
- project.open_buffer((worktree_id, Path::new("b.rs")), cx)
+ project.open_buffer((worktree_id, "Cargo.toml"), cx)
})
.await
.unwrap();
- let mut events = subscribe(&project, cx);
+ // Open a buffer with an associated language server.
+ let rust_buffer = project
+ .update(cx, |project, cx| {
+ project.open_buffer((worktree_id, "test.rs"), cx)
+ })
+ .await
+ .unwrap();
- let mut fake_server = fake_servers.next().await.unwrap();
- fake_server.start_progress(&progress_token).await;
+ // A server is started up, and it is notified about both open buffers.
+ let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
assert_eq!(
- events.next().await.unwrap(),
- Event::DiskBasedDiagnosticsStarted
+ fake_rust_server
+ .receive_notification::<lsp::notification::DidOpenTextDocument>()
+ .await
+ .text_document,
+ lsp::TextDocumentItem {
+ uri: lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap(),
+ version: 0,
+ text: "a = 1".to_string(),
+ language_id: Default::default()
+ }
);
-
+ assert_eq!(
+ fake_rust_server
+ .receive_notification::<lsp::notification::DidOpenTextDocument>()
+ .await
+ .text_document,
+ lsp::TextDocumentItem {
+ uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
+ version: 0,
+ text: "const A: i32 = 1;".to_string(),
+ language_id: Default::default()
+ }
+ );
+
+ // Edit a buffer. The changes are reported to the language server.
+ rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
+ assert_eq!(
+ fake_rust_server
+ .receive_notification::<lsp::notification::DidChangeTextDocument>()
+ .await
+ .text_document,
+ lsp::VersionedTextDocumentIdentifier::new(
+ lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
+ 1
+ )
+ );
+
+ // Open a third buffer with a different associated language server.
+ let json_buffer = project
+ .update(cx, |project, cx| {
+ project.open_buffer((worktree_id, "package.json"), cx)
+ })
+ .await
+ .unwrap();
+
+ // Another language server is started up, and it is notified about
+ // all three open buffers.
+ let mut fake_json_server = fake_json_servers.next().await.unwrap();
+ assert_eq!(
+ fake_json_server
+ .receive_notification::<lsp::notification::DidOpenTextDocument>()
+ .await
+ .text_document,
+ lsp::TextDocumentItem {
+ uri: lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap(),
+ version: 0,
+ text: "a = 1".to_string(),
+ language_id: Default::default()
+ }
+ );
+ assert_eq!(
+ fake_json_server
+ .receive_notification::<lsp::notification::DidOpenTextDocument>()
+ .await
+ .text_document,
+ lsp::TextDocumentItem {
+ uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
+ version: 0,
+ text: "{\"a\": 1}".to_string(),
+ language_id: Default::default()
+ }
+ );
+ assert_eq!(
+ fake_json_server
+ .receive_notification::<lsp::notification::DidOpenTextDocument>()
+ .await
+ .text_document,
+ lsp::TextDocumentItem {
+ uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
+ version: 1,
+ text: "const A: i32 = 12;".to_string(),
+ language_id: Default::default()
+ }
+ );
+
+ // The first language server is also notified about the new open buffer.
+ assert_eq!(
+ fake_rust_server
+ .receive_notification::<lsp::notification::DidOpenTextDocument>()
+ .await
+ .text_document,
+ lsp::TextDocumentItem {
+ uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
+ version: 0,
+ text: "{\"a\": 1}".to_string(),
+ language_id: Default::default()
+ }
+ );
+
+ // Edit a buffer. The changes are reported to both language servers.
+ toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
+ assert_eq!(
+ fake_rust_server
+ .receive_notification::<lsp::notification::DidChangeTextDocument>()
+ .await
+ .text_document,
+ lsp::VersionedTextDocumentIdentifier::new(
+ lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap(),
+ 1
+ )
+ );
+ assert_eq!(
+ fake_json_server
+ .receive_notification::<lsp::notification::DidChangeTextDocument>()
+ .await,
+ lsp::DidChangeTextDocumentParams {
+ text_document: lsp::VersionedTextDocumentIdentifier::new(
+ lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap(),
+ 1
+ ),
+ content_changes: vec![lsp::TextDocumentContentChangeEvent {
+ range: Some(lsp::Range::new(
+ lsp::Position::new(0, 5),
+ lsp::Position::new(0, 5)
+ )),
+ range_length: None,
+ text: "23".to_string(),
+ }],
+ },
+ );
+
+ // Close a buffer. Both language servers are notified.
+ cx.update(|_| drop(json_buffer));
+ let close_message = lsp::DidCloseTextDocumentParams {
+ text_document: lsp::TextDocumentIdentifier::new(
+ lsp::Url::from_file_path("/the-root/package.json").unwrap(),
+ ),
+ };
+ assert_eq!(
+ fake_json_server
+ .receive_notification::<lsp::notification::DidCloseTextDocument>()
+ .await,
+ close_message,
+ );
+ assert_eq!(
+ fake_rust_server
+ .receive_notification::<lsp::notification::DidCloseTextDocument>()
+ .await,
+ close_message,
+ );
+ }
+
+ #[gpui::test]
+ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
+ cx.foreground().forbid_parking();
+
+ let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
+ let progress_token = language_server_config
+ .disk_based_diagnostics_progress_token
+ .clone()
+ .unwrap();
+
+ let language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ path_suffixes: vec!["rs".to_string()],
+ language_server: Some(language_server_config),
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ ));
+
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/dir",
+ json!({
+ "a.rs": "fn a() { A }",
+ "b.rs": "const y: i32 = 1",
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, cx);
+ project.update(cx, |project, _| project.languages.add(language));
+
+ let (tree, _) = project
+ .update(cx, |project, cx| {
+ project.find_or_create_local_worktree("/dir", true, cx)
+ })
+ .await
+ .unwrap();
+ let worktree_id = tree.read_with(cx, |tree, _| tree.id());
+
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ // Cause worktree to start the fake language server
+ let _buffer = project
+ .update(cx, |project, cx| {
+ project.open_buffer((worktree_id, Path::new("b.rs")), cx)
+ })
+ .await
+ .unwrap();
+
+ let mut events = subscribe(&project, cx);
+
+ let mut fake_server = fake_servers.next().await.unwrap();
+ fake_server.start_progress(&progress_token).await;
+ assert_eq!(
+ events.next().await.unwrap(),
+ Event::DiskBasedDiagnosticsStarted
+ );
+
fake_server.start_progress(&progress_token).await;
fake_server.end_progress(&progress_token).await;
fake_server.start_progress(&progress_token).await;
@@ -556,6 +556,7 @@ impl LocalWorktree {
}
pub fn diagnostics_for_path(&self, path: &Path) -> Option<Vec<DiagnosticEntry<PointUtf16>>> {
self.diagnostics.get(path).cloned()
}
@@ -5,7 +5,7 @@ use gpui::{
action, elements::*, keymap::Binding, platform::CursorStyle, Entity, MutableAppContext,
RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
};
-use language::AnchorRangeExt;
+use language::OffsetRangeExt;
use postage::watch;
use project::search::SearchQuery;
use std::ops::Range;
@@ -1011,8 +1011,8 @@ mod tests {
};
use gpui::{executor, ModelHandle, TestAppContext};
use language::{
- tree_sitter_rust, AnchorRangeExt, Diagnostic, DiagnosticEntry, Language, LanguageConfig,
- LanguageRegistry, LanguageServerConfig, Point, ToLspPosition,
+ tree_sitter_rust, Diagnostic, DiagnosticEntry, Language, LanguageConfig, LanguageRegistry,
+ LanguageServerConfig, OffsetRangeExt, Point, ToLspPosition,
};
use lsp;
use parking_lot::Mutex;
@@ -1,5 +1,5 @@
use super::{Point, ToOffset};
-use crate::{rope::TextDimension, BufferSnapshot, PointUtf16, ToPointUtf16};
+use crate::{rope::TextDimension, BufferSnapshot, PointUtf16, ToPoint, ToPointUtf16};
use anyhow::Result;
use std::{cmp::Ordering, fmt::Debug, ops::Range};
use sum_tree::Bias;
@@ -74,11 +74,33 @@ impl Anchor {
}
}
+pub trait OffsetRangeExt {
+ fn to_offset(&self, snapshot: &BufferSnapshot) -> Range<usize>;
+ fn to_point(&self, snapshot: &BufferSnapshot) -> Range<Point>;
+ fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> Range<PointUtf16>;
+}
+
+impl<T> OffsetRangeExt for Range<T>
+where
+ T: ToOffset,
+{
+ fn to_offset(&self, snapshot: &BufferSnapshot) -> Range<usize> {
+ self.start.to_offset(snapshot)..self.end.to_offset(snapshot)
+ }
+
+ fn to_point(&self, snapshot: &BufferSnapshot) -> Range<Point> {
+ self.start.to_offset(snapshot).to_point(snapshot)
+ ..self.end.to_offset(snapshot).to_point(snapshot)
+ }
+
+ fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> Range<PointUtf16> {
+ self.start.to_offset(snapshot).to_point_utf16(snapshot)
+ ..self.end.to_offset(snapshot).to_point_utf16(snapshot)
+ }
+}
+
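The new trait generalizes what AnchorRangeExt used to provide: a blanket impl gives every Range<T> with ToOffset endpoints the range-level conversions. A toy, self-contained version of the pattern, where Snapshot and ToOffset are simple stand-ins rather than the real text-crate types:

use std::ops::Range;

struct Snapshot {
    text: String,
}

trait ToOffset {
    fn to_offset(&self, snapshot: &Snapshot) -> usize;
}

impl ToOffset for usize {
    fn to_offset(&self, snapshot: &Snapshot) -> usize {
        (*self).min(snapshot.text.len())
    }
}

// A (row, column) pair resolves by summing the lengths of the preceding lines.
impl ToOffset for (usize, usize) {
    fn to_offset(&self, snapshot: &Snapshot) -> usize {
        let line_start: usize = snapshot
            .text
            .split_inclusive('\n')
            .take(self.0)
            .map(str::len)
            .sum();
        line_start + self.1
    }
}

trait OffsetRangeExt {
    fn to_offset(&self, snapshot: &Snapshot) -> Range<usize>;
}

// The blanket impl: any range whose endpoints resolve to offsets gets the
// range-level conversion for free, just like the impl above.
impl<T: ToOffset> OffsetRangeExt for Range<T> {
    fn to_offset(&self, snapshot: &Snapshot) -> Range<usize> {
        self.start.to_offset(snapshot)..self.end.to_offset(snapshot)
    }
}

fn main() {
    let snapshot = Snapshot { text: "hello\nworld\n".to_string() };
    let range = ((1usize, 0usize)..(1, 5)).to_offset(&snapshot);
    assert_eq!(range, 6..11);
    assert_eq!(&snapshot.text[range], "world");
}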
pub trait AnchorRangeExt {
fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering>;
- fn to_offset(&self, content: &BufferSnapshot) -> Range<usize>;
- fn to_point(&self, content: &BufferSnapshot) -> Range<Point>;
- fn to_point_utf16(&self, content: &BufferSnapshot) -> Range<PointUtf16>;
}
impl AnchorRangeExt for Range<Anchor> {
@@ -88,16 +110,4 @@ impl AnchorRangeExt for Range<Anchor> {
ord @ _ => ord,
})
}
-
- fn to_offset(&self, content: &BufferSnapshot) -> Range<usize> {
- self.start.to_offset(&content)..self.end.to_offset(&content)
- }
-
- fn to_point(&self, content: &BufferSnapshot) -> Range<Point> {
- self.start.summary::<Point>(&content)..self.end.summary::<Point>(&content)
- }
-
- fn to_point_utf16(&self, content: &BufferSnapshot) -> Range<PointUtf16> {
- self.start.to_point_utf16(content)..self.end.to_point_utf16(content)
- }
}