Detailed changes
@@ -21,6 +21,15 @@ pub struct Lamport {
}
impl Local {
+ pub const MIN: Self = Self {
+ replica_id: ReplicaId::MIN,
+ value: Seq::MIN,
+ };
+ pub const MAX: Self = Self {
+ replica_id: ReplicaId::MAX,
+ value: Seq::MAX,
+ };
+
pub fn new(replica_id: ReplicaId) -> Self {
Self {
replica_id,
@@ -407,7 +407,7 @@ struct SelectNextState {
#[derive(Debug)]
struct BracketPairState {
- ranges: AnchorRangeSet,
+ ranges: Vec<Range<Anchor>>,
pair: BracketPair,
}
@@ -1292,10 +1292,9 @@ impl Editor {
}
fn autoclose_pairs(&mut self, cx: &mut ViewContext<Self>) {
- let selections = self.selections::<usize>(cx);
- let new_autoclose_pair_state = self.buffer.update(cx, |buffer, cx| {
- let buffer_snapshot = buffer.snapshot(cx);
- let autoclose_pair = buffer_snapshot.language().and_then(|language| {
+ let selections = self.selections::<usize>(cx).collect::<Vec<_>>();
+ let new_autoclose_pair = self.buffer.update(cx, |buffer, cx| {
+ let autoclose_pair = buffer.language().and_then(|language| {
let first_selection_start = selections.first().unwrap().start;
let pair = language.brackets().iter().find(|pair| {
buffer_snapshot.contains_str_at(
@@ -1333,15 +1332,14 @@ impl Editor {
if pair.end.len() == 1 {
let mut delta = 0;
Some(BracketPairState {
- ranges: buffer.anchor_range_set(
- Bias::Left,
- Bias::Right,
- selections.iter().map(move |selection| {
+ ranges: selections
+ .iter()
+ .map(move |selection| {
let offset = selection.start + delta;
delta += 1;
- offset..offset
- }),
- ),
+ buffer.anchor_before(offset)..buffer.anchor_after(offset)
+ })
+ .collect(),
pair,
})
} else {
@@ -1349,26 +1347,26 @@ impl Editor {
}
})
});
- self.autoclose_stack.extend(new_autoclose_pair_state);
+ self.autoclose_stack.extend(new_autoclose_pair);
}
fn skip_autoclose_end(&mut self, text: &str, cx: &mut ViewContext<Self>) -> bool {
- let old_selections = self.selections::<usize>(cx);
- let autoclose_pair_state = if let Some(autoclose_pair_state) = self.autoclose_stack.last() {
- autoclose_pair_state
+ let old_selections = self.selections::<usize>(cx).collect::<Vec<_>>();
+ let autoclose_pair = if let Some(autoclose_pair) = self.autoclose_stack.last() {
+ autoclose_pair
} else {
return false;
};
- if text != autoclose_pair_state.pair.end {
+ if text != autoclose_pair.pair.end {
return false;
}
- debug_assert_eq!(old_selections.len(), autoclose_pair_state.ranges.len());
+ debug_assert_eq!(old_selections.len(), autoclose_pair.ranges.len());
let buffer = self.buffer.read(cx).snapshot(cx);
if old_selections
.iter()
- .zip(autoclose_pair_state.ranges.ranges::<usize>(&buffer))
+ .zip(autoclose_pair.ranges.iter().map(|r| r.to_offset(&buffer)))
.all(|(selection, autoclose_range)| {
let autoclose_range_end = autoclose_range.end.to_offset(&buffer);
selection.is_empty() && selection.start == autoclose_range_end
@@ -2832,12 +2830,12 @@ impl Editor {
loop {
let next_group = buffer
.diagnostics_in_range::<_, usize>(search_start..buffer.len())
- .find_map(|(range, diagnostic)| {
- if diagnostic.is_primary
- && !range.is_empty()
- && Some(range.end) != active_primary_range.as_ref().map(|r| *r.end())
+ .find_map(|entry| {
+ if entry.diagnostic.is_primary
+ && !entry.range.is_empty()
+ && Some(entry.range.end) != active_primary_range.as_ref().map(|r| *r.end())
{
- Some((range, diagnostic.group_id))
+ Some((entry.range, entry.diagnostic.group_id))
} else {
None
}
@@ -2872,11 +2870,11 @@ impl Editor {
let primary_range_start = active_diagnostics.primary_range.start.to_offset(&buffer);
let is_valid = buffer
.diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone())
- .any(|(range, diagnostic)| {
- diagnostic.is_primary
- && !range.is_empty()
- && range.start == primary_range_start
- && diagnostic.message == active_diagnostics.primary_message
+ .any(|entry| {
+ entry.diagnostic.is_primary
+ && !entry.range.is_empty()
+ && entry.range.start == primary_range_start
+ && entry.diagnostic.message == active_diagnostics.primary_message
});
if is_valid != active_diagnostics.is_valid {
@@ -2907,15 +2905,15 @@ impl Editor {
let mut group_end = Point::zero();
let diagnostic_group = buffer
.diagnostic_group::<Point>(group_id)
- .map(|(range, diagnostic)| {
- if range.end > group_end {
- group_end = range.end;
+ .map(|entry| {
+ if entry.range.end > group_end {
+ group_end = entry.range.end;
}
- if diagnostic.is_primary {
- primary_range = Some(range.clone());
- primary_message = Some(diagnostic.message.clone());
+ if entry.diagnostic.is_primary {
+ primary_range = Some(entry.range.clone());
+ primary_message = Some(entry.diagnostic.message.clone());
}
- (range, diagnostic.clone())
+ entry
})
.collect::<Vec<_>>();
let primary_range = primary_range.unwrap();
@@ -2925,13 +2923,13 @@ impl Editor {
let blocks = display_map
.insert_blocks(
- diagnostic_group.iter().map(|(range, diagnostic)| {
+ diagnostic_group.iter().map(|entry| {
let build_settings = self.build_settings.clone();
- let diagnostic = diagnostic.clone();
+ let diagnostic = entry.diagnostic.clone();
let message_height = diagnostic.message.lines().count() as u8;
BlockProperties {
- position: range.start,
+ position: entry.range.start,
height: message_height,
render: Arc::new(move |cx| {
let settings = build_settings.borrow()(cx.cx);
@@ -2944,11 +2942,7 @@ impl Editor {
cx,
)
.into_iter()
- .zip(
- diagnostic_group
- .into_iter()
- .map(|(_, diagnostic)| diagnostic),
- )
+ .zip(diagnostic_group.into_iter().map(|entry| entry.diagnostic))
.collect();
Some(ActiveDiagnosticGroup {
@@ -3171,12 +3165,12 @@ impl Editor {
self.add_selections_state = None;
self.select_next_state = None;
self.select_larger_syntax_node_stack.clear();
- while let Some(autoclose_pair_state) = self.autoclose_stack.last() {
+ while let Some(autoclose_pair) = self.autoclose_stack.last() {
let all_selections_inside_autoclose_ranges =
- if selections.len() == autoclose_pair_state.ranges.len() {
+ if selections.len() == autoclose_pair.ranges.len() {
selections
.iter()
- .zip(autoclose_pair_state.ranges.ranges::<Point>(&buffer))
+ .zip(autoclose_pair.ranges.iter().map(|r| r.to_point(buffer)))
.all(|(selection, autoclose_range)| {
let head = selection.head().to_point(&buffer);
autoclose_range.start <= head && autoclose_range.end >= head
@@ -324,14 +324,13 @@ impl DiagnosticMessage {
fn update(&mut self, editor: ViewHandle<Editor>, cx: &mut ViewContext<Self>) {
let editor = editor.read(cx);
- let cursor_position = editor.newest_selection(cx).head();
- let new_diagnostic = editor
- .buffer()
- .read(cx)
- .diagnostics_in_range::<usize, usize>(cursor_position..cursor_position)
- .filter(|(range, _)| !range.is_empty())
- .min_by_key(|(range, diagnostic)| (diagnostic.severity, range.len()))
- .map(|(_, diagnostic)| diagnostic.clone());
+ let cursor_position = editor.newest_selection::<usize>(cx).head();
+ let buffer = editor.buffer().read(cx);
+ let new_diagnostic = buffer
+ .diagnostics_in_range::<_, usize>(cursor_position..cursor_position)
+ .filter(|entry| !entry.range.is_empty())
+ .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len()))
+ .map(|entry| entry.diagnostic);
if new_diagnostic != self.diagnostic {
self.diagnostic = new_diagnostic;
cx.notify();
@@ -1,4 +1,6 @@
+use crate::diagnostic_set::DiagnosticEntry;
pub use crate::{
+ diagnostic_set::DiagnosticSet,
highlight_map::{HighlightId, HighlightMap},
proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
PLAIN_TEXT,
@@ -21,6 +23,7 @@ use std::{
ffi::OsString,
future::Future,
iter::{Iterator, Peekable},
+ mem,
ops::{Deref, DerefMut, Range},
path::{Path, PathBuf},
str,
@@ -28,6 +31,7 @@ use std::{
time::{Duration, Instant, SystemTime, UNIX_EPOCH},
vec,
};
+use text::operation_queue::OperationQueue;
pub use text::{Buffer as TextBuffer, Operation as _, *};
use theme::SyntaxTheme;
use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
@@ -61,9 +65,10 @@ pub struct Buffer {
syntax_tree: Mutex<Option<SyntaxTree>>,
parsing_in_background: bool,
parse_count: usize,
- diagnostics: AnchorRangeMultimap<Diagnostic>,
+ diagnostics: DiagnosticSet,
diagnostics_update_count: usize,
language_server: Option<LanguageServerState>,
+ deferred_ops: OperationQueue<Operation>,
#[cfg(test)]
pub(crate) operations: Vec<Operation>,
}
@@ -71,7 +76,7 @@ pub struct Buffer {
pub struct BufferSnapshot {
text: text::BufferSnapshot,
tree: Option<Tree>,
- diagnostics: AnchorRangeMultimap<Diagnostic>,
+ diagnostics: DiagnosticSet,
diagnostics_update_count: usize,
is_parsing: bool,
language: Option<Arc<Language>>,
@@ -101,10 +106,13 @@ struct LanguageServerSnapshot {
path: Arc<Path>,
}
-#[derive(Clone)]
+#[derive(Clone, Debug)]
pub enum Operation {
Buffer(text::Operation),
- UpdateDiagnostics(AnchorRangeMultimap<Diagnostic>),
+ UpdateDiagnostics {
+ diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
+ lamport_timestamp: clock::Lamport,
+ },
}
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -173,8 +181,8 @@ struct SyntaxTree {
struct AutoindentRequest {
selection_set_ids: HashSet<SelectionSetId>,
before_edit: BufferSnapshot,
- edited: AnchorSet,
- inserted: Option<AnchorRangeSet>,
+ edited: Vec<Anchor>,
+ inserted: Option<Vec<Range<Anchor>>>,
}
#[derive(Debug)]
@@ -275,9 +283,11 @@ impl Buffer {
buffer.add_raw_selection_set(set.id, set);
}
let mut this = Self::build(buffer, file);
- if let Some(diagnostics) = message.diagnostics {
- this.apply_diagnostic_update(proto::deserialize_diagnostics(diagnostics), cx);
- }
+ this.apply_diagnostic_update(
+ Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
+ cx,
+ );
+
Ok(this)
}
@@ -294,7 +304,7 @@ impl Buffer {
.selection_sets()
.map(|(_, set)| proto::serialize_selection_set(set))
.collect(),
- diagnostics: Some(proto::serialize_diagnostics(&self.diagnostics)),
+ diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
}
}
@@ -331,6 +341,7 @@ impl Buffer {
diagnostics: Default::default(),
diagnostics_update_count: 0,
language_server: None,
+ deferred_ops: OperationQueue::new(),
#[cfg(test)]
operations: Default::default(),
}
@@ -690,6 +701,8 @@ impl Buffer {
mut diagnostics: Vec<lsp::Diagnostic>,
cx: &mut ModelContext<Self>,
) -> Result<Operation> {
+ diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));
+
let version = version.map(|version| version as usize);
let content = if let Some(version) = version {
let language_server = self.language_server.as_mut().unwrap();
@@ -710,91 +723,92 @@ impl Buffer {
.and_then(|language| language.disk_based_diagnostic_sources())
.unwrap_or(&empty_set);
- diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));
- self.diagnostics = {
- let mut edits_since_save = content
- .edits_since::<PointUtf16>(&self.saved_version)
- .peekable();
- let mut last_edit_old_end = PointUtf16::zero();
- let mut last_edit_new_end = PointUtf16::zero();
- let mut group_ids_by_diagnostic_range = HashMap::new();
- let mut diagnostics_by_group_id = HashMap::new();
- let mut next_group_id = 0;
- 'outer: for diagnostic in &diagnostics {
- let mut start = diagnostic.range.start.to_point_utf16();
- let mut end = diagnostic.range.end.to_point_utf16();
- let source = diagnostic.source.as_ref();
- let code = diagnostic.code.as_ref();
- let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
- .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
- .copied()
- .unwrap_or_else(|| {
- let group_id = post_inc(&mut next_group_id);
- for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
- group_ids_by_diagnostic_range.insert((source, code, range), group_id);
- }
- group_id
- });
-
- if diagnostic
- .source
- .as_ref()
- .map_or(false, |source| disk_based_sources.contains(source))
- {
- while let Some(edit) = edits_since_save.peek() {
- if edit.old.end <= start {
- last_edit_old_end = edit.old.end;
- last_edit_new_end = edit.new.end;
- edits_since_save.next();
- } else if edit.old.start <= end && edit.old.end >= start {
- continue 'outer;
- } else {
- break;
- }
+ let mut edits_since_save = content
+ .edits_since::<PointUtf16>(&self.saved_version)
+ .peekable();
+ let mut last_edit_old_end = PointUtf16::zero();
+ let mut last_edit_new_end = PointUtf16::zero();
+ let mut group_ids_by_diagnostic_range = HashMap::new();
+ let mut diagnostics_by_group_id = HashMap::new();
+ let mut next_group_id = 0;
+ 'outer: for diagnostic in &diagnostics {
+ let mut start = diagnostic.range.start.to_point_utf16();
+ let mut end = diagnostic.range.end.to_point_utf16();
+ let source = diagnostic.source.as_ref();
+ let code = diagnostic.code.as_ref();
+ let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
+ .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
+ .copied()
+ .unwrap_or_else(|| {
+ let group_id = post_inc(&mut next_group_id);
+ for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
+ group_ids_by_diagnostic_range.insert((source, code, range), group_id);
}
+ group_id
+ });
- start = last_edit_new_end + (start - last_edit_old_end);
- end = last_edit_new_end + (end - last_edit_old_end);
- }
-
- let mut range = content.clip_point_utf16(start, Bias::Left)
- ..content.clip_point_utf16(end, Bias::Right);
- if range.start == range.end {
- range.end.column += 1;
- range.end = content.clip_point_utf16(range.end, Bias::Right);
- if range.start == range.end && range.end.column > 0 {
- range.start.column -= 1;
- range.start = content.clip_point_utf16(range.start, Bias::Left);
+ if diagnostic
+ .source
+ .as_ref()
+ .map_or(false, |source| disk_based_sources.contains(source))
+ {
+ while let Some(edit) = edits_since_save.peek() {
+ if edit.old.end <= start {
+ last_edit_old_end = edit.old.end;
+ last_edit_new_end = edit.new.end;
+ edits_since_save.next();
+ } else if edit.old.start <= end && edit.old.end >= start {
+ continue 'outer;
+ } else {
+ break;
}
}
- diagnostics_by_group_id
- .entry(group_id)
- .or_insert(Vec::new())
- .push((
- range,
- Diagnostic {
- severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
- message: diagnostic.message.clone(),
- group_id,
- is_primary: false,
- },
- ));
+ start = last_edit_new_end + (start - last_edit_old_end);
+ end = last_edit_new_end + (end - last_edit_old_end);
}
- content.anchor_range_multimap(
- Bias::Left,
- Bias::Right,
- diagnostics_by_group_id
- .into_values()
- .flat_map(|mut diagnostics| {
- let primary_diagnostic =
- diagnostics.iter_mut().min_by_key(|d| d.1.severity).unwrap();
- primary_diagnostic.1.is_primary = true;
- diagnostics
- }),
- )
- };
+ let mut range = content.clip_point_utf16(start, Bias::Left)
+ ..content.clip_point_utf16(end, Bias::Right);
+ if range.start == range.end {
+ range.end.column += 1;
+ range.end = content.clip_point_utf16(range.end, Bias::Right);
+ if range.start == range.end && range.end.column > 0 {
+ range.start.column -= 1;
+ range.start = content.clip_point_utf16(range.start, Bias::Left);
+ }
+ }
+
+ diagnostics_by_group_id
+ .entry(group_id)
+ .or_insert(Vec::new())
+ .push(DiagnosticEntry {
+ range,
+ diagnostic: Diagnostic {
+ severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
+ message: diagnostic.message.clone(),
+ group_id,
+ is_primary: false,
+ },
+ });
+ }
+
+ drop(edits_since_save);
+ let mut diagnostics = mem::take(&mut self.diagnostics);
+ diagnostics.reset(
+ diagnostics_by_group_id
+ .into_values()
+ .flat_map(|mut diagnostics| {
+ let primary = diagnostics
+ .iter_mut()
+ .min_by_key(|entry| entry.diagnostic.severity)
+ .unwrap();
+ primary.diagnostic.is_primary = true;
+ diagnostics
+ }),
+ self,
+ );
+ self.diagnostics = diagnostics;
if let Some(version) = version {
let language_server = self.language_server.as_mut().unwrap();
@@ -811,32 +825,31 @@ impl Buffer {
self.diagnostics_update_count += 1;
cx.notify();
cx.emit(Event::DiagnosticsUpdated);
- Ok(Operation::UpdateDiagnostics(self.diagnostics.clone()))
+ Ok(Operation::UpdateDiagnostics {
+ diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
+ lamport_timestamp: self.lamport_timestamp(),
+ })
}
pub fn diagnostics_in_range<'a, T, O>(
&'a self,
search_range: Range<T>,
- ) -> impl Iterator<Item = (Range<O>, &Diagnostic)> + 'a
+ ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
where
T: 'a + ToOffset,
O: 'a + FromAnchor,
{
- self.diagnostics
- .intersecting_ranges(search_range, self, true)
- .map(move |(_, range, diagnostic)| (range, diagnostic))
+ self.diagnostics.range(search_range, self, true)
}
pub fn diagnostic_group<'a, O>(
&'a self,
group_id: usize,
- ) -> impl Iterator<Item = (Range<O>, &Diagnostic)> + 'a
+ ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
where
O: 'a + FromAnchor,
{
- self.diagnostics
- .filter(self, move |diagnostic| diagnostic.group_id == group_id)
- .map(move |(_, range, diagnostic)| (range, diagnostic))
+ self.diagnostics.group(group_id, self)
}
pub fn diagnostics_update_count(&self) -> usize {
@@ -879,13 +892,13 @@ impl Buffer {
for request in autoindent_requests {
let old_to_new_rows = request
.edited
- .iter::<Point>(&request.before_edit)
- .map(|point| point.row)
+ .iter()
+ .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
.zip(
request
.edited
- .iter::<Point>(&snapshot)
- .map(|point| point.row),
+ .iter()
+ .map(|anchor| anchor.summary::<Point>(&snapshot).row),
)
.collect::<BTreeMap<u32, u32>>();
@@ -947,7 +960,8 @@ impl Buffer {
if let Some(inserted) = request.inserted.as_ref() {
let inserted_row_ranges = contiguous_ranges(
inserted
- .ranges::<Point>(&snapshot)
+ .iter()
+ .map(|range| range.to_point(&snapshot))
.flat_map(|range| range.start.row..range.end.row + 1),
max_rows_between_yields,
);
@@ -1264,17 +1278,17 @@ impl Buffer {
self.pending_autoindent.take();
let autoindent_request = if autoindent && self.language.is_some() {
let before_edit = self.snapshot();
- let edited = self.anchor_set(
- Bias::Left,
- ranges.iter().filter_map(|range| {
+ let edited = ranges
+ .iter()
+ .filter_map(|range| {
let start = range.start.to_point(self);
if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
None
} else {
- Some(range.start)
+ Some(self.anchor_before(range.start))
}
- }),
- );
+ })
+ .collect();
Some((before_edit, edited))
} else {
None
@@ -1289,17 +1303,19 @@ impl Buffer {
let mut inserted = None;
if let Some(first_newline_ix) = first_newline_ix {
let mut delta = 0isize;
- inserted = Some(self.anchor_range_set(
- Bias::Left,
- Bias::Right,
- ranges.iter().map(|range| {
- let start = (delta + range.start as isize) as usize + first_newline_ix + 1;
- let end = (delta + range.start as isize) as usize + new_text_len;
- delta +=
- (range.end as isize - range.start as isize) + new_text_len as isize;
- start..end
- }),
- ));
+ inserted = Some(
+ ranges
+ .iter()
+ .map(|range| {
+ let start =
+ (delta + range.start as isize) as usize + first_newline_ix + 1;
+ let end = (delta + range.start as isize) as usize + new_text_len;
+ delta +=
+ (range.end as isize - range.start as isize) + new_text_len as isize;
+ self.anchor_before(start)..self.anchor_after(end)
+ })
+ .collect(),
+ );
}
let selection_set_ids = self
@@ -1401,17 +1417,23 @@ impl Buffer {
self.pending_autoindent.take();
let was_dirty = self.is_dirty();
let old_version = self.version.clone();
+ let mut deferred_ops = Vec::new();
let buffer_ops = ops
.into_iter()
.filter_map(|op| match op {
Operation::Buffer(op) => Some(op),
- Operation::UpdateDiagnostics(diagnostics) => {
- self.apply_diagnostic_update(diagnostics, cx);
+ _ => {
+ if self.can_apply_op(&op) {
+ self.apply_op(op, cx);
+ } else {
+ deferred_ops.push(op);
+ }
None
}
})
.collect::<Vec<_>>();
self.text.apply_ops(buffer_ops)?;
+ self.flush_deferred_ops(cx);
self.did_edit(&old_version, was_dirty, cx);
// Notify independently of whether the buffer was edited as the operations could include a
// selection update.
@@ -1419,12 +1441,49 @@ impl Buffer {
Ok(())
}
+ fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
+ let mut deferred_ops = Vec::new();
+ for op in self.deferred_ops.drain().iter().cloned() {
+ if self.can_apply_op(&op) {
+ self.apply_op(op, cx);
+ } else {
+ deferred_ops.push(op);
+ }
+ }
+ self.deferred_ops.insert(deferred_ops);
+ }
+
+ fn can_apply_op(&self, operation: &Operation) -> bool {
+ match operation {
+ Operation::Buffer(_) => {
+ unreachable!("buffer operations should never be applied at this layer")
+ }
+ Operation::UpdateDiagnostics { diagnostics, .. } => {
+ diagnostics.iter().all(|diagnostic| {
+ self.text.can_resolve(&diagnostic.range.start)
+ && self.text.can_resolve(&diagnostic.range.end)
+ })
+ }
+ }
+ }
+
+ fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
+ match operation {
+ Operation::Buffer(_) => {
+ unreachable!("buffer operations should never be applied at this layer")
+ }
+ Operation::UpdateDiagnostics { diagnostics, .. } => {
+ self.apply_diagnostic_update(diagnostics, cx);
+ }
+ }
+ }
+
fn apply_diagnostic_update(
&mut self,
- diagnostics: AnchorRangeMultimap<Diagnostic>,
+ diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
cx: &mut ModelContext<Self>,
) {
- self.diagnostics = diagnostics;
+ self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
self.diagnostics_update_count += 1;
cx.notify();
}
@@ -1632,19 +1691,19 @@ impl BufferSnapshot {
let mut highlights = None;
let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
if let Some(theme) = theme {
- for (_, range, diagnostic) in
- self.diagnostics
- .intersecting_ranges(range.clone(), self, true)
+ for entry in self
+ .diagnostics
+ .range::<_, usize>(range.clone(), self, true)
{
diagnostic_endpoints.push(DiagnosticEndpoint {
- offset: range.start,
+ offset: entry.range.start,
is_start: true,
- severity: diagnostic.severity,
+ severity: entry.diagnostic.severity,
});
diagnostic_endpoints.push(DiagnosticEndpoint {
- offset: range.end,
+ offset: entry.range.end,
is_start: false,
- severity: diagnostic.severity,
+ severity: entry.diagnostic.severity,
});
}
diagnostic_endpoints
@@ -1939,6 +1998,19 @@ impl ToPointUtf16 for lsp::Position {
}
}
+impl operation_queue::Operation for Operation {
+ fn lamport_timestamp(&self) -> clock::Lamport {
+ match self {
+ Operation::Buffer(_) => {
+ unreachable!("buffer operations should never be deferred at this layer")
+ }
+ Operation::UpdateDiagnostics {
+ lamport_timestamp, ..
+ } => *lamport_timestamp,
+ }
+ }
+}
+
fn diagnostic_ranges<'a>(
diagnostic: &'a lsp::Diagnostic,
abs_path: Option<&'a Path>,
@@ -1968,7 +2040,7 @@ fn diagnostic_ranges<'a>(
}
pub fn contiguous_ranges(
- values: impl IntoIterator<Item = u32>,
+ values: impl Iterator<Item = u32>,
max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
let mut values = values.into_iter();
@@ -0,0 +1,165 @@
+use crate::Diagnostic;
+use std::{
+ cmp::{Ordering, Reverse},
+ iter,
+ ops::Range,
+};
+use sum_tree::{self, Bias, SumTree};
+use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
+
+#[derive(Clone, Default)]
+pub struct DiagnosticSet {
+ diagnostics: SumTree<DiagnosticEntry<Anchor>>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct DiagnosticEntry<T> {
+ pub range: Range<T>,
+ pub diagnostic: Diagnostic,
+}
+
+#[derive(Clone, Debug)]
+pub struct Summary {
+ start: Anchor,
+ end: Anchor,
+ min_start: Anchor,
+ max_end: Anchor,
+ count: usize,
+}
+
+impl DiagnosticSet {
+ pub fn from_sorted_entries<I>(iter: I, buffer: &text::Snapshot) -> Self
+ where
+ I: IntoIterator<Item = DiagnosticEntry<Anchor>>,
+ {
+ Self {
+ diagnostics: SumTree::from_iter(iter, buffer),
+ }
+ }
+
+ pub fn reset<I>(&mut self, iter: I, buffer: &text::Snapshot)
+ where
+ I: IntoIterator<Item = DiagnosticEntry<PointUtf16>>,
+ {
+ let mut entries = iter.into_iter().collect::<Vec<_>>();
+ entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end)));
+ self.diagnostics = SumTree::from_iter(
+ entries.into_iter().map(|entry| DiagnosticEntry {
+ range: buffer.anchor_before(entry.range.start)
+ ..buffer.anchor_after(entry.range.end),
+ diagnostic: entry.diagnostic,
+ }),
+ buffer,
+ );
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = &DiagnosticEntry<Anchor>> {
+ self.diagnostics.iter()
+ }
+
+ pub fn range<'a, T, O>(
+ &'a self,
+ range: Range<T>,
+ buffer: &'a text::Snapshot,
+ inclusive: bool,
+ ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
+ where
+ T: 'a + ToOffset,
+ O: FromAnchor,
+ {
+ let end_bias = if inclusive { Bias::Right } else { Bias::Left };
+ let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias);
+ let mut cursor = self.diagnostics.filter::<_, ()>(
+ {
+ move |summary: &Summary| {
+ let start_cmp = range.start.cmp(&summary.max_end, buffer).unwrap();
+ let end_cmp = range.end.cmp(&summary.min_start, buffer).unwrap();
+ if inclusive {
+ start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
+ } else {
+ start_cmp == Ordering::Less && end_cmp == Ordering::Greater
+ }
+ }
+ },
+ buffer,
+ );
+
+ iter::from_fn({
+ move || {
+ if let Some(diagnostic) = cursor.item() {
+ cursor.next(buffer);
+ Some(diagnostic.resolve(buffer))
+ } else {
+ None
+ }
+ }
+ })
+ }
+
+ pub fn group<'a, O: FromAnchor>(
+ &'a self,
+ group_id: usize,
+ buffer: &'a text::Snapshot,
+ ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>> {
+ self.iter()
+ .filter(move |entry| entry.diagnostic.group_id == group_id)
+ .map(|entry| entry.resolve(buffer))
+ }
+}
+
+impl sum_tree::Item for DiagnosticEntry<Anchor> {
+ type Summary = Summary;
+
+ fn summary(&self) -> Self::Summary {
+ Summary {
+ start: self.range.start.clone(),
+ end: self.range.end.clone(),
+ min_start: self.range.start.clone(),
+ max_end: self.range.end.clone(),
+ count: 1,
+ }
+ }
+}
+
+impl DiagnosticEntry<Anchor> {
+ pub fn resolve<O: FromAnchor>(&self, buffer: &text::Snapshot) -> DiagnosticEntry<O> {
+ DiagnosticEntry {
+ range: O::from_anchor(&self.range.start, buffer)
+ ..O::from_anchor(&self.range.end, buffer),
+ diagnostic: self.diagnostic.clone(),
+ }
+ }
+}
+
+impl Default for Summary {
+ fn default() -> Self {
+ Self {
+ start: Anchor::min(),
+ end: Anchor::max(),
+ min_start: Anchor::max(),
+ max_end: Anchor::min(),
+ count: 0,
+ }
+ }
+}
+
+impl sum_tree::Summary for Summary {
+ type Context = text::Snapshot;
+
+ fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
+ if other
+ .min_start
+ .cmp(&self.min_start, buffer)
+ .unwrap()
+ .is_lt()
+ {
+ self.min_start = other.min_start.clone();
+ }
+ if other.max_end.cmp(&self.max_end, buffer).unwrap().is_gt() {
+ self.max_end = other.max_end.clone();
+ }
+ self.start = other.start.clone();
+ self.end = other.end.clone();
+ self.count += other.count;
+ }
+}
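
The new `DiagnosticSet` keeps its entries in a `SumTree` keyed by anchors, and each node's `Summary` records the minimum start and maximum end anchor beneath it, so `range` can prune whole subtrees that cannot intersect the query. A minimal sketch of how the API above fits together, assuming a `buffer: text::Snapshot` and a `diagnostic: Diagnostic` are already in scope (imports and paths elided):

```rust
// Sketch only: exercising the new DiagnosticSet API shown above.
let set = DiagnosticSet::from_sorted_entries(
    [DiagnosticEntry {
        // Anchors keep tracking the same text across later edits, unlike raw offsets.
        range: buffer.anchor_before(4_usize)..buffer.anchor_after(7_usize),
        diagnostic,
    }],
    &buffer,
);

// Entries intersecting a range, with anchors resolved back to offsets on the way out.
for entry in set.range::<_, usize>(0..buffer.len(), &buffer, true) {
    println!("{:?}: {}", entry.range, entry.diagnostic.message);
}

// All entries belonging to a single diagnostic group.
let group_entries: Vec<DiagnosticEntry<usize>> = set.group(0, &buffer).collect();
```
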
@@ -1,4 +1,5 @@
mod buffer;
+mod diagnostic_set;
mod highlight_map;
pub mod multi_buffer;
pub mod proto;
@@ -8,6 +9,7 @@ mod tests;
use anyhow::{anyhow, Result};
pub use buffer::Operation;
pub use buffer::*;
+pub use diagnostic_set::DiagnosticEntry;
use gpui::{executor::Background, AppContext};
use highlight_map::HighlightMap;
use lazy_static::lazy_static;
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use crate::{Diagnostic, Operation};
+use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, Operation};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use lsp::DiagnosticSeverity;
@@ -49,14 +49,13 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
replica_id: set_id.replica_id as u32,
local_timestamp: set_id.value,
lamport_timestamp: lamport_timestamp.value,
- version: selections.version().into(),
selections: selections
- .full_offset_ranges()
- .map(|(range, state)| proto::Selection {
- id: state.id as u64,
- start: range.start.0 as u64,
- end: range.end.0 as u64,
- reversed: state.reversed,
+ .iter()
+ .map(|selection| proto::Selection {
+ id: selection.id as u64,
+ start: Some(serialize_anchor(&selection.start)),
+ end: Some(serialize_anchor(&selection.end)),
+ reversed: selection.reversed,
})
.collect(),
}),
@@ -78,9 +77,14 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
lamport_timestamp: lamport_timestamp.value,
},
),
- Operation::UpdateDiagnostics(diagnostic_set) => {
- proto::operation::Variant::UpdateDiagnostics(serialize_diagnostics(diagnostic_set))
- }
+ Operation::UpdateDiagnostics {
+ diagnostics,
+ lamport_timestamp,
+ } => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics {
+ replica_id: lamport_timestamp.replica_id as u32,
+ lamport_timestamp: lamport_timestamp.value,
+ diagnostics: serialize_diagnostics(diagnostics.iter()),
+ }),
}),
}
}
@@ -105,44 +109,54 @@ pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::
}
pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet {
- let version = set.selections.version();
- let entries = set.selections.full_offset_ranges();
proto::SelectionSet {
replica_id: set.id.replica_id as u32,
lamport_timestamp: set.id.value as u32,
is_active: set.active,
- version: version.into(),
- selections: entries
- .map(|(range, state)| proto::Selection {
- id: state.id as u64,
- start: range.start.0 as u64,
- end: range.end.0 as u64,
- reversed: state.reversed,
+ selections: set
+ .selections
+ .iter()
+ .map(|selection| proto::Selection {
+ id: selection.id as u64,
+ start: Some(serialize_anchor(&selection.start)),
+ end: Some(serialize_anchor(&selection.end)),
+ reversed: selection.reversed,
})
.collect(),
}
}
-pub fn serialize_diagnostics(map: &AnchorRangeMultimap<Diagnostic>) -> proto::DiagnosticSet {
- proto::DiagnosticSet {
- version: map.version().into(),
- diagnostics: map
- .full_offset_ranges()
- .map(|(range, diagnostic)| proto::Diagnostic {
- start: range.start.0 as u64,
- end: range.end.0 as u64,
- message: diagnostic.message.clone(),
- severity: match diagnostic.severity {
- DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
- DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
- DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
- DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
- _ => proto::diagnostic::Severity::None,
- } as i32,
- group_id: diagnostic.group_id as u64,
- is_primary: diagnostic.is_primary,
- })
- .collect(),
+pub fn serialize_diagnostics<'a>(
+ diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<Anchor>>,
+) -> Vec<proto::Diagnostic> {
+ diagnostics
+ .into_iter()
+ .map(|entry| proto::Diagnostic {
+ start: Some(serialize_anchor(&entry.range.start)),
+ end: Some(serialize_anchor(&entry.range.end)),
+ message: entry.diagnostic.message.clone(),
+ severity: match entry.diagnostic.severity {
+ DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error,
+ DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning,
+ DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information,
+ DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint,
+ _ => proto::diagnostic::Severity::None,
+ } as i32,
+ group_id: entry.diagnostic.group_id as u64,
+ is_primary: entry.diagnostic.is_primary,
+ })
+ .collect()
+}
+
+fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
+ proto::Anchor {
+ replica_id: anchor.timestamp.replica_id as u32,
+ local_timestamp: anchor.timestamp.value,
+ offset: anchor.offset as u64,
+ bias: match anchor.bias {
+ Bias::Left => proto::Bias::Left as i32,
+ Bias::Right => proto::Bias::Right as i32,
+ },
}
}
@@ -187,27 +201,19 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
},
}),
proto::operation::Variant::UpdateSelections(message) => {
- let version = message.version.into();
- let entries = message
+ let selections = message
.selections
- .iter()
- .map(|selection| {
- let range = FullOffset(selection.start as usize)
- ..FullOffset(selection.end as usize);
- let state = SelectionState {
+ .into_iter()
+ .filter_map(|selection| {
+ Some(Selection {
id: selection.id as usize,
+ start: deserialize_anchor(selection.start?)?,
+ end: deserialize_anchor(selection.end?)?,
reversed: selection.reversed,
goal: SelectionGoal::None,
- };
- (range, state)
+ })
})
- .collect();
- let selections = AnchorRangeMap::from_full_offset_ranges(
- version,
- Bias::Left,
- Bias::Left,
- entries,
- );
+ .collect::<Vec<_>>();
Operation::Buffer(text::Operation::UpdateSelections {
set_id: clock::Lamport {
@@ -245,9 +251,13 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
},
})
}
- proto::operation::Variant::UpdateDiagnostics(message) => {
- Operation::UpdateDiagnostics(deserialize_diagnostics(message))
- }
+ proto::operation::Variant::UpdateDiagnostics(message) => Operation::UpdateDiagnostics {
+ diagnostics: Arc::from(deserialize_diagnostics(message.diagnostics)),
+ lamport_timestamp: clock::Lamport {
+ replica_id: message.replica_id as ReplicaId,
+ value: message.lamport_timestamp,
+ },
+ },
},
)
}
@@ -277,36 +287,32 @@ pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet {
value: set.lamport_timestamp,
},
active: set.is_active,
- selections: Arc::new(AnchorRangeMap::from_full_offset_ranges(
- set.version.into(),
- Bias::Left,
- Bias::Left,
+ selections: Arc::from(
set.selections
.into_iter()
- .map(|selection| {
- let range =
- FullOffset(selection.start as usize)..FullOffset(selection.end as usize);
- let state = SelectionState {
+ .filter_map(|selection| {
+ Some(Selection {
id: selection.id as usize,
+ start: deserialize_anchor(selection.start?)?,
+ end: deserialize_anchor(selection.end?)?,
reversed: selection.reversed,
goal: SelectionGoal::None,
- };
- (range, state)
+ })
})
- .collect(),
- )),
+ .collect::<Vec<_>>(),
+ ),
}
}
-pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMultimap<Diagnostic> {
- AnchorRangeMultimap::from_full_offset_ranges(
- message.version.into(),
- Bias::Left,
- Bias::Right,
- message.diagnostics.into_iter().filter_map(|diagnostic| {
- Some((
- FullOffset(diagnostic.start as usize)..FullOffset(diagnostic.end as usize),
- Diagnostic {
+pub fn deserialize_diagnostics(
+ diagnostics: Vec<proto::Diagnostic>,
+) -> Vec<DiagnosticEntry<Anchor>> {
+ diagnostics
+ .into_iter()
+ .filter_map(|diagnostic| {
+ Some(DiagnosticEntry {
+ range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?,
+ diagnostic: Diagnostic {
severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? {
proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR,
proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING,
@@ -318,7 +324,21 @@ pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMult
group_id: diagnostic.group_id as usize,
is_primary: diagnostic.is_primary,
},
- ))
- }),
- )
+ })
+ })
+ .collect()
+}
+
+fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
+ Some(Anchor {
+ timestamp: clock::Local {
+ replica_id: anchor.replica_id as ReplicaId,
+ value: anchor.local_timestamp,
+ },
+ offset: anchor.offset as usize,
+ bias: match proto::Bias::from_i32(anchor.bias)? {
+ proto::Bias::Left => Bias::Left,
+ proto::Bias::Right => Bias::Right,
+ },
+ })
}
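
With selections and diagnostics serialized as anchors, the wire format no longer needs the per-collection vector-clock `version`: each `proto::Anchor` carries the replica id and local timestamp of the insertion it sits in, plus an offset and bias. A small sketch of the intended round trip, assuming it lives inside this module since `serialize_anchor` and `deserialize_anchor` are private:

```rust
// Sketch: an Anchor should round-trip losslessly through proto::Anchor.
let anchor = Anchor {
    timestamp: clock::Local {
        replica_id: 1,
        value: 10,
    },
    offset: 42,
    bias: Bias::Right,
};
assert_eq!(deserialize_anchor(serialize_anchor(&anchor)), Some(anchor));
```
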
@@ -539,27 +539,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
// The diagnostics have moved down since they were created.
assert_eq!(
buffer
- .diagnostics_in_range(Point::new(3, 0)..Point::new(5, 0))
+ .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
.collect::<Vec<_>>(),
&[
- (
- Point::new(3, 9)..Point::new(3, 11),
- &Diagnostic {
+ DiagnosticEntry {
+ range: Point::new(3, 9)..Point::new(3, 11),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "undefined variable 'BB'".to_string(),
group_id: 1,
is_primary: true,
},
- ),
- (
- Point::new(4, 9)..Point::new(4, 12),
- &Diagnostic {
+ },
+ DiagnosticEntry {
+ range: Point::new(4, 9)..Point::new(4, 12),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "undefined variable 'CCC'".to_string(),
group_id: 2,
is_primary: true,
}
- )
+ }
]
);
assert_eq!(
@@ -606,27 +606,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
.unwrap();
assert_eq!(
buffer
- .diagnostics_in_range(Point::new(2, 0)..Point::new(3, 0))
+ .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
.collect::<Vec<_>>(),
&[
- (
- Point::new(2, 9)..Point::new(2, 12),
- &Diagnostic {
+ DiagnosticEntry {
+ range: Point::new(2, 9)..Point::new(2, 12),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::WARNING,
message: "unreachable statement".to_string(),
group_id: 1,
is_primary: true,
}
- ),
- (
- Point::new(2, 9)..Point::new(2, 10),
- &Diagnostic {
+ },
+ DiagnosticEntry {
+ range: Point::new(2, 9)..Point::new(2, 10),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "undefined variable 'A'".to_string(),
group_id: 0,
is_primary: true,
},
- )
+ }
]
);
assert_eq!(
@@ -685,27 +685,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
.unwrap();
assert_eq!(
buffer
- .diagnostics_in_range(0..buffer.len())
+ .diagnostics_in_range::<_, Point>(0..buffer.len())
.collect::<Vec<_>>(),
&[
- (
- Point::new(2, 21)..Point::new(2, 22),
- &Diagnostic {
+ DiagnosticEntry {
+ range: Point::new(2, 21)..Point::new(2, 22),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "undefined variable 'A'".to_string(),
group_id: 0,
is_primary: true,
}
- ),
- (
- Point::new(3, 9)..Point::new(3, 11),
- &Diagnostic {
+ },
+ DiagnosticEntry {
+ range: Point::new(3, 9)..Point::new(3, 11),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "undefined variable 'BB'".to_string(),
group_id: 1,
is_primary: true,
},
- )
+ }
]
);
});
@@ -873,107 +873,107 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) {
.diagnostics_in_range::<_, Point>(0..buffer.len())
.collect::<Vec<_>>(),
&[
- (
- Point::new(1, 8)..Point::new(1, 9),
- &Diagnostic {
+ DiagnosticEntry {
+ range: Point::new(1, 8)..Point::new(1, 9),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::WARNING,
message: "error 1".to_string(),
group_id: 0,
is_primary: true,
}
- ),
- (
- Point::new(1, 8)..Point::new(1, 9),
- &Diagnostic {
+ },
+ DiagnosticEntry {
+ range: Point::new(1, 8)..Point::new(1, 9),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: "error 1 hint 1".to_string(),
group_id: 0,
is_primary: false,
}
- ),
- (
- Point::new(1, 13)..Point::new(1, 15),
- &Diagnostic {
+ },
+ DiagnosticEntry {
+ range: Point::new(1, 13)..Point::new(1, 15),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: "error 2 hint 1".to_string(),
group_id: 1,
is_primary: false,
}
- ),
- (
- Point::new(1, 13)..Point::new(1, 15),
- &Diagnostic {
+ },
+ DiagnosticEntry {
+ range: Point::new(1, 13)..Point::new(1, 15),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: "error 2 hint 2".to_string(),
group_id: 1,
is_primary: false,
}
- ),
- (
- Point::new(2, 8)..Point::new(2, 17),
- &Diagnostic {
+ },
+ DiagnosticEntry {
+ range: Point::new(2, 8)..Point::new(2, 17),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "error 2".to_string(),
group_id: 1,
is_primary: true,
}
- )
+ }
]
);
assert_eq!(
- buffer.diagnostic_group(0).collect::<Vec<_>>(),
+ buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
&[
- (
- Point::new(1, 8)..Point::new(1, 9),
- &Diagnostic {
+ DiagnosticEntry {
+ range: Point::new(1, 8)..Point::new(1, 9),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::WARNING,
message: "error 1".to_string(),
group_id: 0,
is_primary: true,
}
- ),
- (
- Point::new(1, 8)..Point::new(1, 9),
- &Diagnostic {
+ },
+ DiagnosticEntry {
+ range: Point::new(1, 8)..Point::new(1, 9),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: "error 1 hint 1".to_string(),
group_id: 0,
is_primary: false,
}
- ),
+ },
]
);
assert_eq!(
- buffer.diagnostic_group(1).collect::<Vec<_>>(),
+ buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
&[
- (
- Point::new(1, 13)..Point::new(1, 15),
- &Diagnostic {
+ DiagnosticEntry {
+ range: Point::new(1, 13)..Point::new(1, 15),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: "error 2 hint 1".to_string(),
group_id: 1,
is_primary: false,
}
- ),
- (
- Point::new(1, 13)..Point::new(1, 15),
- &Diagnostic {
+ },
+ DiagnosticEntry {
+ range: Point::new(1, 13)..Point::new(1, 15),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::HINT,
message: "error 2 hint 2".to_string(),
group_id: 1,
is_primary: false,
}
- ),
- (
- Point::new(2, 8)..Point::new(2, 17),
- &Diagnostic {
+ },
+ DiagnosticEntry {
+ range: Point::new(2, 8)..Point::new(2, 17),
+ diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "error 2".to_string(),
group_id: 1,
is_primary: true,
}
- )
+ }
]
);
@@ -1002,13 +1002,17 @@ fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
#[test]
fn test_contiguous_ranges() {
assert_eq!(
- contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12], 100).collect::<Vec<_>>(),
+ contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
&[1..4, 5..7, 9..13]
);
// Respects the `max_len` parameter
assert_eq!(
- contiguous_ranges([2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31], 3).collect::<Vec<_>>(),
+ contiguous_ranges(
+ [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
+ 3
+ )
+ .collect::<Vec<_>>(),
&[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
);
}
@@ -3005,7 +3005,7 @@ mod tests {
use anyhow::Result;
use client::test::{FakeHttpClient, FakeServer};
use fs::RealFs;
- use language::{tree_sitter_rust, LanguageServerConfig};
+ use language::{tree_sitter_rust, DiagnosticEntry, LanguageServerConfig};
use language::{Diagnostic, LanguageConfig};
use lsp::Url;
use rand::prelude::*;
@@ -3721,19 +3721,19 @@ mod tests {
buffer.read_with(&cx, |buffer, _| {
let diagnostics = buffer
- .diagnostics_in_range(0..buffer.len())
+ .diagnostics_in_range::<_, Point>(0..buffer.len())
.collect::<Vec<_>>();
assert_eq!(
diagnostics,
- &[(
- Point::new(0, 9)..Point::new(0, 10),
- &Diagnostic {
+ &[DiagnosticEntry {
+ range: Point::new(0, 9)..Point::new(0, 10),
+ diagnostic: Diagnostic {
severity: lsp::DiagnosticSeverity::ERROR,
message: "undefined variable 'A'".to_string(),
group_id: 0,
is_primary: true
}
- )]
+ }]
)
});
}
@@ -229,32 +229,44 @@ message Buffer {
string content = 2;
repeated Operation.Edit history = 3;
repeated SelectionSet selections = 4;
- DiagnosticSet diagnostics = 5;
+ repeated Diagnostic diagnostics = 5;
}
message SelectionSet {
uint32 replica_id = 1;
uint32 lamport_timestamp = 2;
bool is_active = 3;
- repeated VectorClockEntry version = 4;
- repeated Selection selections = 5;
+ repeated Selection selections = 4;
}
message Selection {
uint64 id = 1;
- uint64 start = 2;
- uint64 end = 3;
+ Anchor start = 2;
+ Anchor end = 3;
bool reversed = 4;
}
-message DiagnosticSet {
- repeated VectorClockEntry version = 1;
- repeated Diagnostic diagnostics = 2;
+message Anchor {
+ uint32 replica_id = 1;
+ uint32 local_timestamp = 2;
+ uint64 offset = 3;
+ Bias bias = 4;
+}
+
+enum Bias {
+ Left = 0;
+ Right = 1;
+}
+
+message UpdateDiagnostics {
+ uint32 replica_id = 1;
+ uint32 lamport_timestamp = 2;
+ repeated Diagnostic diagnostics = 3;
}
message Diagnostic {
- uint64 start = 1;
- uint64 end = 2;
+ Anchor start = 1;
+ Anchor end = 2;
Severity severity = 3;
string message = 4;
uint64 group_id = 5;
@@ -268,8 +280,6 @@ message Diagnostic {
}
}
-
-
message Operation {
oneof variant {
Edit edit = 1;
@@ -277,7 +287,7 @@ message Operation {
UpdateSelections update_selections = 3;
RemoveSelections remove_selections = 4;
SetActiveSelections set_active_selections = 5;
- DiagnosticSet update_diagnostics = 6;
+ UpdateDiagnostics update_diagnostics = 6;
}
message Edit {
@@ -308,8 +318,7 @@ message Operation {
uint32 replica_id = 1;
uint32 local_timestamp = 2;
uint32 lamport_timestamp = 3;
- repeated VectorClockEntry version = 4;
- repeated Selection selections = 5;
+ repeated Selection selections = 4;
}
message RemoveSelections {
@@ -400,7 +400,7 @@ mod tests {
content: "path/one content".to_string(),
history: vec![],
selections: vec![],
- diagnostics: None,
+ diagnostics: vec![],
}),
}
);
@@ -422,7 +422,7 @@ mod tests {
content: "path/two content".to_string(),
history: vec![],
selections: vec![],
- diagnostics: None,
+ diagnostics: vec![],
}),
}
);
@@ -453,7 +453,7 @@ mod tests {
content: "path/one content".to_string(),
history: vec![],
selections: vec![],
- diagnostics: None,
+ diagnostics: vec![],
}),
}
}
@@ -465,7 +465,7 @@ mod tests {
content: "path/two content".to_string(),
history: vec![],
selections: vec![],
- diagnostics: None,
+ diagnostics: vec![],
}),
}
}
@@ -208,9 +208,25 @@ impl RepoClient {
"Authorization",
self.installation_token_header(false).await?,
);
- let client = surf::client().with(surf::middleware::Redirect::new(5));
+
+ let client = surf::client();
let mut response = client.send(request).await?;
+ // Avoid using `surf::middleware::Redirect` because that type forwards
+ // the original request headers to the redirect URI. In this case, the
+ // redirect will be to S3, which forbids us from supplying an
+ // `Authorization` header.
+ if response.status().is_redirection() {
+ if let Some(url) = response.header("location") {
+ let request = surf::get(url.as_str()).header("Accept", "application/octet-stream");
+ response = client.send(request).await?;
+ }
+ }
+
+ if !response.status().is_success() {
+ Err(anyhow!("failed to fetch release asset {} {}", tag, name))?;
+ }
+
Ok(response.take_body())
}
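
The comment above captures the constraint: the release-asset redirect points at S3, which forbids supplying an `Authorization` header, so the redirect has to be followed with a fresh request rather than via `surf::middleware::Redirect`. An illustrative sketch of that single-hop follow factored into a helper (the function name is hypothetical; the `surf` calls are the same ones used above):

```rust
// Illustrative helper, not part of the change: follow one redirect with a brand-new
// request so the original Authorization header is not forwarded to the target.
async fn follow_one_redirect(
    client: &surf::Client,
    mut response: surf::Response,
) -> surf::Result<surf::Response> {
    if response.status().is_redirection() {
        if let Some(url) = response.header("location") {
            let request = surf::get(url.as_str()).header("Accept", "application/octet-stream");
            response = client.send(request).await?;
        }
    }
    Ok(response)
}
```
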
@@ -1705,27 +1705,27 @@ mod tests {
buffer_b.read_with(&cx_b, |buffer, _| {
assert_eq!(
buffer
- .diagnostics_in_range(0..buffer.len())
+ .diagnostics_in_range::<_, Point>(0..buffer.len())
.collect::<Vec<_>>(),
&[
- (
- Point::new(0, 4)..Point::new(0, 7),
- &Diagnostic {
+ DiagnosticEntry {
+ range: Point::new(0, 4)..Point::new(0, 7),
+ diagnostic: Diagnostic {
group_id: 0,
message: "message 1".to_string(),
severity: lsp::DiagnosticSeverity::ERROR,
is_primary: true
}
- ),
- (
- Point::new(0, 10)..Point::new(0, 13),
- &Diagnostic {
+ },
+ DiagnosticEntry {
+ range: Point::new(0, 10)..Point::new(0, 13),
+ diagnostic: Diagnostic {
group_id: 1,
severity: lsp::DiagnosticSeverity::WARNING,
message: "message 2".to_string(),
is_primary: true
}
- )
+ }
]
);
});
@@ -18,6 +18,11 @@ pub struct Cursor<'a, T: Item, D> {
at_end: bool,
}
+pub struct Iter<'a, T: Item> {
+ tree: &'a SumTree<T>,
+ stack: ArrayVec<StackEntry<'a, T, ()>, 16>,
+}
+
impl<'a, T, D> Cursor<'a, T, D>
where
T: Item,
@@ -487,6 +492,71 @@ where
}
}
+impl<'a, T: Item> Iter<'a, T> {
+ pub(crate) fn new(tree: &'a SumTree<T>) -> Self {
+ Self {
+ tree,
+ stack: Default::default(),
+ }
+ }
+}
+
+impl<'a, T: Item> Iterator for Iter<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let mut descend = false;
+
+ if self.stack.is_empty() {
+ self.stack.push(StackEntry {
+ tree: self.tree,
+ index: 0,
+ position: (),
+ });
+ descend = true;
+ }
+
+ while self.stack.len() > 0 {
+ let new_subtree = {
+ let entry = self.stack.last_mut().unwrap();
+ match entry.tree.0.as_ref() {
+ Node::Internal { child_trees, .. } => {
+ if !descend {
+ entry.index += 1;
+ }
+ child_trees.get(entry.index)
+ }
+ Node::Leaf { items, .. } => {
+ if !descend {
+ entry.index += 1;
+ }
+
+ if let Some(next_item) = items.get(entry.index) {
+ return Some(next_item);
+ } else {
+ None
+ }
+ }
+ }
+ };
+
+ if let Some(subtree) = new_subtree {
+ descend = true;
+ self.stack.push(StackEntry {
+ tree: subtree,
+ index: 0,
+ position: (),
+ });
+ } else {
+ descend = false;
+ self.stack.pop();
+ }
+ }
+
+ None
+ }
+}
+
impl<'a, T, S, D> Iterator for Cursor<'a, T, D>
where
T: Item<Summary = S>,
@@ -1,8 +1,7 @@
mod cursor;
use arrayvec::ArrayVec;
-pub use cursor::Cursor;
-pub use cursor::FilterCursor;
+pub use cursor::{Cursor, FilterCursor, Iter};
use std::marker::PhantomData;
use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc};
@@ -156,6 +155,10 @@ impl<T: Item> SumTree<T> {
items
}
+ pub fn iter(&self) -> Iter<T> {
+ Iter::new(self)
+ }
+
pub fn cursor<'a, S>(&'a self) -> Cursor<T, S>
where
S: Dimension<'a, T::Summary>,
@@ -722,6 +725,10 @@ mod tests {
};
assert_eq!(tree.items(&()), reference_items);
+ assert_eq!(
+ tree.iter().collect::<Vec<_>>(),
+ tree.cursor::<()>().collect::<Vec<_>>()
+ );
let mut filter_cursor =
tree.filter::<_, Count>(|summary| summary.contains_even, &());
@@ -1,94 +1,36 @@
-use super::{FromAnchor, FullOffset, Point, ToOffset};
+use super::{Point, ToOffset};
use crate::{rope::TextDimension, BufferSnapshot};
use anyhow::Result;
-use std::{
- cmp::Ordering,
- fmt::{Debug, Formatter},
- ops::Range,
-};
-use sum_tree::{Bias, SumTree};
+use std::{cmp::Ordering, fmt::Debug, ops::Range};
+use sum_tree::Bias;
#[derive(Clone, Eq, PartialEq, Debug, Hash)]
pub struct Anchor {
- pub full_offset: FullOffset,
+ pub timestamp: clock::Local,
+ pub offset: usize,
pub bias: Bias,
- pub version: clock::Global,
-}
-
-#[derive(Clone)]
-pub struct AnchorMap<T> {
- pub(crate) version: clock::Global,
- pub(crate) bias: Bias,
- pub(crate) entries: Vec<(FullOffset, T)>,
-}
-
-#[derive(Clone)]
-pub struct AnchorSet(pub(crate) AnchorMap<()>);
-
-#[derive(Clone)]
-pub struct AnchorRangeMap<T> {
- pub(crate) version: clock::Global,
- pub(crate) entries: Vec<(Range<FullOffset>, T)>,
- pub(crate) start_bias: Bias,
- pub(crate) end_bias: Bias,
-}
-
-#[derive(Clone)]
-pub struct AnchorRangeSet(pub(crate) AnchorRangeMap<()>);
-
-#[derive(Clone)]
-pub struct AnchorRangeMultimap<T: Clone> {
- pub(crate) entries: SumTree<AnchorRangeMultimapEntry<T>>,
- pub(crate) version: clock::Global,
- pub(crate) start_bias: Bias,
- pub(crate) end_bias: Bias,
-}
-
-#[derive(Clone)]
-pub(crate) struct AnchorRangeMultimapEntry<T> {
- pub(crate) range: FullOffsetRange,
- pub(crate) value: T,
-}
-
-#[derive(Clone, Debug)]
-pub(crate) struct FullOffsetRange {
- pub(crate) start: FullOffset,
- pub(crate) end: FullOffset,
-}
-
-#[derive(Clone, Debug)]
-pub(crate) struct AnchorRangeMultimapSummary {
- start: FullOffset,
- end: FullOffset,
- min_start: FullOffset,
- max_end: FullOffset,
- count: usize,
}
impl Anchor {
pub fn min() -> Self {
Self {
- full_offset: FullOffset(0),
+ timestamp: clock::Local::MIN,
+ offset: usize::MIN,
bias: Bias::Left,
- version: Default::default(),
}
}
pub fn max() -> Self {
Self {
- full_offset: FullOffset::MAX,
+ timestamp: clock::Local::MAX,
+ offset: usize::MAX,
bias: Bias::Right,
- version: Default::default(),
}
}
pub fn cmp<'a>(&self, other: &Anchor, buffer: &BufferSnapshot) -> Result<Ordering> {
- if self == other {
- return Ok(Ordering::Equal);
- }
-
- let offset_comparison = if self.version == other.version {
- self.full_offset.cmp(&other.full_offset)
+ let offset_comparison = if self.timestamp == other.timestamp {
+ self.offset.cmp(&other.offset)
} else {
buffer
.full_offset_for_anchor(self)
@@ -122,455 +64,10 @@ impl Anchor {
}
}
-impl<T> AnchorMap<T> {
- pub fn version(&self) -> &clock::Global {
- &self.version
- }
-
- pub fn len(&self) -> usize {
- self.entries.len()
- }
-
- pub fn iter<'a, D>(
- &'a self,
- snapshot: &'a BufferSnapshot,
- ) -> impl Iterator<Item = (D, &'a T)> + 'a
- where
- D: TextDimension,
- {
- snapshot
- .summaries_for_anchors(
- self.version.clone(),
- self.bias,
- self.entries.iter().map(|e| &e.0),
- )
- .zip(self.entries.iter().map(|e| &e.1))
- }
-}
-
-impl AnchorSet {
- pub fn version(&self) -> &clock::Global {
- &self.0.version
- }
-
- pub fn len(&self) -> usize {
- self.0.len()
- }
-
- pub fn iter<'a, D>(&'a self, content: &'a BufferSnapshot) -> impl Iterator<Item = D> + 'a
- where
- D: TextDimension,
- {
- self.0.iter(content).map(|(position, _)| position)
- }
-}
-
-impl<T> AnchorRangeMap<T> {
- pub fn version(&self) -> &clock::Global {
- &self.version
- }
-
- pub fn len(&self) -> usize {
- self.entries.len()
- }
-
- pub fn from_full_offset_ranges(
- version: clock::Global,
- start_bias: Bias,
- end_bias: Bias,
- entries: Vec<(Range<FullOffset>, T)>,
- ) -> Self {
- Self {
- version,
- start_bias,
- end_bias,
- entries,
- }
- }
-
- pub fn ranges<'a, D>(
- &'a self,
- content: &'a BufferSnapshot,
- ) -> impl Iterator<Item = (Range<D>, &'a T)> + 'a
- where
- D: TextDimension,
- {
- content
- .summaries_for_anchor_ranges(
- self.version.clone(),
- self.start_bias,
- self.end_bias,
- self.entries.iter().map(|e| &e.0),
- )
- .zip(self.entries.iter().map(|e| &e.1))
- }
-
- pub fn intersecting_ranges<'a, D, I>(
- &'a self,
- range: Range<(I, Bias)>,
- content: &'a BufferSnapshot,
- ) -> impl Iterator<Item = (Range<D>, &'a T)> + 'a
- where
- D: TextDimension,
- I: ToOffset,
- {
- let range = content.anchor_at(range.start.0, range.start.1)
- ..content.anchor_at(range.end.0, range.end.1);
-
- let mut probe_anchor = Anchor {
- full_offset: Default::default(),
- bias: self.start_bias,
- version: self.version.clone(),
- };
- let start_ix = self.entries.binary_search_by(|probe| {
- probe_anchor.full_offset = probe.0.end;
- probe_anchor.cmp(&range.start, &content).unwrap()
- });
-
- match start_ix {
- Ok(start_ix) | Err(start_ix) => content
- .summaries_for_anchor_ranges(
- self.version.clone(),
- self.start_bias,
- self.end_bias,
- self.entries[start_ix..].iter().map(|e| &e.0),
- )
- .zip(self.entries.iter().map(|e| &e.1)),
- }
- }
-
- pub fn full_offset_ranges(&self) -> impl Iterator<Item = &(Range<FullOffset>, T)> {
- self.entries.iter()
- }
-
- pub fn min_by_key<'a, D, F, K>(
- &self,
- content: &'a BufferSnapshot,
- mut extract_key: F,
- ) -> Option<(Range<D>, &T)>
- where
- D: TextDimension,
- F: FnMut(&T) -> K,
- K: Ord,
- {
- self.entries
- .iter()
- .min_by_key(|(_, value)| extract_key(value))
- .map(|(range, value)| (self.resolve_range(range, &content), value))
- }
-
- pub fn max_by_key<'a, D, F, K>(
- &self,
- content: &'a BufferSnapshot,
- mut extract_key: F,
- ) -> Option<(Range<D>, &T)>
- where
- D: TextDimension,
- F: FnMut(&T) -> K,
- K: Ord,
- {
- self.entries
- .iter()
- .max_by_key(|(_, value)| extract_key(value))
- .map(|(range, value)| (self.resolve_range(range, &content), value))
- }
-
- fn resolve_range<'a, D>(
- &self,
- range: &Range<FullOffset>,
- content: &'a BufferSnapshot,
- ) -> Range<D>
- where
- D: TextDimension,
- {
- let mut anchor = Anchor {
- full_offset: range.start,
- bias: self.start_bias,
- version: self.version.clone(),
- };
- let start = content.summary_for_anchor(&anchor);
-
- anchor.full_offset = range.end;
- anchor.bias = self.end_bias;
- let end = content.summary_for_anchor(&anchor);
-
- start..end
- }
-}
-
-impl<T: PartialEq> PartialEq for AnchorRangeMap<T> {
- fn eq(&self, other: &Self) -> bool {
- self.version == other.version && self.entries == other.entries
- }
-}
-
-impl<T: Eq> Eq for AnchorRangeMap<T> {}
-
-impl<T: Debug> Debug for AnchorRangeMap<T> {
- fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
- let mut f = f.debug_map();
- for (range, value) in &self.entries {
- f.key(range);
- f.value(value);
- }
- f.finish()
- }
-}
-
-impl Debug for AnchorRangeSet {
- fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
- let mut f = f.debug_set();
- for (range, _) in &self.0.entries {
- f.entry(range);
- }
- f.finish()
- }
-}
-
-impl AnchorRangeSet {
- pub fn len(&self) -> usize {
- self.0.len()
- }
-
- pub fn version(&self) -> &clock::Global {
- self.0.version()
- }
-
- pub fn ranges<'a, D>(
- &'a self,
- content: &'a BufferSnapshot,
- ) -> impl 'a + Iterator<Item = Range<Point>>
- where
- D: TextDimension,
- {
- self.0.ranges(content).map(|(range, _)| range)
- }
-}
-
-impl<T: Clone> Default for AnchorRangeMultimap<T> {
- fn default() -> Self {
- Self {
- entries: Default::default(),
- version: Default::default(),
- start_bias: Bias::Left,
- end_bias: Bias::Left,
- }
- }
-}
-
-impl<T: Clone> AnchorRangeMultimap<T> {
- pub fn version(&self) -> &clock::Global {
- &self.version
- }
-
- pub fn intersecting_ranges<'a, I, O>(
- &'a self,
- range: Range<I>,
- content: &'a BufferSnapshot,
- inclusive: bool,
- ) -> impl Iterator<Item = (usize, Range<O>, &T)> + 'a
- where
- I: ToOffset,
- O: FromAnchor,
- {
- let end_bias = if inclusive { Bias::Right } else { Bias::Left };
- let range = range.start.to_full_offset(&content, Bias::Left)
- ..range.end.to_full_offset(&content, end_bias);
- let mut cursor = self.entries.filter::<_, usize>(
- {
- let mut endpoint = Anchor {
- full_offset: FullOffset(0),
- bias: Bias::Right,
- version: self.version.clone(),
- };
- move |summary: &AnchorRangeMultimapSummary| {
- endpoint.full_offset = summary.max_end;
- endpoint.bias = self.end_bias;
- let max_end = endpoint.to_full_offset(&content, self.end_bias);
- let start_cmp = range.start.cmp(&max_end);
-
- endpoint.full_offset = summary.min_start;
- endpoint.bias = self.start_bias;
- let min_start = endpoint.to_full_offset(&content, self.start_bias);
- let end_cmp = range.end.cmp(&min_start);
-
- if inclusive {
- start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal
- } else {
- start_cmp == Ordering::Less && end_cmp == Ordering::Greater
- }
- }
- },
- &(),
- );
-
- std::iter::from_fn({
- let mut endpoint = Anchor {
- full_offset: FullOffset(0),
- bias: Bias::Left,
- version: self.version.clone(),
- };
- move || {
- if let Some(item) = cursor.item() {
- let ix = *cursor.start();
- endpoint.full_offset = item.range.start;
- endpoint.bias = self.start_bias;
- let start = O::from_anchor(&endpoint, &content);
- endpoint.full_offset = item.range.end;
- endpoint.bias = self.end_bias;
- let end = O::from_anchor(&endpoint, &content);
- let value = &item.value;
- cursor.next(&());
- Some((ix, start..end, value))
- } else {
- None
- }
- }
- })
- }
-
- pub fn from_full_offset_ranges(
- version: clock::Global,
- start_bias: Bias,
- end_bias: Bias,
- entries: impl Iterator<Item = (Range<FullOffset>, T)>,
- ) -> Self {
- Self {
- version,
- start_bias,
- end_bias,
- entries: SumTree::from_iter(
- entries.map(|(range, value)| AnchorRangeMultimapEntry {
- range: FullOffsetRange {
- start: range.start,
- end: range.end,
- },
- value,
- }),
- &(),
- ),
- }
- }
-
- pub fn full_offset_ranges(&self) -> impl Iterator<Item = (Range<FullOffset>, &T)> {
- self.entries
- .cursor::<()>()
- .map(|entry| (entry.range.start..entry.range.end, &entry.value))
- }
-
- pub fn filter<'a, O, F>(
- &'a self,
- content: &'a BufferSnapshot,
- mut f: F,
- ) -> impl 'a + Iterator<Item = (usize, Range<O>, &T)>
- where
- O: FromAnchor,
- F: 'a + FnMut(&'a T) -> bool,
- {
- let mut endpoint = Anchor {
- full_offset: FullOffset(0),
- bias: Bias::Left,
- version: self.version.clone(),
- };
- self.entries
- .cursor::<()>()
- .enumerate()
- .filter_map(move |(ix, entry)| {
- if f(&entry.value) {
- endpoint.full_offset = entry.range.start;
- endpoint.bias = self.start_bias;
- let start = O::from_anchor(&endpoint, &content);
- endpoint.full_offset = entry.range.end;
- endpoint.bias = self.end_bias;
- let end = O::from_anchor(&endpoint, &content);
- Some((ix, start..end, &entry.value))
- } else {
- None
- }
- })
- }
-}
-
-impl<T: Clone> sum_tree::Item for AnchorRangeMultimapEntry<T> {
- type Summary = AnchorRangeMultimapSummary;
-
- fn summary(&self) -> Self::Summary {
- AnchorRangeMultimapSummary {
- start: self.range.start,
- end: self.range.end,
- min_start: self.range.start,
- max_end: self.range.end,
- count: 1,
- }
- }
-}
-
-impl Default for AnchorRangeMultimapSummary {
- fn default() -> Self {
- Self {
- start: FullOffset(0),
- end: FullOffset::MAX,
- min_start: FullOffset::MAX,
- max_end: FullOffset(0),
- count: 0,
- }
- }
-}
-
-impl sum_tree::Summary for AnchorRangeMultimapSummary {
- type Context = ();
-
- fn add_summary(&mut self, other: &Self, _: &Self::Context) {
- self.min_start = self.min_start.min(other.min_start);
- self.max_end = self.max_end.max(other.max_end);
-
- #[cfg(debug_assertions)]
- {
- let start_comparison = self.start.cmp(&other.start);
- assert!(start_comparison <= Ordering::Equal);
- if start_comparison == Ordering::Equal {
- assert!(self.end.cmp(&other.end) >= Ordering::Equal);
- }
- }
-
- self.start = other.start;
- self.end = other.end;
- self.count += other.count;
- }
-}
-
-impl Default for FullOffsetRange {
- fn default() -> Self {
- Self {
- start: FullOffset(0),
- end: FullOffset::MAX,
- }
- }
-}
-
-impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for usize {
- fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) {
- *self += summary.count;
- }
-}
-
-impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for FullOffsetRange {
- fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) {
- self.start = summary.start;
- self.end = summary.end;
- }
-}
-
-impl<'a> sum_tree::SeekTarget<'a, AnchorRangeMultimapSummary, FullOffsetRange> for FullOffsetRange {
- fn cmp(&self, cursor_location: &FullOffsetRange, _: &()) -> Ordering {
- Ord::cmp(&self.start, &cursor_location.start)
- .then_with(|| Ord::cmp(&cursor_location.end, &self.end))
- }
-}
-
pub trait AnchorRangeExt {
fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering>;
fn to_offset(&self, content: &BufferSnapshot) -> Range<usize>;
+ fn to_point(&self, content: &BufferSnapshot) -> Range<Point>;
}
impl AnchorRangeExt for Range<Anchor> {
@@ -584,4 +81,8 @@ impl AnchorRangeExt for Range<Anchor> {
fn to_offset(&self, content: &BufferSnapshot) -> Range<usize> {
self.start.to_offset(&content)..self.end.to_offset(&content)
}
+
+ fn to_point(&self, content: &BufferSnapshot) -> Range<Point> {
+ self.start.summary::<Point>(&content)..self.end.summary::<Point>(&content)
+ }
}
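
The extension trait keeps range resolution symmetrical with single-anchor resolution: both endpoints resolve against the same snapshot. A small usage sketch, assuming Point exposes a public row field as elsewhere in this crate:

    // Hedged usage sketch of AnchorRangeExt: count how many rows an anchored
    // range spans in the current state of the buffer.
    fn range_rows(range: &std::ops::Range<Anchor>, snapshot: &BufferSnapshot) -> u32 {
        let points = range.to_point(snapshot);
        points.end.row - points.start.row
    }
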
@@ -0,0 +1,83 @@
+use smallvec::{smallvec, SmallVec};
+use std::iter;
+
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Locator(SmallVec<[u64; 4]>);
+
+impl Locator {
+ pub fn min() -> Self {
+ Self(smallvec![u64::MIN])
+ }
+
+ pub fn max() -> Self {
+ Self(smallvec![u64::MAX])
+ }
+
+ pub fn assign(&mut self, other: &Self) {
+ self.0.resize(other.0.len(), 0);
+ self.0.copy_from_slice(&other.0);
+ }
+
+ pub fn between(lhs: &Self, rhs: &Self) -> Self {
+ let lhs = lhs.0.iter().copied().chain(iter::repeat(u64::MIN));
+ let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX));
+ let mut location = SmallVec::new();
+ for (lhs, rhs) in lhs.zip(rhs) {
+ let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48);
+ location.push(mid);
+ if mid > lhs {
+ break;
+ }
+ }
+ Self(location)
+ }
+
+ pub fn len(&self) -> usize {
+ self.0.len()
+ }
+}
+
+impl Default for Locator {
+ fn default() -> Self {
+ Self::min()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use rand::prelude::*;
+ use std::mem;
+
+ #[gpui::test(iterations = 100)]
+ fn test_locators(mut rng: StdRng) {
+ let mut lhs = Default::default();
+ let mut rhs = Default::default();
+ while lhs == rhs {
+ lhs = Locator(
+ (0..rng.gen_range(1..=5))
+ .map(|_| rng.gen_range(0..=100))
+ .collect(),
+ );
+ rhs = Locator(
+ (0..rng.gen_range(1..=5))
+ .map(|_| rng.gen_range(0..=100))
+ .collect(),
+ );
+ }
+
+ if lhs > rhs {
+ mem::swap(&mut lhs, &mut rhs);
+ }
+
+ let middle = Locator::between(&lhs, &rhs);
+ assert!(middle > lhs);
+ assert!(middle < rhs);
+ for ix in 0..middle.0.len() - 1 {
+ assert!(
+ middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0)
+ || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0)
+ );
+ }
+ }
+}
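
The new Locator type gives every fragment a dense, totally ordered identifier: a sequence of u64 digits compared lexicographically, where Locator::between can always mint an id strictly between two neighbors. The standalone sketch below uses plain Vec<u64> instead of the crate's SmallVec newtype; it mirrors the digit rule above, where the >> 48 bias keeps fresh ids close to the left neighbor so repeated insertions at the same spot leave room to their right.

    use std::iter;

    // Standalone sketch of the Locator::between digit rule. Comparison is
    // lexicographic, exactly like the derived Ord on Locator.
    fn between(lhs: &[u64], rhs: &[u64]) -> Vec<u64> {
        let lhs_digits = lhs.iter().copied().chain(iter::repeat(u64::MIN));
        let rhs_digits = rhs.iter().copied().chain(iter::repeat(u64::MAX));
        let mut out = Vec::new();
        for (l, r) in lhs_digits.zip(rhs_digits) {
            // Bias the midpoint hard toward the left neighbor; when the gap is
            // small this copies the left digit and descends one level deeper.
            let mid = l + (r.saturating_sub(l) >> 48);
            out.push(mid);
            if mid > l {
                break;
            }
        }
        out
    }

    fn main() {
        let min = vec![u64::MIN];
        let max = vec![u64::MAX];
        let a = between(&min, &max); // [65535]
        let b = between(&min, &a);   // [0, 65535]: no room at the first digit, so it grows
        assert!(min < b && b < a && a < max);
    }
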
@@ -1,9 +1,15 @@
-use super::Operation;
use std::{fmt::Debug, ops::Add};
-use sum_tree::{Cursor, Dimension, Edit, Item, KeyedItem, SumTree, Summary};
+use sum_tree::{Dimension, Edit, Item, KeyedItem, SumTree, Summary};
+
+pub trait Operation: Clone + Debug {
+ fn lamport_timestamp(&self) -> clock::Lamport;
+}
+
+#[derive(Clone, Debug)]
+struct OperationItem<T>(T);
#[derive(Clone, Debug)]
-pub struct OperationQueue(SumTree<Operation>);
+pub struct OperationQueue<T: Operation>(SumTree<OperationItem<T>>);
#[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]
pub struct OperationKey(clock::Lamport);
@@ -20,7 +26,7 @@ impl OperationKey {
}
}
-impl OperationQueue {
+impl<T: Operation> OperationQueue<T> {
pub fn new() -> Self {
OperationQueue(SumTree::new())
}
@@ -29,11 +35,15 @@ impl OperationQueue {
self.0.summary().len
}
- pub fn insert(&mut self, mut ops: Vec<Operation>) {
+ pub fn insert(&mut self, mut ops: Vec<T>) {
ops.sort_by_key(|op| op.lamport_timestamp());
ops.dedup_by_key(|op| op.lamport_timestamp());
- self.0
- .edit(ops.into_iter().map(Edit::Insert).collect(), &());
+ self.0.edit(
+ ops.into_iter()
+ .map(|op| Edit::Insert(OperationItem(op)))
+ .collect(),
+ &(),
+ );
}
pub fn drain(&mut self) -> Self {
@@ -42,8 +52,8 @@ impl OperationQueue {
clone
}
- pub fn cursor(&self) -> Cursor<Operation, ()> {
- self.0.cursor()
+ pub fn iter(&self) -> impl Iterator<Item = &T> {
+ self.0.cursor::<()>().map(|i| &i.0)
}
}
@@ -76,22 +86,22 @@ impl<'a> Dimension<'a, OperationSummary> for OperationKey {
}
}
-impl Item for Operation {
+impl<T: Operation> Item for OperationItem<T> {
type Summary = OperationSummary;
fn summary(&self) -> Self::Summary {
OperationSummary {
- key: OperationKey::new(self.lamport_timestamp()),
+ key: OperationKey::new(self.0.lamport_timestamp()),
len: 1,
}
}
}
-impl KeyedItem for Operation {
+impl<T: Operation> KeyedItem for OperationItem<T> {
type Key = OperationKey;
fn key(&self) -> Self::Key {
- OperationKey::new(self.lamport_timestamp())
+ OperationKey::new(self.0.lamport_timestamp())
}
}
@@ -107,21 +117,27 @@ mod tests {
assert_eq!(queue.len(), 0);
queue.insert(vec![
- Operation::Test(clock.tick()),
- Operation::Test(clock.tick()),
+ TestOperation(clock.tick()),
+ TestOperation(clock.tick()),
]);
assert_eq!(queue.len(), 2);
- queue.insert(vec![Operation::Test(clock.tick())]);
+ queue.insert(vec![TestOperation(clock.tick())]);
assert_eq!(queue.len(), 3);
drop(queue.drain());
assert_eq!(queue.len(), 0);
- queue.insert(vec![Operation::Test(clock.tick())]);
+ queue.insert(vec![TestOperation(clock.tick())]);
assert_eq!(queue.len(), 1);
}
#[derive(Clone, Debug, Eq, PartialEq)]
struct TestOperation(clock::Lamport);
+
+ impl Operation for TestOperation {
+ fn lamport_timestamp(&self) -> clock::Lamport {
+ self.0
+ }
+ }
}
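
With the queue made generic, any operation type can be enqueued as long as it exposes a Lamport timestamp. A minimal caller sketch, assuming only the trait and methods shown above (insert, drain, iter); MyOp is a hypothetical type, not something in the crate, and module paths are abbreviated:

    // Hypothetical operation type; the only requirement the generic queue
    // imposes is the new Operation trait.
    #[derive(Clone, Debug)]
    struct MyOp {
        timestamp: clock::Lamport,
    }

    impl operation_queue::Operation for MyOp {
        fn lamport_timestamp(&self) -> clock::Lamport {
            self.timestamp
        }
    }

    fn flush(queue: &mut operation_queue::OperationQueue<MyOp>) -> Vec<MyOp> {
        // drain() swaps in an empty queue; iter() walks the drained operations
        // in OperationKey order, i.e. sorted by Lamport timestamp.
        queue.drain().iter().cloned().collect()
    }
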
@@ -22,13 +22,13 @@ impl<T: Rng> Iterator for RandomCharIter<T> {
match self.0.gen_range(0..100) {
// whitespace
- 0..=19 => [' ', '\n', '\t'].choose(&mut self.0).copied(),
+ 0..=5 => ['\n'].choose(&mut self.0).copied(),
// two-byte greek letters
- 20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))),
- // three-byte characters
- 33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(),
- // four-byte characters
- 46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(),
+ // 20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))),
+ // // three-byte characters
+ // 33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(),
+ // // four-byte characters
+ // 46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(),
// ascii letters
_ => Some(self.0.gen_range(b'a'..b'z' + 1).into()),
}
@@ -1,4 +1,5 @@
-use crate::{rope::TextDimension, AnchorRangeMap, BufferSnapshot, ToOffset, ToPoint};
+use crate::Anchor;
+use crate::{rope::TextDimension, BufferSnapshot, ToOffset, ToPoint};
use std::{cmp::Ordering, ops::Range, sync::Arc};
use sum_tree::Bias;
@@ -25,7 +26,7 @@ pub struct Selection<T> {
pub struct SelectionSet {
pub id: SelectionSetId,
pub active: bool,
- pub selections: Arc<AnchorRangeMap<SelectionState>>,
+ pub selections: Arc<[Selection<Anchor>]>,
}
#[derive(Debug, Eq, PartialEq)]
@@ -75,6 +76,21 @@ impl<T: ToOffset + ToPoint + Copy + Ord> Selection<T> {
}
}
+impl Selection<Anchor> {
+ pub fn resolve<'a, D: 'a + TextDimension>(
+ &'a self,
+ snapshot: &'a BufferSnapshot,
+ ) -> Selection<D> {
+ Selection {
+ id: self.id,
+ start: snapshot.summary_for_anchor(&self.start),
+ end: snapshot.summary_for_anchor(&self.end),
+ reversed: self.reversed,
+ goal: self.goal,
+ }
+ }
+}
+
impl SelectionSet {
pub fn len(&self) -> usize {
self.selections.len()
@@ -82,69 +98,70 @@ impl SelectionSet {
pub fn selections<'a, D>(
&'a self,
- content: &'a BufferSnapshot,
+ snapshot: &'a BufferSnapshot,
) -> impl 'a + Iterator<Item = Selection<D>>
where
D: TextDimension,
{
- self.selections
- .ranges(content)
- .map(|(range, state)| Selection {
- id: state.id,
- start: range.start,
- end: range.end,
- reversed: state.reversed,
- goal: state.goal,
- })
+ let anchors = self
+ .selections
+ .iter()
+ .flat_map(|selection| [&selection.start, &selection.end].into_iter());
+ let mut positions = snapshot.summaries_for_anchors::<D, _>(anchors);
+ self.selections.iter().map(move |selection| Selection {
+ start: positions.next().unwrap(),
+ end: positions.next().unwrap(),
+ goal: selection.goal,
+ reversed: selection.reversed,
+ id: selection.id,
+ })
}
pub fn intersecting_selections<'a, D, I>(
&'a self,
range: Range<(I, Bias)>,
- content: &'a BufferSnapshot,
+ snapshot: &'a BufferSnapshot,
) -> impl 'a + Iterator<Item = Selection<D>>
where
D: TextDimension,
I: 'a + ToOffset,
{
- self.selections
- .intersecting_ranges(range, content)
- .map(|(range, state)| Selection {
- id: state.id,
- start: range.start,
- end: range.end,
- reversed: state.reversed,
- goal: state.goal,
- })
+ let start = snapshot.anchor_at(range.start.0, range.start.1);
+ let end = snapshot.anchor_at(range.end.0, range.end.1);
+ let start_ix = match self
+ .selections
+ .binary_search_by(|probe| probe.end.cmp(&start, snapshot).unwrap())
+ {
+ Ok(ix) | Err(ix) => ix,
+ };
+ let end_ix = match self
+ .selections
+ .binary_search_by(|probe| probe.start.cmp(&end, snapshot).unwrap())
+ {
+ Ok(ix) | Err(ix) => ix,
+ };
+ self.selections[start_ix..end_ix]
+ .iter()
+ .map(|s| s.resolve(snapshot))
}
- pub fn oldest_selection<'a, D>(&'a self, content: &'a BufferSnapshot) -> Option<Selection<D>>
+ pub fn oldest_selection<'a, D>(&'a self, snapshot: &'a BufferSnapshot) -> Option<Selection<D>>
where
D: TextDimension,
{
self.selections
- .min_by_key(content, |selection| selection.id)
- .map(|(range, state)| Selection {
- id: state.id,
- start: range.start,
- end: range.end,
- reversed: state.reversed,
- goal: state.goal,
- })
+ .iter()
+ .min_by_key(|s| s.id)
+ .map(|s| s.resolve(snapshot))
}
- pub fn newest_selection<'a, D>(&'a self, content: &'a BufferSnapshot) -> Option<Selection<D>>
+ pub fn newest_selection<'a, D>(&'a self, snapshot: &'a BufferSnapshot) -> Option<Selection<D>>
where
D: TextDimension,
{
self.selections
- .max_by_key(content, |selection| selection.id)
- .map(|(range, state)| Selection {
- id: state.id,
- start: range.start,
- end: range.end,
- reversed: state.reversed,
- goal: state.goal,
- })
+ .iter()
+ .max_by_key(|s| s.id)
+ .map(|s| s.resolve(snapshot))
}
}
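
Selection sets now store plain Arc<[Selection<Anchor>]> and are resolved on demand: selections() flattens every start and end anchor into one stream so summaries_for_anchors can resolve them in a single ordered pass over the buffer, then re-pairs the results. A hedged usage sketch, assuming usize and Point implement TextDimension as the helpers above suggest:

    // Resolve a whole set to Point ranges in one pass; SelectionSet and
    // BufferSnapshot are the types defined in this change.
    fn selected_point_ranges(set: &SelectionSet, snapshot: &BufferSnapshot) -> Vec<std::ops::Range<Point>> {
        set.selections::<Point>(snapshot)
            .map(|selection| selection.start..selection.end)
            .collect()
    }

    // A single anchored selection can also be resolved directly.
    fn selection_len(selection: &Selection<Anchor>, snapshot: &BufferSnapshot) -> usize {
        let resolved: Selection<usize> = selection.resolve(snapshot);
        resolved.end - resolved.start
    }
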
@@ -78,6 +78,8 @@ fn test_random_edits(mut rng: StdRng) {
TextSummary::from(&reference_string[range])
);
+ buffer.check_invariants();
+
if rng.gen_bool(0.3) {
buffer_versions.push((buffer.clone(), buffer.subscribe()));
}
@@ -603,6 +605,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
}
_ => {}
}
+ buffer.check_invariants();
if mutation_count == 0 && network.is_idle() {
break;
@@ -629,6 +632,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) {
.all_selection_ranges::<usize>()
.collect::<HashMap<_, _>>()
);
+ buffer.check_invariants();
}
}
@@ -644,6 +648,39 @@ struct Network<T: Clone, R: rand::Rng> {
rng: R,
}
+impl Buffer {
+ fn check_invariants(&self) {
+ // Ensure every fragment is ordered by locator in the fragment tree and corresponds
+ // to an insertion fragment in the insertions tree.
+ let mut prev_fragment_id = Locator::min();
+ for fragment in self.snapshot.fragments.items(&None) {
+ assert!(fragment.id > prev_fragment_id);
+ prev_fragment_id = fragment.id.clone();
+
+ let insertion_fragment = self
+ .snapshot
+ .insertions
+ .get(
+ &InsertionFragmentKey {
+ timestamp: fragment.insertion_timestamp.local(),
+ split_offset: fragment.insertion_offset,
+ },
+ &(),
+ )
+ .unwrap();
+ assert_eq!(insertion_fragment.fragment_id, fragment.id);
+ }
+
+ let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>();
+ for insertion_fragment in self.snapshot.insertions.cursor::<()>() {
+ cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None);
+ let fragment = cursor.item().unwrap();
+ assert_eq!(insertion_fragment.fragment_id, fragment.id);
+ assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
+ }
+ }
+}
+
impl<T: Clone, R: rand::Rng> Network<T, R> {
fn new(rng: R) -> Self {
Network {
@@ -1,5 +1,6 @@
mod anchor;
-mod operation_queue;
+mod locator;
+pub mod operation_queue;
mod patch;
mod point;
mod point_utf16;
@@ -15,6 +16,7 @@ pub use anchor::*;
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use collections::{HashMap, HashSet};
+use locator::Locator;
use operation_queue::OperationQueue;
pub use patch::Patch;
pub use point::*;
@@ -25,7 +27,7 @@ use rope::TextDimension;
pub use rope::{Chunks, Rope, TextSummary};
pub use selection::*;
use std::{
- cmp::{self, Reverse},
+ cmp::{self, Ordering},
iter::Iterator,
ops::{self, Deref, Range, Sub},
str,
@@ -41,7 +43,7 @@ pub struct Buffer {
last_edit: clock::Local,
history: History,
selection_sets: HashMap<SelectionSetId, SelectionSet>,
- deferred_ops: OperationQueue,
+ deferred_ops: OperationQueue<Operation>,
deferred_replicas: HashSet<ReplicaId>,
replica_id: ReplicaId,
remote_id: u64,
@@ -56,6 +58,7 @@ pub struct BufferSnapshot {
deleted_text: Rope,
undo_map: UndoMap,
fragments: SumTree<Fragment>,
+ insertions: SumTree<InsertionFragment>,
pub version: clock::Global,
}
@@ -65,8 +68,8 @@ pub struct Transaction {
end: clock::Global,
edits: Vec<clock::Local>,
ranges: Vec<Range<FullOffset>>,
- selections_before: HashMap<SelectionSetId, Arc<AnchorRangeMap<SelectionState>>>,
- selections_after: HashMap<SelectionSetId, Arc<AnchorRangeMap<SelectionState>>>,
+ selections_before: HashMap<SelectionSetId, Arc<[Selection<Anchor>]>>,
+ selections_after: HashMap<SelectionSetId, Arc<[Selection<Anchor>]>>,
first_edit_at: Instant,
last_edit_at: Instant,
}
@@ -153,7 +156,7 @@ impl History {
fn start_transaction(
&mut self,
start: clock::Global,
- selections_before: HashMap<SelectionSetId, Arc<AnchorRangeMap<SelectionState>>>,
+ selections_before: HashMap<SelectionSetId, Arc<[Selection<Anchor>]>>,
now: Instant,
) {
self.transaction_depth += 1;
@@ -173,7 +176,7 @@ impl History {
fn end_transaction(
&mut self,
- selections_after: HashMap<SelectionSetId, Arc<AnchorRangeMap<SelectionState>>>,
+ selections_after: HashMap<SelectionSetId, Arc<[Selection<Anchor>]>>,
now: Instant,
) -> Option<&Transaction> {
assert_ne!(self.transaction_depth, 0);
@@ -344,7 +347,7 @@ impl<D1, D2> Edit<(D1, D2)> {
}
}
-#[derive(Copy, Clone, Debug, Default, Eq, PartialEq)]
+#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord)]
pub struct InsertionTimestamp {
pub replica_id: ReplicaId,
pub local: clock::Seq,
@@ -369,7 +372,9 @@ impl InsertionTimestamp {
#[derive(Eq, PartialEq, Clone, Debug)]
struct Fragment {
- timestamp: InsertionTimestamp,
+ id: Locator,
+ insertion_timestamp: InsertionTimestamp,
+ insertion_offset: usize,
len: usize,
visible: bool,
deletions: HashSet<clock::Local>,
@@ -379,6 +384,7 @@ struct Fragment {
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct FragmentSummary {
text: FragmentTextSummary,
+ max_id: Locator,
max_version: clock::Global,
min_insertion_version: clock::Global,
max_insertion_version: clock::Global,
@@ -403,6 +409,19 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
}
}
+#[derive(Eq, PartialEq, Clone, Debug)]
+struct InsertionFragment {
+ timestamp: clock::Local,
+ split_offset: usize,
+ fragment_id: Locator,
+}
+
+#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
+struct InsertionFragmentKey {
+ timestamp: clock::Local,
+ split_offset: usize,
+}
+
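
Every fragment now carries a stable Locator id and records which slice of its original insertion it covers (insertion_offset). The new insertions tree is the reverse index: keyed by (insertion timestamp, split offset), it answers which fragment currently owns a given byte of a given insertion. The std-only analogue below is purely illustrative; the real index is a SumTree, uses clock::Local timestamps, and also honors anchor bias when seeking.

    use std::collections::BTreeMap;
    use std::ops::Bound;

    // Simplified key: (insertion timestamp, split offset). Values stand in for
    // the owning fragment's Locator digits.
    type Key = (u32, usize);
    type LocatorDigits = Vec<u64>;

    // Find the fragment containing `offset` within insertion `timestamp`: the
    // entry with the greatest split offset that is <= the requested offset.
    // Returns the fragment's locator and the overshoot past its split offset.
    fn containing_fragment(
        index: &BTreeMap<Key, LocatorDigits>,
        timestamp: u32,
        offset: usize,
    ) -> Option<(&LocatorDigits, usize)> {
        index
            .range((Bound::Unbounded, Bound::Included((timestamp, offset))))
            .next_back()
            .filter(|((t, _), _)| *t == timestamp)
            .map(|((_, split_offset), locator)| (locator, offset - split_offset))
    }
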
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Operation {
Edit(EditOperation),
@@ -412,7 +431,7 @@ pub enum Operation {
},
UpdateSelections {
set_id: SelectionSetId,
- selections: Arc<AnchorRangeMap<SelectionState>>,
+ selections: Arc<[Selection<Anchor>]>,
lamport_timestamp: clock::Lamport,
},
RemoveSelections {
@@ -423,8 +442,6 @@ pub enum Operation {
set_id: Option<SelectionSetId>,
lamport_timestamp: clock::Lamport,
},
- #[cfg(test)]
- Test(clock::Lamport),
}
#[derive(Clone, Debug, Eq, PartialEq)]
@@ -446,30 +463,33 @@ pub struct UndoOperation {
impl Buffer {
pub fn new(replica_id: u16, remote_id: u64, history: History) -> Buffer {
let mut fragments = SumTree::new();
+ let mut insertions = SumTree::new();
let mut local_clock = clock::Local::new(replica_id);
let mut lamport_clock = clock::Lamport::new(replica_id);
let mut version = clock::Global::new();
let visible_text = Rope::from(history.base_text.as_ref());
if visible_text.len() > 0 {
- let timestamp = InsertionTimestamp {
+ let insertion_timestamp = InsertionTimestamp {
replica_id: 0,
local: 1,
lamport: 1,
};
- local_clock.observe(timestamp.local());
- lamport_clock.observe(timestamp.lamport());
- version.observe(timestamp.local());
- fragments.push(
- Fragment {
- timestamp,
- len: visible_text.len(),
- visible: true,
- deletions: Default::default(),
- max_undos: Default::default(),
- },
- &None,
- );
+ local_clock.observe(insertion_timestamp.local());
+ lamport_clock.observe(insertion_timestamp.lamport());
+ version.observe(insertion_timestamp.local());
+ let fragment_id = Locator::between(&Locator::min(), &Locator::max());
+ let fragment = Fragment {
+ id: fragment_id,
+ insertion_timestamp,
+ insertion_offset: 0,
+ len: visible_text.len(),
+ visible: true,
+ deletions: Default::default(),
+ max_undos: Default::default(),
+ };
+ insertions.push(InsertionFragment::new(&fragment), &());
+ fragments.push(fragment, &None);
}
Buffer {
@@ -477,6 +497,7 @@ impl Buffer {
visible_text,
deleted_text: Rope::new(),
fragments,
+ insertions,
version,
undo_map: Default::default(),
},
@@ -498,19 +519,17 @@ impl Buffer {
}
pub fn snapshot(&self) -> BufferSnapshot {
- BufferSnapshot {
- visible_text: self.visible_text.clone(),
- deleted_text: self.deleted_text.clone(),
- undo_map: self.undo_map.clone(),
- fragments: self.fragments.clone(),
- version: self.version.clone(),
- }
+ self.snapshot.clone()
}
pub fn replica_id(&self) -> ReplicaId {
self.local_clock.replica_id
}
+ pub fn lamport_timestamp(&self) -> clock::Lamport {
+ self.lamport_clock
+ }
+
pub fn remote_id(&self) -> u64 {
self.remote_id
}
@@ -563,6 +582,8 @@ impl Buffer {
ranges: Vec::with_capacity(ranges.len()),
new_text: None,
};
+ let mut new_insertions = Vec::new();
+ let mut insertion_offset = 0;
let mut ranges = ranges
.map(|range| range.start.to_offset(&*self)..range.end.to_offset(&*self))
@@ -588,6 +609,8 @@ impl Buffer {
if fragment_end > fragment_start {
let mut suffix = old_fragments.item().unwrap().clone();
suffix.len = fragment_end - fragment_start;
+ suffix.insertion_offset += fragment_start - old_fragments.start().visible;
+ new_insertions.push(InsertionFragment::insert_new(&suffix));
new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None);
}
@@ -606,6 +629,9 @@ impl Buffer {
if fragment_start < range.start {
let mut prefix = old_fragments.item().unwrap().clone();
prefix.len = range.start - fragment_start;
+ prefix.insertion_offset += fragment_start - old_fragments.start().visible;
+ prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
+ new_insertions.push(InsertionFragment::insert_new(&prefix));
new_ropes.push_fragment(&prefix, prefix.visible);
new_fragments.push(prefix, &None);
fragment_start = range.start;
@@ -618,17 +644,24 @@ impl Buffer {
old: fragment_start..fragment_start,
new: new_start..new_start + new_text.len(),
});
+ let fragment = Fragment {
+ id: Locator::between(
+ &new_fragments.summary().max_id,
+ old_fragments
+ .item()
+ .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
+ ),
+ insertion_timestamp: timestamp,
+ insertion_offset,
+ len: new_text.len(),
+ deletions: Default::default(),
+ max_undos: Default::default(),
+ visible: true,
+ };
+ new_insertions.push(InsertionFragment::insert_new(&fragment));
new_ropes.push_str(new_text);
- new_fragments.push(
- Fragment {
- timestamp,
- len: new_text.len(),
- deletions: Default::default(),
- max_undos: Default::default(),
- visible: true,
- },
- &None,
- );
+ new_fragments.push(fragment, &None);
+ insertion_offset += new_text.len();
}
// Advance through every fragment that intersects this range, marking the intersecting
@@ -640,6 +673,9 @@ impl Buffer {
let intersection_end = cmp::min(range.end, fragment_end);
if fragment.visible {
intersection.len = intersection_end - fragment_start;
+ intersection.insertion_offset += fragment_start - old_fragments.start().visible;
+ intersection.id =
+ Locator::between(&new_fragments.summary().max_id, &intersection.id);
intersection.deletions.insert(timestamp.local());
intersection.visible = false;
}
@@ -651,6 +687,7 @@ impl Buffer {
new: new_start..new_start,
});
}
+ new_insertions.push(InsertionFragment::insert_new(&intersection));
new_ropes.push_fragment(&intersection, fragment.visible);
new_fragments.push(intersection, &None);
fragment_start = intersection_end;
@@ -671,6 +708,8 @@ impl Buffer {
if fragment_end > fragment_start {
let mut suffix = old_fragments.item().unwrap().clone();
suffix.len = fragment_end - fragment_start;
+ suffix.insertion_offset += fragment_start - old_fragments.start().visible;
+ new_insertions.push(InsertionFragment::insert_new(&suffix));
new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None);
}
@@ -684,6 +723,7 @@ impl Buffer {
drop(old_fragments);
self.snapshot.fragments = new_fragments;
+ self.snapshot.insertions.edit(new_insertions, &());
self.snapshot.visible_text = visible_text;
self.snapshot.deleted_text = deleted_text;
self.subscriptions.publish_mut(&edits);
@@ -771,8 +811,6 @@ impl Buffer {
}
self.lamport_clock.observe(lamport_timestamp);
}
- #[cfg(test)]
- Operation::Test(_) => {}
}
Ok(())
}
@@ -790,6 +828,8 @@ impl Buffer {
let mut edits = Patch::default();
let cx = Some(version.clone());
+ let mut new_insertions = Vec::new();
+ let mut insertion_offset = 0;
let mut new_ropes =
RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>();
@@ -813,6 +853,9 @@ impl Buffer {
if fragment_end > fragment_start {
let mut suffix = old_fragments.item().unwrap().clone();
suffix.len = fragment_end.0 - fragment_start.0;
+ suffix.insertion_offset +=
+ fragment_start - old_fragments.start().0.full_offset();
+ new_insertions.push(InsertionFragment::insert_new(&suffix));
new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None);
}
@@ -831,6 +874,8 @@ impl Buffer {
if fragment_end == range.start && fragment_end > fragment_start {
let mut fragment = old_fragments.item().unwrap().clone();
fragment.len = fragment_end.0 - fragment_start.0;
+ fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+ new_insertions.push(InsertionFragment::insert_new(&fragment));
new_ropes.push_fragment(&fragment, fragment.visible);
new_fragments.push(fragment, &None);
old_fragments.next(&cx);
@@ -841,7 +886,7 @@ impl Buffer {
// timestamp.
while let Some(fragment) = old_fragments.item() {
if fragment_start == range.start
- && fragment.timestamp.lamport() > timestamp.lamport()
+ && fragment.insertion_timestamp.lamport() > timestamp.lamport()
{
new_ropes.push_fragment(fragment, fragment.visible);
new_fragments.push(fragment.clone(), &None);
@@ -857,6 +902,9 @@ impl Buffer {
if fragment_start < range.start {
let mut prefix = old_fragments.item().unwrap().clone();
prefix.len = range.start.0 - fragment_start.0;
+ prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+ prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
+ new_insertions.push(InsertionFragment::insert_new(&prefix));
fragment_start = range.start;
new_ropes.push_fragment(&prefix, prefix.visible);
new_fragments.push(prefix, &None);
@@ -873,17 +921,24 @@ impl Buffer {
old: old_start..old_start,
new: new_start..new_start + new_text.len(),
});
+ let fragment = Fragment {
+ id: Locator::between(
+ &new_fragments.summary().max_id,
+ old_fragments
+ .item()
+ .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
+ ),
+ insertion_timestamp: timestamp,
+ insertion_offset,
+ len: new_text.len(),
+ deletions: Default::default(),
+ max_undos: Default::default(),
+ visible: true,
+ };
+ new_insertions.push(InsertionFragment::insert_new(&fragment));
new_ropes.push_str(new_text);
- new_fragments.push(
- Fragment {
- timestamp,
- len: new_text.len(),
- deletions: Default::default(),
- max_undos: Default::default(),
- visible: true,
- },
- &None,
- );
+ new_fragments.push(fragment, &None);
+ insertion_offset += new_text.len();
}
// Advance through every fragment that intersects this range, marking the intersecting
@@ -895,6 +950,10 @@ impl Buffer {
let intersection_end = cmp::min(range.end, fragment_end);
if fragment.was_visible(version, &self.undo_map) {
intersection.len = intersection_end.0 - fragment_start.0;
+ intersection.insertion_offset +=
+ fragment_start - old_fragments.start().0.full_offset();
+ intersection.id =
+ Locator::between(&new_fragments.summary().max_id, &intersection.id);
intersection.deletions.insert(timestamp.local());
intersection.visible = false;
}
@@ -908,6 +967,7 @@ impl Buffer {
new: new_start..new_start,
});
}
+ new_insertions.push(InsertionFragment::insert_new(&intersection));
new_ropes.push_fragment(&intersection, fragment.visible);
new_fragments.push(intersection, &None);
fragment_start = intersection_end;
@@ -925,6 +985,8 @@ impl Buffer {
if fragment_end > fragment_start {
let mut suffix = old_fragments.item().unwrap().clone();
suffix.len = fragment_end.0 - fragment_start.0;
+ suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+ new_insertions.push(InsertionFragment::insert_new(&suffix));
new_ropes.push_fragment(&suffix, suffix.visible);
new_fragments.push(suffix, &None);
}
@@ -940,6 +1002,7 @@ impl Buffer {
self.snapshot.fragments = new_fragments;
self.snapshot.visible_text = visible_text;
self.snapshot.deleted_text = deleted_text;
+ self.snapshot.insertions.edit(new_insertions, &());
self.local_clock.observe(timestamp.local());
self.lamport_clock.observe(timestamp.lamport());
self.subscriptions.publish_mut(&edits);
@@ -984,7 +1047,9 @@ impl Buffer {
let fragment_was_visible = fragment.visible;
if fragment.was_visible(&undo.version, &self.undo_map)
- || undo.counts.contains_key(&fragment.timestamp.local())
+ || undo
+ .counts
+ .contains_key(&fragment.insertion_timestamp.local())
{
fragment.visible = fragment.is_visible(&self.undo_map);
fragment.max_undos.observe(undo.id);
@@ -1039,7 +1104,7 @@ impl Buffer {
fn flush_deferred_ops(&mut self) -> Result<()> {
self.deferred_replicas.clear();
let mut deferred_ops = Vec::new();
- for op in self.deferred_ops.drain().cursor().cloned() {
+ for op in self.deferred_ops.drain().iter().cloned() {
if self.can_apply_op(&op) {
self.apply_op(op)?;
} else {
@@ -1058,19 +1123,23 @@ impl Buffer {
match op {
Operation::Edit(edit) => self.version.ge(&edit.version),
Operation::Undo { undo, .. } => self.version.ge(&undo.version),
- Operation::UpdateSelections { selections, .. } => {
- self.version.ge(selections.version())
- }
+ Operation::UpdateSelections { selections, .. } => selections
+ .iter()
+ .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
Operation::RemoveSelections { .. } => true,
Operation::SetActiveSelections { set_id, .. } => {
set_id.map_or(true, |set_id| self.selection_sets.contains_key(&set_id))
}
- #[cfg(test)]
- Operation::Test(_) => true,
}
}
}
+ pub fn can_resolve(&self, anchor: &Anchor) -> bool {
+ *anchor == Anchor::min()
+ || *anchor == Anchor::max()
+ || self.version.observed(anchor.timestamp)
+ }
+
pub fn peek_undo_stack(&self) -> Option<&Transaction> {
self.history.undo_stack.last()
}
@@ -1203,25 +1272,22 @@ impl Buffer {
self.selection_sets.iter()
}
- fn build_selection_anchor_range_map<T: ToOffset>(
+ fn build_anchor_selection_set<T: ToOffset>(
&self,
selections: &[Selection<T>],
- ) -> Arc<AnchorRangeMap<SelectionState>> {
- Arc::new(self.anchor_range_map(
- Bias::Left,
- Bias::Left,
- selections.iter().map(|selection| {
- let start = selection.start.to_offset(self);
- let end = selection.end.to_offset(self);
- let range = start..end;
- let state = SelectionState {
+ ) -> Arc<[Selection<Anchor>]> {
+ Arc::from(
+ selections
+ .iter()
+ .map(|selection| Selection {
id: selection.id,
+ start: self.anchor_before(&selection.start),
+ end: self.anchor_before(&selection.end),
reversed: selection.reversed,
goal: selection.goal,
- };
- (range, state)
- }),
- ))
+ })
+ .collect::<Vec<_>>(),
+ )
}
pub fn update_selection_set<T: ToOffset>(
@@ -1229,7 +1295,7 @@ impl Buffer {
set_id: SelectionSetId,
selections: &[Selection<T>],
) -> Result<Operation> {
- let selections = self.build_selection_anchor_range_map(selections);
+ let selections = self.build_anchor_selection_set(selections);
let set = self
.selection_sets
.get_mut(&set_id)
@@ -1245,7 +1311,7 @@ impl Buffer {
pub fn restore_selection_set(
&mut self,
set_id: SelectionSetId,
- selections: Arc<AnchorRangeMap<SelectionState>>,
+ selections: Arc<[Selection<Anchor>]>,
) -> Result<Operation> {
let set = self
.selection_sets
@@ -1260,7 +1326,7 @@ impl Buffer {
}
pub fn add_selection_set<T: ToOffset>(&mut self, selections: &[Selection<T>]) -> Operation {
- let selections = self.build_selection_anchor_range_map(selections);
+ let selections = self.build_anchor_selection_set(selections);
let set_id = self.lamport_clock.tick();
self.selection_sets.insert(
set_id,
@@ -1602,25 +1668,6 @@ impl BufferSnapshot {
result
}
- fn summary_for_anchor<'a, D>(&'a self, anchor: &Anchor) -> D
- where
- D: TextDimension,
- {
- let cx = Some(anchor.version.clone());
- let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>();
- cursor.seek(
- &VersionedFullOffset::Offset(anchor.full_offset),
- anchor.bias,
- &cx,
- );
- let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) {
- anchor.full_offset - cursor.start().0.full_offset()
- } else {
- 0
- };
- self.text_summary_for_range(0..cursor.start().1 + overshoot)
- }
-
pub fn text_summary_for_range<'a, D, O: ToOffset>(&'a self, range: Range<O>) -> D
where
D: TextDimension,
@@ -1630,68 +1677,129 @@ impl BufferSnapshot {
.summary(range.end.to_offset(self))
}
- fn summaries_for_anchors<'a, D, I>(
- &'a self,
- version: clock::Global,
- bias: Bias,
- ranges: I,
- ) -> impl 'a + Iterator<Item = D>
+ pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
where
- D: TextDimension,
- I: 'a + IntoIterator<Item = &'a FullOffset>,
+ D: 'a + TextDimension,
+ A: 'a + IntoIterator<Item = &'a Anchor>,
{
- let cx = Some(version.clone());
- let mut summary = D::default();
- let mut rope_cursor = self.visible_text.cursor(0);
- let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>();
- ranges.into_iter().map(move |offset| {
- cursor.seek_forward(&VersionedFullOffset::Offset(*offset), bias, &cx);
- let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) {
- *offset - cursor.start().0.full_offset()
- } else {
- 0
+ let anchors = anchors.into_iter();
+ let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+ let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>();
+ let mut text_cursor = self.visible_text.cursor(0);
+ let mut position = D::default();
+
+ anchors.map(move |anchor| {
+ if *anchor == Anchor::min() {
+ return D::default();
+ } else if *anchor == Anchor::max() {
+ return D::from_text_summary(&self.visible_text.summary());
+ }
+
+ let anchor_key = InsertionFragmentKey {
+ timestamp: anchor.timestamp,
+ split_offset: anchor.offset,
};
- summary.add_assign(&rope_cursor.summary(cursor.start().1 + overshoot));
- summary.clone()
+ insertion_cursor.seek(&anchor_key, anchor.bias, &());
+ if let Some(insertion) = insertion_cursor.item() {
+ let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
+ if comparison == Ordering::Greater
+ || (anchor.bias == Bias::Left
+ && comparison == Ordering::Equal
+ && anchor.offset > 0)
+ {
+ insertion_cursor.prev(&());
+ }
+ } else {
+ insertion_cursor.prev(&());
+ }
+ let insertion = insertion_cursor.item().expect("invalid insertion");
+ debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
+
+ fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left, &None);
+ let fragment = fragment_cursor.item().unwrap();
+ let mut fragment_offset = fragment_cursor.start().1;
+ if fragment.visible {
+ fragment_offset += anchor.offset - insertion.split_offset;
+ }
+
+ position.add_assign(&text_cursor.summary(fragment_offset));
+ position.clone()
})
}
- fn summaries_for_anchor_ranges<'a, D, I>(
- &'a self,
- version: clock::Global,
- start_bias: Bias,
- end_bias: Bias,
- ranges: I,
- ) -> impl 'a + Iterator<Item = Range<D>>
+ fn summary_for_anchor<'a, D>(&'a self, anchor: &Anchor) -> D
where
D: TextDimension,
- I: 'a + IntoIterator<Item = &'a Range<FullOffset>>,
{
- let cx = Some(version);
- let mut summary = D::default();
- let mut rope_cursor = self.visible_text.cursor(0);
- let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>();
- ranges.into_iter().map(move |range| {
- cursor.seek_forward(&VersionedFullOffset::Offset(range.start), start_bias, &cx);
- let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) {
- range.start - cursor.start().0.full_offset()
- } else {
- 0
+ if *anchor == Anchor::min() {
+ D::default()
+ } else if *anchor == Anchor::max() {
+ D::from_text_summary(&self.visible_text.summary())
+ } else {
+ let anchor_key = InsertionFragmentKey {
+ timestamp: anchor.timestamp,
+ split_offset: anchor.offset,
};
- summary.add_assign(&rope_cursor.summary::<D>(cursor.start().1 + overshoot));
- let start_summary = summary.clone();
-
- cursor.seek_forward(&VersionedFullOffset::Offset(range.end), end_bias, &cx);
- let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) {
- range.end - cursor.start().0.full_offset()
+ let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+ insertion_cursor.seek(&anchor_key, anchor.bias, &());
+ if let Some(insertion) = insertion_cursor.item() {
+ let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
+ if comparison == Ordering::Greater
+ || (anchor.bias == Bias::Left
+ && comparison == Ordering::Equal
+ && anchor.offset > 0)
+ {
+ insertion_cursor.prev(&());
+ }
} else {
- 0
+ insertion_cursor.prev(&());
+ }
+ let insertion = insertion_cursor.item().expect("invalid insertion");
+ debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
+
+ let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>();
+ fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None);
+ let fragment = fragment_cursor.item().unwrap();
+ let mut fragment_offset = fragment_cursor.start().1;
+ if fragment.visible {
+ fragment_offset += anchor.offset - insertion.split_offset;
+ }
+ self.text_summary_for_range(0..fragment_offset)
+ }
+ }
+
+ fn full_offset_for_anchor(&self, anchor: &Anchor) -> FullOffset {
+ if *anchor == Anchor::min() {
+ Default::default()
+ } else if *anchor == Anchor::max() {
+ let text = self.fragments.summary().text;
+ FullOffset(text.visible + text.deleted)
+ } else {
+ let anchor_key = InsertionFragmentKey {
+ timestamp: anchor.timestamp,
+ split_offset: anchor.offset,
};
- summary.add_assign(&rope_cursor.summary::<D>(cursor.start().1 + overshoot));
- let end_summary = summary.clone();
+ let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+ insertion_cursor.seek(&anchor_key, anchor.bias, &());
+ if let Some(insertion) = insertion_cursor.item() {
+ let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
+ if comparison == Ordering::Greater
+ || (anchor.bias == Bias::Left
+ && comparison == Ordering::Equal
+ && anchor.offset > 0)
+ {
+ insertion_cursor.prev(&());
+ }
+ } else {
+ insertion_cursor.prev(&());
+ }
+ let insertion = insertion_cursor.item().expect("invalid insertion");
+ debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
- start_summary..end_summary
- })
+ let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, FullOffset)>();
+ fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None);
+ fragment_cursor.start().1 + (anchor.offset - insertion.split_offset)
+ }
}
pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
@@ -1703,139 +1811,22 @@ impl BufferSnapshot {
}
pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
- Anchor {
- full_offset: position.to_full_offset(self, bias),
- bias,
- version: self.version.clone(),
- }
- }
-
- pub fn anchor_map<T, E>(&self, bias: Bias, entries: E) -> AnchorMap<T>
- where
- E: IntoIterator<Item = (usize, T)>,
- {
- let version = self.version.clone();
- let mut cursor = self.fragments.cursor::<FragmentTextSummary>();
- let entries = entries
- .into_iter()
- .map(|(offset, value)| {
- cursor.seek_forward(&offset, bias, &None);
- let full_offset = FullOffset(cursor.start().deleted + offset);
- (full_offset, value)
- })
- .collect();
-
- AnchorMap {
- version,
- bias,
- entries,
- }
- }
-
- pub fn anchor_range_map<T, E>(
- &self,
- start_bias: Bias,
- end_bias: Bias,
- entries: E,
- ) -> AnchorRangeMap<T>
- where
- E: IntoIterator<Item = (Range<usize>, T)>,
- {
- let version = self.version.clone();
- let mut cursor = self.fragments.cursor::<FragmentTextSummary>();
- let entries = entries
- .into_iter()
- .map(|(range, value)| {
- let Range {
- start: start_offset,
- end: end_offset,
- } = range;
- cursor.seek_forward(&start_offset, start_bias, &None);
- let full_start_offset = FullOffset(cursor.start().deleted + start_offset);
- cursor.seek_forward(&end_offset, end_bias, &None);
- let full_end_offset = FullOffset(cursor.start().deleted + end_offset);
- (full_start_offset..full_end_offset, value)
- })
- .collect();
-
- AnchorRangeMap {
- version,
- start_bias,
- end_bias,
- entries,
- }
- }
-
- pub fn anchor_set<E>(&self, bias: Bias, entries: E) -> AnchorSet
- where
- E: IntoIterator<Item = usize>,
- {
- AnchorSet(self.anchor_map(bias, entries.into_iter().map(|range| (range, ()))))
- }
-
- pub fn anchor_range_set<E>(
- &self,
- start_bias: Bias,
- end_bias: Bias,
- entries: E,
- ) -> AnchorRangeSet
- where
- E: IntoIterator<Item = Range<usize>>,
- {
- AnchorRangeSet(self.anchor_range_map(
- start_bias,
- end_bias,
- entries.into_iter().map(|range| (range, ())),
- ))
- }
-
- pub fn anchor_range_multimap<T, E, O>(
- &self,
- start_bias: Bias,
- end_bias: Bias,
- entries: E,
- ) -> AnchorRangeMultimap<T>
- where
- T: Clone,
- E: IntoIterator<Item = (Range<O>, T)>,
- O: ToOffset,
- {
- let mut entries = entries
- .into_iter()
- .map(|(range, value)| AnchorRangeMultimapEntry {
- range: FullOffsetRange {
- start: range.start.to_full_offset(self, start_bias),
- end: range.end.to_full_offset(self, end_bias),
- },
- value,
- })
- .collect::<Vec<_>>();
- entries.sort_unstable_by_key(|i| (i.range.start, Reverse(i.range.end)));
- AnchorRangeMultimap {
- entries: SumTree::from_iter(entries, &()),
- version: self.version.clone(),
- start_bias,
- end_bias,
- }
- }
-
- fn full_offset_for_anchor(&self, anchor: &Anchor) -> FullOffset {
- let cx = Some(anchor.version.clone());
- let mut cursor = self
- .fragments
- .cursor::<(VersionedFullOffset, FragmentTextSummary)>();
- cursor.seek(
- &VersionedFullOffset::Offset(anchor.full_offset),
- anchor.bias,
- &cx,
- );
- let overshoot = if cursor.item().is_some() {
- anchor.full_offset - cursor.start().0.full_offset()
+ let offset = position.to_offset(self);
+ if bias == Bias::Left && offset == 0 {
+ Anchor::min()
+ } else if bias == Bias::Right && offset == self.len() {
+ Anchor::max()
} else {
- 0
- };
- let summary = cursor.start().1;
- FullOffset(summary.visible + summary.deleted + overshoot)
+ let mut fragment_cursor = self.fragments.cursor::<usize>();
+ fragment_cursor.seek(&offset, bias, &None);
+ let fragment = fragment_cursor.item().unwrap();
+ let overshoot = offset - *fragment_cursor.start();
+ Anchor {
+ timestamp: fragment.insertion_timestamp.local(),
+ offset: fragment.insertion_offset + overshoot,
+ bias,
+ }
+ }
}
pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
@@ -2052,13 +2043,13 @@ impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator fo
impl Fragment {
fn is_visible(&self, undos: &UndoMap) -> bool {
- !undos.is_undone(self.timestamp.local())
+ !undos.is_undone(self.insertion_timestamp.local())
&& self.deletions.iter().all(|d| undos.is_undone(*d))
}
fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
- (version.observed(self.timestamp.local())
- && !undos.was_undone(self.timestamp.local(), version))
+ (version.observed(self.insertion_timestamp.local())
+ && !undos.was_undone(self.insertion_timestamp.local(), version))
&& self
.deletions
.iter()
@@ -2071,17 +2062,18 @@ impl sum_tree::Item for Fragment {
fn summary(&self) -> Self::Summary {
let mut max_version = clock::Global::new();
- max_version.observe(self.timestamp.local());
+ max_version.observe(self.insertion_timestamp.local());
for deletion in &self.deletions {
max_version.observe(*deletion);
}
max_version.join(&self.max_undos);
let mut min_insertion_version = clock::Global::new();
- min_insertion_version.observe(self.timestamp.local());
+ min_insertion_version.observe(self.insertion_timestamp.local());
let max_insertion_version = min_insertion_version.clone();
if self.visible {
FragmentSummary {
+ max_id: self.id.clone(),
text: FragmentTextSummary {
visible: self.len,
deleted: 0,
@@ -2092,6 +2084,7 @@ impl sum_tree::Item for Fragment {
}
} else {
FragmentSummary {
+ max_id: self.id.clone(),
text: FragmentTextSummary {
visible: 0,
deleted: self.len,
@@ -2108,6 +2101,7 @@ impl sum_tree::Summary for FragmentSummary {
type Context = Option<clock::Global>;
fn add_summary(&mut self, other: &Self, _: &Self::Context) {
+ self.max_id.assign(&other.max_id);
self.text.visible += &other.text.visible;
self.text.deleted += &other.text.deleted;
self.max_version.join(&other.max_version);
@@ -2121,6 +2115,7 @@ impl sum_tree::Summary for FragmentSummary {
impl Default for FragmentSummary {
fn default() -> Self {
FragmentSummary {
+ max_id: Locator::min(),
text: FragmentTextSummary::default(),
max_version: clock::Global::new(),
min_insertion_version: clock::Global::new(),
@@ -2129,13 +2124,50 @@ impl Default for FragmentSummary {
}
}
-#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct FullOffset(pub usize);
+impl sum_tree::Item for InsertionFragment {
+ type Summary = InsertionFragmentKey;
-impl FullOffset {
- const MAX: Self = FullOffset(usize::MAX);
+ fn summary(&self) -> Self::Summary {
+ InsertionFragmentKey {
+ timestamp: self.timestamp,
+ split_offset: self.split_offset,
+ }
+ }
}
+impl sum_tree::KeyedItem for InsertionFragment {
+ type Key = InsertionFragmentKey;
+
+ fn key(&self) -> Self::Key {
+ sum_tree::Item::summary(self)
+ }
+}
+
+impl InsertionFragment {
+ fn new(fragment: &Fragment) -> Self {
+ Self {
+ timestamp: fragment.insertion_timestamp.local(),
+ split_offset: fragment.insertion_offset,
+ fragment_id: fragment.id.clone(),
+ }
+ }
+
+ fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
+ sum_tree::Edit::Insert(Self::new(fragment))
+ }
+}
+
+impl sum_tree::Summary for InsertionFragmentKey {
+ type Context = ();
+
+ fn add_summary(&mut self, summary: &Self, _: &()) {
+ *self = *summary;
+ }
+}
+
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct FullOffset(pub usize);
+
impl ops::AddAssign<usize> for FullOffset {
fn add_assign(&mut self, rhs: usize) {
self.0 += rhs;