port text2 to zed2

KCaverly created this pull request

Change summary

Cargo.lock                          |   25
crates/text2/Cargo.toml             |   37
crates/text2/src/anchor.rs          |  144
crates/text2/src/locator.rs         |  125
crates/text2/src/network.rs         |   69
crates/text2/src/operation_queue.rs |  153
crates/text2/src/patch.rs           |  594
crates/text2/src/selection.rs       |  123
crates/text2/src/subscription.rs    |   48
crates/text2/src/tests.rs           |  764
crates/text2/src/text2.rs           | 2682
crates/text2/src/undo_map.rs        |  112
crates/zed2/Cargo.toml              |    4
13 files changed, 4,877 insertions(+), 3 deletions(-)

Detailed changes

Cargo.lock

@@ -8806,6 +8806,29 @@ dependencies = [
  "util",
 ]
 
+[[package]]
+name = "text2"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "clock",
+ "collections",
+ "ctor",
+ "digest 0.9.0",
+ "env_logger 0.9.3",
+ "gpui2",
+ "lazy_static",
+ "log",
+ "parking_lot 0.11.2",
+ "postage",
+ "rand 0.8.5",
+ "regex",
+ "rope",
+ "smallvec",
+ "sum_tree",
+ "util",
+]
+
 [[package]]
 name = "textwrap"
 version = "0.16.0"
@@ -11052,7 +11075,7 @@ dependencies = [
  "smol",
  "sum_tree",
  "tempdir",
- "text",
+ "text2",
  "theme2",
  "thiserror",
  "tiny_http",

crates/text2/Cargo.toml

@@ -0,0 +1,37 @@
+[package]
+name = "text2"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/text2.rs"
+doctest = false
+
+[features]
+test-support = ["rand"]
+
+[dependencies]
+clock = { path = "../clock" }
+collections = { path = "../collections" }
+rope = { path = "../rope" }
+sum_tree = { path = "../sum_tree" }
+util = { path = "../util" }
+
+anyhow.workspace = true
+digest = { version = "0.9", features = ["std"] }
+lazy_static.workspace = true
+log.workspace = true
+parking_lot.workspace = true
+postage.workspace = true
+rand = { workspace = true, optional = true }
+smallvec.workspace = true
+regex.workspace = true
+
+[dev-dependencies]
+collections = { path = "../collections", features = ["test-support"] }
+gpui2 = { path = "../gpui2", features = ["test-support"] }
+util = { path = "../util", features = ["test-support"] }
+ctor.workspace = true
+env_logger.workspace = true
+rand.workspace = true

crates/text2/src/anchor.rs

@@ -0,0 +1,144 @@
+use crate::{
+    locator::Locator, BufferSnapshot, Point, PointUtf16, TextDimension, ToOffset, ToPoint,
+    ToPointUtf16,
+};
+use anyhow::Result;
+use std::{cmp::Ordering, fmt::Debug, ops::Range};
+use sum_tree::Bias;
+
+#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash, Default)]
+pub struct Anchor {
+    pub timestamp: clock::Lamport,
+    pub offset: usize,
+    pub bias: Bias,
+    pub buffer_id: Option<u64>,
+}
+
+impl Anchor {
+    pub const MIN: Self = Self {
+        timestamp: clock::Lamport::MIN,
+        offset: usize::MIN,
+        bias: Bias::Left,
+        buffer_id: None,
+    };
+
+    pub const MAX: Self = Self {
+        timestamp: clock::Lamport::MAX,
+        offset: usize::MAX,
+        bias: Bias::Right,
+        buffer_id: None,
+    };
+
+    pub fn cmp(&self, other: &Anchor, buffer: &BufferSnapshot) -> Ordering {
+        let fragment_id_comparison = if self.timestamp == other.timestamp {
+            Ordering::Equal
+        } else {
+            buffer
+                .fragment_id_for_anchor(self)
+                .cmp(buffer.fragment_id_for_anchor(other))
+        };
+
+        fragment_id_comparison
+            .then_with(|| self.offset.cmp(&other.offset))
+            .then_with(|| self.bias.cmp(&other.bias))
+    }
+
+    pub fn min(&self, other: &Self, buffer: &BufferSnapshot) -> Self {
+        if self.cmp(other, buffer).is_le() {
+            *self
+        } else {
+            *other
+        }
+    }
+
+    pub fn max(&self, other: &Self, buffer: &BufferSnapshot) -> Self {
+        if self.cmp(other, buffer).is_ge() {
+            *self
+        } else {
+            *other
+        }
+    }
+
+    pub fn bias(&self, bias: Bias, buffer: &BufferSnapshot) -> Anchor {
+        if bias == Bias::Left {
+            self.bias_left(buffer)
+        } else {
+            self.bias_right(buffer)
+        }
+    }
+
+    pub fn bias_left(&self, buffer: &BufferSnapshot) -> Anchor {
+        if self.bias == Bias::Left {
+            *self
+        } else {
+            buffer.anchor_before(self)
+        }
+    }
+
+    pub fn bias_right(&self, buffer: &BufferSnapshot) -> Anchor {
+        if self.bias == Bias::Right {
+            *self
+        } else {
+            buffer.anchor_after(self)
+        }
+    }
+
+    pub fn summary<D>(&self, content: &BufferSnapshot) -> D
+    where
+        D: TextDimension,
+    {
+        content.summary_for_anchor(self)
+    }
+
+    /// Returns true when the [Anchor] is located inside a visible fragment.
+    pub fn is_valid(&self, buffer: &BufferSnapshot) -> bool {
+        if *self == Anchor::MIN || *self == Anchor::MAX {
+            true
+        } else {
+            let fragment_id = buffer.fragment_id_for_anchor(self);
+            let mut fragment_cursor = buffer.fragments.cursor::<(Option<&Locator>, usize)>();
+            fragment_cursor.seek(&Some(fragment_id), Bias::Left, &None);
+            fragment_cursor
+                .item()
+                .map_or(false, |fragment| fragment.visible)
+        }
+    }
+}
+
+pub trait OffsetRangeExt {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> Range<usize>;
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Range<Point>;
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> Range<PointUtf16>;
+}
+
+impl<T> OffsetRangeExt for Range<T>
+where
+    T: ToOffset,
+{
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> Range<usize> {
+        self.start.to_offset(snapshot)..self.end.to_offset(snapshot)
+    }
+
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Range<Point> {
+        self.start.to_offset(snapshot).to_point(snapshot)
+            ..self.end.to_offset(snapshot).to_point(snapshot)
+    }
+
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> Range<PointUtf16> {
+        self.start.to_offset(snapshot).to_point_utf16(snapshot)
+            ..self.end.to_offset(snapshot).to_point_utf16(snapshot)
+    }
+}
+
+pub trait AnchorRangeExt {
+    fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering>;
+}
+
+impl AnchorRangeExt for Range<Anchor> {
+    fn cmp(&self, other: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering> {
+        Ok(match self.start.cmp(&other.start, buffer) {
+            Ordering::Equal => other.end.cmp(&self.end, buffer),
+            ord => ord,
+        })
+    }
+}
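
`anchor.rs` defines `Anchor`, a buffer position that stays meaningful across concurrent edits: it records the Lamport timestamp of the insertion it is anchored to, an offset, and a `Bias` deciding which side the anchor sticks to when text is inserted exactly at its position. Below is a minimal standalone sketch of the bias rule (a toy model, not the crate's types); it mirrors the behavior exercised by `test_anchors_at_start_and_end` in `tests.rs`.

```rust
use std::cmp::Ordering;

// Toy model of anchor bias, independent of the text2 types: a left-biased
// anchor stays before text inserted exactly at its position, while a
// right-biased anchor ends up after it (compare `anchor_before` / `anchor_after`).
#[derive(Clone, Copy)]
enum Bias {
    Left,
    Right,
}

struct ToyAnchor {
    offset: usize,
    bias: Bias,
}

fn resolve_after_insert(anchor: &ToyAnchor, insert_at: usize, insert_len: usize) -> usize {
    match anchor.offset.cmp(&insert_at) {
        Ordering::Less => anchor.offset,
        Ordering::Greater => anchor.offset + insert_len,
        Ordering::Equal => match anchor.bias {
            Bias::Left => anchor.offset,
            Bias::Right => anchor.offset + insert_len,
        },
    }
}

fn main() {
    let before_start = ToyAnchor { offset: 0, bias: Bias::Left }; // like `anchor_before(0)`
    let after_end = ToyAnchor { offset: 0, bias: Bias::Right }; // like `anchor_after(0)`

    // Insert "abc" at offset 0 of an empty buffer.
    assert_eq!(resolve_after_insert(&before_start, 0, 3), 0);
    assert_eq!(resolve_after_insert(&after_end, 0, 3), 3);
}
```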

crates/text2/src/locator.rs

@@ -0,0 +1,125 @@
+use lazy_static::lazy_static;
+use smallvec::{smallvec, SmallVec};
+use std::iter;
+
+lazy_static! {
+    static ref MIN: Locator = Locator::min();
+    static ref MAX: Locator = Locator::max();
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Locator(SmallVec<[u64; 4]>);
+
+impl Locator {
+    pub fn min() -> Self {
+        Self(smallvec![u64::MIN])
+    }
+
+    pub fn max() -> Self {
+        Self(smallvec![u64::MAX])
+    }
+
+    pub fn min_ref() -> &'static Self {
+        &*MIN
+    }
+
+    pub fn max_ref() -> &'static Self {
+        &*MAX
+    }
+
+    pub fn assign(&mut self, other: &Self) {
+        self.0.resize(other.0.len(), 0);
+        self.0.copy_from_slice(&other.0);
+    }
+
+    pub fn between(lhs: &Self, rhs: &Self) -> Self {
+        let lhs = lhs.0.iter().copied().chain(iter::repeat(u64::MIN));
+        let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX));
+        let mut location = SmallVec::new();
+        for (lhs, rhs) in lhs.zip(rhs) {
+            let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48);
+            location.push(mid);
+            if mid > lhs {
+                break;
+            }
+        }
+        Self(location)
+    }
+
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+}
+
+impl Default for Locator {
+    fn default() -> Self {
+        Self::min()
+    }
+}
+
+impl sum_tree::Item for Locator {
+    type Summary = Locator;
+
+    fn summary(&self) -> Self::Summary {
+        self.clone()
+    }
+}
+
+impl sum_tree::KeyedItem for Locator {
+    type Key = Locator;
+
+    fn key(&self) -> Self::Key {
+        self.clone()
+    }
+}
+
+impl sum_tree::Summary for Locator {
+    type Context = ();
+
+    fn add_summary(&mut self, summary: &Self, _: &()) {
+        self.assign(summary);
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use rand::prelude::*;
+    use std::mem;
+
+    #[gpui2::test(iterations = 100)]
+    fn test_locators(mut rng: StdRng) {
+        let mut lhs = Default::default();
+        let mut rhs = Default::default();
+        while lhs == rhs {
+            lhs = Locator(
+                (0..rng.gen_range(1..=5))
+                    .map(|_| rng.gen_range(0..=100))
+                    .collect(),
+            );
+            rhs = Locator(
+                (0..rng.gen_range(1..=5))
+                    .map(|_| rng.gen_range(0..=100))
+                    .collect(),
+            );
+        }
+
+        if lhs > rhs {
+            mem::swap(&mut lhs, &mut rhs);
+        }
+
+        let middle = Locator::between(&lhs, &rhs);
+        assert!(middle > lhs);
+        assert!(middle < rhs);
+        for ix in 0..middle.0.len() - 1 {
+            assert!(
+                middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0)
+                    || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0)
+            );
+        }
+    }
+}
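
`Locator` gives each fragment a dense, lexicographically ordered identifier, and `Locator::between` builds a new identifier strictly between two existing ones, so fragments can be inserted anywhere without renumbering their neighbors. The standalone sketch below illustrates the idea; it is not the crate's exact algorithm (which biases the result toward the left neighbor via a large shift) and it assumes there is actually room between the two inputs.

```rust
// Simplified, illustrative version of "find an identifier between two others".
// Identifiers are compared lexicographically, with missing digits treated as
// u64::MIN on the lower bound and u64::MAX on the upper bound.
fn between(lhs: &[u64], rhs: &[u64]) -> Vec<u64> {
    assert!(lhs < rhs, "caller must pass lhs < rhs");
    let mut result = Vec::new();
    let mut bounded_above = true; // false once `result` is already strictly below `rhs`
    let mut ix = 0;
    loop {
        let lo = *lhs.get(ix).unwrap_or(&u64::MIN);
        let hi = if bounded_above {
            *rhs.get(ix).unwrap_or(&u64::MAX)
        } else {
            u64::MAX
        };
        if hi - lo > 1 {
            // Enough room at this digit: pick a value strictly inside the gap.
            result.push(lo + (hi - lo) / 2);
            return result;
        }
        // No room yet: copy the lower digit and descend another level.
        result.push(lo);
        if lo < hi {
            bounded_above = false;
        }
        ix += 1;
    }
}

fn main() {
    let (lhs, rhs) = (vec![1, 5], vec![1, 6]);
    let mid = between(&lhs, &rhs);
    assert!(lhs < mid && mid < rhs);
    println!("{:?} < {:?} < {:?}", lhs, mid, rhs);
}
```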

crates/text2/src/network.rs

@@ -0,0 +1,69 @@
+use clock::ReplicaId;
+
+pub struct Network<T: Clone, R: rand::Rng> {
+    inboxes: std::collections::BTreeMap<ReplicaId, Vec<Envelope<T>>>,
+    all_messages: Vec<T>,
+    rng: R,
+}
+
+#[derive(Clone)]
+struct Envelope<T: Clone> {
+    message: T,
+}
+
+impl<T: Clone, R: rand::Rng> Network<T, R> {
+    pub fn new(rng: R) -> Self {
+        Network {
+            inboxes: Default::default(),
+            all_messages: Vec::new(),
+            rng,
+        }
+    }
+
+    pub fn add_peer(&mut self, id: ReplicaId) {
+        self.inboxes.insert(id, Vec::new());
+    }
+
+    pub fn replicate(&mut self, old_replica_id: ReplicaId, new_replica_id: ReplicaId) {
+        self.inboxes
+            .insert(new_replica_id, self.inboxes[&old_replica_id].clone());
+    }
+
+    pub fn is_idle(&self) -> bool {
+        self.inboxes.values().all(|i| i.is_empty())
+    }
+
+    pub fn broadcast(&mut self, sender: ReplicaId, messages: Vec<T>) {
+        for (replica, inbox) in self.inboxes.iter_mut() {
+            if *replica != sender {
+                for message in &messages {
+                    // Insert one or more duplicates of this message, potentially *before* the previous
+                    // message sent by this peer to simulate out-of-order delivery.
+                    for _ in 0..self.rng.gen_range(1..4) {
+                        let insertion_index = self.rng.gen_range(0..inbox.len() + 1);
+                        inbox.insert(
+                            insertion_index,
+                            Envelope {
+                                message: message.clone(),
+                            },
+                        );
+                    }
+                }
+            }
+        }
+        self.all_messages.extend(messages);
+    }
+
+    pub fn has_unreceived(&self, receiver: ReplicaId) -> bool {
+        !self.inboxes[&receiver].is_empty()
+    }
+
+    pub fn receive(&mut self, receiver: ReplicaId) -> Vec<T> {
+        let inbox = self.inboxes.get_mut(&receiver).unwrap();
+        let count = self.rng.gen_range(0..inbox.len() + 1);
+        inbox
+            .drain(0..count)
+            .map(|envelope| envelope.message)
+            .collect()
+    }
+}
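
`network.rs` is a test-support harness that simulates hostile message delivery between replicas: `broadcast` inserts one or more duplicates of every message at random positions in each other peer's inbox, and `receive` drains a random-length prefix. A hypothetical driver loop, assuming the `Network` API above is in scope and that `clock::ReplicaId` is a plain integer id, might look like this:

```rust
use rand::{rngs::StdRng, SeedableRng};

// Sketch only: drives the `Network` harness above until every queued message
// has been delivered to replica 1.
fn deliver_everything() {
    let mut network: Network<&'static str, StdRng> = Network::new(StdRng::seed_from_u64(0));
    network.add_peer(0);
    network.add_peer(1);

    // Replica 0 broadcasts two operations; the harness may duplicate and reorder them.
    network.broadcast(0, vec!["op a", "op b"]);

    while network.has_unreceived(1) {
        for message in network.receive(1) {
            // A real test would apply `message` to replica 1 here.
            let _ = message;
        }
    }
    assert!(network.is_idle());
}
```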

crates/text2/src/operation_queue.rs

@@ -0,0 +1,153 @@
+use std::{fmt::Debug, ops::Add};
+use sum_tree::{Dimension, Edit, Item, KeyedItem, SumTree, Summary};
+
+pub trait Operation: Clone + Debug {
+    fn lamport_timestamp(&self) -> clock::Lamport;
+}
+
+#[derive(Clone, Debug)]
+struct OperationItem<T>(T);
+
+#[derive(Clone, Debug)]
+pub struct OperationQueue<T: Operation>(SumTree<OperationItem<T>>);
+
+#[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]
+pub struct OperationKey(clock::Lamport);
+
+#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
+pub struct OperationSummary {
+    pub key: OperationKey,
+    pub len: usize,
+}
+
+impl OperationKey {
+    pub fn new(timestamp: clock::Lamport) -> Self {
+        Self(timestamp)
+    }
+}
+
+impl<T: Operation> Default for OperationQueue<T> {
+    fn default() -> Self {
+        OperationQueue::new()
+    }
+}
+
+impl<T: Operation> OperationQueue<T> {
+    pub fn new() -> Self {
+        OperationQueue(SumTree::new())
+    }
+
+    pub fn len(&self) -> usize {
+        self.0.summary().len
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    pub fn insert(&mut self, mut ops: Vec<T>) {
+        ops.sort_by_key(|op| op.lamport_timestamp());
+        ops.dedup_by_key(|op| op.lamport_timestamp());
+        self.0.edit(
+            ops.into_iter()
+                .map(|op| Edit::Insert(OperationItem(op)))
+                .collect(),
+            &(),
+        );
+    }
+
+    pub fn drain(&mut self) -> Self {
+        let clone = self.clone();
+        self.0 = SumTree::new();
+        clone
+    }
+
+    pub fn iter(&self) -> impl Iterator<Item = &T> {
+        self.0.iter().map(|i| &i.0)
+    }
+}
+
+impl Summary for OperationSummary {
+    type Context = ();
+
+    fn add_summary(&mut self, other: &Self, _: &()) {
+        assert!(self.key < other.key);
+        self.key = other.key;
+        self.len += other.len;
+    }
+}
+
+impl<'a> Add<&'a Self> for OperationSummary {
+    type Output = Self;
+
+    fn add(self, other: &Self) -> Self {
+        assert!(self.key < other.key);
+        OperationSummary {
+            key: other.key,
+            len: self.len + other.len,
+        }
+    }
+}
+
+impl<'a> Dimension<'a, OperationSummary> for OperationKey {
+    fn add_summary(&mut self, summary: &OperationSummary, _: &()) {
+        assert!(*self <= summary.key);
+        *self = summary.key;
+    }
+}
+
+impl<T: Operation> Item for OperationItem<T> {
+    type Summary = OperationSummary;
+
+    fn summary(&self) -> Self::Summary {
+        OperationSummary {
+            key: OperationKey::new(self.0.lamport_timestamp()),
+            len: 1,
+        }
+    }
+}
+
+impl<T: Operation> KeyedItem for OperationItem<T> {
+    type Key = OperationKey;
+
+    fn key(&self) -> Self::Key {
+        OperationKey::new(self.0.lamport_timestamp())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_len() {
+        let mut clock = clock::Lamport::new(0);
+
+        let mut queue = OperationQueue::new();
+        assert_eq!(queue.len(), 0);
+
+        queue.insert(vec![
+            TestOperation(clock.tick()),
+            TestOperation(clock.tick()),
+        ]);
+        assert_eq!(queue.len(), 2);
+
+        queue.insert(vec![TestOperation(clock.tick())]);
+        assert_eq!(queue.len(), 3);
+
+        drop(queue.drain());
+        assert_eq!(queue.len(), 0);
+
+        queue.insert(vec![TestOperation(clock.tick())]);
+        assert_eq!(queue.len(), 1);
+    }
+
+    #[derive(Clone, Debug, Eq, PartialEq)]
+    struct TestOperation(clock::Lamport);
+
+    impl Operation for TestOperation {
+        fn lamport_timestamp(&self) -> clock::Lamport {
+            self.0
+        }
+    }
+}
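
`OperationQueue` keeps deferred operations in a `SumTree` keyed by Lamport timestamp, so operations that arrive out of order can later be drained in a single, causally consistent order on every replica. A standalone sketch of that ordering idea, using a `BTreeMap` as a stand-in and assuming Lamport timestamps compare by value first and replica id second:

```rust
use std::collections::BTreeMap;

// Simplified stand-in for `clock::Lamport` (assumption: ordered by value, then replica id).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct Lamport {
    value: u32,
    replica_id: u16,
}

fn main() {
    // Operations are inserted in whatever order they happen to arrive...
    let mut queue: BTreeMap<Lamport, &'static str> = BTreeMap::new();
    queue.insert(Lamport { value: 3, replica_id: 0 }, "third");
    queue.insert(Lamport { value: 1, replica_id: 1 }, "first");
    queue.insert(Lamport { value: 2, replica_id: 0 }, "second");

    // ...but draining them always yields the same timestamp order.
    let drained: Vec<_> = queue.into_values().collect();
    assert_eq!(drained, ["first", "second", "third"]);
}
```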

crates/text2/src/patch.rs

@@ -0,0 +1,594 @@
+use crate::Edit;
+use std::{
+    cmp, mem,
+    ops::{Add, AddAssign, Sub},
+};
+
+#[derive(Clone, Default, Debug, PartialEq, Eq)]
+pub struct Patch<T>(Vec<Edit<T>>);
+
+impl<T> Patch<T>
+where
+    T: 'static
+        + Clone
+        + Copy
+        + Ord
+        + Sub<T, Output = T>
+        + Add<T, Output = T>
+        + AddAssign
+        + Default
+        + PartialEq,
+{
+    pub fn new(edits: Vec<Edit<T>>) -> Self {
+        #[cfg(debug_assertions)]
+        {
+            let mut last_edit: Option<&Edit<T>> = None;
+            for edit in &edits {
+                if let Some(last_edit) = last_edit {
+                    assert!(edit.old.start > last_edit.old.end);
+                    assert!(edit.new.start > last_edit.new.end);
+                }
+                last_edit = Some(edit);
+            }
+        }
+        Self(edits)
+    }
+
+    pub fn edits(&self) -> &[Edit<T>] {
+        &self.0
+    }
+
+    pub fn into_inner(self) -> Vec<Edit<T>> {
+        self.0
+    }
+
+    pub fn compose(&self, new_edits_iter: impl IntoIterator<Item = Edit<T>>) -> Self {
+        let mut old_edits_iter = self.0.iter().cloned().peekable();
+        let mut new_edits_iter = new_edits_iter.into_iter().peekable();
+        let mut composed = Patch(Vec::new());
+
+        let mut old_start = T::default();
+        let mut new_start = T::default();
+        loop {
+            let old_edit = old_edits_iter.peek_mut();
+            let new_edit = new_edits_iter.peek_mut();
+
+            // Push the old edit if its new end is before the new edit's old start.
+            if let Some(old_edit) = old_edit.as_ref() {
+                let new_edit = new_edit.as_ref();
+                if new_edit.map_or(true, |new_edit| old_edit.new.end < new_edit.old.start) {
+                    let catchup = old_edit.old.start - old_start;
+                    old_start += catchup;
+                    new_start += catchup;
+
+                    let old_end = old_start + old_edit.old_len();
+                    let new_end = new_start + old_edit.new_len();
+                    composed.push(Edit {
+                        old: old_start..old_end,
+                        new: new_start..new_end,
+                    });
+                    old_start = old_end;
+                    new_start = new_end;
+                    old_edits_iter.next();
+                    continue;
+                }
+            }
+
+            // Push the new edit if its old end is before the old edit's new start.
+            if let Some(new_edit) = new_edit.as_ref() {
+                let old_edit = old_edit.as_ref();
+                if old_edit.map_or(true, |old_edit| new_edit.old.end < old_edit.new.start) {
+                    let catchup = new_edit.new.start - new_start;
+                    old_start += catchup;
+                    new_start += catchup;
+
+                    let old_end = old_start + new_edit.old_len();
+                    let new_end = new_start + new_edit.new_len();
+                    composed.push(Edit {
+                        old: old_start..old_end,
+                        new: new_start..new_end,
+                    });
+                    old_start = old_end;
+                    new_start = new_end;
+                    new_edits_iter.next();
+                    continue;
+                }
+            }
+
+            // If we still have edits by this point then they must intersect, so we compose them.
+            if let Some((old_edit, new_edit)) = old_edit.zip(new_edit) {
+                if old_edit.new.start < new_edit.old.start {
+                    let catchup = old_edit.old.start - old_start;
+                    old_start += catchup;
+                    new_start += catchup;
+
+                    let overshoot = new_edit.old.start - old_edit.new.start;
+                    let old_end = cmp::min(old_start + overshoot, old_edit.old.end);
+                    let new_end = new_start + overshoot;
+                    composed.push(Edit {
+                        old: old_start..old_end,
+                        new: new_start..new_end,
+                    });
+
+                    old_edit.old.start = old_end;
+                    old_edit.new.start += overshoot;
+                    old_start = old_end;
+                    new_start = new_end;
+                } else {
+                    let catchup = new_edit.new.start - new_start;
+                    old_start += catchup;
+                    new_start += catchup;
+
+                    let overshoot = old_edit.new.start - new_edit.old.start;
+                    let old_end = old_start + overshoot;
+                    let new_end = cmp::min(new_start + overshoot, new_edit.new.end);
+                    composed.push(Edit {
+                        old: old_start..old_end,
+                        new: new_start..new_end,
+                    });
+
+                    new_edit.old.start += overshoot;
+                    new_edit.new.start = new_end;
+                    old_start = old_end;
+                    new_start = new_end;
+                }
+
+                if old_edit.new.end > new_edit.old.end {
+                    let old_end = old_start + cmp::min(old_edit.old_len(), new_edit.old_len());
+                    let new_end = new_start + new_edit.new_len();
+                    composed.push(Edit {
+                        old: old_start..old_end,
+                        new: new_start..new_end,
+                    });
+
+                    old_edit.old.start = old_end;
+                    old_edit.new.start = new_edit.old.end;
+                    old_start = old_end;
+                    new_start = new_end;
+                    new_edits_iter.next();
+                } else {
+                    let old_end = old_start + old_edit.old_len();
+                    let new_end = new_start + cmp::min(old_edit.new_len(), new_edit.new_len());
+                    composed.push(Edit {
+                        old: old_start..old_end,
+                        new: new_start..new_end,
+                    });
+
+                    new_edit.old.start = old_edit.new.end;
+                    new_edit.new.start = new_end;
+                    old_start = old_end;
+                    new_start = new_end;
+                    old_edits_iter.next();
+                }
+            } else {
+                break;
+            }
+        }
+
+        composed
+    }
+
+    pub fn invert(&mut self) -> &mut Self {
+        for edit in &mut self.0 {
+            mem::swap(&mut edit.old, &mut edit.new);
+        }
+        self
+    }
+
+    pub fn clear(&mut self) {
+        self.0.clear();
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    pub fn push(&mut self, edit: Edit<T>) {
+        if edit.is_empty() {
+            return;
+        }
+
+        if let Some(last) = self.0.last_mut() {
+            if last.old.end >= edit.old.start {
+                last.old.end = edit.old.end;
+                last.new.end = edit.new.end;
+            } else {
+                self.0.push(edit);
+            }
+        } else {
+            self.0.push(edit);
+        }
+    }
+
+    pub fn old_to_new(&self, old: T) -> T {
+        let ix = match self.0.binary_search_by(|probe| probe.old.start.cmp(&old)) {
+            Ok(ix) => ix,
+            Err(ix) => {
+                if ix == 0 {
+                    return old;
+                } else {
+                    ix - 1
+                }
+            }
+        };
+        if let Some(edit) = self.0.get(ix) {
+            if old >= edit.old.end {
+                edit.new.end + (old - edit.old.end)
+            } else {
+                edit.new.start
+            }
+        } else {
+            old
+        }
+    }
+}
+
+impl<T: Clone> IntoIterator for Patch<T> {
+    type Item = Edit<T>;
+    type IntoIter = std::vec::IntoIter<Edit<T>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.into_iter()
+    }
+}
+
+impl<'a, T: Clone> IntoIterator for &'a Patch<T> {
+    type Item = Edit<T>;
+    type IntoIter = std::iter::Cloned<std::slice::Iter<'a, Edit<T>>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.iter().cloned()
+    }
+}
+
+impl<'a, T: Clone> IntoIterator for &'a mut Patch<T> {
+    type Item = Edit<T>;
+    type IntoIter = std::iter::Cloned<std::slice::Iter<'a, Edit<T>>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.iter().cloned()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use rand::prelude::*;
+    use std::env;
+
+    #[gpui2::test]
+    fn test_one_disjoint_edit() {
+        assert_patch_composition(
+            Patch(vec![Edit {
+                old: 1..3,
+                new: 1..4,
+            }]),
+            Patch(vec![Edit {
+                old: 0..0,
+                new: 0..4,
+            }]),
+            Patch(vec![
+                Edit {
+                    old: 0..0,
+                    new: 0..4,
+                },
+                Edit {
+                    old: 1..3,
+                    new: 5..8,
+                },
+            ]),
+        );
+
+        assert_patch_composition(
+            Patch(vec![Edit {
+                old: 1..3,
+                new: 1..4,
+            }]),
+            Patch(vec![Edit {
+                old: 5..9,
+                new: 5..7,
+            }]),
+            Patch(vec![
+                Edit {
+                    old: 1..3,
+                    new: 1..4,
+                },
+                Edit {
+                    old: 4..8,
+                    new: 5..7,
+                },
+            ]),
+        );
+    }
+
+    #[gpui2::test]
+    fn test_one_overlapping_edit() {
+        assert_patch_composition(
+            Patch(vec![Edit {
+                old: 1..3,
+                new: 1..4,
+            }]),
+            Patch(vec![Edit {
+                old: 3..5,
+                new: 3..6,
+            }]),
+            Patch(vec![Edit {
+                old: 1..4,
+                new: 1..6,
+            }]),
+        );
+    }
+
+    #[gpui2::test]
+    fn test_two_disjoint_and_overlapping() {
+        assert_patch_composition(
+            Patch(vec![
+                Edit {
+                    old: 1..3,
+                    new: 1..4,
+                },
+                Edit {
+                    old: 8..12,
+                    new: 9..11,
+                },
+            ]),
+            Patch(vec![
+                Edit {
+                    old: 0..0,
+                    new: 0..4,
+                },
+                Edit {
+                    old: 3..10,
+                    new: 7..9,
+                },
+            ]),
+            Patch(vec![
+                Edit {
+                    old: 0..0,
+                    new: 0..4,
+                },
+                Edit {
+                    old: 1..12,
+                    new: 5..10,
+                },
+            ]),
+        );
+    }
+
+    #[gpui2::test]
+    fn test_two_new_edits_overlapping_one_old_edit() {
+        assert_patch_composition(
+            Patch(vec![Edit {
+                old: 0..0,
+                new: 0..3,
+            }]),
+            Patch(vec![
+                Edit {
+                    old: 0..0,
+                    new: 0..1,
+                },
+                Edit {
+                    old: 1..2,
+                    new: 2..2,
+                },
+            ]),
+            Patch(vec![Edit {
+                old: 0..0,
+                new: 0..3,
+            }]),
+        );
+
+        assert_patch_composition(
+            Patch(vec![Edit {
+                old: 2..3,
+                new: 2..4,
+            }]),
+            Patch(vec![
+                Edit {
+                    old: 0..2,
+                    new: 0..1,
+                },
+                Edit {
+                    old: 3..3,
+                    new: 2..5,
+                },
+            ]),
+            Patch(vec![Edit {
+                old: 0..3,
+                new: 0..6,
+            }]),
+        );
+
+        assert_patch_composition(
+            Patch(vec![Edit {
+                old: 0..0,
+                new: 0..2,
+            }]),
+            Patch(vec![
+                Edit {
+                    old: 0..0,
+                    new: 0..2,
+                },
+                Edit {
+                    old: 2..5,
+                    new: 4..4,
+                },
+            ]),
+            Patch(vec![Edit {
+                old: 0..3,
+                new: 0..4,
+            }]),
+        );
+    }
+
+    #[gpui2::test]
+    fn test_two_new_edits_touching_one_old_edit() {
+        assert_patch_composition(
+            Patch(vec![
+                Edit {
+                    old: 2..3,
+                    new: 2..4,
+                },
+                Edit {
+                    old: 7..7,
+                    new: 8..11,
+                },
+            ]),
+            Patch(vec![
+                Edit {
+                    old: 2..3,
+                    new: 2..2,
+                },
+                Edit {
+                    old: 4..4,
+                    new: 3..4,
+                },
+            ]),
+            Patch(vec![
+                Edit {
+                    old: 2..3,
+                    new: 2..4,
+                },
+                Edit {
+                    old: 7..7,
+                    new: 8..11,
+                },
+            ]),
+        );
+    }
+
+    #[gpui2::test]
+    fn test_old_to_new() {
+        let patch = Patch(vec![
+            Edit {
+                old: 2..4,
+                new: 2..4,
+            },
+            Edit {
+                old: 7..8,
+                new: 7..11,
+            },
+        ]);
+        assert_eq!(patch.old_to_new(0), 0);
+        assert_eq!(patch.old_to_new(1), 1);
+        assert_eq!(patch.old_to_new(2), 2);
+        assert_eq!(patch.old_to_new(3), 2);
+        assert_eq!(patch.old_to_new(4), 4);
+        assert_eq!(patch.old_to_new(5), 5);
+        assert_eq!(patch.old_to_new(6), 6);
+        assert_eq!(patch.old_to_new(7), 7);
+        assert_eq!(patch.old_to_new(8), 11);
+        assert_eq!(patch.old_to_new(9), 12);
+    }
+
+    #[gpui2::test(iterations = 100)]
+    fn test_random_patch_compositions(mut rng: StdRng) {
+        let operations = env::var("OPERATIONS")
+            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+            .unwrap_or(20);
+
+        let initial_chars = (0..rng.gen_range(0..=100))
+            .map(|_| rng.gen_range(b'a'..=b'z') as char)
+            .collect::<Vec<_>>();
+        log::info!("initial chars: {:?}", initial_chars);
+
+        // Generate two sequential patches
+        let mut patches = Vec::new();
+        let mut expected_chars = initial_chars.clone();
+        for i in 0..2 {
+            log::info!("patch {}:", i);
+
+            let mut delta = 0i32;
+            let mut last_edit_end = 0;
+            let mut edits = Vec::new();
+
+            for _ in 0..operations {
+                if last_edit_end >= expected_chars.len() {
+                    break;
+                }
+
+                let end = rng.gen_range(last_edit_end..=expected_chars.len());
+                let start = rng.gen_range(last_edit_end..=end);
+                let old_len = end - start;
+
+                let mut new_len = rng.gen_range(0..=3);
+                if start == end && new_len == 0 {
+                    new_len += 1;
+                }
+
+                last_edit_end = start + new_len + 1;
+
+                let new_chars = (0..new_len)
+                    .map(|_| rng.gen_range(b'A'..=b'Z') as char)
+                    .collect::<Vec<_>>();
+                log::info!(
+                    "  editing {:?}: {:?}",
+                    start..end,
+                    new_chars.iter().collect::<String>()
+                );
+                edits.push(Edit {
+                    old: (start as i32 - delta) as u32..(end as i32 - delta) as u32,
+                    new: start as u32..(start + new_len) as u32,
+                });
+                expected_chars.splice(start..end, new_chars);
+
+                delta += new_len as i32 - old_len as i32;
+            }
+
+            patches.push(Patch(edits));
+        }
+
+        log::info!("old patch: {:?}", &patches[0]);
+        log::info!("new patch: {:?}", &patches[1]);
+        log::info!("initial chars: {:?}", initial_chars);
+        log::info!("final chars: {:?}", expected_chars);
+
+        // Compose the patches, and verify that it has the same effect as applying the
+        // two patches separately.
+        let composed = patches[0].compose(&patches[1]);
+        log::info!("composed patch: {:?}", &composed);
+
+        let mut actual_chars = initial_chars;
+        for edit in composed.0 {
+            actual_chars.splice(
+                edit.new.start as usize..edit.new.start as usize + edit.old.len(),
+                expected_chars[edit.new.start as usize..edit.new.end as usize]
+                    .iter()
+                    .copied(),
+            );
+        }
+
+        assert_eq!(actual_chars, expected_chars);
+    }
+
+    #[track_caller]
+    fn assert_patch_composition(old: Patch<u32>, new: Patch<u32>, composed: Patch<u32>) {
+        let original = ('a'..'z').collect::<Vec<_>>();
+        let inserted = ('A'..'Z').collect::<Vec<_>>();
+
+        let mut expected = original.clone();
+        apply_patch(&mut expected, &old, &inserted);
+        apply_patch(&mut expected, &new, &inserted);
+
+        let mut actual = original;
+        apply_patch(&mut actual, &composed, &expected);
+        assert_eq!(
+            actual.into_iter().collect::<String>(),
+            expected.into_iter().collect::<String>(),
+            "expected patch is incorrect"
+        );
+
+        assert_eq!(old.compose(&new), composed);
+    }
+
+    fn apply_patch(text: &mut Vec<char>, patch: &Patch<u32>, new_text: &[char]) {
+        for edit in patch.0.iter().rev() {
+            text.splice(
+                edit.old.start as usize..edit.old.end as usize,
+                new_text[edit.new.start as usize..edit.new.end as usize]
+                    .iter()
+                    .copied(),
+            );
+        }
+    }
+}
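
`Patch` is an ordered, disjoint set of edits mapping old coordinates to new ones; `compose` merges two sequential patches into one equivalent patch, and `old_to_new` translates an offset from before the patch to after it. The sketch below re-implements the `old_to_new` mapping in isolation (an illustration, not the crate's code) and checks it against the same values as `test_old_to_new` above.

```rust
use std::ops::Range;

struct SimpleEdit {
    old: Range<u32>,
    new: Range<u32>,
}

// Translate an offset in the old text to an offset in the new text, given a
// sorted, disjoint list of edits expressed in absolute coordinates.
fn old_to_new(edits: &[SimpleEdit], old: u32) -> u32 {
    let mut new = old;
    for edit in edits {
        if old < edit.old.start {
            // Before this edit (and all later ones): nothing more to adjust.
            break;
        } else if old < edit.old.end {
            // Inside the replaced range: clamp to where the new text begins.
            return edit.new.start;
        } else {
            // Past this edit: re-express the offset relative to the edit's new end.
            new = edit.new.end + (old - edit.old.end);
        }
    }
    new
}

fn main() {
    // The same patch as `test_old_to_new` above.
    let patch = [
        SimpleEdit { old: 2..4, new: 2..4 },
        SimpleEdit { old: 7..8, new: 7..11 },
    ];
    assert_eq!(old_to_new(&patch, 3), 2);
    assert_eq!(old_to_new(&patch, 5), 5);
    assert_eq!(old_to_new(&patch, 8), 11);
    assert_eq!(old_to_new(&patch, 9), 12);
}
```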

crates/text2/src/selection.rs

@@ -0,0 +1,123 @@
+use crate::{Anchor, BufferSnapshot, TextDimension};
+use std::cmp::Ordering;
+use std::ops::Range;
+
+#[derive(Copy, Clone, Debug, PartialEq)]
+pub enum SelectionGoal {
+    None,
+    HorizontalPosition(f32),
+    HorizontalRange { start: f32, end: f32 },
+    WrappedHorizontalPosition((u32, f32)),
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub struct Selection<T> {
+    pub id: usize,
+    pub start: T,
+    pub end: T,
+    pub reversed: bool,
+    pub goal: SelectionGoal,
+}
+
+impl Default for SelectionGoal {
+    fn default() -> Self {
+        Self::None
+    }
+}
+
+impl<T: Clone> Selection<T> {
+    pub fn head(&self) -> T {
+        if self.reversed {
+            self.start.clone()
+        } else {
+            self.end.clone()
+        }
+    }
+
+    pub fn tail(&self) -> T {
+        if self.reversed {
+            self.end.clone()
+        } else {
+            self.start.clone()
+        }
+    }
+
+    pub fn map<F, S>(&self, f: F) -> Selection<S>
+    where
+        F: Fn(T) -> S,
+    {
+        Selection::<S> {
+            id: self.id,
+            start: f(self.start.clone()),
+            end: f(self.end.clone()),
+            reversed: self.reversed,
+            goal: self.goal,
+        }
+    }
+
+    pub fn collapse_to(&mut self, point: T, new_goal: SelectionGoal) {
+        self.start = point.clone();
+        self.end = point;
+        self.goal = new_goal;
+        self.reversed = false;
+    }
+}
+
+impl<T: Copy + Ord> Selection<T> {
+    pub fn is_empty(&self) -> bool {
+        self.start == self.end
+    }
+
+    pub fn set_head(&mut self, head: T, new_goal: SelectionGoal) {
+        if head.cmp(&self.tail()) < Ordering::Equal {
+            if !self.reversed {
+                self.end = self.start;
+                self.reversed = true;
+            }
+            self.start = head;
+        } else {
+            if self.reversed {
+                self.start = self.end;
+                self.reversed = false;
+            }
+            self.end = head;
+        }
+        self.goal = new_goal;
+    }
+
+    pub fn range(&self) -> Range<T> {
+        self.start..self.end
+    }
+}
+
+impl Selection<usize> {
+    #[cfg(feature = "test-support")]
+    pub fn from_offset(offset: usize) -> Self {
+        Selection {
+            id: 0,
+            start: offset,
+            end: offset,
+            goal: SelectionGoal::None,
+            reversed: false,
+        }
+    }
+
+    pub fn equals(&self, offset_range: &Range<usize>) -> bool {
+        self.start == offset_range.start && self.end == offset_range.end
+    }
+}
+
+impl Selection<Anchor> {
+    pub fn resolve<'a, D: 'a + TextDimension>(
+        &'a self,
+        snapshot: &'a BufferSnapshot,
+    ) -> Selection<D> {
+        Selection {
+            id: self.id,
+            start: snapshot.summary_for_anchor(&self.start),
+            end: snapshot.summary_for_anchor(&self.end),
+            reversed: self.reversed,
+            goal: self.goal,
+        }
+    }
+}
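
`Selection` always keeps `start <= end` and stores the direction separately in `reversed`, so `head()` (the moving end) and `tail()` are derived rather than stored. A hypothetical usage sketch of `Selection<usize>` from above, showing how `set_head` flips `reversed` when the head is dragged across the tail:

```rust
// Sketch only, assuming the `Selection` and `SelectionGoal` types above are in scope.
fn head_and_tail() {
    let mut selection: Selection<usize> = Selection {
        id: 0,
        start: 5,
        end: 5,
        reversed: false,
        goal: SelectionGoal::None,
    };

    // Extend the head to the right of the tail.
    selection.set_head(8, SelectionGoal::None);
    assert_eq!(selection.range(), 5..8);
    assert!(!selection.reversed);

    // Drag the head back across the tail: `start`/`end` stay ordered, `reversed` flips.
    selection.set_head(2, SelectionGoal::None);
    assert_eq!(selection.range(), 2..5);
    assert!(selection.reversed);
    assert_eq!(selection.head(), 2);
    assert_eq!(selection.tail(), 5);
}
```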

crates/text2/src/subscription.rs

@@ -0,0 +1,48 @@
+use crate::{Edit, Patch};
+use parking_lot::Mutex;
+use std::{
+    mem,
+    sync::{Arc, Weak},
+};
+
+#[derive(Default)]
+pub struct Topic(Mutex<Vec<Weak<Mutex<Patch<usize>>>>>);
+
+pub struct Subscription(Arc<Mutex<Patch<usize>>>);
+
+impl Topic {
+    pub fn subscribe(&mut self) -> Subscription {
+        let subscription = Subscription(Default::default());
+        self.0.get_mut().push(Arc::downgrade(&subscription.0));
+        subscription
+    }
+
+    pub fn publish(&self, edits: impl Clone + IntoIterator<Item = Edit<usize>>) {
+        publish(&mut *self.0.lock(), edits);
+    }
+
+    pub fn publish_mut(&mut self, edits: impl Clone + IntoIterator<Item = Edit<usize>>) {
+        publish(self.0.get_mut(), edits);
+    }
+}
+
+impl Subscription {
+    pub fn consume(&self) -> Patch<usize> {
+        mem::take(&mut *self.0.lock())
+    }
+}
+
+fn publish(
+    subscriptions: &mut Vec<Weak<Mutex<Patch<usize>>>>,
+    edits: impl Clone + IntoIterator<Item = Edit<usize>>,
+) {
+    subscriptions.retain(|subscription| {
+        if let Some(subscription) = subscription.upgrade() {
+            let mut patch = subscription.lock();
+            *patch = patch.compose(edits.clone());
+            true
+        } else {
+            false
+        }
+    });
+}
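
`Topic` holds only weak references to each subscriber's pending `Patch`, so a subscriber unsubscribes simply by dropping its `Subscription`; `publish` composes new edits into every live subscriber's patch and prunes the dead ones. A standalone sketch of the same weak-subscriber pattern, using plain `String` events in place of `Patch<usize>`:

```rust
use std::sync::{Arc, Mutex, Weak};

struct Topic(Vec<Weak<Mutex<Vec<String>>>>);
struct Subscription(Arc<Mutex<Vec<String>>>);

impl Topic {
    fn subscribe(&mut self) -> Subscription {
        let subscription = Subscription(Arc::new(Mutex::new(Vec::new())));
        // The topic only keeps a weak handle to the subscriber's inbox.
        self.0.push(Arc::downgrade(&subscription.0));
        subscription
    }

    fn publish(&mut self, event: &str) {
        // Deliver to live subscribers and drop entries whose `Subscription` is gone.
        self.0.retain(|weak| match weak.upgrade() {
            Some(inbox) => {
                inbox.lock().unwrap().push(event.to_string());
                true
            }
            None => false,
        });
    }
}

impl Subscription {
    fn consume(&self) -> Vec<String> {
        std::mem::take(&mut *self.0.lock().unwrap())
    }
}

fn main() {
    let mut topic = Topic(Vec::new());
    let alive = topic.subscribe();
    let dropped = topic.subscribe();
    drop(dropped);

    topic.publish("edit 1");
    topic.publish("edit 2");

    assert_eq!(alive.consume(), vec!["edit 1".to_string(), "edit 2".to_string()]);
    assert_eq!(topic.0.len(), 1); // the dropped subscriber was pruned on publish
}
```

The same retain-on-publish trick is what lets the crate's `Topic::publish` take `&self`: its subscriber list sits behind a `Mutex`, so pruning happens while delivering.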

crates/text2/src/tests.rs

@@ -0,0 +1,764 @@
+use super::{network::Network, *};
+use clock::ReplicaId;
+use rand::prelude::*;
+use std::{
+    cmp::Ordering,
+    env,
+    iter::Iterator,
+    time::{Duration, Instant},
+};
+
+#[cfg(test)]
+#[ctor::ctor]
+fn init_logger() {
+    if std::env::var("RUST_LOG").is_ok() {
+        env_logger::init();
+    }
+}
+
+#[test]
+fn test_edit() {
+    let mut buffer = Buffer::new(0, 0, "abc".into());
+    assert_eq!(buffer.text(), "abc");
+    buffer.edit([(3..3, "def")]);
+    assert_eq!(buffer.text(), "abcdef");
+    buffer.edit([(0..0, "ghi")]);
+    assert_eq!(buffer.text(), "ghiabcdef");
+    buffer.edit([(5..5, "jkl")]);
+    assert_eq!(buffer.text(), "ghiabjklcdef");
+    buffer.edit([(6..7, "")]);
+    assert_eq!(buffer.text(), "ghiabjlcdef");
+    buffer.edit([(4..9, "mno")]);
+    assert_eq!(buffer.text(), "ghiamnoef");
+}
+
+#[gpui2::test(iterations = 100)]
+fn test_random_edits(mut rng: StdRng) {
+    let operations = env::var("OPERATIONS")
+        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+        .unwrap_or(10);
+
+    let reference_string_len = rng.gen_range(0..3);
+    let mut reference_string = RandomCharIter::new(&mut rng)
+        .take(reference_string_len)
+        .collect::<String>();
+    let mut buffer = Buffer::new(0, 0, reference_string.clone());
+    LineEnding::normalize(&mut reference_string);
+
+    buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
+    let mut buffer_versions = Vec::new();
+    log::info!(
+        "buffer text {:?}, version: {:?}",
+        buffer.text(),
+        buffer.version()
+    );
+
+    for _i in 0..operations {
+        let (edits, _) = buffer.randomly_edit(&mut rng, 5);
+        for (old_range, new_text) in edits.iter().rev() {
+            reference_string.replace_range(old_range.clone(), new_text);
+        }
+
+        assert_eq!(buffer.text(), reference_string);
+        log::info!(
+            "buffer text {:?}, version: {:?}",
+            buffer.text(),
+            buffer.version()
+        );
+
+        if rng.gen_bool(0.25) {
+            buffer.randomly_undo_redo(&mut rng);
+            reference_string = buffer.text();
+            log::info!(
+                "buffer text {:?}, version: {:?}",
+                buffer.text(),
+                buffer.version()
+            );
+        }
+
+        let range = buffer.random_byte_range(0, &mut rng);
+        assert_eq!(
+            buffer.text_summary_for_range::<TextSummary, _>(range.clone()),
+            TextSummary::from(&reference_string[range])
+        );
+
+        buffer.check_invariants();
+
+        if rng.gen_bool(0.3) {
+            buffer_versions.push((buffer.clone(), buffer.subscribe()));
+        }
+    }
+
+    for (old_buffer, subscription) in buffer_versions {
+        let edits = buffer
+            .edits_since::<usize>(&old_buffer.version)
+            .collect::<Vec<_>>();
+
+        log::info!(
+            "applying edits since version {:?} to old text: {:?}: {:?}",
+            old_buffer.version(),
+            old_buffer.text(),
+            edits,
+        );
+
+        let mut text = old_buffer.visible_text.clone();
+        for edit in edits {
+            let new_text: String = buffer.text_for_range(edit.new.clone()).collect();
+            text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text);
+        }
+        assert_eq!(text.to_string(), buffer.text());
+
+        for _ in 0..5 {
+            let end_ix = old_buffer.clip_offset(rng.gen_range(0..=old_buffer.len()), Bias::Right);
+            let start_ix = old_buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+            let range = old_buffer.anchor_before(start_ix)..old_buffer.anchor_after(end_ix);
+            let mut old_text = old_buffer.text_for_range(range.clone()).collect::<String>();
+            let edits = buffer
+                .edits_since_in_range::<usize>(&old_buffer.version, range.clone())
+                .collect::<Vec<_>>();
+            log::info!(
+                "applying edits since version {:?} to old text in range {:?}: {:?}: {:?}",
+                old_buffer.version(),
+                start_ix..end_ix,
+                old_text,
+                edits,
+            );
+
+            let new_text = buffer.text_for_range(range).collect::<String>();
+            for edit in edits {
+                old_text.replace_range(
+                    edit.new.start..edit.new.start + edit.old_len(),
+                    &new_text[edit.new],
+                );
+            }
+            assert_eq!(old_text, new_text);
+        }
+
+        let subscription_edits = subscription.consume();
+        log::info!(
+            "applying subscription edits since version {:?} to old text: {:?}: {:?}",
+            old_buffer.version(),
+            old_buffer.text(),
+            subscription_edits,
+        );
+
+        let mut text = old_buffer.visible_text.clone();
+        for edit in subscription_edits.into_inner() {
+            let new_text: String = buffer.text_for_range(edit.new.clone()).collect();
+            text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text);
+        }
+        assert_eq!(text.to_string(), buffer.text());
+    }
+}
+
+#[test]
+fn test_line_endings() {
+    assert_eq!(LineEnding::detect(&"🍐✅\n".repeat(1000)), LineEnding::Unix);
+    assert_eq!(LineEnding::detect(&"abcd\n".repeat(1000)), LineEnding::Unix);
+    assert_eq!(
+        LineEnding::detect(&"🍐✅\r\n".repeat(1000)),
+        LineEnding::Windows
+    );
+    assert_eq!(
+        LineEnding::detect(&"abcd\r\n".repeat(1000)),
+        LineEnding::Windows
+    );
+
+    let mut buffer = Buffer::new(0, 0, "one\r\ntwo\rthree".into());
+    assert_eq!(buffer.text(), "one\ntwo\nthree");
+    assert_eq!(buffer.line_ending(), LineEnding::Windows);
+    buffer.check_invariants();
+
+    buffer.edit([(buffer.len()..buffer.len(), "\r\nfour")]);
+    buffer.edit([(0..0, "zero\r\n")]);
+    assert_eq!(buffer.text(), "zero\none\ntwo\nthree\nfour");
+    assert_eq!(buffer.line_ending(), LineEnding::Windows);
+    buffer.check_invariants();
+}
+
+#[test]
+fn test_line_len() {
+    let mut buffer = Buffer::new(0, 0, "".into());
+    buffer.edit([(0..0, "abcd\nefg\nhij")]);
+    buffer.edit([(12..12, "kl\nmno")]);
+    buffer.edit([(18..18, "\npqrs\n")]);
+    buffer.edit([(18..21, "\nPQ")]);
+
+    assert_eq!(buffer.line_len(0), 4);
+    assert_eq!(buffer.line_len(1), 3);
+    assert_eq!(buffer.line_len(2), 5);
+    assert_eq!(buffer.line_len(3), 3);
+    assert_eq!(buffer.line_len(4), 4);
+    assert_eq!(buffer.line_len(5), 0);
+}
+
+#[test]
+fn test_common_prefix_at_position() {
+    let text = "a = str; b = δα";
+    let buffer = Buffer::new(0, 0, text.into());
+
+    let offset1 = offset_after(text, "str");
+    let offset2 = offset_after(text, "δα");
+
+    // the preceding word is a prefix of the suggestion
+    assert_eq!(
+        buffer.common_prefix_at(offset1, "string"),
+        range_of(text, "str"),
+    );
+    // a suffix of the preceding word is a prefix of the suggestion
+    assert_eq!(
+        buffer.common_prefix_at(offset1, "tree"),
+        range_of(text, "tr"),
+    );
+    // the preceding word is a substring of the suggestion, but not a prefix
+    assert_eq!(
+        buffer.common_prefix_at(offset1, "astro"),
+        empty_range_after(text, "str"),
+    );
+
+    // prefix matching is case insensitive.
+    assert_eq!(
+        buffer.common_prefix_at(offset1, "Strαngε"),
+        range_of(text, "str"),
+    );
+    assert_eq!(
+        buffer.common_prefix_at(offset2, "ΔΑΜΝ"),
+        range_of(text, "δα"),
+    );
+
+    fn offset_after(text: &str, part: &str) -> usize {
+        text.find(part).unwrap() + part.len()
+    }
+
+    fn empty_range_after(text: &str, part: &str) -> Range<usize> {
+        let offset = offset_after(text, part);
+        offset..offset
+    }
+
+    fn range_of(text: &str, part: &str) -> Range<usize> {
+        let start = text.find(part).unwrap();
+        start..start + part.len()
+    }
+}
+
+#[test]
+fn test_text_summary_for_range() {
+    let buffer = Buffer::new(0, 0, "ab\nefg\nhklm\nnopqrs\ntuvwxyz".into());
+    assert_eq!(
+        buffer.text_summary_for_range::<TextSummary, _>(1..3),
+        TextSummary {
+            len: 2,
+            len_utf16: OffsetUtf16(2),
+            lines: Point::new(1, 0),
+            first_line_chars: 1,
+            last_line_chars: 0,
+            last_line_len_utf16: 0,
+            longest_row: 0,
+            longest_row_chars: 1,
+        }
+    );
+    assert_eq!(
+        buffer.text_summary_for_range::<TextSummary, _>(1..12),
+        TextSummary {
+            len: 11,
+            len_utf16: OffsetUtf16(11),
+            lines: Point::new(3, 0),
+            first_line_chars: 1,
+            last_line_chars: 0,
+            last_line_len_utf16: 0,
+            longest_row: 2,
+            longest_row_chars: 4,
+        }
+    );
+    assert_eq!(
+        buffer.text_summary_for_range::<TextSummary, _>(0..20),
+        TextSummary {
+            len: 20,
+            len_utf16: OffsetUtf16(20),
+            lines: Point::new(4, 1),
+            first_line_chars: 2,
+            last_line_chars: 1,
+            last_line_len_utf16: 1,
+            longest_row: 3,
+            longest_row_chars: 6,
+        }
+    );
+    assert_eq!(
+        buffer.text_summary_for_range::<TextSummary, _>(0..22),
+        TextSummary {
+            len: 22,
+            len_utf16: OffsetUtf16(22),
+            lines: Point::new(4, 3),
+            first_line_chars: 2,
+            last_line_chars: 3,
+            last_line_len_utf16: 3,
+            longest_row: 3,
+            longest_row_chars: 6,
+        }
+    );
+    assert_eq!(
+        buffer.text_summary_for_range::<TextSummary, _>(7..22),
+        TextSummary {
+            len: 15,
+            len_utf16: OffsetUtf16(15),
+            lines: Point::new(2, 3),
+            first_line_chars: 4,
+            last_line_chars: 3,
+            last_line_len_utf16: 3,
+            longest_row: 1,
+            longest_row_chars: 6,
+        }
+    );
+}
+
+#[test]
+fn test_chars_at() {
+    let mut buffer = Buffer::new(0, 0, "".into());
+    buffer.edit([(0..0, "abcd\nefgh\nij")]);
+    buffer.edit([(12..12, "kl\nmno")]);
+    buffer.edit([(18..18, "\npqrs")]);
+    buffer.edit([(18..21, "\nPQ")]);
+
+    let chars = buffer.chars_at(Point::new(0, 0));
+    assert_eq!(chars.collect::<String>(), "abcd\nefgh\nijkl\nmno\nPQrs");
+
+    let chars = buffer.chars_at(Point::new(1, 0));
+    assert_eq!(chars.collect::<String>(), "efgh\nijkl\nmno\nPQrs");
+
+    let chars = buffer.chars_at(Point::new(2, 0));
+    assert_eq!(chars.collect::<String>(), "ijkl\nmno\nPQrs");
+
+    let chars = buffer.chars_at(Point::new(3, 0));
+    assert_eq!(chars.collect::<String>(), "mno\nPQrs");
+
+    let chars = buffer.chars_at(Point::new(4, 0));
+    assert_eq!(chars.collect::<String>(), "PQrs");
+
+    // Regression test:
+    let mut buffer = Buffer::new(0, 0, "".into());
+    buffer.edit([(0..0, "[workspace]\nmembers = [\n    \"xray_core\",\n    \"xray_server\",\n    \"xray_cli\",\n    \"xray_wasm\",\n]\n")]);
+    buffer.edit([(60..60, "\n")]);
+
+    let chars = buffer.chars_at(Point::new(6, 0));
+    assert_eq!(chars.collect::<String>(), "    \"xray_wasm\",\n]\n");
+}
+
+#[test]
+fn test_anchors() {
+    let mut buffer = Buffer::new(0, 0, "".into());
+    buffer.edit([(0..0, "abc")]);
+    let left_anchor = buffer.anchor_before(2);
+    let right_anchor = buffer.anchor_after(2);
+
+    buffer.edit([(1..1, "def\n")]);
+    assert_eq!(buffer.text(), "adef\nbc");
+    assert_eq!(left_anchor.to_offset(&buffer), 6);
+    assert_eq!(right_anchor.to_offset(&buffer), 6);
+    assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+    assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+
+    buffer.edit([(2..3, "")]);
+    assert_eq!(buffer.text(), "adf\nbc");
+    assert_eq!(left_anchor.to_offset(&buffer), 5);
+    assert_eq!(right_anchor.to_offset(&buffer), 5);
+    assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+    assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+
+    buffer.edit([(5..5, "ghi\n")]);
+    assert_eq!(buffer.text(), "adf\nbghi\nc");
+    assert_eq!(left_anchor.to_offset(&buffer), 5);
+    assert_eq!(right_anchor.to_offset(&buffer), 9);
+    assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+    assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 });
+
+    buffer.edit([(7..9, "")]);
+    assert_eq!(buffer.text(), "adf\nbghc");
+    assert_eq!(left_anchor.to_offset(&buffer), 5);
+    assert_eq!(right_anchor.to_offset(&buffer), 7);
+    assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 },);
+    assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 3 });
+
+    // Ensure anchoring to a point is equivalent to anchoring to an offset.
+    assert_eq!(
+        buffer.anchor_before(Point { row: 0, column: 0 }),
+        buffer.anchor_before(0)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 0, column: 1 }),
+        buffer.anchor_before(1)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 0, column: 2 }),
+        buffer.anchor_before(2)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 0, column: 3 }),
+        buffer.anchor_before(3)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 1, column: 0 }),
+        buffer.anchor_before(4)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 1, column: 1 }),
+        buffer.anchor_before(5)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 1, column: 2 }),
+        buffer.anchor_before(6)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 1, column: 3 }),
+        buffer.anchor_before(7)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 1, column: 4 }),
+        buffer.anchor_before(8)
+    );
+
+    // Comparison between anchors.
+    let anchor_at_offset_0 = buffer.anchor_before(0);
+    let anchor_at_offset_1 = buffer.anchor_before(1);
+    let anchor_at_offset_2 = buffer.anchor_before(2);
+
+    assert_eq!(
+        anchor_at_offset_0.cmp(&anchor_at_offset_0, &buffer),
+        Ordering::Equal
+    );
+    assert_eq!(
+        anchor_at_offset_1.cmp(&anchor_at_offset_1, &buffer),
+        Ordering::Equal
+    );
+    assert_eq!(
+        anchor_at_offset_2.cmp(&anchor_at_offset_2, &buffer),
+        Ordering::Equal
+    );
+
+    assert_eq!(
+        anchor_at_offset_0.cmp(&anchor_at_offset_1, &buffer),
+        Ordering::Less
+    );
+    assert_eq!(
+        anchor_at_offset_1.cmp(&anchor_at_offset_2, &buffer),
+        Ordering::Less
+    );
+    assert_eq!(
+        anchor_at_offset_0.cmp(&anchor_at_offset_2, &buffer),
+        Ordering::Less
+    );
+
+    assert_eq!(
+        anchor_at_offset_1.cmp(&anchor_at_offset_0, &buffer),
+        Ordering::Greater
+    );
+    assert_eq!(
+        anchor_at_offset_2.cmp(&anchor_at_offset_1, &buffer),
+        Ordering::Greater
+    );
+    assert_eq!(
+        anchor_at_offset_2.cmp(&anchor_at_offset_0, &buffer),
+        Ordering::Greater
+    );
+}
+
+#[test]
+fn test_anchors_at_start_and_end() {
+    let mut buffer = Buffer::new(0, 0, "".into());
+    let before_start_anchor = buffer.anchor_before(0);
+    let after_end_anchor = buffer.anchor_after(0);
+
+    buffer.edit([(0..0, "abc")]);
+    assert_eq!(buffer.text(), "abc");
+    assert_eq!(before_start_anchor.to_offset(&buffer), 0);
+    assert_eq!(after_end_anchor.to_offset(&buffer), 3);
+
+    let after_start_anchor = buffer.anchor_after(0);
+    let before_end_anchor = buffer.anchor_before(3);
+
+    buffer.edit([(3..3, "def")]);
+    buffer.edit([(0..0, "ghi")]);
+    assert_eq!(buffer.text(), "ghiabcdef");
+    assert_eq!(before_start_anchor.to_offset(&buffer), 0);
+    assert_eq!(after_start_anchor.to_offset(&buffer), 3);
+    assert_eq!(before_end_anchor.to_offset(&buffer), 6);
+    assert_eq!(after_end_anchor.to_offset(&buffer), 9);
+}
+
+#[test]
+fn test_undo_redo() {
+    let mut buffer = Buffer::new(0, 0, "1234".into());
+    // Set group interval to zero so as to not group edits in the undo stack.
+    buffer.set_group_interval(Duration::from_secs(0));
+
+    buffer.edit([(1..1, "abx")]);
+    buffer.edit([(3..4, "yzef")]);
+    buffer.edit([(3..5, "cd")]);
+    assert_eq!(buffer.text(), "1abcdef234");
+
+    let entries = buffer.history.undo_stack.clone();
+    assert_eq!(entries.len(), 3);
+
+    buffer.undo_or_redo(entries[0].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1cdef234");
+    buffer.undo_or_redo(entries[0].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1abcdef234");
+
+    buffer.undo_or_redo(entries[1].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1abcdx234");
+    buffer.undo_or_redo(entries[2].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1abx234");
+    buffer.undo_or_redo(entries[1].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1abyzef234");
+    buffer.undo_or_redo(entries[2].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1abcdef234");
+
+    buffer.undo_or_redo(entries[2].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1abyzef234");
+    buffer.undo_or_redo(entries[0].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1yzef234");
+    buffer.undo_or_redo(entries[1].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1234");
+}
+
+#[test]
+fn test_history() {
+    let mut now = Instant::now();
+    let mut buffer = Buffer::new(0, 0, "123456".into());
+    buffer.set_group_interval(Duration::from_millis(300));
+
+    let transaction_1 = buffer.start_transaction_at(now).unwrap();
+    buffer.edit([(2..4, "cd")]);
+    buffer.end_transaction_at(now);
+    assert_eq!(buffer.text(), "12cd56");
+
+    buffer.start_transaction_at(now);
+    buffer.edit([(4..5, "e")]);
+    buffer.end_transaction_at(now).unwrap();
+    assert_eq!(buffer.text(), "12cde6");
+
+    now += buffer.transaction_group_interval() + Duration::from_millis(1);
+    buffer.start_transaction_at(now);
+    buffer.edit([(0..1, "a")]);
+    buffer.edit([(1..1, "b")]);
+    buffer.end_transaction_at(now).unwrap();
+    assert_eq!(buffer.text(), "ab2cde6");
+
+    // Last transaction happened past the group interval, undo it on its own.
+    buffer.undo();
+    assert_eq!(buffer.text(), "12cde6");
+
+    // First two transactions happened within the group interval, undo them together.
+    buffer.undo();
+    assert_eq!(buffer.text(), "123456");
+
+    // Redo the first two transactions together.
+    buffer.redo();
+    assert_eq!(buffer.text(), "12cde6");
+
+    // Redo the last transaction on its own.
+    buffer.redo();
+    assert_eq!(buffer.text(), "ab2cde6");
+
+    buffer.start_transaction_at(now);
+    assert!(buffer.end_transaction_at(now).is_none());
+    buffer.undo();
+    assert_eq!(buffer.text(), "12cde6");
+
+    // Redo stack gets cleared after performing an edit.
+    buffer.start_transaction_at(now);
+    buffer.edit([(0..0, "X")]);
+    buffer.end_transaction_at(now);
+    assert_eq!(buffer.text(), "X12cde6");
+    buffer.redo();
+    assert_eq!(buffer.text(), "X12cde6");
+    buffer.undo();
+    assert_eq!(buffer.text(), "12cde6");
+    buffer.undo();
+    assert_eq!(buffer.text(), "123456");
+
+    // Transactions can be grouped manually.
+    buffer.redo();
+    buffer.redo();
+    assert_eq!(buffer.text(), "X12cde6");
+    buffer.group_until_transaction(transaction_1);
+    buffer.undo();
+    assert_eq!(buffer.text(), "123456");
+    buffer.redo();
+    assert_eq!(buffer.text(), "X12cde6");
+}
+
+#[test]
+fn test_finalize_last_transaction() {
+    let now = Instant::now();
+    let mut buffer = Buffer::new(0, 0, "123456".into());
+
+    buffer.start_transaction_at(now);
+    buffer.edit([(2..4, "cd")]);
+    buffer.end_transaction_at(now);
+    assert_eq!(buffer.text(), "12cd56");
+
+    buffer.finalize_last_transaction();
+    buffer.start_transaction_at(now);
+    buffer.edit([(4..5, "e")]);
+    buffer.end_transaction_at(now).unwrap();
+    assert_eq!(buffer.text(), "12cde6");
+
+    buffer.start_transaction_at(now);
+    buffer.edit([(0..1, "a")]);
+    buffer.edit([(1..1, "b")]);
+    buffer.end_transaction_at(now).unwrap();
+    assert_eq!(buffer.text(), "ab2cde6");
+
+    buffer.undo();
+    assert_eq!(buffer.text(), "12cd56");
+
+    buffer.undo();
+    assert_eq!(buffer.text(), "123456");
+
+    buffer.redo();
+    assert_eq!(buffer.text(), "12cd56");
+
+    buffer.redo();
+    assert_eq!(buffer.text(), "ab2cde6");
+}
+
+#[test]
+fn test_edited_ranges_for_transaction() {
+    let now = Instant::now();
+    let mut buffer = Buffer::new(0, 0, "1234567".into());
+
+    buffer.start_transaction_at(now);
+    buffer.edit([(2..4, "cd")]);
+    buffer.edit([(6..6, "efg")]);
+    buffer.end_transaction_at(now);
+    assert_eq!(buffer.text(), "12cd56efg7");
+
+    let tx = buffer.finalize_last_transaction().unwrap().clone();
+    assert_eq!(
+        buffer
+            .edited_ranges_for_transaction::<usize>(&tx)
+            .collect::<Vec<_>>(),
+        [2..4, 6..9]
+    );
+
+    buffer.edit([(5..5, "hijk")]);
+    assert_eq!(buffer.text(), "12cd5hijk6efg7");
+    assert_eq!(
+        buffer
+            .edited_ranges_for_transaction::<usize>(&tx)
+            .collect::<Vec<_>>(),
+        [2..4, 10..13]
+    );
+
+    buffer.edit([(4..4, "l")]);
+    assert_eq!(buffer.text(), "12cdl5hijk6efg7");
+    assert_eq!(
+        buffer
+            .edited_ranges_for_transaction::<usize>(&tx)
+            .collect::<Vec<_>>(),
+        [2..4, 11..14]
+    );
+}
+
+#[test]
+fn test_concurrent_edits() {
+    let text = "abcdef";
+
+    let mut buffer1 = Buffer::new(1, 0, text.into());
+    let mut buffer2 = Buffer::new(2, 0, text.into());
+    let mut buffer3 = Buffer::new(3, 0, text.into());
+
+    let buf1_op = buffer1.edit([(1..2, "12")]);
+    assert_eq!(buffer1.text(), "a12cdef");
+    let buf2_op = buffer2.edit([(3..4, "34")]);
+    assert_eq!(buffer2.text(), "abc34ef");
+    let buf3_op = buffer3.edit([(5..6, "56")]);
+    assert_eq!(buffer3.text(), "abcde56");
+
+    buffer1.apply_op(buf2_op.clone()).unwrap();
+    buffer1.apply_op(buf3_op.clone()).unwrap();
+    buffer2.apply_op(buf1_op.clone()).unwrap();
+    buffer2.apply_op(buf3_op).unwrap();
+    buffer3.apply_op(buf1_op).unwrap();
+    buffer3.apply_op(buf2_op).unwrap();
+
+    assert_eq!(buffer1.text(), "a12c34e56");
+    assert_eq!(buffer2.text(), "a12c34e56");
+    assert_eq!(buffer3.text(), "a12c34e56");
+}
+
+#[gpui2::test(iterations = 100)]
+fn test_random_concurrent_edits(mut rng: StdRng) {
+    let peers = env::var("PEERS")
+        .map(|i| i.parse().expect("invalid `PEERS` variable"))
+        .unwrap_or(5);
+    let operations = env::var("OPERATIONS")
+        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+        .unwrap_or(10);
+
+    let base_text_len = rng.gen_range(0..10);
+    let base_text = RandomCharIter::new(&mut rng)
+        .take(base_text_len)
+        .collect::<String>();
+    let mut replica_ids = Vec::new();
+    let mut buffers = Vec::new();
+    let mut network = Network::new(rng.clone());
+
+    for i in 0..peers {
+        let mut buffer = Buffer::new(i as ReplicaId, 0, base_text.clone());
+        buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200));
+        buffers.push(buffer);
+        replica_ids.push(i as u16);
+        network.add_peer(i as u16);
+    }
+
+    log::info!("initial text: {:?}", base_text);
+
+    let mut mutation_count = operations;
+    loop {
+        let replica_index = rng.gen_range(0..peers);
+        let replica_id = replica_ids[replica_index];
+        let buffer = &mut buffers[replica_index];
+        match rng.gen_range(0..=100) {
+            0..=50 if mutation_count != 0 => {
+                let op = buffer.randomly_edit(&mut rng, 5).1;
+                network.broadcast(buffer.replica_id, vec![op]);
+                log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text());
+                mutation_count -= 1;
+            }
+            51..=70 if mutation_count != 0 => {
+                let ops = buffer.randomly_undo_redo(&mut rng);
+                network.broadcast(buffer.replica_id, ops);
+                mutation_count -= 1;
+            }
+            71..=100 if network.has_unreceived(replica_id) => {
+                let ops = network.receive(replica_id);
+                if !ops.is_empty() {
+                    log::info!(
+                        "peer {} applying {} ops from the network.",
+                        replica_id,
+                        ops.len()
+                    );
+                    buffer.apply_ops(ops).unwrap();
+                }
+            }
+            _ => {}
+        }
+        buffer.check_invariants();
+
+        if mutation_count == 0 && network.is_idle() {
+            break;
+        }
+    }
+
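+    // Once every operation has been delivered and applied, all replicas must
+    // have converged to the same text.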
+    let first_buffer = &buffers[0];
+    for buffer in &buffers[1..] {
+        assert_eq!(
+            buffer.text(),
+            first_buffer.text(),
+            "Replica {} text != Replica 0 text",
+            buffer.replica_id
+        );
+        buffer.check_invariants();
+    }
+}

crates/text2/src/text2.rs 🔗

@@ -0,0 +1,2682 @@
+mod anchor;
+pub mod locator;
+#[cfg(any(test, feature = "test-support"))]
+pub mod network;
+pub mod operation_queue;
+mod patch;
+mod selection;
+pub mod subscription;
+#[cfg(test)]
+mod tests;
+mod undo_map;
+
+pub use anchor::*;
+use anyhow::{anyhow, Result};
+pub use clock::ReplicaId;
+use collections::{HashMap, HashSet};
+use locator::Locator;
+use operation_queue::OperationQueue;
+pub use patch::Patch;
+use postage::{oneshot, prelude::*};
+
+use lazy_static::lazy_static;
+use regex::Regex;
+pub use rope::*;
+pub use selection::*;
+use std::{
+    borrow::Cow,
+    cmp::{self, Ordering, Reverse},
+    future::Future,
+    iter::Iterator,
+    ops::{self, Deref, Range, Sub},
+    str,
+    sync::Arc,
+    time::{Duration, Instant},
+};
+pub use subscription::*;
+pub use sum_tree::Bias;
+use sum_tree::{FilterCursor, SumTree, TreeMap};
+use undo_map::UndoMap;
+use util::ResultExt;
+
+#[cfg(any(test, feature = "test-support"))]
+use util::RandomCharIter;
+
+lazy_static! {
+    static ref LINE_SEPARATORS_REGEX: Regex = Regex::new("\r\n|\r|\u{2028}|\u{2029}").unwrap();
+}
+
+pub type TransactionId = clock::Lamport;
+
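+/// A collaborative text buffer. Local and remote changes are expressed as
+/// `Operation`s, so concurrent edits from multiple replicas converge to the
+/// same text, and an undo/redo history is kept as grouped transactions.
+///
+/// A minimal local-editing sketch, mirroring the tests in this crate:
+///
+/// ```ignore
+/// let mut buffer = Buffer::new(0, 0, "1234".into());
+/// buffer.edit([(1..1, "abc")]);
+/// assert_eq!(buffer.text(), "1abc234");
+/// ```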
+pub struct Buffer {
+    snapshot: BufferSnapshot,
+    history: History,
+    deferred_ops: OperationQueue<Operation>,
+    deferred_replicas: HashSet<ReplicaId>,
+    pub lamport_clock: clock::Lamport,
+    subscriptions: Topic,
+    edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
+    wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
+}
+
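+/// An immutable view of the buffer's state at a particular version: the visible
+/// and deleted text as ropes, plus the fragment and insertion trees describing
+/// how that text was assembled from individual insertions.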
+#[derive(Clone)]
+pub struct BufferSnapshot {
+    replica_id: ReplicaId,
+    remote_id: u64,
+    visible_text: Rope,
+    deleted_text: Rope,
+    line_ending: LineEnding,
+    undo_map: UndoMap,
+    fragments: SumTree<Fragment>,
+    insertions: SumTree<InsertionFragment>,
+    pub version: clock::Global,
+}
+
+#[derive(Clone, Debug)]
+pub struct HistoryEntry {
+    transaction: Transaction,
+    first_edit_at: Instant,
+    last_edit_at: Instant,
+    suppress_grouping: bool,
+}
+
+#[derive(Clone, Debug)]
+pub struct Transaction {
+    pub id: TransactionId,
+    pub edit_ids: Vec<clock::Lamport>,
+    pub start: clock::Global,
+}
+
+impl HistoryEntry {
+    pub fn transaction_id(&self) -> TransactionId {
+        self.transaction.id
+    }
+}
+
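+/// Undo/redo bookkeeping for the buffer: the base text, every operation seen so
+/// far, and undo/redo stacks of transactions grouped by `group_interval`.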
+struct History {
+    base_text: Rope,
+    operations: TreeMap<clock::Lamport, Operation>,
+    insertion_slices: HashMap<clock::Lamport, Vec<InsertionSlice>>,
+    undo_stack: Vec<HistoryEntry>,
+    redo_stack: Vec<HistoryEntry>,
+    transaction_depth: usize,
+    group_interval: Duration,
+}
+
+#[derive(Clone, Debug)]
+struct InsertionSlice {
+    insertion_id: clock::Lamport,
+    range: Range<usize>,
+}
+
+impl History {
+    pub fn new(base_text: Rope) -> Self {
+        Self {
+            base_text,
+            operations: Default::default(),
+            insertion_slices: Default::default(),
+            undo_stack: Vec::new(),
+            redo_stack: Vec::new(),
+            transaction_depth: 0,
+            // Don't group transactions in tests unless we opt in, because it's a footgun.
+            #[cfg(any(test, feature = "test-support"))]
+            group_interval: Duration::ZERO,
+            #[cfg(not(any(test, feature = "test-support")))]
+            group_interval: Duration::from_millis(300),
+        }
+    }
+
+    fn push(&mut self, op: Operation) {
+        self.operations.insert(op.timestamp(), op);
+    }
+
+    fn start_transaction(
+        &mut self,
+        start: clock::Global,
+        now: Instant,
+        clock: &mut clock::Lamport,
+    ) -> Option<TransactionId> {
+        self.transaction_depth += 1;
+        if self.transaction_depth == 1 {
+            let id = clock.tick();
+            self.undo_stack.push(HistoryEntry {
+                transaction: Transaction {
+                    id,
+                    start,
+                    edit_ids: Default::default(),
+                },
+                first_edit_at: now,
+                last_edit_at: now,
+                suppress_grouping: false,
+            });
+            Some(id)
+        } else {
+            None
+        }
+    }
+
+    fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
+        assert_ne!(self.transaction_depth, 0);
+        self.transaction_depth -= 1;
+        if self.transaction_depth == 0 {
+            if self
+                .undo_stack
+                .last()
+                .unwrap()
+                .transaction
+                .edit_ids
+                .is_empty()
+            {
+                self.undo_stack.pop();
+                None
+            } else {
+                self.redo_stack.clear();
+                let entry = self.undo_stack.last_mut().unwrap();
+                entry.last_edit_at = now;
+                Some(entry)
+            }
+        } else {
+            None
+        }
+    }
+
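+    // Merge the trailing run of undo-stack entries whose edits happened within
+    // `group_interval` of each other into a single transaction.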
+    fn group(&mut self) -> Option<TransactionId> {
+        let mut count = 0;
+        let mut entries = self.undo_stack.iter();
+        if let Some(mut entry) = entries.next_back() {
+            while let Some(prev_entry) = entries.next_back() {
+                if !prev_entry.suppress_grouping
+                    && entry.first_edit_at - prev_entry.last_edit_at <= self.group_interval
+                {
+                    entry = prev_entry;
+                    count += 1;
+                } else {
+                    break;
+                }
+            }
+        }
+        self.group_trailing(count)
+    }
+
+    fn group_until(&mut self, transaction_id: TransactionId) {
+        let mut count = 0;
+        for entry in self.undo_stack.iter().rev() {
+            if entry.transaction_id() == transaction_id {
+                self.group_trailing(count);
+                break;
+            } else if entry.suppress_grouping {
+                break;
+            } else {
+                count += 1;
+            }
+        }
+    }
+
+    fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
+        let new_len = self.undo_stack.len() - n;
+        let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
+        if let Some(last_entry) = entries_to_keep.last_mut() {
+            for entry in &*entries_to_merge {
+                for edit_id in &entry.transaction.edit_ids {
+                    last_entry.transaction.edit_ids.push(*edit_id);
+                }
+            }
+
+            if let Some(entry) = entries_to_merge.last_mut() {
+                last_entry.last_edit_at = entry.last_edit_at;
+            }
+        }
+
+        self.undo_stack.truncate(new_len);
+        self.undo_stack.last().map(|e| e.transaction.id)
+    }
+
+    fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
+        self.undo_stack.last_mut().map(|entry| {
+            entry.suppress_grouping = true;
+            &entry.transaction
+        })
+    }
+
+    fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
+        assert_eq!(self.transaction_depth, 0);
+        self.undo_stack.push(HistoryEntry {
+            transaction,
+            first_edit_at: now,
+            last_edit_at: now,
+            suppress_grouping: false,
+        });
+        self.redo_stack.clear();
+    }
+
+    fn push_undo(&mut self, op_id: clock::Lamport) {
+        assert_ne!(self.transaction_depth, 0);
+        if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
+            let last_transaction = self.undo_stack.last_mut().unwrap();
+            last_transaction.transaction.edit_ids.push(op_id);
+        }
+    }
+
+    fn pop_undo(&mut self) -> Option<&HistoryEntry> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(entry) = self.undo_stack.pop() {
+            self.redo_stack.push(entry);
+            self.redo_stack.last()
+        } else {
+            None
+        }
+    }
+
+    fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
+        assert_eq!(self.transaction_depth, 0);
+
+        let entry_ix = self
+            .undo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)?;
+        let entry = self.undo_stack.remove(entry_ix);
+        self.redo_stack.push(entry);
+        self.redo_stack.last()
+    }
+
+    fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
+        assert_eq!(self.transaction_depth, 0);
+
+        let redo_stack_start_len = self.redo_stack.len();
+        if let Some(entry_ix) = self
+            .undo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            self.redo_stack
+                .extend(self.undo_stack.drain(entry_ix..).rev());
+        }
+        &self.redo_stack[redo_stack_start_len..]
+    }
+
+    fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(entry_ix) = self
+            .undo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            Some(self.undo_stack.remove(entry_ix).transaction)
+        } else if let Some(entry_ix) = self
+            .redo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            Some(self.redo_stack.remove(entry_ix).transaction)
+        } else {
+            None
+        }
+    }
+
+    fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
+        let entry = self
+            .undo_stack
+            .iter_mut()
+            .rfind(|entry| entry.transaction.id == transaction_id)
+            .or_else(|| {
+                self.redo_stack
+                    .iter_mut()
+                    .rfind(|entry| entry.transaction.id == transaction_id)
+            })?;
+        Some(&mut entry.transaction)
+    }
+
+    fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
+        if let Some(transaction) = self.forget(transaction) {
+            if let Some(destination) = self.transaction_mut(destination) {
+                destination.edit_ids.extend(transaction.edit_ids);
+            }
+        }
+    }
+
+    fn pop_redo(&mut self) -> Option<&HistoryEntry> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(entry) = self.redo_stack.pop() {
+            self.undo_stack.push(entry);
+            self.undo_stack.last()
+        } else {
+            None
+        }
+    }
+
+    fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
+        assert_eq!(self.transaction_depth, 0);
+
+        let undo_stack_start_len = self.undo_stack.len();
+        if let Some(entry_ix) = self
+            .redo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            self.undo_stack
+                .extend(self.redo_stack.drain(entry_ix..).rev());
+        }
+        &self.undo_stack[undo_stack_start_len..]
+    }
+}
+
+struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
+    visible_cursor: rope::Cursor<'a>,
+    deleted_cursor: rope::Cursor<'a>,
+    fragments_cursor: Option<FilterCursor<'a, F, Fragment, FragmentTextSummary>>,
+    undos: &'a UndoMap,
+    since: &'a clock::Global,
+    old_end: D,
+    new_end: D,
+    range: Range<(&'a Locator, usize)>,
+    buffer_id: u64,
+}
+
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
+pub struct Edit<D> {
+    pub old: Range<D>,
+    pub new: Range<D>,
+}
+
+impl<D> Edit<D>
+where
+    D: Sub<D, Output = D> + PartialEq + Copy,
+{
+    pub fn old_len(&self) -> D {
+        self.old.end - self.old.start
+    }
+
+    pub fn new_len(&self) -> D {
+        self.new.end - self.new.start
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.old.start == self.old.end && self.new.start == self.new.end
+    }
+}
+
+impl<D1, D2> Edit<(D1, D2)> {
+    pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
+        (
+            Edit {
+                old: self.old.start.0..self.old.end.0,
+                new: self.new.start.0..self.new.end.0,
+            },
+            Edit {
+                old: self.old.start.1..self.old.end.1,
+                new: self.new.start.1..self.new.end.1,
+            },
+        )
+    }
+}
+
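+/// A contiguous run of text from a single insertion. Fragments are ordered by
+/// their `Locator` id and carry the metadata (deletions, undo counts) needed to
+/// decide whether their text is currently visible.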
+#[derive(Eq, PartialEq, Clone, Debug)]
+pub struct Fragment {
+    pub id: Locator,
+    pub timestamp: clock::Lamport,
+    pub insertion_offset: usize,
+    pub len: usize,
+    pub visible: bool,
+    pub deletions: HashSet<clock::Lamport>,
+    pub max_undos: clock::Global,
+}
+
+#[derive(Eq, PartialEq, Clone, Debug)]
+pub struct FragmentSummary {
+    text: FragmentTextSummary,
+    max_id: Locator,
+    max_version: clock::Global,
+    min_insertion_version: clock::Global,
+    max_insertion_version: clock::Global,
+}
+
+#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
+struct FragmentTextSummary {
+    visible: usize,
+    deleted: usize,
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
+    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
+        self.visible += summary.text.visible;
+        self.deleted += summary.text.deleted;
+    }
+}
+
+#[derive(Eq, PartialEq, Clone, Debug)]
+struct InsertionFragment {
+    timestamp: clock::Lamport,
+    split_offset: usize,
+    fragment_id: Locator,
+}
+
+#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
+struct InsertionFragmentKey {
+    timestamp: clock::Lamport,
+    split_offset: usize,
+}
+
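+/// The unit of replication between peers: either an edit or an undo, stamped
+/// with a lamport timestamp and the version it was produced against.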
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub enum Operation {
+    Edit(EditOperation),
+    Undo(UndoOperation),
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct EditOperation {
+    pub timestamp: clock::Lamport,
+    pub version: clock::Global,
+    pub ranges: Vec<Range<FullOffset>>,
+    pub new_text: Vec<Arc<str>>,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct UndoOperation {
+    pub timestamp: clock::Lamport,
+    pub version: clock::Global,
+    pub counts: HashMap<clock::Lamport, u32>,
+}
+
+impl Buffer {
+    pub fn new(replica_id: u16, remote_id: u64, mut base_text: String) -> Buffer {
+        let line_ending = LineEnding::detect(&base_text);
+        LineEnding::normalize(&mut base_text);
+
+        let history = History::new(Rope::from(base_text.as_ref()));
+        let mut fragments = SumTree::new();
+        let mut insertions = SumTree::new();
+
+        let mut lamport_clock = clock::Lamport::new(replica_id);
+        let mut version = clock::Global::new();
+
+        let visible_text = history.base_text.clone();
+        if !visible_text.is_empty() {
+            let insertion_timestamp = clock::Lamport {
+                replica_id: 0,
+                value: 1,
+            };
+            lamport_clock.observe(insertion_timestamp);
+            version.observe(insertion_timestamp);
+            let fragment_id = Locator::between(&Locator::min(), &Locator::max());
+            let fragment = Fragment {
+                id: fragment_id,
+                timestamp: insertion_timestamp,
+                insertion_offset: 0,
+                len: visible_text.len(),
+                visible: true,
+                deletions: Default::default(),
+                max_undos: Default::default(),
+            };
+            insertions.push(InsertionFragment::new(&fragment), &());
+            fragments.push(fragment, &None);
+        }
+
+        Buffer {
+            snapshot: BufferSnapshot {
+                replica_id,
+                remote_id,
+                visible_text,
+                deleted_text: Rope::new(),
+                line_ending,
+                fragments,
+                insertions,
+                version,
+                undo_map: Default::default(),
+            },
+            history,
+            deferred_ops: OperationQueue::new(),
+            deferred_replicas: HashSet::default(),
+            lamport_clock,
+            subscriptions: Default::default(),
+            edit_id_resolvers: Default::default(),
+            wait_for_version_txs: Default::default(),
+        }
+    }
+
+    pub fn version(&self) -> clock::Global {
+        self.version.clone()
+    }
+
+    pub fn snapshot(&self) -> BufferSnapshot {
+        self.snapshot.clone()
+    }
+
+    pub fn replica_id(&self) -> ReplicaId {
+        self.lamport_clock.replica_id
+    }
+
+    pub fn remote_id(&self) -> u64 {
+        self.remote_id
+    }
+
+    pub fn deferred_ops_len(&self) -> usize {
+        self.deferred_ops.len()
+    }
+
+    pub fn transaction_group_interval(&self) -> Duration {
+        self.history.group_interval
+    }
+
+    pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
+    where
+        R: IntoIterator<IntoIter = I>,
+        I: ExactSizeIterator<Item = (Range<S>, T)>,
+        S: ToOffset,
+        T: Into<Arc<str>>,
+    {
+        let edits = edits
+            .into_iter()
+            .map(|(range, new_text)| (range, new_text.into()));
+
+        self.start_transaction();
+        let timestamp = self.lamport_clock.tick();
+        let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));
+
+        self.history.push(operation.clone());
+        self.history.push_undo(operation.timestamp());
+        self.snapshot.version.observe(operation.timestamp());
+        self.end_transaction();
+        operation
+    }
+
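+    // Splices the edits into the fragment tree in a single left-to-right pass:
+    // untouched fragments are reused, fragments intersecting an edited range are
+    // split and marked deleted, and new fragments are inserted for the new text.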
+    fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
+        &mut self,
+        edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
+        timestamp: clock::Lamport,
+    ) -> EditOperation {
+        let mut edits_patch = Patch::default();
+        let mut edit_op = EditOperation {
+            timestamp,
+            version: self.version(),
+            ranges: Vec::with_capacity(edits.len()),
+            new_text: Vec::with_capacity(edits.len()),
+        };
+        let mut new_insertions = Vec::new();
+        let mut insertion_offset = 0;
+        let mut insertion_slices = Vec::new();
+
+        let mut edits = edits
+            .map(|(range, new_text)| (range.to_offset(&*self), new_text))
+            .peekable();
+
+        let mut new_ropes =
+            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
+        let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>();
+        let mut new_fragments =
+            old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None);
+        new_ropes.append(new_fragments.summary().text);
+
+        let mut fragment_start = old_fragments.start().visible;
+        for (range, new_text) in edits {
+            let new_text = LineEnding::normalize_arc(new_text.into());
+            let fragment_end = old_fragments.end(&None).visible;
+
+            // If the current fragment ends before this range, then jump ahead to the first fragment
+            // that extends past the start of this range, reusing any intervening fragments.
+            if fragment_end < range.start {
+                // If the current fragment has been partially consumed, then consume the rest of it
+                // and advance to the next fragment before slicing.
+                if fragment_start > old_fragments.start().visible {
+                    if fragment_end > fragment_start {
+                        let mut suffix = old_fragments.item().unwrap().clone();
+                        suffix.len = fragment_end - fragment_start;
+                        suffix.insertion_offset += fragment_start - old_fragments.start().visible;
+                        new_insertions.push(InsertionFragment::insert_new(&suffix));
+                        new_ropes.push_fragment(&suffix, suffix.visible);
+                        new_fragments.push(suffix, &None);
+                    }
+                    old_fragments.next(&None);
+                }
+
+                let slice = old_fragments.slice(&range.start, Bias::Right, &None);
+                new_ropes.append(slice.summary().text);
+                new_fragments.append(slice, &None);
+                fragment_start = old_fragments.start().visible;
+            }
+
+            let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
+
+            // Preserve any portion of the current fragment that precedes this range.
+            if fragment_start < range.start {
+                let mut prefix = old_fragments.item().unwrap().clone();
+                prefix.len = range.start - fragment_start;
+                prefix.insertion_offset += fragment_start - old_fragments.start().visible;
+                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
+                new_insertions.push(InsertionFragment::insert_new(&prefix));
+                new_ropes.push_fragment(&prefix, prefix.visible);
+                new_fragments.push(prefix, &None);
+                fragment_start = range.start;
+            }
+
+            // Insert the new text before any existing fragments within the range.
+            if !new_text.is_empty() {
+                let new_start = new_fragments.summary().text.visible;
+
+                let fragment = Fragment {
+                    id: Locator::between(
+                        &new_fragments.summary().max_id,
+                        old_fragments
+                            .item()
+                            .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
+                    ),
+                    timestamp,
+                    insertion_offset,
+                    len: new_text.len(),
+                    deletions: Default::default(),
+                    max_undos: Default::default(),
+                    visible: true,
+                };
+                edits_patch.push(Edit {
+                    old: fragment_start..fragment_start,
+                    new: new_start..new_start + new_text.len(),
+                });
+                insertion_slices.push(fragment.insertion_slice());
+                new_insertions.push(InsertionFragment::insert_new(&fragment));
+                new_ropes.push_str(new_text.as_ref());
+                new_fragments.push(fragment, &None);
+                insertion_offset += new_text.len();
+            }
+
+            // Advance through every fragment that intersects this range, marking the intersecting
+            // portions as deleted.
+            while fragment_start < range.end {
+                let fragment = old_fragments.item().unwrap();
+                let fragment_end = old_fragments.end(&None).visible;
+                let mut intersection = fragment.clone();
+                let intersection_end = cmp::min(range.end, fragment_end);
+                if fragment.visible {
+                    intersection.len = intersection_end - fragment_start;
+                    intersection.insertion_offset += fragment_start - old_fragments.start().visible;
+                    intersection.id =
+                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
+                    intersection.deletions.insert(timestamp);
+                    intersection.visible = false;
+                }
+                if intersection.len > 0 {
+                    if fragment.visible && !intersection.visible {
+                        let new_start = new_fragments.summary().text.visible;
+                        edits_patch.push(Edit {
+                            old: fragment_start..intersection_end,
+                            new: new_start..new_start,
+                        });
+                        insertion_slices.push(intersection.insertion_slice());
+                    }
+                    new_insertions.push(InsertionFragment::insert_new(&intersection));
+                    new_ropes.push_fragment(&intersection, fragment.visible);
+                    new_fragments.push(intersection, &None);
+                    fragment_start = intersection_end;
+                }
+                if fragment_end <= range.end {
+                    old_fragments.next(&None);
+                }
+            }
+
+            let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
+            edit_op.ranges.push(full_range_start..full_range_end);
+            edit_op.new_text.push(new_text);
+        }
+
+        // If the current fragment has been partially consumed, then consume the rest of it
+        // and advance to the next fragment before slicing.
+        if fragment_start > old_fragments.start().visible {
+            let fragment_end = old_fragments.end(&None).visible;
+            if fragment_end > fragment_start {
+                let mut suffix = old_fragments.item().unwrap().clone();
+                suffix.len = fragment_end - fragment_start;
+                suffix.insertion_offset += fragment_start - old_fragments.start().visible;
+                new_insertions.push(InsertionFragment::insert_new(&suffix));
+                new_ropes.push_fragment(&suffix, suffix.visible);
+                new_fragments.push(suffix, &None);
+            }
+            old_fragments.next(&None);
+        }
+
+        let suffix = old_fragments.suffix(&None);
+        new_ropes.append(suffix.summary().text);
+        new_fragments.append(suffix, &None);
+        let (visible_text, deleted_text) = new_ropes.finish();
+        drop(old_fragments);
+
+        self.snapshot.fragments = new_fragments;
+        self.snapshot.insertions.edit(new_insertions, &());
+        self.snapshot.visible_text = visible_text;
+        self.snapshot.deleted_text = deleted_text;
+        self.subscriptions.publish_mut(&edits_patch);
+        self.history
+            .insertion_slices
+            .insert(timestamp, insertion_slices);
+        edit_op
+    }
+
+    pub fn set_line_ending(&mut self, line_ending: LineEnding) {
+        self.snapshot.line_ending = line_ending;
+    }
+
+    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) -> Result<()> {
+        let mut deferred_ops = Vec::new();
+        for op in ops {
+            self.history.push(op.clone());
+            if self.can_apply_op(&op) {
+                self.apply_op(op)?;
+            } else {
+                self.deferred_replicas.insert(op.replica_id());
+                deferred_ops.push(op);
+            }
+        }
+        self.deferred_ops.insert(deferred_ops);
+        self.flush_deferred_ops()?;
+        Ok(())
+    }
+
+    fn apply_op(&mut self, op: Operation) -> Result<()> {
+        match op {
+            Operation::Edit(edit) => {
+                if !self.version.observed(edit.timestamp) {
+                    self.apply_remote_edit(
+                        &edit.version,
+                        &edit.ranges,
+                        &edit.new_text,
+                        edit.timestamp,
+                    );
+                    self.snapshot.version.observe(edit.timestamp);
+                    self.lamport_clock.observe(edit.timestamp);
+                    self.resolve_edit(edit.timestamp);
+                }
+            }
+            Operation::Undo(undo) => {
+                if !self.version.observed(undo.timestamp) {
+                    self.apply_undo(&undo)?;
+                    self.snapshot.version.observe(undo.timestamp);
+                    self.lamport_clock.observe(undo.timestamp);
+                }
+            }
+        }
+        self.wait_for_version_txs.retain_mut(|(version, tx)| {
+            if self.snapshot.version().observed_all(version) {
+                tx.try_send(()).ok();
+                false
+            } else {
+                true
+            }
+        });
+        Ok(())
+    }
+
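+    // Like `apply_local_edit`, but the ranges are expressed against the version
+    // the remote peer had seen, so fragments are addressed by `VersionedFullOffset`
+    // and ties between concurrent insertions are broken by lamport timestamp.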
+    fn apply_remote_edit(
+        &mut self,
+        version: &clock::Global,
+        ranges: &[Range<FullOffset>],
+        new_text: &[Arc<str>],
+        timestamp: clock::Lamport,
+    ) {
+        if ranges.is_empty() {
+            return;
+        }
+
+        let edits = ranges.iter().zip(new_text.iter());
+        let mut edits_patch = Patch::default();
+        let mut insertion_slices = Vec::new();
+        let cx = Some(version.clone());
+        let mut new_insertions = Vec::new();
+        let mut insertion_offset = 0;
+        let mut new_ropes =
+            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
+        let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>();
+        let mut new_fragments = old_fragments.slice(
+            &VersionedFullOffset::Offset(ranges[0].start),
+            Bias::Left,
+            &cx,
+        );
+        new_ropes.append(new_fragments.summary().text);
+
+        let mut fragment_start = old_fragments.start().0.full_offset();
+        for (range, new_text) in edits {
+            let fragment_end = old_fragments.end(&cx).0.full_offset();
+
+            // If the current fragment ends before this range, then jump ahead to the first fragment
+            // that extends past the start of this range, reusing any intervening fragments.
+            if fragment_end < range.start {
+                // If the current fragment has been partially consumed, then consume the rest of it
+                // and advance to the next fragment before slicing.
+                if fragment_start > old_fragments.start().0.full_offset() {
+                    if fragment_end > fragment_start {
+                        let mut suffix = old_fragments.item().unwrap().clone();
+                        suffix.len = fragment_end.0 - fragment_start.0;
+                        suffix.insertion_offset +=
+                            fragment_start - old_fragments.start().0.full_offset();
+                        new_insertions.push(InsertionFragment::insert_new(&suffix));
+                        new_ropes.push_fragment(&suffix, suffix.visible);
+                        new_fragments.push(suffix, &None);
+                    }
+                    old_fragments.next(&cx);
+                }
+
+                let slice =
+                    old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx);
+                new_ropes.append(slice.summary().text);
+                new_fragments.append(slice, &None);
+                fragment_start = old_fragments.start().0.full_offset();
+            }
+
+            // If we are at the end of a non-concurrent fragment, advance to the next one.
+            let fragment_end = old_fragments.end(&cx).0.full_offset();
+            if fragment_end == range.start && fragment_end > fragment_start {
+                let mut fragment = old_fragments.item().unwrap().clone();
+                fragment.len = fragment_end.0 - fragment_start.0;
+                fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+                new_insertions.push(InsertionFragment::insert_new(&fragment));
+                new_ropes.push_fragment(&fragment, fragment.visible);
+                new_fragments.push(fragment, &None);
+                old_fragments.next(&cx);
+                fragment_start = old_fragments.start().0.full_offset();
+            }
+
+            // Skip over insertions that are concurrent to this edit, but have a higher lamport
+            // timestamp, so they stay ordered ahead of this edit's new text.
+            while let Some(fragment) = old_fragments.item() {
+                if fragment_start == range.start && fragment.timestamp > timestamp {
+                    new_ropes.push_fragment(fragment, fragment.visible);
+                    new_fragments.push(fragment.clone(), &None);
+                    old_fragments.next(&cx);
+                    debug_assert_eq!(fragment_start, range.start);
+                } else {
+                    break;
+                }
+            }
+            debug_assert!(fragment_start <= range.start);
+
+            // Preserve any portion of the current fragment that precedes this range.
+            if fragment_start < range.start {
+                let mut prefix = old_fragments.item().unwrap().clone();
+                prefix.len = range.start.0 - fragment_start.0;
+                prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
+                new_insertions.push(InsertionFragment::insert_new(&prefix));
+                fragment_start = range.start;
+                new_ropes.push_fragment(&prefix, prefix.visible);
+                new_fragments.push(prefix, &None);
+            }
+
+            // Insert the new text before any existing fragments within the range.
+            if !new_text.is_empty() {
+                let mut old_start = old_fragments.start().1;
+                if old_fragments.item().map_or(false, |f| f.visible) {
+                    old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
+                }
+                let new_start = new_fragments.summary().text.visible;
+                let fragment = Fragment {
+                    id: Locator::between(
+                        &new_fragments.summary().max_id,
+                        old_fragments
+                            .item()
+                            .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
+                    ),
+                    timestamp,
+                    insertion_offset,
+                    len: new_text.len(),
+                    deletions: Default::default(),
+                    max_undos: Default::default(),
+                    visible: true,
+                };
+                edits_patch.push(Edit {
+                    old: old_start..old_start,
+                    new: new_start..new_start + new_text.len(),
+                });
+                insertion_slices.push(fragment.insertion_slice());
+                new_insertions.push(InsertionFragment::insert_new(&fragment));
+                new_ropes.push_str(new_text);
+                new_fragments.push(fragment, &None);
+                insertion_offset += new_text.len();
+            }
+
+            // Advance through every fragment that intersects this range, marking the intersecting
+            // portions as deleted.
+            while fragment_start < range.end {
+                let fragment = old_fragments.item().unwrap();
+                let fragment_end = old_fragments.end(&cx).0.full_offset();
+                let mut intersection = fragment.clone();
+                let intersection_end = cmp::min(range.end, fragment_end);
+                if fragment.was_visible(version, &self.undo_map) {
+                    intersection.len = intersection_end.0 - fragment_start.0;
+                    intersection.insertion_offset +=
+                        fragment_start - old_fragments.start().0.full_offset();
+                    intersection.id =
+                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
+                    intersection.deletions.insert(timestamp);
+                    intersection.visible = false;
+                    insertion_slices.push(intersection.insertion_slice());
+                }
+                if intersection.len > 0 {
+                    if fragment.visible && !intersection.visible {
+                        let old_start = old_fragments.start().1
+                            + (fragment_start.0 - old_fragments.start().0.full_offset().0);
+                        let new_start = new_fragments.summary().text.visible;
+                        edits_patch.push(Edit {
+                            old: old_start..old_start + intersection.len,
+                            new: new_start..new_start,
+                        });
+                    }
+                    new_insertions.push(InsertionFragment::insert_new(&intersection));
+                    new_ropes.push_fragment(&intersection, fragment.visible);
+                    new_fragments.push(intersection, &None);
+                    fragment_start = intersection_end;
+                }
+                if fragment_end <= range.end {
+                    old_fragments.next(&cx);
+                }
+            }
+        }
+
+        // If the current fragment has been partially consumed, then consume the rest of it
+        // and advance to the next fragment before slicing.
+        if fragment_start > old_fragments.start().0.full_offset() {
+            let fragment_end = old_fragments.end(&cx).0.full_offset();
+            if fragment_end > fragment_start {
+                let mut suffix = old_fragments.item().unwrap().clone();
+                suffix.len = fragment_end.0 - fragment_start.0;
+                suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+                new_insertions.push(InsertionFragment::insert_new(&suffix));
+                new_ropes.push_fragment(&suffix, suffix.visible);
+                new_fragments.push(suffix, &None);
+            }
+            old_fragments.next(&cx);
+        }
+
+        let suffix = old_fragments.suffix(&cx);
+        new_ropes.append(suffix.summary().text);
+        new_fragments.append(suffix, &None);
+        let (visible_text, deleted_text) = new_ropes.finish();
+        drop(old_fragments);
+
+        self.snapshot.fragments = new_fragments;
+        self.snapshot.visible_text = visible_text;
+        self.snapshot.deleted_text = deleted_text;
+        self.snapshot.insertions.edit(new_insertions, &());
+        self.history
+            .insertion_slices
+            .insert(timestamp, insertion_slices);
+        self.subscriptions.publish_mut(&edits_patch)
+    }
+
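+    // Maps edit ids back to the fragments they touched, via the insertion slices
+    // recorded when each edit was applied.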
+    fn fragment_ids_for_edits<'a>(
+        &'a self,
+        edit_ids: impl Iterator<Item = &'a clock::Lamport>,
+    ) -> Vec<&'a Locator> {
+        // Get all of the insertion slices changed by the given edits.
+        let mut insertion_slices = Vec::new();
+        for edit_id in edit_ids {
+            if let Some(slices) = self.history.insertion_slices.get(edit_id) {
+                insertion_slices.extend_from_slice(slices)
+            }
+        }
+        insertion_slices
+            .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));
+
+        // Get all of the fragments corresponding to these insertion slices.
+        let mut fragment_ids = Vec::new();
+        let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+        for insertion_slice in &insertion_slices {
+            if insertion_slice.insertion_id != insertions_cursor.start().timestamp
+                || insertion_slice.range.start > insertions_cursor.start().split_offset
+            {
+                insertions_cursor.seek_forward(
+                    &InsertionFragmentKey {
+                        timestamp: insertion_slice.insertion_id,
+                        split_offset: insertion_slice.range.start,
+                    },
+                    Bias::Left,
+                    &(),
+                );
+            }
+            while let Some(item) = insertions_cursor.item() {
+                if item.timestamp != insertion_slice.insertion_id
+                    || item.split_offset >= insertion_slice.range.end
+                {
+                    break;
+                }
+                fragment_ids.push(&item.fragment_id);
+                insertions_cursor.next(&());
+            }
+        }
+        fragment_ids.sort_unstable();
+        fragment_ids
+    }
+
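+    // Recomputes visibility for every fragment touched by the undone edits and
+    // rebuilds the visible and deleted ropes to match.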
+    fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> {
+        self.snapshot.undo_map.insert(undo);
+
+        let mut edits = Patch::default();
+        let mut old_fragments = self.fragments.cursor::<(Option<&Locator>, usize)>();
+        let mut new_fragments = SumTree::new();
+        let mut new_ropes =
+            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
+
+        for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
+            let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left, &None);
+            new_ropes.append(preceding_fragments.summary().text);
+            new_fragments.append(preceding_fragments, &None);
+
+            if let Some(fragment) = old_fragments.item() {
+                let mut fragment = fragment.clone();
+                let fragment_was_visible = fragment.visible;
+
+                fragment.visible = fragment.is_visible(&self.undo_map);
+                fragment.max_undos.observe(undo.timestamp);
+
+                let old_start = old_fragments.start().1;
+                let new_start = new_fragments.summary().text.visible;
+                if fragment_was_visible && !fragment.visible {
+                    edits.push(Edit {
+                        old: old_start..old_start + fragment.len,
+                        new: new_start..new_start,
+                    });
+                } else if !fragment_was_visible && fragment.visible {
+                    edits.push(Edit {
+                        old: old_start..old_start,
+                        new: new_start..new_start + fragment.len,
+                    });
+                }
+                new_ropes.push_fragment(&fragment, fragment_was_visible);
+                new_fragments.push(fragment, &None);
+
+                old_fragments.next(&None);
+            }
+        }
+
+        let suffix = old_fragments.suffix(&None);
+        new_ropes.append(suffix.summary().text);
+        new_fragments.append(suffix, &None);
+
+        drop(old_fragments);
+        let (visible_text, deleted_text) = new_ropes.finish();
+        self.snapshot.fragments = new_fragments;
+        self.snapshot.visible_text = visible_text;
+        self.snapshot.deleted_text = deleted_text;
+        self.subscriptions.publish_mut(&edits);
+        Ok(())
+    }
+
+    fn flush_deferred_ops(&mut self) -> Result<()> {
+        self.deferred_replicas.clear();
+        let mut deferred_ops = Vec::new();
+        for op in self.deferred_ops.drain().iter().cloned() {
+            if self.can_apply_op(&op) {
+                self.apply_op(op)?;
+            } else {
+                self.deferred_replicas.insert(op.replica_id());
+                deferred_ops.push(op);
+            }
+        }
+        self.deferred_ops.insert(deferred_ops);
+        Ok(())
+    }
+
+    fn can_apply_op(&self, op: &Operation) -> bool {
+        if self.deferred_replicas.contains(&op.replica_id()) {
+            false
+        } else {
+            self.version.observed_all(match op {
+                Operation::Edit(edit) => &edit.version,
+                Operation::Undo(undo) => &undo.version,
+            })
+        }
+    }
+
+    pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
+        self.history.undo_stack.last()
+    }
+
+    pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
+        self.history.redo_stack.last()
+    }
+
+    pub fn start_transaction(&mut self) -> Option<TransactionId> {
+        self.start_transaction_at(Instant::now())
+    }
+
+    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
+        self.history
+            .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
+    }
+
+    pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
+        self.end_transaction_at(Instant::now())
+    }
+
+    pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
+        if let Some(entry) = self.history.end_transaction(now) {
+            let since = entry.transaction.start.clone();
+            let id = self.history.group().unwrap();
+            Some((id, since))
+        } else {
+            None
+        }
+    }
+
+    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
+        self.history.finalize_last_transaction()
+    }
+
+    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
+        self.history.group_until(transaction_id);
+    }
+
+    pub fn base_text(&self) -> &Rope {
+        &self.history.base_text
+    }
+
+    pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
+        &self.history.operations
+    }
+
+    pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
+        if let Some(entry) = self.history.pop_undo() {
+            let transaction = entry.transaction.clone();
+            let transaction_id = transaction.id;
+            let op = self.undo_or_redo(transaction).unwrap();
+            Some((transaction_id, op))
+        } else {
+            None
+        }
+    }
+
+    pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
+        let transaction = self
+            .history
+            .remove_from_undo(transaction_id)?
+            .transaction
+            .clone();
+        self.undo_or_redo(transaction).log_err()
+    }
+
+    #[allow(clippy::needless_collect)]
+    pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
+        let transactions = self
+            .history
+            .remove_from_undo_until(transaction_id)
+            .iter()
+            .map(|entry| entry.transaction.clone())
+            .collect::<Vec<_>>();
+
+        transactions
+            .into_iter()
+            .map(|transaction| self.undo_or_redo(transaction).unwrap())
+            .collect()
+    }
+
+    pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
+        self.history.forget(transaction_id);
+    }
+
+    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
+        self.history.merge_transactions(transaction, destination);
+    }
+
+    pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
+        if let Some(entry) = self.history.pop_redo() {
+            let transaction = entry.transaction.clone();
+            let transaction_id = transaction.id;
+            let op = self.undo_or_redo(transaction).unwrap();
+            Some((transaction_id, op))
+        } else {
+            None
+        }
+    }
+
+    #[allow(clippy::needless_collect)]
+    pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
+        let transactions = self
+            .history
+            .remove_from_redo(transaction_id)
+            .iter()
+            .map(|entry| entry.transaction.clone())
+            .collect::<Vec<_>>();
+
+        transactions
+            .into_iter()
+            .map(|transaction| self.undo_or_redo(transaction).unwrap())
+            .collect()
+    }
+
+    fn undo_or_redo(&mut self, transaction: Transaction) -> Result<Operation> {
+        let mut counts = HashMap::default();
+        for edit_id in transaction.edit_ids {
+            counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1);
+        }
+
+        let undo = UndoOperation {
+            timestamp: self.lamport_clock.tick(),
+            version: self.version(),
+            counts,
+        };
+        self.apply_undo(&undo)?;
+        self.snapshot.version.observe(undo.timestamp);
+        let operation = Operation::Undo(undo);
+        self.history.push(operation.clone());
+        Ok(operation)
+    }
+
+    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
+        self.history.push_transaction(transaction, now);
+        self.history.finalize_last_transaction();
+    }
+
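+    /// Returns the current locations of the text affected by a transaction's
+    /// edits, merged into disjoint ranges and reported in the dimension `D`.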
+    pub fn edited_ranges_for_transaction<'a, D>(
+        &'a self,
+        transaction: &'a Transaction,
+    ) -> impl 'a + Iterator<Item = Range<D>>
+    where
+        D: TextDimension,
+    {
+        // get fragment ranges
+        let mut cursor = self.fragments.cursor::<(Option<&Locator>, usize)>();
+        let offset_ranges = self
+            .fragment_ids_for_edits(transaction.edit_ids.iter())
+            .into_iter()
+            .filter_map(move |fragment_id| {
+                cursor.seek_forward(&Some(fragment_id), Bias::Left, &None);
+                let fragment = cursor.item()?;
+                let start_offset = cursor.start().1;
+                let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
+                Some(start_offset..end_offset)
+            });
+
+        // combine adjacent ranges
+        let mut prev_range: Option<Range<usize>> = None;
+        let disjoint_ranges = offset_ranges
+            .map(Some)
+            .chain([None])
+            .filter_map(move |range| {
+                if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut()) {
+                    if prev_range.end == range.start {
+                        prev_range.end = range.end;
+                        return None;
+                    }
+                }
+                let result = prev_range.clone();
+                prev_range = range;
+                result
+            });
+
+        // convert to the desired text dimension.
+        let mut position = D::default();
+        let mut rope_cursor = self.visible_text.cursor(0);
+        disjoint_ranges.map(move |range| {
+            position.add_assign(&rope_cursor.summary(range.start));
+            let start = position.clone();
+            position.add_assign(&rope_cursor.summary(range.end));
+            let end = position.clone();
+            start..end
+        })
+    }
+
+    pub fn subscribe(&mut self) -> Subscription {
+        self.subscriptions.subscribe()
+    }
+
+    pub fn wait_for_edits(
+        &mut self,
+        edit_ids: impl IntoIterator<Item = clock::Lamport>,
+    ) -> impl 'static + Future<Output = Result<()>> {
+        let mut futures = Vec::new();
+        for edit_id in edit_ids {
+            if !self.version.observed(edit_id) {
+                let (tx, rx) = oneshot::channel();
+                self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
+                futures.push(rx);
+            }
+        }
+
+        async move {
+            for mut future in futures {
+                if future.recv().await.is_none() {
+                    Err(anyhow!("gave up waiting for edits"))?;
+                }
+            }
+            Ok(())
+        }
+    }
+
+    pub fn wait_for_anchors(
+        &mut self,
+        anchors: impl IntoIterator<Item = Anchor>,
+    ) -> impl 'static + Future<Output = Result<()>> {
+        let mut futures = Vec::new();
+        for anchor in anchors {
+            if !self.version.observed(anchor.timestamp)
+                && anchor != Anchor::MAX
+                && anchor != Anchor::MIN
+            {
+                let (tx, rx) = oneshot::channel();
+                self.edit_id_resolvers
+                    .entry(anchor.timestamp)
+                    .or_default()
+                    .push(tx);
+                futures.push(rx);
+            }
+        }
+
+        async move {
+            for mut future in futures {
+                if future.recv().await.is_none() {
+                    Err(anyhow!("gave up waiting for anchors"))?;
+                }
+            }
+            Ok(())
+        }
+    }
+
+    pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
+        let mut rx = None;
+        if !self.snapshot.version.observed_all(&version) {
+            let channel = oneshot::channel();
+            self.wait_for_version_txs.push((version, channel.0));
+            rx = Some(channel.1);
+        }
+        async move {
+            if let Some(mut rx) = rx {
+                if rx.recv().await.is_none() {
+                    Err(anyhow!("gave up waiting for version"))?;
+                }
+            }
+            Ok(())
+        }
+    }
+
+    pub fn give_up_waiting(&mut self) {
+        self.edit_id_resolvers.clear();
+        self.wait_for_version_txs.clear();
+    }
+
+    fn resolve_edit(&mut self, edit_id: clock::Lamport) {
+        for mut tx in self
+            .edit_id_resolvers
+            .remove(&edit_id)
+            .into_iter()
+            .flatten()
+        {
+            tx.try_send(()).ok();
+        }
+    }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+impl Buffer {
+    pub fn edit_via_marked_text(&mut self, marked_string: &str) {
+        let edits = self.edits_for_marked_text(marked_string);
+        self.edit(edits);
+    }
+
+    pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
+        let old_text = self.text();
+        let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
+        if ranges.is_empty() {
+            ranges.push(0..new_text.len());
+        }
+
+        assert_eq!(
+            old_text[..ranges[0].start],
+            new_text[..ranges[0].start],
+            "invalid edit"
+        );
+
+        let mut delta = 0;
+        let mut edits = Vec::new();
+        let mut ranges = ranges.into_iter().peekable();
+
+        while let Some(inserted_range) = ranges.next() {
+            let new_start = inserted_range.start;
+            let old_start = (new_start as isize - delta) as usize;
+
+            let following_text = if let Some(next_range) = ranges.peek() {
+                &new_text[inserted_range.end..next_range.start]
+            } else {
+                &new_text[inserted_range.end..]
+            };
+
+            let inserted_len = inserted_range.len();
+            let deleted_len = old_text[old_start..]
+                .find(following_text)
+                .expect("invalid edit");
+
+            let old_range = old_start..old_start + deleted_len;
+            edits.push((old_range, new_text[inserted_range].to_string()));
+            delta += inserted_len as isize - deleted_len as isize;
+        }
+
+        assert_eq!(
+            old_text.len() as isize + delta,
+            new_text.len() as isize,
+            "invalid edit"
+        );
+
+        edits
+    }
+
+    pub fn check_invariants(&self) {
+        // Ensure every fragment is ordered by locator in the fragment tree and corresponds
+        // to an insertion fragment in the insertions tree.
+        let mut prev_fragment_id = Locator::min();
+        for fragment in self.snapshot.fragments.items(&None) {
+            assert!(fragment.id > prev_fragment_id);
+            prev_fragment_id = fragment.id.clone();
+
+            let insertion_fragment = self
+                .snapshot
+                .insertions
+                .get(
+                    &InsertionFragmentKey {
+                        timestamp: fragment.timestamp,
+                        split_offset: fragment.insertion_offset,
+                    },
+                    &(),
+                )
+                .unwrap();
+            assert_eq!(
+                insertion_fragment.fragment_id, fragment.id,
+                "fragment: {:?}\ninsertion: {:?}",
+                fragment, insertion_fragment
+            );
+        }
+
+        let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>();
+        for insertion_fragment in self.snapshot.insertions.cursor::<()>() {
+            cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None);
+            let fragment = cursor.item().unwrap();
+            assert_eq!(insertion_fragment.fragment_id, fragment.id);
+            assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
+        }
+
+        let fragment_summary = self.snapshot.fragments.summary();
+        assert_eq!(
+            fragment_summary.text.visible,
+            self.snapshot.visible_text.len()
+        );
+        assert_eq!(
+            fragment_summary.text.deleted,
+            self.snapshot.deleted_text.len()
+        );
+
+        assert!(!self.text().contains("\r\n"));
+    }
+
+    pub fn set_group_interval(&mut self, group_interval: Duration) {
+        self.history.group_interval = group_interval;
+    }
+
+    pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
+        let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right);
+        let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right);
+        start..end
+    }
+
+    pub fn get_random_edits<T>(
+        &self,
+        rng: &mut T,
+        edit_count: usize,
+    ) -> Vec<(Range<usize>, Arc<str>)>
+    where
+        T: rand::Rng,
+    {
+        let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
+        let mut last_end = None;
+        for _ in 0..edit_count {
+            if last_end.map_or(false, |last_end| last_end >= self.len()) {
+                break;
+            }
+            let new_start = last_end.map_or(0, |last_end| last_end + 1);
+            let range = self.random_byte_range(new_start, rng);
+            last_end = Some(range.end);
+
+            let new_text_len = rng.gen_range(0..10);
+            let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
+
+            edits.push((range, new_text.into()));
+        }
+        edits
+    }
+
+    #[allow(clippy::type_complexity)]
+    pub fn randomly_edit<T>(
+        &mut self,
+        rng: &mut T,
+        edit_count: usize,
+    ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
+    where
+        T: rand::Rng,
+    {
+        let mut edits = self.get_random_edits(rng, edit_count);
+        log::info!("mutating buffer {} with {:?}", self.replica_id, edits);
+
+        let op = self.edit(edits.iter().cloned());
+        if let Operation::Edit(edit) = &op {
+            assert_eq!(edits.len(), edit.new_text.len());
+            for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
+                edit.1 = new_text.clone();
+            }
+        } else {
+            unreachable!()
+        }
+
+        (edits, op)
+    }
+
+    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
+        use rand::prelude::*;
+
+        let mut ops = Vec::new();
+        for _ in 0..rng.gen_range(1..=5) {
+            if let Some(entry) = self.history.undo_stack.choose(rng) {
+                let transaction = entry.transaction.clone();
+                log::info!(
+                    "undoing buffer {} transaction {:?}",
+                    self.replica_id,
+                    transaction
+                );
+                ops.push(self.undo_or_redo(transaction).unwrap());
+            }
+        }
+        ops
+    }
+}
+
+impl Deref for Buffer {
+    type Target = BufferSnapshot;
+
+    fn deref(&self) -> &Self::Target {
+        &self.snapshot
+    }
+}
+
+impl BufferSnapshot {
+    pub fn as_rope(&self) -> &Rope {
+        &self.visible_text
+    }
+
+    pub fn remote_id(&self) -> u64 {
+        self.remote_id
+    }
+
+    pub fn replica_id(&self) -> ReplicaId {
+        self.replica_id
+    }
+
+    pub fn row_count(&self) -> u32 {
+        self.max_point().row + 1
+    }
+
+    pub fn len(&self) -> usize {
+        self.visible_text.len()
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
+        self.chars_at(0)
+    }
+
+    pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
+        self.text_for_range(range).flat_map(str::chars)
+    }
+
+    pub fn reversed_chars_for_range<T: ToOffset>(
+        &self,
+        range: Range<T>,
+    ) -> impl Iterator<Item = char> + '_ {
+        self.reversed_chunks_in_range(range)
+            .flat_map(|chunk| chunk.chars().rev())
+    }
+
+    pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
+    where
+        T: ToOffset,
+    {
+        let position = position.to_offset(self);
+        position == self.clip_offset(position, Bias::Left)
+            && self
+                .bytes_in_range(position..self.len())
+                .flatten()
+                .copied()
+                .take(needle.len())
+                .eq(needle.bytes())
+    }
+
+    pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
+    where
+        T: ToOffset + TextDimension,
+    {
+        let offset = position.to_offset(self);
+        let common_prefix_len = needle
+            .char_indices()
+            .map(|(index, _)| index)
+            .chain([needle.len()])
+            .take_while(|&len| len <= offset)
+            .filter(|&len| {
+                let left = self
+                    .chars_for_range(offset - len..offset)
+                    .flat_map(char::to_lowercase);
+                let right = needle[..len].chars().flat_map(char::to_lowercase);
+                left.eq(right)
+            })
+            .last()
+            .unwrap_or(0);
+        let start_offset = offset - common_prefix_len;
+        let start = self.text_summary_for_range(0..start_offset);
+        start..position
+    }
+
+    pub fn text(&self) -> String {
+        self.visible_text.to_string()
+    }
+
+    pub fn line_ending(&self) -> LineEnding {
+        self.line_ending
+    }
+
+    pub fn deleted_text(&self) -> String {
+        self.deleted_text.to_string()
+    }
+
+    pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
+        self.fragments.iter()
+    }
+
+    pub fn text_summary(&self) -> TextSummary {
+        self.visible_text.summary()
+    }
+
+    pub fn max_point(&self) -> Point {
+        self.visible_text.max_point()
+    }
+
+    pub fn max_point_utf16(&self) -> PointUtf16 {
+        self.visible_text.max_point_utf16()
+    }
+
+    pub fn point_to_offset(&self, point: Point) -> usize {
+        self.visible_text.point_to_offset(point)
+    }
+
+    pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
+        self.visible_text.point_utf16_to_offset(point)
+    }
+
+    pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
+        self.visible_text.unclipped_point_utf16_to_offset(point)
+    }
+
+    pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
+        self.visible_text.unclipped_point_utf16_to_point(point)
+    }
+
+    pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
+        self.visible_text.offset_utf16_to_offset(offset)
+    }
+
+    pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
+        self.visible_text.offset_to_offset_utf16(offset)
+    }
+
+    pub fn offset_to_point(&self, offset: usize) -> Point {
+        self.visible_text.offset_to_point(offset)
+    }
+
+    pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
+        self.visible_text.offset_to_point_utf16(offset)
+    }
+
+    pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
+        self.visible_text.point_to_point_utf16(point)
+    }
+
+    pub fn version(&self) -> &clock::Global {
+        &self.version
+    }
+
+    pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
+        let offset = position.to_offset(self);
+        self.visible_text.chars_at(offset)
+    }
+
+    pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
+        let offset = position.to_offset(self);
+        self.visible_text.reversed_chars_at(offset)
+    }
+
+    pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks {
+        let range = range.start.to_offset(self)..range.end.to_offset(self);
+        self.visible_text.reversed_chunks_in_range(range)
+    }
+
+    pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
+        let start = range.start.to_offset(self);
+        let end = range.end.to_offset(self);
+        self.visible_text.bytes_in_range(start..end)
+    }
+
+    pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
+        let start = range.start.to_offset(self);
+        let end = range.end.to_offset(self);
+        self.visible_text.reversed_bytes_in_range(start..end)
+    }
+
+    pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
+        let start = range.start.to_offset(self);
+        let end = range.end.to_offset(self);
+        self.visible_text.chunks_in_range(start..end)
+    }
+
+    pub fn line_len(&self, row: u32) -> u32 {
+        let row_start_offset = Point::new(row, 0).to_offset(self);
+        let row_end_offset = if row >= self.max_point().row {
+            self.len()
+        } else {
+            Point::new(row + 1, 0).to_offset(self) - 1
+        };
+        (row_end_offset - row_start_offset) as u32
+    }
+
+    pub fn is_line_blank(&self, row: u32) -> bool {
+        self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
+            .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
+    }
+
+    pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
+    where
+        D: TextDimension,
+    {
+        self.visible_text
+            .cursor(range.start.to_offset(self))
+            .summary(range.end.to_offset(self))
+    }
+
+    pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
+    where
+        D: 'a + TextDimension,
+        A: 'a + IntoIterator<Item = &'a Anchor>,
+    {
+        let anchors = anchors.into_iter();
+        self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
+            .map(|d| d.0)
+    }
+
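+    /// Like `summaries_for_anchors`, but threads an arbitrary payload through with each
+    /// anchor. Anchors are expected in increasing order, since the cursors only move forward.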
+    pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
+        &'a self,
+        anchors: A,
+    ) -> impl 'a + Iterator<Item = (D, T)>
+    where
+        D: 'a + TextDimension,
+        A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
+    {
+        let anchors = anchors.into_iter();
+        let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+        let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>();
+        let mut text_cursor = self.visible_text.cursor(0);
+        let mut position = D::default();
+
+        anchors.map(move |(anchor, payload)| {
+            if *anchor == Anchor::MIN {
+                return (D::default(), payload);
+            } else if *anchor == Anchor::MAX {
+                return (D::from_text_summary(&self.visible_text.summary()), payload);
+            }
+
+            let anchor_key = InsertionFragmentKey {
+                timestamp: anchor.timestamp,
+                split_offset: anchor.offset,
+            };
+            insertion_cursor.seek(&anchor_key, anchor.bias, &());
+            if let Some(insertion) = insertion_cursor.item() {
+                let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
+                if comparison == Ordering::Greater
+                    || (anchor.bias == Bias::Left
+                        && comparison == Ordering::Equal
+                        && anchor.offset > 0)
+                {
+                    insertion_cursor.prev(&());
+                }
+            } else {
+                insertion_cursor.prev(&());
+            }
+            let insertion = insertion_cursor.item().expect("invalid insertion");
+            assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
+
+            fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left, &None);
+            let fragment = fragment_cursor.item().unwrap();
+            let mut fragment_offset = fragment_cursor.start().1;
+            if fragment.visible {
+                fragment_offset += anchor.offset - insertion.split_offset;
+            }
+
+            position.add_assign(&text_cursor.summary(fragment_offset));
+            (position.clone(), payload)
+        })
+    }
+
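+    /// Resolves a single anchor to a position in dimension `D` by locating its insertion
+    /// fragment and summarizing the visible text up to that point.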
+    fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
+    where
+        D: TextDimension,
+    {
+        if *anchor == Anchor::MIN {
+            D::default()
+        } else if *anchor == Anchor::MAX {
+            D::from_text_summary(&self.visible_text.summary())
+        } else {
+            let anchor_key = InsertionFragmentKey {
+                timestamp: anchor.timestamp,
+                split_offset: anchor.offset,
+            };
+            let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+            insertion_cursor.seek(&anchor_key, anchor.bias, &());
+            if let Some(insertion) = insertion_cursor.item() {
+                let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
+                if comparison == Ordering::Greater
+                    || (anchor.bias == Bias::Left
+                        && comparison == Ordering::Equal
+                        && anchor.offset > 0)
+                {
+                    insertion_cursor.prev(&());
+                }
+            } else {
+                insertion_cursor.prev(&());
+            }
+            let insertion = insertion_cursor.item().expect("invalid insertion");
+            assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
+
+            let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>();
+            fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None);
+            let fragment = fragment_cursor.item().unwrap();
+            let mut fragment_offset = fragment_cursor.start().1;
+            if fragment.visible {
+                fragment_offset += anchor.offset - insertion.split_offset;
+            }
+            self.text_summary_for_range(0..fragment_offset)
+        }
+    }
+
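+    /// Returns the locator of the fragment containing this anchor, which is what orders
+    /// anchors within the fragment tree.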
+    fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
+        if *anchor == Anchor::MIN {
+            Locator::min_ref()
+        } else if *anchor == Anchor::MAX {
+            Locator::max_ref()
+        } else {
+            let anchor_key = InsertionFragmentKey {
+                timestamp: anchor.timestamp,
+                split_offset: anchor.offset,
+            };
+            let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+            insertion_cursor.seek(&anchor_key, anchor.bias, &());
+            if let Some(insertion) = insertion_cursor.item() {
+                let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
+                if comparison == Ordering::Greater
+                    || (anchor.bias == Bias::Left
+                        && comparison == Ordering::Equal
+                        && anchor.offset > 0)
+                {
+                    insertion_cursor.prev(&());
+                }
+            } else {
+                insertion_cursor.prev(&());
+            }
+            let insertion = insertion_cursor.item().expect("invalid insertion");
+            debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
+            &insertion.fragment_id
+        }
+    }
+
+    pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
+        self.anchor_at(position, Bias::Left)
+    }
+
+    pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
+        self.anchor_at(position, Bias::Right)
+    }
+
+    pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
+        self.anchor_at_offset(position.to_offset(self), bias)
+    }
+
+    fn anchor_at_offset(&self, offset: usize, bias: Bias) -> Anchor {
+        if bias == Bias::Left && offset == 0 {
+            Anchor::MIN
+        } else if bias == Bias::Right && offset == self.len() {
+            Anchor::MAX
+        } else {
+            let mut fragment_cursor = self.fragments.cursor::<usize>();
+            fragment_cursor.seek(&offset, bias, &None);
+            let fragment = fragment_cursor.item().unwrap();
+            let overshoot = offset - *fragment_cursor.start();
+            Anchor {
+                timestamp: fragment.timestamp,
+                offset: fragment.insertion_offset + overshoot,
+                bias,
+                buffer_id: Some(self.remote_id),
+            }
+        }
+    }
+
+    pub fn can_resolve(&self, anchor: &Anchor) -> bool {
+        *anchor == Anchor::MIN
+            || *anchor == Anchor::MAX
+            || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp))
+    }
+
+    pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
+        self.visible_text.clip_offset(offset, bias)
+    }
+
+    pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
+        self.visible_text.clip_point(point, bias)
+    }
+
+    pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
+        self.visible_text.clip_offset_utf16(offset, bias)
+    }
+
+    pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
+        self.visible_text.clip_point_utf16(point, bias)
+    }
+
+    pub fn edits_since<'a, D>(
+        &'a self,
+        since: &'a clock::Global,
+    ) -> impl 'a + Iterator<Item = Edit<D>>
+    where
+        D: TextDimension + Ord,
+    {
+        self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
+    }
+
+    pub fn anchored_edits_since<'a, D>(
+        &'a self,
+        since: &'a clock::Global,
+    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
+    where
+        D: TextDimension + Ord,
+    {
+        self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
+    }
+
+    pub fn edits_since_in_range<'a, D>(
+        &'a self,
+        since: &'a clock::Global,
+        range: Range<Anchor>,
+    ) -> impl 'a + Iterator<Item = Edit<D>>
+    where
+        D: TextDimension + Ord,
+    {
+        self.anchored_edits_since_in_range(since, range)
+            .map(|item| item.0)
+    }
+
+    pub fn anchored_edits_since_in_range<'a, D>(
+        &'a self,
+        since: &'a clock::Global,
+        range: Range<Anchor>,
+    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
+    where
+        D: TextDimension + Ord,
+    {
+        let fragments_cursor = if *since == self.version {
+            None
+        } else {
+            let mut cursor = self
+                .fragments
+                .filter(move |summary| !since.observed_all(&summary.max_version));
+            cursor.next(&None);
+            Some(cursor)
+        };
+        let mut cursor = self
+            .fragments
+            .cursor::<(Option<&Locator>, FragmentTextSummary)>();
+
+        let start_fragment_id = self.fragment_id_for_anchor(&range.start);
+        cursor.seek(&Some(start_fragment_id), Bias::Left, &None);
+        let mut visible_start = cursor.start().1.visible;
+        let mut deleted_start = cursor.start().1.deleted;
+        if let Some(fragment) = cursor.item() {
+            let overshoot = range.start.offset - fragment.insertion_offset;
+            if fragment.visible {
+                visible_start += overshoot;
+            } else {
+                deleted_start += overshoot;
+            }
+        }
+        let end_fragment_id = self.fragment_id_for_anchor(&range.end);
+
+        Edits {
+            visible_cursor: self.visible_text.cursor(visible_start),
+            deleted_cursor: self.deleted_text.cursor(deleted_start),
+            fragments_cursor,
+            undos: &self.undo_map,
+            since,
+            old_end: Default::default(),
+            new_end: Default::default(),
+            range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
+            buffer_id: self.remote_id,
+        }
+    }
+}
+
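+/// Builds the next visible and deleted ropes during an edit or undo, copying each run of
+/// text out of the old ropes and into whichever new rope matches its current visibility.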
+struct RopeBuilder<'a> {
+    old_visible_cursor: rope::Cursor<'a>,
+    old_deleted_cursor: rope::Cursor<'a>,
+    new_visible: Rope,
+    new_deleted: Rope,
+}
+
+impl<'a> RopeBuilder<'a> {
+    fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
+        Self {
+            old_visible_cursor,
+            old_deleted_cursor,
+            new_visible: Rope::new(),
+            new_deleted: Rope::new(),
+        }
+    }
+
+    fn append(&mut self, len: FragmentTextSummary) {
+        self.push(len.visible, true, true);
+        self.push(len.deleted, false, false);
+    }
+
+    fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
+        debug_assert!(fragment.len > 0);
+        self.push(fragment.len, was_visible, fragment.visible)
+    }
+
+    fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
+        let text = if was_visible {
+            self.old_visible_cursor
+                .slice(self.old_visible_cursor.offset() + len)
+        } else {
+            self.old_deleted_cursor
+                .slice(self.old_deleted_cursor.offset() + len)
+        };
+        if is_visible {
+            self.new_visible.append(text);
+        } else {
+            self.new_deleted.append(text);
+        }
+    }
+
+    fn push_str(&mut self, text: &str) {
+        self.new_visible.push(text);
+    }
+
+    fn finish(mut self) -> (Rope, Rope) {
+        self.new_visible.append(self.old_visible_cursor.suffix());
+        self.new_deleted.append(self.old_deleted_cursor.suffix());
+        (self.new_visible, self.new_deleted)
+    }
+}
+
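+/// Walks the fragments that changed since `since`, coalescing adjacent insertions and
+/// deletions into `Edit`s paired with the anchor range they cover.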
+impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'a, D, F> {
+    type Item = (Edit<D>, Range<Anchor>);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let mut pending_edit: Option<Self::Item> = None;
+        let cursor = self.fragments_cursor.as_mut()?;
+
+        while let Some(fragment) = cursor.item() {
+            if fragment.id < *self.range.start.0 {
+                cursor.next(&None);
+                continue;
+            } else if fragment.id > *self.range.end.0 {
+                break;
+            }
+
+            if cursor.start().visible > self.visible_cursor.offset() {
+                let summary = self.visible_cursor.summary(cursor.start().visible);
+                self.old_end.add_assign(&summary);
+                self.new_end.add_assign(&summary);
+            }
+
+            if pending_edit
+                .as_ref()
+                .map_or(false, |(change, _)| change.new.end < self.new_end)
+            {
+                break;
+            }
+
+            let start_anchor = Anchor {
+                timestamp: fragment.timestamp,
+                offset: fragment.insertion_offset,
+                bias: Bias::Right,
+                buffer_id: Some(self.buffer_id),
+            };
+            let end_anchor = Anchor {
+                timestamp: fragment.timestamp,
+                offset: fragment.insertion_offset + fragment.len,
+                bias: Bias::Left,
+                buffer_id: Some(self.buffer_id),
+            };
+
+            if !fragment.was_visible(self.since, self.undos) && fragment.visible {
+                let mut visible_end = cursor.end(&None).visible;
+                if fragment.id == *self.range.end.0 {
+                    visible_end = cmp::min(
+                        visible_end,
+                        cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
+                    );
+                }
+
+                let fragment_summary = self.visible_cursor.summary(visible_end);
+                let mut new_end = self.new_end.clone();
+                new_end.add_assign(&fragment_summary);
+                if let Some((edit, range)) = pending_edit.as_mut() {
+                    edit.new.end = new_end.clone();
+                    range.end = end_anchor;
+                } else {
+                    pending_edit = Some((
+                        Edit {
+                            old: self.old_end.clone()..self.old_end.clone(),
+                            new: self.new_end.clone()..new_end.clone(),
+                        },
+                        start_anchor..end_anchor,
+                    ));
+                }
+
+                self.new_end = new_end;
+            } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
+                let mut deleted_end = cursor.end(&None).deleted;
+                if fragment.id == *self.range.end.0 {
+                    deleted_end = cmp::min(
+                        deleted_end,
+                        cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
+                    );
+                }
+
+                if cursor.start().deleted > self.deleted_cursor.offset() {
+                    self.deleted_cursor.seek_forward(cursor.start().deleted);
+                }
+                let fragment_summary = self.deleted_cursor.summary(deleted_end);
+                let mut old_end = self.old_end.clone();
+                old_end.add_assign(&fragment_summary);
+                if let Some((edit, range)) = pending_edit.as_mut() {
+                    edit.old.end = old_end.clone();
+                    range.end = end_anchor;
+                } else {
+                    pending_edit = Some((
+                        Edit {
+                            old: self.old_end.clone()..old_end.clone(),
+                            new: self.new_end.clone()..self.new_end.clone(),
+                        },
+                        start_anchor..end_anchor,
+                    ));
+                }
+
+                self.old_end = old_end;
+            }
+
+            cursor.next(&None);
+        }
+
+        pending_edit
+    }
+}
+
+impl Fragment {
+    fn insertion_slice(&self) -> InsertionSlice {
+        InsertionSlice {
+            insertion_id: self.timestamp,
+            range: self.insertion_offset..self.insertion_offset + self.len,
+        }
+    }
+
+    fn is_visible(&self, undos: &UndoMap) -> bool {
+        !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
+    }
+
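+    /// Whether this fragment was visible at the given historical version.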
+    fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
+        (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
+            && self
+                .deletions
+                .iter()
+                .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
+    }
+}
+
+impl sum_tree::Item for Fragment {
+    type Summary = FragmentSummary;
+
+    fn summary(&self) -> Self::Summary {
+        let mut max_version = clock::Global::new();
+        max_version.observe(self.timestamp);
+        for deletion in &self.deletions {
+            max_version.observe(*deletion);
+        }
+        max_version.join(&self.max_undos);
+
+        let mut min_insertion_version = clock::Global::new();
+        min_insertion_version.observe(self.timestamp);
+        let max_insertion_version = min_insertion_version.clone();
+        if self.visible {
+            FragmentSummary {
+                max_id: self.id.clone(),
+                text: FragmentTextSummary {
+                    visible: self.len,
+                    deleted: 0,
+                },
+                max_version,
+                min_insertion_version,
+                max_insertion_version,
+            }
+        } else {
+            FragmentSummary {
+                max_id: self.id.clone(),
+                text: FragmentTextSummary {
+                    visible: 0,
+                    deleted: self.len,
+                },
+                max_version,
+                min_insertion_version,
+                max_insertion_version,
+            }
+        }
+    }
+}
+
+impl sum_tree::Summary for FragmentSummary {
+    type Context = Option<clock::Global>;
+
+    fn add_summary(&mut self, other: &Self, _: &Self::Context) {
+        self.max_id.assign(&other.max_id);
+        self.text.visible += &other.text.visible;
+        self.text.deleted += &other.text.deleted;
+        self.max_version.join(&other.max_version);
+        self.min_insertion_version
+            .meet(&other.min_insertion_version);
+        self.max_insertion_version
+            .join(&other.max_insertion_version);
+    }
+}
+
+impl Default for FragmentSummary {
+    fn default() -> Self {
+        FragmentSummary {
+            max_id: Locator::min(),
+            text: FragmentTextSummary::default(),
+            max_version: clock::Global::new(),
+            min_insertion_version: clock::Global::new(),
+            max_insertion_version: clock::Global::new(),
+        }
+    }
+}
+
+impl sum_tree::Item for InsertionFragment {
+    type Summary = InsertionFragmentKey;
+
+    fn summary(&self) -> Self::Summary {
+        InsertionFragmentKey {
+            timestamp: self.timestamp,
+            split_offset: self.split_offset,
+        }
+    }
+}
+
+impl sum_tree::KeyedItem for InsertionFragment {
+    type Key = InsertionFragmentKey;
+
+    fn key(&self) -> Self::Key {
+        sum_tree::Item::summary(self)
+    }
+}
+
+impl InsertionFragment {
+    fn new(fragment: &Fragment) -> Self {
+        Self {
+            timestamp: fragment.timestamp,
+            split_offset: fragment.insertion_offset,
+            fragment_id: fragment.id.clone(),
+        }
+    }
+
+    fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
+        sum_tree::Edit::Insert(Self::new(fragment))
+    }
+}
+
+impl sum_tree::Summary for InsertionFragmentKey {
+    type Context = ();
+
+    fn add_summary(&mut self, summary: &Self, _: &()) {
+        *self = *summary;
+    }
+}
+
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct FullOffset(pub usize);
+
+impl ops::AddAssign<usize> for FullOffset {
+    fn add_assign(&mut self, rhs: usize) {
+        self.0 += rhs;
+    }
+}
+
+impl ops::Add<usize> for FullOffset {
+    type Output = Self;
+
+    fn add(mut self, rhs: usize) -> Self::Output {
+        self += rhs;
+        self
+    }
+}
+
+impl ops::Sub for FullOffset {
+    type Output = usize;
+
+    fn sub(self, rhs: Self) -> Self::Output {
+        self.0 - rhs.0
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for usize {
+    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
+        *self += summary.text.visible;
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset {
+    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
+        self.0 += summary.text.visible + summary.text.deleted;
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
+    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
+        *self = Some(&summary.max_id);
+    }
+}
+
+impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, FragmentTextSummary> for usize {
+    fn cmp(
+        &self,
+        cursor_location: &FragmentTextSummary,
+        _: &Option<clock::Global>,
+    ) -> cmp::Ordering {
+        Ord::cmp(self, &cursor_location.visible)
+    }
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+enum VersionedFullOffset {
+    Offset(FullOffset),
+    Invalid,
+}
+
+impl VersionedFullOffset {
+    fn full_offset(&self) -> FullOffset {
+        if let Self::Offset(position) = self {
+            *position
+        } else {
+            panic!("invalid version")
+        }
+    }
+}
+
+impl Default for VersionedFullOffset {
+    fn default() -> Self {
+        Self::Offset(Default::default())
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
+    fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
+        if let Self::Offset(offset) = self {
+            let version = cx.as_ref().unwrap();
+            if version.observed_all(&summary.max_insertion_version) {
+                *offset += summary.text.visible + summary.text.deleted;
+            } else if version.observed_any(&summary.min_insertion_version) {
+                *self = Self::Invalid;
+            }
+        }
+    }
+}
+
+impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, Self> for VersionedFullOffset {
+    fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
+        match (self, cursor_position) {
+            (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
+            (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
+            (Self::Invalid, _) => unreachable!(),
+        }
+    }
+}
+
+impl Operation {
+    fn replica_id(&self) -> ReplicaId {
+        operation_queue::Operation::lamport_timestamp(self).replica_id
+    }
+
+    pub fn timestamp(&self) -> clock::Lamport {
+        match self {
+            Operation::Edit(edit) => edit.timestamp,
+            Operation::Undo(undo) => undo.timestamp,
+        }
+    }
+
+    pub fn as_edit(&self) -> Option<&EditOperation> {
+        match self {
+            Operation::Edit(edit) => Some(edit),
+            _ => None,
+        }
+    }
+
+    pub fn is_edit(&self) -> bool {
+        matches!(self, Operation::Edit { .. })
+    }
+}
+
+impl operation_queue::Operation for Operation {
+    fn lamport_timestamp(&self) -> clock::Lamport {
+        match self {
+            Operation::Edit(edit) => edit.timestamp,
+            Operation::Undo(undo) => undo.timestamp,
+        }
+    }
+}
+
+pub trait ToOffset {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
+}
+
+impl ToOffset for Point {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+        snapshot.point_to_offset(*self)
+    }
+}
+
+impl ToOffset for usize {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+        assert!(
+            *self <= snapshot.len(),
+            "offset {} is out of range, max allowed is {}",
+            self,
+            snapshot.len()
+        );
+        *self
+    }
+}
+
+impl ToOffset for Anchor {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+        snapshot.summary_for_anchor(self)
+    }
+}
+
+impl<'a, T: ToOffset> ToOffset for &'a T {
+    fn to_offset(&self, content: &BufferSnapshot) -> usize {
+        (*self).to_offset(content)
+    }
+}
+
+impl ToOffset for PointUtf16 {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+        snapshot.point_utf16_to_offset(*self)
+    }
+}
+
+impl ToOffset for Unclipped<PointUtf16> {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+        snapshot.unclipped_point_utf16_to_offset(*self)
+    }
+}
+
+pub trait ToPoint {
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
+}
+
+impl ToPoint for Anchor {
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
+        snapshot.summary_for_anchor(self)
+    }
+}
+
+impl ToPoint for usize {
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
+        snapshot.offset_to_point(*self)
+    }
+}
+
+impl ToPoint for Point {
+    fn to_point(&self, _: &BufferSnapshot) -> Point {
+        *self
+    }
+}
+
+impl ToPoint for Unclipped<PointUtf16> {
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
+        snapshot.unclipped_point_utf16_to_point(*self)
+    }
+}
+
+pub trait ToPointUtf16 {
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
+}
+
+impl ToPointUtf16 for Anchor {
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+        snapshot.summary_for_anchor(self)
+    }
+}
+
+impl ToPointUtf16 for usize {
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+        snapshot.offset_to_point_utf16(*self)
+    }
+}
+
+impl ToPointUtf16 for PointUtf16 {
+    fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
+        *self
+    }
+}
+
+impl ToPointUtf16 for Point {
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+        snapshot.point_to_point_utf16(*self)
+    }
+}
+
+pub trait ToOffsetUtf16 {
+    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
+}
+
+impl ToOffsetUtf16 for Anchor {
+    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
+        snapshot.summary_for_anchor(self)
+    }
+}
+
+impl ToOffsetUtf16 for usize {
+    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
+        snapshot.offset_to_offset_utf16(*self)
+    }
+}
+
+impl ToOffsetUtf16 for OffsetUtf16 {
+    fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
+        *self
+    }
+}
+
+pub trait FromAnchor {
+    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
+}
+
+impl FromAnchor for Point {
+    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
+        snapshot.summary_for_anchor(anchor)
+    }
+}
+
+impl FromAnchor for PointUtf16 {
+    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
+        snapshot.summary_for_anchor(anchor)
+    }
+}
+
+impl FromAnchor for usize {
+    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
+        snapshot.summary_for_anchor(anchor)
+    }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum LineEnding {
+    Unix,
+    Windows,
+}
+
+impl Default for LineEnding {
+    fn default() -> Self {
+        #[cfg(unix)]
+        return Self::Unix;
+
+        #[cfg(not(unix))]
+        return Self::Windows;
+    }
+}
+
+impl LineEnding {
+    pub fn as_str(&self) -> &'static str {
+        match self {
+            LineEnding::Unix => "\n",
+            LineEnding::Windows => "\r\n",
+        }
+    }
+
+    pub fn detect(text: &str) -> Self {
+        let mut max_ix = cmp::min(text.len(), 1000);
+        while !text.is_char_boundary(max_ix) {
+            max_ix -= 1;
+        }
+
+        if let Some(ix) = text[..max_ix].find(&['\n']) {
+            if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
+                Self::Windows
+            } else {
+                Self::Unix
+            }
+        } else {
+            Self::default()
+        }
+    }
+
+    pub fn normalize(text: &mut String) {
+        if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
+            *text = replaced;
+        }
+    }
+
+    pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
+        if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
+            replaced.into()
+        } else {
+            text
+        }
+    }
+}
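
The range-coalescing step inside `edited_ranges_for_transaction` is easy to exercise in isolation. Below is a minimal standalone sketch of the same merge-adjacent-ranges pattern; it is not part of the diff, and the `coalesce` helper and its input ranges are purely illustrative.

// Sketch: merge ranges whose end meets the following range's start, mirroring the
// filter_map-based coalescing in `edited_ranges_for_transaction`.
fn coalesce(ranges: Vec<std::ops::Range<usize>>) -> Vec<std::ops::Range<usize>> {
    let mut prev: Option<std::ops::Range<usize>> = None;
    ranges
        .into_iter()
        .map(Some)
        .chain([None])
        .filter_map(move |range| {
            if let Some((range, prev)) = range.as_ref().zip(prev.as_mut()) {
                if prev.end == range.start {
                    prev.end = range.end;
                    return None;
                }
            }
            let result = prev.clone();
            prev = range;
            result
        })
        .collect()
}

fn main() {
    // 0..2 and 2..5 touch, so they merge; 7..9 stays separate.
    assert_eq!(coalesce(vec![0..2, 2..5, 7..9]), vec![0..5, 7..9]);
}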

crates/text2/src/undo_map.rs 🔗

@@ -0,0 +1,112 @@
+use crate::UndoOperation;
+use std::cmp;
+use sum_tree::{Bias, SumTree};
+
+#[derive(Copy, Clone, Debug)]
+struct UndoMapEntry {
+    key: UndoMapKey,
+    undo_count: u32,
+}
+
+impl sum_tree::Item for UndoMapEntry {
+    type Summary = UndoMapKey;
+
+    fn summary(&self) -> Self::Summary {
+        self.key
+    }
+}
+
+impl sum_tree::KeyedItem for UndoMapEntry {
+    type Key = UndoMapKey;
+
+    fn key(&self) -> Self::Key {
+        self.key
+    }
+}
+
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
+struct UndoMapKey {
+    edit_id: clock::Lamport,
+    undo_id: clock::Lamport,
+}
+
+impl sum_tree::Summary for UndoMapKey {
+    type Context = ();
+
+    fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
+        *self = cmp::max(*self, *summary);
+    }
+}
+
+#[derive(Clone, Default)]
+pub struct UndoMap(SumTree<UndoMapEntry>);
+
+impl UndoMap {
+    pub fn insert(&mut self, undo: &UndoOperation) {
+        let edits = undo
+            .counts
+            .iter()
+            .map(|(edit_id, count)| {
+                sum_tree::Edit::Insert(UndoMapEntry {
+                    key: UndoMapKey {
+                        edit_id: *edit_id,
+                        undo_id: undo.timestamp,
+                    },
+                    undo_count: *count,
+                })
+            })
+            .collect::<Vec<_>>();
+        self.0.edit(edits, &());
+    }
+
+    pub fn is_undone(&self, edit_id: clock::Lamport) -> bool {
+        self.undo_count(edit_id) % 2 == 1
+    }
+
+    pub fn was_undone(&self, edit_id: clock::Lamport, version: &clock::Global) -> bool {
+        let mut cursor = self.0.cursor::<UndoMapKey>();
+        cursor.seek(
+            &UndoMapKey {
+                edit_id,
+                undo_id: Default::default(),
+            },
+            Bias::Left,
+            &(),
+        );
+
+        let mut undo_count = 0;
+        for entry in cursor {
+            if entry.key.edit_id != edit_id {
+                break;
+            }
+
+            if version.observed(entry.key.undo_id) {
+                undo_count = cmp::max(undo_count, entry.undo_count);
+            }
+        }
+
+        undo_count % 2 == 1
+    }
+
+    pub fn undo_count(&self, edit_id: clock::Lamport) -> u32 {
+        let mut cursor = self.0.cursor::<UndoMapKey>();
+        cursor.seek(
+            &UndoMapKey {
+                edit_id,
+                undo_id: Default::default(),
+            },
+            Bias::Left,
+            &(),
+        );
+
+        let mut undo_count = 0;
+        for entry in cursor {
+            if entry.key.edit_id != edit_id {
+                break;
+            }
+
+            undo_count = cmp::max(undo_count, entry.undo_count);
+        }
+        undo_count
+    }
+}
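
For reference, `UndoMap::is_undone` above reduces to a parity check on the highest undo count recorded for an edit: each undo or redo bumps the count, and an odd count means the edit is currently hidden. A minimal standalone sketch of that rule follows; it is not part of the diff, and the `is_undone` helper, edit IDs, and counts are illustrative.

use std::collections::HashMap;

// Sketch: an edit is "undone" exactly when its highest recorded undo count is odd.
fn is_undone(counts: &HashMap<u32, u32>, edit_id: u32) -> bool {
    counts.get(&edit_id).copied().unwrap_or(0) % 2 == 1
}

fn main() {
    let mut counts = HashMap::new();
    counts.insert(1, 1); // undone once -> hidden
    counts.insert(2, 2); // undone, then redone -> visible again
    assert!(is_undone(&counts, 1));
    assert!(!is_undone(&counts, 2));
    assert!(!is_undone(&counts, 3)); // never undone -> visible
}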

crates/zed2/Cargo.toml 🔗

@@ -63,7 +63,7 @@ settings2 = { path = "../settings2" }
 feature_flags2 = { path = "../feature_flags2" }
 sum_tree = { path = "../sum_tree" }
 shellexpand = "2.1.0"
-text = { path = "../text" }
+text2 = { path = "../text2" }
 # terminal_view = { path = "../terminal_view" }
 theme2 = { path = "../theme2" }
 # theme_selector = { path = "../theme_selector" }
@@ -152,7 +152,7 @@ language2 = { path = "../language2", features = ["test-support"] }
 project2 = { path = "../project2", features = ["test-support"] }
 # rpc = { path = "../rpc", features = ["test-support"] }
 # settings = { path = "../settings", features = ["test-support"] }
-# text = { path = "../text", features = ["test-support"] }
+text2 = { path = "../text2", features = ["test-support"] }
 # util = { path = "../util", features = ["test-support"] }
 # workspace = { path = "../workspace", features = ["test-support"] }
 unindent.workspace = true