locator.rs

  1use smallvec::SmallVec;
  2use std::iter;
  3
/// An identifier for a position in an ordered collection.
///
/// Allows prepending and appending without needing to renumber existing locators
/// using `Locator::between(lhs, rhs)`.
///
/// The initial location for a collection should be `Locator::between(Locator::min(), Locator::max())`,
/// leaving room for items to be inserted before and after it.
///
/// Internally a sequence of `u64` "digits"; the derived `Ord` compares them
/// lexicographically (delegated to the slice ordering). The inline capacity of
/// 2 keeps common shallow locators (depth <= 2) free of heap allocation.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Locator(SmallVec<[u64; 2]>);
 13
 14impl Clone for Locator {
 15    fn clone(&self) -> Self {
 16        // We manually implement clone to avoid the overhead of SmallVec's clone implementation.
 17        // Using `from_slice` is faster than `clone` for SmallVec as we can use our `Copy` implementation of u64.
 18        Self {
 19            0: SmallVec::from_slice(&self.0),
 20        }
 21    }
 22
 23    fn clone_from(&mut self, source: &Self) {
 24        self.0.clone_from(&source.0);
 25    }
 26}
 27
impl Locator {
    /// The minimum locator: a single `u64::MIN` digit. Used as the lower
    /// sentinel argument for [`Locator::between`].
    pub const fn min() -> Self {
        // SAFETY: 1 is <= 2 (the inline capacity of the backing array).
        Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MIN; 2], 1) })
    }

    /// The maximum locator: a single `u64::MAX` digit. Used as the upper
    /// sentinel argument for [`Locator::between`].
    pub const fn max() -> Self {
        // SAFETY: 1 is <= 2 (the inline capacity of the backing array).
        Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MAX; 2], 1) })
    }

    /// A `'static` reference to [`Locator::min`]; the inline-const block
    /// promotes the value to static memory, so no temporary is built.
    pub const fn min_ref() -> &'static Self {
        const { &Self::min() }
    }

    /// A `'static` reference to [`Locator::max`]; see [`Locator::min_ref`].
    pub const fn max_ref() -> &'static Self {
        const { &Self::max() }
    }

    /// Overwrites `self` with `other`'s digits, reusing `self`'s buffer
    /// where its capacity allows.
    pub fn assign(&mut self, other: &Self) {
        // `copy_from_slice` panics on length mismatch, so resize first.
        self.0.resize(other.0.len(), 0);
        self.0.copy_from_slice(&other.0);
    }

    /// Returns a locator that sorts strictly between `lhs` and `rhs`.
    ///
    /// Both inputs are conceptually extended to infinite digit sequences —
    /// `lhs` padded with `u64::MIN`, `rhs` padded with `u64::MAX` — so
    /// locators of different depths are handled uniformly.
    ///
    /// NOTE(review): assumes `lhs < rhs` (callers bracket with `min()`/`max()`);
    /// the result is not meaningful otherwise — confirm callers uphold this.
    pub fn between(lhs: &Self, rhs: &Self) -> Self {
        let lhs = lhs.0.iter().copied().chain(iter::repeat(u64::MIN));
        let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX));
        let mut location = SmallVec::new();
        for (lhs, rhs) in lhs.zip(rhs) {
            // This shift is essential! It optimizes for the common case of sequential typing.
            // Instead of the midpoint, step only `gap >> 48` above `lhs`,
            // leaving nearly the whole gap above the result so repeated
            // forward insertions keep fitting at this depth instead of
            // growing new digits. When the gap is below 2^48 the step is 0:
            // we copy `lhs`'s digit and descend to the next digit.
            let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48);
            location.push(mid);
            // Once a digit exceeds `lhs`'s, the result is > `lhs`; it is also
            // still < `rhs` because the step (`gap >> 48`) is always smaller
            // than the gap itself, so we can stop here.
            if mid > lhs {
                break;
            }
        }
        Self(location)
    }

    /// Number of digits (the "depth") of this locator.
    pub fn len(&self) -> usize {
        self.0.len()
    }

    /// Whether this locator has no digits. `min()`, `max()`, and every result
    /// of `between` contain at least one digit, so this is normally `false`.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
 75
 76impl Default for Locator {
 77    fn default() -> Self {
 78        Self::min()
 79    }
 80}
 81
impl sum_tree::Item for Locator {
    type Summary = Locator;

    // Each locator acts as its own summary. Combined with the
    // `ContextLessSummary` impl in this file (whose `add_summary` simply
    // overwrites), an accumulated summary is the most recently added locator.
    fn summary(&self, _cx: ()) -> Self::Summary {
        self.clone()
    }
}
 89
impl sum_tree::KeyedItem for Locator {
    type Key = Locator;

    // A locator is also its own key: the tree orders items by the locator's
    // derived `Ord` (lexicographic digit comparison).
    fn key(&self) -> Self::Key {
        self.clone()
    }
}
 97
 98impl sum_tree::ContextLessSummary for Locator {
 99    fn zero() -> Self {
100        Default::default()
101    }
102
103    fn add_summary(&mut self, summary: &Self) {
104        self.assign(summary);
105    }
106}
107
#[cfg(test)]
mod tests {
    use super::*;
    use rand::prelude::*;
    use std::mem;

    #[gpui::test(iterations = 100)]
    fn test_locators(mut rng: StdRng) {
        // Builds a random locator of 1..=5 digits, each digit in 0..=100.
        fn random_locator(rng: &mut StdRng) -> Locator {
            let depth = rng.random_range(1..=5);
            Locator((0..depth).map(|_| rng.random_range(0..=100)).collect())
        }

        // Draw two distinct random locators.
        let mut lhs: Locator = Default::default();
        let mut rhs: Locator = Default::default();
        while lhs == rhs {
            lhs = random_locator(&mut rng);
            rhs = random_locator(&mut rng);
        }

        // Order them so that `lhs < rhs`.
        if rhs < lhs {
            mem::swap(&mut lhs, &mut rhs);
        }

        let middle = Locator::between(&lhs, &rhs);
        assert!(middle > lhs);
        assert!(middle < rhs);
        // Every digit except the last must be copied from one of the two
        // endpoints (missing digits are treated as 0 padding).
        for (ix, &digit) in middle.0[..middle.0.len() - 1].iter().enumerate() {
            let from_lhs = *lhs.0.get(ix).unwrap_or(&0);
            let from_rhs = *rhs.0.get(ix).unwrap_or(&0);
            assert!(digit == from_lhs || digit == from_rhs);
        }
    }

    // Simulates 100,000 sequential forward appends (the pattern used when
    // building a buffer's initial fragments and when
    // `push_fragments_for_insertion` chains new text fragments).
    #[test]
    fn test_sequential_forward_append_stays_at_depth_1() {
        let upper = Locator::max();
        let mut cursor = Locator::min();
        for _ in 0..100_000 {
            cursor = Locator::between(&cursor, &upper);
            assert_eq!(
                cursor.len(),
                1,
                "sequential forward append grew past depth 1"
            );
        }
    }

    // Simulates the most common real editing pattern: a fragment is split
    // (producing a depth-2 prefix), then 10,000 new fragments are inserted
    // sequentially forward within that split region.
    #[test]
    fn test_typing_at_cursor_stays_at_depth_2() {
        let suffix_id = Locator::between(&Locator::min(), &Locator::max());
        let mut cursor = Locator::between(&Locator::min(), &suffix_id);
        assert_eq!(cursor.len(), 2);

        for _ in 0..10_000 {
            cursor = Locator::between(&cursor, &suffix_id);
            assert_eq!(
                cursor.len(),
                2,
                "forward typing after split grew past depth 2"
            );
        }
    }
}