arena.rs

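//! A simple bump-allocation arena for values of arbitrary types. Values live
//! until [`Arena::clear`] is called; `ArenaBox` handles detect use-after-clear
//! at runtime via a shared validity flag.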
use std::{
    alloc::{self, handle_alloc_error},
    cell::Cell,
    ops::{Deref, DerefMut},
    ptr,
    rc::Rc,
};

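/// A type-erased value stored in the arena: a raw pointer to the value plus
/// the drop glue needed to destroy it when the arena is cleared.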
struct ArenaElement {
    value: *mut u8,
    drop: unsafe fn(*mut u8),
}

impl Drop for ArenaElement {
    #[inline(always)]
    fn drop(&mut self) {
        unsafe {
            (self.drop)(self.value);
        }
    }
}

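/// A fixed-size block of memory that values are bump-allocated into.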
struct Chunk {
    start: *mut u8,
    end: *mut u8,
    offset: *mut u8,
}

impl Drop for Chunk {
    fn drop(&mut self) {
        unsafe {
            let chunk_size = self.end.offset_from_unsigned(self.start);
            // this never fails as it succeeded during allocation
            let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap();
            alloc::dealloc(self.start, layout);
        }
    }
}

impl Chunk {
    fn new(chunk_size: usize) -> Self {
        unsafe {
            // this only fails if chunk_size is unreasonably huge
            let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap();
            let start = alloc::alloc(layout);
            if start.is_null() {
                handle_alloc_error(layout);
            }
            let end = start.add(chunk_size);
            Self {
                start,
                end,
                offset: start,
            }
        }
    }

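    /// Bump-allocates `layout.size()` bytes at `layout.align()` alignment,
    /// returning `None` when the chunk does not have enough space left.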
    fn allocate(&mut self, layout: alloc::Layout) -> Option<*mut u8> {
        unsafe {
            let aligned = self.offset.add(self.offset.align_offset(layout.align()));
            let next = aligned.add(layout.size());

            if next <= self.end {
                self.offset = next;
                Some(aligned)
            } else {
                None
            }
        }
    }

    fn reset(&mut self) {
        self.offset = self.start;
    }
}

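/// A bump allocator that hands out `ArenaBox` pointers into fixed-size
/// chunks, growing by one chunk at a time. Clearing the arena drops every
/// allocated value and invalidates all outstanding boxes.
///
/// ```ignore
/// let mut arena = Arena::new(1024);
/// let value = arena.alloc(|| 42u32);
/// assert_eq!(*value, 42);
/// arena.clear(); // drops the value; dereferencing `value` now panics
/// ```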
pub struct Arena {
    chunks: Vec<Chunk>,
    elements: Vec<ArenaElement>,
    valid: Rc<Cell<bool>>,
    current_chunk_index: usize,
    chunk_size: usize,
}

impl Drop for Arena {
    fn drop(&mut self) {
        self.clear();
    }
}

impl Arena {
    pub fn new(chunk_size: usize) -> Self {
        assert!(chunk_size > 0);
        Self {
            chunks: vec![Chunk::new(chunk_size)],
            elements: Vec::new(),
            valid: Rc::new(Cell::new(true)),
            current_chunk_index: 0,
            chunk_size,
        }
    }

    pub fn capacity(&self) -> usize {
        self.chunks.len() * self.chunk_size
    }

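    /// Drops every value allocated since the last clear and rewinds all
    /// used chunks, invalidating any `ArenaBox` or `ArenaRef` still
    /// pointing into the arena.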
    pub fn clear(&mut self) {
        self.valid.set(false);
        self.valid = Rc::new(Cell::new(true));
        self.elements.clear();
        for chunk_index in 0..=self.current_chunk_index {
            self.chunks[chunk_index].reset();
        }
        self.current_chunk_index = 0;
    }

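    /// Allocates a value produced by `f` in the arena. If the current chunk
    /// is full, the arena advances to the next chunk, allocating a fresh one
    /// when needed. Panics if a single value is larger than `chunk_size`.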
    #[inline(always)]
    pub fn alloc<T>(&mut self, f: impl FnOnce() -> T) -> ArenaBox<T> {
        #[inline(always)]
        unsafe fn inner_writer<T, F>(ptr: *mut T, f: F)
        where
            F: FnOnce() -> T,
        {
            unsafe {
                ptr::write(ptr, f());
            }
        }

        unsafe fn drop<T>(ptr: *mut u8) {
            unsafe {
                std::ptr::drop_in_place(ptr.cast::<T>());
            }
        }

        unsafe {
            let layout = alloc::Layout::new::<T>();
            let mut current_chunk = &mut self.chunks[self.current_chunk_index];
            let ptr = if let Some(ptr) = current_chunk.allocate(layout) {
                ptr
            } else {
                self.current_chunk_index += 1;
                if self.current_chunk_index >= self.chunks.len() {
                    self.chunks.push(Chunk::new(self.chunk_size));
                    assert_eq!(self.current_chunk_index, self.chunks.len() - 1);
                    log::info!(
                        "increased element arena capacity to {}kb",
                        self.capacity() / 1024,
                    );
                }
                current_chunk = &mut self.chunks[self.current_chunk_index];
                if let Some(ptr) = current_chunk.allocate(layout) {
                    ptr
                } else {
                    panic!(
                        "Arena chunk_size of {} is too small to allocate {} bytes",
                        self.chunk_size,
                        layout.size()
                    );
                }
            };

            inner_writer(ptr.cast(), f);
            self.elements.push(ArenaElement {
                value: ptr,
                drop: drop::<T>,
            });

            ArenaBox {
                ptr: ptr.cast(),
                valid: self.valid.clone(),
            }
        }
    }
}

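/// An owning pointer to a value allocated in an `Arena`. Dereferencing after
/// the arena has been cleared panics rather than yielding a dangling pointer.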
pub struct ArenaBox<T: ?Sized> {
    ptr: *mut T,
    valid: Rc<Cell<bool>>,
}

impl<T: ?Sized> ArenaBox<T> {
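    /// Projects this box to a sub-value, e.g. a struct field or a trait
    /// object, sharing the same validity flag as the original box.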
    #[inline(always)]
    pub fn map<U: ?Sized>(mut self, f: impl FnOnce(&mut T) -> &mut U) -> ArenaBox<U> {
        ArenaBox {
            ptr: f(&mut self),
            valid: self.valid,
        }
    }

    #[track_caller]
    fn validate(&self) {
        assert!(
            self.valid.get(),
            "attempted to dereference an ArenaBox after its Arena was cleared"
        );
    }
}

impl<T: ?Sized> Deref for ArenaBox<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        self.validate();
        unsafe { &*self.ptr }
    }
}

impl<T: ?Sized> DerefMut for ArenaBox<T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.validate();
        unsafe { &mut *self.ptr }
    }
}

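/// A cloneable, read-only handle to an arena-allocated value. Unlike
/// `ArenaBox`, it only implements `Deref`, so multiple copies can coexist.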
pub struct ArenaRef<T: ?Sized>(ArenaBox<T>);

impl<T: ?Sized> From<ArenaBox<T>> for ArenaRef<T> {
    fn from(value: ArenaBox<T>) -> Self {
        ArenaRef(value)
    }
}

impl<T: ?Sized> Clone for ArenaRef<T> {
    fn clone(&self) -> Self {
        Self(ArenaBox {
            ptr: self.0.ptr,
            valid: self.0.valid.clone(),
        })
    }
}

impl<T: ?Sized> Deref for ArenaRef<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        self.0.deref()
    }
}

#[cfg(test)]
mod tests {
    use std::{cell::Cell, rc::Rc};

    use super::*;

    #[test]
    fn test_arena() {
        let mut arena = Arena::new(1024);
        let a = arena.alloc(|| 1u64);
        let b = arena.alloc(|| 2u32);
        let c = arena.alloc(|| 3u16);
        let d = arena.alloc(|| 4u8);
        assert_eq!(*a, 1);
        assert_eq!(*b, 2);
        assert_eq!(*c, 3);
        assert_eq!(*d, 4);

        arena.clear();
        let a = arena.alloc(|| 5u64);
        let b = arena.alloc(|| 6u32);
        let c = arena.alloc(|| 7u16);
        let d = arena.alloc(|| 8u8);
        assert_eq!(*a, 5);
        assert_eq!(*b, 6);
        assert_eq!(*c, 7);
        assert_eq!(*d, 8);

        // Ensure drop gets called.
        let dropped = Rc::new(Cell::new(false));
        struct DropGuard(Rc<Cell<bool>>);
        impl Drop for DropGuard {
            fn drop(&mut self) {
                self.0.set(true);
            }
        }
        arena.alloc(|| DropGuard(dropped.clone()));
        arena.clear();
        assert!(dropped.get());
    }

    #[test]
    fn test_arena_grow() {
        let mut arena = Arena::new(8);
        arena.alloc(|| 1u64);
        arena.alloc(|| 2u64);

        assert_eq!(arena.capacity(), 16);

        arena.alloc(|| 3u32);
        arena.alloc(|| 4u32);

        assert_eq!(arena.capacity(), 24);
    }

    #[test]
    fn test_arena_alignment() {
        let mut arena = Arena::new(256);
        let x1 = arena.alloc(|| 1u8);
        let x2 = arena.alloc(|| 2u16);
        let x3 = arena.alloc(|| 3u32);
        let x4 = arena.alloc(|| 4u64);
        let x5 = arena.alloc(|| 5u64);

        assert_eq!(*x1, 1);
        assert_eq!(*x2, 2);
        assert_eq!(*x3, 3);
        assert_eq!(*x4, 4);
        assert_eq!(*x5, 5);

        assert_eq!(x1.ptr.align_offset(std::mem::align_of_val(&*x1)), 0);
        assert_eq!(x2.ptr.align_offset(std::mem::align_of_val(&*x2)), 0);
    }

    #[test]
    #[should_panic(expected = "attempted to dereference an ArenaBox after its Arena was cleared")]
    fn test_arena_use_after_clear() {
        let mut arena = Arena::new(16);
        let value = arena.alloc(|| 1u64);

        arena.clear();
        let _read_value = *value;
    }
}
326}