//! arena.rs — a bump-allocation arena whose handles panic on use-after-clear.

  1use std::{
  2    alloc::{self, handle_alloc_error},
  3    cell::Cell,
  4    ops::{Deref, DerefMut},
  5    ptr,
  6    rc::Rc,
  7};
  8
/// A type-erased record of one arena allocation: the raw pointer to the
/// stored value plus the destructor to run for it when the arena is cleared.
struct ArenaElement {
    // Pointer into a `Chunk` where the value was written.
    value: *mut u8,
    // Monomorphized drop glue for the value's concrete type
    // (instantiated in `Arena::alloc`).
    drop: unsafe fn(*mut u8),
}
 13
impl Drop for ArenaElement {
    #[inline(always)]
    fn drop(&mut self) {
        // SAFETY: `value` points to a live, initialized value of the type
        // `drop` was instantiated for (see `Arena::alloc`), and it is dropped
        // exactly once because this ArenaElement is dropped exactly once.
        unsafe {
            (self.drop)(self.value);
        }
    }
}
 22
/// A fixed-size block of heap memory that values are bump-allocated from.
struct Chunk {
    // First byte of the allocation.
    start: *mut u8,
    // One past the last byte of the allocation.
    end: *mut u8,
    // Next free byte; invariant: start <= offset <= end.
    offset: *mut u8,
}
 28
impl Drop for Chunk {
    fn drop(&mut self) {
        // SAFETY: `start` was returned by `alloc::alloc` in `Chunk::new` with
        // this exact size/alignment and has not been freed before this point.
        unsafe {
            let chunk_size = self.end.offset_from_unsigned(self.start);
            // this never fails as it succeeded during allocation
            let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap();
            alloc::dealloc(self.start, layout);
        }
    }
}
 39
 40impl Chunk {
 41    fn new(chunk_size: usize) -> Self {
 42        unsafe {
 43            // this only fails if chunk_size is unreasonably huge
 44            let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap();
 45            let start = alloc::alloc(layout);
 46            if start.is_null() {
 47                handle_alloc_error(layout);
 48            }
 49            let end = start.add(chunk_size);
 50            Self {
 51                start,
 52                end,
 53                offset: start,
 54            }
 55        }
 56    }
 57
 58    fn allocate(&mut self, layout: alloc::Layout) -> Option<*mut u8> {
 59        unsafe {
 60            let aligned = self.offset.add(self.offset.align_offset(layout.align()));
 61            let next = aligned.add(layout.size());
 62
 63            if next <= self.end {
 64                self.offset = next;
 65                Some(aligned)
 66            } else {
 67                None
 68            }
 69        }
 70    }
 71
 72    fn reset(&mut self) {
 73        self.offset = self.start;
 74    }
 75}
 76
/// A bump allocator that hands out `ArenaBox` handles which become invalid
/// (panic on dereference) once the arena is cleared.
pub struct Arena {
    // Backing memory blocks, each exactly `chunk_size` bytes.
    chunks: Vec<Chunk>,
    // Drop records for every live allocation, in allocation order.
    elements: Vec<ArenaElement>,
    // Shared flag cloned into each ArenaBox; flipped to false on `clear`.
    valid: Rc<Cell<bool>>,
    // Index of the chunk currently being allocated from.
    current_chunk_index: usize,
    // Size in bytes of every chunk; fixed at construction.
    chunk_size: usize,
}
 84
impl Drop for Arena {
    fn drop(&mut self) {
        // Runs destructors for all stored values and invalidates any
        // outstanding ArenaBox handles before the chunks are freed.
        self.clear();
    }
}
 90
impl Arena {
    /// Creates an arena whose backing chunks are each `chunk_size` bytes.
    ///
    /// Panics if `chunk_size` is zero. A single value larger than
    /// `chunk_size` can never be allocated (see `alloc`).
    pub fn new(chunk_size: usize) -> Self {
        assert!(chunk_size > 0);
        Self {
            chunks: vec![Chunk::new(chunk_size)],
            elements: Vec::new(),
            valid: Rc::new(Cell::new(true)),
            current_chunk_index: 0,
            chunk_size,
        }
    }

    /// Total bytes reserved across all chunks, whether currently used or not.
    pub fn capacity(&self) -> usize {
        self.chunks.len() * self.chunk_size
    }

    /// Drops every allocated value and makes the full capacity available
    /// again. All outstanding `ArenaBox`es are invalidated: dereferencing
    /// one afterwards panics.
    pub fn clear(&mut self) {
        // Flip the shared validity flag so existing ArenaBoxes fail
        // validation, then install a fresh flag for future allocations.
        self.valid.set(false);
        self.valid = Rc::new(Cell::new(true));
        // Dropping the elements runs each stored value's destructor.
        self.elements.clear();
        // Only chunks up to current_chunk_index have been allocated from.
        for chunk_index in 0..=self.current_chunk_index {
            self.chunks[chunk_index].reset();
        }
        self.current_chunk_index = 0;
    }

    /// Allocates space for a `T`, initializes it in place with `f`, and
    /// returns a validity-checked handle to it.
    ///
    /// Grows by one chunk when the current chunk is full. Panics if a single
    /// `T` cannot fit in an empty chunk of `chunk_size` bytes.
    #[inline(always)]
    pub fn alloc<T>(&mut self, f: impl FnOnce() -> T) -> ArenaBox<T> {
        // Writes f()'s result directly into place, avoiding a move of T.
        #[inline(always)]
        unsafe fn inner_writer<T, F>(ptr: *mut T, f: F)
        where
            F: FnOnce() -> T,
        {
            unsafe {
                ptr::write(ptr, f());
            }
        }

        // Type-erased destructor stored alongside each allocation
        // (shadows std::mem::drop on purpose; monomorphized per T).
        unsafe fn drop<T>(ptr: *mut u8) {
            unsafe {
                std::ptr::drop_in_place(ptr.cast::<T>());
            }
        }

        unsafe {
            let layout = alloc::Layout::new::<T>();
            let mut current_chunk = &mut self.chunks[self.current_chunk_index];
            let ptr = if let Some(ptr) = current_chunk.allocate(layout) {
                ptr
            } else {
                // Current chunk is full: advance to the next chunk, creating
                // it if the arena has never grown this far before.
                self.current_chunk_index += 1;
                if self.current_chunk_index >= self.chunks.len() {
                    self.chunks.push(Chunk::new(self.chunk_size));
                    assert_eq!(self.current_chunk_index, self.chunks.len() - 1);
                    log::trace!(
                        "increased element arena capacity to {}kb",
                        self.capacity() / 1024,
                    );
                }
                current_chunk = &mut self.chunks[self.current_chunk_index];
                if let Some(ptr) = current_chunk.allocate(layout) {
                    ptr
                } else {
                    // A fresh, empty chunk still cannot hold T.
                    panic!(
                        "Arena chunk_size of {} is too small to allocate {} bytes",
                        self.chunk_size,
                        layout.size()
                    );
                }
            };

            // Initialize first, then record the destructor: if f() panics,
            // the uninitialized slot is simply never dropped.
            inner_writer(ptr.cast(), f);
            self.elements.push(ArenaElement {
                value: ptr,
                drop: drop::<T>,
            });

            ArenaBox {
                ptr: ptr.cast(),
                valid: self.valid.clone(),
            }
        }
    }
}
175
/// An owning-style handle to a value stored in an `Arena`.
///
/// Dereferencing panics if the arena has been cleared since the allocation;
/// the validity flag is shared with the arena via `Rc`.
pub struct ArenaBox<T: ?Sized> {
    // Raw pointer into the arena's chunk memory.
    ptr: *mut T,
    // Shared flag; false once the owning arena has been cleared.
    valid: Rc<Cell<bool>>,
}
180
impl<T: ?Sized> ArenaBox<T> {
    /// Converts this handle into one pointing at part of the value (e.g. a
    /// field or a trait object), sharing the same validity flag.
    ///
    /// Note: `f(&mut self)` auto-derefs through `DerefMut`, so validity is
    /// checked before `f` ever sees the value.
    #[inline(always)]
    pub fn map<U: ?Sized>(mut self, f: impl FnOnce(&mut T) -> &mut U) -> ArenaBox<U> {
        ArenaBox {
            ptr: f(&mut self),
            valid: self.valid,
        }
    }

    /// Panics if the arena this value lives in has been cleared since the
    /// handle was created.
    #[track_caller]
    fn validate(&self) {
        assert!(
            self.valid.get(),
            "attempted to dereference an ArenaRef after its Arena was cleared"
        );
    }
}
198
199impl<T: ?Sized> Deref for ArenaBox<T> {
200    type Target = T;
201
202    #[inline(always)]
203    fn deref(&self) -> &Self::Target {
204        self.validate();
205        unsafe { &*self.ptr }
206    }
207}
208
209impl<T: ?Sized> DerefMut for ArenaBox<T> {
210    #[inline(always)]
211    fn deref_mut(&mut self) -> &mut Self::Target {
212        self.validate();
213        unsafe { &mut *self.ptr }
214    }
215}
216
#[cfg(test)]
mod tests {
    use std::{cell::Cell, rc::Rc};

    use super::*;

    #[test]
    fn test_arena() {
        let mut arena = Arena::new(1024);

        // Mixed-size allocations all round-trip their values.
        let first = arena.alloc(|| 1u64);
        let second = arena.alloc(|| 2u32);
        let third = arena.alloc(|| 3u16);
        let fourth = arena.alloc(|| 4u8);
        assert_eq!((*first, *second, *third, *fourth), (1, 2, 3, 4));

        // After a clear, the arena is reusable from scratch.
        arena.clear();
        let first = arena.alloc(|| 5u64);
        let second = arena.alloc(|| 6u32);
        let third = arena.alloc(|| 7u16);
        let fourth = arena.alloc(|| 8u8);
        assert_eq!((*first, *second, *third, *fourth), (5, 6, 7, 8));

        // Clearing must run destructors of stored values.
        struct DropGuard(Rc<Cell<bool>>);
        impl Drop for DropGuard {
            fn drop(&mut self) {
                self.0.set(true);
            }
        }
        let dropped = Rc::new(Cell::new(false));
        arena.alloc(|| DropGuard(dropped.clone()));
        arena.clear();
        assert!(dropped.get());
    }

    #[test]
    fn test_arena_grow() {
        let mut arena = Arena::new(8);

        // Two u64s exactly fill two 8-byte chunks.
        arena.alloc(|| 1u64);
        arena.alloc(|| 2u64);
        assert_eq!(arena.capacity(), 16);

        // Two u32s together need one additional chunk.
        arena.alloc(|| 3u32);
        arena.alloc(|| 4u32);
        assert_eq!(arena.capacity(), 24);
    }

    #[test]
    fn test_arena_alignment() {
        let mut arena = Arena::new(256);
        let a = arena.alloc(|| 1u8);
        let b = arena.alloc(|| 2u16);
        let c = arena.alloc(|| 3u32);
        let d = arena.alloc(|| 4u64);
        let e = arena.alloc(|| 5u64);

        assert_eq!((*a, *b, *c, *d, *e), (1, 2, 3, 4, 5));

        // Pointers handed out must satisfy the natural alignment of their type.
        assert_eq!(a.ptr.align_offset(std::mem::align_of_val(&*a)), 0);
        assert_eq!(b.ptr.align_offset(std::mem::align_of_val(&*b)), 0);
    }

    #[test]
    #[should_panic(expected = "attempted to dereference an ArenaRef after its Arena was cleared")]
    fn test_arena_use_after_clear() {
        let mut arena = Arena::new(16);
        let value = arena.alloc(|| 1u64);

        arena.clear();
        let _read_value = *value;
    }
}