use std::{
    alloc::{self, handle_alloc_error},
    cell::Cell,
    num::NonZeroUsize,
    ops::{Deref, DerefMut},
    ptr::{self, NonNull},
    rc::Rc,
};

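/// A type-erased record of one arena allocation: a pointer to the value's
/// storage plus the drop glue needed to run its destructor on `clear`.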
struct ArenaElement {
    value: *mut u8,
    drop: unsafe fn(*mut u8),
}

impl Drop for ArenaElement {
    #[inline(always)]
    fn drop(&mut self) {
        unsafe { (self.drop)(self.value) };
    }
}

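/// A fixed-size block of bump-allocated memory. `offset` marks the boundary
/// between used and free space; allocation only advances it.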
struct Chunk {
    start: *mut u8,
    end: *mut u8,
    offset: *mut u8,
}

impl Drop for Chunk {
    fn drop(&mut self) {
        unsafe {
            let chunk_size = self.end.offset_from_unsigned(self.start);
            // SAFETY: The same layout succeeded during allocation in `Chunk::new`.
            let layout = alloc::Layout::from_size_align_unchecked(chunk_size, 1);
            alloc::dealloc(self.start, layout);
        }
    }
}

impl Chunk {
    fn new(chunk_size: NonZeroUsize) -> Self {
        // `from_size_align` only fails if `chunk_size` exceeds `isize::MAX`,
        // i.e. if the requested chunk is unreasonably huge.
        let layout = alloc::Layout::from_size_align(chunk_size.get(), 1).unwrap();
        let start = unsafe { alloc::alloc(layout) };
        if start.is_null() {
            handle_alloc_error(layout);
        }
        let end = unsafe { start.add(chunk_size.get()) };
        Self {
            start,
            end,
            offset: start,
        }
    }

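    /// Bump-allocates `layout.size()` bytes at `layout.align()`, returning
    /// `None` when the remaining space in this chunk is too small.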
    fn allocate(&mut self, layout: alloc::Layout) -> Option<NonNull<u8>> {
        let aligned = unsafe { self.offset.add(self.offset.align_offset(layout.align())) };
        let next = unsafe { aligned.add(layout.size()) };

        if next <= self.end {
            self.offset = next;
            NonNull::new(aligned)
        } else {
            None
        }
    }

    fn reset(&mut self) {
        self.offset = self.start;
    }
}

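/// A bump allocator that hands out [`ArenaBox`] smart pointers. Allocation is
/// a pointer bump in the common case, growing chunk by chunk as needed, and
/// [`Arena::clear`] drops every element at once, invalidating all outstanding
/// boxes through the shared `valid` flag.
///
/// # Example
///
/// A minimal usage sketch (marked `ignore` because the doctest cannot know
/// this crate's import path):
///
/// ```ignore
/// let mut arena = Arena::new(1024);
/// let value = arena.alloc(|| 42u32);
/// assert_eq!(*value, 42);
/// arena.clear(); // drops the value; dereferencing `value` now panics
/// ```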
pub struct Arena {
    chunks: Vec<Chunk>,
    elements: Vec<ArenaElement>,
    valid: Rc<Cell<bool>>,
    current_chunk_index: usize,
    chunk_size: NonZeroUsize,
}

impl Drop for Arena {
    fn drop(&mut self) {
        self.clear();
    }
}

impl Arena {
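    /// Creates an arena with one pre-allocated chunk of `chunk_size` bytes.
    /// Panics if `chunk_size` is zero.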
    pub fn new(chunk_size: usize) -> Self {
        let chunk_size = NonZeroUsize::try_from(chunk_size).unwrap();
        Self {
            chunks: vec![Chunk::new(chunk_size)],
            elements: Vec::new(),
            valid: Rc::new(Cell::new(true)),
            current_chunk_index: 0,
            chunk_size,
        }
    }

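    /// Total bytes reserved across all chunks, used or not.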
    pub fn capacity(&self) -> usize {
        self.chunks.len() * self.chunk_size.get()
    }

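    /// Drops every element, invalidates all outstanding [`ArenaBox`]es, and
    /// resets every used chunk so its memory can be reused.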
    pub fn clear(&mut self) {
        // Flip the shared flag so existing `ArenaBox`es refuse to deref, then
        // start a fresh flag for future allocations.
        self.valid.set(false);
        self.valid = Rc::new(Cell::new(true));
        self.elements.clear();
        for chunk_index in 0..=self.current_chunk_index {
            self.chunks[chunk_index].reset();
        }
        self.current_chunk_index = 0;
    }

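    /// Allocates space for a `T`, initializes it in place with `f`, and
    /// returns an [`ArenaBox`] pointing at it. Grows by one chunk when the
    /// current chunk is full; panics if a `T` cannot fit in an empty chunk.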
    #[inline(always)]
    pub fn alloc<T>(&mut self, f: impl FnOnce() -> T) -> ArenaBox<T> {
        #[inline(always)]
        unsafe fn inner_writer<T, F>(ptr: *mut T, f: F)
        where
            F: FnOnce() -> T,
        {
            unsafe { ptr::write(ptr, f()) };
        }

        unsafe fn drop<T>(ptr: *mut u8) {
            unsafe { std::ptr::drop_in_place(ptr.cast::<T>()) };
        }

        let layout = alloc::Layout::new::<T>();
        let mut current_chunk = &mut self.chunks[self.current_chunk_index];
        let ptr = if let Some(ptr) = current_chunk.allocate(layout) {
            ptr.as_ptr()
        } else {
            // The current chunk is full: advance to the next one, allocating
            // it first if it doesn't exist yet.
            self.current_chunk_index += 1;
            if self.current_chunk_index >= self.chunks.len() {
                self.chunks.push(Chunk::new(self.chunk_size));
                assert_eq!(self.current_chunk_index, self.chunks.len() - 1);
                log::trace!(
                    "increased element arena capacity to {}kb",
                    self.capacity() / 1024,
                );
            }
            current_chunk = &mut self.chunks[self.current_chunk_index];
            if let Some(ptr) = current_chunk.allocate(layout) {
                ptr.as_ptr()
            } else {
                panic!(
                    "Arena chunk_size of {} is too small to allocate {} bytes",
                    self.chunk_size,
                    layout.size()
                );
            }
        };

        unsafe { inner_writer(ptr.cast(), f) };
        self.elements.push(ArenaElement {
            value: ptr,
            drop: drop::<T>,
        });

        ArenaBox {
            ptr: ptr.cast(),
            valid: self.valid.clone(),
        }
    }
}

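/// An owning pointer into an [`Arena`]. It has no `Drop` impl of its own; the
/// arena runs destructors on `clear`, after which any dereference panics via
/// the shared `valid` flag.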
pub struct ArenaBox<T: ?Sized> {
    ptr: *mut T,
    valid: Rc<Cell<bool>>,
}

impl<T: ?Sized> ArenaBox<T> {
    #[inline(always)]
    pub fn map<U: ?Sized>(mut self, f: impl FnOnce(&mut T) -> &mut U) -> ArenaBox<U> {
        ArenaBox {
            ptr: f(&mut self),
            valid: self.valid,
        }
    }

    #[track_caller]
    fn validate(&self) {
        assert!(
            self.valid.get(),
            "attempted to dereference an ArenaBox after its Arena was cleared"
        );
    }
}

impl<T: ?Sized> Deref for ArenaBox<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        self.validate();
        unsafe { &*self.ptr }
    }
}

impl<T: ?Sized> DerefMut for ArenaBox<T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.validate();
        unsafe { &mut *self.ptr }
    }
}

#[cfg(test)]
mod tests {
    use std::{cell::Cell, rc::Rc};

    use super::*;

    #[test]
    fn test_arena() {
        let mut arena = Arena::new(1024);
        let a = arena.alloc(|| 1u64);
        let b = arena.alloc(|| 2u32);
        let c = arena.alloc(|| 3u16);
        let d = arena.alloc(|| 4u8);
        assert_eq!(*a, 1);
        assert_eq!(*b, 2);
        assert_eq!(*c, 3);
        assert_eq!(*d, 4);

        arena.clear();
        let a = arena.alloc(|| 5u64);
        let b = arena.alloc(|| 6u32);
        let c = arena.alloc(|| 7u16);
        let d = arena.alloc(|| 8u8);
        assert_eq!(*a, 5);
        assert_eq!(*b, 6);
        assert_eq!(*c, 7);
        assert_eq!(*d, 8);

        // Ensure drop gets called.
        let dropped = Rc::new(Cell::new(false));
        struct DropGuard(Rc<Cell<bool>>);
        impl Drop for DropGuard {
            fn drop(&mut self) {
                self.0.set(true);
            }
        }
        arena.alloc(|| DropGuard(dropped.clone()));
        arena.clear();
        assert!(dropped.get());
    }

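    // A sketch exercising `ArenaBox::map` to project a box onto a field of
    // the stored value; the `Pair` type here is illustrative, not part of the
    // arena's API.
    #[test]
    fn test_arena_box_map() {
        struct Pair {
            a: u32,
            b: u32,
        }

        let mut arena = Arena::new(64);
        let pair = arena.alloc(|| Pair { a: 1, b: 2 });
        assert_eq!(pair.a, 1);
        let mut b = pair.map(|pair| &mut pair.b);
        *b = 20;
        assert_eq!(*b, 20);
    }
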
    #[test]
    fn test_arena_grow() {
        let mut arena = Arena::new(8);
        arena.alloc(|| 1u64);
        arena.alloc(|| 2u64);

        assert_eq!(arena.capacity(), 16);

        arena.alloc(|| 3u32);
        arena.alloc(|| 4u32);

        assert_eq!(arena.capacity(), 24);
    }

    #[test]
    fn test_arena_alignment() {
        let mut arena = Arena::new(256);
        let x1 = arena.alloc(|| 1u8);
        let x2 = arena.alloc(|| 2u16);
        let x3 = arena.alloc(|| 3u32);
        let x4 = arena.alloc(|| 4u64);
        let x5 = arena.alloc(|| 5u64);

        assert_eq!(*x1, 1);
        assert_eq!(*x2, 2);
        assert_eq!(*x3, 3);
        assert_eq!(*x4, 4);
        assert_eq!(*x5, 5);

        assert_eq!(x1.ptr.align_offset(std::mem::align_of_val(&*x1)), 0);
        assert_eq!(x2.ptr.align_offset(std::mem::align_of_val(&*x2)), 0);
    }

    #[test]
    #[should_panic(expected = "attempted to dereference an ArenaBox after its Arena was cleared")]
    fn test_arena_use_after_clear() {
        let mut arena = Arena::new(16);
        let value = arena.alloc(|| 1u64);

        arena.clear();
        let _read_value = *value;
    }
}