1use std::{
2 alloc::{self, handle_alloc_error},
3 cell::Cell,
4 num::NonZeroUsize,
5 ops::{Deref, DerefMut},
6 ptr::{self, NonNull},
7 rc::Rc,
8};
9
/// A type-erased record of one value allocated in the arena, kept so the
/// arena can run the value's destructor when it is cleared or dropped.
struct ArenaElement {
    // Pointer into one of the arena's chunks where the value was written.
    value: *mut u8,
    // Monomorphized drop glue for the value's concrete type
    // (instantiated in `Arena::alloc`).
    drop: unsafe fn(*mut u8),
}
14
impl Drop for ArenaElement {
    #[inline(always)]
    fn drop(&mut self) {
        // SAFETY: `value` points at a live, initialized value of the type
        // `drop` was instantiated for (both were recorded together in
        // `Arena::alloc`), and each element is dropped at most once because
        // elements only drop when the owning `elements` vec releases them.
        unsafe {
            (self.drop)(self.value);
        }
    }
}
23
/// One fixed-size bump-allocation region backing the arena.
struct Chunk {
    // Base of the allocation returned by `alloc::alloc`.
    start: *mut u8,
    // One-past-the-end pointer of the allocation (`start + chunk_size`).
    end: *mut u8,
    // Bump pointer: the next free byte; always within `start..=end`.
    offset: *mut u8,
}
29
impl Drop for Chunk {
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `start` and `end` bound the same single allocation
            // (established in `Chunk::new`), so their distance is its size.
            let chunk_size = self.end.offset_from_unsigned(self.start);
            // SAFETY: This succeeded during allocation.
            let layout = alloc::Layout::from_size_align_unchecked(chunk_size, 1);
            alloc::dealloc(self.start, layout);
        }
    }
}
40
41impl Chunk {
42 fn new(chunk_size: NonZeroUsize) -> Self {
43 unsafe {
44 // this only fails if chunk_size is unreasonably huge
45 let layout = alloc::Layout::from_size_align(chunk_size.get(), 1).unwrap();
46 let start = alloc::alloc(layout);
47 if start.is_null() {
48 handle_alloc_error(layout);
49 }
50 let end = start.add(chunk_size.get());
51 Self {
52 start,
53 end,
54 offset: start,
55 }
56 }
57 }
58
59 fn allocate(&mut self, layout: alloc::Layout) -> Option<NonNull<u8>> {
60 unsafe {
61 let aligned = self.offset.add(self.offset.align_offset(layout.align()));
62 let next = aligned.add(layout.size());
63
64 if next <= self.end {
65 self.offset = next;
66 NonNull::new(aligned)
67 } else {
68 None
69 }
70 }
71 }
72
73 fn reset(&mut self) {
74 self.offset = self.start;
75 }
76}
77
/// A bump allocator whose allocations are handed out as checked `ArenaBox`
/// smart pointers that are invalidated en masse by `clear`.
pub struct Arena {
    // Backing memory regions; grown on demand and reused after `clear`.
    chunks: Vec<Chunk>,
    // Drop records for every live allocation, in allocation order.
    elements: Vec<ArenaElement>,
    // Shared flag cloned into every `ArenaBox`; set to `false` on `clear` so
    // stale boxes panic instead of reading reused memory.
    valid: Rc<Cell<bool>>,
    // Index of the chunk currently being bump-allocated from.
    current_chunk_index: usize,
    // Size in bytes of each chunk.
    chunk_size: NonZeroUsize,
}
85
impl Drop for Arena {
    fn drop(&mut self) {
        // Runs every live element's destructor (and invalidates outstanding
        // `ArenaBox`es) before the chunks themselves are deallocated.
        self.clear();
    }
}
91
impl Arena {
    /// Creates an arena that grows in chunks of `chunk_size` bytes.
    ///
    /// Panics if `chunk_size` is zero.
    pub fn new(chunk_size: usize) -> Self {
        let chunk_size = NonZeroUsize::try_from(chunk_size).unwrap();
        Self {
            chunks: vec![Chunk::new(chunk_size)],
            elements: Vec::new(),
            valid: Rc::new(Cell::new(true)),
            current_chunk_index: 0,
            chunk_size,
        }
    }

    /// Total bytes reserved across all chunks (not bytes currently in use).
    pub fn capacity(&self) -> usize {
        self.chunks.len() * self.chunk_size.get()
    }

    /// Drops every allocated element and makes all chunks reusable.
    ///
    /// Any outstanding `ArenaBox` is invalidated: dereferencing one after this
    /// call panics rather than exposing reused memory.
    pub fn clear(&mut self) {
        // Invalidate existing boxes, then start a fresh validity epoch for
        // allocations made after this call.
        self.valid.set(false);
        self.valid = Rc::new(Cell::new(true));
        // Dropping the elements runs each value's destructor
        // (see `ArenaElement::drop`).
        self.elements.clear();
        // Only chunks up to `current_chunk_index` have been allocated from.
        for chunk_index in 0..=self.current_chunk_index {
            self.chunks[chunk_index].reset();
        }
        self.current_chunk_index = 0;
    }

    /// Allocates space for a `T`, initializes it in place with `f`, and
    /// returns a checked smart pointer to it.
    ///
    /// Panics if a single `T` cannot fit in an empty chunk of `chunk_size`.
    #[inline(always)]
    pub fn alloc<T>(&mut self, f: impl FnOnce() -> T) -> ArenaBox<T> {
        // Writes `f()` directly into `ptr` without an intermediate move.
        #[inline(always)]
        unsafe fn inner_writer<T, F>(ptr: *mut T, f: F)
        where
            F: FnOnce() -> T,
        {
            unsafe {
                ptr::write(ptr, f());
            }
        }

        // Type-erased drop glue recorded in `ArenaElement`.
        unsafe fn drop<T>(ptr: *mut u8) {
            unsafe {
                std::ptr::drop_in_place(ptr.cast::<T>());
            }
        }

        unsafe {
            let layout = alloc::Layout::new::<T>();
            let mut current_chunk = &mut self.chunks[self.current_chunk_index];
            let ptr = if let Some(ptr) = current_chunk.allocate(layout) {
                ptr.as_ptr()
            } else {
                // Current chunk is full: advance to the next chunk, growing
                // the arena if we've run off the end of `chunks`.
                self.current_chunk_index += 1;
                if self.current_chunk_index >= self.chunks.len() {
                    self.chunks.push(Chunk::new(self.chunk_size));
                    assert_eq!(self.current_chunk_index, self.chunks.len() - 1);
                    log::trace!(
                        "increased element arena capacity to {}kb",
                        self.capacity() / 1024,
                    );
                }
                current_chunk = &mut self.chunks[self.current_chunk_index];
                if let Some(ptr) = current_chunk.allocate(layout) {
                    ptr.as_ptr()
                } else {
                    // Even a completely empty chunk can't hold one `T`.
                    panic!(
                        "Arena chunk_size of {} is too small to allocate {} bytes",
                        self.chunk_size,
                        layout.size()
                    );
                }
            };

            // Initialize the value *before* registering its drop glue, so we
            // never record a destructor for uninitialized memory (e.g. if `f`
            // panics, nothing is pushed to `elements`).
            inner_writer(ptr.cast(), f);
            self.elements.push(ArenaElement {
                value: ptr,
                drop: drop::<T>,
            });

            ArenaBox {
                ptr: ptr.cast(),
                valid: self.valid.clone(),
            }
        }
    }
}
176
/// A smart pointer to an arena-allocated value.
///
/// It shares the arena's validity flag: after `Arena::clear`, dereferencing
/// panics instead of exposing freed/reused memory.
pub struct ArenaBox<T: ?Sized> {
    // Raw pointer into the arena's chunk storage.
    ptr: *mut T,
    // `true` while the allocating arena's epoch is still alive.
    valid: Rc<Cell<bool>>,
}
181
impl<T: ?Sized> ArenaBox<T> {
    /// Projects this box to a sub-borrow of the value (e.g. a field),
    /// preserving the validity flag in the returned box.
    #[inline(always)]
    pub fn map<U: ?Sized>(mut self, f: impl FnOnce(&mut T) -> &mut U) -> ArenaBox<U> {
        ArenaBox {
            // `&mut self` routes through `DerefMut`, which validates before
            // yielding `&mut T`; the returned `&mut U` coerces to `*mut U`.
            ptr: f(&mut self),
            valid: self.valid,
        }
    }

    /// Panics if the owning arena has been cleared since this box was created.
    #[track_caller]
    fn validate(&self) {
        assert!(
            self.valid.get(),
            "attempted to dereference an ArenaRef after its Arena was cleared"
        );
    }
}
199
impl<T: ?Sized> Deref for ArenaBox<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        self.validate();
        // SAFETY: `validate` just confirmed the arena epoch is alive, so
        // `ptr` still points at a live, initialized `T`.
        unsafe { &*self.ptr }
    }
}
209
impl<T: ?Sized> DerefMut for ArenaBox<T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.validate();
        // SAFETY: `validate` just confirmed the arena epoch is alive, so
        // `ptr` still points at a live, initialized `T`.
        unsafe { &mut *self.ptr }
    }
}
217
#[cfg(test)]
mod tests {
    use std::{cell::Cell, rc::Rc};

    use super::*;

    #[test]
    fn test_arena() {
        let mut arena = Arena::new(1024);

        // Mixed-size allocations all read back their initial values.
        let first = arena.alloc(|| 1u64);
        let second = arena.alloc(|| 2u32);
        let third = arena.alloc(|| 3u16);
        let fourth = arena.alloc(|| 4u8);
        assert_eq!(*first, 1);
        assert_eq!(*second, 2);
        assert_eq!(*third, 3);
        assert_eq!(*fourth, 4);

        // After clearing, the arena hands out fresh, correctly-initialized
        // allocations again.
        arena.clear();
        let first = arena.alloc(|| 5u64);
        let second = arena.alloc(|| 6u32);
        let third = arena.alloc(|| 7u16);
        let fourth = arena.alloc(|| 8u8);
        assert_eq!(*first, 5);
        assert_eq!(*second, 6);
        assert_eq!(*third, 7);
        assert_eq!(*fourth, 8);

        // Ensure drop gets called.
        struct DropGuard(Rc<Cell<bool>>);
        impl Drop for DropGuard {
            fn drop(&mut self) {
                self.0.set(true);
            }
        }
        let dropped = Rc::new(Cell::new(false));
        arena.alloc(|| DropGuard(dropped.clone()));
        arena.clear();
        assert!(dropped.get());
    }

    #[test]
    fn test_arena_grow() {
        let mut arena = Arena::new(8);
        arena.alloc(|| 1u64);
        arena.alloc(|| 2u64);

        // Each u64 fills an entire 8-byte chunk, so a second chunk is added.
        assert_eq!(arena.capacity(), 16);

        arena.alloc(|| 3u32);
        arena.alloc(|| 4u32);

        assert_eq!(arena.capacity(), 24);
    }

    #[test]
    fn test_arena_alignment() {
        let mut arena = Arena::new(256);
        let v1 = arena.alloc(|| 1u8);
        let v2 = arena.alloc(|| 2u16);
        let v3 = arena.alloc(|| 3u32);
        let v4 = arena.alloc(|| 4u64);
        let v5 = arena.alloc(|| 5u64);

        assert_eq!(*v1, 1);
        assert_eq!(*v2, 2);
        assert_eq!(*v3, 3);
        assert_eq!(*v4, 4);
        assert_eq!(*v5, 5);

        // Pointers must satisfy the natural alignment of their types.
        assert_eq!(v1.ptr.align_offset(std::mem::align_of_val(&*v1)), 0);
        assert_eq!(v2.ptr.align_offset(std::mem::align_of_val(&*v2)), 0);
    }

    #[test]
    #[should_panic(expected = "attempted to dereference an ArenaRef after its Arena was cleared")]
    fn test_arena_use_after_clear() {
        let mut arena = Arena::new(16);
        let boxed = arena.alloc(|| 1u64);

        arena.clear();
        let _read_value = *boxed;
    }
}