use std::{
    alloc,
    cell::Cell,
    ops::{Deref, DerefMut},
    ptr,
    rc::Rc,
};

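/// A single arena allocation: a raw pointer to the value paired with a type-erased
/// drop function, so the value's destructor can be run when the arena is cleared.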
struct ArenaElement {
    value: *mut u8,
    drop: unsafe fn(*mut u8),
}

impl Drop for ArenaElement {
    #[inline(always)]
    fn drop(&mut self) {
        unsafe {
            (self.drop)(self.value);
        }
    }
}

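/// A simple bump allocator backed by one fixed-size buffer. Values of arbitrary types
/// are written sequentially into the buffer, and `clear` drops them all at once while
/// invalidating every outstanding `ArenaBox`/`ArenaRef` through the shared `valid` flag.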
pub struct Arena {
    start: *mut u8,
    end: *mut u8,
    offset: *mut u8,
    elements: Vec<ArenaElement>,
    valid: Rc<Cell<bool>>,
}

impl Arena {
    pub fn new(size_in_bytes: usize) -> Self {
        unsafe {
            let layout = alloc::Layout::from_size_align(size_in_bytes, 1).unwrap();
            let start = alloc::alloc(layout);
            // Abort on allocation failure rather than continuing with a null pointer.
            if start.is_null() {
                alloc::handle_alloc_error(layout);
            }
            let end = start.add(size_in_bytes);
            Self {
                start,
                end,
                offset: start,
                elements: Vec::new(),
                valid: Rc::new(Cell::new(true)),
            }
        }
    }

    pub fn len(&self) -> usize {
        self.offset as usize - self.start as usize
    }

    pub fn capacity(&self) -> usize {
        self.end as usize - self.start as usize
    }

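    /// Drops every value allocated since the last clear and resets the bump pointer.
    /// Replacing the shared `valid` flag makes all outstanding handles panic on deref.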
    pub fn clear(&mut self) {
        self.valid.set(false);
        self.valid = Rc::new(Cell::new(true));
        self.elements.clear();
        self.offset = self.start;
    }

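    /// Reserves space for a `T`, constructs it in place by calling `f`, and returns an
    /// `ArenaBox` handle to it. Panics if the arena does not have enough space left.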
    #[inline(always)]
    pub fn alloc<T>(&mut self, f: impl FnOnce() -> T) -> ArenaBox<T> {
        // Writes the closure's result straight into the destination with `ptr::write`,
        // so the uninitialized arena memory is never read or dropped.
        #[inline(always)]
        unsafe fn inner_writer<T, F>(ptr: *mut T, f: F)
        where
            F: FnOnce() -> T,
        {
            unsafe {
                ptr::write(ptr, f());
            }
        }

        // Type-erased destructor stored alongside each allocation.
        unsafe fn drop<T>(ptr: *mut u8) {
            unsafe {
                std::ptr::drop_in_place(ptr.cast::<T>());
            }
        }

        unsafe {
            // Round the current offset up to `T`'s alignment and check that the value fits.
            let layout = alloc::Layout::new::<T>();
            let offset = self.offset.add(self.offset.align_offset(layout.align()));
            let next_offset = offset.add(layout.size());
            assert!(next_offset <= self.end, "not enough space in Arena");

            let result = ArenaBox {
                ptr: offset.cast(),
                valid: self.valid.clone(),
            };

            // Construct the value in place, remember how to drop it, and advance the
            // bump pointer past it.
            inner_writer(result.ptr, f);
            self.elements.push(ArenaElement {
                value: offset,
                drop: drop::<T>,
            });
            self.offset = next_offset;

            result
        }
    }
}

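// Dropping the arena runs the destructors of everything still allocated in it. Note
// that the backing buffer obtained in `new` is never deallocated.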
impl Drop for Arena {
    fn drop(&mut self) {
        self.clear();
    }
}

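/// An owning handle to a value stored in an [`Arena`]. Dereferencing panics if the
/// arena has been cleared since the value was allocated.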
pub struct ArenaBox<T: ?Sized> {
    ptr: *mut T,
    valid: Rc<Cell<bool>>,
}

impl<T: ?Sized> ArenaBox<T> {
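    /// Projects this handle to a value borrowed from `T` (such as a field), keeping the
    /// same validity flag so the projected handle is invalidated by the same `clear`.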
    #[inline(always)]
    pub fn map<U: ?Sized>(mut self, f: impl FnOnce(&mut T) -> &mut U) -> ArenaBox<U> {
        ArenaBox {
            ptr: f(&mut self),
            valid: self.valid,
        }
    }

    #[track_caller]
    fn validate(&self) {
        assert!(
            self.valid.get(),
            "attempted to dereference an ArenaRef after its Arena was cleared"
        );
    }
}

impl<T: ?Sized> Deref for ArenaBox<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        self.validate();
        unsafe { &*self.ptr }
    }
}

impl<T: ?Sized> DerefMut for ArenaBox<T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.validate();
        unsafe { &mut *self.ptr }
    }
}

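/// A cloneable, shared handle to a value stored in an [`Arena`]. Unlike [`ArenaBox`]
/// it only provides shared access, and all clones are invalidated by the same `clear`.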
pub struct ArenaRef<T: ?Sized>(ArenaBox<T>);

impl<T: ?Sized> From<ArenaBox<T>> for ArenaRef<T> {
    fn from(value: ArenaBox<T>) -> Self {
        ArenaRef(value)
    }
}

impl<T: ?Sized> Clone for ArenaRef<T> {
    fn clone(&self) -> Self {
        Self(ArenaBox {
            ptr: self.0.ptr,
            valid: self.0.valid.clone(),
        })
    }
}

impl<T: ?Sized> Deref for ArenaRef<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        self.0.deref()
    }
}

#[cfg(test)]
mod tests {
    use std::{cell::Cell, rc::Rc};

    use super::*;

    #[test]
    fn test_arena() {
        let mut arena = Arena::new(1024);
        let a = arena.alloc(|| 1u64);
        let b = arena.alloc(|| 2u32);
        let c = arena.alloc(|| 3u16);
        let d = arena.alloc(|| 4u8);
        assert_eq!(*a, 1);
        assert_eq!(*b, 2);
        assert_eq!(*c, 3);
        assert_eq!(*d, 4);

        arena.clear();
        let a = arena.alloc(|| 5u64);
        let b = arena.alloc(|| 6u32);
        let c = arena.alloc(|| 7u16);
        let d = arena.alloc(|| 8u8);
        assert_eq!(*a, 5);
        assert_eq!(*b, 6);
        assert_eq!(*c, 7);
        assert_eq!(*d, 8);

        // Ensure drop gets called.
        let dropped = Rc::new(Cell::new(false));
        struct DropGuard(Rc<Cell<bool>>);
        impl Drop for DropGuard {
            fn drop(&mut self) {
                self.0.set(true);
            }
        }
        arena.alloc(|| DropGuard(dropped.clone()));
        arena.clear();
        assert!(dropped.get());
    }

    #[test]
    #[should_panic(expected = "not enough space in Arena")]
    fn test_arena_overflow() {
        let mut arena = Arena::new(16);
        arena.alloc(|| 1u64);
        arena.alloc(|| 2u64);
        // This should panic.
        arena.alloc(|| 3u64);
    }

    #[test]
    fn test_arena_alignment() {
        let mut arena = Arena::new(256);
        let x1 = arena.alloc(|| 1u8);
        let x2 = arena.alloc(|| 2u16);
        let x3 = arena.alloc(|| 3u32);
        let x4 = arena.alloc(|| 4u64);
        let x5 = arena.alloc(|| 5u64);

        assert_eq!(*x1, 1);
        assert_eq!(*x2, 2);
        assert_eq!(*x3, 3);
        assert_eq!(*x4, 4);
        assert_eq!(*x5, 5);

        assert_eq!(x1.ptr.align_offset(std::mem::align_of_val(&*x1)), 0);
        assert_eq!(x2.ptr.align_offset(std::mem::align_of_val(&*x2)), 0);
    }

    #[test]
    #[should_panic(expected = "attempted to dereference an ArenaRef after its Arena was cleared")]
    fn test_arena_use_after_clear() {
        let mut arena = Arena::new(16);
        let value = arena.alloc(|| 1u64);

        arena.clear();
        let _read_value = *value;
    }
}