use std::{
    alloc,
    cell::Cell,
    ops::{Deref, DerefMut},
    ptr,
    rc::Rc,
};

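/// A type-erased record of one value stored in the arena: the raw pointer to
/// the value plus the monomorphized function that knows how to drop it in
/// place.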
struct ArenaElement {
    value: *mut u8,
    drop: unsafe fn(*mut u8),
}

impl Drop for ArenaElement {
    #[inline(always)]
    fn drop(&mut self) {
        unsafe {
            (self.drop)(self.value);
        }
    }
}

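/// A bump allocator backed by a single contiguous buffer. Values are written
/// one after another starting at `start`; `offset` is the current bump
/// pointer and `end` marks the end of the buffer. The shared `valid` flag is
/// handed to every `ArenaBox` so stale handles can be detected after the
/// arena is cleared.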
pub struct Arena {
    start: *mut u8,
    end: *mut u8,
    offset: *mut u8,
    elements: Vec<ArenaElement>,
    valid: Rc<Cell<bool>>,
}

impl Arena {
    pub fn new(size_in_bytes: usize) -> Self {
        unsafe {
            let layout = alloc::Layout::from_size_align(size_in_bytes, 1).unwrap();
            let start = alloc::alloc(layout);
            if start.is_null() {
                // Abort rather than hand out a null bump pointer on OOM.
                alloc::handle_alloc_error(layout);
            }
            let end = start.add(size_in_bytes);
            Self {
                start,
                end,
                offset: start,
                elements: Vec::new(),
                valid: Rc::new(Cell::new(true)),
            }
        }
    }

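    /// Drops every value allocated since the last clear and rewinds the bump
    /// pointer. Swapping in a fresh `valid` cell invalidates all outstanding
    /// `ArenaBox`/`ArenaRef` handles, so later dereferences panic instead of
    /// reading reused memory.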
    pub fn clear(&mut self) {
        self.valid.set(false);
        self.valid = Rc::new(Cell::new(true));
        self.elements.clear();
        self.offset = self.start;
    }

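    /// Allocates space for a `T`, runs `f` to construct it directly in place,
    /// and returns an `ArenaBox` pointing into the arena. Panics if the
    /// buffer is exhausted.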
    #[inline(always)]
    pub fn alloc<T>(&mut self, f: impl FnOnce() -> T) -> ArenaBox<T> {
        #[inline(always)]
        unsafe fn inner_writer<T, F>(ptr: *mut T, f: F)
        where
            F: FnOnce() -> T,
        {
            ptr::write(ptr, f());
        }

        unsafe fn drop<T>(ptr: *mut u8) {
            std::ptr::drop_in_place(ptr.cast::<T>());
        }

        unsafe {
            let layout = alloc::Layout::new::<T>();
            // Bump the offset up to the alignment required by `T` before
            // reserving space; padding the size alone does not make the
            // current offset suitably aligned for `T`.
            let offset = self.offset.add(self.offset.align_offset(layout.align()));
            let next_offset = offset.add(layout.size());
            assert!(next_offset <= self.end, "arena is out of space");

            let result = ArenaBox {
                ptr: offset.cast(),
                valid: self.valid.clone(),
            };

            inner_writer(result.ptr, f);
            self.elements.push(ArenaElement {
                value: offset,
                drop: drop::<T>,
            });
            self.offset = next_offset;

            result
        }
    }
}

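/// Dropping the arena drops any values that are still live and then releases
/// the backing buffer.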
impl Drop for Arena {
    fn drop(&mut self) {
        self.clear();
        unsafe {
            let size_in_bytes = self.end as usize - self.start as usize;
            let layout = alloc::Layout::from_size_align(size_in_bytes, 1).unwrap();
            alloc::dealloc(self.start, layout);
        }
    }
}

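/// An owning handle to a value stored in the arena. Dereferencing checks the
/// shared `valid` flag, so using a handle after `Arena::clear` panics instead
/// of touching memory that may have been reused.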
pub struct ArenaBox<T: ?Sized> {
    ptr: *mut T,
    valid: Rc<Cell<bool>>,
}

impl<T: ?Sized> ArenaBox<T> {
    #[inline(always)]
    pub fn map<U: ?Sized>(mut self, f: impl FnOnce(&mut T) -> &mut U) -> ArenaBox<U> {
        ArenaBox {
            ptr: f(&mut self),
            valid: self.valid,
        }
    }

    fn validate(&self) {
        assert!(
            self.valid.get(),
            "attempted to dereference an ArenaRef after its Arena was cleared"
        );
    }
}

impl<T: ?Sized> Deref for ArenaBox<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        self.validate();
        unsafe { &*self.ptr }
    }
}

impl<T: ?Sized> DerefMut for ArenaBox<T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.validate();
        unsafe { &mut *self.ptr }
    }
}

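/// A cloneable, read-only wrapper around an `ArenaBox`. Clones share the same
/// pointer and validity flag, so every copy is invalidated together when the
/// arena is cleared.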
pub struct ArenaRef<T: ?Sized>(ArenaBox<T>);

impl<T: ?Sized> From<ArenaBox<T>> for ArenaRef<T> {
    fn from(value: ArenaBox<T>) -> Self {
        ArenaRef(value)
    }
}

impl<T: ?Sized> Clone for ArenaRef<T> {
    fn clone(&self) -> Self {
        Self(ArenaBox {
            ptr: self.0.ptr,
            valid: self.0.valid.clone(),
        })
    }
}

impl<T: ?Sized> Deref for ArenaRef<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        self.0.deref()
    }
}

#[cfg(test)]
mod tests {
    use std::{cell::Cell, rc::Rc};

    use super::*;

    #[test]
    fn test_arena() {
        let mut arena = Arena::new(1024);
        let a = arena.alloc(|| 1u64);
        let b = arena.alloc(|| 2u32);
        let c = arena.alloc(|| 3u16);
        let d = arena.alloc(|| 4u8);
        assert_eq!(*a, 1);
        assert_eq!(*b, 2);
        assert_eq!(*c, 3);
        assert_eq!(*d, 4);

        arena.clear();
        let a = arena.alloc(|| 5u64);
        let b = arena.alloc(|| 6u32);
        let c = arena.alloc(|| 7u16);
        let d = arena.alloc(|| 8u8);
        assert_eq!(*a, 5);
        assert_eq!(*b, 6);
        assert_eq!(*c, 7);
        assert_eq!(*d, 8);

        // Ensure drop gets called.
        let dropped = Rc::new(Cell::new(false));
        struct DropGuard(Rc<Cell<bool>>);
        impl Drop for DropGuard {
            fn drop(&mut self) {
                self.0.set(true);
            }
        }
        arena.alloc(|| DropGuard(dropped.clone()));
        arena.clear();
        assert!(dropped.get());
    }
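
    // Dereferencing a handle after the arena has been cleared should trip the
    // validity assertion rather than read memory that may have been reused.
    #[test]
    #[should_panic(expected = "attempted to dereference")]
    fn test_deref_after_clear_panics() {
        let mut arena = Arena::new(64);
        let value = arena.alloc(|| 42u32);
        arena.clear();
        assert_eq!(*value, 42);
    }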
}