@@ -1,5 +1,5 @@
use std::{
- alloc,
+ alloc::{self, handle_alloc_error},
cell::Cell,
ops::{Deref, DerefMut},
ptr,
@@ -20,43 +20,98 @@ impl Drop for ArenaElement {
}
}
-pub struct Arena {
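+/// A fixed-size block of memory that arena values are bump-allocated into.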
+struct Chunk {
start: *mut u8,
end: *mut u8,
offset: *mut u8,
- elements: Vec<ArenaElement>,
- valid: Rc<Cell<bool>>,
}
-impl Arena {
- pub fn new(size_in_bytes: usize) -> Self {
+impl Drop for Chunk {
+ fn drop(&mut self) {
unsafe {
- let layout = alloc::Layout::from_size_align(size_in_bytes, 1).unwrap();
+ let chunk_size = self.end.offset_from_unsigned(self.start);
+ // this cannot fail: the same size and alignment succeeded in Chunk::new
+ let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap();
+ alloc::dealloc(self.start, layout);
+ }
+ }
+}
+
+impl Chunk {
+ fn new(chunk_size: usize) -> Self {
+ unsafe {
+ // this only fails if chunk_size exceeds isize::MAX
+ let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap();
let start = alloc::alloc(layout);
- let end = start.add(size_in_bytes);
+ if start.is_null() {
+ handle_alloc_error(layout);
+ }
+ let end = start.add(chunk_size);
Self {
start,
end,
offset: start,
- elements: Vec::new(),
- valid: Rc::new(Cell::new(true)),
}
}
}
- pub fn len(&self) -> usize {
- self.offset as usize - self.start as usize
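+ /// Bump-allocates `layout` from this chunk, returning `None` if the remaining space can't fit it.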
+ fn allocate(&mut self, layout: alloc::Layout) -> Option<*mut u8> {
+ unsafe {
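+ // round the current offset up to the layout's alignment, then reserve layout.size() bytes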
+ let aligned = self.offset.add(self.offset.align_offset(layout.align()));
+ let next = aligned.add(layout.size());
+
+ if next <= self.end {
+ self.offset = next;
+ Some(aligned)
+ } else {
+ None
+ }
+ }
+ }
+
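+ /// Rewinds the bump pointer so the chunk's memory can be reused by later allocations.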
+ fn reset(&mut self) {
+ self.offset = self.start;
+ }
+}
+
+pub struct Arena {
+ chunks: Vec<Chunk>,
+ elements: Vec<ArenaElement>,
+ valid: Rc<Cell<bool>>,
+ current_chunk_index: usize,
+ chunk_size: usize,
+}
+
+impl Drop for Arena {
+ fn drop(&mut self) {
+ self.clear();
+ }
+}
+
+impl Arena {
+ pub fn new(chunk_size: usize) -> Self {
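+ // alloc::alloc requires a non-zero size, and a zero-sized chunk could never hold a value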
+ assert!(chunk_size > 0);
+ Self {
+ chunks: vec![Chunk::new(chunk_size)],
+ elements: Vec::new(),
+ valid: Rc::new(Cell::new(true)),
+ current_chunk_index: 0,
+ chunk_size,
+ }
}
pub fn capacity(&self) -> usize {
- self.end as usize - self.start as usize
+ self.chunks.len() * self.chunk_size
}
pub fn clear(&mut self) {
self.valid.set(false);
self.valid = Rc::new(Cell::new(true));
self.elements.clear();
- self.offset = self.start;
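+ // only chunks up to current_chunk_index were used since the last clear; later chunks are already reset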
+ for chunk_index in 0..=self.current_chunk_index {
+ self.chunks[chunk_index].reset();
+ }
+ self.current_chunk_index = 0;
}
#[inline(always)]
@@ -79,33 +134,45 @@ impl Arena {
unsafe {
let layout = alloc::Layout::new::<T>();
- let offset = self.offset.add(self.offset.align_offset(layout.align()));
- let next_offset = offset.add(layout.size());
- assert!(next_offset <= self.end, "not enough space in Arena");
-
- let result = ArenaBox {
- ptr: offset.cast(),
- valid: self.valid.clone(),
+ let mut current_chunk = &mut self.chunks[self.current_chunk_index];
+ let ptr = if let Some(ptr) = current_chunk.allocate(layout) {
+ ptr
+ } else {
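+ // the current chunk is full; advance to the next chunk, growing the arena if none are left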
+ self.current_chunk_index += 1;
+ if self.current_chunk_index >= self.chunks.len() {
+ self.chunks.push(Chunk::new(self.chunk_size));
+ assert_eq!(self.current_chunk_index, self.chunks.len() - 1);
+ log::info!(
+ "increased element arena capacity to {}kb",
+ self.capacity() / 1024,
+ );
+ }
+ current_chunk = &mut self.chunks[self.current_chunk_index];
+ if let Some(ptr) = current_chunk.allocate(layout) {
+ ptr
+ } else {
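+ // a freshly entered chunk is empty, so the configured chunk_size cannot hold this value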
+ panic!(
+ "Arena chunk_size of {} is too small to allocate {} bytes",
+ self.chunk_size,
+ layout.size()
+ );
+ }
};
- inner_writer(result.ptr, f);
+ inner_writer(ptr.cast(), f);
self.elements.push(ArenaElement {
- value: offset,
+ value: ptr,
drop: drop::<T>,
});
- self.offset = next_offset;
- result
+ ArenaBox {
+ ptr: ptr.cast(),
+ valid: self.valid.clone(),
+ }
}
}
}
-impl Drop for Arena {
- fn drop(&mut self) {
- self.clear();
- }
-}
-
pub struct ArenaBox<T: ?Sized> {
ptr: *mut T,
valid: Rc<Cell<bool>>,
@@ -215,13 +282,17 @@ mod tests {
}
#[test]
- #[should_panic(expected = "not enough space in Arena")]
- fn test_arena_overflow() {
- let mut arena = Arena::new(16);
+ fn test_arena_grow() {
+ let mut arena = Arena::new(8);
arena.alloc(|| 1u64);
arena.alloc(|| 2u64);
- // This should panic.
- arena.alloc(|| 3u64);
+
+ assert_eq!(arena.capacity(), 16);
+
+ arena.alloc(|| 3u32);
+ arena.alloc(|| 4u32);
+
+ assert_eq!(arena.capacity(), 24);
}
#[test]
@@ -206,8 +206,7 @@ slotmap::new_key_type! {
}
thread_local! {
- /// 8MB wasn't quite enough...
- pub(crate) static ELEMENT_ARENA: RefCell<Arena> = RefCell::new(Arena::new(32 * 1024 * 1024));
+ pub(crate) static ELEMENT_ARENA: RefCell<Arena> = RefCell::new(Arena::new(1024 * 1024));
}
/// Returned when the element arena has been used and so must be cleared before the next draw.
@@ -218,12 +217,8 @@ impl ArenaClearNeeded {
/// Clear the element arena.
pub fn clear(self) {
ELEMENT_ARENA.with_borrow_mut(|element_arena| {
- let percentage = (element_arena.len() as f32 / element_arena.capacity() as f32) * 100.;
- if percentage >= 80. {
- log::warn!("elevated element arena occupation: {}.", percentage);
- }
element_arena.clear();
- })
+ });
}
}