diff --git a/oscars/src/alloc/arena3/alloc.rs b/oscars/src/alloc/arena3/alloc.rs
new file mode 100644
index 0000000..56a72a0
--- /dev/null
+++ b/oscars/src/alloc/arena3/alloc.rs
@@ -0,0 +1,265 @@
+use core::{alloc::LayoutError, marker::PhantomData, ptr::NonNull};
+use rust_alloc::{
+    alloc::{Layout, alloc, handle_alloc_error},
+    vec::Vec,
+};
+
+/// Errors that can occur while allocating from a [`BitmapArena`].
+#[derive(Debug, Clone)]
+pub enum ArenaAllocError {
+    LayoutError(LayoutError),
+    OutOfMemory,
+    AlignmentNotPossible,
+}
+
+impl From<LayoutError> for ArenaAllocError {
+    fn from(value: LayoutError) -> Self {
+        Self::LayoutError(value)
+    }
+}
+
+/// A type-erased pointer into an arena, tied to the arena's lifetime.
+#[derive(Debug, Clone, Copy)]
+#[repr(transparent)]
+pub struct ErasedArenaPointer<'arena>(NonNull<u8>, PhantomData<&'arena ()>);
+
+impl<'arena> ErasedArenaPointer<'arena> {
+    fn from_raw(raw: NonNull<u8>) -> Self {
+        Self(raw, PhantomData)
+    }
+
+    pub fn as_non_null(&self) -> NonNull<u8> {
+        self.0
+    }
+
+    pub fn as_raw_ptr(&self) -> *mut u8 {
+        self.0.as_ptr()
+    }
+
+    /// Returns an [`ArenaPointer`] for the current [`ErasedArenaPointer`]
+    ///
+    /// # Safety
+    ///
+    /// - `T` must be the correct type for the pointer. Casting to an invalid
+    ///   type may cause undefined behavior.
+    pub unsafe fn to_typed_arena_pointer<T>(self) -> ArenaPointer<'arena, T> {
+        ArenaPointer(self, PhantomData)
+    }
+}
+
+/// A typed pointer into an arena, tied to the arena's lifetime.
+#[derive(Debug, Clone, Copy)]
+#[repr(transparent)]
+pub struct ArenaPointer<'arena, T>(ErasedArenaPointer<'arena>, PhantomData<&'arena T>);
+
+impl<'arena, T> ArenaPointer<'arena, T> {
+    unsafe fn from_raw(raw: NonNull<T>) -> Self {
+        Self(ErasedArenaPointer::from_raw(raw.cast::<u8>()), PhantomData)
+    }
+
+    pub fn as_inner_ref(&self) -> &'arena T {
+        // SAFETY: the pointer is non-null and was initialized by
+        // `alloc_unchecked`, so it is valid for dereferencing.
+        unsafe {
+            let typed_ptr = self.0.as_raw_ptr().cast::<T>();
+            &(*typed_ptr)
+        }
+    }
+
+    /// Return a pointer to the inner T
+    ///
+    /// SAFETY:
+    ///
+    /// - Caller must ensure that T is not dropped
+    /// - Caller must ensure that the lifetime of T does not exceed its Arena.
+    pub fn as_ptr(&self) -> NonNull<T> {
+        self.0.as_non_null().cast::<T>()
+    }
+
+    /// Convert the current ArenaPointer into an `ErasedArenaPointer`
+    pub fn to_erased(self) -> ErasedArenaPointer<'arena> {
+        self.0
+    }
+}
+
+/// Placement data for a pending allocation: start bit and cell count.
+pub struct ArenaAllocationData {
+    bit_index: usize,
+    required_cells: usize,
+}
+
+#[derive(Debug)]
+#[repr(C)]
+pub struct BitmapArena<'arena> {
+    pub layout: Layout,
+    pub buffer: NonNull<u8>,
+    pub bitmap: Vec<u64>,
+
+    _marker: PhantomData<&'arena ()>,
+}
+
+impl<'arena> BitmapArena<'arena> {
+    /// Allocates a buffer of `arena_size` bytes from the global allocator
+    /// and initializes an empty arena over it; each bitmap bit tracks one
+    /// cell of `max_alignment` bytes.
+    ///
+    /// NOTE(review): no `Drop` impl frees `buffer` — confirm the leak is
+    /// intentional.
+    pub fn new(arena_size: usize, max_alignment: usize) -> Result<Self, ArenaAllocError> {
+        let layout = Layout::from_size_align(arena_size, max_alignment)?;
+        let buffer = unsafe {
+            let ptr = alloc(layout);
+            let Some(data) = NonNull::new(ptr) else {
+                handle_alloc_error(layout)
+            };
+            data
+        };
+
+        // One bit per cell, e.g. 4096 bytes / 64-byte cells = 64 bits.
+        let total_cells = arena_size / max_alignment;
+        // How many u64 words are needed to hold those bits? Round up.
+        let bitmap_u64_len = (total_cells + 63) / 64;
+
+        // Allocate only needed size for the bitmap
+        let mut bitmap = Vec::with_capacity(bitmap_u64_len);
+        bitmap.resize(bitmap_u64_len, 0);
+
+        Ok(Self {
+            bitmap,
+            layout,
+            buffer,
+            _marker: PhantomData,
+        })
+    }
+
+    /// Allocate a value, panicking if the arena cannot hold it.
+    pub fn alloc<T>(&mut self, value: T) -> ArenaPointer<'arena, T> {
+        self.try_alloc(value).unwrap()
+    }
+
+    /// Allocate a value and return a pointer to it.
+    pub fn try_alloc<T>(&mut self, value: T) -> Result<ArenaPointer<'arena, T>, ArenaAllocError> {
+        let allocation_data = self.get_allocation_data::<T>()?;
+        // SAFETY: We have checked that the allocation is valid.
+        unsafe { Ok(self.alloc_unchecked(value, allocation_data)) }
+    }
+
+    /// Writes `value` at the placement described by `alloc_data`.
+    ///
+    /// # Safety
+    ///
+    /// `alloc_data` must come from `get_allocation_data::<T>()` on this
+    /// arena and the described cell range must still be free.
+    pub unsafe fn alloc_unchecked<T>(
+        &mut self,
+        value: T,
+        alloc_data: ArenaAllocationData,
+    ) -> ArenaPointer<'arena, T> {
+        // 1. Mark as allocated
+        self.mark_range(alloc_data.bit_index, alloc_data.required_cells, true);
+
+        // 2. Calculate physical address. Each bit covers one cell of
+        // `align` bytes, so the byte offset is bit_index * cell_size
+        // (bit_index * required_cells placed objects at wrong offsets).
+        let offset = alloc_data.bit_index * self.layout.align();
+        unsafe {
+            let dst = self.buffer.as_ptr().add(offset) as *mut T;
+            dst.write(value);
+
+            ArenaPointer::from_raw(NonNull::new_unchecked(dst))
+        }
+    }
+
+    /// Computes where a `T` can be placed without committing it.
+    pub fn get_allocation_data<T>(&self) -> Result<ArenaAllocationData, ArenaAllocError> {
+        let layout = Layout::new::<T>();
+
+        // Safety check: Ensure the object doesn't require MORE alignment
+        // than our Arena cells provide.
+        if layout.align() > self.layout.align() {
+            return Err(ArenaAllocError::AlignmentNotPossible);
+        }
+
+        // Snap the size up to the cell grid; zero-sized types still take
+        // one cell so they receive a unique, in-bounds address.
+        let cell_size = self.layout.align();
+        let required_cells = ((layout.size() + cell_size - 1) / cell_size).max(1);
+
+        // 2. Find space in the bitmap
+        let bit_index = self
+            .find_free_cells(required_cells)
+            .ok_or(ArenaAllocError::OutOfMemory)?;
+
+        Ok(ArenaAllocationData {
+            bit_index,
+            required_cells,
+        })
+    }
+
+    /// Helper to set (allocate) or clear (deallocate) a range of cells
+    pub fn mark_range(&mut self, start_bit: usize, cells: usize, is_allocated: bool) {
+        for i in 0..cells {
+            let bit_idx = start_bit + i;
+            let word_idx = bit_idx / 64;
+            let bit_pos = bit_idx % 64;
+
+            if is_allocated {
+                self.bitmap[word_idx] |= 1 << bit_pos;
+            } else {
+                self.bitmap[word_idx] &= !(1 << bit_pos);
+            }
+        }
+    }
+
+    /// Searches the bitmap for 'count' consecutive free bits (0s).
+    fn find_free_cells(&self, count: usize) -> Option<usize> {
+        // Bits past `total_cells` in the last word are padding, not real
+        // cells, so any candidate index must stay below this bound.
+        let total_cells = self.layout.size() / self.layout.align();
+
+        // For a single cell request (most common in JS), this is ultra-fast.
+        if count == 1 {
+            for (word_idx, &word) in self.bitmap.iter().enumerate() {
+                if word != !0u64 {
+                    // If the word isn't full (all 1s)
+                    let bit_pos = (!word).trailing_zeros() as usize;
+                    // Only the final word can yield a padding bit here.
+                    return Some(word_idx * 64 + bit_pos).filter(|&i| i < total_cells);
+                }
+            }
+            return None;
+        }
+
+        // Multi-cell allocation search (e.g., for large objects)
+        self.find_consecutive_bits(count)
+    }
+
+    fn find_consecutive_bits(&self, count: usize) -> Option<usize> {
+        let mut continuous_free = 0;
+        let mut start_index = 0;
+
+        // Total bits to check: only real cells, never the padding bits at
+        // the tail of the last bitmap word.
+        let total_bits = self.layout.size() / self.layout.align();
+
+        for i in 0..total_bits {
+            let word_idx = i / 64;
+            let bit_pos = i % 64;
+
+            let is_free = (self.bitmap[word_idx] & (1 << bit_pos)) == 0;
+
+            if is_free {
+                if continuous_free == 0 {
+                    start_index = i;
+                }
+                continuous_free += 1;
+                if continuous_free == count {
+                    return Some(start_index);
+                }
+            } else {
+                continuous_free = 0;
+            }
+        }
+        None
+    }
+}
diff --git a/oscars/src/alloc/arena3/mod.rs b/oscars/src/alloc/arena3/mod.rs
new file mode 100644
index 0000000..8b289ef
--- /dev/null
+++ b/oscars/src/alloc/arena3/mod.rs
@@ -0,0 +1,97 @@
+use rust_alloc::vec::Vec;
+
+mod alloc;
+pub use alloc::{ArenaAllocError, ArenaPointer, BitmapArena};
+
+const DEFAULT_ARENA_SIZE: usize = 4096;
+
+/// Default upper limit of 2MB (2 ^ 21)
+const DEFAULT_HEAP_THRESHOLD: usize = 2_097_152;
+
+#[derive(Debug)]
+pub struct ArenaAllocator<'alloc> {
+    heap_threshold: usize,
+    arena_size: usize,
+    current_arena_idx: usize,
+    arenas: Vec<BitmapArena<'alloc>>,
+}
+
+impl<'alloc> Default for ArenaAllocator<'alloc> {
+    fn default() -> Self {
+        Self {
+            heap_threshold: DEFAULT_HEAP_THRESHOLD,
+            arena_size: DEFAULT_ARENA_SIZE,
+            arenas: Vec::new(),
+            current_arena_idx: 0,
+        }
+    }
+}
+
+impl<'alloc> ArenaAllocator<'alloc> {
+    pub fn with_arena_size(mut self, arena_size: usize) -> Self {
+        self.arena_size = arena_size;
+        self
+    }
+
+    pub fn with_heap_threshold(mut self, heap_threshold: usize) -> Self {
+        self.heap_threshold = heap_threshold;
+        self
+    }
+
+    pub fn arenas_len(&self) -> usize {
+        self.arenas.len()
+    }
+
+    pub fn heap_size(&self) -> usize {
+        self.arenas_len() * self.arena_size
+    }
+
+    /// True while one more arena fits under the threshold.
+    pub fn is_below_threshold(&self) -> bool {
+        // Written additively so it cannot underflow when the configured
+        // threshold is smaller than a single arena.
+        self.heap_size() + self.arena_size <= self.heap_threshold
+    }
+
+    pub fn increase_threshold(&mut self) {
+        self.heap_threshold += self.arena_size * 4
+    }
+}
+
+impl<'alloc> ArenaAllocator<'alloc> {
+    pub fn try_alloc<T>(&mut self, value: T) -> Result<ArenaPointer<'alloc, T>, ArenaAllocError> {
+        // Lazily create the first arena. (Borrow-or-insert via `match` on
+        // `get_active_arena_mut()` is rejected by the current borrow
+        // checker, so check emptiness first and then reborrow.)
+        if self.arenas.is_empty() {
+            self.initialize_new_arena()?;
+        }
+        let active = self.get_active_arena_mut().expect("must exist, we just set it");
+
+        match active.get_allocation_data::<T>() {
+            Ok(data) => unsafe { Ok(active.alloc_unchecked::<T>(value, data)) },
+            Err(ArenaAllocError::OutOfMemory) => {
+                // Current arena is full: open a fresh one and retry once.
+                self.initialize_new_arena()?;
+                let new_active = self.get_active_arena_mut().expect("must exist");
+                new_active.try_alloc(value)
+            }
+            Err(e) => Err(e),
+        }
+    }
+
+    pub fn initialize_new_arena(&mut self) -> Result<(), ArenaAllocError> {
+        let new_arena = BitmapArena::new(self.arena_size, 16)?;
+        self.arenas.push(new_arena);
+        self.current_arena_idx = self.arenas.len() - 1;
+        Ok(())
+    }
+
+    pub fn get_active_arena_mut(&mut self) -> Option<&mut BitmapArena<'alloc>> {
+        // Use the tracked index so the field stays meaningful.
+        self.arenas.get_mut(self.current_arena_idx)
+    }
+
+    /// TODO: reclaim arenas whose cells are all free.
+    pub fn drop_dead_arenas(&mut self) {}
+}
diff --git a/oscars/src/alloc/mod.rs b/oscars/src/alloc/mod.rs
index 327608d..ee55fdb 100644
--- a/oscars/src/alloc/mod.rs
+++ b/oscars/src/alloc/mod.rs
@@ -2,4 +2,5 @@
 pub mod arena;
 pub mod arena2;
+pub mod arena3;
 pub mod mempool;
 pub mod mempool2;