Made MemArena so it will actually work for multiple allocations.

Silly mistake.
Nathan Vegdahl 2017-04-09 17:29:42 -07:00
parent 662f02a78d
commit a8bea5c09d
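
Background for the fix, as a rough illustration rather than part of the commit: before this change, every allocation method took `&'a mut self` and returned a reference tied to that borrow, so the arena stayed mutably borrowed for as long as any allocation was alive and a second allocation would not compile. A minimal sketch of the failure, using the old `alloc` signature shown in the diff below (the caller function is hypothetical):

    // Old API: pub fn alloc<'a, T: Copy>(&'a mut self, value: T) -> &'a mut T
    fn demo(arena: &mut MemArena) {
        let a = arena.alloc(1u32);     // mutably borrows `arena` for as long as `a` is used
        // let b = arena.alloc(2u32);  // ERROR: cannot borrow `*arena` as mutable more than once at a time
        println!("{}", *a);
    }

Wrapping the `blocks` field in `RefCell` moves that mutability inside the arena, so the public methods can take `&self` instead.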


@@ -1,4 +1,5 @@
 use std::slice;
+use std::cell::RefCell;
 use std::mem::{size_of, align_of};
 
 const DEFAULT_BLOCK_SIZE: usize = (1 << 20) * 32; // 32 MiB
@@ -21,7 +22,7 @@ fn alignment_offset(addr: usize, alignment: usize) -> usize {
 /// The block size and large allocation threshold size are configurable.
 #[derive(Debug)]
 pub struct MemArena {
-    blocks: Vec<Vec<u8>>,
+    blocks: RefCell<Vec<Vec<u8>>>,
     block_size: usize,
     large_alloc_threshold: usize,
 }
@@ -30,7 +31,7 @@ impl MemArena {
     /// Create a new arena, with default block size and large allocation threshold
     pub fn new() -> MemArena {
         MemArena {
-            blocks: vec![Vec::with_capacity(DEFAULT_BLOCK_SIZE)],
+            blocks: RefCell::new(vec![Vec::with_capacity(DEFAULT_BLOCK_SIZE)]),
             block_size: DEFAULT_BLOCK_SIZE,
            large_alloc_threshold: DEFAULT_LARGE_ALLOCATION_THRESHOLD,
         }
@@ -40,7 +41,7 @@ impl MemArena {
         assert!(large_alloc_threshold <= block_size);
 
         MemArena {
-            blocks: vec![Vec::with_capacity(block_size)],
+            blocks: RefCell::new(vec![Vec::with_capacity(block_size)]),
             block_size: block_size,
             large_alloc_threshold: large_alloc_threshold,
         }
@@ -51,14 +52,16 @@ impl MemArena {
     ///
     /// CAUTION: this is unsafe because it does NOT ensure that all references to the data are
     /// gone, so this can potentially lead to dangling references.
-    pub unsafe fn free_all_and_reset(&mut self) {
-        self.blocks.clear();
-        self.blocks.shrink_to_fit();
-        self.blocks.push(Vec::with_capacity(self.block_size));
+    pub unsafe fn free_all_and_reset(&self) {
+        let mut blocks = self.blocks.borrow_mut();
+
+        blocks.clear();
+        blocks.shrink_to_fit();
+        blocks.push(Vec::with_capacity(self.block_size));
     }
 
     /// Allocates memory for and initializes a type T, returning a mutable reference to it.
-    pub fn alloc<'a, T: Copy>(&'a mut self, value: T) -> &'a mut T {
+    pub fn alloc<'a, T: Copy>(&'a self, value: T) -> &'a mut T {
         let mut memory = unsafe { self.alloc_uninitialized() };
         *memory = value;
         memory
@@ -67,15 +70,15 @@ impl MemArena {
     /// Allocates memory for a type `T`, returning a mutable reference to it.
     ///
     /// CAUTION: the memory returned is uninitialized. Make sure to initalize before using!
-    pub unsafe fn alloc_uninitialized<'a, T: Copy>(&'a mut self) -> &'a mut T {
-        let memory = self.alloc_space(size_of::<T>(), align_of::<T>()) as *mut T;
+    pub unsafe fn alloc_uninitialized<'a, T: Copy>(&'a self) -> &'a mut T {
+        let memory = self.alloc_raw(size_of::<T>(), align_of::<T>()) as *mut T;
 
         memory.as_mut().unwrap()
     }
 
     /// Allocates memory for `len` values of type `T`, returning a mutable slice to it.
     /// All elements are initialized to the given `value`.
-    pub fn alloc_array<'a, T: Copy>(&'a mut self, len: usize, value: T) -> &'a mut [T] {
+    pub fn alloc_array<'a, T: Copy>(&'a self, len: usize, value: T) -> &'a mut [T] {
         let memory = unsafe { self.alloc_array_uninitialized(len) };
 
         for v in memory.iter_mut() {
@@ -85,67 +88,80 @@ impl MemArena {
 
         memory
     }
 
+    /// Allocates and initializes memory to duplicate the given slice, returning a mutable slice
+    /// to the new copy.
+    pub fn copy_slice<'a, T: Copy>(&'a self, other: &[T]) -> &'a mut [T] {
+        let memory = unsafe { self.alloc_array_uninitialized(other.len()) };
+
+        for (v, other) in memory.iter_mut().zip(other.iter()) {
+            *v = *other;
+        }
+
+        memory
+    }
+
     /// Allocates memory for `len` values of type `T`, returning a mutable slice to it.
     /// All elements are initialized to the given `value`.
-    pub unsafe fn alloc_array_uninitialized<'a, T: Copy>(&'a mut self, len: usize) -> &'a mut [T] {
+    pub unsafe fn alloc_array_uninitialized<'a, T: Copy>(&'a self, len: usize) -> &'a mut [T] {
         let array_mem_size = {
             let alignment_padding = alignment_offset(size_of::<T>(), align_of::<T>());
             let aligned_type_size = size_of::<T>() + alignment_padding;
             aligned_type_size * len
         };
 
-        let memory = self.alloc_space(array_mem_size, align_of::<T>()) as *mut T;
+        let memory = self.alloc_raw(array_mem_size, align_of::<T>()) as *mut T;
 
         slice::from_raw_parts_mut(memory, len)
     }
 
     /// Allocates space with a given size and alignment.
     ///
-    /// CAUTION: this is unsafe because it returns uninitialized memory.
-    /// Make sure to initialize the memory after calling.
-    unsafe fn alloc_space<'a>(&'a mut self, size: usize, alignment: usize) -> *mut u8 {
+    /// CAUTION: this returns uninitialized memory. Make sure to initialize the
+    /// memory after calling.
+    unsafe fn alloc_raw(&self, size: usize, alignment: usize) -> *mut u8 {
         assert!(size > 0);
         assert!(alignment > 0);
 
+        let mut blocks = self.blocks.borrow_mut();
+
         // If the desired size is considered a "large allocation", give it its own memory block.
         if size > self.large_alloc_threshold {
-            self.blocks.push(Vec::with_capacity(size + alignment - 1));
-            self.blocks.last_mut().unwrap().set_len(size + alignment - 1);
+            blocks.push(Vec::with_capacity(size + alignment - 1));
+            blocks.last_mut().unwrap().set_len(size + alignment - 1);
 
-            let start_index = alignment_offset(self.blocks.last().unwrap().as_ptr() as usize,
-                                               alignment);
+            let start_index = alignment_offset(blocks.last().unwrap().as_ptr() as usize, alignment);
 
-            let block_ptr = self.blocks.last_mut().unwrap().as_mut_ptr();
+            let block_ptr = blocks.last_mut().unwrap().as_mut_ptr();
             return block_ptr.offset(start_index as isize);
         }
         // If the desired size is not a "large allocation", try to fit it into the current
         // block, and only create a new block if doesn't fit.
         else {
             let start_index = {
-                let block_addr = self.blocks.first().unwrap().as_ptr() as usize;
-                let block_filled = self.blocks.first().unwrap().len();
+                let block_addr = blocks.first().unwrap().as_ptr() as usize;
+                let block_filled = blocks.first().unwrap().len();
                 block_filled + alignment_offset(block_addr + block_filled, alignment)
             };
 
             // If it will fit in the current block, use the current block.
-            if (start_index + size) <= self.blocks.first().unwrap().capacity() {
-                self.blocks.first_mut().unwrap().set_len(start_index + size);
+            if (start_index + size) <= blocks.first().unwrap().capacity() {
+                blocks.first_mut().unwrap().set_len(start_index + size);
 
-                let block_ptr = self.blocks.first_mut().unwrap().as_mut_ptr();
+                let block_ptr = blocks.first_mut().unwrap().as_mut_ptr();
                 return block_ptr.offset(start_index as isize);
             }
             // If it won't fit in the current block, create a new block and use that.
             else {
-                self.blocks.push(Vec::with_capacity(self.block_size));
-                let block_count = self.blocks.len();
-                self.blocks.swap(0, block_count - 1);
+                blocks.push(Vec::with_capacity(self.block_size));
+                let block_count = blocks.len();
+                blocks.swap(0, block_count - 1);
 
-                let start_index = alignment_offset(self.blocks.first().unwrap().as_ptr() as usize,
-                                                   alignment);
+                let start_index = alignment_offset(blocks.first().unwrap().as_ptr() as usize,
+                                                   alignment);
 
-                self.blocks.first_mut().unwrap().set_len(start_index + size);
+                blocks.first_mut().unwrap().set_len(start_index + size);
 
-                let block_ptr = self.blocks.first_mut().unwrap().as_mut_ptr();
+                let block_ptr = blocks.first_mut().unwrap().as_mut_ptr();
                 return block_ptr.offset(start_index as isize);
             }
         }
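
With the methods now taking `&self`, references handed out by separate calls can be held at the same time. A rough usage sketch (variable names are illustrative; the methods are the ones defined in this file):

    let arena = MemArena::new();
    let x = arena.alloc(42u32);               // &mut u32
    let xs = arena.alloc_array(8, 0.0f32);    // &mut [f32] with 8 elements
    let ys = arena.copy_slice(&[1u8, 2, 3]);  // &mut [u8] copied into the arena
    *x += 1;
    xs[0] = 1.5;
    assert_eq!(ys[2], 3);                     // all three references are still alive here

The RefCell borrow is taken and released inside each allocation call, so consecutive allocations never overlap their borrows and the runtime borrow check does not panic.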