Perform a bounds check when allocating in the arena

This ensures we don't invoke undefined behavior when an allocation would overflow the arena's capacity.

Co-Authored-By: Antonio <antonio@zed.dev>
This commit is contained in:
Nathan Sobo 2023-12-20 08:20:00 -07:00
parent 8f9e813302
commit 12bb13b9fc

View file

@ -2,13 +2,13 @@ use std::{
alloc, alloc,
cell::Cell, cell::Cell,
ops::{Deref, DerefMut}, ops::{Deref, DerefMut},
ptr::{self, NonNull}, ptr,
rc::Rc, rc::Rc,
}; };
struct ArenaElement { struct ArenaElement {
value: NonNull<u8>, value: *mut u8,
drop: unsafe fn(NonNull<u8>), drop: unsafe fn(*mut u8),
} }
impl Drop for ArenaElement { impl Drop for ArenaElement {
@ -21,8 +21,9 @@ impl Drop for ArenaElement {
} }
pub struct Arena { pub struct Arena {
start: NonNull<u8>, start: *mut u8,
offset: usize, end: *mut u8,
offset: *mut u8,
elements: Vec<ArenaElement>, elements: Vec<ArenaElement>,
valid: Rc<Cell<bool>>, valid: Rc<Cell<bool>>,
} }
@ -31,10 +32,12 @@ impl Arena {
pub fn new(size_in_bytes: usize) -> Self { pub fn new(size_in_bytes: usize) -> Self {
unsafe { unsafe {
let layout = alloc::Layout::from_size_align(size_in_bytes, 1).unwrap(); let layout = alloc::Layout::from_size_align(size_in_bytes, 1).unwrap();
let ptr = alloc::alloc(layout); let start = alloc::alloc(layout);
let end = start.add(size_in_bytes);
Self { Self {
start: NonNull::new_unchecked(ptr), start,
offset: 0, end,
offset: start,
elements: Vec::new(), elements: Vec::new(),
valid: Rc::new(Cell::new(true)), valid: Rc::new(Cell::new(true)),
} }
@ -45,7 +48,7 @@ impl Arena {
self.valid.set(false); self.valid.set(false);
self.valid = Rc::new(Cell::new(true)); self.valid = Rc::new(Cell::new(true));
self.elements.clear(); self.elements.clear();
self.offset = 0; self.offset = self.start;
} }
#[inline(always)] #[inline(always)]
@ -58,24 +61,28 @@ impl Arena {
ptr::write(ptr, f()); ptr::write(ptr, f());
} }
unsafe fn drop<T>(ptr: NonNull<u8>) { unsafe fn drop<T>(ptr: *mut u8) {
std::ptr::drop_in_place(ptr.cast::<T>().as_ptr()); std::ptr::drop_in_place(ptr.cast::<T>());
} }
unsafe { unsafe {
let layout = alloc::Layout::new::<T>().pad_to_align(); let layout = alloc::Layout::new::<T>().pad_to_align();
let ptr = NonNull::new_unchecked(self.start.as_ptr().add(self.offset).cast::<T>()); let next_offset = self.offset.add(layout.size());
inner_writer(ptr.as_ptr(), f); assert!(next_offset <= self.end);
let result = ArenaRef {
ptr: self.offset.cast(),
valid: self.valid.clone(),
};
inner_writer(result.ptr, f);
self.elements.push(ArenaElement { self.elements.push(ArenaElement {
value: ptr.cast(), value: self.offset,
drop: drop::<T>, drop: drop::<T>,
}); });
self.offset += layout.size(); self.offset = next_offset;
ArenaRef {
ptr, result
valid: self.valid.clone(),
}
} }
} }
} }
@ -87,7 +94,7 @@ impl Drop for Arena {
} }
pub struct ArenaRef<T: ?Sized> { pub struct ArenaRef<T: ?Sized> {
ptr: NonNull<T>, ptr: *mut T,
valid: Rc<Cell<bool>>, valid: Rc<Cell<bool>>,
} }
@ -104,7 +111,7 @@ impl<T: ?Sized> ArenaRef<T> {
#[inline(always)] #[inline(always)]
pub fn map<U: ?Sized>(mut self, f: impl FnOnce(&mut T) -> &mut U) -> ArenaRef<U> { pub fn map<U: ?Sized>(mut self, f: impl FnOnce(&mut T) -> &mut U) -> ArenaRef<U> {
ArenaRef { ArenaRef {
ptr: unsafe { NonNull::new_unchecked(f(&mut *self)) }, ptr: f(&mut self),
valid: self.valid, valid: self.valid,
} }
} }
@ -123,7 +130,7 @@ impl<T: ?Sized> Deref for ArenaRef<T> {
#[inline(always)] #[inline(always)]
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
self.validate(); self.validate();
unsafe { self.ptr.as_ref() } unsafe { &*self.ptr }
} }
} }
@ -131,7 +138,7 @@ impl<T: ?Sized> DerefMut for ArenaRef<T> {
#[inline(always)] #[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target { fn deref_mut(&mut self) -> &mut Self::Target {
self.validate(); self.validate();
unsafe { self.ptr.as_mut() } unsafe { &mut *self.ptr }
} }
} }