Skip to content

Commit 9e963b9

Browse files
Add a non-type-specific arena.
It relies on the underlying types not having special Drop behavior.
1 parent ca0cc27 commit 9e963b9

File tree

1 file changed

+117
-0
lines changed

1 file changed

+117
-0
lines changed

src/libarena/lib.rs

+117
Original file line numberDiff line numberDiff line change
@@ -280,6 +280,123 @@ impl<T> Drop for TypedArena<T> {
280280

281281
unsafe impl<T: Send> Send for TypedArena<T> {}
282282

283+
pub struct DroplessArena {
284+
/// A pointer to the next object to be allocated.
285+
ptr: Cell<*mut u8>,
286+
287+
/// A pointer to the end of the allocated area. When this pointer is
288+
/// reached, a new chunk is allocated.
289+
end: Cell<*mut u8>,
290+
291+
/// A vector of arena chunks.
292+
chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
293+
}
294+
295+
impl DroplessArena {
296+
pub fn new() -> DroplessArena {
297+
DroplessArena {
298+
ptr: Cell::new(0 as *mut u8),
299+
end: Cell::new(0 as *mut u8),
300+
chunks: RefCell::new(vec![]),
301+
}
302+
}
303+
304+
fn align_for<T>(&self) {
305+
let align = mem::align_of::<T>();
306+
let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
307+
self.ptr.set(final_address as *mut u8);
308+
assert!(self.ptr <= self.end);
309+
}
310+
311+
#[inline(never)]
312+
#[cold]
313+
fn grow<T>(&self, n: usize) {
314+
let needed_bytes = n * mem::size_of::<T>();
315+
unsafe {
316+
let mut chunks = self.chunks.borrow_mut();
317+
let (chunk, mut new_capacity);
318+
if let Some(last_chunk) = chunks.last_mut() {
319+
let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
320+
if last_chunk.storage.reserve_in_place(used_bytes, needed_bytes) {
321+
self.end.set(last_chunk.end());
322+
return;
323+
} else {
324+
new_capacity = last_chunk.storage.cap();
325+
loop {
326+
new_capacity = new_capacity.checked_mul(2).unwrap();
327+
if new_capacity >= used_bytes + needed_bytes {
328+
break;
329+
}
330+
}
331+
}
332+
} else {
333+
new_capacity = needed_bytes;
334+
}
335+
chunk = TypedArenaChunk::<u8>::new(new_capacity);
336+
self.ptr.set(chunk.start());
337+
self.end.set(chunk.end());
338+
self.align_for::<T>();
339+
chunks.push(chunk);
340+
}
341+
}
342+
343+
#[inline]
344+
pub fn alloc<T>(&self, object: T) -> &mut T {
345+
unsafe {
346+
assert!(!intrinsics::needs_drop::<T>());
347+
assert!(mem::size_of::<T>() != 0);
348+
349+
self.align_for::<T>();
350+
let future_end = intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize);
351+
if (future_end as *mut u8) >= self.end.get() {
352+
self.grow::<T>(1)
353+
}
354+
355+
let ptr = self.ptr.get();
356+
// Set the pointer past ourselves
357+
self.ptr.set(intrinsics::arith_offset(
358+
self.ptr.get(), mem::size_of::<T>() as isize
359+
) as *mut u8);
360+
// Write into uninitialized memory.
361+
ptr::write(ptr as *mut T, object);
362+
&mut *(ptr as *mut T)
363+
}
364+
}
365+
366+
/// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
367+
/// reference to it. Will panic if passed a zero-sized type.
368+
///
369+
/// Panics:
370+
/// - Zero-sized types
371+
/// - Zero-length slices
372+
#[inline]
373+
pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
374+
where T: Copy {
375+
unsafe {
376+
assert!(!intrinsics::needs_drop::<T>());
377+
}
378+
assert!(mem::size_of::<T>() != 0);
379+
assert!(slice.len() != 0);
380+
self.align_for::<T>();
381+
382+
let future_end = unsafe {
383+
intrinsics::arith_offset(self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
384+
};
385+
if (future_end as *mut u8) >= self.end.get() {
386+
self.grow::<T>(slice.len());
387+
}
388+
389+
unsafe {
390+
let arena_slice = slice::from_raw_parts_mut(self.ptr.get() as *mut T, slice.len());
391+
self.ptr.set(intrinsics::arith_offset(
392+
self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize
393+
) as *mut u8);
394+
arena_slice.copy_from_slice(slice);
395+
arena_slice
396+
}
397+
}
398+
}
399+
283400
#[cfg(test)]
284401
mod tests {
285402
extern crate test;

0 commit comments

Comments
 (0)