
Consider regions that lead to very small back paddings as unsuitable #71

Merged 3 commits on Oct 10, 2022
2 changes: 1 addition & 1 deletion .github/workflows/build.yml
@@ -135,7 +135,7 @@ jobs:
name: "Miri tests"
runs-on: ubuntu-latest
env:
MIRIFLAGS: "-Zmiri-disable-isolation -Zmiri-strict-provenance -Zmiri-tag-raw-pointers -Zmiri-ignore-leaks"
MIRIFLAGS: "-Zmiri-disable-isolation -Zmiri-strict-provenance -Zmiri-tag-raw-pointers"
steps:
- uses: actions/checkout@v1
- run: rustup toolchain install nightly --profile minimal --component rust-src miri
77 changes: 26 additions & 51 deletions src/hole.rs
@@ -120,31 +120,31 @@ impl Cursor {
alloc_ptr = aligned_addr;
alloc_size = required_size;

// Okay, time to move onto the back padding. Here, we are opportunistic -
// if it fits, we sits. Otherwise we just skip adding the back padding, and
// sort of assume that the allocation is actually a bit larger than it
// actually needs to be.
//
// NOTE: Because we always use `HoleList::align_layout`, the size of
// the new allocation is always "rounded up" to cover any partial gaps that
// would have occurred. For this reason, we DON'T need to "round up"
// to account for an unaligned hole spot.
let hole_layout = Layout::new::<Hole>();
let back_padding_start = align_up(allocation_end, hole_layout.align());
let back_padding_end = back_padding_start.wrapping_add(hole_layout.size());

// Will the proposed new back padding actually fit in the old hole slot?
back_padding = if back_padding_end <= hole_end {
// Yes, it does! Place a back padding node
Some(HoleInfo {
addr: back_padding_start,
size: (hole_end as usize) - (back_padding_start as usize),
})
} else {
// No, it does not. We are now pretending the allocation now
// holds the extra 0..size_of::<Hole>() bytes that are not
// big enough to hold what SHOULD be back_padding
// Okay, time to move onto the back padding.
let back_padding_size = hole_end as usize - allocation_end as usize;
back_padding = if back_padding_size == 0 {
None
} else {
// NOTE: Because we always use `HoleList::align_layout`, the size of
// the new allocation is always "rounded up" to cover any partial gaps that
// would have occurred. For this reason, we DON'T need to "round up"
// to account for an unaligned hole spot.
let hole_layout = Layout::new::<Hole>();
let back_padding_start = align_up(allocation_end, hole_layout.align());
let back_padding_end = back_padding_start.wrapping_add(hole_layout.size());

// Will the proposed new back padding actually fit in the old hole slot?
if back_padding_end <= hole_end {
// Yes, it does! Place a back padding node
Some(HoleInfo {
addr: back_padding_start,
size: back_padding_size,
})
} else {
// No, it does not. We don't want to leak any heap bytes, so we
// consider this hole unsuitable for the requested allocation.
return Err(self);
}
};
}
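
To make the new behaviour concrete, here is a minimal, self-contained sketch of the decision this block now makes. It is not the crate's code: `HOLE_SIZE`, `HOLE_ALIGN`, the free-standing `align_up`, and the plain `usize` addresses are stand-ins for illustration. The leftover space behind an allocation either becomes a new hole node or, if it is too small to hold a `Hole`, causes the whole region to be rejected instead of silently absorbing the bytes:

// Sketch only: stand-in constants for size_of::<Hole>() / align_of::<Hole>()
// on a typical 64-bit target.
const HOLE_SIZE: usize = 16;
const HOLE_ALIGN: usize = 8;

fn align_up(addr: usize, align: usize) -> usize {
    (addr + align - 1) & !(align - 1)
}

/// Ok(None): the allocation ends exactly at the hole end, no back padding.
/// Ok(Some(start)): the leftover bytes can hold a Hole node starting at `start`.
/// Err(()): there are leftover bytes, but too few for a Hole node, so the
/// hole is considered unsuitable for this allocation (the case this PR adds).
fn back_padding(allocation_end: usize, hole_end: usize) -> Result<Option<usize>, ()> {
    let back_padding_size = hole_end - allocation_end;
    if back_padding_size == 0 {
        return Ok(None);
    }
    let start = align_up(allocation_end, HOLE_ALIGN);
    if start + HOLE_SIZE <= hole_end {
        Ok(Some(start))
    } else {
        Err(())
    }
}

fn main() {
    // Hole ends at 0x1040. An allocation ending at 0x1020 leaves 32 bytes,
    // enough for a 16-byte Hole node placed at 0x1020.
    assert_eq!(back_padding(0x1020, 0x1040), Ok(Some(0x1020)));
    // An allocation ending at 0x1038 leaves only 8 bytes: too small, rejected.
    assert_eq!(back_padding(0x1038, 0x1040), Err(()));
    // An allocation ending exactly at 0x1040 needs no back padding at all.
    assert_eq!(back_padding(0x1040, 0x1040), Ok(None));
}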

@@ -697,34 +697,9 @@ fn deallocate(list: &mut HoleList, addr: *mut u8, size: usize) {
#[cfg(test)]
pub mod test {
use super::HoleList;
use crate::{align_down_size, Heap};
use crate::{align_down_size, test::new_heap};
use core::mem::size_of;
use std::{alloc::Layout, convert::TryInto, mem::MaybeUninit, prelude::v1::*, ptr::NonNull};

#[repr(align(128))]
struct Chonk<const N: usize> {
data: [MaybeUninit<u8>; N],
}

impl<const N: usize> Chonk<N> {
pub fn new() -> Self {
Self {
data: [MaybeUninit::uninit(); N],
}
}
}

fn new_heap() -> Heap {
const HEAP_SIZE: usize = 1000;
let heap_space = Box::leak(Box::new(Chonk::<HEAP_SIZE>::new()));
let data = &mut heap_space.data;
let assumed_location = data.as_mut_ptr().cast();

let heap = Heap::from_slice(data);
assert_eq!(heap.bottom(), assumed_location);
assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));
heap
}
use std::{alloc::Layout, convert::TryInto, prelude::v1::*, ptr::NonNull};

#[test]
fn cursor() {
82 changes: 63 additions & 19 deletions src/test.rs
@@ -1,7 +1,12 @@
use super::*;
use core::alloc::Layout;
use std::mem::{align_of, size_of, MaybeUninit};
use std::prelude::v1::*;
use core::{
alloc::Layout,
ops::{Deref, DerefMut},
};
use std::{
mem::{align_of, size_of, MaybeUninit},
prelude::v1::*,
};

#[repr(align(128))]
struct Chonk<const N: usize> {
@@ -16,30 +21,69 @@ impl<const N: usize> Chonk<N> {
}
}

fn new_heap() -> Heap {
pub struct OwnedHeap<F> {
heap: Heap,
_drop: F,
}

impl<F> Deref for OwnedHeap<F> {
type Target = Heap;

fn deref(&self) -> &Self::Target {
&self.heap
}
}

impl<F> DerefMut for OwnedHeap<F> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.heap
}
}

pub fn new_heap() -> OwnedHeap<impl Sized> {
const HEAP_SIZE: usize = 1000;
let heap_space = Box::leak(Box::new(Chonk::<HEAP_SIZE>::new()));
let mut heap_space = Box::new(Chonk::<HEAP_SIZE>::new());
let data = &mut heap_space.data;
let assumed_location = data.as_mut_ptr().cast();

let heap = Heap::from_slice(data);
let heap = unsafe { Heap::new(data.as_mut_ptr().cast(), data.len()) };
assert_eq!(heap.bottom(), assumed_location);
assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));
heap

let drop = move || {
let _ = heap_space;
};
OwnedHeap { heap, _drop: drop }
}

fn new_max_heap() -> Heap {
fn new_max_heap() -> OwnedHeap<impl Sized> {
const HEAP_SIZE: usize = 1024;
const HEAP_SIZE_MAX: usize = 2048;
let heap_space = Box::leak(Box::new(Chonk::<HEAP_SIZE_MAX>::new()));
let mut heap_space = Box::new(Chonk::<HEAP_SIZE_MAX>::new());
let data = &mut heap_space.data;
let start_ptr = data.as_mut_ptr().cast();

// Unsafe so that we have provenance over the whole allocation.
let heap = unsafe { Heap::new(start_ptr, HEAP_SIZE) };
assert_eq!(heap.bottom(), start_ptr);
assert_eq!(heap.size(), HEAP_SIZE);
heap

let drop = move || {
let _ = heap_space;
};
OwnedHeap { heap, _drop: drop }
}

fn new_heap_skip(ct: usize) -> OwnedHeap<impl Sized> {
const HEAP_SIZE: usize = 1000;
let mut heap_space = Box::new(Chonk::<HEAP_SIZE>::new());
let data = &mut heap_space.data[ct..];
let heap = unsafe { Heap::new(data.as_mut_ptr().cast(), data.len()) };

let drop = move || {
let _ = heap_space;
};
OwnedHeap { heap, _drop: drop }
}
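
As a usage sketch of the new helper (this test is made up for illustration, not part of the PR, and it assumes the imports already present in `src/test.rs`): tests keep calling `new_heap()` and use the result like a `Heap` through `Deref`/`DerefMut`, while the backing `Chonk` is now dropped together with the `OwnedHeap` instead of being leaked via `Box::leak`, which is why `-Zmiri-ignore-leaks` could be removed from the Miri flags above.

#[test]
fn allocate_through_owned_heap() {
    // Hypothetical test: `new_heap()` now returns an `OwnedHeap` that owns
    // its backing memory rather than a `Heap` built on leaked storage.
    let mut heap = new_heap();

    // `DerefMut` lets the wrapper be used exactly like a `Heap`.
    let layout = Layout::from_size_align(size_of::<usize>() * 4, align_of::<usize>()).unwrap();
    let ptr = heap.allocate_first_fit(layout).unwrap();
    unsafe { heap.deallocate(ptr, layout) };

    // Dropping `heap` also drops the closure that captured `heap_space`, so
    // the backing allocation is reclaimed and Miri no longer reports a leak.
}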

#[test]
Expand All @@ -51,7 +95,15 @@ fn empty() {

#[test]
fn oom() {
let mut heap = new_heap();
const HEAP_SIZE: usize = 1000;
let mut heap_space = Box::new(Chonk::<HEAP_SIZE>::new());
let data = &mut heap_space.data;
let assumed_location = data.as_mut_ptr().cast();

let mut heap = unsafe { Heap::new(data.as_mut_ptr().cast(), data.len()) };
assert_eq!(heap.bottom(), assumed_location);
assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));

let layout = Layout::from_size_align(heap.size() + 1, align_of::<usize>());
let addr = heap.allocate_first_fit(layout.unwrap());
assert!(addr.is_err());
@@ -388,14 +440,6 @@ fn allocate_multiple_unaligned() {
}
}

fn new_heap_skip(ct: usize) -> Heap {
const HEAP_SIZE: usize = 1000;
let heap_space = Box::leak(Box::new(Chonk::<HEAP_SIZE>::new()));
let data = &mut heap_space.data[ct..];
let heap = Heap::from_slice(data);
heap
}

#[test]
fn allocate_usize() {
let mut heap = new_heap();