
Commit 63df552

Add logic to merge to start/end of alloc region

1 parent cf6a569 · commit 63df552

3 files changed: +272 −81 lines changed

src/hole.rs

+124 −46
@@ -1,7 +1,7 @@
 use core::alloc::Layout;
-use core::convert::TryInto;
 use core::mem;
 use core::mem::{align_of, size_of};
+use core::ptr::null_mut;
 use core::ptr::NonNull;
 
 use crate::align_up_size;
@@ -11,11 +11,27 @@ use super::align_up;
 /// A sorted list of holes. It uses the the holes itself to store its nodes.
 pub struct HoleList {
     pub(crate) first: Hole, // dummy
+    pub(crate) bottom: *mut u8,
+    pub(crate) top: *mut u8,
 }
 
 pub(crate) struct Cursor {
     prev: NonNull<Hole>,
     hole: NonNull<Hole>,
+    top: *mut u8,
+}
+
+/// A block containing free memory. It points to the next hole and thus forms a linked list.
+pub(crate) struct Hole {
+    pub size: usize,
+    pub next: Option<NonNull<Hole>>,
+}
+
+/// Basic information about a hole.
+#[derive(Debug, Clone, Copy)]
+struct HoleInfo {
+    addr: *mut u8,
+    size: usize,
 }
 
 impl Cursor {
@@ -24,6 +40,7 @@ impl Cursor {
         self.hole.as_mut().next.map(|nhole| Cursor {
             prev: self.hole,
             hole: nhole,
+            top: self.top,
         })
     }
 }
@@ -133,7 +150,9 @@ impl Cursor {
         ////////////////////////////////////////////////////////////////////////////
         // This is where we actually perform surgery on the linked list.
         ////////////////////////////////////////////////////////////////////////////
-        let Cursor { mut prev, mut hole } = self;
+        let Cursor {
+            mut prev, mut hole, ..
+        } = self;
         // Remove the current location from the previous node
         unsafe {
             prev.as_mut().next = None;
@@ -200,6 +219,42 @@ impl Cursor {
     }
 }
 
+// See if we can extend this hole towards the end of the allocation region
+// If so: increase the size of the node. If no: keep the node as-is
+fn check_merge_top(mut node: NonNull<Hole>, top: *mut u8) {
+    let node_u8 = node.as_ptr().cast::<u8>();
+    let node_sz = unsafe { node.as_ref().size };
+
+    // If this is the last node, we need to see if we need to merge to the end
+    // TODO(AJM): We MIGHT need this for merging ALL holes.
+    let end = node_u8.wrapping_add(node_sz);
+    let hole_layout = Layout::new::<Hole>();
+    if end < top {
+        let next_hole_end = align_up(end, hole_layout.align()).wrapping_add(hole_layout.size());
+
+        if next_hole_end > top {
+            let offset = (top as usize) - (end as usize);
+            unsafe {
+                node.as_mut().size += offset;
+            }
+        }
+    }
+}
+
+// See if we can scoot this hole back to the bottom of the allocation region
+// If so: create and return the new hole. If not: return the existing hole
+fn check_merge_bottom(node: NonNull<Hole>, bottom: *mut u8) -> NonNull<Hole> {
+    debug_assert_eq!(bottom as usize % align_of::<Hole>(), 0);
+
+    if bottom.wrapping_add(core::mem::size_of::<Hole>()) > node.as_ptr().cast::<u8>() {
+        let offset = (node.as_ptr() as usize) - (bottom as usize);
+        let size = unsafe { node.as_ref() }.size + offset;
+        unsafe { make_hole(bottom, size) }
+    } else {
+        node
+    }
+}
+
 impl HoleList {
     /// Creates an empty `HoleList`.
     #[cfg(not(feature = "const_mut_refs"))]
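
For intuition, here is a minimal standalone sketch of the decision check_merge_top makes, written with plain usize addresses instead of the crate's raw pointers. The stand-in Hole type, the helper name merged_size_towards_top, and the numbers are illustrative assumptions, not part of this commit:

use core::alloc::Layout;

// Stand-in for the crate's `Hole` header; only its size and alignment matter here.
// (The real header stores Option<NonNull<Hole>>, so its exact layout may differ.)
#[allow(dead_code)]
struct Hole {
    size: usize,
    next: Option<usize>,
}

fn align_up(addr: usize, align: usize) -> usize {
    (addr + align - 1) & !(align - 1)
}

/// New size of a hole spanning [hole_addr, hole_addr + size) in a region ending at `top`:
/// if the tail gap [end, top) could never hold another `Hole` header, the hole absorbs
/// the gap; otherwise its size is left alone (mirrors the check in `check_merge_top`).
fn merged_size_towards_top(hole_addr: usize, size: usize, top: usize) -> usize {
    let end = hole_addr + size;
    let hole_layout = Layout::new::<Hole>();
    if end < top {
        // Smallest end address a follow-up hole placed after `end` would need.
        let next_hole_end = align_up(end, hole_layout.align()) + hole_layout.size();
        if next_hole_end > top {
            // The gap can never become its own hole, so fold it into this one now.
            return size + (top - end);
        }
    }
    size
}

fn main() {
    // Region [0, 64): a 16-byte tail still has room for a `Hole` header, so no merge...
    assert_eq!(merged_size_towards_top(0, 48, 64), 48);
    // ...but an 8-byte tail does not, so the hole grows to cover the whole region.
    assert_eq!(merged_size_towards_top(0, 56, 64), 64);
}
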
@@ -209,6 +264,8 @@ impl HoleList {
                 size: 0,
                 next: None,
             },
+            bottom: null_mut(),
+            top: null_mut(),
         }
     }
 
@@ -220,6 +277,8 @@ impl HoleList {
                 size: 0,
                 next: None,
             },
+            bottom: null_mut(),
+            top: null_mut(),
         }
     }
 
@@ -228,6 +287,7 @@ impl HoleList {
            Some(Cursor {
                hole,
                prev: NonNull::new(&mut self.first)?,
+                top: self.top,
            })
        } else {
            None
@@ -274,8 +334,7 @@ impl HoleList {
        let aligned_hole_addr = align_up(hole_addr, align_of::<Hole>());
        let ptr = aligned_hole_addr as *mut Hole;
        ptr.write(Hole {
-            size: hole_size
-                .saturating_sub(aligned_hole_addr.offset_from(hole_addr).try_into().unwrap()),
+            size: hole_size - ((aligned_hole_addr as usize) - (hole_addr as usize)),
            next: None,
        });
 
@@ -284,6 +343,8 @@ impl HoleList {
                size: 0,
                next: Some(NonNull::new_unchecked(ptr)),
            },
+            bottom: aligned_hole_addr,
+            top: hole_addr.wrapping_add(hole_size),
        }
    }
 
@@ -370,19 +431,6 @@ impl HoleList {
    }
}
 
-/// A block containing free memory. It points to the next hole and thus forms a linked list.
-pub(crate) struct Hole {
-    pub size: usize,
-    pub next: Option<NonNull<Hole>>,
-}
-
-/// Basic information about a hole.
-#[derive(Debug, Clone, Copy)]
-struct HoleInfo {
-    addr: *mut u8,
-    size: usize,
-}
-
 unsafe fn make_hole(addr: *mut u8, size: usize) -> NonNull<Hole> {
     let hole_addr = addr.cast::<Hole>();
     debug_assert_eq!(
@@ -395,7 +443,7 @@ unsafe fn make_hole(addr: *mut u8, size: usize) -> NonNull<Hole> {
 }
 
 impl Cursor {
-    fn try_insert_back(self, mut node: NonNull<Hole>) -> Result<Self, Self> {
+    fn try_insert_back(self, node: NonNull<Hole>, bottom: *mut u8) -> Result<Self, Self> {
         // Covers the case where the new hole exists BEFORE the current pointer,
         // which only happens when previous is the stub pointer
         if node < self.hole {
@@ -409,59 +457,86 @@ impl Cursor {
             );
             debug_assert_eq!(self.previous().size, 0);
 
-            let Cursor { mut prev, hole } = self;
+            let Cursor {
+                mut prev,
+                hole,
+                top,
+            } = self;
             unsafe {
+                let mut node = check_merge_bottom(node, bottom);
                 prev.as_mut().next = Some(node);
                 node.as_mut().next = Some(hole);
             }
-            Ok(Cursor { prev, hole: node })
+            Ok(Cursor {
+                prev,
+                hole: node,
+                top,
+            })
         } else {
             Err(self)
         }
     }
 
     fn try_insert_after(&mut self, mut node: NonNull<Hole>) -> Result<(), ()> {
-        if self.hole < node {
-            let node_u8 = node.as_ptr().cast::<u8>();
-            let node_size = unsafe { node.as_ref().size };
-            let hole_u8 = self.hole.as_ptr().cast::<u8>();
-            let hole_size = self.current().size;
+        let node_u8 = node.as_ptr().cast::<u8>();
+        let node_size = unsafe { node.as_ref().size };
 
-            // Does hole overlap node?
-            assert!(
-                hole_u8.wrapping_add(hole_size) <= node_u8,
-                "Freed node aliases existing hole! Bad free?",
-            );
-
-            // If we have a next, does the node overlap next?
-            if let Some(next) = self.current().next.as_ref() {
+        // If we have a next, does the node overlap next?
+        if let Some(next) = self.current().next.as_ref() {
+            if node < *next {
                 let node_u8 = node_u8 as *const u8;
                 assert!(
                     node_u8.wrapping_add(node_size) <= next.as_ptr().cast::<u8>(),
                     "Freed node aliases existing hole! Bad free?",
                 );
+            } else {
+                // The new hole isn't between current and next.
+                return Err(());
             }
+        }
 
-            // All good! Let's insert that after.
-            unsafe {
-                let maybe_next = self.hole.as_mut().next.replace(node);
-                node.as_mut().next = maybe_next;
-            }
-            Ok(())
-        } else {
-            Err(())
+        // At this point, we either have no "next" pointer, or the hole is
+        // between current and "next". The following assert can only trigger
+        // if we've gotten our list out of order.
+        debug_assert!(self.hole < node, "Hole list out of order?");
+
+        let hole_u8 = self.hole.as_ptr().cast::<u8>();
+        let hole_size = self.current().size;
+
+        // Does hole overlap node?
+        assert!(
+            hole_u8.wrapping_add(hole_size) <= node_u8,
+            "Freed node aliases existing hole! Bad free?",
+        );
+
+        // All good! Let's insert that after.
+        unsafe {
+            let maybe_next = self.hole.as_mut().next.replace(node);
+            node.as_mut().next = maybe_next;
         }
+
+        Ok(())
     }
 
     // Merge the current node with up to n following nodes
     fn try_merge_next_n(self, max: usize) {
-        let Cursor { prev: _, mut hole } = self;
+        let Cursor {
+            prev: _,
+            mut hole,
+            top,
+            ..
+        } = self;
 
         for _ in 0..max {
             // Is there a next node?
             let mut next = if let Some(next) = unsafe { hole.as_mut() }.next.as_ref() {
                 *next
             } else {
+                // Since there is no NEXT node, we need to check whether the current
+                // hole SHOULD extend to the end, but doesn't. This would happen when
+                // there isn't enough remaining space to place a hole after the current
+                // node's placement.
+                check_merge_top(hole, top);
                 return;
            };
 
@@ -515,7 +590,10 @@ fn deallocate(list: &mut HoleList, addr: *mut u8, size: usize) {
        cursor
    } else {
        // Oh hey, there are no "real" holes at all. That means this just
-        // becomes the only "real" hole!
+        // becomes the only "real" hole! Check if this is touching the end
+        // or the beginning of the allocation range
+        let hole = check_merge_bottom(hole, list.bottom);
+        check_merge_top(hole, list.top);
        list.first.next = Some(hole);
        return;
    };
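
The bottom end of the region works the same way: in the empty-list path above, check_merge_bottom moves a freed node down to list.bottom whenever the gap below it could never hold another Hole header. Here is a minimal sketch of that decision with plain usize addresses; the stand-in Hole type, the helper name merged_towards_bottom, and the numbers are illustrative assumptions, not the crate's code:

use core::mem::{align_of, size_of};

// Stand-in for the crate's `Hole` header; only its size and alignment matter here.
#[allow(dead_code)]
struct Hole {
    size: usize,
    next: Option<usize>,
}

/// Where a freed hole at `node_addr` with `size` bytes should live, given that the
/// allocation region starts at `bottom` (mirrors `check_merge_bottom`): if the gap
/// [bottom, node_addr) is too small to ever hold another `Hole` header, relocate the
/// hole down to `bottom` and widen it by the gap; otherwise leave it where it is.
fn merged_towards_bottom(node_addr: usize, size: usize, bottom: usize) -> (usize, usize) {
    debug_assert_eq!(bottom % align_of::<Hole>(), 0);
    if bottom + size_of::<Hole>() > node_addr {
        let offset = node_addr - bottom;
        (bottom, size + offset)
    } else {
        (node_addr, size)
    }
}

fn main() {
    // A hole only 8 bytes above `bottom`: nothing could ever be tracked in that gap,
    // so the hole scoots down to `bottom` and grows by those 8 bytes.
    assert_eq!(merged_towards_bottom(1032, 56, 1024), (1024, 64));
    // A hole 64 bytes above `bottom`: the gap could still become its own hole later,
    // so the node stays where it is.
    assert_eq!(merged_towards_bottom(1088, 56, 1024), (1088, 56));
}
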
@@ -525,7 +603,7 @@ fn deallocate(list: &mut HoleList, addr: *mut u8, size: usize) {
     // previous location the cursor was pointing to.
     //
     // Otherwise, our cursor will point at the current non-"dummy" head of the list
-    let (cursor, n) = match cursor.try_insert_back(hole) {
+    let (cursor, n) = match cursor.try_insert_back(hole, list.bottom) {
         Ok(cursor) => {
             // Yup! It lives at the front of the list. Hooray! Attempt to merge
             // it with just ONE next node, since it is at the front of the list
@@ -578,8 +656,8 @@ pub mod test {
         let assumed_location = data.as_mut_ptr().cast();
 
         let heap = Heap::from_slice(data);
-        assert!(heap.bottom == assumed_location);
-        assert!(heap.size == HEAP_SIZE);
+        assert!(heap.bottom() == assumed_location);
+        assert!(heap.size() == HEAP_SIZE);
         heap
     }
 