more dynamic allocation work #14322

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits on May 22, 2014

26 changes: 13 additions & 13 deletions src/liballoc/heap.rs
@@ -9,7 +9,7 @@
// except according to those terms.

// FIXME: #13994: port to the sized deallocation API when available
// FIXME: #13996: need a way to mark the `allocate` and `reallocate` return values as `noalias`
// FIXME: #13996: mark the `allocate` and `reallocate` return value as `noalias` and `nonnull`

use core::intrinsics::{abort, cttz32};
use core::option::{None, Option};
@@ -119,14 +119,8 @@ pub fn stats_print() {
/// The allocator for unique pointers.
#[cfg(not(test))]
#[lang="exchange_malloc"]
#[inline(always)]
pub unsafe fn exchange_malloc_(size: uint, align: uint) -> *mut u8 {
exchange_malloc(size, align)
}

/// The allocator for unique pointers.
#[inline]
pub unsafe fn exchange_malloc(size: uint, align: uint) -> *mut u8 {
unsafe fn exchange_malloc(size: uint, align: uint) -> *mut u8 {
// The compiler never calls `exchange_free` on ~ZeroSizeType, so zero-size
// allocations can point to this `static`. It would be incorrect to use a null
// pointer, due to enums assuming types like unique pointers are never null.
@@ -139,14 +133,20 @@ pub unsafe fn exchange_malloc(size: uint, align: uint) -> *mut u8 {
}
}
Member:

I look forward to the day we can remove this entirely.
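For context, a minimal standalone sketch of the zero-size trick this hunk preserves, written in today's Rust syntax (the function name and body are illustrative, not the liballoc code): a zero-size request returns a non-null, suitably aligned dummy pointer and is never freed, because enum layout optimizations assume a Box-like pointer is never null.

use std::alloc::{alloc, Layout};

// Illustrative only: zero-size requests get a non-null dummy pointer (the
// alignment itself works) and nothing is ever freed for them, matching the
// "`exchange_free` is never called on ~ZeroSizeType" contract above.
unsafe fn exchange_malloc_sketch(size: usize, align: usize) -> *mut u8 {
    if size == 0 {
        align as *mut u8
    } else {
        alloc(Layout::from_size_align(size, align).unwrap())
    }
}

fn main() {
    let p = unsafe { exchange_malloc_sketch(0, 8) };
    assert!(!p.is_null());
}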


#[cfg(not(test))]
#[cfg(not(test), stage0)]
#[lang="exchange_free"]
#[inline]
// FIXME: #13994 (rustc should pass align and size here)
unsafe fn exchange_free(ptr: *mut u8) {
deallocate(ptr, 0, 8);
}

#[cfg(not(test), not(stage0))]
#[lang="exchange_free"]
#[inline]
unsafe fn exchange_free(ptr: *mut u8, size: uint, align: uint) {
deallocate(ptr, size, align);
}
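The point of threading `size` and `align` through `exchange_free` is that the allocator can be handed the exact layout it allocated with (jemalloc, for example, exposes a sized free as `sdallocx`) instead of having to look the size up itself. A hedged sketch in today's Rust, where `std::alloc::dealloc` has exactly this sized shape; the function name is made up for illustration:

use std::alloc::{alloc, dealloc, Layout};

// Sized deallocation: the caller supplies the size and alignment it
// allocated with, so the allocator can skip its own size lookup.
unsafe fn exchange_free_sketch(ptr: *mut u8, size: usize, align: usize) {
    dealloc(ptr, Layout::from_size_align(size, align).unwrap());
}

fn main() {
    unsafe {
        let layout = Layout::from_size_align(64, 8).unwrap();
        let p = alloc(layout);
        exchange_free_sketch(p, 64, 8);
    }
}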

// FIXME: #7496
#[cfg(not(test))]
#[lang="closure_exchange_malloc"]
@@ -167,16 +167,16 @@ unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint, align: uin
#[doc(hidden)]
#[deprecated]
#[cfg(not(test))]
pub unsafe extern "C" fn rust_malloc(size: uint, align: uint) -> *mut u8 {
exchange_malloc(size, align)
pub unsafe extern "C" fn rust_allocate(size: uint, align: uint) -> *mut u8 {
allocate(size, align)
}

// hack for libcore
#[no_mangle]
#[doc(hidden)]
#[deprecated]
#[cfg(not(test))]
pub unsafe extern "C" fn rust_free(ptr: *mut u8, size: uint, align: uint) {
pub unsafe extern "C" fn rust_deallocate(ptr: *mut u8, size: uint, align: uint) {
deallocate(ptr, size, align)
}

5 changes: 2 additions & 3 deletions src/libarena/lib.rs
@@ -38,7 +38,7 @@ use std::mem;
use std::num;
use std::ptr::read;
use std::rc::Rc;
use std::rt::heap::exchange_malloc;
use std::rt::heap::allocate;

// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
@@ -358,8 +358,7 @@ impl<T> TypedArenaChunk<T> {
size = size.checked_add(&elems_size).unwrap();

let mut chunk = unsafe {
let chunk = exchange_malloc(size,
mem::min_align_of::<TypedArenaChunk<T>>());
let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>());
let mut chunk: Box<TypedArenaChunk<T>> = mem::transmute(chunk);
mem::overwrite(&mut chunk.next, next);
chunk
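The arena path switches from `exchange_malloc` to plain `allocate` because the chunk is managed manually rather than through box drop glue. A rough sketch of the allocation pattern, with a hypothetical `ChunkHeader` standing in for `TypedArenaChunk<T>`:

use std::alloc::{alloc, Layout};
use std::mem;

// Hypothetical header; the real code uses TypedArenaChunk<T>.
struct ChunkHeader {
    next: Option<*mut ChunkHeader>,
    capacity: usize,
}

// One raw allocation holds the header followed by `capacity` elements;
// checked arithmetic guards the size computation against overflow.
unsafe fn alloc_chunk<T>(capacity: usize) -> *mut ChunkHeader {
    let elems = mem::size_of::<T>().checked_mul(capacity).unwrap();
    let size = mem::size_of::<ChunkHeader>().checked_add(elems).unwrap();
    let align = mem::align_of::<ChunkHeader>().max(mem::align_of::<T>());
    alloc(Layout::from_size_align(size, align).unwrap()) as *mut ChunkHeader
}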
10 changes: 5 additions & 5 deletions src/libcore/should_not_exist.rs
@@ -44,14 +44,14 @@ use str::StrSlice;

#[allow(ctypes)]
extern {
fn rust_malloc(size: uint, align: uint) -> *u8;
fn rust_free(ptr: *u8, size: uint, align: uint);
fn rust_allocate(size: uint, align: uint) -> *u8;
fn rust_deallocate(ptr: *u8, size: uint, align: uint);
}

unsafe fn alloc(cap: uint) -> *mut Vec<()> {
let cap = cap.checked_add(&mem::size_of::<Vec<()>>()).unwrap();
// this should use the real alignment, but the new representation will take care of that
let ret = rust_malloc(cap, 8) as *mut Vec<()>;
let ret = rust_allocate(cap, 8) as *mut Vec<()>;
if ret.is_null() {
intrinsics::abort();
}
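libcore cannot depend on liballoc, so it only declares these symbols and liballoc defines them with `#[no_mangle]`; the two meet at link time by symbol name. A hedged sketch of that shim pattern in modern syntax, with a made-up symbol name (`demo_allocate`):

// "liballoc side": defines the symbol.
mod alloc_side {
    use std::alloc::{alloc, Layout};

    #[no_mangle]
    pub unsafe extern "C" fn demo_allocate(size: usize, align: usize) -> *mut u8 {
        alloc(Layout::from_size_align(size, align).unwrap())
    }
}

// "libcore side": only declares it; no crate dependency needed.
mod core_side {
    extern "C" {
        pub fn demo_allocate(size: usize, align: usize) -> *mut u8;
    }

    pub unsafe fn alloc_bytes(n: usize) -> *mut u8 {
        demo_allocate(n, 8)
    }
}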
@@ -119,7 +119,7 @@ impl FromIterator<char> for ~str {
&(*ptr).data,
len);
// FIXME: #13994: port to the sized deallocation API when available
rust_free(ptr as *u8, 0, 8);
rust_deallocate(ptr as *u8, 0, 8);
mem::forget(ret);
ret = mem::transmute(ptr2);
ptr = ptr2;
@@ -191,7 +191,7 @@ impl<A: Clone> Clone for ~[A] {
for j in range(0, *i as int) {
ptr::read(&*p.offset(j));
}
rust_free(ret as *u8, 0, 8);
rust_deallocate(ret as *u8, 0, 8);
});
mem::transmute(ret)
}
11 changes: 7 additions & 4 deletions src/librustc/middle/trans/cleanup.rs
@@ -278,13 +278,14 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
fn schedule_free_value(&self,
cleanup_scope: ScopeId,
val: ValueRef,
heap: Heap) {
heap: Heap,
content_ty: ty::t) {
/*!
* Schedules a call to `free(val)`. Note that this is a shallow
* operation.
*/

let drop = box FreeValue { ptr: val, heap: heap };
let drop = box FreeValue { ptr: val, heap: heap, content_ty: content_ty };

debug!("schedule_free_value({:?}, val={}, heap={:?})",
cleanup_scope,
@@ -847,6 +848,7 @@ pub enum Heap {
pub struct FreeValue {
ptr: ValueRef,
heap: Heap,
content_ty: ty::t
}

impl Cleanup for FreeValue {
@@ -860,7 +862,7 @@ impl Cleanup for FreeValue {
glue::trans_free(bcx, self.ptr)
}
HeapExchange => {
glue::trans_exchange_free(bcx, self.ptr)
glue::trans_exchange_free_ty(bcx, self.ptr, self.content_ty)
}
}
}
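A toy model (not rustc internals) of why `FreeValue` now carries `content_ty`: the scheduled cleanup has to know what it is freeing so that, when it finally runs, it can hand the allocator the real size and alignment instead of the old `(0, 8)` placeholder.

use std::alloc::{dealloc, Layout};

// Hypothetical stand-in for the FreeValue cleanup.
struct FreeValueSketch {
    ptr: *mut u8,
    size: usize,
    align: usize,
}

impl FreeValueSketch {
    unsafe fn run(self) {
        // Zero-size contents were never really allocated, so never freed.
        if self.size != 0 {
            dealloc(self.ptr, Layout::from_size_align(self.size, self.align).unwrap());
        }
    }
}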
@@ -931,7 +933,8 @@ pub trait CleanupMethods<'a> {
fn schedule_free_value(&self,
cleanup_scope: ScopeId,
val: ValueRef,
heap: Heap);
heap: Heap,
content_ty: ty::t);
fn schedule_clean(&self,
cleanup_scope: ScopeId,
cleanup: Box<Cleanup>);
9 changes: 5 additions & 4 deletions src/librustc/middle/trans/expr.rs
@@ -1183,7 +1183,7 @@ fn trans_uniq_expr<'a>(bcx: &'a Block<'a>,
} else {
let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
fcx.schedule_free_value(cleanup::CustomScope(custom_cleanup_scope),
val, cleanup::HeapExchange);
val, cleanup::HeapExchange, contents_ty);
let bcx = trans_into(bcx, contents, SaveIn(val));
fcx.pop_custom_cleanup_scope(custom_cleanup_scope);
bcx
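The push/schedule/pop dance above exists so that a panic while evaluating the box contents still frees the freshly allocated memory, and the content type is now attached so that free can be sized. Roughly the same idea in plain Rust, with an illustrative guard type (assumes `T` is not zero-sized):

use std::alloc::{alloc, dealloc, Layout};

// Frees the allocation if it is still "armed" when dropped, i.e. if
// initialization panicked before we disarmed it.
struct FreeOnUnwind {
    ptr: *mut u8,
    layout: Layout,
    armed: bool,
}

impl Drop for FreeOnUnwind {
    fn drop(&mut self) {
        if self.armed {
            unsafe { dealloc(self.ptr, self.layout) }
        }
    }
}

unsafe fn box_new_sketch<T>(init: impl FnOnce() -> T) -> *mut T {
    let layout = Layout::new::<T>();
    let ptr = alloc(layout) as *mut T;
    let mut guard = FreeOnUnwind { ptr: ptr as *mut u8, layout, armed: true };
    ptr.write(init()); // may panic; the guard then frees the allocation
    guard.armed = false; // success: ownership passes to the caller
    ptr
}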
@@ -1205,7 +1205,7 @@ fn trans_managed_expr<'a>(bcx: &'a Block<'a>,

let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
fcx.schedule_free_value(cleanup::CustomScope(custom_cleanup_scope),
bx, cleanup::HeapManaged);
bx, cleanup::HeapManaged, contents_ty);
let bcx = trans_into(bcx, contents, SaveIn(body));
fcx.pop_custom_cleanup_scope(custom_cleanup_scope);
immediate_rvalue_bcx(bcx, bx, box_ty).to_expr_datumblock()
@@ -1789,13 +1789,14 @@ fn deref_once<'a>(bcx: &'a Block<'a>,
let scope = cleanup::temporary_scope(bcx.tcx(), expr.id);
let ptr = Load(bcx, datum.val);
if !type_is_zero_size(bcx.ccx(), content_ty) {
bcx.fcx.schedule_free_value(scope, ptr, cleanup::HeapExchange);
bcx.fcx.schedule_free_value(scope, ptr, cleanup::HeapExchange, content_ty);
}
}
RvalueExpr(Rvalue { mode: ByValue }) => {
let scope = cleanup::temporary_scope(bcx.tcx(), expr.id);
if !type_is_zero_size(bcx.ccx(), content_ty) {
bcx.fcx.schedule_free_value(scope, datum.val, cleanup::HeapExchange);
bcx.fcx.schedule_free_value(scope, datum.val, cleanup::HeapExchange,
content_ty);
}
}
LvalueExpr => { }
40 changes: 28 additions & 12 deletions src/librustc/middle/trans/glue.rs
@@ -50,15 +50,30 @@ pub fn trans_free<'a>(cx: &'a Block<'a>, v: ValueRef) -> &'a Block<'a> {
Some(expr::Ignore)).bcx
}

pub fn trans_exchange_free<'a>(cx: &'a Block<'a>, v: ValueRef)
-> &'a Block<'a> {
fn trans_exchange_free<'a>(cx: &'a Block<'a>, v: ValueRef, size: u64,
align: u64) -> &'a Block<'a> {
let _icx = push_ctxt("trans_exchange_free");
let ccx = cx.ccx();
callee::trans_lang_call(cx,
langcall(cx, None, "", ExchangeFreeFnLangItem),
[PointerCast(cx, v, Type::i8p(cx.ccx()))],
[PointerCast(cx, v, Type::i8p(ccx)), C_uint(ccx, size as uint), C_uint(ccx, align as uint)],
Some(expr::Ignore)).bcx
}

pub fn trans_exchange_free_ty<'a>(bcx: &'a Block<'a>, ptr: ValueRef,
content_ty: ty::t) -> &'a Block<'a> {
let sizing_type = sizing_type_of(bcx.ccx(), content_ty);
let content_size = llsize_of_alloc(bcx.ccx(), sizing_type);

// `Box<ZeroSizeType>` does not allocate.
if content_size != 0 {
let content_align = llalign_of_min(bcx.ccx(), sizing_type);
trans_exchange_free(bcx, ptr, content_size, content_align)
} else {
bcx
}
}
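What `trans_exchange_free_ty` arranges for at runtime, sketched as ordinary Rust (an illustrative helper, not the generated code): the size and alignment come from the pointee type itself, and zero-size content is never freed because it was never really allocated.

use std::alloc::{dealloc, Layout};
use std::mem;

unsafe fn exchange_free_ty_sketch<T>(ptr: *mut u8) {
    let size = mem::size_of::<T>();
    if size != 0 {
        // Sized free with the statically known layout of T.
        dealloc(ptr, Layout::from_size_align(size, mem::align_of::<T>()).unwrap());
    }
}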

pub fn take_ty<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
-> &'a Block<'a> {
// NB: v is an *alias* of type t here, not a direct value.
@@ -87,17 +102,15 @@ fn get_drop_glue_type(ccx: &CrateContext, t: ty::t) -> ty::t {
ty::ty_vec(_, None) | ty::ty_str => t,
_ => {
let llty = sizing_type_of(ccx, typ);
// Unique boxes do not allocate for zero-size types. The standard
// library may assume that `free` is never called on the pointer
// returned for `Box<ZeroSizeType>`.
// `Box<ZeroSizeType>` does not allocate.
if llsize_of_alloc(ccx, llty) == 0 {
ty::mk_i8()
} else {
ty::mk_uniq(tcx, ty::mk_i8())
}
}
}
}
}
}
_ => t
}
}
@@ -285,20 +298,22 @@ fn make_drop_glue<'a>(bcx: &'a Block<'a>, v0: ValueRef, t: ty::t) -> &'a Block<'
ty::ty_vec(mt, None) => {
with_cond(bcx, not_null, |bcx| {
let bcx = tvec::make_drop_glue_unboxed(bcx, llbox, mt.ty);
trans_exchange_free(bcx, llbox)
// FIXME: #13994: the old `Box<[T]>` will not support sized deallocation
trans_exchange_free(bcx, llbox, 0, 8)
})
}
ty::ty_str => {
with_cond(bcx, not_null, |bcx| {
let unit_ty = ty::sequence_element_type(bcx.tcx(), t);
let bcx = tvec::make_drop_glue_unboxed(bcx, llbox, unit_ty);
trans_exchange_free(bcx, llbox)
// FIXME: #13994: the old `Box<str>` will not support sized deallocation
trans_exchange_free(bcx, llbox, 0, 8)
Member:

And soon we'll be able to remove these entirely!

})
}
_ => {
with_cond(bcx, not_null, |bcx| {
let bcx = drop_ty(bcx, llbox, content_ty);
trans_exchange_free(bcx, llbox)
trans_exchange_free_ty(bcx, llbox, content_ty)
})
}
}
@@ -340,7 +355,8 @@ fn make_drop_glue<'a>(bcx: &'a Block<'a>, v0: ValueRef, t: ty::t) -> &'a Block<'
Call(bcx, dtor, [PointerCast(bcx, cdata, Type::i8p(bcx.ccx()))], []);

// Free the environment itself
trans_exchange_free(bcx, env)
// FIXME: #13994: pass align and size here
trans_exchange_free(bcx, env, 0, 8)
Member:

These should be known, right? (today, that is)

Contributor Author:

Yeah, it needs to be fixed by freeing the allocation in the destructor function pointer as we do for ~Trait. It could just be done while migrating proc to being represented via a trait object.
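The fix described here would move the free into the generated destructor itself, which statically knows the environment's concrete type, just as the `~Trait` (trait object) glue does. A hedged sketch of that shape with illustrative names:

use std::alloc::{dealloc, Layout};
use std::mem;

// Per-type destructor: runs the environment's drop code, then performs the
// sized free itself, since Env's size and alignment are known here.
unsafe fn drop_env<Env>(env: *mut u8) {
    std::ptr::drop_in_place(env as *mut Env);
    if mem::size_of::<Env>() != 0 {
        dealloc(env, Layout::new::<Env>());
    }
}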

})
}
_ => {
5 changes: 4 additions & 1 deletion src/librustc/middle/trans/tvec.rs
@@ -287,8 +287,11 @@ pub fn trans_uniq_vstore<'a>(bcx: &'a Block<'a>,
// Create a temporary scope lest execution should fail while
// constructing the vector.
let temp_scope = fcx.push_custom_cleanup_scope();

// FIXME: #13994: the old `Box<[T]>` will not support sized deallocation, this is a placeholder
let content_ty = vt.unit_ty;
fcx.schedule_free_value(cleanup::CustomScope(temp_scope),
val, cleanup::HeapExchange);
val, cleanup::HeapExchange, content_ty);

let dataptr = get_dataptr(bcx, val);

6 changes: 3 additions & 3 deletions src/libstd/slice.rs
@@ -109,7 +109,7 @@ use ops::Drop;
use option::{None, Option, Some};
use ptr::RawPtr;
use ptr;
use rt::heap::{exchange_malloc, deallocate};
use rt::heap::{allocate, deallocate};
use unstable::finally::try_finally;
use vec::Vec;

@@ -304,7 +304,7 @@ impl<'a, T: Clone> CloneableVector<T> for &'a [T] {

unsafe {
// this should pass the real required alignment
let ret = exchange_malloc(size, 8) as *mut RawVec<()>;
let ret = allocate(size, 8) as *mut RawVec<()>;

let a_size = mem::size_of::<T>();
let a_size = if a_size == 0 {1} else {a_size};
@@ -968,7 +968,7 @@ mod tests {
assert_eq!(v_b[0], 2);
assert_eq!(v_b[1], 3);

// Test on exchange heap.
// Test `Box<[T]>`
let vec_unique = box [1, 2, 3, 4, 5, 6];
let v_d = vec_unique.slice(1u, 6u).to_owned();
assert_eq!(v_d.len(), 5u);