Skip to content

Commit a5ed0c5

Browse files
committed
auto merge of #11565 : mozilla/rust/snapshot, r=huonw
2 parents 3697121 + 29840ad commit a5ed0c5

File tree

9 files changed

+25
-277
lines changed

9 files changed

+25
-277
lines changed

src/libnative/lib.rs

-24
Original file line numberDiff line numberDiff line change
@@ -39,30 +39,6 @@ static OS_DEFAULT_STACK_ESTIMATE: uint = 1 << 20;
3939
#[cfg(unix, not(android))]
4040
static OS_DEFAULT_STACK_ESTIMATE: uint = 2 * (1 << 20);
4141

42-
43-
// XXX: this should not exist here
44-
#[cfg(stage0, nativestart)]
45-
#[lang = "start"]
46-
pub fn lang_start(main: *u8, argc: int, argv: **u8) -> int {
47-
use std::cast;
48-
use std::task;
49-
50-
do start(argc, argv) {
51-
// Instead of invoking main directly on this thread, invoke it on
52-
// another spawned thread that we are guaranteed to know the size of the
53-
// stack of. Currently, we do not have a method of figuring out the size
54-
// of the main thread's stack, so for stack overflow detection to work
55-
// we must spawn the task in a subtask which we know the stack size of.
56-
let main: extern "Rust" fn() = unsafe { cast::transmute(main) };
57-
let mut task = task::task();
58-
task.name("<main>");
59-
match do task.try { main() } {
60-
Ok(()) => { os::set_exit_status(0); }
61-
Err(..) => { os::set_exit_status(rt::DEFAULT_ERROR_CODE); }
62-
}
63-
}
64-
}
65-
6642
/// Executes the given procedure after initializing the runtime with the given
6743
/// argc/argv.
6844
///

src/libstd/cleanup.rs

+16-28
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,8 @@ use unstable::raw;
1717

1818
type DropGlue<'a> = 'a |**TyDesc, *c_void|;
1919

20+
static RC_IMMORTAL : uint = 0x77777777;
21+
2022
/*
2123
* Box annihilation
2224
*
@@ -25,24 +27,21 @@ type DropGlue<'a> = 'a |**TyDesc, *c_void|;
2527

2628
struct AnnihilateStats {
2729
n_total_boxes: uint,
28-
n_unique_boxes: uint,
2930
n_bytes_freed: uint
3031
}
3132

3233
unsafe fn each_live_alloc(read_next_before: bool,
33-
f: |alloc: *mut raw::Box<()>, uniq: bool| -> bool)
34+
f: |alloc: *mut raw::Box<()>| -> bool)
3435
-> bool {
3536
//! Walks the internal list of allocations
3637
37-
use managed;
3838
use rt::local_heap;
3939

4040
let mut alloc = local_heap::live_allocs();
4141
while alloc != ptr::mut_null() {
4242
let next_before = (*alloc).next;
43-
let uniq = (*alloc).ref_count == managed::RC_MANAGED_UNIQUE;
4443

45-
if !f(alloc, uniq) {
44+
if !f(alloc) {
4645
return false;
4746
}
4847

@@ -70,25 +69,19 @@ fn debug_mem() -> bool {
7069
pub unsafe fn annihilate() {
7170
use rt::local_heap::local_free;
7271
use mem;
73-
use managed;
7472

7573
let mut stats = AnnihilateStats {
7674
n_total_boxes: 0,
77-
n_unique_boxes: 0,
7875
n_bytes_freed: 0
7976
};
8077

8178
// Pass 1: Make all boxes immortal.
8279
//
8380
// In this pass, nothing gets freed, so it does not matter whether
8481
// we read the next field before or after the callback.
85-
each_live_alloc(true, |alloc, uniq| {
82+
each_live_alloc(true, |alloc| {
8683
stats.n_total_boxes += 1;
87-
if uniq {
88-
stats.n_unique_boxes += 1;
89-
} else {
90-
(*alloc).ref_count = managed::RC_IMMORTAL;
91-
}
84+
(*alloc).ref_count = RC_IMMORTAL;
9285
true
9386
});
9487

@@ -97,12 +90,10 @@ pub unsafe fn annihilate() {
9790
// In this pass, unique-managed boxes may get freed, but not
9891
// managed boxes, so we must read the `next` field *after* the
9992
// callback, as the original value may have been freed.
100-
each_live_alloc(false, |alloc, uniq| {
101-
if !uniq {
102-
let tydesc = (*alloc).type_desc;
103-
let data = &(*alloc).data as *();
104-
((*tydesc).drop_glue)(data as *i8);
105-
}
93+
each_live_alloc(false, |alloc| {
94+
let tydesc = (*alloc).type_desc;
95+
let data = &(*alloc).data as *();
96+
((*tydesc).drop_glue)(data as *i8);
10697
true
10798
});
10899

@@ -112,22 +103,19 @@ pub unsafe fn annihilate() {
112103
// unique-managed boxes, though I think that none of those are
113104
// left), so we must read the `next` field before, since it will
114105
// not be valid after.
115-
each_live_alloc(true, |alloc, uniq| {
116-
if !uniq {
117-
stats.n_bytes_freed +=
118-
(*((*alloc).type_desc)).size
119-
+ mem::size_of::<raw::Box<()>>();
120-
local_free(alloc as *i8);
121-
}
106+
each_live_alloc(true, |alloc| {
107+
stats.n_bytes_freed +=
108+
(*((*alloc).type_desc)).size
109+
+ mem::size_of::<raw::Box<()>>();
110+
local_free(alloc as *i8);
122111
true
123112
});
124113

125114
if debug_mem() {
126115
// We do logging here w/o allocation.
127116
debug!("annihilator stats:\n \
128117
total boxes: {}\n \
129-
unique boxes: {}\n \
130118
bytes freed: {}",
131-
stats.n_total_boxes, stats.n_unique_boxes, stats.n_bytes_freed);
119+
stats.n_total_boxes, stats.n_bytes_freed);
132120
}
133121
}

src/libstd/managed.rs

-3
Original file line numberDiff line numberDiff line change
@@ -14,9 +14,6 @@ use ptr::to_unsafe_ptr;
1414

1515
#[cfg(not(test))] use cmp::*;
1616

17-
pub static RC_MANAGED_UNIQUE : uint = (-2) as uint;
18-
pub static RC_IMMORTAL : uint = 0x77777777;
19-
2017
/// Returns the refcount of a shared box (as just before calling this)
2118
#[inline]
2219
pub fn refcount<T>(t: @T) -> uint {

src/libstd/reflect.rs

-16
Original file line numberDiff line numberDiff line change
@@ -227,14 +227,6 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
227227
true
228228
}
229229

230-
#[cfg(stage0)]
231-
fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
232-
self.align_to::<~u8>();
233-
if ! self.inner.visit_uniq_managed(mtbl, inner) { return false; }
234-
self.bump_past::<~u8>();
235-
true
236-
}
237-
238230
fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
239231
self.align_to::<*u8>();
240232
if ! self.inner.visit_ptr(mtbl, inner) { return false; }
@@ -276,14 +268,6 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
276268
true
277269
}
278270

279-
#[cfg(stage0)]
280-
fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
281-
self.align_to::<~[@u8]>();
282-
if ! self.inner.visit_evec_uniq_managed(mtbl, inner) { return false; }
283-
self.bump_past::<~[@u8]>();
284-
true
285-
}
286-
287271
fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
288272
self.align_to::<&'static [u8]>();
289273
if ! self.inner.visit_evec_slice(mtbl, inner) { return false; }

src/libstd/repr.rs

-17
Original file line numberDiff line numberDiff line change
@@ -310,15 +310,6 @@ impl<'a> TyVisitor for ReprVisitor<'a> {
310310
})
311311
}
312312

313-
#[cfg(stage0)]
314-
fn visit_uniq_managed(&mut self, _mtbl: uint, inner: *TyDesc) -> bool {
315-
self.writer.write(['~' as u8]);
316-
self.get::<&raw::Box<()>>(|this, b| {
317-
let p = ptr::to_unsafe_ptr(&b.data) as *c_void;
318-
this.visit_ptr_inner(p, inner);
319-
})
320-
}
321-
322313
fn visit_ptr(&mut self, mtbl: uint, _inner: *TyDesc) -> bool {
323314
self.get::<*c_void>(|this, p| {
324315
write!(this.writer, "({} as *", *p);
@@ -359,14 +350,6 @@ impl<'a> TyVisitor for ReprVisitor<'a> {
359350
})
360351
}
361352

362-
#[cfg(stage0)]
363-
fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
364-
self.get::<&raw::Box<raw::Vec<()>>>(|this, b| {
365-
this.writer.write(['~' as u8]);
366-
this.write_unboxed_vec_repr(mtbl, &b.data, inner);
367-
})
368-
}
369-
370353
fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
371354
self.get::<raw::Slice<()>>(|this, s| {
372355
this.writer.write(['&' as u8]);

src/libstd/unstable/intrinsics.rs

+1-38
Original file line numberDiff line numberDiff line change
@@ -47,41 +47,8 @@ pub use realstd::unstable::intrinsics::{TyDesc, Opaque, TyVisitor, TypeId};
4747

4848
pub type GlueFn = extern "Rust" fn(*i8);
4949

50-
// NOTE remove after next snapshot
5150
#[lang="ty_desc"]
52-
#[cfg(not(test), stage0)]
53-
pub struct TyDesc {
54-
// sizeof(T)
55-
size: uint,
56-
57-
// alignof(T)
58-
align: uint,
59-
60-
// Called on a copy of a value of type `T` *after* memcpy
61-
take_glue: GlueFn,
62-
63-
// Called when a value of type `T` is no longer needed
64-
drop_glue: GlueFn,
65-
66-
// Called by drop glue when a value of type `T` can be freed
67-
free_glue: GlueFn,
68-
69-
// Called by reflection visitor to visit a value of type `T`
70-
visit_glue: GlueFn,
71-
72-
// If T represents a box pointer (`@U` or `~U`), then
73-
// `borrow_offset` is the amount that the pointer must be adjusted
74-
// to find the payload. This is always derivable from the type
75-
// `U`, but in the case of `@Trait` or `~Trait` objects, the type
76-
// `U` is unknown.
77-
borrow_offset: uint,
78-
79-
// Name corresponding to the type
80-
name: &'static str
81-
}
82-
83-
#[lang="ty_desc"]
84-
#[cfg(not(test), not(stage0))]
51+
#[cfg(not(test))]
8552
pub struct TyDesc {
8653
// sizeof(T)
8754
size: uint,
@@ -139,17 +106,13 @@ pub trait TyVisitor {
139106

140107
fn visit_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
141108
fn visit_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
142-
#[cfg(stage0)]
143-
fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
144109
fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
145110
fn visit_rptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
146111

147112
fn visit_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
148113
fn visit_unboxed_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
149114
fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
150115
fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
151-
#[cfg(stage0)]
152-
fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
153116
fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
154117
fn visit_evec_fixed(&mut self, n: uint, sz: uint, align: uint,
155118
mtbl: uint, inner: *TyDesc) -> bool;

0 commit comments

Comments (0)