Commit 4687e1d (parent: 698042d)

remove the TyDesc from TypedArena

This prevents generating visit glue when using a TypedArena. The problem still exists for the untyped Arena.
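The heart of the change: instead of storing a `*TyDesc` and calling its `drop_glue` function pointer at runtime, the arena now leans on generics. `intrinsics::needs_drop::<T>()` is a compile-time constant, and `ptr::read` moves each value out so its destructor runs normally. Below is a minimal sketch of the same pattern in modern Rust, using today's stable `std::mem::needs_drop` and `std::ptr::read` in place of the 2014-era intrinsics; `destroy_slice` is a hypothetical name for illustration:

```rust
use std::mem;
use std::ptr;

/// Run the destructors of `len` values of `T` laid out contiguously
/// at `start`, without any runtime type descriptor.
unsafe fn destroy_slice<T>(start: *mut T, len: usize) {
    // Evaluated at compile time per `T`: for types without destructors
    // the whole loop disappears, so no drop glue is ever referenced.
    if mem::needs_drop::<T>() {
        for i in 0..len {
            // `ptr::read` moves the value out; dropping the temporary
            // runs `T`'s destructor, just like `read(start as *T)` in
            // the diff below.
            drop(ptr::read(start.add(i)));
        }
    }
}

fn main() {
    let mut v = vec![String::from("a"), String::from("b")];
    unsafe {
        destroy_slice(v.as_mut_ptr(), v.len());
        v.set_len(0); // the Vec no longer owns the moved-out values
    }
}
```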

1 file changed (+29, -45 lines)

src/libarena/lib.rs (+29, -45)
```diff
@@ -31,6 +31,7 @@ use std::cast::{transmute, transmute_mut, transmute_mut_region};
 use std::cast;
 use std::cell::{Cell, RefCell};
 use std::mem;
+use std::ptr::read;
 use std::cmp;
 use std::num;
 use std::kinds::marker;
```
```diff
@@ -345,37 +346,32 @@ pub struct TypedArena<T> {
     /// reached, a new chunk is allocated.
     priv end: *T,

-    /// The type descriptor of the objects in the arena. This should not be
-    /// necessary, but is until generic destructors are supported.
-    priv tydesc: *TyDesc,
-
     /// A pointer to the first arena segment.
-    priv first: Option<~TypedArenaChunk>,
+    priv first: Option<~TypedArenaChunk<T>>,
 }

-struct TypedArenaChunk {
+struct TypedArenaChunk<T> {
     /// Pointer to the next arena segment.
-    next: Option<~TypedArenaChunk>,
+    next: Option<~TypedArenaChunk<T>>,

     /// The number of elements that this chunk can hold.
     capacity: uint,

     // Objects follow here, suitably aligned.
 }

-impl TypedArenaChunk {
+impl<T> TypedArenaChunk<T> {
     #[inline]
-    fn new<T>(next: Option<~TypedArenaChunk>, capacity: uint)
-              -> ~TypedArenaChunk {
-        let mut size = mem::size_of::<TypedArenaChunk>();
+    fn new(next: Option<~TypedArenaChunk<T>>, capacity: uint) -> ~TypedArenaChunk<T> {
+        let mut size = mem::size_of::<TypedArenaChunk<T>>();
         size = round_up(size, mem::min_align_of::<T>());
         let elem_size = mem::size_of::<T>();
         let elems_size = elem_size.checked_mul(&capacity).unwrap();
         size = size.checked_add(&elems_size).unwrap();

         let mut chunk = unsafe {
             let chunk = global_heap::exchange_malloc(size);
-            let mut chunk: ~TypedArenaChunk = cast::transmute(chunk);
+            let mut chunk: ~TypedArenaChunk<T> = cast::transmute(chunk);
             mem::move_val_init(&mut chunk.next, next);
             chunk
         };
```
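For reference, the size computation in `new` works as follows: take the header size, round it up to `T`'s alignment so the first element slot is properly aligned, then add `capacity * size_of::<T>()`, using checked arithmetic so an overflow panics instead of under-allocating. A sketch with concrete numbers in modern Rust; `Header` and this `round_up` are stand-ins (the real `round_up` lives elsewhere in src/libarena/lib.rs):

```rust
use std::mem;

// Stand-in for TypedArenaChunk's fields (next pointer + capacity).
struct Header {
    next: Option<Box<Header>>,
    capacity: usize,
}

/// Round `base` up to the next multiple of `align` (a power of two).
fn round_up(base: usize, align: usize) -> usize {
    (base + align - 1) & !(align - 1)
}

fn main() {
    // Mirror the arithmetic in `TypedArenaChunk::<T>::new` for
    // T = u64, capacity = 8.
    let mut size = mem::size_of::<Header>();
    size = round_up(size, mem::align_of::<u64>());
    let elems_size = mem::size_of::<u64>().checked_mul(8).unwrap();
    size = size.checked_add(elems_size).unwrap();
    println!("allocate {} bytes: aligned header + 8 u64 slots", size);
}
```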
```diff
@@ -387,16 +383,13 @@ impl TypedArenaChunk {
     /// Destroys this arena chunk. If the type descriptor is supplied, the
     /// drop glue is called; otherwise, drop glue is not called.
     #[inline]
-    unsafe fn destroy(&mut self, len: uint, opt_tydesc: Option<*TyDesc>) {
+    unsafe fn destroy(&mut self, len: uint) {
         // Destroy all the allocated objects.
-        match opt_tydesc {
-            None => {}
-            Some(tydesc) => {
-                let mut start = self.start(tydesc);
-                for _ in range(0, len) {
-                    ((*tydesc).drop_glue)(start as *i8);
-                    start = start.offset((*tydesc).size as int)
-                }
+        if intrinsics::needs_drop::<T>() {
+            let mut start = self.start();
+            for _ in range(0, len) {
+                read(start as *T); // run the destructor on the pointer
+                start = start.offset(mem::size_of::<T>() as int)
             }
         }
```
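The one-line replacement for the drop-glue call works because `read` performs a bitwise move out of the arena: the returned temporary owns the value, and dropping it at the end of the statement runs `T`'s destructor. A small self-contained demonstration in modern Rust; the `Loud` type is made up for illustration:

```rust
use std::mem::ManuallyDrop;
use std::ptr;

struct Loud(&'static str);

impl Drop for Loud {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn main() {
    let raw = Box::into_raw(Box::new(Loud("arena slot")));
    unsafe {
        // Same trick as `read(start as *T)` in the hunk above: move
        // the value out through a raw pointer and drop the temporary.
        let moved = ptr::read(raw);
        drop(moved); // prints "dropping arena slot"

        // Free the heap allocation without running Drop a second time.
        drop(Box::from_raw(raw as *mut ManuallyDrop<Loud>));
    }
}
```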

```diff
@@ -406,26 +399,26 @@ impl TypedArenaChunk {
             None => {}
             Some(mut next) => {
                 // We assume that the next chunk is completely filled.
-                next.destroy(next.capacity, opt_tydesc)
+                next.destroy(next.capacity)
             }
         }
     }

     // Returns a pointer to the first allocated object.
     #[inline]
-    fn start(&self, tydesc: *TyDesc) -> *u8 {
-        let this: *TypedArenaChunk = self;
+    fn start(&self) -> *u8 {
+        let this: *TypedArenaChunk<T> = self;
         unsafe {
-            cast::transmute(round_up(this.offset(1) as uint, (*tydesc).align))
+            cast::transmute(round_up(this.offset(1) as uint, mem::min_align_of::<T>()))
         }
     }

     // Returns a pointer to the end of the allocated space.
     #[inline]
-    fn end(&self, tydesc: *TyDesc) -> *u8 {
+    fn end(&self) -> *u8 {
         unsafe {
-            let size = (*tydesc).size.checked_mul(&self.capacity).unwrap();
-            self.start(tydesc).offset(size as int)
+            let size = mem::size_of::<T>().checked_mul(&self.capacity).unwrap();
+            self.start().offset(size as int)
         }
     }
 }
```
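`start` relies on typed pointer arithmetic: `this.offset(1)` on a `*TypedArenaChunk<T>` advances by the full header size in bytes, landing just past the header; rounding that address up to `min_align_of::<T>()` yields the first element slot, and `end` is then `capacity * size_of::<T>()` bytes beyond it. A sketch of the `offset(1)` step in modern Rust, with `Header` again a stand-in:

```rust
use std::mem;

struct Header {
    next: usize, // stand-ins for TypedArenaChunk's fields
    capacity: usize,
}

fn main() {
    let h = Header { next: 0, capacity: 8 };
    let this: *const Header = &h;
    unsafe {
        // One-past-the-end of the header: exactly size_of::<Header>()
        // bytes beyond `this`, which is where the elements begin
        // (after alignment padding, if any).
        let past_header = this.offset(1) as usize;
        assert_eq!(past_header - this as usize, mem::size_of::<Header>());
    }
}
```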
```diff
@@ -441,14 +434,10 @@ impl<T> TypedArena<T> {
     /// objects.
     #[inline]
     pub fn with_capacity(capacity: uint) -> TypedArena<T> {
-        let chunk = TypedArenaChunk::new::<T>(None, capacity);
-        let tydesc = unsafe {
-            intrinsics::get_tydesc::<T>()
-        };
+        let chunk = TypedArenaChunk::<T>::new(None, capacity);
         TypedArena {
-            ptr: chunk.start(tydesc) as *T,
-            end: chunk.end(tydesc) as *T,
-            tydesc: tydesc,
+            ptr: chunk.start() as *T,
+            end: chunk.end() as *T,
             first: Some(chunk),
         }
     }
```
```diff
@@ -475,9 +464,9 @@ impl<T> TypedArena<T> {
     fn grow(&mut self) {
         let chunk = self.first.take_unwrap();
         let new_capacity = chunk.capacity.checked_mul(&2).unwrap();
-        let chunk = TypedArenaChunk::new::<T>(Some(chunk), new_capacity);
-        self.ptr = chunk.start(self.tydesc) as *T;
-        self.end = chunk.end(self.tydesc) as *T;
+        let chunk = TypedArenaChunk::<T>::new(Some(chunk), new_capacity);
+        self.ptr = chunk.start() as *T;
+        self.end = chunk.end() as *T;
         self.first = Some(chunk)
     }
 }
```
```diff
@@ -486,18 +475,13 @@ impl<T> TypedArena<T> {
 impl<T> Drop for TypedArena<T> {
     fn drop(&mut self) {
         // Determine how much was filled.
-        let start = self.first.get_ref().start(self.tydesc) as uint;
+        let start = self.first.get_ref().start() as uint;
         let end = self.ptr as uint;
         let diff = (end - start) / mem::size_of::<T>();

         // Pass that to the `destroy` method.
         unsafe {
-            let opt_tydesc = if intrinsics::needs_drop::<T>() {
-                Some(self.tydesc)
-            } else {
-                None
-            };
-            self.first.get_mut_ref().destroy(diff, opt_tydesc)
+            self.first.get_mut_ref().destroy(diff)
        }
    }
 }
```
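The destructor recovers the number of initialized objects in the newest chunk purely from pointer arithmetic: `(end - start) / size_of::<T>()`. A worked example with made-up addresses:

```rust
use std::mem;

fn main() {
    // Hypothetical addresses: the first element slot sits at 0x1000 and
    // the bump pointer has advanced past three 8-byte u64 allocations.
    let start: usize = 0x1000;
    let end: usize = 0x1000 + 3 * mem::size_of::<u64>();
    let diff = (end - start) / mem::size_of::<u64>();
    assert_eq!(diff, 3); // three destructors to run in this chunk
}
```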
