Commit 56ce17d

Make Rc allocator_api APIs more consistent.
* Add `into_raw_with_allocator` on `Rc`/`Weak`
* Remove `where A: Clone` from `Rc::assume_init`s, `Rc::unwrap_or_clone`, and `Rc::downcast(_unchecked)`
* Add `fn allocator(&self)` to `Rc`/`Weak` for all `T: ?Sized`
* Make `TryFrom<Rc<[T]>> for Rc<[T; N]>` allocator-aware
1 parent: d573564

1 file changed: library/alloc/src/rc.rs (+82, -52 lines)
@@ -661,16 +661,6 @@ impl<T> Rc<T> {
 }

 impl<T, A: Allocator> Rc<T, A> {
-    /// Returns a reference to the underlying allocator.
-    ///
-    /// Note: this is an associated function, which means that you have
-    /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
-    /// is so that there is no conflict with a method on the inner type.
-    #[inline]
-    #[unstable(feature = "allocator_api", issue = "32838")]
-    pub fn allocator(this: &Self) -> &A {
-        &this.alloc
-    }
     /// Constructs a new `Rc` in the provided allocator.
     ///
     /// # Examples
@@ -1127,12 +1117,9 @@ impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
     /// ```
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
-    pub unsafe fn assume_init(self) -> Rc<T, A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Rc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Rc<T, A> {
+        let (ptr, alloc) = Self::into_raw_with_allocator(self);
+        unsafe { Rc::from_raw_in(ptr.cast(), alloc) }
     }
 }

@@ -1171,12 +1158,9 @@ impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
     /// ```
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
-    pub unsafe fn assume_init(self) -> Rc<[T], A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Rc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Rc<[T], A> {
+        let (ptr, alloc) = Self::into_raw_with_allocator(self);
+        unsafe { Rc::from_raw_in(ptr as *const [T], alloc) }
     }
 }
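The two `assume_init` hunks above drop the `where A: Clone` bound: the allocator is now moved out through `into_raw_with_allocator` rather than cloned. A minimal nightly-only sketch of what this enables, using a hypothetical non-`Clone` allocator `NoClone` (not part of this diff) that simply forwards to `System`:

```rust
#![feature(allocator_api, new_uninit)]

use std::alloc::{AllocError, Allocator, Layout, System};
use std::mem::MaybeUninit;
use std::ptr::NonNull;
use std::rc::Rc;

// Hypothetical allocator that deliberately does not implement `Clone`;
// every request is forwarded to `System`.
struct NoClone;

unsafe impl Allocator for NoClone {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        System.allocate(layout)
    }
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        unsafe { System.deallocate(ptr, layout) }
    }
}

fn main() {
    // `new_uninit_in` hands back an `Rc<MaybeUninit<u32>, NoClone>`.
    let mut five: Rc<MaybeUninit<u32>, NoClone> = Rc::new_uninit_in(NoClone);
    Rc::get_mut(&mut five).unwrap().write(5);
    // With the `A: Clone` bound removed, this compiles even though
    // `NoClone` is not `Clone`: the allocator is moved, not cloned.
    let five: Rc<u32, NoClone> = unsafe { five.assume_init() };
    assert_eq!(*five, 5);
}
```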

@@ -1315,6 +1299,17 @@ impl<T: ?Sized> Rc<T> {
 }

 impl<T: ?Sized, A: Allocator> Rc<T, A> {
+    /// Returns a reference to the underlying allocator.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
+    /// is so that there is no conflict with a method on the inner type.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn allocator(this: &Self) -> &A {
+        &this.alloc
+    }
+
     /// Consumes the `Rc`, returning the wrapped pointer.
     ///
     /// To avoid a memory leak the pointer must be converted back to an `Rc` using
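The hunk above re-adds `allocator` on the `impl<T: ?Sized, A: Allocator>` block, so it can now be called on `Rc`s of unsized pointees such as slices or trait objects. A small nightly-only sketch (`allocator_api`):

```rust
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    // Unsized coercion from `Rc<[i32; 3], System>` to `Rc<[i32], System>`.
    let slice: Rc<[i32], System> = Rc::new_in([1, 2, 3], System);
    // `allocator` is an associated function to avoid clashing with
    // methods on the pointee, so it is called through the type.
    let _alloc: &System = Rc::allocator(&slice);
    assert_eq!(slice.len(), 3);
}
```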
@@ -1337,6 +1332,33 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
         ptr
     }

+    /// Consumes the `Rc`, returning the wrapped pointer and allocator.
+    ///
+    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
+    /// [`Rc::from_raw_in`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(allocator_api)]
+    /// use std::rc::Rc;
+    /// use std::alloc::System;
+    ///
+    /// let x = Rc::new_in("hello".to_owned(), System);
+    /// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
+    /// assert_eq!(unsafe { &*ptr }, "hello");
+    /// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
+    /// assert_eq!(&*x, "hello");
+    /// ```
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
+        let this = mem::ManuallyDrop::new(this);
+        let ptr = Self::as_ptr(&this);
+        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
+        let alloc = unsafe { ptr::read(Self::allocator(&this)) };
+        (ptr, alloc)
+    }
+
     /// Provides a raw pointer to the data.
     ///
     /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
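Since the new `into_raw_with_allocator` also lives on the `T: ?Sized` impl, the pointer/allocator pair round-trips for unsized pointees as well. A sketch along the lines of the doc test above, but for a slice (nightly, `allocator_api`):

```rust
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    let x: Rc<[u8], System> = Rc::new_in([1, 2, 3], System);
    // `ptr` is a fat `*const [u8]`; rebuild the `Rc` so nothing leaks.
    let (ptr, alloc) = Rc::into_raw_with_allocator(x);
    let x = unsafe { Rc::from_raw_in(ptr, alloc) };
    assert_eq!(&x[..], &[1, 2, 3]);
}
```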
@@ -1790,7 +1812,9 @@ impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
         // reference to the allocation.
         unsafe { &mut this.ptr.as_mut().value }
     }
+}

+impl<T: Clone, A: Allocator> Rc<T, A> {
     /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
     /// clone.
     ///
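This hunk closes the old `impl<T: Clone, A: Allocator + Clone>` block early and reopens an `impl<T: Clone, A: Allocator>` block, so `unwrap_or_clone` no longer needs a cloneable allocator. A sketch of the call with an explicit allocator, assuming the `arc_unwrap_or_clone` feature gate that `Rc::unwrap_or_clone` was still behind at the time:

```rust
#![feature(allocator_api, arc_unwrap_or_clone)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    let rc: Rc<String, System> = Rc::new_in("hello".to_owned(), System);
    let rc2 = rc.clone(); // cloning the `Rc` handle itself still needs `A: Clone`
    // Two strong references exist, so the inner `String` is cloned out.
    assert_eq!(Rc::unwrap_or_clone(rc), "hello");
    // `rc2` is now unique, so its `String` is moved out without cloning.
    assert_eq!(Rc::unwrap_or_clone(rc2), "hello");
}
```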
@@ -1826,7 +1850,7 @@ impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
     }
 }

-impl<A: Allocator + Clone> Rc<dyn Any, A> {
+impl<A: Allocator> Rc<dyn Any, A> {
     /// Attempt to downcast the `Rc<dyn Any>` to a concrete type.
     ///
     /// # Examples
@@ -1849,12 +1873,11 @@ impl<A: Allocator + Clone> Rc<dyn Any, A> {
     #[stable(feature = "rc_downcast", since = "1.29.0")]
     pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
         if (*self).is::<T>() {
-            unsafe {
-                let ptr = self.ptr.cast::<RcBox<T>>();
-                let alloc = self.alloc.clone();
-                forget(self);
-                Ok(Rc::from_inner_in(ptr, alloc))
-            }
+            let this = mem::ManuallyDrop::new(self);
+            let ptr = this.ptr.cast::<RcBox<T>>();
+            // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
+            let alloc = unsafe { ptr::read(&this.alloc) };
+            unsafe { Ok(Rc::from_inner_in(ptr, alloc)) }
         } else {
             Err(self)
         }
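With the `Rc<dyn Any, A>` impl relaxed to plain `A: Allocator`, `downcast` moves the allocator out with `ptr::read` under `ManuallyDrop` instead of cloning it, so it works for any allocator. A usage sketch (nightly, `allocator_api`):

```rust
#![feature(allocator_api)]

use std::alloc::System;
use std::any::Any;
use std::rc::Rc;

fn main() {
    // Unsized coercion to the trait object keeps the allocator parameter.
    let value: Rc<dyn Any, System> = Rc::new_in(5usize, System);
    match value.downcast::<usize>() {
        Ok(n) => assert_eq!(*n, 5),
        Err(_) => panic!("downcast to the wrong type"),
    }
}
```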
@@ -1889,12 +1912,11 @@ impl<A: Allocator + Clone> Rc<dyn Any, A> {
     #[inline]
     #[unstable(feature = "downcast_unchecked", issue = "90850")]
     pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
-        unsafe {
-            let ptr = self.ptr.cast::<RcBox<T>>();
-            let alloc = self.alloc.clone();
-            mem::forget(self);
-            Rc::from_inner_in(ptr, alloc)
-        }
+        let this = mem::ManuallyDrop::new(self);
+        let ptr = this.ptr.cast::<RcBox<T>>();
+        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
+        let alloc = unsafe { ptr::read(&this.alloc) };
+        unsafe { Rc::from_inner_in(ptr, alloc) }
     }
 }

@@ -2639,12 +2661,13 @@ impl From<Rc<str>> for Rc<[u8]> {
 }

 #[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
-impl<T, const N: usize> TryFrom<Rc<[T]>> for Rc<[T; N]> {
-    type Error = Rc<[T]>;
+impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
+    type Error = Rc<[T], A>;

-    fn try_from(boxed_slice: Rc<[T]>) -> Result<Self, Self::Error> {
+    fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
         if boxed_slice.len() == N {
-            Ok(unsafe { Rc::from_raw(Rc::into_raw(boxed_slice) as *mut [T; N]) })
+            let (ptr, alloc) = Rc::into_raw_with_allocator(boxed_slice);
+            Ok(unsafe { Rc::from_raw_in(ptr as *mut [T; N], alloc) })
         } else {
             Err(boxed_slice)
         }
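The `TryFrom` impl is now generic over the allocator, so converting a slice `Rc` into a fixed-size array `Rc` keeps the original allocator instead of being limited to the global one. A sketch:

```rust
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    let slice: Rc<[u8], System> = Rc::new_in([1, 2, 3], System);
    // Succeeds because the lengths match; the `System` allocator travels along.
    let array: Rc<[u8; 3], System> = slice.try_into().expect("length should be 3");
    assert_eq!(*array, [1, 2, 3]);
}
```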
@@ -2897,6 +2920,13 @@ impl<T: ?Sized> Weak<T> {
 }

 impl<T: ?Sized, A: Allocator> Weak<T, A> {
+    /// Returns a reference to the underlying allocator.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn allocator(&self) -> &A {
+        &self.alloc
+    }
+
     /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
     ///
     /// The pointer is valid only if there are some strong references. The pointer may be dangling,
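Unlike `Rc::allocator`, the new `Weak::allocator` takes `&self`, so plain method syntax is fine; the "conflict with a method on the inner type" caveat does not apply because `Weak` does not dereference to `T`. A sketch (nightly, `allocator_api`):

```rust
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    let strong = Rc::new_in("hello".to_owned(), System);
    let weak = Rc::downgrade(&strong);
    // Method syntax works here; no clash with the pointee is possible.
    let _alloc: &System = weak.allocator();
    assert_eq!(Rc::weak_count(&strong), 1);
}
```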
@@ -2974,42 +3004,42 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
         result
     }

-    /// Consumes the `Weak<T>` and turns it into a raw pointer.
+    /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
     ///
     /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
     /// one weak reference (the weak count is not modified by this operation). It can be turned
-    /// back into the `Weak<T>` with [`from_raw`].
+    /// back into the `Weak<T>` with [`from_raw_in`].
     ///
     /// The same restrictions of accessing the target of the pointer as with
     /// [`as_ptr`] apply.
     ///
     /// # Examples
     ///
     /// ```
+    /// #![feature(allocator_api)]
     /// use std::rc::{Rc, Weak};
+    /// use std::alloc::System;
     ///
-    /// let strong = Rc::new("hello".to_owned());
+    /// let strong = Rc::new_in("hello".to_owned(), System);
     /// let weak = Rc::downgrade(&strong);
-    /// let raw = weak.into_raw();
+    /// let (raw, alloc) = weak.into_raw_with_allocator();
     ///
     /// assert_eq!(1, Rc::weak_count(&strong));
     /// assert_eq!("hello", unsafe { &*raw });
     ///
-    /// drop(unsafe { Weak::from_raw(raw) });
+    /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
     /// assert_eq!(0, Rc::weak_count(&strong));
     /// ```
     ///
-    /// [`from_raw`]: Weak::from_raw
+    /// [`from_raw_in`]: Weak::from_raw_in
     /// [`as_ptr`]: Weak::as_ptr
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
-    pub fn into_raw_and_alloc(self) -> (*const T, A)
-    where
-        A: Clone,
-    {
-        let result = self.as_ptr();
-        let alloc = self.alloc.clone();
-        mem::forget(self);
+    pub fn into_raw_with_allocator(self) -> (*const T, A) {
+        let this = mem::ManuallyDrop::new(self);
+        let result = this.as_ptr();
+        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
+        let alloc = unsafe { ptr::read(this.allocator()) };
         (result, alloc)
     }
