Add Clone-less converter from Arc to Vec #89215
Closed
library/alloc/src/boxed.rs: 30 additions & 0 deletions
@@ -1077,6 +1077,36 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
// additional requirements.
unsafe { Pin::new_unchecked(boxed) }
}

/// Allocates a box with the requested layout, which may be the layout of a possibly-unsized
/// value that has been determined from an existing value.
///
/// The function `mem_to_imbued` is then called to turn the raw memory pointer into a pointer
/// to the type `T`. It is expected to attach the necessary fat-pointer metadata.
#[cfg(not(no_global_oom_handling))]
pub(crate) unsafe fn allocate_for_layout(
allocator: &A,
layout: Layout,
mem_to_imbued: impl FnOnce(*mut u8) -> *mut T,
) -> *mut T {
mem_to_imbued(
allocator.allocate(layout).unwrap_or_else(|_| handle_alloc_error(layout)).as_ptr()
as *mut u8,
)
}

/// Allocates a box with sufficient space for the pointee and copies the metadata.
#[unstable(feature = "unwrap_rc_as_box", issue = "none")]
#[cfg(not(no_global_oom_handling))]
#[doc(hidden)]
pub unsafe fn allocate_for_ptr(allocator: &A, ptr: *const T) -> *mut T {
// Allocate for a `T` using the layout of the given value.
unsafe {
Self::allocate_for_layout(allocator, Layout::for_value(&*ptr), |mem| {
ptr.set_ptr_value(mem) as *mut T
})
}
}
}

#[stable(feature = "rust1", since = "1.0.0")]
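To make the metadata-preserving allocation concrete, here is a standalone sketch (not part of this PR) of the same allocate-and-copy idea on stable Rust. It is specialized to byte slices, since the generic `set_ptr_value` used by `allocate_for_ptr` is unstable; the hypothetical helper `clone_slice_raw` plays the combined role of `allocate_for_layout` plus the `mem_to_imbued` closure.

```
use std::alloc::{alloc, handle_alloc_error, Layout};
use std::ptr;

/// Allocate a new block with the layout of `src` and copy the bytes over,
/// rebuilding the fat pointer by hand.
unsafe fn clone_slice_raw(src: *const [u8]) -> *mut [u8] {
    let layout = Layout::for_value(&*src);
    // Assumes a non-empty slice; `alloc` does not accept zero-sized layouts.
    let mem = alloc(layout);
    if mem.is_null() {
        handle_alloc_error(layout);
    }
    ptr::copy_nonoverlapping(src as *const u8, mem, layout.size());
    // Re-attach the length metadata to the fresh thin pointer, the step
    // `mem_to_imbued` performs generically above.
    ptr::slice_from_raw_parts_mut(mem, (*src).len())
}

fn main() {
    let src: Box<[u8]> = Box::from(&b"hello"[..]);
    let cloned = unsafe { Box::from_raw(clone_slice_raw(&*src)) };
    assert_eq!(&*cloned, &b"hello"[..]);
}
```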
library/alloc/src/lib.rs: 1 addition & 0 deletions
@@ -136,6 +136,7 @@
#![feature(unicode_internals)]
#![feature(unsize)]
#![feature(unsized_fn_params)]
#![cfg_attr(not(no_global_oom_handling), feature(unwrap_rc_as_box))]
#![feature(allocator_internals)]
#![feature(slice_partition_dedup)]
#![feature(maybe_uninit_extra, maybe_uninit_slice, maybe_uninit_uninit_array)]
library/alloc/src/rc.rs: 70 additions & 15 deletions
@@ -612,21 +612,10 @@ impl<T> Rc<T> {
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
-        if Rc::strong_count(&this) == 1 {
-            unsafe {
-                let val = ptr::read(&*this); // copy the contained object
-
-                // Indicate to Weaks that they can't be promoted by decrementing
-                // the strong count, and then remove the implicit "strong weak"
-                // pointer while also handling drop logic by just crafting a
-                // fake Weak.
-                this.inner().dec_strong();
-                let _weak = Weak { ptr: this.ptr };
-                forget(this);
-                Ok(val)
-            }
-        } else {
-            Err(this)
+        let weak = Self::leak_as_owning_weak(this)?;
+        unsafe {
+            let val = ptr::read(weak.as_ptr()); // copy the contained object
+            Ok(val)
        }
    }
@@ -997,6 +986,72 @@ impl<T: ?Sized> Rc<T> {
unsafe { mem::drop(Rc::from_raw(ptr)) };
}

/// Reduce the strong count, if the `Rc` has exactly one strong reference.
///
/// Otherwise, an [`Err`] is returned with the same `Rc` that was passed in.
///
/// This will succeed even if there are outstanding weak references.
///
/// After this operation succeeds, no more strong references to the allocation can be created,
/// making the caller the owner of the contained value. This returns a `Weak` that manages the
/// allocation while the caller can (unsafely) take advantage of their ownership. In contrast
/// to `try_unwrap` this also works for unsized pointees.
fn leak_as_owning_weak(this: Self) -> Result<Weak<T>, Self> {
if Rc::strong_count(&this) == 1 {
// Indicate to Weaks that they can't be promoted by decrementing
// the strong count, and then produce the implicit "strong weak"
// pointer that still handles dropping of the allocation.
this.inner().dec_strong();
let this = mem::ManuallyDrop::new(this);
let weak = Weak { ptr: this.ptr };
// Return the 'fake weak'.
Ok(weak)
} else {
Err(this)
}
}

/// Returns the boxed inner value, if the `Rc` has exactly one strong reference.
///
/// Otherwise, an [`Err`] is returned with the same `Rc` that was
/// passed in.
///
/// This will succeed even if there are outstanding weak references.
///
/// # Examples
///
/// ```
/// #![feature(unwrap_rc_as_box)]
///
/// use std::rc::Rc;
///
/// let x: Rc<str> = Rc::from("Hello, world");
/// assert!(matches!(
///     Rc::try_unwrap_as_box(x),
///     Ok(b) if &b[..2] == "He"
/// ));
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "unwrap_rc_as_box", reason = "recently added", issue = "none")]
pub fn try_unwrap_as_box(this: Self) -> Result<Box<T>, Self> {
let owning_weak = Self::leak_as_owning_weak(this)?;
let src_ptr = owning_weak.as_ptr();

unsafe {
// We 'own' this value right now so it is still initialized.
let size = mem::size_of_val(&*src_ptr);
// The raw allocation for our Box. After this point we must not panic, as otherwise we
// would leak this memory. We can't use MaybeUninit here as that is only valid for sized types.
let raw_box = Box::<T>::allocate_for_ptr(&Global, src_ptr);

// This is a new allocation, so it cannot overlap with the one that `owning_weak` is
// still holding onto.
ptr::copy_nonoverlapping(src_ptr as *const u8, raw_box as *mut u8, size);

Ok(Box::from_raw(raw_box))
}
}

/// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
/// this allocation.
#[inline]
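A usage sketch for the new method (it assumes a toolchain carrying this PR's `unwrap_rc_as_box` gate), showing both the `Err` path and the documented behavior that outstanding weak references do not block the conversion:

```
#![feature(unwrap_rc_as_box)]

use std::rc::Rc;

fn main() {
    let x: Rc<str> = Rc::from("shared");
    let y = Rc::clone(&x);

    // Two strong handles: the conversion refuses and hands the Rc back in `Err`.
    let y = Rc::try_unwrap_as_box(y).unwrap_err();
    drop(y);

    // Weak references are fine: the conversion succeeds, and afterwards the
    // Weak can no longer be upgraded since the strong count has reached zero.
    let w = Rc::downgrade(&x);
    let b: Box<str> = Rc::try_unwrap_as_box(x).unwrap();
    assert_eq!(&*b, "shared");
    assert!(w.upgrade().is_none());
}
```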
library/alloc/src/sync.rs: 64 additions & 11 deletions
@@ -617,19 +617,10 @@ impl<T> Arc<T> {
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
-        if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
-            return Err(this);
-        }
-
-        acquire!(this.inner().strong);
+        let weak = Self::leak_as_owning_weak(this)?;

        unsafe {
-            let elem = ptr::read(&this.ptr.as_ref().data);
-
-            // Make a weak pointer to clean up the implicit strong-weak reference
-            let _weak = Weak { ptr: this.ptr };
-            mem::forget(this);
-
+            let elem = ptr::read(&weak.ptr.as_ref().data);
            Ok(elem)
        }
    }
@@ -1047,6 +1038,68 @@ impl<T: ?Sized> Arc<T> {
unsafe { mem::drop(Arc::from_raw(ptr)) };
}

/// Reduce the strong count, if this is the last strong reference.
///
/// After this operation succeeds, no more strong references to the allocation can be
/// created, making the caller the owner of the contained value. This returns a `Weak` that
/// manages the allocation while the caller can (unsafely) take advantage of their
/// ownership. In contrast to `try_unwrap` this also works for unsized pointees.
fn leak_as_owning_weak(this: Self) -> Result<Weak<T>, Self> {
if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
return Err(this);
}

acquire!(this.inner().strong);

// At this point we own the pointee. We keep the allocation alive through a Weak
// reference while the caller takes ownership of the value. This leaks the value but not
// the allocation, which is eventually deallocated via the returned `Weak`. The weak
// pointer also cleans up the implicit strong-weak reference.
let this = mem::ManuallyDrop::new(this);
Ok(Weak { ptr: this.ptr })
}

/// Returns the boxed inner value, if the `Arc` has exactly one strong reference.
///
/// Otherwise, an [`Err`] is returned with the same `Arc` that was
/// passed in.
///
/// This will succeed even if there are outstanding weak references.
///
/// # Examples
///
/// ```
/// #![feature(unwrap_rc_as_box)]
///
/// use std::sync::Arc;
///
/// let x: Arc<str> = Arc::from("Hello, world");
/// assert!(matches!(
///     Arc::try_unwrap_as_box(x),
///     Ok(b) if &b[..2] == "He"
/// ));
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "unwrap_rc_as_box", reason = "recently added", issue = "none")]
pub fn try_unwrap_as_box(this: Self) -> Result<Box<T>, Self> {
let owning_weak = Self::leak_as_owning_weak(this)?;
let src_ptr = owning_weak.as_ptr();

unsafe {
// We 'own' this value right now so it is still initialized.
let size = mem::size_of_val(&*src_ptr);
// The raw allocation for our Box. After this point we must not panic, as otherwise we
// would leak this memory. We can't use MaybeUninit here as that is only valid for sized types.
let raw_box = Box::<T>::allocate_for_ptr(&Global, src_ptr);

// This is a new allocation, so it cannot overlap with the one that `owning_weak` is
// still holding onto.
ptr::copy_nonoverlapping(src_ptr as *const u8, raw_box as *mut u8, size);

Ok(Box::from_raw(raw_box))
}
}

#[inline]
fn inner(&self) -> &ArcInner<T> {
// This unsafety is ok because while this arc is alive we're guaranteed
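Two closing sketches. First, the ownership-claim protocol that `leak_as_owning_weak` relies on, reduced to a bare `AtomicUsize` (these are illustrative names, not this PR's code; the fence mirrors the `acquire!` step above):

```
use std::sync::atomic::{fence, AtomicUsize, Ordering};

/// Claim exclusive ownership guarded by a strong count: only the caller that
/// wins the 1 -> 0 exchange proceeds; every other caller sees `false`.
fn try_claim(strong: &AtomicUsize) -> bool {
    if strong.compare_exchange(1, 0, Ordering::Relaxed, Ordering::Relaxed).is_err() {
        return false;
    }
    // Synchronize with the Release decrements of previously dropped handles,
    // so their writes to the pointee are visible before we take ownership.
    fence(Ordering::Acquire);
    true
}

fn main() {
    let strong = AtomicUsize::new(1);
    assert!(try_claim(&strong));
    assert!(!try_claim(&strong));
}
```

Second, the happy path for an unsized pointee, which is the whole point of the new API (again assuming a toolchain with this PR's `unwrap_rc_as_box` gate):

```
#![feature(unwrap_rc_as_box)]

use std::sync::Arc;

fn main() {
    let nums: Arc<[i32]> = Arc::from(&[1, 2, 3][..]);
    // Moves the slice into a fresh Box allocation without cloning elements
    // through `T: Clone`; only a byte copy of the pointee is performed.
    let boxed: Box<[i32]> = Arc::try_unwrap_as_box(nums).unwrap();
    assert_eq!(&*boxed, &[1, 2, 3][..]);
}
```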