
Commit 1d6d496

Add RawRc methods for sized values
1 parent 8a5fb9b commit 1d6d496

1 file changed: +209 -4 lines

library/alloc/src/raw_rc/raw_rc.rs

@@ -1,12 +1,15 @@
-use core::alloc::Allocator;
+use core::alloc::{AllocError, Allocator};
 use core::cell::UnsafeCell;
 #[cfg(not(no_global_oom_handling))]
 use core::clone::CloneToUninit;
+#[cfg(not(no_global_oom_handling))]
+use core::convert;
 use core::marker::PhantomData;
+use core::mem::DropGuard;
 #[cfg(not(no_global_oom_handling))]
-use core::mem::{self, DropGuard};
+use core::mem::{self, MaybeUninit, SizedTypeProperties};
 #[cfg(not(no_global_oom_handling))]
-use core::ops::DerefMut;
+use core::ops::{DerefMut, Residual, Try};
 #[cfg(not(no_global_oom_handling))]
 use core::ptr;
 use core::ptr::NonNull;
@@ -20,7 +23,7 @@ use crate::raw_rc::raw_weak::RawWeak;
 #[cfg(not(no_global_oom_handling))]
 use crate::raw_rc::rc_alloc;
 #[cfg(not(no_global_oom_handling))]
-use crate::raw_rc::rc_layout::RcLayout;
+use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt};
 use crate::raw_rc::rc_value_pointer::RcValuePointer;

 /// Base implementation of a strong pointer. `RawRc` does not implement `Drop`; the user should call
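The new `Residual` and `Try` imports feed the signature of `try_map` added below, which is generic over any try-style return type. For readers unfamiliar with this unstable machinery, here is a minimal standalone sketch of the same pattern (nightly-only; the `double` function is illustrative and not part of this commit):

#![feature(try_blocks, try_trait_v2, try_trait_v2_residual)]

use std::ops::{Residual, Try};

// Generic over any try-style type `U` (e.g. `Option` or `Result`): `?`
// propagates the failure residual, and the `try` block rewraps the success
// value, mirroring the shape of `RawRc::try_map`'s signature.
fn double<U>(x: U) -> <U::Residual as Residual<i32>>::TryType
where
    U: Try<Output = i32>,
    U::Residual: Residual<i32>,
{
    let value = x?;
    try { value * 2 }
}

fn main() {
    assert_eq!(double(Some(3)), Some(6));
    assert_eq!(double::<Result<i32, ()>>(Ok(3)), Ok(6));
    assert_eq!(double(None::<i32>), None);
}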
@@ -336,6 +339,196 @@ where
     }
 }

+impl<T, A> RawRc<T, A> {
+    /// # Safety
+    ///
+    /// `weak` must be non-dangling.
+    unsafe fn from_weak_with_value(weak: RawWeak<T, A>, value: T) -> Self {
+        unsafe {
+            weak.as_ptr().write(value);
+
+            Self::from_weak(weak)
+        }
+    }
+
+    #[inline]
+    pub(crate) fn try_new(value: T) -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        RawWeak::try_new_uninit::<1>()
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
+    #[inline]
+    pub(crate) fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        RawWeak::try_new_uninit_in::<1>(alloc)
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new(value: T) -> Self
+    where
+        A: Allocator + Default,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<1>(), value) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new_in(value: T, alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<1>(alloc), value) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    fn new_with<F>(f: F) -> Self
+    where
+        A: Allocator + Default,
+        F: FnOnce() -> T,
+    {
+        let (ptr, alloc) = rc_alloc::allocate_with::<A, _, 1>(T::RC_LAYOUT, |ptr| unsafe {
+            ptr.as_ptr().cast().write(f())
+        });
+
+        unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }
+    }
+
+    /// Maps the value in a `RawRc`, reusing the allocation if possible.
+    ///
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn map<R, U>(self, f: impl FnOnce(&T) -> U) -> RawRc<U, A>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        fn wrap_fn<T, U>(f: impl FnOnce(&T) -> U) -> impl FnOnce(&T) -> Result<U, !> {
+            |x| Ok(f(x))
+        }
+
+        let f = wrap_fn(f);
+
+        unsafe { self.try_map::<R, _, _>(f, convert::identity) }.into_ok()
+    }
+
+    /// Attempts to map the value in an `Rc`, reusing the allocation if possible.
+    ///
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn try_map<R, U, V>(
+        mut self,
+        f: impl FnOnce(&T) -> U,
+        output_mapper: impl FnOnce(RawRc<U::Output, A>) -> V, // How to remove this argument?
+    ) -> <U::Residual as Residual<V>>::TryType
+    where
+        A: Allocator,
+        R: RefCounter,
+        U: Try,
+        U::Residual: Residual<V>,
+    {
+        let result = if T::LAYOUT == U::Output::LAYOUT && unsafe { self.is_unique::<R>() } {
+            let value = unsafe { self.as_ptr().read() };
+            let mut allocation = unsafe { self.cast::<MaybeUninit<U::Output>>() };
+
+            // Drop `self` as `RawRc<MaybeUninit<U::Output>, A>` if `f` panics or returns a
+            // failure value.
+            let guard = unsafe { new_rc_guard::<MaybeUninit<U::Output>, A, R>(&mut allocation) };
+
+            let mapped_value = f(&value)?;
+
+            drop(value);
+            mem::forget(guard);
+
+            unsafe {
+                allocation.get_mut_unchecked().write(mapped_value);
+
+                allocation.cast()
+            }
+        } else {
+            // Drop `self` if `f` panics or returns a failure value.
+            let guard = unsafe { new_rc_guard::<T, A, R>(&mut self) };
+
+            let mapped_value = f(unsafe { guard.as_ptr().as_ref() })?;
+
+            drop(guard);
+
+            let alloc = self.into_raw_parts().1;
+
+            RawRc::new_in(mapped_value, alloc)
+        };
+
+        try { output_mapper(result) }
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn into_inner<R>(self) -> Option<T>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        let is_last_strong_ref = unsafe { decrement_strong_ref_count::<R>(self.value_ptr()) };
+
+        is_last_strong_ref.then(|| unsafe { self.weak.assume_init_into_inner::<R>() })
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn try_unwrap<R>(self) -> Result<T, RawRc<T, A>>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        unsafe fn inner<R>(value_ptr: RcValuePointer) -> bool
+        where
+            R: RefCounter,
+        {
+            unsafe {
+                R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).try_lock_strong_count()
+            }
+        }
+
+        let is_last_strong_ref = unsafe { inner::<R>(self.value_ptr()) };
+
+        if is_last_strong_ref {
+            Ok(unsafe { self.weak.assume_init_into_inner::<R>() })
+        } else {
+            Err(self)
+        }
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn unwrap_or_clone<R>(self) -> T
+    where
+        T: Clone,
+        A: Allocator,
+        R: RefCounter,
+    {
+        // SAFETY: Caller guarantees `rc` will only be accessed with the same `RefCounter`
+        // implementation.
+        unsafe { self.try_unwrap::<R>() }.unwrap_or_else(|mut rc| {
+            let guard = unsafe { new_rc_guard::<T, A, R>(&mut rc) };
+
+            T::clone(unsafe { guard.as_ptr().as_ref() })
+        })
+    }
+}
+
 /// Decrements strong reference count in a reference-counted allocation with a value object that is
 /// pointed to by `value_ptr`.
 #[inline]
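The `into_inner`, `try_unwrap`, and `unwrap_or_clone` methods above back semantics that the public `Rc` API already exposes on stable. A small illustration using only public `std::rc::Rc` calls (not part of this diff):

use std::rc::Rc;

fn main() {
    // `Rc::try_unwrap` succeeds only when the caller holds the last strong
    // reference, mirroring `RawRc::try_unwrap`.
    let unique = Rc::new(String::from("hello"));
    assert_eq!(Rc::try_unwrap(unique), Ok(String::from("hello")));

    // With two strong references, the `Rc` is handed back in the `Err` case.
    let shared = Rc::new(String::from("world"));
    let other = Rc::clone(&shared);
    let shared = Rc::try_unwrap(shared).unwrap_err();

    // `Rc::unwrap_or_clone` clones the value instead of failing while the
    // allocation is still shared, like `RawRc::unwrap_or_clone`.
    assert_eq!(Rc::unwrap_or_clone(shared), String::from("world"));
    drop(other);
}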
@@ -367,3 +560,15 @@ where
         R::is_unique(R::from_raw_counter(&ref_counts.strong), R::from_raw_counter(&ref_counts.weak))
     }
 }
+
+/// Returns a drop guard that calls `RawRc::drop::<R>()` on drop.
+unsafe fn new_rc_guard<'a, T, A, R>(
+    rc: &'a mut RawRc<T, A>,
+) -> DropGuard<&'a mut RawRc<T, A>, impl FnOnce(&'a mut RawRc<T, A>)>
+where
+    T: ?Sized,
+    A: Allocator,
+    R: RefCounter,
+{
+    DropGuard::new(rc, |rc| unsafe { rc.drop::<R>() })
+}
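`new_rc_guard` exists for panic safety: `try_map` and `unwrap_or_clone` arm a guard before running user code, then disarm it with `mem::forget` once the fallible step has succeeded. A standalone sketch of that pattern, with a hand-rolled `Guard` standing in for the unstable `core::mem::DropGuard` (illustrative only, not the real API):

use std::mem;

// Borrows a resource and runs a cleanup closure on drop, unless the guard
// is disarmed with `mem::forget` first.
struct Guard<'a, T, F: FnMut(&mut T)> {
    value: &'a mut T,
    cleanup: F,
}

impl<T, F: FnMut(&mut T)> Drop for Guard<'_, T, F> {
    fn drop(&mut self) {
        (self.cleanup)(&mut *self.value);
    }
}

fn fallible_step(succeed: bool) -> Result<(), ()> {
    if succeed { Ok(()) } else { Err(()) }
}

fn main() {
    let mut resource = String::from("live allocation");
    {
        let guard = Guard { value: &mut resource, cleanup: |r: &mut String| r.clear() };

        if fallible_step(false).is_ok() {
            // Success path: disarm the guard so the cleanup never runs,
            // like `mem::forget(guard)` in `try_map`.
            mem::forget(guard);
        }
        // Failure path: `guard` drops here and releases the resource, just
        // as the guards in `try_map` drop the `RawRc` on panic or failure.
    }

    assert!(resource.is_empty());
}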
