@@ -2,17 +2,20 @@ use core::alloc::{AllocError, Allocator};
 use core::cell::UnsafeCell;
 use core::clone::CloneToUninit;
 use core::marker::PhantomData;
+use core::mem::DropGuard;
 #[cfg(not(no_global_oom_handling))]
-use core::mem::{self, DropGuard};
+use core::mem::{self, MaybeUninit, SizedTypeProperties};
 #[cfg(not(no_global_oom_handling))]
-use core::ops::DerefMut;
+use core::ops::{ControlFlow, DerefMut, Try};
 use core::ptr::NonNull;
 
 #[cfg(not(no_global_oom_handling))]
 use crate::raw_rc::MakeMutStrategy;
 #[cfg(not(no_global_oom_handling))]
 use crate::raw_rc::raw_weak;
 use crate::raw_rc::raw_weak::RawWeak;
+#[cfg(not(no_global_oom_handling))]
+use crate::raw_rc::rc_layout::RcLayoutExt;
 use crate::raw_rc::rc_value_pointer::RcValuePointer;
 use crate::raw_rc::{RefCounter, rc_alloc};
 
@@ -354,6 +357,196 @@ where
     }
 }
 
+impl<T, A> RawRc<T, A> {
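+    /// Writes `value` into `weak`'s value slot, then converts it into a strong `RawRc`.
+    ///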
+    /// # Safety
+    ///
+    /// `weak` must be non-dangling.
+    unsafe fn from_weak_with_value(weak: RawWeak<T, A>, value: T) -> Self {
+        unsafe {
+            weak.as_ptr().write(value);
+
+            Self::from_weak(weak)
+        }
+    }
+
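+    /// Attempts to allocate a reference-counted allocation for `value` in `alloc`, returning an
+    /// error if allocation fails.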
+    #[inline]
+    pub(crate) fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        RawWeak::try_new_uninit_in::<1>(alloc)
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
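+    /// Like `try_new_in`, but allocates with a default-constructed allocator.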
+    #[inline]
+    pub(crate) fn try_new(value: T) -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        RawWeak::try_new_uninit::<1>()
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
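+    /// Infallible version of `try_new_in`; allocation failure is handled by the global
+    /// allocation error handler.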
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new_in(value: T, alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<1>(alloc), value) }
+    }
+
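+    /// Like `new_in`, but allocates with a default-constructed allocator.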
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new(value: T) -> Self
+    where
+        A: Allocator + Default,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<1>(), value) }
+    }
+
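+    /// Allocates with a default-constructed allocator and initializes the value in place with
+    /// the result of `f`.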
+    #[cfg(not(no_global_oom_handling))]
+    fn new_with<F>(f: F) -> Self
+    where
+        A: Allocator + Default,
+        F: FnOnce() -> T,
+    {
+        let (ptr, alloc) = rc_alloc::allocate_with::<A, _, 1>(T::RC_LAYOUT, |ptr| unsafe {
+            ptr.as_ptr().cast().write(f())
+        });
+
+        unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }
+    }
+
+    /// Attempts to map the value in an `Rc`, reusing the allocation if possible.
+    ///
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn try_map<R, U>(
+        mut self,
+        f: impl FnOnce(&T) -> U,
+    ) -> ControlFlow<U::Residual, RawRc<U::Output, A>>
+    where
+        A: Allocator,
+        R: RefCounter,
+        U: Try,
+    {
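+        // The existing allocation can be reused only if the old and new value layouts match and
+        // `self` holds the only reference to it; otherwise a fresh allocation is made below.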
+        let result = if T::LAYOUT == U::Output::LAYOUT && unsafe { self.is_unique::<R>() } {
+            let value = unsafe { self.as_ptr().read() };
+            let mut allocation = unsafe { self.cast::<MaybeUninit<U::Output>>() };
+
+            // Drop `self` as a `RawRc<MaybeUninit<U::Output>, A>` if `f` panics or returns a
+            // failure value.
+            let guard = unsafe { new_rc_guard::<MaybeUninit<U::Output>, A, R>(&mut allocation) };
+
+            let mapped_value = f(&value).branch()?;
+
+            drop(value);
+            mem::forget(guard);
+
+            unsafe {
+                allocation.get_mut_unchecked().write(mapped_value);
+
+                allocation.cast()
+            }
+        } else {
+            // Drop `self` if `f` panics or returns a failure value.
+            let guard = unsafe { new_rc_guard::<T, A, R>(&mut self) };
+
+            let mapped_value = f(guard.as_ref()).branch()?;
+
+            drop(guard);
+
+            let alloc = self.into_raw_parts().1;
+
+            RawRc::new_in(mapped_value, alloc)
+        };
+
+        ControlFlow::Continue(result)
+    }
+
+    /// Maps the value in an `RawRc`, reusing the allocation if possible.
+    ///
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn map<R, U>(self, f: impl FnOnce(&T) -> U) -> RawRc<U, A>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
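+        // Adapt the infallible closure to the `Try`-based `try_map` by returning
+        // `ControlFlow<!, U>`; the never-type residual makes the break branch unreachable.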
+        fn wrap_fn<T, U>(f: impl FnOnce(&T) -> U) -> impl FnOnce(&T) -> ControlFlow<!, U> {
+            |x| ControlFlow::Continue(f(x))
+        }
+
+        let f = wrap_fn(f);
+
+        match unsafe { self.try_map::<R, _>(f) } {
+            ControlFlow::Continue(output) => output,
+        }
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn into_inner<R>(self) -> Option<T>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
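+        // Release one strong reference; the value can be moved out only if it was the last one.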
+        let is_last_strong_ref = unsafe { decrement_strong_ref_count::<R>(self.value_ptr()) };
+
+        is_last_strong_ref.then(|| unsafe { self.weak.assume_init_into_inner::<R>() })
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn try_unwrap<R>(self) -> Result<T, RawRc<T, A>>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        unsafe fn inner<R>(value_ptr: RcValuePointer) -> bool
+        where
+            R: RefCounter,
+        {
+            unsafe {
+                R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).try_lock_strong_count()
+            }
+        }
+
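+        // Locking the strong count succeeds only when this is the last strong reference, in
+        // which case the value can be moved out of the allocation.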
+        let is_last_strong_ref = unsafe { inner::<R>(self.value_ptr()) };
+
+        if is_last_strong_ref {
+            Ok(unsafe { self.weak.assume_init_into_inner::<R>() })
+        } else {
+            Err(self)
+        }
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn unwrap_or_clone<R>(self) -> T
+    where
+        T: Clone,
+        A: Allocator,
+        R: RefCounter,
+    {
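+        // Take the value directly if this is the last strong reference; otherwise clone it,
+        // using a drop guard to release this reference even if `T::clone` panics.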
+        // SAFETY: Caller guarantees `rc` will only be accessed with the same `RefCounter`
+        // implementation.
+        unsafe { self.try_unwrap::<R>() }.unwrap_or_else(|mut rc| {
+            let guard = unsafe { new_rc_guard::<T, A, R>(&mut rc) };
+
+            T::clone(guard.as_ref())
+        })
+    }
+}
+
 /// Decrements strong reference count in a reference-counted allocation with a value object that is
 /// pointed to by `value_ptr`.
 #[inline]
@@ -385,3 +578,15 @@ where
         R::is_unique(R::from_raw_counter(&ref_counts.strong), R::from_raw_counter(&ref_counts.weak))
     }
 }
+
+/// Returns a drop guard that calls `RawRc::drop::<R>()` on drop.
+unsafe fn new_rc_guard<'a, T, A, R>(
+    rc: &'a mut RawRc<T, A>,
+) -> DropGuard<&'a mut RawRc<T, A>, impl FnOnce(&'a mut RawRc<T, A>)>
+where
+    T: ?Sized,
+    A: Allocator,
+    R: RefCounter,
+{
+    DropGuard::new(rc, |rc| unsafe { rc.drop::<R>() })
+}