
Commit 87177cf

Add RawRc type

1 parent 8c8aee5

3 files changed: +420 -0 lines changed

library/alloc/src/raw_rc/mod.rs

Lines changed: 1 addition & 0 deletions
@@ -65,6 +65,7 @@ use core::cell::UnsafeCell;
 use core::mem;
 use core::sync::atomic::Atomic;
 
+mod raw_rc;
 mod raw_weak;
 mod rc_alloc;
 mod rc_layout;

library/alloc/src/raw_rc/raw_rc.rs

Lines changed: 387 additions & 0 deletions

@@ -0,0 +1,387 @@
use core::alloc::{AllocError, Allocator};
use core::cell::UnsafeCell;
use core::clone::CloneToUninit;
use core::marker::PhantomData;
#[cfg(not(no_global_oom_handling))]
use core::mem::{self, DropGuard};
#[cfg(not(no_global_oom_handling))]
use core::ops::DerefMut;
use core::ptr::NonNull;

#[cfg(not(no_global_oom_handling))]
use crate::raw_rc::MakeMutStrategy;
#[cfg(not(no_global_oom_handling))]
use crate::raw_rc::raw_weak;
use crate::raw_rc::raw_weak::RawWeak;
use crate::raw_rc::rc_value_pointer::RcValuePointer;
use crate::raw_rc::{RefCounter, rc_alloc};

/// Base implementation of a strong pointer. `RawRc` does not implement `Drop`; the user should call
/// `RawRc::drop` manually to destroy this object.
#[repr(transparent)]
pub(crate) struct RawRc<T, A>
where
    T: ?Sized,
{
    /// A `RawRc` is just a `RawWeak` plus a strong reference count that is owned by the `RawRc`
    /// object. The weak pointer is always non-dangling.
    weak: RawWeak<T, A>,

    // Defines the ownership of `T` for drop-check.
    _phantom_data: PhantomData<T>,
}
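
The `PhantomData<T>` field is what makes drop-check treat a `RawRc<T, A>` as an owner of a `T`; the `RawWeak` pointer alone carries no such ownership information. A minimal standalone analogue of this layout (the `OwningPtr` name is hypothetical, not part of this commit):

use core::marker::PhantomData;
use core::ptr::NonNull;

// Hypothetical analogue of `RawRc`'s layout: the raw pointer expresses no
// ownership by itself, so `PhantomData<T>` is added to tell drop-check that
// dropping an `OwningPtr<T>` may also drop a `T`.
struct OwningPtr<T: ?Sized> {
    ptr: NonNull<T>,
    _phantom_data: PhantomData<T>, // ownership of `T` for drop-check
}
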
impl<T, A> RawRc<T, A>
where
    T: ?Sized,
{
    #[cfg(not(no_global_oom_handling))]
    pub(crate) fn clone_from_ref_in(value: &T, alloc: A) -> Self
    where
        A: Allocator,
        T: CloneToUninit,
    {
        let ptr = rc_alloc::allocate_with_cloned_in::<T, A, 1>(value, &alloc);

        unsafe { Self::from_raw_parts(ptr, alloc) }
    }

    #[cfg(not(no_global_oom_handling))]
    pub(crate) fn clone_from_ref(value: &T) -> Self
    where
        A: Allocator + Default,
        T: CloneToUninit,
    {
        let (ptr, alloc) = rc_alloc::allocate_with_cloned::<T, A, 1>(value);

        unsafe { Self::from_raw_parts(ptr, alloc) }
    }

    pub(crate) fn try_clone_from_ref_in(value: &T, alloc: A) -> Result<Self, AllocError>
    where
        A: Allocator,
        T: CloneToUninit,
    {
        rc_alloc::try_allocate_with_cloned_in::<T, A, 1>(value, &alloc)
            .map(|ptr| unsafe { Self::from_raw_parts(ptr, alloc) })
    }

    pub(crate) fn try_clone_from_ref(value: &T) -> Result<Self, AllocError>
    where
        A: Allocator + Default,
        T: CloneToUninit,
    {
        rc_alloc::try_allocate_with_cloned::<T, A, 1>(value)
            .map(|(ptr, alloc)| unsafe { Self::from_raw_parts(ptr, alloc) })
    }
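
The four constructors above clone a (possibly unsized) value into a fresh reference-counted allocation, with infallible and fallible variants for both default and explicit allocators. In the public API this is the shape of conversions such as `Rc<str>: From<&str>`; a small demonstration using the stable `Rc` API rather than this internal type:

use std::rc::Rc;

fn main() {
    // Clone borrowed, possibly unsized values into new reference-counted
    // allocations, the public counterpart of `clone_from_ref`-style construction.
    let s: Rc<str> = Rc::from("hello");
    let v: Rc<[i32]> = Rc::from(&[1, 2, 3][..]);
    assert_eq!(&*s, "hello");
    assert_eq!(&*v, &[1, 2, 3][..]);
}
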

    /// # Safety
    ///
    /// - `ptr` points to a value inside a reference-counted allocation.
    /// - The allocation can be freed by `A::default()`.
    pub(crate) unsafe fn from_raw(ptr: NonNull<T>) -> Self
    where
        A: Default,
    {
        unsafe { Self::from_raw_parts(ptr, A::default()) }
    }

    /// # Safety
    ///
    /// - `ptr` points to a value inside a reference-counted allocation.
    /// - The allocation can be freed by `alloc`.
    pub(crate) unsafe fn from_raw_parts(ptr: NonNull<T>, alloc: A) -> Self {
        unsafe { Self::from_weak(RawWeak::from_raw_parts(ptr, alloc)) }
    }

    /// # Safety
    ///
    /// `weak` must have at least one unowned strong reference count. The newly created `RawRc`
    /// will take ownership of exactly one strong reference count.
    pub(super) unsafe fn from_weak(weak: RawWeak<T, A>) -> Self {
        Self { weak, _phantom_data: PhantomData }
    }

    pub(crate) fn allocator(&self) -> &A {
        self.weak.allocator()
    }

    pub(crate) const fn as_ptr(&self) -> NonNull<T> {
        self.weak.as_ptr()
    }

    const fn as_ref(&self) -> &T {
        unsafe { self.as_ptr().as_ref() }
    }

    pub(crate) unsafe fn cast<U>(self) -> RawRc<U, A> {
        unsafe { RawRc::from_weak(self.weak.cast()) }
    }

    #[inline]
    pub(crate) unsafe fn cast_with<U, F>(self, f: F) -> RawRc<U, A>
    where
        U: ?Sized,
        F: FnOnce(NonNull<T>) -> NonNull<U>,
    {
        unsafe { RawRc::from_weak(self.weak.cast_with(f)) }
    }

    #[inline]
    pub(crate) unsafe fn clone<R>(&self) -> Self
    where
        A: Clone,
        R: RefCounter,
    {
        unsafe {
            increment_strong_ref_count::<R>(self.value_ptr());

            Self::from_raw_parts(self.weak.as_ptr(), self.allocator().clone())
        }
    }

    pub(crate) unsafe fn decrement_strong_count<R>(ptr: NonNull<T>)
    where
        A: Allocator + Default,
        R: RefCounter,
    {
        unsafe { Self::decrement_strong_count_in::<R>(ptr, A::default()) };
    }

    pub(crate) unsafe fn decrement_strong_count_in<R>(ptr: NonNull<T>, alloc: A)
    where
        A: Allocator,
        R: RefCounter,
    {
        unsafe { RawRc::from_raw_parts(ptr, alloc).drop::<R>() };
    }

    pub(crate) unsafe fn increment_strong_count<R>(ptr: NonNull<T>)
    where
        R: RefCounter,
    {
        unsafe { increment_strong_ref_count::<R>(RcValuePointer::from_value_ptr(ptr.cast())) };
    }
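
`increment_strong_count` and the `decrement_strong_count*` pair adjust a strong count through a raw value pointer without keeping a `RawRc` around, which is the behavior the equivalent raw-pointer functions of the public API expose. For example, with the stable `Rc` counterparts:

use std::rc::Rc;

fn main() {
    let rc = Rc::new(5);
    let ptr = Rc::into_raw(rc); // strong count is 1, owned through `ptr`

    // SAFETY: `ptr` came from `Rc::into_raw` and the allocation is still live.
    unsafe {
        Rc::increment_strong_count(ptr); // strong count is now 2
        let a = Rc::from_raw(ptr);
        let b = Rc::from_raw(ptr); // each `from_raw` claims one strong count
        assert_eq!(Rc::strong_count(&a), 2);
        drop(a);
        drop(b); // last strong count gone: the allocation is freed
    }
}
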

    pub(crate) unsafe fn downgrade<R>(&self) -> RawWeak<T, A>
    where
        A: Clone,
        R: RefCounter,
    {
        unsafe fn inner<R>(value_ptr: RcValuePointer)
        where
            R: RefCounter,
        {
            unsafe {
                R::from_raw_counter(value_ptr.weak_count_ptr().as_ref()).downgrade_increment_weak();
            }
        }

        unsafe {
            inner::<R>(self.value_ptr());

            RawWeak::from_raw_parts(self.weak.as_ptr(), self.allocator().clone())
        }
    }

    #[inline]
    pub(crate) unsafe fn drop<R>(&mut self)
    where
        A: Allocator,
        R: RefCounter,
    {
        let is_last_strong_ref = unsafe { decrement_strong_ref_count::<R>(self.value_ptr()) };

        if is_last_strong_ref {
            unsafe { self.weak.assume_init_drop::<R>() }
        }
    }

    pub(crate) unsafe fn get_mut<R>(&mut self) -> Option<&mut T>
    where
        R: RefCounter,
    {
        unsafe fn inner<R>(value_ptr: RcValuePointer) -> Option<RcValuePointer>
        where
            R: RefCounter,
        {
            unsafe { is_unique::<R>(value_ptr) }.then_some(value_ptr)
        }

        let (ptr, metadata) = self.weak.as_ptr().to_raw_parts();

        unsafe { inner::<R>(RcValuePointer::from_value_ptr(ptr)) }
            .map(|ptr| unsafe { NonNull::from_raw_parts(ptr.as_ptr(), metadata).as_mut() })
    }
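
`get_mut` hands out a mutable borrow only when `is_unique` confirms that no other strong or weak reference exists, matching the documented behavior of the stable `Rc::get_mut`:

use std::rc::Rc;

fn main() {
    let mut a = Rc::new(1);
    assert!(Rc::get_mut(&mut a).is_some()); // unique: mutation allowed

    let b = Rc::clone(&a);
    assert!(Rc::get_mut(&mut a).is_none()); // another strong reference exists
    drop(b);

    let w = Rc::downgrade(&a);
    assert!(Rc::get_mut(&mut a).is_none()); // a weak reference also blocks it
    drop(w);

    assert!(Rc::get_mut(&mut a).is_some()); // unique again
}
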

    /// Returns a mutable reference to the contained value.
    ///
    /// # Safety
    ///
    /// No other active references to the contained value may exist, and no new references to the
    /// contained value may be acquired for the duration of the returned borrow.
    pub(crate) unsafe fn get_mut_unchecked(&mut self) -> &mut T {
        // SAFETY: The caller guarantees that we can access the contained value exclusively. Note
        // that we can't create mutable references that have access to the reference counters,
        // because the caller only guarantees exclusive access to the contained value, not the
        // reference counters.
        unsafe { self.weak.as_ptr().as_mut() }
    }

    pub(crate) fn into_raw(self) -> NonNull<T> {
        self.weak.into_raw()
    }

    pub(crate) fn into_raw_parts(self) -> (NonNull<T>, A) {
        self.weak.into_raw_parts()
    }

    #[cfg(not(no_global_oom_handling))]
    pub(crate) unsafe fn is_unique<R>(&self) -> bool
    where
        R: RefCounter,
    {
        unsafe { is_unique::<R>(self.value_ptr()) }
    }

    #[cfg(not(no_global_oom_handling))]
    pub(crate) unsafe fn make_mut<R>(&mut self) -> &mut T
    where
        T: CloneToUninit,
        A: Allocator + Clone,
        R: RefCounter,
    {
        /// Returns a drop guard that sets the pointer in `rc` to `ptr` on drop.
        ///
        /// # Safety
        ///
        /// - `ptr` must point to a valid reference-counted value that can be deallocated with
        ///   the allocator associated with `rc`.
        /// - The value pointed to by `ptr` must have an unowned strong reference count that
        ///   `rc` can take ownership of.
        unsafe fn set_rc_ptr_on_drop<'a, T, A>(
            rc: &'a mut RawRc<T, A>,
            ptr: NonNull<T>,
        ) -> impl DerefMut<Target = &'a mut RawRc<T, A>>
        where
            T: ?Sized,
        {
            DropGuard::new(rc, move |rc| unsafe { rc.weak.set_ptr(ptr) })
        }

        unsafe {
            let ref_counts = self.ref_counts();

            if let Some(strategy) = R::make_mut(
                R::from_raw_counter(&ref_counts.strong),
                R::from_raw_counter(&ref_counts.weak),
            ) {
                match strategy {
                    MakeMutStrategy::Move => {
                        // `R::make_mut` has set the strong reference count to zero, so the `RawRc`
                        // is essentially a `RawWeak` object whose value is initialized. This means
                        // we are the only owner of the value and can safely move it into a new
                        // allocation.

                        // `guard` ensures the old `RawRc` object is dropped even if the allocation
                        // panics.
                        let guard = raw_weak::new_weak_guard::<T, A, R>(&mut self.weak);

                        let new_ptr = rc_alloc::allocate_with_value_in_unchecked::<T, A, 1>(
                            guard.as_ptr().as_ref(),
                            guard.allocator(),
                        );

                        // No panic occurred; defuse the guard.
                        mem::forget(guard);

                        // Ensure the value pointer in `self` is updated to `new_ptr`.
                        let mut update_ptr_on_drop = set_rc_ptr_on_drop(self, new_ptr);

                        // `MakeMutStrategy::Move` guarantees that the strong count is zero, and we
                        // have moved the value into a new allocation, so the original `RawRc` is
                        // now essentially a `RawWeak` object; calling the `RawWeak` destructor
                        // finishes the cleanup.
                        update_ptr_on_drop.weak.drop_unchecked::<R>();
                    }
                    MakeMutStrategy::Clone => {
                        // There are multiple owners of the value, so we need to clone the value
                        // into a new allocation.

                        let new_ptr = rc_alloc::allocate_with_cloned_in::<T, A, 1>(
                            self.as_ref(),
                            self.allocator(),
                        );

                        // Ensure the value pointer in `self` is updated to `new_ptr`.
                        let mut update_ptr_on_drop = set_rc_ptr_on_drop(self, new_ptr);

                        // Manually drop the old `RawRc`.
                        update_ptr_on_drop.drop::<R>();
                    }
                }
            }

            self.get_mut_unchecked()
        }
    }
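
The two `MakeMutStrategy` arms match the observable behavior of the stable `Rc::make_mut`: with another strong owner the value is cloned into a new allocation, and with only weak references left it is moved into a new allocation and the weak references are disassociated:

use std::rc::Rc;

fn main() {
    // Clone path: another strong owner exists, so `make_mut` clones.
    let mut a = Rc::new(String::from("hi"));
    let b = Rc::clone(&a);
    Rc::make_mut(&mut a).push_str(" there");
    assert_eq!(*a, "hi there");
    assert_eq!(*b, "hi"); // `b` keeps the original value

    // Move path: only weak references remain, so the value is moved into a
    // fresh allocation and the weak references can no longer upgrade.
    let w = Rc::downgrade(&b);
    let mut c = b;
    Rc::make_mut(&mut c);
    assert!(w.upgrade().is_none());
    assert_eq!(*c, "hi");
}
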

    pub(crate) fn ptr_eq(&self, other: &Self) -> bool {
        RawWeak::ptr_eq(&self.weak, &other.weak)
    }

    pub(crate) fn ptr_ne(&self, other: &Self) -> bool {
        RawWeak::ptr_ne(&self.weak, &other.weak)
    }

    #[cfg(not(no_global_oom_handling))]
    pub(crate) fn ref_counts(&self) -> &crate::raw_rc::RefCounts {
        unsafe { self.weak.ref_counts_unchecked() }
    }

    pub(crate) fn strong_count(&self) -> &UnsafeCell<usize> {
        unsafe { self.weak.strong_count_unchecked() }
    }

    pub(crate) fn weak_count(&self) -> &UnsafeCell<usize> {
        unsafe { self.weak.weak_count_unchecked() }
    }

    #[inline]
    fn value_ptr(&self) -> RcValuePointer {
        // SAFETY: `self.weak` is guaranteed to be non-dangling.
        unsafe { self.weak.value_ptr_unchecked() }
    }
}

/// Decrements the strong reference count in the reference-counted allocation whose value object is
/// pointed to by `value_ptr`. Returns whether the decremented count reached zero.
#[inline]
unsafe fn decrement_strong_ref_count<R>(value_ptr: RcValuePointer) -> bool
where
    R: RefCounter,
{
    unsafe { R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).decrement() }
}

/// Increments the strong reference count in the reference-counted allocation whose value object is
/// pointed to by `value_ptr`.
#[inline]
unsafe fn increment_strong_ref_count<R>(value_ptr: RcValuePointer)
where
    R: RefCounter,
{
    unsafe { R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).increment() };
}

#[inline]
unsafe fn is_unique<R>(value_ptr: RcValuePointer) -> bool
where
    R: RefCounter,
{
    let ref_counts = unsafe { value_ptr.ref_counts_ptr().as_ref() };

    unsafe {
        R::is_unique(R::from_raw_counter(&ref_counts.strong), R::from_raw_counter(&ref_counts.weak))
    }
}
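
One design note on `make_mut` above: the `DropGuard` plus `mem::forget` sequence is the standard defuse pattern for panic safety (`DropGuard` is an unstable `core::mem` helper). A self-contained sketch of the same pattern with a hand-rolled guard:

use std::mem;

// Minimal guard analogue: runs a cleanup closure on drop unless defused.
struct Guard<F: FnMut()>(F);

impl<F: FnMut()> Drop for Guard<F> {
    fn drop(&mut self) {
        (self.0)(); // cleanup runs on unwind or early return
    }
}

fn main() {
    let guard = Guard(|| println!("cleanup: runs only if the work panics"));

    // ... fallible work here, e.g. an allocation that may panic ...

    // Success path: defuse the guard so the cleanup is skipped.
    mem::forget(guard);
}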
