
Commit 6595252

Add RawWeak type
1 parent 412708a commit 6595252


2 files changed: +348 -0 lines changed


library/alloc/src/raw_rc/mod.rs

Lines changed: 1 addition & 0 deletions
@@ -65,6 +65,7 @@ use core::cell::UnsafeCell;
 use core::mem;
 use core::sync::atomic::Atomic;

+mod raw_weak;
 mod rc_alloc;
 mod rc_layout;
 mod rc_value_pointer;
library/alloc/src/raw_rc/raw_weak.rs

Lines changed: 347 additions & 0 deletions

@@ -0,0 +1,347 @@
use core::alloc::Allocator;
use core::cell::UnsafeCell;
use core::mem::{self, DropGuard};
use core::num::NonZeroUsize;
use core::ptr::{self, NonNull};

use crate::raw_rc::rc_layout::RcLayout;
use crate::raw_rc::rc_value_pointer::RcValuePointer;
use crate::raw_rc::{RefCounter, RefCounts, rc_alloc};

/// Base implementation of a weak pointer. `RawWeak` does not implement `Drop`; the user should call
/// `RawWeak::drop` or `RawWeak::drop_unchecked` manually to destroy this object.
///
/// A `RawWeak` can be either dangling or non-dangling. A dangling `RawWeak` does not point to a
/// valid value. A non-dangling `RawWeak` points to a valid reference-counted allocation. The value
/// pointed to by a `RawWeak` may be uninitialized.
pub(crate) struct RawWeak<T, A>
where
    T: ?Sized,
{
    /// Points to a (possibly uninitialized or dropped) `T` value inside of a reference-counted
    /// allocation.
    ptr: NonNull<T>,

    /// The allocator for `ptr`.
    alloc: A,
}

impl<T, A> RawWeak<T, A>
where
    T: ?Sized,
{
    pub(crate) const unsafe fn from_raw_parts(ptr: NonNull<T>, alloc: A) -> Self {
        Self { ptr, alloc }
    }

    pub(crate) unsafe fn from_raw(ptr: NonNull<T>) -> Self
    where
        A: Default,
    {
        unsafe { Self::from_raw_parts(ptr, A::default()) }
    }

    pub(crate) fn allocator(&self) -> &A {
        &self.alloc
    }

    pub(crate) fn as_ptr(&self) -> NonNull<T> {
        self.ptr
    }

    #[inline(never)]
    unsafe fn assume_init_drop_slow<R>(&mut self)
    where
        A: Allocator,
        R: RefCounter,
    {
        let guard = unsafe { new_weak_guard::<T, A, R>(self) };

        unsafe { guard.ptr.drop_in_place() };
    }

    /// Drops the value along with the `RawWeak` object, assuming the value pointed to by `ptr` is
    /// initialized.
    #[inline]
    pub(super) unsafe fn assume_init_drop<R>(&mut self)
    where
        A: Allocator,
        R: RefCounter,
    {
        if const { mem::needs_drop::<T>() } {
            unsafe { self.assume_init_drop_slow::<R>() };
        } else {
            unsafe { self.drop_unchecked::<R>() };
        }
    }

    pub(crate) unsafe fn cast<U>(self) -> RawWeak<U, A> {
        unsafe { self.cast_with(NonNull::cast) }
    }

    #[inline]
    pub(crate) unsafe fn cast_with<U, F>(self, f: F) -> RawWeak<U, A>
    where
        U: ?Sized,
        F: FnOnce(NonNull<T>) -> NonNull<U>,
    {
        unsafe { RawWeak::from_raw_parts(f(self.ptr), self.alloc) }
    }

    /// Increments the weak count, and returns the corresponding `RawWeak` object.
    ///
    /// # Safety
    ///
    /// - `self` must only be handled by the same `RefCounter` implementation.
    #[inline]
    pub(crate) unsafe fn clone<R>(&self) -> Self
    where
        A: Clone,
        R: RefCounter,
    {
        // The inner function is generic only over `R` (not `T` or `A`) to reduce monomorphization
        // cost.
        unsafe fn inner<R>(ptr: NonNull<()>)
        where
            R: RefCounter,
        {
            if let Some(value_ptr) = unsafe { try_get_rc_value_ptr(ptr) } {
                unsafe { increment_weak_ref_count::<R>(value_ptr) }
            }
        }

        unsafe {
            inner::<R>(self.ptr.cast());

            Self::from_raw_parts(self.ptr, self.alloc.clone())
        }
    }

    /// Increments the weak count, and returns the corresponding `RawWeak` object, assuming `self`
    /// is non-dangling.
    ///
    /// # Safety
    ///
    /// - `self` must only be handled by the same `RefCounter` implementation.
    /// - `self` is non-dangling.
    pub(crate) unsafe fn clone_unchecked<R>(&self) -> Self
    where
        A: Clone,
        R: RefCounter,
    {
        unsafe {
            increment_weak_ref_count::<R>(self.value_ptr_unchecked());

            Self::from_raw_parts(self.ptr, self.alloc.clone())
        }
    }

    /// Drops this weak pointer.
    #[inline]
    pub(crate) unsafe fn drop<R>(&mut self)
    where
        A: Allocator,
        R: RefCounter,
    {
        if !is_dangling(self.ptr.cast()) {
            unsafe { self.drop_unchecked::<R>() };
        }
    }

    /// Drops this weak pointer, assuming `self` is non-dangling.
    ///
    /// # Safety
    ///
    /// `self` is non-dangling.
    #[inline]
    pub(super) unsafe fn drop_unchecked<R>(&mut self)
    where
        A: Allocator,
        R: RefCounter,
    {
        // SAFETY: Caller guarantees `self` is non-dangling, so `self.ptr` must point to the value
        // location in a valid reference-counted allocation.
        let value_ptr = unsafe { self.value_ptr_unchecked() };

        let is_last_weak_ref = unsafe { decrement_weak_ref_count::<R>(value_ptr) };

        if is_last_weak_ref {
            let rc_layout = unsafe { RcLayout::from_value_ptr_unchecked(self.ptr) };

            unsafe { rc_alloc::deallocate::<A>(value_ptr, &self.alloc, rc_layout) }
        }
    }

    pub(crate) fn into_raw(self) -> NonNull<T> {
        self.ptr
    }

    pub(crate) fn into_raw_parts(self) -> (NonNull<T>, A) {
        (self.ptr, self.alloc)
    }

    pub(crate) fn ptr_eq(&self, other: &Self) -> bool {
        ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
    }

    pub(crate) fn ptr_ne(&self, other: &Self) -> bool {
        !ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
    }

    /// Returns the `RefCounts` object inside the reference-counted allocation, assuming `self` is
    /// non-dangling.
    ///
    /// # Safety
    ///
    /// `self` is non-dangling.
    #[cfg(not(no_global_oom_handling))]
    pub(super) unsafe fn ref_counts_unchecked(&self) -> &RefCounts {
        unsafe { self.value_ptr_unchecked().ref_counts_ptr().as_ref() }
    }

    /// Returns the strong reference count object inside the reference-counted allocation if `self`
    /// is non-dangling.
    pub(crate) fn strong_count(&self) -> Option<&UnsafeCell<usize>> {
        (!is_dangling(self.ptr.cast())).then(|| unsafe { self.strong_count_unchecked() })
    }

    /// Returns the strong reference count object inside the reference-counted allocation, assuming
    /// `self` is non-dangling.
    ///
    /// # Safety
    ///
    /// `self` is non-dangling.
    pub(super) unsafe fn strong_count_unchecked(&self) -> &UnsafeCell<usize> {
        unsafe { self.value_ptr_unchecked().strong_count_ptr().as_ref() }
    }

    /// Returns the weak reference count object inside the reference-counted allocation if `self` is
    /// non-dangling.
    pub(crate) fn weak_count(&self) -> Option<&UnsafeCell<usize>> {
        (!is_dangling(self.ptr.cast())).then(|| unsafe { self.weak_count_unchecked() })
    }

    /// Returns the weak reference count object inside the reference-counted allocation, assuming
    /// `self` is non-dangling.
    ///
    /// # Safety
    ///
    /// `self` is non-dangling.
    pub(super) unsafe fn weak_count_unchecked(&self) -> &UnsafeCell<usize> {
        unsafe { self.value_ptr_unchecked().weak_count_ptr().as_ref() }
    }

    /// Sets the contained pointer to a new value.
    ///
    /// # Safety
    ///
    /// - `ptr` must be a valid pointer to a value object that lives in a reference-counted
    ///   allocation.
    /// - The allocation can be deallocated with the associated allocator.
    #[cfg(not(no_global_oom_handling))]
    pub(super) unsafe fn set_ptr(&mut self, ptr: NonNull<T>) {
        self.ptr = ptr;
    }

    /// Returns a pointer to the value location of the reference-counted allocation, assuming
    /// `self` is non-dangling.
    ///
    /// # Safety
    ///
    /// `self` is non-dangling.
    #[inline]
    pub(super) unsafe fn value_ptr_unchecked(&self) -> RcValuePointer {
        // SAFETY: Caller guarantees `self` is non-dangling, so `self.ptr` must point to the value
        // location in a valid reference-counted allocation.
        unsafe { RcValuePointer::from_value_ptr(self.ptr.cast()) }
    }
}

// We choose `NonZeroUsize::MAX` as the address for dangling weak pointers because:
//
// - It does not point to any object that is stored inside a reference-counted allocation, because
//   otherwise the corresponding `RefCounts` object would be placed at
//   `NonZeroUsize::MAX - size_of::<RefCounts>()`, which is an odd address that violates
//   `RefCounts`'s alignment requirement.
// - All bytes in the byte representation of `NonZeroUsize::MAX` are the same, which makes it
//   possible to utilize `memset` in certain situations like creating an array of dangling weak
//   pointers.
const DANGLING_WEAK_ADDRESS: NonZeroUsize = {
    let address = NonZeroUsize::MAX;

    // Verifies that `address` cannot be a valid address in a reference-counted allocation, so it
    // can be safely used as the dangling pointer address.
    assert!(address.get().wrapping_sub(size_of::<RefCounts>()) % align_of::<RefCounts>() != 0);

    address
};
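
// Illustrative note on the check above: `NonZeroUsize::MAX` is odd, while `size_of::<RefCounts>()`
// is even because `RefCounts` stores `usize` counters, so the subtraction always yields an odd
// address, and an odd address cannot satisfy `align_of::<RefCounts>()` on any target where that
// alignment is at least 2. Since `DANGLING_WEAK_ADDRESS` is a `const` item, the `assert!` is
// evaluated at compile time, so a target that broke this assumption would fail to build rather
// than misbehave at runtime.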

#[inline]
fn is_dangling(value_ptr: NonNull<()>) -> bool {
    value_ptr.addr() == DANGLING_WEAK_ADDRESS
}

/// # Safety
///
/// Either `is_dangling(dangling_or_value_ptr)`, or `dangling_or_value_ptr` has a valid address for
/// the value location of a reference-counted allocation.
#[inline]
unsafe fn try_get_rc_value_ptr(dangling_or_value_ptr: NonNull<()>) -> Option<RcValuePointer> {
    if is_dangling(dangling_or_value_ptr) {
        None
    } else {
        // SAFETY: We have checked that `dangling_or_value_ptr` is not dangling, and the caller
        // guarantees its validity.
        Some(unsafe { RcValuePointer::from_value_ptr(dangling_or_value_ptr) })
    }
}

/// Decrements the weak reference count in the reference-counted allocation whose value object is
/// pointed to by `value_ptr`. Returns whether this was the last weak reference.
///
/// # Safety
///
/// - `value_ptr` must point to the value location within a valid reference-counted allocation.
/// - The corresponding weak count must not be zero.
#[inline]
unsafe fn decrement_weak_ref_count<R>(value_ptr: RcValuePointer) -> bool
where
    R: RefCounter,
{
    unsafe { R::from_raw_counter(value_ptr.weak_count_ptr().as_ref()) }.decrement()
}

/// Increments the weak reference count in the reference-counted allocation whose value object is
/// pointed to by `value_ptr`.
///
/// # Safety
///
/// `value_ptr` must point to the value location within a valid reference-counted allocation.
#[inline]
unsafe fn increment_weak_ref_count<R>(value_ptr: RcValuePointer)
where
    R: RefCounter,
{
    unsafe { R::from_raw_counter(value_ptr.weak_count_ptr().as_ref()) }.increment()
}

/// Creates a drop guard that calls `RawWeak::drop_unchecked` on drop.
///
/// # Safety
///
/// - `weak` is non-dangling.
/// - After the returned `DropGuard` has been dropped, the allocation pointed to by the weak
///   pointer must not be accessed anymore.
/// - All accesses to `weak` must use the same `R` for `RefCounter`.
pub(super) unsafe fn new_weak_guard<'a, T, A, R>(
    weak: &'a mut RawWeak<T, A>,
) -> DropGuard<&'a mut RawWeak<T, A>, impl FnOnce(&'a mut RawWeak<T, A>)>
where
    T: ?Sized,
    A: Allocator,
    R: RefCounter,
{
    // SAFETY: Caller guarantees that `weak` is non-dangling and the corresponding allocation will
    // not be accessed after dropping.
    DropGuard::new(weak, |weak| unsafe { weak.drop_unchecked::<R>() })
}
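
The doc comments above describe two ideas that generalize beyond this crate: a weak pointer type that does not implement `Drop` and must be destroyed by an explicit call, and a sentinel address that marks a dangling pointer so no allocation is ever touched for it. Below is a minimal, self-contained sketch of that pattern; it is not the library code, names such as `MiniWeak`, `DANGLING`, and `drop_manually` are hypothetical, and it assumes a recent Rust toolchain with the stabilized strict-provenance `NonNull` APIs.

use std::num::NonZeroUsize;
use std::ptr::NonNull;

/// A stripped-down stand-in for a weak pointer that must be dropped manually.
struct MiniWeak<T> {
    ptr: NonNull<T>,
}

/// Sentinel address for dangling weak pointers, mirroring `DANGLING_WEAK_ADDRESS`.
const DANGLING: NonZeroUsize = NonZeroUsize::MAX;

impl<T> MiniWeak<T> {
    /// Creates a weak pointer that refers to no allocation.
    fn new_dangling() -> Self {
        Self { ptr: NonNull::without_provenance(DANGLING) }
    }

    /// A dangling weak pointer is recognized purely by its address.
    fn is_dangling(&self) -> bool {
        self.ptr.addr() == DANGLING
    }

    /// Manual destruction, analogous to `RawWeak::drop`: the allocation is only touched when the
    /// pointer is non-dangling.
    fn drop_manually(self) {
        if !self.is_dangling() {
            // A real implementation would decrement the weak count here and deallocate the
            // reference-counted allocation once the count reaches zero.
        }
    }
}

fn main() {
    let weak: MiniWeak<u32> = MiniWeak::new_dangling();
    assert!(weak.is_dangling());
    weak.drop_manually();
}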
