-use core::alloc::Allocator;
+use core::alloc::{AllocError, Allocator};
 use core::cell::UnsafeCell;
 use core::mem::{self, DropGuard};
 use core::num::NonZeroUsize;
 use core::ptr::{self, NonNull};
 
-use crate::raw_rc::rc_layout::RcLayout;
+use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt};
 use crate::raw_rc::rc_value_pointer::RcValuePointer;
 use crate::raw_rc::{RefCounter, RefCounts, rc_alloc};
 
@@ -256,6 +256,123 @@ where
     }
 }
 
+impl<T, A> RawWeak<T, A> {
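+    /// Creates a dangling `RawWeak` with a default allocator, without allocating.
+    /// A dangling weak reference points to no allocation and can never be upgraded.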
+    pub(crate) fn new_dangling() -> Self
+    where
+        A: Default,
+    {
+        Self::new_dangling_in(A::default())
+    }
+
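+    /// Creates a dangling `RawWeak` with the provided allocator, without allocating.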
+    pub(crate) const fn new_dangling_in(alloc: A) -> Self {
+        unsafe { Self::from_raw_parts(NonNull::without_provenance(DANGLING_WEAK_ADDRESS), alloc) }
+    }
+
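+    /// Attempts to allocate, with a default allocator, a reference-counted buffer for an
+    /// uninitialized `T`, with the strong count initialized to `STRONG_COUNT`. Returns
+    /// `AllocError` if the allocation fails.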
+    pub(crate) fn try_new_uninit<const STRONG_COUNT: usize>() -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        rc_alloc::try_allocate_uninit::<A, STRONG_COUNT>(T::RC_LAYOUT)
+            .map(|(ptr, alloc)| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) })
+    }
+
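+    /// Like `try_new_uninit`, but allocates with the provided allocator.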
+    pub(crate) fn try_new_uninit_in<const STRONG_COUNT: usize>(alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        rc_alloc::try_allocate_uninit_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT)
+            .map(|ptr| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) })
+    }
+
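+    /// Attempts to allocate, with a default allocator, a zero-initialized reference-counted
+    /// buffer for a `T`, with the strong count initialized to `STRONG_COUNT`.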
+    pub(crate) fn try_new_zeroed<const STRONG_COUNT: usize>() -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        rc_alloc::try_allocate_zeroed::<A, STRONG_COUNT>(T::RC_LAYOUT)
+            .map(|(ptr, alloc)| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) })
+    }
+
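+    /// Like `try_new_zeroed`, but allocates with the provided allocator.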
+    pub(crate) fn try_new_zeroed_in<const STRONG_COUNT: usize>(alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        rc_alloc::try_allocate_zeroed_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT)
+            .map(|ptr| unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) })
+    }
+
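+    /// Infallible variant of `try_new_uninit`; allocation failure is handled through the
+    /// global allocation error path instead of being returned to the caller.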
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit<const STRONG_COUNT: usize>() -> Self
+    where
+        A: Allocator + Default,
+    {
+        let (ptr, alloc) = rc_alloc::allocate_uninit::<A, STRONG_COUNT>(T::RC_LAYOUT);
+
+        unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }
+    }
+
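+    /// Infallible variant of `try_new_uninit_in`.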
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_uninit_in<const STRONG_COUNT: usize>(alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe {
+            Self::from_raw_parts(
+                rc_alloc::allocate_uninit_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT)
+                    .as_ptr()
+                    .cast(),
+                alloc,
+            )
+        }
+    }
+
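+    /// Infallible variant of `try_new_zeroed`.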
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed<const STRONG_COUNT: usize>() -> Self
+    where
+        A: Allocator + Default,
+    {
+        let (ptr, alloc) = rc_alloc::allocate_zeroed::<A, STRONG_COUNT>(T::RC_LAYOUT);
+
+        unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }
+    }
+
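+    /// Infallible variant of `try_new_zeroed_in`.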
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) fn new_zeroed_in<const STRONG_COUNT: usize>(alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe {
+            Self::from_raw_parts(
+                rc_alloc::allocate_zeroed_in::<A, STRONG_COUNT>(&alloc, T::RC_LAYOUT)
+                    .as_ptr()
+                    .cast(),
+                alloc,
+            )
+        }
+    }
+
+    /// Consumes the `RawWeak` object and returns the contained value, assuming the value is
+    /// initialized.
+    ///
+    /// # Safety
+    ///
+    /// - `self` is non-dangling.
+    /// - The value pointed to by `self` is initialized.
+    /// - The strong reference count is zero.
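+    ///
+    /// A sketch of the expected call pattern (the variable names are hypothetical
+    /// and this is not a doctest, since the API is crate-internal):
+    ///
+    /// ```ignore (illustrative sketch only)
+    /// // Safety: the strong count is zero, `weak` is non-dangling, and the
+    /// // value it points to is initialized.
+    /// let value = unsafe { weak.assume_init_into_inner::<R>() };
+    /// // `weak` is consumed and its allocation has been released.
+    /// ```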
+    pub(super) unsafe fn assume_init_into_inner<R>(mut self) -> T
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        unsafe {
+            let result = self.ptr.read();
+
+            self.drop_unchecked::<R>();
+
+            result
+        }
+    }
+}
+
 // We choose `NonZeroUsize::MAX` as the address for dangling weak pointers because:
 //
 // - It does not point to any object that is stored inside a reference-counted allocation. Because