
Commit 577d275

[create-pull-request] automated change
1 parent 758c70f commit 577d275

File tree

1 file changed: +93 / -39 lines changed
  • verifast-proofs/alloc/raw_vec/mod.rs/verified


verifast-proofs/alloc/raw_vec/mod.rs/verified/raw_vec.rs

Lines changed: 93 additions & 39 deletions
@@ -48,8 +48,7 @@ lem mul_zero(x: i32, y: i32)
 // ensure that the code generation related to these panics is minimal as there's
 // only one location which panics rather than a bunch throughout the module.
 #[cfg(not(no_global_oom_handling))]
-#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
-#[track_caller]
+#[cfg_attr(not(panic = "immediate-abort"), inline(never))]
 fn capacity_overflow() -> !
 //@ req true;
 //@ ens false;
@@ -433,7 +432,6 @@ impl<T> RawVec<T, Global> {
 #[cfg(not(any(no_global_oom_handling, test)))]
 #[must_use]
 #[inline]
-#[track_caller]
 pub(crate) fn with_capacity(capacity: usize) -> Self {
 Self { inner: RawVecInner::with_capacity(capacity, T::LAYOUT), _marker: PhantomData }
 }
@@ -442,7 +440,6 @@ impl<T> RawVec<T, Global> {
 #[cfg(not(any(no_global_oom_handling, test)))]
 #[must_use]
 #[inline]
-#[track_caller]
 pub(crate) fn with_capacity_zeroed(capacity: usize) -> Self {
 Self {
 inner: RawVecInner::with_capacity_zeroed_in(capacity, Global, T::LAYOUT),
@@ -455,7 +452,6 @@ impl RawVecInner<Global> {
 #[cfg(not(any(no_global_oom_handling, test)))]
 #[must_use]
 #[inline]
-#[track_caller]
 fn with_capacity(capacity: usize, elem_layout: Layout) -> Self {
 match Self::try_allocate_in(capacity, AllocInit::Uninitialized, Global, elem_layout) {
 Ok(res) => res,
@@ -487,6 +483,8 @@ impl<T, A: Allocator> RawVec<T, A> {
 #[cfg(not(no_global_oom_handling))]
 pub(crate) const MIN_NON_ZERO_CAP: usize = min_non_zero_cap(size_of::<T>());

+// Check assumption made in `current_memory`
+const { assert!(T::LAYOUT.size() % T::LAYOUT.align() == 0) };
 /// Like `new`, but parameterized over the choice of allocator for
 /// the returned `RawVec`.
 #[inline]
@@ -516,7 +514,6 @@ impl<T, A: Allocator> RawVec<T, A> {
 /// allocator for the returned `RawVec`.
 #[cfg(not(no_global_oom_handling))]
 #[inline]
-#[track_caller]
 pub(crate) fn with_capacity_in(capacity: usize, alloc: A) -> Self
 //@ req thread_token(?t) &*& Allocator(t, alloc, ?alloc_id) &*& t == currentThread;
 //@ ens thread_token(t) &*& RawVec(t, result, alloc_id, ?ptr, ?capacity_) &*& array_at_lft_(alloc_id.lft, ptr, capacity_, _) &*& capacity <= capacity_;
@@ -553,7 +550,6 @@ impl<T, A: Allocator> RawVec<T, A> {
 /// of allocator for the returned `RawVec`.
 #[cfg(not(no_global_oom_handling))]
 #[inline]
-#[track_caller]
 pub(crate) fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
 Self {
 inner: RawVecInner::with_capacity_zeroed_in(capacity, alloc, T::LAYOUT),
@@ -791,18 +787,18 @@ impl<T, A: Allocator> RawVec<T, A> {
 /// Aborts on OOM.
 #[cfg(not(no_global_oom_handling))]
 #[inline]
-#[track_caller]
 pub(crate) fn reserve(&mut self, len: usize, additional: usize) {
-self.inner.reserve(len, additional, T::LAYOUT)
+// SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+unsafe { self.inner.reserve(len, additional, T::LAYOUT) }
 }

 /// A specialized version of `self.reserve(len, 1)` which requires the
 /// caller to ensure `len == self.capacity()`.
 #[cfg(not(no_global_oom_handling))]
 #[inline(never)]
-#[track_caller]
 pub(crate) fn grow_one(&mut self) {
-self.inner.grow_one(T::LAYOUT)
+// SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+unsafe { self.inner.grow_one(T::LAYOUT) }
 }

 /// The same as `reserve`, but returns on errors instead of panicking or aborting.
@@ -888,9 +884,9 @@ impl<T, A: Allocator> RawVec<T, A> {
 ///
 /// Aborts on OOM.
 #[cfg(not(no_global_oom_handling))]
-#[track_caller]
 pub(crate) fn reserve_exact(&mut self, len: usize, additional: usize) {
-self.inner.reserve_exact(len, additional, T::LAYOUT)
+// SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+unsafe { self.inner.reserve_exact(len, additional, T::LAYOUT) }
 }

 /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
@@ -969,10 +965,10 @@ impl<T, A: Allocator> RawVec<T, A> {
 ///
 /// Aborts on OOM.
 #[cfg(not(no_global_oom_handling))]
-#[track_caller]
 #[inline]
 pub(crate) fn shrink_to_fit(&mut self, cap: usize) {
-self.inner.shrink_to_fit(cap, T::LAYOUT)
+// SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+unsafe { self.inner.shrink_to_fit(cap, T::LAYOUT) }
 }
 }

@@ -1015,7 +1011,6 @@ impl<A: Allocator> RawVecInner<A> {

 #[cfg(not(no_global_oom_handling))]
 #[inline]
-#[track_caller]
 fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self
 //@ req thread_token(?t) &*& Allocator(t, alloc, ?alloc_id) &*& t == currentThread &*& Layout::size_(elem_layout) % Layout::align_(elem_layout) == 0;
 //@ ens thread_token(t) &*& RawVecInner(t, result, elem_layout, alloc_id, ?ptr, ?capacity_) &*& array_at_lft_(alloc_id.lft, ptr, Layout::size_(elem_layout) * capacity_, _) &*& capacity <= capacity_;
@@ -1071,7 +1066,6 @@ impl<A: Allocator> RawVecInner<A> {

 #[cfg(not(no_global_oom_handling))]
 #[inline]
-#[track_caller]
 fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
 match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc, elem_layout) {
 Ok(res) => res,
@@ -1147,6 +1141,10 @@ impl<A: Allocator> RawVecInner<A> {
 }
 //@ end_lifetime(k);
 //@ std::alloc::end_ref_Allocator_at_lifetime::<A>();
+/// # Safety
+/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+/// initially construct `self`
+/// - `elem_layout`'s size must be a multiple of its alignment
 r
 }
 #[cfg(not(no_global_oom_handling))]
@@ -1164,6 +1162,10 @@ impl<A: Allocator> RawVecInner<A> {
 //@ std::alloc::end_ref_Allocator_at_lifetime::<A>();
 r
 }
+/// # Safety
+/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+/// initially construct `self`
+/// - `elem_layout`'s size must be a multiple of its alignment
 };
 let ptr = match result {
 Ok(ptr) => ptr,
@@ -1348,39 +1350,49 @@ impl<A: Allocator> RawVecInner<A> {

 #[cfg(not(no_global_oom_handling))]
 #[inline]
-#[track_caller]
-fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) {
+unsafe fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) {
 // Callers expect this function to be very cheap when there is already sufficient capacity.
 // Therefore, we move all the resizing and error-handling logic from grow_amortized and
 // handle_reserve behind a call, while making sure that this function is likely to be
 // inlined as just a comparison and a call if the comparison fails.
 #[cold]
-fn do_reserve_and_handle<A: Allocator>(
+unsafe fn do_reserve_and_handle<A: Allocator>(
 slf: &mut RawVecInner<A>,
 len: usize,
 additional: usize,
 elem_layout: Layout,
 ) {
-if let Err(err) = slf.grow_amortized(len, additional, elem_layout) {
+// SAFETY: Precondition passed to caller
+if let Err(err) = unsafe { slf.grow_amortized(len, additional, elem_layout) } {
 handle_error(err);
 }
 }

 if self.needs_to_grow(len, additional, elem_layout) {
-do_reserve_and_handle(self, len, additional, elem_layout);
+unsafe {
+do_reserve_and_handle(self, len, additional, elem_layout);
+}
 }
 }

+/// # Safety
+/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+/// initially construct `self`
+/// - `elem_layout`'s size must be a multiple of its alignment
 #[cfg(not(no_global_oom_handling))]
 #[inline]
-#[track_caller]
-fn grow_one(&mut self, elem_layout: Layout) {
-if let Err(err) = self.grow_amortized(self.cap.as_inner(), 1, elem_layout) {
+unsafe fn grow_one(&mut self, elem_layout: Layout) {
+// SAFETY: Precondition passed to caller
+if let Err(err) = unsafe { self.grow_amortized(self.cap.as_inner(), 1, elem_layout) } {
 handle_error(err);
 }
 }

-fn try_reserve(
+/// # Safety
+/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+/// initially construct `self`
+/// - `elem_layout`'s size must be a multiple of its alignment
+unsafe fn try_reserve(
 &mut self,
 len: usize,
 additional: usize,
@@ -1419,7 +1431,10 @@ impl<A: Allocator> RawVecInner<A> {
 //@ end_share_RawVecInner(self);

 if needs_to_grow {
-self.grow_amortized(len, additional, elem_layout)?;
+// SAFETY: Precondition passed to caller
+unsafe {
+self.grow_amortized(len, additional, elem_layout)?;
+}
 }
 unsafe {
 //@ let k2 = begin_lifetime();
@@ -1441,15 +1456,23 @@ impl<A: Allocator> RawVecInner<A> {
 Ok(())
 }

+/// # Safety
+/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+/// initially construct `self`
+/// - `elem_layout`'s size must be a multiple of its alignment
 #[cfg(not(no_global_oom_handling))]
-#[track_caller]
-fn reserve_exact(&mut self, len: usize, additional: usize, elem_layout: Layout) {
-if let Err(err) = self.try_reserve_exact(len, additional, elem_layout) {
+unsafe fn reserve_exact(&mut self, len: usize, additional: usize, elem_layout: Layout) {
+// SAFETY: Precondition passed to caller
+if let Err(err) = unsafe { self.try_reserve_exact(len, additional, elem_layout) } {
 handle_error(err);
 }
 }

-fn try_reserve_exact(
+/// # Safety
+/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+/// initially construct `self`
+/// - `elem_layout`'s size must be a multiple of its alignment
+unsafe fn try_reserve_exact(
 &mut self,
 len: usize,
 additional: usize,
@@ -1488,7 +1511,10 @@ impl<A: Allocator> RawVecInner<A> {
 //@ end_share_RawVecInner(self);

 if needs_to_grow {
-self.grow_exact(len, additional, elem_layout)?;
+// SAFETY: Precondition passed to caller
+unsafe {
+self.grow_exact(len, additional, elem_layout)?;
+}
 }
 unsafe {
 //@ let k2 = begin_lifetime();
@@ -1497,6 +1523,11 @@ impl<A: Allocator> RawVecInner<A> {
 //@ init_ref_RawVecInner(self_ref2);
 //@ open_frac_borrow(k2, ref_initialized_(self_ref2), 1/2);
 //@ open [?f2]ref_initialized_::<RawVecInner<A>>(self_ref2)();
+/// # Safety
+/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+/// initially construct `self`
+/// - `elem_layout`'s size must be a multiple of its alignment
+/// - `cap` must be less than or equal to `self.capacity(elem_layout.size())`
 let needs_to_grow2 = self.needs_to_grow(len, additional, elem_layout);
 //@ close [f2]ref_initialized_::<RawVecInner<A>>(self_ref2)();
 //@ close_frac_borrow(f2, ref_initialized_(self_ref2));
@@ -1512,9 +1543,8 @@ impl<A: Allocator> RawVecInner<A> {

 #[cfg(not(no_global_oom_handling))]
 #[inline]
-#[track_caller]
-fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) {
-if let Err(err) = self.shrink(cap, elem_layout) {
+unsafe fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) {
+if let Err(err) = unsafe { self.shrink(cap, elem_layout) } {
 handle_error(err);
 }
 }
@@ -1541,7 +1571,13 @@ impl<A: Allocator> RawVecInner<A> {
 self.cap = unsafe { Cap::new_unchecked(cap) };
 }

-fn grow_amortized(
+/// # Safety
+/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+/// initially construct `self`
+/// - `elem_layout`'s size must be a multiple of its alignment
+/// - The sum of `len` and `additional` must be greater than or equal to
+/// `self.capacity(elem_layout.size())`
+unsafe fn grow_amortized(
 &mut self,
 len: usize,
 additional: usize,
@@ -1569,6 +1605,7 @@ impl<A: Allocator> RawVecInner<A> {
 //@ safety_proof { assume(false); }
 {
 // This is ensured by the calling contexts.
+// SAFETY: layout_array would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items
 if cfg!(debug_assertions) { //~allow_dead_code // FIXME: The source location associated with a dead `else` branch is the entire `if` statement :-(
 assert!(additional > 0);
 }
@@ -1653,7 +1690,13 @@ impl<A: Allocator> RawVecInner<A> {
 }
 }

-fn grow_exact(
+/// # Safety
+/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+/// initially construct `self`
+/// - `elem_layout`'s size must be a multiple of its alignment
+/// - The sum of `len` and `additional` must be greater than or equal to
+/// `self.capacity(elem_layout.size())`
+unsafe fn grow_exact(
 &mut self,
 len: usize,
 additional: usize,
@@ -1676,6 +1719,11 @@ impl<A: Allocator> RawVecInner<A> {
 Result::Err(e) =>
 RawVecInner(t, self1, elem_layout, alloc_id, ptr0, capacity0) &*& array_at_lft_(alloc_id.lft, ptr0, capacity0 * Layout::size_(elem_layout), _) &*&
 <std::collections::TryReserveError>.own(t, e)
+/// # Safety
+/// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+/// initially construct `self`
+/// - `elem_layout`'s size must be a multiple of its alignment
+/// - `cap` must be less than or equal to `self.capacity(elem_layout.size())`
 };
 @*/
 //@ safety_proof { assume(false); }
@@ -1745,6 +1793,13 @@ impl<A: Allocator> RawVecInner<A> {
 //@ assert 0 <= Cap_as_inner_(self0.cap);
 //@ assert 0 <= logical_capacity(self0.cap, Layout::size_(elem_layout));
 //@ assert cap != 0;
+/// # Safety
+/// If `current_memory` matches `Some((ptr, old_layout))`:
+/// - `ptr` must denote a block of memory *currently allocated* via `alloc`
+/// - `old_layout` must *fit* that block of memory
+/// - `new_layout` must have the same alignment as `old_layout`
+/// - `new_layout.size()` must be greater than or equal to `old_layout.size()`
+/// If `current_memory` is `None`, this function is safe.
 //@ std::alloc::Layout_inv(new_layout);
 //@ close RawVecInner0(alloc_id, Unique::from_non_null_(ptr.ptr), UsizeNoHighBit::new_(cap), elem_layout, NonNull_ptr(ptr.ptr), _);
 //@ close RawVecInner::<A>(t, self1, elem_layout, alloc_id, _, _);
@@ -1975,7 +2030,7 @@ impl<A: Allocator> RawVecInner<A> {
 // not marked inline(never) since we want optimizers to be able to observe the specifics of this
 // function, see tests/codegen-llvm/vec-reserve-extend.rs.
 #[cold]
-fn finish_grow<A>(
+unsafe fn finish_grow<A>(
 new_layout: Layout,
 current_memory: Option<(NonNull<u8>, Layout)>,
 alloc: &mut A,
@@ -2070,7 +2125,6 @@ ens thread_token(t) &*& *alloc |-> ?alloc1 &*& Allocator(t, alloc1, alloc_id) &*
 #[cfg(not(no_global_oom_handling))]
 #[cold]
 #[optimize(size)]
-#[track_caller]
 fn handle_error(e: TryReserveError) -> !
 //@ req true;
 //@ ens false;
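
The recurring pattern in this change is that the RawVecInner growth methods become unsafe fns whose `# Safety` contract ties `elem_layout` to the layout the vector was constructed with, while the safe RawVec wrappers discharge that contract by always passing T::LAYOUT and recording that fact in a `// SAFETY:` comment at each call site. The sketch below is a minimal, self-contained illustration of that shape; the `Buf` type, `reserve_checked`, and their bookkeeping are invented for this example and are not part of raw_vec.rs or its VeriFast proof.

    use std::alloc::Layout;

    /// Hypothetical stand-in for RawVecInner: the element layout is fixed at
    /// construction but still passed explicitly to each growth method.
    struct Buf {
        cap: usize,
        elem_layout: Layout,
    }

    impl Buf {
        fn new(elem_layout: Layout) -> Self {
            // Mirrors the `const { assert!(...) }` added in the diff, checked at
            // run time here: the size must be a multiple of the alignment.
            assert!(elem_layout.size() % elem_layout.align() == 0);
            Buf { cap: 0, elem_layout }
        }

        /// # Safety
        /// - `elem_layout` must be the same layout this buffer was constructed with
        /// - `elem_layout`'s size must be a multiple of its alignment
        unsafe fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) {
            // A real implementation would reallocate via `elem_layout`; this sketch
            // only tracks the logical capacity.
            debug_assert_eq!(elem_layout, self.elem_layout);
            if len + additional > self.cap {
                self.cap = (len + additional).max(self.cap * 2);
            }
        }
    }

    /// Safe wrapper in the style of RawVec::reserve: it always passes the one
    /// layout the buffer was built with, so the unsafe contract is upheld here.
    fn reserve_checked(buf: &mut Buf, len: usize, additional: usize) {
        let layout = buf.elem_layout;
        // SAFETY: `layout` is the `elem_layout` the buffer was constructed with.
        unsafe { buf.reserve(len, additional, layout) }
    }

    fn main() {
        let mut buf = Buf::new(Layout::new::<u64>());
        reserve_checked(&mut buf, 0, 10);
        assert!(buf.cap >= 10);
    }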
