From 91a616b51bc4fd46bfcaaef90bbf960f9c6941d8 Mon Sep 17 00:00:00 2001 From: Casper Neo Date: Mon, 26 Apr 2021 13:36:12 -0400 Subject: [PATCH 1/2] Fix Miri flag passing and bump Rust version. --- rust/flatbuffers/Cargo.toml | 2 +- tests/RustTest.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/rust/flatbuffers/Cargo.toml b/rust/flatbuffers/Cargo.toml index e8cefbd28a4..ee0f7cc30d0 100644 --- a/rust/flatbuffers/Cargo.toml +++ b/rust/flatbuffers/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "flatbuffers" -version = "0.8.4" +version = "0.8.5" edition = "2018" authors = ["Robert Winslow ", "FlatBuffers Maintainers"] license = "Apache-2.0" diff --git a/tests/RustTest.sh b/tests/RustTest.sh index eb21e9f7db3..7a7894ad64d 100755 --- a/tests/RustTest.sh +++ b/tests/RustTest.sh @@ -63,5 +63,5 @@ fi # RUST_NIGHTLY environment variable set in dockerfile. if [[ $RUST_NIGHTLY == 1 ]]; then rustup +nightly component add miri - cargo +nightly miri test -- -Zmiri-disable-isolation + MIRIFLAGS="-Zmiri-disable-isolation" cargo +nightly miri test fi From 6c6d377e1adc154517387bdfa5d7a0c383a050e4 Mon Sep 17 00:00:00 2001 From: Casper Neo Date: Mon, 26 Apr 2021 15:27:39 -0400 Subject: [PATCH 2/2] Fix Miri problems from Arrays PR. SafeSliceAccess was removed for Arrays. It's kind of unsound. It has two properties: 1. EndianSafe 2. Alignment 1 We only need 1. in create_vector_direct to memcpy data. We need both 1. and 2. for accessing things with slices as buffers are built on &[u8] which is unaligned. Conditional compilation implements SafeSliceAccess for >1byte scalars (like f32) on LittleEndian machines which is wrong since they don't satisfy 2. This UB is still accessible for Vectors (though not exercised in our tests) as it implements SafeSliceAccess. I'll fix this later by splitting SafeSliceAccess into its 2 properties. 
--- rust/flatbuffers/src/array.rs | 22 +++++++++---------- tests/rust_usage_test/tests/arrays_test.rs | 11 +++------- .../rust_usage_test/tests/integration_test.rs | 1 + 3 files changed, 15 insertions(+), 19 deletions(-) diff --git a/rust/flatbuffers/src/array.rs b/rust/flatbuffers/src/array.rs index 0a254911e88..fae1f93291f 100644 --- a/rust/flatbuffers/src/array.rs +++ b/rust/flatbuffers/src/array.rs @@ -52,6 +52,9 @@ impl<'a, T: 'a, const N: usize> Array<'a, T, N> { pub fn len(&self) -> usize { N } + pub fn as_ptr(&self) -> *const u8 { + self.0.as_ptr() + } } impl<'a, T: Follow<'a> + 'a, const N: usize> Array<'a, T, N> { @@ -75,14 +78,7 @@ impl<'a, T: Follow<'a> + Debug, const N: usize> Into<[T::Inner; N]> for Array<'a } } -impl<'a, T: SafeSliceAccess + 'a, const N: usize> Array<'a, T, N> { - pub fn safe_slice(self) -> &'a [T] { - let sz = size_of::(); - debug_assert!(sz > 0); - let ptr = self.0.as_ptr() as *const T; - unsafe { from_raw_parts(ptr, N) } - } -} +// TODO(caspern): Implement some future safe version of SafeSliceAccess. /// Implement Follow for all possible Arrays that have Follow-able elements. 
impl<'a, T: Follow<'a> + 'a, const N: usize> Follow<'a> for Array<'a, T, N> { @@ -98,12 +94,16 @@ pub fn emplace_scalar_array( loc: usize, src: &[T; N], ) { - let mut buf_ptr = buf[loc..].as_mut_ptr() as *mut T; + let mut buf_ptr = buf[loc..].as_mut_ptr(); for item in src.iter() { let item_le = item.to_little_endian(); unsafe { - buf_ptr.write(item_le); - buf_ptr = buf_ptr.add(1); + core::ptr::copy_nonoverlapping( + &item_le as *const T as *const u8, + buf_ptr, + size_of::(), + ); + buf_ptr = buf_ptr.add(size_of::()); } } } diff --git a/tests/rust_usage_test/tests/arrays_test.rs b/tests/rust_usage_test/tests/arrays_test.rs index 1f316666640..41e7590fa73 100644 --- a/tests/rust_usage_test/tests/arrays_test.rs +++ b/tests/rust_usage_test/tests/arrays_test.rs @@ -225,8 +225,8 @@ fn verify_struct_array_alignment() { let array_table = root_as_array_table(buf).unwrap(); let array_struct = array_table.a().unwrap(); let struct_start_ptr = array_struct.0.as_ptr() as usize; - let b_start_ptr = array_struct.b().safe_slice().as_ptr() as usize; - let d_start_ptr = array_struct.d().safe_slice().as_ptr() as usize; + let b_start_ptr = array_struct.b().as_ptr() as usize; + let d_start_ptr = array_struct.d().as_ptr() as usize; // The T type of b let b_aln = ::std::mem::align_of::(); assert_eq!((b_start_ptr - struct_start_ptr) % b_aln, 0); @@ -272,8 +272,6 @@ mod array_fuzz { let arr: flatbuffers::Array<$ty, ARRAY_SIZE> = flatbuffers::Array::follow(&test_buf, 0); let got: [$ty; ARRAY_SIZE] = arr.into(); assert_eq!(got, xs.0); - #[cfg(target_endian = "little")] - assert_eq!(arr.safe_slice(), xs.0); } #[test] fn $test_name() { @@ -317,13 +315,10 @@ mod array_fuzz { let arr: flatbuffers::Array = flatbuffers::Array::follow(&test_buf, 0); let got: [&NestedStruct; ARRAY_SIZE] = arr.into(); assert_eq!(got, native_struct_array); - let arr_slice = arr.safe_slice(); - for i in 0..ARRAY_SIZE { - assert_eq!(arr_slice[i], *native_struct_array[i]); - } } #[test] + #[cfg(not(miri))] // slow. 
fn test_struct() { quickcheck::QuickCheck::new().max_tests(MAX_TESTS).quickcheck(prop_struct as fn(FakeArray)); } diff --git a/tests/rust_usage_test/tests/integration_test.rs b/tests/rust_usage_test/tests/integration_test.rs index f2deb838249..9ec8daceba0 100644 --- a/tests/rust_usage_test/tests/integration_test.rs +++ b/tests/rust_usage_test/tests/integration_test.rs @@ -1115,6 +1115,7 @@ mod roundtrip_byteswap { // fn fuzz_f64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_f64 as fn(f64)); } } +#[cfg(not(miri))] quickcheck! { fn struct_of_structs( a_id: u32,