-rw-r--r--  rust/flatbuffers/src/array.rs                          |  4
-rw-r--r--  rust/flatbuffers/src/builder.rs                        | 12
-rw-r--r--  rust/flatbuffers/src/endian_scalar.rs                  | 33
-rw-r--r--  rust/flatbuffers/src/primitives.rs                     | 14
-rw-r--r--  rust/flatbuffers/src/push.rs                           |  4
-rw-r--r--  rust/flatbuffers/src/vector.rs                         | 10
-rw-r--r--  rust/flatbuffers/src/vtable.rs                         | 16
-rw-r--r--  rust/flatbuffers/src/vtable_writer.rs                  | 14
-rw-r--r--  samples/monster_generated.rs                           | 12
-rw-r--r--  samples/sample_flexbuffers.rs                          | 18
-rw-r--r--  src/idl_gen_rust.cpp                                   | 10
-rw-r--r--  tests/arrays_test_generated.rs                         |  6
-rw-r--r--  tests/include_test/sub/include_test2_generated.rs      |  6
-rw-r--r--  tests/monster_test_generated.rs                        | 30
-rw-r--r--  tests/more_defaults_generated.rs                       |  6
-rw-r--r--  tests/namespace_test/namespace_test1_generated.rs      | 12
-rw-r--r--  tests/namespace_test/namespace_test2_generated.rs      | 12
-rw-r--r--  tests/optional_scalars_generated.rs                    |  6
-rw-r--r--  tests/rust_usage_test/tests/flexbuffers_tests/rwyw.rs  |  3
-rw-r--r--  tests/rust_usage_test/tests/integration_test.rs        |  6
20 files changed, 134 insertions, 100 deletions
diff --git a/rust/flatbuffers/src/array.rs b/rust/flatbuffers/src/array.rs index 0a254911..f5d68ac7 100644 --- a/rust/flatbuffers/src/array.rs +++ b/rust/flatbuffers/src/array.rs @@ -37,6 +37,8 @@ where } } +#[allow(clippy::len_without_is_empty)] +#[allow(clippy::from_over_into)] // TODO(caspern): Go from From to Into. impl<'a, T: 'a, const N: usize> Array<'a, T, N> { #[inline(always)] pub fn new(buf: &'a [u8]) -> Self { @@ -49,7 +51,7 @@ impl<'a, T: 'a, const N: usize> Array<'a, T, N> { } #[inline(always)] - pub fn len(&self) -> usize { + pub const fn len(&self) -> usize { N } } diff --git a/rust/flatbuffers/src/builder.rs b/rust/flatbuffers/src/builder.rs index afc43113..3b536737 100644 --- a/rust/flatbuffers/src/builder.rs +++ b/rust/flatbuffers/src/builder.rs @@ -564,12 +564,14 @@ impl<'fbb> FlatBufferBuilder<'fbb> { { let n = self.head + self.used_space() - object_revloc_to_vtable.value() as usize; - let saw = read_scalar_at::<UOffsetT>(&self.owned_buf, n); + let saw = unsafe { read_scalar_at::<UOffsetT>(&self.owned_buf, n) }; debug_assert_eq!(saw, 0xF0F0_F0F0); - emplace_scalar::<SOffsetT>( - &mut self.owned_buf[n..n + SIZE_SOFFSET], - vt_use as SOffsetT - object_revloc_to_vtable.value() as SOffsetT, - ); + unsafe { + emplace_scalar::<SOffsetT>( + &mut self.owned_buf[n..n + SIZE_SOFFSET], + vt_use as SOffsetT - object_revloc_to_vtable.value() as SOffsetT, + ); + } } self.field_locs.clear(); diff --git a/rust/flatbuffers/src/endian_scalar.rs b/rust/flatbuffers/src/endian_scalar.rs index 695601e4..7e8d8e3e 100644 --- a/rust/flatbuffers/src/endian_scalar.rs +++ b/rust/flatbuffers/src/endian_scalar.rs @@ -13,6 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +#![allow(clippy::wrong_self_convention)] use std::mem::size_of; @@ -148,34 +149,36 @@ pub fn byte_swap_f64(x: f64) -> f64 { /// Place an EndianScalar into the provided mutable byte slice. Performs /// endian conversion, if necessary. +/// # Safety +/// Caller must ensure `s.len() > size_of::<T>()` +/// and `x` does not overlap with `s`. #[inline] -pub fn emplace_scalar<T: EndianScalar>(s: &mut [u8], x: T) { +pub unsafe fn emplace_scalar<T: EndianScalar>(s: &mut [u8], x: T) { let x_le = x.to_little_endian(); - unsafe { - core::ptr::copy_nonoverlapping( - &x_le as *const T as *const u8, - s.as_mut_ptr() as *mut u8, - size_of::<T>(), - ); - } + core::ptr::copy_nonoverlapping( + &x_le as *const T as *const u8, + s.as_mut_ptr() as *mut u8, + size_of::<T>(), + ); } /// Read an EndianScalar from the provided byte slice at the specified location. /// Performs endian conversion, if necessary. +/// # Safety +/// Caller must ensure `s.len() > loc + size_of::<T>()`. #[inline] -pub fn read_scalar_at<T: EndianScalar>(s: &[u8], loc: usize) -> T { +pub unsafe fn read_scalar_at<T: EndianScalar>(s: &[u8], loc: usize) -> T { read_scalar(&s[loc..]) } /// Read an EndianScalar from the provided byte slice. Performs endian /// conversion, if necessary. +/// # Safety +/// Caller must ensure `s.len() > size_of::<T>()`. #[inline] -pub fn read_scalar<T: EndianScalar>(s: &[u8]) -> T { +pub unsafe fn read_scalar<T: EndianScalar>(s: &[u8]) -> T { let mut mem = core::mem::MaybeUninit::<T>::uninit(); // Since [u8] has alignment 1, we copy it into T which may have higher alignment. 
- let x = unsafe { - core::ptr::copy_nonoverlapping(s.as_ptr(), mem.as_mut_ptr() as *mut u8, size_of::<T>()); - mem.assume_init() - }; - x.from_little_endian() + core::ptr::copy_nonoverlapping(s.as_ptr(), mem.as_mut_ptr() as *mut u8, size_of::<T>()); + mem.assume_init().from_little_endian() } diff --git a/rust/flatbuffers/src/primitives.rs b/rust/flatbuffers/src/primitives.rs index 3d9d4c82..b7b4942d 100644 --- a/rust/flatbuffers/src/primitives.rs +++ b/rust/flatbuffers/src/primitives.rs @@ -137,7 +137,9 @@ impl<T> Push for WIPOffset<T> { #[inline(always)] fn push(&self, dst: &mut [u8], rest: &[u8]) { let n = (SIZE_UOFFSET + rest.len() - self.value() as usize) as UOffsetT; - emplace_scalar::<UOffsetT>(dst, n); + unsafe { + emplace_scalar::<UOffsetT>(dst, n); + } } } @@ -179,7 +181,7 @@ impl<'a, T: Follow<'a>> Follow<'a> for ForwardsUOffset<T> { #[inline(always)] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { let slice = &buf[loc..loc + SIZE_UOFFSET]; - let off = read_scalar::<u32>(slice) as usize; + let off = unsafe { read_scalar::<u32>(slice) as usize }; T::follow(buf, loc + off) } } @@ -200,7 +202,7 @@ impl<'a, T: Follow<'a>> Follow<'a> for ForwardsVOffset<T> { #[inline(always)] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { let slice = &buf[loc..loc + SIZE_VOFFSET]; - let off = read_scalar::<VOffsetT>(slice) as usize; + let off = unsafe { read_scalar::<VOffsetT>(slice) as usize }; T::follow(buf, loc + off) } } @@ -230,7 +232,7 @@ impl<'a, T: Follow<'a>> Follow<'a> for BackwardsSOffset<T> { #[inline(always)] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { let slice = &buf[loc..loc + SIZE_SOFFSET]; - let off = read_scalar::<SOffsetT>(slice); + let off = unsafe { read_scalar::<SOffsetT>(slice) }; T::follow(buf, (loc as SOffsetT - off) as usize) } } @@ -293,7 +295,7 @@ impl<'a> Follow<'a> for bool { type Inner = bool; #[inline(always)] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - read_scalar_at::<u8>(buf, loc) != 0 + unsafe { read_scalar_at::<u8>(buf, loc) != 0 } } } @@ -308,7 +310,7 @@ macro_rules! impl_follow_for_endian_scalar { type Inner = $ty; #[inline(always)] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - read_scalar_at::<$ty>(buf, loc) + unsafe { read_scalar_at::<$ty>(buf, loc) } } } }; diff --git a/rust/flatbuffers/src/push.rs b/rust/flatbuffers/src/push.rs index c461372c..72ff88cd 100644 --- a/rust/flatbuffers/src/push.rs +++ b/rust/flatbuffers/src/push.rs @@ -61,7 +61,9 @@ macro_rules! 
impl_push_for_endian_scalar { #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - emplace_scalar::<$ty>(dst, *self); + unsafe { + emplace_scalar::<$ty>(dst, *self); + } } } }; diff --git a/rust/flatbuffers/src/vector.rs b/rust/flatbuffers/src/vector.rs index 74eb8e95..fe46c503 100644 --- a/rust/flatbuffers/src/vector.rs +++ b/rust/flatbuffers/src/vector.rs @@ -73,7 +73,7 @@ impl<'a, T: 'a> Vector<'a, T> { #[inline(always)] pub fn len(&self) -> usize { - read_scalar_at::<UOffsetT>(&self.0, self.1) as usize + unsafe { read_scalar_at::<UOffsetT>(&self.0, self.1) as usize } } #[inline(always)] pub fn is_empty(&self) -> bool { @@ -84,7 +84,7 @@ impl<'a, T: 'a> Vector<'a, T> { impl<'a, T: Follow<'a> + 'a> Vector<'a, T> { #[inline(always)] pub fn get(&self, idx: usize) -> T::Inner { - debug_assert!(idx < read_scalar_at::<u32>(&self.0, self.1) as usize); + debug_assert!(idx < self.len() as usize); let sz = size_of::<T>(); debug_assert!(sz > 0); T::follow(self.0, self.1 as usize + SIZE_UOFFSET + sz * idx) @@ -103,7 +103,7 @@ impl<'a, T: SafeSliceAccess + 'a> Vector<'a, T> { let loc = self.1; let sz = size_of::<T>(); debug_assert!(sz > 0); - let len = read_scalar_at::<UOffsetT>(&buf, loc) as usize; + let len = unsafe { read_scalar_at::<UOffsetT>(&buf, loc) } as usize; let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz]; let ptr = data_buf.as_ptr() as *const T; let s: &'a [T] = unsafe { from_raw_parts(ptr, len) }; @@ -144,7 +144,7 @@ pub fn follow_cast_ref<'a, T: Sized + 'a>(buf: &'a [u8], loc: usize) -> &'a T { impl<'a> Follow<'a> for &'a str { type Inner = &'a str; fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let len = read_scalar_at::<UOffsetT>(&buf, loc) as usize; + let len = unsafe { read_scalar_at::<UOffsetT>(&buf, loc) } as usize; let slice = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len]; unsafe { from_utf8_unchecked(slice) } } @@ -154,7 +154,7 @@ impl<'a> Follow<'a> for &'a str { fn follow_slice_helper<T>(buf: &[u8], loc: usize) -> &[T] { let sz = size_of::<T>(); debug_assert!(sz > 0); - let len = read_scalar_at::<UOffsetT>(&buf, loc) as usize; + let len = unsafe { read_scalar_at::<UOffsetT>(&buf, loc) as usize }; let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz]; let ptr = data_buf.as_ptr() as *const T; let s: &[T] = unsafe { from_raw_parts(ptr, len) }; diff --git a/rust/flatbuffers/src/vtable.rs b/rust/flatbuffers/src/vtable.rs index 98fb1e23..bbb7190a 100644 --- a/rust/flatbuffers/src/vtable.rs +++ b/rust/flatbuffers/src/vtable.rs @@ -40,10 +40,10 @@ impl<'a> VTable<'a> { (self.num_bytes() / SIZE_VOFFSET) - 2 } pub fn num_bytes(&self) -> usize { - read_scalar_at::<VOffsetT>(self.buf, self.loc) as usize + unsafe { read_scalar_at::<VOffsetT>(self.buf, self.loc) as usize } } pub fn object_inline_num_bytes(&self) -> usize { - let n = read_scalar_at::<VOffsetT>(self.buf, self.loc + SIZE_VOFFSET); + let n = unsafe { read_scalar_at::<VOffsetT>(self.buf, self.loc + SIZE_VOFFSET) }; n as usize } pub fn get_field(&self, idx: usize) -> VOffsetT { @@ -51,17 +51,19 @@ impl<'a> VTable<'a> { if idx > self.num_fields() { return 0; } - read_scalar_at::<VOffsetT>( - self.buf, - self.loc + SIZE_VOFFSET + SIZE_VOFFSET + SIZE_VOFFSET * idx, - ) + unsafe { + read_scalar_at::<VOffsetT>( + self.buf, + self.loc + SIZE_VOFFSET + SIZE_VOFFSET + SIZE_VOFFSET * idx, + ) + } } pub fn get(&self, byte_loc: VOffsetT) -> VOffsetT { // TODO(rw): distinguish between None and 0? 
if byte_loc as usize >= self.num_bytes() { return 0; } - read_scalar_at::<VOffsetT>(self.buf, self.loc + byte_loc as usize) + unsafe { read_scalar_at::<VOffsetT>(self.buf, self.loc + byte_loc as usize) } } pub fn as_bytes(&self) -> &[u8] { let len = self.num_bytes(); diff --git a/rust/flatbuffers/src/vtable_writer.rs b/rust/flatbuffers/src/vtable_writer.rs index 57380bd1..75eabd49 100644 --- a/rust/flatbuffers/src/vtable_writer.rs +++ b/rust/flatbuffers/src/vtable_writer.rs @@ -40,14 +40,18 @@ impl<'a> VTableWriter<'a> { /// to the provided value. #[inline(always)] pub fn write_vtable_byte_length(&mut self, n: VOffsetT) { - emplace_scalar::<VOffsetT>(&mut self.buf[..SIZE_VOFFSET], n); + unsafe { + emplace_scalar::<VOffsetT>(&mut self.buf[..SIZE_VOFFSET], n); + } debug_assert_eq!(n as usize, self.buf.len()); } /// Writes an object length (in bytes) into the vtable. #[inline(always)] pub fn write_object_inline_size(&mut self, n: VOffsetT) { - emplace_scalar::<VOffsetT>(&mut self.buf[SIZE_VOFFSET..2 * SIZE_VOFFSET], n); + unsafe { + emplace_scalar::<VOffsetT>(&mut self.buf[SIZE_VOFFSET..2 * SIZE_VOFFSET], n); + } } /// Gets an object field offset from the vtable. Only used for debugging. @@ -57,7 +61,7 @@ impl<'a> VTableWriter<'a> { #[inline(always)] pub fn get_field_offset(&self, vtable_offset: VOffsetT) -> VOffsetT { let idx = vtable_offset as usize; - read_scalar_at::<VOffsetT>(&self.buf, idx) + unsafe { read_scalar_at::<VOffsetT>(&self.buf, idx) } } /// Writes an object field offset into the vtable. @@ -67,7 +71,9 @@ impl<'a> VTableWriter<'a> { #[inline(always)] pub fn write_field_offset(&mut self, vtable_offset: VOffsetT, object_data_offset: VOffsetT) { let idx = vtable_offset as usize; - emplace_scalar::<VOffsetT>(&mut self.buf[idx..idx + SIZE_VOFFSET], object_data_offset); + unsafe { + emplace_scalar::<VOffsetT>(&mut self.buf[idx..idx + SIZE_VOFFSET], object_data_offset); + } } /// Clears all data in this VTableWriter. 
Used to cleanly undo a diff --git a/samples/monster_generated.rs b/samples/monster_generated.rs index d2f17ee5..b3d82674 100644 --- a/samples/monster_generated.rs +++ b/samples/monster_generated.rs @@ -76,7 +76,9 @@ impl<'a> flatbuffers::Follow<'a> for Color { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<i8>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<i8>(buf, loc) + }; Self(b) } } @@ -85,7 +87,7 @@ impl flatbuffers::Push for Color { type Output = Color; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<i8>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<i8>(dst, self.0); } } } @@ -161,7 +163,9 @@ impl<'a> flatbuffers::Follow<'a> for Equipment { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<u8>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<u8>(buf, loc) + }; Self(b) } } @@ -170,7 +174,7 @@ impl flatbuffers::Push for Equipment { type Output = Equipment; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<u8>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<u8>(dst, self.0); } } } diff --git a/samples/sample_flexbuffers.rs b/samples/sample_flexbuffers.rs index 677bcacb..237dbf0b 100644 --- a/samples/sample_flexbuffers.rs +++ b/samples/sample_flexbuffers.rs @@ -149,24 +149,6 @@ fn main() { .iter() .map(|r| r.as_u8()) .eq(vec![5, 10, 25, 25, 25, 100].into_iter())); - // For very speed sensitive applications, you can directly read the slice if all of the - // following are true: - // - // * The provided data buffer contains a valid flexbuffer. - // * You correctly specify the flexbuffer type and width. - // * The host machine is little endian. - // * The provided data buffer itself is aligned in memory to 8 bytes. - // - // Vec<u8> has alignment 1 so special care is needed to get your buffer's alignment to 8. - #[cfg(target_endian = "little")] - { - if monster_coins.is_aligned() { - assert_eq!( - monster_coins.get_slice::<i8>().unwrap(), - &[5, 10, 25, 25, 25, 100] - ); - } - } // Build the answer to life the universe and everything. Reusing a builder resets it. The // reused internals won't need to reallocate leading to a potential 2x speedup. 
diff --git a/src/idl_gen_rust.cpp b/src/idl_gen_rust.cpp index 9e7e51e6..455780cd 100644 --- a/src/idl_gen_rust.cpp +++ b/src/idl_gen_rust.cpp @@ -746,9 +746,9 @@ class RustGenerator : public BaseGenerator { code_ += " type Inner = Self;"; code_ += " #[inline]"; code_ += " fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {"; - code_ += - " let b = flatbuffers::read_scalar_at::<{{BASE_TYPE}}>(buf," - " loc);"; + code_ += " let b = unsafe {"; + code_ += " flatbuffers::read_scalar_at::<{{BASE_TYPE}}>(buf, loc)"; + code_ += " };"; code_ += " {{FROM_BASE}}"; code_ += " }"; code_ += "}"; @@ -758,8 +758,8 @@ class RustGenerator : public BaseGenerator { code_ += " #[inline]"; code_ += " fn push(&self, dst: &mut [u8], _rest: &[u8]) {"; code_ += - " flatbuffers::emplace_scalar::<{{BASE_TYPE}}>" - "(dst, {{INTO_BASE}});"; + " unsafe { flatbuffers::emplace_scalar::<{{BASE_TYPE}}>" + "(dst, {{INTO_BASE}}); }"; code_ += " }"; code_ += "}"; code_ += ""; diff --git a/tests/arrays_test_generated.rs b/tests/arrays_test_generated.rs index 2c7a7c6f..e8c61615 100644 --- a/tests/arrays_test_generated.rs +++ b/tests/arrays_test_generated.rs @@ -76,7 +76,9 @@ impl<'a> flatbuffers::Follow<'a> for TestEnum { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<i8>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<i8>(buf, loc) + }; Self(b) } } @@ -85,7 +87,7 @@ impl flatbuffers::Push for TestEnum { type Output = TestEnum; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<i8>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<i8>(dst, self.0); } } } diff --git a/tests/include_test/sub/include_test2_generated.rs b/tests/include_test/sub/include_test2_generated.rs index 9f0b40d1..92e09bc8 100644 --- a/tests/include_test/sub/include_test2_generated.rs +++ b/tests/include_test/sub/include_test2_generated.rs @@ -71,7 +71,9 @@ impl<'a> flatbuffers::Follow<'a> for FromInclude { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<i64>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<i64>(buf, loc) + }; Self(b) } } @@ -80,7 +82,7 @@ impl flatbuffers::Push for FromInclude { type Output = FromInclude; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<i64>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<i64>(dst, self.0); } } } diff --git a/tests/monster_test_generated.rs b/tests/monster_test_generated.rs index ba4b5389..ef1ee30c 100644 --- a/tests/monster_test_generated.rs +++ b/tests/monster_test_generated.rs @@ -272,7 +272,9 @@ impl<'a> flatbuffers::Follow<'a> for Color { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<u8>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<u8>(buf, loc) + }; unsafe { Self::from_bits_unchecked(b) } } } @@ -281,7 +283,7 @@ impl flatbuffers::Push for Color { type Output = Color; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<u8>(dst, self.bits()); + unsafe { flatbuffers::emplace_scalar::<u8>(dst, self.bits()); } } } @@ -365,7 +367,9 @@ impl<'a> flatbuffers::Follow<'a> for Race { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<i8>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<i8>(buf, loc) + }; Self(b) } } @@ -374,7 +378,7 @@ 
impl flatbuffers::Push for Race { type Output = Race; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<i8>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<i8>(dst, self.0); } } } @@ -458,7 +462,9 @@ impl<'a> flatbuffers::Follow<'a> for Any { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<u8>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<u8>(buf, loc) + }; Self(b) } } @@ -467,7 +473,7 @@ impl flatbuffers::Push for Any { type Output = Any; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<u8>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<u8>(dst, self.0); } } } @@ -647,7 +653,9 @@ impl<'a> flatbuffers::Follow<'a> for AnyUniqueAliases { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<u8>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<u8>(buf, loc) + }; Self(b) } } @@ -656,7 +664,7 @@ impl flatbuffers::Push for AnyUniqueAliases { type Output = AnyUniqueAliases; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<u8>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<u8>(dst, self.0); } } } @@ -836,7 +844,9 @@ impl<'a> flatbuffers::Follow<'a> for AnyAmbiguousAliases { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<u8>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<u8>(buf, loc) + }; Self(b) } } @@ -845,7 +855,7 @@ impl flatbuffers::Push for AnyAmbiguousAliases { type Output = AnyAmbiguousAliases; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<u8>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<u8>(dst, self.0); } } } diff --git a/tests/more_defaults_generated.rs b/tests/more_defaults_generated.rs index 42b3eff7..be818bc2 100644 --- a/tests/more_defaults_generated.rs +++ b/tests/more_defaults_generated.rs @@ -59,7 +59,9 @@ impl<'a> flatbuffers::Follow<'a> for ABC { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<i32>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<i32>(buf, loc) + }; Self(b) } } @@ -68,7 +70,7 @@ impl flatbuffers::Push for ABC { type Output = ABC; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<i32>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<i32>(dst, self.0); } } } diff --git a/tests/namespace_test/namespace_test1_generated.rs b/tests/namespace_test/namespace_test1_generated.rs index 0f293925..37b0175a 100644 --- a/tests/namespace_test/namespace_test1_generated.rs +++ b/tests/namespace_test/namespace_test1_generated.rs @@ -72,7 +72,9 @@ impl<'a> flatbuffers::Follow<'a> for UnionInNestedNS { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<u8>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<u8>(buf, loc) + }; Self(b) } } @@ -81,7 +83,7 @@ impl flatbuffers::Push for UnionInNestedNS { type Output = UnionInNestedNS; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<u8>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<u8>(dst, self.0); } } } @@ -209,7 +211,9 @@ impl<'a> flatbuffers::Follow<'a> for EnumInNestedNS { type Inner = Self; #[inline] fn follow(buf: &'a 
[u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<i8>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<i8>(buf, loc) + }; Self(b) } } @@ -218,7 +222,7 @@ impl flatbuffers::Push for EnumInNestedNS { type Output = EnumInNestedNS; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<i8>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<i8>(dst, self.0); } } } diff --git a/tests/namespace_test/namespace_test2_generated.rs b/tests/namespace_test/namespace_test2_generated.rs index d9391861..4db42a06 100644 --- a/tests/namespace_test/namespace_test2_generated.rs +++ b/tests/namespace_test/namespace_test2_generated.rs @@ -72,7 +72,9 @@ impl<'a> flatbuffers::Follow<'a> for UnionInNestedNS { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<u8>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<u8>(buf, loc) + }; Self(b) } } @@ -81,7 +83,7 @@ impl flatbuffers::Push for UnionInNestedNS { type Output = UnionInNestedNS; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<u8>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<u8>(dst, self.0); } } } @@ -209,7 +211,9 @@ impl<'a> flatbuffers::Follow<'a> for EnumInNestedNS { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<i8>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<i8>(buf, loc) + }; Self(b) } } @@ -218,7 +222,7 @@ impl flatbuffers::Push for EnumInNestedNS { type Output = EnumInNestedNS; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<i8>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<i8>(dst, self.0); } } } diff --git a/tests/optional_scalars_generated.rs b/tests/optional_scalars_generated.rs index 868989cd..392fdf58 100644 --- a/tests/optional_scalars_generated.rs +++ b/tests/optional_scalars_generated.rs @@ -68,7 +68,9 @@ impl<'a> flatbuffers::Follow<'a> for OptionalByte { type Inner = Self; #[inline] fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { - let b = flatbuffers::read_scalar_at::<i8>(buf, loc); + let b = unsafe { + flatbuffers::read_scalar_at::<i8>(buf, loc) + }; Self(b) } } @@ -77,7 +79,7 @@ impl flatbuffers::Push for OptionalByte { type Output = OptionalByte; #[inline] fn push(&self, dst: &mut [u8], _rest: &[u8]) { - flatbuffers::emplace_scalar::<i8>(dst, self.0); + unsafe { flatbuffers::emplace_scalar::<i8>(dst, self.0); } } } diff --git a/tests/rust_usage_test/tests/flexbuffers_tests/rwyw.rs b/tests/rust_usage_test/tests/flexbuffers_tests/rwyw.rs index dc8a201b..8c27e6bc 100644 --- a/tests/rust_usage_test/tests/flexbuffers_tests/rwyw.rs +++ b/tests/rust_usage_test/tests/flexbuffers_tests/rwyw.rs @@ -255,6 +255,7 @@ fn vector_uint4() { assert_eq!(v.idx(2).get_u64(), Ok(5)); assert_eq!(v.idx(3).get_u64(), Ok(7)); assert!(v.index(4).is_err()); + #[allow(deprecated)] #[cfg(target_endian = "little")] { assert_eq!(r.get_slice::<u8>().unwrap(), [2, 3, 5, 7]); @@ -399,7 +400,7 @@ fn serde_serious() { b: u16, c: u32, d: u64, - }; + } #[derive(Debug, PartialEq, Serialize, Deserialize)] struct MyUnitStruct(Vec<String>); diff --git a/tests/rust_usage_test/tests/integration_test.rs b/tests/rust_usage_test/tests/integration_test.rs index f2deb838..567f7bd5 100644 --- a/tests/rust_usage_test/tests/integration_test.rs +++ b/tests/rust_usage_test/tests/integration_test.rs @@ -1682,8 +1682,10 @@ mod 
roundtrip_scalars { fn prop<T: PartialEq + ::std::fmt::Debug + Copy + flatbuffers::EndianScalar>(x: T) { let mut buf = vec![0u8; ::std::mem::size_of::<T>()]; - flatbuffers::emplace_scalar(&mut buf[..], x); - let y = flatbuffers::read_scalar(&buf[..]); + let y = unsafe { + flatbuffers::emplace_scalar(&mut buf[..], x); + flatbuffers::read_scalar(&buf[..]) + }; assert_eq!(x, y); }
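
The substance of this change lives in rust/flatbuffers/src/endian_scalar.rs: emplace_scalar, read_scalar, and read_scalar_at become unsafe fns with documented bounds/overlap preconditions, and every caller (the builder, vector, and vtable internals, the generated code emitted by src/idl_gen_rust.cpp, and the tests) now wraps them in an explicit unsafe block. As a rough illustration of what a downstream call site looks like under the new signatures, here is a minimal sketch; the function name write_and_read_u32 is hypothetical and not part of the patch:

use flatbuffers::{emplace_scalar, read_scalar_at};

fn write_and_read_u32(value: u32) -> u32 {
    let mut buf = [0u8; 8];
    // SAFETY: `buf` is 8 bytes, which exceeds size_of::<u32>(), and `value`
    // does not overlap the buffer.
    unsafe { emplace_scalar::<u32>(&mut buf, value) };
    // SAFETY: offset 0 plus size_of::<u32>() stays within the 8-byte buffer.
    unsafe { read_scalar_at::<u32>(&buf, 0) }
}

As in the patched integration test above, the value read back equals the value written (write_and_read_u32(42) == 42), since the little-endian conversion performed on write is undone on read.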