author     Eddie Linder <eddilinn@gmail.com>    2021-04-16 18:15:59 +0300
committer  GitHub <noreply@github.com>          2021-04-16 11:15:59 -0400
commit     da3bb64ef6c8410fbe05a80523d60735e6f50833 (patch)
tree       b4b8cb429e59c8dfe3821fc3f533b0e1afe46ccc
parent     151900ba9645253fa8e1f780b5446d32fc30c4a5 (diff)
[Rust] Add support for fixed size arrays (#6548)
* Add support for fixed size arrays
* clang-format
* Update rust image to 1.51 to support const generics
* Handle big endian correctly
* Add fuzz tests and clean code
* Add struct fuzz test and optimize struct arrays for api
* Bump flatbuffers crate version
-rw-r--r--  rust/flatbuffers/Cargo.toml                          2
-rw-r--r--  rust/flatbuffers/src/array.rs                      136
-rw-r--r--  rust/flatbuffers/src/builder.rs                     21
-rw-r--r--  rust/flatbuffers/src/endian_scalar.rs                8
-rw-r--r--  rust/flatbuffers/src/lib.rs                          2
-rw-r--r--  rust/flatbuffers/src/vector.rs                      22
-rw-r--r--  rust/flatbuffers/src/verifier.rs                    12
-rw-r--r--  samples/monster_generated.rs                         8
-rw-r--r--  src/idl_gen_rust.cpp                               179
-rw-r--r--  src/idl_parser.cpp                                   2
-rw-r--r--  tests/arrays_test_generated.rs                     725
-rw-r--r--  tests/docker/languages/Dockerfile.testing.rust.1_51_0 (renamed from tests/docker/languages/Dockerfile.testing.rust.1_40_0)    2
-rw-r--r--  tests/docker/languages/Dockerfile.testing.rust.big_endian.1_51_0 (renamed from tests/docker/languages/Dockerfile.testing.rust.big_endian.1_40_0)    2
-rw-r--r--  tests/generate_code.bat                              2
-rwxr-xr-x  tests/generate_code.sh                               2
-rw-r--r--  tests/include_test/include_test1_generated.rs        2
-rw-r--r--  tests/include_test/sub/include_test2_generated.rs    8
-rw-r--r--  tests/monster_test_generated.rs                     16
-rw-r--r--  tests/more_defaults_generated.rs                     2
-rw-r--r--  tests/namespace_test/namespace_test1_generated.rs    8
-rw-r--r--  tests/namespace_test/namespace_test2_generated.rs   10
-rw-r--r--  tests/optional_scalars_generated.rs                  4
-rw-r--r--  tests/rust_usage_test/Cargo.toml                     1
-rw-r--r--  tests/rust_usage_test/tests/arrays_test.rs         330
-rw-r--r--  tests/rust_usage_test/tests/integration_test.rs     13
25 files changed, 1442 insertions(+), 77 deletions(-)
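
The patch teaches the Rust generator and runtime about fixed-size array fields ([T:N]) in structs. Before the diff itself, a minimal usage sketch based on the arrays_test schema whose generated code appears further down; it assumes `use arrays_test_generated::my_game::example::*;` plus the patched flatbuffers crate, and is illustrative only, not part of the change:

    let mut b = flatbuffers::FlatBufferBuilder::new();
    // Struct constructors take fixed-size array fields by reference.
    let nested = NestedStruct::new(&[1, 2], TestEnum::A, &[TestEnum::B, TestEnum::C], &[3, 4]);
    let s = ArrayStruct::new(12.34, &[0; 15], -1, &[nested, nested], 1, &[5, 6]);
    let off = ArrayTable::create(&mut b, &ArrayTableArgs { a: Some(&s) });
    finish_array_table_buffer(&mut b, off);

    // Array fields are read back as zero-copy flatbuffers::Array views.
    let arr = root_as_array_table(b.finished_data()).unwrap().a().unwrap();
    assert_eq!(arr.b().len(), 15);            // flatbuffers::Array<'_, i32, 15>
    let f_native: [i64; 2] = arr.f().into();  // convert to a native array when needed
    assert_eq!(f_native, [5, 6]);
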
diff --git a/rust/flatbuffers/Cargo.toml b/rust/flatbuffers/Cargo.toml
index d2bd785f..02a268ea 100644
--- a/rust/flatbuffers/Cargo.toml
+++ b/rust/flatbuffers/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "flatbuffers"
-version = "0.8.3"
+version = "0.8.4"
edition = "2018"
authors = ["Robert Winslow <hello@rwinslow.com>", "FlatBuffers Maintainers"]
license = "Apache-2.0"
diff --git a/rust/flatbuffers/src/array.rs b/rust/flatbuffers/src/array.rs
new file mode 100644
index 00000000..0a254911
--- /dev/null
+++ b/rust/flatbuffers/src/array.rs
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::follow::Follow;
+use crate::{
+ vector::{SafeSliceAccess, VectorIter},
+ EndianScalar,
+};
+use std::fmt::{Debug, Formatter, Result};
+use std::marker::PhantomData;
+use std::mem::size_of;
+use std::slice::from_raw_parts;
+
+#[derive(Copy, Clone)]
+pub struct Array<'a, T: 'a, const N: usize>(&'a [u8], PhantomData<T>);
+
+impl<'a, T: 'a, const N: usize> Debug for Array<'a, T, N>
+where
+ T: 'a + Follow<'a>,
+ <T as Follow<'a>>::Inner: Debug,
+{
+ fn fmt(&self, f: &mut Formatter) -> Result {
+ f.debug_list().entries(self.iter()).finish()
+ }
+}
+
+impl<'a, T: 'a, const N: usize> Array<'a, T, N> {
+ #[inline(always)]
+ pub fn new(buf: &'a [u8]) -> Self {
+ debug_assert!(size_of::<T>() * N == buf.len());
+
+ Array {
+ 0: buf,
+ 1: PhantomData,
+ }
+ }
+
+ #[inline(always)]
+ pub fn len(&self) -> usize {
+ N
+ }
+}
+
+impl<'a, T: Follow<'a> + 'a, const N: usize> Array<'a, T, N> {
+ #[inline(always)]
+ pub fn get(&self, idx: usize) -> T::Inner {
+ debug_assert!(idx < N);
+ let sz = size_of::<T>();
+ T::follow(self.0, sz * idx)
+ }
+
+ #[inline(always)]
+ pub fn iter(&self) -> VectorIter<'a, T> {
+ VectorIter::from_slice(self.0, self.len())
+ }
+}
+
+impl<'a, T: Follow<'a> + Debug, const N: usize> Into<[T::Inner; N]> for Array<'a, T, N> {
+ #[inline(always)]
+ fn into(self) -> [T::Inner; N] {
+ array_init(|i| self.get(i))
+ }
+}
+
+impl<'a, T: SafeSliceAccess + 'a, const N: usize> Array<'a, T, N> {
+ pub fn safe_slice(self) -> &'a [T] {
+ let sz = size_of::<T>();
+ debug_assert!(sz > 0);
+ let ptr = self.0.as_ptr() as *const T;
+ unsafe { from_raw_parts(ptr, N) }
+ }
+}
+
+/// Implement Follow for all possible Arrays that have Follow-able elements.
+impl<'a, T: Follow<'a> + 'a, const N: usize> Follow<'a> for Array<'a, T, N> {
+ type Inner = Array<'a, T, N>;
+ #[inline(always)]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Array::new(&buf[loc..loc + N * size_of::<T>()])
+ }
+}
+
+pub fn emplace_scalar_array<T: EndianScalar, const N: usize>(
+ buf: &mut [u8],
+ loc: usize,
+ src: &[T; N],
+) {
+ let mut buf_ptr = buf[loc..].as_mut_ptr() as *mut T;
+ for item in src.iter() {
+ let item_le = item.to_little_endian();
+ unsafe {
+ buf_ptr.write(item_le);
+ buf_ptr = buf_ptr.add(1);
+ }
+ }
+}
+
+impl<'a, T: Follow<'a> + 'a, const N: usize> IntoIterator for Array<'a, T, N> {
+ type Item = T::Inner;
+ type IntoIter = VectorIter<'a, T>;
+ #[inline]
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter()
+ }
+}
+
+#[inline]
+pub fn array_init<F, T, const N: usize>(mut initializer: F) -> [T; N]
+where
+ F: FnMut(usize) -> T,
+{
+ let mut array: core::mem::MaybeUninit<[T; N]> = core::mem::MaybeUninit::uninit();
+ let mut ptr_i = array.as_mut_ptr() as *mut T;
+
+ unsafe {
+ for i in 0..N {
+ let value_i = initializer(i);
+ ptr_i.write(value_i);
+ ptr_i = ptr_i.add(1);
+ }
+ array.assume_init()
+ }
+}
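
The new module supplies the three building blocks the generated code relies on: Array<'a, T, N> is a zero-copy view over exactly N * size_of::<T>() bytes, emplace_scalar_array writes a native [T; N] into a buffer element by element in little-endian order, and array_init fills a [T; N] from an index closure (used by the generated pack()/unpack() paths). A small sketch of the two public helpers, assuming the crate as patched here; `demo` is an illustrative wrapper, not part of the patch:

    fn demo(view: flatbuffers::Array<'_, i64, 2>) -> [i64; 2] {
        // Build a fixed-size array from an index-based initializer.
        let squares: [u32; 4] = flatbuffers::array_init(|i| (i as u32) * (i as u32));
        assert_eq!(squares, [0, 1, 4, 9]);
        // Convert the zero-copy view into an owned native array
        // via the Into<[T::Inner; N]> impl above.
        view.into()
    }
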
diff --git a/rust/flatbuffers/src/builder.rs b/rust/flatbuffers/src/builder.rs
index 2db363bc..afc43113 100644
--- a/rust/flatbuffers/src/builder.rs
+++ b/rust/flatbuffers/src/builder.rs
@@ -253,12 +253,9 @@ impl<'fbb> FlatBufferBuilder<'fbb> {
// Gets The pointer to the size of the string
let str_memory = &buf[buf.len() - ptr..];
// Gets the size of the written string from buffer
- let size = u32::from_le_bytes([
- str_memory[0],
- str_memory[1],
- str_memory[2],
- str_memory[3],
- ]) as usize;
+ let size =
+ u32::from_le_bytes([str_memory[0], str_memory[1], str_memory[2], str_memory[3]])
+ as usize;
// Size of the string size
let string_size: usize = 4;
// Fetches actual string bytes from index of string after string size
@@ -728,23 +725,21 @@ impl<'fbb> FlatBufferBuilder<'fbb> {
// could be empty (e.g. for empty tables, or for all-default values).
debug_assert!(
self.nested,
- format!(
- "incorrect FlatBufferBuilder usage: {} must be called while in a nested state",
- fn_name
- )
+ "incorrect FlatBufferBuilder usage: {} must be called while in a nested state",
+ fn_name
);
}
#[inline]
fn assert_not_nested(&self, msg: &'static str) {
- debug_assert!(!self.nested, msg);
+ debug_assert!(!self.nested, "{}", msg);
}
#[inline]
fn assert_finished(&self, msg: &'static str) {
- debug_assert!(self.finished, msg);
+ debug_assert!(self.finished, "{}", msg);
}
#[inline]
fn assert_not_finished(&self, msg: &'static str) {
- debug_assert!(!self.finished, msg);
+ debug_assert!(!self.finished, "{}", msg);
}
}
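
The builder changes are mechanical: a non-literal panic message (a pre-built format!(..) or a plain &str) is something newer toolchains warn about in the assert-family macros, so the helpers now pass a literal format string and the message as an argument. The pattern in isolation (illustrative, not part of the patch):

    fn check(flag: bool, msg: &'static str) {
        // debug_assert!(flag, msg) would trip the non-format-string-message lint
        // on recent compilers; routing msg through "{}" keeps the same behavior.
        debug_assert!(flag, "{}", msg);
    }
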
diff --git a/rust/flatbuffers/src/endian_scalar.rs b/rust/flatbuffers/src/endian_scalar.rs
index 93233206..695601e4 100644
--- a/rust/flatbuffers/src/endian_scalar.rs
+++ b/rust/flatbuffers/src/endian_scalar.rs
@@ -155,7 +155,7 @@ pub fn emplace_scalar<T: EndianScalar>(s: &mut [u8], x: T) {
core::ptr::copy_nonoverlapping(
&x_le as *const T as *const u8,
s.as_mut_ptr() as *mut u8,
- size_of::<T>()
+ size_of::<T>(),
);
}
}
@@ -174,11 +174,7 @@ pub fn read_scalar<T: EndianScalar>(s: &[u8]) -> T {
let mut mem = core::mem::MaybeUninit::<T>::uninit();
// Since [u8] has alignment 1, we copy it into T which may have higher alignment.
let x = unsafe {
- core::ptr::copy_nonoverlapping(
- s.as_ptr(),
- mem.as_mut_ptr() as *mut u8,
- size_of::<T>()
- );
+ core::ptr::copy_nonoverlapping(s.as_ptr(), mem.as_mut_ptr() as *mut u8, size_of::<T>());
mem.assume_init()
};
x.from_little_endian()
diff --git a/rust/flatbuffers/src/lib.rs b/rust/flatbuffers/src/lib.rs
index 77d51f82..465e1690 100644
--- a/rust/flatbuffers/src/lib.rs
+++ b/rust/flatbuffers/src/lib.rs
@@ -28,6 +28,7 @@
//! At this time, to generate Rust code, you will need the latest `master` version of `flatc`, available from here: <https://github.com/google/flatbuffers>
//! (On OSX, you can install FlatBuffers from `HEAD` with the Homebrew package manager.)
+mod array;
mod builder;
mod endian_scalar;
mod follow;
@@ -40,6 +41,7 @@ mod verifier;
mod vtable;
mod vtable_writer;
+pub use crate::array::{array_init, emplace_scalar_array, Array};
pub use crate::builder::FlatBufferBuilder;
pub use crate::endian_scalar::{
byte_swap_f32, byte_swap_f64, emplace_scalar, read_scalar, read_scalar_at, EndianScalar,
diff --git a/rust/flatbuffers/src/vector.rs b/rust/flatbuffers/src/vector.rs
index b54d3ec8..74eb8e95 100644
--- a/rust/flatbuffers/src/vector.rs
+++ b/rust/flatbuffers/src/vector.rs
@@ -29,11 +29,15 @@ use crate::primitives::*;
pub struct Vector<'a, T: 'a>(&'a [u8], usize, PhantomData<T>);
-impl<'a, T:'a> Default for Vector<'a, T> {
+impl<'a, T: 'a> Default for Vector<'a, T> {
fn default() -> Self {
// Static, length 0 vector.
// Note that derived default causes UB due to issues in read_scalar_at /facepalm.
- Self(&[0; core::mem::size_of::<UOffsetT>()], 0, Default::default())
+ Self(
+ &[0; core::mem::size_of::<UOffsetT>()],
+ 0,
+ Default::default(),
+ )
}
}
@@ -88,7 +92,7 @@ impl<'a, T: Follow<'a> + 'a> Vector<'a, T> {
#[inline(always)]
pub fn iter(&self) -> VectorIter<'a, T> {
- VectorIter::new(*self)
+ VectorIter::from_vector(*self)
}
}
@@ -185,7 +189,7 @@ pub struct VectorIter<'a, T: 'a> {
impl<'a, T: 'a> VectorIter<'a, T> {
#[inline]
- pub fn new(inner: Vector<'a, T>) -> Self {
+ pub fn from_vector(inner: Vector<'a, T>) -> Self {
VectorIter {
buf: inner.0,
// inner.1 is the location of the data for the vector.
@@ -196,6 +200,16 @@ impl<'a, T: 'a> VectorIter<'a, T> {
phantom: PhantomData,
}
}
+
+ #[inline]
+ pub fn from_slice(buf: &'a [u8], items_num: usize) -> Self {
+ VectorIter {
+ buf,
+ loc: 0,
+ remaining: items_num,
+ phantom: PhantomData,
+ }
+ }
}
impl<'a, T: Follow<'a> + 'a> Clone for VectorIter<'a, T> {
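
vector.rs renames the iterator constructor to from_vector and adds from_slice, which lets the new Array type above iterate its backing bytes with the same VectorIter machinery. An illustrative use of the resulting API, with names from the arrays_test code further down:

    // Sum a fixed-size [int:15] struct field without copying it out first.
    fn sum_b(s: &ArrayStruct) -> i32 {
        s.b().iter().sum()
    }
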
diff --git a/rust/flatbuffers/src/verifier.rs b/rust/flatbuffers/src/verifier.rs
index ec9e5101..12a02f96 100644
--- a/rust/flatbuffers/src/verifier.rs
+++ b/rust/flatbuffers/src/verifier.rs
@@ -39,8 +39,10 @@ pub enum InvalidFlatbuffer {
required: &'static str,
error_trace: ErrorTrace,
},
- #[error("Union exactly one of union discriminant (`{field_type}`) and value \
- (`{field}`) are present.\n{error_trace}")]
+ #[error(
+ "Union exactly one of union discriminant (`{field_type}`) and value \
+ (`{field}`) are present.\n{error_trace}"
+ )]
InconsistentUnion {
field: &'static str,
field_type: &'static str,
@@ -70,8 +72,10 @@ pub enum InvalidFlatbuffer {
range: Range<usize>,
error_trace: ErrorTrace,
},
- #[error("Signed offset at position {position} has value {soffset} which points out of bounds.\
- \n{error_trace}")]
+ #[error(
+ "Signed offset at position {position} has value {soffset} which points out of bounds.\
+ \n{error_trace}"
+ )]
SignedOffsetOutOfBounds {
soffset: SOffsetT,
position: usize,
diff --git a/samples/monster_generated.rs b/samples/monster_generated.rs
index 017fbdf2..d2f17ee5 100644
--- a/samples/monster_generated.rs
+++ b/samples/monster_generated.rs
@@ -6,7 +6,7 @@ use std::mem;
use std::cmp::Ordering;
extern crate flatbuffers;
-use self::flatbuffers::EndianScalar;
+use self::flatbuffers::{EndianScalar, Follow};
#[allow(unused_imports, dead_code)]
pub mod my_game {
@@ -15,7 +15,7 @@ pub mod my_game {
use std::cmp::Ordering;
extern crate flatbuffers;
- use self::flatbuffers::EndianScalar;
+ use self::flatbuffers::{EndianScalar, Follow};
#[allow(unused_imports, dead_code)]
pub mod sample {
@@ -23,7 +23,7 @@ pub mod sample {
use std::cmp::Ordering;
extern crate flatbuffers;
- use self::flatbuffers::EndianScalar;
+ use self::flatbuffers::{EndianScalar, Follow};
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_COLOR: i8 = 0;
@@ -313,7 +313,7 @@ impl<'a> flatbuffers::Verifiable for Vec3 {
v.in_buffer::<Self>(pos)
}
}
-impl Vec3 {
+impl<'a> Vec3 {
#[allow(clippy::too_many_arguments)]
pub fn new(
x: f32,
diff --git a/src/idl_gen_rust.cpp b/src/idl_gen_rust.cpp
index a1d01935..9e7e51e6 100644
--- a/src/idl_gen_rust.cpp
+++ b/src/idl_gen_rust.cpp
@@ -81,6 +81,10 @@ enum FullType {
ftVectorOfTable = 14,
ftVectorOfString = 15,
ftVectorOfUnionValue = 16,
+
+ ftArrayOfBuiltin = 17,
+ ftArrayOfEnum = 18,
+ ftArrayOfStruct = 19,
};
// Convert a Type to a FullType (exhaustive).
@@ -127,6 +131,23 @@ FullType GetFullType(const Type &type) {
FLATBUFFERS_ASSERT(false && "vector of vectors are unsupported");
}
}
+ } else if (IsArray(type)) {
+ switch (GetFullType(type.VectorType())) {
+ case ftInteger:
+ case ftFloat:
+ case ftBool: {
+ return ftArrayOfBuiltin;
+ }
+ case ftStruct: {
+ return ftArrayOfStruct;
+ }
+ case ftEnumKey: {
+ return ftArrayOfEnum;
+ }
+ default: {
+ FLATBUFFERS_ASSERT(false && "Unsupported type for fixed array");
+ }
+ }
} else if (type.enum_def != nullptr) {
if (type.enum_def->is_union) {
if (type.base_type == BASE_TYPE_UNION) {
@@ -567,6 +588,12 @@ class RustGenerator : public BaseGenerator {
case ftUnionKey: {
return GetTypeBasic(type);
}
+ case ftArrayOfBuiltin:
+ case ftArrayOfEnum:
+ case ftArrayOfStruct: {
+ return "[" + GetTypeGet(type.VectorType()) + "; " +
+ NumToString(type.fixed_length) + "]";
+ }
case ftTable: {
return WrapInNameSpace(type.struct_def->defined_namespace,
type.struct_def->name) +
@@ -937,6 +964,9 @@ class RustGenerator : public BaseGenerator {
return "INVALID_CODE_GENERATION";
}
+ case ftArrayOfStruct:
+ case ftArrayOfEnum:
+ case ftArrayOfBuiltin:
case ftVectorOfBool:
case ftVectorOfFloat:
case ftVectorOfInteger:
@@ -1035,6 +1065,12 @@ class RustGenerator : public BaseGenerator {
case ftVectorOfUnionValue: {
return WrapUOffsetsVector("flatbuffers::Table<" + lifetime + ">");
}
+ case ftArrayOfEnum:
+ case ftArrayOfStruct:
+ case ftArrayOfBuiltin: {
+ FLATBUFFERS_ASSERT(false && "arrays are not supported within tables");
+ return "ARRAYS_NOT_SUPPORTED_IN_TABLES";
+ }
}
return "INVALID_CODE_GENERATION"; // for return analysis
}
@@ -1101,6 +1137,21 @@ class RustGenerator : public BaseGenerator {
FLATBUFFERS_ASSERT(false && "vectors of unions are not yet supported");
return "INVALID_CODE_GENERATION"; // OH NO!
}
+ case ftArrayOfEnum: {
+ ty = "[" + WrapInNameSpace(*type.VectorType().enum_def) + "; " +
+ NumToString(type.fixed_length) + "]";
+ break;
+ }
+ case ftArrayOfStruct: {
+ ty = "[" + NamespacedNativeName(*type.VectorType().struct_def) + "; " +
+ NumToString(type.fixed_length) + "]";
+ break;
+ }
+ case ftArrayOfBuiltin: {
+ ty = "[" + GetTypeBasic(type.VectorType()) + "; " +
+ NumToString(type.fixed_length) + "]";
+ break;
+ }
}
if (in_a_table && !IsUnion(type) && field.IsOptional()) {
return "Option<" + ty + ">";
@@ -1170,6 +1221,21 @@ class RustGenerator : public BaseGenerator {
case ftUnionValue: {
return "flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>";
}
+ case ftArrayOfBuiltin: {
+ const auto typname = GetTypeBasic(type.VectorType());
+ return "flatbuffers::Array<" + lifetime + ", " + typname + ", " +
+ NumToString(type.fixed_length) + ">";
+ }
+ case ftArrayOfEnum: {
+ const auto typname = WrapInNameSpace(*type.enum_def);
+ return "flatbuffers::Array<" + lifetime + ", " + typname + ", " +
+ NumToString(type.fixed_length) + ">";
+ }
+ case ftArrayOfStruct: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "flatbuffers::Array<" + lifetime + ", " + typname + ", " +
+ NumToString(type.fixed_length) + ">";
+ }
}
return "INVALID_CODE_GENERATION"; // for return analysis
@@ -1217,6 +1283,12 @@ class RustGenerator : public BaseGenerator {
case ftVectorOfUnionValue: {
return "self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>";
}
+ case ftArrayOfEnum:
+ case ftArrayOfStruct:
+ case ftArrayOfBuiltin: {
+ FLATBUFFERS_ASSERT(false && "arrays are not supported within tables");
+ return "ARRAYS_NOT_SUPPORTED_IN_TABLES";
+ }
}
return "INVALID_CODE_GENERATION"; // for return analysis
}
@@ -1289,6 +1361,12 @@ class RustGenerator : public BaseGenerator {
// Into trait to convert tables to typesafe union values.
return "INVALID_CODE_GENERATION"; // for return analysis
}
+ case ftArrayOfEnum:
+ case ftArrayOfStruct:
+ case ftArrayOfBuiltin: {
+ FLATBUFFERS_ASSERT(false && "arrays are not supported within tables");
+ return "ARRAYS_NOT_SUPPORTED_IN_TABLES";
+ }
}
return "INVALID_CODE_GENERATION"; // for return analysis
}
@@ -1302,6 +1380,10 @@ class RustGenerator : public BaseGenerator {
const auto WrapVector = [&](std::string ty) -> std::string {
return "flatbuffers::Vector<" + lifetime + ", " + ty + ">";
};
+ const auto WrapArray = [&](std::string ty, uint16_t length) -> std::string {
+ return "flatbuffers::Array<" + lifetime + ", " + ty + ", " +
+ NumToString(length) + ">";
+ };
switch (GetFullType(type)) {
case ftInteger:
case ftFloat:
@@ -1351,6 +1433,18 @@ class RustGenerator : public BaseGenerator {
FLATBUFFERS_ASSERT(false && "vectors of unions are not yet supported");
return "INVALID_CODE_GENERATION"; // for return analysis
}
+ case ftArrayOfEnum: {
+ const auto typname = WrapInNameSpace(*type.VectorType().enum_def);
+ return WrapArray(typname, type.fixed_length);
+ }
+ case ftArrayOfStruct: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return WrapArray(typname, type.fixed_length);
+ }
+ case ftArrayOfBuiltin: {
+ const auto typname = GetTypeBasic(type.VectorType());
+ return WrapArray(typname, type.fixed_length);
+ }
}
return "INVALID_CODE_GENERATION"; // for return analysis
}
@@ -1595,6 +1689,13 @@ class RustGenerator : public BaseGenerator {
FLATBUFFERS_ASSERT(false && "vectors of unions not yet supported");
return;
}
+ case ftArrayOfEnum:
+ case ftArrayOfStruct:
+ case ftArrayOfBuiltin: {
+ FLATBUFFERS_ASSERT(false &&
+ "arrays are not supported within tables");
+ return;
+ }
}
if (field.IsOptional()) {
code_ += " let {{FIELD_NAME}} = self.{{FIELD_NAME}}().map(|x| {";
@@ -2037,6 +2138,12 @@ class RustGenerator : public BaseGenerator {
FLATBUFFERS_ASSERT(false && "vectors of unions not yet supported");
return;
}
+ case ftArrayOfEnum:
+ case ftArrayOfStruct:
+ case ftArrayOfBuiltin: {
+ FLATBUFFERS_ASSERT(false && "arrays are not supported within tables");
+ return;
+ }
}
});
code_ += " {{STRUCT_NAME}}::create(_fbb, &{{STRUCT_NAME}}Args{";
@@ -2317,11 +2424,11 @@ class RustGenerator : public BaseGenerator {
code_.SetValue("FIELD_OBJECT_TYPE", ObjectFieldType(field, false));
code_.SetValue("FIELD_NAME", Name(field));
code_.SetValue("FIELD_OFFSET", NumToString(offset_to_field));
- code_.SetValue("REF", IsStruct(field.value.type) ? "&" : "");
+ code_.SetValue(
+ "REF",
+ IsStruct(field.value.type) || IsArray(field.value.type) ? "&" : "");
cb(field);
- const size_t size = IsStruct(field.value.type)
- ? field.value.type.struct_def->bytesize
- : SizeOf(field.value.type.base_type);
+ const size_t size = InlineSize(field.value.type);
offset_to_field += size + field.padding;
}
}
@@ -2425,7 +2532,7 @@ class RustGenerator : public BaseGenerator {
code_ += "}";
// Generate a constructor that takes all fields as arguments.
- code_ += "impl {{STRUCT_NAME}} {";
+ code_ += "impl<'a> {{STRUCT_NAME}} {";
code_ += " #[allow(clippy::too_many_arguments)]";
code_ += " pub fn new(";
ForAllStructFields(struct_def, [&](const FieldDef &unused) {
@@ -2456,6 +2563,14 @@ class RustGenerator : public BaseGenerator {
" unsafe {"
" &*(self.0[{{FIELD_OFFSET}}..].as_ptr() as *const"
" {{FIELD_TYPE}}) }";
+ } else if (IsArray(field.value.type)) {
+ code_.SetValue("ARRAY_SIZE",
+ NumToString(field.value.type.fixed_length));
+ code_.SetValue("ARRAY_ITEM", GetTypeGet(field.value.type.VectorType()));
+ code_ +=
+ " pub fn {{FIELD_NAME}}(&'a self) -> "
+ "flatbuffers::Array<'a, {{ARRAY_ITEM}}, {{ARRAY_SIZE}}> {";
+ code_ += " flatbuffers::Array::follow(&self.0, {{FIELD_OFFSET}})";
} else {
code_ += " pub fn {{FIELD_NAME}}(&self) -> {{FIELD_TYPE}} {";
code_ +=
@@ -2473,12 +2588,37 @@ class RustGenerator : public BaseGenerator {
code_ += " }\n";
// Setter.
if (IsStruct(field.value.type)) {
- code_.SetValue("FIELD_SIZE",
- NumToString(field.value.type.struct_def->bytesize));
+ code_.SetValue("FIELD_SIZE", NumToString(InlineSize(field.value.type)));
code_ += " pub fn set_{{FIELD_NAME}}(&mut self, x: &{{FIELD_TYPE}}) {";
code_ +=
" self.0[{{FIELD_OFFSET}}..{{FIELD_OFFSET}}+{{FIELD_SIZE}}]"
".copy_from_slice(&x.0)";
+ } else if (IsArray(field.value.type)) {
+ if (GetFullType(field.value.type) == ftArrayOfBuiltin) {
+ code_.SetValue("ARRAY_ITEM",
+ GetTypeGet(field.value.type.VectorType()));
+ code_.SetValue(
+ "ARRAY_ITEM_SIZE",
+ NumToString(InlineSize(field.value.type.VectorType())));
+ code_ +=
+ " pub fn set_{{FIELD_NAME}}(&mut self, items: &{{FIELD_TYPE}}) "
+ "{";
+ code_ +=
+ " flatbuffers::emplace_scalar_array(&mut self.0, "
+ "{{FIELD_OFFSET}}, items);";
+ } else {
+ code_.SetValue("FIELD_SIZE",
+ NumToString(InlineSize(field.value.type)));
+ code_ +=
+ " pub fn set_{{FIELD_NAME}}(&mut self, x: &{{FIELD_TYPE}}) {";
+ code_ += " unsafe {";
+ code_ += " std::ptr::copy(";
+ code_ += " x.as_ptr() as *const u8,";
+ code_ += " self.0.as_mut_ptr().add({{FIELD_OFFSET}}),";
+ code_ += " {{FIELD_SIZE}},";
+ code_ += " );";
+ code_ += " }";
+ }
} else {
code_ += " pub fn set_{{FIELD_NAME}}(&mut self, x: {{FIELD_TYPE}}) {";
code_ += " let x_le = x.to_little_endian();";
@@ -2502,8 +2642,19 @@ class RustGenerator : public BaseGenerator {
code_ += " pub fn unpack(&self) -> {{NATIVE_STRUCT_NAME}} {";
code_ += " {{NATIVE_STRUCT_NAME}} {";
ForAllStructFields(struct_def, [&](const FieldDef &field) {
- std::string unpack = IsStruct(field.value.type) ? ".unpack()" : "";
- code_ += " {{FIELD_NAME}}: self.{{FIELD_NAME}}()" + unpack + ",";
+ if (IsArray(field.value.type)) {
+ if (GetFullType(field.value.type) == ftArrayOfStruct) {
+ code_ +=
+ " {{FIELD_NAME}}: { let {{FIELD_NAME}} = "
+ "self.{{FIELD_NAME}}(); flatbuffers::array_init(|i| "
+ "{{FIELD_NAME}}.get(i).unpack()) },";
+ } else {
+ code_ += " {{FIELD_NAME}}: self.{{FIELD_NAME}}().into(),";
+ }
+ } else {
+ std::string unpack = IsStruct(field.value.type) ? ".unpack()" : "";
+ code_ += " {{FIELD_NAME}}: self.{{FIELD_NAME}}()" + unpack + ",";
+ }
});
code_ += " }";
code_ += " }";
@@ -2530,6 +2681,14 @@ class RustGenerator : public BaseGenerator {
ForAllStructFields(struct_def, [&](const FieldDef &field) {
if (IsStruct(field.value.type)) {
code_ += " &self.{{FIELD_NAME}}.pack(),";
+ } else if (IsArray(field.value.type)) {
+ if (GetFullType(field.value.type) == ftArrayOfStruct) {
+ code_ +=
+ " &flatbuffers::array_init(|i| "
+ "self.{{FIELD_NAME}}[i].pack()),";
+ } else {
+ code_ += " &self.{{FIELD_NAME}},";
+ }
} else {
code_ += " self.{{FIELD_NAME}},";
}
@@ -2570,7 +2729,7 @@ class RustGenerator : public BaseGenerator {
code_ += indent + "use std::cmp::Ordering;";
code_ += "";
code_ += indent + "extern crate flatbuffers;";
- code_ += indent + "use self::flatbuffers::EndianScalar;";
+ code_ += indent + "use self::flatbuffers::{EndianScalar, Follow};";
}
// Set up the correct namespace. This opens a namespace if the current
diff --git a/src/idl_parser.cpp b/src/idl_parser.cpp
index 69fa9d13..88ecf81c 100644
--- a/src/idl_parser.cpp
+++ b/src/idl_parser.cpp
@@ -2467,7 +2467,7 @@ bool Parser::SupportsAdvancedArrayFeatures() const {
return (opts.lang_to_generate &
~(IDLOptions::kCpp | IDLOptions::kPython | IDLOptions::kJava |
IDLOptions::kCSharp | IDLOptions::kJsonSchema | IDLOptions::kJson |
- IDLOptions::kBinary)) == 0;
+ IDLOptions::kBinary | IDLOptions::kRust)) == 0;
}
Namespace *Parser::UniqueNamespace(Namespace *ns) {
diff --git a/tests/arrays_test_generated.rs b/tests/arrays_test_generated.rs
new file mode 100644
index 00000000..2c7a7c6f
--- /dev/null
+++ b/tests/arrays_test_generated.rs
@@ -0,0 +1,725 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+
+
+
+use std::mem;
+use std::cmp::Ordering;
+
+extern crate flatbuffers;
+use self::flatbuffers::{EndianScalar, Follow};
+
+#[allow(unused_imports, dead_code)]
+pub mod my_game {
+
+ use std::mem;
+ use std::cmp::Ordering;
+
+ extern crate flatbuffers;
+ use self::flatbuffers::{EndianScalar, Follow};
+#[allow(unused_imports, dead_code)]
+pub mod example {
+
+ use std::mem;
+ use std::cmp::Ordering;
+
+ extern crate flatbuffers;
+ use self::flatbuffers::{EndianScalar, Follow};
+
+#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
+pub const ENUM_MIN_TEST_ENUM: i8 = 0;
+#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
+pub const ENUM_MAX_TEST_ENUM: i8 = 2;
+#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
+#[allow(non_camel_case_types)]
+pub const ENUM_VALUES_TEST_ENUM: [TestEnum; 3] = [
+ TestEnum::A,
+ TestEnum::B,
+ TestEnum::C,
+];
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+pub struct TestEnum(pub i8);
+#[allow(non_upper_case_globals)]
+impl TestEnum {
+ pub const A: Self = Self(0);
+ pub const B: Self = Self(1);
+ pub const C: Self = Self(2);
+
+ pub const ENUM_MIN: i8 = 0;
+ pub const ENUM_MAX: i8 = 2;
+ pub const ENUM_VALUES: &'static [Self] = &[
+ Self::A,
+ Self::B,
+ Self::C,
+ ];
+ /// Returns the variant's name or "" if unknown.
+ pub fn variant_name(self) -> Option<&'static str> {
+ match self {
+ Self::A => Some("A"),
+ Self::B => Some("B"),
+ Self::C => Some("C"),
+ _ => None,
+ }
+ }
+}
+impl std::fmt::Debug for TestEnum {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ if let Some(name) = self.variant_name() {
+ f.write_str(name)
+ } else {
+ f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
+ }
+ }
+}
+impl<'a> flatbuffers::Follow<'a> for TestEnum {
+ type Inner = Self;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ let b = flatbuffers::read_scalar_at::<i8>(buf, loc);
+ Self(b)
+ }
+}
+
+impl flatbuffers::Push for TestEnum {
+ type Output = TestEnum;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ flatbuffers::emplace_scalar::<i8>(dst, self.0);
+ }
+}
+
+impl flatbuffers::EndianScalar for TestEnum {
+ #[inline]
+ fn to_little_endian(self) -> Self {
+ let b = i8::to_le(self.0);
+ Self(b)
+ }
+ #[inline]
+ #[allow(clippy::wrong_self_convention)]
+ fn from_little_endian(self) -> Self {
+ let b = i8::from_le(self.0);
+ Self(b)
+ }
+}
+
+impl<'a> flatbuffers::Verifiable for TestEnum {
+ #[inline]
+ fn run_verifier(
+ v: &mut flatbuffers::Verifier, pos: usize
+ ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
+ use self::flatbuffers::Verifiable;
+ i8::run_verifier(v, pos)
+ }
+}
+
+impl flatbuffers::SimpleToVerifyInSlice for TestEnum {}
+// struct NestedStruct, aligned to 8
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq)]
+pub struct NestedStruct(pub [u8; 32]);
+impl Default for NestedStruct {
+ fn default() -> Self {
+ Self([0; 32])
+ }
+}
+impl std::fmt::Debug for NestedStruct {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ f.debug_struct("NestedStruct")
+ .field("a", &self.a())
+ .field("b", &self.b())
+ .field("c", &self.c())
+ .field("d", &self.d())
+ .finish()
+ }
+}
+
+impl flatbuffers::SimpleToVerifyInSlice for NestedStruct {}
+impl flatbuffers::SafeSliceAccess for NestedStruct {}
+impl<'a> flatbuffers::Follow<'a> for NestedStruct {
+ type Inner = &'a NestedStruct;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ <&'a NestedStruct>::follow(buf, loc)
+ }
+}
+impl<'a> flatbuffers::Follow<'a> for &'a NestedStruct {
+ type Inner = &'a NestedStruct;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::follow_cast_ref::<NestedStruct>(buf, loc)
+ }
+}
+impl<'b> flatbuffers::Push for NestedStruct {
+ type Output = NestedStruct;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(self as *const NestedStruct as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+impl<'b> flatbuffers::Push for &'b NestedStruct {
+ type Output = NestedStruct;
+
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(*self as *const NestedStruct as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+
+impl<'a> flatbuffers::Verifiable for NestedStruct {
+ #[inline]
+ fn run_verifier(
+ v: &mut flatbuffers::Verifier, pos: usize
+ ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
+ use self::flatbuffers::Verifiable;
+ v.in_buffer::<Self>(pos)
+ }
+}
+impl<'a> NestedStruct {
+ #[allow(clippy::too_many_arguments)]
+ pub fn new(
+ a: &[i32; 2],
+ b: TestEnum,
+ c: &[TestEnum; 2],
+ d: &[i64; 2],
+ ) -> Self {
+ let mut s = Self([0; 32]);
+ s.set_a(&a);
+ s.set_b(b);
+ s.set_c(&c);
+ s.set_d(&d);
+ s
+ }
+
+ pub fn a(&'a self) -> flatbuffers::Array<'a, i32, 2> {
+ flatbuffers::Array::follow(&self.0, 0)
+ }
+
+ pub fn set_a(&mut self, items: &[i32; 2]) {
+ flatbuffers::emplace_scalar_array(&mut self.0, 0, items);
+ }
+
+ pub fn b(&self) -> TestEnum {
+ let mut mem = core::mem::MaybeUninit::<TestEnum>::uninit();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ self.0[8..].as_ptr(),
+ mem.as_mut_ptr() as *mut u8,
+ core::mem::size_of::<TestEnum>(),
+ );
+ mem.assume_init()
+ }.from_little_endian()
+ }
+
+ pub fn set_b(&mut self, x: TestEnum) {
+ let x_le = x.to_little_endian();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ &x_le as *const TestEnum as *const u8,
+ self.0[8..].as_mut_ptr(),
+ core::mem::size_of::<TestEnum>(),
+ );
+ }
+ }
+
+ pub fn c(&'a self) -> flatbuffers::Array<'a, TestEnum, 2> {
+ flatbuffers::Array::follow(&self.0, 9)
+ }
+
+ pub fn set_c(&mut self, x: &[TestEnum; 2]) {
+ unsafe {
+ std::ptr::copy(
+ x.as_ptr() as *const u8,
+ self.0.as_mut_ptr().add(9),
+ 2,
+ );
+ }
+ }
+
+ pub fn d(&'a self) -> flatbuffers::Array<'a, i64, 2> {
+ flatbuffers::Array::follow(&self.0, 16)
+ }
+
+ pub fn set_d(&mut self, items: &[i64; 2]) {
+ flatbuffers::emplace_scalar_array(&mut self.0, 16, items);
+ }
+
+ pub fn unpack(&self) -> NestedStructT {
+ NestedStructT {
+ a: self.a().into(),
+ b: self.b(),
+ c: self.c().into(),
+ d: self.d().into(),
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Default)]
+pub struct NestedStructT {
+ pub a: [i32; 2],
+ pub b: TestEnum,
+ pub c: [TestEnum; 2],
+ pub d: [i64; 2],
+}
+impl NestedStructT {
+ pub fn pack(&self) -> NestedStruct {
+ NestedStruct::new(
+ &self.a,
+ self.b,
+ &self.c,
+ &self.d,
+ )
+ }
+}
+
+// struct ArrayStruct, aligned to 8
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq)]
+pub struct ArrayStruct(pub [u8; 160]);
+impl Default for ArrayStruct {
+ fn default() -> Self {
+ Self([0; 160])
+ }
+}
+impl std::fmt::Debug for ArrayStruct {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ f.debug_struct("ArrayStruct")
+ .field("a", &self.a())
+ .field("b", &self.b())
+ .field("c", &self.c())
+ .field("d", &self.d())
+ .field("e", &self.e())
+ .field("f", &self.f())
+ .finish()
+ }
+}
+
+impl flatbuffers::SimpleToVerifyInSlice for ArrayStruct {}
+impl flatbuffers::SafeSliceAccess for ArrayStruct {}
+impl<'a> flatbuffers::Follow<'a> for ArrayStruct {
+ type Inner = &'a ArrayStruct;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ <&'a ArrayStruct>::follow(buf, loc)
+ }
+}
+impl<'a> flatbuffers::Follow<'a> for &'a ArrayStruct {
+ type Inner = &'a ArrayStruct;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::follow_cast_ref::<ArrayStruct>(buf, loc)
+ }
+}
+impl<'b> flatbuffers::Push for ArrayStruct {
+ type Output = ArrayStruct;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(self as *const ArrayStruct as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+impl<'b> flatbuffers::Push for &'b ArrayStruct {
+ type Output = ArrayStruct;
+
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(*self as *const ArrayStruct as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+
+impl<'a> flatbuffers::Verifiable for ArrayStruct {
+ #[inline]
+ fn run_verifier(
+ v: &mut flatbuffers::Verifier, pos: usize
+ ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
+ use self::flatbuffers::Verifiable;
+ v.in_buffer::<Self>(pos)
+ }
+}
+impl<'a> ArrayStruct {
+ #[allow(clippy::too_many_arguments)]
+ pub fn new(
+ a: f32,
+ b: &[i32; 15],
+ c: i8,
+ d: &[NestedStruct; 2],
+ e: i32,
+ f: &[i64; 2],
+ ) -> Self {
+ let mut s = Self([0; 160]);
+ s.set_a(a);
+ s.set_b(&b);
+ s.set_c(c);
+ s.set_d(&d);
+ s.set_e(e);
+ s.set_f(&f);
+ s
+ }
+
+ pub fn a(&self) -> f32 {
+ let mut mem = core::mem::MaybeUninit::<f32>::uninit();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ self.0[0..].as_ptr(),
+ mem.as_mut_ptr() as *mut u8,
+ core::mem::size_of::<f32>(),
+ );
+ mem.assume_init()
+ }.from_little_endian()
+ }
+
+ pub fn set_a(&mut self, x: f32) {
+ let x_le = x.to_little_endian();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ &x_le as *const f32 as *const u8,
+ self.0[0..].as_mut_ptr(),
+ core::mem::size_of::<f32>(),
+ );
+ }
+ }
+
+ pub fn b(&'a self) -> flatbuffers::Array<'a, i32, 15> {
+ flatbuffers::Array::follow(&self.0, 4)
+ }
+
+ pub fn set_b(&mut self, items: &[i32; 15]) {
+ flatbuffers::emplace_scalar_array(&mut self.0, 4, items);
+ }
+
+ pub fn c(&self) -> i8 {
+ let mut mem = core::mem::MaybeUninit::<i8>::uninit();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ self.0[64..].as_ptr(),
+ mem.as_mut_ptr() as *mut u8,
+ core::mem::size_of::<i8>(),
+ );
+ mem.assume_init()
+ }.from_little_endian()
+ }
+
+ pub fn set_c(&mut self, x: i8) {
+ let x_le = x.to_little_endian();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ &x_le as *const i8 as *const u8,
+ self.0[64..].as_mut_ptr(),
+ core::mem::size_of::<i8>(),
+ );
+ }
+ }
+
+ pub fn d(&'a self) -> flatbuffers::Array<'a, NestedStruct, 2> {
+ flatbuffers::Array::follow(&self.0, 72)
+ }
+
+ pub fn set_d(&mut self, x: &[NestedStruct; 2]) {
+ unsafe {
+ std::ptr::copy(
+ x.as_ptr() as *const u8,
+ self.0.as_mut_ptr().add(72),
+ 64,
+ );
+ }
+ }
+
+ pub fn e(&self) -> i32 {
+ let mut mem = core::mem::MaybeUninit::<i32>::uninit();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ self.0[136..].as_ptr(),
+ mem.as_mut_ptr() as *mut u8,
+ core::mem::size_of::<i32>(),
+ );
+ mem.assume_init()
+ }.from_little_endian()
+ }
+
+ pub fn set_e(&mut self, x: i32) {
+ let x_le = x.to_little_endian();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ &x_le as *const i32 as *const u8,
+ self.0[136..].as_mut_ptr(),
+ core::mem::size_of::<i32>(),
+ );
+ }
+ }
+
+ pub fn f(&'a self) -> flatbuffers::Array<'a, i64, 2> {
+ flatbuffers::Array::follow(&self.0, 144)
+ }
+
+ pub fn set_f(&mut self, items: &[i64; 2]) {
+ flatbuffers::emplace_scalar_array(&mut self.0, 144, items);
+ }
+
+ pub fn unpack(&self) -> ArrayStructT {
+ ArrayStructT {
+ a: self.a(),
+ b: self.b().into(),
+ c: self.c(),
+ d: { let d = self.d(); flatbuffers::array_init(|i| d.get(i).unpack()) },
+ e: self.e(),
+ f: self.f().into(),
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Default)]
+pub struct ArrayStructT {
+ pub a: f32,
+ pub b: [i32; 15],
+ pub c: i8,
+ pub d: [NestedStructT; 2],
+ pub e: i32,
+ pub f: [i64; 2],
+}
+impl ArrayStructT {
+ pub fn pack(&self) -> ArrayStruct {
+ ArrayStruct::new(
+ self.a,
+ &self.b,
+ self.c,
+ &flatbuffers::array_init(|i| self.d[i].pack()),
+ self.e,
+ &self.f,
+ )
+ }
+}
+
+pub enum ArrayTableOffset {}
+#[derive(Copy, Clone, PartialEq)]
+
+pub struct ArrayTable<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for ArrayTable<'a> {
+ type Inner = ArrayTable<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self { _tab: flatbuffers::Table { buf, loc } }
+ }
+}
+
+impl<'a> ArrayTable<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ ArrayTable { _tab: table }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args ArrayTableArgs<'args>) -> flatbuffers::WIPOffset<ArrayTable<'bldr>> {
+ let mut builder = ArrayTableBuilder::new(_fbb);
+ if let Some(x) = args.a { builder.add_a(x); }
+ builder.finish()
+ }
+
+ pub fn unpack(&self) -> ArrayTableT {
+ let a = self.a().map(|x| {
+ x.unpack()
+ });
+ ArrayTableT {
+ a,
+ }
+ }
+ pub const VT_A: flatbuffers::VOffsetT = 4;
+
+ #[inline]
+ pub fn a(&self) -> Option<&'a ArrayStruct> {
+ self._tab.get::<ArrayStruct>(ArrayTable::VT_A, None)
+ }
+}
+
+impl flatbuffers::Verifiable for ArrayTable<'_> {
+ #[inline]
+ fn run_verifier(
+ v: &mut flatbuffers::Verifier, pos: usize
+ ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
+ use self::flatbuffers::Verifiable;
+ v.visit_table(pos)?
+ .visit_field::<ArrayStruct>(&"a", Self::VT_A, false)?
+ .finish();
+ Ok(())
+ }
+}
+pub struct ArrayTableArgs<'a> {
+ pub a: Option<&'a ArrayStruct>,
+}
+impl<'a> Default for ArrayTableArgs<'a> {
+ #[inline]
+ fn default() -> Self {
+ ArrayTableArgs {
+ a: None,
+ }
+ }
+}
+pub struct ArrayTableBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> ArrayTableBuilder<'a, 'b> {
+ #[inline]
+ pub fn add_a(&mut self, a: &ArrayStruct) {
+ self.fbb_.push_slot_always::<&ArrayStruct>(ArrayTable::VT_A, a);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> ArrayTableBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ ArrayTableBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<ArrayTable<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+impl std::fmt::Debug for ArrayTable<'_> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let mut ds = f.debug_struct("ArrayTable");
+ ds.field("a", &self.a());
+ ds.finish()
+ }
+}
+#[non_exhaustive]
+#[derive(Debug, Clone, PartialEq)]
+pub struct ArrayTableT {
+ pub a: Option<ArrayStructT>,
+}
+impl Default for ArrayTableT {
+ fn default() -> Self {
+ Self {
+ a: None,
+ }
+ }
+}
+impl ArrayTableT {
+ pub fn pack<'b>(
+ &self,
+ _fbb: &mut flatbuffers::FlatBufferBuilder<'b>
+ ) -> flatbuffers::WIPOffset<ArrayTable<'b>> {
+ let a_tmp = self.a.as_ref().map(|x| x.pack());
+ let a = a_tmp.as_ref();
+ ArrayTable::create(_fbb, &ArrayTableArgs{
+ a,
+ })
+ }
+}
+#[inline]
+#[deprecated(since="2.0.0", note="Deprecated in favor of `root_as...` methods.")]
+pub fn get_root_as_array_table<'a>(buf: &'a [u8]) -> ArrayTable<'a> {
+ unsafe { flatbuffers::root_unchecked::<ArrayTable<'a>>(buf) }
+}
+
+#[inline]
+#[deprecated(since="2.0.0", note="Deprecated in favor of `root_as...` methods.")]
+pub fn get_size_prefixed_root_as_array_table<'a>(buf: &'a [u8]) -> ArrayTable<'a> {
+ unsafe { flatbuffers::size_prefixed_root_unchecked::<ArrayTable<'a>>(buf) }
+}
+
+#[inline]
+/// Verifies that a buffer of bytes contains a `ArrayTable`
+/// and returns it.
+/// Note that verification is still experimental and may not
+/// catch every error, or be maximally performant. For the
+/// previous, unchecked, behavior use
+/// `root_as_array_table_unchecked`.
+pub fn root_as_array_table(buf: &[u8]) -> Result<ArrayTable, flatbuffers::InvalidFlatbuffer> {
+ flatbuffers::root::<ArrayTable>(buf)
+}
+#[inline]
+/// Verifies that a buffer of bytes contains a size prefixed
+/// `ArrayTable` and returns it.
+/// Note that verification is still experimental and may not
+/// catch every error, or be maximally performant. For the
+/// previous, unchecked, behavior use
+/// `size_prefixed_root_as_array_table_unchecked`.
+pub fn size_prefixed_root_as_array_table(buf: &[u8]) -> Result<ArrayTable, flatbuffers::InvalidFlatbuffer> {
+ flatbuffers::size_prefixed_root::<ArrayTable>(buf)
+}
+#[inline]
+/// Verifies, with the given options, that a buffer of bytes
+/// contains a `ArrayTable` and returns it.
+/// Note that verification is still experimental and may not
+/// catch every error, or be maximally performant. For the
+/// previous, unchecked, behavior use
+/// `root_as_array_table_unchecked`.
+pub fn root_as_array_table_with_opts<'b, 'o>(
+ opts: &'o flatbuffers::VerifierOptions,
+ buf: &'b [u8],
+) -> Result<ArrayTable<'b>, flatbuffers::InvalidFlatbuffer> {
+ flatbuffers::root_with_opts::<ArrayTable<'b>>(opts, buf)
+}
+#[inline]
+/// Verifies, with the given verifier options, that a buffer of
+/// bytes contains a size prefixed `ArrayTable` and returns
+/// it. Note that verification is still experimental and may not
+/// catch every error, or be maximally performant. For the
+/// previous, unchecked, behavior use
+/// `root_as_array_table_unchecked`.
+pub fn size_prefixed_root_as_array_table_with_opts<'b, 'o>(
+ opts: &'o flatbuffers::VerifierOptions,
+ buf: &'b [u8],
+) -> Result<ArrayTable<'b>, flatbuffers::InvalidFlatbuffer> {
+ flatbuffers::size_prefixed_root_with_opts::<ArrayTable<'b>>(opts, buf)
+}
+#[inline]
+/// Assumes, without verification, that a buffer of bytes contains a ArrayTable and returns it.
+/// # Safety
+/// Callers must trust the given bytes do indeed contain a valid `ArrayTable`.
+pub unsafe fn root_as_array_table_unchecked(buf: &[u8]) -> ArrayTable {
+ flatbuffers::root_unchecked::<ArrayTable>(buf)
+}
+#[inline]
+/// Assumes, without verification, that a buffer of bytes contains a size prefixed ArrayTable and returns it.
+/// # Safety
+/// Callers must trust the given bytes do indeed contain a valid size prefixed `ArrayTable`.
+pub unsafe fn size_prefixed_root_as_array_table_unchecked(buf: &[u8]) -> ArrayTable {
+ flatbuffers::size_prefixed_root_unchecked::<ArrayTable>(buf)
+}
+pub const ARRAY_TABLE_IDENTIFIER: &str = "ARRT";
+
+#[inline]
+pub fn array_table_buffer_has_identifier(buf: &[u8]) -> bool {
+ flatbuffers::buffer_has_identifier(buf, ARRAY_TABLE_IDENTIFIER, false)
+}
+
+#[inline]
+pub fn array_table_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool {
+ flatbuffers::buffer_has_identifier(buf, ARRAY_TABLE_IDENTIFIER, true)
+}
+
+pub const ARRAY_TABLE_EXTENSION: &str = "mon";
+
+#[inline]
+pub fn finish_array_table_buffer<'a, 'b>(
+ fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ root: flatbuffers::WIPOffset<ArrayTable<'a>>) {
+ fbb.finish(root, Some(ARRAY_TABLE_IDENTIFIER));
+}
+
+#[inline]
+pub fn finish_size_prefixed_array_table_buffer<'a, 'b>(fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<ArrayTable<'a>>) {
+ fbb.finish_size_prefixed(root, Some(ARRAY_TABLE_IDENTIFIER));
+}
+} // pub mod Example
+} // pub mod MyGame
+
diff --git a/tests/docker/languages/Dockerfile.testing.rust.1_40_0 b/tests/docker/languages/Dockerfile.testing.rust.1_51_0
index 849ad76a..38c1d26d 100644
--- a/tests/docker/languages/Dockerfile.testing.rust.1_40_0
+++ b/tests/docker/languages/Dockerfile.testing.rust.1_51_0
@@ -1,4 +1,4 @@
-FROM rust:1.40.0-slim-stretch as base
+FROM rust:1.51.0-slim as base
WORKDIR /code
ADD . .
RUN cp flatc_debian_stretch flatc
diff --git a/tests/docker/languages/Dockerfile.testing.rust.big_endian.1_40_0 b/tests/docker/languages/Dockerfile.testing.rust.big_endian.1_51_0
index 3abf8df1..aa35ed7b 100644
--- a/tests/docker/languages/Dockerfile.testing.rust.big_endian.1_40_0
+++ b/tests/docker/languages/Dockerfile.testing.rust.big_endian.1_51_0
@@ -1,4 +1,4 @@
-FROM rust:1.40.0-slim-stretch as base
+FROM rust:1.51.0-slim as base
RUN apt -qq update -y && apt -qq install -y \
gcc-mips-linux-gnu \
libexpat1 \
diff --git a/tests/generate_code.bat b/tests/generate_code.bat
index 390efa9c..d151588f 100644
--- a/tests/generate_code.bat
+++ b/tests/generate_code.bat
@@ -57,7 +57,7 @@ set TEST_NOINCL_FLAGS=%TEST_BASE_FLAGS% --no-includes
..\%buildtype%\flatc.exe --cpp --bfbs-comments --bfbs-builtins --bfbs-gen-embed %TEST_NOINCL_FLAGS% %TEST_CPP_FLAGS% -I include_test monster_test.fbs || goto FAIL
..\%buildtype%\flatc.exe -b --schema --bfbs-comments --bfbs-builtins -I include_test arrays_test.fbs || goto FAIL
..\%buildtype%\flatc.exe --jsonschema --schema -I include_test monster_test.fbs || goto FAIL
-..\%buildtype%\flatc.exe --cpp --java --csharp --jsonschema %TEST_NOINCL_FLAGS% %TEST_CPP_FLAGS% %TEST_CS_FLAGS% --scoped-enums arrays_test.fbs || goto FAIL
+..\%buildtype%\flatc.exe --cpp --java --csharp --jsonschema --rust %TEST_NOINCL_FLAGS% %TEST_CPP_FLAGS% %TEST_CS_FLAGS% --scoped-enums arrays_test.fbs || goto FAIL
..\%buildtype%\flatc.exe --python %TEST_BASE_FLAGS% arrays_test.fbs || goto FAIL
..\%buildtype%\flatc.exe --cpp %TEST_BASE_FLAGS% --cpp-ptr-type flatbuffers::unique_ptr native_type_test.fbs || goto FAIL
diff --git a/tests/generate_code.sh b/tests/generate_code.sh
index d3a2dabc..12add89d 100755
--- a/tests/generate_code.sh
+++ b/tests/generate_code.sh
@@ -55,7 +55,7 @@ $TEST_NOINCL_FLAGS $TEST_CPP_FLAGS $TEST_CS_FLAGS $TEST_TS_FLAGS -o namespace_te
../flatc -b --schema --bfbs-comments --bfbs-builtins -I include_test arrays_test.fbs
../flatc --jsonschema --schema -I include_test monster_test.fbs
../flatc --cpp --java --kotlin --csharp --python $TEST_NOINCL_FLAGS $TEST_CPP_FLAGS $TEST_CS_FLAGS monster_extra.fbs monsterdata_extra.json
-../flatc --cpp --java --csharp --jsonschema $TEST_NOINCL_FLAGS $TEST_CPP_FLAGS $TEST_CS_FLAGS --scoped-enums arrays_test.fbs
+../flatc --cpp --java --csharp --jsonschema --rust $TEST_NOINCL_FLAGS $TEST_CPP_FLAGS $TEST_CS_FLAGS --scoped-enums arrays_test.fbs
../flatc --python $TEST_BASE_FLAGS arrays_test.fbs
../flatc --dart monster_extra.fbs
diff --git a/tests/include_test/include_test1_generated.rs b/tests/include_test/include_test1_generated.rs
index d7511f9e..44cfaa76 100644
--- a/tests/include_test/include_test1_generated.rs
+++ b/tests/include_test/include_test1_generated.rs
@@ -7,7 +7,7 @@ use std::mem;
use std::cmp::Ordering;
extern crate flatbuffers;
-use self::flatbuffers::EndianScalar;
+use self::flatbuffers::{EndianScalar, Follow};
pub enum TableAOffset {}
#[derive(Copy, Clone, PartialEq)]
diff --git a/tests/include_test/sub/include_test2_generated.rs b/tests/include_test/sub/include_test2_generated.rs
index 1a1debe1..9f0b40d1 100644
--- a/tests/include_test/sub/include_test2_generated.rs
+++ b/tests/include_test/sub/include_test2_generated.rs
@@ -7,7 +7,7 @@ use std::mem;
use std::cmp::Ordering;
extern crate flatbuffers;
-use self::flatbuffers::EndianScalar;
+use self::flatbuffers::{EndianScalar, Follow};
#[allow(unused_imports, dead_code)]
pub mod my_game {
@@ -17,7 +17,7 @@ pub mod my_game {
use std::cmp::Ordering;
extern crate flatbuffers;
- use self::flatbuffers::EndianScalar;
+ use self::flatbuffers::{EndianScalar, Follow};
#[allow(unused_imports, dead_code)]
pub mod other_name_space {
@@ -26,7 +26,7 @@ pub mod other_name_space {
use std::cmp::Ordering;
extern crate flatbuffers;
- use self::flatbuffers::EndianScalar;
+ use self::flatbuffers::{EndianScalar, Follow};
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_FROM_INCLUDE: i64 = 0;
@@ -173,7 +173,7 @@ impl<'a> flatbuffers::Verifiable for Unused {
v.in_buffer::<Self>(pos)
}
}
-impl Unused {
+impl<'a> Unused {
#[allow(clippy::too_many_arguments)]
pub fn new(
a: i32,
diff --git a/tests/monster_test_generated.rs b/tests/monster_test_generated.rs
index cff8433c..ba4b5389 100644
--- a/tests/monster_test_generated.rs
+++ b/tests/monster_test_generated.rs
@@ -8,7 +8,7 @@ use std::mem;
use std::cmp::Ordering;
extern crate flatbuffers;
-use self::flatbuffers::EndianScalar;
+use self::flatbuffers::{EndianScalar, Follow};
#[allow(unused_imports, dead_code)]
pub mod my_game {
@@ -19,7 +19,7 @@ pub mod my_game {
use std::cmp::Ordering;
extern crate flatbuffers;
- use self::flatbuffers::EndianScalar;
+ use self::flatbuffers::{EndianScalar, Follow};
pub enum InParentNamespaceOffset {}
#[derive(Copy, Clone, PartialEq)]
@@ -133,7 +133,7 @@ pub mod example_2 {
use std::cmp::Ordering;
extern crate flatbuffers;
- use self::flatbuffers::EndianScalar;
+ use self::flatbuffers::{EndianScalar, Follow};
pub enum MonsterOffset {}
#[derive(Copy, Clone, PartialEq)]
@@ -249,7 +249,7 @@ pub mod example {
use std::cmp::Ordering;
extern crate flatbuffers;
- use self::flatbuffers::EndianScalar;
+ use self::flatbuffers::{EndianScalar, Follow};
#[allow(non_upper_case_globals)]
mod bitflags_color {
@@ -1035,7 +1035,7 @@ impl<'a> flatbuffers::Verifiable for Test {
v.in_buffer::<Self>(pos)
}
}
-impl Test {
+impl<'a> Test {
#[allow(clippy::too_many_arguments)]
pub fn new(
a: i16,
@@ -1188,7 +1188,7 @@ impl<'a> flatbuffers::Verifiable for Vec3 {
v.in_buffer::<Self>(pos)
}
}
-impl Vec3 {
+impl<'a> Vec3 {
#[allow(clippy::too_many_arguments)]
pub fn new(
x: f32,
@@ -1434,7 +1434,7 @@ impl<'a> flatbuffers::Verifiable for Ability {
v.in_buffer::<Self>(pos)
}
}
-impl Ability {
+impl<'a> Ability {
#[allow(clippy::too_many_arguments)]
pub fn new(
id: u32,
@@ -1594,7 +1594,7 @@ impl<'a> flatbuffers::Verifiable for StructOfStructs {
v.in_buffer::<Self>(pos)
}
}
-impl StructOfStructs {
+impl<'a> StructOfStructs {
#[allow(clippy::too_many_arguments)]
pub fn new(
a: &Ability,
diff --git a/tests/more_defaults_generated.rs b/tests/more_defaults_generated.rs
index 26db2b1c..42b3eff7 100644
--- a/tests/more_defaults_generated.rs
+++ b/tests/more_defaults_generated.rs
@@ -6,7 +6,7 @@ use std::mem;
use std::cmp::Ordering;
extern crate flatbuffers;
-use self::flatbuffers::EndianScalar;
+use self::flatbuffers::{EndianScalar, Follow};
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_ABC: i32 = 0;
diff --git a/tests/namespace_test/namespace_test1_generated.rs b/tests/namespace_test/namespace_test1_generated.rs
index fc877543..0f293925 100644
--- a/tests/namespace_test/namespace_test1_generated.rs
+++ b/tests/namespace_test/namespace_test1_generated.rs
@@ -6,7 +6,7 @@ use std::mem;
use std::cmp::Ordering;
extern crate flatbuffers;
-use self::flatbuffers::EndianScalar;
+use self::flatbuffers::{EndianScalar, Follow};
#[allow(unused_imports, dead_code)]
pub mod namespace_a {
@@ -15,7 +15,7 @@ pub mod namespace_a {
use std::cmp::Ordering;
extern crate flatbuffers;
- use self::flatbuffers::EndianScalar;
+ use self::flatbuffers::{EndianScalar, Follow};
#[allow(unused_imports, dead_code)]
pub mod namespace_b {
@@ -23,7 +23,7 @@ pub mod namespace_b {
use std::cmp::Ordering;
extern crate flatbuffers;
- use self::flatbuffers::EndianScalar;
+ use self::flatbuffers::{EndianScalar, Follow};
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_UNION_IN_NESTED_NS: u8 = 0;
@@ -312,7 +312,7 @@ impl<'a> flatbuffers::Verifiable for StructInNestedNS {
v.in_buffer::<Self>(pos)
}
}
-impl StructInNestedNS {
+impl<'a> StructInNestedNS {
#[allow(clippy::too_many_arguments)]
pub fn new(
a: i32,
diff --git a/tests/namespace_test/namespace_test2_generated.rs b/tests/namespace_test/namespace_test2_generated.rs
index 9878e2f4..d9391861 100644
--- a/tests/namespace_test/namespace_test2_generated.rs
+++ b/tests/namespace_test/namespace_test2_generated.rs
@@ -6,7 +6,7 @@ use std::mem;
use std::cmp::Ordering;
extern crate flatbuffers;
-use self::flatbuffers::EndianScalar;
+use self::flatbuffers::{EndianScalar, Follow};
#[allow(unused_imports, dead_code)]
pub mod namespace_a {
@@ -15,7 +15,7 @@ pub mod namespace_a {
use std::cmp::Ordering;
extern crate flatbuffers;
- use self::flatbuffers::EndianScalar;
+ use self::flatbuffers::{EndianScalar, Follow};
#[allow(unused_imports, dead_code)]
pub mod namespace_b {
@@ -23,7 +23,7 @@ pub mod namespace_b {
use std::cmp::Ordering;
extern crate flatbuffers;
- use self::flatbuffers::EndianScalar;
+ use self::flatbuffers::{EndianScalar, Follow};
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_UNION_IN_NESTED_NS: u8 = 0;
@@ -312,7 +312,7 @@ impl<'a> flatbuffers::Verifiable for StructInNestedNS {
v.in_buffer::<Self>(pos)
}
}
-impl StructInNestedNS {
+impl<'a> StructInNestedNS {
#[allow(clippy::too_many_arguments)]
pub fn new(
a: i32,
@@ -900,7 +900,7 @@ pub mod namespace_c {
use std::cmp::Ordering;
extern crate flatbuffers;
- use self::flatbuffers::EndianScalar;
+ use self::flatbuffers::{EndianScalar, Follow};
pub enum TableInCOffset {}
#[derive(Copy, Clone, PartialEq)]
diff --git a/tests/optional_scalars_generated.rs b/tests/optional_scalars_generated.rs
index 9ef7ad37..868989cd 100644
--- a/tests/optional_scalars_generated.rs
+++ b/tests/optional_scalars_generated.rs
@@ -6,7 +6,7 @@ use std::mem;
use std::cmp::Ordering;
extern crate flatbuffers;
-use self::flatbuffers::EndianScalar;
+use self::flatbuffers::{EndianScalar, Follow};
#[allow(unused_imports, dead_code)]
pub mod optional_scalars {
@@ -15,7 +15,7 @@ pub mod optional_scalars {
use std::cmp::Ordering;
extern crate flatbuffers;
- use self::flatbuffers::EndianScalar;
+ use self::flatbuffers::{EndianScalar, Follow};
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_OPTIONAL_BYTE: i8 = 0;
diff --git a/tests/rust_usage_test/Cargo.toml b/tests/rust_usage_test/Cargo.toml
index 45ecca66..22b25908 100644
--- a/tests/rust_usage_test/Cargo.toml
+++ b/tests/rust_usage_test/Cargo.toml
@@ -43,6 +43,7 @@ bencher = "0.1.5"
static_assertions = "1.0.0"
rand = "*"
quickcheck_derive = "*"
+array-init = "2.0"
[[bench]]
# setup for bencher
diff --git a/tests/rust_usage_test/tests/arrays_test.rs b/tests/rust_usage_test/tests/arrays_test.rs
new file mode 100644
index 00000000..1f316666
--- /dev/null
+++ b/tests/rust_usage_test/tests/arrays_test.rs
@@ -0,0 +1,330 @@
+extern crate array_init;
+#[allow(dead_code, unused_imports)]
+#[path = "../../arrays_test_generated.rs"]
+mod arrays_test_generated;
+use std::fmt::Debug;
+
+use crate::arrays_test_generated::my_game::example::*;
+extern crate quickcheck;
+use array_init::array_init;
+use std::mem::size_of;
+use quickcheck::{Arbitrary, Gen};
+
+
+fn create_serialized_example_with_generated_code(builder: &mut flatbuffers::FlatBufferBuilder) {
+ let nested_struct1 = NestedStruct::new(
+ &[-1, 2],
+ TestEnum::A,
+ &[TestEnum::C, TestEnum::B],
+ &[0x1122334455667788, -0x1122334455667788],
+ );
+ let nested_struct2 = NestedStruct::new(
+ &[3, -4],
+ TestEnum::B,
+ &[TestEnum::B, TestEnum::A],
+ &[-0x1122334455667788, 0x1122334455667788],
+ );
+ let array_struct = ArrayStruct::new(
+ 12.34,
+ &[1, 2, 3, 4, 5, 6, 7, 8, 9, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF],
+ -127,
+ &[nested_struct1, nested_struct2],
+ 1,
+ &[-0x8000000000000000, 0x7FFFFFFFFFFFFFFF],
+ );
+ // Test five makes sense when specified.
+ let ss = ArrayTable::create(
+ builder,
+ &ArrayTableArgs {
+ a: Some(&array_struct),
+ },
+ );
+ finish_array_table_buffer(builder, ss);
+}
+
+fn serialized_example_is_accessible_and_correct(
+ bytes: &[u8],
+ identifier_required: bool,
+ size_prefixed: bool,
+) {
+ if identifier_required {
+ let correct = if size_prefixed {
+ array_table_size_prefixed_buffer_has_identifier(bytes)
+ } else {
+ array_table_buffer_has_identifier(bytes)
+ };
+ assert_eq!(correct, true);
+ }
+
+ let array_table = if size_prefixed {
+ size_prefixed_root_as_array_table(bytes).unwrap()
+ } else {
+ root_as_array_table(bytes).unwrap()
+ };
+
+ let array_struct = array_table.a().unwrap();
+ assert_eq!(array_struct.a(), 12.34);
+ assert_eq!(array_struct.b().len(), 0xF);
+ assert_eq!(array_struct.b().iter().sum::<i32>(), 120);
+ assert_eq!(array_struct.c(), -127);
+
+ assert_eq!(array_struct.d().len(), 2);
+ let nested_struct1 = array_struct.d().get(0);
+ assert_eq!(nested_struct1.a().len(), 2);
+ assert_eq!(nested_struct1.a().iter().sum::<i32>(), 1);
+ assert_eq!(nested_struct1.b(), TestEnum::A);
+ assert_eq!(nested_struct1.c().len(), 2);
+ assert_eq!(nested_struct1.c().get(0), TestEnum::C);
+ assert_eq!(nested_struct1.c().get(1), TestEnum::B);
+ assert_eq!(nested_struct1.d().len(), 2);
+ assert_eq!(
+ [nested_struct1.d().get(0), nested_struct1.d().get(1)],
+ [0x1122334455667788, -0x1122334455667788]
+ );
+ let nested_struct2 = array_struct.d().get(1);
+ assert_eq!(nested_struct2.a().len(), 2);
+ assert_eq!(nested_struct2.a().iter().sum::<i32>(), -1);
+ assert_eq!(nested_struct2.b(), TestEnum::B);
+ assert_eq!(nested_struct2.c().len(), 2);
+ assert_eq!(nested_struct2.c().get(0), TestEnum::B);
+ assert_eq!(nested_struct2.c().get(1), TestEnum::A);
+ assert_eq!(nested_struct2.d().len(), 2);
+ let arr: [i64; 2] = nested_struct2.d().into();
+ assert_eq!(
+ arr,
+ [-0x1122334455667788, 0x1122334455667788]
+ );
+
+ assert_eq!(array_struct.e(), 1);
+ assert_eq!(array_struct.f().len(), 2);
+ assert_eq!(array_struct.f().get(0), -0x8000000000000000);
+ assert_eq!(array_struct.f().get(1), 0x7FFFFFFFFFFFFFFF);
+}
+
+#[test]
+fn generated_code_creates_correct_example() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ create_serialized_example_with_generated_code(&mut b);
+ let buf = b.finished_data();
+ serialized_example_is_accessible_and_correct(&buf[..], true, false);
+}
+
+#[test]
+fn struct_nested_struct_is_32_bytes() {
+ assert_eq!(32, ::std::mem::size_of::<NestedStruct>());
+}
+
+#[test]
+fn struct_array_struct_is_160_bytes() {
+ assert_eq!(160, ::std::mem::size_of::<ArrayStruct>());
+}
+
+#[test]
+fn test_object_api_reads_correctly() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ create_serialized_example_with_generated_code(&mut b);
+
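+ // unpack() copies the accessed buffer into the owned object-API (`...T`) types.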
+ let array_table = root_as_array_table(b.finished_data()).unwrap().unpack();
+
+ let array_struct = array_table.a.unwrap();
+ assert_eq!(array_struct.a, 12.34);
+ assert_eq!(array_struct.b.len(), 0xF);
+ assert_eq!(array_struct.b.iter().sum::<i32>(), 120);
+ assert_eq!(array_struct.c, -127);
+
+ assert_eq!(array_struct.d.len(), 2);
+ let nested_struct1 = &array_struct.d[0];
+ assert_eq!(nested_struct1.a.len(), 2);
+ assert_eq!(nested_struct1.a.iter().sum::<i32>(), 1);
+ assert_eq!(nested_struct1.b, TestEnum::A);
+ assert_eq!(nested_struct1.c.len(), 2);
+ assert_eq!(nested_struct1.c[0], TestEnum::C);
+ assert_eq!(nested_struct1.c[1], TestEnum::B);
+ assert_eq!(nested_struct1.d.len(), 2);
+ assert_eq!(nested_struct1.d, [0x1122334455667788, -0x1122334455667788]);
+ let nested_struct2 = &array_struct.d[1];
+ assert_eq!(nested_struct2.a.len(), 2);
+ assert_eq!(nested_struct2.a.iter().sum::<i32>(), -1);
+ assert_eq!(nested_struct2.b, TestEnum::B);
+ assert_eq!(nested_struct2.c.len(), 2);
+ assert_eq!(nested_struct2.c[0], TestEnum::B);
+ assert_eq!(nested_struct2.c[1], TestEnum::A);
+ assert_eq!(nested_struct2.d.len(), 2);
+ assert_eq!(nested_struct2.d, [-0x1122334455667788, 0x1122334455667788]);
+
+ assert_eq!(array_struct.e, 1);
+ assert_eq!(array_struct.f.len(), 2);
+ assert_eq!(array_struct.f[0], -0x8000000000000000);
+ assert_eq!(array_struct.f[1], 0x7FFFFFFFFFFFFFFF);
+}
+
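+// The object-API types (NestedStructT, ArrayStructT) should default to zeroed scalars,
+// default enum variants, and zero-filled fixed-size arrays.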
+#[test]
+fn object_api_defaults() {
+ use arrays_test_generated::my_game::example::*;
+
+ assert_eq!(
+ NestedStructT::default(),
+ NestedStructT {
+ a: [0, 0],
+ b: TestEnum::default(),
+ c: [TestEnum::default(), TestEnum::default()],
+ d: [0, 0]
+ }
+ );
+
+ assert_eq!(
+ ArrayStructT::default(),
+ ArrayStructT {
+ a: 0.0,
+ b: [0; 0xF],
+ c: 0,
+ d: [NestedStructT::default(), NestedStructT::default()],
+ e: 0,
+ f: [0, 0]
+ }
+ );
+}
+
+#[test]
+fn generated_code_debug_prints_correctly() {
+ let b = &mut flatbuffers::FlatBufferBuilder::new();
+ create_serialized_example_with_generated_code(b);
+ let buf = b.finished_data();
+ let array_table = root_as_array_table(buf).unwrap();
+ assert_eq!(
+ format!("{:.5?}", &array_table),
+ "ArrayTable { a: Some(ArrayStruct { a: 12.34000, \
+ b: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], \
+ c: -127, d: [NestedStruct { a: [-1, 2], b: A, c: [C, B], \
+ d: [1234605616436508552, -1234605616436508552] }, \
+ NestedStruct { a: [3, -4], b: B, c: [B, A], d: [-1234605616436508552, 1234605616436508552] }], \
+ e: 1, f: [-9223372036854775808, 9223372036854775807] }) }"
+ );
+}
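+// flatbuffers::Array::<i32, 5> must be backed by exactly 5 * size_of::<i32>() = 20 bytes;
+// both a 19-byte and a 21-byte slice should trip the length assertion.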
+
+#[test]
+#[should_panic]
+fn assert_on_too_small_array_buf() {
+ let a = [0u8; 19];
+ flatbuffers::Array::<i32, 5>::new(&a);
+}
+
+#[test]
+#[should_panic]
+fn assert_on_too_big_array_buf() {
+ let a = [0u8; 21];
+ flatbuffers::Array::<i32, 5>::new(&a);
+}
+
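+// Fixed-size array members must stay aligned inside the struct: `b` holds i32 (align 4)
+// and `d` holds NestedStructs whose widest member is i64 (align 8).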
+#[test]
+#[cfg(target_endian = "little")]
+fn verify_struct_array_alignment() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ create_serialized_example_with_generated_code(&mut b);
+ let buf = b.finished_data();
+ let array_table = root_as_array_table(buf).unwrap();
+ let array_struct = array_table.a().unwrap();
+ let struct_start_ptr = array_struct.0.as_ptr() as usize;
+ let b_start_ptr = array_struct.b().safe_slice().as_ptr() as usize;
+ let d_start_ptr = array_struct.d().safe_slice().as_ptr() as usize;
+ // The T type of b
+ let b_aln = ::std::mem::align_of::<i32>();
+ assert_eq!((b_start_ptr - struct_start_ptr) % b_aln, 0);
+ assert_eq!((d_start_ptr - b_start_ptr) % b_aln, 0);
+ assert_eq!((d_start_ptr - struct_start_ptr) % 8, 0);
+}
+
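+// Newtype so quickcheck can generate fixed-size arrays: each element is drawn with
+// Arbitrary and the array is assembled with array_init.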
+#[derive(Clone, Debug)]
+struct FakeArray<T, const N: usize>([T; N]);
+
+impl<T: Arbitrary + Debug + PartialEq, const N: usize> Arbitrary for FakeArray<T, N> {
+ fn arbitrary<G: Gen>(g: &mut G) -> FakeArray<T, N> {
+ let x: [T; N] = array_init(|_| {
+ loop {
+ let generated_scalar = T::arbitrary(g);
+ // Skip NaN: it is not equal to itself, so it cannot be used to check that the
+ // value read back equals the value written.
+ if generated_scalar == generated_scalar { return generated_scalar }
+ }
+ });
+ FakeArray(x)
+ }
+}
+
+#[cfg(test)]
+mod array_fuzz {
+ #[cfg(not(miri))] // slow.
+ extern crate quickcheck;
+ extern crate flatbuffers;
+ use self::flatbuffers::{Follow, Push};
+ use super::*;
+
+ const MAX_TESTS: u64 = 20;
+ const ARRAY_SIZE: usize = 29;
+
+ // This uses a macro because lifetimes for the trait-bounded function get too
+ // complicated.
+ macro_rules! impl_prop {
+ ($test_name:ident, $fn_name:ident, $ty:ident) => (
+ fn $fn_name(xs: FakeArray<$ty, ARRAY_SIZE>) {
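+ // Round trip: write the native array with emplace_scalar_array, follow the bytes back
+ // as a flatbuffers::Array, and compare the result with the input.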
+ let mut test_buf = [0u8; 1024];
+ flatbuffers::emplace_scalar_array(&mut test_buf, 0, &xs.0);
+ let arr: flatbuffers::Array<$ty, ARRAY_SIZE> = flatbuffers::Array::follow(&test_buf, 0);
+ let got: [$ty; ARRAY_SIZE] = arr.into();
+ assert_eq!(got, xs.0);
+ #[cfg(target_endian = "little")]
+ assert_eq!(arr.safe_slice(), xs.0);
+ }
+ #[test]
+ fn $test_name() {
+ quickcheck::QuickCheck::new().max_tests(MAX_TESTS).quickcheck($fn_name as fn(FakeArray<$ty, ARRAY_SIZE>));
+ }
+ )
+ }
+
+ impl_prop!(test_bool, prop_bool, bool);
+ impl_prop!(test_u8, prop_u8, u8);
+ impl_prop!(test_i8, prop_i8, i8);
+ impl_prop!(test_u16, prop_u16, u16);
+ impl_prop!(test_u32, prop_u32, u32);
+ impl_prop!(test_u64, prop_u64, u64);
+ impl_prop!(test_i16, prop_i16, i16);
+ impl_prop!(test_i32, prop_i32, i32);
+ impl_prop!(test_i64, prop_i64, i64);
+ impl_prop!(test_f32, prop_f32, f32);
+ impl_prop!(test_f64, prop_f64, f64);
+
+ const NESTED_STRUCT_SIZE: usize = size_of::<NestedStruct>();
+
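+ // Wrapper that fills a NestedStruct's raw backing bytes with random data for the
+ // struct-array round trip below.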
+ #[derive(Clone, Debug, PartialEq)]
+ struct NestedStructWrapper(NestedStruct);
+
+ impl Arbitrary for NestedStructWrapper {
+ fn arbitrary<G: Gen>(g: &mut G) -> NestedStructWrapper {
+ let mut x = NestedStruct::default();
+ x.0 = FakeArray::<u8, NESTED_STRUCT_SIZE>::arbitrary(g).0;
+ NestedStructWrapper(x)
+ }
+ }
+
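+ // Push each random struct into one contiguous byte buffer, follow it as a
+ // flatbuffers::Array of NestedStruct, and compare both the converted array and
+ // safe_slice() against the originals.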
+ fn prop_struct(xs: FakeArray<NestedStructWrapper, ARRAY_SIZE>) {
+ let mut test_buf = [0u8; 1024];
+ let native_struct_array: [&NestedStruct; ARRAY_SIZE] = array_init::from_iter(xs.0.iter().map(|x| &x.0)).unwrap();
+ for i in 0..ARRAY_SIZE {
+ let offset = i * NESTED_STRUCT_SIZE;
+ native_struct_array[i].push(&mut test_buf[offset..offset + NESTED_STRUCT_SIZE], &[]);
+ }
+ let arr: flatbuffers::Array<NestedStruct, ARRAY_SIZE> = flatbuffers::Array::follow(&test_buf, 0);
+ let got: [&NestedStruct; ARRAY_SIZE] = arr.into();
+ assert_eq!(got, native_struct_array);
+ let arr_slice = arr.safe_slice();
+ for i in 0..ARRAY_SIZE {
+ assert_eq!(arr_slice[i], *native_struct_array[i]);
+ }
+ }
+
+ #[test]
+ fn test_struct() {
+ quickcheck::QuickCheck::new().max_tests(MAX_TESTS).quickcheck(prop_struct as fn(FakeArray<NestedStructWrapper, ARRAY_SIZE>));
+ }
+}
diff --git a/tests/rust_usage_test/tests/integration_test.rs b/tests/rust_usage_test/tests/integration_test.rs
index e606b3b7..f2deb838 100644
--- a/tests/rust_usage_test/tests/integration_test.rs
+++ b/tests/rust_usage_test/tests/integration_test.rs
@@ -16,7 +16,7 @@
*/
#[macro_use]
-#[cfg(not(miri))] // slow.
+#[cfg(not(miri))] // slow.
extern crate quickcheck;
extern crate flatbuffers;
extern crate flexbuffers;
@@ -24,13 +24,13 @@ extern crate rand;
extern crate serde;
#[macro_use]
extern crate serde_derive;
-#[cfg(not(miri))] // slow.
+#[cfg(not(miri))] // slow.
#[macro_use]
extern crate quickcheck_derive;
mod flexbuffers_tests;
-mod optional_scalars_test;
mod more_defaults_test;
+mod optional_scalars_test;
#[allow(dead_code, unused_imports)]
#[path = "../../include_test/include_test1_generated.rs"]
@@ -57,6 +57,10 @@ pub use monster_test_generated::my_game;
#[path = "../../optional_scalars_generated.rs"]
mod optional_scalars_generated;
+#[allow(dead_code, unused_imports)]
+#[path = "../../arrays_test_generated.rs"]
+mod arrays_test_generated;
+
#[rustfmt::skip] // TODO: Use standard rust formatting and remove dead code.
#[allow(dead_code)]
mod flatbuffers_tests {
@@ -1509,8 +1513,7 @@ mod roundtrip_table {
assert!(values_generated > 0);
assert!(min_tests_per_choice > 0);
for i in 0..test_value_types_max as u64 {
- assert!(stats[&i] >= min_tests_per_choice,
- format!("inadequately-tested fuzz case: {}", i));
+ assert!(stats[&i] >= min_tests_per_choice, "inadequately-tested fuzz case: {}", i);
}
}