author     Robert <rw@users.noreply.github.com>    2018-09-02 17:05:50 -0700
committer  rw <me@rwinslow.com>                    2018-09-02 18:26:55 -0700
commit     3c54fd964b6beae9a92955415568a001c9cea23d (patch)
tree       08f625977a0de84337044abe4ca7beadb4d9ac22
parent     e7578548a5714dd278d798344d6619d8cbbfb4d9 (diff)
Port FlatBuffers to Rust (#4898)
This is a port of FlatBuffers to Rust. It provides code generation and a runtime library derived from the C++ implementation. It utilizes the Rust type system to provide safe and fast traversal of FlatBuffers data. There are 188 tests, including many fuzz tests of roundtrips for various serialization scenarios. Initial benchmarks indicate that the canonical example payload can be written in ~700ns, and traversed in ~100ns. Rustaceans may be interested in the Follow, Push, and SafeSliceAccess traits. These traits lift traversals, reads, writes, and slice accesses into the type system, providing abstraction with no runtime penalty.
-rw-r--r--  .gitignore                                              2
-rw-r--r--  BUILD                                                   1
-rw-r--r--  CMakeLists.txt                                          1
-rw-r--r--  docs/source/Compiler.md                                 2
-rw-r--r--  docs/source/FlatBuffers.md                              4
-rw-r--r--  docs/source/RustUsage.md                              166
-rw-r--r--  docs/source/Support.md                                 34
-rw-r--r--  docs/source/Tutorial.md                               201
-rw-r--r--  docs/source/doxyfile                                    2
-rw-r--r--  include/flatbuffers/idl.h                              59
-rw-r--r--  rust/flatbuffers/Cargo.lock                             4
-rw-r--r--  rust/flatbuffers/Cargo.toml                             7
-rw-r--r--  rust/flatbuffers/src/builder.rs                       636
-rw-r--r--  rust/flatbuffers/src/endian_scalar.rs                 180
-rw-r--r--  rust/flatbuffers/src/follow.rs                         62
-rw-r--r--  rust/flatbuffers/src/lib.rs                            38
-rw-r--r--  rust/flatbuffers/src/primitives.rs                    298
-rw-r--r--  rust/flatbuffers/src/push.rs                           81
-rw-r--r--  rust/flatbuffers/src/table.rs                          77
-rw-r--r--  rust/flatbuffers/src/vector.rs                        133
-rw-r--r--  rust/flatbuffers/src/vtable.rs                         95
-rw-r--r--  rust/flatbuffers/src/vtable_writer.rs                  85
-rw-r--r--  samples/monster_generated.rs                          507
-rw-r--r--  samples/sample_binary.rs                              155
-rw-r--r--  src/flatc_main.cpp                                      4
-rw-r--r--  src/idl_gen_cpp.cpp                                     3
-rw-r--r--  src/idl_gen_general.cpp                                 4
-rw-r--r--  src/idl_gen_go.cpp                                      2
-rw-r--r--  src/idl_gen_lobster.cpp                                 2
-rw-r--r--  src/idl_gen_lua.cpp                                     2
-rw-r--r--  src/idl_gen_php.cpp                                     2
-rw-r--r--  src/idl_gen_python.cpp                                  2
-rw-r--r--  src/idl_gen_rust.cpp                                 1811
-rw-r--r--  src/idl_gen_text.cpp                                    6
-rw-r--r--  src/idl_parser.cpp                                     12
-rwxr-xr-x  tests/RustTest.sh                                      28
-rw-r--r--  tests/generate_code.bat                                 6
-rwxr-xr-x  tests/generate_code.sh                                  4
-rw-r--r--  tests/monster_test_generated.rs                      1644
-rw-r--r--  tests/namespace_test/namespace_test1_generated.rs     224
-rw-r--r--  tests/namespace_test/namespace_test2_generated.rs     291
-rw-r--r--  tests/rust_usage_test/Cargo.lock                      285
-rw-r--r--  tests/rust_usage_test/Cargo.toml                       22
-rw-r--r--  tests/rust_usage_test/benches/flatbuffers_benchmarks.rs 218
-rw-r--r--  tests/rust_usage_test/bin/monster_example.rs           18
-rw-r--r--  tests/rust_usage_test/tests/integration_test.rs      2639
46 files changed, 9998 insertions, 61 deletions
diff --git a/.gitignore b/.gitignore
index b45308ab..15db5909 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,6 +16,7 @@
*.keystore
**/.vs/**
**/bin/**
+!tests/rust_usage_test/bin/**
**/gen/**
**/libs/**
**/obj/**
@@ -56,6 +57,7 @@ tests/monsterdata_java_wire_sp.mon
tests/monsterdata_go_wire.mon
tests/monsterdata_javascript_wire.mon
tests/monsterdata_lobster_wire.mon
+tests/monsterdata_rust_wire.mon
tests/unicode_test.mon
tests/ts/
tests/php/
diff --git a/BUILD b/BUILD
index ba3ca2eb..1a3ff58b 100644
--- a/BUILD
+++ b/BUILD
@@ -92,6 +92,7 @@ cc_binary(
"src/idl_gen_lobster.cpp",
"src/idl_gen_php.cpp",
"src/idl_gen_python.cpp",
+ "src/idl_gen_rust.cpp",
"src/idl_gen_text.cpp",
],
includes = [
diff --git a/CMakeLists.txt b/CMakeLists.txt
index dab15caf..122009b6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -54,6 +54,7 @@ set(FlatBuffers_Compiler_SRCS
src/idl_gen_python.cpp
src/idl_gen_lobster.cpp
src/idl_gen_lua.cpp
+ src/idl_gen_rust.cpp
src/idl_gen_fbs.cpp
src/idl_gen_grpc.cpp
src/idl_gen_json_schema.cpp
diff --git a/docs/source/Compiler.md b/docs/source/Compiler.md
index 60494049..586087ad 100644
--- a/docs/source/Compiler.md
+++ b/docs/source/Compiler.md
@@ -43,6 +43,8 @@ For any schema input files, one or more generators can be specified:
- `--lobster`: Generate Lobster code.
+- `--rust`, `-r` : Generate Rust code.
+
For any data input files:
- `--binary`, `-b` : If data is contained in this file, generate a
diff --git a/docs/source/FlatBuffers.md b/docs/source/FlatBuffers.md
index 98042214..7cc93b92 100644
--- a/docs/source/FlatBuffers.md
+++ b/docs/source/FlatBuffers.md
@@ -4,7 +4,7 @@ FlatBuffers {#flatbuffers_index}
# Overview {#flatbuffers_overview}
[FlatBuffers](@ref flatbuffers_overview) is an efficient cross platform
-serialization library for C++, C#, C, Go, Java, JavaScript, Lobster, Lua, TypeScript, PHP, and Python.
+serialization library for C++, C#, C, Go, Java, JavaScript, Lobster, Lua, TypeScript, PHP, Python, and Rust.
It was originally created at Google for game development and other
performance-critical applications.
@@ -144,6 +144,8 @@ sections provide a more in-depth usage guide.
own programs.
- How to [use the generated Lobster code](@ref flatbuffers_guide_use_lobster) in your
own programs.
+- How to [use the generated Rust code](@ref flatbuffers_guide_use_rust) in your
+ own programs.
- [Support matrix](@ref flatbuffers_support) for platforms/languages/features.
- Some [benchmarks](@ref flatbuffers_benchmarks) showing the advantage of
using FlatBuffers.
diff --git a/docs/source/RustUsage.md b/docs/source/RustUsage.md
new file mode 100644
index 00000000..f4201086
--- /dev/null
+++ b/docs/source/RustUsage.md
@@ -0,0 +1,166 @@
+Use in Rust {#flatbuffers_guide_use_rust}
+==========
+
+## Before you get started
+
+Before diving into the FlatBuffers usage in Rust, it should be noted that
+the [Tutorial](@ref flatbuffers_guide_tutorial) page has a complete guide
+to general FlatBuffers usage in all of the supported languages (including Rust).
+This page is designed to cover the nuances of FlatBuffers usage, specific to
+Rust.
+
+#### Prerequisites
+
+This page assumes you have written a FlatBuffers schema and compiled it
+with the Schema Compiler. If you have not, please see
+[Using the schema compiler](@ref flatbuffers_guide_using_schema_compiler)
+and [Writing a schema](@ref flatbuffers_guide_writing_schema).
+
+Assuming you wrote a schema, say `mygame.fbs` (though the extension doesn't
+matter), and generated a Rust file called `mygame_generated.rs` using the
+compiler (e.g. `flatc --rust mygame.fbs`), you can now start using this in
+your program by including the generated file. As noted, this code relies on the
+crate `flatbuffers`, which should be declared as a dependency in your
+`Cargo.toml`.
+
+## FlatBuffers Rust library code location
+
+The code for the FlatBuffers Rust library can be found at
+`flatbuffers/rust`. You can browse the library code on the
+[FlatBuffers GitHub page](https://github.com/google/flatbuffers/tree/master/rust).
+
+## Testing the FlatBuffers Rust library
+
+The code to test the Rust library can be found at `flatbuffers/tests/rust_usage_test`.
+The test code itself is located in
+[integration_test.rs](https://github.com/google/flatbuffers/blob/master/tests/rust_usage_test/tests/integration_test.rs)
+
+This test file requires `flatc` to be present. To review how to build the project,
+please read the [Building](@ref flatbuffers_guide_building) documentation.
+
+To run the tests, execute `RustTest.sh` from the `flatbuffers/tests` directory.
+For example, on [Linux](https://en.wikipedia.org/wiki/Linux), you would simply
+run: `cd tests && ./RustTest.sh`.
+
+*Note: The shell script requires [Rust](https://www.rust-lang.org) to
+be installed.*
+
+## Using the FlatBuffers Rust library
+
+*Note: See [Tutorial](@ref flatbuffers_guide_tutorial) for a more in-depth
+example of how to use FlatBuffers in Rust.*
+
+FlatBuffers supports both reading and writing FlatBuffers in Rust.
+
+To use FlatBuffers in your code, first generate the Rust modules from your
+schema with the `--rust` option to `flatc`. Then you can import both FlatBuffers
+and the generated code to read or write FlatBuffers.
+
+For example, here is how you would read a FlatBuffer binary file in Rust:
+First, include the library and generated code. Then read the file into
+a `u8` vector, which you pass, as a byte slice, to `get_root_as_monster()`.
+
+This full example program is available in the Rust test suite:
+[monster_example.rs](https://github.com/google/flatbuffers/blob/master/tests/rust_usage_test/bin/monster_example.rs)
+
+It can be run by `cd`ing to the `rust_usage_test` directory and executing: `cargo run monster_example`.
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.rs}
+ extern crate flatbuffers;
+
+ #[path = "../../monster_test_generated.rs"]
+ mod monster_test_generated;
+ pub use monster_test_generated::my_game;
+
+ use std::io::Read;
+
+ fn main() {
+ let mut f = std::fs::File::open("../monsterdata_test.mon").unwrap();
+ let mut buf = Vec::new();
+ f.read_to_end(&mut buf).expect("file reading failed");
+
+ let monster = my_game::example::get_root_as_monster(&buf[..]);
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`monster` is of type `Monster`, and points to somewhere *inside* your
+buffer (the root object pointer is not the same as the buffer pointer!).
+If you look in your generated code, you'll see it has
+convenient accessors for all fields, e.g. `hp()`, `mana()`, etc.:
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.rs}
+ println!("{}", monster.hp()); // `80`
+ println!("{}", monster.mana()); // default value of `150`
+ println!("{:?}", monster.name()); // Some("MyMonster")
+ }
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+*Note: Since we never stored a `mana` value, it will return the default.*
+
+## Direct memory access
+
+As you can see from the above examples, all elements in a buffer are
+accessed through generated accessors. This is because everything is
+stored in little endian format on all platforms (the accessor
+performs a swap operation on big endian machines), and also because
+the layout of things is generally not known to the user.
+
+For structs, layout is deterministic and guaranteed to be the same
+across platforms (scalars are aligned to their
+own size, and structs themselves to their largest member), and you
+are allowed to access this memory directly by using `safe_slice`
+on the reference to a struct, or even an array of structs.
+
+To compute offsets to sub-elements of a struct, make sure they
+are structs themselves, as then you can use the pointers to
+figure out the offset without having to hardcode it. This is
+handy when using arrays of structs with calls like `glVertexAttribPointer`
+in OpenGL or similar APIs.
+
+It is important to note that structs are still little endian on all
+machines, so only use tricks like this if you can guarantee you're not
+shipping on a big endian machine (using an `#[cfg(target_endian = "little")]`
+attribute would be wise).
+
+The special function `safe_slice` is implemented on Vector objects that are
+represented in memory the same way as they are represented on the wire. This
+function is always available on vectors of structs, bool, u8, and i8. It is
+conditionally compiled, on little-endian systems only, for all the remaining scalar
+types.
+
+The FlatBufferBuilder function `create_vector_direct` is implemented for all
+types that are endian-safe to write with a `memcpy`. It is the write-equivalent
+of `safe_slice`.
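+
+As a minimal sketch (the element values and variable names here are
+illustrative), writing a `u8` vector with `create_vector_direct` looks like
+this:
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.rs}
+  let mut builder = flatbuffers::FlatBufferBuilder::new_with_capacity(1024);
+
+  // `u8` is always safe to memcpy on any platform, so the whole slice is
+  // copied into the buffer in one step:
+  let inventory = builder.create_vector_direct(&[0u8, 1, 2, 3, 4]);
+
+  // `inventory` is an offset that can then be stored in a table field,
+  // just like an offset returned by `create_vector`.
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~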
+
+## Access of untrusted buffers
+
+The generated accessor functions access fields over offsets, which is
+very quick. These offsets are used to index into Rust slices, so they are
+bounds-checked by the Rust runtime. However, our Rust implementation may
+change: we may convert access functions to use direct pointer dereferencing, to
+improve lookup speed. As a result, users should not rely on the aforementioned
+bounds-checking behavior.
+
+When you're processing large amounts of data from a source you know (e.g.
+your own generated data on disk), this is acceptable, but when reading
+data from the network that can potentially have been modified by an
+attacker, this is undesirable.
+
+The C++ port provides a buffer verifier. At this time, Rust does not. Rust may
+provide a verifier in a future version. In the meantime, Rust users can access
+the buffer verifier generated by the C++ port through a foreign function
+interface (FFI).
+
+## Threading
+
+Reading a FlatBuffer does not touch any memory outside the original buffer,
+and is entirely read-only (all immutable), so it is safe to access from multiple
+threads even without synchronisation primitives.
+
+Creating a FlatBuffer is not thread safe. All state related to building
+a FlatBuffer is contained in a FlatBufferBuilder instance, and no memory
+outside of it is touched. To make this thread safe, either do not
+share instances of FlatBufferBuilder between threads (recommended), or
+manually wrap it in synchronisation primitives. There's no automatic way to
+accomplish this, by design, as we feel multithreaded construction
+of a single buffer will be rare, and synchronisation overhead would be costly.
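+
+For example, a minimal sketch of the recommended pattern (one builder per
+thread; the names here are illustrative):
+
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.rs}
+  use std::thread;
+
+  let handles: Vec<_> = (0..4).map(|i| {
+      thread::spawn(move || {
+          // Each thread owns its own FlatBufferBuilder; nothing is shared,
+          // so no synchronisation primitives are needed.
+          let mut builder = flatbuffers::FlatBufferBuilder::new_with_capacity(1024);
+          let _name = builder.create_string(&format!("monster-{}", i));
+          // ... create tables, call a `finish` function, then copy out
+          // `builder.finished_data()` as needed ...
+      })
+  }).collect();
+
+  for handle in handles {
+      handle.join().unwrap();
+  }
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~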
+
+<br>
diff --git a/docs/source/Support.md b/docs/source/Support.md
index e4c66cd6..c8ac7f7e 100644
--- a/docs/source/Support.md
+++ b/docs/source/Support.md
@@ -18,23 +18,23 @@ In general:
NOTE: this table is a start, it needs to be extended.
-Feature | C++ | Java | C# | Go | Python | JS | TS | C | PHP | Dart | Lobster
------------------------------- | ------ | ------ | ------ | ------ | ------ | --------- | --------- | ------ | --- | ------- | -------
-Codegen for all basic features | Yes | Yes | Yes | Yes | Yes | Yes | Yes | Yes | WiP | Yes | Yes
-JSON parsing | Yes | No | No | No | No | No | No | Yes | No | No | Yes
-Simple mutation | Yes | Yes | Yes | Yes | No | No | No | No | No | No | No
-Reflection | Yes | No | No | No | No | No | No | Basic | No | No | No
-Buffer verifier | Yes | No | No | No | No | No | No | Yes | No | No | No
-Testing: basic | Yes | Yes | Yes | Yes | Yes | Yes | Yes | Yes | ? | Yes | Yes
-Testing: fuzz | Yes | No | No | Yes | Yes | No | No | No | ? | No | No
-Performance: | Superb | Great | Great | Great | Ok | ? | ? | Superb | ? | ? | Great
-Platform: Windows | VS2010 | Yes | Yes | ? | ? | ? | Yes | VS2010 | ? | Yes | Yes
-Platform: Linux | GCC282 | Yes | ? | Yes | Yes | ? | Yes | Yes | ? | Yes | Yes
-Platform: OS X | Xcode4 | ? | ? | ? | Yes | ? | Yes | Yes | ? | Yes | Yes
-Platform: Android | NDK10d | Yes | ? | ? | ? | ? | ? | ? | ? | Flutter | Yes
-Platform: iOS | ? | ? | ? | ? | ? | ? | ? | ? | ? | Flutter | Yes
-Engine: Unity | ? | ? | Yes | ? | ? | ? | ? | ? | ? | ? | No
-Primary authors (github) | aard* | aard* | ev*/js*| rw | rw | evanw/ev* | kr* | mik* | ch* | dnfield | aard*
+Feature | C++ | Java | C# | Go | Python | JS | TS | C | PHP | Dart | Lobster | Rust
+------------------------------ | ------ | ------ | ------ | ------ | ------ | --------- | --------- | ------ | --- | ------- | ------- | ----
+Codegen for all basic features | Yes | Yes | Yes | Yes | Yes | Yes | Yes | Yes | WiP | Yes | Yes | Yes
+JSON parsing | Yes | No | No | No | No | No | No | Yes | No | No | Yes | No
+Simple mutation | Yes | Yes | Yes | Yes | No | No | No | No | No | No | No | No
+Reflection | Yes | No | No | No | No | No | No | Basic | No | No | No | No
+Buffer verifier | Yes | No | No | No | No | No | No | Yes | No | No | No | No
+Testing: basic | Yes | Yes | Yes | Yes | Yes | Yes | Yes | Yes | ? | Yes | Yes | Yes
+Testing: fuzz | Yes | No | No | Yes | Yes | No | No | No | ? | No | No | Yes
+Performance: | Superb | Great | Great | Great | Ok | ? | ? | Superb | ? | ? | Great | Superb
+Platform: Windows | VS2010 | Yes | Yes | ? | ? | ? | Yes | VS2010 | ? | Yes | Yes | Yes
+Platform: Linux | GCC282 | Yes | ? | Yes | Yes | ? | Yes | Yes | ? | Yes | Yes | Yes
+Platform: OS X | Xcode4 | ? | ? | ? | Yes | ? | Yes | Yes | ? | Yes | Yes | Yes
+Platform: Android | NDK10d | Yes | ? | ? | ? | ? | ? | ? | ? | Flutter | Yes | ?
+Platform: iOS | ? | ? | ? | ? | ? | ? | ? | ? | ? | Flutter | Yes | ?
+Engine: Unity | ? | ? | Yes | ? | ? | ? | ? | ? | ? | ? | No | ?
+Primary authors (github) | aard* | aard* | ev*/js*| rw | rw | evanw/ev* | kr* | mik* | ch* | dnfield | aard* | rw
* aard = aardappel (previously: gwvo)
* ev = evolutional
diff --git a/docs/source/Tutorial.md b/docs/source/Tutorial.md
index 8cb1ab17..f2eaa2ea 100644
--- a/docs/source/Tutorial.md
+++ b/docs/source/Tutorial.md
@@ -33,6 +33,7 @@ Please select your desired language for our quest:
<input type="radio" name="language" value="dart">Dart</input>
<input type="radio" name="language" value="lua">Lua</input>
<input type="radio" name="language" value="lobster">Lobster</input>
+ <input type="radio" name="language" value="rust">Rust</input>
</form>
\endhtmlonly
@@ -144,6 +145,9 @@ For your chosen language, please cross-reference with:
<div class="language-lobster">
[sample_binary.lobster](https://github.com/google/flatbuffers/blob/master/samples/sample_binary.lobster)
</div>
+<div class="language-rust">
+[sample_binary.rs](https://github.com/google/flatbuffers/blob/master/samples/sample_binary.rs)
+</div>
## Writing the Monsters' FlatBuffer Schema
@@ -343,6 +347,12 @@ Please be aware of the difference between `flatc` and `flatcc` tools.
./../flatc --lobster monster.fbs
~~~
</div>
+<div class="language-rust">
+~~~{.sh}
+ cd flatbuffers/sample
+ ./../flatc --rust monster.fbs
+~~~
+</div>
For a more complete guide to using the `flatc` compiler, please read the
[Using the schema compiler](@ref flatbuffers_guide_using_schema_compiler)
@@ -479,6 +489,21 @@ The first step is to import/include the library, generated files, etc.
include "monster_generated.lobster"
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // import the flatbuffers runtime library
+ extern crate flatbuffers;
+
+ // import the generated code
+ #[path = "./monster_generated.rs"]
+ mod monster_generated;
+ pub use monster_generated::my_game::sample::{get_root_as_monster,
+ Color, Equipment,
+ Monster, MonsterArgs,
+ Vec3,
+ Weapon, WeaponArgs};
+~~~
+</div>
Now we are ready to start building some buffers. In order to start, we need
to create an instance of the `FlatBufferBuilder`, which will contain the buffer
@@ -570,6 +595,13 @@ which will grow automatically if needed:
let builder = flatbuffers_builder {}
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // Build up a serialized buffer algorithmically.
+ // Initialize it with a capacity of 1024 bytes.
+ let mut builder = flatbuffers::FlatBufferBuilder::new_with_capacity(1024);
+~~~
+</div>
After creating the `builder`, we can start serializing our data. Before we make
our `orc` Monster, lets create some `Weapon`s: a `Sword` and an `Axe`.
@@ -788,6 +820,24 @@ our `orc` Monster, lets create some `Weapon`s: a `Sword` and an `Axe`.
builder.MyGame_Sample_WeaponEnd()
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // Serialize some weapons for the Monster: A 'sword' and an 'axe'.
+ let weapon_one_name = builder.create_string("Sword");
+ let weapon_two_name = builder.create_string("Axe");
+
+ // Use the `Weapon::create` shortcut to create Weapons with named field
+ // arguments.
+ let sword = Weapon::create(&mut builder, &WeaponArgs{
+ name: Some(weapon_one_name),
+ damage: 3,
+ });
+ let axe = Weapon::create(&mut builder, &WeaponArgs{
+ name: Some(weapon_two_name),
+ damage: 5,
+ });
+~~~
+</div>
Now let's create our monster, the `orc`. For this `orc`, lets make him
`red` with rage, positioned at `(1.0, 2.0, 3.0)`, and give him
@@ -959,6 +1009,15 @@ traversal. This is generally easy to do on any tree structures.
let inv = builder.MyGame_Sample_MonsterCreateInventoryVector(map(10): _)
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // Name of the Monster.
+ let name = builder.create_string("Orc");
+
+ // Inventory.
+ let inventory = builder.create_vector(&[0u8, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
+~~~
+</div>
We serialized two built-in data types (`string` and `vector`) and captured
their return values. These values are offsets into the serialized data,
@@ -1086,8 +1145,14 @@ offsets.
let weapons = builder.MyGame_Sample_MonsterCreateWeaponsVector(weapon_offsets)
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // Create a FlatBuffer `vector` that contains offsets to the sword and axe
+ // we created above.
+ let weapons = builder.create_vector(&[sword, axe]);
+~~~
+</div>
-<div class="language-cpp">
<br>
Note there's additional convenience overloads of `CreateVector`, allowing you
to work with data that's not in a `std::vector`, or allowing you to generate
@@ -1203,6 +1268,18 @@ for the `path` field above:
let path = builder.EndVector(2)
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // Create the path vector of Vec3 objects.
+ let x = Vec3::new(1.0, 2.0, 3.0);
+ let y = Vec3::new(4.0, 5.0, 6.0);
+ let path = builder.create_vector(&[x, y]);
+
+ // Note that, for convenience, it is also valid to create a vector of
+ // references to structs, like this:
+ // let path = builder.create_vector(&[&x, &y]);
+~~~
+</div>
We have now serialized the non-scalar components of the orc, so we
can serialize the monster itself:
@@ -1438,6 +1515,27 @@ can serialize the monster itself:
let orc = builder.MyGame_Sample_MonsterEnd()
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // Create the monster using the `Monster::create` helper function. This
+ // function accepts a `MonsterArgs` struct, which supplies all of the data
+ // needed to build a `Monster`. To supply empty/default fields, just use the
+ // Rust built-in `Default::default()` function, as demonstrated below.
+ let orc = Monster::create(&mut builder, &MonsterArgs{
+ pos: Some(&Vec3::new(1.0f32, 2.0f32, 3.0f32)),
+ mana: 150,
+ hp: 80,
+ name: Some(name),
+ inventory: Some(inventory),
+ color: Color::Red,
+ weapons: Some(weapons),
+ equipped_type: Equipment::Weapon,
+ equipped: Some(axe.as_union_value()),
+ path: Some(path),
+ ..Default::default()
+ });
+~~~
+</div>
Note how we create `Vec3` struct in-line in the table. Unlike tables, structs
are simple combinations of scalars that are always stored inline, just like
@@ -1592,6 +1690,14 @@ Here is a repetition these lines, to help highlight them more clearly:
builder.MyGame_Sample_MonsterAddEquipped(axe)
~~~
</div>
+<div class="language-rust">
+ ~~~{.rs}
+ // You need to call `as_union_value` to turn an object into a type that
+ // can be used as a union value.
+ monster_builder.add_equipped_type(Equipment::Weapon); // Union type
+ monster_builder.add_equipped(axe.as_union_value()); // Union data
+ ~~~
+</div>
After you have created your buffer, you will have the offset to the root of the
data in the `orc` variable, so you can finish the buffer by calling the
@@ -1675,6 +1781,12 @@ appropriate `finish` method.
builder.Finish(orc)
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // Call `finish()` to instruct the builder that this monster is complete.
+ builder.finish(orc, None);
+~~~
+</div>
The buffer is now ready to be stored somewhere, sent over the network, be
compressed, or whatever you'd like to do with it. You can access the buffer
@@ -1784,6 +1896,13 @@ like so:
let buf = builder.SizedCopy() // Of type `string`.
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // This must be called after `finish()`.
+ // `finished_data` returns a byte slice.
+ let buf = builder.finished_data(); // Of type `&[u8]`
+~~~
+</div>
Now you can write the bytes to a file, send them over the network..
@@ -1917,6 +2036,21 @@ import './monster_my_game.sample_generated.dart' as myGame;
include "monster_generated.lobster"
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // import the flatbuffers runtime library
+ extern crate flatbuffers;
+
+ // import the generated code
+ #[path = "./monster_generated.rs"]
+ mod monster_generated;
+ pub use monster_generated::my_game::sample::{get_root_as_monster,
+ Color, Equipment,
+ Monster, MonsterArgs,
+ Vec3,
+ Weapon, WeaponArgs};
+~~~
+</div>
Then, assuming you have a buffer of bytes received from disk,
network, etc., you can create start accessing the buffer like so:
@@ -2044,6 +2178,14 @@ myGame.Monster monster = new myGame.Monster(data);
let monster = MyGame_Sample_GetRootAsMonster(buf)
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ let buf = /* the data you just read, in a &[u8] */
+
+ // Get an accessor to the root object inside the buffer.
+ let monster = get_root_as_monster(buf);
+~~~
+</div>
If you look in the generated files from the schema compiler, you will see it generated
accessors for all non-`deprecated` fields. For example:
@@ -2136,6 +2278,14 @@ accessors for all non-`deprecated` fields. For example:
let name = monster.name
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // Get and test some scalar types from the FlatBuffer.
+ let hp = monster.hp();
+ let mana = monster.mana();
+ let name = monster.name();
+~~~
+</div>
These should hold `300`, `150`, and `"Orc"` respectively.
@@ -2245,6 +2395,14 @@ To access sub-objects, in the case of our `pos`, which is a `Vec3`:
let z = pos.z
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ let pos = monster.pos().unwrap();
+ let x = pos.x();
+ let y = pos.y();
+ let z = pos.z();
+~~~
+</div>
`x`, `y`, and `z` will contain `1.0`, `2.0`, and `3.0`, respectively.
@@ -2329,6 +2487,16 @@ FlatBuffers `vector`.
let third_item = monster.inventory(2)
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // Get and test an element from the `inventory` FlatBuffer's `vector`.
+ let inv = monster.inventory().unwrap();
+
+ // Note that this vector is returned as a slice, because direct access for
+ // this type, a `u8` vector, is safe on all platforms:
+ let third_item = inv[2];
+~~~
+</div>
For `vector`s of `table`s, you can access the elements like any other vector,
except your need to handle the result as a FlatBuffer `table`:
@@ -2424,6 +2592,17 @@ except your need to handle the result as a FlatBuffer `table`:
let second_weapon_damage = monster.weapons(1).damage
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // Get and test the `weapons` FlatBuffer's `vector`.
+ let weps = monster.weapons().unwrap();
+ let weps_len = weps.len();
+
+ let wep2 = weps.get(1);
+ let second_weapon_name = wep2.name();
+ let second_weapon_damage = wep2.damage();
+~~~
+</div>
Last, we can access our `Equipped` FlatBuffer `union`. Just like when we created
the `union`, we need to get both parts of the `union`: the type and the data.
@@ -2585,6 +2764,18 @@ We can access the type to dynamically cast the data as needed (since the
let weapon_damage = union_weapon.damage // 5
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ // Get and test the `Equipment` union (`equipped` field).
+ // `equipped_as_weapon` returns a FlatBuffer handle much like normal table
+ // fields, but this will return `None` if the union is not actually of that
+ // type.
+ if monster.equipped_type() == Equipment::Weapon {
+ let equipped = monster.equipped_as_weapon().unwrap();
+ let weapon_name = equipped.name();
+ let weapon_damage = equipped.damage();
+~~~
+</div>
## Mutating FlatBuffers
@@ -2675,6 +2866,11 @@ mutators like so:
<API for mutating FlatBuffers is not yet available in Lobster.>
~~~
</div>
+<div class="language-rust">
+~~~{.rs}
+ <API for mutating FlatBuffers is not yet available in Rust.>
+~~~
+</div>
We use the somewhat verbose term `mutate` instead of `set` to indicate that this
is a special use case, not to be confused with the default way of constructing
@@ -2798,5 +2994,8 @@ For your chosen language, see:
<div class="language-lobster">
[Use in Lobster](@ref flatbuffers_guide_use_lobster)
</div>
+<div class="language-rust">
+[Use in Rust](@ref flatbuffers_guide_use_rust)
+</div>
<br>
diff --git a/docs/source/doxyfile b/docs/source/doxyfile
index 19a2ec94..6ba3c108 100644
--- a/docs/source/doxyfile
+++ b/docs/source/doxyfile
@@ -760,6 +760,7 @@ INPUT = "FlatBuffers.md" \
"PythonUsage.md" \
"LuaUsage.md" \
"LobsterUsage.md" \
+ "RustUsage.md" \
"Support.md" \
"Benchmarks.md" \
"WhitePaper.md" \
@@ -778,6 +779,7 @@ INPUT = "FlatBuffers.md" \
"../../net/FlatBuffers/FlatBufferBuilder.cs" \
"../../include/flatbuffers/flatbuffers.h" \
"../../go/builder.go"
+ "../../rust/flatbuffers/src/builder.rs"
# This tag can be used to specify the character encoding of the source files
# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
diff --git a/include/flatbuffers/idl.h b/include/flatbuffers/idl.h
index 54ebf764..75e34c52 100644
--- a/include/flatbuffers/idl.h
+++ b/include/flatbuffers/idl.h
@@ -41,24 +41,24 @@ namespace flatbuffers {
// of type tokens.
// clang-format off
#define FLATBUFFERS_GEN_TYPES_SCALAR(TD) \
- TD(NONE, "", uint8_t, byte, byte, byte, uint8) \
- TD(UTYPE, "", uint8_t, byte, byte, byte, uint8) /* begin scalar/int */ \
- TD(BOOL, "bool", uint8_t, boolean,byte, bool, bool) \
- TD(CHAR, "byte", int8_t, byte, int8, sbyte, int8) \
- TD(UCHAR, "ubyte", uint8_t, byte, byte, byte, uint8) \
- TD(SHORT, "short", int16_t, short, int16, short, int16) \
- TD(USHORT, "ushort", uint16_t, short, uint16, ushort, uint16) \
- TD(INT, "int", int32_t, int, int32, int, int32) \
- TD(UINT, "uint", uint32_t, int, uint32, uint, uint32) \
- TD(LONG, "long", int64_t, long, int64, long, int64) \
- TD(ULONG, "ulong", uint64_t, long, uint64, ulong, uint64) /* end int */ \
- TD(FLOAT, "float", float, float, float32, float, float32) /* begin float */ \
- TD(DOUBLE, "double", double, double, float64, double, float64) /* end float/scalar */
+ TD(NONE, "", uint8_t, byte, byte, byte, uint8, u8) \
+ TD(UTYPE, "", uint8_t, byte, byte, byte, uint8, u8) /* begin scalar/int */ \
+ TD(BOOL, "bool", uint8_t, boolean,byte, bool, bool, bool) \
+ TD(CHAR, "byte", int8_t, byte, int8, sbyte, int8, i8) \
+ TD(UCHAR, "ubyte", uint8_t, byte, byte, byte, uint8, u8) \
+ TD(SHORT, "short", int16_t, short, int16, short, int16, i16) \
+ TD(USHORT, "ushort", uint16_t, short, uint16, ushort, uint16, u16) \
+ TD(INT, "int", int32_t, int, int32, int, int32, i32) \
+ TD(UINT, "uint", uint32_t, int, uint32, uint, uint32, u32) \
+ TD(LONG, "long", int64_t, long, int64, long, int64, i64) \
+ TD(ULONG, "ulong", uint64_t, long, uint64, ulong, uint64, u64) /* end int */ \
+ TD(FLOAT, "float", float, float, float32, float, float32, f32) /* begin float */ \
+ TD(DOUBLE, "double", double, double, float64, double, float64, f64) /* end float/scalar */
#define FLATBUFFERS_GEN_TYPES_POINTER(TD) \
- TD(STRING, "string", Offset<void>, int, int, StringOffset, int) \
- TD(VECTOR, "", Offset<void>, int, int, VectorOffset, int) \
- TD(STRUCT, "", Offset<void>, int, int, int, int) \
- TD(UNION, "", Offset<void>, int, int, int, int)
+ TD(STRING, "string", Offset<void>, int, int, StringOffset, int, unused) \
+ TD(VECTOR, "", Offset<void>, int, int, VectorOffset, int, unused) \
+ TD(STRUCT, "", Offset<void>, int, int, int, int, unused) \
+ TD(UNION, "", Offset<void>, int, int, int, int, unused)
// The fields are:
// - enum
@@ -68,12 +68,14 @@ namespace flatbuffers {
// - Go type.
// - C# / .Net type.
// - Python type.
+// - Rust type.
// using these macros, we can now write code dealing with types just once, e.g.
/*
switch (type) {
- #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, \
+ RTYPE) \
case BASE_TYPE_ ## ENUM: \
// do something specific to CTYPE here
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
@@ -90,13 +92,15 @@ switch (type) {
__extension__ // Stop GCC complaining about trailing comma with -Wpendantic.
#endif
enum BaseType {
- #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, \
+ RTYPE) \
BASE_TYPE_ ## ENUM,
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
};
-#define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+#define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, \
+ RTYPE) \
static_assert(sizeof(CTYPE) <= sizeof(largest_scalar_t), \
"define largest_scalar_t as " #CTYPE);
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
@@ -111,6 +115,8 @@ inline bool IsFloat (BaseType t) { return t == BASE_TYPE_FLOAT ||
inline bool IsLong (BaseType t) { return t == BASE_TYPE_LONG ||
t == BASE_TYPE_ULONG; }
inline bool IsBool (BaseType t) { return t == BASE_TYPE_BOOL; }
+inline bool IsOneByte(BaseType t) { return t >= BASE_TYPE_UTYPE &&
+ t <= BASE_TYPE_UCHAR; }
// clang-format on
extern const char *const kTypeNames[];
@@ -410,6 +416,7 @@ struct IDLOptions {
kDart = 1 << 11,
kLua = 1 << 12,
kLobster = 1 << 13,
+ kRust = 1 << 14,
kMAX
};
@@ -834,6 +841,12 @@ extern bool GenerateLua(const Parser &parser,
const std::string &path,
const std::string &file_name);
+// Generate Rust files from the definitions in the Parser object.
+// See idl_gen_rust.cpp.
+extern bool GenerateRust(const Parser &parser,
+ const std::string &path,
+ const std::string &file_name);
+
// Generate Json schema file
// See idl_gen_json_schema.cpp.
extern bool GenerateJsonSchema(const Parser &parser,
@@ -872,6 +885,12 @@ extern std::string DartMakeRule(const Parser &parser,
const std::string &path,
const std::string &file_name);
+// Generate a make rule for the generated Rust code.
+// See idl_gen_rust.cpp.
+extern std::string RustMakeRule(const Parser &parser,
+ const std::string &path,
+ const std::string &file_name);
+
// Generate a make rule for the generated Java/C#/... files.
// See idl_gen_general.cpp.
extern std::string GeneralMakeRule(const Parser &parser,
diff --git a/rust/flatbuffers/Cargo.lock b/rust/flatbuffers/Cargo.lock
new file mode 100644
index 00000000..dc2168d0
--- /dev/null
+++ b/rust/flatbuffers/Cargo.lock
@@ -0,0 +1,4 @@
+[[package]]
+name = "flatbuffers"
+version = "0.1.0"
+
diff --git a/rust/flatbuffers/Cargo.toml b/rust/flatbuffers/Cargo.toml
new file mode 100644
index 00000000..f5914e99
--- /dev/null
+++ b/rust/flatbuffers/Cargo.toml
@@ -0,0 +1,7 @@
+[package]
+name = "flatbuffers"
+version = "0.1.0"
+authors = ["Robert Winslow <hello@rwinslow.com>", "FlatBuffers Maintainers"]
+
+[dependencies]
+smallvec = "0.6"
diff --git a/rust/flatbuffers/src/builder.rs b/rust/flatbuffers/src/builder.rs
new file mode 100644
index 00000000..2a808ef7
--- /dev/null
+++ b/rust/flatbuffers/src/builder.rs
@@ -0,0 +1,636 @@
+/*
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+extern crate smallvec;
+
+use std::cmp::max;
+use std::marker::PhantomData;
+use std::ptr::write_bytes;
+use std::slice::from_raw_parts;
+
+use endian_scalar::{read_scalar, emplace_scalar};
+use primitives::*;
+use push::{Push, PushAlignment};
+use table::Table;
+use vtable::{VTable, field_index_to_field_offset};
+use vtable_writer::VTableWriter;
+use vector::{SafeSliceAccess, Vector};
+
+#[derive(Clone, Copy, Debug)]
+struct FieldLoc {
+ off: UOffsetT,
+ id: VOffsetT,
+}
+
+/// FlatBufferBuilder builds a FlatBuffer through manipulating its internal
+/// state. It has an owned `Vec<u8>` that grows as needed (up to the hardcoded
+/// limit of 2GiB, which is set by the FlatBuffers format).
+pub struct FlatBufferBuilder<'fbb> {
+ owned_buf: Vec<u8>,
+ head: usize,
+
+ field_locs: Vec<FieldLoc>,
+ written_vtable_revpos: Vec<UOffsetT>,
+
+ nested: bool,
+ finished: bool,
+
+ min_align: usize,
+
+ _phantom: PhantomData<&'fbb ()>,
+}
+
+impl<'fbb> FlatBufferBuilder<'fbb> {
+ /// Create a FlatBufferBuilder that is ready for writing.
+ pub fn new() -> Self {
+ Self::new_with_capacity(0)
+ }
+
+ /// Create a FlatBufferBuilder that is ready for writing, with a
+ /// ready-to-use capacity of the provided size.
+ ///
+ /// The maximum valid value is `FLATBUFFERS_MAX_BUFFER_SIZE`.
+ pub fn new_with_capacity(size: usize) -> Self {
+ // we need to check the size here because we create the backing buffer
+ // directly, bypassing the typical way of using grow_owned_buf:
+ assert!(size <= FLATBUFFERS_MAX_BUFFER_SIZE,
+ "cannot initialize buffer bigger than 2 gigabytes");
+
+ FlatBufferBuilder {
+ owned_buf: vec![0u8; size],
+ head: size,
+
+ field_locs: Vec::new(),
+ written_vtable_revpos: Vec::new(),
+
+ nested: false,
+ finished: false,
+
+ min_align: 0,
+
+ _phantom: PhantomData,
+ }
+ }
+
+ /// Reset the FlatBufferBuilder internal state. Use this method after a
+ /// call to a `finish` function in order to re-use a FlatBufferBuilder.
+ ///
+ /// This function is the only way to reset the `finished` state and start
+ /// again.
+ ///
+ /// If you are using a FlatBufferBuilder repeatedly, make sure to use this
+ /// function, because it re-uses the FlatBufferBuilder's existing
+ /// heap-allocated `Vec<u8>` internal buffer. This offers significant speed
+ /// improvements as compared to creating a new FlatBufferBuilder for every
+ /// new object.
+ pub fn reset(&mut self) {
+ // memset only the part of the buffer that could be dirty:
+ {
+ let to_clear = self.owned_buf.len() - self.head;
+ let ptr = (&mut self.owned_buf[self.head..]).as_mut_ptr();
+ unsafe { write_bytes(ptr, 0, to_clear); }
+ }
+
+ self.head = self.owned_buf.len();
+ self.written_vtable_revpos.clear();
+
+ self.nested = false;
+ self.finished = false;
+
+ self.min_align = 0;
+ }
+
+ /// Destroy the FlatBufferBuilder, returning its internal byte vector
+ /// and the index into it that represents the start of valid data.
+ pub fn collapse(self) -> (Vec<u8>, usize) {
+ (self.owned_buf, self.head)
+ }
+
+ /// Push a Push'able value onto the front of the in-progress data.
+ ///
+ /// This function uses traits to provide a unified API for writing
+ /// scalars, tables, vectors, and WIPOffsets.
+ #[inline]
+ pub fn push<P: Push>(&mut self, x: P) -> WIPOffset<P::Output> {
+ let sz = P::size();
+ self.align(sz, P::alignment());
+ self.make_space(sz);
+ {
+ let (dst, rest) = (&mut self.owned_buf[self.head..]).split_at_mut(sz);
+ x.push(dst, rest);
+ }
+ WIPOffset::new(self.used_space() as UOffsetT)
+ }
+
+ /// Push a Push'able value onto the front of the in-progress data, and
+ /// store a reference to it in the in-progress vtable. If the value matches
+ /// the default, then this is a no-op.
+ #[inline]
+ pub fn push_slot<X: Push + PartialEq>(&mut self, slotoff: VOffsetT, x: X, default: X) {
+ self.assert_nested("push_slot");
+ if x == default {
+ return;
+ }
+ self.push_slot_always(slotoff, x);
+ }
+
+ /// Push a Push'able value onto the front of the in-progress data, and
+ /// store a reference to it in the in-progress vtable.
+ #[inline]
+ pub fn push_slot_always<X: Push>(&mut self, slotoff: VOffsetT, x: X) {
+ self.assert_nested("push_slot_always");
+ let off = self.push(x);
+ self.track_field(slotoff, off.value());
+ }
+
+ /// Retrieve the number of vtables that have been serialized into the
+ /// FlatBuffer. This is primarily used to check vtable deduplication.
+ #[inline]
+ pub fn num_written_vtables(&self) -> usize {
+ self.written_vtable_revpos.len()
+ }
+
+ /// Start a Table write.
+ ///
+ /// Asserts that the builder is not in a nested state.
+ ///
+ /// Users probably want to use `push_slot` to add values after calling this.
+ #[inline]
+ pub fn start_table(&mut self) -> WIPOffset<TableUnfinishedWIPOffset> {
+ self.assert_not_nested("start_table can not be called when a table or vector is under construction");
+ self.nested = true;
+
+ WIPOffset::new(self.used_space() as UOffsetT)
+ }
+
+ /// End a Table write.
+ ///
+ /// Asserts that the builder is in a nested state.
+ #[inline]
+ pub fn end_table(&mut self, off: WIPOffset<TableUnfinishedWIPOffset>) -> WIPOffset<TableFinishedWIPOffset> {
+ self.assert_nested("end_table");
+
+ let o = self.write_vtable(off);
+
+ self.nested = false;
+ self.field_locs.clear();
+
+ WIPOffset::new(o.value())
+ }
+
+ /// Start a Vector write.
+ ///
+ /// Asserts that the builder is not in a nested state.
+ ///
+ /// Most users will prefer to call `create_vector`.
+ /// Speed optimizing users who choose to create vectors manually using this
+ /// function will want to use `push` to add values.
+ #[inline]
+ pub fn start_vector<T: Push>(&mut self, num_items: usize) {
+ self.assert_not_nested("start_vector can not be called when a table or vector is under construction");
+ self.nested = true;
+ self.align(num_items * T::size(), T::alignment().max_of(SIZE_UOFFSET));
+ }
+
+ /// End a Vector write.
+ ///
+ /// Note that the `num_elems` parameter is the number of written items, not
+ /// the byte count.
+ ///
+ /// Asserts that the builder is in a nested state.
+ #[inline]
+ pub fn end_vector<T: Push>(&mut self, num_elems: usize) -> WIPOffset<Vector<'fbb, T>> {
+ self.assert_nested("end_vector");
+ self.nested = false;
+ let o = self.push::<UOffsetT>(num_elems as UOffsetT);
+ WIPOffset::new(o.value())
+ }
+
+ /// Create a utf8 string.
+ ///
+ /// The wire format represents this as a zero-terminated byte vector.
+ #[inline]
+ pub fn create_string<'a: 'b, 'b>(&'a mut self, s: &'b str) -> WIPOffset<&'fbb str> {
+ self.assert_not_nested("create_string can not be called when a table or vector is under construction");
+ WIPOffset::new(self.create_byte_string(s.as_bytes()).value())
+ }
+
+ /// Create a zero-terminated byte vector.
+ #[inline]
+ pub fn create_byte_string(&mut self, data: &[u8]) -> WIPOffset<&'fbb [u8]> {
+ self.assert_not_nested("create_byte_string can not be called when a table or vector is under construction");
+ self.align(data.len() + 1, PushAlignment::new(SIZE_UOFFSET));
+ self.push(0u8);
+ self.push_bytes_unprefixed(data);
+ self.push(data.len() as UOffsetT);
+ WIPOffset::new(self.used_space() as UOffsetT)
+ }
+
+ /// Create a vector by memcpy'ing. This is much faster than calling
+ /// `create_vector`, but the underlying type must be represented as
+ /// little-endian on the host machine. This property is encoded in the
+ /// type system through the SafeSliceAccess trait. The following types are
+ /// always safe, on any platform: bool, u8, i8, and any
+ /// FlatBuffers-generated struct.
+ #[inline]
+ pub fn create_vector_direct<'a: 'b, 'b, T: SafeSliceAccess + Push + Sized + 'b>(&'a mut self, items: &'b [T]) -> WIPOffset<Vector<'fbb, T>> {
+ self.assert_not_nested("create_vector_direct can not be called when a table or vector is under construction");
+ let elem_size = T::size();
+ self.align(items.len() * elem_size, T::alignment().max_of(SIZE_UOFFSET));
+
+ let bytes = {
+ let ptr = items.as_ptr() as *const T as *const u8;
+ unsafe { from_raw_parts(ptr, items.len() * elem_size) }
+ };
+ self.push_bytes_unprefixed(bytes);
+ self.push(items.len() as UOffsetT);
+
+ WIPOffset::new(self.used_space() as UOffsetT)
+ }
+
+ /// Create a vector of strings.
+ ///
+ /// Speed-sensitive users may wish to reduce memory usage by creating the
+ /// vector manually: use `create_vector`, `push`, and `end_vector`.
+ #[inline]
+ pub fn create_vector_of_strings<'a, 'b>(&'a mut self, xs: &'b [&'b str]) -> WIPOffset<Vector<'fbb, ForwardsUOffset<&'fbb str>>> {
+ self.assert_not_nested("create_vector_of_strings can not be called when a table or vector is under construction");
+ // internally, smallvec can be a stack-allocated or heap-allocated vector.
+ // we expect it to usually be stack-allocated.
+ let mut offsets: smallvec::SmallVec<[WIPOffset<&str>; 0]> = smallvec::SmallVec::with_capacity(xs.len());
+ unsafe { offsets.set_len(xs.len()); }
+ for (i, &s) in xs.iter().enumerate().rev() {
+ let o = self.create_string(s);
+ offsets[i] = o;
+ }
+ self.create_vector(&offsets[..])
+ }
+
+ /// Create a vector of Push-able objects.
+ ///
+ /// Speed-sensitive users may wish to reduce memory usage by creating the
+ /// vector manually: use `create_vector`, `push`, and `end_vector`.
+ #[inline]
+ pub fn create_vector<'a: 'b, 'b, T: Push + Copy + 'b>(&'a mut self, items: &'b [T]) -> WIPOffset<Vector<'fbb, T::Output>> {
+ let elem_size = T::size();
+ self.align(items.len() * elem_size, T::alignment().max_of(SIZE_UOFFSET));
+ for i in (0..items.len()).rev() {
+ self.push(items[i]);
+ }
+ WIPOffset::new(self.push::<UOffsetT>(items.len() as UOffsetT).value())
+ }
+
+ /// Get the byte slice for the data that has been written, regardless of
+ /// whether it has been finished.
+ #[inline]
+ pub fn unfinished_data(&self) -> &[u8] {
+ &self.owned_buf[self.head..]
+ }
+ /// Get the byte slice for the data that has been written after a call to
+ /// one of the `finish` functions.
+ #[inline]
+ pub fn finished_data(&self) -> &[u8] {
+ self.assert_finished("finished_bytes cannot be called when the buffer is not yet finished");
+ &self.owned_buf[self.head..]
+ }
+ /// Assert that a field is present in the just-finished Table.
+ ///
+ /// This is somewhat low-level and is mostly used by the generated code.
+ #[inline]
+ pub fn required(&self,
+ tab_revloc: WIPOffset<TableFinishedWIPOffset>,
+ slot_byte_loc: VOffsetT,
+ assert_msg_name: &'static str) {
+ let idx = self.used_space() - tab_revloc.value() as usize;
+ let tab = Table::new(&self.owned_buf[self.head..], idx);
+ let o = tab.vtable().get(slot_byte_loc) as usize;
+ assert!(o != 0, "missing required field {}", assert_msg_name);
+ }
+
+ /// Finalize the FlatBuffer by: aligning it, pushing an optional file
+ /// identifier on to it, pushing a size prefix on to it, and marking the
+ /// internal state of the FlatBufferBuilder as `finished`. Afterwards,
+ /// users can call `finished_data` to get the resulting data.
+ #[inline]
+ pub fn finish_size_prefixed<T>(&mut self, root: WIPOffset<T>, file_identifier: Option<&str>) {
+ self.finish_with_opts(root, file_identifier, true);
+ }
+
+ /// Finalize the FlatBuffer by: aligning it, pushing an optional file
+ /// identifier on to it, and marking the internal state of the
+ /// FlatBufferBuilder as `finished`. Afterwards, users can call
+ /// `finished_data` to get the resulting data.
+ #[inline]
+ pub fn finish<T>(&mut self, root: WIPOffset<T>, file_identifier: Option<&str>) {
+ self.finish_with_opts(root, file_identifier, false);
+ }
+
+ /// Finalize the FlatBuffer by: aligning it and marking the internal state
+ /// of the FlatBufferBuilder as `finished`. Afterwards, users can call
+ /// `finished_data` to get the resulting data.
+ #[inline]
+ pub fn finish_minimal<T>(&mut self, root: WIPOffset<T>) {
+ self.finish_with_opts(root, None, false);
+ }
+
+ #[inline]
+ fn used_space(&self) -> usize {
+ self.owned_buf.len() - self.head as usize
+ }
+
+ #[inline]
+ fn track_field(&mut self, slot_off: VOffsetT, off: UOffsetT) {
+ let fl = FieldLoc {
+ id: slot_off,
+ off: off,
+ };
+ self.field_locs.push(fl);
+ }
+
+ /// Write the VTable, if it is new.
+ fn write_vtable(&mut self, table_tail_revloc: WIPOffset<TableUnfinishedWIPOffset>) -> WIPOffset<VTableWIPOffset> {
+ self.assert_nested("write_vtable");
+
+ // Write the vtable offset, which is the start of any Table.
+ // We fill its value later.
+ let object_revloc_to_vtable: WIPOffset<VTableWIPOffset> =
+ WIPOffset::new(self.push::<UOffsetT>(0xF0F0F0F0 as UOffsetT).value());
+
+ // Layout of the data this function will create when a new vtable is
+ // needed.
+ // --------------------------------------------------------------------
+ // vtable starts here
+ // | x, x -- vtable len (bytes) [u16]
+ // | x, x -- object inline len (bytes) [u16]
+ // | x, x -- zero, or num bytes from start of object to field #0 [u16]
+ // | ...
+ // | x, x -- zero, or num bytes from start of object to field #n-1 [u16]
+ // vtable ends here
+ // table starts here
+ // | x, x, x, x -- offset (negative direction) to the vtable [i32]
+ // | aka "vtableoffset"
+ // | -- table inline data begins here, we don't touch it --
+ // table ends here -- aka "table_start"
+ // --------------------------------------------------------------------
+ //
+ // Layout of the data this function will create when we re-use an
+ // existing vtable.
+ //
+ // We always serialize this particular vtable, then compare it to the
+ // other vtables we know about to see if there is a duplicate. If there
+ // is, then we erase the serialized vtable we just made.
+ // We serialize it first so that we are able to do byte-by-byte
+ // comparisons with already-serialized vtables. This 1) saves
+ // bookkeeping space (we only keep revlocs to existing vtables), 2)
+ // allows us to convert to little-endian once, then do
+ // fast memcmp comparisons, and 3) by ensuring we are comparing real
+ // serialized vtables, we can be more assured that we are doing the
+ // comparisons correctly.
+ //
+ // --------------------------------------------------------------------
+ // table starts here
+ // | x, x, x, x -- offset (negative direction) to an existing vtable [i32]
+ // | aka "vtableoffset"
+ // | -- table inline data begins here, we don't touch it --
+ // table starts here: aka "table_start"
+ // --------------------------------------------------------------------
+
+ // fill the WIP vtable with zeros:
+ let vtable_byte_len = get_vtable_byte_len(&self.field_locs);
+ self.make_space(vtable_byte_len);
+
+ // compute the length of the table (not vtable!) in bytes:
+ let table_object_size = object_revloc_to_vtable.value() - table_tail_revloc.value();
+ debug_assert!(table_object_size < 0x10000); // vtables use 16-bit offsets.
+
+ // Write the VTable (we may delete it afterwards, if it is a duplicate):
+ let vt_start_pos = self.head;
+ let vt_end_pos = self.head + vtable_byte_len;
+ {
+ // write the vtable header:
+ let vtfw = &mut VTableWriter::init(&mut self.owned_buf[vt_start_pos..vt_end_pos]);
+ vtfw.write_vtable_byte_length(vtable_byte_len as VOffsetT);
+ vtfw.write_object_inline_size(table_object_size as VOffsetT);
+
+ // serialize every FieldLoc to the vtable:
+ for &fl in self.field_locs.iter() {
+ let pos: VOffsetT = (object_revloc_to_vtable.value() - fl.off) as VOffsetT;
+ debug_assert_eq!(vtfw.get_field_offset(fl.id),
+ 0,
+ "tried to write a vtable field multiple times");
+ vtfw.write_field_offset(fl.id, pos);
+ }
+ }
+ let dup_vt_use = {
+ let this_vt = VTable::init(&self.owned_buf[..], self.head);
+ self.find_duplicate_stored_vtable_revloc(this_vt)
+ };
+
+ let vt_use = match dup_vt_use {
+ Some(n) => {
+ VTableWriter::init(&mut self.owned_buf[vt_start_pos..vt_end_pos]).clear();
+ self.head += vtable_byte_len;
+ n
+ }
+ None => {
+ let new_vt_use = self.used_space() as UOffsetT;
+ self.written_vtable_revpos.push(new_vt_use);
+ new_vt_use
+ }
+ };
+
+ {
+ let n = self.head + self.used_space() - object_revloc_to_vtable.value() as usize;
+ let saw = read_scalar::<UOffsetT>(&self.owned_buf[n..n + SIZE_SOFFSET]);
+ debug_assert_eq!(saw, 0xF0F0F0F0);
+ emplace_scalar::<SOffsetT>(&mut self.owned_buf[n..n + SIZE_SOFFSET],
+ vt_use as SOffsetT - object_revloc_to_vtable.value() as SOffsetT);
+ }
+
+ self.field_locs.clear();
+
+ object_revloc_to_vtable
+ }
+
+ #[inline]
+ fn find_duplicate_stored_vtable_revloc(&self, needle: VTable) -> Option<UOffsetT> {
+ for &revloc in self.written_vtable_revpos.iter().rev() {
+ let o = VTable::init(&self.owned_buf[..], self.head + self.used_space() - revloc as usize);
+ if needle == o {
+ return Some(revloc);
+ }
+ }
+ None
+ }
+
+ // Only call this when you know it is safe to double the size of the buffer.
+ #[inline]
+ fn grow_owned_buf(&mut self) {
+ let old_len = self.owned_buf.len();
+ let new_len = max(1, old_len * 2);
+
+ let starting_active_size = self.used_space();
+
+ let diff = new_len - old_len;
+ self.owned_buf.resize(new_len, 0);
+ self.head += diff;
+
+ let ending_active_size = self.used_space();
+ debug_assert_eq!(starting_active_size, ending_active_size);
+
+ if new_len == 1 {
+ return;
+ }
+
+ // calculate the midpoint, and safely copy the old end data to the new
+ // end position:
+ let middle = new_len / 2;
+ {
+ let (left, right) = &mut self.owned_buf[..].split_at_mut(middle);
+ right.copy_from_slice(left);
+ }
+ // finally, zero out the old end data.
+ {
+ let ptr = (&mut self.owned_buf[..middle]).as_mut_ptr();
+ unsafe { write_bytes(ptr, 0, middle); }
+ }
+ }
+
+ // with or without a size prefix changes how we load the data, so finish*
+ // functions are split along those lines.
+ fn finish_with_opts<T>(&mut self,
+ root: WIPOffset<T>,
+ file_identifier: Option<&str>,
+ size_prefixed: bool) {
+ self.assert_not_finished("buffer cannot be finished when it is already finished");
+ self.assert_not_nested("buffer cannot be finished when a table or vector is under construction");
+ self.written_vtable_revpos.clear();
+
+ let to_align = {
+ // for the root offset:
+ let a = SIZE_UOFFSET;
+ // for the size prefix:
+ let b = if size_prefixed { SIZE_UOFFSET } else { 0 };
+ // for the file identifier (a string that is not zero-terminated):
+ let c = if file_identifier.is_some() {
+ FILE_IDENTIFIER_LENGTH
+ } else {
+ 0
+ };
+ a + b + c
+ };
+
+ {
+ let ma = PushAlignment::new(self.min_align);
+ self.align(to_align, ma);
+ }
+
+ if let Some(ident) = file_identifier {
+ debug_assert_eq!(ident.len(), FILE_IDENTIFIER_LENGTH);
+ self.push_bytes_unprefixed(ident.as_bytes());
+ }
+
+ self.push(root);
+
+ if size_prefixed {
+ let sz = self.used_space() as UOffsetT;
+ self.push::<UOffsetT>(sz);
+ }
+ self.finished = true;
+ }
+
+ #[inline]
+ fn align(&mut self, len: usize, alignment: PushAlignment) {
+ self.track_min_align(alignment.value());
+ let s = self.used_space() as usize;
+ self.make_space(padding_bytes(s + len, alignment.value()));
+ }
+
+ #[inline]
+ fn track_min_align(&mut self, alignment: usize) {
+ self.min_align = max(self.min_align, alignment);
+ }
+
+ #[inline]
+ fn push_bytes_unprefixed(&mut self, x: &[u8]) -> UOffsetT {
+ let n = self.make_space(x.len());
+ &mut self.owned_buf[n..n + x.len()].copy_from_slice(x);
+
+ n as UOffsetT
+ }
+
+ #[inline]
+ fn make_space(&mut self, want: usize) -> usize {
+ self.ensure_capacity(want);
+ self.head -= want;
+ self.head
+ }
+
+ #[inline]
+ fn ensure_capacity(&mut self, want: usize) -> usize {
+ if self.unused_ready_space() >= want {
+ return want;
+ }
+ assert!(want <= FLATBUFFERS_MAX_BUFFER_SIZE,
+ "cannot grow buffer beyond 2 gigabytes");
+
+ while self.unused_ready_space() < want {
+ self.grow_owned_buf();
+ }
+ want
+ }
+ #[inline]
+ fn unused_ready_space(&self) -> usize {
+ self.head
+ }
+ #[inline]
+ fn assert_nested(&self, fn_name: &'static str) {
+ // we don't assert that self.field_locs.len() >0 because the vtable
+ // could be empty (e.g. for empty tables, or for all-default values).
+ debug_assert!(self.nested, format!("incorrect FlatBufferBuilder usage: {} must be called while in a nested state", fn_name));
+ }
+ #[inline]
+ fn assert_not_nested(&self, msg: &'static str) {
+ debug_assert!(!self.nested, msg);
+ }
+ #[inline]
+ fn assert_finished(&self, msg: &'static str) {
+ debug_assert!(self.finished, msg);
+ }
+ #[inline]
+ fn assert_not_finished(&self, msg: &'static str) {
+ debug_assert!(!self.finished, msg);
+ }
+
+}
+
+/// Compute the length of the vtable needed to represent the provided FieldLocs.
+/// If there are no FieldLocs, then provide the minimum number of bytes
+/// required: enough to write the VTable header.
+#[inline]
+fn get_vtable_byte_len(field_locs: &[FieldLoc]) -> usize {
+ let max_voffset = field_locs.iter().map(|fl| fl.id).max();
+ match max_voffset {
+ None => { field_index_to_field_offset(0) as usize }
+ Some(mv) => { mv as usize + SIZE_VOFFSET }
+ }
+}
+
+#[inline]
+fn padding_bytes(buf_size: usize, scalar_size: usize) -> usize {
+ // ((!buf_size) + 1) & (scalar_size - 1)
+ (!buf_size).wrapping_add(1) & (scalar_size.wrapping_sub(1))
+}
diff --git a/rust/flatbuffers/src/endian_scalar.rs b/rust/flatbuffers/src/endian_scalar.rs
new file mode 100644
index 00000000..00f2ebef
--- /dev/null
+++ b/rust/flatbuffers/src/endian_scalar.rs
@@ -0,0 +1,180 @@
+/*
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use std::mem::size_of;
+
+/// Trait for values that must be stored in little-endian byte order, but
+/// might be represented in memory as big-endian. Every type that implements
+/// EndianScalar is a valid FlatBuffers scalar value.
+///
+/// The Rust stdlib does not provide a trait to represent scalars, so this trait
+/// serves that purpose, too.
+///
+/// Note that we do not use the num-traits crate for this, because it provides
+/// "too much". For example, num-traits provides i128 support, but that is an
+/// invalid FlatBuffers type.
+pub trait EndianScalar: Sized + PartialEq + Copy + Clone {
+ fn to_little_endian(self) -> Self;
+ fn from_little_endian(self) -> Self;
+}
+
+/// Macro for implementing a no-op endian conversion. This is used for types
+/// that are one byte wide.
+macro_rules! impl_endian_scalar_noop {
+ ($ty:ident) => (
+ impl EndianScalar for $ty {
+ #[inline]
+ fn to_little_endian(self) -> Self {
+ self
+ }
+ #[inline]
+ fn from_little_endian(self) -> Self {
+ self
+ }
+ }
+ )
+}
+
+/// Macro for implementing an endian conversion using the stdlib `to_le` and
+/// `from_le` functions. This is used for integer types. It is not used for
+/// floats, because `to_le` and `from_le` are not implemented for them in
+/// the stdlib.
+macro_rules! impl_endian_scalar_stdlib_le_conversion {
+ ($ty:ident) => (
+ impl EndianScalar for $ty {
+ #[inline]
+ fn to_little_endian(self) -> Self {
+ Self::to_le(self)
+ }
+ #[inline]
+ fn from_little_endian(self) -> Self {
+ Self::from_le(self)
+ }
+ }
+ )
+}
+
+impl_endian_scalar_noop!(bool);
+impl_endian_scalar_noop!(u8);
+impl_endian_scalar_noop!(i8);
+
+impl_endian_scalar_stdlib_le_conversion!(u16);
+impl_endian_scalar_stdlib_le_conversion!(u32);
+impl_endian_scalar_stdlib_le_conversion!(u64);
+impl_endian_scalar_stdlib_le_conversion!(i16);
+impl_endian_scalar_stdlib_le_conversion!(i32);
+impl_endian_scalar_stdlib_le_conversion!(i64);
+
+impl EndianScalar for f32 {
+ /// Convert f32 from host endian-ness to little-endian.
+ #[inline]
+ fn to_little_endian(self) -> Self {
+ #[cfg(target_endian = "little")]
+ {
+ self
+ }
+ #[cfg(not(target_endian = "little"))]
+ {
+            byte_swap_f32(self)
+ }
+ }
+ /// Convert f32 from little-endian to host endian-ness.
+ #[inline]
+ fn from_little_endian(self) -> Self {
+ #[cfg(target_endian = "little")]
+ {
+ self
+ }
+ #[cfg(not(target_endian = "little"))]
+ {
+            byte_swap_f32(self)
+ }
+ }
+}
+
+impl EndianScalar for f64 {
+ /// Convert f64 from host endian-ness to little-endian.
+ #[inline]
+ fn to_little_endian(self) -> Self {
+ #[cfg(target_endian = "little")]
+ {
+ self
+ }
+ #[cfg(not(target_endian = "little"))]
+ {
+            byte_swap_f64(self)
+ }
+ }
+ /// Convert f64 from little-endian to host endian-ness.
+ #[inline]
+ fn from_little_endian(self) -> Self {
+ #[cfg(target_endian = "little")]
+ {
+ self
+ }
+ #[cfg(not(target_endian = "little"))]
+ {
+            byte_swap_f64(self)
+ }
+ }
+}
+
+/// Swaps the bytes of an f32.
+#[allow(dead_code)]
+#[inline]
+pub fn byte_swap_f32(x: f32) -> f32 {
+ f32::from_bits(x.to_bits().swap_bytes())
+}
+
+/// Swaps the bytes of an f64.
+#[allow(dead_code)]
+#[inline]
+pub fn byte_swap_f64(x: f64) -> f64 {
+ f64::from_bits(x.to_bits().swap_bytes())
+}
+
+/// Place an EndianScalar into the provided mutable byte slice. Performs
+/// endian conversion, if necessary.
+#[inline]
+pub fn emplace_scalar<T: EndianScalar>(s: &mut [u8], x: T) {
+ let sz = size_of::<T>();
+ let mut_ptr = (&mut s[..sz]).as_mut_ptr() as *mut T;
+ let val = x.to_little_endian();
+ unsafe {
+        std::ptr::write_unaligned(mut_ptr, val);
+ }
+}
+
+/// Read an EndianScalar from the provided byte slice at the specified location.
+/// Performs endian conversion, if necessary.
+#[inline]
+pub fn read_scalar_at<T: EndianScalar>(s: &[u8], loc: usize) -> T {
+ let buf = &s[loc..loc + size_of::<T>()];
+ read_scalar(buf)
+}
+
+/// Read an EndianScalar from the provided byte slice. Performs endian
+/// conversion, if necessary.
+#[inline]
+pub fn read_scalar<T: EndianScalar>(s: &[u8]) -> T {
+ let sz = size_of::<T>();
+
+ let p = (&s[..sz]).as_ptr() as *const T;
+    let x = unsafe { std::ptr::read_unaligned(p) };
+
+ x.from_little_endian()
+}
+
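A minimal sketch of how these helpers behave (assuming the `flatbuffers` crate from this patch is available as a dependency): scalars always hit the wire in little-endian order, and the float helpers just reverse the bit pattern.

    extern crate flatbuffers;
    use flatbuffers::{byte_swap_f32, emplace_scalar, read_scalar, read_scalar_at};

    fn main() {
        let mut buf = [0u8; 8];

        // Stored little-endian regardless of the host's endianness.
        emplace_scalar::<u32>(&mut buf[0..4], 0x01020304);
        assert_eq!(&buf[0..4], &[0x04u8, 0x03, 0x02, 0x01][..]);
        assert_eq!(read_scalar::<u32>(&buf[0..4]), 0x01020304);

        // read_scalar_at reads at an offset into a larger buffer.
        emplace_scalar::<u16>(&mut buf[4..6], 0xBEEF);
        assert_eq!(read_scalar_at::<u16>(&buf, 4), 0xBEEF);

        // byte_swap_f32 reverses the byte order of the float's bit pattern.
        assert_eq!(byte_swap_f32(1.0f32).to_bits(), 0x0000_803F);
    }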
diff --git a/rust/flatbuffers/src/follow.rs b/rust/flatbuffers/src/follow.rs
new file mode 100644
index 00000000..4d3eff77
--- /dev/null
+++ b/rust/flatbuffers/src/follow.rs
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use std::marker::PhantomData;
+
+/// Follow is a trait that allows us to access FlatBuffers in a declarative,
+/// type-safe, and fast way. Follow impls compile down to almost no code (after
+/// optimizations). Conceptually, Follow lifts the offset-based access
+/// patterns of FlatBuffers data into the type system. This trait is used
+/// pervasively at read time, to access tables, vtables, vectors, strings, and
+/// all other data. At this time, Follow is not utilized much on the write
+/// path.
+///
+/// Writing a new Follow implementation primarily involves deciding whether
+/// you want to return data (of type Self::Inner) or to
+/// continue traversing the FlatBuffer.
+pub trait Follow<'a> {
+ type Inner;
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner;
+}
+
+/// Execute a follow as a top-level function.
+#[allow(dead_code)]
+#[inline]
+pub fn lifted_follow<'a, T: Follow<'a>>(buf: &'a [u8], loc: usize) -> T::Inner {
+ T::follow(buf, loc)
+}
+
+/// FollowStart wraps a Follow impl in a struct type. This can make certain
+/// programming patterns more ergonomic.
+#[derive(Debug)]
+pub struct FollowStart<T>(PhantomData<T>);
+impl<'a, T: Follow<'a> + 'a> FollowStart<T> {
+ #[inline]
+ pub fn new() -> Self {
+ Self { 0: PhantomData }
+ }
+ #[inline]
+ pub fn self_follow(&'a self, buf: &'a [u8], loc: usize) -> T::Inner {
+ T::follow(buf, loc)
+ }
+}
+impl<'a, T: Follow<'a>> Follow<'a> for FollowStart<T> {
+ type Inner = T::Inner;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ T::follow(buf, loc)
+ }
+}
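A hypothetical example of the "return data" case described above. `FourBytes` is not part of the patch; it simply yields a fixed-width byte slice at the given location (the `flatbuffers` crate from this patch is assumed as a dependency).

    extern crate flatbuffers;
    use flatbuffers::Follow;

    struct FourBytes;

    impl<'a> Follow<'a> for FourBytes {
        // Inner is what the traversal yields at this location.
        type Inner = &'a [u8];
        fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
            &buf[loc..loc + 4]
        }
    }

    fn main() {
        let buf = [0u8, 1, 2, 3, 4, 5, 6, 7];
        // The traversal is driven entirely by the type parameter and a location.
        assert_eq!(FourBytes::follow(&buf, 2), &[2u8, 3, 4, 5][..]);
    }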
diff --git a/rust/flatbuffers/src/lib.rs b/rust/flatbuffers/src/lib.rs
new file mode 100644
index 00000000..1783b34c
--- /dev/null
+++ b/rust/flatbuffers/src/lib.rs
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+mod builder;
+mod endian_scalar;
+mod follow;
+mod primitives;
+mod push;
+mod table;
+mod vector;
+mod vtable;
+mod vtable_writer;
+
+pub use builder::FlatBufferBuilder;
+pub use endian_scalar::{EndianScalar, emplace_scalar, read_scalar, read_scalar_at, byte_swap_f32, byte_swap_f64};
+pub use follow::{Follow, FollowStart};
+pub use primitives::*;
+pub use push::Push;
+pub use table::{Table, buffer_has_identifier, get_root, get_size_prefixed_root};
+pub use vector::{SafeSliceAccess, Vector, follow_cast_ref};
+pub use vtable::field_index_to_field_offset;
+
+// TODO(rw): Unify `create_vector` and `create_vector_direct` by using
+// `Into<Vector<...>>`.
+// TODO(rw): Split fill ops in builder into fill_small, fill_big like in C++.
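A minimal sketch of how the exported pieces compose for reads (assuming this crate as a dependency): a hand-written buffer whose root offset points at a length-prefixed string can be read with `get_root::<&str>`.

    extern crate flatbuffers;

    fn main() {
        // Bytes 0..4: root offset (4, pointing just past itself).
        // Bytes 4..8: string length (2), followed by the string bytes "hi".
        let buf = [4u8, 0, 0, 0, 2, 0, 0, 0, b'h', b'i'];
        let s: &str = flatbuffers::get_root::<&str>(&buf);
        assert_eq!(s, "hi");
    }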
diff --git a/rust/flatbuffers/src/primitives.rs b/rust/flatbuffers/src/primitives.rs
new file mode 100644
index 00000000..59176b8f
--- /dev/null
+++ b/rust/flatbuffers/src/primitives.rs
@@ -0,0 +1,298 @@
+/*
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use std::marker::PhantomData;
+use std::mem::size_of;
+use std::ops::Deref;
+
+use endian_scalar::{emplace_scalar, read_scalar, read_scalar_at};
+use follow::Follow;
+use push::Push;
+
+pub const FLATBUFFERS_MAX_BUFFER_SIZE: usize = (1u64 << 31) as usize;
+
+pub const FILE_IDENTIFIER_LENGTH: usize = 4;
+
+pub const VTABLE_METADATA_FIELDS: usize = 2;
+
+pub const SIZE_U8: usize = size_of::<u8>();
+pub const SIZE_I8: usize = size_of::<i8>();
+
+pub const SIZE_U16: usize = size_of::<u16>();
+pub const SIZE_I16: usize = size_of::<i16>();
+
+pub const SIZE_U32: usize = size_of::<u32>();
+pub const SIZE_I32: usize = size_of::<i32>();
+
+pub const SIZE_U64: usize = size_of::<u64>();
+pub const SIZE_I64: usize = size_of::<i64>();
+
+pub const SIZE_F32: usize = size_of::<f32>();
+pub const SIZE_F64: usize = size_of::<f64>();
+
+pub const SIZE_SOFFSET: usize = SIZE_I32;
+pub const SIZE_UOFFSET: usize = SIZE_U32;
+pub const SIZE_VOFFSET: usize = SIZE_I16;
+
+pub const SIZE_SIZEPREFIX: usize = SIZE_UOFFSET;
+
+/// SOffsetT is an i32 that is used by tables to reference their vtables.
+pub type SOffsetT = i32;
+
+/// UOffsetT is a u32 that is used pervasively to represent both pointers
+/// and lengths of vectors.
+pub type UOffsetT = u32;
+
+/// VOffsetT is an i16 that is used by vtables to store field offsets.
+pub type VOffsetT = i16;
+
+/// TableFinishedWIPOffset marks a WIPOffset as being for a finished table.
+pub struct TableFinishedWIPOffset {}
+
+/// TableUnfinishedWIPOffset marks a WIPOffset as being for an unfinished table.
+pub struct TableUnfinishedWIPOffset {}
+
+/// UnionWIPOffset marks a WIPOffset as being for a union value.
+pub struct UnionWIPOffset {}
+
+/// VTableWIPOffset marks a WIPOffset as being for a vtable.
+pub struct VTableWIPOffset {}
+
+/// WIPOffset contains an UOffsetT with a special meaning: it is the location of
+/// data relative to the *end* of an in-progress FlatBuffer. The
+/// FlatBufferBuilder uses this to track the location of objects in an absolute
+/// way. The impl of Push converts a WIPOffset into a ForwardsUOffset.
+#[derive(Debug)]
+pub struct WIPOffset<T>(UOffsetT, PhantomData<T>);
+
+// TODO(rw): why do we need to reimplement (with a default impl) Copy to
+// avoid ownership errors?
+impl<T> Copy for WIPOffset<T> {}
+impl<T> Clone for WIPOffset<T> {
+ #[inline]
+ fn clone(&self) -> WIPOffset<T> {
+ WIPOffset::new(self.0.clone())
+ }
+}
+impl<T> PartialEq for WIPOffset<T> {
+ fn eq(&self, o: &WIPOffset<T>) -> bool {
+ self.value() == o.value()
+ }
+}
+
+impl<T> Deref for WIPOffset<T> {
+ type Target = UOffsetT;
+ #[inline]
+ fn deref(&self) -> &UOffsetT {
+ &self.0
+ }
+}
+impl<'a, T: 'a> WIPOffset<T> {
+ /// Create a new WIPOffset.
+ #[inline]
+ pub fn new(o: UOffsetT) -> WIPOffset<T> {
+ WIPOffset {
+ 0: o,
+ 1: PhantomData,
+ }
+ }
+
+ /// Return a wrapped value that brings its meaning as a union WIPOffset
+ /// into the type system.
+ #[inline(always)]
+ pub fn as_union_value(&self) -> WIPOffset<UnionWIPOffset> {
+ WIPOffset::new(self.0)
+ }
+ /// Get the underlying value.
+ #[inline(always)]
+ pub fn value(&self) -> UOffsetT {
+ self.0
+ }
+}
+
+impl<T> Push for WIPOffset<T> {
+ type Output = ForwardsUOffset<T>;
+
+ #[inline(always)]
+ fn push(&self, dst: &mut [u8], rest: &[u8]) {
+ let n = (SIZE_UOFFSET + rest.len() - self.value() as usize) as UOffsetT;
+ emplace_scalar::<UOffsetT>(dst, n);
+ }
+}
+
+impl<T> Push for ForwardsUOffset<T> {
+ type Output = Self;
+
+ #[inline(always)]
+ fn push(&self, dst: &mut [u8], rest: &[u8]) {
+ self.value().push(dst, rest);
+ }
+}
+
+/// ForwardsUOffset is used by Follow to traverse a FlatBuffer: the pointer
+/// is incremented by the value contained in this type.
+#[derive(Debug)]
+pub struct ForwardsUOffset<T>(UOffsetT, PhantomData<T>);
+impl<T> ForwardsUOffset<T> {
+ #[inline(always)]
+ pub fn value(&self) -> UOffsetT {
+ self.0
+ }
+}
+
+impl<'a, T: Follow<'a>> Follow<'a> for ForwardsUOffset<T> {
+ type Inner = T::Inner;
+ #[inline(always)]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ let slice = &buf[loc..loc + SIZE_UOFFSET];
+ let off = read_scalar::<u32>(slice) as usize;
+ T::follow(buf, loc + off)
+ }
+}
+
+/// ForwardsVOffset is used by Follow to traverse a FlatBuffer: the pointer
+/// is incremented by the value contained in this type.
+#[derive(Debug)]
+pub struct ForwardsVOffset<T>(VOffsetT, PhantomData<T>);
+impl<T> ForwardsVOffset<T> {
+ #[inline(always)]
+ pub fn value(&self) -> VOffsetT {
+ self.0
+ }
+}
+
+impl<'a, T: Follow<'a>> Follow<'a> for ForwardsVOffset<T> {
+ type Inner = T::Inner;
+ #[inline(always)]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ let slice = &buf[loc..loc + SIZE_VOFFSET];
+ let off = read_scalar::<VOffsetT>(slice) as usize;
+ T::follow(buf, loc + off)
+ }
+}
+
+impl<T> Push for ForwardsVOffset<T> {
+ type Output = Self;
+
+ #[inline]
+ fn push(&self, dst: &mut [u8], rest: &[u8]) {
+ self.value().push(dst, rest);
+ }
+}
+
+/// BackwardsSOffset is used by Follow to traverse a FlatBuffer: the pointer
+/// is incremented by the *negative* of the value contained in this type.
+#[derive(Debug)]
+pub struct BackwardsSOffset<T>(SOffsetT, PhantomData<T>);
+impl<T> BackwardsSOffset<T> {
+ #[inline(always)]
+ pub fn value(&self) -> SOffsetT {
+ self.0
+ }
+}
+
+impl<'a, T: Follow<'a>> Follow<'a> for BackwardsSOffset<T> {
+ type Inner = T::Inner;
+ #[inline(always)]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ let slice = &buf[loc..loc + SIZE_SOFFSET];
+ let off = read_scalar::<SOffsetT>(slice);
+ T::follow(buf, (loc as SOffsetT - off) as usize)
+ }
+}
+
+impl<T> Push for BackwardsSOffset<T> {
+ type Output = Self;
+
+ #[inline]
+ fn push(&self, dst: &mut [u8], rest: &[u8]) {
+ self.value().push(dst, rest);
+ }
+}
+
+/// SkipSizePrefix is used by Follow to traverse a FlatBuffer: the pointer is
+/// incremented by a fixed constant in order to skip over the size prefix value.
+pub struct SkipSizePrefix<T>(PhantomData<T>);
+impl<'a, T: Follow<'a> + 'a> Follow<'a> for SkipSizePrefix<T> {
+ type Inner = T::Inner;
+ #[inline(always)]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ T::follow(buf, loc + SIZE_SIZEPREFIX)
+ }
+}
+
+/// SkipRootOffset is used by Follow to traverse a FlatBuffer: the pointer is
+/// incremented by a fixed constant in order to skip over the root offset value.
+pub struct SkipRootOffset<T>(PhantomData<T>);
+impl<'a, T: Follow<'a> + 'a> Follow<'a> for SkipRootOffset<T> {
+ type Inner = T::Inner;
+ #[inline(always)]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ T::follow(buf, loc + SIZE_UOFFSET)
+ }
+}
+
+/// FileIdentifier is used by Follow to traverse a FlatBuffer: the pointer is
+/// dereferenced into a byte slice, whose bytes are the file identifier value.
+pub struct FileIdentifier;
+impl<'a> Follow<'a> for FileIdentifier {
+ type Inner = &'a [u8];
+ #[inline(always)]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ &buf[loc..loc + FILE_IDENTIFIER_LENGTH]
+ }
+}
+
+/// SkipFileIdentifier is used by Follow to traverse a FlatBuffer: the pointer
+/// is incremented by a fixed constant in order to skip over the file
+/// identifier value.
+pub struct SkipFileIdentifier<T>(PhantomData<T>);
+impl<'a, T: Follow<'a> + 'a> Follow<'a> for SkipFileIdentifier<T> {
+ type Inner = T::Inner;
+ #[inline(always)]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ T::follow(buf, loc + FILE_IDENTIFIER_LENGTH)
+ }
+}
+
+/// Follow trait impls for primitive types.
+///
+/// Ideally, these would be implemented as a single impl using trait bounds on
+/// EndianScalar, but implementing Follow that way causes a conflict with
+/// other impls.
+macro_rules! impl_follow_for_endian_scalar {
+ ($ty:ident) => (
+ impl<'a> Follow<'a> for $ty {
+ type Inner = $ty;
+ #[inline(always)]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ read_scalar_at::<$ty>(buf, loc)
+ }
+ }
+ )
+}
+
+impl_follow_for_endian_scalar!(bool);
+impl_follow_for_endian_scalar!(u8);
+impl_follow_for_endian_scalar!(u16);
+impl_follow_for_endian_scalar!(u32);
+impl_follow_for_endian_scalar!(u64);
+impl_follow_for_endian_scalar!(i8);
+impl_follow_for_endian_scalar!(i16);
+impl_follow_for_endian_scalar!(i32);
+impl_follow_for_endian_scalar!(i64);
+impl_follow_for_endian_scalar!(f32);
+impl_follow_for_endian_scalar!(f64);
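The forwards-offset traversal above can be exercised by hand (assuming this crate as a dependency): a u32 offset of 4 stored at location 0 points at a little-endian u16 stored at location 4.

    extern crate flatbuffers;
    use flatbuffers::{Follow, ForwardsUOffset};

    fn main() {
        let buf = [4u8, 0, 0, 0, 42, 0];
        // Reads the u32 offset at loc 0, then follows a u16 at loc 0 + 4.
        let v = <ForwardsUOffset<u16>>::follow(&buf, 0);
        assert_eq!(v, 42u16);
    }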
diff --git a/rust/flatbuffers/src/push.rs b/rust/flatbuffers/src/push.rs
new file mode 100644
index 00000000..2b307a3a
--- /dev/null
+++ b/rust/flatbuffers/src/push.rs
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use std::cmp::max;
+use std::mem::{align_of, size_of};
+
+use endian_scalar::emplace_scalar;
+
+/// Trait to abstract over functionality needed to write values (either owned
+/// or referenced). Used in FlatBufferBuilder and implemented for generated
+/// types.
+pub trait Push: Sized {
+ type Output;
+ fn push(&self, dst: &mut [u8], _rest: &[u8]);
+ #[inline]
+ fn size() -> usize {
+ size_of::<Self::Output>()
+ }
+ #[inline]
+ fn alignment() -> PushAlignment {
+ PushAlignment::new(align_of::<Self::Output>())
+ }
+}
+
+/// Ensure Push alignment calculations are typesafe (because this helps reduce
+/// implementation issues when using FlatBufferBuilder::align).
+pub struct PushAlignment(usize);
+impl PushAlignment {
+ #[inline]
+ pub fn new(x: usize) -> Self {
+ PushAlignment { 0: x }
+ }
+ #[inline]
+ pub fn value(&self) -> usize {
+ self.0
+ }
+ #[inline]
+ pub fn max_of(&self, o: usize) -> Self {
+ PushAlignment::new(max(self.0, o))
+ }
+}
+
+/// Macro to implement Push for EndianScalar types.
+macro_rules! impl_push_for_endian_scalar {
+ ($ty:ident) => (
+ impl Push for $ty {
+ type Output = $ty;
+
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ emplace_scalar::<$ty>(dst, *self);
+ }
+
+ }
+ )
+}
+
+impl_push_for_endian_scalar!(bool);
+impl_push_for_endian_scalar!(u8);
+impl_push_for_endian_scalar!(i8);
+impl_push_for_endian_scalar!(u16);
+impl_push_for_endian_scalar!(i16);
+impl_push_for_endian_scalar!(u32);
+impl_push_for_endian_scalar!(i32);
+impl_push_for_endian_scalar!(u64);
+impl_push_for_endian_scalar!(i64);
+impl_push_for_endian_scalar!(f32);
+impl_push_for_endian_scalar!(f64);
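A small sketch of the Push contract (assuming this crate as a dependency): scalar impls write their little-endian bytes into `dst` and ignore `_rest`, which only matters for offset types that measure the distance to data already written.

    extern crate flatbuffers;
    use flatbuffers::Push;

    fn main() {
        let mut dst = [0u8; 4];
        let x: u32 = 0x01020304;
        x.push(&mut dst, &[]);
        assert_eq!(dst, [4u8, 3, 2, 1]);

        // Size and alignment are derived from the associated Output type.
        assert_eq!(<u32 as Push>::size(), 4);
        assert_eq!(<u32 as Push>::alignment().value(), 4);
    }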
diff --git a/rust/flatbuffers/src/table.rs b/rust/flatbuffers/src/table.rs
new file mode 100644
index 00000000..d9e952d0
--- /dev/null
+++ b/rust/flatbuffers/src/table.rs
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use follow::Follow;
+use primitives::*;
+use vtable::VTable;
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub struct Table<'a> {
+ pub buf: &'a [u8],
+ pub loc: usize,
+}
+
+impl<'a> Table<'a> {
+ #[inline]
+ pub fn new(buf: &'a [u8], loc: usize) -> Self {
+ Table { buf: buf, loc: loc }
+ }
+ #[inline]
+ pub fn vtable(&'a self) -> VTable<'a> {
+ <BackwardsSOffset<VTable<'a>>>::follow(self.buf, self.loc)
+ }
+ #[inline]
+ pub fn get<T: Follow<'a> + 'a>(
+ &'a self,
+ slot_byte_loc: VOffsetT,
+ default: Option<T::Inner>,
+ ) -> Option<T::Inner> {
+ let o = self.vtable().get(slot_byte_loc) as usize;
+ if o == 0 {
+ return default;
+ }
+ Some(<T>::follow(self.buf, self.loc + o))
+ }
+}
+
+impl<'a> Follow<'a> for Table<'a> {
+ type Inner = Table<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Table { buf: buf, loc: loc }
+ }
+}
+
+#[inline]
+pub fn get_root<'a, T: Follow<'a> + 'a>(data: &'a [u8]) -> T::Inner {
+ <ForwardsUOffset<T>>::follow(data, 0)
+}
+#[inline]
+pub fn get_size_prefixed_root<'a, T: Follow<'a> + 'a>(data: &'a [u8]) -> T::Inner {
+ <SkipSizePrefix<ForwardsUOffset<T>>>::follow(data, 0)
+}
+#[inline]
+pub fn buffer_has_identifier(data: &[u8], ident: &str, size_prefixed: bool) -> bool {
+ assert_eq!(ident.len(), FILE_IDENTIFIER_LENGTH);
+
+ let got = if size_prefixed {
+ <SkipSizePrefix<SkipRootOffset<FileIdentifier>>>::follow(data, 0)
+ } else {
+ <SkipRootOffset<FileIdentifier>>::follow(data, 0)
+ };
+
+ ident.as_bytes() == got
+}
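The identifier check only inspects the four bytes directly after the root offset, so it can be demonstrated on a hand-written prefix (assuming this crate as a dependency); "MONS" is just a placeholder identifier.

    extern crate flatbuffers;

    fn main() {
        // Bytes 0..4: root offset (unused by the check), bytes 4..8: identifier.
        let buf = [8u8, 0, 0, 0, b'M', b'O', b'N', b'S', 0, 0, 0, 0];
        assert!(flatbuffers::buffer_has_identifier(&buf, "MONS", false));
        assert!(!flatbuffers::buffer_has_identifier(&buf, "XXXX", false));
    }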
diff --git a/rust/flatbuffers/src/vector.rs b/rust/flatbuffers/src/vector.rs
new file mode 100644
index 00000000..8c2d6d50
--- /dev/null
+++ b/rust/flatbuffers/src/vector.rs
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use std::marker::PhantomData;
+use std::mem::size_of;
+use std::slice::from_raw_parts;
+use std::str::from_utf8_unchecked;
+
+use endian_scalar::{EndianScalar, read_scalar};
+use follow::Follow;
+use primitives::*;
+
+#[derive(Debug)]
+pub struct Vector<'a, T: 'a>(&'a [u8], usize, PhantomData<T>);
+
+impl<'a, T: 'a> Vector<'a, T> {
+ #[inline(always)]
+ pub fn new(buf: &'a [u8], loc: usize) -> Self {
+ Vector {
+ 0: buf,
+ 1: loc,
+ 2: PhantomData,
+ }
+ }
+
+ #[inline(always)]
+ pub fn len(&self) -> usize {
+ read_scalar::<UOffsetT>(&self.0[self.1 as usize..]) as usize
+ }
+}
+
+impl<'a, T: Follow<'a> + 'a> Vector<'a, T> {
+ #[inline(always)]
+ pub fn get(&self, idx: usize) -> T::Inner {
+ debug_assert!(idx < read_scalar::<u32>(&self.0[self.1 as usize..]) as usize);
+ let sz = size_of::<T>();
+ debug_assert!(sz > 0);
+ T::follow(self.0, self.1 as usize + SIZE_UOFFSET + sz * idx)
+ }
+}
+
+pub trait SafeSliceAccess {}
+impl<'a, T: SafeSliceAccess + 'a> Vector<'a, T> {
+ pub fn safe_slice(self) -> &'a [T] {
+ let buf = self.0;
+ let loc = self.1;
+ let sz = size_of::<T>();
+ debug_assert!(sz > 0);
+ let len = read_scalar::<UOffsetT>(&buf[loc..loc + SIZE_UOFFSET]) as usize;
+ let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz];
+ let ptr = data_buf.as_ptr() as *const T;
+ let s: &'a [T] = unsafe { from_raw_parts(ptr, len) };
+ s
+ }
+}
+
+impl SafeSliceAccess for u8 {}
+impl SafeSliceAccess for i8 {}
+impl SafeSliceAccess for bool {}
+
+#[cfg(target_endian = "little")]
+mod le_safe_slice_impls {
+ impl super::SafeSliceAccess for u16 {}
+ impl super::SafeSliceAccess for u32 {}
+ impl super::SafeSliceAccess for u64 {}
+
+ impl super::SafeSliceAccess for i16 {}
+ impl super::SafeSliceAccess for i32 {}
+ impl super::SafeSliceAccess for i64 {}
+
+ impl super::SafeSliceAccess for f32 {}
+ impl super::SafeSliceAccess for f64 {}
+}
+
+pub use self::le_safe_slice_impls::*;
+
+pub fn follow_cast_ref<'a, T: Sized + 'a>(buf: &'a [u8], loc: usize) -> &'a T {
+ let sz = size_of::<T>();
+ let buf = &buf[loc..loc + sz];
+ let ptr = buf.as_ptr() as *const T;
+ unsafe { &*ptr }
+}
+
+impl<'a> Follow<'a> for &'a str {
+ type Inner = &'a str;
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ let len = read_scalar::<UOffsetT>(&buf[loc..loc + SIZE_UOFFSET]) as usize;
+ let slice = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len];
+ let s = unsafe { from_utf8_unchecked(slice) };
+ s
+ }
+}
+
+fn follow_slice_helper<T>(buf: &[u8], loc: usize) -> &[T] {
+ let sz = size_of::<T>();
+ debug_assert!(sz > 0);
+ let len = read_scalar::<UOffsetT>(&buf[loc..loc + SIZE_UOFFSET]) as usize;
+ let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz];
+ let ptr = data_buf.as_ptr() as *const T;
+ let s: &[T] = unsafe { from_raw_parts(ptr, len) };
+ s
+}
+
+/// Implement direct slice access if the host is little-endian.
+#[cfg(target_endian = "little")]
+impl<'a, T: EndianScalar> Follow<'a> for &'a [T] {
+ type Inner = &'a [T];
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ follow_slice_helper::<T>(buf, loc)
+ }
+}
+
+/// Implement Follow for all possible Vectors that have Follow-able elements.
+impl<'a, T: Follow<'a> + 'a> Follow<'a> for Vector<'a, T> {
+ type Inner = Vector<'a, T>;
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Vector::new(buf, loc)
+ }
+}
+
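The layout a Vector reads is a little-endian u32 length followed by the elements. A hand-written u8 vector (assuming this crate as a dependency) shows `len`, `get`, and the zero-copy `safe_slice` path.

    extern crate flatbuffers;
    use flatbuffers::Vector;

    fn main() {
        // Length 3, then the elements [10, 20, 30], starting at location 0.
        let buf = [3u8, 0, 0, 0, 10, 20, 30];
        let v = Vector::<u8>::new(&buf, 0);
        assert_eq!(v.len(), 3);
        assert_eq!(v.get(1), 20);
        // u8 implements SafeSliceAccess on every target, so this is zero-copy.
        assert_eq!(v.safe_slice(), &[10u8, 20, 30][..]);
    }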
diff --git a/rust/flatbuffers/src/vtable.rs b/rust/flatbuffers/src/vtable.rs
new file mode 100644
index 00000000..cd7ede6e
--- /dev/null
+++ b/rust/flatbuffers/src/vtable.rs
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use endian_scalar::read_scalar_at;
+use follow::Follow;
+use primitives::*;
+
+/// VTable encapsulates read-only usage of a vtable. It is only to be used
+/// by generated code.
+#[derive(Debug)]
+pub struct VTable<'a> {
+ buf: &'a [u8],
+ loc: usize,
+}
+
+impl<'a> PartialEq for VTable<'a> {
+ fn eq(&self, other: &VTable) -> bool {
+ self.as_bytes().eq(other.as_bytes())
+ }
+}
+
+impl<'a> VTable<'a> {
+ pub fn init(buf: &'a [u8], loc: usize) -> Self {
+ VTable {
+ buf: buf,
+ loc: loc,
+ }
+ }
+ pub fn num_fields(&self) -> usize {
+ (self.num_bytes() / SIZE_VOFFSET) - 2
+ }
+ pub fn num_bytes(&self) -> usize {
+ read_scalar_at::<VOffsetT>(self.buf, self.loc) as usize
+ }
+ pub fn object_inline_num_bytes(&self) -> usize {
+ let n = read_scalar_at::<VOffsetT>(self.buf, self.loc + SIZE_VOFFSET);
+ n as usize
+ }
+ pub fn get_field(&self, idx: usize) -> VOffsetT {
+ // TODO(rw): distinguish between None and 0?
+        if idx >= self.num_fields() {
+ return 0;
+ }
+ read_scalar_at::<VOffsetT>(
+ self.buf,
+ self.loc + SIZE_VOFFSET + SIZE_VOFFSET + SIZE_VOFFSET * idx,
+ )
+ }
+ pub fn get(&self, byte_loc: VOffsetT) -> VOffsetT {
+ // TODO(rw): distinguish between None and 0?
+ if byte_loc as usize >= self.num_bytes() {
+ return 0;
+ }
+ read_scalar_at::<VOffsetT>(self.buf, self.loc + byte_loc as usize)
+ }
+ pub fn as_bytes(&self) -> &[u8] {
+ let len = self.num_bytes();
+ &self.buf[self.loc..self.loc + len]
+ }
+}
+
+
+#[allow(dead_code)]
+pub fn field_index_to_field_offset(field_id: VOffsetT) -> VOffsetT {
+    // Should correspond to what FlatBufferBuilder::end_table() builds up.
+ let fixed_fields = 2; // Vtable size and Object Size.
+ ((field_id + fixed_fields) * (SIZE_VOFFSET as VOffsetT)) as VOffsetT
+}
+
+#[allow(dead_code)]
+pub fn field_offset_to_field_index(field_o: VOffsetT) -> VOffsetT {
+ debug_assert!(field_o >= 2);
+ let fixed_fields = 2; // VTable size and Object Size.
+ (field_o / (SIZE_VOFFSET as VOffsetT)) - fixed_fields
+}
+
+impl<'a> Follow<'a> for VTable<'a> {
+ type Inner = VTable<'a>;
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ VTable::init(buf, loc)
+ }
+}
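The index/offset conversion above is what gives the generated `VT_*` constants their values; a couple of spot checks (assuming this crate as a dependency):

    extern crate flatbuffers;
    use flatbuffers::field_index_to_field_offset;

    fn main() {
        // The first two voffset slots hold the vtable length and the object's
        // inline size, so field id 0 starts at byte offset 4, id 1 at 6.
        assert_eq!(field_index_to_field_offset(0), 4);
        assert_eq!(field_index_to_field_offset(1), 6);
    }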
diff --git a/rust/flatbuffers/src/vtable_writer.rs b/rust/flatbuffers/src/vtable_writer.rs
new file mode 100644
index 00000000..119f794c
--- /dev/null
+++ b/rust/flatbuffers/src/vtable_writer.rs
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use std::ptr::write_bytes;
+
+use endian_scalar::{emplace_scalar, read_scalar};
+use primitives::*;
+
+/// VTableWriter compartmentalizes actions needed to create a vtable.
+#[derive(Debug)]
+pub struct VTableWriter<'a> {
+ buf: &'a mut [u8],
+}
+
+impl<'a> VTableWriter<'a> {
+ #[inline(always)]
+ pub fn init(buf: &'a mut [u8]) -> Self {
+ VTableWriter { buf: buf }
+ }
+
+ /// Writes the vtable length (in bytes) into the vtable.
+ ///
+ /// Note that callers already need to have computed this to initialize
+ /// a VTableWriter.
+ ///
+ /// In debug mode, asserts that the length of the underlying data is equal
+ /// to the provided value.
+ #[inline(always)]
+ pub fn write_vtable_byte_length(&mut self, n: VOffsetT) {
+ emplace_scalar::<VOffsetT>(&mut self.buf[..SIZE_VOFFSET], n);
+ debug_assert_eq!(n as usize, self.buf.len());
+ }
+
+ /// Writes an object length (in bytes) into the vtable.
+ #[inline(always)]
+ pub fn write_object_inline_size(&mut self, n: VOffsetT) {
+ emplace_scalar::<VOffsetT>(&mut self.buf[SIZE_VOFFSET..2 * SIZE_VOFFSET], n);
+ }
+
+ /// Gets an object field offset from the vtable. Only used for debugging.
+ ///
+ /// Note that this expects field offsets (which are like pointers), not
+ /// field ids (which are like array indices).
+ #[inline(always)]
+ pub fn get_field_offset(&self, vtable_offset: VOffsetT) -> VOffsetT {
+ let idx = vtable_offset as usize;
+ read_scalar::<VOffsetT>(&self.buf[idx..idx + SIZE_VOFFSET])
+ }
+
+ /// Writes an object field offset into the vtable.
+ ///
+ /// Note that this expects field offsets (which are like pointers), not
+ /// field ids (which are like array indices).
+ #[inline(always)]
+ pub fn write_field_offset(&mut self, vtable_offset: VOffsetT, object_data_offset: VOffsetT) {
+ let idx = vtable_offset as usize;
+ emplace_scalar::<VOffsetT>(&mut self.buf[idx..idx + SIZE_VOFFSET], object_data_offset);
+ }
+
+ /// Clears all data in this VTableWriter. Used to cleanly undo a
+ /// vtable write.
+ #[inline(always)]
+ pub fn clear(&mut self) {
+ // This is the closest thing to memset in Rust right now.
+ let len = self.buf.len();
+ let p = self.buf.as_mut_ptr() as *mut u8;
+ unsafe {
+ write_bytes(p, 0, len);
+ }
+ }
+}
+
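VTableWriter itself is crate-internal, but the bytes it lays down are easy to picture: VOffsetT-sized slots holding the vtable length, the object's inline size, and then one field offset per slot. A hand-written equivalent using the exported `emplace_scalar` (a sketch, not the builder's actual code path):

    extern crate flatbuffers;
    use flatbuffers::emplace_scalar;

    fn main() {
        let mut vt = [0u8; 6];
        emplace_scalar::<i16>(&mut vt[0..2], 6); // vtable length in bytes
        emplace_scalar::<i16>(&mut vt[2..4], 8); // object inline size in bytes
        emplace_scalar::<i16>(&mut vt[4..6], 4); // field 0: 4 bytes past the table start
        assert_eq!(vt, [6u8, 0, 8, 0, 4, 0]);
    }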
diff --git a/samples/monster_generated.rs b/samples/monster_generated.rs
new file mode 100644
index 00000000..9ec573c7
--- /dev/null
+++ b/samples/monster_generated.rs
@@ -0,0 +1,507 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+
+
+pub mod my_game {
+ #![allow(dead_code)]
+ #![allow(unused_imports)]
+
+ use std::mem;
+ use std::marker::PhantomData;
+ use std::cmp::Ordering;
+
+ extern crate flatbuffers;
+ use self::flatbuffers::EndianScalar;
+pub mod sample {
+ #![allow(dead_code)]
+ #![allow(unused_imports)]
+
+ use std::mem;
+ use std::marker::PhantomData;
+ use std::cmp::Ordering;
+
+ extern crate flatbuffers;
+ use self::flatbuffers::EndianScalar;
+
+#[allow(non_camel_case_types)]
+#[repr(i8)]
+#[derive(Clone, Copy, PartialEq, Debug)]
+pub enum Color {
+ Red = 0,
+ Green = 1,
+ Blue = 2
+}
+
+const ENUM_MIN_COLOR: i8 = 0;
+const ENUM_MAX_COLOR: i8 = 2;
+
+impl<'a> flatbuffers::Follow<'a> for Color {
+ type Inner = Self;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::read_scalar_at::<Self>(buf, loc)
+ }
+}
+
+impl flatbuffers::EndianScalar for Color {
+ #[inline]
+ fn to_little_endian(self) -> Self {
+ let n = i8::to_le(self as i8);
+ let p = &n as *const i8 as *const Color;
+ unsafe { *p }
+ }
+ #[inline]
+ fn from_little_endian(self) -> Self {
+ let n = i8::from_le(self as i8);
+ let p = &n as *const i8 as *const Color;
+ unsafe { *p }
+ }
+}
+
+impl flatbuffers::Push for Color {
+ type Output = Color;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ flatbuffers::emplace_scalar::<Color>(dst, *self);
+ }
+}
+
+#[allow(non_camel_case_types)]
+const ENUM_VALUES_COLOR:[Color; 3] = [
+ Color::Red,
+ Color::Green,
+ Color::Blue
+];
+
+#[allow(non_camel_case_types)]
+const ENUM_NAMES_COLOR:[&'static str; 3] = [
+ "Red",
+ "Green",
+ "Blue"
+];
+
+pub fn enum_name_color(e: Color) -> &'static str {
+ let index: usize = e as usize;
+ ENUM_NAMES_COLOR[index]
+}
+
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[derive(Clone, Copy, PartialEq, Debug)]
+pub enum Equipment {
+ NONE = 0,
+ Weapon = 1
+}
+
+const ENUM_MIN_EQUIPMENT: u8 = 0;
+const ENUM_MAX_EQUIPMENT: u8 = 1;
+
+impl<'a> flatbuffers::Follow<'a> for Equipment {
+ type Inner = Self;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::read_scalar_at::<Self>(buf, loc)
+ }
+}
+
+impl flatbuffers::EndianScalar for Equipment {
+ #[inline]
+ fn to_little_endian(self) -> Self {
+ let n = u8::to_le(self as u8);
+ let p = &n as *const u8 as *const Equipment;
+ unsafe { *p }
+ }
+ #[inline]
+ fn from_little_endian(self) -> Self {
+ let n = u8::from_le(self as u8);
+ let p = &n as *const u8 as *const Equipment;
+ unsafe { *p }
+ }
+}
+
+impl flatbuffers::Push for Equipment {
+ type Output = Equipment;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ flatbuffers::emplace_scalar::<Equipment>(dst, *self);
+ }
+}
+
+#[allow(non_camel_case_types)]
+const ENUM_VALUES_EQUIPMENT:[Equipment; 2] = [
+ Equipment::NONE,
+ Equipment::Weapon
+];
+
+#[allow(non_camel_case_types)]
+const ENUM_NAMES_EQUIPMENT:[&'static str; 2] = [
+ "NONE",
+ "Weapon"
+];
+
+pub fn enum_name_equipment(e: Equipment) -> &'static str {
+ let index: usize = e as usize;
+ ENUM_NAMES_EQUIPMENT[index]
+}
+
+pub struct EquipmentUnionTableOffset {}
+// struct Vec3, aligned to 4
+#[repr(C, align(4))]
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub struct Vec3 {
+ x_: f32,
+ y_: f32,
+ z_: f32,
+} // pub struct Vec3
+impl flatbuffers::SafeSliceAccess for Vec3 {}
+impl<'a> flatbuffers::Follow<'a> for Vec3 {
+ type Inner = &'a Vec3;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ <&'a Vec3>::follow(buf, loc)
+ //flatbuffers::follow_cast_ref::<Vec3>(buf, loc)
+ }
+}
+impl<'a> flatbuffers::Follow<'a> for &'a Vec3 {
+ type Inner = &'a Vec3;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::follow_cast_ref::<Vec3>(buf, loc)
+ }
+}
+impl<'b> flatbuffers::Push for Vec3 {
+ type Output = Vec3;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(self as *const Vec3 as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+impl<'b> flatbuffers::Push for &'b Vec3 {
+ type Output = Vec3;
+
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(*self as *const Vec3 as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+
+
+impl Vec3 {
+ pub fn new<'a>(_x: f32, _y: f32, _z: f32) -> Self {
+ Vec3 {
+ x_: _x.to_little_endian(),
+ y_: _y.to_little_endian(),
+ z_: _z.to_little_endian(),
+
+ }
+ }
+ pub fn x<'a>(&'a self) -> f32 {
+ self.x_.from_little_endian()
+ }
+ pub fn y<'a>(&'a self) -> f32 {
+ self.y_.from_little_endian()
+ }
+ pub fn z<'a>(&'a self) -> f32 {
+ self.z_.from_little_endian()
+ }
+}
+
+pub enum MonsterOffset {}
+#[derive(Copy, Clone, Debug, PartialEq)]
+
+pub struct Monster<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for Monster<'a> {
+ type Inner = Monster<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self {
+ _tab: flatbuffers::Table { buf: buf, loc: loc },
+ }
+ }
+}
+
+impl<'a> Monster<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ Monster {
+ _tab: table,
+ }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args MonsterArgs<'args>) -> flatbuffers::WIPOffset<Monster<'bldr>> {
+ let mut builder = MonsterBuilder::new(_fbb);
+ if let Some(x) = args.equipped { builder.add_equipped(x); }
+ if let Some(x) = args.weapons { builder.add_weapons(x); }
+ if let Some(x) = args.inventory { builder.add_inventory(x); }
+ if let Some(x) = args.name { builder.add_name(x); }
+ if let Some(x) = args.pos { builder.add_pos(x); }
+ builder.add_hp(args.hp);
+ builder.add_mana(args.mana);
+ builder.add_equipped_type(args.equipped_type);
+ builder.add_color(args.color);
+ builder.finish()
+ }
+
+ pub const VT_POS: flatbuffers::VOffsetT = 4;
+ pub const VT_MANA: flatbuffers::VOffsetT = 6;
+ pub const VT_HP: flatbuffers::VOffsetT = 8;
+ pub const VT_NAME: flatbuffers::VOffsetT = 10;
+ pub const VT_INVENTORY: flatbuffers::VOffsetT = 14;
+ pub const VT_COLOR: flatbuffers::VOffsetT = 16;
+ pub const VT_WEAPONS: flatbuffers::VOffsetT = 18;
+ pub const VT_EQUIPPED_TYPE: flatbuffers::VOffsetT = 20;
+ pub const VT_EQUIPPED: flatbuffers::VOffsetT = 22;
+
+ #[inline]
+ pub fn pos(&'a self) -> Option<&'a Vec3> {
+ self._tab.get::<Vec3>(Monster::VT_POS, None)
+ }
+ #[inline]
+ pub fn mana(&'a self) -> i16 {
+ self._tab.get::<i16>(Monster::VT_MANA, Some(150)).unwrap()
+ }
+ #[inline]
+ pub fn hp(&'a self) -> i16 {
+ self._tab.get::<i16>(Monster::VT_HP, Some(100)).unwrap()
+ }
+ #[inline]
+ pub fn name(&'a self) -> Option<&'a str> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(Monster::VT_NAME, None)
+ }
+ #[inline]
+ pub fn inventory(&'a self) -> Option<&'a [u8]> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(Monster::VT_INVENTORY, None).map(|v| v.safe_slice())
+ }
+ #[inline]
+ pub fn color(&'a self) -> Color {
+ self._tab.get::<Color>(Monster::VT_COLOR, Some(Color::Blue)).unwrap()
+ }
+ #[inline]
+ pub fn weapons(&'a self) -> Option<flatbuffers::Vector<flatbuffers::ForwardsUOffset<Weapon<'a>>>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<flatbuffers::ForwardsUOffset<Weapon<'a>>>>>(Monster::VT_WEAPONS, None)
+ }
+ #[inline]
+ pub fn equipped_type(&'a self) -> Equipment {
+ self._tab.get::<Equipment>(Monster::VT_EQUIPPED_TYPE, Some(Equipment::NONE)).unwrap()
+ }
+ #[inline]
+ pub fn equipped(&'a self) -> Option<flatbuffers::Table<'a>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(Monster::VT_EQUIPPED, None)
+ }
+ #[inline]
+ #[allow(non_snake_case)]
+ pub fn equipped_as_weapon(&'a self) -> Option<Weapon> {
+ if self.equipped_type() == Equipment::Weapon {
+ self.equipped().map(|u| Weapon::init_from_table(u))
+ } else {
+ None
+ }
+ }
+
+}
+
+pub struct MonsterArgs<'a> {
+ pub pos: Option<&'a Vec3>,
+ pub mana: i16,
+ pub hp: i16,
+ pub name: Option<flatbuffers::WIPOffset<&'a str>>,
+ pub inventory: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , u8>>>,
+ pub color: Color,
+ pub weapons: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , flatbuffers::ForwardsUOffset<Weapon<'a >>>>>,
+ pub equipped_type: Equipment,
+ pub equipped: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
+}
+impl<'a> Default for MonsterArgs<'a> {
+ #[inline]
+ fn default() -> Self {
+ MonsterArgs {
+ pos: None,
+ mana: 150,
+ hp: 100,
+ name: None,
+ inventory: None,
+ color: Color::Blue,
+ weapons: None,
+ equipped_type: Equipment::NONE,
+ equipped: None,
+ }
+ }
+}
+pub struct MonsterBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> MonsterBuilder<'a, 'b> {
+ #[inline]
+ pub fn add_pos(&mut self, pos: &'b Vec3) {
+ self.fbb_.push_slot_always::<&Vec3>(Monster::VT_POS, pos);
+ }
+ #[inline]
+ pub fn add_mana(&mut self, mana: i16) {
+ self.fbb_.push_slot::<i16>(Monster::VT_MANA, mana, 150);
+ }
+ #[inline]
+ pub fn add_hp(&mut self, hp: i16) {
+ self.fbb_.push_slot::<i16>(Monster::VT_HP, hp, 100);
+ }
+ #[inline]
+ pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_NAME, name);
+ }
+ #[inline]
+ pub fn add_inventory(&mut self, inventory: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_INVENTORY, inventory);
+ }
+ #[inline]
+ pub fn add_color(&mut self, color: Color) {
+ self.fbb_.push_slot::<Color>(Monster::VT_COLOR, color, Color::Blue);
+ }
+ #[inline]
+ pub fn add_weapons(&mut self, weapons: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Weapon<'b >>>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_WEAPONS, weapons);
+ }
+ #[inline]
+ pub fn add_equipped_type(&mut self, equipped_type: Equipment) {
+ self.fbb_.push_slot::<Equipment>(Monster::VT_EQUIPPED_TYPE, equipped_type, Equipment::NONE);
+ }
+ #[inline]
+ pub fn add_equipped(&mut self, equipped: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_EQUIPPED, equipped);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> MonsterBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ MonsterBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<Monster<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+pub enum WeaponOffset {}
+#[derive(Copy, Clone, Debug, PartialEq)]
+
+pub struct Weapon<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for Weapon<'a> {
+ type Inner = Weapon<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self {
+ _tab: flatbuffers::Table { buf: buf, loc: loc },
+ }
+ }
+}
+
+impl<'a> Weapon<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ Weapon {
+ _tab: table,
+ }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args WeaponArgs<'args>) -> flatbuffers::WIPOffset<Weapon<'bldr>> {
+ let mut builder = WeaponBuilder::new(_fbb);
+ if let Some(x) = args.name { builder.add_name(x); }
+ builder.add_damage(args.damage);
+ builder.finish()
+ }
+
+ pub const VT_NAME: flatbuffers::VOffsetT = 4;
+ pub const VT_DAMAGE: flatbuffers::VOffsetT = 6;
+
+ #[inline]
+ pub fn name(&'a self) -> Option<&'a str> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(Weapon::VT_NAME, None)
+ }
+ #[inline]
+ pub fn damage(&'a self) -> i16 {
+ self._tab.get::<i16>(Weapon::VT_DAMAGE, Some(0)).unwrap()
+ }
+}
+
+pub struct WeaponArgs<'a> {
+ pub name: Option<flatbuffers::WIPOffset<&'a str>>,
+ pub damage: i16,
+}
+impl<'a> Default for WeaponArgs<'a> {
+ #[inline]
+ fn default() -> Self {
+ WeaponArgs {
+ name: None,
+ damage: 0,
+ }
+ }
+}
+pub struct WeaponBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> WeaponBuilder<'a, 'b> {
+ #[inline]
+ pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Weapon::VT_NAME, name);
+ }
+ #[inline]
+ pub fn add_damage(&mut self, damage: i16) {
+ self.fbb_.push_slot::<i16>(Weapon::VT_DAMAGE, damage, 0);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> WeaponBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ WeaponBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<Weapon<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+#[inline]
+pub fn get_root_as_monster<'a>(buf: &'a [u8]) -> Monster<'a> {
+ flatbuffers::get_root::<Monster<'a>>(buf)
+}
+
+#[inline]
+pub fn get_size_prefixed_root_as_monster<'a>(buf: &'a [u8]) -> Monster<'a> {
+ flatbuffers::get_size_prefixed_root::<Monster<'a>>(buf)
+}
+
+#[inline]
+pub fn finish_monster_buffer<'a, 'b>(
+ fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ root: flatbuffers::WIPOffset<Monster<'a>>) {
+ fbb.finish(root, None);
+}
+
+#[inline]
+pub fn finish_size_prefixed_monster_buffer<'a, 'b>(fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<Monster<'a>>) {
+ fbb.finish_size_prefixed(root, None);
+}
+} // pub mod Sample
+} // pub mod MyGame
+
diff --git a/samples/sample_binary.rs b/samples/sample_binary.rs
new file mode 100644
index 00000000..7a4c2ae0
--- /dev/null
+++ b/samples/sample_binary.rs
@@ -0,0 +1,155 @@
+/*
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// import the flatbuffers runtime library
+extern crate flatbuffers;
+
+// import the generated code
+#[path = "./monster_generated.rs"]
+mod monster_generated;
+pub use monster_generated::my_game::sample::{get_root_as_monster,
+ Color, Equipment,
+ Monster, MonsterArgs,
+ Vec3,
+ Weapon, WeaponArgs};
+
+
+// Example of how to use FlatBuffers to create and read binary buffers.
+
+fn main() {
+ // Build up a serialized buffer algorithmically.
+ // Initialize it with a capacity of 1024 bytes.
+ let mut builder = flatbuffers::FlatBufferBuilder::new_with_capacity(1024);
+
+ // Serialize some weapons for the Monster: A 'sword' and an 'axe'.
+ let weapon_one_name = builder.create_string("Sword");
+ let weapon_two_name = builder.create_string("Axe");
+
+ // Use the `Weapon::create` shortcut to create Weapons with named field
+ // arguments.
+ let sword = Weapon::create(&mut builder, &WeaponArgs{
+ name: Some(weapon_one_name),
+ damage: 3,
+ });
+ let axe = Weapon::create(&mut builder, &WeaponArgs{
+ name: Some(weapon_two_name),
+ damage: 5,
+ });
+
+ // Name of the Monster.
+ let name = builder.create_string("Orc");
+
+ // Inventory.
+ let inventory = builder.create_vector(&[0u8, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
+
+ // Create a FlatBuffer `vector` that contains offsets to the sword and axe
+ // we created above.
+ let weapons = builder.create_vector(&[sword, axe]);
+
+ // Create the path vector of Vec3 objects:
+ //let x = Vec3::new(1.0, 2.0, 3.0);
+ //let y = Vec3::new(4.0, 5.0, 6.0);
+ //let path = builder.create_vector(&[x, y]);
+
+ // Note that, for convenience, it is also valid to create a vector of
+ // references to structs, like this:
+ // let path = builder.create_vector(&[&x, &y]);
+
+ // Create the monster using the `Monster::create` helper function. This
+ // function accepts a `MonsterArgs` struct, which supplies all of the data
+ // needed to build a `Monster`. To supply empty/default fields, just use the
+    // Rust built-in `Default::default()` function, as demonstrated below.
+ let orc = Monster::create(&mut builder, &MonsterArgs{
+ pos: Some(&Vec3::new(1.0f32, 2.0f32, 3.0f32)),
+ mana: 150,
+ hp: 80,
+ name: Some(name),
+ inventory: Some(inventory),
+ color: Color::Red,
+ weapons: Some(weapons),
+ equipped_type: Equipment::Weapon,
+ equipped: Some(axe.as_union_value()),
+ //path: Some(path),
+ ..Default::default()
+ });
+
+ // Serialize the root of the object, without providing a file identifier.
+ builder.finish(orc, None);
+
+ // We now have a FlatBuffer we can store on disk or send over a network.
+
+ // ** file/network code goes here :) **
+
+ // Instead, we're going to access it right away (as if we just received it).
+ // This must be called after `finish()`.
+ let buf = builder.finished_data(); // Of type `&[u8]`
+
+ // Get access to the root:
+ let monster = get_root_as_monster(buf);
+
+ // Get and test some scalar types from the FlatBuffer.
+ let hp = monster.hp();
+ let mana = monster.mana();
+ let name = monster.name();
+
+ assert_eq!(hp, 80);
+ assert_eq!(mana, 150); // default
+ assert_eq!(name, Some("Orc"));
+
+ // Get and test a field of the FlatBuffer's `struct`.
+ assert!(monster.pos().is_some());
+ let pos = monster.pos().unwrap();
+ let x = pos.x();
+ let y = pos.y();
+ let z = pos.z();
+ assert_eq!(x, 1.0f32);
+ assert_eq!(y, 2.0f32);
+ assert_eq!(z, 3.0f32);
+
+ // Get an element from the `inventory` FlatBuffer's `vector`.
+ assert!(monster.inventory().is_some());
+ let inv = monster.inventory().unwrap();
+
+ // Note that this vector is returned as a slice, because direct access for
+ // this type, a u8 vector, is safe on all platforms:
+ let third_item = inv[2];
+ assert_eq!(third_item, 2);
+
+    // Get and test the `weapons` FlatBuffer's `vector`.
+ assert!(monster.weapons().is_some());
+ let weps = monster.weapons().unwrap();
+ //let weps_len = weps.len();
+ let wep2 = weps.get(1);
+ let second_weapon_name = wep2.name();
+ let second_weapon_damage = wep2.damage();
+ assert_eq!(second_weapon_name, Some("Axe"));
+ assert_eq!(second_weapon_damage, 5);
+
+ // Get and test the `Equipment` union (`equipped` field).
+ assert_eq!(monster.equipped_type(), Equipment::Weapon);
+ let equipped = monster.equipped_as_weapon().unwrap();
+ let weapon_name = equipped.name();
+ let weapon_damage = equipped.damage();
+ assert_eq!(weapon_name, Some("Axe"));
+ assert_eq!(weapon_damage, 5);
+
+    // Get and test the `path` FlatBuffer's `vector`.
+ //assert_eq!(monster.path().unwrap().len(), 2);
+ //assert_eq!(monster.path().unwrap()[0].x(), 1.0);
+ //assert_eq!(monster.path().unwrap()[1].x(), 4.0);
+
+ println!("The FlatBuffer was successfully created and accessed!");
+}
diff --git a/src/flatc_main.cpp b/src/flatc_main.cpp
index 2a3bc980..c632fe27 100644
--- a/src/flatc_main.cpp
+++ b/src/flatc_main.cpp
@@ -79,6 +79,10 @@ int main(int argc, const char *argv[]) {
flatbuffers::IDLOptions::kLua,
"Generate Lua files for tables/structs",
flatbuffers::GeneralMakeRule },
+ { flatbuffers::GenerateRust, "-r", "--rust", "Rust", true, nullptr,
+ flatbuffers::IDLOptions::kRust,
+ "Generate Rust files for tables/structs",
+ flatbuffers::RustMakeRule },
{ flatbuffers::GeneratePhp, nullptr, "--php", "PHP", true, nullptr,
flatbuffers::IDLOptions::kPhp, "Generate PHP files for tables/structs",
flatbuffers::GeneralMakeRule },
diff --git a/src/idl_gen_cpp.cpp b/src/idl_gen_cpp.cpp
index df4c0ad9..b9bfacb0 100644
--- a/src/idl_gen_cpp.cpp
+++ b/src/idl_gen_cpp.cpp
@@ -477,7 +477,8 @@ class CppGenerator : public BaseGenerator {
std::string GenTypeBasic(const Type &type, bool user_facing_type) const {
static const char * const ctypename[] = {
// clang-format off
- #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, \
+ RTYPE) \
#CTYPE,
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
diff --git a/src/idl_gen_general.cpp b/src/idl_gen_general.cpp
index f3ed3e4f..3245c586 100644
--- a/src/idl_gen_general.cpp
+++ b/src/idl_gen_general.cpp
@@ -247,7 +247,7 @@ class GeneralGenerator : public BaseGenerator {
// clang-format off
static const char * const java_typename[] = {
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
#JTYPE,
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
@@ -255,7 +255,7 @@ class GeneralGenerator : public BaseGenerator {
static const char * const csharp_typename[] = {
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
#NTYPE,
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
diff --git a/src/idl_gen_go.cpp b/src/idl_gen_go.cpp
index c5767b47..4f20719b 100644
--- a/src/idl_gen_go.cpp
+++ b/src/idl_gen_go.cpp
@@ -690,7 +690,7 @@ static std::string GenTypeBasic(const Type &type) {
static const char *ctypename[] = {
// clang-format off
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
#GTYPE,
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
diff --git a/src/idl_gen_lobster.cpp b/src/idl_gen_lobster.cpp
index 0487d17a..5f199e3a 100644
--- a/src/idl_gen_lobster.cpp
+++ b/src/idl_gen_lobster.cpp
@@ -81,7 +81,7 @@ class LobsterGenerator : public BaseGenerator {
static const char *ctypename[] = {
// clang-format off
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
#PTYPE,
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
diff --git a/src/idl_gen_lua.cpp b/src/idl_gen_lua.cpp
index 86264918..34408d31 100644
--- a/src/idl_gen_lua.cpp
+++ b/src/idl_gen_lua.cpp
@@ -604,7 +604,7 @@ namespace lua {
static const char *ctypename[] = {
// clang-format off
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
#PTYPE,
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
diff --git a/src/idl_gen_php.cpp b/src/idl_gen_php.cpp
index 7d98e00d..5563f4bb 100644
--- a/src/idl_gen_php.cpp
+++ b/src/idl_gen_php.cpp
@@ -864,7 +864,7 @@ class PhpGenerator : public BaseGenerator {
static const char *ctypename[] = {
// clang-format off
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
#NTYPE,
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
diff --git a/src/idl_gen_python.cpp b/src/idl_gen_python.cpp
index e000ada9..109e203d 100644
--- a/src/idl_gen_python.cpp
+++ b/src/idl_gen_python.cpp
@@ -615,7 +615,7 @@ class PythonGenerator : public BaseGenerator {
static const char *ctypename[] = {
// clang-format off
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
#PTYPE,
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
diff --git a/src/idl_gen_rust.cpp b/src/idl_gen_rust.cpp
new file mode 100644
index 00000000..1441af49
--- /dev/null
+++ b/src/idl_gen_rust.cpp
@@ -0,0 +1,1811 @@
+/*
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// independent from idl_parser, since this code is not needed for most clients
+
+#include "flatbuffers/code_generators.h"
+#include "flatbuffers/flatbuffers.h"
+#include "flatbuffers/idl.h"
+#include "flatbuffers/util.h"
+
+namespace flatbuffers {
+
+static std::string GeneratedFileName(const std::string &path,
+ const std::string &file_name) {
+ return path + file_name + "_generated.rs";
+}
+
+// Convert a camelCaseIdentifier or CamelCaseIdentifier to a
+// snake_case_identifier.
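+// For example (illustrative inputs): "HelloWorld" and "helloWorld" both become
+// "hello_world", while an all-caps run like "HTTPRequest" becomes
+// "h_t_t_p_request", because every uppercase letter starts a new word.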
+std::string MakeSnakeCase(const std::string &in) {
+ std::string s;
+ for (size_t i = 0; i < in.length(); i++) {
+ if (islower(in[i])) {
+ s += static_cast<char>(in[i]);
+ } else {
+ if (i > 0) {
+ s += '_';
+ }
+ s += static_cast<char>(tolower(in[i]));
+ }
+ }
+ return s;
+}
+
+// Convert a string to all uppercase.
+std::string MakeUpper(const std::string &in) {
+ std::string s;
+ for (size_t i = 0; i < in.length(); i++) {
+ s += static_cast<char>(toupper(in[i]));
+ }
+ return s;
+}
+
+// Encapsulate all logical field types in this enum. This allows us to write
+// field logic based on type switches, instead of branches on the properties
+// set on the Type.
+// TODO(rw): for backwards compatibility, we can't use a strict `enum class`
+//            declaration here. Could we use the `-Wswitch-enum` warning to
+// achieve the same effect?
+enum FullType {
+ ftInteger = 0,
+ ftFloat = 1,
+ ftBool = 2,
+
+ ftStruct = 3,
+ ftTable = 4,
+
+ ftEnumKey = 5,
+ ftUnionKey = 6,
+
+ ftUnionValue = 7,
+
+ // TODO(rw): bytestring?
+ ftString = 8,
+
+ ftVectorOfInteger = 9,
+ ftVectorOfFloat = 10,
+ ftVectorOfBool = 11,
+ ftVectorOfEnumKey = 12,
+ ftVectorOfStruct = 13,
+ ftVectorOfTable = 14,
+ ftVectorOfString = 15,
+ ftVectorOfUnionValue = 16,
+};
+
+// Convert a Type to a FullType (exhaustive).
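+// A few illustrative mappings for a hypothetical schema: a `short` field is
+// ftInteger, a `bool` field is ftBool, a field whose type is a fixed struct is
+// ftStruct, a non-fixed table type is ftTable, `[ubyte]` is ftVectorOfInteger,
+// and `[SomeTable]` is ftVectorOfTable.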
+FullType GetFullType(const Type &type) {
+ // N.B. The order of these conditionals matters for some types.
+
+ if (type.base_type == BASE_TYPE_STRING) {
+ return ftString;
+ } else if (type.base_type == BASE_TYPE_STRUCT) {
+ if (type.struct_def->fixed) {
+ return ftStruct;
+ } else {
+ return ftTable;
+ }
+ } else if (type.base_type == BASE_TYPE_VECTOR) {
+ switch (GetFullType(type.VectorType())) {
+ case ftInteger: {
+ return ftVectorOfInteger;
+ }
+ case ftFloat: {
+ return ftVectorOfFloat;
+ }
+ case ftBool: {
+ return ftVectorOfBool;
+ }
+ case ftStruct: {
+ return ftVectorOfStruct;
+ }
+ case ftTable: {
+ return ftVectorOfTable;
+ }
+ case ftString: {
+ return ftVectorOfString;
+ }
+ case ftEnumKey: {
+ return ftVectorOfEnumKey;
+ }
+ case ftUnionKey:
+ case ftUnionValue: {
+ FLATBUFFERS_ASSERT(false && "vectors of unions are unsupported");
+ }
+ default: {
+        FLATBUFFERS_ASSERT(false && "vectors of vectors are unsupported");
+ }
+ }
+ } else if (type.enum_def != nullptr) {
+ if (type.enum_def->is_union) {
+ if (type.base_type == BASE_TYPE_UNION) {
+ return ftUnionValue;
+ } else if (IsInteger(type.base_type)) {
+ return ftUnionKey;
+ } else {
+ FLATBUFFERS_ASSERT(false && "unknown union field type");
+ }
+ } else {
+ return ftEnumKey;
+ }
+ } else if (IsScalar(type.base_type)) {
+ if (IsBool(type.base_type)) {
+ return ftBool;
+ } else if (IsInteger(type.base_type)) {
+ return ftInteger;
+ } else if (IsFloat(type.base_type)) {
+ return ftFloat;
+ } else {
+ FLATBUFFERS_ASSERT(false && "unknown number type");
+ }
+ }
+
+ FLATBUFFERS_ASSERT(false && "completely unknown type");
+
+ // this is only to satisfy the compiler's return analysis.
+ return ftBool;
+}
+
+namespace rust {
+
+class RustGenerator : public BaseGenerator {
+ public:
+ RustGenerator(const Parser &parser, const std::string &path,
+ const std::string &file_name)
+ : BaseGenerator(parser, path, file_name, "", "::"),
+ cur_name_space_(nullptr) {
+ const char *keywords[] = {
+ // list taken from:
+ // https://doc.rust-lang.org/book/second-edition/appendix-01-keywords.html
+ //
+ // we write keywords one per line so that we can easily compare them with
+ // changes to that webpage in the future.
+
+ // currently-used keywords
+ "as",
+ "break",
+ "const",
+ "continue",
+ "crate",
+ "else",
+ "enum",
+ "extern",
+ "false",
+ "fn",
+ "for",
+ "if",
+ "impl",
+ "in",
+ "let",
+ "loop",
+ "match",
+ "mod",
+ "move",
+ "mut",
+ "pub",
+ "ref",
+ "return",
+ "Self",
+ "self",
+ "static",
+ "struct",
+ "super",
+ "trait",
+ "true",
+ "type",
+ "unsafe",
+ "use",
+ "where",
+ "while",
+
+ // future possible keywords
+ "abstract",
+ "alignof",
+ "become",
+ "box",
+ "do",
+ "final",
+ "macro",
+ "offsetof",
+ "override",
+ "priv",
+ "proc",
+ "pure",
+ "sizeof",
+ "typeof",
+ "unsized",
+ "virtual",
+ "yield",
+
+ // other rust terms we should not use
+ "std",
+ "usize",
+ "isize",
+ "u8",
+ "i8",
+ "u16",
+ "i16",
+ "u32",
+ "i32",
+ "u64",
+ "i64",
+ "u128",
+ "i128",
+ "f32",
+ "f64",
+
+ // These are terms the code generator can implement on types.
+ //
+ // In Rust, the trait resolution rules (as described at
+ // https://github.com/rust-lang/rust/issues/26007) mean that, as long
+ // as we impl table accessors as inherent methods, we'll never create
+ // conflicts with these keywords. However, that's a fairly nuanced
+ // implementation detail, and how we implement methods could change in
+      // the future. As a result, we proactively block these out as reserved
+ // words.
+ "follow",
+ "push",
+ "size",
+ "alignment",
+ "to_little_endian",
+ "from_little_endian",
+ nullptr };
+ for (auto kw = keywords; *kw; kw++) keywords_.insert(*kw);
+ }
+
+ // Iterate through all definitions we haven't generated code for (enums,
+ // structs, and tables) and output them to a single file.
+ bool generate() {
+ code_.Clear();
+ code_ += "// " + std::string(FlatBuffersGeneratedWarning()) + "\n\n";
+
+ assert(!cur_name_space_);
+
+ // Generate all code in their namespaces, once, because Rust does not
+ // permit re-opening modules.
+ //
+ // TODO(rw): Use a set data structure to reduce namespace evaluations from
+ // O(n**2) to O(n).
+ for (auto ns_it = parser_.namespaces_.begin();
+ ns_it != parser_.namespaces_.end();
+ ++ns_it) {
+ const auto &ns = *ns_it;
+
+ // Generate code for all the enum declarations.
+ for (auto it = parser_.enums_.vec.begin(); it != parser_.enums_.vec.end();
+ ++it) {
+ const auto &enum_def = **it;
+ if (enum_def.defined_namespace != ns) { continue; }
+ if (!enum_def.generated) {
+ SetNameSpace(enum_def.defined_namespace);
+ GenEnum(enum_def);
+ }
+ }
+
+ // Generate code for all structs.
+ for (auto it = parser_.structs_.vec.begin();
+ it != parser_.structs_.vec.end(); ++it) {
+ const auto &struct_def = **it;
+ if (struct_def.defined_namespace != ns) { continue; }
+ if (struct_def.fixed && !struct_def.generated) {
+ SetNameSpace(struct_def.defined_namespace);
+ GenStruct(struct_def);
+ }
+ }
+
+ // Generate code for all tables.
+ for (auto it = parser_.structs_.vec.begin();
+ it != parser_.structs_.vec.end(); ++it) {
+ const auto &struct_def = **it;
+ if (struct_def.defined_namespace != ns) { continue; }
+ if (!struct_def.fixed && !struct_def.generated) {
+ SetNameSpace(struct_def.defined_namespace);
+ GenTable(struct_def);
+ }
+ }
+
+ // Generate global helper functions.
+ if (parser_.root_struct_def_) {
+ auto &struct_def = *parser_.root_struct_def_;
+ if (struct_def.defined_namespace != ns) { continue; }
+ SetNameSpace(struct_def.defined_namespace);
+ GenRootTableFuncs(struct_def);
+ }
+ }
+ if (cur_name_space_) SetNameSpace(nullptr);
+
+ const auto file_path = GeneratedFileName(path_, file_name_);
+ const auto final_code = code_.ToString();
+ return SaveFile(file_path.c_str(), final_code, false);
+ }
+
+ private:
+ CodeWriter code_;
+
+ std::set<std::string> keywords_;
+
+ // This tracks the current namespace so we can insert namespace declarations.
+ const Namespace *cur_name_space_;
+
+ const Namespace *CurrentNameSpace() const { return cur_name_space_; }
+
+ // Determine if a Type needs a lifetime template parameter when used in Rust.
+ bool TypeNeedsLifetimeParameter(const Type &type) const {
+ switch (GetFullType(type)) {
+ case ftInteger:
+ case ftFloat:
+ case ftBool:
+ case ftTable:
+ case ftEnumKey:
+ case ftUnionKey:
+ case ftStruct: { return false; }
+ default: { return true; }
+ }
+ }
+
+ // Determine if a table args rust type needs a lifetime template parameter.
+ bool TableBuilderArgsNeedsLifetime(const StructDef &struct_def) const {
+ FLATBUFFERS_ASSERT(!struct_def.fixed);
+
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ const auto &field = **it;
+ if (field.deprecated) {
+ continue;
+ }
+
+ if (TypeNeedsLifetimeParameter(field.value.type)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ // Determine if a Type needs to be copied (for endian safety) when used in a
+ // Struct.
+ bool StructMemberAccessNeedsCopy(const Type &type) const {
+ switch (GetFullType(type)) {
+ case ftInteger: // requires endian swap
+ case ftFloat: // requires endian swap
+ case ftBool: // no endian-swap, but do the copy for UX consistency
+ case ftEnumKey: { return true; } // requires endian swap
+ case ftStruct: { return false; } // no endian swap
+ default: {
+ // logic error: no other types can be struct members.
+ FLATBUFFERS_ASSERT(false && "invalid struct member type");
+ return false; // only to satisfy compiler's return analysis
+ }
+ }
+ }
+
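+  // For example (illustrative), a schema field named `type` is emitted as
+  // `type_` so the generated Rust identifier does not collide with a keyword.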
+ std::string EscapeKeyword(const std::string &name) const {
+ return keywords_.find(name) == keywords_.end() ? name : name + "_";
+ }
+
+ std::string Name(const Definition &def) const {
+ return EscapeKeyword(def.name);
+ }
+
+ std::string Name(const EnumVal &ev) const { return EscapeKeyword(ev.name); }
+
+ std::string WrapInNameSpace(const Definition &def) const {
+ return WrapInNameSpace(def.defined_namespace, Name(def));
+ }
+ std::string WrapInNameSpace(const Namespace *ns,
+ const std::string &name) const {
+ if (CurrentNameSpace() == ns) return name;
+ std::string prefix = GetRelativeNamespaceTraversal(CurrentNameSpace(), ns);
+ return prefix + name;
+ }
+
+ // Determine the namespace traversal needed from the Rust crate root.
+ // This may be useful in the future for referring to included files, but is
+ // currently unused.
+ std::string GetAbsoluteNamespaceTraversal(const Namespace *dst) const {
+ std::stringstream stream;
+
+ stream << "::";
+ for (auto d = dst->components.begin(); d != dst->components.end(); d++) {
+ stream << MakeSnakeCase(*d) + "::";
+ }
+ return stream.str();
+ }
+
+ // Determine the relative namespace traversal needed to reference one
+ // namespace from another namespace. This is useful because it does not force
+ // the user to have a particular file layout. (If we output absolute
+ // namespace paths, that may require users to organize their Rust crates in a
+ // particular way.)
+ std::string GetRelativeNamespaceTraversal(const Namespace *src,
+ const Namespace *dst) const {
+ // calculate the path needed to reference dst from src.
+ // example: f(A::B::C, A::B::C) -> (none)
+ // example: f(A::B::C, A::B) -> super::
+ // example: f(A::B::C, A::B::D) -> super::D
+ // example: f(A::B::C, A) -> super::super::
+ // example: f(A::B::C, D) -> super::super::super::D
+ // example: f(A::B::C, D::E) -> super::super::super::D::E
+ // example: f(A, D::E) -> super::D::E
+ // does not include leaf object (typically a struct type).
+
+ size_t i = 0;
+ std::stringstream stream;
+
+ auto s = src->components.begin();
+ auto d = dst->components.begin();
+ for(;;) {
+ if (s == src->components.end()) { break; }
+ if (d == dst->components.end()) { break; }
+ if (*s != *d) { break; }
+ s++;
+ d++;
+ i++;
+ }
+
+ for (; s != src->components.end(); s++) {
+ stream << "super::";
+ }
+ for (; d != dst->components.end(); d++) {
+ stream << MakeSnakeCase(*d) + "::";
+ }
+ return stream.str();
+ }
+
+ // Generate a comment from the schema.
+ void GenComment(const std::vector<std::string> &dc, const char *prefix = "") {
+ std::string text;
+ ::flatbuffers::GenComment(dc, &text, nullptr, prefix);
+ code_ += text + "\\";
+ }
+
+ // Return a Rust type from the table in idl.h.
+ std::string GetTypeBasic(const Type &type) const {
+ switch (GetFullType(type)) {
+ case ftInteger:
+ case ftFloat:
+ case ftBool:
+ case ftEnumKey:
+ case ftUnionKey: { break; }
+ default: { FLATBUFFERS_ASSERT(false && "incorrect type given");}
+ }
+
+ // clang-format off
+ static const char * const ctypename[] = {
+ #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, \
+ RTYPE) \
+ #RTYPE,
+ FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
+ #undef FLATBUFFERS_TD
+ // clang-format on
+ };
+
+ if (type.enum_def) { return WrapInNameSpace(*type.enum_def); }
+ return ctypename[type.base_type];
+ }
+
+ // Look up the native type for an enum. This will always be an integer like
+ // u8, i32, etc.
+ std::string GetEnumTypeForDecl(const Type &type) {
+ const auto ft = GetFullType(type);
+ if (!(ft == ftEnumKey || ft == ftUnionKey)) {
+ FLATBUFFERS_ASSERT(false && "precondition failed in GetEnumTypeForDecl");
+ }
+
+ static const char *ctypename[] = {
+ // clang-format off
+ #define FLATBUFFERS_TD(ENUM, IDLTYPE, CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, \
+ RTYPE) \
+ #RTYPE,
+ FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
+ #undef FLATBUFFERS_TD
+ // clang-format on
+ };
+
+ // Enums can be bools, but their Rust representation must be a u8, as used
+ // in the repr attribute (#[repr(bool)] is an invalid attribute).
+ if (type.base_type == BASE_TYPE_BOOL) return "u8";
+ return ctypename[type.base_type];
+ }
+
+ // Return a Rust type for any type (scalar, table, struct) specifically for
+ // using a FlatBuffer.
+ std::string GetTypeGet(const Type &type) const {
+ switch (GetFullType(type)) {
+ case ftInteger:
+ case ftFloat:
+ case ftBool:
+ case ftEnumKey:
+ case ftUnionKey: {
+ return GetTypeBasic(type); }
+ case ftTable: {
+ return WrapInNameSpace(type.struct_def->defined_namespace,
+ type.struct_def->name) + "<'a>"; }
+ default: {
+ return WrapInNameSpace(type.struct_def->defined_namespace,
+ type.struct_def->name); }
+ }
+ }
+
+ std::string GetEnumValUse(const EnumDef &enum_def,
+ const EnumVal &enum_val) const {
+ return Name(enum_def) + "::" + Name(enum_val);
+ }
+
+ // Generate an enum declaration,
+ // an enum string lookup table,
+ // an enum match function,
+  // and an enum array of values.
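+  //
+  // As an illustrative sketch (not emitted verbatim), a hypothetical schema
+  //   enum Color : byte { Red = 0, Green = 1, Blue = 2 }
+  // produces Rust along these lines:
+  //   #[repr(i8)]
+  //   #[derive(Clone, Copy, PartialEq, Debug)]
+  //   pub enum Color { Red = 0, Green = 1, Blue = 2 }
+  //   const ENUM_MIN_COLOR: i8 = 0;
+  //   const ENUM_MAX_COLOR: i8 = 2;
+  //   const ENUM_VALUES_COLOR: [Color; 3] =
+  //       [Color::Red, Color::Green, Color::Blue];
+  //   const ENUM_NAMES_COLOR: [&'static str; 3] = ["Red", "Green", "Blue"];
+  //   pub fn enum_name_color(e: Color) -> &'static str { ... }
+  // plus the Follow, EndianScalar, and Push impls generated below.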
+ void GenEnum(const EnumDef &enum_def) {
+ code_.SetValue("ENUM_NAME", Name(enum_def));
+ code_.SetValue("BASE_TYPE", GetEnumTypeForDecl(enum_def.underlying_type));
+ code_.SetValue("SEP", "");
+
+ GenComment(enum_def.doc_comment);
+ code_ += "#[allow(non_camel_case_types)]";
+ code_ += "#[repr({{BASE_TYPE}})]";
+ code_ += "#[derive(Clone, Copy, PartialEq, Debug)]";
+ code_ += "pub enum " + Name(enum_def) + " {";
+
+ int64_t anyv = 0;
+ const EnumVal *minv = nullptr, *maxv = nullptr;
+ for (auto it = enum_def.vals.vec.begin(); it != enum_def.vals.vec.end();
+ ++it) {
+ const auto &ev = **it;
+
+ GenComment(ev.doc_comment, " ");
+ code_.SetValue("KEY", Name(ev));
+ code_.SetValue("VALUE", NumToString(ev.value));
+ code_ += "{{SEP}} {{KEY}} = {{VALUE}}\\";
+ code_.SetValue("SEP", ",\n");
+
+ minv = !minv || minv->value > ev.value ? &ev : minv;
+ maxv = !maxv || maxv->value < ev.value ? &ev : maxv;
+ anyv |= ev.value;
+ }
+
+ code_ += "";
+ code_ += "}";
+ code_ += "";
+
+ code_.SetValue("ENUM_NAME", Name(enum_def));
+ code_.SetValue("ENUM_NAME_SNAKE", MakeSnakeCase(Name(enum_def)));
+ code_.SetValue("ENUM_NAME_CAPS", MakeUpper(MakeSnakeCase(Name(enum_def))));
+ code_.SetValue("ENUM_MIN_BASE_VALUE", NumToString(minv->value));
+ code_.SetValue("ENUM_MAX_BASE_VALUE", NumToString(maxv->value));
+
+ // Generate enum constants, and impls for Follow, EndianScalar, and Push.
+ code_ += "const ENUM_MIN_{{ENUM_NAME_CAPS}}: {{BASE_TYPE}} = \\";
+ code_ += "{{ENUM_MIN_BASE_VALUE}};";
+ code_ += "const ENUM_MAX_{{ENUM_NAME_CAPS}}: {{BASE_TYPE}} = \\";
+ code_ += "{{ENUM_MAX_BASE_VALUE}};";
+ code_ += "";
+ code_ += "impl<'a> flatbuffers::Follow<'a> for {{ENUM_NAME}} {";
+ code_ += " type Inner = Self;";
+ code_ += " #[inline]";
+ code_ += " fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {";
+ code_ += " flatbuffers::read_scalar_at::<Self>(buf, loc)";
+ code_ += " }";
+ code_ += "}";
+ code_ += "";
+ code_ += "impl flatbuffers::EndianScalar for {{ENUM_NAME}} {";
+ code_ += " #[inline]";
+ code_ += " fn to_little_endian(self) -> Self {";
+ code_ += " let n = {{BASE_TYPE}}::to_le(self as {{BASE_TYPE}});";
+ code_ += " let p = &n as *const {{BASE_TYPE}} as *const {{ENUM_NAME}};";
+ code_ += " unsafe { *p }";
+ code_ += " }";
+ code_ += " #[inline]";
+ code_ += " fn from_little_endian(self) -> Self {";
+ code_ += " let n = {{BASE_TYPE}}::from_le(self as {{BASE_TYPE}});";
+ code_ += " let p = &n as *const {{BASE_TYPE}} as *const {{ENUM_NAME}};";
+ code_ += " unsafe { *p }";
+ code_ += " }";
+ code_ += "}";
+ code_ += "";
+ code_ += "impl flatbuffers::Push for {{ENUM_NAME}} {";
+ code_ += " type Output = {{ENUM_NAME}};";
+ code_ += " #[inline]";
+ code_ += " fn push(&self, dst: &mut [u8], _rest: &[u8]) {";
+ code_ += " flatbuffers::emplace_scalar::<{{ENUM_NAME}}>"
+ "(dst, *self);";
+ code_ += " }";
+ code_ += "}";
+ code_ += "";
+
+ // Generate an array of all enumeration values.
+ auto num_fields = NumToString(enum_def.vals.vec.size());
+ code_ += "#[allow(non_camel_case_types)]";
+ code_ += "const ENUM_VALUES_{{ENUM_NAME_CAPS}}:[{{ENUM_NAME}}; " +
+ num_fields + "] = [";
+ for (auto it = enum_def.vals.vec.begin(); it != enum_def.vals.vec.end();
+ ++it) {
+ const auto &ev = **it;
+ auto value = GetEnumValUse(enum_def, ev);
+ auto suffix = *it != enum_def.vals.vec.back() ? "," : "";
+ code_ += " " + value + suffix;
+ }
+ code_ += "];";
+ code_ += "";
+
+ // Generate a string table for enum values.
+    // The problem is that if values are very sparse, this could generate very
+    // big tables. Ideally we would generate a map lookup instead in that case,
+    // but for the moment we simply don't output a table at all.
+ auto range =
+ enum_def.vals.vec.back()->value - enum_def.vals.vec.front()->value + 1;
+ // Average distance between values above which we consider a table
+ // "too sparse". Change at will.
+ static const int kMaxSparseness = 5;
+ if (range / static_cast<int64_t>(enum_def.vals.vec.size()) <
+ kMaxSparseness) {
+ code_ += "#[allow(non_camel_case_types)]";
+ code_ += "const ENUM_NAMES_{{ENUM_NAME_CAPS}}:[&'static str; " +
+ NumToString(range) + "] = [";
+
+ auto val = enum_def.vals.vec.front()->value;
+ for (auto it = enum_def.vals.vec.begin(); it != enum_def.vals.vec.end();
+ ++it) {
+ const auto &ev = **it;
+ while (val++ != ev.value) { code_ += " \"\","; }
+ auto suffix = *it != enum_def.vals.vec.back() ? "," : "";
+ code_ += " \"" + Name(ev) + "\"" + suffix;
+ }
+ code_ += "];";
+ code_ += "";
+
+ code_ += "pub fn enum_name_{{ENUM_NAME_SNAKE}}(e: {{ENUM_NAME}}) -> "
+ "&'static str {";
+
+ code_ += " let index: usize = e as usize\\";
+ if (enum_def.vals.vec.front()->value) {
+ auto vals = GetEnumValUse(enum_def, *enum_def.vals.vec.front());
+ code_ += " - " + vals + " as usize\\";
+ }
+ code_ += ";";
+
+ code_ += " ENUM_NAMES_{{ENUM_NAME_CAPS}}[index]";
+ code_ += "}";
+ code_ += "";
+ }
+
+ if (enum_def.is_union) {
+      // Generate typesafe offset(s) for unions.
+ code_.SetValue("NAME", Name(enum_def));
+ code_.SetValue("UNION_OFFSET_NAME", Name(enum_def) + "UnionTableOffset");
+ code_ += "pub struct {{UNION_OFFSET_NAME}} {}";
+ }
+ }
+
+ std::string GetFieldOffsetName(const FieldDef &field) {
+ return "VT_" + MakeUpper(Name(field));
+ }
+
+ std::string GetDefaultConstant(const FieldDef &field) {
+ return field.value.type.base_type == BASE_TYPE_FLOAT
+ ? field.value.constant + ""
+ : field.value.constant;
+ }
+
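+  // Illustrative defaults produced by GetDefaultScalarValue (hypothetical
+  // schema fields): `int x = 42;` yields "42", `bool b = true;` yields "true",
+  // an enum field yields its variant path (e.g. Color::Red), and all
+  // offset-like fields (strings, tables, vectors, unions) yield "None".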
+ std::string GetDefaultScalarValue(const FieldDef &field) {
+ switch (GetFullType(field.value.type)) {
+ case ftInteger: { return GetDefaultConstant(field); }
+ case ftFloat: { return GetDefaultConstant(field); }
+ case ftBool: {
+ return field.value.constant == "0" ? "false" : "true";
+ }
+ case ftUnionKey:
+ case ftEnumKey: {
+ auto ev = field.value.type.enum_def->ReverseLookup(
+ StringToInt(field.value.constant.c_str()), false);
+ assert(ev);
+ return WrapInNameSpace(field.value.type.enum_def->defined_namespace,
+ GetEnumValUse(*field.value.type.enum_def, *ev));
+ }
+
+ // All pointer-ish types have a default value of None, because they are
+ // wrapped in Option.
+ default: { return "None"; }
+ }
+ }
+
+ // Create the return type for fields in the *BuilderArgs structs that are
+ // used to create Tables.
+ //
+ // Note: we could make all inputs to the BuilderArgs be an Option, as well
+  // as all outputs. However, the FlatBuffers UX is that the user doesn't get to
+  // know whether the value is the default or not, because there are three ways to
+ // return a default value:
+ // 1) return a stored value that happens to be the default,
+ // 2) return a hardcoded value because the relevant vtable field is not in
+ // the vtable, or
+ // 3) return a hardcoded value because the vtable field value is set to zero.
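+  //
+  // For example (illustrative): a `name: string` field becomes
+  // `Option<flatbuffers::WIPOffset<&'a str>>` in the Args struct, while a
+  // scalar field like `hp: short` stays a plain `i16` carrying its default.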
+ std::string TableBuilderArgsDefnType(const FieldDef &field,
+ const std::string lifetime) {
+ const Type& type = field.value.type;
+
+ switch (GetFullType(type)) {
+ case ftInteger:
+ case ftFloat:
+ case ftBool: {
+ const auto typname = GetTypeBasic(type);
+ return typname;
+ }
+ case ftStruct: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "Option<&" + lifetime + " " + typname + ">";
+ }
+ case ftTable: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "Option<flatbuffers::WIPOffset<" + typname + "<" + lifetime + \
+ ">>>";
+ }
+ case ftString: {
+ return "Option<flatbuffers::WIPOffset<&" + lifetime + " str>>";
+ }
+ case ftEnumKey:
+ case ftUnionKey: {
+ const auto typname = WrapInNameSpace(*type.enum_def);
+ return typname;
+ }
+ case ftUnionValue: {
+ const auto typname = WrapInNameSpace(*type.enum_def);
+ return "Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>";
+ }
+
+ case ftVectorOfInteger:
+ case ftVectorOfFloat: {
+ const auto typname = GetTypeBasic(type.VectorType());
+ return "Option<flatbuffers::WIPOffset<flatbuffers::Vector<" + \
+ lifetime + ", " + typname + ">>>";
+ }
+ case ftVectorOfBool: {
+ return "Option<flatbuffers::WIPOffset<flatbuffers::Vector<" + \
+ lifetime + ", bool>>>";
+ }
+ case ftVectorOfEnumKey: {
+ const auto typname = WrapInNameSpace(*type.enum_def);
+ return "Option<flatbuffers::WIPOffset<flatbuffers::Vector<" + \
+ lifetime + ", " + typname + ">>>";
+ }
+ case ftVectorOfStruct: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "Option<flatbuffers::WIPOffset<flatbuffers::Vector<" + \
+ lifetime + ", " + typname + ">>>";
+ }
+ case ftVectorOfTable: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "Option<flatbuffers::WIPOffset<flatbuffers::Vector<" + \
+ lifetime + ", flatbuffers::ForwardsUOffset<" + typname + \
+ "<" + lifetime + ">>>>>";
+ }
+ case ftVectorOfString: {
+ return "Option<flatbuffers::WIPOffset<flatbuffers::Vector<" + \
+ lifetime + ", flatbuffers::ForwardsUOffset<&" + lifetime + \
+ " str>>>>";
+ }
+ case ftVectorOfUnionValue: {
+ const auto typname = WrapInNameSpace(*type.enum_def) + \
+ "UnionTableOffset";
+ return "Option<flatbuffers::WIPOffset<flatbuffers::Vector<" + \
+ lifetime + ", flatbuffers::ForwardsUOffset<"
+ "flatbuffers::Table<" + lifetime + ">>>>";
+ }
+ }
+ return "INVALID_CODE_GENERATION"; // for return analysis
+ }
+
+ std::string TableBuilderArgsDefaultValue(const FieldDef &field) {
+ return GetDefaultScalarValue(field);
+ }
+ std::string TableBuilderAddFuncDefaultValue(const FieldDef &field) {
+ switch (GetFullType(field.value.type)) {
+ case ftUnionKey:
+ case ftEnumKey: {
+ const std::string basetype = GetTypeBasic(field.value.type);
+ return GetDefaultScalarValue(field);
+ }
+
+ default: { return GetDefaultScalarValue(field); }
+ }
+ }
+
+ std::string TableBuilderArgsAddFuncType(const FieldDef &field,
+ const std::string lifetime) {
+ const Type& type = field.value.type;
+
+ switch (GetFullType(field.value.type)) {
+ case ftVectorOfStruct: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "flatbuffers::WIPOffset<flatbuffers::Vector<" + lifetime + \
+ ", " + typname + ">>";
+ }
+ case ftVectorOfTable: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "flatbuffers::WIPOffset<flatbuffers::Vector<" + lifetime + \
+ ", flatbuffers::ForwardsUOffset<" + typname + \
+ "<" + lifetime + ">>>>";
+ }
+ case ftVectorOfInteger:
+ case ftVectorOfFloat: {
+ const auto typname = GetTypeBasic(type.VectorType());
+ return "flatbuffers::WIPOffset<flatbuffers::Vector<" + lifetime + \
+ ", " + typname + ">>";
+ }
+ case ftVectorOfBool: {
+ return "flatbuffers::WIPOffset<flatbuffers::Vector<" + lifetime + \
+ ", bool>>";
+ }
+ case ftVectorOfString: {
+ return "flatbuffers::WIPOffset<flatbuffers::Vector<" + lifetime + \
+ ", flatbuffers::ForwardsUOffset<&" + lifetime + " str>>>";
+ }
+ case ftVectorOfEnumKey: {
+ const auto typname = WrapInNameSpace(*type.enum_def);
+ return "flatbuffers::WIPOffset<flatbuffers::Vector<" + lifetime + \
+ ", " + typname + ">>";
+ }
+ case ftVectorOfUnionValue: {
+ const auto typname = WrapInNameSpace(*type.enum_def);
+ return "flatbuffers::WIPOffset<flatbuffers::Vector<" + lifetime + \
+ ", flatbuffers::ForwardsUOffset<flatbuffers::Table<" + \
+ lifetime + ">>>";
+ }
+ case ftEnumKey: {
+ const auto typname = WrapInNameSpace(*type.enum_def);
+ return typname;
+ }
+ case ftStruct: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "&" + lifetime + " " + typname + "";
+ }
+ case ftTable: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "flatbuffers::WIPOffset<" + typname + "<" + lifetime + ">>";
+ }
+ case ftInteger:
+ case ftFloat: {
+ const auto typname = GetTypeBasic(type);
+ return typname;
+ }
+ case ftBool: {
+ return "bool";
+ }
+ case ftString: {
+ return "flatbuffers::WIPOffset<&" + lifetime + " str>";
+ }
+ case ftUnionKey: {
+ const auto typname = WrapInNameSpace(*type.enum_def);
+ return typname;
+ }
+ case ftUnionValue: {
+ const auto typname = WrapInNameSpace(*type.enum_def);
+ return "flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>";
+ }
+ }
+
+ return "INVALID_CODE_GENERATION"; // for return analysis
+ }
+
+ std::string TableBuilderArgsAddFuncBody(const FieldDef &field) {
+ const Type& type = field.value.type;
+
+ switch (GetFullType(field.value.type)) {
+ case ftInteger:
+ case ftFloat: {
+ const auto typname = GetTypeBasic(field.value.type);
+ return "self.fbb_.push_slot::<" + typname + ">";
+ }
+ case ftBool: {
+ return "self.fbb_.push_slot::<bool>";
+ }
+
+ case ftEnumKey:
+ case ftUnionKey: {
+ const auto underlying_typname = GetTypeBasic(type);
+ return "self.fbb_.push_slot::<" + underlying_typname + ">";
+ }
+
+ case ftStruct: {
+ const std::string typname = WrapInNameSpace(*type.struct_def);
+ return "self.fbb_.push_slot_always::<&" + typname + ">";
+ }
+ case ftTable: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "self.fbb_.push_slot_always::<flatbuffers::WIPOffset<" + \
+ typname + ">>";
+ }
+
+ case ftUnionValue:
+ case ftString:
+ case ftVectorOfInteger:
+ case ftVectorOfFloat:
+ case ftVectorOfBool:
+ case ftVectorOfEnumKey:
+ case ftVectorOfStruct:
+ case ftVectorOfTable:
+ case ftVectorOfString:
+ case ftVectorOfUnionValue: {
+ return "self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>";
+ }
+ }
+ return "INVALID_CODE_GENERATION"; // for return analysis
+ }
+
+ std::string GenTableAccessorFuncReturnType(const FieldDef &field,
+ const std::string lifetime) {
+ const Type& type = field.value.type;
+
+ switch (GetFullType(field.value.type)) {
+ case ftInteger:
+ case ftFloat: {
+ const auto typname = GetTypeBasic(type);
+ return typname;
+ }
+ case ftBool: {
+ return "bool";
+ }
+ case ftStruct: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "Option<&" + lifetime + " " + typname + ">";
+ }
+ case ftTable: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "Option<" + typname + "<" + lifetime + ">>";
+ }
+ case ftEnumKey:
+ case ftUnionKey: {
+ const auto typname = WrapInNameSpace(*type.enum_def);
+ return typname;
+ }
+
+ case ftUnionValue: {
+ return "Option<flatbuffers::Table<" + lifetime + ">>";
+ }
+ case ftString: {
+ return "Option<&" + lifetime + " str>";
+ }
+ case ftVectorOfInteger:
+ case ftVectorOfFloat: {
+ const auto typname = GetTypeBasic(type.VectorType());
+ if (IsOneByte(type.VectorType().base_type)) {
+ return "Option<&" + lifetime + " [" + typname + "]>";
+ }
+ return "Option<flatbuffers::Vector<" + lifetime + ", " + typname + ">>";
+ }
+ case ftVectorOfBool: {
+ return "Option<&" + lifetime + " [bool]>";
+ }
+ case ftVectorOfEnumKey: {
+ const auto typname = WrapInNameSpace(*type.enum_def);
+ return "Option<flatbuffers::Vector<" + lifetime + ", " + typname + ">>";
+ }
+ case ftVectorOfStruct: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "Option<&" + lifetime + " [" + typname + "]>";
+ }
+ case ftVectorOfTable: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "Option<flatbuffers::Vector<flatbuffers::ForwardsUOffset<" + \
+ typname + "<" + lifetime + ">>>>";
+ }
+ case ftVectorOfString: {
+ return "Option<flatbuffers::Vector<flatbuffers::ForwardsUOffset<&" + \
+ lifetime + " str>>>";
+ }
+ case ftVectorOfUnionValue: {
+ FLATBUFFERS_ASSERT(false && "vectors of unions are not yet supported");
+ // TODO(rw): when we do support these, we should consider using the
+ // Into trait to convert tables to typesafe union values.
+ return "INVALID_CODE_GENERATION"; // for return analysis
+ }
+ }
+ return "INVALID_CODE_GENERATION"; // for return analysis
+ }
+
+ std::string GenTableAccessorFuncBody(const FieldDef &field,
+ const std::string lifetime,
+ const std::string offset_prefix) {
+ const std::string offset_name = offset_prefix + "::" + \
+ GetFieldOffsetName(field);
+ const Type& type = field.value.type;
+
+ switch (GetFullType(field.value.type)) {
+ case ftInteger:
+ case ftFloat:
+ case ftBool: {
+ const auto typname = GetTypeBasic(type);
+ const auto default_value = GetDefaultScalarValue(field);
+ return "self._tab.get::<" + typname + ">(" + offset_name + ", Some(" + \
+ default_value + ")).unwrap()";
+ }
+ case ftStruct: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "self._tab.get::<" + typname + ">(" + offset_name + ", None)";
+ }
+ case ftTable: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "self._tab.get::<flatbuffers::ForwardsUOffset<" + typname + \
+ "<" + lifetime + ">>>(" + offset_name + ", None)";
+ }
+ case ftUnionValue: {
+ return "self._tab.get::<flatbuffers::ForwardsUOffset<"
+ "flatbuffers::Table<" + lifetime + ">>>(" + offset_name + \
+ ", None)";
+ }
+ case ftUnionKey:
+ case ftEnumKey: {
+ const auto underlying_typname = GetTypeBasic(type);
+ const auto typname = WrapInNameSpace(*type.enum_def);
+ const auto default_value = GetDefaultScalarValue(field);
+ return "self._tab.get::<" + typname + ">(" + offset_name + \
+ ", Some(" + default_value + ")).unwrap()";
+ }
+ case ftString: {
+ return "self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(" + \
+ offset_name + ", None)";
+ }
+
+ case ftVectorOfInteger:
+ case ftVectorOfFloat: {
+ const auto typname = GetTypeBasic(type.VectorType());
+ std::string s = "self._tab.get::<flatbuffers::ForwardsUOffset<"
+ "flatbuffers::Vector<" + lifetime + ", " + typname + \
+ ">>>(" + offset_name + ", None)";
+ // single-byte values are safe to slice
+ if (IsOneByte(type.VectorType().base_type)) {
+ s += ".map(|v| v.safe_slice())";
+ }
+ return s;
+ }
+ case ftVectorOfBool: {
+ return "self._tab.get::<flatbuffers::ForwardsUOffset<"
+ "flatbuffers::Vector<" + lifetime + ", bool>>>(" + \
+ offset_name + ", None).map(|v| v.safe_slice())";
+ }
+ case ftVectorOfEnumKey: {
+ const auto typname = WrapInNameSpace(*type.enum_def);
+ return "self._tab.get::<flatbuffers::ForwardsUOffset<"
+ "flatbuffers::Vector<" + lifetime + ", " + typname + ">>>(" + \
+ offset_name + ", None)";
+ }
+ case ftVectorOfStruct: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "self._tab.get::<flatbuffers::ForwardsUOffset<"
+ "flatbuffers::Vector<" + typname + ">>>(" + \
+ offset_name + ", None).map(|v| v.safe_slice() )";
+ }
+ case ftVectorOfTable: {
+ const auto typname = WrapInNameSpace(*type.struct_def);
+ return "self._tab.get::<flatbuffers::ForwardsUOffset<"
+ "flatbuffers::Vector<flatbuffers::ForwardsUOffset<" + typname + \
+ "<" + lifetime + ">>>>>(" + offset_name + ", None)";
+ }
+ case ftVectorOfString: {
+ return "self._tab.get::<flatbuffers::ForwardsUOffset<"
+ "flatbuffers::Vector<flatbuffers::ForwardsUOffset<&" + \
+ lifetime + " str>>>>(" + offset_name + ", None)";
+ }
+ case ftVectorOfUnionValue: {
+ FLATBUFFERS_ASSERT(false && "vectors of unions are not yet supported");
+ return "INVALID_CODE_GENERATION"; // for return analysis
+ }
+ }
+ return "INVALID_CODE_GENERATION"; // for return analysis
+ }
+
+ bool TableFieldReturnsOption(const Type& type) {
+ switch (GetFullType(type)) {
+ case ftInteger:
+ case ftFloat:
+ case ftBool:
+ case ftEnumKey:
+ case ftUnionKey:
+ return false;
+ default: return true;
+ }
+ }
+
+ // Generate an accessor struct, builder struct, and create function for a
+ // table.
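+  //
+  // As an illustrative sketch, for a hypothetical
+  //   table Weapon { name: string; damage: short; }
+  // the generated API can be used roughly like:
+  //   let mut fbb = flatbuffers::FlatBufferBuilder::new();
+  //   let name = fbb.create_string("Axe");
+  //   let w = Weapon::create(&mut fbb,
+  //                          &WeaponArgs { name: Some(name), damage: 5 });
+  // or, equivalently, via WeaponBuilder::new(&mut fbb), add_* calls, and
+  // finish().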
+ void GenTable(const StructDef &struct_def) {
+ GenComment(struct_def.doc_comment);
+
+ code_.SetValue("STRUCT_NAME", Name(struct_def));
+ code_.SetValue("OFFSET_TYPELABEL", Name(struct_def) + "Offset");
+ code_.SetValue("STRUCT_NAME_SNAKECASE", MakeSnakeCase(Name(struct_def)));
+
+ // Generate an offset type, the base type, the Follow impl, and the
+ // init_from_table impl.
+ code_ += "pub enum {{OFFSET_TYPELABEL}} {}";
+ code_ += "#[derive(Copy, Clone, Debug, PartialEq)]";
+ code_ += "";
+ code_ += "pub struct {{STRUCT_NAME}}<'a> {";
+ code_ += " pub _tab: flatbuffers::Table<'a>,";
+ code_ += "}";
+ code_ += "";
+ code_ += "impl<'a> flatbuffers::Follow<'a> for {{STRUCT_NAME}}<'a> {";
+ code_ += " type Inner = {{STRUCT_NAME}}<'a>;";
+ code_ += " #[inline]";
+ code_ += " fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {";
+ code_ += " Self {";
+ code_ += " _tab: flatbuffers::Table { buf: buf, loc: loc },";
+ code_ += " }";
+ code_ += " }";
+ code_ += "}";
+ code_ += "";
+ code_ += "impl<'a> {{STRUCT_NAME}}<'a> {";
+ code_ += " #[inline]";
+ code_ += " pub fn init_from_table(table: flatbuffers::Table<'a>) -> "
+ "Self {";
+ code_ += " {{STRUCT_NAME}} {";
+ code_ += " _tab: table,";
+ code_ += " }";
+ code_ += " }";
+
+ // Generate a convenient create* function that uses the above builder
+ // to create a table in one function call.
+ code_.SetValue("MAYBE_US",
+ struct_def.fields.vec.size() == 0 ? "_" : "");
+ code_.SetValue("MAYBE_LT",
+ TableBuilderArgsNeedsLifetime(struct_def) ? "<'args>" : "");
+ code_ += " #[allow(unused_mut)]";
+ code_ += " pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(";
+ code_ += " _fbb: "
+ "&'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,";
+ code_ += " {{MAYBE_US}}args: &'args {{STRUCT_NAME}}Args{{MAYBE_LT}})"
+ " -> flatbuffers::WIPOffset<{{STRUCT_NAME}}<'bldr>> {";
+
+ code_ += " let mut builder = {{STRUCT_NAME}}Builder::new(_fbb);";
+ for (size_t size = struct_def.sortbysize ? sizeof(largest_scalar_t) : 1;
+ size; size /= 2) {
+ for (auto it = struct_def.fields.vec.rbegin();
+ it != struct_def.fields.vec.rend(); ++it) {
+ const auto &field = **it;
+ // TODO(rw): fully understand this sortbysize usage
+ if (!field.deprecated && (!struct_def.sortbysize ||
+ size == SizeOf(field.value.type.base_type))) {
+ code_.SetValue("FIELD_NAME", Name(field));
+ if (TableFieldReturnsOption(field.value.type)) {
+ code_ += " if let Some(x) = args.{{FIELD_NAME}} "
+ "{ builder.add_{{FIELD_NAME}}(x); }";
+ } else {
+ code_ += " builder.add_{{FIELD_NAME}}(args.{{FIELD_NAME}});";
+ }
+ }
+ }
+ }
+ code_ += " builder.finish()";
+ code_ += " }";
+ code_ += "";
+
+ // Generate field id constants.
+ if (struct_def.fields.vec.size() > 0) {
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ const auto &field = **it;
+ if (field.deprecated) {
+ // Deprecated fields won't be accessible.
+ continue;
+ }
+
+ code_.SetValue("OFFSET_NAME", GetFieldOffsetName(field));
+ code_.SetValue("OFFSET_VALUE", NumToString(field.value.offset));
+ code_ += " pub const {{OFFSET_NAME}}: flatbuffers::VOffsetT = "
+ "{{OFFSET_VALUE}};";
+ }
+ code_ += "";
+ }
+
+ // Generate the accessors. Each has one of two forms:
+ //
+ // If a value can be None:
+ // pub fn name(&'a self) -> Option<user_facing_type> {
+ // self._tab.get::<internal_type>(offset, defaultval)
+ // }
+ //
+ // If a value is always Some:
+ // pub fn name(&'a self) -> user_facing_type {
+ // self._tab.get::<internal_type>(offset, defaultval).unwrap()
+ // }
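+    //
+    // For example (illustrative), reading a hypothetical Monster table:
+    //   let hp: i16 = monster.hp();              // scalar, never None
+    //   let name: Option<&str> = monster.name(); // offset field, may be None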
+ const auto offset_prefix = Name(struct_def);
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ const auto &field = **it;
+ if (field.deprecated) {
+ // Deprecated fields won't be accessible.
+ continue;
+ }
+
+ code_.SetValue("FIELD_NAME", Name(field));
+ code_.SetValue("RETURN_TYPE",
+ GenTableAccessorFuncReturnType(field, "'a"));
+ code_.SetValue("FUNC_BODY",
+ GenTableAccessorFuncBody(field, "'a", offset_prefix));
+
+ GenComment(field.doc_comment, " ");
+ code_ += " #[inline]";
+ code_ += " pub fn {{FIELD_NAME}}(&'a self) -> {{RETURN_TYPE}} {";
+ code_ += " {{FUNC_BODY}}";
+ code_ += " }";
+
+ // Generate a comparison function for this field if it is a key.
+ if (field.key) {
+ GenKeyFieldMethods(field);
+ }
+
+ // Generate a nested flatbuffer field, if applicable.
+ auto nested = field.attributes.Lookup("nested_flatbuffer");
+ if (nested) {
+ std::string qualified_name = nested->constant;
+ auto nested_root = parser_.LookupStruct(nested->constant);
+ if (nested_root == nullptr) {
+ qualified_name = parser_.current_namespace_->GetFullyQualifiedName(
+ nested->constant);
+ nested_root = parser_.LookupStruct(qualified_name);
+ }
+ FLATBUFFERS_ASSERT(nested_root); // Guaranteed to exist by parser.
+ (void)nested_root;
+
+ code_.SetValue("OFFSET_NAME",
+ offset_prefix + "::" + GetFieldOffsetName(field));
+ code_ += " pub fn {{FIELD_NAME}}_nested_flatbuffer(&'a self) -> "
+ " Option<{{STRUCT_NAME}}<'a>> {";
+ code_ += " match self.{{FIELD_NAME}}() {";
+ code_ += " None => { None }";
+ code_ += " Some(data) => {";
+ code_ += " use self::flatbuffers::Follow;";
+ code_ += " Some(<flatbuffers::ForwardsUOffset"
+ "<{{STRUCT_NAME}}<'a>>>::follow(data, 0))";
+ code_ += " },";
+ code_ += " }";
+ code_ += " }";
+ }
+ }
+
+ // Explicit specializations for union accessors
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ const auto &field = **it;
+ if (field.deprecated || field.value.type.base_type != BASE_TYPE_UNION) {
+ continue;
+ }
+
+ auto u = field.value.type.enum_def;
+ if (u->uses_type_aliases) continue;
+
+ code_.SetValue("FIELD_NAME", Name(field));
+
+ for (auto u_it = u->vals.vec.begin(); u_it != u->vals.vec.end(); ++u_it) {
+ auto &ev = **u_it;
+ if (ev.union_type.base_type == BASE_TYPE_NONE) { continue; }
+
+ auto table_init_type = WrapInNameSpace(
+ ev.union_type.struct_def->defined_namespace,
+ ev.union_type.struct_def->name);
+
+ code_.SetValue("U_ELEMENT_ENUM_TYPE",
+ WrapInNameSpace(u->defined_namespace, GetEnumValUse(*u, ev)));
+ code_.SetValue("U_ELEMENT_TABLE_TYPE", table_init_type);
+ code_.SetValue("U_ELEMENT_NAME", MakeSnakeCase(Name(ev)));
+
+ code_ += " #[inline]";
+ code_ += " #[allow(non_snake_case)]";
+ code_ += " pub fn {{FIELD_NAME}}_as_{{U_ELEMENT_NAME}}(&'a self) -> "
+ "Option<{{U_ELEMENT_TABLE_TYPE}}> {";
+ code_ += " if self.{{FIELD_NAME}}_type() == {{U_ELEMENT_ENUM_TYPE}} {";
+ code_ += " self.{{FIELD_NAME}}().map(|u| "
+ "{{U_ELEMENT_TABLE_TYPE}}::init_from_table(u))";
+ code_ += " } else {";
+ code_ += " None";
+ code_ += " }";
+ code_ += " }";
+ code_ += "";
+ }
+ }
+
+ code_ += "}"; // End of table impl.
+ code_ += "";
+
+ // Generate an args struct:
+ code_.SetValue("MAYBE_LT",
+ TableBuilderArgsNeedsLifetime(struct_def) ? "<'a>" : "");
+ code_ += "pub struct {{STRUCT_NAME}}Args{{MAYBE_LT}} {";
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ const auto &field = **it;
+ if (!field.deprecated) {
+ code_.SetValue("PARAM_NAME", Name(field));
+ code_.SetValue("PARAM_TYPE", TableBuilderArgsDefnType(field, "'a "));
+ code_ += " pub {{PARAM_NAME}}: {{PARAM_TYPE}},";
+ }
+ }
+ code_ += "}";
+
+ // Generate an impl of Default for the *Args type:
+ code_ += "impl<'a> Default for {{STRUCT_NAME}}Args{{MAYBE_LT}} {";
+ code_ += " #[inline]";
+ code_ += " fn default() -> Self {";
+ code_ += " {{STRUCT_NAME}}Args {";
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ const auto &field = **it;
+ if (!field.deprecated) {
+ code_.SetValue("PARAM_VALUE", TableBuilderArgsDefaultValue(field));
+ code_.SetValue("REQ", field.required ? " // required field" : "");
+ code_.SetValue("PARAM_NAME", Name(field));
+ code_ += " {{PARAM_NAME}}: {{PARAM_VALUE}},{{REQ}}";
+ }
+ }
+ code_ += " }";
+ code_ += " }";
+ code_ += "}";
+
+ // Generate a builder struct:
+ code_ += "pub struct {{STRUCT_NAME}}Builder<'a: 'b, 'b> {";
+ code_ += " fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,";
+ code_ += " start_: flatbuffers::WIPOffset<"
+ "flatbuffers::TableUnfinishedWIPOffset>,";
+ code_ += "}";
+
+ // Generate builder functions:
+ code_ += "impl<'a: 'b, 'b> {{STRUCT_NAME}}Builder<'a, 'b> {";
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ const auto &field = **it;
+ if (!field.deprecated) {
+ const bool is_scalar = IsScalar(field.value.type.base_type);
+
+ std::string offset = GetFieldOffsetName(field);
+ std::string name = Name(field);
+ std::string value = GetDefaultScalarValue(field);
+
+ // Generate functions to add data, which take one of two forms.
+ //
+ // If a value has a default:
+ // fn add_x(x_: type) {
+ // fbb_.push_slot::<type>(offset, x_, Some(default));
+ // }
+ //
+ // If a value does not have a default:
+ // fn add_x(x_: type) {
+ // fbb_.push_slot_always::<type>(offset, x_);
+ // }
+ code_.SetValue("FIELD_NAME", Name(field));
+ code_.SetValue("FIELD_OFFSET", Name(struct_def) + "::" + offset);
+ code_.SetValue("FIELD_TYPE", TableBuilderArgsAddFuncType(field, "'b "));
+ code_.SetValue("FUNC_BODY", TableBuilderArgsAddFuncBody(field));
+ code_ += " #[inline]";
+ code_ += " pub fn add_{{FIELD_NAME}}(&mut self, {{FIELD_NAME}}: "
+ "{{FIELD_TYPE}}) {";
+ if (is_scalar) {
+ code_.SetValue("FIELD_DEFAULT_VALUE",
+ TableBuilderAddFuncDefaultValue(field));
+ code_ += " {{FUNC_BODY}}({{FIELD_OFFSET}}, {{FIELD_NAME}}, "
+ "{{FIELD_DEFAULT_VALUE}});";
+ } else {
+ code_ += " {{FUNC_BODY}}({{FIELD_OFFSET}}, {{FIELD_NAME}});";
+ }
+ code_ += " }";
+ }
+ }
+
+    // Struct initializer (all fields required).
+ code_ += " #[inline]";
+ code_ +=
+ " pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> "
+ "{{STRUCT_NAME}}Builder<'a, 'b> {";
+ code_.SetValue("NUM_FIELDS", NumToString(struct_def.fields.vec.size()));
+ code_ += " let start = _fbb.start_table();";
+ code_ += " {{STRUCT_NAME}}Builder {";
+ code_ += " fbb_: _fbb,";
+ code_ += " start_: start,";
+ code_ += " }";
+ code_ += " }";
+
+ // finish() function.
+ code_ += " #[inline]";
+ code_ += " pub fn finish(self) -> "
+ "flatbuffers::WIPOffset<{{STRUCT_NAME}}<'a>> {";
+ code_ += " let o = self.fbb_.end_table(self.start_);";
+
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ const auto &field = **it;
+ if (!field.deprecated && field.required) {
+ code_.SetValue("FIELD_NAME", MakeSnakeCase(Name(field)));
+ code_.SetValue("OFFSET_NAME", GetFieldOffsetName(field));
+ code_ += " self.fbb_.required(o, {{STRUCT_NAME}}::{{OFFSET_NAME}},"
+ "\"{{FIELD_NAME}}\");";
+ }
+ }
+ code_ += " flatbuffers::WIPOffset::new(o.value())";
+ code_ += " }";
+ code_ += "}";
+ code_ += "";
+ }
+
+ // Generate functions to compare tables and structs by key. This function
+ // must only be called if the field key is defined.
+ void GenKeyFieldMethods(const FieldDef &field) {
+ FLATBUFFERS_ASSERT(field.key);
+ const bool is_string = (field.value.type.base_type == BASE_TYPE_STRING);
+
+ if (is_string) {
+ code_.SetValue("KEY_TYPE", "Option<&str>");
+ } else {
+ FLATBUFFERS_ASSERT(IsScalar(field.value.type.base_type));
+ auto type = GetTypeBasic(field.value.type);
+ if (parser_.opts.scoped_enums && field.value.type.enum_def &&
+ IsScalar(field.value.type.base_type)) {
+ type = GetTypeGet(field.value.type);
+ }
+ code_.SetValue("KEY_TYPE", type);
+ }
+
+ code_ += " #[inline]";
+ code_ += " pub fn key_compare_less_than(&self, o: &{{STRUCT_NAME}}) -> "
+ " bool {";
+ code_ += " self.{{FIELD_NAME}}() < o.{{FIELD_NAME}}()";
+ code_ += " }";
+ code_ += "";
+ code_ += " #[inline]";
+ code_ += " pub fn key_compare_with_value(&self, val: {{KEY_TYPE}}) -> "
+ " ::std::cmp::Ordering {";
+ code_ += " let key = self.{{FIELD_NAME}}();";
+ code_ += " key.cmp(&val)";
+ code_ += " }";
+ }
+
+ // Generate functions for accessing the root table object. This function
+ // must only be called if the root table is defined.
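+  //
+  // Illustrative round trip for a hypothetical root type Monster (sketch):
+  //   finish_monster_buffer(&mut fbb, root);
+  //   let buf = fbb.finished_data();
+  //   let monster = get_root_as_monster(buf);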
+ void GenRootTableFuncs(const StructDef &struct_def) {
+ FLATBUFFERS_ASSERT(parser_.root_struct_def_ && "root table not defined");
+ auto name = Name(struct_def);
+
+ code_.SetValue("STRUCT_NAME", name);
+ code_.SetValue("STRUCT_NAME_SNAKECASE", MakeSnakeCase(name));
+ code_.SetValue("STRUCT_NAME_CAPS", MakeUpper(MakeSnakeCase(name)));
+
+ // The root datatype accessors:
+ code_ += "#[inline]";
+ code_ +=
+ "pub fn get_root_as_{{STRUCT_NAME_SNAKECASE}}<'a>(buf: &'a [u8])"
+ " -> {{STRUCT_NAME}}<'a> {";
+ code_ += " flatbuffers::get_root::<{{STRUCT_NAME}}<'a>>(buf)";
+ code_ += "}";
+ code_ += "";
+
+ code_ += "#[inline]";
+ code_ += "pub fn get_size_prefixed_root_as_{{STRUCT_NAME_SNAKECASE}}"
+ "<'a>(buf: &'a [u8]) -> {{STRUCT_NAME}}<'a> {";
+ code_ += " flatbuffers::get_size_prefixed_root::<{{STRUCT_NAME}}<'a>>"
+ "(buf)";
+ code_ += "}";
+ code_ += "";
+
+ if (parser_.file_identifier_.length()) {
+ // Declare the identifier
+ code_ += "pub const {{STRUCT_NAME_CAPS}}_IDENTIFIER: &'static str\\";
+ code_ += " = \"" + parser_.file_identifier_ + "\";";
+ code_ += "";
+
+ // Check if a buffer has the identifier.
+ code_ += "#[inline]";
+ code_ += "pub fn {{STRUCT_NAME_SNAKECASE}}_buffer_has_identifier\\";
+ code_ += "(buf: &[u8]) -> bool {";
+ code_ += " return flatbuffers::buffer_has_identifier(buf, \\";
+ code_ += "{{STRUCT_NAME_CAPS}}_IDENTIFIER, false);";
+ code_ += "}";
+ code_ += "";
+ code_ += "#[inline]";
+ code_ += "pub fn {{STRUCT_NAME_SNAKECASE}}_size_prefixed\\";
+ code_ += "_buffer_has_identifier(buf: &[u8]) -> bool {";
+ code_ += " return flatbuffers::buffer_has_identifier(buf, \\";
+ code_ += "{{STRUCT_NAME_CAPS}}_IDENTIFIER, true);";
+ code_ += "}";
+ code_ += "";
+ }
+
+ if (parser_.file_extension_.length()) {
+ // Return the extension
+ code_ += "pub const {{STRUCT_NAME_CAPS}}_EXTENSION: &'static str = \\";
+ code_ += "\"" + parser_.file_extension_ + "\";";
+ code_ += "";
+ }
+
+ // Finish a buffer with a given root object:
+ code_.SetValue("OFFSET_TYPELABEL", Name(struct_def) + "Offset");
+ code_ += "#[inline]";
+ code_ += "pub fn finish_{{STRUCT_NAME_SNAKECASE}}_buffer<'a, 'b>(";
+ code_ += " fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,";
+ code_ += " root: flatbuffers::WIPOffset<{{STRUCT_NAME}}<'a>>) {";
+ if (parser_.file_identifier_.length()) {
+ code_ += " fbb.finish(root, Some({{STRUCT_NAME_CAPS}}_IDENTIFIER));";
+ } else {
+ code_ += " fbb.finish(root, None);";
+ }
+ code_ += "}";
+ code_ += "";
+ code_ += "#[inline]";
+ code_ += "pub fn finish_size_prefixed_{{STRUCT_NAME_SNAKECASE}}_buffer"
+ "<'a, 'b>("
+ "fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, "
+ "root: flatbuffers::WIPOffset<{{STRUCT_NAME}}<'a>>) {";
+ if (parser_.file_identifier_.length()) {
+ code_ += " fbb.finish_size_prefixed(root, "
+ "Some({{STRUCT_NAME_CAPS}}_IDENTIFIER));";
+ } else {
+ code_ += " fbb.finish_size_prefixed(root, None);";
+ }
+ code_ += "}";
+ }
+
+ static void GenPadding(
+ const FieldDef &field, std::string *code_ptr, int *id,
+ const std::function<void(int bits, std::string *code_ptr, int *id)> &f) {
+ if (field.padding) {
+ for (int i = 0; i < 4; i++) {
+ if (static_cast<int>(field.padding) & (1 << i)) {
+ f((1 << i) * 8, code_ptr, id);
+ }
+ }
+ assert(!(field.padding & ~0xF));
+ }
+ }
+
+ static void PaddingDefinition(int bits, std::string *code_ptr, int *id) {
+ *code_ptr += " padding" + NumToString((*id)++) + "__: u" + \
+ NumToString(bits) + ",";
+ }
+
+ static void PaddingInitializer(int bits, std::string *code_ptr, int *id) {
+ (void)bits;
+ *code_ptr += "padding" + NumToString((*id)++) + "__: 0,";
+ }
+
+ // Generate an accessor struct with constructor for a flatbuffers struct.
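+  //
+  // As an illustrative sketch, a hypothetical
+  //   struct Vec3 { x: float; y: float; z: float; }
+  // yields roughly:
+  //   #[repr(C, align(4))]
+  //   #[derive(Clone, Copy, Debug, PartialEq)]
+  //   pub struct Vec3 { x_: f32, y_: f32, z_: f32 }
+  // with a new() constructor that stores little-endian values and accessors
+  // (x(), y(), z()) that convert back with from_little_endian().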
+ void GenStruct(const StructDef &struct_def) {
+ // Generates manual padding and alignment.
+ // Variables are private because they contain little endian data on all
+ // platforms.
+ GenComment(struct_def.doc_comment);
+ code_.SetValue("ALIGN", NumToString(struct_def.minalign));
+ code_.SetValue("STRUCT_NAME", Name(struct_def));
+
+ code_ += "// struct {{STRUCT_NAME}}, aligned to {{ALIGN}}";
+ code_ += "#[repr(C, align({{ALIGN}}))]";
+
+ // PartialEq is useful to derive because we can correctly compare structs
+ // for equality by just comparing their underlying byte data. This doesn't
+ // hold for PartialOrd/Ord.
+ code_ += "#[derive(Clone, Copy, Debug, PartialEq)]";
+ code_ += "pub struct {{STRUCT_NAME}} {";
+
+ int padding_id = 0;
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ const auto &field = **it;
+ code_.SetValue("FIELD_TYPE", GetTypeGet(field.value.type));
+ code_.SetValue("FIELD_NAME", Name(field));
+ code_ += " {{FIELD_NAME}}_: {{FIELD_TYPE}},";
+
+ if (field.padding) {
+ std::string padding;
+ GenPadding(field, &padding, &padding_id, PaddingDefinition);
+ code_ += padding;
+ }
+ }
+
+ code_ += "} // pub struct {{STRUCT_NAME}}";
+
+ // Generate impls for SafeSliceAccess (because all structs are endian-safe),
+ // Follow for the value type, Follow for the reference type, Push for the
+ // value type, and Push for the reference type.
+ code_ += "impl flatbuffers::SafeSliceAccess for {{STRUCT_NAME}} {}";
+ code_ += "impl<'a> flatbuffers::Follow<'a> for {{STRUCT_NAME}} {";
+ code_ += " type Inner = &'a {{STRUCT_NAME}};";
+ code_ += " #[inline]";
+ code_ += " fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {";
+ code_ += " <&'a {{STRUCT_NAME}}>::follow(buf, loc)";
+ code_ += " }";
+ code_ += "}";
+ code_ += "impl<'a> flatbuffers::Follow<'a> for &'a {{STRUCT_NAME}} {";
+ code_ += " type Inner = &'a {{STRUCT_NAME}};";
+ code_ += " #[inline]";
+ code_ += " fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {";
+ code_ += " flatbuffers::follow_cast_ref::<{{STRUCT_NAME}}>(buf, loc)";
+ code_ += " }";
+ code_ += "}";
+ code_ += "impl<'b> flatbuffers::Push for {{STRUCT_NAME}} {";
+ code_ += " type Output = {{STRUCT_NAME}};";
+ code_ += " #[inline]";
+ code_ += " fn push(&self, dst: &mut [u8], _rest: &[u8]) {";
+ code_ += " let src = unsafe {";
+ code_ += " ::std::slice::from_raw_parts("
+ "self as *const {{STRUCT_NAME}} as *const u8, Self::size())";
+ code_ += " };";
+ code_ += " dst.copy_from_slice(src);";
+ code_ += " }";
+ code_ += "}";
+ code_ += "impl<'b> flatbuffers::Push for &'b {{STRUCT_NAME}} {";
+ code_ += " type Output = {{STRUCT_NAME}};";
+ code_ += "";
+ code_ += " #[inline]";
+ code_ += " fn push(&self, dst: &mut [u8], _rest: &[u8]) {";
+ code_ += " let src = unsafe {";
+ code_ += " ::std::slice::from_raw_parts("
+ "*self as *const {{STRUCT_NAME}} as *const u8, Self::size())";
+ code_ += " };";
+ code_ += " dst.copy_from_slice(src);";
+ code_ += " }";
+ code_ += "}";
+ code_ += "";
+ code_ += "";
+
+ // Generate a constructor that takes all fields as arguments.
+ code_ += "impl {{STRUCT_NAME}} {";
+ std::string arg_list;
+ std::string init_list;
+ padding_id = 0;
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ const auto &field = **it;
+ const auto member_name = Name(field) + "_";
+ const auto reference = StructMemberAccessNeedsCopy(field.value.type)
+ ? "" : "&'a ";
+ const auto arg_name = "_" + Name(field);
+ const auto arg_type = reference + GetTypeGet(field.value.type);
+
+ if (it != struct_def.fields.vec.begin()) {
+ arg_list += ", ";
+ }
+ arg_list += arg_name + ": ";
+ arg_list += arg_type;
+ init_list += " " + member_name;
+ if (StructMemberAccessNeedsCopy(field.value.type)) {
+ init_list += ": " + arg_name + ".to_little_endian(),\n";
+ } else {
+ init_list += ": *" + arg_name + ",\n";
+ }
+ }
+
+ code_.SetValue("ARG_LIST", arg_list);
+ code_.SetValue("INIT_LIST", init_list);
+ code_ += " pub fn new<'a>({{ARG_LIST}}) -> Self {";
+ code_ += " {{STRUCT_NAME}} {";
+ code_ += "{{INIT_LIST}}";
+ padding_id = 0;
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ const auto &field = **it;
+ if (field.padding) {
+ std::string padding;
+ GenPadding(field, &padding, &padding_id, PaddingInitializer);
+ code_ += " " + padding;
+ }
+ }
+ code_ += " }";
+ code_ += " }";
+
+ // Generate accessor methods for the struct.
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ const auto &field = **it;
+
+ auto field_type = TableBuilderArgsAddFuncType(field, "'a");
+ auto member = "self." + Name(field) + "_";
+ auto value = StructMemberAccessNeedsCopy(field.value.type) ?
+ member + ".from_little_endian()" : member;
+
+ code_.SetValue("FIELD_NAME", Name(field));
+ code_.SetValue("FIELD_TYPE", field_type);
+ code_.SetValue("FIELD_VALUE", value);
+ code_.SetValue("REF", IsStruct(field.value.type) ? "&" : "");
+
+ GenComment(field.doc_comment, " ");
+ code_ += " pub fn {{FIELD_NAME}}<'a>(&'a self) -> {{FIELD_TYPE}} {";
+ code_ += " {{REF}}{{FIELD_VALUE}}";
+ code_ += " }";
+
+ // Generate a comparison function for this field if it is a key.
+ if (field.key) {
+ GenKeyFieldMethods(field);
+ }
+ }
+ code_ += "}";
+ code_ += "";
+ }
+
+ // Set up the correct namespace. This opens a namespace if the current
+ // namespace is different from the target namespace. This function
+ // closes and opens the namespaces only as necessary.
+ //
+ // The file must start and end with an empty (or null) namespace so that
+ // namespaces are properly opened and closed.
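+  //
+  // For example (illustrative), moving from namespace A.B.C to A.B.E emits
+  // "} // pub mod C" to close C and then "pub mod e {" (plus the module
+  // preamble of allow attributes and imports) to open E.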
+ void SetNameSpace(const Namespace *ns) {
+ if (cur_name_space_ == ns) { return; }
+
+ // Compute the size of the longest common namespace prefix.
+ // If cur_name_space is A::B::C::D and ns is A::B::E::F::G,
+ // the common prefix is A::B:: and we have old_size = 4, new_size = 5
+ // and common_prefix_size = 2
+ size_t old_size = cur_name_space_ ? cur_name_space_->components.size() : 0;
+ size_t new_size = ns ? ns->components.size() : 0;
+
+ size_t common_prefix_size = 0;
+ while (common_prefix_size < old_size && common_prefix_size < new_size &&
+ ns->components[common_prefix_size] ==
+ cur_name_space_->components[common_prefix_size]) {
+ common_prefix_size++;
+ }
+
+ // Close cur_name_space in reverse order to reach the common prefix.
+ // In the previous example, D then C are closed.
+ for (size_t j = old_size; j > common_prefix_size; --j) {
+ code_ += "} // pub mod " + cur_name_space_->components[j - 1];
+ }
+ if (old_size != common_prefix_size) { code_ += ""; }
+
+    // Open namespace parts to reach the ns namespace.
+    // In the previous example, E, then F, then G are opened.
+ for (auto j = common_prefix_size; j != new_size; ++j) {
+ code_ += "pub mod " + MakeSnakeCase(ns->components[j]) + " {";
+ code_ += " #![allow(dead_code)]";
+ code_ += " #![allow(unused_imports)]";
+ code_ += "";
+ code_ += " use std::mem;";
+ code_ += " use std::cmp::Ordering;";
+ code_ += "";
+ code_ += " extern crate flatbuffers;";
+ code_ += " use self::flatbuffers::EndianScalar;";
+ }
+ if (new_size != common_prefix_size) { code_ += ""; }
+
+ cur_name_space_ = ns;
+ }
+};
+
+} // namespace rust
+
+bool GenerateRust(const Parser &parser, const std::string &path,
+ const std::string &file_name) {
+ rust::RustGenerator generator(parser, path, file_name);
+ return generator.generate();
+}
+
+std::string RustMakeRule(const Parser &parser, const std::string &path,
+ const std::string &file_name) {
+ std::string filebase =
+ flatbuffers::StripPath(flatbuffers::StripExtension(file_name));
+ std::string make_rule = GeneratedFileName(path, filebase) + ": ";
+
+ auto included_files = parser.GetIncludedFilesRecursive(file_name);
+ for (auto it = included_files.begin(); it != included_files.end(); ++it) {
+ make_rule += " " + *it;
+ }
+ return make_rule;
+}
+
+} // namespace flatbuffers
+
+// TODO(rw): Generated code should import other generated files.
+// TODO(rw): Generated code should refer to namespaces in included files in a
+//   way that makes them referable.
+// TODO(rw): Generated code should indent according to nesting level.
+// TODO(rw): Generated code should generate endian-safe Debug impls.
+// TODO(rw): Generated code could use a Rust-only enum type to access unions,
+// instead of making the user use _type() to manually switch.
diff --git a/src/idl_gen_text.cpp b/src/idl_gen_text.cpp
index 41d19125..563f6901 100644
--- a/src/idl_gen_text.cpp
+++ b/src/idl_gen_text.cpp
@@ -131,7 +131,7 @@ bool Print<const void *>(const void *val, Type type, int indent,
switch (type.base_type) {
// clang-format off
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
case BASE_TYPE_ ## ENUM: \
if (!PrintVector<CTYPE>( \
*reinterpret_cast<const Vector<CTYPE> *>(val), \
@@ -223,7 +223,7 @@ static bool GenStruct(const StructDef &struct_def, const Table *table,
switch (fd.value.type.base_type) {
// clang-format off
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
case BASE_TYPE_ ## ENUM: \
if (!GenField<CTYPE>(fd, table, struct_def.fixed, \
opts, indent + Indent(opts), _text)) { \
@@ -234,7 +234,7 @@ static bool GenStruct(const StructDef &struct_def, const Table *table,
#undef FLATBUFFERS_TD
// Generate drop-thru case statements for all pointer types:
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
case BASE_TYPE_ ## ENUM:
FLATBUFFERS_GEN_TYPES_POINTER(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
diff --git a/src/idl_parser.cpp b/src/idl_parser.cpp
index 366a77be..d10a2e7e 100644
--- a/src/idl_parser.cpp
+++ b/src/idl_parser.cpp
@@ -30,7 +30,7 @@ const double kPi = 3.14159265358979323846;
const char *const kTypeNames[] = {
// clang-format off
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
IDLTYPE,
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
@@ -41,7 +41,7 @@ const char *const kTypeNames[] = {
const char kTypeSizes[] = {
// clang-format off
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
sizeof(CTYPE),
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
@@ -217,7 +217,7 @@ static std::string TokenToString(int t) {
FLATBUFFERS_GEN_TOKENS(FLATBUFFERS_TOKEN)
#undef FLATBUFFERS_TOKEN
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
IDLTYPE,
FLATBUFFERS_GEN_TYPES(FLATBUFFERS_TD)
#undef FLATBUFFERS_TD
@@ -1077,7 +1077,7 @@ CheckedError Parser::ParseTable(const StructDef &struct_def, std::string *value,
switch (field_value.type.base_type) {
// clang-format off
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
case BASE_TYPE_ ## ENUM: \
builder_.Pad(field->padding); \
if (struct_def.fixed) { \
@@ -1094,7 +1094,7 @@ CheckedError Parser::ParseTable(const StructDef &struct_def, std::string *value,
FLATBUFFERS_GEN_TYPES_SCALAR(FLATBUFFERS_TD);
#undef FLATBUFFERS_TD
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
case BASE_TYPE_ ## ENUM: \
builder_.Pad(field->padding); \
if (IsStruct(field->value.type)) { \
@@ -1176,7 +1176,7 @@ CheckedError Parser::ParseVector(const Type &type, uoffset_t *ovalue) {
switch (val.type.base_type) {
// clang-format off
#define FLATBUFFERS_TD(ENUM, IDLTYPE, \
- CTYPE, JTYPE, GTYPE, NTYPE, PTYPE) \
+ CTYPE, JTYPE, GTYPE, NTYPE, PTYPE, RTYPE) \
case BASE_TYPE_ ## ENUM: \
if (IsStruct(val.type)) SerializeStruct(*val.type.struct_def, val); \
else { \
diff --git a/tests/RustTest.sh b/tests/RustTest.sh
new file mode 100755
index 00000000..8388701c
--- /dev/null
+++ b/tests/RustTest.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+set -ex
+#
+# Copyright 2018 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+cd ./rust_usage_test
+# With `set -e` above, a separate `$?` check would be unreachable on failure, so test the command directly.
+if cargo test $1; then
+ echo "OK: Rust tests passed."
+else
+ echo "KO: Rust tests failed."
+ exit 1
+fi
+
+cargo bench
diff --git a/tests/generate_code.bat b/tests/generate_code.bat
index eaadc5ab..b233c144 100644
--- a/tests/generate_code.bat
+++ b/tests/generate_code.bat
@@ -15,8 +15,8 @@
set buildtype=Release
if "%1"=="-b" set buildtype=%2
-..\%buildtype%\flatc.exe --cpp --java --csharp --go --binary --python --lobster --lua --js --ts --php --grpc --gen-mutable --reflect-names --gen-object-api --no-includes --cpp-ptr-type flatbuffers::unique_ptr --no-fb-import -I include_test monster_test.fbs monsterdata_test.json
-..\%buildtype%\flatc.exe --cpp --java --csharp --go --binary --python --lobster --lua --js --ts --php --gen-mutable --reflect-names --no-fb-import --cpp-ptr-type flatbuffers::unique_ptr -o namespace_test namespace_test/namespace_test1.fbs namespace_test/namespace_test2.fbs
+..\%buildtype%\flatc.exe --cpp --java --csharp --go --binary --python --lobster --lua --js --rust --ts --php --grpc --gen-mutable --reflect-names --gen-object-api --no-includes --cpp-ptr-type flatbuffers::unique_ptr --no-fb-import -I include_test monster_test.fbs monsterdata_test.json
+..\%buildtype%\flatc.exe --cpp --java --csharp --go --binary --python --lobster --lua --js --rust --ts --php --gen-mutable --reflect-names --no-fb-import --cpp-ptr-type flatbuffers::unique_ptr -o namespace_test namespace_test/namespace_test1.fbs namespace_test/namespace_test2.fbs
..\%buildtype%\flatc.exe --cpp --js --ts --php --gen-mutable --reflect-names --gen-object-api --cpp-ptr-type flatbuffers::unique_ptr -o union_vector ./union_vector/union_vector.fbs
..\%buildtype%\flatc.exe -b --schema --bfbs-comments -I include_test monster_test.fbs
..\%buildtype%\flatc.exe --jsonschema --schema -I include_test monster_test.fbs
@@ -24,4 +24,4 @@ cd ../samples
..\%buildtype%\flatc.exe --cpp --lobster --gen-mutable --reflect-names --gen-object-api --cpp-ptr-type flatbuffers::unique_ptr monster.fbs
cd ../reflection
-cd ../tests \ No newline at end of file
+cd ../tests
diff --git a/tests/generate_code.sh b/tests/generate_code.sh
index 8e060dbd..f25366b0 100755
--- a/tests/generate_code.sh
+++ b/tests/generate_code.sh
@@ -14,8 +14,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-../flatc --cpp --java --csharp --dart --go --binary --lobster --lua --python --js --ts --php --grpc --gen-mutable --reflect-names --gen-object-api --no-includes --cpp-ptr-type flatbuffers::unique_ptr --no-fb-import -I include_test monster_test.fbs monsterdata_test.json
-../flatc --cpp --java --csharp --dart --go --binary --lobster --lua --python --js --ts --php --gen-mutable --reflect-names --no-fb-import --cpp-ptr-type flatbuffers::unique_ptr -o namespace_test namespace_test/namespace_test1.fbs namespace_test/namespace_test2.fbs
+../flatc --cpp --java --csharp --dart --go --binary --lobster --lua --python --js --ts --php --rust --grpc --gen-mutable --reflect-names --gen-object-api --no-includes --cpp-ptr-type flatbuffers::unique_ptr --no-fb-import -I include_test monster_test.fbs monsterdata_test.json
+../flatc --cpp --java --csharp --dart --go --binary --lobster --lua --python --js --ts --php --rust --gen-mutable --reflect-names --no-fb-import --cpp-ptr-type flatbuffers::unique_ptr -o namespace_test namespace_test/namespace_test1.fbs namespace_test/namespace_test2.fbs
../flatc --cpp --js --ts --php --gen-mutable --reflect-names --gen-object-api --cpp-ptr-type flatbuffers::unique_ptr -o union_vector ./union_vector/union_vector.fbs
../flatc -b --schema --bfbs-comments -I include_test monster_test.fbs
../flatc --jsonschema --schema -I include_test monster_test.fbs
diff --git a/tests/monster_test_generated.rs b/tests/monster_test_generated.rs
new file mode 100644
index 00000000..aa236a20
--- /dev/null
+++ b/tests/monster_test_generated.rs
@@ -0,0 +1,1644 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+
+
+pub mod my_game {
+ #![allow(dead_code)]
+ #![allow(unused_imports)]
+
+ use std::mem;
+ use std::cmp::Ordering;
+
+ extern crate flatbuffers;
+ use self::flatbuffers::EndianScalar;
+
+pub enum InParentNamespaceOffset {}
+#[derive(Copy, Clone, Debug, PartialEq)]
+
+pub struct InParentNamespace<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for InParentNamespace<'a> {
+ type Inner = InParentNamespace<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self {
+ _tab: flatbuffers::Table { buf: buf, loc: loc },
+ }
+ }
+}
+
+impl<'a> InParentNamespace<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ InParentNamespace {
+ _tab: table,
+ }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ _args: &'args InParentNamespaceArgs) -> flatbuffers::WIPOffset<InParentNamespace<'bldr>> {
+ let mut builder = InParentNamespaceBuilder::new(_fbb);
+ builder.finish()
+ }
+
+}
+
+pub struct InParentNamespaceArgs {
+}
+impl<'a> Default for InParentNamespaceArgs {
+ #[inline]
+ fn default() -> Self {
+ InParentNamespaceArgs {
+ }
+ }
+}
+pub struct InParentNamespaceBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> InParentNamespaceBuilder<'a, 'b> {
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> InParentNamespaceBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ InParentNamespaceBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<InParentNamespace<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+pub mod example_2 {
+ #![allow(dead_code)]
+ #![allow(unused_imports)]
+
+ use std::mem;
+ use std::cmp::Ordering;
+
+ extern crate flatbuffers;
+ use self::flatbuffers::EndianScalar;
+
+pub enum MonsterOffset {}
+#[derive(Copy, Clone, Debug, PartialEq)]
+
+pub struct Monster<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for Monster<'a> {
+ type Inner = Monster<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self {
+ _tab: flatbuffers::Table { buf: buf, loc: loc },
+ }
+ }
+}
+
+impl<'a> Monster<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ Monster {
+ _tab: table,
+ }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ _args: &'args MonsterArgs) -> flatbuffers::WIPOffset<Monster<'bldr>> {
+ let mut builder = MonsterBuilder::new(_fbb);
+ builder.finish()
+ }
+
+}
+
+pub struct MonsterArgs {
+}
+impl<'a> Default for MonsterArgs {
+ #[inline]
+ fn default() -> Self {
+ MonsterArgs {
+ }
+ }
+}
+pub struct MonsterBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> MonsterBuilder<'a, 'b> {
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> MonsterBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ MonsterBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<Monster<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+} // pub mod Example2
+
+pub mod example {
+ #![allow(dead_code)]
+ #![allow(unused_imports)]
+
+ use std::mem;
+ use std::cmp::Ordering;
+
+ extern crate flatbuffers;
+ use self::flatbuffers::EndianScalar;
+
+#[allow(non_camel_case_types)]
+#[repr(i8)]
+#[derive(Clone, Copy, PartialEq, Debug)]
+pub enum Color {
+ Red = 1,
+ Green = 2,
+ Blue = 8
+}
+
+const ENUM_MIN_COLOR: i8 = 1;
+const ENUM_MAX_COLOR: i8 = 8;
+
+impl<'a> flatbuffers::Follow<'a> for Color {
+ type Inner = Self;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::read_scalar_at::<Self>(buf, loc)
+ }
+}
+
+impl flatbuffers::EndianScalar for Color {
+ #[inline]
+ fn to_little_endian(self) -> Self {
+ let n = i8::to_le(self as i8);
+ let p = &n as *const i8 as *const Color;
+ unsafe { *p }
+ }
+ #[inline]
+ fn from_little_endian(self) -> Self {
+ let n = i8::from_le(self as i8);
+ let p = &n as *const i8 as *const Color;
+ unsafe { *p }
+ }
+}
+
+impl flatbuffers::Push for Color {
+ type Output = Color;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ flatbuffers::emplace_scalar::<Color>(dst, *self);
+ }
+}
+
+#[allow(non_camel_case_types)]
+const ENUM_VALUES_COLOR:[Color; 3] = [
+ Color::Red,
+ Color::Green,
+ Color::Blue
+];
+
+#[allow(non_camel_case_types)]
+const ENUM_NAMES_COLOR:[&'static str; 8] = [
+ "Red",
+ "Green",
+ "",
+ "",
+ "",
+ "",
+ "",
+ "Blue"
+];
+
+pub fn enum_name_color(e: Color) -> &'static str {
+ let index: usize = e as usize - Color::Red as usize;
+ ENUM_NAMES_COLOR[index]
+}
+
+#[allow(non_camel_case_types)]
+#[repr(u8)]
+#[derive(Clone, Copy, PartialEq, Debug)]
+pub enum Any {
+ NONE = 0,
+ Monster = 1,
+ TestSimpleTableWithEnum = 2,
+ MyGame_Example2_Monster = 3
+}
+
+const ENUM_MIN_ANY: u8 = 0;
+const ENUM_MAX_ANY: u8 = 3;
+
+impl<'a> flatbuffers::Follow<'a> for Any {
+ type Inner = Self;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::read_scalar_at::<Self>(buf, loc)
+ }
+}
+
+impl flatbuffers::EndianScalar for Any {
+ #[inline]
+ fn to_little_endian(self) -> Self {
+ let n = u8::to_le(self as u8);
+ let p = &n as *const u8 as *const Any;
+ unsafe { *p }
+ }
+ #[inline]
+ fn from_little_endian(self) -> Self {
+ let n = u8::from_le(self as u8);
+ let p = &n as *const u8 as *const Any;
+ unsafe { *p }
+ }
+}
+
+impl flatbuffers::Push for Any {
+ type Output = Any;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ flatbuffers::emplace_scalar::<Any>(dst, *self);
+ }
+}
+
+#[allow(non_camel_case_types)]
+const ENUM_VALUES_ANY:[Any; 4] = [
+ Any::NONE,
+ Any::Monster,
+ Any::TestSimpleTableWithEnum,
+ Any::MyGame_Example2_Monster
+];
+
+#[allow(non_camel_case_types)]
+const ENUM_NAMES_ANY:[&'static str; 4] = [
+ "NONE",
+ "Monster",
+ "TestSimpleTableWithEnum",
+ "MyGame_Example2_Monster"
+];
+
+pub fn enum_name_any(e: Any) -> &'static str {
+ let index: usize = e as usize;
+ ENUM_NAMES_ANY[index]
+}
+
+pub struct AnyUnionTableOffset {}
+// struct Test, aligned to 2
+#[repr(C, align(2))]
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub struct Test {
+ a_: i16,
+ b_: i8,
+ padding0__: u8,
+} // pub struct Test
+impl flatbuffers::SafeSliceAccess for Test {}
+impl<'a> flatbuffers::Follow<'a> for Test {
+ type Inner = &'a Test;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ <&'a Test>::follow(buf, loc)
+ }
+}
+impl<'a> flatbuffers::Follow<'a> for &'a Test {
+ type Inner = &'a Test;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::follow_cast_ref::<Test>(buf, loc)
+ }
+}
+impl<'b> flatbuffers::Push for Test {
+ type Output = Test;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(self as *const Test as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+impl<'b> flatbuffers::Push for &'b Test {
+ type Output = Test;
+
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(*self as *const Test as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+
+
+impl Test {
+ pub fn new<'a>(_a: i16, _b: i8) -> Self {
+ Test {
+ a_: _a.to_little_endian(),
+ b_: _b.to_little_endian(),
+
+ padding0__: 0,
+ }
+ }
+ pub fn a<'a>(&'a self) -> i16 {
+ self.a_.from_little_endian()
+ }
+ pub fn b<'a>(&'a self) -> i8 {
+ self.b_.from_little_endian()
+ }
+}
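A minimal usage sketch for the generated fixed-size struct above: `new` stores each scalar little-endian and the accessors convert back, so values round-trip transparently regardless of host endianness.

    let t = Test::new(10, 20);
    assert_eq!(t.a(), 10);
    assert_eq!(t.b(), 20);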
+
+// struct Vec3, aligned to 16
+#[repr(C, align(16))]
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub struct Vec3 {
+ x_: f32,
+ y_: f32,
+ z_: f32,
+ padding0__: u32,
+ test1_: f64,
+ test2_: Color,
+ padding1__: u8,
+ test3_: Test,
+ padding2__: u16,
+} // pub struct Vec3
+impl flatbuffers::SafeSliceAccess for Vec3 {}
+impl<'a> flatbuffers::Follow<'a> for Vec3 {
+ type Inner = &'a Vec3;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ <&'a Vec3>::follow(buf, loc)
+ }
+}
+impl<'a> flatbuffers::Follow<'a> for &'a Vec3 {
+ type Inner = &'a Vec3;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::follow_cast_ref::<Vec3>(buf, loc)
+ }
+}
+impl<'b> flatbuffers::Push for Vec3 {
+ type Output = Vec3;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(self as *const Vec3 as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+impl<'b> flatbuffers::Push for &'b Vec3 {
+ type Output = Vec3;
+
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(*self as *const Vec3 as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+
+
+impl Vec3 {
+ pub fn new<'a>(_x: f32, _y: f32, _z: f32, _test1: f64, _test2: Color, _test3: &'a Test) -> Self {
+ Vec3 {
+ x_: _x.to_little_endian(),
+ y_: _y.to_little_endian(),
+ z_: _z.to_little_endian(),
+ test1_: _test1.to_little_endian(),
+ test2_: _test2.to_little_endian(),
+ test3_: *_test3,
+
+ padding0__: 0,
+ padding1__: 0,
+ padding2__: 0,
+ }
+ }
+ pub fn x<'a>(&'a self) -> f32 {
+ self.x_.from_little_endian()
+ }
+ pub fn y<'a>(&'a self) -> f32 {
+ self.y_.from_little_endian()
+ }
+ pub fn z<'a>(&'a self) -> f32 {
+ self.z_.from_little_endian()
+ }
+ pub fn test1<'a>(&'a self) -> f64 {
+ self.test1_.from_little_endian()
+ }
+ pub fn test2<'a>(&'a self) -> Color {
+ self.test2_.from_little_endian()
+ }
+ pub fn test3<'a>(&'a self) -> &'a Test {
+ &self.test3_
+ }
+}
+
+// struct Ability, aligned to 4
+#[repr(C, align(4))]
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub struct Ability {
+ id_: u32,
+ distance_: u32,
+} // pub struct Ability
+impl flatbuffers::SafeSliceAccess for Ability {}
+impl<'a> flatbuffers::Follow<'a> for Ability {
+ type Inner = &'a Ability;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ <&'a Ability>::follow(buf, loc)
+ }
+}
+impl<'a> flatbuffers::Follow<'a> for &'a Ability {
+ type Inner = &'a Ability;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::follow_cast_ref::<Ability>(buf, loc)
+ }
+}
+impl<'b> flatbuffers::Push for Ability {
+ type Output = Ability;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(self as *const Ability as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+impl<'b> flatbuffers::Push for &'b Ability {
+ type Output = Ability;
+
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(*self as *const Ability as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+
+
+impl Ability {
+ pub fn new<'a>(_id: u32, _distance: u32) -> Self {
+ Ability {
+ id_: _id.to_little_endian(),
+ distance_: _distance.to_little_endian(),
+
+ }
+ }
+ pub fn id<'a>(&'a self) -> u32 {
+ self.id_.from_little_endian()
+ }
+ #[inline]
+ pub fn key_compare_less_than(&self, o: &Ability) -> bool {
+ self.id() < o.id()
+ }
+
+ #[inline]
+ pub fn key_compare_with_value(&self, val: u32) -> ::std::cmp::Ordering {
+ let key = self.id();
+ key.cmp(&val)
+ }
+ pub fn distance<'a>(&'a self) -> u32 {
+ self.distance_.from_little_endian()
+ }
+}
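Because `id` is the schema key for Ability, the generated `key_compare_with_value` plugs directly into a standard binary search over a sorted slice; a sketch, where `abilities: &[Ability]` is assumed to come from an accessor such as `testarrayofsortedstruct` below:

    let found: Option<&Ability> = abilities
        .binary_search_by(|probe| probe.key_compare_with_value(7))
        .ok()
        .map(|i| &abilities[i]);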
+
+pub enum TestSimpleTableWithEnumOffset {}
+#[derive(Copy, Clone, Debug, PartialEq)]
+
+pub struct TestSimpleTableWithEnum<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for TestSimpleTableWithEnum<'a> {
+ type Inner = TestSimpleTableWithEnum<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self {
+ _tab: flatbuffers::Table { buf: buf, loc: loc },
+ }
+ }
+}
+
+impl<'a> TestSimpleTableWithEnum<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ TestSimpleTableWithEnum {
+ _tab: table,
+ }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args TestSimpleTableWithEnumArgs) -> flatbuffers::WIPOffset<TestSimpleTableWithEnum<'bldr>> {
+ let mut builder = TestSimpleTableWithEnumBuilder::new(_fbb);
+ builder.add_color(args.color);
+ builder.finish()
+ }
+
+ pub const VT_COLOR: flatbuffers::VOffsetT = 4;
+
+ #[inline]
+ pub fn color(&'a self) -> Color {
+ self._tab.get::<Color>(TestSimpleTableWithEnum::VT_COLOR, Some(Color::Green)).unwrap()
+ }
+}
+
+pub struct TestSimpleTableWithEnumArgs {
+ pub color: Color,
+}
+impl<'a> Default for TestSimpleTableWithEnumArgs {
+ #[inline]
+ fn default() -> Self {
+ TestSimpleTableWithEnumArgs {
+ color: Color::Green,
+ }
+ }
+}
+pub struct TestSimpleTableWithEnumBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> TestSimpleTableWithEnumBuilder<'a, 'b> {
+ #[inline]
+ pub fn add_color(&mut self, color: Color) {
+ self.fbb_.push_slot::<Color>(TestSimpleTableWithEnum::VT_COLOR, color, Color::Green);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TestSimpleTableWithEnumBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ TestSimpleTableWithEnumBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<TestSimpleTableWithEnum<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+pub enum StatOffset {}
+#[derive(Copy, Clone, Debug, PartialEq)]
+
+pub struct Stat<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for Stat<'a> {
+ type Inner = Stat<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self {
+ _tab: flatbuffers::Table { buf: buf, loc: loc },
+ }
+ }
+}
+
+impl<'a> Stat<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ Stat {
+ _tab: table,
+ }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args StatArgs<'args>) -> flatbuffers::WIPOffset<Stat<'bldr>> {
+ let mut builder = StatBuilder::new(_fbb);
+ builder.add_val(args.val);
+ if let Some(x) = args.id { builder.add_id(x); }
+ builder.add_count(args.count);
+ builder.finish()
+ }
+
+ pub const VT_ID: flatbuffers::VOffsetT = 4;
+ pub const VT_VAL: flatbuffers::VOffsetT = 6;
+ pub const VT_COUNT: flatbuffers::VOffsetT = 8;
+
+ #[inline]
+ pub fn id(&'a self) -> Option<&'a str> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(Stat::VT_ID, None)
+ }
+ #[inline]
+ pub fn val(&'a self) -> i64 {
+ self._tab.get::<i64>(Stat::VT_VAL, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn count(&'a self) -> u16 {
+ self._tab.get::<u16>(Stat::VT_COUNT, Some(0)).unwrap()
+ }
+}
+
+pub struct StatArgs<'a> {
+ pub id: Option<flatbuffers::WIPOffset<&'a str>>,
+ pub val: i64,
+ pub count: u16,
+}
+impl<'a> Default for StatArgs<'a> {
+ #[inline]
+ fn default() -> Self {
+ StatArgs {
+ id: None,
+ val: 0,
+ count: 0,
+ }
+ }
+}
+pub struct StatBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> StatBuilder<'a, 'b> {
+ #[inline]
+ pub fn add_id(&mut self, id: flatbuffers::WIPOffset<&'b str>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Stat::VT_ID, id);
+ }
+ #[inline]
+ pub fn add_val(&mut self, val: i64) {
+ self.fbb_.push_slot::<i64>(Stat::VT_VAL, val, 0);
+ }
+ #[inline]
+ pub fn add_count(&mut self, count: u16) {
+ self.fbb_.push_slot::<u16>(Stat::VT_COUNT, count, 0);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> StatBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ StatBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<Stat<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
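The table above can be built either through the `Stat::create` convenience wrapper or by driving `StatBuilder` directly; a sketch, assuming the `new_with_capacity` constructor and `create_string` exposed by this commit's runtime library:

    let mut fbb = flatbuffers::FlatBufferBuilder::new_with_capacity(64);
    let id = fbb.create_string("strength");
    // Convenience form: the Args struct mirrors the schema defaults.
    let _stat = Stat::create(&mut fbb, &StatArgs { id: Some(id), val: 42, count: 1 });
    // Explicit form, equivalent to the body of Stat::create above.
    let mut b = StatBuilder::new(&mut fbb);
    b.add_val(42);
    b.add_id(id);
    b.add_count(1);
    let _stat2 = b.finish();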
+
+pub enum ReferrableOffset {}
+#[derive(Copy, Clone, Debug, PartialEq)]
+
+pub struct Referrable<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for Referrable<'a> {
+ type Inner = Referrable<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self {
+ _tab: flatbuffers::Table { buf: buf, loc: loc },
+ }
+ }
+}
+
+impl<'a> Referrable<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ Referrable {
+ _tab: table,
+ }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args ReferrableArgs) -> flatbuffers::WIPOffset<Referrable<'bldr>> {
+ let mut builder = ReferrableBuilder::new(_fbb);
+ builder.add_id(args.id);
+ builder.finish()
+ }
+
+ pub const VT_ID: flatbuffers::VOffsetT = 4;
+
+ #[inline]
+ pub fn id(&'a self) -> u64 {
+ self._tab.get::<u64>(Referrable::VT_ID, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn key_compare_less_than(&self, o: &Referrable) -> bool {
+ self.id() < o.id()
+ }
+
+ #[inline]
+ pub fn key_compare_with_value(&self, val: u64) -> ::std::cmp::Ordering {
+ let key = self.id();
+ key.cmp(&val)
+ }
+}
+
+pub struct ReferrableArgs {
+ pub id: u64,
+}
+impl<'a> Default for ReferrableArgs {
+ #[inline]
+ fn default() -> Self {
+ ReferrableArgs {
+ id: 0,
+ }
+ }
+}
+pub struct ReferrableBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> ReferrableBuilder<'a, 'b> {
+ #[inline]
+ pub fn add_id(&mut self, id: u64) {
+ self.fbb_.push_slot::<u64>(Referrable::VT_ID, id, 0);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> ReferrableBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ ReferrableBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<Referrable<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+/// an example documentation comment: monster object
+pub enum MonsterOffset {}
+#[derive(Copy, Clone, Debug, PartialEq)]
+
+pub struct Monster<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for Monster<'a> {
+ type Inner = Monster<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self {
+ _tab: flatbuffers::Table { buf: buf, loc: loc },
+ }
+ }
+}
+
+impl<'a> Monster<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ Monster {
+ _tab: table,
+ }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args MonsterArgs<'args>) -> flatbuffers::WIPOffset<Monster<'bldr>> {
+ let mut builder = MonsterBuilder::new(_fbb);
+ builder.add_non_owning_reference(args.non_owning_reference);
+ builder.add_co_owning_reference(args.co_owning_reference);
+ builder.add_single_weak_reference(args.single_weak_reference);
+ builder.add_testhashu64_fnv1a(args.testhashu64_fnv1a);
+ builder.add_testhashs64_fnv1a(args.testhashs64_fnv1a);
+ builder.add_testhashu64_fnv1(args.testhashu64_fnv1);
+ builder.add_testhashs64_fnv1(args.testhashs64_fnv1);
+ if let Some(x) = args.vector_of_non_owning_references { builder.add_vector_of_non_owning_references(x); }
+ if let Some(x) = args.vector_of_co_owning_references { builder.add_vector_of_co_owning_references(x); }
+ if let Some(x) = args.vector_of_strong_referrables { builder.add_vector_of_strong_referrables(x); }
+ if let Some(x) = args.vector_of_weak_references { builder.add_vector_of_weak_references(x); }
+ if let Some(x) = args.vector_of_referrables { builder.add_vector_of_referrables(x); }
+ if let Some(x) = args.parent_namespace_test { builder.add_parent_namespace_test(x); }
+ if let Some(x) = args.vector_of_doubles { builder.add_vector_of_doubles(x); }
+ if let Some(x) = args.vector_of_longs { builder.add_vector_of_longs(x); }
+ if let Some(x) = args.test5 { builder.add_test5(x); }
+ if let Some(x) = args.flex { builder.add_flex(x); }
+ if let Some(x) = args.testarrayofsortedstruct { builder.add_testarrayofsortedstruct(x); }
+ if let Some(x) = args.testarrayofstring2 { builder.add_testarrayofstring2(x); }
+ builder.add_testf3(args.testf3);
+ builder.add_testf2(args.testf2);
+ builder.add_testf(args.testf);
+ if let Some(x) = args.testarrayofbools { builder.add_testarrayofbools(x); }
+ builder.add_testhashu32_fnv1a(args.testhashu32_fnv1a);
+ builder.add_testhashs32_fnv1a(args.testhashs32_fnv1a);
+ builder.add_testhashu32_fnv1(args.testhashu32_fnv1);
+ builder.add_testhashs32_fnv1(args.testhashs32_fnv1);
+ if let Some(x) = args.testempty { builder.add_testempty(x); }
+ if let Some(x) = args.testnestedflatbuffer { builder.add_testnestedflatbuffer(x); }
+ if let Some(x) = args.enemy { builder.add_enemy(x); }
+ if let Some(x) = args.testarrayoftables { builder.add_testarrayoftables(x); }
+ if let Some(x) = args.testarrayofstring { builder.add_testarrayofstring(x); }
+ if let Some(x) = args.test4 { builder.add_test4(x); }
+ if let Some(x) = args.test { builder.add_test(x); }
+ if let Some(x) = args.inventory { builder.add_inventory(x); }
+ if let Some(x) = args.name { builder.add_name(x); }
+ if let Some(x) = args.pos { builder.add_pos(x); }
+ builder.add_hp(args.hp);
+ builder.add_mana(args.mana);
+ builder.add_testbool(args.testbool);
+ builder.add_test_type(args.test_type);
+ builder.add_color(args.color);
+ builder.finish()
+ }
+
+ pub const VT_POS: flatbuffers::VOffsetT = 4;
+ pub const VT_MANA: flatbuffers::VOffsetT = 6;
+ pub const VT_HP: flatbuffers::VOffsetT = 8;
+ pub const VT_NAME: flatbuffers::VOffsetT = 10;
+ pub const VT_INVENTORY: flatbuffers::VOffsetT = 14;
+ pub const VT_COLOR: flatbuffers::VOffsetT = 16;
+ pub const VT_TEST_TYPE: flatbuffers::VOffsetT = 18;
+ pub const VT_TEST: flatbuffers::VOffsetT = 20;
+ pub const VT_TEST4: flatbuffers::VOffsetT = 22;
+ pub const VT_TESTARRAYOFSTRING: flatbuffers::VOffsetT = 24;
+ pub const VT_TESTARRAYOFTABLES: flatbuffers::VOffsetT = 26;
+ pub const VT_ENEMY: flatbuffers::VOffsetT = 28;
+ pub const VT_TESTNESTEDFLATBUFFER: flatbuffers::VOffsetT = 30;
+ pub const VT_TESTEMPTY: flatbuffers::VOffsetT = 32;
+ pub const VT_TESTBOOL: flatbuffers::VOffsetT = 34;
+ pub const VT_TESTHASHS32_FNV1: flatbuffers::VOffsetT = 36;
+ pub const VT_TESTHASHU32_FNV1: flatbuffers::VOffsetT = 38;
+ pub const VT_TESTHASHS64_FNV1: flatbuffers::VOffsetT = 40;
+ pub const VT_TESTHASHU64_FNV1: flatbuffers::VOffsetT = 42;
+ pub const VT_TESTHASHS32_FNV1A: flatbuffers::VOffsetT = 44;
+ pub const VT_TESTHASHU32_FNV1A: flatbuffers::VOffsetT = 46;
+ pub const VT_TESTHASHS64_FNV1A: flatbuffers::VOffsetT = 48;
+ pub const VT_TESTHASHU64_FNV1A: flatbuffers::VOffsetT = 50;
+ pub const VT_TESTARRAYOFBOOLS: flatbuffers::VOffsetT = 52;
+ pub const VT_TESTF: flatbuffers::VOffsetT = 54;
+ pub const VT_TESTF2: flatbuffers::VOffsetT = 56;
+ pub const VT_TESTF3: flatbuffers::VOffsetT = 58;
+ pub const VT_TESTARRAYOFSTRING2: flatbuffers::VOffsetT = 60;
+ pub const VT_TESTARRAYOFSORTEDSTRUCT: flatbuffers::VOffsetT = 62;
+ pub const VT_FLEX: flatbuffers::VOffsetT = 64;
+ pub const VT_TEST5: flatbuffers::VOffsetT = 66;
+ pub const VT_VECTOR_OF_LONGS: flatbuffers::VOffsetT = 68;
+ pub const VT_VECTOR_OF_DOUBLES: flatbuffers::VOffsetT = 70;
+ pub const VT_PARENT_NAMESPACE_TEST: flatbuffers::VOffsetT = 72;
+ pub const VT_VECTOR_OF_REFERRABLES: flatbuffers::VOffsetT = 74;
+ pub const VT_SINGLE_WEAK_REFERENCE: flatbuffers::VOffsetT = 76;
+ pub const VT_VECTOR_OF_WEAK_REFERENCES: flatbuffers::VOffsetT = 78;
+ pub const VT_VECTOR_OF_STRONG_REFERRABLES: flatbuffers::VOffsetT = 80;
+ pub const VT_CO_OWNING_REFERENCE: flatbuffers::VOffsetT = 82;
+ pub const VT_VECTOR_OF_CO_OWNING_REFERENCES: flatbuffers::VOffsetT = 84;
+ pub const VT_NON_OWNING_REFERENCE: flatbuffers::VOffsetT = 86;
+ pub const VT_VECTOR_OF_NON_OWNING_REFERENCES: flatbuffers::VOffsetT = 88;
+
+ #[inline]
+ pub fn pos(&'a self) -> Option<&'a Vec3> {
+ self._tab.get::<Vec3>(Monster::VT_POS, None)
+ }
+ #[inline]
+ pub fn mana(&'a self) -> i16 {
+ self._tab.get::<i16>(Monster::VT_MANA, Some(150)).unwrap()
+ }
+ #[inline]
+ pub fn hp(&'a self) -> i16 {
+ self._tab.get::<i16>(Monster::VT_HP, Some(100)).unwrap()
+ }
+ #[inline]
+ pub fn name(&'a self) -> Option<&'a str> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(Monster::VT_NAME, None)
+ }
+ #[inline]
+ pub fn key_compare_less_than(&self, o: &Monster) -> bool {
+ self.name() < o.name()
+ }
+
+ #[inline]
+ pub fn key_compare_with_value(&self, val: Option<&str>) -> ::std::cmp::Ordering {
+ let key = self.name();
+ key.cmp(&val)
+ }
+ #[inline]
+ pub fn inventory(&'a self) -> Option<&'a [u8]> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(Monster::VT_INVENTORY, None).map(|v| v.safe_slice())
+ }
+ #[inline]
+ pub fn color(&'a self) -> Color {
+ self._tab.get::<Color>(Monster::VT_COLOR, Some(Color::Blue)).unwrap()
+ }
+ #[inline]
+ pub fn test_type(&'a self) -> Any {
+ self._tab.get::<Any>(Monster::VT_TEST_TYPE, Some(Any::NONE)).unwrap()
+ }
+ #[inline]
+ pub fn test(&'a self) -> Option<flatbuffers::Table<'a>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(Monster::VT_TEST, None)
+ }
+ #[inline]
+ pub fn test4(&'a self) -> Option<&'a [Test]> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<Test>>>(Monster::VT_TEST4, None).map(|v| v.safe_slice() )
+ }
+ #[inline]
+ pub fn testarrayofstring(&'a self) -> Option<flatbuffers::Vector<flatbuffers::ForwardsUOffset<&'a str>>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<flatbuffers::ForwardsUOffset<&'a str>>>>(Monster::VT_TESTARRAYOFSTRING, None)
+ }
+ /// an example documentation comment: this will end up in the generated code
+ /// multiline too
+ #[inline]
+ pub fn testarrayoftables(&'a self) -> Option<flatbuffers::Vector<flatbuffers::ForwardsUOffset<Monster<'a>>>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<flatbuffers::ForwardsUOffset<Monster<'a>>>>>(Monster::VT_TESTARRAYOFTABLES, None)
+ }
+ #[inline]
+ pub fn enemy(&'a self) -> Option<Monster<'a>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<Monster<'a>>>(Monster::VT_ENEMY, None)
+ }
+ #[inline]
+ pub fn testnestedflatbuffer(&'a self) -> Option<&'a [u8]> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(Monster::VT_TESTNESTEDFLATBUFFER, None).map(|v| v.safe_slice())
+ }
+ pub fn testnestedflatbuffer_nested_flatbuffer(&'a self) -> Option<Monster<'a>> {
+ match self.testnestedflatbuffer() {
+ None => { None }
+ Some(data) => {
+ use self::flatbuffers::Follow;
+ Some(<flatbuffers::ForwardsUOffset<Monster<'a>>>::follow(data, 0))
+ },
+ }
+ }
+ #[inline]
+ pub fn testempty(&'a self) -> Option<Stat<'a>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<Stat<'a>>>(Monster::VT_TESTEMPTY, None)
+ }
+ #[inline]
+ pub fn testbool(&'a self) -> bool {
+ self._tab.get::<bool>(Monster::VT_TESTBOOL, Some(false)).unwrap()
+ }
+ #[inline]
+ pub fn testhashs32_fnv1(&'a self) -> i32 {
+ self._tab.get::<i32>(Monster::VT_TESTHASHS32_FNV1, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn testhashu32_fnv1(&'a self) -> u32 {
+ self._tab.get::<u32>(Monster::VT_TESTHASHU32_FNV1, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn testhashs64_fnv1(&'a self) -> i64 {
+ self._tab.get::<i64>(Monster::VT_TESTHASHS64_FNV1, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn testhashu64_fnv1(&'a self) -> u64 {
+ self._tab.get::<u64>(Monster::VT_TESTHASHU64_FNV1, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn testhashs32_fnv1a(&'a self) -> i32 {
+ self._tab.get::<i32>(Monster::VT_TESTHASHS32_FNV1A, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn testhashu32_fnv1a(&'a self) -> u32 {
+ self._tab.get::<u32>(Monster::VT_TESTHASHU32_FNV1A, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn testhashs64_fnv1a(&'a self) -> i64 {
+ self._tab.get::<i64>(Monster::VT_TESTHASHS64_FNV1A, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn testhashu64_fnv1a(&'a self) -> u64 {
+ self._tab.get::<u64>(Monster::VT_TESTHASHU64_FNV1A, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn testarrayofbools(&'a self) -> Option<&'a [bool]> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, bool>>>(Monster::VT_TESTARRAYOFBOOLS, None).map(|v| v.safe_slice())
+ }
+ #[inline]
+ pub fn testf(&'a self) -> f32 {
+ self._tab.get::<f32>(Monster::VT_TESTF, Some(3.14159)).unwrap()
+ }
+ #[inline]
+ pub fn testf2(&'a self) -> f32 {
+ self._tab.get::<f32>(Monster::VT_TESTF2, Some(3.0)).unwrap()
+ }
+ #[inline]
+ pub fn testf3(&'a self) -> f32 {
+ self._tab.get::<f32>(Monster::VT_TESTF3, Some(0.0)).unwrap()
+ }
+ #[inline]
+ pub fn testarrayofstring2(&'a self) -> Option<flatbuffers::Vector<flatbuffers::ForwardsUOffset<&'a str>>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<flatbuffers::ForwardsUOffset<&'a str>>>>(Monster::VT_TESTARRAYOFSTRING2, None)
+ }
+ #[inline]
+ pub fn testarrayofsortedstruct(&'a self) -> Option<&'a [Ability]> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<Ability>>>(Monster::VT_TESTARRAYOFSORTEDSTRUCT, None).map(|v| v.safe_slice() )
+ }
+ #[inline]
+ pub fn flex(&'a self) -> Option<&'a [u8]> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(Monster::VT_FLEX, None).map(|v| v.safe_slice())
+ }
+ #[inline]
+ pub fn test5(&'a self) -> Option<&'a [Test]> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<Test>>>(Monster::VT_TEST5, None).map(|v| v.safe_slice() )
+ }
+ #[inline]
+ pub fn vector_of_longs(&'a self) -> Option<flatbuffers::Vector<'a, i64>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, i64>>>(Monster::VT_VECTOR_OF_LONGS, None)
+ }
+ #[inline]
+ pub fn vector_of_doubles(&'a self) -> Option<flatbuffers::Vector<'a, f64>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, f64>>>(Monster::VT_VECTOR_OF_DOUBLES, None)
+ }
+ #[inline]
+ pub fn parent_namespace_test(&'a self) -> Option<super::InParentNamespace<'a>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<super::InParentNamespace<'a>>>(Monster::VT_PARENT_NAMESPACE_TEST, None)
+ }
+ #[inline]
+ pub fn vector_of_referrables(&'a self) -> Option<flatbuffers::Vector<flatbuffers::ForwardsUOffset<Referrable<'a>>>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<flatbuffers::ForwardsUOffset<Referrable<'a>>>>>(Monster::VT_VECTOR_OF_REFERRABLES, None)
+ }
+ #[inline]
+ pub fn single_weak_reference(&'a self) -> u64 {
+ self._tab.get::<u64>(Monster::VT_SINGLE_WEAK_REFERENCE, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn vector_of_weak_references(&'a self) -> Option<flatbuffers::Vector<'a, u64>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u64>>>(Monster::VT_VECTOR_OF_WEAK_REFERENCES, None)
+ }
+ #[inline]
+ pub fn vector_of_strong_referrables(&'a self) -> Option<flatbuffers::Vector<flatbuffers::ForwardsUOffset<Referrable<'a>>>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<flatbuffers::ForwardsUOffset<Referrable<'a>>>>>(Monster::VT_VECTOR_OF_STRONG_REFERRABLES, None)
+ }
+ #[inline]
+ pub fn co_owning_reference(&'a self) -> u64 {
+ self._tab.get::<u64>(Monster::VT_CO_OWNING_REFERENCE, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn vector_of_co_owning_references(&'a self) -> Option<flatbuffers::Vector<'a, u64>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u64>>>(Monster::VT_VECTOR_OF_CO_OWNING_REFERENCES, None)
+ }
+ #[inline]
+ pub fn non_owning_reference(&'a self) -> u64 {
+ self._tab.get::<u64>(Monster::VT_NON_OWNING_REFERENCE, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn vector_of_non_owning_references(&'a self) -> Option<flatbuffers::Vector<'a, u64>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u64>>>(Monster::VT_VECTOR_OF_NON_OWNING_REFERENCES, None)
+ }
+ #[inline]
+ #[allow(non_snake_case)]
+ pub fn test_as_monster(&'a self) -> Option<Monster> {
+ if self.test_type() == Any::Monster {
+ self.test().map(|u| Monster::init_from_table(u))
+ } else {
+ None
+ }
+ }
+
+ #[inline]
+ #[allow(non_snake_case)]
+ pub fn test_as_test_simple_table_with_enum(&'a self) -> Option<TestSimpleTableWithEnum> {
+ if self.test_type() == Any::TestSimpleTableWithEnum {
+ self.test().map(|u| TestSimpleTableWithEnum::init_from_table(u))
+ } else {
+ None
+ }
+ }
+
+ #[inline]
+ #[allow(non_snake_case)]
+ pub fn test_as_my_game___example_2___monster(&'a self) -> Option<super::example_2::Monster> {
+ if self.test_type() == Any::MyGame_Example2_Monster {
+ self.test().map(|u| super::example_2::Monster::init_from_table(u))
+ } else {
+ None
+ }
+ }
+
+}
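Union access on the generated Monster: the `test` field is carried as a type tag plus an untyped table, so a reader checks `test_type()` and then calls the matching `test_as_*` accessor (the manual `_type()` switch noted in the generator TODOs earlier in this patch). A sketch, assuming `m` is a Monster obtained from a buffer:

    match m.test_type() {
        Any::Monster => {
            let inner = m.test_as_monster().unwrap();
            let _hp = inner.hp();
        }
        Any::TestSimpleTableWithEnum => {
            let _t = m.test_as_test_simple_table_with_enum().unwrap();
        }
        _ => { /* Any::NONE or an unrecognized value */ }
    }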
+
+pub struct MonsterArgs<'a> {
+ pub pos: Option<&'a Vec3>,
+ pub mana: i16,
+ pub hp: i16,
+ pub name: Option<flatbuffers::WIPOffset<&'a str>>,
+ pub inventory: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , u8>>>,
+ pub color: Color,
+ pub test_type: Any,
+ pub test: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
+ pub test4: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , Test>>>,
+ pub testarrayofstring: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , flatbuffers::ForwardsUOffset<&'a str>>>>,
+ pub testarrayoftables: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , flatbuffers::ForwardsUOffset<Monster<'a >>>>>,
+ pub enemy: Option<flatbuffers::WIPOffset<Monster<'a >>>,
+ pub testnestedflatbuffer: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , u8>>>,
+ pub testempty: Option<flatbuffers::WIPOffset<Stat<'a >>>,
+ pub testbool: bool,
+ pub testhashs32_fnv1: i32,
+ pub testhashu32_fnv1: u32,
+ pub testhashs64_fnv1: i64,
+ pub testhashu64_fnv1: u64,
+ pub testhashs32_fnv1a: i32,
+ pub testhashu32_fnv1a: u32,
+ pub testhashs64_fnv1a: i64,
+ pub testhashu64_fnv1a: u64,
+ pub testarrayofbools: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , bool>>>,
+ pub testf: f32,
+ pub testf2: f32,
+ pub testf3: f32,
+ pub testarrayofstring2: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , flatbuffers::ForwardsUOffset<&'a str>>>>,
+ pub testarrayofsortedstruct: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , Ability>>>,
+ pub flex: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , u8>>>,
+ pub test5: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , Test>>>,
+ pub vector_of_longs: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , i64>>>,
+ pub vector_of_doubles: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , f64>>>,
+ pub parent_namespace_test: Option<flatbuffers::WIPOffset<super::InParentNamespace<'a >>>,
+ pub vector_of_referrables: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , flatbuffers::ForwardsUOffset<Referrable<'a >>>>>,
+ pub single_weak_reference: u64,
+ pub vector_of_weak_references: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , u64>>>,
+ pub vector_of_strong_referrables: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , flatbuffers::ForwardsUOffset<Referrable<'a >>>>>,
+ pub co_owning_reference: u64,
+ pub vector_of_co_owning_references: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , u64>>>,
+ pub non_owning_reference: u64,
+ pub vector_of_non_owning_references: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , u64>>>,
+}
+impl<'a> Default for MonsterArgs<'a> {
+ #[inline]
+ fn default() -> Self {
+ MonsterArgs {
+ pos: None,
+ mana: 150,
+ hp: 100,
+ name: None, // required field
+ inventory: None,
+ color: Color::Blue,
+ test_type: Any::NONE,
+ test: None,
+ test4: None,
+ testarrayofstring: None,
+ testarrayoftables: None,
+ enemy: None,
+ testnestedflatbuffer: None,
+ testempty: None,
+ testbool: false,
+ testhashs32_fnv1: 0,
+ testhashu32_fnv1: 0,
+ testhashs64_fnv1: 0,
+ testhashu64_fnv1: 0,
+ testhashs32_fnv1a: 0,
+ testhashu32_fnv1a: 0,
+ testhashs64_fnv1a: 0,
+ testhashu64_fnv1a: 0,
+ testarrayofbools: None,
+ testf: 3.14159,
+ testf2: 3.0,
+ testf3: 0.0,
+ testarrayofstring2: None,
+ testarrayofsortedstruct: None,
+ flex: None,
+ test5: None,
+ vector_of_longs: None,
+ vector_of_doubles: None,
+ parent_namespace_test: None,
+ vector_of_referrables: None,
+ single_weak_reference: 0,
+ vector_of_weak_references: None,
+ vector_of_strong_referrables: None,
+ co_owning_reference: 0,
+ vector_of_co_owning_references: None,
+ non_owning_reference: 0,
+ vector_of_non_owning_references: None,
+ }
+ }
+}
+pub struct MonsterBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> MonsterBuilder<'a, 'b> {
+ #[inline]
+ pub fn add_pos(&mut self, pos: &'b Vec3) {
+ self.fbb_.push_slot_always::<&Vec3>(Monster::VT_POS, pos);
+ }
+ #[inline]
+ pub fn add_mana(&mut self, mana: i16) {
+ self.fbb_.push_slot::<i16>(Monster::VT_MANA, mana, 150);
+ }
+ #[inline]
+ pub fn add_hp(&mut self, hp: i16) {
+ self.fbb_.push_slot::<i16>(Monster::VT_HP, hp, 100);
+ }
+ #[inline]
+ pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_NAME, name);
+ }
+ #[inline]
+ pub fn add_inventory(&mut self, inventory: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_INVENTORY, inventory);
+ }
+ #[inline]
+ pub fn add_color(&mut self, color: Color) {
+ self.fbb_.push_slot::<Color>(Monster::VT_COLOR, color, Color::Blue);
+ }
+ #[inline]
+ pub fn add_test_type(&mut self, test_type: Any) {
+ self.fbb_.push_slot::<Any>(Monster::VT_TEST_TYPE, test_type, Any::NONE);
+ }
+ #[inline]
+ pub fn add_test(&mut self, test: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_TEST, test);
+ }
+ #[inline]
+ pub fn add_test4(&mut self, test4: flatbuffers::WIPOffset<flatbuffers::Vector<'b , Test>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_TEST4, test4);
+ }
+ #[inline]
+ pub fn add_testarrayofstring(&mut self, testarrayofstring: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<&'b str>>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_TESTARRAYOFSTRING, testarrayofstring);
+ }
+ #[inline]
+ pub fn add_testarrayoftables(&mut self, testarrayoftables: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Monster<'b >>>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_TESTARRAYOFTABLES, testarrayoftables);
+ }
+ #[inline]
+ pub fn add_enemy(&mut self, enemy: flatbuffers::WIPOffset<Monster<'b >>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Monster>>(Monster::VT_ENEMY, enemy);
+ }
+ #[inline]
+ pub fn add_testnestedflatbuffer(&mut self, testnestedflatbuffer: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_TESTNESTEDFLATBUFFER, testnestedflatbuffer);
+ }
+ #[inline]
+ pub fn add_testempty(&mut self, testempty: flatbuffers::WIPOffset<Stat<'b >>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Stat>>(Monster::VT_TESTEMPTY, testempty);
+ }
+ #[inline]
+ pub fn add_testbool(&mut self, testbool: bool) {
+ self.fbb_.push_slot::<bool>(Monster::VT_TESTBOOL, testbool, false);
+ }
+ #[inline]
+ pub fn add_testhashs32_fnv1(&mut self, testhashs32_fnv1: i32) {
+ self.fbb_.push_slot::<i32>(Monster::VT_TESTHASHS32_FNV1, testhashs32_fnv1, 0);
+ }
+ #[inline]
+ pub fn add_testhashu32_fnv1(&mut self, testhashu32_fnv1: u32) {
+ self.fbb_.push_slot::<u32>(Monster::VT_TESTHASHU32_FNV1, testhashu32_fnv1, 0);
+ }
+ #[inline]
+ pub fn add_testhashs64_fnv1(&mut self, testhashs64_fnv1: i64) {
+ self.fbb_.push_slot::<i64>(Monster::VT_TESTHASHS64_FNV1, testhashs64_fnv1, 0);
+ }
+ #[inline]
+ pub fn add_testhashu64_fnv1(&mut self, testhashu64_fnv1: u64) {
+ self.fbb_.push_slot::<u64>(Monster::VT_TESTHASHU64_FNV1, testhashu64_fnv1, 0);
+ }
+ #[inline]
+ pub fn add_testhashs32_fnv1a(&mut self, testhashs32_fnv1a: i32) {
+ self.fbb_.push_slot::<i32>(Monster::VT_TESTHASHS32_FNV1A, testhashs32_fnv1a, 0);
+ }
+ #[inline]
+ pub fn add_testhashu32_fnv1a(&mut self, testhashu32_fnv1a: u32) {
+ self.fbb_.push_slot::<u32>(Monster::VT_TESTHASHU32_FNV1A, testhashu32_fnv1a, 0);
+ }
+ #[inline]
+ pub fn add_testhashs64_fnv1a(&mut self, testhashs64_fnv1a: i64) {
+ self.fbb_.push_slot::<i64>(Monster::VT_TESTHASHS64_FNV1A, testhashs64_fnv1a, 0);
+ }
+ #[inline]
+ pub fn add_testhashu64_fnv1a(&mut self, testhashu64_fnv1a: u64) {
+ self.fbb_.push_slot::<u64>(Monster::VT_TESTHASHU64_FNV1A, testhashu64_fnv1a, 0);
+ }
+ #[inline]
+ pub fn add_testarrayofbools(&mut self, testarrayofbools: flatbuffers::WIPOffset<flatbuffers::Vector<'b , bool>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_TESTARRAYOFBOOLS, testarrayofbools);
+ }
+ #[inline]
+ pub fn add_testf(&mut self, testf: f32) {
+ self.fbb_.push_slot::<f32>(Monster::VT_TESTF, testf, 3.14159);
+ }
+ #[inline]
+ pub fn add_testf2(&mut self, testf2: f32) {
+ self.fbb_.push_slot::<f32>(Monster::VT_TESTF2, testf2, 3.0);
+ }
+ #[inline]
+ pub fn add_testf3(&mut self, testf3: f32) {
+ self.fbb_.push_slot::<f32>(Monster::VT_TESTF3, testf3, 0.0);
+ }
+ #[inline]
+ pub fn add_testarrayofstring2(&mut self, testarrayofstring2: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<&'b str>>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_TESTARRAYOFSTRING2, testarrayofstring2);
+ }
+ #[inline]
+ pub fn add_testarrayofsortedstruct(&mut self, testarrayofsortedstruct: flatbuffers::WIPOffset<flatbuffers::Vector<'b , Ability>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_TESTARRAYOFSORTEDSTRUCT, testarrayofsortedstruct);
+ }
+ #[inline]
+ pub fn add_flex(&mut self, flex: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_FLEX, flex);
+ }
+ #[inline]
+ pub fn add_test5(&mut self, test5: flatbuffers::WIPOffset<flatbuffers::Vector<'b , Test>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_TEST5, test5);
+ }
+ #[inline]
+ pub fn add_vector_of_longs(&mut self, vector_of_longs: flatbuffers::WIPOffset<flatbuffers::Vector<'b , i64>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_VECTOR_OF_LONGS, vector_of_longs);
+ }
+ #[inline]
+ pub fn add_vector_of_doubles(&mut self, vector_of_doubles: flatbuffers::WIPOffset<flatbuffers::Vector<'b , f64>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_VECTOR_OF_DOUBLES, vector_of_doubles);
+ }
+ #[inline]
+ pub fn add_parent_namespace_test(&mut self, parent_namespace_test: flatbuffers::WIPOffset<super::InParentNamespace<'b >>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<super::InParentNamespace>>(Monster::VT_PARENT_NAMESPACE_TEST, parent_namespace_test);
+ }
+ #[inline]
+ pub fn add_vector_of_referrables(&mut self, vector_of_referrables: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Referrable<'b >>>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_VECTOR_OF_REFERRABLES, vector_of_referrables);
+ }
+ #[inline]
+ pub fn add_single_weak_reference(&mut self, single_weak_reference: u64) {
+ self.fbb_.push_slot::<u64>(Monster::VT_SINGLE_WEAK_REFERENCE, single_weak_reference, 0);
+ }
+ #[inline]
+ pub fn add_vector_of_weak_references(&mut self, vector_of_weak_references: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u64>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_VECTOR_OF_WEAK_REFERENCES, vector_of_weak_references);
+ }
+ #[inline]
+ pub fn add_vector_of_strong_referrables(&mut self, vector_of_strong_referrables: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Referrable<'b >>>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_VECTOR_OF_STRONG_REFERRABLES, vector_of_strong_referrables);
+ }
+ #[inline]
+ pub fn add_co_owning_reference(&mut self, co_owning_reference: u64) {
+ self.fbb_.push_slot::<u64>(Monster::VT_CO_OWNING_REFERENCE, co_owning_reference, 0);
+ }
+ #[inline]
+ pub fn add_vector_of_co_owning_references(&mut self, vector_of_co_owning_references: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u64>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_VECTOR_OF_CO_OWNING_REFERENCES, vector_of_co_owning_references);
+ }
+ #[inline]
+ pub fn add_non_owning_reference(&mut self, non_owning_reference: u64) {
+ self.fbb_.push_slot::<u64>(Monster::VT_NON_OWNING_REFERENCE, non_owning_reference, 0);
+ }
+ #[inline]
+ pub fn add_vector_of_non_owning_references(&mut self, vector_of_non_owning_references: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u64>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_VECTOR_OF_NON_OWNING_REFERENCES, vector_of_non_owning_references);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> MonsterBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ MonsterBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<Monster<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ self.fbb_.required(o, Monster::VT_NAME,"name");
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
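Putting the builder together: `finish` enforces the required `name` field, so a minimal serialization of a Monster looks roughly like the following (the `new_with_capacity`, `create_string`, and `finish` entry points are assumed from this commit's runtime library; the finished bytes can then be read back, e.g. via `flatbuffers::get_root::<Monster>`):

    let mut fbb = flatbuffers::FlatBufferBuilder::new_with_capacity(1024);
    let name = fbb.create_string("Orc");
    let mon = Monster::create(&mut fbb, &MonsterArgs {
        name: Some(name),   // required: checked by the required() call in finish()
        hp: 80,
        ..Default::default()
    });
    fbb.finish(mon, None);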
+
+pub enum TypeAliasesOffset {}
+#[derive(Copy, Clone, Debug, PartialEq)]
+
+pub struct TypeAliases<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for TypeAliases<'a> {
+ type Inner = TypeAliases<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self {
+ _tab: flatbuffers::Table { buf: buf, loc: loc },
+ }
+ }
+}
+
+impl<'a> TypeAliases<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ TypeAliases {
+ _tab: table,
+ }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args TypeAliasesArgs<'args>) -> flatbuffers::WIPOffset<TypeAliases<'bldr>> {
+ let mut builder = TypeAliasesBuilder::new(_fbb);
+ builder.add_f64_(args.f64_);
+ builder.add_u64_(args.u64_);
+ builder.add_i64_(args.i64_);
+ if let Some(x) = args.vf64 { builder.add_vf64(x); }
+ if let Some(x) = args.v8 { builder.add_v8(x); }
+ builder.add_f32_(args.f32_);
+ builder.add_u32_(args.u32_);
+ builder.add_i32_(args.i32_);
+ builder.add_u16_(args.u16_);
+ builder.add_i16_(args.i16_);
+ builder.add_u8_(args.u8_);
+ builder.add_i8_(args.i8_);
+ builder.finish()
+ }
+
+ pub const VT_I8_: flatbuffers::VOffsetT = 4;
+ pub const VT_U8_: flatbuffers::VOffsetT = 6;
+ pub const VT_I16_: flatbuffers::VOffsetT = 8;
+ pub const VT_U16_: flatbuffers::VOffsetT = 10;
+ pub const VT_I32_: flatbuffers::VOffsetT = 12;
+ pub const VT_U32_: flatbuffers::VOffsetT = 14;
+ pub const VT_I64_: flatbuffers::VOffsetT = 16;
+ pub const VT_U64_: flatbuffers::VOffsetT = 18;
+ pub const VT_F32_: flatbuffers::VOffsetT = 20;
+ pub const VT_F64_: flatbuffers::VOffsetT = 22;
+ pub const VT_V8: flatbuffers::VOffsetT = 24;
+ pub const VT_VF64: flatbuffers::VOffsetT = 26;
+
+ #[inline]
+ pub fn i8_(&'a self) -> i8 {
+ self._tab.get::<i8>(TypeAliases::VT_I8_, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn u8_(&'a self) -> u8 {
+ self._tab.get::<u8>(TypeAliases::VT_U8_, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn i16_(&'a self) -> i16 {
+ self._tab.get::<i16>(TypeAliases::VT_I16_, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn u16_(&'a self) -> u16 {
+ self._tab.get::<u16>(TypeAliases::VT_U16_, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn i32_(&'a self) -> i32 {
+ self._tab.get::<i32>(TypeAliases::VT_I32_, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn u32_(&'a self) -> u32 {
+ self._tab.get::<u32>(TypeAliases::VT_U32_, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn i64_(&'a self) -> i64 {
+ self._tab.get::<i64>(TypeAliases::VT_I64_, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn u64_(&'a self) -> u64 {
+ self._tab.get::<u64>(TypeAliases::VT_U64_, Some(0)).unwrap()
+ }
+ #[inline]
+ pub fn f32_(&'a self) -> f32 {
+ self._tab.get::<f32>(TypeAliases::VT_F32_, Some(0.0)).unwrap()
+ }
+ #[inline]
+ pub fn f64_(&'a self) -> f64 {
+ self._tab.get::<f64>(TypeAliases::VT_F64_, Some(0.0)).unwrap()
+ }
+ #[inline]
+ pub fn v8(&'a self) -> Option<&'a [i8]> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, i8>>>(TypeAliases::VT_V8, None).map(|v| v.safe_slice())
+ }
+ #[inline]
+ pub fn vf64(&'a self) -> Option<flatbuffers::Vector<'a, f64>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, f64>>>(TypeAliases::VT_VF64, None)
+ }
+}
+
+pub struct TypeAliasesArgs<'a> {
+ pub i8_: i8,
+ pub u8_: u8,
+ pub i16_: i16,
+ pub u16_: u16,
+ pub i32_: i32,
+ pub u32_: u32,
+ pub i64_: i64,
+ pub u64_: u64,
+ pub f32_: f32,
+ pub f64_: f64,
+ pub v8: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , i8>>>,
+ pub vf64: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , f64>>>,
+}
+impl<'a> Default for TypeAliasesArgs<'a> {
+ #[inline]
+ fn default() -> Self {
+ TypeAliasesArgs {
+ i8_: 0,
+ u8_: 0,
+ i16_: 0,
+ u16_: 0,
+ i32_: 0,
+ u32_: 0,
+ i64_: 0,
+ u64_: 0,
+ f32_: 0.0,
+ f64_: 0.0,
+ v8: None,
+ vf64: None,
+ }
+ }
+}
+pub struct TypeAliasesBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> TypeAliasesBuilder<'a, 'b> {
+ #[inline]
+ pub fn add_i8_(&mut self, i8_: i8) {
+ self.fbb_.push_slot::<i8>(TypeAliases::VT_I8_, i8_, 0);
+ }
+ #[inline]
+ pub fn add_u8_(&mut self, u8_: u8) {
+ self.fbb_.push_slot::<u8>(TypeAliases::VT_U8_, u8_, 0);
+ }
+ #[inline]
+ pub fn add_i16_(&mut self, i16_: i16) {
+ self.fbb_.push_slot::<i16>(TypeAliases::VT_I16_, i16_, 0);
+ }
+ #[inline]
+ pub fn add_u16_(&mut self, u16_: u16) {
+ self.fbb_.push_slot::<u16>(TypeAliases::VT_U16_, u16_, 0);
+ }
+ #[inline]
+ pub fn add_i32_(&mut self, i32_: i32) {
+ self.fbb_.push_slot::<i32>(TypeAliases::VT_I32_, i32_, 0);
+ }
+ #[inline]
+ pub fn add_u32_(&mut self, u32_: u32) {
+ self.fbb_.push_slot::<u32>(TypeAliases::VT_U32_, u32_, 0);
+ }
+ #[inline]
+ pub fn add_i64_(&mut self, i64_: i64) {
+ self.fbb_.push_slot::<i64>(TypeAliases::VT_I64_, i64_, 0);
+ }
+ #[inline]
+ pub fn add_u64_(&mut self, u64_: u64) {
+ self.fbb_.push_slot::<u64>(TypeAliases::VT_U64_, u64_, 0);
+ }
+ #[inline]
+ pub fn add_f32_(&mut self, f32_: f32) {
+ self.fbb_.push_slot::<f32>(TypeAliases::VT_F32_, f32_, 0.0);
+ }
+ #[inline]
+ pub fn add_f64_(&mut self, f64_: f64) {
+ self.fbb_.push_slot::<f64>(TypeAliases::VT_F64_, f64_, 0.0);
+ }
+ #[inline]
+ pub fn add_v8(&mut self, v8: flatbuffers::WIPOffset<flatbuffers::Vector<'b , i8>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(TypeAliases::VT_V8, v8);
+ }
+ #[inline]
+ pub fn add_vf64(&mut self, vf64: flatbuffers::WIPOffset<flatbuffers::Vector<'b , f64>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(TypeAliases::VT_VF64, vf64);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TypeAliasesBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ TypeAliasesBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<TypeAliases<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+#[inline]
+pub fn get_root_as_monster<'a>(buf: &'a [u8]) -> Monster<'a> {
+ flatbuffers::get_root::<Monster<'a>>(buf)
+}
+
+#[inline]
+pub fn get_size_prefixed_root_as_monster<'a>(buf: &'a [u8]) -> Monster<'a> {
+ flatbuffers::get_size_prefixed_root::<Monster<'a>>(buf)
+}
+
+pub const MONSTER_IDENTIFIER: &'static str = "MONS";
+
+#[inline]
+pub fn monster_buffer_has_identifier(buf: &[u8]) -> bool {
+ return flatbuffers::buffer_has_identifier(buf, MONSTER_IDENTIFIER, false);
+}
+
+#[inline]
+pub fn monster_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool {
+ return flatbuffers::buffer_has_identifier(buf, MONSTER_IDENTIFIER, true);
+}
+
+pub const MONSTER_EXTENSION: &'static str = "mon";
+
+#[inline]
+pub fn finish_monster_buffer<'a, 'b>(
+ fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ root: flatbuffers::WIPOffset<Monster<'a>>) {
+ fbb.finish(root, Some(MONSTER_IDENTIFIER));
+}
+
+#[inline]
+pub fn finish_size_prefixed_monster_buffer<'a, 'b>(fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<Monster<'a>>) {
+ fbb.finish_size_prefixed(root, Some(MONSTER_IDENTIFIER));
+}
+} // pub mod Example
+} // pub mod MyGame
+
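Usage note, not part of the generated file above: the TypeAliases table follows the same create/accessor pattern as Monster. A minimal sketch of a round trip, assuming it runs inside the rust_usage_test crate added further below (so my_game is in scope); the function name is illustrative:

    fn type_aliases_roundtrip() {
        let mut fbb = flatbuffers::FlatBufferBuilder::new();
        // scalar slots equal to their schema defaults are simply not written
        let ta = my_game::example::TypeAliases::create(&mut fbb, &my_game::example::TypeAliasesArgs {
            i8_: -1,
            u64_: 42,
            ..Default::default()
        });
        fbb.finish(ta, None);
        // follow the generated accessors back out of the finished buffer
        let ta = flatbuffers::get_root::<my_game::example::TypeAliases>(fbb.finished_data());
        assert_eq!(ta.i8_(), -1);
        assert_eq!(ta.u64_(), 42);
    }
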
diff --git a/tests/namespace_test/namespace_test1_generated.rs b/tests/namespace_test/namespace_test1_generated.rs
new file mode 100644
index 00000000..b7e88e9f
--- /dev/null
+++ b/tests/namespace_test/namespace_test1_generated.rs
@@ -0,0 +1,224 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+
+
+pub mod namespace_a {
+ #![allow(dead_code)]
+ #![allow(unused_imports)]
+
+ use std::mem;
+ use std::cmp::Ordering;
+
+ extern crate flatbuffers;
+ use self::flatbuffers::EndianScalar;
+pub mod namespace_b {
+ #![allow(dead_code)]
+ #![allow(unused_imports)]
+
+ use std::mem;
+ use std::cmp::Ordering;
+
+ extern crate flatbuffers;
+ use self::flatbuffers::EndianScalar;
+
+#[allow(non_camel_case_types)]
+#[repr(i8)]
+#[derive(Clone, Copy, PartialEq, Debug)]
+pub enum EnumInNestedNS {
+ A = 0,
+ B = 1,
+ C = 2
+}
+
+const ENUM_MIN_ENUM_IN_NESTED_N_S: i8 = 0;
+const ENUM_MAX_ENUM_IN_NESTED_N_S: i8 = 2;
+
+impl<'a> flatbuffers::Follow<'a> for EnumInNestedNS {
+ type Inner = Self;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::read_scalar_at::<Self>(buf, loc)
+ }
+}
+
+impl flatbuffers::EndianScalar for EnumInNestedNS {
+ #[inline]
+ fn to_little_endian(self) -> Self {
+ let n = i8::to_le(self as i8);
+ let p = &n as *const i8 as *const EnumInNestedNS;
+ unsafe { *p }
+ }
+ #[inline]
+ fn from_little_endian(self) -> Self {
+ let n = i8::from_le(self as i8);
+ let p = &n as *const i8 as *const EnumInNestedNS;
+ unsafe { *p }
+ }
+}
+
+impl flatbuffers::Push for EnumInNestedNS {
+ type Output = EnumInNestedNS;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ flatbuffers::emplace_scalar::<EnumInNestedNS>(dst, *self);
+ }
+}
+
+#[allow(non_camel_case_types)]
+const ENUM_VALUES_ENUM_IN_NESTED_N_S:[EnumInNestedNS; 3] = [
+ EnumInNestedNS::A,
+ EnumInNestedNS::B,
+ EnumInNestedNS::C
+];
+
+#[allow(non_camel_case_types)]
+const ENUM_NAMES_ENUM_IN_NESTED_N_S:[&'static str; 3] = [
+ "A",
+ "B",
+ "C"
+];
+
+pub fn enum_name_enum_in_nested_n_s(e: EnumInNestedNS) -> &'static str {
+ let index: usize = e as usize;
+ ENUM_NAMES_ENUM_IN_NESTED_N_S[index]
+}
+
+// struct StructInNestedNS, aligned to 4
+#[repr(C, align(4))]
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub struct StructInNestedNS {
+ a_: i32,
+ b_: i32,
+} // pub struct StructInNestedNS
+impl flatbuffers::SafeSliceAccess for StructInNestedNS {}
+impl<'a> flatbuffers::Follow<'a> for StructInNestedNS {
+ type Inner = &'a StructInNestedNS;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ <&'a StructInNestedNS>::follow(buf, loc)
+ }
+}
+impl<'a> flatbuffers::Follow<'a> for &'a StructInNestedNS {
+ type Inner = &'a StructInNestedNS;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::follow_cast_ref::<StructInNestedNS>(buf, loc)
+ }
+}
+impl<'b> flatbuffers::Push for StructInNestedNS {
+ type Output = StructInNestedNS;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(self as *const StructInNestedNS as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+impl<'b> flatbuffers::Push for &'b StructInNestedNS {
+ type Output = StructInNestedNS;
+
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(*self as *const StructInNestedNS as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+
+
+impl StructInNestedNS {
+ pub fn new<'a>(_a: i32, _b: i32) -> Self {
+ StructInNestedNS {
+ a_: _a.to_little_endian(),
+ b_: _b.to_little_endian(),
+
+ }
+ }
+ pub fn a<'a>(&'a self) -> i32 {
+ self.a_.from_little_endian()
+ }
+ pub fn b<'a>(&'a self) -> i32 {
+ self.b_.from_little_endian()
+ }
+}
+
+pub enum TableInNestedNSOffset {}
+#[derive(Copy, Clone, Debug, PartialEq)]
+
+pub struct TableInNestedNS<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for TableInNestedNS<'a> {
+ type Inner = TableInNestedNS<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self {
+ _tab: flatbuffers::Table { buf: buf, loc: loc },
+ }
+ }
+}
+
+impl<'a> TableInNestedNS<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ TableInNestedNS {
+ _tab: table,
+ }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args TableInNestedNSArgs) -> flatbuffers::WIPOffset<TableInNestedNS<'bldr>> {
+ let mut builder = TableInNestedNSBuilder::new(_fbb);
+ builder.add_foo(args.foo);
+ builder.finish()
+ }
+
+ pub const VT_FOO: flatbuffers::VOffsetT = 4;
+
+ #[inline]
+ pub fn foo(&'a self) -> i32 {
+ self._tab.get::<i32>(TableInNestedNS::VT_FOO, Some(0)).unwrap()
+ }
+}
+
+pub struct TableInNestedNSArgs {
+ pub foo: i32,
+}
+impl<'a> Default for TableInNestedNSArgs {
+ #[inline]
+ fn default() -> Self {
+ TableInNestedNSArgs {
+ foo: 0,
+ }
+ }
+}
+pub struct TableInNestedNSBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> TableInNestedNSBuilder<'a, 'b> {
+ #[inline]
+ pub fn add_foo(&mut self, foo: i32) {
+ self.fbb_.push_slot::<i32>(TableInNestedNS::VT_FOO, foo, 0);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TableInNestedNSBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ TableInNestedNSBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<TableInNestedNS<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+} // pub mod NamespaceB
+} // pub mod NamespaceA
+
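Usage note, not part of the generated file above: the nested FlatBuffers namespace becomes the module path namespace_a::namespace_b. A minimal sketch of the struct constructor and the enum-name helper, assuming the file is included as a module named namespace_test1_generated (that module name and the function name are illustrative):

    use namespace_test1_generated::namespace_a::namespace_b;

    fn nested_namespace_smoke() {
        // fixed-size struct: plain constructor plus endian-aware accessors
        let s = namespace_b::StructInNestedNS::new(1, 2);
        assert_eq!((s.a(), s.b()), (1, 2));
        // name lookup helper generated next to the enum
        assert_eq!(namespace_b::enum_name_enum_in_nested_n_s(namespace_b::EnumInNestedNS::B), "B");
    }
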
diff --git a/tests/namespace_test/namespace_test2_generated.rs b/tests/namespace_test/namespace_test2_generated.rs
new file mode 100644
index 00000000..c516d80e
--- /dev/null
+++ b/tests/namespace_test/namespace_test2_generated.rs
@@ -0,0 +1,291 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+
+
+pub mod namespace_a {
+ #![allow(dead_code)]
+ #![allow(unused_imports)]
+
+ use std::mem;
+ use std::cmp::Ordering;
+
+ extern crate flatbuffers;
+ use self::flatbuffers::EndianScalar;
+
+pub enum TableInFirstNSOffset {}
+#[derive(Copy, Clone, Debug, PartialEq)]
+
+pub struct TableInFirstNS<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for TableInFirstNS<'a> {
+ type Inner = TableInFirstNS<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self {
+ _tab: flatbuffers::Table { buf: buf, loc: loc },
+ }
+ }
+}
+
+impl<'a> TableInFirstNS<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ TableInFirstNS {
+ _tab: table,
+ }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args TableInFirstNSArgs) -> flatbuffers::WIPOffset<TableInFirstNS<'bldr>> {
+ let mut builder = TableInFirstNSBuilder::new(_fbb);
+ if let Some(x) = args.foo_struct { builder.add_foo_struct(x); }
+ if let Some(x) = args.foo_table { builder.add_foo_table(x); }
+ builder.add_foo_enum(args.foo_enum);
+ builder.finish()
+ }
+
+ pub const VT_FOO_TABLE: flatbuffers::VOffsetT = 4;
+ pub const VT_FOO_ENUM: flatbuffers::VOffsetT = 6;
+ pub const VT_FOO_STRUCT: flatbuffers::VOffsetT = 8;
+
+ #[inline]
+ pub fn foo_table(&'a self) -> Option<namespace_b::TableInNestedNS<'a>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<namespace_b::TableInNestedNS<'a>>>(TableInFirstNS::VT_FOO_TABLE, None)
+ }
+ #[inline]
+ pub fn foo_enum(&'a self) -> namespace_b::EnumInNestedNS {
+ self._tab.get::<namespace_b::EnumInNestedNS>(TableInFirstNS::VT_FOO_ENUM, Some(namespace_b::EnumInNestedNS::A)).unwrap()
+ }
+ #[inline]
+ pub fn foo_struct(&'a self) -> Option<&'a namespace_b::StructInNestedNS> {
+ self._tab.get::<namespace_b::StructInNestedNS>(TableInFirstNS::VT_FOO_STRUCT, None)
+ }
+}
+
+pub struct TableInFirstNSArgs<'a> {
+ pub foo_table: Option<flatbuffers::WIPOffset<namespace_b::TableInNestedNS<'a >>>,
+ pub foo_enum: namespace_b::EnumInNestedNS,
+ pub foo_struct: Option<&'a namespace_b::StructInNestedNS>,
+}
+impl<'a> Default for TableInFirstNSArgs<'a> {
+ #[inline]
+ fn default() -> Self {
+ TableInFirstNSArgs {
+ foo_table: None,
+ foo_enum: namespace_b::EnumInNestedNS::A,
+ foo_struct: None,
+ }
+ }
+}
+pub struct TableInFirstNSBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> TableInFirstNSBuilder<'a, 'b> {
+ #[inline]
+ pub fn add_foo_table(&mut self, foo_table: flatbuffers::WIPOffset<namespace_b::TableInNestedNS<'b >>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<namespace_b::TableInNestedNS>>(TableInFirstNS::VT_FOO_TABLE, foo_table);
+ }
+ #[inline]
+ pub fn add_foo_enum(&mut self, foo_enum: namespace_b::EnumInNestedNS) {
+ self.fbb_.push_slot::<namespace_b::EnumInNestedNS>(TableInFirstNS::VT_FOO_ENUM, foo_enum, namespace_b::EnumInNestedNS::A);
+ }
+ #[inline]
+ pub fn add_foo_struct(&mut self, foo_struct: &'b namespace_b::StructInNestedNS) {
+ self.fbb_.push_slot_always::<&namespace_b::StructInNestedNS>(TableInFirstNS::VT_FOO_STRUCT, foo_struct);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TableInFirstNSBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ TableInFirstNSBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<TableInFirstNS<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+pub enum SecondTableInAOffset {}
+#[derive(Copy, Clone, Debug, PartialEq)]
+
+pub struct SecondTableInA<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for SecondTableInA<'a> {
+ type Inner = SecondTableInA<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self {
+ _tab: flatbuffers::Table { buf: buf, loc: loc },
+ }
+ }
+}
+
+impl<'a> SecondTableInA<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ SecondTableInA {
+ _tab: table,
+ }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args SecondTableInAArgs) -> flatbuffers::WIPOffset<SecondTableInA<'bldr>> {
+ let mut builder = SecondTableInABuilder::new(_fbb);
+ if let Some(x) = args.refer_to_c { builder.add_refer_to_c(x); }
+ builder.finish()
+ }
+
+ pub const VT_REFER_TO_C: flatbuffers::VOffsetT = 4;
+
+ #[inline]
+ pub fn refer_to_c(&'a self) -> Option<super::namespace_c::TableInC<'a>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<super::namespace_c::TableInC<'a>>>(SecondTableInA::VT_REFER_TO_C, None)
+ }
+}
+
+pub struct SecondTableInAArgs<'a> {
+ pub refer_to_c: Option<flatbuffers::WIPOffset<super::namespace_c::TableInC<'a >>>,
+}
+impl<'a> Default for SecondTableInAArgs<'a> {
+ #[inline]
+ fn default() -> Self {
+ SecondTableInAArgs {
+ refer_to_c: None,
+ }
+ }
+}
+pub struct SecondTableInABuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> SecondTableInABuilder<'a, 'b> {
+ #[inline]
+ pub fn add_refer_to_c(&mut self, refer_to_c: flatbuffers::WIPOffset<super::namespace_c::TableInC<'b >>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<super::namespace_c::TableInC>>(SecondTableInA::VT_REFER_TO_C, refer_to_c);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> SecondTableInABuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ SecondTableInABuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<SecondTableInA<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+} // pub mod NamespaceA
+
+pub mod namespace_c {
+ #![allow(dead_code)]
+ #![allow(unused_imports)]
+
+ use std::mem;
+ use std::cmp::Ordering;
+
+ extern crate flatbuffers;
+ use self::flatbuffers::EndianScalar;
+
+pub enum TableInCOffset {}
+#[derive(Copy, Clone, Debug, PartialEq)]
+
+pub struct TableInC<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for TableInC<'a> {
+ type Inner = TableInC<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self {
+ _tab: flatbuffers::Table { buf: buf, loc: loc },
+ }
+ }
+}
+
+impl<'a> TableInC<'a> {
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ TableInC {
+ _tab: table,
+ }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args TableInCArgs) -> flatbuffers::WIPOffset<TableInC<'bldr>> {
+ let mut builder = TableInCBuilder::new(_fbb);
+ if let Some(x) = args.refer_to_a2 { builder.add_refer_to_a2(x); }
+ if let Some(x) = args.refer_to_a1 { builder.add_refer_to_a1(x); }
+ builder.finish()
+ }
+
+ pub const VT_REFER_TO_A1: flatbuffers::VOffsetT = 4;
+ pub const VT_REFER_TO_A2: flatbuffers::VOffsetT = 6;
+
+ #[inline]
+ pub fn refer_to_a1(&'a self) -> Option<super::namespace_a::TableInFirstNS<'a>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<super::namespace_a::TableInFirstNS<'a>>>(TableInC::VT_REFER_TO_A1, None)
+ }
+ #[inline]
+ pub fn refer_to_a2(&'a self) -> Option<super::namespace_a::SecondTableInA<'a>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<super::namespace_a::SecondTableInA<'a>>>(TableInC::VT_REFER_TO_A2, None)
+ }
+}
+
+pub struct TableInCArgs<'a> {
+ pub refer_to_a1: Option<flatbuffers::WIPOffset<super::namespace_a::TableInFirstNS<'a >>>,
+ pub refer_to_a2: Option<flatbuffers::WIPOffset<super::namespace_a::SecondTableInA<'a >>>,
+}
+impl<'a> Default for TableInCArgs<'a> {
+ #[inline]
+ fn default() -> Self {
+ TableInCArgs {
+ refer_to_a1: None,
+ refer_to_a2: None,
+ }
+ }
+}
+pub struct TableInCBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> TableInCBuilder<'a, 'b> {
+ #[inline]
+ pub fn add_refer_to_a1(&mut self, refer_to_a1: flatbuffers::WIPOffset<super::namespace_a::TableInFirstNS<'b >>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<super::namespace_a::TableInFirstNS>>(TableInC::VT_REFER_TO_A1, refer_to_a1);
+ }
+ #[inline]
+ pub fn add_refer_to_a2(&mut self, refer_to_a2: flatbuffers::WIPOffset<super::namespace_a::SecondTableInA<'b >>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<super::namespace_a::SecondTableInA>>(TableInC::VT_REFER_TO_A2, refer_to_a2);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TableInCBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ TableInCBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<TableInC<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+} // pub mod NamespaceC
+
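Usage note, not part of the generated file above: tables in namespace_c reach back into namespace_a through super::, so a TableInC can hold an offset to a TableInFirstNS. A minimal sketch, assuming both generated modules and the flatbuffers runtime live in one crate (the module and function names are illustrative):

    use namespace_test2_generated::{namespace_a, namespace_c};

    fn cross_namespace_reference() {
        let mut fbb = flatbuffers::FlatBufferBuilder::new();
        let a = namespace_a::TableInFirstNS::create(&mut fbb, &Default::default());
        let c = namespace_c::TableInC::create(&mut fbb, &namespace_c::TableInCArgs {
            refer_to_a1: Some(a),
            ..Default::default()
        });
        fbb.finish(c, None);
        let root = flatbuffers::get_root::<namespace_c::TableInC>(fbb.finished_data());
        assert!(root.refer_to_a1().is_some());
    }
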
diff --git a/tests/rust_usage_test/Cargo.lock b/tests/rust_usage_test/Cargo.lock
new file mode 100644
index 00000000..b0edc932
--- /dev/null
+++ b/tests/rust_usage_test/Cargo.lock
@@ -0,0 +1,285 @@
+[[package]]
+name = "aho-corasick"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "atty"
+version = "0.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
+ "termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bencher"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "bitflags"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "cfg-if"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "env_logger"
+version = "0.5.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "atty 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "flatbuffers"
+version = "0.1.0"
+dependencies = [
+ "smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "fuchsia-zircon"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "fuchsia-zircon-sys"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "humantime"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "libc"
+version = "0.2.42"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "log"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "memchr"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "quick-error"
+version = "1.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "quickcheck"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "env_logger 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rand"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.1.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "redox_termios"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "regex"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "aho-corasick 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex-syntax 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rust_usage_test"
+version = "0.1.0"
+dependencies = [
+ "bencher 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flatbuffers 0.1.0",
+ "quickcheck 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "smallvec"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "termcolor"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "wincolor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "termion"
+version = "1.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "thread_local"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "ucd-util"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "unreachable"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "utf8-ranges"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "void"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "winapi"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "wincolor"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[metadata]
+"checksum aho-corasick 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f0ba20154ea1f47ce2793322f049c5646cc6d0fa9759d5f333f286e507bf8080"
+"checksum atty 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "2fc4a1aa4c24c0718a250f0681885c1af91419d242f29eb8f2ab28502d80dbd1"
+"checksum bencher 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7dfdb4953a096c551ce9ace855a604d702e6e62d77fac690575ae347571717f5"
+"checksum bitflags 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d0c54bb8f454c567f21197eefcdbf5679d0bd99f2ddbe52e84c77061952e6789"
+"checksum cfg-if 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "efe5c877e17a9c717a0bf3613b2709f723202c4e4675cc8f12926ded29bcb17e"
+"checksum env_logger 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0e6e40ebb0e66918a37b38c7acab4e10d299e0463fe2af5d29b9cc86710cfd2a"
+"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
+"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
+"checksum humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e"
+"checksum lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e6412c5e2ad9584b0b8e979393122026cdd6d2a80b933f890dcd694ddbe73739"
+"checksum libc 0.2.42 (registry+https://github.com/rust-lang/crates.io-index)" = "b685088df2b950fccadf07a7187c8ef846a959c142338a48f9dc0b94517eb5f1"
+"checksum log 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "61bd98ae7f7b754bc53dca7d44b604f733c6bba044ea6f41bc8d89272d8161d2"
+"checksum memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "796fba70e76612589ed2ce7f45282f5af869e0fdd7cc6199fa1aa1f1d591ba9d"
+"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
+"checksum quickcheck 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c01babc5ffd48a2a83744b3024814bb46dfd4f2a4705ccb44b1b60e644fdcab7"
+"checksum rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "eba5f8cb59cc50ed56be8880a5c7b496bfd9bd26394e176bc67884094145c2c5"
+"checksum redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1"
+"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
+"checksum regex 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13c93d55961981ba9226a213b385216f83ab43bd6ac53ab16b2eeb47e337cf4e"
+"checksum regex-syntax 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05b06a75f5217880fc5e905952a42750bf44787e56a6c6d6852ed0992f5e1d54"
+"checksum smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "153ffa32fd170e9944f7e0838edf824a754ec4c1fc64746fcc9fe1f8fa602e5d"
+"checksum termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "adc4587ead41bf016f11af03e55a624c06568b5a19db4e90fde573d805074f83"
+"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
+"checksum thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "279ef31c19ededf577bfd12dfae728040a21f635b06a24cd670ff510edd38963"
+"checksum ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd2be2d6639d0f8fe6cdda291ad456e23629558d466e2789d2c3e9892bda285d"
+"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
+"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
+"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
+"checksum winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "773ef9dcc5f24b7d850d0ff101e542ff24c3b090a9768e03ff889fdef41f00fd"
+"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+"checksum wincolor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb06499a3a4d44302791052df005d5232b927ed1a9658146d842165c4de7767"
diff --git a/tests/rust_usage_test/Cargo.toml b/tests/rust_usage_test/Cargo.toml
new file mode 100644
index 00000000..9392b12c
--- /dev/null
+++ b/tests/rust_usage_test/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "rust_usage_test"
+version = "0.1.0"
+authors = ["Robert Winslow <hello@rwinslow.com>", "FlatBuffers Maintainers"]
+
+[dependencies]
+flatbuffers = { path = "../../rust/flatbuffers" }
+
+[[bin]]
+name = "monster_example"
+path = "bin/monster_example.rs"
+
+
+[dev-dependencies]
+quickcheck = "0.6"
+# TODO(rw): look into moving to criterion.rs
+bencher = "0.1.5"
+
+[[bench]]
+# setup for bencher
+name = "flatbuffers_benchmarks"
+harness = false
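
A note on the harness = false setting: it hands the benchmark entry point over to the bencher crate, whose benchmark_main! macro in the file added next generates main, so the benchmarks run through the standard cargo bench invocation from tests/rust_usage_test.
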
diff --git a/tests/rust_usage_test/benches/flatbuffers_benchmarks.rs b/tests/rust_usage_test/benches/flatbuffers_benchmarks.rs
new file mode 100644
index 00000000..3ad45c28
--- /dev/null
+++ b/tests/rust_usage_test/benches/flatbuffers_benchmarks.rs
@@ -0,0 +1,218 @@
+/*
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#[macro_use]
+extern crate bencher;
+use bencher::Bencher;
+
+extern crate flatbuffers;
+
+#[path = "../../monster_test_generated.rs"]
+mod monster_test_generated;
+pub use monster_test_generated::my_game;
+
+fn traverse_canonical_buffer(bench: &mut Bencher) {
+ let owned_data = {
+ let mut builder = &mut flatbuffers::FlatBufferBuilder::new();
+ create_serialized_example_with_generated_code(&mut builder, true);
+ builder.finished_data().to_vec()
+ };
+ let data = &owned_data[..];
+ let n = data.len() as u64;
+ bench.iter(|| {
+ traverse_serialized_example_with_generated_code(data);
+ });
+ bench.bytes = n;
+}
+
+fn create_canonical_buffer_then_reset(bench: &mut Bencher) {
+ let mut builder = &mut flatbuffers::FlatBufferBuilder::new();
+ // warmup
+ create_serialized_example_with_generated_code(&mut builder, true);
+ let n = builder.finished_data().len() as u64;
+ builder.reset();
+
+ bench.iter(|| {
+ let _ = create_serialized_example_with_generated_code(&mut builder, true);
+ builder.reset();
+ });
+
+ bench.bytes = n;
+}
+
+#[inline(always)]
+fn create_serialized_example_with_generated_code(builder: &mut flatbuffers::FlatBufferBuilder, finish: bool) -> usize{
+ let s0 = builder.create_string("test1");
+ let s1 = builder.create_string("test2");
+ let t0_name = builder.create_string("Barney");
+ let t1_name = builder.create_string("Fred");
+ let t2_name = builder.create_string("Wilma");
+ let t0 = my_game::example::Monster::create(builder, &my_game::example::MonsterArgs{
+ hp: 1000,
+ name: Some(t0_name),
+ ..Default::default()
+ });
+ let t1 = my_game::example::Monster::create(builder, &my_game::example::MonsterArgs{
+ name: Some(t1_name),
+ ..Default::default()
+ });
+ let t2 = my_game::example::Monster::create(builder, &my_game::example::MonsterArgs{
+ name: Some(t2_name),
+ ..Default::default()
+ });
+ let mon = {
+ let name = builder.create_string("MyMonster");
+ let fred_name = builder.create_string("Fred");
+ let inventory = builder.create_vector_direct(&[0u8, 1, 2, 3, 4]);
+ let test4 = builder.create_vector_direct(&[my_game::example::Test::new(10, 20),
+ my_game::example::Test::new(30, 40)]);
+ let pos = my_game::example::Vec3::new(1.0, 2.0, 3.0, 3.0, my_game::example::Color::Green, &my_game::example::Test::new(5i16, 6i8));
+ let args = my_game::example::MonsterArgs{
+ hp: 80,
+ mana: 150,
+ name: Some(name),
+ pos: Some(&pos),
+ test_type: my_game::example::Any::Monster,
+ test: Some(my_game::example::Monster::create(builder, &my_game::example::MonsterArgs{
+ name: Some(fred_name),
+ ..Default::default()
+ }).as_union_value()),
+ inventory: Some(inventory),
+ test4: Some(test4),
+ testarrayofstring: Some(builder.create_vector(&[s0, s1])),
+ testarrayoftables: Some(builder.create_vector(&[t0, t1, t2])),
+ ..Default::default()
+ };
+ my_game::example::Monster::create(builder, &args)
+ };
+ if finish {
+ my_game::example::finish_monster_buffer(builder, mon);
+ }
+
+ builder.finished_data().len()
+
+ // make it do some work
+ // if builder.finished_data().len() == 0 { panic!("bad benchmark"); }
+}
+
+#[inline(always)]
+fn blackbox<T>(t: T) -> T {
+ // encapsulate this in case we need to turn it into a noop
+ bencher::black_box(t)
+}
+
+#[inline(always)]
+fn traverse_serialized_example_with_generated_code(bytes: &[u8]) {
+ let m = my_game::example::get_root_as_monster(bytes);
+ blackbox(m.hp());
+ blackbox(m.mana());
+ blackbox(m.name());
+ let pos = m.pos().unwrap();
+ blackbox(pos.x());
+ blackbox(pos.y());
+ blackbox(pos.z());
+ blackbox(pos.test1());
+ blackbox(pos.test2());
+ let pos_test3 = pos.test3();
+ blackbox(pos_test3.a());
+ blackbox(pos_test3.b());
+ blackbox(m.test_type());
+ let table2 = m.test().unwrap();
+ let monster2 = my_game::example::Monster::init_from_table(table2);
+ blackbox(monster2.name());
+ blackbox(m.inventory());
+ blackbox(m.test4());
+ let testarrayoftables = m.testarrayoftables().unwrap();
+ blackbox(testarrayoftables.get(0).hp());
+ blackbox(testarrayoftables.get(0).name());
+ blackbox(testarrayoftables.get(1).name());
+ blackbox(testarrayoftables.get(2).name());
+ let testarrayofstring = m.testarrayofstring().unwrap();
+ blackbox(testarrayofstring.get(0));
+ blackbox(testarrayofstring.get(1));
+}
+
+fn create_string_10(bench: &mut Bencher) {
+ let builder = &mut flatbuffers::FlatBufferBuilder::new_with_capacity(1<<20);
+ let mut i = 0;
+ bench.iter(|| {
+ builder.create_string("foobarbaz"); // zero-terminated -> 10 bytes
+ i += 1;
+ if i == 10000 {
+ builder.reset();
+ i = 0;
+ }
+ });
+
+ bench.bytes = 10;
+}
+
+fn create_string_100(bench: &mut Bencher) {
+ let builder = &mut flatbuffers::FlatBufferBuilder::new_with_capacity(1<<20);
+ let s_owned = (0..99).map(|_| "x").collect::<String>();
+ let s: &str = &s_owned;
+
+ let mut i = 0;
+ bench.iter(|| {
+ builder.create_string(s); // zero-terminated -> 100 bytes
+ i += 1;
+ if i == 1000 {
+ builder.reset();
+ i = 0;
+ }
+ });
+
+ bench.bytes = s.len() as u64;
+}
+
+fn create_byte_vector_100_naive(bench: &mut Bencher) {
+ let builder = &mut flatbuffers::FlatBufferBuilder::new_with_capacity(1<<20);
+ let v_owned = (0u8..100).map(|i| i).collect::<Vec<u8>>();
+ let v: &[u8] = &v_owned;
+
+ let mut i = 0;
+ bench.iter(|| {
+ builder.create_vector(v); // zero-terminated -> 100 bytes
+ i += 1;
+ if i == 10000 {
+ builder.reset();
+ i = 0;
+ }
+ });
+
+ bench.bytes = v.len() as u64;
+}
+
+fn create_byte_vector_100_optimal(bench: &mut Bencher) {
+ let builder = &mut flatbuffers::FlatBufferBuilder::new_with_capacity(1<<20);
+ let v_owned = (0u8..100).map(|i| i).collect::<Vec<u8>>();
+ let v: &[u8] = &v_owned;
+
+ let mut i = 0;
+ bench.iter(|| {
+ builder.create_vector_direct(v);
+ i += 1;
+ if i == 10000 {
+ builder.reset();
+ i = 0;
+ }
+ });
+
+ bench.bytes = v.len() as u64;
+}
+
+benchmark_group!(benches, create_byte_vector_100_naive, create_byte_vector_100_optimal, traverse_canonical_buffer, create_canonical_buffer_then_reset, create_string_10, create_string_100);
+benchmark_main!(benches);
diff --git a/tests/rust_usage_test/bin/monster_example.rs b/tests/rust_usage_test/bin/monster_example.rs
new file mode 100644
index 00000000..e415a952
--- /dev/null
+++ b/tests/rust_usage_test/bin/monster_example.rs
@@ -0,0 +1,18 @@
+extern crate flatbuffers;
+
+#[path = "../../monster_test_generated.rs"]
+mod monster_test_generated;
+pub use monster_test_generated::my_game;
+
+use std::io::Read;
+
+fn main() {
+ let mut f = std::fs::File::open("../monsterdata_test.mon").unwrap();
+ let mut buf = Vec::new();
+ f.read_to_end(&mut buf).expect("file reading failed");
+
+ let monster = my_game::example::get_root_as_monster(&buf[..]);
+ println!("{}", monster.hp()); // `80`
+ println!("{}", monster.mana()); // default value of `150`
+ println!("{:?}", monster.name()); // Some("MyMonster")
+}
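
A note on the example binary above: it reads ../monsterdata_test.mon, the canonical Monster buffer checked in under tests/, so running it with cargo run --bin monster_example from tests/rust_usage_test should print the three values noted in its comments (80, 150, Some("MyMonster")).
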
diff --git a/tests/rust_usage_test/tests/integration_test.rs b/tests/rust_usage_test/tests/integration_test.rs
new file mode 100644
index 00000000..54c63592
--- /dev/null
+++ b/tests/rust_usage_test/tests/integration_test.rs
@@ -0,0 +1,2639 @@
+/*
+ *
+ * Copyright 2018 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+extern crate quickcheck;
+
+extern crate flatbuffers;
+
+#[path = "../../monster_test_generated.rs"]
+mod monster_test_generated;
+pub use monster_test_generated::my_game;
+
+// Include simple random number generator to ensure results will be the
+// same across platforms.
+// http://en.wikipedia.org/wiki/Park%E2%80%93Miller_random_number_generator
+struct LCG(u64);
+impl LCG {
+ fn new() -> Self {
+ LCG { 0: 48271 }
+ }
+ fn next(&mut self) -> u64 {
+ let old = self.0;
+ self.0 = (self.0 * 279470273u64) % 4294967291u64;
+ old
+ }
+ fn reset(&mut self) {
+ self.0 = 48271
+ }
+}
+
+// test helper macro to return an error if two expressions are not equal
+macro_rules! check_eq {
+ ($field_call:expr, $want:expr) => (
+ if $field_call == $want {
+ Ok(())
+ } else {
+ Err(stringify!($field_call))
+ }
+ )
+}
+
+#[test]
+fn macro_check_eq() {
+ assert!(check_eq!(1, 1).is_ok());
+ assert!(check_eq!(1, 2).is_err());
+}
+
+// test helper macro to return an error if two expressions are equal
+macro_rules! check_is_some {
+ ($field_call:expr) => (
+ if $field_call.is_some() {
+ Ok(())
+ } else {
+ Err(stringify!($field_call))
+ }
+ )
+}
+
+#[test]
+fn macro_check_is_some() {
+ let some: Option<usize> = Some(0);
+ let none: Option<usize> = None;
+ assert!(check_is_some!(some).is_ok());
+ assert!(check_is_some!(none).is_err());
+}
+
+
+fn create_serialized_example_with_generated_code(builder: &mut flatbuffers::FlatBufferBuilder) {
+ let mon = {
+ let s0 = builder.create_string("test1");
+ let s1 = builder.create_string("test2");
+ let fred_name = builder.create_string("Fred");
+
+ // can't inline creation of this Vec3 because we refer to it by reference, so it must live
+ // long enough to be used by MonsterArgs.
+ let pos = my_game::example::Vec3::new(1.0, 2.0, 3.0, 3.0, my_game::example::Color::Green, &my_game::example::Test::new(5i16, 6i8));
+
+ let args = my_game::example::MonsterArgs{
+ hp: 80,
+ mana: 150,
+ name: Some(builder.create_string("MyMonster")),
+ pos: Some(&pos),
+ test_type: my_game::example::Any::Monster,
+ test: Some(my_game::example::Monster::create(builder, &my_game::example::MonsterArgs{
+ name: Some(fred_name),
+ ..Default::default()
+ }).as_union_value()),
+ inventory: Some(builder.create_vector_direct(&[0u8, 1, 2, 3, 4][..])),
+ test4: Some(builder.create_vector_direct(&[my_game::example::Test::new(10, 20),
+ my_game::example::Test::new(30, 40)])),
+ testarrayofstring: Some(builder.create_vector(&[s0, s1])),
+ ..Default::default()
+ };
+ my_game::example::Monster::create(builder, &args)
+ };
+ my_game::example::finish_monster_buffer(builder, mon);
+}
+
+fn create_serialized_example_with_library_code(builder: &mut flatbuffers::FlatBufferBuilder) {
+ let nested_union_mon = {
+ let name = builder.create_string("Fred");
+ let table_start = builder.start_table();
+ builder.push_slot_always(my_game::example::Monster::VT_NAME, name);
+ builder.end_table(table_start)
+ };
+ let pos = my_game::example::Vec3::new(1.0, 2.0, 3.0, 3.0, my_game::example::Color::Green, &my_game::example::Test::new(5i16, 6i8));
+ let inv = builder.create_vector(&[0u8, 1, 2, 3, 4]);
+
+ let test4 = builder.create_vector(&[my_game::example::Test::new(10, 20),
+ my_game::example::Test::new(30, 40)][..]);
+
+ let name = builder.create_string("MyMonster");
+ let testarrayofstring = builder.create_vector_of_strings(&["test1", "test2"][..]);
+
+ // begin building
+
+ let table_start = builder.start_table();
+ builder.push_slot(my_game::example::Monster::VT_HP, 80i16, 100);
+ builder.push_slot_always(my_game::example::Monster::VT_NAME, name);
+ builder.push_slot_always(my_game::example::Monster::VT_POS, &pos);
+ builder.push_slot(my_game::example::Monster::VT_TEST_TYPE, my_game::example::Any::Monster, my_game::example::Any::NONE);
+ builder.push_slot_always(my_game::example::Monster::VT_TEST, nested_union_mon);
+ builder.push_slot_always(my_game::example::Monster::VT_INVENTORY, inv);
+ builder.push_slot_always(my_game::example::Monster::VT_TEST4, test4);
+ builder.push_slot_always(my_game::example::Monster::VT_TESTARRAYOFSTRING, testarrayofstring);
+ let root = builder.end_table(table_start);
+ builder.finish(root, Some(my_game::example::MONSTER_IDENTIFIER));
+}
+
+fn serialized_example_is_accessible_and_correct(bytes: &[u8], identifier_required: bool, size_prefixed: bool) -> Result<(), &'static str> {
+
+ if identifier_required {
+ let correct = if size_prefixed {
+ my_game::example::monster_size_prefixed_buffer_has_identifier(bytes)
+ } else {
+ my_game::example::monster_buffer_has_identifier(bytes)
+ };
+ check_eq!(correct, true)?;
+ }
+
+ let m = if size_prefixed {
+ my_game::example::get_size_prefixed_root_as_monster(bytes)
+ } else {
+ my_game::example::get_root_as_monster(bytes)
+ };
+
+ check_eq!(m.hp(), 80)?;
+ check_eq!(m.mana(), 150)?;
+ check_eq!(m.name(), Some("MyMonster"))?;
+ check_is_some!(m.name())?;
+
+ let pos = m.pos().unwrap();
+ check_eq!(pos.x(), 1.0f32)?;
+ check_eq!(pos.y(), 2.0f32)?;
+ check_eq!(pos.z(), 3.0f32)?;
+ check_eq!(pos.test1(), 3.0f64)?;
+ check_eq!(pos.test2(), my_game::example::Color::Green)?;
+
+ let pos_test3 = pos.test3();
+ check_eq!(pos_test3.a(), 5i16)?;
+ check_eq!(pos_test3.b(), 6i8)?;
+
+ check_eq!(m.test_type(), my_game::example::Any::Monster)?;
+ check_is_some!(m.test())?;
+ let table2 = m.test().unwrap();
+ let monster2 = my_game::example::Monster::init_from_table(table2);
+
+ check_eq!(monster2.name(), Some("Fred"))?;
+
+ check_is_some!(m.inventory())?;
+ let inv = m.inventory().unwrap();
+ check_eq!(inv.len(), 5)?;
+ check_eq!(inv.iter().sum::<u8>(), 10u8)?;
+
+ check_is_some!(m.test4())?;
+ let test4 = m.test4().unwrap();
+ check_eq!(test4.len(), 2)?;
+ check_eq!(test4[0].a() as i32 + test4[0].b() as i32 +
+ test4[1].a() as i32 + test4[1].b() as i32, 100)?;
+
+ check_is_some!(m.testarrayofstring())?;
+ let testarrayofstring = m.testarrayofstring().unwrap();
+ check_eq!(testarrayofstring.len(), 2)?;
+ check_eq!(testarrayofstring.get(0), "test1")?;
+ check_eq!(testarrayofstring.get(1), "test2")?;
+
+ Ok(())
+}
+
+#[test]
+fn builder_initializes_with_maximum_buffer_size() {
+ flatbuffers::FlatBufferBuilder::new_with_capacity(flatbuffers::FLATBUFFERS_MAX_BUFFER_SIZE);
+}
+
+#[should_panic]
+#[test]
+fn builder_abort_with_greater_than_maximum_buffer_size() {
+ flatbuffers::FlatBufferBuilder::new_with_capacity(flatbuffers::FLATBUFFERS_MAX_BUFFER_SIZE+1);
+}
+
+#[test]
+fn builder_collapses_into_vec() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ create_serialized_example_with_generated_code(&mut b);
+ let (backing_buf, head) = b.collapse();
+ serialized_example_is_accessible_and_correct(&backing_buf[head..], true, false).unwrap();
+}
+
+#[cfg(test)]
+mod generated_constants {
+ extern crate flatbuffers;
+ use super::my_game;
+
+ #[test]
+ fn monster_identifier() {
+ assert_eq!("MONS", my_game::example::MONSTER_IDENTIFIER);
+ }
+
+ #[test]
+ fn monster_file_extension() {
+ assert_eq!("mon", my_game::example::MONSTER_EXTENSION);
+ }
+}
+
+#[cfg(test)]
+mod roundtrip_generated_code {
+ extern crate flatbuffers;
+
+ use super::my_game;
+
+ fn build_mon<'a, 'b>(builder: &'a mut flatbuffers::FlatBufferBuilder, args: &'b my_game::example::MonsterArgs) -> my_game::example::Monster<'a> {
+ let mon = my_game::example::Monster::create(builder, &args);
+ my_game::example::finish_monster_buffer(builder, mon);
+ my_game::example::get_root_as_monster(builder.finished_data())
+ }
+
+ #[test]
+ fn scalar_store() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{hp: 123, name: Some(name), ..Default::default()});
+ assert_eq!(m.hp(), 123);
+ }
+ #[test]
+ fn scalar_default() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()});
+ assert_eq!(m.hp(), 100);
+ }
+ #[test]
+ fn string_store() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let name = b.create_string("foobar");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()});
+ assert_eq!(m.name(), Some("foobar"));
+ }
+ #[test]
+ fn struct_store() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{
+ name: Some(name),
+ pos: Some(&my_game::example::Vec3::new(1.0, 2.0, 3.0, 4.0,
+ my_game::example::Color::Green,
+ &my_game::example::Test::new(98, 99))),
+ ..Default::default()
+ });
+ assert_eq!(m.pos(), Some(&my_game::example::Vec3::new(1.0, 2.0, 3.0, 4.0,
+ my_game::example::Color::Green,
+ &my_game::example::Test::new(98, 99))));
+ }
+ #[test]
+ fn struct_default() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()});
+ assert_eq!(m.pos(), None);
+ }
+ #[test]
+ fn enum_store() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), color: my_game::example::Color::Red, ..Default::default()});
+ assert_eq!(m.color(), my_game::example::Color::Red);
+ }
+ #[test]
+ fn enum_default() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()});
+ assert_eq!(m.color(), my_game::example::Color::Blue);
+ }
+ #[test]
+ fn union_store() {
+ let b = &mut flatbuffers::FlatBufferBuilder::new();
+ {
+ let name_inner = b.create_string("foo");
+ let name_outer = b.create_string("bar");
+
+ let inner = my_game::example::Monster::create(b, &my_game::example::MonsterArgs{
+ name: Some(name_inner),
+ ..Default::default()
+ });
+ let outer = my_game::example::Monster::create(b, &my_game::example::MonsterArgs{
+ name: Some(name_outer),
+ test_type: my_game::example::Any::Monster,
+ test: Some(inner.as_union_value()),
+ ..Default::default()
+ });
+ my_game::example::finish_monster_buffer(b, outer);
+ }
+
+ let mon = my_game::example::get_root_as_monster(b.finished_data());
+ assert_eq!(mon.name(), Some("bar"));
+ assert_eq!(mon.test_type(), my_game::example::Any::Monster);
+ assert_eq!(my_game::example::Monster::init_from_table(mon.test().unwrap()).name(),
+ Some("foo"));
+ assert_eq!(mon.test_as_monster().unwrap().name(), Some("foo"));
+ assert_eq!(mon.test_as_test_simple_table_with_enum(), None);
+ assert_eq!(mon.test_as_my_game___example_2___monster(), None);
+ }
+ #[test]
+ fn union_default() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()});
+ assert_eq!(m.test_type(), my_game::example::Any::NONE);
+ assert_eq!(m.test(), None);
+ }
+ #[test]
+ fn table_full_namespace_store() {
+ let b = &mut flatbuffers::FlatBufferBuilder::new();
+ {
+ let name_inner = b.create_string("foo");
+ let name_outer = b.create_string("bar");
+
+ let inner = my_game::example::Monster::create(b, &my_game::example::MonsterArgs{
+ name: Some(name_inner),
+ ..Default::default()
+ });
+ let outer = my_game::example::Monster::create(b, &my_game::example::MonsterArgs{
+ name: Some(name_outer),
+ enemy: Some(inner),
+ ..Default::default()
+ });
+ my_game::example::finish_monster_buffer(b, outer);
+ }
+
+ let mon = my_game::example::get_root_as_monster(b.finished_data());
+ assert_eq!(mon.name(), Some("bar"));
+ assert_eq!(mon.enemy().unwrap().name(), Some("foo"));
+ }
+ #[test]
+ fn table_full_namespace_default() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()});
+ assert_eq!(m.enemy(), None);
+ }
+ #[test]
+ fn table_store() {
+ let b = &mut flatbuffers::FlatBufferBuilder::new();
+ {
+ let id_inner = b.create_string("foo");
+ let name_outer = b.create_string("bar");
+
+ let inner = my_game::example::Stat::create(b, &my_game::example::StatArgs{
+ id: Some(id_inner),
+ ..Default::default()
+ });
+ let outer = my_game::example::Monster::create(b, &my_game::example::MonsterArgs{
+ name: Some(name_outer),
+ testempty: Some(inner),
+ ..Default::default()
+ });
+ my_game::example::finish_monster_buffer(b, outer);
+ }
+
+ let mon = my_game::example::get_root_as_monster(b.finished_data());
+ assert_eq!(mon.name(), Some("bar"));
+ assert_eq!(mon.testempty().unwrap().id(), Some("foo"));
+ }
+ #[test]
+ fn table_default() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()});
+ assert_eq!(m.testempty(), None);
+ }
+ #[test]
+ fn nested_flatbuffer_store() {
+ let b0 = {
+ let mut b0 = flatbuffers::FlatBufferBuilder::new();
+ let args = my_game::example::MonsterArgs{
+ hp: 123,
+ name: Some(b0.create_string("foobar")),
+ ..Default::default()
+ };
+ let mon = my_game::example::Monster::create(&mut b0, &args);
+ my_game::example::finish_monster_buffer(&mut b0, mon);
+ b0
+ };
+
+ let b1 = {
+ let mut b1 = flatbuffers::FlatBufferBuilder::new();
+ let args = my_game::example::MonsterArgs{
+ testnestedflatbuffer: Some(b1.create_vector(b0.finished_data())),
+ name: Some(b1.create_string("foo")),
+ ..Default::default()
+ };
+ let mon = my_game::example::Monster::create(&mut b1, &args);
+ my_game::example::finish_monster_buffer(&mut b1, mon);
+ b1
+ };
+
+ let m = my_game::example::get_root_as_monster(b1.finished_data());
+
+ assert!(m.testnestedflatbuffer().is_some());
+ assert_eq!(m.testnestedflatbuffer().unwrap(), b0.finished_data());
+
+ let m2_a = my_game::example::get_root_as_monster(m.testnestedflatbuffer().unwrap());
+ assert_eq!(m2_a.hp(), 123);
+ assert_eq!(m2_a.name(), Some("foobar"));
+
+ assert!(m.testnestedflatbuffer_nested_flatbuffer().is_some());
+ let m2_b = m.testnestedflatbuffer_nested_flatbuffer().unwrap();
+
+ assert_eq!(m2_b.hp(), 123);
+ assert_eq!(m2_b.name(), Some("foobar"));
+ }
+ #[test]
+ fn nested_flatbuffer_default() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{name: Some(name), ..Default::default()});
+ assert!(m.testnestedflatbuffer().is_none());
+ }
+ #[test]
+ fn vector_of_string_store_helper_build() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let v = b.create_vector_of_strings(&["foobar", "baz"]);
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{
+ name: Some(name),
+ testarrayofstring: Some(v), ..Default::default()});
+ assert_eq!(m.testarrayofstring().unwrap().len(), 2);
+ assert_eq!(m.testarrayofstring().unwrap().get(0), "foobar");
+ assert_eq!(m.testarrayofstring().unwrap().get(1), "baz");
+ }
+ #[test]
+ fn vector_of_string_store_manual_build() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let s0 = b.create_string("foobar");
+ let s1 = b.create_string("baz");
+ let v = b.create_vector(&[s0, s1]);
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{
+ name: Some(name),
+ testarrayofstring: Some(v), ..Default::default()});
+ assert_eq!(m.testarrayofstring().unwrap().len(), 2);
+ assert_eq!(m.testarrayofstring().unwrap().get(0), "foobar");
+ assert_eq!(m.testarrayofstring().unwrap().get(1), "baz");
+ }
+ #[test]
+ fn vector_of_ubyte_store() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let v = b.create_vector(&[123u8, 234u8][..]);
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{
+ name: Some(name),
+ inventory: Some(v), ..Default::default()});
+ assert_eq!(m.inventory().unwrap(), &[123, 234][..]);
+ }
+ #[test]
+ fn vector_of_bool_store() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let v = b.create_vector(&[false, true, false, true][..]);
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{
+ name: Some(name),
+ testarrayofbools: Some(v), ..Default::default()});
+ assert_eq!(m.testarrayofbools().unwrap(), &[false, true, false, true][..]);
+ }
+ #[test]
+ fn vector_of_f64_store() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let v = b.create_vector(&[3.14159265359f64][..]);
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{
+ name: Some(name),
+ vector_of_doubles: Some(v), ..Default::default()});
+ assert_eq!(m.vector_of_doubles().unwrap().len(), 1);
+ assert_eq!(m.vector_of_doubles().unwrap().get(0), 3.14159265359f64);
+ }
+ #[test]
+ fn vector_of_struct_store() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let v = b.create_vector(&[my_game::example::Test::new(127, -128), my_game::example::Test::new(3, 123)][..]);
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{
+ name: Some(name),
+ test4: Some(v), ..Default::default()});
+ assert_eq!(m.test4().unwrap(), &[my_game::example::Test::new(127, -128), my_game::example::Test::new(3, 123)][..]);
+ }
+ #[test]
+ fn vector_of_struct_store_with_type_inference() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let v = b.create_vector(&[my_game::example::Test::new(127, -128),
+ my_game::example::Test::new(3, 123),
+ my_game::example::Test::new(100, 101)]);
+ let name = b.create_string("foo");
+ let m = build_mon(&mut b, &my_game::example::MonsterArgs{
+ name: Some(name),
+ test4: Some(v), ..Default::default()});
+ assert_eq!(m.test4().unwrap(), &[my_game::example::Test::new(127, -128), my_game::example::Test::new(3, 123), my_game::example::Test::new(100, 101)][..]);
+ }
+  // TODO(rw): this passes, but I don't want to change the monster test schema right now
+ // #[test]
+ // fn vector_of_enum_store() {
+ // let mut b = flatbuffers::FlatBufferBuilder::new();
+ // let v = b.create_vector::<my_game::example::Color>(&[my_game::example::Color::Red, my_game::example::Color::Green][..]);
+ // let name = b.create_string("foo");
+ // let m = build_mon(&mut b, &my_game::example::MonsterArgs{
+ // name: Some(name),
+ // vector_of_enum: Some(v), ..Default::default()});
+ // assert_eq!(m.vector_of_enum().unwrap().len(), 2);
+ // assert_eq!(m.vector_of_enum().unwrap().get(0), my_game::example::Color::Red);
+ // assert_eq!(m.vector_of_enum().unwrap().get(1), my_game::example::Color::Green);
+ // }
+ #[test]
+ fn vector_of_table_store() {
+ let b = &mut flatbuffers::FlatBufferBuilder::new();
+ let t0 = {
+ let name = b.create_string("foo");
+ let args = my_game::example::MonsterArgs{hp: 55, name: Some(name), ..Default::default()};
+ my_game::example::Monster::create(b, &args)
+ };
+ let t1 = {
+ let name = b.create_string("bar");
+ let args = my_game::example::MonsterArgs{name: Some(name), ..Default::default()};
+ my_game::example::Monster::create(b, &args)
+ };
+ let v = b.create_vector(&[t0, t1][..]);
+ let name = b.create_string("foo");
+ let m = build_mon(b, &my_game::example::MonsterArgs{
+ name: Some(name),
+ testarrayoftables: Some(v), ..Default::default()});
+ assert_eq!(m.testarrayoftables().unwrap().len(), 2);
+ assert_eq!(m.testarrayoftables().unwrap().get(0).hp(), 55);
+ assert_eq!(m.testarrayoftables().unwrap().get(0).name(), Some("foo"));
+ assert_eq!(m.testarrayoftables().unwrap().get(1).hp(), 100);
+ assert_eq!(m.testarrayoftables().unwrap().get(1).name(), Some("bar"));
+ }
+}
+
+#[cfg(test)]
+mod generated_code_alignment_and_padding {
+ extern crate flatbuffers;
+ use super::my_game;
+
+ #[test]
+ fn enum_color_is_1_byte() {
+ assert_eq!(1, ::std::mem::size_of::<my_game::example::Color>());
+ }
+
+ #[test]
+ fn enum_color_is_aligned_to_1() {
+ assert_eq!(1, ::std::mem::align_of::<my_game::example::Color>());
+ }
+
+ #[test]
+ fn union_any_is_1_byte() {
+ assert_eq!(1, ::std::mem::size_of::<my_game::example::Any>());
+ }
+
+ #[test]
+ fn union_any_is_aligned_to_1() {
+ assert_eq!(1, ::std::mem::align_of::<my_game::example::Any>());
+ }
+
+ #[test]
+ fn struct_test_is_4_bytes() {
+ assert_eq!(4, ::std::mem::size_of::<my_game::example::Test>());
+ }
+
+ #[test]
+ fn struct_test_is_aligned_to_2() {
+ assert_eq!(2, ::std::mem::align_of::<my_game::example::Test>());
+ }
+
+ #[test]
+ fn struct_vec3_is_32_bytes() {
+ assert_eq!(32, ::std::mem::size_of::<my_game::example::Vec3>());
+ }
+
+ #[test]
+ fn struct_vec3_is_aligned_to_16() {
+ assert_eq!(16, ::std::mem::align_of::<my_game::example::Vec3>());
+ }
+
+ #[test]
+ fn struct_vec3_is_written_with_correct_alignment_in_table() {
+ let b = &mut flatbuffers::FlatBufferBuilder::new();
+ {
+ let name = b.create_string("foo");
+ let mon = my_game::example::Monster::create(b, &my_game::example::MonsterArgs{
+ name: Some(name),
+ pos: Some(&my_game::example::Vec3::new(1.0, 2.0, 3.0, 4.0,
+ my_game::example::Color::Green,
+ &my_game::example::Test::new(98, 99))),
+ ..Default::default()});
+ my_game::example::finish_monster_buffer(b, mon);
+ }
+ let buf = b.finished_data();
+ let mon = my_game::example::get_root_as_monster(buf);
+ let vec3 = mon.pos().unwrap();
+
+ let start_ptr = buf.as_ptr() as usize;
+ let vec3_ptr = vec3 as *const my_game::example::Vec3 as usize;
+
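+        // The Vec3 was written inline in the finished buffer, so its address must be
+        // a multiple of the struct's declared alignment (16 bytes, per the tests above).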
+ assert!(vec3_ptr > start_ptr);
+ let aln = ::std::mem::align_of::<my_game::example::Vec3>();
+ assert_eq!((vec3_ptr - start_ptr) % aln, 0);
+ }
+
+ #[test]
+ fn struct_ability_is_8_bytes() {
+ assert_eq!(8, ::std::mem::size_of::<my_game::example::Ability>());
+ }
+
+ #[test]
+ fn struct_ability_is_aligned_to_4() {
+ assert_eq!(4, ::std::mem::align_of::<my_game::example::Ability>());
+ }
+
+ #[test]
+ fn struct_ability_is_written_with_correct_alignment_in_table_vector() {
+ let b = &mut flatbuffers::FlatBufferBuilder::new();
+ {
+ let name = b.create_string("foo");
+ let v = b.create_vector(&[my_game::example::Ability::new(1, 2),
+ my_game::example::Ability::new(3, 4),
+ my_game::example::Ability::new(5, 6)]);
+ let mon = my_game::example::Monster::create(b, &my_game::example::MonsterArgs{
+ name: Some(name),
+ testarrayofsortedstruct: Some(v),
+ ..Default::default()});
+ my_game::example::finish_monster_buffer(b, mon);
+ }
+ let buf = b.finished_data();
+ let mon = my_game::example::get_root_as_monster(buf);
+ let abilities = mon.testarrayofsortedstruct().unwrap();
+
+ let start_ptr = buf.as_ptr() as usize;
+ for a in abilities.iter() {
+ let a_ptr = a as *const my_game::example::Ability as usize;
+ assert!(a_ptr > start_ptr);
+ let aln = ::std::mem::align_of::<my_game::example::Ability>();
+ assert_eq!((a_ptr - start_ptr) % aln, 0);
+ }
+ }
+}
+
+#[cfg(test)]
+mod roundtrip_byteswap {
+ extern crate quickcheck;
+ extern crate flatbuffers;
+
+ const N: u64 = 10000;
+
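+    // A float is a byte-swap "palindrome" when swapping its bytes yields a bit
+    // pattern that compares equal to the original (0.0, for example); the
+    // inequality assertions below must be skipped for such values.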
+ fn palindrome_32(x: f32) -> bool {
+ x == f32::from_bits(x.to_bits().swap_bytes())
+ }
+ fn palindrome_64(x: f64) -> bool {
+ x == f64::from_bits(x.to_bits().swap_bytes())
+ }
+
+ fn prop_f32(x: f32) {
+ use flatbuffers::byte_swap_f32;
+
+ let there = byte_swap_f32(x);
+
+ let back_again = byte_swap_f32(there);
+
+ if !palindrome_32(x) {
+ assert!(x != there);
+ }
+
+ assert_eq!(x, back_again);
+ }
+
+ fn prop_f64(x: f64) {
+ use flatbuffers::byte_swap_f64;
+
+ let there = byte_swap_f64(x);
+ let back_again = byte_swap_f64(there);
+
+ if !palindrome_64(x) {
+ assert!(x != there);
+ }
+
+ assert_eq!(x, back_again);
+ }
+
+ #[test]
+ fn fuzz_f32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_f32 as fn(f32)); }
+ #[test]
+ fn fuzz_f64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_f64 as fn(f64)); }
+}
+
+#[cfg(test)]
+mod roundtrip_vectors {
+
+ #[cfg(test)]
+ mod scalar {
+ extern crate quickcheck;
+ extern crate flatbuffers;
+
+ const N: u64 = 20;
+
+ fn prop<T: PartialEq + ::std::fmt::Debug + Copy + flatbuffers::EndianScalar + flatbuffers::Push>(xs: Vec<T>) {
+ use flatbuffers::Follow;
+
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.start_vector::<T>(xs.len());
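+            // The builder writes data back-to-front, so elements are pushed in
+            // reverse order to come out in index order when the vector is read.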
+ for i in (0..xs.len()).rev() {
+ b.push::<T>(xs[i]);
+ }
+ let vecend = b.end_vector::<T>(xs.len());
+ b.finish_minimal(vecend);
+
+ let buf = b.finished_data();
+
+ let got = <flatbuffers::ForwardsUOffset<&[T]>>::follow(buf, 0);
+ assert_eq!(got, &xs[..]);
+ }
+
+ #[test]
+ fn easy_u8() {
+ prop::<u8>(vec![]);
+ prop::<u8>(vec![1u8]);
+ prop::<u8>(vec![1u8, 2u8]);
+ prop::<u8>(vec![1u8, 2u8, 3u8]);
+ prop::<u8>(vec![1u8, 2u8, 3u8, 4u8]);
+ }
+
+ #[test]
+ fn fuzz_bool() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<bool> as fn(Vec<_>)); }
+ #[test]
+ fn fuzz_u8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<u8> as fn(Vec<_>)); }
+ #[test]
+ fn fuzz_i8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<i8> as fn(Vec<_>)); }
+ #[test]
+ fn fuzz_u16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<u16> as fn(Vec<_>)); }
+ #[test]
+ fn fuzz_i16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<i16> as fn(Vec<_>)); }
+ #[test]
+ fn fuzz_u32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<u32> as fn(Vec<_>)); }
+ #[test]
+ fn fuzz_i32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<i32> as fn(Vec<_>)); }
+ #[test]
+ fn fuzz_u64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<u64> as fn(Vec<_>)); }
+ #[test]
+ fn fuzz_i64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<i64> as fn(Vec<_>)); }
+ #[test]
+ fn fuzz_f32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<f32> as fn(Vec<_>)); }
+ #[test]
+ fn fuzz_f64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<f64> as fn(Vec<_>)); }
+ }
+
+ #[cfg(test)]
+ mod create_vector_direct {
+ extern crate quickcheck;
+ extern crate flatbuffers;
+
+ const N: u64 = 20;
+
+ // This uses a macro because lifetimes for the trait-bounded function get too
+ // complicated.
+ macro_rules! impl_prop {
+ ($test_name:ident, $fn_name:ident, $ty:ident) => (
+ fn $fn_name(xs: Vec<$ty>) {
+ use flatbuffers::Follow;
+
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.create_vector_direct(&xs[..]);
+ let buf = b.unfinished_data();
+
+ let got = <flatbuffers::Vector<$ty>>::follow(&buf[..], 0).safe_slice();
+ assert_eq!(got, &xs[..]);
+ }
+ #[test]
+ fn $test_name() { quickcheck::QuickCheck::new().max_tests(N).quickcheck($fn_name as fn(Vec<_>)); }
+ )
+ }
+
+ impl_prop!(test_bool, prop_bool, bool);
+ impl_prop!(test_u8, prop_u8, u8);
+ impl_prop!(test_i8, prop_i8, i8);
+
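+        // create_vector_direct copies the elements' in-memory bytes into the buffer
+        // without byte-swapping, so element types wider than one byte are only
+        // round-tripped on little-endian hosts (see the cfg gate below).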
+ #[cfg(test)]
+ #[cfg(target_endian = "little")]
+ mod host_is_le {
+ const N: u64 = 20;
+ use super::flatbuffers;
+ use super::quickcheck;
+ impl_prop!(test_u16, prop_u16, u16);
+ impl_prop!(test_u32, prop_u32, u32);
+ impl_prop!(test_u64, prop_u64, u64);
+ impl_prop!(test_i16, prop_i16, i16);
+ impl_prop!(test_i32, prop_i32, i32);
+ impl_prop!(test_i64, prop_i64, i64);
+ impl_prop!(test_f32, prop_f32, f32);
+ impl_prop!(test_f64, prop_f64, f64);
+ }
+ }
+
+ #[cfg(test)]
+ mod string_manual_build {
+ extern crate quickcheck;
+ extern crate flatbuffers;
+
+ fn prop(xs: Vec<String>) {
+ use flatbuffers::Follow;
+
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let mut offsets = Vec::new();
+ for s in xs.iter().rev() {
+ offsets.push(b.create_string(s.as_str()));
+ }
+
+ b.start_vector::<flatbuffers::WIPOffset<&str>>(xs.len());
+ for &i in offsets.iter() {
+ b.push(i);
+ }
+ let vecend = b.end_vector::<flatbuffers::WIPOffset<&str>>(xs.len());
+
+ b.finish_minimal(vecend);
+
+ let buf = b.finished_data();
+ let got = <flatbuffers::ForwardsUOffset<flatbuffers::Vector<flatbuffers::ForwardsUOffset<&str>>>>::follow(buf, 0);
+
+ assert_eq!(got.len(), xs.len());
+ for i in 0..xs.len() {
+ assert_eq!(got.get(i), &xs[i][..]);
+ }
+ }
+
+ #[test]
+ fn fuzz() {
+ quickcheck::QuickCheck::new().max_tests(20).quickcheck(prop as fn(Vec<_>));
+ }
+ }
+
+ #[cfg(test)]
+ mod string_helper_build {
+ extern crate quickcheck;
+ extern crate flatbuffers;
+
+ fn prop(input: Vec<String>) {
+ let xs: Vec<&str> = input.iter().map(|s: &String| &s[..]).collect();
+
+ use flatbuffers::Follow;
+
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let vecend = b.create_vector_of_strings(&xs[..]);
+
+ b.finish_minimal(vecend);
+
+ let buf = b.finished_data();
+ let got = <flatbuffers::ForwardsUOffset<flatbuffers::Vector<flatbuffers::ForwardsUOffset<&str>>>>::follow(buf, 0);
+
+ assert_eq!(got.len(), xs.len());
+ for i in 0..xs.len() {
+ assert_eq!(got.get(i), &xs[i][..]);
+ }
+ }
+
+ #[test]
+ fn fuzz() {
+ quickcheck::QuickCheck::new().max_tests(100).quickcheck(prop as fn(Vec<_>));
+ }
+ }
+
+ #[cfg(test)]
+ mod ubyte {
+ extern crate quickcheck;
+ extern crate flatbuffers;
+
+ #[test]
+ fn fuzz_manual_build() {
+ fn prop(vec: Vec<u8>) {
+ let xs = &vec[..];
+
+ let mut b1 = flatbuffers::FlatBufferBuilder::new();
+ b1.start_vector::<u8>(xs.len());
+
+ for i in (0..xs.len()).rev() {
+ b1.push(xs[i]);
+ }
+ b1.end_vector::<u8>(xs.len());
+
+ let mut b2 = flatbuffers::FlatBufferBuilder::new();
+ b2.create_vector(xs);
+ assert_eq!(b1.unfinished_data(), b2.unfinished_data());
+ }
+ quickcheck::QuickCheck::new().max_tests(100).quickcheck(prop as fn(Vec<_>));
+ }
+ }
+}
+
+#[cfg(test)]
+mod framing_format {
+ extern crate flatbuffers;
+
+ use super::my_game;
+
+ #[test]
+ fn test_size_prefixed_buffer() {
+ // Create size prefixed buffer.
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let args = &my_game::example::MonsterArgs{
+ mana: 200,
+ hp: 300,
+ name: Some(b.create_string("bob")),
+ ..Default::default()
+ };
+ let mon = my_game::example::Monster::create(&mut b, &args);
+ b.finish_size_prefixed(mon, None);
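+        // finish_size_prefixed writes a u32 byte-length prefix ahead of the root
+        // offset; get_size_prefixed_root (below) skips past it to resolve the root.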
+
+ // Access it.
+ let buf = b.finished_data();
+ let m = flatbuffers::get_size_prefixed_root::<my_game::example::Monster>(buf);
+ assert_eq!(m.mana(), 200);
+ assert_eq!(m.hp(), 300);
+ assert_eq!(m.name(), Some("bob"));
+ }
+}
+
+#[cfg(test)]
+mod roundtrip_table {
+ use std::collections::HashMap;
+
+ extern crate flatbuffers;
+ extern crate quickcheck;
+
+ use super::LCG;
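+    // LCG is a small deterministic pseudo-random number generator defined earlier
+    // in this file; resetting it lets the read pass replay the exact sequence of
+    // random choices that the write pass used.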
+
+ #[test]
+ fn table_of_mixed_scalars_fuzz() {
+        // Values we're testing against: chosen to ensure no bits get chopped
+        // off anywhere, and also to be different from each other.
+ let bool_val: bool = true;
+ let char_val: i8 = -127; // 0x81
+ let uchar_val: u8 = 0xFF;
+ let short_val: i16 = -32222; // 0x8222;
+ let ushort_val: u16 = 0xFEEE;
+        let int_val: i32 = 0x83333333u32 as i32;
+        let uint_val: u32 = 0xFDDDDDDD;
+        let long_val: i64 = 0x8444444444444444u64 as i64;
+ let ulong_val: u64 = 0xFCCCCCCCCCCCCCCCu64;
+ let float_val: f32 = 3.14159;
+ let double_val: f64 = 3.14159265359;
+
+ let test_value_types_max: isize = 11;
+ let max_fields_per_object: flatbuffers::VOffsetT = 100;
+ let num_fuzz_objects: isize = 1000; // The higher, the more thorough :)
+
+ let mut builder = flatbuffers::FlatBufferBuilder::new();
+ let mut lcg = LCG::new();
+
+ let mut objects: Vec<flatbuffers::UOffsetT> = vec![0; num_fuzz_objects as usize];
+
+ // Generate num_fuzz_objects random objects each consisting of
+ // fields_per_object fields, each of a random type.
+ for i in 0..(num_fuzz_objects as usize) {
+ let fields_per_object = (lcg.next() % (max_fields_per_object as u64)) as flatbuffers::VOffsetT;
+ let start = builder.start_table();
+
+ for j in 0..fields_per_object {
+ let choice = lcg.next() % (test_value_types_max as u64);
+
+ let f = flatbuffers::field_index_to_field_offset(j);
+
+ match choice {
+ 0 => {builder.push_slot::<bool>(f, bool_val, false);}
+ 1 => {builder.push_slot::<i8>(f, char_val, 0);}
+ 2 => {builder.push_slot::<u8>(f, uchar_val, 0);}
+ 3 => {builder.push_slot::<i16>(f, short_val, 0);}
+ 4 => {builder.push_slot::<u16>(f, ushort_val, 0);}
+ 5 => {builder.push_slot::<i32>(f, int_val, 0);}
+ 6 => {builder.push_slot::<u32>(f, uint_val, 0);}
+ 7 => {builder.push_slot::<i64>(f, long_val, 0);}
+ 8 => {builder.push_slot::<u64>(f, ulong_val, 0);}
+ 9 => {builder.push_slot::<f32>(f, float_val, 0.0);}
+ 10 => {builder.push_slot::<f64>(f, double_val, 0.0);}
+ _ => { panic!("unknown choice: {}", choice); }
+ }
+ }
+ objects[i] = builder.end_table(start).value();
+ }
+
+ // Do some bookkeeping to generate stats on fuzzes:
+ let mut stats: HashMap<u64, u64> = HashMap::new();
+ let mut values_generated: u64 = 0;
+
+ // Embrace PRNG determinism:
+ lcg.reset();
+
+ // Test that all objects we generated are readable and return the
+ // expected values. We generate random objects in the same order
+ // so this is deterministic:
+ for i in 0..(num_fuzz_objects as usize) {
+ let table = {
+ let buf = builder.unfinished_data();
+ let loc = buf.len() as flatbuffers::UOffsetT - objects[i];
+ flatbuffers::Table::new(buf, loc as usize)
+ };
+
+ let fields_per_object = (lcg.next() % (max_fields_per_object as u64)) as flatbuffers::VOffsetT;
+ for j in 0..fields_per_object {
+ let choice = lcg.next() % (test_value_types_max as u64);
+
+ *stats.entry(choice).or_insert(0) += 1;
+ values_generated += 1;
+
+ let f = flatbuffers::field_index_to_field_offset(j);
+
+ match choice {
+ 0 => { assert_eq!(bool_val, table.get::<bool>(f, Some(false)).unwrap()); }
+ 1 => { assert_eq!(char_val, table.get::<i8>(f, Some(0)).unwrap()); }
+ 2 => { assert_eq!(uchar_val, table.get::<u8>(f, Some(0)).unwrap()); }
+ 3 => { assert_eq!(short_val, table.get::<i16>(f, Some(0)).unwrap()); }
+ 4 => { assert_eq!(ushort_val, table.get::<u16>(f, Some(0)).unwrap()); }
+ 5 => { assert_eq!(int_val, table.get::<i32>(f, Some(0)).unwrap()); }
+ 6 => { assert_eq!(uint_val, table.get::<u32>(f, Some(0)).unwrap()); }
+ 7 => { assert_eq!(long_val, table.get::<i64>(f, Some(0)).unwrap()); }
+ 8 => { assert_eq!(ulong_val, table.get::<u64>(f, Some(0)).unwrap()); }
+ 9 => { assert_eq!(float_val, table.get::<f32>(f, Some(0.0)).unwrap()); }
+ 10 => { assert_eq!(double_val, table.get::<f64>(f, Some(0.0)).unwrap()); }
+ _ => { panic!("unknown choice: {}", choice); }
+ }
+ }
+ }
+
+ // Assert that we tested all the fuzz cases enough:
+ let min_tests_per_choice = 1000;
+ assert!(values_generated > 0);
+ assert!(min_tests_per_choice > 0);
+ for i in 0..test_value_types_max as u64 {
+            assert!(stats[&i] >= min_tests_per_choice,
+                    "inadequately-tested fuzz case: {}", i);
+ }
+ }
+
+ #[test]
+ fn table_of_byte_strings_fuzz() {
+ fn prop(vec: Vec<Vec<u8>>) {
+ use flatbuffers::field_index_to_field_offset as fi2fo;
+ use flatbuffers::Follow;
+
+ let xs = &vec[..];
+
+ // build
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let str_offsets: Vec<flatbuffers::WIPOffset<_>> = xs.iter().map(|s| b.create_byte_string(&s[..])).collect();
+ let table_start = b.start_table();
+
+ for i in 0..xs.len() {
+ b.push_slot_always(fi2fo(i as flatbuffers::VOffsetT), str_offsets[i]);
+ }
+ let root = b.end_table(table_start);
+ b.finish_minimal(root);
+
+ // use
+ let buf = b.finished_data();
+ let tab = <flatbuffers::ForwardsUOffset<flatbuffers::Table>>::follow(buf, 0);
+
+ for i in 0..xs.len() {
+ let v = tab.get::<flatbuffers::ForwardsUOffset<&[u8]>>(fi2fo(i as flatbuffers::VOffsetT), None);
+ assert_eq!(v, Some(&xs[i][..]));
+ }
+ }
+ prop(vec![vec![1,2,3]]);
+
+ let n = 20;
+ quickcheck::QuickCheck::new().max_tests(n).quickcheck(prop as fn(Vec<_>));
+ }
+
+ #[test]
+ fn fuzz_table_of_strings() {
+ fn prop(vec: Vec<String>) {
+ use flatbuffers::field_index_to_field_offset as fi2fo;
+ use flatbuffers::Follow;
+
+ let xs = &vec[..];
+
+ // build
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let str_offsets: Vec<flatbuffers::WIPOffset<_>> = xs.iter().map(|s| b.create_string(&s[..])).collect();
+ let table_start = b.start_table();
+
+ for i in 0..xs.len() {
+ b.push_slot_always(fi2fo(i as flatbuffers::VOffsetT), str_offsets[i]);
+ }
+ let root = b.end_table(table_start);
+ b.finish_minimal(root);
+
+ // use
+ let buf = b.finished_data();
+ let tab = <flatbuffers::ForwardsUOffset<flatbuffers::Table>>::follow(buf, 0);
+
+ for i in 0..xs.len() {
+ let v = tab.get::<flatbuffers::ForwardsUOffset<&str>>(fi2fo(i as flatbuffers::VOffsetT), None);
+ assert_eq!(v, Some(&xs[i][..]));
+ }
+ }
+ let n = 20;
+ quickcheck::QuickCheck::new().max_tests(n).quickcheck(prop as fn(Vec<String>));
+ }
+
+ mod table_of_vectors_of_scalars {
+ extern crate flatbuffers;
+ extern crate quickcheck;
+
+ const N: u64 = 20;
+
+ fn prop<'a, T: flatbuffers::Follow<'a> + 'a + flatbuffers::EndianScalar + flatbuffers::Push + ::std::fmt::Debug>(vecs: Vec<Vec<T>>) {
+ use flatbuffers::field_index_to_field_offset as fi2fo;
+ use flatbuffers::Follow;
+
+ // build
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let mut offs = vec![];
+ for vec in &vecs {
+ b.start_vector::<T>(vec.len());
+
+ let xs = &vec[..];
+ for i in (0..xs.len()).rev() {
+ b.push::<T>(xs[i]);
+ }
+ let vecend = b.end_vector::<T>(xs.len());
+ offs.push(vecend);
+ }
+
+ let table_start = b.start_table();
+
+ for i in 0..vecs.len() {
+ b.push_slot_always(fi2fo(i as flatbuffers::VOffsetT), offs[i]);
+ }
+ let root = b.end_table(table_start);
+ b.finish_minimal(root);
+
+ // use
+ let buf = b.finished_data();
+ let tab = <flatbuffers::ForwardsUOffset<flatbuffers::Table>>::follow(buf, 0);
+
+ for i in 0..vecs.len() {
+ let got = tab.get::<flatbuffers::ForwardsUOffset<&[T]>>(fi2fo(i as flatbuffers::VOffsetT), None);
+ assert!(got.is_some());
+ let got2 = got.unwrap();
+ assert_eq!(&vecs[i][..], got2);
+ }
+ }
+
+ #[test]
+ fn fuzz_bool() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec<Vec<bool>>)); }
+
+ #[test]
+ fn fuzz_u8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec<Vec<u8>>)); }
+ #[test]
+ fn fuzz_u16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec<Vec<u16>>)); }
+ #[test]
+ fn fuzz_u32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec<Vec<u32>>)); }
+ #[test]
+ fn fuzz_u64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec<Vec<u64>>)); }
+
+        #[test]
+        fn fuzz_i8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec<Vec<i8>>)); }
+        #[test]
+        fn fuzz_i16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec<Vec<i16>>)); }
+        #[test]
+        fn fuzz_i32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec<Vec<i32>>)); }
+        #[test]
+        fn fuzz_i64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec<Vec<i64>>)); }
+
+ #[test]
+ fn fuzz_f32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec<Vec<f32>>)); }
+ #[test]
+ fn fuzz_f64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop as fn(Vec<Vec<f64>>)); }
+ }
+}
+
+#[cfg(test)]
+mod roundtrip_scalars {
+ extern crate flatbuffers;
+ extern crate quickcheck;
+
+ const N: u64 = 1000;
+
+ fn prop<T: PartialEq + ::std::fmt::Debug + Copy + flatbuffers::EndianScalar>(x: T) {
+ let mut buf = vec![0u8; ::std::mem::size_of::<T>()];
+ flatbuffers::emplace_scalar(&mut buf[..], x);
+ let y = flatbuffers::read_scalar(&buf[..]);
+ assert_eq!(x, y);
+ }
+
+ #[test]
+ fn fuzz_bool() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<bool> as fn(_)); }
+ #[test]
+ fn fuzz_u8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<u8> as fn(_)); }
+ #[test]
+ fn fuzz_i8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<i8> as fn(_)); }
+
+ #[test]
+ fn fuzz_u16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<u16> as fn(_)); }
+ #[test]
+ fn fuzz_i16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<i16> as fn(_)); }
+
+ #[test]
+ fn fuzz_u32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<u32> as fn(_)); }
+ #[test]
+ fn fuzz_i32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<i32> as fn(_)); }
+
+ #[test]
+ fn fuzz_u64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<u64> as fn(_)); }
+ #[test]
+ fn fuzz_i64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<i64> as fn(_)); }
+
+ #[test]
+ fn fuzz_f32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<f32> as fn(_)); }
+ #[test]
+ fn fuzz_f64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop::<f64> as fn(_)); }
+}
+
+#[cfg(test)]
+mod roundtrip_push_follow_scalars {
+ extern crate flatbuffers;
+ extern crate quickcheck;
+
+ use flatbuffers::Push;
+
+ const N: u64 = 1000;
+
+ // This uses a macro because lifetimes for a trait-bounded function get too
+ // complicated.
+ macro_rules! impl_prop {
+ ($fn_name:ident, $ty:ident) => (
+ fn $fn_name(x: $ty) {
+ let mut buf = vec![0u8; ::std::mem::size_of::<$ty>()];
+ x.push(&mut buf[..], &[][..]);
+ let fs: flatbuffers::FollowStart<$ty> = flatbuffers::FollowStart::new();
+ assert_eq!(fs.self_follow(&buf[..], 0), x);
+ }
+ )
+ }
+
+ impl_prop!(prop_bool, bool);
+ impl_prop!(prop_u8, u8);
+ impl_prop!(prop_i8, i8);
+ impl_prop!(prop_u16, u16);
+ impl_prop!(prop_i16, i16);
+ impl_prop!(prop_u32, u32);
+ impl_prop!(prop_i32, i32);
+ impl_prop!(prop_u64, u64);
+ impl_prop!(prop_i64, i64);
+ impl_prop!(prop_f32, f32);
+ impl_prop!(prop_f64, f64);
+
+ #[test]
+ fn fuzz_bool() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_bool as fn(bool)); }
+ #[test]
+ fn fuzz_u8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_u8 as fn(u8)); }
+ #[test]
+ fn fuzz_i8() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_i8 as fn(i8)); }
+ #[test]
+ fn fuzz_u16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_u16 as fn(u16)); }
+ #[test]
+ fn fuzz_i16() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_i16 as fn(i16)); }
+ #[test]
+ fn fuzz_u32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_u32 as fn(u32)); }
+ #[test]
+ fn fuzz_i32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_i32 as fn(i32)); }
+ #[test]
+ fn fuzz_u64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_u64 as fn(u64)); }
+ #[test]
+ fn fuzz_i64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_i64 as fn(i64)); }
+ #[test]
+ fn fuzz_f32() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_f32 as fn(f32)); }
+ #[test]
+ fn fuzz_f64() { quickcheck::QuickCheck::new().max_tests(N).quickcheck(prop_f64 as fn(f64)); }
+}
+
+
+#[cfg(test)]
+mod write_and_read_examples {
+ extern crate flatbuffers;
+
+ use super::create_serialized_example_with_library_code;
+ use super::create_serialized_example_with_generated_code;
+ use super::serialized_example_is_accessible_and_correct;
+
+ #[test]
+ fn generated_code_creates_correct_example() {
+ let b = &mut flatbuffers::FlatBufferBuilder::new();
+ create_serialized_example_with_generated_code(b);
+ let buf = b.finished_data();
+ serialized_example_is_accessible_and_correct(&buf[..], true, false).unwrap();
+ }
+
+ #[test]
+ fn generated_code_creates_correct_example_repeatedly_with_reset() {
+ let b = &mut flatbuffers::FlatBufferBuilder::new();
+ for _ in 0..100 {
+ create_serialized_example_with_generated_code(b);
+ {
+ let buf = b.finished_data();
+ serialized_example_is_accessible_and_correct(&buf[..], true, false).unwrap();
+ }
+ b.reset();
+ }
+ }
+
+ #[test]
+ fn library_code_creates_correct_example() {
+ let b = &mut flatbuffers::FlatBufferBuilder::new();
+ create_serialized_example_with_library_code(b);
+ let buf = b.finished_data();
+ serialized_example_is_accessible_and_correct(&buf[..], true, false).unwrap();
+ }
+
+ #[test]
+ fn library_code_creates_correct_example_repeatedly_with_reset() {
+ let b = &mut flatbuffers::FlatBufferBuilder::new();
+ for _ in 0..100 {
+ create_serialized_example_with_library_code(b);
+ {
+ let buf = b.finished_data();
+ serialized_example_is_accessible_and_correct(&buf[..], true, false).unwrap();
+ }
+ b.reset();
+ }
+ }
+}
+
+#[cfg(test)]
+mod read_examples_from_other_language_ports {
+ extern crate flatbuffers;
+
+ use super::load_file;
+ use super::serialized_example_is_accessible_and_correct;
+
+ #[test]
+ fn gold_cpp_example_data_is_accessible_and_correct() {
+ let buf = load_file("../monsterdata_test.mon");
+ serialized_example_is_accessible_and_correct(&buf[..], true, false).unwrap();
+ }
+ #[test]
+ fn java_wire_example_data_is_accessible_and_correct() {
+ let buf = load_file("../monsterdata_java_wire.mon");
+ serialized_example_is_accessible_and_correct(&buf[..], true, false).unwrap();
+ }
+ #[test]
+ fn java_wire_size_prefixed_example_data_is_accessible_and_correct() {
+ let buf = load_file("../monsterdata_java_wire_sp.mon");
+ serialized_example_is_accessible_and_correct(&buf[..], true, true).unwrap();
+ }
+}
+
+#[cfg(test)]
+mod generated_code_asserts {
+ extern crate flatbuffers;
+
+ use super::my_game;
+
+ #[test]
+ #[should_panic]
+ fn monster_builder_fails_when_name_is_missing() {
+ let b = &mut flatbuffers::FlatBufferBuilder::new();
+ my_game::example::Monster::create(b, &my_game::example::MonsterArgs{..Default::default()});
+ }
+}
+
+#[cfg(test)]
+mod generated_key_comparisons {
+ extern crate flatbuffers;
+
+ use super::my_game;
+
+ #[test]
+ fn struct_ability_key_compare_less_than() {
+ let a = my_game::example::Ability::new(1, 2);
+ let b = my_game::example::Ability::new(2, 1);
+ let c = my_game::example::Ability::new(3, 3);
+
+ assert_eq!(a.key_compare_less_than(&a), false);
+ assert_eq!(b.key_compare_less_than(&b), false);
+ assert_eq!(c.key_compare_less_than(&c), false);
+
+ assert_eq!(a.key_compare_less_than(&b), true);
+ assert_eq!(a.key_compare_less_than(&c), true);
+
+ assert_eq!(b.key_compare_less_than(&a), false);
+ assert_eq!(b.key_compare_less_than(&c), true);
+
+ assert_eq!(c.key_compare_less_than(&a), false);
+ assert_eq!(c.key_compare_less_than(&b), false);
+ }
+
+ #[test]
+ fn struct_key_compare_with_value() {
+ let a = my_game::example::Ability::new(1, 2);
+
+ assert_eq!(a.key_compare_with_value(0), ::std::cmp::Ordering::Greater);
+ assert_eq!(a.key_compare_with_value(1), ::std::cmp::Ordering::Equal);
+ assert_eq!(a.key_compare_with_value(2), ::std::cmp::Ordering::Less);
+ }
+
+ #[test]
+ fn struct_key_compare_less_than() {
+ let a = my_game::example::Ability::new(1, 2);
+ let b = my_game::example::Ability::new(2, 1);
+ let c = my_game::example::Ability::new(3, 3);
+
+ assert_eq!(a.key_compare_less_than(&a), false);
+ assert_eq!(b.key_compare_less_than(&b), false);
+ assert_eq!(c.key_compare_less_than(&c), false);
+
+ assert_eq!(a.key_compare_less_than(&b), true);
+ assert_eq!(a.key_compare_less_than(&c), true);
+
+ assert_eq!(b.key_compare_less_than(&a), false);
+ assert_eq!(b.key_compare_less_than(&c), true);
+
+ assert_eq!(c.key_compare_less_than(&a), false);
+ assert_eq!(c.key_compare_less_than(&b), false);
+ }
+
+ #[test]
+ fn table_key_compare_with_value() {
+ // setup
+ let builder = &mut flatbuffers::FlatBufferBuilder::new();
+ super::create_serialized_example_with_library_code(builder);
+ let buf = builder.finished_data();
+ let a = my_game::example::get_root_as_monster(buf);
+
+ // preconditions
+ assert_eq!(a.name(), Some("MyMonster"));
+
+ assert_eq!(a.key_compare_with_value(None), ::std::cmp::Ordering::Greater);
+
+ assert_eq!(a.key_compare_with_value(Some("AAA")), ::std::cmp::Ordering::Greater);
+ assert_eq!(a.key_compare_with_value(Some("MyMonster")), ::std::cmp::Ordering::Equal);
+ assert_eq!(a.key_compare_with_value(Some("ZZZ")), ::std::cmp::Ordering::Less);
+ }
+
+ #[test]
+ fn table_key_compare_less_than() {
+ // setup
+ let builder = &mut flatbuffers::FlatBufferBuilder::new();
+ super::create_serialized_example_with_library_code(builder);
+ let buf = builder.finished_data();
+ let a = my_game::example::get_root_as_monster(buf);
+ let b = a.test_as_monster().unwrap();
+
+ // preconditions
+ assert_eq!(a.name(), Some("MyMonster"));
+ assert_eq!(b.name(), Some("Fred"));
+
+ assert_eq!(a.key_compare_less_than(&a), false);
+ assert_eq!(a.key_compare_less_than(&b), false);
+
+ assert_eq!(b.key_compare_less_than(&a), true);
+ assert_eq!(b.key_compare_less_than(&b), false);
+ }
+}
+
+#[cfg(test)]
+mod included_schema_generated_code {
+ extern crate flatbuffers;
+
+ //extern crate rust_usage_test;
+
+ // TODO(rw): make generated sub-namespace files importable
+ //#[test]
+ //fn namespace_test_mod_is_importable() {
+ // use rust_usage_test::namespace_test;
+ //}
+ //#[test]
+ //fn namespace_test1_mod_is_importable() {
+ // use rust_usage_test::namespace_test::namespace_test1_generated;
+ //}
+ //#[test]
+ //fn namespace_test2_mod_is_importable() {
+ // use rust_usage_test::namespace_test::namespace_test2_generated;
+ //}
+}
+
+#[cfg(test)]
+mod builder_asserts {
+ extern crate flatbuffers;
+
+ #[test]
+ #[should_panic]
+ fn end_table_should_panic_when_not_in_table() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.end_table(flatbuffers::WIPOffset::new(0));
+ }
+
+ #[test]
+ #[should_panic]
+ fn create_string_should_panic_when_in_table() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.start_table();
+ b.create_string("foo");
+ }
+
+ #[test]
+ #[should_panic]
+ fn create_byte_string_should_panic_when_in_table() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.start_table();
+ b.create_byte_string(b"foo");
+ }
+
+ #[test]
+ #[should_panic]
+ fn push_struct_slot_should_panic_when_not_in_table() {
+ #[derive(Copy, Clone, Debug, PartialEq)]
+ #[repr(C, packed)]
+ struct foo { }
+ impl<'b> flatbuffers::Push for &'b foo {
+ type Output = foo;
+ fn push<'a>(&'a self, _dst: &'a mut [u8], _rest: &'a [u8]) { }
+ }
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.push_slot_always(0, &foo{});
+ }
+
+ #[test]
+ #[should_panic]
+ fn finished_bytes_should_panic_when_table_is_not_finished() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.start_table();
+ b.finished_data();
+ }
+
+ #[test]
+ #[should_panic]
+ fn required_panics_when_field_not_set() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let start = b.start_table();
+ let o = b.end_table(start);
+ b.required(o, 4 /* byte offset to first field */, "test field");
+ }
+}
+
+#[cfg(test)]
+mod follow_impls {
+ extern crate flatbuffers;
+ use flatbuffers::Follow;
+ use flatbuffers::field_index_to_field_offset as fi2fo;
+
+ #[test]
+ fn to_u8() {
+ let vec: Vec<u8> = vec![255, 3];
+ let fs: flatbuffers::FollowStart<u8> = flatbuffers::FollowStart::new();
+ assert_eq!(fs.self_follow(&vec[..], 1), 3);
+ }
+
+ #[test]
+ fn to_u16() {
+ let vec: Vec<u8> = vec![255, 255, 3, 4];
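+        // The two bytes at offset 2 are [3, 4]: little-endian 0x0403 == 1027.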
+ let fs: flatbuffers::FollowStart<u16> = flatbuffers::FollowStart::new();
+ assert_eq!(fs.self_follow(&vec[..], 2), 1027);
+ }
+
+ #[test]
+ fn to_f32() {
+ let vec: Vec<u8> = vec![255, 255, 255, 255, /* start of value */ 208, 15, 73, 64];
+ let fs: flatbuffers::FollowStart<f32> = flatbuffers::FollowStart::new();
+ assert_eq!(fs.self_follow(&vec[..], 4), 3.14159);
+ }
+
+ #[test]
+ fn to_string() {
+ let vec: Vec<u8> = vec![255,255,255,255, 3, 0, 0, 0, 'f' as u8, 'o' as u8, 'o' as u8, 0];
+ let off: flatbuffers::FollowStart<&str> = flatbuffers::FollowStart::new();
+ assert_eq!(off.self_follow(&vec[..], 4), "foo");
+ }
+
+ #[test]
+ fn to_byte_slice() {
+ let vec: Vec<u8> = vec![255, 255, 255, 255, 4, 0, 0, 0, 1, 2, 3, 4];
+ let off: flatbuffers::FollowStart<&[u8]> = flatbuffers::FollowStart::new();
+ assert_eq!(off.self_follow(&vec[..], 4), &[1, 2, 3, 4][..]);
+ }
+
+ #[test]
+ fn to_byte_vector() {
+ let vec: Vec<u8> = vec![255, 255, 255, 255, 4, 0, 0, 0, 1, 2, 3, 4];
+ let off: flatbuffers::FollowStart<flatbuffers::Vector<u8>> = flatbuffers::FollowStart::new();
+ assert_eq!(off.self_follow(&vec[..], 4).safe_slice(), &[1, 2, 3, 4][..]);
+ }
+
+ #[test]
+    fn to_byte_string_zero_terminated() {
+ let vec: Vec<u8> = vec![255, 255, 255, 255, 3, 0, 0, 0, 1, 2, 3, 0];
+ let off: flatbuffers::FollowStart<&[u8]> = flatbuffers::FollowStart::new();
+ assert_eq!(off.self_follow(&vec[..], 4), &[1, 2, 3][..]);
+ }
+
+ #[cfg(target_endian = "little")]
+ #[test]
+ fn to_slice_of_u16() {
+ let vec: Vec<u8> = vec![255, 255, 255, 255, 2, 0, 0, 0, 1, 2, 3, 4];
+ let off: flatbuffers::FollowStart<&[u16]> = flatbuffers::FollowStart::new();
+ assert_eq!(off.self_follow(&vec[..], 4), &vec![513, 1027][..]);
+ }
+
+ #[test]
+ fn to_vector_of_u16() {
+ let vec: Vec<u8> = vec![255, 255, 255, 255, 2, 0, 0, 0, 1, 2, 3, 4];
+ let off: flatbuffers::FollowStart<flatbuffers::Vector<u16>> = flatbuffers::FollowStart::new();
+ assert_eq!(off.self_follow(&vec[..], 4).len(), 2);
+ assert_eq!(off.self_follow(&vec[..], 4).get(0), 513);
+ assert_eq!(off.self_follow(&vec[..], 4).get(1), 1027);
+ }
+
+ #[test]
+ fn to_struct() {
+ #[derive(Copy, Clone, Debug, PartialEq)]
+ #[repr(C, packed)]
+ struct FooStruct {
+ a: i8,
+ b: u8,
+ c: i16,
+ }
+ impl<'a> flatbuffers::Follow<'a> for &'a FooStruct {
+ type Inner = &'a FooStruct;
+ #[inline(always)]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::follow_cast_ref::<FooStruct>(buf, loc)
+ }
+ }
+
+ let vec: Vec<u8> = vec![255, 255, 255, 255, 1, 2, 3, 4];
+ let off: flatbuffers::FollowStart<&FooStruct> = flatbuffers::FollowStart::new();
+ assert_eq!(*off.self_follow(&vec[..], 4), FooStruct{a: 1, b: 2, c: 1027});
+ }
+
+ #[test]
+ fn to_vector_of_offset_to_string_elements() {
+ let buf: Vec<u8> = vec![/* vec len */ 1, 0, 0, 0, /* offset to string */ 4, 0, 0, 0, /* str length */ 3, 0, 0, 0, 'f' as u8, 'o' as u8, 'o' as u8, 0];
+ let s: flatbuffers::FollowStart<flatbuffers::Vector<flatbuffers::ForwardsUOffset<&str>>> = flatbuffers::FollowStart::new();
+ assert_eq!(s.self_follow(&buf[..], 0).len(), 1);
+ assert_eq!(s.self_follow(&buf[..], 0).get(0), "foo");
+ }
+
+ #[test]
+ fn to_slice_of_struct_elements() {
+ #[derive(Copy, Clone, Debug, PartialEq)]
+ #[repr(C, packed)]
+ struct FooStruct {
+ a: i8,
+ b: u8,
+ c: i16,
+ }
+ impl flatbuffers::SafeSliceAccess for FooStruct {}
+ impl<'a> flatbuffers::Follow<'a> for FooStruct {
+ type Inner = &'a FooStruct;
+ #[inline(always)]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::follow_cast_ref::<FooStruct>(buf, loc)
+ }
+ }
+
+ let buf: Vec<u8> = vec![1, 0, 0, 0, /* struct data */ 1, 2, 3, 4];
+ let fs: flatbuffers::FollowStart<flatbuffers::Vector<FooStruct>> = flatbuffers::FollowStart::new();
+ assert_eq!(fs.self_follow(&buf[..], 0).safe_slice(), &vec![FooStruct{a: 1, b: 2, c: 1027}][..]);
+ }
+
+ #[test]
+ fn to_vector_of_struct_elements() {
+ #[derive(Copy, Clone, Debug, PartialEq)]
+ #[repr(C, packed)]
+ struct FooStruct {
+ a: i8,
+ b: u8,
+ c: i16,
+ }
+ impl<'a> flatbuffers::Follow<'a> for FooStruct {
+ type Inner = &'a FooStruct;
+ #[inline(always)]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::follow_cast_ref::<FooStruct>(buf, loc)
+ }
+ }
+
+ let buf: Vec<u8> = vec![1, 0, 0, 0, /* struct data */ 1, 2, 3, 4];
+ let fs: flatbuffers::FollowStart<flatbuffers::Vector<FooStruct>> = flatbuffers::FollowStart::new();
+ assert_eq!(fs.self_follow(&buf[..], 0).len(), 1);
+ assert_eq!(fs.self_follow(&buf[..], 0).get(0), &FooStruct{a: 1, b: 2, c: 1027});
+ }
+
+ #[test]
+ fn to_root_to_empty_table() {
+ let buf: Vec<u8> = vec![
+ 12, 0, 0, 0, // offset to root table
+ // enter vtable
+ 4, 0, // vtable len
+ 0, 0, // inline size
+ 255, 255, 255, 255, // canary
+ // enter table
+ 8, 0, 0, 0, // vtable location
+ ];
+ let fs: flatbuffers::FollowStart<flatbuffers::ForwardsUOffset<flatbuffers::Table>> = flatbuffers::FollowStart::new();
+ assert_eq!(fs.self_follow(&buf[..], 0), flatbuffers::Table::new(&buf[..], 12));
+ }
+
+ #[test]
+ fn to_root_table_get_slot_scalar_u8() {
+ let buf: Vec<u8> = vec![
+ 14, 0, 0, 0, // offset to root table
+ // enter vtable
+ 6, 0, // vtable len
+ 2, 0, // inline size
+ 5, 0, // value loc
+ 255, 255, 255, 255, // canary
+ // enter table
+ 10, 0, 0, 0, // vtable location
+ 0, 99 // value (with padding)
+ ];
+ let fs: flatbuffers::FollowStart<flatbuffers::ForwardsUOffset<flatbuffers::Table>> = flatbuffers::FollowStart::new();
+ let tab = fs.self_follow(&buf[..], 0);
+ assert_eq!(tab.get::<u8>(fi2fo(0), Some(123)), Some(99));
+ }
+
+ #[test]
+ fn to_root_to_table_get_slot_scalar_u8_default_via_vtable_len() {
+ let buf: Vec<u8> = vec![
+ 12, 0, 0, 0, // offset to root table
+ // enter vtable
+ 4, 0, // vtable len
+ 2, 0, // inline size
+ 255, 255, 255, 255, // canary
+ // enter table
+ 8, 0, 0, 0, // vtable location
+ ];
+ let fs: flatbuffers::FollowStart<flatbuffers::ForwardsUOffset<flatbuffers::Table>> = flatbuffers::FollowStart::new();
+ let tab = fs.self_follow(&buf[..], 0);
+ assert_eq!(tab.get::<u8>(fi2fo(0), Some(123)), Some(123));
+ }
+
+ #[test]
+ fn to_root_to_table_get_slot_scalar_u8_default_via_vtable_zero() {
+ let buf: Vec<u8> = vec![
+ 14, 0, 0, 0, // offset to root table
+ // enter vtable
+ 6, 0, // vtable len
+ 2, 0, // inline size
+ 0, 0, // zero means use the default value
+ 255, 255, 255, 255, // canary
+ // enter table
+ 10, 0, 0, 0, // vtable location
+ ];
+ let fs: flatbuffers::FollowStart<flatbuffers::ForwardsUOffset<flatbuffers::Table>> = flatbuffers::FollowStart::new();
+ let tab = fs.self_follow(&buf[..], 0);
+ assert_eq!(tab.get::<u8>(fi2fo(0), Some(123)), Some(123));
+ }
+
+ #[test]
+ fn to_root_to_table_get_slot_string_multiple_types() {
+ let buf: Vec<u8> = vec![
+ 14, 0, 0, 0, // offset to root table
+ // enter vtable
+ 6, 0, // vtable len
+ 2, 0, // inline size
+ 4, 0, // value loc
+ 255, 255, 255, 255, // canary
+ // enter table
+ 10, 0, 0, 0, // vtable location
+ 8, 0, 0, 0, // offset to string
+ // leave table
+ 255, 255, 255, 255, // canary
+ // enter string
+ 3, 0, 0, 0, 109, 111, 111, 0 // string length and contents
+ ];
+ let tab = <flatbuffers::ForwardsUOffset<flatbuffers::Table>>::follow(&buf[..], 0);
+ assert_eq!(tab.get::<flatbuffers::ForwardsUOffset<&str>>(fi2fo(0), None), Some("moo"));
+ assert_eq!(tab.get::<flatbuffers::ForwardsUOffset<&[u8]>>(fi2fo(0), None), Some(&vec![109, 111, 111][..]));
+ let v = tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<u8>>>(fi2fo(0), None).unwrap();
+ assert_eq!(v.len(), 3);
+ assert_eq!(v.get(0), 109);
+ assert_eq!(v.get(1), 111);
+ assert_eq!(v.get(2), 111);
+ }
+
+ #[test]
+ fn to_root_to_table_get_slot_string_multiple_types_default_via_vtable_len() {
+ let buf: Vec<u8> = vec![
+ 12, 0, 0, 0, // offset to root table
+ // enter vtable
+ 4, 0, // vtable len
+ 4, 0, // table inline len
+ 255, 255, 255, 255, // canary
+ // enter table
+ 8, 0, 0, 0, // vtable location
+ ];
+ let tab = <flatbuffers::ForwardsUOffset<flatbuffers::Table>>::follow(&buf[..], 0);
+ assert_eq!(tab.get::<flatbuffers::ForwardsUOffset<&str>>(fi2fo(0), Some("abc")), Some("abc"));
+ assert_eq!(tab.get::<flatbuffers::ForwardsUOffset<&[u8]>>(fi2fo(0), Some(&vec![70, 71, 72][..])), Some(&vec![70, 71, 72][..]));
+
+ let default_vec_buf: Vec<u8> = vec![3, 0, 0, 0, 70, 71, 72, 0];
+ let default_vec = flatbuffers::Vector::new(&default_vec_buf[..], 0);
+ let v = tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<u8>>>(fi2fo(0), Some(default_vec)).unwrap();
+ assert_eq!(v.len(), 3);
+ assert_eq!(v.get(0), 70);
+ assert_eq!(v.get(1), 71);
+ assert_eq!(v.get(2), 72);
+ }
+
+ #[test]
+ fn to_root_to_table_get_slot_string_multiple_types_default_via_vtable_zero() {
+ let buf: Vec<u8> = vec![
+ 14, 0, 0, 0, // offset to root table
+ // enter vtable
+ 6, 0, // vtable len
+ 2, 0, // inline size
+ 0, 0, // value loc
+ 255, 255, 255, 255, // canary
+ // enter table
+ 10, 0, 0, 0, // vtable location
+ ];
+ let tab = <flatbuffers::ForwardsUOffset<flatbuffers::Table>>::follow(&buf[..], 0);
+ assert_eq!(tab.get::<flatbuffers::ForwardsUOffset<&str>>(fi2fo(0), Some("abc")), Some("abc"));
+ assert_eq!(tab.get::<flatbuffers::ForwardsUOffset<&[u8]>>(fi2fo(0), Some(&vec![70, 71, 72][..])), Some(&vec![70, 71, 72][..]));
+
+ let default_vec_buf: Vec<u8> = vec![3, 0, 0, 0, 70, 71, 72, 0];
+ let default_vec = flatbuffers::Vector::new(&default_vec_buf[..], 0);
+ let v = tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<u8>>>(fi2fo(0), Some(default_vec)).unwrap();
+ assert_eq!(v.len(), 3);
+ assert_eq!(v.get(0), 70);
+ assert_eq!(v.get(1), 71);
+ assert_eq!(v.get(2), 72);
+ }
+}
+
+#[cfg(test)]
+mod push_impls {
+ extern crate flatbuffers;
+
+ use super::my_game;
+
+ fn check<'a>(b: &'a flatbuffers::FlatBufferBuilder, want: &'a [u8]) {
+ let got = b.unfinished_data();
+ assert_eq!(want, got);
+ }
+
+ #[test]
+ fn push_u8() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.push(123u8);
+ check(&b, &[123]);
+ }
+
+ #[test]
+    fn push_u32() {
+        let mut b = flatbuffers::FlatBufferBuilder::new();
+        b.push(0x12345678u32);
+ check(&b, &[0x78, 0x56, 0x34, 0x12]);
+ }
+
+ #[test]
+ fn push_f64() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.push(3.14159265359f64);
+ check(&b, &[234, 46, 68, 84, 251, 33, 9, 64]);
+ }
+
+ #[test]
+ fn push_generated_struct() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.push(my_game::example::Test::new(10, 20));
+ check(&b, &[10, 0, 20, 0]);
+ }
+
+ #[test]
+ fn push_u8_vector_with_offset_with_alignment() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off = b.create_vector(&[1u8, 2, 3, 4, 5, 6, 7, 8, 9][..]);
+ b.push(off);
+ check(&b, &[/* loc */ 4, 0, 0, 0, /* len */ 9, 0, 0, 0, /* val */ 1, 2, 3, 4, 5, 6, 7, 8, 9, /* padding */ 0, 0, 0]);
+ }
+
+ #[test]
+ fn push_u8_u16_alignment() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.push(1u8);
+ b.push(2u16);
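+        // One pad byte separates the u16 from the previously-pushed u8 so that the
+        // u16 stays 2-byte aligned (the builder writes back-to-front).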
+ check(&b, &[2, 0, 0, 1]);
+ }
+
+ #[test]
+ fn push_u8_u32_alignment() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.push(1u8);
+ b.push(2u32);
+ check(&b, &[2, 0, 0, 0, 0, 0, 0, 1]);
+ }
+
+ #[test]
+ fn push_u8_u64_alignment() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.push(1u8);
+ b.push(2u64);
+ check(&b, &[2, 0, 0, 0,
+ 0, 0, 0, 0,
+ 0, 0, 0, 0,
+ 0, 0, 0, 1]);
+ }
+}
+
+#[cfg(test)]
+mod vtable_deduplication {
+ extern crate flatbuffers;
+ use flatbuffers::field_index_to_field_offset as fi2fo;
+
+ fn check<'a>(b: &'a flatbuffers::FlatBufferBuilder, want: &'a [u8]) {
+ let got = b.unfinished_data();
+ assert_eq!(want, got);
+ }
+
+ #[test]
+ fn one_empty_table() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let start0 = b.start_table();
+ b.end_table(start0);
+ check(&b, &[
+ 4, 0, // vtable size in bytes
+ 4, 0, // object inline data in bytes
+
+ 4, 0, 0, 0, // backwards offset to vtable
+ ]);
+ }
+
+ #[test]
+ fn two_empty_tables_are_deduplicated() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let start0 = b.start_table();
+ b.end_table(start0);
+ let start1 = b.start_table();
+ b.end_table(start1);
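+        // The second table's vtable is byte-identical to the first's, so only a
+        // forwards offset to the shared vtable is written for it.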
+ check(&b, &[
+ 252, 255, 255, 255, // forwards offset to vtable
+
+ 4, 0, // vtable size in bytes
+ 4, 0, // object inline data in bytes
+
+ 4, 0, 0, 0, // backwards offset to vtable
+ ]);
+ }
+
+ #[test]
+ fn two_tables_with_two_conveniently_sized_inline_elements_are_deduplicated() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let start0 = b.start_table();
+ b.push_slot::<u64>(fi2fo(0), 100, 0);
+ b.push_slot::<u32>(fi2fo(1), 101, 0);
+ b.end_table(start0);
+ let start1 = b.start_table();
+ b.push_slot::<u64>(fi2fo(0), 200, 0);
+ b.push_slot::<u32>(fi2fo(1), 201, 0);
+ b.end_table(start1);
+ check(&b, &[
+ 240, 255, 255, 255, // forwards offset to vtable
+
+ 201, 0, 0, 0, // value #1
+ 200, 0, 0, 0, 0, 0, 0, 0, // value #0
+
+ 8, 0, // vtable size in bytes
+ 16, 0, // object inline data in bytes
+ 8, 0, // offset in object for value #0
+ 4, 0, // offset in object for value #1
+
+ 8, 0, 0, 0, // backwards offset to vtable
+ 101, 0, 0, 0, // value #1
+ 100, 0, 0, 0, 0, 0, 0, 0 // value #0
+ ]);
+ }
+
+ #[test]
+ fn many_identical_tables_use_few_vtables() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ for _ in 0..1000 {
+ let start = b.start_table();
+ b.push_slot::<u8>(fi2fo(0), 100, 0);
+ b.push_slot::<u32>(fi2fo(1), 101, 0);
+ b.end_table(start);
+ }
+ assert!(b.num_written_vtables() <= 10);
+ }
+}
+
+#[cfg(test)]
+mod byte_layouts {
+ extern crate flatbuffers;
+ use flatbuffers::field_index_to_field_offset as fi2fo;
+
+ fn check<'a>(b: &'a flatbuffers::FlatBufferBuilder, want: &'a [u8]) {
+ let got = b.unfinished_data();
+ assert_eq!(want, got);
+ }
+
+ #[test]
+ fn layout_01_basic_numbers() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.push(true);
+ check(&b, &[1]);
+ b.push(-127i8);
+ check(&b, &[129, 1]);
+ b.push(255u8);
+ check(&b, &[255, 129, 1]);
+ b.push(-32222i16);
+ check(&b, &[0x22, 0x82, 0, 255, 129, 1]); // first pad
+ b.push(0xFEEEu16);
+ check(&b, &[0xEE, 0xFE, 0x22, 0x82, 0, 255, 129, 1]); // no pad this time
+ b.push(-53687092i32);
+ check(&b, &[204, 204, 204, 252, 0xEE, 0xFE, 0x22, 0x82, 0, 255, 129, 1]);
+ b.push(0x98765432u32);
+ check(&b, &[0x32, 0x54, 0x76, 0x98, 204, 204, 204, 252, 0xEE, 0xFE, 0x22, 0x82, 0, 255, 129, 1]);
+ }
+
+ #[test]
+ fn layout_01b_bigger_numbers() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.push(0x1122334455667788u64);
+ check(&b, &[0x88, 0x77, 0x66, 0x55, 0x44, 0x33, 0x22, 0x11]);
+ }
+
+ #[test]
+ fn layout_02_1xbyte_vector() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ check(&b, &[]);
+ b.start_vector::<u8>(1);
+        check(&b, &[0, 0, 0]); // align to 4 bytes
+ b.push(1u8);
+ check(&b, &[1, 0, 0, 0]);
+ b.end_vector::<u8>(1);
+ check(&b, &[1, 0, 0, 0, 1, 0, 0, 0]); // padding
+ }
+
+ #[test]
+ fn layout_03_2xbyte_vector() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.start_vector::<u8>(2);
+        check(&b, &[0, 0]); // align to 4 bytes
+ b.push(1u8);
+ check(&b, &[1, 0, 0]);
+ b.push(2u8);
+ check(&b, &[2, 1, 0, 0]);
+ b.end_vector::<u8>(2);
+ check(&b, &[2, 0, 0, 0, 2, 1, 0, 0]); // padding
+ }
+
+ #[test]
+ fn layout_03b_11xbyte_vector_matches_builder_size() {
+ let mut b = flatbuffers::FlatBufferBuilder::new_with_capacity(12);
+ b.start_vector::<u8>(8);
+
+ let mut gold = vec![0u8; 0];
+ check(&b, &gold[..]);
+
+ for i in 1u8..=8 {
+ b.push(i);
+ gold.insert(0, i);
+ check(&b, &gold[..]);
+ }
+ b.end_vector::<u8>(8);
+ let want = vec![8u8, 0, 0, 0, 8, 7, 6, 5, 4, 3, 2, 1];
+ check(&b, &want[..]);
+ }
+ #[test]
+ fn layout_04_1xuint16_vector() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.start_vector::<u16>(1);
+        check(&b, &[0, 0]); // align to 4 bytes
+ b.push(1u16);
+ check(&b, &[1, 0, 0, 0]);
+ b.end_vector::<u16>(1);
+ check(&b, &[1, 0, 0, 0, 1, 0, 0, 0]); // padding
+ }
+
+ #[test]
+ fn layout_05_2xuint16_vector() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let _off = b.start_vector::<u16>(2);
+        check(&b, &[]); // already aligned to 4 bytes, so no padding is needed
+ b.push(0xABCDu16);
+ check(&b, &[0xCD, 0xAB]);
+ b.push(0xDCBAu16);
+ check(&b, &[0xBA, 0xDC, 0xCD, 0xAB]);
+ b.end_vector::<u16>(2);
+ check(&b, &[2, 0, 0, 0, 0xBA, 0xDC, 0xCD, 0xAB]);
+ }
+
+ #[test]
+ fn layout_06_create_string() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off0 = b.create_string("foo");
+ assert_eq!(8, off0.value());
+ check(&b, b"\x03\x00\x00\x00foo\x00"); // 0-terminated, no pad
+ let off1 = b.create_string("moop");
+ assert_eq!(20, off1.value());
+ check(&b, b"\x04\x00\x00\x00moop\x00\x00\x00\x00\
+ \x03\x00\x00\x00foo\x00"); // 0-terminated, 3-byte pad
+ }
+
+ #[test]
+ fn layout_06b_create_string_unicode() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+        // These characters are Chinese, from blog.golang.org/strings.
+        // We use escape codes here so that editors without Unicode support
+        // aren't bothered:
+ let uni_str = "\u{65e5}\u{672c}\u{8a9e}";
+ let off0 = b.create_string(uni_str);
+ assert_eq!(16, off0.value());
+ check(&b, &[9, 0, 0, 0, 230, 151, 165, 230, 156, 172, 232, 170, 158, 0, // null-terminated, 2-byte pad
+ 0, 0]);
+ }
+
+ #[test]
+ fn layout_06c_create_byte_string() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off0 = b.create_byte_string(b"foo");
+ assert_eq!(8, off0.value());
+ check(&b, b"\x03\x00\x00\x00foo\x00"); // 0-terminated, no pad
+ let off1 = b.create_byte_string(b"moop");
+ assert_eq!(20, off1.value());
+ check(&b, b"\x04\x00\x00\x00moop\x00\x00\x00\x00\
+ \x03\x00\x00\x00foo\x00"); // 0-terminated, 3-byte pad
+ }
+
+ #[test]
+ fn layout_07_empty_vtable() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off0 = b.start_table();
+ check(&b, &[]);
+ b.end_table(off0);
+ check(&b, &[4, 0, // vtable length
+ 4, 0, // length of table including vtable offset
+ 4, 0, 0, 0]); // offset for start of vtable
+ }
+
+ #[test]
+ fn layout_08_vtable_with_one_true_bool() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ check(&b, &[]);
+ let off0 = b.start_table();
+ assert_eq!(0, off0.value());
+ check(&b, &[]);
+ b.push_slot(fi2fo(0), true, false);
+ check(&b, &[1]);
+ let off1 = b.end_table(off0);
+ assert_eq!(8, off1.value());
+ check(&b, &[
+ 6, 0, // vtable bytes
+ 8, 0, // length of object including vtable offset
+ 7, 0, // start of bool value
+ 6, 0, 0, 0, // offset for start of vtable (int32)
+ 0, 0, 0, // padded to 4 bytes
+ 1, // bool value
+ ]);
+ }
+
+ #[test]
+ fn layout_09_vtable_with_one_default_bool() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ check(&b, &[]);
+ let off = b.start_table();
+ check(&b, &[]);
+ b.push_slot(fi2fo(0), false, false);
+ b.end_table(off);
+ check(&b, &[
+ 4, 0, // vtable bytes
+ 4, 0, // end of object from here
+ // entry 1 is zero and not stored.
+ 4, 0, 0, 0, // offset for start of vtable (int32)
+ ]);
+ }
+
+ #[test]
+ fn layout_10_vtable_with_one_int16() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ check(&b, &[]);
+ let off = b.start_table();
+ b.push_slot(fi2fo(0), 0x789Ai16, 0);
+ b.end_table(off);
+ check(&b, &[
+ 6, 0, // vtable bytes
+ 8, 0, // end of object from here
+ 6, 0, // offset to value
+ 6, 0, 0, 0, // offset for start of vtable (int32)
+ 0, 0, // padding to 4 bytes
+ 0x9A, 0x78,
+ ]);
+ }
+
+ #[test]
+ fn layout_11_vtable_with_two_int16() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off = b.start_table();
+ b.push_slot(fi2fo(0), 0x3456i16, 0);
+ b.push_slot(fi2fo(1), 0x789Ai16, 0);
+ b.end_table(off);
+ check(&b, &[
+ 8, 0, // vtable bytes
+ 8, 0, // end of object from here
+ 6, 0, // offset to value 0
+ 4, 0, // offset to value 1
+ 8, 0, 0, 0, // offset for start of vtable (int32)
+ 0x9A, 0x78, // value 1
+ 0x56, 0x34, // value 0
+ ]);
+ }
+
+ #[test]
+ fn layout_12_vtable_with_int16_and_bool() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off = b.start_table();
+ b.push_slot(fi2fo(0), 0x3456i16, 0);
+ b.push_slot(fi2fo(1), true, false);
+ b.end_table(off);
+ check(&b, &[
+ 8, 0, // vtable bytes
+ 8, 0, // end of object from here
+ 6, 0, // offset to value 0
+ 5, 0, // offset to value 1
+ 8, 0, 0, 0, // offset for start of vtable (int32)
+ 0, // padding
+ 1, // value 1
+ 0x56, 0x34, // value 0
+ ]);
+ }
+
+ #[test]
+ fn layout_12b_vtable_with_empty_vector() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.start_vector::<u8>(0);
+ let vecend = b.end_vector::<u8>(0);
+ let off = b.start_table();
+ b.push_slot_always(fi2fo(0), vecend);
+ b.end_table(off);
+ check(&b, &[
+ 6, 0, // vtable bytes
+ 8, 0,
+ 4, 0, // offset to vector offset
+ 6, 0, 0, 0, // offset for start of vtable (int32)
+ 4, 0, 0, 0,
+ 0, 0, 0, 0, // length of vector (not in struct)
+ ]);
+ }
+
+ #[test]
+ fn layout_12c_vtable_with_empty_vector_of_byte_and_some_scalars() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.start_vector::<u8>(0);
+ let vecend = b.end_vector::<u8>(0);
+ let off = b.start_table();
+ b.push_slot::<i16>(fi2fo(0), 55i16, 0);
+ b.push_slot_always::<flatbuffers::WIPOffset<_>>(fi2fo(1), vecend);
+ b.end_table(off);
+ check(&b, &[
+ 8, 0, // vtable bytes
+ 12, 0, // length of object
+ 10, 0, // offset to value 0
+ 4, 0, // offset to vector offset
+ 8, 0, 0, 0, // vtable loc
+ 8, 0, 0, 0, // value 1 (uoffset to vector)
+ 0, 0, 55, 0, // padding, then value 0
+
+ 0, 0, 0, 0, // length of vector (not in struct)
+ ]);
+ }
+ #[test]
+ fn layout_13_vtable_with_1_int16_and_2_vector_of_i16() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.start_vector::<i16>(2);
+ b.push(0x1234i16);
+ b.push(0x5678i16);
+ let vecend = b.end_vector::<i16>(2);
+ let off = b.start_table();
+ b.push_slot_always(fi2fo(1), vecend);
+ b.push_slot(fi2fo(0), 55i16, 0);
+ b.end_table(off);
+ check(&b, &[
+ 8, 0, // vtable bytes
+ 12, 0, // length of object
+ 6, 0, // offset to value 0 (from table start)
+ 8, 0, // offset to value 1 (from table start)
+ 8, 0, 0, 0, // offset for start of vtable (int32)
+ 0, 0, // padding
+ 55, 0, // value 0
+ 4, 0, 0, 0, // vector position from here
+ 2, 0, 0, 0, // length of vector (uint32)
+ 0x78, 0x56, // vector value 1
+ 0x34, 0x12, // vector value 0
+ ]);
+ }
+ #[test]
+ fn layout_14_vtable_with_1_struct_of_int8_and_int16_and_int32() {
+ #[derive(Copy, Clone, Debug, Eq, PartialEq)]
+ #[repr(C, packed)]
+ struct foo {
+ a: i32,
+ _pad0: [u8; 2],
+ b: i16,
+ _pad1: [u8; 3],
+ c: i8,
+ _pad2: [u8; 4],
+ }
+ assert_eq!(::std::mem::size_of::<foo>(), 16);
+ impl<'b> flatbuffers::Push for &'b foo {
+ type Output = foo;
+ fn push<'a>(&'a self, dst: &'a mut [u8], _rest: &'a [u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(*self as *const foo as *const u8, ::std::mem::size_of::<foo>())
+ };
+ dst.copy_from_slice(src);
+ }
+ }
+
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off = b.start_table();
+ let x = foo{a: 0x12345678i32.to_le(), _pad0: [0,0], b: 0x1234i16.to_le(), _pad1: [0, 0, 0], c: 0x12i8.to_le(), _pad2: [0, 0, 0, 0]};
+ b.push_slot_always(fi2fo(0), &x);
+ b.end_table(off);
+ check(&b, &[
+ 6, 0, // vtable bytes
+ 20, 0, // end of object from here
+ 4, 0, // start of struct from here
+ 6, 0, 0, 0, // offset for start of vtable (int32)
+
+ 0x78, 0x56, 0x34, 0x12, // value a
+ 0, 0, // padding
+ 0x34, 0x12, // value b
+ 0, 0, 0, // padding
+ 0x12, // value c
+ 0, 0, 0, 0, // padding
+ ]);
+ }
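+
+ // Note on the hand-written Push impl above: the struct mirrors the wire
+ // layout of a FlatBuffers struct directly, with explicit `_pad` fields so
+ // that `#[repr(C, packed)]` yields exactly 16 bytes, and with every scalar
+ // pre-converted via `to_le()`. That lets `push` be a plain byte copy of the
+ // struct's memory into the buffer, which is presumably the same strategy
+ // the generated struct code uses; it is written out by hand here to keep
+ // the layout test self-contained.
+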
+ #[test]
+ fn layout_15_vtable_with_1_vector_of_4_int8() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ b.start_vector::<i8>(4);
+ b.push(33i8);
+ b.push(44i8);
+ b.push(55i8);
+ b.push(66i8);
+ let vecend = b.end_vector::<i8>(4);
+ let off = b.start_table();
+ b.push_slot_always(fi2fo(0), vecend);
+ b.end_table(off);
+ check(&b, &[
+ 6, 0, // vtable bytes
+ 8, 0, // length of object
+ 4, 0, // offset of vector offset
+ 6, 0, 0, 0, // offset for start of vtable (int32)
+ 4, 0, 0, 0, // vector start offset
+
+ 4, 0, 0, 0, // vector length
+ 66, // vector value 3
+ 55, // vector value 2
+ 44, // vector value 1
+ 33, // vector value 0
+ ]);
+ }
+
+ #[test]
+ fn layout_16_table_with_some_elements() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off = b.start_table();
+ b.push_slot(fi2fo(0), 33i8, 0);
+ b.push_slot(fi2fo(1), 66i16, 0);
+ let off2 = b.end_table(off);
+ b.finish_minimal(off2);
+
+ check(&b, &[
+ 12, 0, 0, 0, // root of table: points to vtable offset
+
+ 8, 0, // vtable bytes
+ 8, 0, // end of object from here
+ 7, 0, // start of value 0
+ 4, 0, // start of value 1
+
+ 8, 0, 0, 0, // offset for start of vtable (int32)
+
+ 66, 0, // value 1
+ 0, // padding
+ 33, // value 0
+ ]);
+ }
+
+ #[test]
+ fn layout_17_one_unfinished_table_and_one_finished_table() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ {
+ let off = b.start_table();
+ b.push_slot(fi2fo(0), 33i8, 0);
+ b.push_slot(fi2fo(1), 44i8, 0);
+ b.end_table(off);
+ }
+
+ {
+ let off = b.start_table();
+ b.push_slot(fi2fo(0), 55i8, 0);
+ b.push_slot(fi2fo(1), 66i8, 0);
+ b.push_slot(fi2fo(2), 77i8, 0);
+ let off2 = b.end_table(off);
+ b.finish_minimal(off2);
+ }
+
+ check(&b, &[
+ 16, 0, 0, 0, // root of table: points to object
+ 0, 0, // padding
+
+ 10, 0, // vtable bytes
+ 8, 0, // size of object
+ 7, 0, // start of value 0
+ 6, 0, // start of value 1
+ 5, 0, // start of value 2
+ 10, 0, 0, 0, // offset for start of vtable (int32)
+ 0, // padding
+ 77, // value 2
+ 66, // value 1
+ 55, // value 0
+
+ //12, 0, 0, 0, // root of table: points to object
+
+ 8, 0, // vtable bytes
+ 8, 0, // size of object
+ 7, 0, // start of value 0
+ 6, 0, // start of value 1
+ 8, 0, 0, 0, // offset for start of vtable (int32)
+ 0, 0, // padding
+ 44, // value 1
+ 33, // value 0
+ ]);
+ }
+
+ #[test]
+ fn layout_18_a_bunch_of_bools() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off = b.start_table();
+ b.push_slot(fi2fo(0), true, false);
+ b.push_slot(fi2fo(1), true, false);
+ b.push_slot(fi2fo(2), true, false);
+ b.push_slot(fi2fo(3), true, false);
+ b.push_slot(fi2fo(4), true, false);
+ b.push_slot(fi2fo(5), true, false);
+ b.push_slot(fi2fo(6), true, false);
+ b.push_slot(fi2fo(7), true, false);
+ let off2 = b.end_table(off);
+ b.finish_minimal(off2);
+
+ check(&b, &[
+ 24, 0, 0, 0, // root of table: points to vtable offset
+
+ 20, 0, // vtable bytes
+ 12, 0, // size of object
+ 11, 0, // start of value 0
+ 10, 0, // start of value 1
+ 9, 0, // start of value 2
+ 8, 0, // start of value 3
+ 7, 0, // start of value 4
+ 6, 0, // start of value 5
+ 5, 0, // start of value 6
+ 4, 0, // start of value 7
+ 20, 0, 0, 0, // vtable offset
+
+ 1, // value 7
+ 1, // value 6
+ 1, // value 5
+ 1, // value 4
+ 1, // value 3
+ 1, // value 2
+ 1, // value 1
+ 1, // value 0
+ ]);
+ }
+
+ #[test]
+ fn layout_19_three_bools() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off = b.start_table();
+ b.push_slot(fi2fo(0), true, false);
+ b.push_slot(fi2fo(1), true, false);
+ b.push_slot(fi2fo(2), true, false);
+ let off2 = b.end_table(off);
+ b.finish_minimal(off2);
+
+ check(&b, &[
+ 16, 0, 0, 0, // root of table: points to vtable offset
+
+ 0, 0, // padding
+
+ 10, 0, // vtable bytes
+ 8, 0, // size of object
+ 7, 0, // start of value 0
+ 6, 0, // start of value 1
+ 5, 0, // start of value 2
+ 10, 0, 0, 0, // vtable offset from here
+
+ 0, // padding
+ 1, // value 2
+ 1, // value 1
+ 1, // value 0
+ ]);
+ }
+
+ #[test]
+ fn layout_20_some_floats() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off = b.start_table();
+ b.push_slot(fi2fo(0), 1.0f32, 0.0);
+ b.end_table(off);
+
+ check(&b, &[
+ 6, 0, // vtable bytes
+ 8, 0, // size of object
+ 4, 0, // start of value 0
+ 6, 0, 0, 0, // vtable offset
+
+ 0, 0, 128, 63, // value 0
+ ]);
+ }
+
+ #[test]
+ fn layout_21_vtable_defaults() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off = b.start_table();
+ b.push_slot::<i8>(fi2fo(0), 1, 1);
+ b.push_slot::<i8>(fi2fo(1), 3, 2);
+ b.push_slot::<i8>(fi2fo(2), 3, 3);
+ b.end_table(off);
+ check(&b, &[
+ 8, 0, // vtable size in bytes
+ 8, 0, // object inline data in bytes
+ 0, 0, // entry 1/3: 0 => default
+ 7, 0, // entry 2/3: 7 => table start + 7 bytes
+ // entry 3/3: not present => default
+ 8, 0, 0, 0, // offset for start of vtable (int32)
+ 0, 0, 0, // padding
+ 3, // value 2/3
+ ]);
+ }
+
+ #[test]
+ fn layout_22_root() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off = b.start_table();
+ // skipped: b.push_slot_scalar::<i16>(0, 1, 1);
+ b.push_slot::<i16>(fi2fo(1), 3, 2);
+ b.push_slot::<i16>(fi2fo(2), 3, 3);
+ let table_end = b.end_table(off);
+ b.finish_minimal(table_end);
+ check(&b, &[
+ 12, 0, 0, 0, // root
+
+ 8, 0, // vtable size in bytes
+ 8, 0, // object inline data in bytes
+ 0, 0, // entry 1/3: 0 => default
+ 6, 0, // entry 2/3: 6 => table start + 6 bytes
+ // entry 3/3: not present => default
+ 8, 0, 0, 0, // offset for start of vtable (int32)
+ 0, 0, // padding
+ 3, 0, // value 2/3
+ ]);
+ }
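+
+ // The two tests above exercise both default-value optimizations: a field
+ // whose value equals its default is not written at all (its vtable entry
+ // stays 0, as in entry 1/3), and a trailing run of such entries is trimmed
+ // from the end of the vtable entirely (entry 3/3 is simply absent).
+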
+ #[test]
+ fn layout_23_varied_slots_and_root() {
+ let mut b = flatbuffers::FlatBufferBuilder::new();
+ let off = b.start_table();
+ b.push_slot::<i16>(fi2fo(0), 1, 0);
+ b.push_slot::<u8>(fi2fo(1), 2, 0);
+ b.push_slot::<f32>(fi2fo(2), 3.0, 0.0);
+ let table_end = b.end_table(off);
+ b.finish_minimal(table_end);
+ check(&b, &[
+ 16, 0, 0, 0, // root
+ 0, 0, // padding
+ 10, 0, // vtable bytes
+ 12, 0, // object inline data size
+ 10, 0, // offset to value 0 (i16)
+ 9, 0, // offset to value 1 (u8)
+ 4, 0, // offset to value 2 (f32)
+ 10, 0, 0, 0, // offset for start of vtable (int32)
+ 0, 0, 64, 64, // value 2 => 3.0 (f32)
+ 0, // padding
+ 2, // value 1 => 2 (u8)
+ 1, 0, // value 0 => 1 (i16)
+ }
+}
+
+// this is not technically a test, but we want to keep the generated wire file
+// up-to-date, and the simplest way to do that is to regenerate it every time
+// the tests are run.
+#[test]
+fn write_example_wire_data_to_file() {
+ let b = &mut flatbuffers::FlatBufferBuilder::new();
+ create_serialized_example_with_generated_code(b);
+
+ use ::std::io::Write;
+ let mut f = std::fs::File::create("../monsterdata_rust_wire.mon").unwrap();
+ f.write_all(b.finished_data()).unwrap();
+}
+
+fn load_file(filename: &str) -> Vec<u8> {
+ use std::io::Read;
+ let mut f = std::fs::File::open(filename).expect("file does not exist");
+ let mut buf = Vec::new();
+ f.read_to_end(&mut buf).expect("file reading failed");
+ buf
+}
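+
+// A sketch (not wired into the test suite) of how `load_file` pairs with the
+// writer above: read wire data back in and hand it to the generated root
+// accessor. The accessor path `my_game::example::get_root_as_monster`, the
+// relative path to the canonical .mon file, and the asserted field values are
+// assumptions based on the generated code and example data used elsewhere in
+// this file; adjust them if they differ.
+#[allow(dead_code)]
+fn example_of_reading_wire_data_back() {
+    // Path assumed relative to tests/rust_usage_test/, like the writer above.
+    let buf = load_file("../monsterdata_test.mon");
+    // Assumed generated accessor; see monster_test_generated.rs.
+    let monster = my_game::example::get_root_as_monster(&buf[..]);
+    assert_eq!(monster.hp(), 80); // assumed canonical value
+    assert_eq!(monster.name(), "MyMonster"); // assumed canonical value
+}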