Diffstat (limited to 'chromium/third_party/flatbuffers')
-rw-r--r--  chromium/third_party/flatbuffers/README.chromium | 4
-rw-r--r--  chromium/third_party/flatbuffers/src/.appveyor/check-generate-code.bat | 6
-rw-r--r--  chromium/third_party/flatbuffers/src/.bazelci/presubmit.yml | 16
-rw-r--r--  chromium/third_party/flatbuffers/src/.github/workflows/build.yml | 59
-rw-r--r--  chromium/third_party/flatbuffers/src/BUILD.bazel | 29
-rw-r--r--  chromium/third_party/flatbuffers/src/CMake/Version.cmake | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/CMakeLists.txt | 81
-rw-r--r--  chromium/third_party/flatbuffers/src/WORKSPACE | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/android/app/build.gradle | 10
-rw-r--r--  chromium/third_party/flatbuffers/src/android/app/src/main/cpp/flatbuffers/CMakeLists.txt | 21
-rw-r--r--  chromium/third_party/flatbuffers/src/benchmarks/CMakeLists.txt | 81
-rw-r--r--  chromium/third_party/flatbuffers/src/benchmarks/cpp/bench.h | 19
-rw-r--r--  chromium/third_party/flatbuffers/src/benchmarks/cpp/benchmark_main.cpp | 96
-rw-r--r--  chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/bench.fbs | 52
-rw-r--r--  chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/bench_generated.h | 347
-rw-r--r--  chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/fb_bench.cpp | 80
-rw-r--r--  chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/fb_bench.h | 23
-rw-r--r--  chromium/third_party/flatbuffers/src/benchmarks/cpp/raw/raw_bench.cpp | 109
-rw-r--r--  chromium/third_party/flatbuffers/src/benchmarks/cpp/raw/raw_bench.h | 10
-rw-r--r--  chromium/third_party/flatbuffers/src/build_defs.bzl | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/dart/CHANGELOG.md | 22
-rw-r--r--  chromium/third_party/flatbuffers/src/dart/README.md | 22
-rw-r--r--  chromium/third_party/flatbuffers/src/dart/analysis_options.yaml | 1
-rw-r--r--  chromium/third_party/flatbuffers/src/dart/example/example.dart | 50
-rw-r--r--  chromium/third_party/flatbuffers/src/dart/example/monster_my_game.sample_generated.dart | 99
-rw-r--r--  chromium/third_party/flatbuffers/src/dart/lib/flat_buffers.dart | 775
-rw-r--r--  chromium/third_party/flatbuffers/src/dart/lib/src/builder.dart | 80
-rw-r--r--  chromium/third_party/flatbuffers/src/dart/lib/src/reference.dart | 19
-rw-r--r--  chromium/third_party/flatbuffers/src/dart/lib/src/types.dart | 2
-rwxr-xr-x  chromium/third_party/flatbuffers/src/dart/publish.sh | 13
-rw-r--r--  chromium/third_party/flatbuffers/src/dart/pubspec.yaml | 19
-rw-r--r--  chromium/third_party/flatbuffers/src/docs/source/CUsage.md | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/docs/source/Compiler.md | 6
-rw-r--r--  chromium/third_party/flatbuffers/src/docs/source/CppUsage.md | 101
-rw-r--r--  chromium/third_party/flatbuffers/src/docs/source/Internals.md | 4
-rw-r--r--  chromium/third_party/flatbuffers/src/docs/source/JavaScriptUsage.md | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/docs/source/RustUsage.md | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/docs/source/Schemas.md | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/docs/source/Tutorial.md | 8
-rw-r--r--  chromium/third_party/flatbuffers/src/docs/source/TypeScriptUsage.md | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/docs/source/WhitePaper.md | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/grpc/README.md | 5
-rwxr-xr-x  chromium/third_party/flatbuffers/src/grpc/build_grpc.sh | 9
-rwxr-xr-x  chromium/third_party/flatbuffers/src/grpc/examples/generate.sh | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/grpc/examples/python/greeter/models/HelloReply.py | 21
-rw-r--r--  chromium/third_party/flatbuffers/src/grpc/examples/python/greeter/models/HelloRequest.py | 21
-rw-r--r--  chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/Model/greeter_generated.swift | 22
-rw-r--r--  chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/client/main.swift | 6
-rw-r--r--  chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/server/main.swift | 16
-rw-r--r--  chromium/third_party/flatbuffers/src/grpc/examples/ts/greeter/src/greeter.ts | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/grpc/flatbuffers-java-grpc/pom.xml | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/grpc/pom.xml | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/grpc/src/compiler/schema_interface.h | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/allocator.h | 68
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/array.h | 243
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/base.h | 44
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/buffer.h | 142
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/buffer_ref.h | 53
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/default_allocator.h | 64
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/detached_buffer.h | 114
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/flatbuffer_builder.h | 1187
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/flatbuffers.h | 2809
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/flexbuffers.h | 79
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/grpc.h | 125
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/idl.h | 28
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/reflection.h | 6
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/registry.h | 7
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/stl_emulation.h | 284
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/string.h | 64
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/struct.h | 53
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/table.h | 166
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/util.h | 14
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/vector.h | 370
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/vector_downward.h | 271
-rw-r--r--  chromium/third_party/flatbuffers/src/include/flatbuffers/verifier.h | 270
-rw-r--r--  chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/ByteBufferReadWriteBuf.java | 7
-rw-r--r--  chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/ByteBufferUtil.java | 3
-rw-r--r--  chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/FlatBufferBuilder.java | 30
-rw-r--r--  chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/FlexBuffers.java | 17
-rw-r--r--  chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Table.java | 5
-rw-r--r--  chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Utf8Old.java | 7
-rw-r--r--  chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Utf8Safe.java | 7
-rw-r--r--  chromium/third_party/flatbuffers/src/package.json | 12
-rw-r--r--  chromium/third_party/flatbuffers/src/pom.xml | 203
-rw-r--r--  chromium/third_party/flatbuffers/src/rust/flatbuffers/src/builder.rs | 66
-rw-r--r--  chromium/third_party/flatbuffers/src/rust/flatbuffers/src/get_root.rs | 4
-rw-r--r--  chromium/third_party/flatbuffers/src/rust/flatbuffers/src/vector.rs | 8
-rw-r--r--  chromium/third_party/flatbuffers/src/rust/flatbuffers/src/vtable_writer.rs | 12
-rwxr-xr-x  chromium/third_party/flatbuffers/src/samples/dart_sample.sh | 18
-rw-r--r--  chromium/third_party/flatbuffers/src/samples/monster.bfbs | bin 1912 -> 1936 bytes
-rw-r--r--  chromium/third_party/flatbuffers/src/samples/monster_generated.h | 34
-rw-r--r--  chromium/third_party/flatbuffers/src/samples/monster_generated.rs | 1006
-rw-r--r--  chromium/third_party/flatbuffers/src/samples/rust_generated/mod.rs | 17
-rw-r--r--  chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/color_generated.rs | 97
-rw-r--r--  chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/equipment_generated.rs | 142
-rw-r--r--  chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/monster_generated.rs | 444
-rw-r--r--  chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/vec_3_generated.rs | 184
-rw-r--r--  chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/weapon_generated.rs | 156
-rw-r--r--  chromium/third_party/flatbuffers/src/samples/sample_binary.rs | 7
-rw-r--r--  chromium/third_party/flatbuffers/src/samples/sample_flexbuffers.rs | 1
-rwxr-xr-x  chromium/third_party/flatbuffers/src/scripts/check-generate-code.sh | 11
-rwxr-xr-x  chromium/third_party/flatbuffers/src/scripts/generate_code.py | 381
-rw-r--r--  chromium/third_party/flatbuffers/src/src/BUILD.bazel | 9
-rw-r--r--  chromium/third_party/flatbuffers/src/src/code_generators.cpp | 21
-rw-r--r--  chromium/third_party/flatbuffers/src/src/flatc.cpp | 19
-rw-r--r--  chromium/third_party/flatbuffers/src/src/flatc_main.cpp | 12
-rw-r--r--  chromium/third_party/flatbuffers/src/src/idl_gen_cpp.cpp | 163
-rw-r--r--  chromium/third_party/flatbuffers/src/src/idl_gen_csharp.cpp | 316
-rw-r--r--  chromium/third_party/flatbuffers/src/src/idl_gen_dart.cpp | 160
-rw-r--r--  chromium/third_party/flatbuffers/src/src/idl_gen_grpc.cpp | 8
-rw-r--r--  chromium/third_party/flatbuffers/src/src/idl_gen_java.cpp | 10
-rw-r--r--  chromium/third_party/flatbuffers/src/src/idl_gen_json_schema.cpp | 16
-rw-r--r--  chromium/third_party/flatbuffers/src/src/idl_gen_kotlin.cpp | 147
-rw-r--r--  chromium/third_party/flatbuffers/src/src/idl_gen_python.cpp | 127
-rw-r--r--  chromium/third_party/flatbuffers/src/src/idl_gen_rust.cpp | 660
-rw-r--r--  chromium/third_party/flatbuffers/src/src/idl_gen_swift.cpp | 219
-rw-r--r--  chromium/third_party/flatbuffers/src/src/idl_gen_text.cpp | 4
-rw-r--r--  chromium/third_party/flatbuffers/src/src/idl_gen_ts.cpp | 5
-rw-r--r--  chromium/third_party/flatbuffers/src/src/idl_parser.cpp | 17
-rw-r--r--  chromium/third_party/flatbuffers/src/src/reflection.cpp | 35
-rw-r--r--  chromium/third_party/flatbuffers/src/src/util.cpp | 5
-rw-r--r--  chromium/third_party/flatbuffers/src/swift.swiftformat | 3
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Package.swift | 3
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Package@swift-5.5.swift | 36
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/ByteBuffer.swift | 20
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Constants.swift | 6
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Documentation.md | 22
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_1.fbs | 1
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_2.fbs | 1
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_3.fbs | 6
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_4.fbs | 12
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_5.fbs | 18
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_6.fbs | 25
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_7.fbs | 27
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_1.swift | 1
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_10.swift | 71
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_11.swift | 11
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_12.swift | 19
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_13.swift | 26
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_2.swift | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_3.swift | 7
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_4.swift | 10
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_5.swift | 22
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_6.swift | 26
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_7.swift | 29
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_8.swift | 40
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_9.swift | 62
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/images/tutorial_cover_image_1.png | bin 0 -> 20176 bytes
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/Tutorial_Table_of_Contents.tutorial | 14
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/create_your_first_buffer.tutorial | 72
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/creating_flatbuffer_schema.tutorial | 47
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/reading_bytebuffer.tutorial | 27
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Enum.swift | 3
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBufferBuilder.swift | 400
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBufferObject.swift | 20
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBuffersUtils.swift | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatbuffersErrors.swift | 5
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Message.swift | 6
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Mutable.swift | 8
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/NativeObject.swift | 4
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Root.swift | 20
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/String+extension.swift | 18
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Table.swift | 28
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/TableVerifier.swift | 9
-rw-r--r--  chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Verifier.swift | 10
-rw-r--r--  chromium/third_party/flatbuffers/src/ts/flexbuffers.ts | 2
-rw-r--r--  chromium/third_party/flatbuffers/src/ts/flexbuffers/reference.ts | 2
167 files changed, 9092 insertions, 5796 deletions
diff --git a/chromium/third_party/flatbuffers/README.chromium b/chromium/third_party/flatbuffers/README.chromium
index 91f829bea91..4fc8031a203 100644
--- a/chromium/third_party/flatbuffers/README.chromium
+++ b/chromium/third_party/flatbuffers/README.chromium
@@ -1,8 +1,8 @@
Name: FlatBuffers
Short Name: flatbuffers
URL: https://github.com/google/flatbuffers
-Version: c0ba2870c909fb57f97efd7083b5d33d2311d7f9
-Date: 2021-07-06
+Version: 4f7f6dc301f5ffb9765b055d48324dc7629abd5a
+Date: 2021-11-30
License: Apache 2.0
License File: LICENSE
Security Critical: yes
diff --git a/chromium/third_party/flatbuffers/src/.appveyor/check-generate-code.bat b/chromium/third_party/flatbuffers/src/.appveyor/check-generate-code.bat
index ba7398a2f08..2a18cf99259 100644
--- a/chromium/third_party/flatbuffers/src/.appveyor/check-generate-code.bat
+++ b/chromium/third_party/flatbuffers/src/.appveyor/check-generate-code.bat
@@ -27,9 +27,9 @@ goto SUCCESS
:DIFFFOUND
@echo "" >&2
@echo "ERROR: ********************************************************" >&2
-@echo "ERROR: The following differences were found after running the" >&2
-@echo "ERROR: tests/generate_code.sh script. Maybe you forgot to run" >&2
-@echo "ERROR: it after making changes in a generator or schema?" >&2
+@echo "ERROR: The following differences were found after building." >&2
+@echo "ERROR: Perhaps there is a difference in the flags for the" >&2
+@echo "ERROR: CMakeLists.txt vs the tests/generate_code.bat script?" >&2
@echo "ERROR: ********************************************************" >&2
@echo "" >&2
@git -c core.autocrlf=true --no-pager diff --binary
diff --git a/chromium/third_party/flatbuffers/src/.bazelci/presubmit.yml b/chromium/third_party/flatbuffers/src/.bazelci/presubmit.yml
index a6e38fde214..33cb7c2996a 100644
--- a/chromium/third_party/flatbuffers/src/.bazelci/presubmit.yml
+++ b/chromium/third_party/flatbuffers/src/.bazelci/presubmit.yml
@@ -1,18 +1,18 @@
---
buildifier: latest
platforms:
- ubuntu1604:
+ ubuntu1804:
build_targets:
- - "..."
+ - "//..."
test_targets:
- - "..."
- ubuntu1804:
+ - "//..."
+ ubuntu2004:
build_targets:
- - "..."
+ - "//..."
test_targets:
- - "..."
+ - "//..."
macos:
build_targets:
- - "..."
+ - "//..."
test_targets:
- - "..."
+ - "//..."
diff --git a/chromium/third_party/flatbuffers/src/.github/workflows/build.yml b/chromium/third_party/flatbuffers/src/.github/workflows/build.yml
index e94ce43f1d5..3e76107a95b 100644
--- a/chromium/third_party/flatbuffers/src/.github/workflows/build.yml
+++ b/chromium/third_party/flatbuffers/src/.github/workflows/build.yml
@@ -23,6 +23,10 @@ jobs:
run: make -j4
- name: test
run: ./flattests
+ - name: make flatc executable
+ run: |
+ chmod +x flatc
+ ./flatc --version
- name: upload build artifacts
uses: actions/upload-artifact@v1
with:
@@ -72,8 +76,17 @@ jobs:
- name: build
# NOTE: we need this _build dir to not have xcodebuild's default ./build dir clash with the BUILD file.
run: xcodebuild -toolchain clang -configuration Release -target flattests SYMROOT=$(PWD)/_build
+ - name: check that the binary is "universal"
+ run: |
+ info=$(file _build/Release/flatc)
+ echo $info
+ echo $info | grep "universal binary with 2 architectures"
- name: test
run: _build/Release/flattests
+ - name: make flatc executable
+ run: |
+ chmod +x _build/Release/flatc
+ ./_build/Release/flatc --version
- name: upload build artifacts
uses: actions/upload-artifact@v1
with:
@@ -111,6 +124,24 @@ jobs:
- name: Generate
run: bash scripts/check-generate-code.sh && bash scripts/check-grpc-generated-code.sh
+ build-benchmarks:
+ name: Build Benchmarks (on Linux)
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ cxx: [g++-10]
+ steps:
+ - uses: actions/checkout@v1
+ - name: cmake
+ run: CXX=${{ matrix.cxx }} cmake -G "Unix Makefiles" -DFLATBUFFERS_CXX_FLAGS="-Wno-unused-parameter -fno-aligned-new" -DFLATBUFFERS_BUILD_BENCHMARKS=ON -DCMAKE_BUILD_TYPE=Release . && make -j4
+ - name: Run benchmarks
+ run: ./flatbenchmark --benchmark_repetitions=5 --benchmark_display_aggregates_only=true --benchmark_out_format=console --benchmark_out=benchmarks/results_${{matrix.cxx}}
+ - name: Upload benchmarks results
+ uses: actions/upload-artifact@v1
+ with:
+ name: Linux flatbenchmark results ${{matrix.cxx}}
+ path: benchmarks/results_${{matrix.cxx}}
+
build-java:
name: Build Java
runs-on: ubuntu-latest
@@ -120,18 +151,36 @@ jobs:
working-directory: tests
run: bash JavaTest.sh
- build-kotlin:
- name: Build Kotlin
+ build-kotlin-macos:
+ name: Build Kotlin MacOS
runs-on: macos-latest
steps:
- name: Checkout
- uses: actions/checkout@v1
+ uses: actions/checkout@v2
+ - uses: actions/setup-java@v2
+ with:
+ distribution: 'adopt-hotspot'
+ java-version: '11'
+ - name: Build
+ working-directory: kotlin
+ run: ./gradlew clean iosX64Test macosX64Test jsTest jsBrowserTest
+
+ build-kotlin-linux:
+ name: Build Kotlin Linux
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+ - uses: actions/setup-java@v2
+ with:
+ distribution: 'adopt-hotspot'
+ java-version: '8'
- name: Build
working-directory: kotlin
- run: ./gradlew clean build allTests
+ run: ./gradlew jvmMainClasses jvmTest
- name: Run Benchmark
working-directory: kotlin
- run: ./gradlew benchmark
+ run: ./gradlew jvmBenchmark
- name: Generate Benchmark Report
working-directory: kotlin
run: |
diff --git a/chromium/third_party/flatbuffers/src/BUILD.bazel b/chromium/third_party/flatbuffers/src/BUILD.bazel
index 4e40b718b14..9b7c1232259 100644
--- a/chromium/third_party/flatbuffers/src/BUILD.bazel
+++ b/chromium/third_party/flatbuffers/src/BUILD.bazel
@@ -10,6 +10,20 @@ exports_files([
"LICENSE",
])
+config_setting(
+ name = "platform_freebsd",
+ constraint_values = [
+ "@platforms//os:freebsd",
+ ],
+)
+
+config_setting(
+ name = "platform_openbsd",
+ constraint_values = [
+ "@platforms//os:openbsd",
+ ],
+)
+
# Public flatc library to compile flatbuffer files at runtime.
cc_library(
name = "flatbuffers",
@@ -23,8 +37,15 @@ cc_library(
filegroup(
name = "public_headers",
srcs = [
+ "include/flatbuffers/allocator.h",
+ "include/flatbuffers/array.h",
"include/flatbuffers/base.h",
+ "include/flatbuffers/buffer.h",
+ "include/flatbuffers/buffer_ref.h",
"include/flatbuffers/code_generators.h",
+ "include/flatbuffers/default_allocator.h",
+ "include/flatbuffers/detached_buffer.h",
+ "include/flatbuffers/flatbuffer_builder.h",
"include/flatbuffers/flatbuffers.h",
"include/flatbuffers/flexbuffers.h",
"include/flatbuffers/grpc.h",
@@ -35,7 +56,13 @@ filegroup(
"include/flatbuffers/reflection_generated.h",
"include/flatbuffers/registry.h",
"include/flatbuffers/stl_emulation.h",
+ "include/flatbuffers/string.h",
+ "include/flatbuffers/struct.h",
+ "include/flatbuffers/table.h",
"include/flatbuffers/util.h",
+ "include/flatbuffers/vector.h",
+ "include/flatbuffers/vector_downward.h",
+ "include/flatbuffers/verifier.h",
],
)
@@ -73,6 +100,8 @@ cc_library(
"include/flatbuffers/flexbuffers.h",
"include/flatbuffers/stl_emulation.h",
"include/flatbuffers/util.h",
+ "include/flatbuffers/vector.h",
+ "include/flatbuffers/verifier.h",
],
linkstatic = 1,
strip_include_prefix = "/include",
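Editor's note: the longer header lists above (and in the CMake and Android builds later in this diff) come from flatbuffers.h being split into smaller headers (allocator.h, array.h, buffer.h, verifier.h, and so on) in this release. Code that includes the umbrella header should keep compiling unchanged. The following is a minimal standalone sketch, not part of this patch, assuming the 2.0 umbrella header still aggregates the split pieces:

    // Smoke test: FlatBufferBuilder now lives in flatbuffer_builder.h, but the
    // umbrella header flatbuffers.h still makes it available.
    #include <iostream>

    #include "flatbuffers/flatbuffers.h"

    int main() {
      flatbuffers::FlatBufferBuilder fbb(1024);
      auto str = fbb.CreateString("hello, split headers");
      fbb.Finish(str);
      std::cout << "built a " << fbb.GetSize() << "-byte buffer\n";
      return 0;
    }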
diff --git a/chromium/third_party/flatbuffers/src/CMake/Version.cmake b/chromium/third_party/flatbuffers/src/CMake/Version.cmake
index cc6ca1fa7e0..a577eef2ad8 100644
--- a/chromium/third_party/flatbuffers/src/CMake/Version.cmake
+++ b/chromium/third_party/flatbuffers/src/CMake/Version.cmake
@@ -6,7 +6,7 @@ set(VERSION_COMMIT 0)
find_program(GIT git)
if(GIT)
execute_process(
- COMMAND ${GIT} describe
+ COMMAND ${GIT} describe --tags
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
OUTPUT_VARIABLE GIT_DESCRIBE_DIRTY
OUTPUT_STRIP_TRAILING_WHITESPACE
diff --git a/chromium/third_party/flatbuffers/src/CMakeLists.txt b/chromium/third_party/flatbuffers/src/CMakeLists.txt
index 249f46bd739..544c7afb8da 100644
--- a/chromium/third_party/flatbuffers/src/CMakeLists.txt
+++ b/chromium/third_party/flatbuffers/src/CMakeLists.txt
@@ -1,9 +1,17 @@
cmake_minimum_required(VERSION 2.8.12)
+
+if (POLICY CMP0048)
+ cmake_policy(SET CMP0048 NEW)
+ project(FlatBuffers
+ DESCRIPTION "Flatbuffers serialization library"
+ VERSION 2.0.0
+ LANGUAGES CXX)
+else()
+ project(FlatBuffers)
+endif (POLICY CMP0048)
+
# generate compile_commands.json
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
-include(CheckCXXSymbolExists)
-
-project(FlatBuffers)
# NOTE: Code coverage only works on Linux & OSX.
option(FLATBUFFERS_CODE_COVERAGE "Enable the code coverage build option." OFF)
@@ -16,6 +24,9 @@ option(FLATBUFFERS_BUILD_FLATC "Enable the build of the flatbuffers compiler"
option(FLATBUFFERS_STATIC_FLATC "Build flatbuffers compiler with -static flag"
OFF)
option(FLATBUFFERS_BUILD_FLATHASH "Enable the build of flathash" ON)
+option(FLATBUFFERS_BUILD_BENCHMARKS "Enable the build of flatbenchmark. \"
+ Requires C++11."
+ OFF)
option(FLATBUFFERS_BUILD_GRPCTEST "Enable the build of grpctest" OFF)
option(FLATBUFFERS_BUILD_SHAREDLIB
"Enable the build of the flatbuffers shared library"
@@ -58,6 +69,8 @@ endif()
# Auto-detect locale-narrow 'strtod_l' and 'strtoull_l' functions.
if(NOT DEFINED FLATBUFFERS_LOCALE_INDEPENDENT)
+ include(CheckCXXSymbolExists)
+
set(FLATBUFFERS_LOCALE_INDEPENDENT 0)
if(MSVC)
check_cxx_symbol_exists(_strtof_l stdlib.h FLATBUFFERS_HAS_STRTOF_L)
@@ -73,17 +86,30 @@ endif()
add_definitions(-DFLATBUFFERS_LOCALE_INDEPENDENT=$<BOOL:${FLATBUFFERS_LOCALE_INDEPENDENT}>)
set(FlatBuffers_Library_SRCS
+ include/flatbuffers/allocator.h
+ include/flatbuffers/array.h
include/flatbuffers/base.h
+ include/flatbuffers/buffer.h
+ include/flatbuffers/buffer_ref.h
+ include/flatbuffers/default_allocator.h
+ include/flatbuffers/detached_buffer.h
+ include/flatbuffers/flatbuffer_builder.h
include/flatbuffers/flatbuffers.h
+ include/flatbuffers/flexbuffers.h
include/flatbuffers/hash.h
include/flatbuffers/idl.h
- include/flatbuffers/util.h
+ include/flatbuffers/minireflect.h
include/flatbuffers/reflection.h
include/flatbuffers/reflection_generated.h
- include/flatbuffers/stl_emulation.h
- include/flatbuffers/flexbuffers.h
include/flatbuffers/registry.h
- include/flatbuffers/minireflect.h
+ include/flatbuffers/stl_emulation.h
+ include/flatbuffers/string.h
+ include/flatbuffers/struct.h
+ include/flatbuffers/table.h
+ include/flatbuffers/util.h
+ include/flatbuffers/vector.h
+ include/flatbuffers/vector_downward.h
+ include/flatbuffers/verifier.h
src/idl_parser.cpp
src/idl_gen_text.cpp
src/reflection.cpp
@@ -220,10 +246,6 @@ if(EXISTS "${CMAKE_TOOLCHAIN_FILE}")
# do not apply any global settings if the toolchain
# is being configured externally
message(STATUS "Using toolchain file: ${CMAKE_TOOLCHAIN_FILE}.")
-elseif(APPLE)
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -stdlib=libc++")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -pedantic -Werror -Wextra -Wno-unused-parameter")
- set(FLATBUFFERS_PRIVATE_CXX_FLAGS "-Wold-style-cast")
elseif(CMAKE_COMPILER_IS_GNUCXX)
if(CYGWIN)
set(CMAKE_CXX_FLAGS
@@ -250,8 +272,14 @@ elseif(CMAKE_COMPILER_IS_GNUCXX)
"${CMAKE_CXX_FLAGS} -fsigned-char")
elseif(${CMAKE_CXX_COMPILER_ID} MATCHES "Clang")
- set(CMAKE_CXX_FLAGS
- "${CMAKE_CXX_FLAGS} -std=c++0x -Wall -pedantic -Werror -Wextra -Wno-unused-parameter")
+ if(APPLE)
+ set(CMAKE_OSX_ARCHITECTURES "arm64;x86_64")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
+ else()
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x")
+ endif()
+
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -pedantic -Werror -Wextra -Wno-unused-parameter")
set(FLATBUFFERS_PRIVATE_CXX_FLAGS "-Wold-style-cast")
if(NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 3.8)
list(APPEND FLATBUFFERS_PRIVATE_CXX_FLAGS "-Wimplicit-fallthrough" "-Wextra-semi" "-Werror=unused-private-field") # enable warning
@@ -478,6 +506,27 @@ function(compile_flatbuffers_schema_to_embedded_binary SRC_FBS OPT)
register_generated_output(${GEN_BFBS_HEADER})
endfunction()
+# Check whether Python 3 is installed so that we can run the code generation
+# Python script after flatc is built.
+find_package(PythonInterp 3)
+if(PYTHONINTERP_FOUND AND
+ # Skip doing this if the MSVC version is below VS 12.
+ # https://cmake.org/cmake/help/latest/variable/MSVC_VERSION.html
+ (NOT MSVC OR MSVC_VERSION GREATER 1800))
+ if(WIN32)
+ set(GENERATION_SCRIPT py scripts/generate_code.py)
+ else()
+ set(GENERATION_SCRIPT scripts/generate_code.py)
+ endif()
+ add_custom_command(
+ TARGET flatc
+ POST_BUILD
+ COMMAND ${GENERATION_SCRIPT} "${FLATBUFFERS_FLATC_EXECUTABLE}"
+ WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
+ COMMENT "Running ${GENERATION_SCRIPT}..."
+ VERBATIM)
+endif()
+
if(FLATBUFFERS_BUILD_TESTS)
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/tests" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}")
file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/samples" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}")
@@ -547,6 +596,7 @@ if(FLATBUFFERS_BUILD_GRPCTEST)
INCLUDE_DIRECTORIES(${PROTOBUF_DOWNLOAD_PATH}/src)
find_package(Threads REQUIRED)
list(APPEND CMAKE_PREFIX_PATH ${GRPC_INSTALL_PATH})
+ find_package(absl CONFIG REQUIRED)
find_package(protobuf CONFIG REQUIRED)
find_package(gRPC CONFIG REQUIRED)
add_executable(grpctest ${FlatBuffers_GRPCTest_SRCS})
@@ -678,3 +728,8 @@ if(UNIX)
include(CPack)
endif()
endif()
+
+# Include for running Google Benchmarks.
+if(FLATBUFFERS_BUILD_BENCHMARKS AND CMAKE_VERSION VERSION_GREATER 3.13)
+ add_subdirectory(benchmarks)
+endif() \ No newline at end of file
diff --git a/chromium/third_party/flatbuffers/src/WORKSPACE b/chromium/third_party/flatbuffers/src/WORKSPACE
index c3bfdbea21e..01c1e86319c 100644
--- a/chromium/third_party/flatbuffers/src/WORKSPACE
+++ b/chromium/third_party/flatbuffers/src/WORKSPACE
@@ -29,7 +29,7 @@ http_archive(
)
##### GRPC
-_GRPC_VERSION = "1.36.1"
+_GRPC_VERSION = "1.39.0"
http_archive(
name = "com_github_grpc_grpc",
diff --git a/chromium/third_party/flatbuffers/src/android/app/build.gradle b/chromium/third_party/flatbuffers/src/android/app/build.gradle
index f72f21f0b8d..dba6624f25d 100644
--- a/chromium/third_party/flatbuffers/src/android/app/build.gradle
+++ b/chromium/third_party/flatbuffers/src/android/app/build.gradle
@@ -101,16 +101,6 @@ android {
flavorDimensions "stl-variant"
productFlavors {
- stlport {
- dimension "stl-variant"
- applicationIdSuffix ".stlport"
- versionNameSuffix "-stlport"
- externalNativeBuild {
- ndkBuild {
- arguments "APP_STL=stlport_static"
- }
- }
- }
gnustl {
dimension "stl-variant"
applicationIdSuffix ".gnustl"
diff --git a/chromium/third_party/flatbuffers/src/android/app/src/main/cpp/flatbuffers/CMakeLists.txt b/chromium/third_party/flatbuffers/src/android/app/src/main/cpp/flatbuffers/CMakeLists.txt
index f32b0bbb475..7ce2a2c93e8 100644
--- a/chromium/third_party/flatbuffers/src/android/app/src/main/cpp/flatbuffers/CMakeLists.txt
+++ b/chromium/third_party/flatbuffers/src/android/app/src/main/cpp/flatbuffers/CMakeLists.txt
@@ -15,17 +15,30 @@ set(CMAKE_CXX_FLAGS
"${CMAKE_CXX_FLAGS} -fsigned-char")
set(FlatBuffers_Library_SRCS
+ ${FLATBUFFERS_SRC}/include/flatbuffers/allocator.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/array.h
${FLATBUFFERS_SRC}/include/flatbuffers/base.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/buffer.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/buffer_ref.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/default_allocator.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/detached_buffer.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/flatbuffer_builder.h
${FLATBUFFERS_SRC}/include/flatbuffers/flatbuffers.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/flexbuffers.h
${FLATBUFFERS_SRC}/include/flatbuffers/hash.h
${FLATBUFFERS_SRC}/include/flatbuffers/idl.h
- ${FLATBUFFERS_SRC}/include/flatbuffers/util.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/minireflect.h
${FLATBUFFERS_SRC}/include/flatbuffers/reflection.h
${FLATBUFFERS_SRC}/include/flatbuffers/reflection_generated.h
- ${FLATBUFFERS_SRC}/include/flatbuffers/stl_emulation.h
- ${FLATBUFFERS_SRC}/include/flatbuffers/flexbuffers.h
${FLATBUFFERS_SRC}/include/flatbuffers/registry.h
- ${FLATBUFFERS_SRC}/include/flatbuffers/minireflect.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/stl_emulation.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/string.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/struct.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/table.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/util.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/vector.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/vector_downward.h
+ ${FLATBUFFERS_SRC}/include/flatbuffers/verifier.h
${FLATBUFFERS_SRC}/src/idl_parser.cpp
${FLATBUFFERS_SRC}/src/idl_gen_text.cpp
${FLATBUFFERS_SRC}/src/reflection.cpp
diff --git a/chromium/third_party/flatbuffers/src/benchmarks/CMakeLists.txt b/chromium/third_party/flatbuffers/src/benchmarks/CMakeLists.txt
new file mode 100644
index 00000000000..8d49c35d1a4
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/benchmarks/CMakeLists.txt
@@ -0,0 +1,81 @@
+# Setup for running Google Benchmarks (https://github.com/google/benchmark) on
+# flatbuffers. This requires both the benchmark library and its dependency gtest
+# to be built. Instead of including them here or adding a submodule, this uses
+# FetchContent (https://cmake.org/cmake/help/latest/module/FetchContent.html) to
+# grab the dependencies at config time. This requires CMake 3.14 or higher.
+cmake_minimum_required(VERSION 3.14)
+include(FetchContent)
+
+# No particular reason for the specific GIT_TAGs for the following repos; they
+# were just the latest releases when this was added.
+FetchContent_Declare(
+ googletest
+ GIT_REPOSITORY https://github.com/google/googletest.git
+ GIT_TAG e2239ee6043f73722e7aa812a459f54a28552929 # release-1.11.0
+)
+FetchContent_Declare(
+ googlebenchmark
+ GIT_REPOSITORY https://github.com/google/benchmark.git
+ GIT_TAG f91b6b42b1b9854772a90ae9501464a161707d1e # v1.6.0
+)
+
+# For Windows: Prevent overriding the parent project's compiler/linker
+# settings.
+set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)
+FetchContent_MakeAvailable(
+ googletest
+ googlebenchmark
+)
+
+set(CPP_BENCH_DIR cpp)
+set(CPP_FB_BENCH_DIR ${CPP_BENCH_DIR}/flatbuffers)
+set(CPP_RAW_BENCH_DIR ${CPP_BENCH_DIR}/raw)
+set(CPP_BENCH_FBS ${CPP_FB_BENCH_DIR}/bench.fbs)
+set(CPP_BENCH_FB_GEN ${CPP_FB_BENCH_DIR}/bench_generated.h)
+
+set(FlatBenchmark_SRCS
+ ${CPP_BENCH_DIR}/benchmark_main.cpp
+ ${CPP_FB_BENCH_DIR}/fb_bench.cpp
+ ${CPP_RAW_BENCH_DIR}/raw_bench.cpp
+ ${CPP_BENCH_FB_GEN}
+)
+
+# Generate the flatbuffers benchmark code from the flatbuffers schema using
+# flatc itself, so it depends on flatc. It also depends on the C++ flatbuffers
+# runtime and the schema file itself, so it is regenerated at the correct
+# times.
+add_custom_command(
+ OUTPUT ${CPP_BENCH_FB_GEN}
+ COMMAND
+ "${FLATBUFFERS_FLATC_EXECUTABLE}"
+ --cpp
+ -o ${CPP_FB_BENCH_DIR}
+ ${CPP_BENCH_FBS}
+ DEPENDS
+ flatc
+ flatbuffers
+ ${CPP_BENCH_FBS}
+ COMMENT "Run Flatbuffers Benchmark Codegen: ${CPP_BENCH_FB_GEN}"
+ VERBATIM)
+
+# The main flatbuffers benchmark executable
+add_executable(flatbenchmark ${FlatBenchmark_SRCS})
+
+# Benchmark requires C++11
+target_compile_features(flatbenchmark PUBLIC
+ cxx_std_11
+)
+
+# Set the output directory to the root binary directory
+set_target_properties(flatbenchmark
+ PROPERTIES RUNTIME_OUTPUT_DIRECTORY
+ "${CMAKE_BINARY_DIR}"
+)
+
+# The includes of the benchmark files are fully qualified from flatbuffers root.
+target_include_directories(flatbenchmark PUBLIC ${CMAKE_SOURCE_DIR})
+
+target_link_libraries(flatbenchmark
+ benchmark::benchmark_main # _main to use their entry point
+ gtest # Link to gtest so we can also assert in the benchmarks
+) \ No newline at end of file
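Editor's note: the target links benchmark::benchmark_main, which supplies main(), and gtest, so benchmarks can use EXPECT_* assertions; benchmark_main.cpp below relies on exactly that. As a reference for the pattern, here is a minimal standalone sketch; the workload and names are illustrative only and not part of this patch:

    #include <benchmark/benchmark.h>
    #include <gtest/gtest.h>

    #include <cstdint>
    #include <numeric>
    #include <vector>

    // Toy workload standing in for Encode/Decode/Use.
    static void BM_SumVector(benchmark::State &state) {
      std::vector<int64_t> data(1024, 1);
      for (auto _ : state) {
        int64_t sum = std::accumulate(data.begin(), data.end(), int64_t{0});
        benchmark::DoNotOptimize(sum);  // keep the compiler from eliding the work
        EXPECT_EQ(sum, 1024);           // gtest assertions work because gtest is linked in
      }
    }
    BENCHMARK(BM_SumVector);
    // No main() here: benchmark::benchmark_main provides the entry point.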
diff --git a/chromium/third_party/flatbuffers/src/benchmarks/cpp/bench.h b/chromium/third_party/flatbuffers/src/benchmarks/cpp/bench.h
new file mode 100644
index 00000000000..ad4baebf51c
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/benchmarks/cpp/bench.h
@@ -0,0 +1,19 @@
+#ifndef BENCHMARKS_CPP_BENCH_H_
+#define BENCHMARKS_CPP_BENCH_H_
+
+#include <cstdint>
+
+struct Bench {
+ virtual ~Bench() {}
+
+ inline void Add(int64_t value) { sum += value; }
+
+ virtual uint8_t *Encode(void *buf, int64_t &len) = 0;
+ virtual void *Decode(void *buf, int64_t len) = 0;
+ virtual int64_t Use(void *decoded) = 0;
+ virtual void Dealloc(void *decoded) = 0;
+
+ int64_t sum = 0;
+};
+
+#endif // BENCHMARKS_CPP_BENCH_H_ \ No newline at end of file
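Editor's note: Bench is the shared interface that both the FlatBuffers and the raw-struct benchmarks below implement. The intended call sequence, mirroring the helpers in benchmark_main.cpp that follows, is: encode, decode, walk the decoded data via Use() to produce a checksum, then Dealloc(). A compact driver sketch, with the factory name taken from fb_bench.h later in this patch:

    #include <cstdint>
    #include <iostream>
    #include <memory>

    #include "benchmarks/cpp/bench.h"
    #include "benchmarks/cpp/flatbuffers/fb_bench.h"  // NewFlatBuffersBench

    int main() {
      uint8_t buffer[1024];
      std::unique_ptr<Bench> bench = NewFlatBuffersBench(sizeof(buffer));

      int64_t length = 0;
      uint8_t *encoded = bench->Encode(buffer, length);  // serialize; returns a pointer to the encoded bytes
      void *decoded = bench->Decode(encoded, length);    // for FlatBuffers this is a pass-through
      int64_t checksum = bench->Use(decoded);            // touch every field and sum them
      bench->Dealloc(decoded);

      std::cout << "encoded " << length << " bytes, checksum " << checksum << "\n";
      return 0;
    }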
diff --git a/chromium/third_party/flatbuffers/src/benchmarks/cpp/benchmark_main.cpp b/chromium/third_party/flatbuffers/src/benchmarks/cpp/benchmark_main.cpp
new file mode 100644
index 00000000000..63807a5dd53
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/benchmarks/cpp/benchmark_main.cpp
@@ -0,0 +1,96 @@
+#include <benchmark/benchmark.h>
+#include <gtest/gtest.h>
+
+#include "benchmarks/cpp/bench.h"
+#include "benchmarks/cpp/flatbuffers/fb_bench.h"
+#include "benchmarks/cpp/raw/raw_bench.h"
+
+static inline void Encode(benchmark::State &state,
+ std::unique_ptr<Bench> &bench, uint8_t *buffer) {
+ int64_t length;
+ for (auto _ : state) {
+ bench->Encode(buffer, length);
+ benchmark::DoNotOptimize(length);
+ }
+}
+
+static inline void Decode(benchmark::State &state,
+ std::unique_ptr<Bench> &bench, uint8_t *buffer) {
+ int64_t length;
+ uint8_t *encoded = bench->Encode(buffer, length);
+
+ for (auto _ : state) {
+ void *decoded = bench->Decode(encoded, length);
+ benchmark::DoNotOptimize(decoded);
+ }
+}
+
+static inline void Use(benchmark::State &state, std::unique_ptr<Bench> &bench,
+ uint8_t *buffer, int64_t check_sum) {
+ int64_t length;
+ uint8_t *encoded = bench->Encode(buffer, length);
+ void *decoded = bench->Decode(encoded, length);
+
+ int64_t sum = 0;
+
+ for (auto _ : state) { sum = bench->Use(decoded); }
+
+ EXPECT_EQ(sum, check_sum);
+}
+
+static void BM_Flatbuffers_Encode(benchmark::State &state) {
+ const int64_t kBufferLength = 1024;
+ uint8_t buffer[kBufferLength];
+
+ StaticAllocator allocator(&buffer[0]);
+ std::unique_ptr<Bench> bench = NewFlatBuffersBench(kBufferLength, &allocator);
+ Encode(state, bench, buffer);
+}
+BENCHMARK(BM_Flatbuffers_Encode);
+
+static void BM_Flatbuffers_Decode(benchmark::State &state) {
+ const int64_t kBufferLength = 1024;
+ uint8_t buffer[kBufferLength];
+
+ StaticAllocator allocator(&buffer[0]);
+ std::unique_ptr<Bench> bench = NewFlatBuffersBench(kBufferLength, &allocator);
+ Decode(state, bench, buffer);
+}
+BENCHMARK(BM_Flatbuffers_Decode);
+
+static void BM_Flatbuffers_Use(benchmark::State &state) {
+ const int64_t kBufferLength = 1024;
+ uint8_t buffer[kBufferLength];
+
+ StaticAllocator allocator(&buffer[0]);
+ std::unique_ptr<Bench> bench = NewFlatBuffersBench(kBufferLength, &allocator);
+ Use(state, bench, buffer, 218812692406581874);
+}
+BENCHMARK(BM_Flatbuffers_Use);
+
+static void BM_Raw_Encode(benchmark::State &state) {
+ const int64_t kBufferLength = 1024;
+ uint8_t buffer[kBufferLength];
+
+ std::unique_ptr<Bench> bench = NewRawBench();
+ Encode(state, bench, buffer);
+}
+BENCHMARK(BM_Raw_Encode);
+
+static void BM_Raw_Decode(benchmark::State &state) {
+ const int64_t kBufferLength = 1024;
+ uint8_t buffer[kBufferLength];
+
+ std::unique_ptr<Bench> bench = NewRawBench();
+ Decode(state, bench, buffer);
+}
+BENCHMARK(BM_Raw_Decode);
+
+static void BM_Raw_Use(benchmark::State &state) {
+ const int64_t kBufferLength = 1024;
+ uint8_t buffer[kBufferLength];
+
+ std::unique_ptr<Bench> bench = NewRawBench();
+ Use(state, bench, buffer, 218812692406581874);
+}
+BENCHMARK(BM_Raw_Use);
diff --git a/chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/bench.fbs b/chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/bench.fbs
new file mode 100644
index 00000000000..8e5f3ff7e1c
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/bench.fbs
@@ -0,0 +1,52 @@
+// Copyright 2021 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+
+// trying to represent a typical mix of datatypes:
+// 1 array of 3 elements, each element: 1 string, 3 nested objects, 9 scalars
+// root element has the array, additional string and an enum
+
+namespace benchmarks_flatbuffers;
+
+enum Enum : short { Apples, Pears, Bananas}
+
+struct Foo {
+ id:ulong;
+ count:short;
+ prefix:byte;
+ length:uint;
+}
+
+struct Bar {
+ parent:Foo;
+ time:int;
+ ratio:float;
+ size:ushort;
+}
+
+table FooBar {
+ sibling:Bar;
+ name:string;
+ rating:double;
+ postfix:ubyte;
+}
+
+table FooBarContainer {
+ list:[FooBar]; // 3 copies of the above
+ initialized:bool;
+ fruit:Enum;
+ location:string;
+}
+
+root_type FooBarContainer;
diff --git a/chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/bench_generated.h b/chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/bench_generated.h
new file mode 100644
index 00000000000..9d207c59f2c
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/bench_generated.h
@@ -0,0 +1,347 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+
+
+#ifndef FLATBUFFERS_GENERATED_BENCH_BENCHMARKS_FLATBUFFERS_H_
+#define FLATBUFFERS_GENERATED_BENCH_BENCHMARKS_FLATBUFFERS_H_
+
+#include "flatbuffers/flatbuffers.h"
+
+namespace benchmarks_flatbuffers {
+
+struct Foo;
+
+struct Bar;
+
+struct FooBar;
+struct FooBarBuilder;
+
+struct FooBarContainer;
+struct FooBarContainerBuilder;
+
+enum Enum : int16_t {
+ Enum_Apples = 0,
+ Enum_Pears = 1,
+ Enum_Bananas = 2,
+ Enum_MIN = Enum_Apples,
+ Enum_MAX = Enum_Bananas
+};
+
+inline const Enum (&EnumValuesEnum())[3] {
+ static const Enum values[] = {
+ Enum_Apples,
+ Enum_Pears,
+ Enum_Bananas
+ };
+ return values;
+}
+
+inline const char * const *EnumNamesEnum() {
+ static const char * const names[4] = {
+ "Apples",
+ "Pears",
+ "Bananas",
+ nullptr
+ };
+ return names;
+}
+
+inline const char *EnumNameEnum(Enum e) {
+ if (flatbuffers::IsOutRange(e, Enum_Apples, Enum_Bananas)) return "";
+ const size_t index = static_cast<size_t>(e);
+ return EnumNamesEnum()[index];
+}
+
+FLATBUFFERS_MANUALLY_ALIGNED_STRUCT(8) Foo FLATBUFFERS_FINAL_CLASS {
+ private:
+ uint64_t id_;
+ int16_t count_;
+ int8_t prefix_;
+ int8_t padding0__;
+ uint32_t length_;
+
+ public:
+ Foo()
+ : id_(0),
+ count_(0),
+ prefix_(0),
+ padding0__(0),
+ length_(0) {
+ (void)padding0__;
+ }
+ Foo(uint64_t _id, int16_t _count, int8_t _prefix, uint32_t _length)
+ : id_(flatbuffers::EndianScalar(_id)),
+ count_(flatbuffers::EndianScalar(_count)),
+ prefix_(flatbuffers::EndianScalar(_prefix)),
+ padding0__(0),
+ length_(flatbuffers::EndianScalar(_length)) {
+ (void)padding0__;
+ }
+ uint64_t id() const {
+ return flatbuffers::EndianScalar(id_);
+ }
+ int16_t count() const {
+ return flatbuffers::EndianScalar(count_);
+ }
+ int8_t prefix() const {
+ return flatbuffers::EndianScalar(prefix_);
+ }
+ uint32_t length() const {
+ return flatbuffers::EndianScalar(length_);
+ }
+};
+FLATBUFFERS_STRUCT_END(Foo, 16);
+
+FLATBUFFERS_MANUALLY_ALIGNED_STRUCT(8) Bar FLATBUFFERS_FINAL_CLASS {
+ private:
+ benchmarks_flatbuffers::Foo parent_;
+ int32_t time_;
+ float ratio_;
+ uint16_t size_;
+ int16_t padding0__; int32_t padding1__;
+
+ public:
+ Bar()
+ : parent_(),
+ time_(0),
+ ratio_(0),
+ size_(0),
+ padding0__(0),
+ padding1__(0) {
+ (void)padding0__;
+ (void)padding1__;
+ }
+ Bar(const benchmarks_flatbuffers::Foo &_parent, int32_t _time, float _ratio, uint16_t _size)
+ : parent_(_parent),
+ time_(flatbuffers::EndianScalar(_time)),
+ ratio_(flatbuffers::EndianScalar(_ratio)),
+ size_(flatbuffers::EndianScalar(_size)),
+ padding0__(0),
+ padding1__(0) {
+ (void)padding0__;
+ (void)padding1__;
+ }
+ const benchmarks_flatbuffers::Foo &parent() const {
+ return parent_;
+ }
+ int32_t time() const {
+ return flatbuffers::EndianScalar(time_);
+ }
+ float ratio() const {
+ return flatbuffers::EndianScalar(ratio_);
+ }
+ uint16_t size() const {
+ return flatbuffers::EndianScalar(size_);
+ }
+};
+FLATBUFFERS_STRUCT_END(Bar, 32);
+
+struct FooBar FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
+ typedef FooBarBuilder Builder;
+ enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+ VT_SIBLING = 4,
+ VT_NAME = 6,
+ VT_RATING = 8,
+ VT_POSTFIX = 10
+ };
+ const benchmarks_flatbuffers::Bar *sibling() const {
+ return GetStruct<const benchmarks_flatbuffers::Bar *>(VT_SIBLING);
+ }
+ const flatbuffers::String *name() const {
+ return GetPointer<const flatbuffers::String *>(VT_NAME);
+ }
+ double rating() const {
+ return GetField<double>(VT_RATING, 0.0);
+ }
+ uint8_t postfix() const {
+ return GetField<uint8_t>(VT_POSTFIX, 0);
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const {
+ return VerifyTableStart(verifier) &&
+ VerifyField<benchmarks_flatbuffers::Bar>(verifier, VT_SIBLING) &&
+ VerifyOffset(verifier, VT_NAME) &&
+ verifier.VerifyString(name()) &&
+ VerifyField<double>(verifier, VT_RATING) &&
+ VerifyField<uint8_t>(verifier, VT_POSTFIX) &&
+ verifier.EndTable();
+ }
+};
+
+struct FooBarBuilder {
+ typedef FooBar Table;
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_sibling(const benchmarks_flatbuffers::Bar *sibling) {
+ fbb_.AddStruct(FooBar::VT_SIBLING, sibling);
+ }
+ void add_name(flatbuffers::Offset<flatbuffers::String> name) {
+ fbb_.AddOffset(FooBar::VT_NAME, name);
+ }
+ void add_rating(double rating) {
+ fbb_.AddElement<double>(FooBar::VT_RATING, rating, 0.0);
+ }
+ void add_postfix(uint8_t postfix) {
+ fbb_.AddElement<uint8_t>(FooBar::VT_POSTFIX, postfix, 0);
+ }
+ explicit FooBarBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+ : fbb_(_fbb) {
+ start_ = fbb_.StartTable();
+ }
+ flatbuffers::Offset<FooBar> Finish() {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<FooBar>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<FooBar> CreateFooBar(
+ flatbuffers::FlatBufferBuilder &_fbb,
+ const benchmarks_flatbuffers::Bar *sibling = nullptr,
+ flatbuffers::Offset<flatbuffers::String> name = 0,
+ double rating = 0.0,
+ uint8_t postfix = 0) {
+ FooBarBuilder builder_(_fbb);
+ builder_.add_rating(rating);
+ builder_.add_name(name);
+ builder_.add_sibling(sibling);
+ builder_.add_postfix(postfix);
+ return builder_.Finish();
+}
+
+inline flatbuffers::Offset<FooBar> CreateFooBarDirect(
+ flatbuffers::FlatBufferBuilder &_fbb,
+ const benchmarks_flatbuffers::Bar *sibling = nullptr,
+ const char *name = nullptr,
+ double rating = 0.0,
+ uint8_t postfix = 0) {
+ auto name__ = name ? _fbb.CreateString(name) : 0;
+ return benchmarks_flatbuffers::CreateFooBar(
+ _fbb,
+ sibling,
+ name__,
+ rating,
+ postfix);
+}
+
+struct FooBarContainer FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
+ typedef FooBarContainerBuilder Builder;
+ enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+ VT_LIST = 4,
+ VT_INITIALIZED = 6,
+ VT_FRUIT = 8,
+ VT_LOCATION = 10
+ };
+ const flatbuffers::Vector<flatbuffers::Offset<benchmarks_flatbuffers::FooBar>> *list() const {
+ return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<benchmarks_flatbuffers::FooBar>> *>(VT_LIST);
+ }
+ bool initialized() const {
+ return GetField<uint8_t>(VT_INITIALIZED, 0) != 0;
+ }
+ benchmarks_flatbuffers::Enum fruit() const {
+ return static_cast<benchmarks_flatbuffers::Enum>(GetField<int16_t>(VT_FRUIT, 0));
+ }
+ const flatbuffers::String *location() const {
+ return GetPointer<const flatbuffers::String *>(VT_LOCATION);
+ }
+ bool Verify(flatbuffers::Verifier &verifier) const {
+ return VerifyTableStart(verifier) &&
+ VerifyOffset(verifier, VT_LIST) &&
+ verifier.VerifyVector(list()) &&
+ verifier.VerifyVectorOfTables(list()) &&
+ VerifyField<uint8_t>(verifier, VT_INITIALIZED) &&
+ VerifyField<int16_t>(verifier, VT_FRUIT) &&
+ VerifyOffset(verifier, VT_LOCATION) &&
+ verifier.VerifyString(location()) &&
+ verifier.EndTable();
+ }
+};
+
+struct FooBarContainerBuilder {
+ typedef FooBarContainer Table;
+ flatbuffers::FlatBufferBuilder &fbb_;
+ flatbuffers::uoffset_t start_;
+ void add_list(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<benchmarks_flatbuffers::FooBar>>> list) {
+ fbb_.AddOffset(FooBarContainer::VT_LIST, list);
+ }
+ void add_initialized(bool initialized) {
+ fbb_.AddElement<uint8_t>(FooBarContainer::VT_INITIALIZED, static_cast<uint8_t>(initialized), 0);
+ }
+ void add_fruit(benchmarks_flatbuffers::Enum fruit) {
+ fbb_.AddElement<int16_t>(FooBarContainer::VT_FRUIT, static_cast<int16_t>(fruit), 0);
+ }
+ void add_location(flatbuffers::Offset<flatbuffers::String> location) {
+ fbb_.AddOffset(FooBarContainer::VT_LOCATION, location);
+ }
+ explicit FooBarContainerBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+ : fbb_(_fbb) {
+ start_ = fbb_.StartTable();
+ }
+ flatbuffers::Offset<FooBarContainer> Finish() {
+ const auto end = fbb_.EndTable(start_);
+ auto o = flatbuffers::Offset<FooBarContainer>(end);
+ return o;
+ }
+};
+
+inline flatbuffers::Offset<FooBarContainer> CreateFooBarContainer(
+ flatbuffers::FlatBufferBuilder &_fbb,
+ flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<benchmarks_flatbuffers::FooBar>>> list = 0,
+ bool initialized = false,
+ benchmarks_flatbuffers::Enum fruit = benchmarks_flatbuffers::Enum_Apples,
+ flatbuffers::Offset<flatbuffers::String> location = 0) {
+ FooBarContainerBuilder builder_(_fbb);
+ builder_.add_location(location);
+ builder_.add_list(list);
+ builder_.add_fruit(fruit);
+ builder_.add_initialized(initialized);
+ return builder_.Finish();
+}
+
+inline flatbuffers::Offset<FooBarContainer> CreateFooBarContainerDirect(
+ flatbuffers::FlatBufferBuilder &_fbb,
+ const std::vector<flatbuffers::Offset<benchmarks_flatbuffers::FooBar>> *list = nullptr,
+ bool initialized = false,
+ benchmarks_flatbuffers::Enum fruit = benchmarks_flatbuffers::Enum_Apples,
+ const char *location = nullptr) {
+ auto list__ = list ? _fbb.CreateVector<flatbuffers::Offset<benchmarks_flatbuffers::FooBar>>(*list) : 0;
+ auto location__ = location ? _fbb.CreateString(location) : 0;
+ return benchmarks_flatbuffers::CreateFooBarContainer(
+ _fbb,
+ list__,
+ initialized,
+ fruit,
+ location__);
+}
+
+inline const benchmarks_flatbuffers::FooBarContainer *GetFooBarContainer(const void *buf) {
+ return flatbuffers::GetRoot<benchmarks_flatbuffers::FooBarContainer>(buf);
+}
+
+inline const benchmarks_flatbuffers::FooBarContainer *GetSizePrefixedFooBarContainer(const void *buf) {
+ return flatbuffers::GetSizePrefixedRoot<benchmarks_flatbuffers::FooBarContainer>(buf);
+}
+
+inline bool VerifyFooBarContainerBuffer(
+ flatbuffers::Verifier &verifier) {
+ return verifier.VerifyBuffer<benchmarks_flatbuffers::FooBarContainer>(nullptr);
+}
+
+inline bool VerifySizePrefixedFooBarContainerBuffer(
+ flatbuffers::Verifier &verifier) {
+ return verifier.VerifySizePrefixedBuffer<benchmarks_flatbuffers::FooBarContainer>(nullptr);
+}
+
+inline void FinishFooBarContainerBuffer(
+ flatbuffers::FlatBufferBuilder &fbb,
+ flatbuffers::Offset<benchmarks_flatbuffers::FooBarContainer> root) {
+ fbb.Finish(root);
+}
+
+inline void FinishSizePrefixedFooBarContainerBuffer(
+ flatbuffers::FlatBufferBuilder &fbb,
+ flatbuffers::Offset<benchmarks_flatbuffers::FooBarContainer> root) {
+ fbb.FinishSizePrefixed(root);
+}
+
+} // namespace benchmarks_flatbuffers
+
+#endif // FLATBUFFERS_GENERATED_BENCH_BENCHMARKS_FLATBUFFERS_H_
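Editor's note: the generated header declares VerifyFooBarContainerBuffer, but nothing in this patch calls it on untrusted input (the benchmark below decodes without verifying, which is fine for a benchmark). A minimal reader sketch, assuming buf and size arrive from a file or socket; the helper name is hypothetical:

    #include <cstddef>
    #include <cstdint>
    #include <iostream>

    #include "benchmarks/cpp/flatbuffers/bench_generated.h"

    // Verify an untrusted buffer before reading it through the generated API.
    bool PrintFooBarContainer(const uint8_t *buf, size_t size) {
      flatbuffers::Verifier verifier(buf, size);
      if (!benchmarks_flatbuffers::VerifyFooBarContainerBuffer(verifier)) {
        return false;  // malformed or truncated buffer
      }
      const auto *container = benchmarks_flatbuffers::GetFooBarContainer(buf);
      std::cout << "location: "
                << (container->location() ? container->location()->c_str() : "")
                << ", fruit: "
                << benchmarks_flatbuffers::EnumNameEnum(container->fruit())
                << ", entries: "
                << (container->list() ? container->list()->size() : 0) << "\n";
      return true;
    }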
diff --git a/chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/fb_bench.cpp b/chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/fb_bench.cpp
new file mode 100644
index 00000000000..2bbb9a53790
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/fb_bench.cpp
@@ -0,0 +1,80 @@
+#include "benchmarks/cpp/flatbuffers/fb_bench.h"
+
+#include <cstdint>
+#include <memory>
+
+#include "benchmarks/cpp/bench.h"
+#include "benchmarks/cpp/flatbuffers/bench_generated.h"
+#include "flatbuffers/flatbuffers.h"
+
+using namespace flatbuffers;
+using namespace benchmarks_flatbuffers;
+
+namespace {
+
+struct FlatBufferBench : Bench {
+ explicit FlatBufferBench(int64_t initial_size, Allocator *allocator)
+ : fbb(initial_size, allocator, false) {}
+
+ uint8_t *Encode(void *, int64_t &len) {
+ fbb.Clear();
+
+ const int kVectorLength = 3;
+ Offset<FooBar> vec[kVectorLength];
+
+ for (int i = 0; i < kVectorLength; ++i) {
+ Foo foo(0xABADCAFEABADCAFE + i, 10000 + i, '@' + i, 1000000 + i);
+ Bar bar(foo, 123456 + i, 3.14159f + i, 10000 + i);
+ auto name = fbb.CreateString("Hello, World!");
+ auto foobar =
+ CreateFooBar(fbb, &bar, name, 3.1415432432445543543 + i, '!' + i);
+ vec[i] = foobar;
+ }
+ auto location = fbb.CreateString("http://google.com/flatbuffers/");
+ auto foobarvec = fbb.CreateVector(vec, kVectorLength);
+ auto foobarcontainer =
+ CreateFooBarContainer(fbb, foobarvec, true, Enum_Bananas, location);
+ fbb.Finish(foobarcontainer);
+
+ len = fbb.GetSize();
+ return fbb.GetBufferPointer();
+ }
+
+ int64_t Use(void *decoded) {
+ sum = 0;
+ auto foobarcontainer = GetFooBarContainer(decoded);
+ sum = 0;
+ Add(foobarcontainer->initialized());
+ Add(foobarcontainer->location()->Length());
+ Add(foobarcontainer->fruit());
+ for (unsigned int i = 0; i < foobarcontainer->list()->Length(); i++) {
+ auto foobar = foobarcontainer->list()->Get(i);
+ Add(foobar->name()->Length());
+ Add(foobar->postfix());
+ Add(static_cast<int64_t>(foobar->rating()));
+ auto bar = foobar->sibling();
+ Add(static_cast<int64_t>(bar->ratio()));
+ Add(bar->size());
+ Add(bar->time());
+ auto &foo = bar->parent();
+ Add(foo.count());
+ Add(foo.id());
+ Add(foo.length());
+ Add(foo.prefix());
+ }
+ return sum;
+ }
+
+ void *Decode(void *buffer, int64_t) { return buffer; }
+ void Dealloc(void *) override{};
+
+ FlatBufferBuilder fbb;
+};
+
+} // namespace
+
+std::unique_ptr<Bench> NewFlatBuffersBench(int64_t initial_size,
+ Allocator *allocator) {
+ return std::unique_ptr<FlatBufferBench>(
+ new FlatBufferBench(initial_size, allocator));
+}
diff --git a/chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/fb_bench.h b/chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/fb_bench.h
new file mode 100644
index 00000000000..ffb5f54c1a1
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/benchmarks/cpp/flatbuffers/fb_bench.h
@@ -0,0 +1,23 @@
+#ifndef BENCHMARKS_CPP_FLATBUFFERS_FB_BENCH_H_
+#define BENCHMARKS_CPP_FLATBUFFERS_FB_BENCH_H_
+
+#include <cstdint>
+#include <memory>
+
+#include "benchmarks/cpp/bench.h"
+#include "include/flatbuffers/flatbuffers.h"
+
+struct StaticAllocator : public flatbuffers::Allocator {
+ explicit StaticAllocator(uint8_t *buffer) : buffer_(buffer) {}
+
+ uint8_t *allocate(size_t) override { return buffer_; }
+
+ void deallocate(uint8_t *, size_t) override {}
+
+ uint8_t *buffer_;
+};
+
+std::unique_ptr<Bench> NewFlatBuffersBench(
+ int64_t initial_size = 1024, flatbuffers::Allocator *allocator = nullptr);
+
+#endif // BENCHMARKS_CPP_FLATBUFFERS_FB_BENCH_H_ \ No newline at end of file
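Editor's note: StaticAllocator returns the same caller-owned block for every allocate() call, which is how FlatBufferBench above keeps its encode loop off the heap (it constructs FlatBufferBuilder(initial_size, allocator, false) in fb_bench.cpp, so the builder does not own the allocator). A small standalone sketch of the same wiring; the 1024-byte size is an arbitrary choice and the builder must never outgrow it:

    #include <cstdint>
    #include <iostream>

    #include "benchmarks/cpp/flatbuffers/fb_bench.h"  // StaticAllocator
    #include "flatbuffers/flatbuffers.h"

    int main() {
      uint8_t backing[1024];                 // fixed, caller-owned storage
      StaticAllocator allocator(&backing[0]);

      // own_allocator = false: the stack-allocated allocator outlives the builder.
      flatbuffers::FlatBufferBuilder fbb(sizeof(backing), &allocator, false);
      auto name = fbb.CreateString("heap-free build");
      fbb.Finish(name);

      std::cout << "encoded " << fbb.GetSize() << " bytes\n";
      return 0;
    }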
diff --git a/chromium/third_party/flatbuffers/src/benchmarks/cpp/raw/raw_bench.cpp b/chromium/third_party/flatbuffers/src/benchmarks/cpp/raw/raw_bench.cpp
new file mode 100644
index 00000000000..810cb1f4edd
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/benchmarks/cpp/raw/raw_bench.cpp
@@ -0,0 +1,109 @@
+#include "benchmarks/cpp/raw/raw_bench.h"
+
+#include <cstdint>
+#include <cstring>
+#include <memory>
+
+#include "benchmarks/cpp/bench.h"
+
+namespace {
+const int64_t kStringLength = 32;
+const int64_t kVectorLength = 3;
+
+enum Enum { Apples, Pears, Bananas };
+
+struct Foo {
+ int64_t id;
+ short count;
+ char prefix;
+ int length;
+};
+
+struct Bar {
+ Foo parent;
+ int time;
+ float ratio;
+ unsigned short size;
+};
+
+struct FooBar {
+ Bar sibling;
+ // We have to stick this in, otherwise strlen() will make it slower than
+ // FlatBuffers:
+ int name_len;
+ char name[kStringLength];
+ double rating;
+ unsigned char postfix;
+};
+
+struct FooBarContainer {
+ FooBar list[kVectorLength]; // 3 copies of the above
+ bool initialized;
+ Enum fruit;
+ int location_len;
+ char location[kStringLength];
+};
+
+struct RawBench : Bench {
+ uint8_t *Encode(void *buf, int64_t &len) {
+ FooBarContainer *fbc = new (buf) FooBarContainer;
+ strcpy(fbc->location, "http://google.com/flatbuffers/"); // Unsafe eek!
+ fbc->location_len = (int)strlen(fbc->location);
+ fbc->fruit = Bananas;
+ fbc->initialized = true;
+ for (int i = 0; i < kVectorLength; i++) {
+      // We add + i so the copies are not identical, which makes for a more
+      // realistic compression test.
+ auto &foobar = fbc->list[i];
+ foobar.rating = 3.1415432432445543543 + i;
+ foobar.postfix = '!' + i;
+ strcpy(foobar.name, "Hello, World!");
+ foobar.name_len = (int)strlen(foobar.name);
+ auto &bar = foobar.sibling;
+ bar.ratio = 3.14159f + i;
+ bar.size = 10000 + i;
+ bar.time = 123456 + i;
+ auto &foo = bar.parent;
+ foo.id = 0xABADCAFEABADCAFE + i;
+ foo.count = 10000 + i;
+ foo.length = 1000000 + i;
+ foo.prefix = '@' + i;
+ }
+
+ len = sizeof(FooBarContainer);
+ return reinterpret_cast<uint8_t *>(fbc);
+ };
+
+ int64_t Use(void *decoded) {
+ auto foobarcontainer = reinterpret_cast<FooBarContainer *>(decoded);
+ sum = 0;
+ Add(foobarcontainer->initialized);
+ Add(foobarcontainer->location_len);
+ Add(foobarcontainer->fruit);
+ for (unsigned int i = 0; i < kVectorLength; i++) {
+ auto foobar = &foobarcontainer->list[i];
+ Add(foobar->name_len);
+ Add(foobar->postfix);
+ Add(static_cast<int64_t>(foobar->rating));
+ auto bar = &foobar->sibling;
+ Add(static_cast<int64_t>(bar->ratio));
+ Add(bar->size);
+ Add(bar->time);
+ auto &foo = bar->parent;
+ Add(foo.count);
+ Add(foo.id);
+ Add(foo.length);
+ Add(foo.prefix);
+ }
+ return sum;
+ }
+
+ void *Decode(void *buf, int64_t) { return buf; }
+ void Dealloc(void *) override{};
+};
+
+} // namespace
+
+std::unique_ptr<Bench> NewRawBench() {
+ return std::unique_ptr<RawBench>(new RawBench());
+}
diff --git a/chromium/third_party/flatbuffers/src/benchmarks/cpp/raw/raw_bench.h b/chromium/third_party/flatbuffers/src/benchmarks/cpp/raw/raw_bench.h
new file mode 100644
index 00000000000..68bb278c26f
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/benchmarks/cpp/raw/raw_bench.h
@@ -0,0 +1,10 @@
+#ifndef BENCHMARKS_CPP_RAW_RAW_BENCH_H_
+#define BENCHMARKS_CPP_RAW_RAW_BENCH_H_
+
+#include <memory>
+
+#include "benchmarks/cpp/bench.h"
+
+std::unique_ptr<Bench> NewRawBench();
+
+#endif // BENCHMARKS_CPP_RAW_RAW_BENCH_H_ \ No newline at end of file
diff --git a/chromium/third_party/flatbuffers/src/build_defs.bzl b/chromium/third_party/flatbuffers/src/build_defs.bzl
index 88792be45da..231804bf3f4 100644
--- a/chromium/third_party/flatbuffers/src/build_defs.bzl
+++ b/chromium/third_party/flatbuffers/src/build_defs.bzl
@@ -209,7 +209,7 @@ def flatbuffer_cc_library(
Happy dependent Flatbuffering!
'''
output_headers = [
- (out_prefix + "%s_generated.h") % (s.replace(".fbs", "").split("/")[-1])
+ (out_prefix + "%s_generated.h") % (s.replace(".fbs", "").split("/")[-1].split(":")[-1])
for s in srcs
]
reflection_name = "%s_reflection" % name if gen_reflections else ""
diff --git a/chromium/third_party/flatbuffers/src/dart/CHANGELOG.md b/chromium/third_party/flatbuffers/src/dart/CHANGELOG.md
index 5e2d2deea50..cfed9def7b2 100644
--- a/chromium/third_party/flatbuffers/src/dart/CHANGELOG.md
+++ b/chromium/third_party/flatbuffers/src/dart/CHANGELOG.md
@@ -1,4 +1,22 @@
-# CHANGELOG
+## 2.0.0
+
+- switch to null safety (#6696)
+- add Object APIs (pack/unpack) (#6682, #6723, #6846)
+- add custom builder buffer allocator support (#6711)
+- add `Builder.size()` - finished buffer size (#6403)
+- make `writeString()` argument non-nullable (#6737)
+- make tables fixed size (`startTable()` now expects the number of fields) (#6735)
+- make table deduplication optional (param `deduplicateTables`) (#6734)
+- change `Builder.reset()` to reuse an existing buffer (#6661)
+- change table building to assert() instead of exceptions (#6754)
+- optimize `writeString()` for ASCII (param `asciiOptimization`) (#6736)
+- change `StringReader` to make ASCII optimization optional (param `asciiOptimization`) (#6758)
+- change `[byte]` and `[ubyte]` representation to `dart:typed_data` `Int8List` and `Uint8List` (#6839)
+- rename `lowFinish()` to `buffer` getter (#6712)
+- fix `Builder._writeString()` - always write trailing zero byte (#6390)
+- fix `Builder.reset()` - clear vTables (#6386)
+- make sure added padding is zeroed, same as in C++ (#6716)
+- many performance improvements (#6755)
## 1.9.2
@@ -11,4 +29,4 @@
## 1.9.0
-- Initial release, supports Dart 1.x and many dev versions of Dart 2.x \ No newline at end of file
+- Initial release, supports Dart 1.x and many dev versions of Dart 2.x
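
A minimal sketch tying together several of the 2.0.0 changes listed above (new `Builder` parameters, non-nullable `writeString()`, fixed-size `startTable()`, and the `buffer` getter). The two-field table is invented for illustration and does not come from any schema in this change:

    import 'package:flat_buffers/flat_buffers.dart' as fb;

    void main() {
      // New constructor parameters (#6711, #6734); a custom allocator could
      // also be passed via `allocator`, left at its default here.
      final builder = fb.Builder(
        initialSize: 1024,
        internStrings: true,
        deduplicateTables: false,
      );

      // Strings/vectors must be written before the table that references them.
      final nameOffset = builder.writeString('Axe', asciiOptimization: true); // #6736

      builder.startTable(2);        // tables are now fixed size (#6735)
      builder.addOffset(0, nameOffset);
      builder.addInt16(1, 5);
      final table = builder.endTable();

      builder.finish(table);        // finish() no longer returns the bytes
      final bytes = builder.buffer; // the `buffer` getter replaces lowFinish() (#6712)
      print('${bytes.length} bytes, Builder.size() = ${builder.size()}'); // #6403
    }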
diff --git a/chromium/third_party/flatbuffers/src/dart/README.md b/chromium/third_party/flatbuffers/src/dart/README.md
index 11bc0c48f1b..312c1dadad6 100644
--- a/chromium/third_party/flatbuffers/src/dart/README.md
+++ b/chromium/third_party/flatbuffers/src/dart/README.md
@@ -1,13 +1,17 @@
# FlatBuffers for Dart
-This package is used to read and write FlatBuffer files in Dart.
-
-Most consumers will want to use the [`flatc`](https://github.com/google/flatbuffers)
-compiler to generate Dart code from a FlatBuffers IDL schema. For example, the
-`monster_my_game.sample_generated.dart` was generated with `flatc` from
-`monster.fbs` in the example folder. The generated classes can be used to read
-or write binary files that are interoperable with other languages and platforms
-supported by FlatBuffers, as illustrated in the `example.dart` in the
+This package is used to read and write [FlatBuffers](https://google.github.io/flatbuffers/).
+
+Most consumers will want to use the [`flatc` FlatBuffer compiler](https://github.com/google/flatbuffers) binary for their platform:
+* [Linux](https://github.com/google/flatbuffers/suites/4363603985/artifacts/114682272)
+* [macOS](https://github.com/google/flatbuffers/suites/4363603985/artifacts/114682273)
+* [Windows](https://github.com/google/flatbuffers/suites/4363603985/artifacts/114682274)
+
+The FlatBuffer compiler `flatc` reads a FlatBuffers IDL schema and generates Dart code.
+The generated classes can be used to read or write binary data/files that are interoperable with
+other languages and platforms supported by FlatBuffers, as illustrated in the `example.dart` in the
examples folder.
-Additional documentation and examples are available [at the FlatBuffers site](https://google.github.io/flatbuffers/index.html) \ No newline at end of file
+For more details and documentation, head over to the official site and read the
+[Tutorial](https://google.github.io/flatbuffers/flatbuffers_guide_tutorial.html) and how to
+[use FlatBuffers in Dart](https://google.github.io/flatbuffers/flatbuffers_guide_use_dart.html). \ No newline at end of file
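
A short sketch of the workflow described in the README above; the schema and generated file names follow the package's example folder, `monster.bin` is a hypothetical input file, and `flatc` is assumed to be on the PATH:

    // Generate the Dart bindings first, e.g.:
    //   flatc --dart monster.fbs
    // which produces monster_my_game.sample_generated.dart for the example schema.
    import 'dart:io';

    import 'monster_my_game.sample_generated.dart' as my_game;

    void main() {
      // Read a binary buffer written by any FlatBuffers language binding.
      final bytes = File('monster.bin').readAsBytesSync();
      final monster = my_game.Monster(bytes);
      print('${monster.name}: hp=${monster.hp}, mana=${monster.mana}');
    }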
diff --git a/chromium/third_party/flatbuffers/src/dart/analysis_options.yaml b/chromium/third_party/flatbuffers/src/dart/analysis_options.yaml
new file mode 100644
index 00000000000..572dd239d09
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/dart/analysis_options.yaml
@@ -0,0 +1 @@
+include: package:lints/recommended.yaml
diff --git a/chromium/third_party/flatbuffers/src/dart/example/example.dart b/chromium/third_party/flatbuffers/src/dart/example/example.dart
index c84ffa60198..d397bbb3f44 100644
--- a/chromium/third_party/flatbuffers/src/dart/example/example.dart
+++ b/chromium/third_party/flatbuffers/src/dart/example/example.dart
@@ -15,7 +15,7 @@
*/
import 'package:flat_buffers/flat_buffers.dart' as fb;
-import './monster_my_game.sample_generated.dart' as myGame;
+import './monster_my_game.sample_generated.dart' as my_game;
// Example how to use FlatBuffers to create and read binary buffers.
@@ -25,20 +25,20 @@ void main() {
}
void builderTest() {
- final builder = new fb.Builder(initialSize: 1024);
+ final builder = fb.Builder(initialSize: 1024);
final int? weaponOneName = builder.writeString("Sword");
final int weaponOneDamage = 3;
final int? weaponTwoName = builder.writeString("Axe");
final int weaponTwoDamage = 5;
- final swordBuilder = new myGame.WeaponBuilder(builder)
+ final swordBuilder = my_game.WeaponBuilder(builder)
..begin()
..addNameOffset(weaponOneName)
..addDamage(weaponOneDamage);
final int sword = swordBuilder.finish();
- final axeBuilder = new myGame.WeaponBuilder(builder)
+ final axeBuilder = my_game.WeaponBuilder(builder)
..begin()
..addNameOffset(weaponTwoName)
..addDamage(weaponTwoDamage);
@@ -54,7 +54,7 @@ void builderTest() {
final weapons = builder.writeList([sword, axe]);
// Struct builders are very easy to reuse.
- final vec3Builder = new myGame.Vec3Builder(builder);
+ final vec3Builder = my_game.Vec3Builder(builder);
vec3Builder.finish(4.0, 5.0, 6.0);
vec3Builder.finish(1.0, 2.0, 3.0);
@@ -62,21 +62,21 @@ void builderTest() {
final int hp = 300;
final int mana = 150;
- final monster = new myGame.MonsterBuilder(builder)
+ final monster = my_game.MonsterBuilder(builder)
..begin()
..addNameOffset(name)
..addInventoryOffset(inventory)
..addWeaponsOffset(weapons)
- ..addEquippedType(myGame.EquipmentTypeId.Weapon)
+ ..addEquippedType(my_game.EquipmentTypeId.Weapon)
..addEquippedOffset(axe)
..addHp(hp)
..addMana(mana)
..addPos(vec3Builder.finish(1.0, 2.0, 3.0))
- ..addColor(myGame.Color.Red);
+ ..addColor(my_game.Color.Red);
final int monsteroff = monster.finish();
- final buffer = builder.finish(monsteroff);
- if (verify(buffer)) {
+ builder.finish(monsteroff);
+ if (verify(builder.buffer)) {
print(
"The FlatBuffer was successfully created with a builder and verified!");
}
@@ -85,17 +85,17 @@ void builderTest() {
void objectBuilderTest() {
// Create the builder here so we can use it for both weapons and equipped
// the actual data will only be written to the buffer once.
- var axe = new myGame.WeaponObjectBuilder(name: 'Axe', damage: 5);
+ var axe = my_game.WeaponObjectBuilder(name: 'Axe', damage: 5);
- var monsterBuilder = new myGame.MonsterObjectBuilder(
- pos: new myGame.Vec3ObjectBuilder(x: 1.0, y: 2.0, z: 3.0),
+ var monsterBuilder = my_game.MonsterObjectBuilder(
+ pos: my_game.Vec3ObjectBuilder(x: 1.0, y: 2.0, z: 3.0),
mana: 150,
hp: 300,
name: 'Orc',
inventory: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
- color: myGame.Color.Red,
- weapons: [new myGame.WeaponObjectBuilder(name: 'Sword', damage: 3), axe],
- equippedType: myGame.EquipmentTypeId.Weapon,
+ color: my_game.Color.Red,
+ weapons: [my_game.WeaponObjectBuilder(name: 'Sword', damage: 3), axe],
+ equippedType: my_game.EquipmentTypeId.Weapon,
equipped: axe,
);
@@ -114,7 +114,7 @@ void objectBuilderTest() {
bool verify(List<int> buffer) {
// Get access to the root:
- var monster = new myGame.Monster(buffer);
+ var monster = my_game.Monster(buffer);
// Get and test some scalar types from the FlatBuffer.
assert(monster.hp == 80);
@@ -131,20 +131,20 @@ bool verify(List<int> buffer) {
assert(inv[9] == 9);
// Get and test the `weapons` FlatBuffers's `vector`.
- var expected_weapon_names = ["Sword", "Axe"];
- var expected_weapon_damages = [3, 5];
+ var expectedWeaponNames = ["Sword", "Axe"];
+ var expectedWeaponDamages = [3, 5];
var weps = monster.weapons!;
for (int i = 0; i < weps.length; i++) {
- assert(weps[i].name == expected_weapon_names[i]);
- assert(weps[i].damage == expected_weapon_damages[i]);
+ assert(weps[i].name == expectedWeaponNames[i]);
+ assert(weps[i].damage == expectedWeaponDamages[i]);
}
// Get and test the `Equipment` union (`equipped` field).
- assert(monster.equippedType!.value == myGame.EquipmentTypeId.Weapon.value);
- assert(monster.equippedType == myGame.EquipmentTypeId.Weapon);
+ assert(monster.equippedType!.value == my_game.EquipmentTypeId.Weapon.value);
+ assert(monster.equippedType == my_game.EquipmentTypeId.Weapon);
- assert(monster.equipped is myGame.Weapon);
- var equipped = monster.equipped as myGame.Weapon;
+ assert(monster.equipped is my_game.Weapon);
+ var equipped = monster.equipped as my_game.Weapon;
assert(equipped.name == "Axe");
assert(equipped.damage == 5);
diff --git a/chromium/third_party/flatbuffers/src/dart/example/monster_my_game.sample_generated.dart b/chromium/third_party/flatbuffers/src/dart/example/monster_my_game.sample_generated.dart
index 4f08860fb38..ba0a81f0d57 100644
--- a/chromium/third_party/flatbuffers/src/dart/example/monster_my_game.sample_generated.dart
+++ b/chromium/third_party/flatbuffers/src/dart/example/monster_my_game.sample_generated.dart
@@ -14,7 +14,7 @@ class Color {
factory Color.fromValue(int value) {
final result = values[value];
if (result == null) {
- throw new StateError('Invalid value $value for bit flag enum Color');
+ throw StateError('Invalid value $value for bit flag enum Color');
}
return result;
}
@@ -26,15 +26,15 @@ class Color {
static const int maxValue = 2;
static bool containsValue(int value) => values.containsKey(value);
- static const Color Red = const Color._(0);
- static const Color Green = const Color._(1);
- static const Color Blue = const Color._(2);
+ static const Color Red = Color._(0);
+ static const Color Green = Color._(1);
+ static const Color Blue = Color._(2);
static const Map<int, Color> values = {
0: Red,
1: Green,
2: Blue};
- static const fb.Reader<Color> reader = const _ColorReader();
+ static const fb.Reader<Color> reader = _ColorReader();
@override
String toString() {
@@ -50,7 +50,7 @@ class _ColorReader extends fb.Reader<Color> {
@override
Color read(fb.BufferContext bc, int offset) =>
- new Color.fromValue(const fb.Int8Reader().read(bc, offset));
+ Color.fromValue(const fb.Int8Reader().read(bc, offset));
}
class EquipmentTypeId {
@@ -60,7 +60,7 @@ class EquipmentTypeId {
factory EquipmentTypeId.fromValue(int value) {
final result = values[value];
if (result == null) {
- throw new StateError('Invalid value $value for bit flag enum EquipmentTypeId');
+ throw StateError('Invalid value $value for bit flag enum EquipmentTypeId');
}
return result;
}
@@ -72,13 +72,13 @@ class EquipmentTypeId {
static const int maxValue = 1;
static bool containsValue(int value) => values.containsKey(value);
- static const EquipmentTypeId NONE = const EquipmentTypeId._(0);
- static const EquipmentTypeId Weapon = const EquipmentTypeId._(1);
+ static const EquipmentTypeId NONE = EquipmentTypeId._(0);
+ static const EquipmentTypeId Weapon = EquipmentTypeId._(1);
static const Map<int, EquipmentTypeId> values = {
0: NONE,
1: Weapon};
- static const fb.Reader<EquipmentTypeId> reader = const _EquipmentTypeIdReader();
+ static const fb.Reader<EquipmentTypeId> reader = _EquipmentTypeIdReader();
@override
String toString() {
@@ -94,13 +94,13 @@ class _EquipmentTypeIdReader extends fb.Reader<EquipmentTypeId> {
@override
EquipmentTypeId read(fb.BufferContext bc, int offset) =>
- new EquipmentTypeId.fromValue(const fb.Uint8Reader().read(bc, offset));
+ EquipmentTypeId.fromValue(const fb.Uint8Reader().read(bc, offset));
}
class Vec3 {
Vec3._(this._bc, this._bcOffset);
- static const fb.Reader<Vec3> reader = const _Vec3Reader();
+ static const fb.Reader<Vec3> reader = _Vec3Reader();
final fb.BufferContext _bc;
final int _bcOffset;
@@ -123,11 +123,11 @@ class _Vec3Reader extends fb.StructReader<Vec3> {
@override
Vec3 createObject(fb.BufferContext bc, int offset) =>
- new Vec3._(bc, offset);
+ Vec3._(bc, offset);
}
class Vec3Builder {
- Vec3Builder(this.fbBuilder) {}
+ Vec3Builder(this.fbBuilder);
final fb.Builder fbBuilder;
@@ -166,19 +166,19 @@ class Vec3ObjectBuilder extends fb.ObjectBuilder {
/// Convenience method to serialize to byte list.
@override
Uint8List toBytes([String? fileIdentifier]) {
- fb.Builder fbBuilder = new fb.Builder();
- int offset = finish(fbBuilder);
- return fbBuilder.finish(offset, fileIdentifier);
+ final fbBuilder = fb.Builder(deduplicateTables: false);
+ fbBuilder.finish(finish(fbBuilder), fileIdentifier);
+ return fbBuilder.buffer;
}
}
class Monster {
Monster._(this._bc, this._bcOffset);
factory Monster(List<int> bytes) {
- fb.BufferContext rootRef = new fb.BufferContext.fromBytes(bytes);
+ final rootRef = fb.BufferContext.fromBytes(bytes);
return reader.read(rootRef, 0);
}
- static const fb.Reader<Monster> reader = const _MonsterReader();
+ static const fb.Reader<Monster> reader = _MonsterReader();
final fb.BufferContext _bc;
final int _bcOffset;
@@ -187,11 +187,11 @@ class Monster {
int get mana => const fb.Int16Reader().vTableGet(_bc, _bcOffset, 6, 150);
int get hp => const fb.Int16Reader().vTableGet(_bc, _bcOffset, 8, 100);
String? get name => const fb.StringReader().vTableGetNullable(_bc, _bcOffset, 10);
- List<int>? get inventory => const fb.ListReader<int>(const fb.Uint8Reader()).vTableGetNullable(_bc, _bcOffset, 14);
+ List<int>? get inventory => const fb.Uint8ListReader().vTableGetNullable(_bc, _bcOffset, 14);
Color get color => Color.fromValue(const fb.Int8Reader().vTableGet(_bc, _bcOffset, 16, 2));
List<Weapon>? get weapons => const fb.ListReader<Weapon>(Weapon.reader).vTableGetNullable(_bc, _bcOffset, 18);
EquipmentTypeId? get equippedType => EquipmentTypeId._createOrNull(const fb.Uint8Reader().vTableGetNullable(_bc, _bcOffset, 20));
- dynamic? get equipped {
+ dynamic get equipped {
switch (equippedType?.value) {
case 1: return Weapon.reader.vTableGetNullable(_bc, _bcOffset, 22);
default: return null;
@@ -210,16 +210,16 @@ class _MonsterReader extends fb.TableReader<Monster> {
@override
Monster createObject(fb.BufferContext bc, int offset) =>
- new Monster._(bc, offset);
+ Monster._(bc, offset);
}
class MonsterBuilder {
- MonsterBuilder(this.fbBuilder) {}
+ MonsterBuilder(this.fbBuilder);
final fb.Builder fbBuilder;
void begin() {
- fbBuilder.startTable();
+ fbBuilder.startTable(10);
}
int addPos(int offset) {
@@ -277,7 +277,7 @@ class MonsterObjectBuilder extends fb.ObjectBuilder {
final Color? _color;
final List<WeaponObjectBuilder>? _weapons;
final EquipmentTypeId? _equippedType;
- final dynamic? _equipped;
+ final dynamic _equipped;
final List<Vec3ObjectBuilder>? _path;
MonsterObjectBuilder({
@@ -289,7 +289,7 @@ class MonsterObjectBuilder extends fb.ObjectBuilder {
Color? color,
List<WeaponObjectBuilder>? weapons,
EquipmentTypeId? equippedType,
- dynamic? equipped,
+ dynamic equipped,
List<Vec3ObjectBuilder>? path,
})
: _pos = pos,
@@ -306,18 +306,16 @@ class MonsterObjectBuilder extends fb.ObjectBuilder {
/// Finish building, and store into the [fbBuilder].
@override
int finish(fb.Builder fbBuilder) {
- final int? nameOffset = fbBuilder.writeString(_name);
- final int? inventoryOffset = _inventory?.isNotEmpty == true
- ? fbBuilder.writeListUint8(_inventory!)
- : null;
- final int? weaponsOffset = _weapons?.isNotEmpty == true
- ? fbBuilder.writeList(_weapons!.map((b) => b.getOrCreateOffset(fbBuilder)).toList())
- : null;
+ final int? nameOffset = _name == null ? null
+ : fbBuilder.writeString(_name!);
+ final int? inventoryOffset = _inventory == null ? null
+ : fbBuilder.writeListUint8(_inventory!);
+ final int? weaponsOffset = _weapons == null ? null
+ : fbBuilder.writeList(_weapons!.map((b) => b.getOrCreateOffset(fbBuilder)).toList());
final int? equippedOffset = _equipped?.getOrCreateOffset(fbBuilder);
- final int? pathOffset = _path?.isNotEmpty == true
- ? fbBuilder.writeListOfStructs(_path!)
- : null;
- fbBuilder.startTable();
+ final int? pathOffset = _path == null ? null
+ : fbBuilder.writeListOfStructs(_path!);
+ fbBuilder.startTable(10);
if (_pos != null) {
fbBuilder.addStruct(0, _pos!.finish(fbBuilder));
}
@@ -336,19 +334,19 @@ class MonsterObjectBuilder extends fb.ObjectBuilder {
/// Convenience method to serialize to byte list.
@override
Uint8List toBytes([String? fileIdentifier]) {
- fb.Builder fbBuilder = new fb.Builder();
- int offset = finish(fbBuilder);
- return fbBuilder.finish(offset, fileIdentifier);
+ final fbBuilder = fb.Builder(deduplicateTables: false);
+ fbBuilder.finish(finish(fbBuilder), fileIdentifier);
+ return fbBuilder.buffer;
}
}
class Weapon {
Weapon._(this._bc, this._bcOffset);
factory Weapon(List<int> bytes) {
- fb.BufferContext rootRef = new fb.BufferContext.fromBytes(bytes);
+ final rootRef = fb.BufferContext.fromBytes(bytes);
return reader.read(rootRef, 0);
}
- static const fb.Reader<Weapon> reader = const _WeaponReader();
+ static const fb.Reader<Weapon> reader = _WeaponReader();
final fb.BufferContext _bc;
final int _bcOffset;
@@ -367,16 +365,16 @@ class _WeaponReader extends fb.TableReader<Weapon> {
@override
Weapon createObject(fb.BufferContext bc, int offset) =>
- new Weapon._(bc, offset);
+ Weapon._(bc, offset);
}
class WeaponBuilder {
- WeaponBuilder(this.fbBuilder) {}
+ WeaponBuilder(this.fbBuilder);
final fb.Builder fbBuilder;
void begin() {
- fbBuilder.startTable();
+ fbBuilder.startTable(2);
}
int addNameOffset(int? offset) {
@@ -407,8 +405,9 @@ class WeaponObjectBuilder extends fb.ObjectBuilder {
/// Finish building, and store into the [fbBuilder].
@override
int finish(fb.Builder fbBuilder) {
- final int? nameOffset = fbBuilder.writeString(_name);
- fbBuilder.startTable();
+ final int? nameOffset = _name == null ? null
+ : fbBuilder.writeString(_name!);
+ fbBuilder.startTable(2);
fbBuilder.addOffset(0, nameOffset);
fbBuilder.addInt16(1, _damage);
return fbBuilder.endTable();
@@ -417,8 +416,8 @@ class WeaponObjectBuilder extends fb.ObjectBuilder {
/// Convenience method to serialize to byte list.
@override
Uint8List toBytes([String? fileIdentifier]) {
- fb.Builder fbBuilder = new fb.Builder();
- int offset = finish(fbBuilder);
- return fbBuilder.finish(offset, fileIdentifier);
+ final fbBuilder = fb.Builder(deduplicateTables: false);
+ fbBuilder.finish(finish(fbBuilder), fileIdentifier);
+ return fbBuilder.buffer;
}
}
diff --git a/chromium/third_party/flatbuffers/src/dart/lib/flat_buffers.dart b/chromium/third_party/flatbuffers/src/dart/lib/flat_buffers.dart
index 74c25943023..bb70811b994 100644
--- a/chromium/third_party/flatbuffers/src/dart/lib/flat_buffers.dart
+++ b/chromium/third_party/flatbuffers/src/dart/lib/flat_buffers.dart
@@ -22,7 +22,7 @@ const int _sizeofFloat64 = 8;
///
/// This callback is used by other struct's `finish` methods to write the nested
/// struct's fields inline.
-typedef void StructBuilder();
+typedef StructBuilder = void Function();
/// Buffer with data and some context about it.
class BufferContext {
@@ -31,51 +31,56 @@ class BufferContext {
ByteData get buffer => _buffer;
/// Create from a FlatBuffer represented by a list of bytes (uint8).
- factory BufferContext.fromBytes(List<int> byteList) {
- Uint8List uint8List = _asUint8List(byteList);
- ByteData buf = new ByteData.view(uint8List.buffer, uint8List.offsetInBytes);
- return BufferContext(buf);
- }
+ factory BufferContext.fromBytes(List<int> byteList) =>
+ BufferContext(byteList is Uint8List
+ ? byteList.buffer.asByteData(byteList.offsetInBytes)
+ : ByteData.view(Uint8List.fromList(byteList).buffer));
/// Create from a FlatBuffer represented by ByteData.
BufferContext(this._buffer);
- int derefObject(int offset) {
- return offset + _getUint32(offset);
- }
+ @pragma('vm:prefer-inline')
+ int derefObject(int offset) => offset + _getUint32(offset);
- Uint8List _asUint8LIst(int offset, int length) =>
+ @pragma('vm:prefer-inline')
+ Uint8List _asUint8List(int offset, int length) =>
_buffer.buffer.asUint8List(_buffer.offsetInBytes + offset, length);
+ @pragma('vm:prefer-inline')
double _getFloat64(int offset) => _buffer.getFloat64(offset, Endian.little);
+ @pragma('vm:prefer-inline')
double _getFloat32(int offset) => _buffer.getFloat32(offset, Endian.little);
+ @pragma('vm:prefer-inline')
int _getInt64(int offset) => _buffer.getInt64(offset, Endian.little);
+ @pragma('vm:prefer-inline')
int _getInt32(int offset) => _buffer.getInt32(offset, Endian.little);
+ @pragma('vm:prefer-inline')
int _getInt16(int offset) => _buffer.getInt16(offset, Endian.little);
+ @pragma('vm:prefer-inline')
int _getInt8(int offset) => _buffer.getInt8(offset);
+ @pragma('vm:prefer-inline')
int _getUint64(int offset) => _buffer.getUint64(offset, Endian.little);
+ @pragma('vm:prefer-inline')
int _getUint32(int offset) => _buffer.getUint32(offset, Endian.little);
+ @pragma('vm:prefer-inline')
int _getUint16(int offset) => _buffer.getUint16(offset, Endian.little);
+ @pragma('vm:prefer-inline')
int _getUint8(int offset) => _buffer.getUint8(offset);
+}
- /// If the [byteList] is already a [Uint8List] return it.
- /// Otherwise return a [Uint8List] copy of the [byteList].
- static Uint8List _asUint8List(List<int> byteList) {
- if (byteList is Uint8List) {
- return byteList;
- } else {
- return new Uint8List.fromList(byteList);
- }
- }
+/// Interface implemented by the "object-api" classes (ending with "T").
+abstract class Packable {
+ /// Serialize the object using the given builder, returning the offset.
+ int pack(Builder fbBuilder);
}
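
For illustration, a hand-written object-API class might implement [Packable] as follows; `WeaponT` and its fields are hypothetical (not generated from any schema in this change) and assume `package:flat_buffers/flat_buffers.dart` is imported:

    // Hypothetical object-API ("T") class: pack() serializes the object and
    // returns the offset of the finished table.
    class WeaponT implements Packable {
      String? name;
      int damage;

      WeaponT({this.name, this.damage = 0});

      @override
      int pack(Builder fbBuilder) {
        // Sub-objects (here, the string) are written before the table itself.
        final nameOffset = name == null ? null : fbBuilder.writeString(name!);
        fbBuilder.startTable(2);
        fbBuilder.addOffset(0, nameOffset);
        fbBuilder.addInt16(1, damage);
        return fbBuilder.endTable();
      }
    }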
/// Class implemented by typed builders generated by flatc.
@@ -103,14 +108,19 @@ abstract class ObjectBuilder {
/// Class that helps building flat buffers.
class Builder {
+ bool _finished = false;
+
final int initialSize;
/// The list of existing VTable(s).
- final List<int> _vTables = List<int>.filled(16, 0, growable: true)
- ..length = 0;
+ final List<int> _vTables;
+
+ final bool deduplicateTables;
ByteData _buf;
+ final Allocator _allocator;
+
/// The maximum alignment that has been seen so far. If [_buf] has to be
/// reallocated in the future (to insert room at its start for more bytes) the
/// reallocation will need to be a multiple of this many bytes.
@@ -138,21 +148,28 @@ class Builder {
/// automatically grow the array if/as needed. `internStrings`, if set to
/// true, will cause [writeString] to pool strings in the buffer so that
/// identical strings will always use the same offset in tables.
- Builder({this.initialSize: 1024, bool internStrings = false})
- : _buf = ByteData(initialSize) {
- if (internStrings == true) {
- _strings = new Map<String, int>();
+ Builder({
+ this.initialSize = 1024,
+ bool internStrings = false,
+ Allocator allocator = const DefaultAllocator(),
+ this.deduplicateTables = true,
+ }) : _allocator = allocator,
+ _buf = allocator.allocate(initialSize),
+ _vTables = deduplicateTables ? [] : const [] {
+ if (internStrings) {
+ _strings = <String, int>{};
}
}
/// Calculate the finished buffer size (aligned).
- int size() => _tail + ((-_tail) % _maxAlign);
+ @pragma('vm:prefer-inline')
+ int size() => _tail + ((-_tail) & (_maxAlign - 1));
/// Add the [field] with the given boolean [value]. The field is not added if
/// the [value] is equal to [def]. Booleans are stored as 8-bit fields with
/// `0` for `false` and `1` for `true`.
void addBool(int field, bool? value, [bool? def]) {
- _ensureCurrentVTable();
+ assert(_inVTable);
if (value != null && value != def) {
_prepare(_sizeofUint8, 1);
_trackField(field);
@@ -163,134 +180,132 @@ class Builder {
/// Add the [field] with the given 32-bit signed integer [value]. The field is
/// not added if the [value] is equal to [def].
void addInt32(int field, int? value, [int? def]) {
- _ensureCurrentVTable();
+ assert(_inVTable);
if (value != null && value != def) {
_prepare(_sizeofInt32, 1);
_trackField(field);
- _setInt32AtTail(_buf, _tail, value);
+ _setInt32AtTail(_tail, value);
}
}
/// Add the [field] with the given 32-bit signed integer [value]. The field is
/// not added if the [value] is equal to [def].
void addInt16(int field, int? value, [int? def]) {
- _ensureCurrentVTable();
+ assert(_inVTable);
if (value != null && value != def) {
_prepare(_sizeofInt16, 1);
_trackField(field);
- _setInt16AtTail(_buf, _tail, value);
+ _setInt16AtTail(_tail, value);
}
}
/// Add the [field] with the given 8-bit signed integer [value]. The field is
/// not added if the [value] is equal to [def].
void addInt8(int field, int? value, [int? def]) {
- _ensureCurrentVTable();
+ assert(_inVTable);
if (value != null && value != def) {
_prepare(_sizeofInt8, 1);
_trackField(field);
- _setInt8AtTail(_buf, _tail, value);
+ _setInt8AtTail(_tail, value);
}
}
void addStruct(int field, int offset) {
- _ensureCurrentVTable();
+ assert(_inVTable);
_trackField(field);
_currentVTable!.addField(field, offset);
}
/// Add the [field] referencing an object with the given [offset].
void addOffset(int field, int? offset) {
- _ensureCurrentVTable();
+ assert(_inVTable);
if (offset != null) {
_prepare(_sizeofUint32, 1);
_trackField(field);
- _setUint32AtTail(_buf, _tail, _tail - offset);
+ _setUint32AtTail(_tail, _tail - offset);
}
}
/// Add the [field] with the given 32-bit unsigned integer [value]. The field
/// is not added if the [value] is equal to [def].
void addUint32(int field, int? value, [int? def]) {
- _ensureCurrentVTable();
+ assert(_inVTable);
if (value != null && value != def) {
_prepare(_sizeofUint32, 1);
_trackField(field);
- _setUint32AtTail(_buf, _tail, value);
+ _setUint32AtTail(_tail, value);
}
}
/// Add the [field] with the given 32-bit unsigned integer [value]. The field
/// is not added if the [value] is equal to [def].
void addUint16(int field, int? value, [int? def]) {
- _ensureCurrentVTable();
+ assert(_inVTable);
if (value != null && value != def) {
_prepare(_sizeofUint16, 1);
_trackField(field);
- _setUint16AtTail(_buf, _tail, value);
+ _setUint16AtTail(_tail, value);
}
}
/// Add the [field] with the given 8-bit unsigned integer [value]. The field
/// is not added if the [value] is equal to [def].
void addUint8(int field, int? value, [int? def]) {
- _ensureCurrentVTable();
+ assert(_inVTable);
if (value != null && value != def) {
_prepare(_sizeofUint8, 1);
_trackField(field);
- _setUint8AtTail(_buf, _tail, value);
+ _setUint8AtTail(_tail, value);
}
}
/// Add the [field] with the given 32-bit float [value]. The field
/// is not added if the [value] is equal to [def].
void addFloat32(int field, double? value, [double? def]) {
- _ensureCurrentVTable();
+ assert(_inVTable);
if (value != null && value != def) {
_prepare(_sizeofFloat32, 1);
_trackField(field);
- _setFloat32AtTail(_buf, _tail, value);
+ _setFloat32AtTail(_tail, value);
}
}
/// Add the [field] with the given 64-bit double [value]. The field
/// is not added if the [value] is equal to [def].
void addFloat64(int field, double? value, [double? def]) {
- _ensureCurrentVTable();
+ assert(_inVTable);
if (value != null && value != def) {
_prepare(_sizeofFloat64, 1);
_trackField(field);
- _setFloat64AtTail(_buf, _tail, value);
+ _setFloat64AtTail(_tail, value);
}
}
/// Add the [field] with the given 64-bit unsigned integer [value]. The field
/// is not added if the [value] is equal to [def].
void addUint64(int field, int? value, [double? def]) {
- _ensureCurrentVTable();
+ assert(_inVTable);
if (value != null && value != def) {
_prepare(_sizeofUint64, 1);
_trackField(field);
- _setUint64AtTail(_buf, _tail, value);
+ _setUint64AtTail(_tail, value);
}
}
/// Add the [field] with the given 64-bit unsigned integer [value]. The field
/// is not added if the [value] is equal to [def].
void addInt64(int field, int? value, [double? def]) {
- _ensureCurrentVTable();
+ assert(_inVTable);
if (value != null && value != def) {
_prepare(_sizeofInt64, 1);
_trackField(field);
- _setInt64AtTail(_buf, _tail, value);
+ _setInt64AtTail(_tail, value);
}
}
/// End the current table and return its offset.
int endTable() {
- if (_currentVTable == null) {
- throw new StateError('Start a table before ending it.');
- }
+ assert(_inVTable);
// Prepare for writing the VTable.
_prepare(_sizeofInt32, 1);
int tableTail = _tail;
@@ -301,41 +316,46 @@ class Builder {
int? vTableTail;
{
currentVTable.computeFieldOffsets(tableTail);
+
// Try to find an existing compatible VTable.
- // Search backward - more likely to have recently used one
- for (int i = _vTables.length - 1; i >= 0; i--) {
- final int vt2Offset = _vTables[i];
- final int vt2Start = _buf.lengthInBytes - vt2Offset;
- final int vt2Size = _buf.getUint16(vt2Start, Endian.little);
-
- if (currentVTable._vTableSize == vt2Size &&
- currentVTable._offsetsMatch(vt2Start, _buf)) {
- vTableTail = vt2Offset;
- break;
+ if (deduplicateTables) {
+ // Search backward - more likely to have recently used one
+ for (int i = _vTables.length - 1; i >= 0; i--) {
+ final int vt2Offset = _vTables[i];
+ final int vt2Start = _buf.lengthInBytes - vt2Offset;
+ final int vt2Size = _buf.getUint16(vt2Start, Endian.little);
+
+ if (currentVTable._vTableSize == vt2Size &&
+ currentVTable._offsetsMatch(vt2Start, _buf)) {
+ vTableTail = vt2Offset;
+ break;
+ }
}
}
+
// Write a new VTable.
if (vTableTail == null) {
_prepare(_sizeofUint16, _currentVTable!.numOfUint16);
vTableTail = _tail;
currentVTable.tail = vTableTail;
currentVTable.output(_buf, _buf.lengthInBytes - _tail);
- _vTables.add(currentVTable.tail);
+ if (deduplicateTables) _vTables.add(currentVTable.tail);
}
}
// Set the VTable offset.
- _setInt32AtTail(_buf, tableTail, vTableTail - tableTail);
+ _setInt32AtTail(tableTail, vTableTail - tableTail);
// Done with this table.
_currentVTable = null;
return tableTail;
}
- /// This method low level method can be used to return a raw piece of the buffer
- /// after using the the put* methods.
- ///
- /// Most clients should prefer calling [finish].
- Uint8List lowFinish() {
- return _buf.buffer.asUint8List(_buf.lengthInBytes - size());
+ /// Returns the finished buffer. You must call [finish] before accessing this.
+ @pragma('vm:prefer-inline')
+ Uint8List get buffer {
+ assert(_finished);
+ final finishedSize = size();
+ return _buf.buffer
+ .asUint8List(_buf.lengthInBytes - finishedSize, finishedSize);
}
/// Finish off the creation of the buffer. The given [offset] is used as the
@@ -343,17 +363,26 @@ class Builder {
/// written object. If [fileIdentifier] is specified (and not `null`), it is
/// interpreted as a 4-byte Latin-1 encoded string that should be placed at
/// bytes 4-7 of the file.
- Uint8List finish(int offset, [String? fileIdentifier]) {
- _prepare(max(_sizeofUint32, _maxAlign), fileIdentifier == null ? 1 : 2);
+ void finish(int offset, [String? fileIdentifier]) {
+ final sizeBeforePadding = size();
+ final requiredBytes = _sizeofUint32 * (fileIdentifier == null ? 1 : 2);
+ _prepare(max(requiredBytes, _maxAlign), 1);
final finishedSize = size();
- _setUint32AtTail(_buf, finishedSize, finishedSize - offset);
+ _setUint32AtTail(finishedSize, finishedSize - offset);
if (fileIdentifier != null) {
for (int i = 0; i < 4; i++) {
- _setUint8AtTail(_buf, finishedSize - _sizeofUint32 - i,
- fileIdentifier.codeUnitAt(i));
+ _setUint8AtTail(
+ finishedSize - _sizeofUint32 - i, fileIdentifier.codeUnitAt(i));
}
}
- return _buf.buffer.asUint8List(_buf.lengthInBytes - finishedSize);
+
+ // zero out the added padding
+ for (var i = sizeBeforePadding + 1;
+ i <= finishedSize - requiredBytes;
+ i++) {
+ _setUint8AtTail(i, 0);
+ }
+ _finished = true;
}
/// Writes a Float64 to the tail of the buffer after preparing space for it.
@@ -361,7 +390,7 @@ class Builder {
/// Updates the [offset] pointer. This method is intended for use when writing structs to the buffer.
void putFloat64(double value) {
_prepare(_sizeofFloat64, 1);
- _setFloat32AtTail(_buf, _tail, value);
+ _setFloat32AtTail(_tail, value);
}
/// Writes a Float32 to the tail of the buffer after preparing space for it.
@@ -369,7 +398,7 @@ class Builder {
/// Updates the [offset] pointer. This method is intended for use when writing structs to the buffer.
void putFloat32(double value) {
_prepare(_sizeofFloat32, 1);
- _setFloat32AtTail(_buf, _tail, value);
+ _setFloat32AtTail(_tail, value);
}
/// Writes a Int64 to the tail of the buffer after preparing space for it.
@@ -377,7 +406,7 @@ class Builder {
/// Updates the [offset] pointer. This method is intended for use when writing structs to the buffer.
void putInt64(int value) {
_prepare(_sizeofInt64, 1);
- _setInt64AtTail(_buf, _tail, value);
+ _setInt64AtTail(_tail, value);
}
/// Writes a Uint32 to the tail of the buffer after preparing space for it.
@@ -385,7 +414,7 @@ class Builder {
/// Updates the [offset] pointer. This method is intended for use when writing structs to the buffer.
void putInt32(int value) {
_prepare(_sizeofInt32, 1);
- _setInt32AtTail(_buf, _tail, value);
+ _setInt32AtTail(_tail, value);
}
/// Writes a Uint16 to the tail of the buffer after preparing space for it.
@@ -393,7 +422,7 @@ class Builder {
/// Updates the [offset] pointer. This method is intended for use when writing structs to the buffer.
void putInt16(int value) {
_prepare(_sizeofInt16, 1);
- _setInt16AtTail(_buf, _tail, value);
+ _setInt16AtTail(_tail, value);
}
/// Writes a Uint8 to the tail of the buffer after preparing space for it.
@@ -409,7 +438,7 @@ class Builder {
/// Updates the [offset] pointer. This method is intended for use when writing structs to the buffer.
void putUint64(int value) {
_prepare(_sizeofUint64, 1);
- _setUint64AtTail(_buf, _tail, value);
+ _setUint64AtTail(_tail, value);
}
/// Writes a Uint32 to the tail of the buffer after preparing space for it.
@@ -417,7 +446,7 @@ class Builder {
/// Updates the [offset] pointer. This method is intended for use when writing structs to the buffer.
void putUint32(int value) {
_prepare(_sizeofUint32, 1);
- _setUint32AtTail(_buf, _tail, value);
+ _setUint32AtTail(_tail, value);
}
/// Writes a Uint16 to the tail of the buffer after preparing space for it.
@@ -425,7 +454,7 @@ class Builder {
/// Updates the [offset] pointer. This method is intended for use when writing structs to the buffer.
void putUint16(int value) {
_prepare(_sizeofUint16, 1);
- _setUint16AtTail(_buf, _tail, value);
+ _setUint16AtTail(_tail, value);
}
/// Writes a Uint8 to the tail of the buffer after preparing space for it.
@@ -438,21 +467,20 @@ class Builder {
/// Reset the builder and make it ready for filling a new buffer.
void reset() {
+ _finished = false;
_maxAlign = 1;
_tail = 0;
_currentVTable = null;
- _vTables.length = 0;
+ if (deduplicateTables) _vTables.clear();
if (_strings != null) {
- _strings = new Map<String, int>();
+ _strings = <String, int>{};
}
}
- /// Start a new table. Must be finished with [endTable] invocation.
- void startTable() {
- if (_currentVTable != null) {
- throw new StateError('Inline tables are not supported.');
- }
- _currentVTable = new _VTable();
+ /// Start a new table. Must be finished with [endTable] invocation.
+ void startTable(int numFields) {
+ assert(!_inVTable); // Inline tables are not supported.
+ _currentVTable = _VTable(numFields);
_currentTableEndTail = _tail;
}
@@ -466,7 +494,7 @@ class Builder {
/// Writes a list of Structs to the buffer, returning the offset
int writeListOfStructs(List<ObjectBuilder> structBuilders) {
- _ensureNoVTable();
+ assert(!_inVTable);
for (int i = structBuilders.length - 1; i >= 0; i--) {
structBuilders[i].finish(this);
}
@@ -475,14 +503,14 @@ class Builder {
/// Write the given list of [values].
int writeList(List<int> values) {
- _ensureNoVTable();
+ assert(!_inVTable);
_prepare(_sizeofUint32, 1 + values.length);
final int result = _tail;
int tail = _tail;
- _setUint32AtTail(_buf, tail, values.length);
+ _setUint32AtTail(tail, values.length);
tail -= _sizeofUint32;
for (int value in values) {
- _setUint32AtTail(_buf, tail, tail - value);
+ _setUint32AtTail(tail, tail - value);
tail -= _sizeofUint32;
}
return result;
@@ -490,14 +518,14 @@ class Builder {
/// Write the given list of 64-bit float [values].
int writeListFloat64(List<double> values) {
- _ensureNoVTable();
+ assert(!_inVTable);
_prepare(_sizeofFloat64, values.length, additionalBytes: _sizeofUint32);
final int result = _tail;
int tail = _tail;
- _setUint32AtTail(_buf, tail, values.length);
+ _setUint32AtTail(tail, values.length);
tail -= _sizeofUint32;
for (double value in values) {
- _setFloat64AtTail(_buf, tail, value);
+ _setFloat64AtTail(tail, value);
tail -= _sizeofFloat64;
}
return result;
@@ -505,14 +533,14 @@ class Builder {
/// Write the given list of 32-bit float [values].
int writeListFloat32(List<double> values) {
- _ensureNoVTable();
+ assert(!_inVTable);
_prepare(_sizeofFloat32, 1 + values.length);
final int result = _tail;
int tail = _tail;
- _setUint32AtTail(_buf, tail, values.length);
+ _setUint32AtTail(tail, values.length);
tail -= _sizeofUint32;
for (double value in values) {
- _setFloat32AtTail(_buf, tail, value);
+ _setFloat32AtTail(tail, value);
tail -= _sizeofFloat32;
}
return result;
@@ -520,14 +548,14 @@ class Builder {
/// Write the given list of signed 64-bit integer [values].
int writeListInt64(List<int> values) {
- _ensureNoVTable();
+ assert(!_inVTable);
_prepare(_sizeofInt64, values.length, additionalBytes: _sizeofUint32);
final int result = _tail;
int tail = _tail;
- _setUint32AtTail(_buf, tail, values.length);
+ _setUint32AtTail(tail, values.length);
tail -= _sizeofUint32;
for (int value in values) {
- _setInt64AtTail(_buf, tail, value);
+ _setInt64AtTail(tail, value);
tail -= _sizeofInt64;
}
return result;
@@ -535,14 +563,14 @@ class Builder {
/// Write the given list of signed 64-bit integer [values].
int writeListUint64(List<int> values) {
- _ensureNoVTable();
+ assert(!_inVTable);
_prepare(_sizeofUint64, values.length, additionalBytes: _sizeofUint32);
final int result = _tail;
int tail = _tail;
- _setUint32AtTail(_buf, tail, values.length);
+ _setUint32AtTail(tail, values.length);
tail -= _sizeofUint32;
for (int value in values) {
- _setUint64AtTail(_buf, tail, value);
+ _setUint64AtTail(tail, value);
tail -= _sizeofUint64;
}
return result;
@@ -550,14 +578,14 @@ class Builder {
/// Write the given list of signed 32-bit integer [values].
int writeListInt32(List<int> values) {
- _ensureNoVTable();
+ assert(!_inVTable);
_prepare(_sizeofUint32, 1 + values.length);
final int result = _tail;
int tail = _tail;
- _setUint32AtTail(_buf, tail, values.length);
+ _setUint32AtTail(tail, values.length);
tail -= _sizeofUint32;
for (int value in values) {
- _setInt32AtTail(_buf, tail, value);
+ _setInt32AtTail(tail, value);
tail -= _sizeofInt32;
}
return result;
@@ -565,14 +593,14 @@ class Builder {
/// Write the given list of unsigned 32-bit integer [values].
int writeListUint32(List<int> values) {
- _ensureNoVTable();
+ assert(!_inVTable);
_prepare(_sizeofUint32, 1 + values.length);
final int result = _tail;
int tail = _tail;
- _setUint32AtTail(_buf, tail, values.length);
+ _setUint32AtTail(tail, values.length);
tail -= _sizeofUint32;
for (int value in values) {
- _setUint32AtTail(_buf, tail, value);
+ _setUint32AtTail(tail, value);
tail -= _sizeofUint32;
}
return result;
@@ -580,14 +608,14 @@ class Builder {
/// Write the given list of signed 16-bit integer [values].
int writeListInt16(List<int> values) {
- _ensureNoVTable();
+ assert(!_inVTable);
_prepare(_sizeofUint32, 1, additionalBytes: 2 * values.length);
final int result = _tail;
int tail = _tail;
- _setUint32AtTail(_buf, tail, values.length);
+ _setUint32AtTail(tail, values.length);
tail -= _sizeofUint32;
for (int value in values) {
- _setInt16AtTail(_buf, tail, value);
+ _setInt16AtTail(tail, value);
tail -= _sizeofInt16;
}
return result;
@@ -595,14 +623,14 @@ class Builder {
/// Write the given list of unsigned 16-bit integer [values].
int writeListUint16(List<int> values) {
- _ensureNoVTable();
+ assert(!_inVTable);
_prepare(_sizeofUint32, 1, additionalBytes: 2 * values.length);
final int result = _tail;
int tail = _tail;
- _setUint32AtTail(_buf, tail, values.length);
+ _setUint32AtTail(tail, values.length);
tail -= _sizeofUint32;
for (int value in values) {
- _setUint16AtTail(_buf, tail, value);
+ _setUint16AtTail(tail, value);
tail -= _sizeofUint16;
}
return result;
@@ -615,14 +643,14 @@ class Builder {
/// Write the given list of signed 8-bit integer [values].
int writeListInt8(List<int> values) {
- _ensureNoVTable();
+ assert(!_inVTable);
_prepare(_sizeofUint32, 1, additionalBytes: values.length);
final int result = _tail;
int tail = _tail;
- _setUint32AtTail(_buf, tail, values.length);
+ _setUint32AtTail(tail, values.length);
tail -= _sizeofUint32;
for (int value in values) {
- _setInt8AtTail(_buf, tail, value);
+ _setInt8AtTail(tail, value);
tail -= _sizeofUint8;
}
return result;
@@ -630,83 +658,122 @@ class Builder {
/// Write the given list of unsigned 8-bit integer [values].
int writeListUint8(List<int> values) {
- _ensureNoVTable();
+ assert(!_inVTable);
_prepare(_sizeofUint32, 1, additionalBytes: values.length);
final int result = _tail;
int tail = _tail;
- _setUint32AtTail(_buf, tail, values.length);
+ _setUint32AtTail(tail, values.length);
tail -= _sizeofUint32;
for (int value in values) {
- _setUint8AtTail(_buf, tail, value);
+ _setUint8AtTail(tail, value);
tail -= _sizeofUint8;
}
return result;
}
- /// Write the given string [value] and return its offset, or `null` if
- /// the [value] is `null`.
- int? writeString(String? value) {
- _ensureNoVTable();
- if (value != null) {
- if (_strings != null) {
- return _strings!.putIfAbsent(value, () => _writeString(value));
- } else {
- return _writeString(value);
- }
+ /// Write the given string [value] and return its offset.
+ ///
+ /// Dart strings are UTF-16 but must be stored as UTF-8 in FlatBuffers.
+ /// If the given string consists only of ASCII characters, you can enable
+ /// [asciiOptimization]. In this mode, [writeString()] first tries to copy
+ /// the ASCII string directly to the output buffer and, if that fails
+ /// (because there are non-ASCII characters in the string), it falls back to
+ /// the default UTF-16 -> UTF-8 conversion (with a slight performance penalty).
+ int writeString(String value, {bool asciiOptimization = false}) {
+ assert(!_inVTable);
+ if (_strings != null) {
+ return _strings!
+ .putIfAbsent(value, () => _writeString(value, asciiOptimization));
+ } else {
+ return _writeString(value, asciiOptimization);
}
- return null;
}
- int _writeString(String value) {
- // TODO(scheglov) optimize for ASCII strings
- List<int> bytes = utf8.encode(value);
- int length = bytes.length;
- _prepare(4, 1, additionalBytes: length + 1);
- final int result = _tail;
- _setUint32AtTail(_buf, _tail, length);
- int offset = _buf.lengthInBytes - _tail + 4;
- for (int i = 0; i < length; i++) {
- _buf.setUint8(offset++, bytes[i]);
+ int _writeString(String value, bool asciiOptimization) {
+ if (asciiOptimization) {
+ // [utf8.encode()] is slow (up to at least Dart SDK 2.13). If the given
+ // string is ASCII we can just write it directly, without any conversion.
+ final originalTail = _tail;
+ if (_tryWriteASCIIString(value)) return _tail;
+ // if non-ASCII: reset the output buffer position for [_writeUTFString()]
+ _tail = originalTail;
}
- _buf.setUint8(offset, 0); // trailing zero
- return result;
+ _writeUTFString(value);
+ return _tail;
}
- /// Throw an exception if there is not currently a vtable.
- void _ensureCurrentVTable() {
- if (_currentVTable == null) {
- throw new StateError('Start a table before adding values.');
+ // Try to write the string as ASCII; returns false if there's a non-ASCII char.
+ @pragma('vm:prefer-inline')
+ bool _tryWriteASCIIString(String value) {
+ _prepare(4, 1, additionalBytes: value.length + 1);
+ final length = value.length;
+ var offset = _buf.lengthInBytes - _tail + 4;
+ for (var i = 0; i < length; i++) {
+ // UTF-16 code unit, e.g. for '†' it's 0x2020 (8224 decimal).
+ // ASCII characters go from 0x00 to 0x7F (which is 0 to 127 decimal).
+ final char = value.codeUnitAt(i);
+ if ((char & ~0x7F) != 0) {
+ return false;
+ }
+ _buf.setUint8(offset++, char);
}
+ _buf.setUint8(offset, 0); // trailing zero
+ _setUint32AtTail(_tail, value.length);
+ return true;
}
- /// Throw an exception if there is currently a vtable.
- void _ensureNoVTable() {
- if (_currentVTable != null) {
- throw new StateError(
- 'Cannot write a non-scalar value while writing a table.');
+ @pragma('vm:prefer-inline')
+ void _writeUTFString(String value) {
+ final bytes = utf8.encode(value) as Uint8List;
+ final length = bytes.length;
+ _prepare(4, 1, additionalBytes: length + 1);
+ _setUint32AtTail(_tail, length);
+ var offset = _buf.lengthInBytes - _tail + 4;
+ for (int i = 0; i < length; i++) {
+ _buf.setUint8(offset++, bytes[i]);
}
+ _buf.setUint8(offset, 0); // trailing zero
}
+ /// Used to assert whether a "Table" is currently being built.
+ ///
+ /// If you hit `assert(_inVTable)`, you're trying to add table fields
+ /// without starting a table with [Builder.startTable()].
+ ///
+ /// If you hit `assert(!_inVTable)`, you're trying to construct a
+ /// Table/Vector/String during the construction of its parent table,
+ /// between the generated `MyTableBuilder` calls and [Builder.endTable()].
+ /// Move the creation of these sub-objects to before the `MyTableBuilder` to
+ /// avoid this assert.
+ @pragma('vm:prefer-inline')
+ bool get _inVTable => _currentVTable != null;
+
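
A minimal sketch of the ordering rule these asserts enforce; the one-field table is illustrative only:

    import 'package:flat_buffers/flat_buffers.dart' as fb;

    void main() {
      final builder = fb.Builder();

      // Correct order: strings/vectors are created *before* the table that
      // references them (writeString() asserts !_inVTable).
      final nameOffset = builder.writeString('Orc');

      builder.startTable(1);          // field adders assert _inVTable
      builder.addOffset(0, nameOffset);
      builder.finish(builder.endTable());

      // Calling writeString() between startTable() and endTable() would
      // instead trip the assert inside writeString().
    }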
/// The number of bytes that have been written to the buffer so far. The
/// most recently written byte is this many bytes from the end of the buffer.
+ @pragma('vm:prefer-inline')
int get offset => _tail;
/// Zero-pads the buffer, which may be required for some struct layouts.
+ @pragma('vm:prefer-inline')
void pad(int howManyBytes) {
- for (int i = 0; i < howManyBytes; i++) putUint8(0);
+ for (int i = 0; i < howManyBytes; i++) {
+ putUint8(0);
+ }
}
/// Prepare for writing the given `count` of scalars of the given `size`.
/// Additionally allocate the specified `additionalBytes`. Update the current
/// tail pointer to point at the allocated space.
+ @pragma('vm:prefer-inline')
void _prepare(int size, int count, {int additionalBytes = 0}) {
+ assert(!_finished);
// Update the alignment.
if (_maxAlign < size) {
_maxAlign = size;
}
// Prepare amount of required space.
int dataSize = size * count + additionalBytes;
- int alignDelta = (-(_tail + dataSize)) % size;
+ int alignDelta = (-(_tail + dataSize)) & (size - 1);
int bufSize = alignDelta + dataSize;
// Ensure that we have the required amount of space.
{
@@ -714,63 +781,64 @@ class Builder {
if (_tail + bufSize > oldCapacity) {
int desiredNewCapacity = (oldCapacity + bufSize) * 2;
int deltaCapacity = desiredNewCapacity - oldCapacity;
- deltaCapacity += (-deltaCapacity) % _maxAlign;
+ deltaCapacity += (-deltaCapacity) & (_maxAlign - 1);
int newCapacity = oldCapacity + deltaCapacity;
- ByteData newBuf = new ByteData(newCapacity);
- newBuf.buffer
- .asUint8List()
- .setAll(deltaCapacity, _buf.buffer.asUint8List());
- _buf = newBuf;
+ _buf = _allocator.resize(_buf, newCapacity, _tail, 0);
}
}
+
+ // zero out the added padding
+ for (var i = _tail + 1; i <= _tail + alignDelta; i++) {
+ _setUint8AtTail(i, 0);
+ }
+
// Update the tail pointer.
_tail += bufSize;
}
/// Record the offset of the given [field].
- void _trackField(int field) {
- _currentVTable!.addField(field, _tail);
- }
+ @pragma('vm:prefer-inline')
+ void _trackField(int field) => _currentVTable!.addField(field, _tail);
- static void _setFloat64AtTail(ByteData _buf, int tail, double x) {
- _buf.setFloat64(_buf.lengthInBytes - tail, x, Endian.little);
- }
+ @pragma('vm:prefer-inline')
+ void _setFloat64AtTail(int tail, double x) =>
+ _buf.setFloat64(_buf.lengthInBytes - tail, x, Endian.little);
- static void _setFloat32AtTail(ByteData _buf, int tail, double x) {
- _buf.setFloat32(_buf.lengthInBytes - tail, x, Endian.little);
- }
+ @pragma('vm:prefer-inline')
+ void _setFloat32AtTail(int tail, double x) =>
+ _buf.setFloat32(_buf.lengthInBytes - tail, x, Endian.little);
- static void _setUint64AtTail(ByteData _buf, int tail, int x) {
- _buf.setUint64(_buf.lengthInBytes - tail, x, Endian.little);
- }
+ @pragma('vm:prefer-inline')
+ void _setUint64AtTail(int tail, int x) =>
+ _buf.setUint64(_buf.lengthInBytes - tail, x, Endian.little);
- static void _setInt64AtTail(ByteData _buf, int tail, int x) {
- _buf.setInt64(_buf.lengthInBytes - tail, x, Endian.little);
- }
+ @pragma('vm:prefer-inline')
+ void _setInt64AtTail(int tail, int x) =>
+ _buf.setInt64(_buf.lengthInBytes - tail, x, Endian.little);
- static void _setInt32AtTail(ByteData _buf, int tail, int x) {
- _buf.setInt32(_buf.lengthInBytes - tail, x, Endian.little);
- }
+ @pragma('vm:prefer-inline')
+ void _setInt32AtTail(int tail, int x) =>
+ _buf.setInt32(_buf.lengthInBytes - tail, x, Endian.little);
- static void _setUint32AtTail(ByteData _buf, int tail, int x) {
- _buf.setUint32(_buf.lengthInBytes - tail, x, Endian.little);
- }
+ @pragma('vm:prefer-inline')
+ void _setUint32AtTail(int tail, int x) =>
+ _buf.setUint32(_buf.lengthInBytes - tail, x, Endian.little);
- static void _setInt16AtTail(ByteData _buf, int tail, int x) {
- _buf.setInt16(_buf.lengthInBytes - tail, x, Endian.little);
- }
+ @pragma('vm:prefer-inline')
+ void _setInt16AtTail(int tail, int x) =>
+ _buf.setInt16(_buf.lengthInBytes - tail, x, Endian.little);
- static void _setUint16AtTail(ByteData _buf, int tail, int x) {
- _buf.setUint16(_buf.lengthInBytes - tail, x, Endian.little);
- }
+ @pragma('vm:prefer-inline')
+ void _setUint16AtTail(int tail, int x) =>
+ _buf.setUint16(_buf.lengthInBytes - tail, x, Endian.little);
- static void _setInt8AtTail(ByteData _buf, int tail, int x) {
- _buf.setInt8(_buf.lengthInBytes - tail, x);
- }
+ @pragma('vm:prefer-inline')
+ void _setInt8AtTail(int tail, int x) =>
+ _buf.setInt8(_buf.lengthInBytes - tail, x);
- static void _setUint8AtTail(ByteData _buf, int tail, int x) {
- _buf.setUint8(_buf.lengthInBytes - tail, x);
- }
+ @pragma('vm:prefer-inline')
+ void _setUint8AtTail(int tail, int x) =>
+ _buf.setUint8(_buf.lengthInBytes - tail, x);
}
/// Reader of lists of boolean values.
@@ -780,11 +848,13 @@ class BoolListReader extends Reader<List<bool>> {
const BoolListReader();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofUint32;
@override
+ @pragma('vm:prefer-inline')
List<bool> read(BufferContext bc, int offset) =>
- new _FbBoolList(bc, bc.derefObject(offset));
+ _FbBoolList(bc, bc.derefObject(offset));
}
/// The reader of booleans.
@@ -792,9 +862,11 @@ class BoolReader extends Reader<bool> {
const BoolReader() : super();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofUint8;
@override
+ @pragma('vm:prefer-inline')
bool read(BufferContext bc, int offset) => bc._getInt8(offset) != 0;
}
@@ -805,31 +877,37 @@ class Float64ListReader extends Reader<List<double>> {
const Float64ListReader();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofFloat64;
@override
+ @pragma('vm:prefer-inline')
List<double> read(BufferContext bc, int offset) =>
- new _FbFloat64List(bc, bc.derefObject(offset));
+ _FbFloat64List(bc, bc.derefObject(offset));
}
class Float32ListReader extends Reader<List<double>> {
const Float32ListReader();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofFloat32;
@override
+ @pragma('vm:prefer-inline')
List<double> read(BufferContext bc, int offset) =>
- new _FbFloat32List(bc, bc.derefObject(offset));
+ _FbFloat32List(bc, bc.derefObject(offset));
}
class Float64Reader extends Reader<double> {
const Float64Reader();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofFloat64;
@override
+ @pragma('vm:prefer-inline')
double read(BufferContext bc, int offset) => bc._getFloat64(offset);
}
@@ -837,9 +915,11 @@ class Float32Reader extends Reader<double> {
const Float32Reader();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofFloat32;
@override
+ @pragma('vm:prefer-inline')
double read(BufferContext bc, int offset) => bc._getFloat32(offset);
}
@@ -847,9 +927,11 @@ class Int64Reader extends Reader<int> {
const Int64Reader() : super();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofInt64;
@override
+ @pragma('vm:prefer-inline')
int read(BufferContext bc, int offset) => bc._getInt64(offset);
}
@@ -858,9 +940,11 @@ class Int32Reader extends Reader<int> {
const Int32Reader() : super();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofInt32;
@override
+ @pragma('vm:prefer-inline')
int read(BufferContext bc, int offset) => bc._getInt32(offset);
}
@@ -869,9 +953,11 @@ class Int16Reader extends Reader<int> {
const Int16Reader() : super();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofInt16;
@override
+ @pragma('vm:prefer-inline')
int read(BufferContext bc, int offset) => bc._getInt16(offset);
}
@@ -880,26 +966,43 @@ class Int8Reader extends Reader<int> {
const Int8Reader() : super();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofInt8;
@override
+ @pragma('vm:prefer-inline')
int read(BufferContext bc, int offset) => bc._getInt8(offset);
}
-/// The reader of lists of objects.
-///
-/// The returned unmodifiable lists lazily read objects on access.
+/// The reader of lists of objects. Lazy by default - see [lazy].
class ListReader<E> extends Reader<List<E>> {
final Reader<E> _elementReader;
- const ListReader(this._elementReader);
+ /// Enables lazy reading of the list
+ ///
+ /// If true, the returned unmodifiable list lazily reads objects on access.
+ /// Therefore, the underlying buffer must not change while accessing the list.
+ ///
+ /// If false, reads the whole list immediately on access.
+ final bool lazy;
+
+ const ListReader(this._elementReader, {this.lazy = true});
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofUint32;
@override
- List<E> read(BufferContext bc, int offset) =>
- new _FbGenericList<E>(_elementReader, bc, bc.derefObject(offset));
+ List<E> read(BufferContext bc, int offset) {
+ final listOffset = bc.derefObject(offset);
+ return lazy
+ ? _FbGenericList<E>(_elementReader, bc, listOffset)
+ : List<E>.generate(
+ bc.buffer.getUint32(listOffset, Endian.little),
+ (int index) => _elementReader.read(
+ bc, listOffset + size + _elementReader.size * index),
+ growable: true);
+ }
}
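
For instance, an eager counterpart to the generated `Monster.weapons` accessor from the example folder might look like this; vtable slot 18 matches that generated accessor, and the import of the generated file is assumed:

    import 'package:flat_buffers/flat_buffers.dart' as fb;
    import 'monster_my_game.sample_generated.dart' show Weapon;

    // With `lazy: false` the whole vector is decoded up front, so the result
    // stays usable even if the underlying buffer is later reused.
    // `tableOffset` is the Monster table's position within the buffer.
    List<Weapon>? readWeaponsEagerly(fb.BufferContext bc, int tableOffset) =>
        const fb.ListReader<Weapon>(Weapon.reader, lazy: false)
            .vTableGetNullable(bc, tableOffset, 18);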
/// Object that can read a value at a [BufferContext].
@@ -913,17 +1016,20 @@ abstract class Reader<T> {
T read(BufferContext bc, int offset);
/// Read the value of the given [field] in the given [object].
+ @pragma('vm:prefer-inline')
T vTableGet(BufferContext object, int offset, int field, T defaultValue) {
int fieldOffset = _vTableFieldOffset(object, offset, field);
return fieldOffset == 0 ? defaultValue : read(object, offset + fieldOffset);
}
/// Read the value of the given [field] in the given [object].
+ @pragma('vm:prefer-inline')
T? vTableGetNullable(BufferContext object, int offset, int field) {
int fieldOffset = _vTableFieldOffset(object, offset, field);
return fieldOffset == 0 ? null : read(object, offset + fieldOffset);
}
+ @pragma('vm:prefer-inline')
int _vTableFieldOffset(BufferContext object, int offset, int field) {
int vTableSOffset = object._getInt32(offset);
int vTableOffset = offset - vTableSOffset;
@@ -935,22 +1041,27 @@ abstract class Reader<T> {
/// The reader of string values.
class StringReader extends Reader<String> {
- const StringReader() : super();
+ final bool asciiOptimization;
+
+ const StringReader({this.asciiOptimization = false}) : super();
@override
- int get size => 4;
+ @pragma('vm:prefer-inline')
+ int get size => _sizeofUint32;
@override
+ @pragma('vm:prefer-inline')
String read(BufferContext bc, int offset) {
int strOffset = bc.derefObject(offset);
int length = bc._getUint32(strOffset);
- Uint8List bytes = bc._asUint8LIst(strOffset + 4, length);
- if (_isLatin(bytes)) {
- return new String.fromCharCodes(bytes);
+ Uint8List bytes = bc._asUint8List(strOffset + _sizeofUint32, length);
+ if (asciiOptimization && _isLatin(bytes)) {
+ return String.fromCharCodes(bytes);
}
return utf8.decode(bytes);
}
+ @pragma('vm:prefer-inline')
static bool _isLatin(Uint8List bytes) {
int length = bytes.length;
for (int i = 0; i < length; i++) {
@@ -969,8 +1080,9 @@ abstract class StructReader<T> extends Reader<T> {
/// Return the object at `offset`.
T createObject(BufferContext bc, int offset);
- T read(BufferContext bp, int offset) {
- return createObject(bp, offset);
+ @override
+ T read(BufferContext bc, int offset) {
+ return createObject(bc, offset);
}
}
@@ -979,15 +1091,16 @@ abstract class TableReader<T> extends Reader<T> {
const TableReader();
@override
+ @pragma('vm:prefer-inline')
int get size => 4;
/// Return the object at [offset].
T createObject(BufferContext bc, int offset);
@override
- T read(BufferContext bp, int offset) {
- int objectOffset = bp.derefObject(offset);
- return createObject(bp, objectOffset);
+ T read(BufferContext bc, int offset) {
+ int objectOffset = bc.derefObject(offset);
+ return createObject(bc, objectOffset);
}
}
@@ -998,11 +1111,13 @@ class Uint32ListReader extends Reader<List<int>> {
const Uint32ListReader();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofUint32;
@override
+ @pragma('vm:prefer-inline')
List<int> read(BufferContext bc, int offset) =>
- new _FbUint32List(bc, bc.derefObject(offset));
+ _FbUint32List(bc, bc.derefObject(offset));
}
/// The reader of unsigned 64-bit integers.
@@ -1012,9 +1127,11 @@ class Uint64Reader extends Reader<int> {
const Uint64Reader() : super();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofUint64;
@override
+ @pragma('vm:prefer-inline')
int read(BufferContext bc, int offset) => bc._getUint64(offset);
}
@@ -1023,9 +1140,11 @@ class Uint32Reader extends Reader<int> {
const Uint32Reader() : super();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofUint32;
@override
+ @pragma('vm:prefer-inline')
int read(BufferContext bc, int offset) => bc._getUint32(offset);
}
@@ -1036,11 +1155,13 @@ class Uint16ListReader extends Reader<List<int>> {
const Uint16ListReader();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofUint32;
@override
+ @pragma('vm:prefer-inline')
List<int> read(BufferContext bc, int offset) =>
- new _FbUint16List(bc, bc.derefObject(offset));
+ _FbUint16List(bc, bc.derefObject(offset));
}
/// The reader of unsigned 16-bit integers.
@@ -1048,24 +1169,44 @@ class Uint16Reader extends Reader<int> {
const Uint16Reader() : super();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofUint16;
@override
+ @pragma('vm:prefer-inline')
int read(BufferContext bc, int offset) => bc._getUint16(offset);
}
-/// Reader of lists of unsigned 8-bit integer values.
-///
-/// The returned unmodifiable lists lazily read values on access.
+/// Reader of unmodifiable binary data (a list of unsigned 8-bit integers).
class Uint8ListReader extends Reader<List<int>> {
- const Uint8ListReader();
+ /// Enables lazy reading of the list
+ ///
+ /// If true, the returned unmodifiable list lazily reads bytes on access.
+ /// Therefore, the underlying buffer must not change while accessing the list.
+ ///
+ /// If false, reads the whole list immediately as an Uint8List.
+ final bool lazy;
+
+ const Uint8ListReader({this.lazy = true});
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofUint32;
@override
- List<int> read(BufferContext bc, int offset) =>
- new _FbUint8List(bc, bc.derefObject(offset));
+ @pragma('vm:prefer-inline')
+ List<int> read(BufferContext bc, int offset) {
+ final listOffset = bc.derefObject(offset);
+ if (lazy) return _FbUint8List(bc, listOffset);
+
+ final length = bc._getUint32(listOffset);
+ final result = Uint8List(length);
+ var pos = listOffset + _sizeofUint32;
+ for (var i = 0; i < length; i++, pos++) {
+ result[i] = bc._getUint8(pos);
+ }
+ return result;
+ }
}
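Same idea for byte vectors, again only a sketch using the constructors defined above: eager mode copies the bytes into a fresh Uint8List, which is the safe choice when the backing buffer gets reused.

```dart
import 'package:flat_buffers/flat_buffers.dart' as fb;

void main() {
  // Default: a lazy view over the buffer; cheap, but only valid while the
  // underlying buffer stays unchanged.
  const lazyBytes = fb.Uint8ListReader();

  // Eager: read() returns a standalone Uint8List copy that can safely
  // be kept after the source buffer goes away.
  const copiedBytes = fb.Uint8ListReader(lazy: false);

  print([lazyBytes.size, copiedBytes.size]); // both are 4-byte offset fields
}
```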
/// The reader of unsigned 8-bit integers.
@@ -1073,20 +1214,53 @@ class Uint8Reader extends Reader<int> {
const Uint8Reader() : super();
@override
+ @pragma('vm:prefer-inline')
int get size => _sizeofUint8;
@override
+ @pragma('vm:prefer-inline')
int read(BufferContext bc, int offset) => bc._getUint8(offset);
}
+/// Reader of unmodifiable binary data (a list of signed 8-bit integers).
+class Int8ListReader extends Reader<List<int>> {
+ /// Enables lazy reading of the list
+ ///
+ /// If true, the returned unmodifiable list lazily reads bytes on access.
+ /// Therefore, the underlying buffer must not change while accessing the list.
+ ///
+ /// If false, reads the whole list immediately as an Int8List.
+ final bool lazy;
+
+ const Int8ListReader({this.lazy = true});
+
+ @override
+ @pragma('vm:prefer-inline')
+ int get size => _sizeofUint32;
+
+ @override
+ @pragma('vm:prefer-inline')
+ List<int> read(BufferContext bc, int offset) {
+ final listOffset = bc.derefObject(offset);
+ if (lazy) return _FbInt8List(bc, listOffset);
+
+ final length = bc._getUint32(listOffset);
+ final result = Int8List(length);
+ var pos = listOffset + _sizeofUint32;
+ for (var i = 0; i < length; i++, pos++) {
+ result[i] = bc._getInt8(pos);
+ }
+ return result;
+ }
+}
+
/// The list backed by 64-bit values - Uint32 length and Float64.
class _FbFloat64List extends _FbList<double> {
_FbFloat64List(BufferContext bc, int offset) : super(bc, offset);
@override
- double operator [](int i) {
- return bc._getFloat64(offset + 4 + 8 * i);
- }
+ @pragma('vm:prefer-inline')
+ double operator [](int i) => bc._getFloat64(offset + 4 + 8 * i);
}
/// The list backed by 32-bit values - Float32.
@@ -1094,9 +1268,8 @@ class _FbFloat32List extends _FbList<double> {
_FbFloat32List(BufferContext bc, int offset) : super(bc, offset);
@override
- double operator [](int i) {
- return bc._getFloat32(offset + 4 + 4 * i);
- }
+ @pragma('vm:prefer-inline')
+ double operator [](int i) => bc._getFloat32(offset + 4 + 4 * i);
}
/// List backed by a generic object which may have any size.
@@ -1109,6 +1282,7 @@ class _FbGenericList<E> extends _FbList<E> {
: super(bp, offset);
@override
+ @pragma('vm:prefer-inline')
E operator [](int i) {
_items ??= List<E?>.filled(length, null);
E? item = _items![i];
@@ -1129,18 +1303,15 @@ abstract class _FbList<E> extends Object with ListMixin<E> implements List<E> {
_FbList(this.bc, this.offset);
@override
- int get length {
- _length ??= bc._getUint32(offset);
- return _length!;
- }
+ @pragma('vm:prefer-inline')
+ int get length => _length ??= bc._getUint32(offset);
@override
- void set length(int i) =>
- throw new StateError('Attempt to modify immutable list');
+ set length(int i) => throw StateError('Attempt to modify immutable list');
@override
void operator []=(int i, E e) =>
- throw new StateError('Attempt to modify immutable list');
+ throw StateError('Attempt to modify immutable list');
}
/// List backed by 32-bit unsigned integers.
@@ -1148,9 +1319,8 @@ class _FbUint32List extends _FbList<int> {
_FbUint32List(BufferContext bc, int offset) : super(bc, offset);
@override
- int operator [](int i) {
- return bc._getUint32(offset + 4 + 4 * i);
- }
+ @pragma('vm:prefer-inline')
+ int operator [](int i) => bc._getUint32(offset + 4 + 4 * i);
}
/// List backed by 16-bit unsigned integers.
@@ -1158,9 +1328,8 @@ class _FbUint16List extends _FbList<int> {
_FbUint16List(BufferContext bc, int offset) : super(bc, offset);
@override
- int operator [](int i) {
- return bc._getUint16(offset + 4 + 2 * i);
- }
+ @pragma('vm:prefer-inline')
+ int operator [](int i) => bc._getUint16(offset + 4 + 2 * i);
}
/// List backed by 8-bit unsigned integers.
@@ -1168,9 +1337,17 @@ class _FbUint8List extends _FbList<int> {
_FbUint8List(BufferContext bc, int offset) : super(bc, offset);
@override
- int operator [](int i) {
- return bc._getUint8(offset + 4 + i);
- }
+ @pragma('vm:prefer-inline')
+ int operator [](int i) => bc._getUint8(offset + 4 + i);
+}
+
+/// List backed by 8-bit signed integers.
+class _FbInt8List extends _FbList<int> {
+ _FbInt8List(BufferContext bc, int offset) : super(bc, offset);
+
+ @override
+ @pragma('vm:prefer-inline')
+ int operator [](int i) => bc._getInt8(offset + 4 + i);
}
/// List of booleans backed by 8-bit unsigned integers.
@@ -1178,17 +1355,22 @@ class _FbBoolList extends _FbList<bool> {
_FbBoolList(BufferContext bc, int offset) : super(bc, offset);
@override
- bool operator [](int i) {
- return bc._getUint8(offset + 4 + i) == 1 ? true : false;
- }
+ @pragma('vm:prefer-inline')
+ bool operator [](int i) => bc._getUint8(offset + 4 + i) == 1 ? true : false;
}
/// Class that describes the structure of a table.
class _VTable {
static const int _metadataLength = 4;
- final fieldTails = <int?>[];
- final fieldOffsets = <int>[];
+ final int numFields;
+
+ // Note: fieldOffsets start as "tail offsets" and are then transformed by
+ // [computeFieldOffsets()] to actual offsets when a table is finished.
+ final Uint32List fieldOffsets;
+ bool offsetsComputed = false;
+
+ _VTable(this.numFields) : fieldOffsets = Uint32List(numFields);
/// The size of the table that uses this VTable.
int tableSize = 0;
@@ -1199,17 +1381,20 @@ class _VTable {
int get _vTableSize => numOfUint16 * _sizeofUint16;
- int get numOfUint16 => 1 + 1 + fieldTails.length;
+ int get numOfUint16 => 1 + 1 + numFields;
+ @pragma('vm:prefer-inline')
void addField(int field, int offset) {
- while (fieldTails.length <= field) {
- fieldTails.add(null);
- }
- fieldTails[field] = offset;
+ assert(!offsetsComputed);
+ assert(offset > 0); // it's impossible for a field to start at the buffer end
+ assert(offset <= 4294967295); // uint32 max
+ fieldOffsets[field] = offset;
}
+ @pragma('vm:prefer-inline')
bool _offsetsMatch(int vt2Start, ByteData buf) {
- for (int i = 0; i < fieldOffsets.length; i++) {
+ assert(offsetsComputed);
+ for (int i = 0; i < numFields; i++) {
if (fieldOffsets[i] !=
buf.getUint16(vt2Start + _metadataLength + (2 * i), Endian.little)) {
return false;
@@ -1219,17 +1404,22 @@ class _VTable {
}
/// Fill the [fieldOffsets] field.
+ @pragma('vm:prefer-inline')
void computeFieldOffsets(int tableTail) {
- assert(fieldOffsets.isEmpty);
- for (int? fieldTail in fieldTails) {
- int fieldOffset = fieldTail == null ? 0 : tableTail - fieldTail;
- fieldOffsets.add(fieldOffset);
+ assert(!offsetsComputed);
+ offsetsComputed = true;
+ for (var i = 0; i < numFields; i++) {
+ if (fieldOffsets[i] != 0) {
+ fieldOffsets[i] = tableTail - fieldOffsets[i];
+ }
}
}
/// Outputs this VTable to [buf], which is expected to be aligned to 16-bit
/// and have at least [numOfUint16] 16-bit words available.
+ @pragma('vm:prefer-inline')
void output(ByteData buf, int bufOffset) {
+ assert(offsetsComputed);
// VTable size.
buf.setUint16(bufOffset, numOfUint16 * 2, Endian.little);
bufOffset += 2;
@@ -1237,9 +1427,62 @@ class _VTable {
buf.setUint16(bufOffset, tableSize, Endian.little);
bufOffset += 2;
// Field offsets.
- for (int fieldOffset in fieldOffsets) {
- buf.setUint16(bufOffset, fieldOffset, Endian.little);
+ for (int i = 0; i < numFields; i++) {
+ buf.setUint16(bufOffset, fieldOffsets[i], Endian.little);
bufOffset += 2;
}
}
}
+
+/// The interface that [Builder] uses to allocate buffers for encoding.
+abstract class Allocator {
+ const Allocator();
+
+ /// Allocate a [ByteData] buffer of a given size.
+ ByteData allocate(int size);
+
+ /// Free the given [ByteData] buffer previously allocated by [allocate].
+ void deallocate(ByteData data);
+
+ /// Reallocate [newSize] bytes of memory, replacing the old [oldData]. This
+ /// grows downwards, and is intended specifically for use with [Builder].
+ /// Params [inUseBack] and [inUseFront] indicate how much of [oldData] is
+ /// actually in use at each end, and needs to be copied.
+ ByteData resize(
+ ByteData oldData, int newSize, int inUseBack, int inUseFront) {
+ final newData = allocate(newSize);
+ _copyDownward(oldData, newData, inUseBack, inUseFront);
+ deallocate(oldData);
+ return newData;
+ }
+
+ /// Called by [resize] to copy memory from [oldData] to [newData]. Only
+ /// memory of size [inUseFront] and [inUseBack] will be copied from the front
+ /// and back of the old memory allocation.
+ void _copyDownward(
+ ByteData oldData, ByteData newData, int inUseBack, int inUseFront) {
+ if (inUseBack != 0) {
+ newData.buffer.asUint8List().setAll(
+ newData.lengthInBytes - inUseBack,
+ oldData.buffer.asUint8List().getRange(
+ oldData.lengthInBytes - inUseBack, oldData.lengthInBytes));
+ }
+ if (inUseFront != 0) {
+ newData.buffer
+ .asUint8List()
+ .setAll(0, oldData.buffer.asUint8List().getRange(0, inUseFront));
+ }
+ }
+}
+
+class DefaultAllocator extends Allocator {
+ const DefaultAllocator();
+
+ @override
+ ByteData allocate(int size) => ByteData(size);
+
+ @override
+ void deallocate(ByteData data) {
+ // nothing to do, it's garbage-collected
+ }
+}
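The new Dart-side Allocator mirrors the C++ interface added later in this patch: resize grows downward and only copies the bytes still in use at each end. Below is a minimal sketch of a custom allocator (the CountingAllocator name is hypothetical); it exercises only the interface added above, since how a Builder is wired to a custom allocator is outside this hunk, so the example drives resize directly.

```dart
import 'dart:typed_data';

import 'package:flat_buffers/flat_buffers.dart';

/// Toy allocator that counts allocations; deallocation is left to the GC.
class CountingAllocator extends Allocator {
  int allocations = 0;

  @override
  ByteData allocate(int size) {
    allocations++;
    return ByteData(size);
  }

  @override
  void deallocate(ByteData data) {
    // Nothing to free explicitly; ByteData is garbage-collected.
  }
}

void main() {
  final allocator = CountingAllocator();
  final first = allocator.allocate(16);

  // Pretend 4 bytes at the back and 2 at the front are in use, then grow.
  final grown = allocator.resize(first, 64, 4, 2);

  print('allocations: ${allocator.allocations}'); // 2
  print('new size: ${grown.lengthInBytes}'); // 64
}
```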
diff --git a/chromium/third_party/flatbuffers/src/dart/lib/src/builder.dart b/chromium/third_party/flatbuffers/src/dart/lib/src/builder.dart
index 27c0184aeab..a0d47ed1f48 100644
--- a/chromium/third_party/flatbuffers/src/dart/lib/src/builder.dart
+++ b/chromium/third_party/flatbuffers/src/dart/lib/src/builder.dart
@@ -5,22 +5,22 @@ import 'types.dart';
/// The main builder class for creation of a FlexBuffer.
class Builder {
- ByteData _buffer;
+ final ByteData _buffer;
List<_StackValue> _stack = [];
List<_StackPointer> _stackPointers = [];
int _offset = 0;
bool _finished = false;
- Map<String, _StackValue> _stringCache = {};
- Map<String, _StackValue> _keyCache = {};
- Map<_KeysHash, _StackValue> _keyVectorCache = {};
- Map<int, _StackValue> _indirectIntCache = {};
- Map<double, _StackValue> _indirectDoubleCache = {};
+ final Map<String, _StackValue> _stringCache = {};
+ final Map<String, _StackValue> _keyCache = {};
+ final Map<_KeysHash, _StackValue> _keyVectorCache = {};
+ final Map<int, _StackValue> _indirectIntCache = {};
+ final Map<double, _StackValue> _indirectDoubleCache = {};
/// Instantiate the builder if you intend to gradually build up the buffer by calling
/// add... methods and calling [finish] to receive the resulting byte array.
///
/// The default size of the internal buffer is set to 2048. Provide a different value in order to avoid buffer copies.
- Builder({int size = 2048}) : _buffer = ByteData(size) {}
+ Builder({int size = 2048}) : _buffer = ByteData(size);
/// Use this method in order to turn an object into a FlexBuffer directly.
///
@@ -70,25 +70,25 @@ class Builder {
/// Specifically useful when building up a vector where values can be null.
void addNull() {
_integrityCheckOnValueAddition();
- _stack.add(_StackValue.WithNull());
+ _stack.add(_StackValue.withNull());
}
/// Adds an int value.
void addInt(int value) {
_integrityCheckOnValueAddition();
- _stack.add(_StackValue.WithInt(value));
+ _stack.add(_StackValue.withInt(value));
}
/// Adds a bool value.
void addBool(bool value) {
_integrityCheckOnValueAddition();
- _stack.add(_StackValue.WithBool(value));
+ _stack.add(_StackValue.withBool(value));
}
/// Adds a double value.
void addDouble(double value) {
_integrityCheckOnValueAddition();
- _stack.add(_StackValue.WithDouble(value));
+ _stack.add(_StackValue.withDouble(value));
}
/// Adds a string value.
@@ -108,7 +108,7 @@ class Builder {
_pushBuffer(utf8String);
_offset = newOffset;
final stackValue =
- _StackValue.WithOffset(stringOffset, ValueType.String, bitWidth);
+ _StackValue.withOffset(stringOffset, ValueType.String, bitWidth);
_stack.add(stackValue);
_stringCache[value] = stackValue;
}
@@ -129,7 +129,7 @@ class Builder {
_pushBuffer(utf8String);
_offset = newOffset;
final stackValue =
- _StackValue.WithOffset(keyOffset, ValueType.Key, BitWidth.width8);
+ _StackValue.withOffset(keyOffset, ValueType.Key, BitWidth.width8);
_stack.add(stackValue);
_keyCache[value] = stackValue;
}
@@ -148,7 +148,7 @@ class Builder {
_pushBuffer(value.asUint8List());
_offset = newOffset;
final stackValue =
- _StackValue.WithOffset(blobOffset, ValueType.Blob, bitWidth);
+ _StackValue.withOffset(blobOffset, ValueType.Blob, bitWidth);
_stack.add(stackValue);
}
@@ -164,12 +164,12 @@ class Builder {
_stack.add(_indirectIntCache[value]!);
return;
}
- final stackValue = _StackValue.WithInt(value);
+ final stackValue = _StackValue.withInt(value);
final byteWidth = _align(stackValue.width);
final newOffset = _newOffset(byteWidth);
final valueOffset = _offset;
_pushBuffer(stackValue.asU8List(stackValue.width));
- final stackOffset = _StackValue.WithOffset(
+ final stackOffset = _StackValue.withOffset(
valueOffset, ValueType.IndirectInt, stackValue.width);
_stack.add(stackOffset);
_offset = newOffset;
@@ -189,12 +189,12 @@ class Builder {
_stack.add(_indirectDoubleCache[value]!);
return;
}
- final stackValue = _StackValue.WithDouble(value);
+ final stackValue = _StackValue.withDouble(value);
final byteWidth = _align(stackValue.width);
final newOffset = _newOffset(byteWidth);
final valueOffset = _offset;
_pushBuffer(stackValue.asU8List(stackValue.width));
- final stackOffset = _StackValue.WithOffset(
+ final stackOffset = _StackValue.withOffset(
valueOffset, ValueType.IndirectFloat, stackValue.width);
_stack.add(stackOffset);
_offset = newOffset;
@@ -346,14 +346,14 @@ class Builder {
}
}
if (keys != null) {
- return _StackValue.WithOffset(vecOffset, ValueType.Map, bitWidth);
+ return _StackValue.withOffset(vecOffset, ValueType.Map, bitWidth);
}
if (typed) {
final vType =
ValueTypeUtils.toTypedVector(vectorType, fix ? vecLength : 0);
- return _StackValue.WithOffset(vecOffset, vType, bitWidth);
+ return _StackValue.withOffset(vecOffset, vType, bitWidth);
}
- return _StackValue.WithOffset(vecOffset, ValueType.Vector, bitWidth);
+ return _StackValue.withOffset(vecOffset, ValueType.Vector, bitWidth);
}
void _endVector(_StackPointer pointer) {
@@ -405,7 +405,7 @@ class Builder {
offsets.add(_stack[i].offset!);
}
final keysHash = _KeysHash(offsets);
- var keysStackValue;
+ _StackValue? keysStackValue;
if (_keyVectorCache.containsKey(keysHash)) {
keysStackValue = _keyVectorCache[keysHash];
} else {
@@ -424,7 +424,7 @@ class Builder {
'Stack values are not keys $v1 | $v2. Check if you combined [addKey] with add... method calls properly.');
}
- var c1, c2;
+ late int c1, c2;
var index = 0;
do {
c1 = _buffer.getUint8(v1.offset! + index);
@@ -520,32 +520,32 @@ class Builder {
class _StackValue {
late Object _value;
int? _offset;
- ValueType _type;
- BitWidth _width;
+ final ValueType _type;
+ final BitWidth _width;
- _StackValue.WithNull()
+ _StackValue.withNull()
: _type = ValueType.Null,
- _width = BitWidth.width8 {}
+ _width = BitWidth.width8;
- _StackValue.WithInt(int value)
+ _StackValue.withInt(int value)
: _type = ValueType.Int,
_width = BitWidthUtil.width(value),
- _value = value {}
+ _value = value;
- _StackValue.WithBool(bool value)
+ _StackValue.withBool(bool value)
: _type = ValueType.Bool,
_width = BitWidth.width8,
- _value = value {}
+ _value = value;
- _StackValue.WithDouble(double value)
+ _StackValue.withDouble(double value)
: _type = ValueType.Float,
_width = BitWidthUtil.width(value),
- _value = value {}
+ _value = value;
- _StackValue.WithOffset(int value, ValueType type, BitWidth width)
+ _StackValue.withOffset(int value, ValueType type, BitWidth width)
: _offset = value,
_type = type,
- _width = width {}
+ _width = width;
BitWidth storedWidth({BitWidth width = BitWidth.width8}) {
return ValueTypeUtils.isInline(_type)
@@ -559,12 +559,13 @@ class _StackValue {
BitWidth elementWidth(int size, int index) {
if (ValueTypeUtils.isInline(_type)) return _width;
- final offset = offsetLoc - _offset!;
+ final offset = _offset!;
for (var i = 0; i < 4; i++) {
final width = 1 << i;
- final offsetLoc =
- size + BitWidthUtil.paddingSize(size, width) + index * width;
- final bitWidth = BitWidthUtil.uwidth(offset);
+ final bitWidth = BitWidthUtil.uwidth(size +
+ BitWidthUtil.paddingSize(size, width) +
+ index * width -
+ offset);
if (1 << bitWidth.index == width) {
return bitWidth;
}
@@ -643,6 +644,7 @@ class _StackValue {
class _StackPointer {
int stackPosition;
bool isVector;
+
_StackPointer(this.stackPosition, this.isVector);
}
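For reference, gradual FlexBuffer building as described in the Builder class doc above, as a hedged sketch: the `package:flat_buffers/flex_buffers.dart` import path and the no-argument `finish()` call are assumptions not shown in this diff; `addInt` and the buffer-size parameter are taken from the hunks above.

```dart
// NOTE: the export path is an assumption; this patch only touches
// lib/src/builder.dart and lib/src/reference.dart.
import 'package:flat_buffers/flex_buffers.dart';

void main() {
  // Build up a FlexBuffer gradually, then call finish() to get the
  // resulting byte array (per the Builder class documentation).
  final builder = Builder(size: 64);
  builder.addInt(42);
  final encoded = builder.finish();
  print(encoded);
}
```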
diff --git a/chromium/third_party/flatbuffers/src/dart/lib/src/reference.dart b/chromium/third_party/flatbuffers/src/dart/lib/src/reference.dart
index c2956e30d9d..e52d0b7092d 100644
--- a/chromium/third_party/flatbuffers/src/dart/lib/src/reference.dart
+++ b/chromium/third_party/flatbuffers/src/dart/lib/src/reference.dart
@@ -19,7 +19,7 @@ class Reference {
this._buffer, this._offset, this._parentWidth, int packedType, this._path,
[int? byteWidth, ValueType? valueType])
: _byteWidth = byteWidth ?? 1 << (packedType & 3),
- _valueType = valueType ?? ValueTypeUtils.fromInt(packedType >> 2) {}
+ _valueType = valueType ?? ValueTypeUtils.fromInt(packedType >> 2);
/// Use this method to access the root value of a FlexBuffer.
static Reference fromBuffer(ByteBuffer buffer) {
@@ -218,13 +218,13 @@ class Reference {
_length = 0;
} else if (_valueType == ValueType.String) {
final indirect = _indirect;
- var size_byte_width = _byteWidth;
- var size = _readUInt(indirect - size_byte_width,
- BitWidthUtil.fromByteWidth(size_byte_width));
+ var sizeByteWidth = _byteWidth;
+ var size = _readUInt(indirect - sizeByteWidth,
+ BitWidthUtil.fromByteWidth(sizeByteWidth));
while (_buffer.getInt8(indirect + size) != 0) {
- size_byte_width <<= 1;
- size = _readUInt(indirect - size_byte_width,
- BitWidthUtil.fromByteWidth(size_byte_width));
+ sizeByteWidth <<= 1;
+ size = _readUInt(indirect - sizeByteWidth,
+ BitWidthUtil.fromByteWidth(sizeByteWidth));
}
_length = size;
} else if (_valueType == ValueType.Key) {
@@ -373,9 +373,8 @@ class Reference {
return null;
}
- int _diffKeys(
- List<int> input, int index, int indirect_offset, int byteWidth) {
- final keyOffset = indirect_offset + index * byteWidth;
+ int _diffKeys(List<int> input, int index, int indirectOffset, int byteWidth) {
+ final keyOffset = indirectOffset + index * byteWidth;
final keyIndirectOffset =
keyOffset - _readUInt(keyOffset, BitWidthUtil.fromByteWidth(byteWidth));
for (var i = 0; i < input.length; i++) {
diff --git a/chromium/third_party/flatbuffers/src/dart/lib/src/types.dart b/chromium/third_party/flatbuffers/src/dart/lib/src/types.dart
index bc9bb3ac078..f9eefd8bc09 100644
--- a/chromium/third_party/flatbuffers/src/dart/lib/src/types.dart
+++ b/chromium/third_party/flatbuffers/src/dart/lib/src/types.dart
@@ -47,7 +47,7 @@ class BitWidthUtil {
if (value == 8) {
return BitWidth.width64;
}
- throw Exception('Unexpected value ${value}');
+ throw Exception('Unexpected value $value');
}
static int paddingSize(int bufSize, int scalarSize) {
diff --git a/chromium/third_party/flatbuffers/src/dart/publish.sh b/chromium/third_party/flatbuffers/src/dart/publish.sh
index 7f031013b53..bda43ba01bf 100755
--- a/chromium/third_party/flatbuffers/src/dart/publish.sh
+++ b/chromium/third_party/flatbuffers/src/dart/publish.sh
@@ -21,17 +21,12 @@ set -e
command -v dart >/dev/null 2>&1 || { echo >&2 "Require `dart` but it's not installed. Aborting."; exit 1; }
-cp ../samples/monster.fbs example/
-cp ../tests/monster_test.fbs test/
-cp -r ../tests/include_test/*.fbs test/
-cp -r ../tests/include_test/sub test/
-
-pushd example
-../../flatc --dart ./monster.fbs
+pushd ../tests
+./DartTest.sh
popd
-pushd test
-../../flatc --dart ./monster_test.fbs
+pushd ../samples
+./dart_sample.sh
popd
dart pub publish
diff --git a/chromium/third_party/flatbuffers/src/dart/pubspec.yaml b/chromium/third_party/flatbuffers/src/dart/pubspec.yaml
index 38738f55f73..5fe53b1dc7e 100644
--- a/chromium/third_party/flatbuffers/src/dart/pubspec.yaml
+++ b/chromium/third_party/flatbuffers/src/dart/pubspec.yaml
@@ -1,20 +1,15 @@
name: flat_buffers
version: 2.0.0
-description: >
- FlatBuffers reading and writing library for Dart. Use the flatc compiler to
- generate Dart classes for a FlatBuffers schema, and this library to assist with
- reading and writing the binary format.
-
- Based on original work by Konstantin Scheglov and Paul Berry of the Dart SDK team.
-authors:
-- Dan Field <dfield@gmail.com>
-- Konstantin Scheglov
-- Paul Berry
+description: FlatBuffers reading and writing library for Dart. Based on original work by Konstantin Scheglov and Paul Berry of the Dart SDK team.
homepage: https://github.com/google/flatbuffers
documentation: https://google.github.io/flatbuffers/index.html
+
+environment:
+ sdk: '>=2.12.0 <3.0.0'
+
dev_dependencies:
test: ^1.17.7
test_reflective_loader: ^0.2.0
path: ^1.8.0
-environment:
- sdk: '>=2.12.0 <3.0.0'
+ lints: ^1.0.1
+
diff --git a/chromium/third_party/flatbuffers/src/docs/source/CUsage.md b/chromium/third_party/flatbuffers/src/docs/source/CUsage.md
index 9aafa6f775e..bd1ec159d21 100644
--- a/chromium/third_party/flatbuffers/src/docs/source/CUsage.md
+++ b/chromium/third_party/flatbuffers/src/docs/source/CUsage.md
@@ -7,7 +7,7 @@ The `flatcc` C schema compiler can generate code offline as well as
online via a C library. It can also generate buffer verifiers and fast
JSON parsers, printers.
-Great care has been taken to ensure compatibily with the main `flatc`
+Great care has been taken to ensure compatibility with the main `flatc`
project.
## General Documentation
diff --git a/chromium/third_party/flatbuffers/src/docs/source/Compiler.md b/chromium/third_party/flatbuffers/src/docs/source/Compiler.md
index 0d2d51ff73a..249ecdded45 100644
--- a/chromium/third_party/flatbuffers/src/docs/source/Compiler.md
+++ b/chromium/third_party/flatbuffers/src/docs/source/Compiler.md
@@ -79,7 +79,7 @@ Additional options:
- `--allow-non-utf8` : Pass non-UTF-8 input through parser and emit nonstandard
\x escapes in JSON. (Default is to raise parse error on non-UTF-8 input.)
-- `--natural-utf8` : Output strings with UTF-8 as human-readable strings.
+- `--natural-utf8` : Output strings with UTF-8 as human-readable strings.
By default, UTF-8 characters are printed as \uXXXX escapes.
- `--defaults-json` : Output fields whose value is equal to the default value
@@ -216,7 +216,9 @@ Additional options:
- `--flexbuffers` : Used with "binary" and "json" options, it generates
data using schema-less FlexBuffers.
-- `--no-warnings` : Inhibit all warning messages.
+- `--no-warnings` : Inhibit all warning messages.
+
+- `--cs-global-alias` : Prepend `global::` to all user-generated csharp classes and structs.
NOTE: short-form options for generators are deprecated, use the long form
whenever possible.
diff --git a/chromium/third_party/flatbuffers/src/docs/source/CppUsage.md b/chromium/third_party/flatbuffers/src/docs/source/CppUsage.md
index 0589fc51aed..82d6f154aa8 100644
--- a/chromium/third_party/flatbuffers/src/docs/source/CppUsage.md
+++ b/chromium/third_party/flatbuffers/src/docs/source/CppUsage.md
@@ -56,7 +56,7 @@ For example, here is how you would read a FlatBuffer binary file in C++:
First, include the library and generated code. Then read the file into
a `char *` array, which you pass to `GetMonster()`.
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
+```cpp
#include "flatbuffers/flatbuffers.h"
#include "monster_test_generate.h"
#include <iostream> // C++ header file for printing
@@ -73,18 +73,18 @@ a `char *` array, which you pass to `GetMonster()`.
infile.close();
auto monster = GetMonster(data);
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```
`monster` is of type `Monster *`, and points to somewhere *inside* your
-buffer (root object pointers are not the same as `buffer_pointer` !).
+buffer (root object pointers are not the same as `buffer_pointer` \!).
If you look in your generated header, you'll see it has
convenient accessors for all fields, e.g. `hp()`, `mana()`, etc:
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
- std::cout << "hp : " << monster->hp() << std::endl; // `80`
- std::cout << "mana : " << monster->mana() << std::endl; // default value of `150`
- std::cout << "name : " << monster->name()->c_str() << std::endl; // "MyMonster"
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```cpp
+ std::cout << "hp : " << monster->hp() << std::endl; // '80'
+ std::cout << "mana : " << monster->mana() << std::endl; // default value of '150'
+ std::cout << "name : " << monster->name()->c_str() << std::endl; // "MyMonster"
+```
*Note that we never stored a `mana` value, so it will return the default.*
@@ -96,7 +96,7 @@ The following attributes are supported:
Specifically, `CreateXxxDirect` functions and `Pack` functions for object
based API (see below) will use `CreateSharedString` to create strings.
-## Object based API. {#flatbuffers_cpp_object_based_api}
+## Object based API {#flatbuffers_cpp_object_based_api}
FlatBuffers is all about memory efficiency, which is why its base API is written
around using as little as possible of it. This does make the API clumsier
@@ -109,7 +109,7 @@ construction, access and mutation.
To use:
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
+```cpp
// Autogenerated class from table Monster.
MonsterT monsterobj;
@@ -123,7 +123,7 @@ To use:
// Serialize into new flatbuffer.
FlatBufferBuilder fbb;
fbb.Finish(Monster::Pack(fbb, &monsterobj));
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```
The following attributes are specific to the object-based API code generation:
@@ -144,19 +144,19 @@ The following attributes are specific to the object-based API code generation:
This can be used to provide allocation from a pool for example, for faster
unpacking when using the object-based API.
- Minimal Example:
+Minimal Example:
- schema:
+schema:
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
+```cpp
table mytable(native_custom_alloc:"custom_allocator") {
...
}
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```
- with custom_allocator defined before `flatbuffers.h` is included, as:
+with `custom_allocator` defined before `flatbuffers.h` is included, as:
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
+```cpp
template <typename T> struct custom_allocator : public std::allocator<T> {
typedef T *pointer;
@@ -175,34 +175,35 @@ The following attributes are specific to the object-based API code generation:
}
custom_allocator() throw() {}
+
template <class U>
custom_allocator(const custom_allocator<U>&) throw() {}
};
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```
- `native_type("type")` (on a struct): In some cases, a more optimal C++ data
- type exists for a given struct. For example, the following schema:
+type exists for a given struct. For example, the following schema:
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
+```cpp
struct Vec2 {
x: float;
y: float;
}
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```
- generates the following Object-Based API class:
+generates the following Object-Based API class:
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
+```cpp
struct Vec2T : flatbuffers::NativeTable {
float x;
float y;
};
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```
- However, it can be useful to instead use a user-defined C++ type since it
- can provide more functionality, eg.
+However, it can be useful to instead use a user-defined C++ type since it
+can provide more functionality, e.g.
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
+```cpp
struct vector2 {
float x = 0, y = 0;
vector2 operator+(vector2 rhs) const { ... }
@@ -210,22 +211,22 @@ The following attributes are specific to the object-based API code generation:
float length() const { ... }
// etc.
};
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```
- The `native_type` attribute will replace the usage of the generated class
- with the given type. So, continuing with the example, the generated
- code would use `vector2` in place of `Vec2T` for all generated code of
- the Object-Based API.
+The `native_type` attribute will replace the usage of the generated class
+with the given type. So, continuing with the example, the generated
+code would use `vector2` in place of `Vec2T` for all generated code of
+the Object-Based API.
- However, because the `native_type` is unknown to flatbuffers, the user must
- provide the following functions to aide in the serialization process:
+However, because the `native_type` is unknown to flatbuffers, the user must
+provide the following functions to aid in the serialization process:
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
+```cpp
namespace flatbuffers {
Vec2 Pack(const vector2& obj);
vector2 UnPack(const Vec2& obj);
}
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```
- `native_type_pack_name("name")` (on a struct when `native_type` is
specified, too): when you want to use the same `native_type` multiple times
@@ -235,12 +236,12 @@ The following attributes are specific to the object-based API code generation:
specify `native_type_pack_name("Vec2")` in the above example you now need to
implement these serialization functions instead:
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
+```cpp
namespace flatbuffers {
Vec2 PackVec2(const vector2& obj);
vector2 UnPackVec2(const Vec2& obj);
}
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```
Finally, the following top-level attributes:
@@ -253,7 +254,7 @@ Finally, the following top-level attributes:
- `force_align`: this attribute may not be respected in the object API,
depending on the alignment of the allocator used with `new`.
-# External references.
+# External references
An additional feature of the object API is the ability to allow you to load
multiple independent FlatBuffers, and have them refer to each other's objects
@@ -272,7 +273,7 @@ same string (or hash).
When you call `UnPack` (or `Create`), you'll need a function that maps from
hash to the object (see `resolver_function_t` for details).
-# Using different pointer types.
+# Using different pointer types
By default the object tree is built out of `std::unique_ptr`, but you can
influence this either globally (using the `--cpp-ptr-type` argument to
@@ -283,13 +284,13 @@ you, so you'll have to manage their lifecycles manually. To reference the
pointer type specified by the `--cpp-ptr-type` argument to `flatc` from a
flatbuffer field set the `cpp_ptr_type` attribute to `default_ptr_type`.
-# Using different string type.
+# Using different string type
By default the object tree is built out of `std::string`, but you can
influence this either globally (using the `--cpp-str-type` argument to
`flatc`) or per field using the `cpp_str_type` attribute.
-The type must support T::c_str(), T::length() and T::empty() as member functions.
+The type must support `T::c_str()`, `T::length()` and `T::empty()` as member functions.
Further, the type must be constructible from std::string, as by default a
std::string instance is constructed and then used to initialize the custom
@@ -298,7 +299,7 @@ custom string types; the `--cpp-str-flex-ctor` argument to `flatc` or the
per field attribute `cpp_str_flex_ctor` can be used to change this behavior,
so that the custom string type is constructed by passing the pointer and
length of the FlatBuffers String. The custom string class will require a
-constructor in the following format: custom_str_class(const char *, size_t).
+constructor in the following format: `custom_str_class(const char *, size_t)`.
Please note that the character array is not guaranteed to be NULL-terminated;
you should always use the provided size to determine the end of the string.
@@ -309,7 +310,7 @@ read and write data even if you don't know the exact format of a buffer, and
even allows you to change sizes of strings and vectors in-place.
The way this works is very elegant; there is actually a FlatBuffer schema that
-describes schemas (!) which you can find in `reflection/reflection.fbs`.
+describes schemas (\!) which you can find in `reflection/reflection.fbs`.
The compiler, `flatc`, can write out any schemas it has just parsed as a binary
FlatBuffer, corresponding to this meta-schema.
@@ -418,9 +419,9 @@ is accessed, all reads will end up inside the buffer.
Each root type will have a verification function generated for it,
e.g. for `Monster`, you can call:
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
+```cpp
bool ok = VerifyMonsterBuffer(Verifier(buf, len));
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```
if `ok` is true, the buffer is safe to read.
@@ -486,15 +487,15 @@ Load text (either a schema or json) into an in-memory buffer (there is a
convenient `LoadFile()` utility function in `flatbuffers/util.h` if you
wish). Construct a parser:
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
+```cpp
flatbuffers::Parser parser;
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```
Now you can parse any number of text files in sequence:
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
+```cpp
parser.Parse(text_file.c_str());
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+```
This works similarly to how the command-line compiler works: a sequence
of files parsed by the same `Parser` object allows later files to
diff --git a/chromium/third_party/flatbuffers/src/docs/source/Internals.md b/chromium/third_party/flatbuffers/src/docs/source/Internals.md
index 16a16660073..591f040c439 100644
--- a/chromium/third_party/flatbuffers/src/docs/source/Internals.md
+++ b/chromium/third_party/flatbuffers/src/docs/source/Internals.md
@@ -85,7 +85,7 @@ referred to by offset.
They start with an `soffset_t` to a vtable. This is a signed version of
`uoffset_t`, since vtables may be stored anywhere relative to the object.
-This offset is substracted (not added) from the object start to arrive at
+This offset is subtracted (not added) from the object start to arrive at
the vtable start. This offset is followed by all the
fields as aligned scalars (or offsets). Unlike structs, not all fields
need to be present. There is no set order and layout. A table may contain
@@ -435,7 +435,7 @@ The keys vector is a typed vector of keys. Both the keys and corresponding
values *have* to be stored in sorted order (as determined by `strcmp`), such
that lookups can be made using binary search.
-The reason the key vector is a seperate structure from the value vector is
+The reason the key vector is a separate structure from the value vector is
such that it can be shared between multiple value vectors, and also to
allow it to be treated as its own individual vector in code.
diff --git a/chromium/third_party/flatbuffers/src/docs/source/JavaScriptUsage.md b/chromium/third_party/flatbuffers/src/docs/source/JavaScriptUsage.md
index 2f0d37934eb..64764e21619 100644
--- a/chromium/third_party/flatbuffers/src/docs/source/JavaScriptUsage.md
+++ b/chromium/third_party/flatbuffers/src/docs/source/JavaScriptUsage.md
@@ -23,7 +23,7 @@ https://www.npmjs.com/package/flatbuffers. To use it from sources:
1. In your project, install it as a normal dependency, using the flatbuffers
folder as the source.
-## Using the FlatBuffers JavaScript libary
+## Using the FlatBuffers JavaScript library
*Note: See [Tutorial](@ref flatbuffers_guide_tutorial) for a more in-depth
example of how to use FlatBuffers.*
diff --git a/chromium/third_party/flatbuffers/src/docs/source/RustUsage.md b/chromium/third_party/flatbuffers/src/docs/source/RustUsage.md
index 68191179cec..9e959a913e2 100644
--- a/chromium/third_party/flatbuffers/src/docs/source/RustUsage.md
+++ b/chromium/third_party/flatbuffers/src/docs/source/RustUsage.md
@@ -36,7 +36,7 @@ The test code itself is located in
[integration_test.rs](https://github.com/google/flatbuffers/blob/master/tests/rust_usage_test/tests/integration_test.rs)
This test file requires `flatc` to be present. To review how to build the project,
-please read the [Building](@ref flatbuffers_guide_building) documenation.
+please read the [Building](@ref flatbuffers_guide_building) documentation.
To run the tests, execute `RustTest.sh` from the `flatbuffers/tests` directory.
For example, on [Linux](https://en.wikipedia.org/wiki/Linux), you would simply
diff --git a/chromium/third_party/flatbuffers/src/docs/source/Schemas.md b/chromium/third_party/flatbuffers/src/docs/source/Schemas.md
index 3a2b15dd540..10ed260c293 100644
--- a/chromium/third_party/flatbuffers/src/docs/source/Schemas.md
+++ b/chromium/third_party/flatbuffers/src/docs/source/Schemas.md
@@ -490,7 +490,7 @@ as much as possible such that you can use tables where you might be
tempted to use a dictionary.
Similarly, strings as values should only be used when they are
-truely open-ended. If you can, always use an enum instead.
+truly open-ended. If you can, always use an enum instead.
FlatBuffers doesn't have inheritance, so the way to represent a set
of related data structures is a union. Unions do have a cost however,
diff --git a/chromium/third_party/flatbuffers/src/docs/source/Tutorial.md b/chromium/third_party/flatbuffers/src/docs/source/Tutorial.md
index 9b801000656..cb77bed8f39 100644
--- a/chromium/third_party/flatbuffers/src/docs/source/Tutorial.md
+++ b/chromium/third_party/flatbuffers/src/docs/source/Tutorial.md
@@ -231,7 +231,7 @@ fields, such as `mana:short = 150`. If unspecified, scalar fields (like `int`,
`uint`, or `float`) will be given a default of `0` while strings and tables will
be given a default of `null`. Another thing to note is the line `friendly:bool =
false (deprecated);`. Since you cannot delete fields from a `table` (to support
-backwards compatability), you can set fields as `deprecated`, which will prevent
+backwards compatibility), you can set fields as `deprecated`, which will prevent
the generation of accessors for this field in the generated code. Be careful
when using `deprecated`, however, as it may break legacy code that used this
accessor.
@@ -1090,7 +1090,7 @@ traversal. This is generally easy to do on any tree structures.
</div>
<div class="language-lua">
~~~{.py}
- -- Serialize a name for our mosnter, called 'orc'
+ -- Serialize a name for our monster, called 'orc'
local name = builder:CreateString("Orc")
-- Create a `vector` representing the inventory of the Orc. Each number
@@ -1820,7 +1820,7 @@ Here is a repetition of these lines, to help highlight them more clearly:
<div class="language-cpp">
~~~{.cpp}
monster_builder.add_equipped_type(Equipment_Weapon); // Union type
- monster_builder.add_equipped(axe); // Union data
+ monster_builder.add_equipped(axe.Union()); // Union data
~~~
</div>
<div class="language-java">
@@ -2970,7 +2970,7 @@ We can access the type to dynamically cast the data as needed (since the
<div class="language-cpp">
~~~{.cpp}
- auto union_type = monster.equipped_type();
+ auto union_type = monster->equipped_type();
if (union_type == Equipment_Weapon) {
auto weapon = static_cast<const Weapon*>(monster->equipped()); // Requires `static_cast`
diff --git a/chromium/third_party/flatbuffers/src/docs/source/TypeScriptUsage.md b/chromium/third_party/flatbuffers/src/docs/source/TypeScriptUsage.md
index 437b49d590e..a1acaeb8a2b 100644
--- a/chromium/third_party/flatbuffers/src/docs/source/TypeScriptUsage.md
+++ b/chromium/third_party/flatbuffers/src/docs/source/TypeScriptUsage.md
@@ -26,7 +26,7 @@ flatbuffers/blob/master/tests/TypeScriptTest.sh) shell script.
*Note: The TypeScript test file requires [Node.js](https://nodejs.org/en/).*
-## Using the FlatBuffers TypeScript libary
+## Using the FlatBuffers TypeScript library
*Note: See [Tutorial](@ref flatbuffers_guide_tutorial) for a more in-depth
example of how to use FlatBuffers in TypeScript.*
diff --git a/chromium/third_party/flatbuffers/src/docs/source/WhitePaper.md b/chromium/third_party/flatbuffers/src/docs/source/WhitePaper.md
index e504ada41d5..af56b630922 100644
--- a/chromium/third_party/flatbuffers/src/docs/source/WhitePaper.md
+++ b/chromium/third_party/flatbuffers/src/docs/source/WhitePaper.md
@@ -20,7 +20,7 @@ and locality.
If it would be possible to do serialization with no temporary objects,
no additional allocation, no copying, and good locality, this could be
of great value. The reason serialization systems usually don't manage
-this is because it goes counter to forwards/backwards compatability, and
+this is because it goes counter to forwards/backwards compatibility, and
platform specifics like endianness and alignment.
FlatBuffers is what you get if you try anyway.
diff --git a/chromium/third_party/flatbuffers/src/grpc/README.md b/chromium/third_party/flatbuffers/src/grpc/README.md
index 685003f92bf..f46258fcb16 100644
--- a/chromium/third_party/flatbuffers/src/grpc/README.md
+++ b/chromium/third_party/flatbuffers/src/grpc/README.md
@@ -32,9 +32,8 @@ $bazel test src/compiler/...
### Linux
-1. `ln -s ${GRPC_INSTALL_PATH}/lib/libgrpc++_unsecure.so.6 ${GRPC_INSTALL_PATH}/lib/libgrpc++_unsecure.so.1`
-2. `export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${GRPC_INSTALL_PATH}/lib`
-3. `make test ARGS=-V`
+1. `export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${GRPC_INSTALL_PATH}/lib`
+2. `make test ARGS=-V`
For Bazel users:
diff --git a/chromium/third_party/flatbuffers/src/grpc/build_grpc.sh b/chromium/third_party/flatbuffers/src/grpc/build_grpc.sh
index 190a1ef9664..49c5a60f0eb 100755
--- a/chromium/third_party/flatbuffers/src/grpc/build_grpc.sh
+++ b/chromium/third_party/flatbuffers/src/grpc/build_grpc.sh
@@ -1,11 +1,11 @@
#!/bin/bash
-grpc_1_36_0_githash=736e3758351ced3cd842bad3ba4e2540f01bbc48
+grpc_1_39_0_githash=58602e20a3f3e48f24a4114c757099b25b947f7b
function build_grpc () {
git clone https://github.com/grpc/grpc.git google/grpc
cd google/grpc
- git checkout ${grpc_1_36_0_githash}
+ git checkout ${grpc_1_39_0_githash}
git submodule update --init
# Apply boringssl build patch
cd third_party/boringssl-with-bazel
@@ -13,11 +13,8 @@ function build_grpc () {
cd ../..
mkdir ../grpc_build
cd ../grpc_build
- cmake ../grpc -DgRPC_INSTALL=ON -DgRPC_BUILD_TESTS=OFF -DBUILD_SHARED_LIBS=ON -DCMAKE_INSTALL_PREFIX=`pwd`/../grpc/install
+ cmake ../grpc -DgRPC_INSTALL=ON -DgRPC_BUILD_TESTS=OFF -DABSL_ENABLE_INSTALL=ON -DBUILD_SHARED_LIBS=ON -DCMAKE_INSTALL_PREFIX=`pwd`/../grpc/install
cmake --build . --target install ${JOBS:+-j$JOBS}
- if [ ! -f ${GRPC_INSTALL_PATH}/lib/libgrpc++_unsecure.so.1 ]; then
- ln -s ${GRPC_INSTALL_PATH}/lib/libgrpc++_unsecure.so.6 ${GRPC_INSTALL_PATH}/lib/libgrpc++_unsecure.so.1
- fi
cd ../..
}
diff --git a/chromium/third_party/flatbuffers/src/grpc/examples/generate.sh b/chromium/third_party/flatbuffers/src/grpc/examples/generate.sh
index 39ca6620c3a..0f051da8e41 100755
--- a/chromium/third_party/flatbuffers/src/grpc/examples/generate.sh
+++ b/chromium/third_party/flatbuffers/src/grpc/examples/generate.sh
@@ -58,7 +58,7 @@ cd ${current_dir}
cd swift
cd Greeter/Sources/Model
-fbc --bfbs-filenames ../../../.. --swift ${generator}
+fbc --bfbs-filenames ../../../.. --swift --gen-json-emit ${generator}
cd ${current_dir}
diff --git a/chromium/third_party/flatbuffers/src/grpc/examples/python/greeter/models/HelloReply.py b/chromium/third_party/flatbuffers/src/grpc/examples/python/greeter/models/HelloReply.py
index 95434dccfde..301c84d9c52 100644
--- a/chromium/third_party/flatbuffers/src/grpc/examples/python/greeter/models/HelloReply.py
+++ b/chromium/third_party/flatbuffers/src/grpc/examples/python/greeter/models/HelloReply.py
@@ -31,15 +31,12 @@ class HelloReply(object):
return self._tab.String(o + self._tab.Pos)
return None
-def Start(builder): builder.StartObject(1)
-def HelloReplyStart(builder):
- """This method is deprecated. Please switch to Start."""
- return Start(builder)
-def AddMessage(builder, message): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0)
-def HelloReplyAddMessage(builder, message):
- """This method is deprecated. Please switch to AddMessage."""
- return AddMessage(builder, message)
-def End(builder): return builder.EndObject()
-def HelloReplyEnd(builder):
- """This method is deprecated. Please switch to End."""
- return End(builder) \ No newline at end of file
+def HelloReplyStart(builder): builder.StartObject(1)
+def Start(builder):
+ return HelloReplyStart(builder)
+def HelloReplyAddMessage(builder, message): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0)
+def AddMessage(builder, message):
+ return HelloReplyAddMessage(builder, message)
+def HelloReplyEnd(builder): return builder.EndObject()
+def End(builder):
+ return HelloReplyEnd(builder) \ No newline at end of file
diff --git a/chromium/third_party/flatbuffers/src/grpc/examples/python/greeter/models/HelloRequest.py b/chromium/third_party/flatbuffers/src/grpc/examples/python/greeter/models/HelloRequest.py
index 0263095ec31..122568fd21e 100644
--- a/chromium/third_party/flatbuffers/src/grpc/examples/python/greeter/models/HelloRequest.py
+++ b/chromium/third_party/flatbuffers/src/grpc/examples/python/greeter/models/HelloRequest.py
@@ -31,15 +31,12 @@ class HelloRequest(object):
return self._tab.String(o + self._tab.Pos)
return None
-def Start(builder): builder.StartObject(1)
-def HelloRequestStart(builder):
- """This method is deprecated. Please switch to Start."""
- return Start(builder)
-def AddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0)
-def HelloRequestAddName(builder, name):
- """This method is deprecated. Please switch to AddName."""
- return AddName(builder, name)
-def End(builder): return builder.EndObject()
-def HelloRequestEnd(builder):
- """This method is deprecated. Please switch to End."""
- return End(builder) \ No newline at end of file
+def HelloRequestStart(builder): builder.StartObject(1)
+def Start(builder):
+ return HelloRequestStart(builder)
+def HelloRequestAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0)
+def AddName(builder, name):
+ return HelloRequestAddName(builder, name)
+def HelloRequestEnd(builder): return builder.EndObject()
+def End(builder):
+ return HelloRequestEnd(builder) \ No newline at end of file
diff --git a/chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/Model/greeter_generated.swift b/chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/Model/greeter_generated.swift
index bc1eca3cbfd..a7c420bc1f3 100644
--- a/chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/Model/greeter_generated.swift
+++ b/chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/Model/greeter_generated.swift
@@ -42,6 +42,17 @@ public struct models_HelloReply: FlatBufferObject, Verifiable {
}
}
+extension models_HelloReply: Encodable {
+
+ enum CodingKeys: String, CodingKey {
+ case message = "message"
+ }
+ public func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: CodingKeys.self)
+ try container.encodeIfPresent(message, forKey: .message)
+ }
+}
+
public struct models_HelloRequest: FlatBufferObject, Verifiable {
static func validateVersion() { FlatBuffersVersion_2_0_0() }
@@ -80,3 +91,14 @@ public struct models_HelloRequest: FlatBufferObject, Verifiable {
}
}
+extension models_HelloRequest: Encodable {
+
+ enum CodingKeys: String, CodingKey {
+ case name = "name"
+ }
+ public func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: CodingKeys.self)
+ try container.encodeIfPresent(name, forKey: .name)
+ }
+}
+
diff --git a/chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/client/main.swift b/chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/client/main.swift
index a6b11300001..168b0713c8e 100644
--- a/chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/client/main.swift
+++ b/chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/client/main.swift
@@ -37,7 +37,8 @@ func greet(name: String, client greeter: models_GreeterServiceClient) {
builder.finish(offset: root)
// Make the RPC call to the server.
- let sayHello = greeter.SayHello(Message<models_HelloRequest>(builder: &builder))
+ let sayHello = greeter
+ .SayHello(Message<models_HelloRequest>(builder: &builder))
// wait() on the response to stop the program from exiting before the response is received.
do {
@@ -54,7 +55,8 @@ func greet(name: String, client greeter: models_GreeterServiceClient) {
builder.finish(offset: manyRoot)
let call = greeter.SayManyHellos(Message(builder: &builder)) { message in
- print("Greeter SayManyHellos received: \(message.object.message ?? "Unknown")")
+ print(
+ "Greeter SayManyHellos received: \(message.object.message ?? "Unknown")")
}
let status = try! call.status.recover { _ in .processingError }.wait()
diff --git a/chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/server/main.swift b/chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/server/main.swift
index af1c5557fe6..fca623f5a3a 100644
--- a/chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/server/main.swift
+++ b/chromium/third_party/flatbuffers/src/grpc/examples/swift/Greeter/Sources/server/main.swift
@@ -32,7 +32,8 @@ class Greeter: models_GreeterProvider {
func SayHello(
request: Message<models_HelloRequest>,
- context: StatusOnlyCallContext) -> EventLoopFuture<Message<models_HelloReply>>
+ context: StatusOnlyCallContext)
+ -> EventLoopFuture<Message<models_HelloReply>>
{
let recipient = request.object.name ?? "Stranger"
@@ -40,17 +41,22 @@ class Greeter: models_GreeterProvider {
let off = builder.create(string: "Hello \(recipient)")
let root = models_HelloReply.createHelloReply(&builder, messageOffset: off)
builder.finish(offset: root)
- return context.eventLoop.makeSucceededFuture(Message<models_HelloReply>(builder: &builder))
+ return context.eventLoop
+ .makeSucceededFuture(Message<models_HelloReply>(builder: &builder))
}
func SayManyHellos(
request: Message<models_HelloRequest>,
- context: StreamingResponseCallContext<Message<models_HelloReply>>) -> EventLoopFuture<GRPCStatus>
+ context: StreamingResponseCallContext<Message<models_HelloReply>>)
+ -> EventLoopFuture<GRPCStatus>
{
for name in greetings {
var builder = FlatBufferBuilder()
- let off = builder.create(string: "\(name) \(request.object.name ?? "Unknown")")
- let root = models_HelloReply.createHelloReply(&builder, messageOffset: off)
+ let off = builder
+ .create(string: "\(name) \(request.object.name ?? "Unknown")")
+ let root = models_HelloReply.createHelloReply(
+ &builder,
+ messageOffset: off)
builder.finish(offset: root)
_ = context.sendResponse(Message<models_HelloReply>(builder: &builder))
}
diff --git a/chromium/third_party/flatbuffers/src/grpc/examples/ts/greeter/src/greeter.ts b/chromium/third_party/flatbuffers/src/grpc/examples/ts/greeter/src/greeter.ts
new file mode 100644
index 00000000000..5e62d999401
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/grpc/examples/ts/greeter/src/greeter.ts
@@ -0,0 +1,2 @@
+export { HelloReply } from './models/hello-reply';
+export { HelloRequest } from './models/hello-request';
diff --git a/chromium/third_party/flatbuffers/src/grpc/flatbuffers-java-grpc/pom.xml b/chromium/third_party/flatbuffers/src/grpc/flatbuffers-java-grpc/pom.xml
index 7cd6e563c55..f7d41763747 100644
--- a/chromium/third_party/flatbuffers/src/grpc/flatbuffers-java-grpc/pom.xml
+++ b/chromium/third_party/flatbuffers/src/grpc/flatbuffers-java-grpc/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>com.google.flatbuffers</groupId>
<artifactId>flatbuffers-parent</artifactId>
- <version>2.0.2</version>
+ <version>2.0.3</version>
</parent>
<artifactId>flatbuffers-java-grpc</artifactId>
<name>${project.artifactId}</name>
diff --git a/chromium/third_party/flatbuffers/src/grpc/pom.xml b/chromium/third_party/flatbuffers/src/grpc/pom.xml
index a7584c11eb3..a0608ae8ae1 100644
--- a/chromium/third_party/flatbuffers/src/grpc/pom.xml
+++ b/chromium/third_party/flatbuffers/src/grpc/pom.xml
@@ -4,7 +4,7 @@
<groupId>com.google.flatbuffers</groupId>
<artifactId>flatbuffers-parent</artifactId>
<packaging>pom</packaging>
- <version>2.0.2</version>
+ <version>2.0.3</version>
<name>flatbuffers-parent</name>
<description>parent pom for flatbuffers java artifacts</description>
<properties>
diff --git a/chromium/third_party/flatbuffers/src/grpc/src/compiler/schema_interface.h b/chromium/third_party/flatbuffers/src/grpc/src/compiler/schema_interface.h
index 04494981984..9804e641f5e 100644
--- a/chromium/third_party/flatbuffers/src/grpc/src/compiler/schema_interface.h
+++ b/chromium/third_party/flatbuffers/src/grpc/src/compiler/schema_interface.h
@@ -93,7 +93,7 @@ struct Printer {
virtual void Print(const std::map<grpc::string, grpc::string> &vars,
const char *template_string) = 0;
virtual void Print(const char *string) = 0;
- virtual void SetIndentationSize(const int size) = 0;
+ virtual void SetIndentationSize(const size_t size) = 0;
virtual void Indent() = 0;
virtual void Outdent() = 0;
};
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/allocator.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/allocator.h
new file mode 100644
index 00000000000..f4ef22db458
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/allocator.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_ALLOCATOR_H_
+#define FLATBUFFERS_ALLOCATOR_H_
+
+#include "flatbuffers/base.h"
+
+namespace flatbuffers {
+
+// Allocator interface. This is flatbuffers-specific and meant only for
+// `vector_downward` usage.
+class Allocator {
+ public:
+ virtual ~Allocator() {}
+
+ // Allocate `size` bytes of memory.
+ virtual uint8_t *allocate(size_t size) = 0;
+
+ // Deallocate `size` bytes of memory at `p` allocated by this allocator.
+ virtual void deallocate(uint8_t *p, size_t size) = 0;
+
+ // Reallocate `new_size` bytes of memory, replacing the old region of size
+ // `old_size` at `p`. In contrast to a normal realloc, this grows downwards,
+ // and is intended specifically for `vector_downward` use.
+ // `in_use_back` and `in_use_front` indicate how much of `old_size` is
+ // actually in use at each end, and needs to be copied.
+ virtual uint8_t *reallocate_downward(uint8_t *old_p, size_t old_size,
+ size_t new_size, size_t in_use_back,
+ size_t in_use_front) {
+ FLATBUFFERS_ASSERT(new_size > old_size); // vector_downward only grows
+ uint8_t *new_p = allocate(new_size);
+ memcpy_downward(old_p, old_size, new_p, new_size, in_use_back,
+ in_use_front);
+ deallocate(old_p, old_size);
+ return new_p;
+ }
+
+ protected:
+ // Called by `reallocate_downward` to copy memory from `old_p` of `old_size`
+ // to `new_p` of `new_size`. Only memory of size `in_use_front` and
+ // `in_use_back` will be copied from the front and back of the old memory
+ // allocation.
+ void memcpy_downward(uint8_t *old_p, size_t old_size, uint8_t *new_p,
+ size_t new_size, size_t in_use_back,
+ size_t in_use_front) {
+ memcpy(new_p + new_size - in_use_back, old_p + old_size - in_use_back,
+ in_use_back);
+ memcpy(new_p, old_p, in_use_front);
+ }
+};
+
+} // namespace flatbuffers
+
+#endif // FLATBUFFERS_ALLOCATOR_H_
\ No newline at end of file
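
For orientation, a minimal editorial sketch (not part of this patch) of a user-supplied Allocator: only the two pure-virtual hooks are overridden, so reallocate_downward() falls back to the allocate/copy/deallocate default shown above. The CountingAllocator name and its byte counter are hypothetical.

#include <cstdint>
#include <cstdlib>

#include "flatbuffers/allocator.h"

// Hypothetical allocator that tracks how many bytes are currently live.
class CountingAllocator : public flatbuffers::Allocator {
 public:
  uint8_t *allocate(size_t size) override {
    live_bytes_ += size;
    return static_cast<uint8_t *>(std::malloc(size));
  }
  void deallocate(uint8_t *p, size_t size) override {
    live_bytes_ -= size;
    std::free(p);
  }
  size_t live_bytes() const { return live_bytes_; }

 private:
  size_t live_bytes_ = 0;
};
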
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/array.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/array.h
new file mode 100644
index 00000000000..d4b73fc9e17
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/array.h
@@ -0,0 +1,243 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_ARRAY_H_
+#define FLATBUFFERS_ARRAY_H_
+
+#include "flatbuffers/base.h"
+#include "flatbuffers/stl_emulation.h"
+#include "flatbuffers/vector.h"
+
+namespace flatbuffers {
+
+// This is used as a helper type for accessing arrays.
+template<typename T, uint16_t length> class Array {
+ // Array<T> can carry only POD data types (scalars or structs).
+ typedef typename flatbuffers::bool_constant<flatbuffers::is_scalar<T>::value>
+ scalar_tag;
+ typedef
+ typename flatbuffers::conditional<scalar_tag::value, T, const T *>::type
+ IndirectHelperType;
+
+ public:
+ typedef uint16_t size_type;
+ typedef typename IndirectHelper<IndirectHelperType>::return_type return_type;
+ typedef VectorIterator<T, return_type> const_iterator;
+ typedef VectorReverseIterator<const_iterator> const_reverse_iterator;
+
+ // If T is a LE-scalar or a struct (!scalar_tag::value).
+ static FLATBUFFERS_CONSTEXPR bool is_span_observable =
+ (scalar_tag::value && (FLATBUFFERS_LITTLEENDIAN || sizeof(T) == 1)) ||
+ !scalar_tag::value;
+
+ FLATBUFFERS_CONSTEXPR uint16_t size() const { return length; }
+
+ return_type Get(uoffset_t i) const {
+ FLATBUFFERS_ASSERT(i < size());
+ return IndirectHelper<IndirectHelperType>::Read(Data(), i);
+ }
+
+ return_type operator[](uoffset_t i) const { return Get(i); }
+
+ // If this is an Array of enums, T will be its storage type, not the enum
+ // type. This function makes it convenient to retrieve values with enum
+ // type E.
+ template<typename E> E GetEnum(uoffset_t i) const {
+ return static_cast<E>(Get(i));
+ }
+
+ const_iterator begin() const { return const_iterator(Data(), 0); }
+ const_iterator end() const { return const_iterator(Data(), size()); }
+
+ const_reverse_iterator rbegin() const {
+ return const_reverse_iterator(end());
+ }
+ const_reverse_iterator rend() const {
+ return const_reverse_iterator(begin());
+ }
+
+ const_iterator cbegin() const { return begin(); }
+ const_iterator cend() const { return end(); }
+
+ const_reverse_iterator crbegin() const { return rbegin(); }
+ const_reverse_iterator crend() const { return rend(); }
+
+ // Get a mutable pointer to elements inside this array.
+ // This method is used to mutate arrays of structs, followed by a @p Mutate
+ // operation. For primitive types, use @p Mutate directly.
+ // @warning Assignments and reads to/from the dereferenced pointer are not
+ // automatically converted to the correct endianness.
+ typename flatbuffers::conditional<scalar_tag::value, void, T *>::type
+ GetMutablePointer(uoffset_t i) const {
+ FLATBUFFERS_ASSERT(i < size());
+ return const_cast<T *>(&data()[i]);
+ }
+
+ // Change elements if you have a non-const pointer to this object.
+ void Mutate(uoffset_t i, const T &val) { MutateImpl(scalar_tag(), i, val); }
+
+ // The raw data in little endian format. Use with care.
+ const uint8_t *Data() const { return data_; }
+
+ uint8_t *Data() { return data_; }
+
+ // Similarly, but typed, much like std::vector::data
+ const T *data() const { return reinterpret_cast<const T *>(Data()); }
+ T *data() { return reinterpret_cast<T *>(Data()); }
+
+ // Copy data from a span with endian conversion.
+ // If this Array and the span overlap, the behavior is undefined.
+ void CopyFromSpan(flatbuffers::span<const T, length> src) {
+ const auto p1 = reinterpret_cast<const uint8_t *>(src.data());
+ const auto p2 = Data();
+ FLATBUFFERS_ASSERT(!(p1 >= p2 && p1 < (p2 + length)) &&
+ !(p2 >= p1 && p2 < (p1 + length)));
+ (void)p1;
+ (void)p2;
+ CopyFromSpanImpl(flatbuffers::bool_constant<is_span_observable>(), src);
+ }
+
+ protected:
+ void MutateImpl(flatbuffers::true_type, uoffset_t i, const T &val) {
+ FLATBUFFERS_ASSERT(i < size());
+ WriteScalar(data() + i, val);
+ }
+
+ void MutateImpl(flatbuffers::false_type, uoffset_t i, const T &val) {
+ *(GetMutablePointer(i)) = val;
+ }
+
+ void CopyFromSpanImpl(flatbuffers::true_type,
+ flatbuffers::span<const T, length> src) {
+ // Use std::memcpy() instead of std::copy() to avoid performance degradation
+ // due to aliasing if T is char or unsigned char.
+ // The size is known at compile time, so memcpy would be inlined.
+ std::memcpy(data(), src.data(), length * sizeof(T));
+ }
+
+ // Copy data from flatbuffers::span with endian conversion.
+ void CopyFromSpanImpl(flatbuffers::false_type,
+ flatbuffers::span<const T, length> src) {
+ for (size_type k = 0; k < length; k++) { Mutate(k, src[k]); }
+ }
+
+ // This class is only used to access pre-existing data. Don't ever
+ // try to construct these manually.
+ // 'constexpr' allows us to use 'size()' at compile time.
+ // @note Must not use 'FLATBUFFERS_CONSTEXPR' here, as const is not allowed on
+ // a constructor.
+#if defined(__cpp_constexpr)
+ constexpr Array();
+#else
+ Array();
+#endif
+
+ uint8_t data_[length * sizeof(T)];
+
+ private:
+ // This class is a pointer. Copying will therefore create an invalid object.
+ // Private and unimplemented copy constructor.
+ Array(const Array &);
+ Array &operator=(const Array &);
+};
+
+// Specialization for Array[struct] with access using Offset<void> pointer.
+// This specialization used by idl_gen_text.cpp.
+template<typename T, uint16_t length> class Array<Offset<T>, length> {
+ static_assert(flatbuffers::is_same<T, void>::value, "unexpected type T");
+
+ public:
+ typedef const void *return_type;
+
+ const uint8_t *Data() const { return data_; }
+
+ // Make idl_gen_text.cpp::PrintContainer happy.
+ return_type operator[](uoffset_t) const {
+ FLATBUFFERS_ASSERT(false);
+ return nullptr;
+ }
+
+ private:
+ // This class is only used to access pre-existing data.
+ Array();
+ Array(const Array &);
+ Array &operator=(const Array &);
+
+ uint8_t data_[1];
+};
+
+template<class U, uint16_t N>
+FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<U, N> make_span(Array<U, N> &arr)
+ FLATBUFFERS_NOEXCEPT {
+ static_assert(
+ Array<U, N>::is_span_observable,
+ "wrong type U, only plain struct, LE-scalar, or byte types are allowed");
+ return span<U, N>(arr.data(), N);
+}
+
+template<class U, uint16_t N>
+FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<const U, N> make_span(
+ const Array<U, N> &arr) FLATBUFFERS_NOEXCEPT {
+ static_assert(
+ Array<U, N>::is_span_observable,
+ "wrong type U, only plain struct, LE-scalar, or byte types are allowed");
+ return span<const U, N>(arr.data(), N);
+}
+
+template<class U, uint16_t N>
+FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<uint8_t, sizeof(U) * N>
+make_bytes_span(Array<U, N> &arr) FLATBUFFERS_NOEXCEPT {
+ static_assert(Array<U, N>::is_span_observable,
+ "internal error, Array<T> might hold only scalars or structs");
+ return span<uint8_t, sizeof(U) * N>(arr.Data(), sizeof(U) * N);
+}
+
+template<class U, uint16_t N>
+FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<const uint8_t, sizeof(U) * N>
+make_bytes_span(const Array<U, N> &arr) FLATBUFFERS_NOEXCEPT {
+ static_assert(Array<U, N>::is_span_observable,
+ "internal error, Array<T> might hold only scalars or structs");
+ return span<const uint8_t, sizeof(U) * N>(arr.Data(), sizeof(U) * N);
+}
+
+// Cast a raw T[length] to a raw flatbuffers::Array<T, length>
+// without endian conversion. Use with care.
+// TODO: move these Cast-methods to `internal` namespace.
+template<typename T, uint16_t length>
+Array<T, length> &CastToArray(T (&arr)[length]) {
+ return *reinterpret_cast<Array<T, length> *>(arr);
+}
+
+template<typename T, uint16_t length>
+const Array<T, length> &CastToArray(const T (&arr)[length]) {
+ return *reinterpret_cast<const Array<T, length> *>(arr);
+}
+
+template<typename E, typename T, uint16_t length>
+Array<E, length> &CastToArrayOfEnum(T (&arr)[length]) {
+ static_assert(sizeof(E) == sizeof(T), "invalid enum type E");
+ return *reinterpret_cast<Array<E, length> *>(arr);
+}
+
+template<typename E, typename T, uint16_t length>
+const Array<E, length> &CastToArrayOfEnum(const T (&arr)[length]) {
+ static_assert(sizeof(E) == sizeof(T), "invalid enum type E");
+ return *reinterpret_cast<const Array<E, length> *>(arr);
+}
+
+} // namespace flatbuffers
+
+#endif // FLATBUFFERS_ARRAY_H_
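
A small editorial sketch (not part of the patch) of the helpers above: CastToArray overlays a flatbuffers::Array on a raw C array without any endian conversion, and make_span gives a sized view over it.

#include "flatbuffers/array.h"

void array_example() {
  uint8_t raw[4] = {1, 2, 3, 4};
  // Reinterpret the raw storage in place; no copy, no endian conversion.
  flatbuffers::Array<uint8_t, 4> &arr = flatbuffers::CastToArray(raw);
  // Element access goes through Get()/operator[].
  uint8_t first = arr.Get(0);
  // is_span_observable holds for byte types, so a typed span can be taken.
  flatbuffers::span<uint8_t, 4> view = flatbuffers::make_span(arr);
  (void)first;
  (void)view;
}
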
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/base.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/base.h
index de7898dcc2a..947d774763c 100644
--- a/chromium/third_party/flatbuffers/src/include/flatbuffers/base.h
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/base.h
@@ -1,5 +1,5 @@
#ifndef FLATBUFFERS_BASE_H_
-# define FLATBUFFERS_BASE_H_
+#define FLATBUFFERS_BASE_H_
// clang-format off
@@ -50,10 +50,6 @@
#include <unistd.h>
#endif
-#ifdef _STLPORT_VERSION
- #define FLATBUFFERS_CPP98_STL
-#endif
-
#ifdef __ANDROID__
#include <android/api-level.h>
#endif
@@ -294,7 +290,7 @@ template<typename T> FLATBUFFERS_CONSTEXPR inline bool IsConstTrue(T t) {
#if ((__cplusplus >= 201703L) \
|| (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L)))
// All attributes unknown to an implementation are ignored without causing an error.
- #define FLATBUFFERS_ATTRIBUTE(attr) [[attr]]
+ #define FLATBUFFERS_ATTRIBUTE(attr) attr
#define FLATBUFFERS_FALLTHROUGH() [[fallthrough]]
#else
@@ -335,6 +331,9 @@ typedef uintmax_t largest_scalar_t;
// We support aligning the contents of buffers up to this size.
#define FLATBUFFERS_MAX_ALIGNMENT 16
+/// @brief The length of a FlatBuffer file identifier.
+static const size_t kFileIdentifierLength = 4;
+
inline bool VerifyAlignmentRequirements(size_t align, size_t min_align = 1) {
return (min_align <= align) && (align <= (FLATBUFFERS_MAX_ALIGNMENT)) &&
(align & (align - 1)) == 0; // must be power of 2
@@ -438,5 +437,38 @@ inline size_t PaddingBytes(size_t buf_size, size_t scalar_size) {
return ((~buf_size) + 1) & (scalar_size - 1);
}
+// Generic 'operator==' with conditional specialisations.
+// T e - new value of a scalar field.
+// T def - default of scalar (is known at compile-time).
+template<typename T> inline bool IsTheSameAs(T e, T def) { return e == def; }
+
+#if defined(FLATBUFFERS_NAN_DEFAULTS) && \
+ defined(FLATBUFFERS_HAS_NEW_STRTOD) && (FLATBUFFERS_HAS_NEW_STRTOD > 0)
+// Like `operator==(e, def)` with weak NaN if T=(float|double).
+template<typename T> inline bool IsFloatTheSameAs(T e, T def) {
+ return (e == def) || ((def != def) && (e != e));
+}
+template<> inline bool IsTheSameAs<float>(float e, float def) {
+ return IsFloatTheSameAs(e, def);
+}
+template<> inline bool IsTheSameAs<double>(double e, double def) {
+ return IsFloatTheSameAs(e, def);
+}
+#endif
+
+// Check 'v' is out of closed range [low; high].
+// Workaround for GCC warning [-Werror=type-limits]:
+// comparison is always true due to limited range of data type.
+template<typename T>
+inline bool IsOutRange(const T &v, const T &low, const T &high) {
+ return (v < low) || (high < v);
+}
+
+// Check 'v' is in closed range [low; high].
+template<typename T>
+inline bool IsInRange(const T &v, const T &low, const T &high) {
+ return !IsOutRange(v, low, high);
+}
+
} // namespace flatbuffers
#endif // FLATBUFFERS_BASE_H_
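
A short editorial sketch (not part of this patch) of the new helpers added here; the values are arbitrary.

#include "flatbuffers/base.h"

void range_helpers_example() {
  // Closed-interval checks that sidestep -Wtype-limits warnings.
  bool inside = flatbuffers::IsInRange(5, 0, 10);     // true
  bool outside = flatbuffers::IsOutRange(11, 0, 10);  // true
  // Plain operator== unless the NaN-aware float specialisations are enabled.
  bool same = flatbuffers::IsTheSameAs(0, 0);         // true
  (void)inside;
  (void)outside;
  (void)same;
}
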
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/buffer.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/buffer.h
new file mode 100644
index 00000000000..e8d2ce9c74b
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/buffer.h
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_BUFFER_H_
+#define FLATBUFFERS_BUFFER_H_
+
+#include "flatbuffers/base.h"
+
+namespace flatbuffers {
+
+// Wrapper for uoffset_t to allow safe template specialization.
+// Value is allowed to be 0 to indicate a null object (see e.g. AddOffset).
+template<typename T> struct Offset {
+ uoffset_t o;
+ Offset() : o(0) {}
+ Offset(uoffset_t _o) : o(_o) {}
+ Offset<void> Union() const { return Offset<void>(o); }
+ bool IsNull() const { return !o; }
+};
+
+inline void EndianCheck() {
+ int endiantest = 1;
+ // If this fails, see FLATBUFFERS_LITTLEENDIAN above.
+ FLATBUFFERS_ASSERT(*reinterpret_cast<char *>(&endiantest) ==
+ FLATBUFFERS_LITTLEENDIAN);
+ (void)endiantest;
+}
+
+template<typename T> FLATBUFFERS_CONSTEXPR size_t AlignOf() {
+ // clang-format off
+ #ifdef _MSC_VER
+ return __alignof(T);
+ #else
+ #ifndef alignof
+ return __alignof__(T);
+ #else
+ return alignof(T);
+ #endif
+ #endif
+ // clang-format on
+}
+
+// Lexicographically compare two strings (possibly containing nulls), and
+// return true if the first is less than the second.
+static inline bool StringLessThan(const char *a_data, uoffset_t a_size,
+ const char *b_data, uoffset_t b_size) {
+ const auto cmp = memcmp(a_data, b_data, (std::min)(a_size, b_size));
+ return cmp == 0 ? a_size < b_size : cmp < 0;
+}
+
+// When we read serialized data from memory, in the case of most scalars,
+// we want to just read T, but in the case of Offset, we want to actually
+// perform the indirection and return a pointer.
+// The template specialization below does just that.
+// It is wrapped in a struct since function templates can't overload on the
+// return type like this.
+// The typedef is for the convenience of callers of this function
+// (avoiding the need for a trailing return decltype)
+template<typename T> struct IndirectHelper {
+ typedef T return_type;
+ typedef T mutable_return_type;
+ static const size_t element_stride = sizeof(T);
+ static return_type Read(const uint8_t *p, uoffset_t i) {
+ return EndianScalar((reinterpret_cast<const T *>(p))[i]);
+ }
+};
+template<typename T> struct IndirectHelper<Offset<T>> {
+ typedef const T *return_type;
+ typedef T *mutable_return_type;
+ static const size_t element_stride = sizeof(uoffset_t);
+ static return_type Read(const uint8_t *p, uoffset_t i) {
+ p += i * sizeof(uoffset_t);
+ return reinterpret_cast<return_type>(p + ReadScalar<uoffset_t>(p));
+ }
+};
+template<typename T> struct IndirectHelper<const T *> {
+ typedef const T *return_type;
+ typedef T *mutable_return_type;
+ static const size_t element_stride = sizeof(T);
+ static return_type Read(const uint8_t *p, uoffset_t i) {
+ return reinterpret_cast<const T *>(p + i * sizeof(T));
+ }
+};
+
+/// @brief Get a pointer to the file_identifier section of the buffer.
+/// @return Returns a const char pointer to the start of the file_identifier
+/// characters in the buffer. The returned char * has length
+/// 'flatbuffers::FlatBufferBuilder::kFileIdentifierLength'.
+/// This function is UNDEFINED for FlatBuffers whose schema does not include
+/// a file_identifier (likely points at padding or the start of the root
+/// vtable).
+inline const char *GetBufferIdentifier(const void *buf,
+ bool size_prefixed = false) {
+ return reinterpret_cast<const char *>(buf) +
+ ((size_prefixed) ? 2 * sizeof(uoffset_t) : sizeof(uoffset_t));
+}
+
+// Helper to see if the identifier in a buffer has the expected value.
+inline bool BufferHasIdentifier(const void *buf, const char *identifier,
+ bool size_prefixed = false) {
+ return strncmp(GetBufferIdentifier(buf, size_prefixed), identifier,
+ flatbuffers::kFileIdentifierLength) == 0;
+}
+
+/// @cond FLATBUFFERS_INTERNAL
+// Helpers to get a typed pointer to the root object contained in the buffer.
+template<typename T> T *GetMutableRoot(void *buf) {
+ EndianCheck();
+ return reinterpret_cast<T *>(
+ reinterpret_cast<uint8_t *>(buf) +
+ EndianScalar(*reinterpret_cast<uoffset_t *>(buf)));
+}
+
+template<typename T> T *GetMutableSizePrefixedRoot(void *buf) {
+ return GetMutableRoot<T>(reinterpret_cast<uint8_t *>(buf) +
+ sizeof(uoffset_t));
+}
+
+template<typename T> const T *GetRoot(const void *buf) {
+ return GetMutableRoot<T>(const_cast<void *>(buf));
+}
+
+template<typename T> const T *GetSizePrefixedRoot(const void *buf) {
+ return GetRoot<T>(reinterpret_cast<const uint8_t *>(buf) + sizeof(uoffset_t));
+}
+
+} // namespace flatbuffers
+
+#endif // FLATBUFFERS_BUFFER_H_
\ No newline at end of file
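
An editorial sketch (not part of this patch) of the read path these free functions support; the `Monster` table and the "MONS" identifier are hypothetical placeholders for flatc-generated code.

#include "flatbuffers/buffer.h"

struct Monster;  // hypothetical flatc-generated table (normally from a *_generated.h)

const Monster *ReadRoot(const void *buf) {
  // Optionally check the 4-byte file_identifier that follows the root offset.
  if (!flatbuffers::BufferHasIdentifier(buf, "MONS")) return nullptr;
  // Follow the root uoffset_t (with the endianness sanity check) to the table.
  return flatbuffers::GetRoot<Monster>(buf);
}
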
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/buffer_ref.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/buffer_ref.h
new file mode 100644
index 00000000000..ce30207330b
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/buffer_ref.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_BUFFER_REF_H_
+#define FLATBUFFERS_BUFFER_REF_H_
+
+#include "flatbuffers/base.h"
+#include "flatbuffers/verifier.h"
+
+namespace flatbuffers {
+
+// Convenient way to bundle a buffer and its length, to pass it around
+// typed by its root.
+// A BufferRef does not own its buffer.
+struct BufferRefBase {}; // for std::is_base_of
+
+template<typename T> struct BufferRef : BufferRefBase {
+ BufferRef() : buf(nullptr), len(0), must_free(false) {}
+ BufferRef(uint8_t *_buf, uoffset_t _len)
+ : buf(_buf), len(_len), must_free(false) {}
+
+ ~BufferRef() {
+ if (must_free) free(buf);
+ }
+
+ const T *GetRoot() const { return flatbuffers::GetRoot<T>(buf); }
+
+ bool Verify() {
+ Verifier verifier(buf, len);
+ return verifier.VerifyBuffer<T>(nullptr);
+ }
+
+ uint8_t *buf;
+ uoffset_t len;
+ bool must_free;
+};
+
+} // namespace flatbuffers
+
+#endif // FLATBUFFERS_BUFFER_REF_H_
\ No newline at end of file
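
An editorial sketch (not part of this patch) of BufferRef use, assuming a hypothetical flatc-generated `Monster` table whose generated header is available; BufferRef does not own the bytes unless must_free is set by the caller.

#include "flatbuffers/buffer_ref.h"
#include "monster_generated.h"  // hypothetical flatc output defining Monster

bool CheckAndRead(uint8_t *buf, flatbuffers::uoffset_t len) {
  // Bundle an externally owned buffer with its length, typed by its root.
  flatbuffers::BufferRef<Monster> ref(buf, len);
  if (!ref.Verify()) return false;  // runs a Verifier over the whole buffer
  const Monster *root = ref.GetRoot();
  return root != nullptr;
}
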
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/default_allocator.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/default_allocator.h
new file mode 100644
index 00000000000..8b173af11bc
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/default_allocator.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_DEFAULT_ALLOCATOR_H_
+#define FLATBUFFERS_DEFAULT_ALLOCATOR_H_
+
+#include "flatbuffers/allocator.h"
+#include "flatbuffers/base.h"
+
+namespace flatbuffers {
+
+// DefaultAllocator uses new/delete to allocate memory regions
+class DefaultAllocator : public Allocator {
+ public:
+ uint8_t *allocate(size_t size) FLATBUFFERS_OVERRIDE {
+ return new uint8_t[size];
+ }
+
+ void deallocate(uint8_t *p, size_t) FLATBUFFERS_OVERRIDE { delete[] p; }
+
+ static void dealloc(void *p, size_t) { delete[] static_cast<uint8_t *>(p); }
+};
+
+// These functions allow a null Allocator* to mean "use the default allocator",
+// as used by DetachedBuffer and vector_downward.
+// This is to avoid having a statically or dynamically allocated default
+// allocator, or having to move it between the classes that may own it.
+inline uint8_t *Allocate(Allocator *allocator, size_t size) {
+ return allocator ? allocator->allocate(size)
+ : DefaultAllocator().allocate(size);
+}
+
+inline void Deallocate(Allocator *allocator, uint8_t *p, size_t size) {
+ if (allocator)
+ allocator->deallocate(p, size);
+ else
+ DefaultAllocator().deallocate(p, size);
+}
+
+inline uint8_t *ReallocateDownward(Allocator *allocator, uint8_t *old_p,
+ size_t old_size, size_t new_size,
+ size_t in_use_back, size_t in_use_front) {
+ return allocator ? allocator->reallocate_downward(old_p, old_size, new_size,
+ in_use_back, in_use_front)
+ : DefaultAllocator().reallocate_downward(
+ old_p, old_size, new_size, in_use_back, in_use_front);
+}
+
+} // namespace flatbuffers
+
+#endif // FLATBUFFERS_DEFAULT_ALLOCATOR_H_
\ No newline at end of file
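
A brief editorial sketch (not part of this patch) of the null-allocator convention established here.

#include "flatbuffers/default_allocator.h"

void default_allocator_example() {
  // A null Allocator* means "use DefaultAllocator" (new[]/delete[]).
  uint8_t *block = flatbuffers::Allocate(nullptr, 64);
  flatbuffers::Deallocate(nullptr, block, 64);
}
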
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/detached_buffer.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/detached_buffer.h
new file mode 100644
index 00000000000..760a0884535
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/detached_buffer.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_DETACHED_BUFFER_H_
+#define FLATBUFFERS_DETACHED_BUFFER_H_
+
+#include "flatbuffers/allocator.h"
+#include "flatbuffers/base.h"
+#include "flatbuffers/default_allocator.h"
+
+namespace flatbuffers {
+
+// DetachedBuffer is a finished flatbuffer memory region, detached from its
+// builder. The original memory region and allocator are also stored so that
+// the DetachedBuffer can manage the memory lifetime.
+class DetachedBuffer {
+ public:
+ DetachedBuffer()
+ : allocator_(nullptr),
+ own_allocator_(false),
+ buf_(nullptr),
+ reserved_(0),
+ cur_(nullptr),
+ size_(0) {}
+
+ DetachedBuffer(Allocator *allocator, bool own_allocator, uint8_t *buf,
+ size_t reserved, uint8_t *cur, size_t sz)
+ : allocator_(allocator),
+ own_allocator_(own_allocator),
+ buf_(buf),
+ reserved_(reserved),
+ cur_(cur),
+ size_(sz) {}
+
+ DetachedBuffer(DetachedBuffer &&other)
+ : allocator_(other.allocator_),
+ own_allocator_(other.own_allocator_),
+ buf_(other.buf_),
+ reserved_(other.reserved_),
+ cur_(other.cur_),
+ size_(other.size_) {
+ other.reset();
+ }
+
+ DetachedBuffer &operator=(DetachedBuffer &&other) {
+ if (this == &other) return *this;
+
+ destroy();
+
+ allocator_ = other.allocator_;
+ own_allocator_ = other.own_allocator_;
+ buf_ = other.buf_;
+ reserved_ = other.reserved_;
+ cur_ = other.cur_;
+ size_ = other.size_;
+
+ other.reset();
+
+ return *this;
+ }
+
+ ~DetachedBuffer() { destroy(); }
+
+ const uint8_t *data() const { return cur_; }
+
+ uint8_t *data() { return cur_; }
+
+ size_t size() const { return size_; }
+
+ // These may change access mode; leave these at the end of the public section.
+ FLATBUFFERS_DELETE_FUNC(DetachedBuffer(const DetachedBuffer &other));
+ FLATBUFFERS_DELETE_FUNC(
+ DetachedBuffer &operator=(const DetachedBuffer &other));
+
+ protected:
+ Allocator *allocator_;
+ bool own_allocator_;
+ uint8_t *buf_;
+ size_t reserved_;
+ uint8_t *cur_;
+ size_t size_;
+
+ inline void destroy() {
+ if (buf_) Deallocate(allocator_, buf_, reserved_);
+ if (own_allocator_ && allocator_) { delete allocator_; }
+ reset();
+ }
+
+ inline void reset() {
+ allocator_ = nullptr;
+ own_allocator_ = false;
+ buf_ = nullptr;
+ reserved_ = 0;
+ cur_ = nullptr;
+ size_ = 0;
+ }
+};
+
+} // namespace flatbuffers
+
+#endif // FLATBUFFERS_DETACHED_BUFFER_H_
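
An editorial sketch (not part of this patch) of the move-only behavior described above.

#include <utility>

#include "flatbuffers/detached_buffer.h"

void detached_buffer_example(flatbuffers::DetachedBuffer first) {
  // Copying is deleted; moving transfers the region plus any owned allocator
  // and resets the source to an empty state.
  flatbuffers::DetachedBuffer second = std::move(first);
  const uint8_t *bytes = second.data();  // valid until `second` is destroyed
  size_t n = second.size();
  (void)bytes;
  (void)n;
}
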
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/flatbuffer_builder.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/flatbuffer_builder.h
new file mode 100644
index 00000000000..8be4efbe94d
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/flatbuffer_builder.h
@@ -0,0 +1,1187 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_FLATBUFFER_BUILDER_H_
+#define FLATBUFFERS_FLATBUFFER_BUILDER_H_
+
+#include <functional>
+
+#include "flatbuffers/allocator.h"
+#include "flatbuffers/array.h"
+#include "flatbuffers/base.h"
+#include "flatbuffers/buffer_ref.h"
+#include "flatbuffers/default_allocator.h"
+#include "flatbuffers/detached_buffer.h"
+#include "flatbuffers/stl_emulation.h"
+#include "flatbuffers/string.h"
+#include "flatbuffers/struct.h"
+#include "flatbuffers/table.h"
+#include "flatbuffers/vector.h"
+#include "flatbuffers/vector_downward.h"
+#include "flatbuffers/verifier.h"
+
+namespace flatbuffers {
+
+// Converts a Field ID to a virtual table offset.
+inline voffset_t FieldIndexToOffset(voffset_t field_id) {
+ // Should correspond to what EndTable() below builds up.
+ const int fixed_fields = 2; // Vtable size and Object Size.
+ return static_cast<voffset_t>((field_id + fixed_fields) * sizeof(voffset_t));
+}
+
+template<typename T, typename Alloc>
+const T *data(const std::vector<T, Alloc> &v) {
+ // Eventually the returned pointer gets passed down to memcpy, so
+ // we need it to be non-null to avoid undefined behavior.
+ static uint8_t t;
+ return v.empty() ? reinterpret_cast<const T *>(&t) : &v.front();
+}
+template<typename T, typename Alloc> T *data(std::vector<T, Alloc> &v) {
+ // Eventually the returned pointer gets passed down to memcpy, so
+ // we need it to be non-null to avoid undefined behavior.
+ static uint8_t t;
+ return v.empty() ? reinterpret_cast<T *>(&t) : &v.front();
+}
+
+/// @addtogroup flatbuffers_cpp_api
+/// @{
+/// @class FlatBufferBuilder
+/// @brief Helper class to hold data needed in creation of a FlatBuffer.
+/// To serialize data, you typically call one of the `Create*()` functions in
+/// the generated code, which in turn call a sequence of `StartTable`/
+/// `PushElement`/`AddElement`/`EndTable`, or the builtin `CreateString`/
+/// `CreateVector` functions. Do this in depth-first order to build up a tree to
+/// the root. `Finish()` wraps up the buffer ready for transport.
+class FlatBufferBuilder {
+ public:
+ /// @brief Default constructor for FlatBufferBuilder.
+ /// @param[in] initial_size The initial size of the buffer, in bytes. Defaults
+ /// to `1024`.
+ /// @param[in] allocator An `Allocator` to use. If null will use
+ /// `DefaultAllocator`.
+ /// @param[in] own_allocator Whether the builder/vector should own the
+ /// allocator. Defaults to `false`.
+ /// @param[in] buffer_minalign Force the buffer to be aligned to the given
+ /// minimum alignment upon reallocation. Only needed if you intend to store
+ /// types with custom alignment AND you wish to read the buffer in-place
+ /// directly after creation.
+ explicit FlatBufferBuilder(
+ size_t initial_size = 1024, Allocator *allocator = nullptr,
+ bool own_allocator = false,
+ size_t buffer_minalign = AlignOf<largest_scalar_t>())
+ : buf_(initial_size, allocator, own_allocator, buffer_minalign),
+ num_field_loc(0),
+ max_voffset_(0),
+ nested(false),
+ finished(false),
+ minalign_(1),
+ force_defaults_(false),
+ dedup_vtables_(true),
+ string_pool(nullptr) {
+ EndianCheck();
+ }
+
+ /// @brief Move constructor for FlatBufferBuilder.
+ FlatBufferBuilder(FlatBufferBuilder &&other)
+ : buf_(1024, nullptr, false, AlignOf<largest_scalar_t>()),
+ num_field_loc(0),
+ max_voffset_(0),
+ nested(false),
+ finished(false),
+ minalign_(1),
+ force_defaults_(false),
+ dedup_vtables_(true),
+ string_pool(nullptr) {
+ EndianCheck();
+ // Default construct and swap idiom.
+ // Lack of delegating constructors in vs2010 makes it more verbose than
+ // needed.
+ Swap(other);
+ }
+
+ /// @brief Move assignment operator for FlatBufferBuilder.
+ FlatBufferBuilder &operator=(FlatBufferBuilder &&other) {
+ // Move construct a temporary and swap idiom
+ FlatBufferBuilder temp(std::move(other));
+ Swap(temp);
+ return *this;
+ }
+
+ void Swap(FlatBufferBuilder &other) {
+ using std::swap;
+ buf_.swap(other.buf_);
+ swap(num_field_loc, other.num_field_loc);
+ swap(max_voffset_, other.max_voffset_);
+ swap(nested, other.nested);
+ swap(finished, other.finished);
+ swap(minalign_, other.minalign_);
+ swap(force_defaults_, other.force_defaults_);
+ swap(dedup_vtables_, other.dedup_vtables_);
+ swap(string_pool, other.string_pool);
+ }
+
+ ~FlatBufferBuilder() {
+ if (string_pool) delete string_pool;
+ }
+
+ void Reset() {
+ Clear(); // clear builder state
+ buf_.reset(); // deallocate buffer
+ }
+
+ /// @brief Reset all the state in this FlatBufferBuilder so it can be reused
+ /// to construct another buffer.
+ void Clear() {
+ ClearOffsets();
+ buf_.clear();
+ nested = false;
+ finished = false;
+ minalign_ = 1;
+ if (string_pool) string_pool->clear();
+ }
+
+ /// @brief The current size of the serialized buffer, counting from the end.
+ /// @return Returns an `uoffset_t` with the current size of the buffer.
+ uoffset_t GetSize() const { return buf_.size(); }
+
+ /// @brief Get the serialized buffer (after you call `Finish()`).
+ /// @return Returns an `uint8_t` pointer to the FlatBuffer data inside the
+ /// buffer.
+ uint8_t *GetBufferPointer() const {
+ Finished();
+ return buf_.data();
+ }
+
+ /// @brief Get the serialized buffer (after you call `Finish()`) as a span.
+ /// @return Returns a constructed flatbuffers::span that is a view over the
+ /// FlatBuffer data inside the buffer.
+ flatbuffers::span<uint8_t> GetBufferSpan() const {
+ Finished();
+ return flatbuffers::span<uint8_t>(buf_.data(), buf_.size());
+ }
+
+ /// @brief Get a pointer to an unfinished buffer.
+ /// @return Returns a `uint8_t` pointer to the unfinished buffer.
+ uint8_t *GetCurrentBufferPointer() const { return buf_.data(); }
+
+ /// @brief Get the released pointer to the serialized buffer.
+ /// @warning Do NOT attempt to use this FlatBufferBuilder afterwards!
+ /// @return A `FlatBuffer` that owns the buffer and its allocator and
+ /// behaves similar to a `unique_ptr` with a deleter.
+ FLATBUFFERS_ATTRIBUTE([[deprecated("use Release() instead")]])
+ DetachedBuffer ReleaseBufferPointer() {
+ Finished();
+ return buf_.release();
+ }
+
+ /// @brief Get the released DetachedBuffer.
+ /// @return A `DetachedBuffer` that owns the buffer and its allocator.
+ DetachedBuffer Release() {
+ Finished();
+ return buf_.release();
+ }
+
+ /// @brief Get the released pointer to the serialized buffer.
+ /// @param size The size of the memory block containing
+ /// the serialized `FlatBuffer`.
+ /// @param offset The offset from the released pointer where the finished
+ /// `FlatBuffer` starts.
+ /// @return A raw pointer to the start of the memory block containing
+ /// the serialized `FlatBuffer`.
+ /// @remark If the allocator is owned, it gets deleted when the destructor is
+ /// called.
+ uint8_t *ReleaseRaw(size_t &size, size_t &offset) {
+ Finished();
+ return buf_.release_raw(size, offset);
+ }
+
+ /// @brief get the minimum alignment this buffer needs to be accessed
+ /// properly. This is only known once all elements have been written (after
+ /// you call Finish()). You can use this information if you need to embed
+ /// a FlatBuffer in some other buffer, such that you can later read it
+ /// without first having to copy it into its own buffer.
+ size_t GetBufferMinAlignment() const {
+ Finished();
+ return minalign_;
+ }
+
+ /// @cond FLATBUFFERS_INTERNAL
+ void Finished() const {
+ // If you get this assert, you're attempting to access a buffer
+ // which hasn't been finished yet. Be sure to call
+ // FlatBufferBuilder::Finish with your root table.
+ // If you really need to access an unfinished buffer, call
+ // GetCurrentBufferPointer instead.
+ FLATBUFFERS_ASSERT(finished);
+ }
+ /// @endcond
+
+ /// @brief In order to save space, fields that are set to their default value
+ /// don't get serialized into the buffer.
+ /// @param[in] fd When set to `true`, always serializes default values that
+ /// are set. Optional fields which are not set explicitly will still not be
+ /// serialized.
+ void ForceDefaults(bool fd) { force_defaults_ = fd; }
+
+ /// @brief By default vtables are deduped in order to save space.
+ /// @param[in] dedup When set to `true`, dedup vtables.
+ void DedupVtables(bool dedup) { dedup_vtables_ = dedup; }
+
+ /// @cond FLATBUFFERS_INTERNAL
+ void Pad(size_t num_bytes) { buf_.fill(num_bytes); }
+
+ void TrackMinAlign(size_t elem_size) {
+ if (elem_size > minalign_) minalign_ = elem_size;
+ }
+
+ void Align(size_t elem_size) {
+ TrackMinAlign(elem_size);
+ buf_.fill(PaddingBytes(buf_.size(), elem_size));
+ }
+
+ void PushFlatBuffer(const uint8_t *bytes, size_t size) {
+ PushBytes(bytes, size);
+ finished = true;
+ }
+
+ void PushBytes(const uint8_t *bytes, size_t size) { buf_.push(bytes, size); }
+
+ void PopBytes(size_t amount) { buf_.pop(amount); }
+
+ template<typename T> void AssertScalarT() {
+ // The code assumes power of 2 sizes and endian-swap-ability.
+ static_assert(flatbuffers::is_scalar<T>::value, "T must be a scalar type");
+ }
+
+ // Write a single aligned scalar to the buffer
+ template<typename T> uoffset_t PushElement(T element) {
+ AssertScalarT<T>();
+ Align(sizeof(T));
+ buf_.push_small(EndianScalar(element));
+ return GetSize();
+ }
+
+ template<typename T> uoffset_t PushElement(Offset<T> off) {
+ // Special case for offsets: see ReferTo below.
+ return PushElement(ReferTo(off.o));
+ }
+
+ // When writing fields, we track where they are, so we can create correct
+ // vtables later.
+ void TrackField(voffset_t field, uoffset_t off) {
+ FieldLoc fl = { off, field };
+ buf_.scratch_push_small(fl);
+ num_field_loc++;
+ if (field > max_voffset_) {
+ max_voffset_ = field;
+ }
+ }
+
+ // Like PushElement, but additionally tracks the field this represents.
+ template<typename T> void AddElement(voffset_t field, T e, T def) {
+ // We don't serialize values equal to the default.
+ if (IsTheSameAs(e, def) && !force_defaults_) return;
+ TrackField(field, PushElement(e));
+ }
+
+ template<typename T> void AddElement(voffset_t field, T e) {
+ TrackField(field, PushElement(e));
+ }
+
+ template<typename T> void AddOffset(voffset_t field, Offset<T> off) {
+ if (off.IsNull()) return; // Don't store.
+ AddElement(field, ReferTo(off.o), static_cast<uoffset_t>(0));
+ }
+
+ template<typename T> void AddStruct(voffset_t field, const T *structptr) {
+ if (!structptr) return; // Default, don't store.
+ Align(AlignOf<T>());
+ buf_.push_small(*structptr);
+ TrackField(field, GetSize());
+ }
+
+ void AddStructOffset(voffset_t field, uoffset_t off) {
+ TrackField(field, off);
+ }
+
+ // Offsets initially are relative to the end of the buffer (downwards).
+ // This function converts them to be relative to the current location
+ // in the buffer (when stored here), pointing upwards.
+ uoffset_t ReferTo(uoffset_t off) {
+ // Align to ensure GetSize() below is correct.
+ Align(sizeof(uoffset_t));
+ // Offset must refer to something already in buffer.
+ const uoffset_t size = GetSize();
+ FLATBUFFERS_ASSERT(off && off <= size);
+ return size - off + static_cast<uoffset_t>(sizeof(uoffset_t));
+ }
+
+ void NotNested() {
+ // If you hit this, you're trying to construct a Table/Vector/String
+ // during the construction of its parent table (between the MyTableBuilder
+ // and table.Finish()).
+ // Move the creation of these sub-objects to above the MyTableBuilder to
+ // not get this assert.
+ // Ignoring this assert may appear to work in simple cases, but the reason
+ // it is here is that storing objects in-line may cause vtable offsets
+ // to not fit anymore. It also leads to vtable duplication.
+ FLATBUFFERS_ASSERT(!nested);
+ // If you hit this, fields were added outside the scope of a table.
+ FLATBUFFERS_ASSERT(!num_field_loc);
+ }
+
+ // From generated code (or from the parser), we call StartTable/EndTable
+ // with a sequence of AddElement calls in between.
+ uoffset_t StartTable() {
+ NotNested();
+ nested = true;
+ return GetSize();
+ }
+
+ // This finishes one serialized object by generating the vtable if it's a
+ // table, comparing it against existing vtables, and writing the
+ // resulting vtable offset.
+ uoffset_t EndTable(uoffset_t start) {
+ // If you get this assert, a corresponding StartTable wasn't called.
+ FLATBUFFERS_ASSERT(nested);
+ // Write the vtable offset, which is the start of any Table.
+ // We fill its value later.
+ auto vtableoffsetloc = PushElement<soffset_t>(0);
+ // Write a vtable, which consists entirely of voffset_t elements.
+ // It starts with the number of offsets, followed by a type id, followed
+ // by the offsets themselves. In reverse:
+ // Include space for the last offset and ensure empty tables have a
+ // minimum size.
+ max_voffset_ =
+ (std::max)(static_cast<voffset_t>(max_voffset_ + sizeof(voffset_t)),
+ FieldIndexToOffset(0));
+ buf_.fill_big(max_voffset_);
+ auto table_object_size = vtableoffsetloc - start;
+ // Vtables use 16-bit offsets.
+ FLATBUFFERS_ASSERT(table_object_size < 0x10000);
+ WriteScalar<voffset_t>(buf_.data() + sizeof(voffset_t),
+ static_cast<voffset_t>(table_object_size));
+ WriteScalar<voffset_t>(buf_.data(), max_voffset_);
+ // Write the offsets into the table
+ for (auto it = buf_.scratch_end() - num_field_loc * sizeof(FieldLoc);
+ it < buf_.scratch_end(); it += sizeof(FieldLoc)) {
+ auto field_location = reinterpret_cast<FieldLoc *>(it);
+ auto pos = static_cast<voffset_t>(vtableoffsetloc - field_location->off);
+ // If this asserts, it means you've set a field twice.
+ FLATBUFFERS_ASSERT(
+ !ReadScalar<voffset_t>(buf_.data() + field_location->id));
+ WriteScalar<voffset_t>(buf_.data() + field_location->id, pos);
+ }
+ ClearOffsets();
+ auto vt1 = reinterpret_cast<voffset_t *>(buf_.data());
+ auto vt1_size = ReadScalar<voffset_t>(vt1);
+ auto vt_use = GetSize();
+ // See if we already have generated a vtable with this exact same
+ // layout before. If so, make it point to the old one, remove this one.
+ if (dedup_vtables_) {
+ for (auto it = buf_.scratch_data(); it < buf_.scratch_end();
+ it += sizeof(uoffset_t)) {
+ auto vt_offset_ptr = reinterpret_cast<uoffset_t *>(it);
+ auto vt2 = reinterpret_cast<voffset_t *>(buf_.data_at(*vt_offset_ptr));
+ auto vt2_size = ReadScalar<voffset_t>(vt2);
+ if (vt1_size != vt2_size || 0 != memcmp(vt2, vt1, vt1_size)) continue;
+ vt_use = *vt_offset_ptr;
+ buf_.pop(GetSize() - vtableoffsetloc);
+ break;
+ }
+ }
+ // If this is a new vtable, remember it.
+ if (vt_use == GetSize()) { buf_.scratch_push_small(vt_use); }
+ // Fill the vtable offset we created above.
+ // The offset points from the beginning of the object to where the
+ // vtable is stored.
+ // Offsets default direction is downward in memory for future format
+ // flexibility (storing all vtables at the start of the file).
+ WriteScalar(buf_.data_at(vtableoffsetloc),
+ static_cast<soffset_t>(vt_use) -
+ static_cast<soffset_t>(vtableoffsetloc));
+
+ nested = false;
+ return vtableoffsetloc;
+ }
+
+ FLATBUFFERS_ATTRIBUTE([[deprecated("call the version above instead")]])
+ uoffset_t EndTable(uoffset_t start, voffset_t /*numfields*/) {
+ return EndTable(start);
+ }
+
+ // This checks a required field has been set in a given table that has
+ // just been constructed.
+ template<typename T> void Required(Offset<T> table, voffset_t field);
+
+ uoffset_t StartStruct(size_t alignment) {
+ Align(alignment);
+ return GetSize();
+ }
+
+ uoffset_t EndStruct() { return GetSize(); }
+
+ void ClearOffsets() {
+ buf_.scratch_pop(num_field_loc * sizeof(FieldLoc));
+ num_field_loc = 0;
+ max_voffset_ = 0;
+ }
+
+ // Aligns such that when "len" bytes are written, an object can be written
+ // after it with "alignment" without padding.
+ void PreAlign(size_t len, size_t alignment) {
+ TrackMinAlign(alignment);
+ buf_.fill(PaddingBytes(GetSize() + len, alignment));
+ }
+ template<typename T> void PreAlign(size_t len) {
+ AssertScalarT<T>();
+ PreAlign(len, sizeof(T));
+ }
+ /// @endcond
+
+ /// @brief Store a string in the buffer, which can contain any binary data.
+ /// @param[in] str A const char pointer to the data to be stored as a string.
+ /// @param[in] len The number of bytes that should be stored from `str`.
+ /// @return Returns the offset in the buffer where the string starts.
+ Offset<String> CreateString(const char *str, size_t len) {
+ NotNested();
+ PreAlign<uoffset_t>(len + 1); // Always 0-terminated.
+ buf_.fill(1);
+ PushBytes(reinterpret_cast<const uint8_t *>(str), len);
+ PushElement(static_cast<uoffset_t>(len));
+ return Offset<String>(GetSize());
+ }
+
+ /// @brief Store a string in the buffer, which is null-terminated.
+ /// @param[in] str A const char pointer to a C-string to add to the buffer.
+ /// @return Returns the offset in the buffer where the string starts.
+ Offset<String> CreateString(const char *str) {
+ return CreateString(str, strlen(str));
+ }
+
+ /// @brief Store a string in the buffer, which is null-terminated.
+ /// @param[in] str A char pointer to a C-string to add to the buffer.
+ /// @return Returns the offset in the buffer where the string starts.
+ Offset<String> CreateString(char *str) {
+ return CreateString(str, strlen(str));
+ }
+
+ /// @brief Store a string in the buffer, which can contain any binary data.
+ /// @param[in] str A const reference to a std::string to store in the buffer.
+ /// @return Returns the offset in the buffer where the string starts.
+ Offset<String> CreateString(const std::string &str) {
+ return CreateString(str.c_str(), str.length());
+ }
+
+ // clang-format off
+ #ifdef FLATBUFFERS_HAS_STRING_VIEW
+ /// @brief Store a string in the buffer, which can contain any binary data.
+ /// @param[in] str A const string_view to copy in to the buffer.
+ /// @return Returns the offset in the buffer where the string starts.
+ Offset<String> CreateString(flatbuffers::string_view str) {
+ return CreateString(str.data(), str.size());
+ }
+ #endif // FLATBUFFERS_HAS_STRING_VIEW
+ // clang-format on
+
+ /// @brief Store a string in the buffer, which can contain any binary data.
+ /// @param[in] str A const pointer to a `String` struct to add to the buffer.
+ /// @return Returns the offset in the buffer where the string starts
+ Offset<String> CreateString(const String *str) {
+ return str ? CreateString(str->c_str(), str->size()) : 0;
+ }
+
+ /// @brief Store a string in the buffer, which can contain any binary data.
+ /// @param[in] str A const reference to a std::string like type with support
+ /// of T::c_str() and T::length() to store in the buffer.
+ /// @return Returns the offset in the buffer where the string starts.
+ template<typename T> Offset<String> CreateString(const T &str) {
+ return CreateString(str.c_str(), str.length());
+ }
+
+ /// @brief Store a string in the buffer, which can contain any binary data.
+ /// If a string with this exact contents has already been serialized before,
+ /// instead simply returns the offset of the existing string. This uses a map
+ /// stored on the heap, but only stores the numerical offsets.
+ /// @param[in] str A const char pointer to the data to be stored as a string.
+ /// @param[in] len The number of bytes that should be stored from `str`.
+ /// @return Returns the offset in the buffer where the string starts.
+ Offset<String> CreateSharedString(const char *str, size_t len) {
+ FLATBUFFERS_ASSERT(FLATBUFFERS_GENERAL_HEAP_ALLOC_OK);
+ if (!string_pool)
+ string_pool = new StringOffsetMap(StringOffsetCompare(buf_));
+ auto size_before_string = buf_.size();
+ // Must first serialize the string, since the set is all offsets into
+ // buffer.
+ auto off = CreateString(str, len);
+ auto it = string_pool->find(off);
+ // If it exists we reuse existing serialized data!
+ if (it != string_pool->end()) {
+ // We can remove the string we serialized.
+ buf_.pop(buf_.size() - size_before_string);
+ return *it;
+ }
+ // Record this string for future use.
+ string_pool->insert(off);
+ return off;
+ }
+
+#ifdef FLATBUFFERS_HAS_STRING_VIEW
+ /// @brief Store a string in the buffer, which can contain any binary data.
+ /// If a string with this exact contents has already been serialized before,
+ /// instead simply returns the offset of the existing string. This uses a map
+ /// stored on the heap, but only stores the numerical offsets.
+ /// @param[in] str A const std::string_view to store in the buffer.
+ /// @return Returns the offset in the buffer where the string starts
+ Offset<String> CreateSharedString(const flatbuffers::string_view str) {
+ return CreateSharedString(str.data(), str.size());
+ }
+#else
+ /// @brief Store a string in the buffer, which is null-terminated.
+ /// If a string with this exact contents has already been serialized before,
+ /// instead simply returns the offset of the existing string. This uses a map
+ /// stored on the heap, but only stores the numerical offsets.
+ /// @param[in] str A const char pointer to a C-string to add to the buffer.
+ /// @return Returns the offset in the buffer where the string starts.
+ Offset<String> CreateSharedString(const char *str) {
+ return CreateSharedString(str, strlen(str));
+ }
+
+ /// @brief Store a string in the buffer, which can contain any binary data.
+ /// If a string with this exact contents has already been serialized before,
+ /// instead simply returns the offset of the existing string. This uses a map
+ /// stored on the heap, but only stores the numerical offsets.
+ /// @param[in] str A const reference to a std::string to store in the buffer.
+ /// @return Returns the offset in the buffer where the string starts.
+ Offset<String> CreateSharedString(const std::string &str) {
+ return CreateSharedString(str.c_str(), str.length());
+ }
+#endif
+
+ /// @brief Store a string in the buffer, which can contain any binary data.
+ /// If a string with this exact contents has already been serialized before,
+ /// instead simply returns the offset of the existing string. This uses a map
+ /// stored on the heap, but only stores the numerical offsets.
+ /// @param[in] str A const pointer to a `String` struct to add to the buffer.
+ /// @return Returns the offset in the buffer where the string starts
+ Offset<String> CreateSharedString(const String *str) {
+ return CreateSharedString(str->c_str(), str->size());
+ }
+
+ /// @cond FLATBUFFERS_INTERNAL
+ uoffset_t EndVector(size_t len) {
+ FLATBUFFERS_ASSERT(nested); // Hit if no corresponding StartVector.
+ nested = false;
+ return PushElement(static_cast<uoffset_t>(len));
+ }
+
+ void StartVector(size_t len, size_t elemsize) {
+ NotNested();
+ nested = true;
+ PreAlign<uoffset_t>(len * elemsize);
+ PreAlign(len * elemsize, elemsize); // Just in case elemsize > uoffset_t.
+ }
+
+ // Call this right before StartVector/CreateVector if you want to force the
+ // alignment to be something different than what the element size would
+ // normally dictate.
+ // This is useful when storing a nested_flatbuffer in a vector of bytes,
+ // or when storing SIMD floats, etc.
+ void ForceVectorAlignment(size_t len, size_t elemsize, size_t alignment) {
+ FLATBUFFERS_ASSERT(VerifyAlignmentRequirements(alignment));
+ PreAlign(len * elemsize, alignment);
+ }
+
+ // Similar to ForceVectorAlignment but for String fields.
+ void ForceStringAlignment(size_t len, size_t alignment) {
+ FLATBUFFERS_ASSERT(VerifyAlignmentRequirements(alignment));
+ PreAlign((len + 1) * sizeof(char), alignment);
+ }
+
+ /// @endcond
+
+ /// @brief Serialize an array into a FlatBuffer `vector`.
+ /// @tparam T The data type of the array elements.
+ /// @param[in] v A pointer to the array of type `T` to serialize into the
+ /// buffer as a `vector`.
+ /// @param[in] len The number of elements to serialize.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T> Offset<Vector<T>> CreateVector(const T *v, size_t len) {
+ // If this assert hits, you're specifying a template argument that is
+ // causing the wrong overload to be selected; remove it.
+ AssertScalarT<T>();
+ StartVector(len, sizeof(T));
+ if (len == 0) { return Offset<Vector<T>>(EndVector(len)); }
+ // clang-format off
+ #if FLATBUFFERS_LITTLEENDIAN
+ PushBytes(reinterpret_cast<const uint8_t *>(v), len * sizeof(T));
+ #else
+ if (sizeof(T) == 1) {
+ PushBytes(reinterpret_cast<const uint8_t *>(v), len);
+ } else {
+ for (auto i = len; i > 0; ) {
+ PushElement(v[--i]);
+ }
+ }
+ #endif
+ // clang-format on
+ return Offset<Vector<T>>(EndVector(len));
+ }
+
+ template<typename T>
+ Offset<Vector<Offset<T>>> CreateVector(const Offset<T> *v, size_t len) {
+ StartVector(len, sizeof(Offset<T>));
+ for (auto i = len; i > 0;) { PushElement(v[--i]); }
+ return Offset<Vector<Offset<T>>>(EndVector(len));
+ }
+
+ /// @brief Serialize a `std::vector` into a FlatBuffer `vector`.
+ /// @tparam T The data type of the `std::vector` elements.
+ /// @param v A const reference to the `std::vector` to serialize into the
+ /// buffer as a `vector`.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T, typename Alloc>
+ Offset<Vector<T>> CreateVector(const std::vector<T, Alloc> &v) {
+ return CreateVector(data(v), v.size());
+ }
+
+ // vector<bool> may be implemented using a bit-set, so we can't access it as
+ // an array. Instead, read elements manually.
+ // Background: https://isocpp.org/blog/2012/11/on-vectorbool
+ Offset<Vector<uint8_t>> CreateVector(const std::vector<bool> &v) {
+ StartVector(v.size(), sizeof(uint8_t));
+ for (auto i = v.size(); i > 0;) {
+ PushElement(static_cast<uint8_t>(v[--i]));
+ }
+ return Offset<Vector<uint8_t>>(EndVector(v.size()));
+ }
+
+ /// @brief Serialize values returned by a function into a FlatBuffer `vector`.
+ /// This is a convenience function that takes care of iteration for you.
+ /// @tparam T The data type of the `std::vector` elements.
+ /// @param f A function that takes the current iteration 0..vector_size-1 and
+ /// returns any type that you can construct a FlatBuffers vector out of.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T>
+ Offset<Vector<T>> CreateVector(size_t vector_size,
+ const std::function<T(size_t i)> &f) {
+ FLATBUFFERS_ASSERT(FLATBUFFERS_GENERAL_HEAP_ALLOC_OK);
+ std::vector<T> elems(vector_size);
+ for (size_t i = 0; i < vector_size; i++) elems[i] = f(i);
+ return CreateVector(elems);
+ }
+
+ /// @brief Serialize values returned by a function into a FlatBuffer `vector`.
+ /// This is a convenience function that takes care of iteration for you. This
+ /// uses a vector stored on the heap to store the intermediate results of the
+ /// iteration.
+ /// @tparam T The data type of the `std::vector` elements.
+ /// @param f A function that takes the current iteration 0..vector_size-1,
+ /// and the state parameter returning any type that you can construct a
+ /// FlatBuffers vector out of.
+ /// @param state State passed to f.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T, typename F, typename S>
+ Offset<Vector<T>> CreateVector(size_t vector_size, F f, S *state) {
+ FLATBUFFERS_ASSERT(FLATBUFFERS_GENERAL_HEAP_ALLOC_OK);
+ std::vector<T> elems(vector_size);
+ for (size_t i = 0; i < vector_size; i++) elems[i] = f(i, state);
+ return CreateVector(elems);
+ }
+
+ /// @brief Serialize a `std::vector<std::string>` into a FlatBuffer `vector`.
+ /// This is a convenience function for a common case.
+ /// @param v A const reference to the `std::vector` to serialize into the
+ /// buffer as a `vector`.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename Alloc>
+ Offset<Vector<Offset<String>>> CreateVectorOfStrings(
+ const std::vector<std::string, Alloc> &v) {
+ return CreateVectorOfStrings(v.cbegin(), v.cend());
+ }
+
+ /// @brief Serialize a collection of Strings into a FlatBuffer `vector`.
+ /// This is a convenience function for a common case.
+  /// @param begin The beginning iterator of the collection
+ /// @param end The ending iterator of the collection
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<class It>
+ Offset<Vector<Offset<String>>> CreateVectorOfStrings(It begin, It end) {
+ auto size = std::distance(begin, end);
+ auto scratch_buffer_usage = size * sizeof(Offset<String>);
+    // If there is not enough space to store the offsets, there definitely
+    // won't be enough space to store all the strings. So ensuring space for
+    // the scratch region is OK, for if it fails, it would have failed later.
+ buf_.ensure_space(scratch_buffer_usage);
+ for (auto it = begin; it != end; ++it) {
+ buf_.scratch_push_small(CreateString(*it));
+ }
+ StartVector(size, sizeof(Offset<String>));
+ for (auto i = 1; i <= size; i++) {
+ // Note we re-evaluate the buf location each iteration to account for any
+ // underlying buffer resizing that may occur.
+ PushElement(*reinterpret_cast<Offset<String> *>(
+ buf_.scratch_end() - i * sizeof(Offset<String>)));
+ }
+ buf_.scratch_pop(scratch_buffer_usage);
+ return Offset<Vector<Offset<String>>>(EndVector(size));
+ }
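+
+  // Example (a sketch; `names` is an illustrative vector):
+  //
+  //   std::vector<std::string> names = { "axe", "bow", "sword" };
+  //   auto names_vec = fbb.CreateVectorOfStrings(names);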
+
+ /// @brief Serialize an array of structs into a FlatBuffer `vector`.
+ /// @tparam T The data type of the struct array elements.
+ /// @param[in] v A pointer to the array of type `T` to serialize into the
+ /// buffer as a `vector`.
+ /// @param[in] len The number of elements to serialize.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T>
+ Offset<Vector<const T *>> CreateVectorOfStructs(const T *v, size_t len) {
+ StartVector(len * sizeof(T) / AlignOf<T>(), AlignOf<T>());
+ if (len > 0) {
+ PushBytes(reinterpret_cast<const uint8_t *>(v), sizeof(T) * len);
+ }
+ return Offset<Vector<const T *>>(EndVector(len));
+ }
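+
+  // Example (a sketch; `Vec3` stands in for a schema-generated struct type
+  // and is assumed, not defined in this header):
+  //
+  //   Vec3 points[2] = { Vec3(1.0f, 2.0f, 3.0f), Vec3(4.0f, 5.0f, 6.0f) };
+  //   auto points_vec = fbb.CreateVectorOfStructs(points, 2);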
+
+ /// @brief Serialize an array of native structs into a FlatBuffer `vector`.
+ /// @tparam T The data type of the struct array elements.
+ /// @tparam S The data type of the native struct array elements.
+ /// @param[in] v A pointer to the array of type `S` to serialize into the
+ /// buffer as a `vector`.
+ /// @param[in] len The number of elements to serialize.
+ /// @param[in] pack_func Pointer to a function to convert the native struct
+ /// to the FlatBuffer struct.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T, typename S>
+ Offset<Vector<const T *>> CreateVectorOfNativeStructs(
+ const S *v, size_t len, T (*const pack_func)(const S &)) {
+ FLATBUFFERS_ASSERT(pack_func);
+ auto structs = StartVectorOfStructs<T>(len);
+ for (size_t i = 0; i < len; i++) { structs[i] = pack_func(v[i]); }
+ return EndVectorOfStructs<T>(len);
+ }
+
+ /// @brief Serialize an array of native structs into a FlatBuffer `vector`.
+ /// @tparam T The data type of the struct array elements.
+ /// @tparam S The data type of the native struct array elements.
+ /// @param[in] v A pointer to the array of type `S` to serialize into the
+ /// buffer as a `vector`.
+ /// @param[in] len The number of elements to serialize.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T, typename S>
+ Offset<Vector<const T *>> CreateVectorOfNativeStructs(const S *v,
+ size_t len) {
+ extern T Pack(const S &);
+ return CreateVectorOfNativeStructs(v, len, Pack);
+ }
+
+ /// @brief Serialize an array of structs into a FlatBuffer `vector`.
+ /// @tparam T The data type of the struct array elements.
+ /// @param[in] filler A function that takes the current iteration
+ /// 0..vector_size-1 and a pointer to the struct that must be filled.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ /// This is mostly useful when flatbuffers are generated with mutation
+ /// accessors.
+ template<typename T>
+ Offset<Vector<const T *>> CreateVectorOfStructs(
+ size_t vector_size, const std::function<void(size_t i, T *)> &filler) {
+ T *structs = StartVectorOfStructs<T>(vector_size);
+ for (size_t i = 0; i < vector_size; i++) {
+ filler(i, structs);
+ structs++;
+ }
+ return EndVectorOfStructs<T>(vector_size);
+ }
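+
+  // Example (a sketch; `Vec3` and its mutate_x() accessor are assumed to come
+  // from generated code built with mutation accessors):
+  //
+  //   std::function<void(size_t, Vec3 *)> filler = [](size_t i, Vec3 *p) {
+  //     p->mutate_x(static_cast<float>(i));
+  //   };
+  //   auto structs_vec = fbb.CreateVectorOfStructs<Vec3>(3, filler);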
+
+ /// @brief Serialize an array of structs into a FlatBuffer `vector`.
+ /// @tparam T The data type of the struct array elements.
+ /// @param[in] f A function that takes the current iteration 0..vector_size-1,
+ /// a pointer to the struct that must be filled and the state argument.
+ /// @param[in] state Arbitrary state to pass to f.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ /// This is mostly useful when flatbuffers are generated with mutation
+ /// accessors.
+ template<typename T, typename F, typename S>
+ Offset<Vector<const T *>> CreateVectorOfStructs(size_t vector_size, F f,
+ S *state) {
+ T *structs = StartVectorOfStructs<T>(vector_size);
+ for (size_t i = 0; i < vector_size; i++) {
+ f(i, structs, state);
+ structs++;
+ }
+ return EndVectorOfStructs<T>(vector_size);
+ }
+
+ /// @brief Serialize a `std::vector` of structs into a FlatBuffer `vector`.
+ /// @tparam T The data type of the `std::vector` struct elements.
+ /// @param[in] v A const reference to the `std::vector` of structs to
+ /// serialize into the buffer as a `vector`.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T, typename Alloc>
+ Offset<Vector<const T *>> CreateVectorOfStructs(
+ const std::vector<T, Alloc> &v) {
+ return CreateVectorOfStructs(data(v), v.size());
+ }
+
+ /// @brief Serialize a `std::vector` of native structs into a FlatBuffer
+ /// `vector`.
+ /// @tparam T The data type of the `std::vector` struct elements.
+ /// @tparam S The data type of the `std::vector` native struct elements.
+ /// @param[in] v A const reference to the `std::vector` of structs to
+ /// serialize into the buffer as a `vector`.
+ /// @param[in] pack_func Pointer to a function to convert the native struct
+ /// to the FlatBuffer struct.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T, typename S, typename Alloc>
+ Offset<Vector<const T *>> CreateVectorOfNativeStructs(
+ const std::vector<S, Alloc> &v, T (*const pack_func)(const S &)) {
+ return CreateVectorOfNativeStructs<T, S>(data(v), v.size(), pack_func);
+ }
+
+ /// @brief Serialize a `std::vector` of native structs into a FlatBuffer
+ /// `vector`.
+ /// @tparam T The data type of the `std::vector` struct elements.
+ /// @tparam S The data type of the `std::vector` native struct elements.
+ /// @param[in] v A const reference to the `std::vector` of structs to
+ /// serialize into the buffer as a `vector`.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T, typename S, typename Alloc>
+ Offset<Vector<const T *>> CreateVectorOfNativeStructs(
+ const std::vector<S, Alloc> &v) {
+ return CreateVectorOfNativeStructs<T, S>(data(v), v.size());
+ }
+
+ /// @cond FLATBUFFERS_INTERNAL
+ template<typename T> struct StructKeyComparator {
+ bool operator()(const T &a, const T &b) const {
+ return a.KeyCompareLessThan(&b);
+ }
+ };
+ /// @endcond
+
+ /// @brief Serialize a `std::vector` of structs into a FlatBuffer `vector`
+ /// in sorted order.
+ /// @tparam T The data type of the `std::vector` struct elements.
+ /// @param[in] v A const reference to the `std::vector` of structs to
+ /// serialize into the buffer as a `vector`.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T, typename Alloc>
+ Offset<Vector<const T *>> CreateVectorOfSortedStructs(
+ std::vector<T, Alloc> *v) {
+ return CreateVectorOfSortedStructs(data(*v), v->size());
+ }
+
+ /// @brief Serialize a `std::vector` of native structs into a FlatBuffer
+ /// `vector` in sorted order.
+ /// @tparam T The data type of the `std::vector` struct elements.
+ /// @tparam S The data type of the `std::vector` native struct elements.
+ /// @param[in] v A const reference to the `std::vector` of structs to
+ /// serialize into the buffer as a `vector`.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T, typename S, typename Alloc>
+ Offset<Vector<const T *>> CreateVectorOfSortedNativeStructs(
+ std::vector<S, Alloc> *v) {
+ return CreateVectorOfSortedNativeStructs<T, S>(data(*v), v->size());
+ }
+
+ /// @brief Serialize an array of structs into a FlatBuffer `vector` in sorted
+ /// order.
+ /// @tparam T The data type of the struct array elements.
+ /// @param[in] v A pointer to the array of type `T` to serialize into the
+ /// buffer as a `vector`.
+ /// @param[in] len The number of elements to serialize.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T>
+ Offset<Vector<const T *>> CreateVectorOfSortedStructs(T *v, size_t len) {
+ std::sort(v, v + len, StructKeyComparator<T>());
+ return CreateVectorOfStructs(v, len);
+ }
+
+ /// @brief Serialize an array of native structs into a FlatBuffer `vector` in
+ /// sorted order.
+ /// @tparam T The data type of the struct array elements.
+ /// @tparam S The data type of the native struct array elements.
+ /// @param[in] v A pointer to the array of type `S` to serialize into the
+ /// buffer as a `vector`.
+ /// @param[in] len The number of elements to serialize.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T, typename S>
+ Offset<Vector<const T *>> CreateVectorOfSortedNativeStructs(S *v,
+ size_t len) {
+ extern T Pack(const S &);
+ auto structs = StartVectorOfStructs<T>(len);
+ for (size_t i = 0; i < len; i++) { structs[i] = Pack(v[i]); }
+ std::sort(structs, structs + len, StructKeyComparator<T>());
+ return EndVectorOfStructs<T>(len);
+ }
+
+ /// @cond FLATBUFFERS_INTERNAL
+ template<typename T> struct TableKeyComparator {
+ TableKeyComparator(vector_downward &buf) : buf_(buf) {}
+ TableKeyComparator(const TableKeyComparator &other) : buf_(other.buf_) {}
+ bool operator()(const Offset<T> &a, const Offset<T> &b) const {
+ auto table_a = reinterpret_cast<T *>(buf_.data_at(a.o));
+ auto table_b = reinterpret_cast<T *>(buf_.data_at(b.o));
+ return table_a->KeyCompareLessThan(table_b);
+ }
+ vector_downward &buf_;
+
+ private:
+ FLATBUFFERS_DELETE_FUNC(
+ TableKeyComparator &operator=(const TableKeyComparator &other));
+ };
+ /// @endcond
+
+ /// @brief Serialize an array of `table` offsets as a `vector` in the buffer
+ /// in sorted order.
+ /// @tparam T The data type that the offset refers to.
+ /// @param[in] v An array of type `Offset<T>` that contains the `table`
+ /// offsets to store in the buffer in sorted order.
+ /// @param[in] len The number of elements to store in the `vector`.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T>
+ Offset<Vector<Offset<T>>> CreateVectorOfSortedTables(Offset<T> *v,
+ size_t len) {
+ std::sort(v, v + len, TableKeyComparator<T>(buf_));
+ return CreateVector(v, len);
+ }
+
+ /// @brief Serialize an array of `table` offsets as a `vector` in the buffer
+ /// in sorted order.
+ /// @tparam T The data type that the offset refers to.
+ /// @param[in] v An array of type `Offset<T>` that contains the `table`
+ /// offsets to store in the buffer in sorted order.
+ /// @return Returns a typed `Offset` into the serialized data indicating
+ /// where the vector is stored.
+ template<typename T, typename Alloc>
+ Offset<Vector<Offset<T>>> CreateVectorOfSortedTables(
+ std::vector<Offset<T>, Alloc> *v) {
+ return CreateVectorOfSortedTables(data(*v), v->size());
+ }
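+
+  // Example (a sketch; `Monster` and CreateMonster() stand in for a generated
+  // table type whose schema declares a key field, so KeyCompareLessThan() is
+  // generated for it):
+  //
+  //   std::vector<flatbuffers::Offset<Monster>> monsters;
+  //   // ... fill `monsters` with offsets returned by CreateMonster() ...
+  //   auto sorted = fbb.CreateVectorOfSortedTables(&monsters);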
+
+ /// @brief Specialized version of `CreateVector` for non-copying use cases.
+ /// Write the data any time later to the returned buffer pointer `buf`.
+ /// @param[in] len The number of elements to store in the `vector`.
+ /// @param[in] elemsize The size of each element in the `vector`.
+ /// @param[out] buf A pointer to a `uint8_t` pointer that can be
+ /// written to at a later time to serialize the data into a `vector`
+ /// in the buffer.
+ uoffset_t CreateUninitializedVector(size_t len, size_t elemsize,
+ uint8_t **buf) {
+ NotNested();
+ StartVector(len, elemsize);
+ buf_.make_space(len * elemsize);
+ auto vec_start = GetSize();
+ auto vec_end = EndVector(len);
+ *buf = buf_.data_at(vec_start);
+ return vec_end;
+ }
+
+ /// @brief Specialized version of `CreateVector` for non-copying use cases.
+ /// Write the data any time later to the returned buffer pointer `buf`.
+ /// @tparam T The data type of the data that will be stored in the buffer
+ /// as a `vector`.
+ /// @param[in] len The number of elements to store in the `vector`.
+ /// @param[out] buf A pointer to a pointer of type `T` that can be
+ /// written to at a later time to serialize the data into a `vector`
+ /// in the buffer.
+ template<typename T>
+ Offset<Vector<T>> CreateUninitializedVector(size_t len, T **buf) {
+ AssertScalarT<T>();
+ return CreateUninitializedVector(len, sizeof(T),
+ reinterpret_cast<uint8_t **>(buf));
+ }
+
+ template<typename T>
+ Offset<Vector<const T *>> CreateUninitializedVectorOfStructs(size_t len,
+ T **buf) {
+ return CreateUninitializedVector(len, sizeof(T),
+ reinterpret_cast<uint8_t **>(buf));
+ }
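+
+  // Example (a sketch): reserve space first, then fill it in place. `raw` is
+  // an illustrative name.
+  //
+  //   uint8_t *raw = nullptr;
+  //   auto vec = fbb.CreateUninitializedVector(10, &raw);
+  //   for (int i = 0; i < 10; i++) raw[i] = static_cast<uint8_t>(i);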
+
+  /// @brief Create a vector of scalar type T given as input an array of
+  /// scalar type U, useful with e.g. pre "enum class" enums, or any existing
+  /// scalar data of the wrong type.
+ template<typename T, typename U>
+ Offset<Vector<T>> CreateVectorScalarCast(const U *v, size_t len) {
+ AssertScalarT<T>();
+ AssertScalarT<U>();
+ StartVector(len, sizeof(T));
+ for (auto i = len; i > 0;) { PushElement(static_cast<T>(v[--i])); }
+ return Offset<Vector<T>>(EndVector(len));
+ }
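+
+  // Example (a sketch): widening legacy int8_t enum values to the int16_t
+  // storage type a schema expects; names are illustrative.
+  //
+  //   int8_t legacy[] = { 0, 1, 2 };
+  //   auto widened = fbb.CreateVectorScalarCast<int16_t>(legacy, 3);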
+
+ /// @brief Write a struct by itself, typically to be part of a union.
+ template<typename T> Offset<const T *> CreateStruct(const T &structobj) {
+ NotNested();
+ Align(AlignOf<T>());
+ buf_.push_small(structobj);
+ return Offset<const T *>(GetSize());
+ }
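+
+  // Example (a sketch; `Vec3` is an assumed generated struct, and the result
+  // would typically be stored in a union-typed field):
+  //
+  //   Vec3 pos(1.0f, 2.0f, 3.0f);
+  //   auto pos_offset = fbb.CreateStruct(pos);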
+
+ /// @brief Finish serializing a buffer by writing the root offset.
+ /// @param[in] file_identifier If a `file_identifier` is given, the buffer
+ /// will be prefixed with a standard FlatBuffers file header.
+ template<typename T>
+ void Finish(Offset<T> root, const char *file_identifier = nullptr) {
+ Finish(root.o, file_identifier, false);
+ }
+
+ /// @brief Finish a buffer with a 32 bit size field pre-fixed (size of the
+ /// buffer following the size field). These buffers are NOT compatible
+  /// with standard buffers created by Finish, i.e. you can't call GetRoot
+  /// on them; you have to use GetSizePrefixedRoot instead.
+ /// All >32 bit quantities in this buffer will be aligned when the whole
+ /// size pre-fixed buffer is aligned.
+ /// These kinds of buffers are useful for creating a stream of FlatBuffers.
+ template<typename T>
+ void FinishSizePrefixed(Offset<T> root,
+ const char *file_identifier = nullptr) {
+ Finish(root.o, file_identifier, true);
+ }
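+
+  // Example (a sketch; `root` is an assumed Offset<Monster> obtained from
+  // generated Create*() code, and "MONS" is an illustrative 4-character file
+  // identifier):
+  //
+  //   fbb.Finish(root, "MONS");
+  //   const uint8_t *buf = fbb.GetBufferPointer();
+  //   const size_t size = fbb.GetSize();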
+
+ void SwapBufAllocator(FlatBufferBuilder &other) {
+ buf_.swap_allocator(other.buf_);
+ }
+
+ /// @brief The length of a FlatBuffer file header.
+ static const size_t kFileIdentifierLength =
+ ::flatbuffers::kFileIdentifierLength;
+
+ protected:
+ // You shouldn't really be copying instances of this class.
+ FlatBufferBuilder(const FlatBufferBuilder &);
+ FlatBufferBuilder &operator=(const FlatBufferBuilder &);
+
+ void Finish(uoffset_t root, const char *file_identifier, bool size_prefix) {
+ NotNested();
+ buf_.clear_scratch();
+ // This will cause the whole buffer to be aligned.
+ PreAlign((size_prefix ? sizeof(uoffset_t) : 0) + sizeof(uoffset_t) +
+ (file_identifier ? kFileIdentifierLength : 0),
+ minalign_);
+ if (file_identifier) {
+ FLATBUFFERS_ASSERT(strlen(file_identifier) == kFileIdentifierLength);
+ PushBytes(reinterpret_cast<const uint8_t *>(file_identifier),
+ kFileIdentifierLength);
+ }
+ PushElement(ReferTo(root)); // Location of root.
+ if (size_prefix) { PushElement(GetSize()); }
+ finished = true;
+ }
+
+ struct FieldLoc {
+ uoffset_t off;
+ voffset_t id;
+ };
+
+ vector_downward buf_;
+
+  // Accumulating offsets of table members while the table is being built.
+ // We store these in the scratch pad of buf_, after the vtable offsets.
+ uoffset_t num_field_loc;
+ // Track how much of the vtable is in use, so we can output the most compact
+ // possible vtable.
+ voffset_t max_voffset_;
+
+ // Ensure objects are not nested.
+ bool nested;
+
+ // Ensure the buffer is finished before it is being accessed.
+ bool finished;
+
+ size_t minalign_;
+
+ bool force_defaults_; // Serialize values equal to their defaults anyway.
+
+ bool dedup_vtables_;
+
+ struct StringOffsetCompare {
+ StringOffsetCompare(const vector_downward &buf) : buf_(&buf) {}
+ bool operator()(const Offset<String> &a, const Offset<String> &b) const {
+ auto stra = reinterpret_cast<const String *>(buf_->data_at(a.o));
+ auto strb = reinterpret_cast<const String *>(buf_->data_at(b.o));
+ return StringLessThan(stra->data(), stra->size(), strb->data(),
+ strb->size());
+ }
+ const vector_downward *buf_;
+ };
+
+ // For use with CreateSharedString. Instantiated on first use only.
+ typedef std::set<Offset<String>, StringOffsetCompare> StringOffsetMap;
+ StringOffsetMap *string_pool;
+
+ private:
+ // Allocates space for a vector of structures.
+ // Must be completed with EndVectorOfStructs().
+ template<typename T> T *StartVectorOfStructs(size_t vector_size) {
+ StartVector(vector_size * sizeof(T) / AlignOf<T>(), AlignOf<T>());
+ return reinterpret_cast<T *>(buf_.make_space(vector_size * sizeof(T)));
+ }
+
+  // End the vector of structures in the FlatBuffer.
+  // The vector should have previously been started with StartVectorOfStructs().
+ template<typename T>
+ Offset<Vector<const T *>> EndVectorOfStructs(size_t vector_size) {
+ return Offset<Vector<const T *>>(EndVector(vector_size));
+ }
+};
+/// @}
+
+/// Helpers to get a typed pointer to objects that are currently being built.
+/// @warning Creating new objects will lead to reallocations and invalidates
+/// the pointer!
+template<typename T>
+T *GetMutableTemporaryPointer(FlatBufferBuilder &fbb, Offset<T> offset) {
+ return reinterpret_cast<T *>(fbb.GetCurrentBufferPointer() + fbb.GetSize() -
+ offset.o);
+}
+
+template<typename T>
+const T *GetTemporaryPointer(FlatBufferBuilder &fbb, Offset<T> offset) {
+ return GetMutableTemporaryPointer<T>(fbb, offset);
+}
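+
+// Example (a sketch; `fbb` and `table_offset` are illustrative, and the
+// returned pointer is only valid until the next allocation in `fbb`):
+//
+//   auto *tmp = flatbuffers::GetMutableTemporaryPointer(fbb, table_offset);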
+
+template<typename T>
+void FlatBufferBuilder::Required(Offset<T> table, voffset_t field) {
+ auto table_ptr = reinterpret_cast<const Table *>(buf_.data_at(table.o));
+ bool ok = table_ptr->GetOptionalFieldOffset(field) != 0;
+ // If this fails, the caller will show what field needs to be set.
+ FLATBUFFERS_ASSERT(ok);
+ (void)ok;
+}
+
+} // namespace flatbuffers
+
+#endif // FLATBUFFERS_VECTOR_DOWNWARD_H_
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/flatbuffers.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/flatbuffers.h
index c073f3b0550..c903d646c1b 100644
--- a/chromium/third_party/flatbuffers/src/include/flatbuffers/flatbuffers.h
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/flatbuffers.h
@@ -17,2789 +17,23 @@
#ifndef FLATBUFFERS_H_
#define FLATBUFFERS_H_
+// TODO: These includes are for mitigating the pains of users editing their
+// source because they relied on flatbuffers.h to include everything for them.
+#include "flatbuffers/array.h"
#include "flatbuffers/base.h"
+#include "flatbuffers/buffer.h"
+#include "flatbuffers/buffer_ref.h"
+#include "flatbuffers/detached_buffer.h"
+#include "flatbuffers/flatbuffer_builder.h"
#include "flatbuffers/stl_emulation.h"
-
-#ifndef FLATBUFFERS_CPP98_STL
-# include <functional>
-#endif
-
-#if defined(FLATBUFFERS_NAN_DEFAULTS)
-# include <cmath>
-#endif
+#include "flatbuffers/string.h"
+#include "flatbuffers/struct.h"
+#include "flatbuffers/table.h"
+#include "flatbuffers/vector.h"
+#include "flatbuffers/vector_downward.h"
+#include "flatbuffers/verifier.h"
namespace flatbuffers {
-// Generic 'operator==' with conditional specialisations.
-// T e - new value of a scalar field.
-// T def - default of scalar (is known at compile-time).
-template<typename T> inline bool IsTheSameAs(T e, T def) { return e == def; }
-
-#if defined(FLATBUFFERS_NAN_DEFAULTS) && \
- defined(FLATBUFFERS_HAS_NEW_STRTOD) && (FLATBUFFERS_HAS_NEW_STRTOD > 0)
-// Like `operator==(e, def)` with weak NaN if T=(float|double).
-template<typename T> inline bool IsFloatTheSameAs(T e, T def) {
- return (e == def) || ((def != def) && (e != e));
-}
-template<> inline bool IsTheSameAs<float>(float e, float def) {
- return IsFloatTheSameAs(e, def);
-}
-template<> inline bool IsTheSameAs<double>(double e, double def) {
- return IsFloatTheSameAs(e, def);
-}
-#endif
-
-// Check 'v' is out of closed range [low; high].
-// Workaround for GCC warning [-Werror=type-limits]:
-// comparison is always true due to limited range of data type.
-template<typename T>
-inline bool IsOutRange(const T &v, const T &low, const T &high) {
- return (v < low) || (high < v);
-}
-
-// Check 'v' is in closed range [low; high].
-template<typename T>
-inline bool IsInRange(const T &v, const T &low, const T &high) {
- return !IsOutRange(v, low, high);
-}
-
-// Wrapper for uoffset_t to allow safe template specialization.
-// Value is allowed to be 0 to indicate a null object (see e.g. AddOffset).
-template<typename T> struct Offset {
- uoffset_t o;
- Offset() : o(0) {}
- Offset(uoffset_t _o) : o(_o) {}
- Offset<void> Union() const { return Offset<void>(o); }
- bool IsNull() const { return !o; }
-};
-
-inline void EndianCheck() {
- int endiantest = 1;
- // If this fails, see FLATBUFFERS_LITTLEENDIAN above.
- FLATBUFFERS_ASSERT(*reinterpret_cast<char *>(&endiantest) ==
- FLATBUFFERS_LITTLEENDIAN);
- (void)endiantest;
-}
-
-template<typename T> FLATBUFFERS_CONSTEXPR size_t AlignOf() {
- // clang-format off
- #ifdef _MSC_VER
- return __alignof(T);
- #else
- #ifndef alignof
- return __alignof__(T);
- #else
- return alignof(T);
- #endif
- #endif
- // clang-format on
-}
-
-// When we read serialized data from memory, in the case of most scalars,
-// we want to just read T, but in the case of Offset, we want to actually
-// perform the indirection and return a pointer.
-// The template specialization below does just that.
-// It is wrapped in a struct since function templates can't overload on the
-// return type like this.
-// The typedef is for the convenience of callers of this function
-// (avoiding the need for a trailing return decltype)
-template<typename T> struct IndirectHelper {
- typedef T return_type;
- typedef T mutable_return_type;
- static const size_t element_stride = sizeof(T);
- static return_type Read(const uint8_t *p, uoffset_t i) {
- return EndianScalar((reinterpret_cast<const T *>(p))[i]);
- }
-};
-template<typename T> struct IndirectHelper<Offset<T>> {
- typedef const T *return_type;
- typedef T *mutable_return_type;
- static const size_t element_stride = sizeof(uoffset_t);
- static return_type Read(const uint8_t *p, uoffset_t i) {
- p += i * sizeof(uoffset_t);
- return reinterpret_cast<return_type>(p + ReadScalar<uoffset_t>(p));
- }
-};
-template<typename T> struct IndirectHelper<const T *> {
- typedef const T *return_type;
- typedef T *mutable_return_type;
- static const size_t element_stride = sizeof(T);
- static return_type Read(const uint8_t *p, uoffset_t i) {
- return reinterpret_cast<const T *>(p + i * sizeof(T));
- }
-};
-
-// An STL compatible iterator implementation for Vector below, effectively
-// calling Get() for every element.
-template<typename T, typename IT> struct VectorIterator {
- typedef std::random_access_iterator_tag iterator_category;
- typedef IT value_type;
- typedef ptrdiff_t difference_type;
- typedef IT *pointer;
- typedef IT &reference;
-
- VectorIterator(const uint8_t *data, uoffset_t i)
- : data_(data + IndirectHelper<T>::element_stride * i) {}
- VectorIterator(const VectorIterator &other) : data_(other.data_) {}
- VectorIterator() : data_(nullptr) {}
-
- VectorIterator &operator=(const VectorIterator &other) {
- data_ = other.data_;
- return *this;
- }
-
- // clang-format off
- #if !defined(FLATBUFFERS_CPP98_STL)
- VectorIterator &operator=(VectorIterator &&other) {
- data_ = other.data_;
- return *this;
- }
- #endif // !defined(FLATBUFFERS_CPP98_STL)
- // clang-format on
-
- bool operator==(const VectorIterator &other) const {
- return data_ == other.data_;
- }
-
- bool operator<(const VectorIterator &other) const {
- return data_ < other.data_;
- }
-
- bool operator!=(const VectorIterator &other) const {
- return data_ != other.data_;
- }
-
- difference_type operator-(const VectorIterator &other) const {
- return (data_ - other.data_) / IndirectHelper<T>::element_stride;
- }
-
- // Note: return type is incompatible with the standard
- // `reference operator*()`.
- IT operator*() const { return IndirectHelper<T>::Read(data_, 0); }
-
- // Note: return type is incompatible with the standard
- // `pointer operator->()`.
- IT operator->() const { return IndirectHelper<T>::Read(data_, 0); }
-
- VectorIterator &operator++() {
- data_ += IndirectHelper<T>::element_stride;
- return *this;
- }
-
- VectorIterator operator++(int) {
- VectorIterator temp(data_, 0);
- data_ += IndirectHelper<T>::element_stride;
- return temp;
- }
-
- VectorIterator operator+(const uoffset_t &offset) const {
- return VectorIterator(data_ + offset * IndirectHelper<T>::element_stride,
- 0);
- }
-
- VectorIterator &operator+=(const uoffset_t &offset) {
- data_ += offset * IndirectHelper<T>::element_stride;
- return *this;
- }
-
- VectorIterator &operator--() {
- data_ -= IndirectHelper<T>::element_stride;
- return *this;
- }
-
- VectorIterator operator--(int) {
- VectorIterator temp(data_, 0);
- data_ -= IndirectHelper<T>::element_stride;
- return temp;
- }
-
- VectorIterator operator-(const uoffset_t &offset) const {
- return VectorIterator(data_ - offset * IndirectHelper<T>::element_stride,
- 0);
- }
-
- VectorIterator &operator-=(const uoffset_t &offset) {
- data_ -= offset * IndirectHelper<T>::element_stride;
- return *this;
- }
-
- private:
- const uint8_t *data_;
-};
-
-template<typename Iterator>
-struct VectorReverseIterator : public std::reverse_iterator<Iterator> {
- explicit VectorReverseIterator(Iterator iter)
- : std::reverse_iterator<Iterator>(iter) {}
-
- // Note: return type is incompatible with the standard
- // `reference operator*()`.
- typename Iterator::value_type operator*() const {
- auto tmp = std::reverse_iterator<Iterator>::current;
- return *--tmp;
- }
-
- // Note: return type is incompatible with the standard
- // `pointer operator->()`.
- typename Iterator::value_type operator->() const {
- auto tmp = std::reverse_iterator<Iterator>::current;
- return *--tmp;
- }
-};
-
-struct String;
-
-// This is used as a helper type for accessing vectors.
-// Vector::data() assumes the vector elements start after the length field.
-template<typename T> class Vector {
- public:
- typedef VectorIterator<T, typename IndirectHelper<T>::mutable_return_type>
- iterator;
- typedef VectorIterator<T, typename IndirectHelper<T>::return_type>
- const_iterator;
- typedef VectorReverseIterator<iterator> reverse_iterator;
- typedef VectorReverseIterator<const_iterator> const_reverse_iterator;
-
- typedef typename flatbuffers::bool_constant<flatbuffers::is_scalar<T>::value>
- scalar_tag;
-
- static FLATBUFFERS_CONSTEXPR bool is_span_observable =
- scalar_tag::value && (FLATBUFFERS_LITTLEENDIAN || sizeof(T) == 1);
-
- uoffset_t size() const { return EndianScalar(length_); }
-
- // Deprecated: use size(). Here for backwards compatibility.
- FLATBUFFERS_ATTRIBUTE(deprecated("use size() instead"))
- uoffset_t Length() const { return size(); }
-
- typedef typename IndirectHelper<T>::return_type return_type;
- typedef typename IndirectHelper<T>::mutable_return_type mutable_return_type;
- typedef return_type value_type;
-
- return_type Get(uoffset_t i) const {
- FLATBUFFERS_ASSERT(i < size());
- return IndirectHelper<T>::Read(Data(), i);
- }
-
- return_type operator[](uoffset_t i) const { return Get(i); }
-
- // If this is a Vector of enums, T will be its storage type, not the enum
- // type. This function makes it convenient to retrieve value with enum
- // type E.
- template<typename E> E GetEnum(uoffset_t i) const {
- return static_cast<E>(Get(i));
- }
-
- // If this a vector of unions, this does the cast for you. There's no check
- // to make sure this is the right type!
- template<typename U> const U *GetAs(uoffset_t i) const {
- return reinterpret_cast<const U *>(Get(i));
- }
-
- // If this a vector of unions, this does the cast for you. There's no check
- // to make sure this is actually a string!
- const String *GetAsString(uoffset_t i) const {
- return reinterpret_cast<const String *>(Get(i));
- }
-
- const void *GetStructFromOffset(size_t o) const {
- return reinterpret_cast<const void *>(Data() + o);
- }
-
- iterator begin() { return iterator(Data(), 0); }
- const_iterator begin() const { return const_iterator(Data(), 0); }
-
- iterator end() { return iterator(Data(), size()); }
- const_iterator end() const { return const_iterator(Data(), size()); }
-
- reverse_iterator rbegin() { return reverse_iterator(end()); }
- const_reverse_iterator rbegin() const {
- return const_reverse_iterator(end());
- }
-
- reverse_iterator rend() { return reverse_iterator(begin()); }
- const_reverse_iterator rend() const {
- return const_reverse_iterator(begin());
- }
-
- const_iterator cbegin() const { return begin(); }
-
- const_iterator cend() const { return end(); }
-
- const_reverse_iterator crbegin() const { return rbegin(); }
-
- const_reverse_iterator crend() const { return rend(); }
-
- // Change elements if you have a non-const pointer to this object.
- // Scalars only. See reflection.h, and the documentation.
- void Mutate(uoffset_t i, const T &val) {
- FLATBUFFERS_ASSERT(i < size());
- WriteScalar(data() + i, val);
- }
-
- // Change an element of a vector of tables (or strings).
- // "val" points to the new table/string, as you can obtain from
- // e.g. reflection::AddFlatBuffer().
- void MutateOffset(uoffset_t i, const uint8_t *val) {
- FLATBUFFERS_ASSERT(i < size());
- static_assert(sizeof(T) == sizeof(uoffset_t), "Unrelated types");
- WriteScalar(data() + i,
- static_cast<uoffset_t>(val - (Data() + i * sizeof(uoffset_t))));
- }
-
- // Get a mutable pointer to tables/strings inside this vector.
- mutable_return_type GetMutableObject(uoffset_t i) const {
- FLATBUFFERS_ASSERT(i < size());
- return const_cast<mutable_return_type>(IndirectHelper<T>::Read(Data(), i));
- }
-
- // The raw data in little endian format. Use with care.
- const uint8_t *Data() const {
- return reinterpret_cast<const uint8_t *>(&length_ + 1);
- }
-
- uint8_t *Data() { return reinterpret_cast<uint8_t *>(&length_ + 1); }
-
- // Similarly, but typed, much like std::vector::data
- const T *data() const { return reinterpret_cast<const T *>(Data()); }
- T *data() { return reinterpret_cast<T *>(Data()); }
-
- template<typename K> return_type LookupByKey(K key) const {
- void *search_result = std::bsearch(
- &key, Data(), size(), IndirectHelper<T>::element_stride, KeyCompare<K>);
-
- if (!search_result) {
- return nullptr; // Key not found.
- }
-
- const uint8_t *element = reinterpret_cast<const uint8_t *>(search_result);
-
- return IndirectHelper<T>::Read(element, 0);
- }
-
- protected:
- // This class is only used to access pre-existing data. Don't ever
- // try to construct these manually.
- Vector();
-
- uoffset_t length_;
-
- private:
- // This class is a pointer. Copying will therefore create an invalid object.
- // Private and unimplemented copy constructor.
- Vector(const Vector &);
- Vector &operator=(const Vector &);
-
- template<typename K> static int KeyCompare(const void *ap, const void *bp) {
- const K *key = reinterpret_cast<const K *>(ap);
- const uint8_t *data = reinterpret_cast<const uint8_t *>(bp);
- auto table = IndirectHelper<T>::Read(data, 0);
-
- // std::bsearch compares with the operands transposed, so we negate the
- // result here.
- return -table->KeyCompareWithValue(*key);
- }
-};
-
-template<class U>
-FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<U> make_span(Vector<U> &vec)
- FLATBUFFERS_NOEXCEPT {
- static_assert(Vector<U>::is_span_observable,
- "wrong type U, only LE-scalar, or byte types are allowed");
- return span<U>(vec.data(), vec.size());
-}
-
-template<class U>
-FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<const U> make_span(
- const Vector<U> &vec) FLATBUFFERS_NOEXCEPT {
- static_assert(Vector<U>::is_span_observable,
- "wrong type U, only LE-scalar, or byte types are allowed");
- return span<const U>(vec.data(), vec.size());
-}
-
-template<class U>
-FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<uint8_t> make_bytes_span(
- Vector<U> &vec) FLATBUFFERS_NOEXCEPT {
- static_assert(Vector<U>::scalar_tag::value,
- "wrong type U, only LE-scalar, or byte types are allowed");
- return span<uint8_t>(vec.Data(), vec.size() * sizeof(U));
-}
-
-template<class U>
-FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<const uint8_t> make_bytes_span(
- const Vector<U> &vec) FLATBUFFERS_NOEXCEPT {
- static_assert(Vector<U>::scalar_tag::value,
- "wrong type U, only LE-scalar, or byte types are allowed");
- return span<const uint8_t>(vec.Data(), vec.size() * sizeof(U));
-}
-
-// Represent a vector much like the template above, but in this case we
-// don't know what the element types are (used with reflection.h).
-class VectorOfAny {
- public:
- uoffset_t size() const { return EndianScalar(length_); }
-
- const uint8_t *Data() const {
- return reinterpret_cast<const uint8_t *>(&length_ + 1);
- }
- uint8_t *Data() { return reinterpret_cast<uint8_t *>(&length_ + 1); }
-
- protected:
- VectorOfAny();
-
- uoffset_t length_;
-
- private:
- VectorOfAny(const VectorOfAny &);
- VectorOfAny &operator=(const VectorOfAny &);
-};
-
-#ifndef FLATBUFFERS_CPP98_STL
-template<typename T, typename U>
-Vector<Offset<T>> *VectorCast(Vector<Offset<U>> *ptr) {
- static_assert(std::is_base_of<T, U>::value, "Unrelated types");
- return reinterpret_cast<Vector<Offset<T>> *>(ptr);
-}
-
-template<typename T, typename U>
-const Vector<Offset<T>> *VectorCast(const Vector<Offset<U>> *ptr) {
- static_assert(std::is_base_of<T, U>::value, "Unrelated types");
- return reinterpret_cast<const Vector<Offset<T>> *>(ptr);
-}
-#endif
-
-// Convenient helper function to get the length of any vector, regardless
-// of whether it is null or not (the field is not set).
-template<typename T> static inline size_t VectorLength(const Vector<T> *v) {
- return v ? v->size() : 0;
-}
-
-// This is used as a helper type for accessing arrays.
-template<typename T, uint16_t length> class Array {
- // Array<T> can carry only POD data types (scalars or structs).
- typedef typename flatbuffers::bool_constant<flatbuffers::is_scalar<T>::value>
- scalar_tag;
- typedef
- typename flatbuffers::conditional<scalar_tag::value, T, const T *>::type
- IndirectHelperType;
-
- public:
- typedef uint16_t size_type;
- typedef typename IndirectHelper<IndirectHelperType>::return_type return_type;
- typedef VectorIterator<T, return_type> const_iterator;
- typedef VectorReverseIterator<const_iterator> const_reverse_iterator;
-
- // If T is a LE-scalar or a struct (!scalar_tag::value).
- static FLATBUFFERS_CONSTEXPR bool is_span_observable =
- (scalar_tag::value && (FLATBUFFERS_LITTLEENDIAN || sizeof(T) == 1)) ||
- !scalar_tag::value;
-
- FLATBUFFERS_CONSTEXPR uint16_t size() const { return length; }
-
- return_type Get(uoffset_t i) const {
- FLATBUFFERS_ASSERT(i < size());
- return IndirectHelper<IndirectHelperType>::Read(Data(), i);
- }
-
- return_type operator[](uoffset_t i) const { return Get(i); }
-
- // If this is a Vector of enums, T will be its storage type, not the enum
- // type. This function makes it convenient to retrieve value with enum
- // type E.
- template<typename E> E GetEnum(uoffset_t i) const {
- return static_cast<E>(Get(i));
- }
-
- const_iterator begin() const { return const_iterator(Data(), 0); }
- const_iterator end() const { return const_iterator(Data(), size()); }
-
- const_reverse_iterator rbegin() const {
- return const_reverse_iterator(end());
- }
- const_reverse_iterator rend() const {
- return const_reverse_iterator(begin());
- }
-
- const_iterator cbegin() const { return begin(); }
- const_iterator cend() const { return end(); }
-
- const_reverse_iterator crbegin() const { return rbegin(); }
- const_reverse_iterator crend() const { return rend(); }
-
- // Get a mutable pointer to elements inside this array.
- // This method used to mutate arrays of structs followed by a @p Mutate
- // operation. For primitive types use @p Mutate directly.
- // @warning Assignments and reads to/from the dereferenced pointer are not
- // automatically converted to the correct endianness.
- typename flatbuffers::conditional<scalar_tag::value, void, T *>::type
- GetMutablePointer(uoffset_t i) const {
- FLATBUFFERS_ASSERT(i < size());
- return const_cast<T *>(&data()[i]);
- }
-
- // Change elements if you have a non-const pointer to this object.
- void Mutate(uoffset_t i, const T &val) { MutateImpl(scalar_tag(), i, val); }
-
- // The raw data in little endian format. Use with care.
- const uint8_t *Data() const { return data_; }
-
- uint8_t *Data() { return data_; }
-
- // Similarly, but typed, much like std::vector::data
- const T *data() const { return reinterpret_cast<const T *>(Data()); }
- T *data() { return reinterpret_cast<T *>(Data()); }
-
- // Copy data from a span with endian conversion.
- // If this Array and the span overlap, the behavior is undefined.
- void CopyFromSpan(flatbuffers::span<const T, length> src) {
- const auto p1 = reinterpret_cast<const uint8_t *>(src.data());
- const auto p2 = Data();
- FLATBUFFERS_ASSERT(!(p1 >= p2 && p1 < (p2 + length)) &&
- !(p2 >= p1 && p2 < (p1 + length)));
- (void)p1;
- (void)p2;
- CopyFromSpanImpl(flatbuffers::bool_constant<is_span_observable>(), src);
- }
-
- protected:
- void MutateImpl(flatbuffers::true_type, uoffset_t i, const T &val) {
- FLATBUFFERS_ASSERT(i < size());
- WriteScalar(data() + i, val);
- }
-
- void MutateImpl(flatbuffers::false_type, uoffset_t i, const T &val) {
- *(GetMutablePointer(i)) = val;
- }
-
- void CopyFromSpanImpl(flatbuffers::true_type,
- flatbuffers::span<const T, length> src) {
- // Use std::memcpy() instead of std::copy() to avoid preformance degradation
- // due to aliasing if T is char or unsigned char.
- // The size is known at compile time, so memcpy would be inlined.
- std::memcpy(data(), src.data(), length * sizeof(T));
- }
-
- // Copy data from flatbuffers::span with endian conversion.
- void CopyFromSpanImpl(flatbuffers::false_type,
- flatbuffers::span<const T, length> src) {
- for (size_type k = 0; k < length; k++) { Mutate(k, src[k]); }
- }
-
- // This class is only used to access pre-existing data. Don't ever
- // try to construct these manually.
- // 'constexpr' allows us to use 'size()' at compile time.
- // @note Must not use 'FLATBUFFERS_CONSTEXPR' here, as const is not allowed on
- // a constructor.
-#if defined(__cpp_constexpr)
- constexpr Array();
-#else
- Array();
-#endif
-
- uint8_t data_[length * sizeof(T)];
-
- private:
- // This class is a pointer. Copying will therefore create an invalid object.
- // Private and unimplemented copy constructor.
- Array(const Array &);
- Array &operator=(const Array &);
-};
-
-// Specialization for Array[struct] with access using Offset<void> pointer.
-// This specialization used by idl_gen_text.cpp.
-template<typename T, uint16_t length> class Array<Offset<T>, length> {
- static_assert(flatbuffers::is_same<T, void>::value, "unexpected type T");
-
- public:
- typedef const void *return_type;
-
- const uint8_t *Data() const { return data_; }
-
- // Make idl_gen_text.cpp::PrintContainer happy.
- return_type operator[](uoffset_t) const {
- FLATBUFFERS_ASSERT(false);
- return nullptr;
- }
-
- private:
- // This class is only used to access pre-existing data.
- Array();
- Array(const Array &);
- Array &operator=(const Array &);
-
- uint8_t data_[1];
-};
-
-template<class U, uint16_t N>
-FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<U, N> make_span(Array<U, N> &arr)
- FLATBUFFERS_NOEXCEPT {
- static_assert(
- Array<U, N>::is_span_observable,
- "wrong type U, only plain struct, LE-scalar, or byte types are allowed");
- return span<U, N>(arr.data(), N);
-}
-
-template<class U, uint16_t N>
-FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<const U, N> make_span(
- const Array<U, N> &arr) FLATBUFFERS_NOEXCEPT {
- static_assert(
- Array<U, N>::is_span_observable,
- "wrong type U, only plain struct, LE-scalar, or byte types are allowed");
- return span<const U, N>(arr.data(), N);
-}
-
-template<class U, uint16_t N>
-FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<uint8_t, sizeof(U) * N>
-make_bytes_span(Array<U, N> &arr) FLATBUFFERS_NOEXCEPT {
- static_assert(Array<U, N>::is_span_observable,
- "internal error, Array<T> might hold only scalars or structs");
- return span<uint8_t, sizeof(U) * N>(arr.Data(), sizeof(U) * N);
-}
-
-template<class U, uint16_t N>
-FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<const uint8_t, sizeof(U) * N>
-make_bytes_span(const Array<U, N> &arr) FLATBUFFERS_NOEXCEPT {
- static_assert(Array<U, N>::is_span_observable,
- "internal error, Array<T> might hold only scalars or structs");
- return span<const uint8_t, sizeof(U) * N>(arr.Data(), sizeof(U) * N);
-}
-
-// Cast a raw T[length] to a raw flatbuffers::Array<T, length>
-// without endian conversion. Use with care.
-// TODO: move these Cast-methods to `internal` namespace.
-template<typename T, uint16_t length>
-Array<T, length> &CastToArray(T (&arr)[length]) {
- return *reinterpret_cast<Array<T, length> *>(arr);
-}
-
-template<typename T, uint16_t length>
-const Array<T, length> &CastToArray(const T (&arr)[length]) {
- return *reinterpret_cast<const Array<T, length> *>(arr);
-}
-
-template<typename E, typename T, uint16_t length>
-Array<E, length> &CastToArrayOfEnum(T (&arr)[length]) {
- static_assert(sizeof(E) == sizeof(T), "invalid enum type E");
- return *reinterpret_cast<Array<E, length> *>(arr);
-}
-
-template<typename E, typename T, uint16_t length>
-const Array<E, length> &CastToArrayOfEnum(const T (&arr)[length]) {
- static_assert(sizeof(E) == sizeof(T), "invalid enum type E");
- return *reinterpret_cast<const Array<E, length> *>(arr);
-}
-
-// Lexicographically compare two strings (possibly containing nulls), and
-// return true if the first is less than the second.
-static inline bool StringLessThan(const char *a_data, uoffset_t a_size,
- const char *b_data, uoffset_t b_size) {
- const auto cmp = memcmp(a_data, b_data, (std::min)(a_size, b_size));
- return cmp == 0 ? a_size < b_size : cmp < 0;
-}
-
-struct String : public Vector<char> {
- const char *c_str() const { return reinterpret_cast<const char *>(Data()); }
- std::string str() const { return std::string(c_str(), size()); }
-
- // clang-format off
- #ifdef FLATBUFFERS_HAS_STRING_VIEW
- flatbuffers::string_view string_view() const {
- return flatbuffers::string_view(c_str(), size());
- }
- #endif // FLATBUFFERS_HAS_STRING_VIEW
- // clang-format on
-
- bool operator<(const String &o) const {
- return StringLessThan(this->data(), this->size(), o.data(), o.size());
- }
-};
-
-// Convenience function to get std::string from a String returning an empty
-// string on null pointer.
-static inline std::string GetString(const String *str) {
- return str ? str->str() : "";
-}
-
-// Convenience function to get char* from a String returning an empty string on
-// null pointer.
-static inline const char *GetCstring(const String *str) {
- return str ? str->c_str() : "";
-}
-
-#ifdef FLATBUFFERS_HAS_STRING_VIEW
-// Convenience function to get string_view from a String returning an empty
-// string_view on null pointer.
-static inline flatbuffers::string_view GetStringView(const String *str) {
- return str ? str->string_view() : flatbuffers::string_view();
-}
-#endif // FLATBUFFERS_HAS_STRING_VIEW
-
-// Allocator interface. This is flatbuffers-specific and meant only for
-// `vector_downward` usage.
-class Allocator {
- public:
- virtual ~Allocator() {}
-
- // Allocate `size` bytes of memory.
- virtual uint8_t *allocate(size_t size) = 0;
-
- // Deallocate `size` bytes of memory at `p` allocated by this allocator.
- virtual void deallocate(uint8_t *p, size_t size) = 0;
-
- // Reallocate `new_size` bytes of memory, replacing the old region of size
- // `old_size` at `p`. In contrast to a normal realloc, this grows downwards,
- // and is intended specifcally for `vector_downward` use.
- // `in_use_back` and `in_use_front` indicate how much of `old_size` is
- // actually in use at each end, and needs to be copied.
- virtual uint8_t *reallocate_downward(uint8_t *old_p, size_t old_size,
- size_t new_size, size_t in_use_back,
- size_t in_use_front) {
- FLATBUFFERS_ASSERT(new_size > old_size); // vector_downward only grows
- uint8_t *new_p = allocate(new_size);
- memcpy_downward(old_p, old_size, new_p, new_size, in_use_back,
- in_use_front);
- deallocate(old_p, old_size);
- return new_p;
- }
-
- protected:
- // Called by `reallocate_downward` to copy memory from `old_p` of `old_size`
- // to `new_p` of `new_size`. Only memory of size `in_use_front` and
- // `in_use_back` will be copied from the front and back of the old memory
- // allocation.
- void memcpy_downward(uint8_t *old_p, size_t old_size, uint8_t *new_p,
- size_t new_size, size_t in_use_back,
- size_t in_use_front) {
- memcpy(new_p + new_size - in_use_back, old_p + old_size - in_use_back,
- in_use_back);
- memcpy(new_p, old_p, in_use_front);
- }
-};
-
-// DefaultAllocator uses new/delete to allocate memory regions
-class DefaultAllocator : public Allocator {
- public:
- uint8_t *allocate(size_t size) FLATBUFFERS_OVERRIDE {
- return new uint8_t[size];
- }
-
- void deallocate(uint8_t *p, size_t) FLATBUFFERS_OVERRIDE { delete[] p; }
-
- static void dealloc(void *p, size_t) { delete[] static_cast<uint8_t *>(p); }
-};
-
-// These functions allow for a null allocator to mean use the default allocator,
-// as used by DetachedBuffer and vector_downward below.
-// This is to avoid having a statically or dynamically allocated default
-// allocator, or having to move it between the classes that may own it.
-inline uint8_t *Allocate(Allocator *allocator, size_t size) {
- return allocator ? allocator->allocate(size)
- : DefaultAllocator().allocate(size);
-}
-
-inline void Deallocate(Allocator *allocator, uint8_t *p, size_t size) {
- if (allocator)
- allocator->deallocate(p, size);
- else
- DefaultAllocator().deallocate(p, size);
-}
-
-inline uint8_t *ReallocateDownward(Allocator *allocator, uint8_t *old_p,
- size_t old_size, size_t new_size,
- size_t in_use_back, size_t in_use_front) {
- return allocator ? allocator->reallocate_downward(old_p, old_size, new_size,
- in_use_back, in_use_front)
- : DefaultAllocator().reallocate_downward(
- old_p, old_size, new_size, in_use_back, in_use_front);
-}
-
-// DetachedBuffer is a finished flatbuffer memory region, detached from its
-// builder. The original memory region and allocator are also stored so that
-// the DetachedBuffer can manage the memory lifetime.
-class DetachedBuffer {
- public:
- DetachedBuffer()
- : allocator_(nullptr),
- own_allocator_(false),
- buf_(nullptr),
- reserved_(0),
- cur_(nullptr),
- size_(0) {}
-
- DetachedBuffer(Allocator *allocator, bool own_allocator, uint8_t *buf,
- size_t reserved, uint8_t *cur, size_t sz)
- : allocator_(allocator),
- own_allocator_(own_allocator),
- buf_(buf),
- reserved_(reserved),
- cur_(cur),
- size_(sz) {}
-
- // clang-format off
- #if !defined(FLATBUFFERS_CPP98_STL)
- // clang-format on
- DetachedBuffer(DetachedBuffer &&other)
- : allocator_(other.allocator_),
- own_allocator_(other.own_allocator_),
- buf_(other.buf_),
- reserved_(other.reserved_),
- cur_(other.cur_),
- size_(other.size_) {
- other.reset();
- }
- // clang-format off
- #endif // !defined(FLATBUFFERS_CPP98_STL)
- // clang-format on
-
- // clang-format off
- #if !defined(FLATBUFFERS_CPP98_STL)
- // clang-format on
- DetachedBuffer &operator=(DetachedBuffer &&other) {
- if (this == &other) return *this;
-
- destroy();
-
- allocator_ = other.allocator_;
- own_allocator_ = other.own_allocator_;
- buf_ = other.buf_;
- reserved_ = other.reserved_;
- cur_ = other.cur_;
- size_ = other.size_;
-
- other.reset();
-
- return *this;
- }
- // clang-format off
- #endif // !defined(FLATBUFFERS_CPP98_STL)
- // clang-format on
-
- ~DetachedBuffer() { destroy(); }
-
- const uint8_t *data() const { return cur_; }
-
- uint8_t *data() { return cur_; }
-
- size_t size() const { return size_; }
-
- // clang-format off
- #if 0 // disabled for now due to the ordering of classes in this header
- template <class T>
- bool Verify() const {
- Verifier verifier(data(), size());
- return verifier.Verify<T>(nullptr);
- }
-
- template <class T>
- const T* GetRoot() const {
- return flatbuffers::GetRoot<T>(data());
- }
-
- template <class T>
- T* GetRoot() {
- return flatbuffers::GetRoot<T>(data());
- }
- #endif
- // clang-format on
-
- // clang-format off
- #if !defined(FLATBUFFERS_CPP98_STL)
- // clang-format on
- // These may change access mode, leave these at end of public section
- FLATBUFFERS_DELETE_FUNC(DetachedBuffer(const DetachedBuffer &other));
- FLATBUFFERS_DELETE_FUNC(
- DetachedBuffer &operator=(const DetachedBuffer &other));
- // clang-format off
- #endif // !defined(FLATBUFFERS_CPP98_STL)
- // clang-format on
-
- protected:
- Allocator *allocator_;
- bool own_allocator_;
- uint8_t *buf_;
- size_t reserved_;
- uint8_t *cur_;
- size_t size_;
-
- inline void destroy() {
- if (buf_) Deallocate(allocator_, buf_, reserved_);
- if (own_allocator_ && allocator_) { delete allocator_; }
- reset();
- }
-
- inline void reset() {
- allocator_ = nullptr;
- own_allocator_ = false;
- buf_ = nullptr;
- reserved_ = 0;
- cur_ = nullptr;
- size_ = 0;
- }
-};
-
-// This is a minimal replication of std::vector<uint8_t> functionality,
-// except growing from higher to lower addresses. i.e push_back() inserts data
-// in the lowest address in the vector.
-// Since this vector leaves the lower part unused, we support a "scratch-pad"
-// that can be stored there for temporary data, to share the allocated space.
-// Essentially, this supports 2 std::vectors in a single buffer.
-class vector_downward {
- public:
- explicit vector_downward(size_t initial_size, Allocator *allocator,
- bool own_allocator, size_t buffer_minalign)
- : allocator_(allocator),
- own_allocator_(own_allocator),
- initial_size_(initial_size),
- buffer_minalign_(buffer_minalign),
- reserved_(0),
- buf_(nullptr),
- cur_(nullptr),
- scratch_(nullptr) {}
-
- // clang-format off
- #if !defined(FLATBUFFERS_CPP98_STL)
- vector_downward(vector_downward &&other)
- #else
- vector_downward(vector_downward &other)
- #endif // defined(FLATBUFFERS_CPP98_STL)
- // clang-format on
- : allocator_(other.allocator_),
- own_allocator_(other.own_allocator_),
- initial_size_(other.initial_size_),
- buffer_minalign_(other.buffer_minalign_),
- reserved_(other.reserved_),
- buf_(other.buf_),
- cur_(other.cur_),
- scratch_(other.scratch_) {
- // No change in other.allocator_
- // No change in other.initial_size_
- // No change in other.buffer_minalign_
- other.own_allocator_ = false;
- other.reserved_ = 0;
- other.buf_ = nullptr;
- other.cur_ = nullptr;
- other.scratch_ = nullptr;
- }
-
- // clang-format off
- #if !defined(FLATBUFFERS_CPP98_STL)
- // clang-format on
- vector_downward &operator=(vector_downward &&other) {
- // Move construct a temporary and swap idiom
- vector_downward temp(std::move(other));
- swap(temp);
- return *this;
- }
- // clang-format off
- #endif // defined(FLATBUFFERS_CPP98_STL)
- // clang-format on
-
- ~vector_downward() {
- clear_buffer();
- clear_allocator();
- }
-
- void reset() {
- clear_buffer();
- clear();
- }
-
- void clear() {
- if (buf_) {
- cur_ = buf_ + reserved_;
- } else {
- reserved_ = 0;
- cur_ = nullptr;
- }
- clear_scratch();
- }
-
- void clear_scratch() { scratch_ = buf_; }
-
- void clear_allocator() {
- if (own_allocator_ && allocator_) { delete allocator_; }
- allocator_ = nullptr;
- own_allocator_ = false;
- }
-
- void clear_buffer() {
- if (buf_) Deallocate(allocator_, buf_, reserved_);
- buf_ = nullptr;
- }
-
- // Relinquish the pointer to the caller.
- uint8_t *release_raw(size_t &allocated_bytes, size_t &offset) {
- auto *buf = buf_;
- allocated_bytes = reserved_;
- offset = static_cast<size_t>(cur_ - buf_);
-
- // release_raw only relinquishes the buffer ownership.
- // Does not deallocate or reset the allocator. Destructor will do that.
- buf_ = nullptr;
- clear();
- return buf;
- }
-
- // Relinquish the pointer to the caller.
- DetachedBuffer release() {
- // allocator ownership (if any) is transferred to DetachedBuffer.
- DetachedBuffer fb(allocator_, own_allocator_, buf_, reserved_, cur_,
- size());
- if (own_allocator_) {
- allocator_ = nullptr;
- own_allocator_ = false;
- }
- buf_ = nullptr;
- clear();
- return fb;
- }
-
- size_t ensure_space(size_t len) {
- FLATBUFFERS_ASSERT(cur_ >= scratch_ && scratch_ >= buf_);
- if (len > static_cast<size_t>(cur_ - scratch_)) { reallocate(len); }
- // Beyond this, signed offsets may not have enough range:
- // (FlatBuffers > 2GB not supported).
- FLATBUFFERS_ASSERT(size() < FLATBUFFERS_MAX_BUFFER_SIZE);
- return len;
- }
-
- inline uint8_t *make_space(size_t len) {
- size_t space = ensure_space(len);
- cur_ -= space;
- return cur_;
- }
-
- // Returns nullptr if using the DefaultAllocator.
- Allocator *get_custom_allocator() { return allocator_; }
-
- uoffset_t size() const {
- return static_cast<uoffset_t>(reserved_ - static_cast<size_t>(cur_ - buf_));
- }
-
- uoffset_t scratch_size() const {
- return static_cast<uoffset_t>(scratch_ - buf_);
- }
-
- size_t capacity() const { return reserved_; }
-
- uint8_t *data() const {
- FLATBUFFERS_ASSERT(cur_);
- return cur_;
- }
-
- uint8_t *scratch_data() const {
- FLATBUFFERS_ASSERT(buf_);
- return buf_;
- }
-
- uint8_t *scratch_end() const {
- FLATBUFFERS_ASSERT(scratch_);
- return scratch_;
- }
-
- uint8_t *data_at(size_t offset) const { return buf_ + reserved_ - offset; }
-
- void push(const uint8_t *bytes, size_t num) {
- if (num > 0) { memcpy(make_space(num), bytes, num); }
- }
-
- // Specialized version of push() that avoids memcpy call for small data.
- template<typename T> void push_small(const T &little_endian_t) {
- make_space(sizeof(T));
- *reinterpret_cast<T *>(cur_) = little_endian_t;
- }
-
- template<typename T> void scratch_push_small(const T &t) {
- ensure_space(sizeof(T));
- *reinterpret_cast<T *>(scratch_) = t;
- scratch_ += sizeof(T);
- }
-
- // fill() is most frequently called with small byte counts (<= 4),
- // which is why we're using loops rather than calling memset.
- void fill(size_t zero_pad_bytes) {
- make_space(zero_pad_bytes);
- for (size_t i = 0; i < zero_pad_bytes; i++) cur_[i] = 0;
- }
-
- // Version for when we know the size is larger.
- // Precondition: zero_pad_bytes > 0
- void fill_big(size_t zero_pad_bytes) {
- memset(make_space(zero_pad_bytes), 0, zero_pad_bytes);
- }
-
- void pop(size_t bytes_to_remove) { cur_ += bytes_to_remove; }
- void scratch_pop(size_t bytes_to_remove) { scratch_ -= bytes_to_remove; }
-
- void swap(vector_downward &other) {
- using std::swap;
- swap(allocator_, other.allocator_);
- swap(own_allocator_, other.own_allocator_);
- swap(initial_size_, other.initial_size_);
- swap(buffer_minalign_, other.buffer_minalign_);
- swap(reserved_, other.reserved_);
- swap(buf_, other.buf_);
- swap(cur_, other.cur_);
- swap(scratch_, other.scratch_);
- }
-
- void swap_allocator(vector_downward &other) {
- using std::swap;
- swap(allocator_, other.allocator_);
- swap(own_allocator_, other.own_allocator_);
- }
-
- private:
- // You shouldn't really be copying instances of this class.
- FLATBUFFERS_DELETE_FUNC(vector_downward(const vector_downward &));
- FLATBUFFERS_DELETE_FUNC(vector_downward &operator=(const vector_downward &));
-
- Allocator *allocator_;
- bool own_allocator_;
- size_t initial_size_;
- size_t buffer_minalign_;
- size_t reserved_;
- uint8_t *buf_;
- uint8_t *cur_; // Points at location between empty (below) and used (above).
- uint8_t *scratch_; // Points to the end of the scratchpad in use.
-
- void reallocate(size_t len) {
- auto old_reserved = reserved_;
- auto old_size = size();
- auto old_scratch_size = scratch_size();
- reserved_ +=
- (std::max)(len, old_reserved ? old_reserved / 2 : initial_size_);
- reserved_ = (reserved_ + buffer_minalign_ - 1) & ~(buffer_minalign_ - 1);
- if (buf_) {
- buf_ = ReallocateDownward(allocator_, buf_, old_reserved, reserved_,
- old_size, old_scratch_size);
- } else {
- buf_ = Allocate(allocator_, reserved_);
- }
- cur_ = buf_ + reserved_ - old_size;
- scratch_ = buf_ + old_scratch_size;
- }
-};
-
-// Converts a Field ID to a virtual table offset.
-inline voffset_t FieldIndexToOffset(voffset_t field_id) {
- // Should correspond to what EndTable() below builds up.
- const int fixed_fields = 2; // Vtable size and Object Size.
- return static_cast<voffset_t>((field_id + fixed_fields) * sizeof(voffset_t));
-}
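-// Worked example (added for illustration, not in the original source): with a
-// 16-bit voffset_t, field_id 0 maps to (0 + 2) * 2 = 4 and field_id 1 maps to
-// (1 + 2) * 2 = 6; the first two voffset_t slots of a vtable hold the vtable
-// size and the object size, so field offsets start at byte 4.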
-
-template<typename T, typename Alloc>
-const T *data(const std::vector<T, Alloc> &v) {
- // Eventually the returned pointer gets passed down to memcpy, so
- // we need it to be non-null to avoid undefined behavior.
- static uint8_t t;
- return v.empty() ? reinterpret_cast<const T *>(&t) : &v.front();
-}
-template<typename T, typename Alloc> T *data(std::vector<T, Alloc> &v) {
- // Eventually the returned pointer gets passed down to memcpy, so
- // we need it to be non-null to avoid undefined behavior.
- static uint8_t t;
- return v.empty() ? reinterpret_cast<T *>(&t) : &v.front();
-}
-
-/// @endcond
-
-/// @addtogroup flatbuffers_cpp_api
-/// @{
-/// @class FlatBufferBuilder
-/// @brief Helper class to hold data needed in creation of a FlatBuffer.
-/// To serialize data, you typically call one of the `Create*()` functions in
-/// the generated code, which in turn call a sequence of `StartTable`/
-/// `PushElement`/`AddElement`/`EndTable`, or the builtin `CreateString`/
- /// `CreateVector` functions. Do this in depth-first order to build up a tree to
-/// the root. `Finish()` wraps up the buffer ready for transport.
-class FlatBufferBuilder {
- public:
- /// @brief Default constructor for FlatBufferBuilder.
- /// @param[in] initial_size The initial size of the buffer, in bytes. Defaults
- /// to `1024`.
- /// @param[in] allocator An `Allocator` to use. If null will use
- /// `DefaultAllocator`.
- /// @param[in] own_allocator Whether the builder/vector should own the
- /// allocator. Defaults to `false`.
- /// @param[in] buffer_minalign Force the buffer to be aligned to the given
- /// minimum alignment upon reallocation. Only needed if you intend to store
- /// types with custom alignment AND you wish to read the buffer in-place
- /// directly after creation.
- explicit FlatBufferBuilder(
- size_t initial_size = 1024, Allocator *allocator = nullptr,
- bool own_allocator = false,
- size_t buffer_minalign = AlignOf<largest_scalar_t>())
- : buf_(initial_size, allocator, own_allocator, buffer_minalign),
- num_field_loc(0),
- max_voffset_(0),
- nested(false),
- finished(false),
- minalign_(1),
- force_defaults_(false),
- dedup_vtables_(true),
- string_pool(nullptr) {
- EndianCheck();
- }
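- // Illustrative end-to-end sketch (added, not part of the original header);
- // `Monster` and `CreateMonster` stand in for names from your generated code:
- //
- //   flatbuffers::FlatBufferBuilder fbb(1024);
- //   auto name = fbb.CreateString("Orc");
- //   auto orc = CreateMonster(fbb, name, /*hp=*/150);
- //   fbb.Finish(orc);
- //   // fbb.GetBufferPointer() / fbb.GetSize() now describe the finished buffer.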
-
- // clang-format off
- /// @brief Move constructor for FlatBufferBuilder.
- #if !defined(FLATBUFFERS_CPP98_STL)
- FlatBufferBuilder(FlatBufferBuilder &&other)
- #else
- FlatBufferBuilder(FlatBufferBuilder &other)
- #endif // #if !defined(FLATBUFFERS_CPP98_STL)
- : buf_(1024, nullptr, false, AlignOf<largest_scalar_t>()),
- num_field_loc(0),
- max_voffset_(0),
- nested(false),
- finished(false),
- minalign_(1),
- force_defaults_(false),
- dedup_vtables_(true),
- string_pool(nullptr) {
- EndianCheck();
- // Default construct and swap idiom.
- // Lack of delegating constructors in vs2010 makes it more verbose than needed.
- Swap(other);
- }
- // clang-format on
-
- // clang-format off
- #if !defined(FLATBUFFERS_CPP98_STL)
- // clang-format on
- /// @brief Move assignment operator for FlatBufferBuilder.
- FlatBufferBuilder &operator=(FlatBufferBuilder &&other) {
- // Move construct a temporary and swap idiom
- FlatBufferBuilder temp(std::move(other));
- Swap(temp);
- return *this;
- }
- // clang-format off
- #endif // !defined(FLATBUFFERS_CPP98_STL)
- // clang-format on
-
- void Swap(FlatBufferBuilder &other) {
- using std::swap;
- buf_.swap(other.buf_);
- swap(num_field_loc, other.num_field_loc);
- swap(max_voffset_, other.max_voffset_);
- swap(nested, other.nested);
- swap(finished, other.finished);
- swap(minalign_, other.minalign_);
- swap(force_defaults_, other.force_defaults_);
- swap(dedup_vtables_, other.dedup_vtables_);
- swap(string_pool, other.string_pool);
- }
-
- ~FlatBufferBuilder() {
- if (string_pool) delete string_pool;
- }
-
- void Reset() {
- Clear(); // clear builder state
- buf_.reset(); // deallocate buffer
- }
-
- /// @brief Reset all the state in this FlatBufferBuilder so it can be reused
- /// to construct another buffer.
- void Clear() {
- ClearOffsets();
- buf_.clear();
- nested = false;
- finished = false;
- minalign_ = 1;
- if (string_pool) string_pool->clear();
- }
-
- /// @brief The current size of the serialized buffer, counting from the end.
- /// @return Returns an `uoffset_t` with the current size of the buffer.
- uoffset_t GetSize() const { return buf_.size(); }
-
- /// @brief Get the serialized buffer (after you call `Finish()`).
- /// @return Returns a `uint8_t` pointer to the FlatBuffer data inside the
- /// buffer.
- uint8_t *GetBufferPointer() const {
- Finished();
- return buf_.data();
- }
-
- /// @brief Get the serialized buffer (after you call `Finish()`) as a span.
- /// @return Returns a constructed flatbuffers::span that is a view over the
- /// FlatBuffer data inside the buffer.
- flatbuffers::span<uint8_t> GetBufferSpan() const {
- Finished();
- return flatbuffers::span<uint8_t>(buf_.data(), buf_.size());
- }
-
- /// @brief Get a pointer to an unfinished buffer.
- /// @return Returns a `uint8_t` pointer to the unfinished buffer.
- uint8_t *GetCurrentBufferPointer() const { return buf_.data(); }
-
- /// @brief Get the released pointer to the serialized buffer.
- /// @warning Do NOT attempt to use this FlatBufferBuilder afterwards!
- /// @return A `FlatBuffer` that owns the buffer and its allocator and
- /// behaves similar to a `unique_ptr` with a deleter.
- FLATBUFFERS_ATTRIBUTE(deprecated("use Release() instead"))
- DetachedBuffer ReleaseBufferPointer() {
- Finished();
- return buf_.release();
- }
-
- /// @brief Get the released DetachedBuffer.
- /// @return A `DetachedBuffer` that owns the buffer and its allocator.
- DetachedBuffer Release() {
- Finished();
- return buf_.release();
- }
-
- /// @brief Get the released pointer to the serialized buffer.
- /// @param size The size of the memory block containing
- /// the serialized `FlatBuffer`.
- /// @param offset The offset from the released pointer where the finished
- /// `FlatBuffer` starts.
- /// @return A raw pointer to the start of the memory block containing
- /// the serialized `FlatBuffer`.
- /// @remark If the allocator is owned, it gets deleted when the destructor is
- /// called.
- uint8_t *ReleaseRaw(size_t &size, size_t &offset) {
- Finished();
- return buf_.release_raw(size, offset);
- }
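- // Ownership-transfer sketch (added, not part of the original header; `Send`
- // is a hypothetical consumer):
- //
- //   flatbuffers::DetachedBuffer done = fbb.Release();  // after Finish()
- //   Send(done.data(), done.size());
- //   // The memory is freed when `done` goes out of scope.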
-
- /// @brief get the minimum alignment this buffer needs to be accessed
- /// properly. This is only known once all elements have been written (after
- /// you call Finish()). You can use this information if you need to embed
- /// a FlatBuffer in some other buffer, such that you can later read it
- /// without first having to copy it into its own buffer.
- size_t GetBufferMinAlignment() const {
- Finished();
- return minalign_;
- }
-
- /// @cond FLATBUFFERS_INTERNAL
- void Finished() const {
- // If you get this assert, you're attempting to access a buffer
- // which hasn't been finished yet. Be sure to call
- // FlatBufferBuilder::Finish with your root table.
- // If you really need to access an unfinished buffer, call
- // GetCurrentBufferPointer instead.
- FLATBUFFERS_ASSERT(finished);
- }
- /// @endcond
-
- /// @brief In order to save space, fields that are set to their default value
- /// don't get serialized into the buffer.
- /// @param[in] fd When set to `true`, always serializes default values that
- /// are set. Optional fields which are not set explicitly will still not be
- /// serialized.
- void ForceDefaults(bool fd) { force_defaults_ = fd; }
-
- /// @brief By default vtables are deduped in order to save space.
- /// @param[in] dedup When set to `true`, dedup vtables.
- void DedupVtables(bool dedup) { dedup_vtables_ = dedup; }
-
- /// @cond FLATBUFFERS_INTERNAL
- void Pad(size_t num_bytes) { buf_.fill(num_bytes); }
-
- void TrackMinAlign(size_t elem_size) {
- if (elem_size > minalign_) minalign_ = elem_size;
- }
-
- void Align(size_t elem_size) {
- TrackMinAlign(elem_size);
- buf_.fill(PaddingBytes(buf_.size(), elem_size));
- }
-
- void PushFlatBuffer(const uint8_t *bytes, size_t size) {
- PushBytes(bytes, size);
- finished = true;
- }
-
- void PushBytes(const uint8_t *bytes, size_t size) { buf_.push(bytes, size); }
-
- void PopBytes(size_t amount) { buf_.pop(amount); }
-
- template<typename T> void AssertScalarT() {
- // The code assumes power of 2 sizes and endian-swap-ability.
- static_assert(flatbuffers::is_scalar<T>::value, "T must be a scalar type");
- }
-
- // Write a single aligned scalar to the buffer
- template<typename T> uoffset_t PushElement(T element) {
- AssertScalarT<T>();
- Align(sizeof(T));
- buf_.push_small(EndianScalar(element));
- return GetSize();
- }
-
- template<typename T> uoffset_t PushElement(Offset<T> off) {
- // Special case for offsets: see ReferTo below.
- return PushElement(ReferTo(off.o));
- }
-
- // When writing fields, we track where they are, so we can create correct
- // vtables later.
- void TrackField(voffset_t field, uoffset_t off) {
- FieldLoc fl = { off, field };
- buf_.scratch_push_small(fl);
- num_field_loc++;
- max_voffset_ = (std::max)(max_voffset_, field);
- }
-
- // Like PushElement, but additionally tracks the field this represents.
- template<typename T> void AddElement(voffset_t field, T e, T def) {
- // We don't serialize values equal to the default.
- if (IsTheSameAs(e, def) && !force_defaults_) return;
- auto off = PushElement(e);
- TrackField(field, off);
- }
-
- template<typename T> void AddElement(voffset_t field, T e) {
- auto off = PushElement(e);
- TrackField(field, off);
- }
-
- template<typename T> void AddOffset(voffset_t field, Offset<T> off) {
- if (off.IsNull()) return; // Don't store.
- AddElement(field, ReferTo(off.o), static_cast<uoffset_t>(0));
- }
-
- template<typename T> void AddStruct(voffset_t field, const T *structptr) {
- if (!structptr) return; // Default, don't store.
- Align(AlignOf<T>());
- buf_.push_small(*structptr);
- TrackField(field, GetSize());
- }
-
- void AddStructOffset(voffset_t field, uoffset_t off) {
- TrackField(field, off);
- }
-
- // Offsets initially are relative to the end of the buffer (downwards).
- // This function converts them to be relative to the current location
- // in the buffer (when stored here), pointing upwards.
- uoffset_t ReferTo(uoffset_t off) {
- // Align to ensure GetSize() below is correct.
- Align(sizeof(uoffset_t));
- // Offset must refer to something already in buffer.
- FLATBUFFERS_ASSERT(off && off <= GetSize());
- return GetSize() - off + static_cast<uoffset_t>(sizeof(uoffset_t));
- }
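- // Example of the conversion (added for illustration): if the target was
- // written when GetSize() returned 40 and GetSize() is now 100, the stored
- // value is 100 - 40 + 4 = 64, i.e. the target starts 64 bytes past the
- // uoffset_t field about to be written, towards the end of the buffer.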
-
- void NotNested() {
- // If you hit this, you're trying to construct a Table/Vector/String
- // during the construction of its parent table (between the MyTableBuilder
- // and table.Finish()).
- // Move the creation of these sub-objects to above the MyTableBuilder to
- // not get this assert.
- // Ignoring this assert may appear to work in simple cases, but the reason
- // it is here is that storing objects in-line may cause vtable offsets
- // to not fit anymore. It also leads to vtable duplication.
- FLATBUFFERS_ASSERT(!nested);
- // If you hit this, fields were added outside the scope of a table.
- FLATBUFFERS_ASSERT(!num_field_loc);
- }
-
- // From generated code (or from the parser), we call StartTable/EndTable
- // with a sequence of AddElement calls in between.
- uoffset_t StartTable() {
- NotNested();
- nested = true;
- return GetSize();
- }
-
- // This finishes one serialized object by generating the vtable if it's a
- // table, comparing it against existing vtables, and writing the
- // resulting vtable offset.
- uoffset_t EndTable(uoffset_t start) {
- // If you get this assert, a corresponding StartTable wasn't called.
- FLATBUFFERS_ASSERT(nested);
- // Write the vtable offset, which is the start of any Table.
- // We fill in its value later.
- auto vtableoffsetloc = PushElement<soffset_t>(0);
- // Write a vtable, which consists entirely of voffset_t elements.
- // It starts with the number of offsets, followed by a type id, followed
- // by the offsets themselves. In reverse:
- // Include space for the last offset and ensure empty tables have a
- // minimum size.
- max_voffset_ =
- (std::max)(static_cast<voffset_t>(max_voffset_ + sizeof(voffset_t)),
- FieldIndexToOffset(0));
- buf_.fill_big(max_voffset_);
- auto table_object_size = vtableoffsetloc - start;
- // Vtables use 16-bit offsets.
- FLATBUFFERS_ASSERT(table_object_size < 0x10000);
- WriteScalar<voffset_t>(buf_.data() + sizeof(voffset_t),
- static_cast<voffset_t>(table_object_size));
- WriteScalar<voffset_t>(buf_.data(), max_voffset_);
- // Write the offsets into the table
- for (auto it = buf_.scratch_end() - num_field_loc * sizeof(FieldLoc);
- it < buf_.scratch_end(); it += sizeof(FieldLoc)) {
- auto field_location = reinterpret_cast<FieldLoc *>(it);
- auto pos = static_cast<voffset_t>(vtableoffsetloc - field_location->off);
- // If this asserts, it means you've set a field twice.
- FLATBUFFERS_ASSERT(
- !ReadScalar<voffset_t>(buf_.data() + field_location->id));
- WriteScalar<voffset_t>(buf_.data() + field_location->id, pos);
- }
- ClearOffsets();
- auto vt1 = reinterpret_cast<voffset_t *>(buf_.data());
- auto vt1_size = ReadScalar<voffset_t>(vt1);
- auto vt_use = GetSize();
- // See if we already have generated a vtable with this exact same
- // layout before. If so, make it point to the old one, remove this one.
- if (dedup_vtables_) {
- for (auto it = buf_.scratch_data(); it < buf_.scratch_end();
- it += sizeof(uoffset_t)) {
- auto vt_offset_ptr = reinterpret_cast<uoffset_t *>(it);
- auto vt2 = reinterpret_cast<voffset_t *>(buf_.data_at(*vt_offset_ptr));
- auto vt2_size = ReadScalar<voffset_t>(vt2);
- if (vt1_size != vt2_size || 0 != memcmp(vt2, vt1, vt1_size)) continue;
- vt_use = *vt_offset_ptr;
- buf_.pop(GetSize() - vtableoffsetloc);
- break;
- }
- }
- // If this is a new vtable, remember it.
- if (vt_use == GetSize()) { buf_.scratch_push_small(vt_use); }
- // Fill the vtable offset we created above.
- // The offset points from the beginning of the object to where the
- // vtable is stored.
- // The offsets' default direction is downward in memory for future format
- // flexibility (storing all vtables at the start of the file).
- WriteScalar(buf_.data_at(vtableoffsetloc),
- static_cast<soffset_t>(vt_use) -
- static_cast<soffset_t>(vtableoffsetloc));
-
- nested = false;
- return vtableoffsetloc;
- }
-
- FLATBUFFERS_ATTRIBUTE(deprecated("call the version above instead"))
- uoffset_t EndTable(uoffset_t start, voffset_t /*numfields*/) {
- return EndTable(start);
- }
-
- // This checks a required field has been set in a given table that has
- // just been constructed.
- template<typename T> void Required(Offset<T> table, voffset_t field);
-
- uoffset_t StartStruct(size_t alignment) {
- Align(alignment);
- return GetSize();
- }
-
- uoffset_t EndStruct() { return GetSize(); }
-
- void ClearOffsets() {
- buf_.scratch_pop(num_field_loc * sizeof(FieldLoc));
- num_field_loc = 0;
- max_voffset_ = 0;
- }
-
- // Aligns such that when "len" bytes are written, an object can be written
- // after it with "alignment" without padding.
- void PreAlign(size_t len, size_t alignment) {
- TrackMinAlign(alignment);
- buf_.fill(PaddingBytes(GetSize() + len, alignment));
- }
- template<typename T> void PreAlign(size_t len) {
- AssertScalarT<T>();
- PreAlign(len, sizeof(T));
- }
- /// @endcond
-
- /// @brief Store a string in the buffer, which can contain any binary data.
- /// @param[in] str A const char pointer to the data to be stored as a string.
- /// @param[in] len The number of bytes that should be stored from `str`.
- /// @return Returns the offset in the buffer where the string starts.
- Offset<String> CreateString(const char *str, size_t len) {
- NotNested();
- PreAlign<uoffset_t>(len + 1); // Always 0-terminated.
- buf_.fill(1);
- PushBytes(reinterpret_cast<const uint8_t *>(str), len);
- PushElement(static_cast<uoffset_t>(len));
- return Offset<String>(GetSize());
- }
-
- /// @brief Store a string in the buffer, which is null-terminated.
- /// @param[in] str A const char pointer to a C-string to add to the buffer.
- /// @return Returns the offset in the buffer where the string starts.
- Offset<String> CreateString(const char *str) {
- return CreateString(str, strlen(str));
- }
-
- /// @brief Store a string in the buffer, which is null-terminated.
- /// @param[in] str A char pointer to a C-string to add to the buffer.
- /// @return Returns the offset in the buffer where the string starts.
- Offset<String> CreateString(char *str) {
- return CreateString(str, strlen(str));
- }
-
- /// @brief Store a string in the buffer, which can contain any binary data.
- /// @param[in] str A const reference to a std::string to store in the buffer.
- /// @return Returns the offset in the buffer where the string starts.
- Offset<String> CreateString(const std::string &str) {
- return CreateString(str.c_str(), str.length());
- }
-
- // clang-format off
- #ifdef FLATBUFFERS_HAS_STRING_VIEW
- /// @brief Store a string in the buffer, which can contain any binary data.
- /// @param[in] str A const string_view to copy in to the buffer.
- /// @return Returns the offset in the buffer where the string starts.
- Offset<String> CreateString(flatbuffers::string_view str) {
- return CreateString(str.data(), str.size());
- }
- #endif // FLATBUFFERS_HAS_STRING_VIEW
- // clang-format on
-
- /// @brief Store a string in the buffer, which can contain any binary data.
- /// @param[in] str A const pointer to a `String` struct to add to the buffer.
- /// @return Returns the offset in the buffer where the string starts
- Offset<String> CreateString(const String *str) {
- return str ? CreateString(str->c_str(), str->size()) : 0;
- }
-
- /// @brief Store a string in the buffer, which can contain any binary data.
- /// @param[in] str A const reference to a std::string like type with support
- /// of T::c_str() and T::length() to store in the buffer.
- /// @return Returns the offset in the buffer where the string starts.
- template<typename T> Offset<String> CreateString(const T &str) {
- return CreateString(str.c_str(), str.length());
- }
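- // Illustrative use of the CreateString overloads (added, not in the original
- // header); the returned Offset<String> is later stored via AddOffset:
- //
- //   auto a = fbb.CreateString("plain C string");
- //   std::string s("may contain any bytes");
- //   auto b = fbb.CreateString(s);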
-
- /// @brief Store a string in the buffer, which can contain any binary data.
- /// If a string with these exact contents has already been serialized before,
- /// instead simply returns the offset of the existing string. This uses a map
- /// stored on the heap, but only stores the numerical offsets.
- /// @param[in] str A const char pointer to the data to be stored as a string.
- /// @param[in] len The number of bytes that should be stored from `str`.
- /// @return Returns the offset in the buffer where the string starts.
- Offset<String> CreateSharedString(const char *str, size_t len) {
- FLATBUFFERS_ASSERT(FLATBUFFERS_GENERAL_HEAP_ALLOC_OK);
- if (!string_pool)
- string_pool = new StringOffsetMap(StringOffsetCompare(buf_));
- auto size_before_string = buf_.size();
- // Must first serialize the string, since the set is all offsets into
- // buffer.
- auto off = CreateString(str, len);
- auto it = string_pool->find(off);
- // If it exists we reuse existing serialized data!
- if (it != string_pool->end()) {
- // We can remove the string we serialized.
- buf_.pop(buf_.size() - size_before_string);
- return *it;
- }
- // Record this string for future use.
- string_pool->insert(off);
- return off;
- }
-
-#ifdef FLATBUFFERS_HAS_STRING_VIEW
- /// @brief Store a string in the buffer, which can contain any binary data.
- /// If a string with these exact contents has already been serialized before,
- /// instead simply returns the offset of the existing string. This uses a map
- /// stored on the heap, but only stores the numerical offsets.
- /// @param[in] str A const std::string_view to store in the buffer.
- /// @return Returns the offset in the buffer where the string starts
- Offset<String> CreateSharedString(const flatbuffers::string_view str) {
- return CreateSharedString(str.data(), str.size());
- }
-#else
- /// @brief Store a string in the buffer, which is null-terminated.
- /// If a string with these exact contents has already been serialized before,
- /// instead simply returns the offset of the existing string. This uses a map
- /// stored on the heap, but only stores the numerical offsets.
- /// @param[in] str A const char pointer to a C-string to add to the buffer.
- /// @return Returns the offset in the buffer where the string starts.
- Offset<String> CreateSharedString(const char *str) {
- return CreateSharedString(str, strlen(str));
- }
-
- /// @brief Store a string in the buffer, which can contain any binary data.
- /// If a string with these exact contents has already been serialized before,
- /// instead simply returns the offset of the existing string. This uses a map
- /// stored on the heap, but only stores the numerical offsets.
- /// @param[in] str A const reference to a std::string to store in the buffer.
- /// @return Returns the offset in the buffer where the string starts.
- Offset<String> CreateSharedString(const std::string &str) {
- return CreateSharedString(str.c_str(), str.length());
- }
-#endif
-
- /// @brief Store a string in the buffer, which can contain any binary data.
- /// If a string with these exact contents has already been serialized before,
- /// instead simply returns the offset of the existing string. This uses a map
- /// stored on the heap, but only stores the numerical offsets.
- /// @param[in] str A const pointer to a `String` struct to add to the buffer.
- /// @return Returns the offset in the buffer where the string starts
- Offset<String> CreateSharedString(const String *str) {
- return CreateSharedString(str->c_str(), str->size());
- }
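- // Deduplication sketch (added for illustration): both calls below return the
- // same offset, so the bytes of "common" are stored only once in the buffer.
- //
- //   auto s1 = fbb.CreateSharedString("common");
- //   auto s2 = fbb.CreateSharedString("common");
- //   // s1.o == s2.o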
-
- /// @cond FLATBUFFERS_INTERNAL
- uoffset_t EndVector(size_t len) {
- FLATBUFFERS_ASSERT(nested); // Hit if no corresponding StartVector.
- nested = false;
- return PushElement(static_cast<uoffset_t>(len));
- }
-
- void StartVector(size_t len, size_t elemsize) {
- NotNested();
- nested = true;
- PreAlign<uoffset_t>(len * elemsize);
- PreAlign(len * elemsize, elemsize); // Just in case elemsize > uoffset_t.
- }
-
- // Call this right before StartVector/CreateVector if you want to force the
- // alignment to be something different than what the element size would
- // normally dictate.
- // This is useful when storing a nested_flatbuffer in a vector of bytes,
- // or when storing SIMD floats, etc.
- void ForceVectorAlignment(size_t len, size_t elemsize, size_t alignment) {
- FLATBUFFERS_ASSERT(VerifyAlignmentRequirements(alignment));
- PreAlign(len * elemsize, alignment);
- }
-
- // Similar to ForceVectorAlignment but for String fields.
- void ForceStringAlignment(size_t len, size_t alignment) {
- FLATBUFFERS_ASSERT(VerifyAlignmentRequirements(alignment));
- PreAlign((len + 1) * sizeof(char), alignment);
- }
-
- /// @endcond
-
- /// @brief Serialize an array into a FlatBuffer `vector`.
- /// @tparam T The data type of the array elements.
- /// @param[in] v A pointer to the array of type `T` to serialize into the
- /// buffer as a `vector`.
- /// @param[in] len The number of elements to serialize.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T> Offset<Vector<T>> CreateVector(const T *v, size_t len) {
- // If this assert hits, you're specifying a template argument that is
- // causing the wrong overload to be selected; remove it.
- AssertScalarT<T>();
- StartVector(len, sizeof(T));
- if (len == 0) { return Offset<Vector<T>>(EndVector(len)); }
- // clang-format off
- #if FLATBUFFERS_LITTLEENDIAN
- PushBytes(reinterpret_cast<const uint8_t *>(v), len * sizeof(T));
- #else
- if (sizeof(T) == 1) {
- PushBytes(reinterpret_cast<const uint8_t *>(v), len);
- } else {
- for (auto i = len; i > 0; ) {
- PushElement(v[--i]);
- }
- }
- #endif
- // clang-format on
- return Offset<Vector<T>>(EndVector(len));
- }
-
- template<typename T>
- Offset<Vector<Offset<T>>> CreateVector(const Offset<T> *v, size_t len) {
- StartVector(len, sizeof(Offset<T>));
- for (auto i = len; i > 0;) { PushElement(v[--i]); }
- return Offset<Vector<Offset<T>>>(EndVector(len));
- }
-
- /// @brief Serialize a `std::vector` into a FlatBuffer `vector`.
- /// @tparam T The data type of the `std::vector` elements.
- /// @param v A const reference to the `std::vector` to serialize into the
- /// buffer as a `vector`.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T> Offset<Vector<T>> CreateVector(const std::vector<T> &v) {
- return CreateVector(data(v), v.size());
- }
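- // Illustrative scalar-vector usage (added, not in the original header):
- //
- //   std::vector<int16_t> inv{1, 2, 3};
- //   auto inv_off = fbb.CreateVector(inv);      // copies (endian-swapping if needed)
- //   int32_t raw[4] = {0, 1, 2, 3};
- //   auto raw_off = fbb.CreateVector(raw, 4);   // pointer + length form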
-
- // vector<bool> may be implemented using a bit-set, so we can't access it as
- // an array. Instead, read elements manually.
- // Background: https://isocpp.org/blog/2012/11/on-vectorbool
- Offset<Vector<uint8_t>> CreateVector(const std::vector<bool> &v) {
- StartVector(v.size(), sizeof(uint8_t));
- for (auto i = v.size(); i > 0;) {
- PushElement(static_cast<uint8_t>(v[--i]));
- }
- return Offset<Vector<uint8_t>>(EndVector(v.size()));
- }
-
- // clang-format off
- #ifndef FLATBUFFERS_CPP98_STL
- /// @brief Serialize values returned by a function into a FlatBuffer `vector`.
- /// This is a convenience function that takes care of iteration for you.
- /// @tparam T The data type of the `std::vector` elements.
- /// @param f A function that takes the current iteration 0..vector_size-1 and
- /// returns any type that you can construct a FlatBuffers vector out of.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T> Offset<Vector<T>> CreateVector(size_t vector_size,
- const std::function<T (size_t i)> &f) {
- FLATBUFFERS_ASSERT(FLATBUFFERS_GENERAL_HEAP_ALLOC_OK);
- std::vector<T> elems(vector_size);
- for (size_t i = 0; i < vector_size; i++) elems[i] = f(i);
- return CreateVector(elems);
- }
- #endif // FLATBUFFERS_CPP98_STL
- // clang-format on
-
- /// @brief Serialize values returned by a function into a FlatBuffer `vector`.
- /// This is a convenience function that takes care of iteration for you. This
- /// uses a vector stored on the heap to store the intermediate results of the
- /// iteration.
- /// @tparam T The data type of the `std::vector` elements.
- /// @param f A function that takes the current iteration 0..vector_size-1,
- /// and the state parameter returning any type that you can construct a
- /// FlatBuffers vector out of.
- /// @param state State passed to f.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T, typename F, typename S>
- Offset<Vector<T>> CreateVector(size_t vector_size, F f, S *state) {
- FLATBUFFERS_ASSERT(FLATBUFFERS_GENERAL_HEAP_ALLOC_OK);
- std::vector<T> elems(vector_size);
- for (size_t i = 0; i < vector_size; i++) elems[i] = f(i, state);
- return CreateVector(elems);
- }
-
- /// @brief Serialize a `std::vector<std::string>` into a FlatBuffer `vector`.
- /// This is a convenience function for a common case.
- /// @param v A const reference to the `std::vector` to serialize into the
- /// buffer as a `vector`.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- Offset<Vector<Offset<String>>> CreateVectorOfStrings(
- const std::vector<std::string> &v) {
- return CreateVectorOfStrings(v.cbegin(), v.cend());
- }
-
- /// @brief Serialize a collection of Strings into a FlatBuffer `vector`.
- /// This is a convenience function for a common case.
- /// @param begin The beginning iterator of the collection
- /// @param end The ending iterator of the collection
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<class It>
- Offset<Vector<Offset<String>>> CreateVectorOfStrings(It begin, It end) {
- auto size = std::distance(begin, end);
- auto scratch_buffer_usage = size * sizeof(Offset<String>);
- // If there is not enough space to store the offsets, there definitely won't
- // be enough space to store all the strings. So ensuring space for the
- // scratch region is OK, for if it fails, it would have failed later.
- buf_.ensure_space(scratch_buffer_usage);
- for (auto it = begin; it != end; ++it) {
- buf_.scratch_push_small(CreateString(*it));
- }
- StartVector(size, sizeof(Offset<String>));
- for (auto i = 1; i <= size; i++) {
- // Note we re-evaluate the buf location each iteration to account for any
- // underlying buffer resizing that may occur.
- PushElement(*reinterpret_cast<Offset<String> *>(
- buf_.scratch_end() - i * sizeof(Offset<String>)));
- }
- buf_.scratch_pop(scratch_buffer_usage);
- return Offset<Vector<Offset<String>>>(EndVector(size));
- }
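- // Illustrative usage (added, not in the original header):
- //
- //   std::vector<std::string> names{"axe", "bow"};
- //   auto names_off = fbb.CreateVectorOfStrings(names);
- //   // Equivalent: fbb.CreateVectorOfStrings(names.begin(), names.end());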
-
- /// @brief Serialize an array of structs into a FlatBuffer `vector`.
- /// @tparam T The data type of the struct array elements.
- /// @param[in] v A pointer to the array of type `T` to serialize into the
- /// buffer as a `vector`.
- /// @param[in] len The number of elements to serialize.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T>
- Offset<Vector<const T *>> CreateVectorOfStructs(const T *v, size_t len) {
- StartVector(len * sizeof(T) / AlignOf<T>(), AlignOf<T>());
- PushBytes(reinterpret_cast<const uint8_t *>(v), sizeof(T) * len);
- return Offset<Vector<const T *>>(EndVector(len));
- }
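- // Illustrative usage (added; `Vec3` stands in for a struct type from your
- // generated code, stored inline with no per-element offsets):
- //
- //   Vec3 path[2] = { Vec3(1, 2, 3), Vec3(4, 5, 6) };
- //   auto path_off = fbb.CreateVectorOfStructs(path, 2);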
-
- /// @brief Serialize an array of native structs into a FlatBuffer `vector`.
- /// @tparam T The data type of the struct array elements.
- /// @tparam S The data type of the native struct array elements.
- /// @param[in] v A pointer to the array of type `S` to serialize into the
- /// buffer as a `vector`.
- /// @param[in] len The number of elements to serialize.
- /// @param[in] pack_func Pointer to a function to convert the native struct
- /// to the FlatBuffer struct.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T, typename S>
- Offset<Vector<const T *>> CreateVectorOfNativeStructs(
- const S *v, size_t len, T((*const pack_func)(const S &))) {
- FLATBUFFERS_ASSERT(pack_func);
- auto structs = StartVectorOfStructs<T>(len);
- for (size_t i = 0; i < len; i++) { structs[i] = pack_func(v[i]); }
- return EndVectorOfStructs<T>(len);
- }
-
- /// @brief Serialize an array of native structs into a FlatBuffer `vector`.
- /// @tparam T The data type of the struct array elements.
- /// @tparam S The data type of the native struct array elements.
- /// @param[in] v A pointer to the array of type `S` to serialize into the
- /// buffer as a `vector`.
- /// @param[in] len The number of elements to serialize.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T, typename S>
- Offset<Vector<const T *>> CreateVectorOfNativeStructs(const S *v,
- size_t len) {
- extern T Pack(const S &);
- return CreateVectorOfNativeStructs(v, len, Pack);
- }
-
- // clang-format off
- #ifndef FLATBUFFERS_CPP98_STL
- /// @brief Serialize an array of structs into a FlatBuffer `vector`.
- /// @tparam T The data type of the struct array elements.
- /// @param[in] filler A function that takes the current iteration 0..vector_size-1
- /// and a pointer to the struct that must be filled.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- /// This is mostly useful when flatbuffers are generated with mutation
- /// accessors.
- template<typename T> Offset<Vector<const T *>> CreateVectorOfStructs(
- size_t vector_size, const std::function<void(size_t i, T *)> &filler) {
- T* structs = StartVectorOfStructs<T>(vector_size);
- for (size_t i = 0; i < vector_size; i++) {
- filler(i, structs);
- structs++;
- }
- return EndVectorOfStructs<T>(vector_size);
- }
- #endif
- // clang-format on
-
- /// @brief Serialize an array of structs into a FlatBuffer `vector`.
- /// @tparam T The data type of the struct array elements.
- /// @param[in] f A function that takes the current iteration 0..vector_size-1,
- /// a pointer to the struct that must be filled and the state argument.
- /// @param[in] state Arbitrary state to pass to f.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- /// This is mostly useful when flatbuffers are generated with mutation
- /// accessors.
- template<typename T, typename F, typename S>
- Offset<Vector<const T *>> CreateVectorOfStructs(size_t vector_size, F f,
- S *state) {
- T *structs = StartVectorOfStructs<T>(vector_size);
- for (size_t i = 0; i < vector_size; i++) {
- f(i, structs, state);
- structs++;
- }
- return EndVectorOfStructs<T>(vector_size);
- }
-
- /// @brief Serialize a `std::vector` of structs into a FlatBuffer `vector`.
- /// @tparam T The data type of the `std::vector` struct elements.
- /// @param[in] v A const reference to the `std::vector` of structs to
- /// serialize into the buffer as a `vector`.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T, typename Alloc>
- Offset<Vector<const T *>> CreateVectorOfStructs(
- const std::vector<T, Alloc> &v) {
- return CreateVectorOfStructs(data(v), v.size());
- }
-
- /// @brief Serialize a `std::vector` of native structs into a FlatBuffer
- /// `vector`.
- /// @tparam T The data type of the `std::vector` struct elements.
- /// @tparam S The data type of the `std::vector` native struct elements.
- /// @param[in] v A const reference to the `std::vector` of structs to
- /// serialize into the buffer as a `vector`.
- /// @param[in] pack_func Pointer to a function to convert the native struct
- /// to the FlatBuffer struct.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T, typename S>
- Offset<Vector<const T *>> CreateVectorOfNativeStructs(
- const std::vector<S> &v, T((*const pack_func)(const S &))) {
- return CreateVectorOfNativeStructs<T, S>(data(v), v.size(), pack_func);
- }
-
- /// @brief Serialize a `std::vector` of native structs into a FlatBuffer
- /// `vector`.
- /// @tparam T The data type of the `std::vector` struct elements.
- /// @tparam S The data type of the `std::vector` native struct elements.
- /// @param[in] v A const reference to the `std::vector` of structs to
- /// serialize into the buffer as a `vector`.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T, typename S>
- Offset<Vector<const T *>> CreateVectorOfNativeStructs(
- const std::vector<S> &v) {
- return CreateVectorOfNativeStructs<T, S>(data(v), v.size());
- }
-
- /// @cond FLATBUFFERS_INTERNAL
- template<typename T> struct StructKeyComparator {
- bool operator()(const T &a, const T &b) const {
- return a.KeyCompareLessThan(&b);
- }
- };
- /// @endcond
-
- /// @brief Serialize a `std::vector` of structs into a FlatBuffer `vector`
- /// in sorted order.
- /// @tparam T The data type of the `std::vector` struct elements.
- /// @param[in] v A const reference to the `std::vector` of structs to
- /// serialize into the buffer as a `vector`.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T>
- Offset<Vector<const T *>> CreateVectorOfSortedStructs(std::vector<T> *v) {
- return CreateVectorOfSortedStructs(data(*v), v->size());
- }
-
- /// @brief Serialize a `std::vector` of native structs into a FlatBuffer
- /// `vector` in sorted order.
- /// @tparam T The data type of the `std::vector` struct elements.
- /// @tparam S The data type of the `std::vector` native struct elements.
- /// @param[in] v A const reference to the `std::vector` of structs to
- /// serialize into the buffer as a `vector`.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T, typename S>
- Offset<Vector<const T *>> CreateVectorOfSortedNativeStructs(
- std::vector<S> *v) {
- return CreateVectorOfSortedNativeStructs<T, S>(data(*v), v->size());
- }
-
- /// @brief Serialize an array of structs into a FlatBuffer `vector` in sorted
- /// order.
- /// @tparam T The data type of the struct array elements.
- /// @param[in] v A pointer to the array of type `T` to serialize into the
- /// buffer as a `vector`.
- /// @param[in] len The number of elements to serialize.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T>
- Offset<Vector<const T *>> CreateVectorOfSortedStructs(T *v, size_t len) {
- std::sort(v, v + len, StructKeyComparator<T>());
- return CreateVectorOfStructs(v, len);
- }
-
- /// @brief Serialize an array of native structs into a FlatBuffer `vector` in
- /// sorted order.
- /// @tparam T The data type of the struct array elements.
- /// @tparam S The data type of the native struct array elements.
- /// @param[in] v A pointer to the array of type `S` to serialize into the
- /// buffer as a `vector`.
- /// @param[in] len The number of elements to serialize.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T, typename S>
- Offset<Vector<const T *>> CreateVectorOfSortedNativeStructs(S *v,
- size_t len) {
- extern T Pack(const S &);
- auto structs = StartVectorOfStructs<T>(len);
- for (size_t i = 0; i < len; i++) { structs[i] = Pack(v[i]); }
- std::sort(structs, structs + len, StructKeyComparator<T>());
- return EndVectorOfStructs<T>(len);
- }
-
- /// @cond FLATBUFFERS_INTERNAL
- template<typename T> struct TableKeyComparator {
- TableKeyComparator(vector_downward &buf) : buf_(buf) {}
- TableKeyComparator(const TableKeyComparator &other) : buf_(other.buf_) {}
- bool operator()(const Offset<T> &a, const Offset<T> &b) const {
- auto table_a = reinterpret_cast<T *>(buf_.data_at(a.o));
- auto table_b = reinterpret_cast<T *>(buf_.data_at(b.o));
- return table_a->KeyCompareLessThan(table_b);
- }
- vector_downward &buf_;
-
- private:
- FLATBUFFERS_DELETE_FUNC(
- TableKeyComparator &operator=(const TableKeyComparator &other));
- };
- /// @endcond
-
- /// @brief Serialize an array of `table` offsets as a `vector` in the buffer
- /// in sorted order.
- /// @tparam T The data type that the offset refers to.
- /// @param[in] v An array of type `Offset<T>` that contains the `table`
- /// offsets to store in the buffer in sorted order.
- /// @param[in] len The number of elements to store in the `vector`.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T>
- Offset<Vector<Offset<T>>> CreateVectorOfSortedTables(Offset<T> *v,
- size_t len) {
- std::sort(v, v + len, TableKeyComparator<T>(buf_));
- return CreateVector(v, len);
- }
-
- /// @brief Serialize an array of `table` offsets as a `vector` in the buffer
- /// in sorted order.
- /// @tparam T The data type that the offset refers to.
- /// @param[in] v An array of type `Offset<T>` that contains the `table`
- /// offsets to store in the buffer in sorted order.
- /// @return Returns a typed `Offset` into the serialized data indicating
- /// where the vector is stored.
- template<typename T>
- Offset<Vector<Offset<T>>> CreateVectorOfSortedTables(
- std::vector<Offset<T>> *v) {
- return CreateVectorOfSortedTables(data(*v), v->size());
- }
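- // Illustrative usage (added; `Monster` is an assumed generated table with a
- // key field, which is what makes binary search via LookupByKey possible):
- //
- //   std::vector<flatbuffers::Offset<Monster>> monsters;  // filled via CreateMonster(...)
- //   auto sorted = fbb.CreateVectorOfSortedTables(&monsters);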
-
- /// @brief Specialized version of `CreateVector` for non-copying use cases.
- /// Write the data any time later to the returned buffer pointer `buf`.
- /// @param[in] len The number of elements to store in the `vector`.
- /// @param[in] elemsize The size of each element in the `vector`.
- /// @param[out] buf A pointer to a `uint8_t` pointer that can be
- /// written to at a later time to serialize the data into a `vector`
- /// in the buffer.
- uoffset_t CreateUninitializedVector(size_t len, size_t elemsize,
- uint8_t **buf) {
- NotNested();
- StartVector(len, elemsize);
- buf_.make_space(len * elemsize);
- auto vec_start = GetSize();
- auto vec_end = EndVector(len);
- *buf = buf_.data_at(vec_start);
- return vec_end;
- }
-
- /// @brief Specialized version of `CreateVector` for non-copying use cases.
- /// Write the data any time later to the returned buffer pointer `buf`.
- /// @tparam T The data type of the data that will be stored in the buffer
- /// as a `vector`.
- /// @param[in] len The number of elements to store in the `vector`.
- /// @param[out] buf A pointer to a pointer of type `T` that can be
- /// written to at a later time to serialize the data into a `vector`
- /// in the buffer.
- template<typename T>
- Offset<Vector<T>> CreateUninitializedVector(size_t len, T **buf) {
- AssertScalarT<T>();
- return CreateUninitializedVector(len, sizeof(T),
- reinterpret_cast<uint8_t **>(buf));
- }
-
- template<typename T>
- Offset<Vector<const T *>> CreateUninitializedVectorOfStructs(size_t len,
- T **buf) {
- return CreateUninitializedVector(len, sizeof(T),
- reinterpret_cast<uint8_t **>(buf));
- }
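- // Two-phase write sketch (added, not part of the original header; `src` is a
- // hypothetical existing byte buffer):
- //
- //   uint8_t *dest = nullptr;
- //   auto payload = fbb.CreateUninitializedVector(1024, &dest);
- //   memcpy(dest, src, 1024);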
-
- // @brief Create a vector of scalar type T given as input a vector of scalar
- // type U, useful with e.g. pre "enum class" enums, or any existing scalar
- // data of the wrong type.
- template<typename T, typename U>
- Offset<Vector<T>> CreateVectorScalarCast(const U *v, size_t len) {
- AssertScalarT<T>();
- AssertScalarT<U>();
- StartVector(len, sizeof(T));
- for (auto i = len; i > 0;) { PushElement(static_cast<T>(v[--i])); }
- return Offset<Vector<T>>(EndVector(len));
- }
-
- /// @brief Write a struct by itself, typically to be part of a union.
- template<typename T> Offset<const T *> CreateStruct(const T &structobj) {
- NotNested();
- Align(AlignOf<T>());
- buf_.push_small(structobj);
- return Offset<const T *>(GetSize());
- }
-
- /// @brief The length of a FlatBuffer file header.
- static const size_t kFileIdentifierLength = 4;
-
- /// @brief Finish serializing a buffer by writing the root offset.
- /// @param[in] file_identifier If a `file_identifier` is given, the buffer
- /// will be prefixed with a standard FlatBuffers file header.
- template<typename T>
- void Finish(Offset<T> root, const char *file_identifier = nullptr) {
- Finish(root.o, file_identifier, false);
- }
-
- /// @brief Finish a buffer with a 32 bit size field prefixed (size of the
- /// buffer following the size field). These buffers are NOT compatible
- /// with standard buffers created by Finish, i.e. you can't call GetRoot
- /// on them, you have to use GetSizePrefixedRoot instead.
- /// All >32 bit quantities in this buffer will be aligned when the whole
- /// size-prefixed buffer is aligned.
- /// These kinds of buffers are useful for creating a stream of FlatBuffers.
- template<typename T>
- void FinishSizePrefixed(Offset<T> root,
- const char *file_identifier = nullptr) {
- Finish(root.o, file_identifier, true);
- }
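- // Illustrative finishing step (added; "MONS" is an assumed 4-character file
- // identifier and `orc` an Offset<Monster> from generated code):
- //
- //   fbb.Finish(orc, "MONS");              // readable via GetRoot / GetBufferIdentifier
- //   // or, for a length-framed stream of buffers:
- //   fbb.FinishSizePrefixed(orc, "MONS");  // readable via GetSizePrefixedRoot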
-
- void SwapBufAllocator(FlatBufferBuilder &other) {
- buf_.swap_allocator(other.buf_);
- }
-
- protected:
- // You shouldn't really be copying instances of this class.
- FlatBufferBuilder(const FlatBufferBuilder &);
- FlatBufferBuilder &operator=(const FlatBufferBuilder &);
-
- void Finish(uoffset_t root, const char *file_identifier, bool size_prefix) {
- NotNested();
- buf_.clear_scratch();
- // This will cause the whole buffer to be aligned.
- PreAlign((size_prefix ? sizeof(uoffset_t) : 0) + sizeof(uoffset_t) +
- (file_identifier ? kFileIdentifierLength : 0),
- minalign_);
- if (file_identifier) {
- FLATBUFFERS_ASSERT(strlen(file_identifier) == kFileIdentifierLength);
- PushBytes(reinterpret_cast<const uint8_t *>(file_identifier),
- kFileIdentifierLength);
- }
- PushElement(ReferTo(root)); // Location of root.
- if (size_prefix) { PushElement(GetSize()); }
- finished = true;
- }
-
- struct FieldLoc {
- uoffset_t off;
- voffset_t id;
- };
-
- vector_downward buf_;
-
- // Accumulating offsets of table members while it is being built.
- // We store these in the scratch pad of buf_, after the vtable offsets.
- uoffset_t num_field_loc;
- // Track how much of the vtable is in use, so we can output the most compact
- // possible vtable.
- voffset_t max_voffset_;
-
- // Ensure objects are not nested.
- bool nested;
-
- // Ensure the buffer is finished before it is being accessed.
- bool finished;
-
- size_t minalign_;
-
- bool force_defaults_; // Serialize values equal to their defaults anyway.
-
- bool dedup_vtables_;
-
- struct StringOffsetCompare {
- StringOffsetCompare(const vector_downward &buf) : buf_(&buf) {}
- bool operator()(const Offset<String> &a, const Offset<String> &b) const {
- auto stra = reinterpret_cast<const String *>(buf_->data_at(a.o));
- auto strb = reinterpret_cast<const String *>(buf_->data_at(b.o));
- return StringLessThan(stra->data(), stra->size(), strb->data(),
- strb->size());
- }
- const vector_downward *buf_;
- };
-
- // For use with CreateSharedString. Instantiated on first use only.
- typedef std::set<Offset<String>, StringOffsetCompare> StringOffsetMap;
- StringOffsetMap *string_pool;
-
- private:
- // Allocates space for a vector of structures.
- // Must be completed with EndVectorOfStructs().
- template<typename T> T *StartVectorOfStructs(size_t vector_size) {
- StartVector(vector_size * sizeof(T) / AlignOf<T>(), AlignOf<T>());
- return reinterpret_cast<T *>(buf_.make_space(vector_size * sizeof(T)));
- }
-
- // End the vector of structures in the flatbuffers.
- // Vector should have previously been started with StartVectorOfStructs().
- template<typename T>
- Offset<Vector<const T *>> EndVectorOfStructs(size_t vector_size) {
- return Offset<Vector<const T *>>(EndVector(vector_size));
- }
-};
-/// @}
-
-/// @cond FLATBUFFERS_INTERNAL
-// Helpers to get a typed pointer to the root object contained in the buffer.
-template<typename T> T *GetMutableRoot(void *buf) {
- EndianCheck();
- return reinterpret_cast<T *>(
- reinterpret_cast<uint8_t *>(buf) +
- EndianScalar(*reinterpret_cast<uoffset_t *>(buf)));
-}
-
-template<typename T> const T *GetRoot(const void *buf) {
- return GetMutableRoot<T>(const_cast<void *>(buf));
-}
-
-template<typename T> const T *GetSizePrefixedRoot(const void *buf) {
- return GetRoot<T>(reinterpret_cast<const uint8_t *>(buf) + sizeof(uoffset_t));
-}
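-// Illustrative read side (added; `Monster` is an assumed generated type):
-//
-//   const void *buf = fbb.GetBufferPointer();            // after Finish()
-//   auto monster = flatbuffers::GetRoot<Monster>(buf);   // const Monster *
-//   // For FinishSizePrefixed buffers use GetSizePrefixedRoot<Monster>(buf).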
-
-/// Helpers to get a typed pointer to objects that are currently being built.
-/// @warning Creating new objects will lead to reallocations and invalidate
-/// the pointer!
-template<typename T>
-T *GetMutableTemporaryPointer(FlatBufferBuilder &fbb, Offset<T> offset) {
- return reinterpret_cast<T *>(fbb.GetCurrentBufferPointer() + fbb.GetSize() -
- offset.o);
-}
-
-template<typename T>
-const T *GetTemporaryPointer(FlatBufferBuilder &fbb, Offset<T> offset) {
- return GetMutableTemporaryPointer<T>(fbb, offset);
-}
-
-/// @brief Get a pointer to the file_identifier section of the buffer.
-/// @return Returns a const char pointer to the start of the file_identifier
-/// characters in the buffer. The returned char * has length
-/// 'flatbuffers::FlatBufferBuilder::kFileIdentifierLength'.
-/// This function is UNDEFINED for FlatBuffers whose schema does not include
-/// a file_identifier (likely points at padding or the start of the root
-/// vtable).
-inline const char *GetBufferIdentifier(const void *buf,
- bool size_prefixed = false) {
- return reinterpret_cast<const char *>(buf) +
- ((size_prefixed) ? 2 * sizeof(uoffset_t) : sizeof(uoffset_t));
-}
-
-// Helper to see if the identifier in a buffer has the expected value.
-inline bool BufferHasIdentifier(const void *buf, const char *identifier,
- bool size_prefixed = false) {
- return strncmp(GetBufferIdentifier(buf, size_prefixed), identifier,
- FlatBufferBuilder::kFileIdentifierLength) == 0;
-}
-
-// Helper class to verify the integrity of a FlatBuffer
-class Verifier FLATBUFFERS_FINAL_CLASS {
- public:
- Verifier(const uint8_t *buf, size_t buf_len, uoffset_t _max_depth = 64,
- uoffset_t _max_tables = 1000000, bool _check_alignment = true)
- : buf_(buf),
- size_(buf_len),
- depth_(0),
- max_depth_(_max_depth),
- num_tables_(0),
- max_tables_(_max_tables),
- upper_bound_(0),
- check_alignment_(_check_alignment) {
- FLATBUFFERS_ASSERT(size_ < FLATBUFFERS_MAX_BUFFER_SIZE);
- }
-
- // Central location where any verification failures register.
- bool Check(bool ok) const {
- // clang-format off
- #ifdef FLATBUFFERS_DEBUG_VERIFICATION_FAILURE
- FLATBUFFERS_ASSERT(ok);
- #endif
- #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
- if (!ok)
- upper_bound_ = 0;
- #endif
- // clang-format on
- return ok;
- }
-
- // Verify any range within the buffer.
- bool Verify(size_t elem, size_t elem_len) const {
- // clang-format off
- #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
- auto upper_bound = elem + elem_len;
- if (upper_bound_ < upper_bound)
- upper_bound_ = upper_bound;
- #endif
- // clang-format on
- return Check(elem_len < size_ && elem <= size_ - elem_len);
- }
-
- template<typename T> bool VerifyAlignment(size_t elem) const {
- return Check((elem & (sizeof(T) - 1)) == 0 || !check_alignment_);
- }
-
- // Verify a range indicated by sizeof(T).
- template<typename T> bool Verify(size_t elem) const {
- return VerifyAlignment<T>(elem) && Verify(elem, sizeof(T));
- }
-
- bool VerifyFromPointer(const uint8_t *p, size_t len) {
- auto o = static_cast<size_t>(p - buf_);
- return Verify(o, len);
- }
-
- // Verify relative to a known-good base pointer.
- bool Verify(const uint8_t *base, voffset_t elem_off, size_t elem_len) const {
- return Verify(static_cast<size_t>(base - buf_) + elem_off, elem_len);
- }
-
- template<typename T>
- bool Verify(const uint8_t *base, voffset_t elem_off) const {
- return Verify(static_cast<size_t>(base - buf_) + elem_off, sizeof(T));
- }
-
- // Verify a pointer (may be NULL) of a table type.
- template<typename T> bool VerifyTable(const T *table) {
- return !table || table->Verify(*this);
- }
-
- // Verify a pointer (may be NULL) of any vector type.
- template<typename T> bool VerifyVector(const Vector<T> *vec) const {
- return !vec || VerifyVectorOrString(reinterpret_cast<const uint8_t *>(vec),
- sizeof(T));
- }
-
- // Verify a pointer (may be NULL) of a vector to struct.
- template<typename T> bool VerifyVector(const Vector<const T *> *vec) const {
- return VerifyVector(reinterpret_cast<const Vector<T> *>(vec));
- }
-
- // Verify a pointer (may be NULL) to string.
- bool VerifyString(const String *str) const {
- size_t end;
- return !str || (VerifyVectorOrString(reinterpret_cast<const uint8_t *>(str),
- 1, &end) &&
- Verify(end, 1) && // Must have terminator
- Check(buf_[end] == '\0')); // Terminating byte must be 0.
- }
-
- // Common code between vectors and strings.
- bool VerifyVectorOrString(const uint8_t *vec, size_t elem_size,
- size_t *end = nullptr) const {
- auto veco = static_cast<size_t>(vec - buf_);
- // Check we can read the size field.
- if (!Verify<uoffset_t>(veco)) return false;
- // Check the whole array. If this is a string, the byte past the array
- // must be 0.
- auto size = ReadScalar<uoffset_t>(vec);
- auto max_elems = FLATBUFFERS_MAX_BUFFER_SIZE / elem_size;
- if (!Check(size < max_elems))
- return false; // Protect against byte_size overflowing.
- auto byte_size = sizeof(size) + elem_size * size;
- if (end) *end = veco + byte_size;
- return Verify(veco, byte_size);
- }
-
- // Special case for string contents, after the above has been called.
- bool VerifyVectorOfStrings(const Vector<Offset<String>> *vec) const {
- if (vec) {
- for (uoffset_t i = 0; i < vec->size(); i++) {
- if (!VerifyString(vec->Get(i))) return false;
- }
- }
- return true;
- }
-
- // Special case for table contents, after the above has been called.
- template<typename T> bool VerifyVectorOfTables(const Vector<Offset<T>> *vec) {
- if (vec) {
- for (uoffset_t i = 0; i < vec->size(); i++) {
- if (!vec->Get(i)->Verify(*this)) return false;
- }
- }
- return true;
- }
-
- __supress_ubsan__("unsigned-integer-overflow") bool VerifyTableStart(
- const uint8_t *table) {
- // Check the vtable offset.
- auto tableo = static_cast<size_t>(table - buf_);
- if (!Verify<soffset_t>(tableo)) return false;
- // This offset may be signed, but doing the subtraction unsigned always
- // gives the result we want.
- auto vtableo = tableo - static_cast<size_t>(ReadScalar<soffset_t>(table));
- // Check the vtable size field, then check vtable fits in its entirety.
- return VerifyComplexity() && Verify<voffset_t>(vtableo) &&
- VerifyAlignment<voffset_t>(ReadScalar<voffset_t>(buf_ + vtableo)) &&
- Verify(vtableo, ReadScalar<voffset_t>(buf_ + vtableo));
- }
-
- template<typename T>
- bool VerifyBufferFromStart(const char *identifier, size_t start) {
- if (identifier && !Check((size_ >= 2 * sizeof(flatbuffers::uoffset_t) &&
- BufferHasIdentifier(buf_ + start, identifier)))) {
- return false;
- }
-
- // Call T::Verify, which must be in the generated code for this type.
- auto o = VerifyOffset(start);
- return o && reinterpret_cast<const T *>(buf_ + start + o)->Verify(*this)
- // clang-format off
- #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
- && GetComputedSize()
- #endif
- ;
- // clang-format on
- }
-
- // Verify this whole buffer, starting with root type T.
- template<typename T> bool VerifyBuffer() { return VerifyBuffer<T>(nullptr); }
-
- template<typename T> bool VerifyBuffer(const char *identifier) {
- return VerifyBufferFromStart<T>(identifier, 0);
- }
-
- template<typename T> bool VerifySizePrefixedBuffer(const char *identifier) {
- return Verify<uoffset_t>(0U) &&
- ReadScalar<uoffset_t>(buf_) == size_ - sizeof(uoffset_t) &&
- VerifyBufferFromStart<T>(identifier, sizeof(uoffset_t));
- }
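- // Illustrative verification of untrusted bytes (added; `Monster` is an
- // assumed generated type, `data`/`len` a received const uint8_t buffer):
- //
- //   flatbuffers::Verifier verifier(data, len);
- //   bool ok = verifier.VerifyBuffer<Monster>(nullptr);  // or pass "MONS"
- //   if (ok) { auto m = flatbuffers::GetRoot<Monster>(data); /* safe to read */ }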
-
- uoffset_t VerifyOffset(size_t start) const {
- if (!Verify<uoffset_t>(start)) return 0;
- auto o = ReadScalar<uoffset_t>(buf_ + start);
- // May not point to itself.
- if (!Check(o != 0)) return 0;
- // Can't wrap around / buffers are max 2GB.
- if (!Check(static_cast<soffset_t>(o) >= 0)) return 0;
- // Must be inside the buffer to create a pointer from it (pointer outside
- // buffer is UB).
- if (!Verify(start + o, 1)) return 0;
- return o;
- }
-
- uoffset_t VerifyOffset(const uint8_t *base, voffset_t start) const {
- return VerifyOffset(static_cast<size_t>(base - buf_) + start);
- }
-
- // Called at the start of a table to increase counters measuring data
- // structure depth and amount, and possibly bails out with false if
- // limits set by the constructor have been hit. Needs to be balanced
- // with EndTable().
- bool VerifyComplexity() {
- depth_++;
- num_tables_++;
- return Check(depth_ <= max_depth_ && num_tables_ <= max_tables_);
- }
-
- // Called at the end of a table to pop the depth count.
- bool EndTable() {
- depth_--;
- return true;
- }
-
- // Returns the message size in bytes
- size_t GetComputedSize() const {
- // clang-format off
- #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
- uintptr_t size = upper_bound_;
- // Align the size to uoffset_t
- size = (size - 1 + sizeof(uoffset_t)) & ~(sizeof(uoffset_t) - 1);
- return (size > size_) ? 0 : size;
- #else
- // Must turn on FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE for this to work.
- (void)upper_bound_;
- FLATBUFFERS_ASSERT(false);
- return 0;
- #endif
- // clang-format on
- }
-
- private:
- const uint8_t *buf_;
- size_t size_;
- uoffset_t depth_;
- uoffset_t max_depth_;
- uoffset_t num_tables_;
- uoffset_t max_tables_;
- mutable size_t upper_bound_;
- bool check_alignment_;
-};
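The Verifier members removed above (presumably relocated into the new verifier.h that the split-out headers below include) are normally driven through generated VerifyXxxBuffer helpers. A minimal sketch of direct use, assuming a hypothetical generated root type MyGame::Monster:

    #include "flatbuffers/flatbuffers.h"
    #include "monster_generated.h"  // hypothetical generated header

    bool IsValidMonster(const uint8_t *buf, size_t len) {
      // max_depth / max_tables feed the VerifyComplexity() limits above.
      flatbuffers::Verifier verifier(buf, len, /*max_depth=*/64,
                                     /*max_tables=*/1000000);
      // Roughly what a generated VerifyMonsterBuffer(verifier) would do;
      // passing an identifier string instead of nullptr also checks the
      // buffer's file_identifier.
      return verifier.VerifyBuffer<MyGame::Monster>(nullptr);
    }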
-
-// Convenient way to bundle a buffer and its length, to pass it around
-// typed by its root.
-// A BufferRef does not own its buffer.
-struct BufferRefBase {}; // for std::is_base_of
-template<typename T> struct BufferRef : BufferRefBase {
- BufferRef() : buf(nullptr), len(0), must_free(false) {}
- BufferRef(uint8_t *_buf, uoffset_t _len)
- : buf(_buf), len(_len), must_free(false) {}
-
- ~BufferRef() {
- if (must_free) free(buf);
- }
-
- const T *GetRoot() const { return flatbuffers::GetRoot<T>(buf); }
-
- bool Verify() {
- Verifier verifier(buf, len);
- return verifier.VerifyBuffer<T>(nullptr);
- }
-
- uint8_t *buf;
- uoffset_t len;
- bool must_free;
-};
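BufferRef above is a non-owning view that pairs a buffer with its root type. A short usage sketch, again with a hypothetical generated root type MyGame::Monster:

    #include "flatbuffers/flatbuffers.h"
    #include "monster_generated.h"  // hypothetical generated header

    const MyGame::Monster *RootIfValid(uint8_t *buf, flatbuffers::uoffset_t len) {
      flatbuffers::BufferRef<MyGame::Monster> ref(buf, len);  // does not own buf
      return ref.Verify() ? ref.GetRoot() : nullptr;
    }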
-
-// "structs" are flat structures that do not have an offset table, thus
-// always have all members present and do not support forwards/backwards
-// compatible extensions.
-
-class Struct FLATBUFFERS_FINAL_CLASS {
- public:
- template<typename T> T GetField(uoffset_t o) const {
- return ReadScalar<T>(&data_[o]);
- }
-
- template<typename T> T GetStruct(uoffset_t o) const {
- return reinterpret_cast<T>(&data_[o]);
- }
-
- const uint8_t *GetAddressOf(uoffset_t o) const { return &data_[o]; }
- uint8_t *GetAddressOf(uoffset_t o) { return &data_[o]; }
-
- private:
- // private constructor & copy constructor: you obtain instances of this
- // class by pointing to existing data only
- Struct();
- Struct(const Struct &);
- Struct &operator=(const Struct &);
-
- uint8_t data_[1];
-};
-
-// "tables" use an offset table (possibly shared) that allows fields to be
-// omitted and added at will, but uses an extra indirection to read.
-class Table {
- public:
- const uint8_t *GetVTable() const {
- return data_ - ReadScalar<soffset_t>(data_);
- }
-
- // This gets the field offset for any of the functions below it, or 0
- // if the field was not present.
- voffset_t GetOptionalFieldOffset(voffset_t field) const {
- // The vtable offset is always at the start.
- auto vtable = GetVTable();
- // The first element is the size of the vtable (fields + type id + itself).
- auto vtsize = ReadScalar<voffset_t>(vtable);
- // If the field we're accessing is outside the vtable, we're reading older
- // data, so it's the same as if the offset was 0 (not present).
- return field < vtsize ? ReadScalar<voffset_t>(vtable + field) : 0;
- }
-
- template<typename T> T GetField(voffset_t field, T defaultval) const {
- auto field_offset = GetOptionalFieldOffset(field);
- return field_offset ? ReadScalar<T>(data_ + field_offset) : defaultval;
- }
-
- template<typename P> P GetPointer(voffset_t field) {
- auto field_offset = GetOptionalFieldOffset(field);
- auto p = data_ + field_offset;
- return field_offset ? reinterpret_cast<P>(p + ReadScalar<uoffset_t>(p))
- : nullptr;
- }
- template<typename P> P GetPointer(voffset_t field) const {
- return const_cast<Table *>(this)->GetPointer<P>(field);
- }
-
- template<typename P> P GetStruct(voffset_t field) const {
- auto field_offset = GetOptionalFieldOffset(field);
- auto p = const_cast<uint8_t *>(data_ + field_offset);
- return field_offset ? reinterpret_cast<P>(p) : nullptr;
- }
-
- template<typename Raw, typename Face>
- flatbuffers::Optional<Face> GetOptional(voffset_t field) const {
- auto field_offset = GetOptionalFieldOffset(field);
- auto p = data_ + field_offset;
- return field_offset ? Optional<Face>(static_cast<Face>(ReadScalar<Raw>(p)))
- : Optional<Face>();
- }
-
- template<typename T> bool SetField(voffset_t field, T val, T def) {
- auto field_offset = GetOptionalFieldOffset(field);
- if (!field_offset) return IsTheSameAs(val, def);
- WriteScalar(data_ + field_offset, val);
- return true;
- }
- template<typename T> bool SetField(voffset_t field, T val) {
- auto field_offset = GetOptionalFieldOffset(field);
- if (!field_offset) return false;
- WriteScalar(data_ + field_offset, val);
- return true;
- }
-
- bool SetPointer(voffset_t field, const uint8_t *val) {
- auto field_offset = GetOptionalFieldOffset(field);
- if (!field_offset) return false;
- WriteScalar(data_ + field_offset,
- static_cast<uoffset_t>(val - (data_ + field_offset)));
- return true;
- }
-
- uint8_t *GetAddressOf(voffset_t field) {
- auto field_offset = GetOptionalFieldOffset(field);
- return field_offset ? data_ + field_offset : nullptr;
- }
- const uint8_t *GetAddressOf(voffset_t field) const {
- return const_cast<Table *>(this)->GetAddressOf(field);
- }
-
- bool CheckField(voffset_t field) const {
- return GetOptionalFieldOffset(field) != 0;
- }
-
- // Verify the vtable of this table.
- // Call this once per table, followed by VerifyField once per field.
- bool VerifyTableStart(Verifier &verifier) const {
- return verifier.VerifyTableStart(data_);
- }
-
- // Verify a particular field.
- template<typename T>
- bool VerifyField(const Verifier &verifier, voffset_t field) const {
- // Calling GetOptionalFieldOffset should be safe now thanks to
- // VerifyTable().
- auto field_offset = GetOptionalFieldOffset(field);
- // Check the actual field.
- return !field_offset || verifier.Verify<T>(data_, field_offset);
- }
-
- // VerifyField for required fields.
- template<typename T>
- bool VerifyFieldRequired(const Verifier &verifier, voffset_t field) const {
- auto field_offset = GetOptionalFieldOffset(field);
- return verifier.Check(field_offset != 0) &&
- verifier.Verify<T>(data_, field_offset);
- }
-
- // Versions for offsets.
- bool VerifyOffset(const Verifier &verifier, voffset_t field) const {
- auto field_offset = GetOptionalFieldOffset(field);
- return !field_offset || verifier.VerifyOffset(data_, field_offset);
- }
-
- bool VerifyOffsetRequired(const Verifier &verifier, voffset_t field) const {
- auto field_offset = GetOptionalFieldOffset(field);
- return verifier.Check(field_offset != 0) &&
- verifier.VerifyOffset(data_, field_offset);
- }
-
- private:
- // private constructor & copy constructor: you obtain instances of this
- // class by pointing to existing data only
- Table();
- Table(const Table &other);
- Table &operator=(const Table &);
-
- uint8_t data_[1];
-};
-
-// This specialization allows avoiding warnings like:
-// MSVC C4800: type: forcing value to bool 'true' or 'false'.
-template<>
-inline flatbuffers::Optional<bool> Table::GetOptional<uint8_t, bool>(
- voffset_t field) const {
- auto field_offset = GetOptionalFieldOffset(field);
- auto p = data_ + field_offset;
- return field_offset ? Optional<bool>(ReadScalar<uint8_t>(p) != 0)
- : Optional<bool>();
-}
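The vtable walk in GetOptionalFieldOffset and the GetOptional<uint8_t, bool> specialization above are what generated accessors reduce to. A rough sketch (not actual flatc output) for a hypothetical table `Weapon { damage: short = 100; sharp: bool = null; }`:

    #include "flatbuffers/flatbuffers.h"

    struct WeaponSketch FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
      // Field slots start at byte offset 4 in the vtable; the first two
      // voffset_t entries hold the vtable size and the table size.
      enum { VT_DAMAGE = 4, VT_SHARP = 6 };
      int16_t damage() const {
        // Falls back to the schema default when the vtable entry is 0.
        return GetField<int16_t>(VT_DAMAGE, 100);
      }
      flatbuffers::Optional<bool> sharp() const {
        // Optional scalars (`= null`) report absence instead of a default.
        return GetOptional<uint8_t, bool>(VT_SHARP);
      }
    };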
-
-template<typename T>
-void FlatBufferBuilder::Required(Offset<T> table, voffset_t field) {
- auto table_ptr = reinterpret_cast<const Table *>(buf_.data_at(table.o));
- bool ok = table_ptr->GetOptionalFieldOffset(field) != 0;
- // If this fails, the caller will show what field needs to be set.
- FLATBUFFERS_ASSERT(ok);
- (void)ok;
-}
/// @brief This can compute the start of a FlatBuffer from a root pointer, i.e.
/// it is the opposite transformation of GetRoot().
@@ -2822,7 +56,7 @@ inline const uint8_t *GetBufferStartFromRootPointer(const void *root) {
// file_identifier, and alignment padding) to see which points to the root.
// None of the other values can "impersonate" the root since they will either
// be 0 or four ASCII characters.
- static_assert(FlatBufferBuilder::kFileIdentifierLength == sizeof(uoffset_t),
+ static_assert(flatbuffers::kFileIdentifierLength == sizeof(uoffset_t),
"file_identifier is assumed to be the same size as uoffset_t");
for (auto possible_roots = FLATBUFFERS_MAX_ALIGNMENT / sizeof(uoffset_t) + 1;
possible_roots; possible_roots--) {
@@ -2858,16 +92,9 @@ struct NativeTable {};
/// if you wish. The resolver does the opposite lookup, for when the object
/// is being serialized again.
typedef uint64_t hash_value_t;
-// clang-format off
-#ifdef FLATBUFFERS_CPP98_STL
- typedef void (*resolver_function_t)(void **pointer_adr, hash_value_t hash);
- typedef hash_value_t (*rehasher_function_t)(void *pointer);
-#else
- typedef std::function<void (void **pointer_adr, hash_value_t hash)>
- resolver_function_t;
- typedef std::function<hash_value_t (void *pointer)> rehasher_function_t;
-#endif
-// clang-format on
+typedef std::function<void(void **pointer_adr, hash_value_t hash)>
+ resolver_function_t;
+typedef std::function<hash_value_t(void *pointer)> rehasher_function_t;
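These std::function typedefs are the hooks the object API's Pack/UnPack take for translating stored hash ids back and forth to live pointers. A small sketch of a resolver; the registry map is an assumption for illustration:

    #include <map>
    #include "flatbuffers/flatbuffers.h"

    // Hypothetical registry of live objects keyed by their stored hash.
    static std::map<flatbuffers::hash_value_t, void *> g_objects;

    inline flatbuffers::resolver_function_t MakeResolver() {
      return [](void **pointer_adr, flatbuffers::hash_value_t hash) {
        auto it = g_objects.find(hash);
        *pointer_adr = (it == g_objects.end()) ? nullptr : it->second;
      };
    }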
// Helper function to test if a field is present, using any of the field
// enums in the generated code.
@@ -2924,7 +151,7 @@ inline int LookupEnum(const char **names, const char *name) {
// Minimal reflection via code generation.
// Besides full-fat reflection (see reflection.h) and parsing/printing by
-// loading schemas (see idl.h), we can also have code generation for mimimal
+// loading schemas (see idl.h), we can also have code generation for minimal
// reflection data which allows pretty-printing and other uses without needing
// a schema or a parser.
// Generate code with --reflect-types (types only) or --reflect-names (names
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/flexbuffers.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/flexbuffers.h
index 049f752cc04..9662a60907f 100644
--- a/chromium/third_party/flatbuffers/src/include/flatbuffers/flexbuffers.h
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/flexbuffers.h
@@ -851,6 +851,7 @@ inline Reference Map::operator[](const char *key) const {
case 2: comp = KeyCompare<uint16_t>; break;
case 4: comp = KeyCompare<uint32_t>; break;
case 8: comp = KeyCompare<uint64_t>; break;
+ default: FLATBUFFERS_ASSERT(false); return Reference();
}
auto res = std::bsearch(key, keys.data_, keys.size(), keys.byte_width_, comp);
if (!res) return Reference(nullptr, 1, NullPackedType());
@@ -873,7 +874,7 @@ inline Reference GetRoot(const uint8_t *buffer, size_t size) {
}
inline Reference GetRoot(const std::vector<uint8_t> &buffer) {
- return GetRoot(flatbuffers::vector_data(buffer), buffer.size());
+ return GetRoot(buffer.data(), buffer.size());
}
// Flags that configure how the Builder behaves.
@@ -1069,7 +1070,16 @@ class Builder FLATBUFFERS_FINAL_CLASS {
return CreateBlob(data, len, 0, FBT_BLOB);
}
size_t Blob(const std::vector<uint8_t> &v) {
- return CreateBlob(flatbuffers::vector_data(v), v.size(), 0, FBT_BLOB);
+ return CreateBlob(v.data(), v.size(), 0, FBT_BLOB);
+ }
+
+ void Blob(const char *key, const void *data, size_t len) {
+ Key(key);
+ Blob(data, len);
+ }
+ void Blob(const char *key, const std::vector<uint8_t> &v) {
+ Key(key);
+ Blob(v);
}
// TODO(wvo): support all the FlexBuffer types (like flexbuffers::String),
@@ -1087,7 +1097,7 @@ class Builder FLATBUFFERS_FINAL_CLASS {
return stack_.size();
}
- // TODO(wvo): allow this to specify an aligment greater than the natural
+ // TODO(wvo): allow this to specify an alignment greater than the natural
// alignment.
size_t EndVector(size_t start, bool typed, bool fixed) {
auto vec = CreateVector(start, stack_.size() - start, 1, typed, fixed);
@@ -1122,27 +1132,24 @@ class Builder FLATBUFFERS_FINAL_CLASS {
// step automatically when applicable, and encourage people to write in
// sorted fashion.
// std::sort is typically already a lot faster on sorted data though.
- auto dict =
- reinterpret_cast<TwoValue *>(flatbuffers::vector_data(stack_) + start);
- std::sort(dict, dict + len,
- [&](const TwoValue &a, const TwoValue &b) -> bool {
- auto as = reinterpret_cast<const char *>(
- flatbuffers::vector_data(buf_) + a.key.u_);
- auto bs = reinterpret_cast<const char *>(
- flatbuffers::vector_data(buf_) + b.key.u_);
- auto comp = strcmp(as, bs);
- // We want to disallow duplicate keys, since this results in a
- // map where values cannot be found.
- // But we can't assert here (since we don't want to fail on
- // random JSON input) or have an error mechanism.
- // Instead, we set has_duplicate_keys_ in the builder to
- // signal this.
- // TODO: Have to check for pointer equality, as some sort
- // implementation apparently call this function with the same
- // element?? Why?
- if (!comp && &a != &b) has_duplicate_keys_ = true;
- return comp < 0;
- });
+ auto dict = reinterpret_cast<TwoValue *>(stack_.data() + start);
+ std::sort(
+ dict, dict + len, [&](const TwoValue &a, const TwoValue &b) -> bool {
+ auto as = reinterpret_cast<const char *>(buf_.data() + a.key.u_);
+ auto bs = reinterpret_cast<const char *>(buf_.data() + b.key.u_);
+ auto comp = strcmp(as, bs);
+ // We want to disallow duplicate keys, since this results in a
+ // map where values cannot be found.
+ // But we can't assert here (since we don't want to fail on
+ // random JSON input) or have an error mechanism.
+ // Instead, we set has_duplicate_keys_ in the builder to
+ // signal this.
+ // TODO: Have to check for pointer equality, as some sort
+ // implementations apparently call this function with the same
+ // element?? Why?
+ if (!comp && &a != &b) has_duplicate_keys_ = true;
+ return comp < 0;
+ });
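For reference, the flag set by the comparator above is exposed through the Builder's HasDuplicateKeys() accessor in upstream FlexBuffers (assumed to be present in this copy), so callers can reject such buffers after Finish():

    #include "flatbuffers/flexbuffers.h"

    inline bool BuiltWithDuplicateKeys() {
      flexbuffers::Builder fbb;
      fbb.Map([&]() {
        fbb.Int("k", 1);
        fbb.Int("k", 2);  // duplicate key; the value under it becomes unreachable
      });
      fbb.Finish();
      return fbb.HasDuplicateKeys();  // true for this buffer
    }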
// First create a vector out of all keys.
// TODO(wvo): if kBuilderFlagShareKeyVectors is true, see if we can share
// the first vector.
@@ -1196,7 +1203,7 @@ class Builder FLATBUFFERS_FINAL_CLASS {
Vector(elems, len);
}
template<typename T> void Vector(const std::vector<T> &vec) {
- Vector(flatbuffers::vector_data(vec), vec.size());
+ Vector(vec.data(), vec.size());
}
template<typename F> size_t TypedVector(F f) {
@@ -1398,12 +1405,10 @@ class Builder FLATBUFFERS_FINAL_CLASS {
template<typename T> static Type GetScalarType() {
static_assert(flatbuffers::is_scalar<T>::value, "Unrelated types");
- return flatbuffers::is_floating_point<T>::value
- ? FBT_FLOAT
- : flatbuffers::is_same<T, bool>::value
- ? FBT_BOOL
- : (flatbuffers::is_unsigned<T>::value ? FBT_UINT
- : FBT_INT);
+ return flatbuffers::is_floating_point<T>::value ? FBT_FLOAT
+ : flatbuffers::is_same<T, bool>::value
+ ? FBT_BOOL
+ : (flatbuffers::is_unsigned<T>::value ? FBT_UINT : FBT_INT);
}
public:
@@ -1598,10 +1603,8 @@ class Builder FLATBUFFERS_FINAL_CLASS {
struct KeyOffsetCompare {
explicit KeyOffsetCompare(const std::vector<uint8_t> &buf) : buf_(&buf) {}
bool operator()(size_t a, size_t b) const {
- auto stra =
- reinterpret_cast<const char *>(flatbuffers::vector_data(*buf_) + a);
- auto strb =
- reinterpret_cast<const char *>(flatbuffers::vector_data(*buf_) + b);
+ auto stra = reinterpret_cast<const char *>(buf_->data() + a);
+ auto strb = reinterpret_cast<const char *>(buf_->data() + b);
return strcmp(stra, strb) < 0;
}
const std::vector<uint8_t> *buf_;
@@ -1612,10 +1615,8 @@ class Builder FLATBUFFERS_FINAL_CLASS {
explicit StringOffsetCompare(const std::vector<uint8_t> &buf)
: buf_(&buf) {}
bool operator()(const StringOffset &a, const StringOffset &b) const {
- auto stra = reinterpret_cast<const char *>(
- flatbuffers::vector_data(*buf_) + a.first);
- auto strb = reinterpret_cast<const char *>(
- flatbuffers::vector_data(*buf_) + b.first);
+ auto stra = reinterpret_cast<const char *>(buf_->data() + a.first);
+ auto strb = reinterpret_cast<const char *>(buf_->data() + b.first);
return strncmp(stra, strb, (std::min)(a.second, b.second) + 1) < 0;
}
const std::vector<uint8_t> *buf_;
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/grpc.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/grpc.h
index b7935551c79..5d15bc51c0b 100644
--- a/chromium/third_party/flatbuffers/src/include/flatbuffers/grpc.h
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/grpc.h
@@ -22,6 +22,7 @@
#include "flatbuffers/flatbuffers.h"
#include "grpc/byte_buffer_reader.h"
#include "grpcpp/support/byte_buffer.h"
+#include "grpcpp/support/slice.h"
namespace flatbuffers {
namespace grpc {
@@ -32,33 +33,23 @@ namespace grpc {
// is refcounted and ownership is managed automatically.
template<class T> class Message {
public:
- Message() : slice_(grpc_empty_slice()) {}
+ Message() {}
- Message(grpc_slice slice, bool add_ref)
- : slice_(add_ref ? grpc_slice_ref(slice) : slice) {}
+ Message(::grpc::Slice slice) : slice_(slice) {}
Message &operator=(const Message &other) = delete;
- Message(Message &&other) : slice_(other.slice_) {
- other.slice_ = grpc_empty_slice();
- }
+ Message(Message &&other) = default;
Message(const Message &other) = delete;
- Message &operator=(Message &&other) {
- grpc_slice_unref(slice_);
- slice_ = other.slice_;
- other.slice_ = grpc_empty_slice();
- return *this;
- }
-
- ~Message() { grpc_slice_unref(slice_); }
+ Message &operator=(Message &&other) = default;
- const uint8_t *mutable_data() const { return GRPC_SLICE_START_PTR(slice_); }
+ const uint8_t *mutable_data() const { return slice_.begin(); }
- const uint8_t *data() const { return GRPC_SLICE_START_PTR(slice_); }
+ const uint8_t *data() const { return slice_.begin(); }
- size_t size() const { return GRPC_SLICE_LENGTH(slice_); }
+ size_t size() const { return slice_.size(); }
bool Verify() const {
Verifier verifier(data(), size());
@@ -70,10 +61,10 @@ template<class T> class Message {
const T *GetRoot() const { return flatbuffers::GetRoot<T>(data()); }
// This is only intended for serializer use, or if you know what you're doing
- const grpc_slice &BorrowSlice() const { return slice_; }
+ const ::grpc::Slice &BorrowSlice() const { return slice_; }
private:
- grpc_slice slice_;
+ ::grpc::Slice slice_;
};
class MessageBuilder;
@@ -83,12 +74,12 @@ class MessageBuilder;
// efficient to transfer buffers to gRPC.
class SliceAllocator : public Allocator {
public:
- SliceAllocator() : slice_(grpc_empty_slice()) {}
+ SliceAllocator() {}
SliceAllocator(const SliceAllocator &other) = delete;
SliceAllocator &operator=(const SliceAllocator &other) = delete;
- SliceAllocator(SliceAllocator &&other) : slice_(grpc_empty_slice()) {
+ SliceAllocator(SliceAllocator &&other) {
// default-construct and swap idiom
swap(other);
}
@@ -105,45 +96,43 @@ class SliceAllocator : public Allocator {
swap(slice_, other.slice_);
}
- virtual ~SliceAllocator() { grpc_slice_unref(slice_); }
+ virtual ~SliceAllocator() {}
virtual uint8_t *allocate(size_t size) override {
- FLATBUFFERS_ASSERT(GRPC_SLICE_IS_EMPTY(slice_));
- slice_ = grpc_slice_malloc(size);
- return GRPC_SLICE_START_PTR(slice_);
+ FLATBUFFERS_ASSERT(slice_.size() == 0);
+ slice_ = ::grpc::Slice(size);
+ return const_cast<uint8_t *>(slice_.begin());
}
virtual void deallocate(uint8_t *p, size_t size) override {
- FLATBUFFERS_ASSERT(p == GRPC_SLICE_START_PTR(slice_));
- FLATBUFFERS_ASSERT(size == GRPC_SLICE_LENGTH(slice_));
- grpc_slice_unref(slice_);
- slice_ = grpc_empty_slice();
+ FLATBUFFERS_ASSERT(p == slice_.begin());
+ FLATBUFFERS_ASSERT(size == slice_.size());
+ slice_ = ::grpc::Slice();
}
virtual uint8_t *reallocate_downward(uint8_t *old_p, size_t old_size,
size_t new_size, size_t in_use_back,
size_t in_use_front) override {
- FLATBUFFERS_ASSERT(old_p == GRPC_SLICE_START_PTR(slice_));
- FLATBUFFERS_ASSERT(old_size == GRPC_SLICE_LENGTH(slice_));
+ FLATBUFFERS_ASSERT(old_p == slice_.begin());
+ FLATBUFFERS_ASSERT(old_size == slice_.size());
FLATBUFFERS_ASSERT(new_size > old_size);
- grpc_slice old_slice = slice_;
- grpc_slice new_slice = grpc_slice_malloc(new_size);
- uint8_t *new_p = GRPC_SLICE_START_PTR(new_slice);
+ ::grpc::Slice old_slice = slice_;
+ ::grpc::Slice new_slice = ::grpc::Slice(new_size);
+ uint8_t *new_p = const_cast<uint8_t *>(new_slice.begin());
memcpy_downward(old_p, old_size, new_p, new_size, in_use_back,
in_use_front);
slice_ = new_slice;
- grpc_slice_unref(old_slice);
return new_p;
}
private:
- grpc_slice &get_slice(uint8_t *p, size_t size) {
- FLATBUFFERS_ASSERT(p == GRPC_SLICE_START_PTR(slice_));
- FLATBUFFERS_ASSERT(size == GRPC_SLICE_LENGTH(slice_));
+ ::grpc::Slice &get_slice(uint8_t *p, size_t size) {
+ FLATBUFFERS_ASSERT(p == slice_.begin());
+ FLATBUFFERS_ASSERT(size == slice_.size());
return slice_;
}
- grpc_slice slice_;
+ ::grpc::Slice slice_;
friend class MessageBuilder;
};
@@ -184,9 +173,9 @@ class MessageBuilder : private detail::SliceAllocatorMember,
if (buf_.capacity()) {
uint8_t *buf = buf_.scratch_data(); // pointer to memory
size_t capacity = buf_.capacity(); // size of memory
- slice_allocator_.slice_ = grpc_slice_new_with_len(buf, capacity, dealloc);
+ slice_allocator_.slice_ = ::grpc::Slice(buf, capacity, dealloc);
} else {
- slice_allocator_.slice_ = grpc_empty_slice();
+ slice_allocator_.slice_ = ::grpc::Slice();
}
}
@@ -221,10 +210,10 @@ class MessageBuilder : private detail::SliceAllocatorMember,
// Releases the ownership of the buffer pointer.
// Returns the size, offset, and the original grpc_slice that
// allocated the buffer. Also see grpc_slice_unref().
- uint8_t *ReleaseRaw(size_t &size, size_t &offset, grpc_slice &slice) {
+ uint8_t *ReleaseRaw(size_t &size, size_t &offset, ::grpc::Slice &slice) {
uint8_t *buf = FlatBufferBuilder::ReleaseRaw(size, offset);
slice = slice_allocator_.slice_;
- slice_allocator_.slice_ = grpc_empty_slice();
+ slice_allocator_.slice_ = ::grpc::Slice();
return buf;
}
@@ -247,11 +236,11 @@ class MessageBuilder : private detail::SliceAllocatorMember,
auto begin = msg_data - buf_data;
auto end = begin + msg_size;
// Get the slice we are working with (no refcount change)
- grpc_slice slice = slice_allocator_.get_slice(buf_data, buf_size);
+ ::grpc::Slice slice = slice_allocator_.get_slice(buf_data, buf_size);
// Extract a subslice of the existing slice (increment refcount)
- grpc_slice subslice = grpc_slice_sub(slice, begin, end);
+ ::grpc::Slice subslice = slice.sub(begin, end);
// Wrap the subslice in a `Message<T>`, but don't increment refcount
- Message<T> msg(subslice, false);
+ Message<T> msg(subslice);
return msg;
}
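With slice handling now going through ::grpc::Slice, building and releasing a message from user code is unchanged. A sketch assuming a hypothetical generated type MyGame::HelloReply and its CreateHelloReply helper:

    #include <string>
    #include "flatbuffers/grpc.h"
    #include "greeter_generated.h"  // hypothetical generated header

    flatbuffers::grpc::Message<MyGame::HelloReply> MakeReply(const std::string &text) {
      flatbuffers::grpc::MessageBuilder mb;
      auto msg_offset = mb.CreateString(text);
      auto reply_offset = MyGame::CreateHelloReply(mb, msg_offset);
      mb.Finish(reply_offset);
      // GetMessage() above wraps the finished region in a subslice;
      // ReleaseMessage() hands that slice's ownership to the Message<T>.
      return mb.ReleaseMessage<MyGame::HelloReply>();
    }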
@@ -273,15 +262,10 @@ namespace grpc {
template<class T> class SerializationTraits<flatbuffers::grpc::Message<T>> {
public:
static grpc::Status Serialize(const flatbuffers::grpc::Message<T> &msg,
- grpc_byte_buffer **buffer, bool *own_buffer) {
- // We are passed in a `Message<T>`, which is a wrapper around a
- // `grpc_slice`. We extract it here using `BorrowSlice()`. The const cast
- // is necessary because the `grpc_raw_byte_buffer_create` func expects
- // non-const slices in order to increment their refcounts.
- grpc_slice *slice = const_cast<grpc_slice *>(&msg.BorrowSlice());
- // Now use `grpc_raw_byte_buffer_create` to package the single slice into a
- // `grpc_byte_buffer`, incrementing the refcount in the process.
- *buffer = grpc_raw_byte_buffer_create(slice, 1);
+ ByteBuffer *buffer, bool *own_buffer) {
+ // Package the single slice into a `ByteBuffer`,
+ // incrementing the refcount in the process.
+ *buffer = ByteBuffer(&msg.BorrowSlice(), 1);
*own_buffer = true;
return grpc::Status::OK;
}
@@ -289,30 +273,15 @@ template<class T> class SerializationTraits<flatbuffers::grpc::Message<T>> {
// Deserialize by pulling the data out of the byte buffer.
static grpc::Status Deserialize(ByteBuffer *buf,
flatbuffers::grpc::Message<T> *msg) {
- grpc_byte_buffer *buffer = *reinterpret_cast<grpc_byte_buffer **>(buf);
- if (!buffer) {
- return ::grpc::Status(::grpc::StatusCode::INTERNAL, "No payload");
- }
- // Check if this is a single uncompressed slice.
- if ((buffer->type == GRPC_BB_RAW) &&
- (buffer->data.raw.compression == GRPC_COMPRESS_NONE) &&
- (buffer->data.raw.slice_buffer.count == 1)) {
- // If it is, then we can reference the `grpc_slice` directly.
- grpc_slice slice = buffer->data.raw.slice_buffer.slices[0];
- // We wrap a `Message<T>` around the slice, incrementing the refcount.
- *msg = flatbuffers::grpc::Message<T>(slice, true);
- } else {
- // Otherwise, we need to use `grpc_byte_buffer_reader_readall` to read
- // `buffer` into a single contiguous `grpc_slice`. The gRPC reader gives
- // us back a new slice with the refcount already incremented.
- grpc_byte_buffer_reader reader;
- grpc_byte_buffer_reader_init(&reader, buffer);
- grpc_slice slice = grpc_byte_buffer_reader_readall(&reader);
- grpc_byte_buffer_reader_destroy(&reader);
- // We wrap a `Message<T>` around the slice, but don't increment refcount
- *msg = flatbuffers::grpc::Message<T>(slice, false);
+ Slice slice;
+ if (!buf->TrySingleSlice(&slice).ok()) {
+ if (!buf->DumpToSingleSlice(&slice).ok()) {
+ buf->Clear();
+ return ::grpc::Status(::grpc::StatusCode::INTERNAL, "No payload");
+ }
}
- grpc_byte_buffer_destroy(buffer);
+ *msg = flatbuffers::grpc::Message<T>(slice);
+ buf->Clear();
#if FLATBUFFERS_GRPC_DISABLE_AUTO_VERIFICATION
return ::grpc::Status::OK;
#else
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/idl.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/idl.h
index 71de8254c16..208ac52c9e8 100644
--- a/chromium/third_party/flatbuffers/src/include/flatbuffers/idl.h
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/idl.h
@@ -17,6 +17,7 @@
#ifndef FLATBUFFERS_IDL_H_
#define FLATBUFFERS_IDL_H_
+#include <functional>
#include <map>
#include <memory>
#include <stack>
@@ -27,10 +28,6 @@
#include "flatbuffers/hash.h"
#include "flatbuffers/reflection.h"
-#if !defined(FLATBUFFERS_CPP98_STL)
-# include <functional>
-#endif // !defined(FLATBUFFERS_CPP98_STL)
-
// This file defines the data types representing a parsed IDL (Interface
// Definition Language) / schema file.
@@ -207,7 +204,7 @@ template<typename T> class SymbolTable {
}
bool Add(const std::string &name, T *e) {
- vector_emplace_back(&vec, e);
+ vec.emplace_back(e);
auto it = dict.find(name);
if (it != dict.end()) return true;
dict[name] = e;
@@ -568,6 +565,7 @@ struct IDLOptions {
bool gen_nullable;
bool java_checkerframework;
bool gen_generated;
+ bool gen_json_coders;
std::string object_prefix;
std::string object_suffix;
bool union_value_namespacing;
@@ -594,6 +592,9 @@ struct IDLOptions {
std::string filename_extension;
bool no_warnings;
std::string project_root;
+ bool cs_global_alias;
+ bool json_nested_flatbuffers;
+ bool json_nested_flexbuffers;
// Possible options for the more general generator below.
enum Language {
@@ -616,8 +617,6 @@ struct IDLOptions {
kMAX
};
- Language lang;
-
enum MiniReflect { kNone, kTypes, kTypesAndNames };
MiniReflect mini_reflect;
@@ -663,6 +662,7 @@ struct IDLOptions {
gen_nullable(false),
java_checkerframework(false),
gen_generated(false),
+ gen_json_coders(false),
object_suffix("T"),
union_value_namespacing(true),
allow_non_utf8(false),
@@ -681,7 +681,9 @@ struct IDLOptions {
filename_extension(),
no_warnings(false),
project_root(""),
- lang(IDLOptions::kJava),
+ cs_global_alias(false),
+ json_nested_flatbuffers(true),
+ json_nested_flexbuffers(true),
mini_reflect(IDLOptions::kNone),
require_explicit_ids(false),
lang_to_generate(0),
@@ -1162,9 +1164,10 @@ extern std::string RustMakeRule(const Parser &parser, const std::string &path,
// Generate a make rule for generated Java or C# files.
// See code_generators.cpp.
-extern std::string JavaCSharpMakeRule(const Parser &parser,
- const std::string &path,
- const std::string &file_name);
+extern std::string CSharpMakeRule(const Parser &parser, const std::string &path,
+ const std::string &file_name);
+extern std::string JavaMakeRule(const Parser &parser, const std::string &path,
+ const std::string &file_name);
// Generate a make rule for the generated text (JSON) files.
// See idl_gen_text.cpp.
@@ -1203,6 +1206,9 @@ extern bool GenerateSwiftGRPC(const Parser &parser, const std::string &path,
extern bool GenerateTSGRPC(const Parser &parser, const std::string &path,
const std::string &file_name);
+
+extern bool GenerateRustModuleRootFile(const Parser &parser,
+ const std::string &path);
} // namespace flatbuffers
#endif // FLATBUFFERS_IDL_H_
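The new IDLOptions flags sit alongside the existing ones that steer the Parser. A small sketch of programmatic use; the schema and JSON literals are made up:

    #include <string>
    #include "flatbuffers/idl.h"

    inline bool JsonRoundTrip(std::string *json_out) {
      flatbuffers::IDLOptions opts;
      opts.strict_json = true;  // one of the many switches in the struct above
      flatbuffers::Parser parser(opts);
      if (!parser.Parse("table T { x: int; } root_type T;")) return false;
      if (!parser.Parse("{ \"x\": 7 }")) return false;  // JSON -> binary in parser.builder_
      return flatbuffers::GenerateText(parser, parser.builder_.GetBufferPointer(),
                                       json_out);
    }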
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/reflection.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/reflection.h
index fa2cf31fad0..cb3ec9a0f0f 100644
--- a/chromium/third_party/flatbuffers/src/include/flatbuffers/reflection.h
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/reflection.h
@@ -379,12 +379,12 @@ template<typename T, typename U> class pointer_inside_vector {
public:
pointer_inside_vector(T *ptr, std::vector<U> &vec)
: offset_(reinterpret_cast<uint8_t *>(ptr) -
- reinterpret_cast<uint8_t *>(flatbuffers::vector_data(vec))),
+ reinterpret_cast<uint8_t *>(vec.data())),
vec_(vec) {}
T *operator*() const {
- return reinterpret_cast<T *>(
- reinterpret_cast<uint8_t *>(flatbuffers::vector_data(vec_)) + offset_);
+ return reinterpret_cast<T *>(reinterpret_cast<uint8_t *>(vec_.data()) +
+ offset_);
}
T *operator->() const { return operator*(); }
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/registry.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/registry.h
index 9ea425b3978..e8bb8f5e32b 100644
--- a/chromium/third_party/flatbuffers/src/include/flatbuffers/registry.h
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/registry.h
@@ -17,6 +17,7 @@
#ifndef FLATBUFFERS_REGISTRY_H_
#define FLATBUFFERS_REGISTRY_H_
+#include "flatbuffers/base.h"
#include "flatbuffers/idl.h"
namespace flatbuffers {
@@ -40,13 +41,13 @@ class Registry {
bool FlatBufferToText(const uint8_t *flatbuf, size_t len, std::string *dest) {
// Get the identifier out of the buffer.
// If the buffer is truncated, exit.
- if (len < sizeof(uoffset_t) + FlatBufferBuilder::kFileIdentifierLength) {
+ if (len < sizeof(uoffset_t) + kFileIdentifierLength) {
lasterror_ = "buffer truncated";
return false;
}
std::string ident(
reinterpret_cast<const char *>(flatbuf) + sizeof(uoffset_t),
- FlatBufferBuilder::kFileIdentifierLength);
+ kFileIdentifierLength);
// Load and parse the schema.
Parser parser;
if (!LoadSchema(ident, &parser)) return false;
@@ -103,7 +104,7 @@ class Registry {
}
// Parse schema.
parser->opts = opts_;
- if (!parser->Parse(schematext.c_str(), vector_data(include_paths_),
+ if (!parser->Parse(schematext.c_str(), include_paths_.data(),
schema.path_.c_str())) {
lasterror_ = parser->error_;
return false;
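A sketch of the Registry path touched above; the file identifier and schema filename are assumptions:

    #include <string>
    #include "flatbuffers/registry.h"

    inline bool ToJson(const uint8_t *flatbuf, size_t len, std::string *out) {
      flatbuffers::Registry registry;
      // The identifier must match the schema's file_identifier declaration.
      registry.Register("MONS", "monster.fbs");  // hypothetical values
      return registry.FlatBufferToText(flatbuf, len, out);
    }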
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/stl_emulation.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/stl_emulation.h
index 2697a0a8a4c..75d13b29ced 100644
--- a/chromium/third_party/flatbuffers/src/include/flatbuffers/stl_emulation.h
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/stl_emulation.h
@@ -26,14 +26,6 @@
#include <memory>
#include <limits>
-#if defined(_STLPORT_VERSION) && !defined(FLATBUFFERS_CPP98_STL)
- #define FLATBUFFERS_CPP98_STL
-#endif // defined(_STLPORT_VERSION) && !defined(FLATBUFFERS_CPP98_STL)
-
-#if defined(FLATBUFFERS_CPP98_STL)
- #include <cctype>
-#endif // defined(FLATBUFFERS_CPP98_STL)
-
// Detect C++17 compatible compiler.
// __cplusplus >= 201703L - a compiler has support of 'static inline' variables.
#if defined(FLATBUFFERS_USE_STD_OPTIONAL) \
@@ -55,7 +47,7 @@
#endif
#else
// Disable non-trivial ctors if FLATBUFFERS_SPAN_MINIMAL defined.
- #if !defined(FLATBUFFERS_TEMPLATES_ALIASES) || defined(FLATBUFFERS_CPP98_STL)
+ #if !defined(FLATBUFFERS_TEMPLATES_ALIASES)
#define FLATBUFFERS_SPAN_MINIMAL
#else
// Enable implicit construction of a span<T,N> from a std::array<T,N>.
@@ -63,139 +55,32 @@
#endif
#endif // defined(FLATBUFFERS_USE_STD_SPAN)
-// This header provides backwards compatibility for C++98 STLs like stlport.
+// This header provides backwards compatibility for older versions of the STL.
namespace flatbuffers {
-// Retrieve ::back() from a string in a way that is compatible with pre C++11
-// STLs (e.g stlport).
-inline char& string_back(std::string &value) {
- return value[value.length() - 1];
-}
-
-inline char string_back(const std::string &value) {
- return value[value.length() - 1];
-}
-
-// Helper method that retrieves ::data() from a vector in a way that is
-// compatible with pre C++11 STLs (e.g stlport).
-template <typename T> inline T *vector_data(std::vector<T> &vector) {
- // In some debug environments, operator[] does bounds checking, so &vector[0]
- // can't be used.
- return vector.empty() ? nullptr : &vector[0];
-}
-
-template <typename T> inline const T *vector_data(
- const std::vector<T> &vector) {
- return vector.empty() ? nullptr : &vector[0];
-}
-
-template <typename T, typename V>
-inline void vector_emplace_back(std::vector<T> *vector, V &&data) {
- #if defined(FLATBUFFERS_CPP98_STL)
- vector->push_back(data);
- #else
- vector->emplace_back(std::forward<V>(data));
- #endif // defined(FLATBUFFERS_CPP98_STL)
-}
-
-#ifndef FLATBUFFERS_CPP98_STL
- #if defined(FLATBUFFERS_TEMPLATES_ALIASES)
- template <typename T>
- using numeric_limits = std::numeric_limits<T>;
- #else
- template <typename T> class numeric_limits :
- public std::numeric_limits<T> {};
- #endif // defined(FLATBUFFERS_TEMPLATES_ALIASES)
+#if defined(FLATBUFFERS_TEMPLATES_ALIASES)
+ template <typename T>
+ using numeric_limits = std::numeric_limits<T>;
#else
template <typename T> class numeric_limits :
- public std::numeric_limits<T> {
- public:
- // Android NDK fix.
- static T lowest() {
- return std::numeric_limits<T>::min();
- }
- };
-
- template <> class numeric_limits<float> :
- public std::numeric_limits<float> {
- public:
- static float lowest() { return -FLT_MAX; }
- };
-
- template <> class numeric_limits<double> :
- public std::numeric_limits<double> {
- public:
- static double lowest() { return -DBL_MAX; }
- };
-
- template <> class numeric_limits<unsigned long long> {
- public:
- static unsigned long long min() { return 0ULL; }
- static unsigned long long max() { return ~0ULL; }
- static unsigned long long lowest() {
- return numeric_limits<unsigned long long>::min();
- }
- };
-
- template <> class numeric_limits<long long> {
- public:
- static long long min() {
- return static_cast<long long>(1ULL << ((sizeof(long long) << 3) - 1));
- }
- static long long max() {
- return static_cast<long long>(
- (1ULL << ((sizeof(long long) << 3) - 1)) - 1);
- }
- static long long lowest() {
- return numeric_limits<long long>::min();
- }
- };
-#endif // FLATBUFFERS_CPP98_STL
+ public std::numeric_limits<T> {};
+#endif // defined(FLATBUFFERS_TEMPLATES_ALIASES)
#if defined(FLATBUFFERS_TEMPLATES_ALIASES)
- #ifndef FLATBUFFERS_CPP98_STL
- template <typename T> using is_scalar = std::is_scalar<T>;
- template <typename T, typename U> using is_same = std::is_same<T,U>;
- template <typename T> using is_floating_point = std::is_floating_point<T>;
- template <typename T> using is_unsigned = std::is_unsigned<T>;
- template <typename T> using is_enum = std::is_enum<T>;
- template <typename T> using make_unsigned = std::make_unsigned<T>;
- template<bool B, class T, class F>
- using conditional = std::conditional<B, T, F>;
- template<class T, T v>
- using integral_constant = std::integral_constant<T, v>;
- template <bool B>
- using bool_constant = integral_constant<bool, B>;
- using true_type = std::true_type;
- using false_type = std::false_type;
- #else
- // Map C++ TR1 templates defined by stlport.
- template <typename T> using is_scalar = std::tr1::is_scalar<T>;
- template <typename T, typename U> using is_same = std::tr1::is_same<T,U>;
- template <typename T> using is_floating_point =
- std::tr1::is_floating_point<T>;
- template <typename T> using is_unsigned = std::tr1::is_unsigned<T>;
- template <typename T> using is_enum = std::tr1::is_enum<T>;
- // Android NDK doesn't have std::make_unsigned or std::tr1::make_unsigned.
- template<typename T> struct make_unsigned {
- static_assert(is_unsigned<T>::value, "Specialization not implemented!");
- using type = T;
- };
- template<> struct make_unsigned<char> { using type = unsigned char; };
- template<> struct make_unsigned<short> { using type = unsigned short; };
- template<> struct make_unsigned<int> { using type = unsigned int; };
- template<> struct make_unsigned<long> { using type = unsigned long; };
- template<>
- struct make_unsigned<long long> { using type = unsigned long long; };
- template<bool B, class T, class F>
- using conditional = std::tr1::conditional<B, T, F>;
- template<class T, T v>
- using integral_constant = std::tr1::integral_constant<T, v>;
- template <bool B>
- using bool_constant = integral_constant<bool, B>;
- using true_type = bool_constant<true>;
- using false_type = bool_constant<false>;
- #endif // !FLATBUFFERS_CPP98_STL
+ template <typename T> using is_scalar = std::is_scalar<T>;
+ template <typename T, typename U> using is_same = std::is_same<T,U>;
+ template <typename T> using is_floating_point = std::is_floating_point<T>;
+ template <typename T> using is_unsigned = std::is_unsigned<T>;
+ template <typename T> using is_enum = std::is_enum<T>;
+ template <typename T> using make_unsigned = std::make_unsigned<T>;
+ template<bool B, class T, class F>
+ using conditional = std::conditional<B, T, F>;
+ template<class T, T v>
+ using integral_constant = std::integral_constant<T, v>;
+ template <bool B>
+ using bool_constant = integral_constant<bool, B>;
+ using true_type = std::true_type;
+ using false_type = std::false_type;
#else
// MSVC 2010 doesn't support C++11 aliases.
template <typename T> struct is_scalar : public std::is_scalar<T> {};
@@ -215,124 +100,33 @@ inline void vector_emplace_back(std::vector<T> *vector, V &&data) {
typedef bool_constant<false> false_type;
#endif // defined(FLATBUFFERS_TEMPLATES_ALIASES)
-#ifndef FLATBUFFERS_CPP98_STL
- #if defined(FLATBUFFERS_TEMPLATES_ALIASES)
- template <class T> using unique_ptr = std::unique_ptr<T>;
- #else
- // MSVC 2010 doesn't support C++11 aliases.
- // We're manually "aliasing" the class here as we want to bring unique_ptr
- // into the flatbuffers namespace. We have unique_ptr in the flatbuffers
- // namespace we have a completely independent implementation (see below)
- // for C++98 STL implementations.
- template <class T> class unique_ptr : public std::unique_ptr<T> {
- public:
- unique_ptr() {}
- explicit unique_ptr(T* p) : std::unique_ptr<T>(p) {}
- unique_ptr(std::unique_ptr<T>&& u) { *this = std::move(u); }
- unique_ptr(unique_ptr&& u) { *this = std::move(u); }
- unique_ptr& operator=(std::unique_ptr<T>&& u) {
- std::unique_ptr<T>::reset(u.release());
- return *this;
- }
- unique_ptr& operator=(unique_ptr&& u) {
- std::unique_ptr<T>::reset(u.release());
- return *this;
- }
- unique_ptr& operator=(T* p) {
- return std::unique_ptr<T>::operator=(p);
- }
- };
- #endif // defined(FLATBUFFERS_TEMPLATES_ALIASES)
+#if defined(FLATBUFFERS_TEMPLATES_ALIASES)
+ template <class T> using unique_ptr = std::unique_ptr<T>;
#else
- // Very limited implementation of unique_ptr.
- // This is provided simply to allow the C++ code generated from the default
- // settings to function in C++98 environments with no modifications.
- template <class T> class unique_ptr {
- public:
- typedef T element_type;
-
- unique_ptr() : ptr_(nullptr) {}
- explicit unique_ptr(T* p) : ptr_(p) {}
- unique_ptr(unique_ptr&& u) : ptr_(nullptr) { reset(u.release()); }
- unique_ptr(const unique_ptr& u) : ptr_(nullptr) {
- reset(const_cast<unique_ptr*>(&u)->release());
- }
- ~unique_ptr() { reset(); }
-
- unique_ptr& operator=(const unique_ptr& u) {
- reset(const_cast<unique_ptr*>(&u)->release());
+ // MSVC 2010 doesn't support C++11 aliases.
+ // We're manually "aliasing" the class here as we want to bring unique_ptr
+ // into the flatbuffers namespace; we used to have a completely independent
+ // implementation of unique_ptr here for C++98 STLs, and this manual alias
+ // keeps that name available.
+ template <class T> class unique_ptr : public std::unique_ptr<T> {
+ public:
+ unique_ptr() {}
+ explicit unique_ptr(T* p) : std::unique_ptr<T>(p) {}
+ unique_ptr(std::unique_ptr<T>&& u) { *this = std::move(u); }
+ unique_ptr(unique_ptr&& u) { *this = std::move(u); }
+ unique_ptr& operator=(std::unique_ptr<T>&& u) {
+ std::unique_ptr<T>::reset(u.release());
return *this;
}
-
unique_ptr& operator=(unique_ptr&& u) {
- reset(u.release());
+ std::unique_ptr<T>::reset(u.release());
return *this;
}
-
unique_ptr& operator=(T* p) {
- reset(p);
- return *this;
- }
-
- const T& operator*() const { return *ptr_; }
- T* operator->() const { return ptr_; }
- T* get() const noexcept { return ptr_; }
- explicit operator bool() const { return ptr_ != nullptr; }
-
- // modifiers
- T* release() {
- T* value = ptr_;
- ptr_ = nullptr;
- return value;
- }
-
- void reset(T* p = nullptr) {
- T* value = ptr_;
- ptr_ = p;
- if (value) delete value;
- }
-
- void swap(unique_ptr& u) {
- T* temp_ptr = ptr_;
- ptr_ = u.ptr_;
- u.ptr_ = temp_ptr;
+ return std::unique_ptr<T>::operator=(p);
}
-
- private:
- T* ptr_;
};
-
- template <class T> bool operator==(const unique_ptr<T>& x,
- const unique_ptr<T>& y) {
- return x.get() == y.get();
- }
-
- template <class T, class D> bool operator==(const unique_ptr<T>& x,
- const D* y) {
- return static_cast<D*>(x.get()) == y;
- }
-
- template <class T> bool operator==(const unique_ptr<T>& x, intptr_t y) {
- return reinterpret_cast<intptr_t>(x.get()) == y;
- }
-
- template <class T> bool operator!=(const unique_ptr<T>& x, decltype(nullptr)) {
- return !!x;
- }
-
- template <class T> bool operator!=(decltype(nullptr), const unique_ptr<T>& x) {
- return !!x;
- }
-
- template <class T> bool operator==(const unique_ptr<T>& x, decltype(nullptr)) {
- return !x;
- }
-
- template <class T> bool operator==(decltype(nullptr), const unique_ptr<T>& x) {
- return !x;
- }
-
-#endif // !FLATBUFFERS_CPP98_STL
+#endif // defined(FLATBUFFERS_TEMPLATES_ALIASES)
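With the stlport/C++98 branches gone, flatbuffers::unique_ptr is a plain alias for std::unique_ptr on any compiler with template aliases, so object-API results interoperate directly with standard smart pointers. A trivial sketch:

    #include <memory>
    #include <utility>
    #include "flatbuffers/stl_emulation.h"

    inline std::unique_ptr<int> Demo() {
      flatbuffers::unique_ptr<int> p(new int(42));
      return std::unique_ptr<int>(std::move(p));  // same type under the alias
    }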
#ifdef FLATBUFFERS_USE_STD_OPTIONAL
template<class T>
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/string.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/string.h
new file mode 100644
index 00000000000..3db95fce1bd
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/string.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_STRING_H_
+#define FLATBUFFERS_STRING_H_
+
+#include "flatbuffers/base.h"
+#include "flatbuffers/vector.h"
+
+namespace flatbuffers {
+
+struct String : public Vector<char> {
+ const char *c_str() const { return reinterpret_cast<const char *>(Data()); }
+ std::string str() const { return std::string(c_str(), size()); }
+
+ // clang-format off
+ #ifdef FLATBUFFERS_HAS_STRING_VIEW
+ flatbuffers::string_view string_view() const {
+ return flatbuffers::string_view(c_str(), size());
+ }
+ #endif // FLATBUFFERS_HAS_STRING_VIEW
+ // clang-format on
+
+ bool operator<(const String &o) const {
+ return StringLessThan(this->data(), this->size(), o.data(), o.size());
+ }
+};
+
+// Convenience function to get std::string from a String returning an empty
+// string on null pointer.
+static inline std::string GetString(const String *str) {
+ return str ? str->str() : "";
+}
+
+// Convenience function to get char* from a String returning an empty string on
+// null pointer.
+static inline const char *GetCstring(const String *str) {
+ return str ? str->c_str() : "";
+}
+
+#ifdef FLATBUFFERS_HAS_STRING_VIEW
+// Convenience function to get string_view from a String returning an empty
+// string_view on null pointer.
+static inline flatbuffers::string_view GetStringView(const String *str) {
+ return str ? str->string_view() : flatbuffers::string_view();
+}
+#endif // FLATBUFFERS_HAS_STRING_VIEW
+
+} // namespace flatbuffers
+
+#endif // FLATBUFFERS_STRING_H_
\ No newline at end of file
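The helpers in the new string.h are mostly called from generated code and the object API; the String pointer below would typically come from a generated accessor such as monster->name() (hypothetical here):

    #include <string>
    #include "flatbuffers/flatbuffers.h"

    inline std::string NameOrEmpty(const flatbuffers::String *name) {
      // GetString returns "" for a null field instead of dereferencing it.
      return flatbuffers::GetString(name);
    }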
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/struct.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/struct.h
new file mode 100644
index 00000000000..d8753c84f0d
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/struct.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_STRUCT_H_
+#define FLATBUFFERS_STRUCT_H_
+
+#include "flatbuffers/base.h"
+
+namespace flatbuffers {
+
+// "structs" are flat structures that do not have an offset table, thus
+// always have all members present and do not support forwards/backwards
+// compatible extensions.
+
+class Struct FLATBUFFERS_FINAL_CLASS {
+ public:
+ template<typename T> T GetField(uoffset_t o) const {
+ return ReadScalar<T>(&data_[o]);
+ }
+
+ template<typename T> T GetStruct(uoffset_t o) const {
+ return reinterpret_cast<T>(&data_[o]);
+ }
+
+ const uint8_t *GetAddressOf(uoffset_t o) const { return &data_[o]; }
+ uint8_t *GetAddressOf(uoffset_t o) { return &data_[o]; }
+
+ private:
+ // private constructor & copy constructor: you obtain instances of this
+ // class by pointing to existing data only
+ Struct();
+ Struct(const Struct &);
+ Struct &operator=(const Struct &);
+
+ uint8_t data_[1];
+};
+
+} // namespace flatbuffers
+
+#endif // FLATBUFFERS_STRUCT_H_
\ No newline at end of file
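flatbuffers::Struct is the type-erased view used by reflection-style code; fields sit at fixed byte offsets. A sketch reading a hypothetical `struct Vec2 { x: float; y: float; }`:

    #include "flatbuffers/flatbuffers.h"

    inline float SumVec2(const flatbuffers::Struct *v) {
      // x at byte offset 0, y at byte offset 4 in the fixed layout.
      return v->GetField<float>(0) + v->GetField<float>(4);
    }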
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/table.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/table.h
new file mode 100644
index 00000000000..42470693950
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/table.h
@@ -0,0 +1,166 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_TABLE_H_
+#define FLATBUFFERS_TABLE_H_
+
+#include "flatbuffers/base.h"
+#include "flatbuffers/verifier.h"
+
+namespace flatbuffers {
+
+// "tables" use an offset table (possibly shared) that allows fields to be
+// omitted and added at will, but uses an extra indirection to read.
+class Table {
+ public:
+ const uint8_t *GetVTable() const {
+ return data_ - ReadScalar<soffset_t>(data_);
+ }
+
+ // This gets the field offset for any of the functions below it, or 0
+ // if the field was not present.
+ voffset_t GetOptionalFieldOffset(voffset_t field) const {
+ // The vtable offset is always at the start.
+ auto vtable = GetVTable();
+ // The first element is the size of the vtable (fields + type id + itself).
+ auto vtsize = ReadScalar<voffset_t>(vtable);
+ // If the field we're accessing is outside the vtable, we're reading older
+ // data, so it's the same as if the offset was 0 (not present).
+ return field < vtsize ? ReadScalar<voffset_t>(vtable + field) : 0;
+ }
+
+ template<typename T> T GetField(voffset_t field, T defaultval) const {
+ auto field_offset = GetOptionalFieldOffset(field);
+ return field_offset ? ReadScalar<T>(data_ + field_offset) : defaultval;
+ }
+
+ template<typename P> P GetPointer(voffset_t field) {
+ auto field_offset = GetOptionalFieldOffset(field);
+ auto p = data_ + field_offset;
+ return field_offset ? reinterpret_cast<P>(p + ReadScalar<uoffset_t>(p))
+ : nullptr;
+ }
+ template<typename P> P GetPointer(voffset_t field) const {
+ return const_cast<Table *>(this)->GetPointer<P>(field);
+ }
+
+ template<typename P> P GetStruct(voffset_t field) const {
+ auto field_offset = GetOptionalFieldOffset(field);
+ auto p = const_cast<uint8_t *>(data_ + field_offset);
+ return field_offset ? reinterpret_cast<P>(p) : nullptr;
+ }
+
+ template<typename Raw, typename Face>
+ flatbuffers::Optional<Face> GetOptional(voffset_t field) const {
+ auto field_offset = GetOptionalFieldOffset(field);
+ auto p = data_ + field_offset;
+ return field_offset ? Optional<Face>(static_cast<Face>(ReadScalar<Raw>(p)))
+ : Optional<Face>();
+ }
+
+ template<typename T> bool SetField(voffset_t field, T val, T def) {
+ auto field_offset = GetOptionalFieldOffset(field);
+ if (!field_offset) return IsTheSameAs(val, def);
+ WriteScalar(data_ + field_offset, val);
+ return true;
+ }
+ template<typename T> bool SetField(voffset_t field, T val) {
+ auto field_offset = GetOptionalFieldOffset(field);
+ if (!field_offset) return false;
+ WriteScalar(data_ + field_offset, val);
+ return true;
+ }
+
+ bool SetPointer(voffset_t field, const uint8_t *val) {
+ auto field_offset = GetOptionalFieldOffset(field);
+ if (!field_offset) return false;
+ WriteScalar(data_ + field_offset,
+ static_cast<uoffset_t>(val - (data_ + field_offset)));
+ return true;
+ }
+
+ uint8_t *GetAddressOf(voffset_t field) {
+ auto field_offset = GetOptionalFieldOffset(field);
+ return field_offset ? data_ + field_offset : nullptr;
+ }
+ const uint8_t *GetAddressOf(voffset_t field) const {
+ return const_cast<Table *>(this)->GetAddressOf(field);
+ }
+
+ bool CheckField(voffset_t field) const {
+ return GetOptionalFieldOffset(field) != 0;
+ }
+
+ // Verify the vtable of this table.
+ // Call this once per table, followed by VerifyField once per field.
+ bool VerifyTableStart(Verifier &verifier) const {
+ return verifier.VerifyTableStart(data_);
+ }
+
+ // Verify a particular field.
+ template<typename T>
+ bool VerifyField(const Verifier &verifier, voffset_t field) const {
+ // Calling GetOptionalFieldOffset should be safe now thanks to
+ // VerifyTable().
+ auto field_offset = GetOptionalFieldOffset(field);
+ // Check the actual field.
+ return !field_offset || verifier.Verify<T>(data_, field_offset);
+ }
+
+ // VerifyField for required fields.
+ template<typename T>
+ bool VerifyFieldRequired(const Verifier &verifier, voffset_t field) const {
+ auto field_offset = GetOptionalFieldOffset(field);
+ return verifier.Check(field_offset != 0) &&
+ verifier.Verify<T>(data_, field_offset);
+ }
+
+ // Versions for offsets.
+ bool VerifyOffset(const Verifier &verifier, voffset_t field) const {
+ auto field_offset = GetOptionalFieldOffset(field);
+ return !field_offset || verifier.VerifyOffset(data_, field_offset);
+ }
+
+ bool VerifyOffsetRequired(const Verifier &verifier, voffset_t field) const {
+ auto field_offset = GetOptionalFieldOffset(field);
+ return verifier.Check(field_offset != 0) &&
+ verifier.VerifyOffset(data_, field_offset);
+ }
+
+ private:
+ // private constructor & copy constructor: you obtain instances of this
+ // class by pointing to existing data only
+ Table();
+ Table(const Table &other);
+ Table &operator=(const Table &);
+
+ uint8_t data_[1];
+};
+
+// This specialization allows avoiding warnings like:
+// MSVC C4800: type: forcing value to bool 'true' or 'false'.
+template<>
+inline flatbuffers::Optional<bool> Table::GetOptional<uint8_t, bool>(
+ voffset_t field) const {
+ auto field_offset = GetOptionalFieldOffset(field);
+ auto p = data_ + field_offset;
+ return field_offset ? Optional<bool>(ReadScalar<uint8_t>(p) != 0)
+ : Optional<bool>();
+}
+
+} // namespace flatbuffers
+
+#endif // FLATBUFFERS_TABLE_H_
\ No newline at end of file
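For context on how the verification hooks in this header compose, a rough sketch (not actual flatc output) of the Verify() method a generated table gets, for a hypothetical `table Weapon { damage: short = 100; name: string; }`:

    #include "flatbuffers/flatbuffers.h"

    struct WeaponVerifySketch FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
      enum { VT_DAMAGE = 4, VT_NAME = 6 };
      const flatbuffers::String *name() const {
        return GetPointer<const flatbuffers::String *>(VT_NAME);
      }
      bool Verify(flatbuffers::Verifier &verifier) const {
        return VerifyTableStart(verifier) &&            // vtable bounds check
               VerifyField<int16_t>(verifier, VT_DAMAGE) &&
               VerifyOffset(verifier, VT_NAME) &&       // offset stays in buffer
               verifier.VerifyString(name()) &&         // null-safe on absent field
               verifier.EndTable();                     // pops the depth counter
      }
    };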
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/util.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/util.h
index edbee649569..020a0607c4a 100644
--- a/chromium/third_party/flatbuffers/src/include/flatbuffers/util.h
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/util.h
@@ -145,20 +145,6 @@ template<> inline std::string NumToString<unsigned char>(unsigned char t) {
template<> inline std::string NumToString<char>(char t) {
return NumToString(static_cast<int>(t));
}
-#if defined(FLATBUFFERS_CPP98_STL)
-template<> inline std::string NumToString<long long>(long long t) {
- char buf[21]; // (log((1 << 63) - 1) / log(10)) + 2
- snprintf(buf, sizeof(buf), "%lld", t);
- return std::string(buf);
-}
-
-template<>
-inline std::string NumToString<unsigned long long>(unsigned long long t) {
- char buf[22]; // (log((1 << 63) - 1) / log(10)) + 1
- snprintf(buf, sizeof(buf), "%llu", t);
- return std::string(buf);
-}
-#endif // defined(FLATBUFFERS_CPP98_STL)
// Special versions for floats/doubles.
template<typename T> std::string FloatToString(T t, int precision) {
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/vector.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/vector.h
new file mode 100644
index 00000000000..f8a5d88e86b
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/vector.h
@@ -0,0 +1,370 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_VECTOR_H_
+#define FLATBUFFERS_VECTOR_H_
+
+#include "flatbuffers/base.h"
+#include "flatbuffers/buffer.h"
+
+namespace flatbuffers {
+
+struct String;
+
+// An STL compatible iterator implementation for Vector below, effectively
+// calling Get() for every element.
+template<typename T, typename IT> struct VectorIterator {
+ typedef std::random_access_iterator_tag iterator_category;
+ typedef IT value_type;
+ typedef ptrdiff_t difference_type;
+ typedef IT *pointer;
+ typedef IT &reference;
+
+ VectorIterator(const uint8_t *data, uoffset_t i)
+ : data_(data + IndirectHelper<T>::element_stride * i) {}
+ VectorIterator(const VectorIterator &other) : data_(other.data_) {}
+ VectorIterator() : data_(nullptr) {}
+
+ VectorIterator &operator=(const VectorIterator &other) {
+ data_ = other.data_;
+ return *this;
+ }
+
+ VectorIterator &operator=(VectorIterator &&other) {
+ data_ = other.data_;
+ return *this;
+ }
+
+ bool operator==(const VectorIterator &other) const {
+ return data_ == other.data_;
+ }
+
+ bool operator<(const VectorIterator &other) const {
+ return data_ < other.data_;
+ }
+
+ bool operator!=(const VectorIterator &other) const {
+ return data_ != other.data_;
+ }
+
+ difference_type operator-(const VectorIterator &other) const {
+ return (data_ - other.data_) / IndirectHelper<T>::element_stride;
+ }
+
+ // Note: return type is incompatible with the standard
+ // `reference operator*()`.
+ IT operator*() const { return IndirectHelper<T>::Read(data_, 0); }
+
+ // Note: return type is incompatible with the standard
+ // `pointer operator->()`.
+ IT operator->() const { return IndirectHelper<T>::Read(data_, 0); }
+
+ VectorIterator &operator++() {
+ data_ += IndirectHelper<T>::element_stride;
+ return *this;
+ }
+
+ VectorIterator operator++(int) {
+ VectorIterator temp(data_, 0);
+ data_ += IndirectHelper<T>::element_stride;
+ return temp;
+ }
+
+ VectorIterator operator+(const uoffset_t &offset) const {
+ return VectorIterator(data_ + offset * IndirectHelper<T>::element_stride,
+ 0);
+ }
+
+ VectorIterator &operator+=(const uoffset_t &offset) {
+ data_ += offset * IndirectHelper<T>::element_stride;
+ return *this;
+ }
+
+ VectorIterator &operator--() {
+ data_ -= IndirectHelper<T>::element_stride;
+ return *this;
+ }
+
+ VectorIterator operator--(int) {
+ VectorIterator temp(data_, 0);
+ data_ -= IndirectHelper<T>::element_stride;
+ return temp;
+ }
+
+ VectorIterator operator-(const uoffset_t &offset) const {
+ return VectorIterator(data_ - offset * IndirectHelper<T>::element_stride,
+ 0);
+ }
+
+ VectorIterator &operator-=(const uoffset_t &offset) {
+ data_ -= offset * IndirectHelper<T>::element_stride;
+ return *this;
+ }
+
+ private:
+ const uint8_t *data_;
+};
+
+template<typename Iterator>
+struct VectorReverseIterator : public std::reverse_iterator<Iterator> {
+ explicit VectorReverseIterator(Iterator iter)
+ : std::reverse_iterator<Iterator>(iter) {}
+
+ // Note: return type is incompatible with the standard
+ // `reference operator*()`.
+ typename Iterator::value_type operator*() const {
+ auto tmp = std::reverse_iterator<Iterator>::current;
+ return *--tmp;
+ }
+
+ // Note: return type is incompatible with the standard
+ // `pointer operator->()`.
+ typename Iterator::value_type operator->() const {
+ auto tmp = std::reverse_iterator<Iterator>::current;
+ return *--tmp;
+ }
+};
+
+// This is used as a helper type for accessing vectors.
+// Vector::data() assumes the vector elements start after the length field.
+template<typename T> class Vector {
+ public:
+ typedef VectorIterator<T, typename IndirectHelper<T>::mutable_return_type>
+ iterator;
+ typedef VectorIterator<T, typename IndirectHelper<T>::return_type>
+ const_iterator;
+ typedef VectorReverseIterator<iterator> reverse_iterator;
+ typedef VectorReverseIterator<const_iterator> const_reverse_iterator;
+
+ typedef typename flatbuffers::bool_constant<flatbuffers::is_scalar<T>::value>
+ scalar_tag;
+
+ static FLATBUFFERS_CONSTEXPR bool is_span_observable =
+ scalar_tag::value && (FLATBUFFERS_LITTLEENDIAN || sizeof(T) == 1);
+
+ uoffset_t size() const { return EndianScalar(length_); }
+
+ // Deprecated: use size(). Here for backwards compatibility.
+ FLATBUFFERS_ATTRIBUTE([[deprecated("use size() instead")]])
+ uoffset_t Length() const { return size(); }
+
+ typedef typename IndirectHelper<T>::return_type return_type;
+ typedef typename IndirectHelper<T>::mutable_return_type mutable_return_type;
+ typedef return_type value_type;
+
+ return_type Get(uoffset_t i) const {
+ FLATBUFFERS_ASSERT(i < size());
+ return IndirectHelper<T>::Read(Data(), i);
+ }
+
+ return_type operator[](uoffset_t i) const { return Get(i); }
+
+ // If this is a Vector of enums, T will be its storage type, not the enum
+  // type. This function makes it convenient to retrieve values with enum
+ // type E.
+ template<typename E> E GetEnum(uoffset_t i) const {
+ return static_cast<E>(Get(i));
+ }
+
+  // If this is a vector of unions, this does the cast for you. There's no check
+ // to make sure this is the right type!
+ template<typename U> const U *GetAs(uoffset_t i) const {
+ return reinterpret_cast<const U *>(Get(i));
+ }
+
+  // If this is a vector of unions, this does the cast for you. There's no check
+ // to make sure this is actually a string!
+ const String *GetAsString(uoffset_t i) const {
+ return reinterpret_cast<const String *>(Get(i));
+ }
+
+ const void *GetStructFromOffset(size_t o) const {
+ return reinterpret_cast<const void *>(Data() + o);
+ }
+
+ iterator begin() { return iterator(Data(), 0); }
+ const_iterator begin() const { return const_iterator(Data(), 0); }
+
+ iterator end() { return iterator(Data(), size()); }
+ const_iterator end() const { return const_iterator(Data(), size()); }
+
+ reverse_iterator rbegin() { return reverse_iterator(end()); }
+ const_reverse_iterator rbegin() const {
+ return const_reverse_iterator(end());
+ }
+
+ reverse_iterator rend() { return reverse_iterator(begin()); }
+ const_reverse_iterator rend() const {
+ return const_reverse_iterator(begin());
+ }
+
+ const_iterator cbegin() const { return begin(); }
+
+ const_iterator cend() const { return end(); }
+
+ const_reverse_iterator crbegin() const { return rbegin(); }
+
+ const_reverse_iterator crend() const { return rend(); }
+
+ // Change elements if you have a non-const pointer to this object.
+ // Scalars only. See reflection.h, and the documentation.
+ void Mutate(uoffset_t i, const T &val) {
+ FLATBUFFERS_ASSERT(i < size());
+ WriteScalar(data() + i, val);
+ }
+
+ // Change an element of a vector of tables (or strings).
+ // "val" points to the new table/string, as you can obtain from
+ // e.g. reflection::AddFlatBuffer().
+ void MutateOffset(uoffset_t i, const uint8_t *val) {
+ FLATBUFFERS_ASSERT(i < size());
+ static_assert(sizeof(T) == sizeof(uoffset_t), "Unrelated types");
+ WriteScalar(data() + i,
+ static_cast<uoffset_t>(val - (Data() + i * sizeof(uoffset_t))));
+ }
+
+ // Get a mutable pointer to tables/strings inside this vector.
+ mutable_return_type GetMutableObject(uoffset_t i) const {
+ FLATBUFFERS_ASSERT(i < size());
+ return const_cast<mutable_return_type>(IndirectHelper<T>::Read(Data(), i));
+ }
+
+ // The raw data in little endian format. Use with care.
+ const uint8_t *Data() const {
+ return reinterpret_cast<const uint8_t *>(&length_ + 1);
+ }
+
+ uint8_t *Data() { return reinterpret_cast<uint8_t *>(&length_ + 1); }
+
+ // Similarly, but typed, much like std::vector::data
+ const T *data() const { return reinterpret_cast<const T *>(Data()); }
+ T *data() { return reinterpret_cast<T *>(Data()); }
+
+ template<typename K> return_type LookupByKey(K key) const {
+ void *search_result = std::bsearch(
+ &key, Data(), size(), IndirectHelper<T>::element_stride, KeyCompare<K>);
+
+ if (!search_result) {
+ return nullptr; // Key not found.
+ }
+
+ const uint8_t *element = reinterpret_cast<const uint8_t *>(search_result);
+
+ return IndirectHelper<T>::Read(element, 0);
+ }
+
+ template<typename K> mutable_return_type MutableLookupByKey(K key) {
+ return const_cast<mutable_return_type>(LookupByKey(key));
+ }
+
+ protected:
+ // This class is only used to access pre-existing data. Don't ever
+ // try to construct these manually.
+ Vector();
+
+ uoffset_t length_;
+
+ private:
+ // This class is a pointer. Copying will therefore create an invalid object.
+ // Private and unimplemented copy constructor.
+ Vector(const Vector &);
+ Vector &operator=(const Vector &);
+
+ template<typename K> static int KeyCompare(const void *ap, const void *bp) {
+ const K *key = reinterpret_cast<const K *>(ap);
+ const uint8_t *data = reinterpret_cast<const uint8_t *>(bp);
+ auto table = IndirectHelper<T>::Read(data, 0);
+
+ // std::bsearch compares with the operands transposed, so we negate the
+ // result here.
+ return -table->KeyCompareWithValue(*key);
+ }
+};
+
+template<class U>
+FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<U> make_span(Vector<U> &vec)
+ FLATBUFFERS_NOEXCEPT {
+ static_assert(Vector<U>::is_span_observable,
+ "wrong type U, only LE-scalar, or byte types are allowed");
+ return span<U>(vec.data(), vec.size());
+}
+
+template<class U>
+FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<const U> make_span(
+ const Vector<U> &vec) FLATBUFFERS_NOEXCEPT {
+ static_assert(Vector<U>::is_span_observable,
+ "wrong type U, only LE-scalar, or byte types are allowed");
+ return span<const U>(vec.data(), vec.size());
+}
+
+template<class U>
+FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<uint8_t> make_bytes_span(
+ Vector<U> &vec) FLATBUFFERS_NOEXCEPT {
+ static_assert(Vector<U>::scalar_tag::value,
+ "wrong type U, only LE-scalar, or byte types are allowed");
+ return span<uint8_t>(vec.Data(), vec.size() * sizeof(U));
+}
+
+template<class U>
+FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<const uint8_t> make_bytes_span(
+ const Vector<U> &vec) FLATBUFFERS_NOEXCEPT {
+ static_assert(Vector<U>::scalar_tag::value,
+ "wrong type U, only LE-scalar, or byte types are allowed");
+ return span<const uint8_t>(vec.Data(), vec.size() * sizeof(U));
+}
+
+// Represent a vector much like the template above, but in this case we
+// don't know what the element types are (used with reflection.h).
+class VectorOfAny {
+ public:
+ uoffset_t size() const { return EndianScalar(length_); }
+
+ const uint8_t *Data() const {
+ return reinterpret_cast<const uint8_t *>(&length_ + 1);
+ }
+ uint8_t *Data() { return reinterpret_cast<uint8_t *>(&length_ + 1); }
+
+ protected:
+ VectorOfAny();
+
+ uoffset_t length_;
+
+ private:
+ VectorOfAny(const VectorOfAny &);
+ VectorOfAny &operator=(const VectorOfAny &);
+};
+
+template<typename T, typename U>
+Vector<Offset<T>> *VectorCast(Vector<Offset<U>> *ptr) {
+ static_assert(std::is_base_of<T, U>::value, "Unrelated types");
+ return reinterpret_cast<Vector<Offset<T>> *>(ptr);
+}
+
+template<typename T, typename U>
+const Vector<Offset<T>> *VectorCast(const Vector<Offset<U>> *ptr) {
+ static_assert(std::is_base_of<T, U>::value, "Unrelated types");
+ return reinterpret_cast<const Vector<Offset<T>> *>(ptr);
+}
+
+// Convenient helper function to get the length of any vector, regardless
+// of whether it is null or not (the field is not set).
+template<typename T> static inline size_t VectorLength(const Vector<T> *v) {
+ return v ? v->size() : 0;
+}
+
+} // namespace flatbuffers
+
+#endif  // FLATBUFFERS_VECTOR_H_
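
A hedged usage sketch of the Vector, VectorIterator and make_span API added above, written against the MyGame::Sample monster schema whose generated header appears later in this diff; the include path of that generated header is an assumption:

#include <vector>

#include "flatbuffers/flatbuffers.h"
#include "monster_generated.h"  // assumed output path of flatc for samples/monster.fbs

int main() {
  flatbuffers::FlatBufferBuilder fbb;
  std::vector<uint8_t> inv = {1, 2, 3, 4};
  auto orc = MyGame::Sample::CreateMonsterDirect(fbb, /*pos=*/nullptr,
                                                 /*mana=*/150, /*hp=*/80,
                                                 "Orc", &inv);
  fbb.Finish(orc);

  auto *monster = MyGame::Sample::GetMonster(fbb.GetBufferPointer());
  const flatbuffers::Vector<uint8_t> *v = monster->inventory();

  // begin()/end() return the VectorIterator defined in vector.h; Get() is the
  // bounds-asserting element accessor.
  int sum = 0;
  for (auto it = v->begin(); it != v->end(); ++it) sum += *it;
  sum += v->Get(0);

  // make_span() views the scalar contents without copying (LE scalars or
  // byte-sized elements only, per is_span_observable).
  flatbuffers::span<const uint8_t> bytes = flatbuffers::make_span(*v);
  (void)bytes;

  return sum == 11 ? 0 : 1;
}
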
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/vector_downward.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/vector_downward.h
new file mode 100644
index 00000000000..3391391856f
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/vector_downward.h
@@ -0,0 +1,271 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_VECTOR_DOWNWARD_H_
+#define FLATBUFFERS_VECTOR_DOWNWARD_H_
+
+#include "flatbuffers/base.h"
+#include "flatbuffers/default_allocator.h"
+#include "flatbuffers/detached_buffer.h"
+
+namespace flatbuffers {
+
+// This is a minimal replication of std::vector<uint8_t> functionality,
+// except growing from higher to lower addresses, i.e. push_back() inserts data
+// at the lowest address in the vector.
+// Since this vector leaves the lower part unused, we support a "scratch-pad"
+// that can be stored there for temporary data, to share the allocated space.
+// Essentially, this supports 2 std::vectors in a single buffer.
+class vector_downward {
+ public:
+ explicit vector_downward(size_t initial_size, Allocator *allocator,
+ bool own_allocator, size_t buffer_minalign)
+ : allocator_(allocator),
+ own_allocator_(own_allocator),
+ initial_size_(initial_size),
+ buffer_minalign_(buffer_minalign),
+ reserved_(0),
+ size_(0),
+ buf_(nullptr),
+ cur_(nullptr),
+ scratch_(nullptr) {}
+
+ vector_downward(vector_downward &&other)
+ // clang-format on
+ : allocator_(other.allocator_),
+ own_allocator_(other.own_allocator_),
+ initial_size_(other.initial_size_),
+ buffer_minalign_(other.buffer_minalign_),
+ reserved_(other.reserved_),
+ size_(other.size_),
+ buf_(other.buf_),
+ cur_(other.cur_),
+ scratch_(other.scratch_) {
+ // No change in other.allocator_
+ // No change in other.initial_size_
+ // No change in other.buffer_minalign_
+ other.own_allocator_ = false;
+ other.reserved_ = 0;
+ other.buf_ = nullptr;
+ other.cur_ = nullptr;
+ other.scratch_ = nullptr;
+ }
+
+ vector_downward &operator=(vector_downward &&other) {
+ // Move construct a temporary and swap idiom
+ vector_downward temp(std::move(other));
+ swap(temp);
+ return *this;
+ }
+
+ ~vector_downward() {
+ clear_buffer();
+ clear_allocator();
+ }
+
+ void reset() {
+ clear_buffer();
+ clear();
+ }
+
+ void clear() {
+ if (buf_) {
+ cur_ = buf_ + reserved_;
+ } else {
+ reserved_ = 0;
+ cur_ = nullptr;
+ }
+ size_ = 0;
+ clear_scratch();
+ }
+
+ void clear_scratch() { scratch_ = buf_; }
+
+ void clear_allocator() {
+ if (own_allocator_ && allocator_) { delete allocator_; }
+ allocator_ = nullptr;
+ own_allocator_ = false;
+ }
+
+ void clear_buffer() {
+ if (buf_) Deallocate(allocator_, buf_, reserved_);
+ buf_ = nullptr;
+ }
+
+ // Relinquish the pointer to the caller.
+ uint8_t *release_raw(size_t &allocated_bytes, size_t &offset) {
+ auto *buf = buf_;
+ allocated_bytes = reserved_;
+ offset = static_cast<size_t>(cur_ - buf_);
+
+ // release_raw only relinquishes the buffer ownership.
+ // Does not deallocate or reset the allocator. Destructor will do that.
+ buf_ = nullptr;
+ clear();
+ return buf;
+ }
+
+ // Relinquish the pointer to the caller.
+ DetachedBuffer release() {
+ // allocator ownership (if any) is transferred to DetachedBuffer.
+ DetachedBuffer fb(allocator_, own_allocator_, buf_, reserved_, cur_,
+ size());
+ if (own_allocator_) {
+ allocator_ = nullptr;
+ own_allocator_ = false;
+ }
+ buf_ = nullptr;
+ clear();
+ return fb;
+ }
+
+ size_t ensure_space(size_t len) {
+ FLATBUFFERS_ASSERT(cur_ >= scratch_ && scratch_ >= buf_);
+ if (len > static_cast<size_t>(cur_ - scratch_)) { reallocate(len); }
+ // Beyond this, signed offsets may not have enough range:
+ // (FlatBuffers > 2GB not supported).
+ FLATBUFFERS_ASSERT(size() < FLATBUFFERS_MAX_BUFFER_SIZE);
+ return len;
+ }
+
+ inline uint8_t *make_space(size_t len) {
+ if (len) {
+ ensure_space(len);
+ cur_ -= len;
+ size_ += static_cast<uoffset_t>(len);
+ }
+ return cur_;
+ }
+
+ // Returns nullptr if using the DefaultAllocator.
+ Allocator *get_custom_allocator() { return allocator_; }
+
+ inline uoffset_t size() const { return size_; }
+
+ uoffset_t scratch_size() const {
+ return static_cast<uoffset_t>(scratch_ - buf_);
+ }
+
+ size_t capacity() const { return reserved_; }
+
+ uint8_t *data() const {
+ FLATBUFFERS_ASSERT(cur_);
+ return cur_;
+ }
+
+ uint8_t *scratch_data() const {
+ FLATBUFFERS_ASSERT(buf_);
+ return buf_;
+ }
+
+ uint8_t *scratch_end() const {
+ FLATBUFFERS_ASSERT(scratch_);
+ return scratch_;
+ }
+
+ uint8_t *data_at(size_t offset) const { return buf_ + reserved_ - offset; }
+
+ void push(const uint8_t *bytes, size_t num) {
+ if (num > 0) { memcpy(make_space(num), bytes, num); }
+ }
+
+ // Specialized version of push() that avoids memcpy call for small data.
+ template<typename T> void push_small(const T &little_endian_t) {
+ make_space(sizeof(T));
+ *reinterpret_cast<T *>(cur_) = little_endian_t;
+ }
+
+ template<typename T> void scratch_push_small(const T &t) {
+ ensure_space(sizeof(T));
+ *reinterpret_cast<T *>(scratch_) = t;
+ scratch_ += sizeof(T);
+ }
+
+ // fill() is most frequently called with small byte counts (<= 4),
+ // which is why we're using loops rather than calling memset.
+ void fill(size_t zero_pad_bytes) {
+ make_space(zero_pad_bytes);
+ for (size_t i = 0; i < zero_pad_bytes; i++) cur_[i] = 0;
+ }
+
+ // Version for when we know the size is larger.
+ // Precondition: zero_pad_bytes > 0
+ void fill_big(size_t zero_pad_bytes) {
+ memset(make_space(zero_pad_bytes), 0, zero_pad_bytes);
+ }
+
+ void pop(size_t bytes_to_remove) {
+ cur_ += bytes_to_remove;
+ size_ -= static_cast<uoffset_t>(bytes_to_remove);
+ }
+
+ void scratch_pop(size_t bytes_to_remove) { scratch_ -= bytes_to_remove; }
+
+ void swap(vector_downward &other) {
+ using std::swap;
+ swap(allocator_, other.allocator_);
+ swap(own_allocator_, other.own_allocator_);
+ swap(initial_size_, other.initial_size_);
+ swap(buffer_minalign_, other.buffer_minalign_);
+ swap(reserved_, other.reserved_);
+ swap(size_, other.size_);
+ swap(buf_, other.buf_);
+ swap(cur_, other.cur_);
+ swap(scratch_, other.scratch_);
+ }
+
+ void swap_allocator(vector_downward &other) {
+ using std::swap;
+ swap(allocator_, other.allocator_);
+ swap(own_allocator_, other.own_allocator_);
+ }
+
+ private:
+ // You shouldn't really be copying instances of this class.
+ FLATBUFFERS_DELETE_FUNC(vector_downward(const vector_downward &));
+ FLATBUFFERS_DELETE_FUNC(vector_downward &operator=(const vector_downward &));
+
+ Allocator *allocator_;
+ bool own_allocator_;
+ size_t initial_size_;
+ size_t buffer_minalign_;
+ size_t reserved_;
+ uoffset_t size_;
+ uint8_t *buf_;
+ uint8_t *cur_; // Points at location between empty (below) and used (above).
+ uint8_t *scratch_; // Points to the end of the scratchpad in use.
+
+ void reallocate(size_t len) {
+ auto old_reserved = reserved_;
+ auto old_size = size();
+ auto old_scratch_size = scratch_size();
+ reserved_ +=
+ (std::max)(len, old_reserved ? old_reserved / 2 : initial_size_);
+ reserved_ = (reserved_ + buffer_minalign_ - 1) & ~(buffer_minalign_ - 1);
+ if (buf_) {
+ buf_ = ReallocateDownward(allocator_, buf_, old_reserved, reserved_,
+ old_size, old_scratch_size);
+ } else {
+ buf_ = Allocate(allocator_, reserved_);
+ }
+ cur_ = buf_ + reserved_ - old_size;
+ scratch_ = buf_ + old_scratch_size;
+ }
+};
+
+} // namespace flatbuffers
+
+#endif // FLATBUFFERS_VECTOR_DOWNWARD_H_
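
A hedged sketch of the downward-growth behaviour described in the header comment above. The class is normally driven by FlatBufferBuilder rather than used directly, and passing a null Allocator* to fall back on the default allocator is an assumption about default_allocator.h, which is not part of this hunk:

#include <cassert>
#include <cstdint>

#include "flatbuffers/flatbuffers.h"

int main() {
  flatbuffers::vector_downward buf(/*initial_size=*/1024,
                                   /*allocator=*/nullptr,
                                   /*own_allocator=*/false,
                                   /*buffer_minalign=*/8);
  const uint8_t first[] = {0xAA, 0xBB};
  const uint8_t second[] = {0xCC};
  buf.push(first, sizeof(first));    // occupies the highest addresses
  buf.push(second, sizeof(second));  // lands just below the previous push

  assert(buf.size() == 3);
  assert(buf.data()[0] == 0xCC);  // newest byte sits at the lowest address
  assert(buf.data()[2] == 0xBB);

  // release() hands the storage (and allocator ownership, if any) over to a
  // DetachedBuffer and resets the vector.
  flatbuffers::DetachedBuffer detached = buf.release();
  return detached.size() == 3 ? 0 : 1;
}
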
diff --git a/chromium/third_party/flatbuffers/src/include/flatbuffers/verifier.h b/chromium/third_party/flatbuffers/src/include/flatbuffers/verifier.h
new file mode 100644
index 00000000000..b6971c1dccf
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/include/flatbuffers/verifier.h
@@ -0,0 +1,270 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_VERIFIER_H_
+#define FLATBUFFERS_VERIFIER_H_
+
+#include "flatbuffers/base.h"
+#include "flatbuffers/util.h"
+#include "flatbuffers/vector.h"
+
+namespace flatbuffers {
+
+// Helper class to verify the integrity of a FlatBuffer
+class Verifier FLATBUFFERS_FINAL_CLASS {
+ public:
+ Verifier(const uint8_t *buf, size_t buf_len, uoffset_t _max_depth = 64,
+ uoffset_t _max_tables = 1000000, bool _check_alignment = true)
+ : buf_(buf),
+ size_(buf_len),
+ depth_(0),
+ max_depth_(_max_depth),
+ num_tables_(0),
+ max_tables_(_max_tables),
+ upper_bound_(0),
+ check_alignment_(_check_alignment) {
+ FLATBUFFERS_ASSERT(size_ < FLATBUFFERS_MAX_BUFFER_SIZE);
+ }
+
+ // Central location where any verification failures register.
+ bool Check(bool ok) const {
+ // clang-format off
+ #ifdef FLATBUFFERS_DEBUG_VERIFICATION_FAILURE
+ FLATBUFFERS_ASSERT(ok);
+ #endif
+ #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
+ if (!ok)
+ upper_bound_ = 0;
+ #endif
+ // clang-format on
+ return ok;
+ }
+
+ // Verify any range within the buffer.
+ bool Verify(size_t elem, size_t elem_len) const {
+ // clang-format off
+ #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
+ auto upper_bound = elem + elem_len;
+ if (upper_bound_ < upper_bound)
+ upper_bound_ = upper_bound;
+ #endif
+ // clang-format on
+ return Check(elem_len < size_ && elem <= size_ - elem_len);
+ }
+
+ template<typename T> bool VerifyAlignment(size_t elem) const {
+ return Check((elem & (sizeof(T) - 1)) == 0 || !check_alignment_);
+ }
+
+ // Verify a range indicated by sizeof(T).
+ template<typename T> bool Verify(size_t elem) const {
+ return VerifyAlignment<T>(elem) && Verify(elem, sizeof(T));
+ }
+
+ bool VerifyFromPointer(const uint8_t *p, size_t len) {
+ auto o = static_cast<size_t>(p - buf_);
+ return Verify(o, len);
+ }
+
+ // Verify relative to a known-good base pointer.
+ bool Verify(const uint8_t *base, voffset_t elem_off, size_t elem_len) const {
+ return Verify(static_cast<size_t>(base - buf_) + elem_off, elem_len);
+ }
+
+ template<typename T>
+ bool Verify(const uint8_t *base, voffset_t elem_off) const {
+ return Verify(static_cast<size_t>(base - buf_) + elem_off, sizeof(T));
+ }
+
+ // Verify a pointer (may be NULL) of a table type.
+ template<typename T> bool VerifyTable(const T *table) {
+ return !table || table->Verify(*this);
+ }
+
+ // Verify a pointer (may be NULL) of any vector type.
+ template<typename T> bool VerifyVector(const Vector<T> *vec) const {
+ return !vec || VerifyVectorOrString(reinterpret_cast<const uint8_t *>(vec),
+ sizeof(T));
+ }
+
+ // Verify a pointer (may be NULL) of a vector to struct.
+ template<typename T> bool VerifyVector(const Vector<const T *> *vec) const {
+ return VerifyVector(reinterpret_cast<const Vector<T> *>(vec));
+ }
+
+ // Verify a pointer (may be NULL) to string.
+ bool VerifyString(const String *str) const {
+ size_t end;
+ return !str || (VerifyVectorOrString(reinterpret_cast<const uint8_t *>(str),
+ 1, &end) &&
+ Verify(end, 1) && // Must have terminator
+ Check(buf_[end] == '\0')); // Terminating byte must be 0.
+ }
+
+ // Common code between vectors and strings.
+ bool VerifyVectorOrString(const uint8_t *vec, size_t elem_size,
+ size_t *end = nullptr) const {
+ auto veco = static_cast<size_t>(vec - buf_);
+ // Check we can read the size field.
+ if (!Verify<uoffset_t>(veco)) return false;
+ // Check the whole array. If this is a string, the byte past the array
+ // must be 0.
+ auto size = ReadScalar<uoffset_t>(vec);
+ auto max_elems = FLATBUFFERS_MAX_BUFFER_SIZE / elem_size;
+ if (!Check(size < max_elems))
+ return false; // Protect against byte_size overflowing.
+ auto byte_size = sizeof(size) + elem_size * size;
+ if (end) *end = veco + byte_size;
+ return Verify(veco, byte_size);
+ }
+
+ // Special case for string contents, after the above has been called.
+ bool VerifyVectorOfStrings(const Vector<Offset<String>> *vec) const {
+ if (vec) {
+ for (uoffset_t i = 0; i < vec->size(); i++) {
+ if (!VerifyString(vec->Get(i))) return false;
+ }
+ }
+ return true;
+ }
+
+ // Special case for table contents, after the above has been called.
+ template<typename T> bool VerifyVectorOfTables(const Vector<Offset<T>> *vec) {
+ if (vec) {
+ for (uoffset_t i = 0; i < vec->size(); i++) {
+ if (!vec->Get(i)->Verify(*this)) return false;
+ }
+ }
+ return true;
+ }
+
+ __supress_ubsan__("unsigned-integer-overflow") bool VerifyTableStart(
+ const uint8_t *table) {
+ // Check the vtable offset.
+ auto tableo = static_cast<size_t>(table - buf_);
+ if (!Verify<soffset_t>(tableo)) return false;
+ // This offset may be signed, but doing the subtraction unsigned always
+ // gives the result we want.
+ auto vtableo = tableo - static_cast<size_t>(ReadScalar<soffset_t>(table));
+ // Check the vtable size field, then check vtable fits in its entirety.
+ return VerifyComplexity() && Verify<voffset_t>(vtableo) &&
+ VerifyAlignment<voffset_t>(ReadScalar<voffset_t>(buf_ + vtableo)) &&
+ Verify(vtableo, ReadScalar<voffset_t>(buf_ + vtableo));
+ }
+
+ template<typename T>
+ bool VerifyBufferFromStart(const char *identifier, size_t start) {
+ if (identifier && !Check((size_ >= 2 * sizeof(flatbuffers::uoffset_t) &&
+ BufferHasIdentifier(buf_ + start, identifier)))) {
+ return false;
+ }
+
+ // Call T::Verify, which must be in the generated code for this type.
+ auto o = VerifyOffset(start);
+ return o && reinterpret_cast<const T *>(buf_ + start + o)->Verify(*this)
+ // clang-format off
+ #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
+ && GetComputedSize()
+ #endif
+ ;
+ // clang-format on
+ }
+
+ template<typename T>
+ bool VerifyNestedFlatBuffer(const Vector<uint8_t> *buf,
+ const char *identifier) {
+ if (!buf) return true;
+ Verifier nested_verifier(buf->data(), buf->size());
+ return nested_verifier.VerifyBuffer<T>(identifier);
+ }
+
+ // Verify this whole buffer, starting with root type T.
+ template<typename T> bool VerifyBuffer() { return VerifyBuffer<T>(nullptr); }
+
+ template<typename T> bool VerifyBuffer(const char *identifier) {
+ return VerifyBufferFromStart<T>(identifier, 0);
+ }
+
+ template<typename T> bool VerifySizePrefixedBuffer(const char *identifier) {
+ return Verify<uoffset_t>(0U) &&
+ ReadScalar<uoffset_t>(buf_) == size_ - sizeof(uoffset_t) &&
+ VerifyBufferFromStart<T>(identifier, sizeof(uoffset_t));
+ }
+
+ uoffset_t VerifyOffset(size_t start) const {
+ if (!Verify<uoffset_t>(start)) return 0;
+ auto o = ReadScalar<uoffset_t>(buf_ + start);
+ // May not point to itself.
+ if (!Check(o != 0)) return 0;
+ // Can't wrap around / buffers are max 2GB.
+ if (!Check(static_cast<soffset_t>(o) >= 0)) return 0;
+ // Must be inside the buffer to create a pointer from it (pointer outside
+ // buffer is UB).
+ if (!Verify(start + o, 1)) return 0;
+ return o;
+ }
+
+ uoffset_t VerifyOffset(const uint8_t *base, voffset_t start) const {
+ return VerifyOffset(static_cast<size_t>(base - buf_) + start);
+ }
+
+ // Called at the start of a table to increase counters measuring data
+  // structure depth and amount, and to possibly bail out with false if
+ // limits set by the constructor have been hit. Needs to be balanced
+ // with EndTable().
+ bool VerifyComplexity() {
+ depth_++;
+ num_tables_++;
+ return Check(depth_ <= max_depth_ && num_tables_ <= max_tables_);
+ }
+
+ // Called at the end of a table to pop the depth count.
+ bool EndTable() {
+ depth_--;
+ return true;
+ }
+
+ // Returns the message size in bytes
+ size_t GetComputedSize() const {
+ // clang-format off
+ #ifdef FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE
+ uintptr_t size = upper_bound_;
+ // Align the size to uoffset_t
+ size = (size - 1 + sizeof(uoffset_t)) & ~(sizeof(uoffset_t) - 1);
+ return (size > size_) ? 0 : size;
+ #else
+ // Must turn on FLATBUFFERS_TRACK_VERIFIER_BUFFER_SIZE for this to work.
+ (void)upper_bound_;
+ FLATBUFFERS_ASSERT(false);
+ return 0;
+ #endif
+ // clang-format on
+ }
+
+ private:
+ const uint8_t *buf_;
+ size_t size_;
+ uoffset_t depth_;
+ uoffset_t max_depth_;
+ uoffset_t num_tables_;
+ uoffset_t max_tables_;
+ mutable size_t upper_bound_;
+ bool check_alignment_;
+};
+
+} // namespace flatbuffers
+
+#endif // FLATBUFFERS_VERIFIER_H_
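
A hedged sketch of the intended Verifier usage, again written against the sample schema whose generated header appears later in this diff (header path assumed; VerifyMonsterBuffer is the generated wrapper shown in monster_generated.h below):

#include "flatbuffers/flatbuffers.h"
#include "monster_generated.h"  // assumed output path of flatc for samples/monster.fbs

bool LooksLikeMonster(const uint8_t *data, size_t len) {
  // max_depth / max_tables keep a malicious buffer from causing unbounded work.
  flatbuffers::Verifier verifier(data, len, /*_max_depth=*/64,
                                 /*_max_tables=*/1000000);
  return MyGame::Sample::VerifyMonsterBuffer(verifier);
}

int main() {
  flatbuffers::FlatBufferBuilder fbb;
  auto orc = MyGame::Sample::CreateMonsterDirect(fbb, /*pos=*/nullptr,
                                                 /*mana=*/150, /*hp=*/80, "Orc");
  fbb.Finish(orc);
  return LooksLikeMonster(fbb.GetBufferPointer(), fbb.GetSize()) ? 0 : 1;
}
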
diff --git a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/ByteBufferReadWriteBuf.java b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/ByteBufferReadWriteBuf.java
index 55acdd05a55..aaf72fe8192 100644
--- a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/ByteBufferReadWriteBuf.java
+++ b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/ByteBufferReadWriteBuf.java
@@ -2,7 +2,6 @@ package com.google.flatbuffers;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
-import java.nio.Buffer;
public class ByteBufferReadWriteBuf implements ReadWriteBuf {
@@ -15,7 +14,7 @@ public class ByteBufferReadWriteBuf implements ReadWriteBuf {
@Override
public void clear() {
- ((Buffer) buffer).clear();
+ buffer.clear();
}
@Override
@@ -118,9 +117,9 @@ public class ByteBufferReadWriteBuf implements ReadWriteBuf {
public void set(int index, byte[] value, int start, int length) {
requestCapacity(index + (length - start));
int curPos = buffer.position();
- ((Buffer) buffer).position(index);
+ buffer.position(index);
buffer.put(value, start, length);
- ((Buffer) buffer).position(curPos);
+ buffer.position(curPos);
}
@Override
diff --git a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/ByteBufferUtil.java b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/ByteBufferUtil.java
index da86a35c4e9..624dc4e2f7c 100644
--- a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/ByteBufferUtil.java
+++ b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/ByteBufferUtil.java
@@ -19,7 +19,6 @@ package com.google.flatbuffers;
import static com.google.flatbuffers.Constants.*;
import java.nio.ByteBuffer;
-import java.nio.Buffer;
/// @file
/// @addtogroup flatbuffers_java_api
@@ -50,7 +49,7 @@ public class ByteBufferUtil {
*/
public static ByteBuffer removeSizePrefix(ByteBuffer bb) {
ByteBuffer s = bb.duplicate();
- ((Buffer) s).position(s.position() + SIZE_PREFIX_LENGTH);
+ s.position(s.position() + SIZE_PREFIX_LENGTH);
return s;
}
diff --git a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/FlatBufferBuilder.java b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/FlatBufferBuilder.java
index 1e181488915..a954d9fbbd4 100644
--- a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/FlatBufferBuilder.java
+++ b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/FlatBufferBuilder.java
@@ -92,7 +92,7 @@ public class FlatBufferBuilder {
this.bb_factory = bb_factory;
if (existing_bb != null) {
bb = existing_bb;
- ((Buffer) bb).clear();
+ bb.clear();
bb.order(ByteOrder.LITTLE_ENDIAN);
} else {
bb = bb_factory.newByteBuffer(initial_size);
@@ -154,7 +154,7 @@ public class FlatBufferBuilder {
public FlatBufferBuilder init(ByteBuffer existing_bb, ByteBufferFactory bb_factory){
this.bb_factory = bb_factory;
bb = existing_bb;
- ((Buffer) bb).clear();
+ bb.clear();
bb.order(ByteOrder.LITTLE_ENDIAN);
minalign = 1;
space = bb.capacity();
@@ -235,7 +235,7 @@ public class FlatBufferBuilder {
*/
public void clear(){
space = bb.capacity();
- ((Buffer) bb).clear();
+ bb.clear();
minalign = 1;
while(vtable_in_use > 0) vtable[--vtable_in_use] = 0;
vtable_in_use = 0;
@@ -273,10 +273,10 @@ public class FlatBufferBuilder {
new_buf_size = (old_buf_size & 0xC0000000) != 0 ? MAX_BUFFER_SIZE : old_buf_size << 1;
}
- ((Buffer) bb).position(0);
+ bb.position(0);
ByteBuffer nbb = bb_factory.newByteBuffer(new_buf_size);
- new_buf_size = ((Buffer) nbb).clear().capacity(); // Ensure the returned buffer is treated as empty
- ((Buffer) nbb).position(new_buf_size - old_buf_size);
+ new_buf_size = nbb.clear().capacity(); // Ensure the returned buffer is treated as empty
+ nbb.position(new_buf_size - old_buf_size);
nbb.put(bb);
return nbb;
}
@@ -527,7 +527,7 @@ public class FlatBufferBuilder {
int length = elem_size * num_elems;
startVector(elem_size, num_elems, alignment);
- ((Buffer) bb).position(space -= length);
+ bb.position(space -= length);
// Slice and limit the copy vector to point to the 'array'
ByteBuffer copy = bb.slice().order(ByteOrder.LITTLE_ENDIAN);
@@ -602,7 +602,7 @@ public class FlatBufferBuilder {
int length = utf8.encodedLength(s);
addByte((byte)0);
startVector(1, length, 1);
- ((Buffer) bb).position(space -= length);
+ bb.position(space -= length);
utf8.encodeUtf8(s, bb);
return endVector();
}
@@ -617,7 +617,7 @@ public class FlatBufferBuilder {
int length = s.remaining();
addByte((byte)0);
startVector(1, length, 1);
- ((Buffer) bb).position(space -= length);
+ bb.position(space -= length);
bb.put(s);
return endVector();
}
@@ -631,7 +631,7 @@ public class FlatBufferBuilder {
public int createByteVector(byte[] arr) {
int length = arr.length;
startVector(1, length, 1);
- ((Buffer) bb).position(space -= length);
+ bb.position(space -= length);
bb.put(arr);
return endVector();
}
@@ -646,7 +646,7 @@ public class FlatBufferBuilder {
*/
public int createByteVector(byte[] arr, int offset, int length) {
startVector(1, length, 1);
- ((Buffer) bb).position(space -= length);
+ bb.position(space -= length);
bb.put(arr, offset, length);
return endVector();
}
@@ -663,7 +663,7 @@ public class FlatBufferBuilder {
public int createByteVector(ByteBuffer byteBuffer) {
int length = byteBuffer.remaining();
startVector(1, length, 1);
- ((Buffer) bb).position(space -= length);
+ bb.position(space -= length);
bb.put(byteBuffer);
return endVector();
}
@@ -953,7 +953,7 @@ public class FlatBufferBuilder {
if (size_prefix) {
addInt(bb.capacity() - space);
}
- ((Buffer) bb).position(space);
+ bb.position(space);
finished = true;
}
@@ -1067,7 +1067,7 @@ public class FlatBufferBuilder {
public byte[] sizedByteArray(int start, int length){
finished();
byte[] array = new byte[length];
- ((Buffer) bb).position(start);
+ bb.position(start);
bb.get(array);
return array;
}
@@ -1090,7 +1090,7 @@ public class FlatBufferBuilder {
public InputStream sizedInputStream() {
finished();
ByteBuffer duplicate = bb.duplicate();
- ((Buffer) duplicate).position(space);
+ duplicate.position(space);
duplicate.limit(bb.capacity());
return new ByteBufferBackedInputStream(duplicate);
}
diff --git a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/FlexBuffers.java b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/FlexBuffers.java
index c88624678d4..75a0595f67a 100644
--- a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/FlexBuffers.java
+++ b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/FlexBuffers.java
@@ -23,7 +23,6 @@ import static com.google.flatbuffers.FlexBuffers.Unsigned.shortToUnsignedInt;
import java.math.BigInteger;
import java.nio.ByteBuffer;
-import java.nio.Buffer;
import java.nio.charset.StandardCharsets;
/// @file
@@ -689,7 +688,7 @@ public class FlexBuffers {
*/
public ByteBuffer data() {
ByteBuffer dup = ByteBuffer.wrap(bb.data());
- ((Buffer) dup).position(end);
+ dup.position(end);
dup.limit(end + size());
return dup.asReadOnlyBuffer().slice();
}
@@ -789,7 +788,12 @@ public class FlexBuffers {
if (io == other.length) {
// in our buffer we have an additional \0 byte
// but this does not exist in regular Java strings, so we return now
- return c1 - c2;
+ int cmp = c1 - c2;
+ if (cmp != 0 || bb.get(ia) == '\0') {
+ return cmp;
+ } else {
+ return 1;
+ }
}
}
while (c1 == c2);
@@ -962,7 +966,12 @@ public class FlexBuffers {
if (l2 == other.length) {
// in our buffer we have an additional \0 byte
// but this does not exist in regular Java strings, so we return now
- return c1 - c2;
+ int cmp = c1 - c2;
+ if (cmp != 0 || bb.get(l1) == '\0') {
+ return cmp;
+ } else {
+ return 1;
+ }
}
}
while (c1 == c2);
diff --git a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Table.java b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Table.java
index 46fa570448c..7f416396e30 100644
--- a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Table.java
+++ b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Table.java
@@ -18,7 +18,6 @@ package com.google.flatbuffers;
import static com.google.flatbuffers.Constants.*;
import java.nio.ByteBuffer;
-import java.nio.Buffer;
import java.nio.ByteOrder;
/// @cond FLATBUFFERS_INTERNAL
@@ -153,7 +152,7 @@ public class Table {
if (o == 0) return null;
ByteBuffer bb = this.bb.duplicate().order(ByteOrder.LITTLE_ENDIAN);
int vectorstart = __vector(o);
- ((Buffer) bb).position(vectorstart);
+ bb.position(vectorstart);
bb.limit(vectorstart + __vector_len(o) * elem_size);
return bb;
}
@@ -175,7 +174,7 @@ public class Table {
int vectorstart = __vector(o);
bb.rewind();
bb.limit(vectorstart + __vector_len(o) * elem_size);
- ((Buffer) bb).position(vectorstart);
+ bb.position(vectorstart);
return bb;
}
diff --git a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Utf8Old.java b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Utf8Old.java
index 64b0cf02e00..3dac714bb67 100644
--- a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Utf8Old.java
+++ b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Utf8Old.java
@@ -17,7 +17,6 @@
package com.google.flatbuffers;
import java.nio.ByteBuffer;
-import java.nio.Buffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.CharsetDecoder;
@@ -56,7 +55,7 @@ public class Utf8Old extends Utf8 {
if (cache.lastOutput == null || cache.lastOutput.capacity() < estimated) {
cache.lastOutput = ByteBuffer.allocate(Math.max(128, estimated));
}
- ((Buffer) cache.lastOutput).clear();
+ cache.lastOutput.clear();
cache.lastInput = in;
CharBuffer wrap = (in instanceof CharBuffer) ?
(CharBuffer) in : CharBuffer.wrap(in);
@@ -68,7 +67,7 @@ public class Utf8Old extends Utf8 {
throw new IllegalArgumentException("bad character encoding", e);
}
}
- ((Buffer) cache.lastOutput).flip();
+ cache.lastOutput.flip();
return cache.lastOutput.remaining();
}
@@ -88,7 +87,7 @@ public class Utf8Old extends Utf8 {
CharsetDecoder decoder = CACHE.get().decoder;
decoder.reset();
buffer = buffer.duplicate();
- ((Buffer) buffer).position(offset);
+ buffer.position(offset);
buffer.limit(offset + length);
try {
CharBuffer result = decoder.decode(buffer);
diff --git a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Utf8Safe.java b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Utf8Safe.java
index a5900044ec9..523e3f1b4c4 100644
--- a/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Utf8Safe.java
+++ b/chromium/third_party/flatbuffers/src/java/com/google/flatbuffers/Utf8Safe.java
@@ -31,7 +31,6 @@
package com.google.flatbuffers;
import java.nio.ByteBuffer;
-import java.nio.Buffer;
import static java.lang.Character.MAX_SURROGATE;
import static java.lang.Character.MIN_SUPPLEMENTARY_CODE_POINT;
import static java.lang.Character.MIN_SURROGATE;
@@ -311,7 +310,7 @@ final public class Utf8Safe extends Utf8 {
}
if (inIx == inLength) {
// Successfully encoded the entire string.
- ((Buffer) out).position(outIx + inIx);
+ out.position(outIx + inIx);
return;
}
@@ -354,7 +353,7 @@ final public class Utf8Safe extends Utf8 {
}
// Successfully encoded the entire string.
- ((Buffer) out).position(outIx);
+ out.position(outIx);
} catch (IndexOutOfBoundsException e) {
// TODO(nathanmittler): Consider making the API throw IndexOutOfBoundsException instead.
@@ -435,7 +434,7 @@ final public class Utf8Safe extends Utf8 {
int start = out.arrayOffset();
int end = encodeUtf8Array(in, out.array(), start + out.position(),
out.remaining());
- ((Buffer) out).position(end - start);
+ out.position(end - start);
} else {
encodeUtf8Buffer(in, out);
}
diff --git a/chromium/third_party/flatbuffers/src/package.json b/chromium/third_party/flatbuffers/src/package.json
index 8d143bedb7d..49097874d2d 100644
--- a/chromium/third_party/flatbuffers/src/package.json
+++ b/chromium/third_party/flatbuffers/src/package.json
@@ -1,13 +1,13 @@
{
"name": "flatbuffers",
- "version": "2.0.3",
+ "version": "2.0.4",
"description": "Memory Efficient Serialization Library",
"files": [
- "js/*.js",
- "js/*.d.ts",
- "mjs/*.js",
- "mjs/*.d.ts",
- "ts/*.ts"
+ "js/**/*.js",
+ "js/**/*.d.ts",
+ "mjs/**/*.js",
+ "mjs/**/*.d.ts",
+ "ts/**/*.ts"
],
"main": "js/flatbuffers.js",
"module": "mjs/flatbuffers.js",
diff --git a/chromium/third_party/flatbuffers/src/pom.xml b/chromium/third_party/flatbuffers/src/pom.xml
index fe1cec7e51e..aa318d0050a 100644
--- a/chromium/third_party/flatbuffers/src/pom.xml
+++ b/chromium/third_party/flatbuffers/src/pom.xml
@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>com.google.flatbuffers</groupId>
<artifactId>flatbuffers-java</artifactId>
- <version>2.0.2</version>
+ <version>2.0.3</version>
<packaging>bundle</packaging>
<name>FlatBuffers Java API</name>
<description>
@@ -42,103 +42,112 @@
</distributionManagement>
<build>
<sourceDirectory>java</sourceDirectory>
- <plugins>
- <plugin>
- <artifactId>maven-compiler-plugin</artifactId>
- <configuration>
- <source>1.8</source>
- <target>1.8</target>
- </configuration>
- <version>3.2</version>
- </plugin>
- <plugin>
- <artifactId>maven-surefire-plugin</artifactId>
- <configuration>
- <includes>
- <include>**/*Test.java</include>
- </includes>
- </configuration>
- <version>2.18.1</version>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-source-plugin</artifactId>
- <version>2.3</version>
- <executions>
- <execution>
- <id>attach-sources</id>
- <goals>
- <goal>jar</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-javadoc-plugin</artifactId>
- <version>2.9.1</version>
- <configuration>
- <additionalparam>-Xdoclint:none</additionalparam>
- <additionalOptions>-Xdoclint:none</additionalOptions>
- </configuration>
- <executions>
- <execution>
- <id>attach-javadocs</id>
- <goals>
- <goal>jar</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.felix</groupId>
- <artifactId>maven-bundle-plugin</artifactId>
- <version>3.0.1</version>
- <extensions>true</extensions>
- </plugin>
- <plugin>
- <groupId>org.sonatype.plugins</groupId>
- <artifactId>nexus-staging-maven-plugin</artifactId>
- <version>1.6.7</version>
- <extensions>true</extensions>
- <configuration>
- <serverId>ossrh</serverId>
- <nexusUrl>https://oss.sonatype.org/</nexusUrl>
- <autoReleaseAfterClose>true</autoReleaseAfterClose>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-gpg-plugin</artifactId>
- <version>1.6</version>
- <executions>
- <execution>
- <id>sign-artifacts</id>
- <phase>verify</phase>
- <goals>
- <goal>sign</goal>
- </goals>
+ </build>
+ <profiles>
+ <profile>
+ <id>jdk9</id>
+ <activation>
+ <jdk>[1.9,)</jdk>
+ </activation>
+ <build>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
<configuration>
- <gpgArguments>
- <arg>--pinentry-mode</arg>
- <arg>loopback</arg>
- </gpgArguments>
+ <release>8</release>
</configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-release-plugin</artifactId>
- <version>2.5.3</version>
- <configuration>
- <autoVersionSubmodules>true</autoVersionSubmodules>
- <useReleaseProfile>false</useReleaseProfile>
- <releaseProfiles>release</releaseProfiles>
- <goals>deploy</goals>
- </configuration>
- </plugin>
- </plugins>
- </build>
+ <version>3.8.1</version>
+ </plugin>
+ <plugin>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <includes>
+ <include>**/*Test.java</include>
+ </includes>
+ </configuration>
+ <version>2.22.2</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-source-plugin</artifactId>
+ <version>3.2.1</version>
+ <executions>
+ <execution>
+ <id>attach-sources</id>
+ <goals>
+ <goal>jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-javadoc-plugin</artifactId>
+ <version>3.3.0</version>
+ <configuration>
+ <additionalparam>-Xdoclint:none</additionalparam>
+ <additionalOptions>-Xdoclint:none</additionalOptions>
+ </configuration>
+ <executions>
+ <execution>
+ <id>attach-javadocs</id>
+ <goals>
+ <goal>jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>maven-bundle-plugin</artifactId>
+ <version>5.1.2</version>
+ <extensions>true</extensions>
+ </plugin>
+ <plugin>
+ <groupId>org.sonatype.plugins</groupId>
+ <artifactId>nexus-staging-maven-plugin</artifactId>
+ <version>1.6.8</version>
+ <extensions>true</extensions>
+ <configuration>
+ <serverId>ossrh</serverId>
+ <nexusUrl>https://oss.sonatype.org/</nexusUrl>
+ <autoReleaseAfterClose>true</autoReleaseAfterClose>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-gpg-plugin</artifactId>
+ <version>3.0.1</version>
+ <executions>
+ <execution>
+ <id>sign-artifacts</id>
+ <phase>verify</phase>
+ <goals>
+ <goal>sign</goal>
+ </goals>
+ <configuration>
+ <gpgArguments>
+ <arg>--pinentry-mode</arg>
+ <arg>loopback</arg>
+ </gpgArguments>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-release-plugin</artifactId>
+ <version>2.5.3</version>
+ <configuration>
+ <autoVersionSubmodules>true</autoVersionSubmodules>
+ <useReleaseProfile>false</useReleaseProfile>
+ <releaseProfiles>release</releaseProfiles>
+ <goals>deploy</goals>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
</project>
diff --git a/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/builder.rs b/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/builder.rs
index bf1ad029d38..9309877116b 100644
--- a/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/builder.rs
+++ b/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/builder.rs
@@ -554,42 +554,38 @@ impl<'fbb> FlatBufferBuilder<'fbb> {
// serialize every FieldLoc to the vtable:
for &fl in self.field_locs.iter() {
let pos: VOffsetT = (object_revloc_to_vtable.value() - fl.off) as VOffsetT;
- debug_assert_eq!(
- vtfw.get_field_offset(fl.id),
- 0,
- "tried to write a vtable field multiple times"
- );
vtfw.write_field_offset(fl.id, pos);
}
}
- let dup_vt_use = {
- let this_vt = VTable::init(&self.owned_buf[..], self.head);
- self.find_duplicate_stored_vtable_revloc(this_vt)
- };
-
- let vt_use = match dup_vt_use {
- Some(n) => {
+ let new_vt_bytes = &self.owned_buf[vt_start_pos..vt_end_pos];
+ let found = self.written_vtable_revpos.binary_search_by(|old_vtable_revpos: &UOffsetT| {
+ let old_vtable_pos = self.owned_buf.len() - *old_vtable_revpos as usize;
+ let old_vtable = VTable::init(&self.owned_buf, old_vtable_pos);
+ new_vt_bytes.cmp(old_vtable.as_bytes())
+ });
+ let final_vtable_revpos = match found {
+ Ok(i) => {
+ // The new vtable is a duplicate so clear it.
VTableWriter::init(&mut self.owned_buf[vt_start_pos..vt_end_pos]).clear();
self.head += vtable_byte_len;
- n
+ self.written_vtable_revpos[i]
}
- None => {
- let new_vt_use = self.used_space() as UOffsetT;
- self.written_vtable_revpos.push(new_vt_use);
- new_vt_use
+ Err(i) => {
+ // This is a new vtable. Add it to the cache.
+ let new_vt_revpos = self.used_space() as UOffsetT;
+ self.written_vtable_revpos.insert(i, new_vt_revpos);
+ new_vt_revpos
}
};
-
- {
- let n = self.head + self.used_space() - object_revloc_to_vtable.value() as usize;
- let saw = unsafe { read_scalar_at::<UOffsetT>(&self.owned_buf, n) };
- debug_assert_eq!(saw, 0xF0F0_F0F0);
- unsafe {
- emplace_scalar::<SOffsetT>(
- &mut self.owned_buf[n..n + SIZE_SOFFSET],
- vt_use as SOffsetT - object_revloc_to_vtable.value() as SOffsetT,
- );
- }
+ // Write signed offset from table to its vtable.
+ let table_pos = self.owned_buf.len() - object_revloc_to_vtable.value() as usize;
+ let tmp_soffset_to_vt = unsafe { read_scalar_at::<UOffsetT>(&self.owned_buf, table_pos) };
+ debug_assert_eq!(tmp_soffset_to_vt, 0xF0F0_F0F0);
+ unsafe {
+ emplace_scalar::<SOffsetT>(
+ &mut self.owned_buf[table_pos..table_pos + SIZE_SOFFSET],
+ final_vtable_revpos as SOffsetT - object_revloc_to_vtable.value() as SOffsetT
+ );
}
self.field_locs.clear();
@@ -597,20 +593,6 @@ impl<'fbb> FlatBufferBuilder<'fbb> {
object_revloc_to_vtable
}
- #[inline]
- fn find_duplicate_stored_vtable_revloc(&self, needle: VTable) -> Option<UOffsetT> {
- for &revloc in self.written_vtable_revpos.iter().rev() {
- let o = VTable::init(
- &self.owned_buf[..],
- self.head + self.used_space() - revloc as usize,
- );
- if needle == o {
- return Some(revloc);
- }
- }
- None
- }
-
// Only call this when you know it is safe to double the size of the buffer.
#[inline]
fn grow_owned_buf(&mut self) {
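
The hunk above replaces the Rust builder's linear scan for duplicate vtables with a binary search over written_vtable_revpos, which is now kept sorted by vtable byte content. A hedged, stand-alone C++ sketch of that dedup strategy (not the actual Rust implementation; BlobPool and intern() are invented here purely for illustration):

#include <algorithm>
#include <cstdint>
#include <utility>
#include <vector>

struct BlobPool {
  std::vector<uint8_t> buf;                         // all written bytes
  std::vector<std::pair<size_t, size_t>> written;   // (offset, len), sorted by blob content

  // Returns the offset of `blob`, writing it only if an identical blob has
  // not been written before.
  size_t intern(const std::vector<uint8_t> &blob) {
    auto less_than = [&](const std::pair<size_t, size_t> &e,
                         const std::vector<uint8_t> &b) {
      return std::lexicographical_compare(buf.begin() + e.first,
                                          buf.begin() + e.first + e.second,
                                          b.begin(), b.end());
    };
    auto it = std::lower_bound(written.begin(), written.end(), blob, less_than);
    if (it != written.end() && it->second == blob.size() &&
        std::equal(blob.begin(), blob.end(), buf.begin() + it->first)) {
      return it->first;  // duplicate: reuse the earlier copy
    }
    size_t off = buf.size();
    buf.insert(buf.end(), blob.begin(), blob.end());
    written.insert(it, {off, blob.size()});  // keep the index sorted
    return off;
  }
};

int main() {
  BlobPool pool;
  size_t a = pool.intern({1, 2, 3});
  size_t b = pool.intern({4, 5});
  size_t c = pool.intern({1, 2, 3});  // duplicate: binary search finds `a`
  return (a == c && a != b) ? 0 : 1;
}
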
diff --git a/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/get_root.rs b/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/get_root.rs
index 2a01cf87e26..3305efade8a 100644
--- a/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/get_root.rs
+++ b/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/get_root.rs
@@ -43,7 +43,7 @@ pub fn root_with_opts<'opts, 'buf, T>(
where
T: 'buf + Follow<'buf> + Verifiable,
{
- let mut v = Verifier::new(&opts, data);
+ let mut v = Verifier::new(opts, data);
<ForwardsUOffset<T>>::run_verifier(&mut v, 0)?;
Ok(unsafe { root_unchecked::<T>(data) })
}
@@ -73,7 +73,7 @@ pub fn size_prefixed_root_with_opts<'opts, 'buf, T>(
where
T: 'buf + Follow<'buf> + Verifiable,
{
- let mut v = Verifier::new(&opts, data);
+ let mut v = Verifier::new(opts, data);
<SkipSizePrefix<ForwardsUOffset<T>>>::run_verifier(&mut v, 0)?;
Ok(unsafe { size_prefixed_root_unchecked::<T>(data) })
}
diff --git a/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/vector.rs b/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/vector.rs
index fe46c503691..0f29e6b605d 100644
--- a/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/vector.rs
+++ b/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/vector.rs
@@ -73,7 +73,7 @@ impl<'a, T: 'a> Vector<'a, T> {
#[inline(always)]
pub fn len(&self) -> usize {
- unsafe { read_scalar_at::<UOffsetT>(&self.0, self.1) as usize }
+ unsafe { read_scalar_at::<UOffsetT>(self.0, self.1) as usize }
}
#[inline(always)]
pub fn is_empty(&self) -> bool {
@@ -103,7 +103,7 @@ impl<'a, T: SafeSliceAccess + 'a> Vector<'a, T> {
let loc = self.1;
let sz = size_of::<T>();
debug_assert!(sz > 0);
- let len = unsafe { read_scalar_at::<UOffsetT>(&buf, loc) } as usize;
+ let len = unsafe { read_scalar_at::<UOffsetT>(buf, loc) } as usize;
let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz];
let ptr = data_buf.as_ptr() as *const T;
let s: &'a [T] = unsafe { from_raw_parts(ptr, len) };
@@ -144,7 +144,7 @@ pub fn follow_cast_ref<'a, T: Sized + 'a>(buf: &'a [u8], loc: usize) -> &'a T {
impl<'a> Follow<'a> for &'a str {
type Inner = &'a str;
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
- let len = unsafe { read_scalar_at::<UOffsetT>(&buf, loc) } as usize;
+ let len = unsafe { read_scalar_at::<UOffsetT>(buf, loc) } as usize;
let slice = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len];
unsafe { from_utf8_unchecked(slice) }
}
@@ -154,7 +154,7 @@ impl<'a> Follow<'a> for &'a str {
fn follow_slice_helper<T>(buf: &[u8], loc: usize) -> &[T] {
let sz = size_of::<T>();
debug_assert!(sz > 0);
- let len = unsafe { read_scalar_at::<UOffsetT>(&buf, loc) as usize };
+ let len = unsafe { read_scalar_at::<UOffsetT>(buf, loc) as usize };
let data_buf = &buf[loc + SIZE_UOFFSET..loc + SIZE_UOFFSET + len * sz];
let ptr = data_buf.as_ptr() as *const T;
let s: &[T] = unsafe { from_raw_parts(ptr, len) };
diff --git a/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/vtable_writer.rs b/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/vtable_writer.rs
index 75eabd494ef..92e8522eb27 100644
--- a/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/vtable_writer.rs
+++ b/chromium/third_party/flatbuffers/src/rust/flatbuffers/src/vtable_writer.rs
@@ -16,7 +16,7 @@
use std::ptr::write_bytes;
-use crate::endian_scalar::{emplace_scalar, read_scalar_at};
+use crate::endian_scalar::emplace_scalar;
use crate::primitives::*;
/// VTableWriter compartmentalizes actions needed to create a vtable.
@@ -54,16 +54,6 @@ impl<'a> VTableWriter<'a> {
}
}
- /// Gets an object field offset from the vtable. Only used for debugging.
- ///
- /// Note that this expects field offsets (which are like pointers), not
- /// field ids (which are like array indices).
- #[inline(always)]
- pub fn get_field_offset(&self, vtable_offset: VOffsetT) -> VOffsetT {
- let idx = vtable_offset as usize;
- unsafe { read_scalar_at::<VOffsetT>(&self.buf, idx) }
- }
-
/// Writes an object field offset into the vtable.
///
/// Note that this expects field offsets (which are like pointers), not
diff --git a/chromium/third_party/flatbuffers/src/samples/dart_sample.sh b/chromium/third_party/flatbuffers/src/samples/dart_sample.sh
index bc4328ede27..40326790dfc 100755
--- a/chromium/third_party/flatbuffers/src/samples/dart_sample.sh
+++ b/chromium/third_party/flatbuffers/src/samples/dart_sample.sh
@@ -28,14 +28,12 @@ if [[ "$sampledir" != "$currentdir" ]]; then
exit 1
fi
-cd ../dart/example
-
# Run `flatc`. Note: This requires you to compile using `cmake` from the
# root `/flatbuffers` directory.
-if [ -e ../../flatc ]; then
- ../../flatc --dart ../../samples/monster.fbs
-elif [ -e ../../Debug/flatc ]; then
- ../../Debug/flatc --dart ../../samples/monster.fbs
+if [ -e ../flatc ]; then
+ ../flatc --dart -o ../dart/example/ monster.fbs
+elif [ -e ../Debug/flatc ]; then
+ ../Debug/flatc --dart -o ../dart/example/ monster.fbs
else
echo 'flatc' could not be found. Make sure to build FlatBuffers from the \
$rootdir directory.
@@ -45,9 +43,7 @@ fi
echo Running the Dart sample.
# Execute the sample.
-dart example.dart
-
-# Cleanup temporary files.
-git checkout monster_my_game.sample_generated.dart
+dart ../dart/example/example.dart
-cd ../../samples
+# Copy the source schema so it is distributed when published to pub.dev
+cp monster.fbs ../dart/example/
\ No newline at end of file
diff --git a/chromium/third_party/flatbuffers/src/samples/monster.bfbs b/chromium/third_party/flatbuffers/src/samples/monster.bfbs
index c49fa8cd971..003f2288524 100644
--- a/chromium/third_party/flatbuffers/src/samples/monster.bfbs
+++ b/chromium/third_party/flatbuffers/src/samples/monster.bfbs
Binary files differ
diff --git a/chromium/third_party/flatbuffers/src/samples/monster_generated.h b/chromium/third_party/flatbuffers/src/samples/monster_generated.h
index 57402650100..efddb8d0322 100644
--- a/chromium/third_party/flatbuffers/src/samples/monster_generated.h
+++ b/chromium/third_party/flatbuffers/src/samples/monster_generated.h
@@ -103,6 +103,14 @@ template<> struct EquipmentTraits<MyGame::Sample::Weapon> {
static const Equipment enum_value = Equipment_Weapon;
};
+template<typename T> struct EquipmentUnionTraits {
+ static const Equipment enum_value = Equipment_NONE;
+};
+
+template<> struct EquipmentUnionTraits<MyGame::Sample::WeaponT> {
+ static const Equipment enum_value = Equipment_Weapon;
+};
+
struct EquipmentUnion {
Equipment type;
void *value;
@@ -120,17 +128,15 @@ struct EquipmentUnion {
void Reset();
-#ifndef FLATBUFFERS_CPP98_STL
template <typename T>
void Set(T&& val) {
- using RT = typename std::remove_reference<T>::type;
+ typedef typename std::remove_reference<T>::type RT;
Reset();
- type = EquipmentTraits<typename RT::TableType>::enum_value;
+ type = EquipmentUnionTraits<RT>::enum_value;
if (type != Equipment_NONE) {
value = new RT(std::forward<T>(val));
}
}
-#endif // FLATBUFFERS_CPP98_STL
static void *UnPack(const void *obj, Equipment type, const flatbuffers::resolver_function_t *resolver);
flatbuffers::Offset<void> Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher = nullptr) const;
@@ -262,13 +268,13 @@ struct Monster FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
int16_t mana() const {
return GetField<int16_t>(VT_MANA, 150);
}
- bool mutate_mana(int16_t _mana) {
+ bool mutate_mana(int16_t _mana = 150) {
return SetField<int16_t>(VT_MANA, _mana, 150);
}
int16_t hp() const {
return GetField<int16_t>(VT_HP, 100);
}
- bool mutate_hp(int16_t _hp) {
+ bool mutate_hp(int16_t _hp = 100) {
return SetField<int16_t>(VT_HP, _hp, 100);
}
const flatbuffers::String *name() const {
@@ -286,7 +292,7 @@ struct Monster FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
MyGame::Sample::Color color() const {
return static_cast<MyGame::Sample::Color>(GetField<int8_t>(VT_COLOR, 2));
}
- bool mutate_color(MyGame::Sample::Color _color) {
+ bool mutate_color(MyGame::Sample::Color _color = static_cast<MyGame::Sample::Color>(2)) {
return SetField<int8_t>(VT_COLOR, static_cast<int8_t>(_color), 2);
}
const flatbuffers::Vector<flatbuffers::Offset<MyGame::Sample::Weapon>> *weapons() const {
@@ -390,7 +396,7 @@ struct MonsterBuilder {
inline flatbuffers::Offset<Monster> CreateMonster(
flatbuffers::FlatBufferBuilder &_fbb,
- const MyGame::Sample::Vec3 *pos = 0,
+ const MyGame::Sample::Vec3 *pos = nullptr,
int16_t mana = 150,
int16_t hp = 100,
flatbuffers::Offset<flatbuffers::String> name = 0,
@@ -416,7 +422,7 @@ inline flatbuffers::Offset<Monster> CreateMonster(
inline flatbuffers::Offset<Monster> CreateMonsterDirect(
flatbuffers::FlatBufferBuilder &_fbb,
- const MyGame::Sample::Vec3 *pos = 0,
+ const MyGame::Sample::Vec3 *pos = nullptr,
int16_t mana = 150,
int16_t hp = 100,
const char *name = nullptr,
@@ -471,7 +477,7 @@ struct Weapon FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
int16_t damage() const {
return GetField<int16_t>(VT_DAMAGE, 0);
}
- bool mutate_damage(int16_t _damage) {
+ bool mutate_damage(int16_t _damage = 0) {
return SetField<int16_t>(VT_DAMAGE, _damage, 0);
}
bool Verify(flatbuffers::Verifier &verifier) const {
@@ -564,7 +570,7 @@ inline void Monster::UnPackTo(MonsterT *_o, const flatbuffers::resolver_function
{ auto _e = name(); if (_e) _o->name = _e->str(); }
{ auto _e = inventory(); if (_e) { _o->inventory.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->inventory.begin()); } }
{ auto _e = color(); _o->color = _e; }
- { auto _e = weapons(); if (_e) { _o->weapons.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->weapons[_i] = flatbuffers::unique_ptr<MyGame::Sample::WeaponT>(_e->Get(_i)->UnPack(_resolver)); } } }
+ { auto _e = weapons(); if (_e) { _o->weapons.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->weapons[_i]) { _e->Get(_i)->UnPackTo(_o->weapons[_i].get(), _resolver); } else { _o->weapons[_i] = flatbuffers::unique_ptr<MyGame::Sample::WeaponT>(_e->Get(_i)->UnPack(_resolver)); }; } } }
{ auto _e = equipped_type(); _o->equipped.type = _e; }
{ auto _e = equipped(); if (_e) _o->equipped.value = MyGame::Sample::EquipmentUnion::UnPack(_e, equipped_type(), _resolver); }
{ auto _e = path(); if (_e) { _o->path.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->path[_i] = *_e->Get(_i); } } }
@@ -669,6 +675,7 @@ inline bool VerifyEquipmentVector(flatbuffers::Verifier &verifier, const flatbuf
}
inline void *EquipmentUnion::UnPack(const void *obj, Equipment type, const flatbuffers::resolver_function_t *resolver) {
+ (void)resolver;
switch (type) {
case Equipment_Weapon: {
auto ptr = reinterpret_cast<const MyGame::Sample::Weapon *>(obj);
@@ -679,6 +686,7 @@ inline void *EquipmentUnion::UnPack(const void *obj, Equipment type, const flatb
}
inline flatbuffers::Offset<void> EquipmentUnion::Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher) const {
+ (void)_rehasher;
switch (type) {
case Equipment_Weapon: {
auto ptr = reinterpret_cast<const MyGame::Sample::WeaponT *>(value);
@@ -834,6 +842,10 @@ inline Monster *GetMutableMonster(void *buf) {
return flatbuffers::GetMutableRoot<Monster>(buf);
}
+inline MyGame::Sample::Monster *GetMutableSizePrefixedMonster(void *buf) {
+ return flatbuffers::GetMutableSizePrefixedRoot<MyGame::Sample::Monster>(buf);
+}
+
inline bool VerifyMonsterBuffer(
flatbuffers::Verifier &verifier) {
return verifier.VerifyBuffer<MyGame::Sample::Monster>(nullptr);
diff --git a/chromium/third_party/flatbuffers/src/samples/monster_generated.rs b/chromium/third_party/flatbuffers/src/samples/monster_generated.rs
deleted file mode 100644
index b3d82674707..00000000000
--- a/chromium/third_party/flatbuffers/src/samples/monster_generated.rs
+++ /dev/null
@@ -1,1006 +0,0 @@
-// automatically generated by the FlatBuffers compiler, do not modify
-
-
-
-use std::mem;
-use std::cmp::Ordering;
-
-extern crate flatbuffers;
-use self::flatbuffers::{EndianScalar, Follow};
-
-#[allow(unused_imports, dead_code)]
-pub mod my_game {
-
- use std::mem;
- use std::cmp::Ordering;
-
- extern crate flatbuffers;
- use self::flatbuffers::{EndianScalar, Follow};
-#[allow(unused_imports, dead_code)]
-pub mod sample {
-
- use std::mem;
- use std::cmp::Ordering;
-
- extern crate flatbuffers;
- use self::flatbuffers::{EndianScalar, Follow};
-
-#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
-pub const ENUM_MIN_COLOR: i8 = 0;
-#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
-pub const ENUM_MAX_COLOR: i8 = 2;
-#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
-#[allow(non_camel_case_types)]
-pub const ENUM_VALUES_COLOR: [Color; 3] = [
- Color::Red,
- Color::Green,
- Color::Blue,
-];
-
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
-#[repr(transparent)]
-pub struct Color(pub i8);
-#[allow(non_upper_case_globals)]
-impl Color {
- pub const Red: Self = Self(0);
- pub const Green: Self = Self(1);
- pub const Blue: Self = Self(2);
-
- pub const ENUM_MIN: i8 = 0;
- pub const ENUM_MAX: i8 = 2;
- pub const ENUM_VALUES: &'static [Self] = &[
- Self::Red,
- Self::Green,
- Self::Blue,
- ];
- /// Returns the variant's name or "" if unknown.
- pub fn variant_name(self) -> Option<&'static str> {
- match self {
- Self::Red => Some("Red"),
- Self::Green => Some("Green"),
- Self::Blue => Some("Blue"),
- _ => None,
- }
- }
-}
-impl std::fmt::Debug for Color {
- fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
- if let Some(name) = self.variant_name() {
- f.write_str(name)
- } else {
- f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
- }
- }
-}
-impl<'a> flatbuffers::Follow<'a> for Color {
- type Inner = Self;
- #[inline]
- fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
- let b = unsafe {
- flatbuffers::read_scalar_at::<i8>(buf, loc)
- };
- Self(b)
- }
-}
-
-impl flatbuffers::Push for Color {
- type Output = Color;
- #[inline]
- fn push(&self, dst: &mut [u8], _rest: &[u8]) {
- unsafe { flatbuffers::emplace_scalar::<i8>(dst, self.0); }
- }
-}
-
-impl flatbuffers::EndianScalar for Color {
- #[inline]
- fn to_little_endian(self) -> Self {
- let b = i8::to_le(self.0);
- Self(b)
- }
- #[inline]
- #[allow(clippy::wrong_self_convention)]
- fn from_little_endian(self) -> Self {
- let b = i8::from_le(self.0);
- Self(b)
- }
-}
-
-impl<'a> flatbuffers::Verifiable for Color {
- #[inline]
- fn run_verifier(
- v: &mut flatbuffers::Verifier, pos: usize
- ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
- use self::flatbuffers::Verifiable;
- i8::run_verifier(v, pos)
- }
-}
-
-impl flatbuffers::SimpleToVerifyInSlice for Color {}
-#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
-pub const ENUM_MIN_EQUIPMENT: u8 = 0;
-#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
-pub const ENUM_MAX_EQUIPMENT: u8 = 1;
-#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
-#[allow(non_camel_case_types)]
-pub const ENUM_VALUES_EQUIPMENT: [Equipment; 2] = [
- Equipment::NONE,
- Equipment::Weapon,
-];
-
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
-#[repr(transparent)]
-pub struct Equipment(pub u8);
-#[allow(non_upper_case_globals)]
-impl Equipment {
- pub const NONE: Self = Self(0);
- pub const Weapon: Self = Self(1);
-
- pub const ENUM_MIN: u8 = 0;
- pub const ENUM_MAX: u8 = 1;
- pub const ENUM_VALUES: &'static [Self] = &[
- Self::NONE,
- Self::Weapon,
- ];
- /// Returns the variant's name or "" if unknown.
- pub fn variant_name(self) -> Option<&'static str> {
- match self {
- Self::NONE => Some("NONE"),
- Self::Weapon => Some("Weapon"),
- _ => None,
- }
- }
-}
-impl std::fmt::Debug for Equipment {
- fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
- if let Some(name) = self.variant_name() {
- f.write_str(name)
- } else {
- f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
- }
- }
-}
-impl<'a> flatbuffers::Follow<'a> for Equipment {
- type Inner = Self;
- #[inline]
- fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
- let b = unsafe {
- flatbuffers::read_scalar_at::<u8>(buf, loc)
- };
- Self(b)
- }
-}
-
-impl flatbuffers::Push for Equipment {
- type Output = Equipment;
- #[inline]
- fn push(&self, dst: &mut [u8], _rest: &[u8]) {
- unsafe { flatbuffers::emplace_scalar::<u8>(dst, self.0); }
- }
-}
-
-impl flatbuffers::EndianScalar for Equipment {
- #[inline]
- fn to_little_endian(self) -> Self {
- let b = u8::to_le(self.0);
- Self(b)
- }
- #[inline]
- #[allow(clippy::wrong_self_convention)]
- fn from_little_endian(self) -> Self {
- let b = u8::from_le(self.0);
- Self(b)
- }
-}
-
-impl<'a> flatbuffers::Verifiable for Equipment {
- #[inline]
- fn run_verifier(
- v: &mut flatbuffers::Verifier, pos: usize
- ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
- use self::flatbuffers::Verifiable;
- u8::run_verifier(v, pos)
- }
-}
-
-impl flatbuffers::SimpleToVerifyInSlice for Equipment {}
-pub struct EquipmentUnionTableOffset {}
-
-#[non_exhaustive]
-#[derive(Debug, Clone, PartialEq)]
-pub enum EquipmentT {
- NONE,
- Weapon(Box<WeaponT>),
-}
-impl Default for EquipmentT {
- fn default() -> Self {
- Self::NONE
- }
-}
-impl EquipmentT {
- pub fn equipment_type(&self) -> Equipment {
- match self {
- Self::NONE => Equipment::NONE,
- Self::Weapon(_) => Equipment::Weapon,
- }
- }
- pub fn pack(&self, fbb: &mut flatbuffers::FlatBufferBuilder) -> Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>> {
- match self {
- Self::NONE => None,
- Self::Weapon(v) => Some(v.pack(fbb).as_union_value()),
- }
- }
- /// If the union variant matches, return the owned WeaponT, setting the union to NONE.
- pub fn take_weapon(&mut self) -> Option<Box<WeaponT>> {
- if let Self::Weapon(_) = self {
- let v = std::mem::replace(self, Self::NONE);
- if let Self::Weapon(w) = v {
- Some(w)
- } else {
- unreachable!()
- }
- } else {
- None
- }
- }
- /// If the union variant matches, return a reference to the WeaponT.
- pub fn as_weapon(&self) -> Option<&WeaponT> {
- if let Self::Weapon(v) = self { Some(v.as_ref()) } else { None }
- }
- /// If the union variant matches, return a mutable reference to the WeaponT.
- pub fn as_weapon_mut(&mut self) -> Option<&mut WeaponT> {
- if let Self::Weapon(v) = self { Some(v.as_mut()) } else { None }
- }
-}
-// struct Vec3, aligned to 4
-#[repr(transparent)]
-#[derive(Clone, Copy, PartialEq)]
-pub struct Vec3(pub [u8; 12]);
-impl Default for Vec3 {
- fn default() -> Self {
- Self([0; 12])
- }
-}
-impl std::fmt::Debug for Vec3 {
- fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
- f.debug_struct("Vec3")
- .field("x", &self.x())
- .field("y", &self.y())
- .field("z", &self.z())
- .finish()
- }
-}
-
-impl flatbuffers::SimpleToVerifyInSlice for Vec3 {}
-impl flatbuffers::SafeSliceAccess for Vec3 {}
-impl<'a> flatbuffers::Follow<'a> for Vec3 {
- type Inner = &'a Vec3;
- #[inline]
- fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
- <&'a Vec3>::follow(buf, loc)
- }
-}
-impl<'a> flatbuffers::Follow<'a> for &'a Vec3 {
- type Inner = &'a Vec3;
- #[inline]
- fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
- flatbuffers::follow_cast_ref::<Vec3>(buf, loc)
- }
-}
-impl<'b> flatbuffers::Push for Vec3 {
- type Output = Vec3;
- #[inline]
- fn push(&self, dst: &mut [u8], _rest: &[u8]) {
- let src = unsafe {
- ::std::slice::from_raw_parts(self as *const Vec3 as *const u8, Self::size())
- };
- dst.copy_from_slice(src);
- }
-}
-impl<'b> flatbuffers::Push for &'b Vec3 {
- type Output = Vec3;
-
- #[inline]
- fn push(&self, dst: &mut [u8], _rest: &[u8]) {
- let src = unsafe {
- ::std::slice::from_raw_parts(*self as *const Vec3 as *const u8, Self::size())
- };
- dst.copy_from_slice(src);
- }
-}
-
-impl<'a> flatbuffers::Verifiable for Vec3 {
- #[inline]
- fn run_verifier(
- v: &mut flatbuffers::Verifier, pos: usize
- ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
- use self::flatbuffers::Verifiable;
- v.in_buffer::<Self>(pos)
- }
-}
-impl<'a> Vec3 {
- #[allow(clippy::too_many_arguments)]
- pub fn new(
- x: f32,
- y: f32,
- z: f32,
- ) -> Self {
- let mut s = Self([0; 12]);
- s.set_x(x);
- s.set_y(y);
- s.set_z(z);
- s
- }
-
- pub fn x(&self) -> f32 {
- let mut mem = core::mem::MaybeUninit::<f32>::uninit();
- unsafe {
- core::ptr::copy_nonoverlapping(
- self.0[0..].as_ptr(),
- mem.as_mut_ptr() as *mut u8,
- core::mem::size_of::<f32>(),
- );
- mem.assume_init()
- }.from_little_endian()
- }
-
- pub fn set_x(&mut self, x: f32) {
- let x_le = x.to_little_endian();
- unsafe {
- core::ptr::copy_nonoverlapping(
- &x_le as *const f32 as *const u8,
- self.0[0..].as_mut_ptr(),
- core::mem::size_of::<f32>(),
- );
- }
- }
-
- pub fn y(&self) -> f32 {
- let mut mem = core::mem::MaybeUninit::<f32>::uninit();
- unsafe {
- core::ptr::copy_nonoverlapping(
- self.0[4..].as_ptr(),
- mem.as_mut_ptr() as *mut u8,
- core::mem::size_of::<f32>(),
- );
- mem.assume_init()
- }.from_little_endian()
- }
-
- pub fn set_y(&mut self, x: f32) {
- let x_le = x.to_little_endian();
- unsafe {
- core::ptr::copy_nonoverlapping(
- &x_le as *const f32 as *const u8,
- self.0[4..].as_mut_ptr(),
- core::mem::size_of::<f32>(),
- );
- }
- }
-
- pub fn z(&self) -> f32 {
- let mut mem = core::mem::MaybeUninit::<f32>::uninit();
- unsafe {
- core::ptr::copy_nonoverlapping(
- self.0[8..].as_ptr(),
- mem.as_mut_ptr() as *mut u8,
- core::mem::size_of::<f32>(),
- );
- mem.assume_init()
- }.from_little_endian()
- }
-
- pub fn set_z(&mut self, x: f32) {
- let x_le = x.to_little_endian();
- unsafe {
- core::ptr::copy_nonoverlapping(
- &x_le as *const f32 as *const u8,
- self.0[8..].as_mut_ptr(),
- core::mem::size_of::<f32>(),
- );
- }
- }
-
- pub fn unpack(&self) -> Vec3T {
- Vec3T {
- x: self.x(),
- y: self.y(),
- z: self.z(),
- }
- }
-}
-
-#[derive(Debug, Clone, PartialEq, Default)]
-pub struct Vec3T {
- pub x: f32,
- pub y: f32,
- pub z: f32,
-}
-impl Vec3T {
- pub fn pack(&self) -> Vec3 {
- Vec3::new(
- self.x,
- self.y,
- self.z,
- )
- }
-}
-
-pub enum MonsterOffset {}
-#[derive(Copy, Clone, PartialEq)]
-
-pub struct Monster<'a> {
- pub _tab: flatbuffers::Table<'a>,
-}
-
-impl<'a> flatbuffers::Follow<'a> for Monster<'a> {
- type Inner = Monster<'a>;
- #[inline]
- fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
- Self { _tab: flatbuffers::Table { buf, loc } }
- }
-}
-
-impl<'a> Monster<'a> {
- #[inline]
- pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
- Monster { _tab: table }
- }
- #[allow(unused_mut)]
- pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
- _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
- args: &'args MonsterArgs<'args>) -> flatbuffers::WIPOffset<Monster<'bldr>> {
- let mut builder = MonsterBuilder::new(_fbb);
- if let Some(x) = args.path { builder.add_path(x); }
- if let Some(x) = args.equipped { builder.add_equipped(x); }
- if let Some(x) = args.weapons { builder.add_weapons(x); }
- if let Some(x) = args.inventory { builder.add_inventory(x); }
- if let Some(x) = args.name { builder.add_name(x); }
- if let Some(x) = args.pos { builder.add_pos(x); }
- builder.add_hp(args.hp);
- builder.add_mana(args.mana);
- builder.add_equipped_type(args.equipped_type);
- builder.add_color(args.color);
- builder.finish()
- }
-
- pub fn unpack(&self) -> MonsterT {
- let pos = self.pos().map(|x| {
- x.unpack()
- });
- let mana = self.mana();
- let hp = self.hp();
- let name = self.name().map(|x| {
- x.to_string()
- });
- let inventory = self.inventory().map(|x| {
- x.to_vec()
- });
- let color = self.color();
- let weapons = self.weapons().map(|x| {
- x.iter().map(|t| t.unpack()).collect()
- });
- let equipped = match self.equipped_type() {
- Equipment::NONE => EquipmentT::NONE,
- Equipment::Weapon => EquipmentT::Weapon(Box::new(
- self.equipped_as_weapon()
- .expect("Invalid union table, expected `Equipment::Weapon`.")
- .unpack()
- )),
- _ => EquipmentT::NONE,
- };
- let path = self.path().map(|x| {
- x.iter().map(|t| t.unpack()).collect()
- });
- MonsterT {
- pos,
- mana,
- hp,
- name,
- inventory,
- color,
- weapons,
- equipped,
- path,
- }
- }
- pub const VT_POS: flatbuffers::VOffsetT = 4;
- pub const VT_MANA: flatbuffers::VOffsetT = 6;
- pub const VT_HP: flatbuffers::VOffsetT = 8;
- pub const VT_NAME: flatbuffers::VOffsetT = 10;
- pub const VT_INVENTORY: flatbuffers::VOffsetT = 14;
- pub const VT_COLOR: flatbuffers::VOffsetT = 16;
- pub const VT_WEAPONS: flatbuffers::VOffsetT = 18;
- pub const VT_EQUIPPED_TYPE: flatbuffers::VOffsetT = 20;
- pub const VT_EQUIPPED: flatbuffers::VOffsetT = 22;
- pub const VT_PATH: flatbuffers::VOffsetT = 24;
-
- #[inline]
- pub fn pos(&self) -> Option<&'a Vec3> {
- self._tab.get::<Vec3>(Monster::VT_POS, None)
- }
- #[inline]
- pub fn mana(&self) -> i16 {
- self._tab.get::<i16>(Monster::VT_MANA, Some(150)).unwrap()
- }
- #[inline]
- pub fn hp(&self) -> i16 {
- self._tab.get::<i16>(Monster::VT_HP, Some(100)).unwrap()
- }
- #[inline]
- pub fn name(&self) -> Option<&'a str> {
- self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(Monster::VT_NAME, None)
- }
- #[inline]
- pub fn inventory(&self) -> Option<&'a [u8]> {
- self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(Monster::VT_INVENTORY, None).map(|v| v.safe_slice())
- }
- #[inline]
- pub fn color(&self) -> Color {
- self._tab.get::<Color>(Monster::VT_COLOR, Some(Color::Blue)).unwrap()
- }
- #[inline]
- pub fn weapons(&self) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Weapon<'a>>>> {
- self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Weapon>>>>(Monster::VT_WEAPONS, None)
- }
- #[inline]
- pub fn equipped_type(&self) -> Equipment {
- self._tab.get::<Equipment>(Monster::VT_EQUIPPED_TYPE, Some(Equipment::NONE)).unwrap()
- }
- #[inline]
- pub fn equipped(&self) -> Option<flatbuffers::Table<'a>> {
- self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(Monster::VT_EQUIPPED, None)
- }
- #[inline]
- pub fn path(&self) -> Option<&'a [Vec3]> {
- self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, Vec3>>>(Monster::VT_PATH, None).map(|v| v.safe_slice())
- }
- #[inline]
- #[allow(non_snake_case)]
- pub fn equipped_as_weapon(&self) -> Option<Weapon<'a>> {
- if self.equipped_type() == Equipment::Weapon {
- self.equipped().map(Weapon::init_from_table)
- } else {
- None
- }
- }
-
-}
-
-impl flatbuffers::Verifiable for Monster<'_> {
- #[inline]
- fn run_verifier(
- v: &mut flatbuffers::Verifier, pos: usize
- ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
- use self::flatbuffers::Verifiable;
- v.visit_table(pos)?
- .visit_field::<Vec3>(&"pos", Self::VT_POS, false)?
- .visit_field::<i16>(&"mana", Self::VT_MANA, false)?
- .visit_field::<i16>(&"hp", Self::VT_HP, false)?
- .visit_field::<flatbuffers::ForwardsUOffset<&str>>(&"name", Self::VT_NAME, false)?
- .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u8>>>(&"inventory", Self::VT_INVENTORY, false)?
- .visit_field::<Color>(&"color", Self::VT_COLOR, false)?
- .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<Weapon>>>>(&"weapons", Self::VT_WEAPONS, false)?
- .visit_union::<Equipment, _>(&"equipped_type", Self::VT_EQUIPPED_TYPE, &"equipped", Self::VT_EQUIPPED, false, |key, v, pos| {
- match key {
- Equipment::Weapon => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Weapon>>("Equipment::Weapon", pos),
- _ => Ok(()),
- }
- })?
- .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, Vec3>>>(&"path", Self::VT_PATH, false)?
- .finish();
- Ok(())
- }
-}
-pub struct MonsterArgs<'a> {
- pub pos: Option<&'a Vec3>,
- pub mana: i16,
- pub hp: i16,
- pub name: Option<flatbuffers::WIPOffset<&'a str>>,
- pub inventory: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>,
- pub color: Color,
- pub weapons: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Weapon<'a>>>>>,
- pub equipped_type: Equipment,
- pub equipped: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
- pub path: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, Vec3>>>,
-}
-impl<'a> Default for MonsterArgs<'a> {
- #[inline]
- fn default() -> Self {
- MonsterArgs {
- pos: None,
- mana: 150,
- hp: 100,
- name: None,
- inventory: None,
- color: Color::Blue,
- weapons: None,
- equipped_type: Equipment::NONE,
- equipped: None,
- path: None,
- }
- }
-}
-pub struct MonsterBuilder<'a: 'b, 'b> {
- fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
- start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
-}
-impl<'a: 'b, 'b> MonsterBuilder<'a, 'b> {
- #[inline]
- pub fn add_pos(&mut self, pos: &Vec3) {
- self.fbb_.push_slot_always::<&Vec3>(Monster::VT_POS, pos);
- }
- #[inline]
- pub fn add_mana(&mut self, mana: i16) {
- self.fbb_.push_slot::<i16>(Monster::VT_MANA, mana, 150);
- }
- #[inline]
- pub fn add_hp(&mut self, hp: i16) {
- self.fbb_.push_slot::<i16>(Monster::VT_HP, hp, 100);
- }
- #[inline]
- pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) {
- self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_NAME, name);
- }
- #[inline]
- pub fn add_inventory(&mut self, inventory: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
- self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_INVENTORY, inventory);
- }
- #[inline]
- pub fn add_color(&mut self, color: Color) {
- self.fbb_.push_slot::<Color>(Monster::VT_COLOR, color, Color::Blue);
- }
- #[inline]
- pub fn add_weapons(&mut self, weapons: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Weapon<'b >>>>) {
- self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_WEAPONS, weapons);
- }
- #[inline]
- pub fn add_equipped_type(&mut self, equipped_type: Equipment) {
- self.fbb_.push_slot::<Equipment>(Monster::VT_EQUIPPED_TYPE, equipped_type, Equipment::NONE);
- }
- #[inline]
- pub fn add_equipped(&mut self, equipped: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>) {
- self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_EQUIPPED, equipped);
- }
- #[inline]
- pub fn add_path(&mut self, path: flatbuffers::WIPOffset<flatbuffers::Vector<'b , Vec3>>) {
- self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_PATH, path);
- }
- #[inline]
- pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> MonsterBuilder<'a, 'b> {
- let start = _fbb.start_table();
- MonsterBuilder {
- fbb_: _fbb,
- start_: start,
- }
- }
- #[inline]
- pub fn finish(self) -> flatbuffers::WIPOffset<Monster<'a>> {
- let o = self.fbb_.end_table(self.start_);
- flatbuffers::WIPOffset::new(o.value())
- }
-}
-
-impl std::fmt::Debug for Monster<'_> {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- let mut ds = f.debug_struct("Monster");
- ds.field("pos", &self.pos());
- ds.field("mana", &self.mana());
- ds.field("hp", &self.hp());
- ds.field("name", &self.name());
- ds.field("inventory", &self.inventory());
- ds.field("color", &self.color());
- ds.field("weapons", &self.weapons());
- ds.field("equipped_type", &self.equipped_type());
- match self.equipped_type() {
- Equipment::Weapon => {
- if let Some(x) = self.equipped_as_weapon() {
- ds.field("equipped", &x)
- } else {
- ds.field("equipped", &"InvalidFlatbuffer: Union discriminant does not match value.")
- }
- },
- _ => {
- let x: Option<()> = None;
- ds.field("equipped", &x)
- },
- };
- ds.field("path", &self.path());
- ds.finish()
- }
-}
-#[non_exhaustive]
-#[derive(Debug, Clone, PartialEq)]
-pub struct MonsterT {
- pub pos: Option<Vec3T>,
- pub mana: i16,
- pub hp: i16,
- pub name: Option<String>,
- pub inventory: Option<Vec<u8>>,
- pub color: Color,
- pub weapons: Option<Vec<WeaponT>>,
- pub equipped: EquipmentT,
- pub path: Option<Vec<Vec3T>>,
-}
-impl Default for MonsterT {
- fn default() -> Self {
- Self {
- pos: None,
- mana: 150,
- hp: 100,
- name: None,
- inventory: None,
- color: Color::Blue,
- weapons: None,
- equipped: EquipmentT::NONE,
- path: None,
- }
- }
-}
-impl MonsterT {
- pub fn pack<'b>(
- &self,
- _fbb: &mut flatbuffers::FlatBufferBuilder<'b>
- ) -> flatbuffers::WIPOffset<Monster<'b>> {
- let pos_tmp = self.pos.as_ref().map(|x| x.pack());
- let pos = pos_tmp.as_ref();
- let mana = self.mana;
- let hp = self.hp;
- let name = self.name.as_ref().map(|x|{
- _fbb.create_string(x)
- });
- let inventory = self.inventory.as_ref().map(|x|{
- _fbb.create_vector(x)
- });
- let color = self.color;
- let weapons = self.weapons.as_ref().map(|x|{
- let w: Vec<_> = x.iter().map(|t| t.pack(_fbb)).collect();_fbb.create_vector(&w)
- });
- let equipped_type = self.equipped.equipment_type();
- let equipped = self.equipped.pack(_fbb);
- let path = self.path.as_ref().map(|x|{
- let w: Vec<_> = x.iter().map(|t| t.pack()).collect();_fbb.create_vector(&w)
- });
- Monster::create(_fbb, &MonsterArgs{
- pos,
- mana,
- hp,
- name,
- inventory,
- color,
- weapons,
- equipped_type,
- equipped,
- path,
- })
- }
-}
-pub enum WeaponOffset {}
-#[derive(Copy, Clone, PartialEq)]
-
-pub struct Weapon<'a> {
- pub _tab: flatbuffers::Table<'a>,
-}
-
-impl<'a> flatbuffers::Follow<'a> for Weapon<'a> {
- type Inner = Weapon<'a>;
- #[inline]
- fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
- Self { _tab: flatbuffers::Table { buf, loc } }
- }
-}
-
-impl<'a> Weapon<'a> {
- #[inline]
- pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
- Weapon { _tab: table }
- }
- #[allow(unused_mut)]
- pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
- _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
- args: &'args WeaponArgs<'args>) -> flatbuffers::WIPOffset<Weapon<'bldr>> {
- let mut builder = WeaponBuilder::new(_fbb);
- if let Some(x) = args.name { builder.add_name(x); }
- builder.add_damage(args.damage);
- builder.finish()
- }
-
- pub fn unpack(&self) -> WeaponT {
- let name = self.name().map(|x| {
- x.to_string()
- });
- let damage = self.damage();
- WeaponT {
- name,
- damage,
- }
- }
- pub const VT_NAME: flatbuffers::VOffsetT = 4;
- pub const VT_DAMAGE: flatbuffers::VOffsetT = 6;
-
- #[inline]
- pub fn name(&self) -> Option<&'a str> {
- self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(Weapon::VT_NAME, None)
- }
- #[inline]
- pub fn damage(&self) -> i16 {
- self._tab.get::<i16>(Weapon::VT_DAMAGE, Some(0)).unwrap()
- }
-}
-
-impl flatbuffers::Verifiable for Weapon<'_> {
- #[inline]
- fn run_verifier(
- v: &mut flatbuffers::Verifier, pos: usize
- ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
- use self::flatbuffers::Verifiable;
- v.visit_table(pos)?
- .visit_field::<flatbuffers::ForwardsUOffset<&str>>(&"name", Self::VT_NAME, false)?
- .visit_field::<i16>(&"damage", Self::VT_DAMAGE, false)?
- .finish();
- Ok(())
- }
-}
-pub struct WeaponArgs<'a> {
- pub name: Option<flatbuffers::WIPOffset<&'a str>>,
- pub damage: i16,
-}
-impl<'a> Default for WeaponArgs<'a> {
- #[inline]
- fn default() -> Self {
- WeaponArgs {
- name: None,
- damage: 0,
- }
- }
-}
-pub struct WeaponBuilder<'a: 'b, 'b> {
- fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
- start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
-}
-impl<'a: 'b, 'b> WeaponBuilder<'a, 'b> {
- #[inline]
- pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) {
- self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Weapon::VT_NAME, name);
- }
- #[inline]
- pub fn add_damage(&mut self, damage: i16) {
- self.fbb_.push_slot::<i16>(Weapon::VT_DAMAGE, damage, 0);
- }
- #[inline]
- pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> WeaponBuilder<'a, 'b> {
- let start = _fbb.start_table();
- WeaponBuilder {
- fbb_: _fbb,
- start_: start,
- }
- }
- #[inline]
- pub fn finish(self) -> flatbuffers::WIPOffset<Weapon<'a>> {
- let o = self.fbb_.end_table(self.start_);
- flatbuffers::WIPOffset::new(o.value())
- }
-}
-
-impl std::fmt::Debug for Weapon<'_> {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- let mut ds = f.debug_struct("Weapon");
- ds.field("name", &self.name());
- ds.field("damage", &self.damage());
- ds.finish()
- }
-}
-#[non_exhaustive]
-#[derive(Debug, Clone, PartialEq)]
-pub struct WeaponT {
- pub name: Option<String>,
- pub damage: i16,
-}
-impl Default for WeaponT {
- fn default() -> Self {
- Self {
- name: None,
- damage: 0,
- }
- }
-}
-impl WeaponT {
- pub fn pack<'b>(
- &self,
- _fbb: &mut flatbuffers::FlatBufferBuilder<'b>
- ) -> flatbuffers::WIPOffset<Weapon<'b>> {
- let name = self.name.as_ref().map(|x|{
- _fbb.create_string(x)
- });
- let damage = self.damage;
- Weapon::create(_fbb, &WeaponArgs{
- name,
- damage,
- })
- }
-}
-#[inline]
-#[deprecated(since="2.0.0", note="Deprecated in favor of `root_as...` methods.")]
-pub fn get_root_as_monster<'a>(buf: &'a [u8]) -> Monster<'a> {
- unsafe { flatbuffers::root_unchecked::<Monster<'a>>(buf) }
-}
-
-#[inline]
-#[deprecated(since="2.0.0", note="Deprecated in favor of `root_as...` methods.")]
-pub fn get_size_prefixed_root_as_monster<'a>(buf: &'a [u8]) -> Monster<'a> {
- unsafe { flatbuffers::size_prefixed_root_unchecked::<Monster<'a>>(buf) }
-}
-
-#[inline]
-/// Verifies that a buffer of bytes contains a `Monster`
-/// and returns it.
-/// Note that verification is still experimental and may not
-/// catch every error, or be maximally performant. For the
-/// previous, unchecked, behavior use
-/// `root_as_monster_unchecked`.
-pub fn root_as_monster(buf: &[u8]) -> Result<Monster, flatbuffers::InvalidFlatbuffer> {
- flatbuffers::root::<Monster>(buf)
-}
-#[inline]
-/// Verifies that a buffer of bytes contains a size prefixed
-/// `Monster` and returns it.
-/// Note that verification is still experimental and may not
-/// catch every error, or be maximally performant. For the
-/// previous, unchecked, behavior use
-/// `size_prefixed_root_as_monster_unchecked`.
-pub fn size_prefixed_root_as_monster(buf: &[u8]) -> Result<Monster, flatbuffers::InvalidFlatbuffer> {
- flatbuffers::size_prefixed_root::<Monster>(buf)
-}
-#[inline]
-/// Verifies, with the given options, that a buffer of bytes
-/// contains a `Monster` and returns it.
-/// Note that verification is still experimental and may not
-/// catch every error, or be maximally performant. For the
-/// previous, unchecked, behavior use
-/// `root_as_monster_unchecked`.
-pub fn root_as_monster_with_opts<'b, 'o>(
- opts: &'o flatbuffers::VerifierOptions,
- buf: &'b [u8],
-) -> Result<Monster<'b>, flatbuffers::InvalidFlatbuffer> {
- flatbuffers::root_with_opts::<Monster<'b>>(opts, buf)
-}
-#[inline]
-/// Verifies, with the given verifier options, that a buffer of
-/// bytes contains a size prefixed `Monster` and returns
-/// it. Note that verification is still experimental and may not
-/// catch every error, or be maximally performant. For the
-/// previous, unchecked, behavior use
-/// `root_as_monster_unchecked`.
-pub fn size_prefixed_root_as_monster_with_opts<'b, 'o>(
- opts: &'o flatbuffers::VerifierOptions,
- buf: &'b [u8],
-) -> Result<Monster<'b>, flatbuffers::InvalidFlatbuffer> {
- flatbuffers::size_prefixed_root_with_opts::<Monster<'b>>(opts, buf)
-}
-#[inline]
-/// Assumes, without verification, that a buffer of bytes contains a Monster and returns it.
-/// # Safety
-/// Callers must trust the given bytes do indeed contain a valid `Monster`.
-pub unsafe fn root_as_monster_unchecked(buf: &[u8]) -> Monster {
- flatbuffers::root_unchecked::<Monster>(buf)
-}
-#[inline]
-/// Assumes, without verification, that a buffer of bytes contains a size prefixed Monster and returns it.
-/// # Safety
-/// Callers must trust the given bytes do indeed contain a valid size prefixed `Monster`.
-pub unsafe fn size_prefixed_root_as_monster_unchecked(buf: &[u8]) -> Monster {
- flatbuffers::size_prefixed_root_unchecked::<Monster>(buf)
-}
-#[inline]
-pub fn finish_monster_buffer<'a, 'b>(
- fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
- root: flatbuffers::WIPOffset<Monster<'a>>) {
- fbb.finish(root, None);
-}
-
-#[inline]
-pub fn finish_size_prefixed_monster_buffer<'a, 'b>(fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<Monster<'a>>) {
- fbb.finish_size_prefixed(root, None);
-}
-} // pub mod Sample
-} // pub mod MyGame
-
diff --git a/chromium/third_party/flatbuffers/src/samples/rust_generated/mod.rs b/chromium/third_party/flatbuffers/src/samples/rust_generated/mod.rs
new file mode 100644
index 00000000000..8560153d6ab
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/samples/rust_generated/mod.rs
@@ -0,0 +1,17 @@
+// Automatically generated by the Flatbuffers compiler. Do not modify.
+pub mod my_game {
+ use super::*;
+ pub mod sample {
+ use super::*;
+ mod color_generated;
+ pub use self::color_generated::*;
+ mod equipment_generated;
+ pub use self::equipment_generated::*;
+ mod vec_3_generated;
+ pub use self::vec_3_generated::*;
+ mod monster_generated;
+ pub use self::monster_generated::*;
+ mod weapon_generated;
+ pub use self::weapon_generated::*;
+ } // sample
+} // my_game
diff --git a/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/color_generated.rs b/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/color_generated.rs
new file mode 100644
index 00000000000..1ef1d31b5c7
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/color_generated.rs
@@ -0,0 +1,97 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+extern crate flatbuffers;
+use std::mem;
+use std::cmp::Ordering;
+use self::flatbuffers::{EndianScalar, Follow};
+use super::*;
+#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
+pub const ENUM_MIN_COLOR: i8 = 0;
+#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
+pub const ENUM_MAX_COLOR: i8 = 2;
+#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
+#[allow(non_camel_case_types)]
+pub const ENUM_VALUES_COLOR: [Color; 3] = [
+ Color::Red,
+ Color::Green,
+ Color::Blue,
+];
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+pub struct Color(pub i8);
+#[allow(non_upper_case_globals)]
+impl Color {
+ pub const Red: Self = Self(0);
+ pub const Green: Self = Self(1);
+ pub const Blue: Self = Self(2);
+
+ pub const ENUM_MIN: i8 = 0;
+ pub const ENUM_MAX: i8 = 2;
+ pub const ENUM_VALUES: &'static [Self] = &[
+ Self::Red,
+ Self::Green,
+ Self::Blue,
+ ];
+ /// Returns the variant's name or "" if unknown.
+ pub fn variant_name(self) -> Option<&'static str> {
+ match self {
+ Self::Red => Some("Red"),
+ Self::Green => Some("Green"),
+ Self::Blue => Some("Blue"),
+ _ => None,
+ }
+ }
+}
+impl std::fmt::Debug for Color {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ if let Some(name) = self.variant_name() {
+ f.write_str(name)
+ } else {
+ f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
+ }
+ }
+}
+impl<'a> flatbuffers::Follow<'a> for Color {
+ type Inner = Self;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ let b = unsafe {
+ flatbuffers::read_scalar_at::<i8>(buf, loc)
+ };
+ Self(b)
+ }
+}
+
+impl flatbuffers::Push for Color {
+ type Output = Color;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ unsafe { flatbuffers::emplace_scalar::<i8>(dst, self.0); }
+ }
+}
+
+impl flatbuffers::EndianScalar for Color {
+ #[inline]
+ fn to_little_endian(self) -> Self {
+ let b = i8::to_le(self.0);
+ Self(b)
+ }
+ #[inline]
+ #[allow(clippy::wrong_self_convention)]
+ fn from_little_endian(self) -> Self {
+ let b = i8::from_le(self.0);
+ Self(b)
+ }
+}
+
+impl<'a> flatbuffers::Verifiable for Color {
+ #[inline]
+ fn run_verifier(
+ v: &mut flatbuffers::Verifier, pos: usize
+ ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
+ use self::flatbuffers::Verifiable;
+ i8::run_verifier(v, pos)
+ }
+}
+
+impl flatbuffers::SimpleToVerifyInSlice for Color {}
diff --git a/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/equipment_generated.rs b/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/equipment_generated.rs
new file mode 100644
index 00000000000..919958a355a
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/equipment_generated.rs
@@ -0,0 +1,142 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+extern crate flatbuffers;
+use std::mem;
+use std::cmp::Ordering;
+use self::flatbuffers::{EndianScalar, Follow};
+use super::*;
+#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
+pub const ENUM_MIN_EQUIPMENT: u8 = 0;
+#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
+pub const ENUM_MAX_EQUIPMENT: u8 = 1;
+#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
+#[allow(non_camel_case_types)]
+pub const ENUM_VALUES_EQUIPMENT: [Equipment; 2] = [
+ Equipment::NONE,
+ Equipment::Weapon,
+];
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+pub struct Equipment(pub u8);
+#[allow(non_upper_case_globals)]
+impl Equipment {
+ pub const NONE: Self = Self(0);
+ pub const Weapon: Self = Self(1);
+
+ pub const ENUM_MIN: u8 = 0;
+ pub const ENUM_MAX: u8 = 1;
+ pub const ENUM_VALUES: &'static [Self] = &[
+ Self::NONE,
+ Self::Weapon,
+ ];
+ /// Returns the variant's name or "" if unknown.
+ pub fn variant_name(self) -> Option<&'static str> {
+ match self {
+ Self::NONE => Some("NONE"),
+ Self::Weapon => Some("Weapon"),
+ _ => None,
+ }
+ }
+}
+impl std::fmt::Debug for Equipment {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ if let Some(name) = self.variant_name() {
+ f.write_str(name)
+ } else {
+ f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
+ }
+ }
+}
+impl<'a> flatbuffers::Follow<'a> for Equipment {
+ type Inner = Self;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ let b = unsafe {
+ flatbuffers::read_scalar_at::<u8>(buf, loc)
+ };
+ Self(b)
+ }
+}
+
+impl flatbuffers::Push for Equipment {
+ type Output = Equipment;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ unsafe { flatbuffers::emplace_scalar::<u8>(dst, self.0); }
+ }
+}
+
+impl flatbuffers::EndianScalar for Equipment {
+ #[inline]
+ fn to_little_endian(self) -> Self {
+ let b = u8::to_le(self.0);
+ Self(b)
+ }
+ #[inline]
+ #[allow(clippy::wrong_self_convention)]
+ fn from_little_endian(self) -> Self {
+ let b = u8::from_le(self.0);
+ Self(b)
+ }
+}
+
+impl<'a> flatbuffers::Verifiable for Equipment {
+ #[inline]
+ fn run_verifier(
+ v: &mut flatbuffers::Verifier, pos: usize
+ ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
+ use self::flatbuffers::Verifiable;
+ u8::run_verifier(v, pos)
+ }
+}
+
+impl flatbuffers::SimpleToVerifyInSlice for Equipment {}
+pub struct EquipmentUnionTableOffset {}
+
+#[allow(clippy::upper_case_acronyms)]
+#[non_exhaustive]
+#[derive(Debug, Clone, PartialEq)]
+pub enum EquipmentT {
+ NONE,
+ Weapon(Box<WeaponT>),
+}
+impl Default for EquipmentT {
+ fn default() -> Self {
+ Self::NONE
+ }
+}
+impl EquipmentT {
+ pub fn equipment_type(&self) -> Equipment {
+ match self {
+ Self::NONE => Equipment::NONE,
+ Self::Weapon(_) => Equipment::Weapon,
+ }
+ }
+ pub fn pack(&self, fbb: &mut flatbuffers::FlatBufferBuilder) -> Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>> {
+ match self {
+ Self::NONE => None,
+ Self::Weapon(v) => Some(v.pack(fbb).as_union_value()),
+ }
+ }
+ /// If the union variant matches, return the owned WeaponT, setting the union to NONE.
+ pub fn take_weapon(&mut self) -> Option<Box<WeaponT>> {
+ if let Self::Weapon(_) = self {
+ let v = std::mem::replace(self, Self::NONE);
+ if let Self::Weapon(w) = v {
+ Some(w)
+ } else {
+ unreachable!()
+ }
+ } else {
+ None
+ }
+ }
+ /// If the union variant matches, return a reference to the WeaponT.
+ pub fn as_weapon(&self) -> Option<&WeaponT> {
+ if let Self::Weapon(v) = self { Some(v.as_ref()) } else { None }
+ }
+ /// If the union variant matches, return a mutable reference to the WeaponT.
+ pub fn as_weapon_mut(&mut self) -> Option<&mut WeaponT> {
+ if let Self::Weapon(v) = self { Some(v.as_mut()) } else { None }
+ }
+}
diff --git a/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/monster_generated.rs b/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/monster_generated.rs
new file mode 100644
index 00000000000..d8988321247
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/monster_generated.rs
@@ -0,0 +1,444 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+extern crate flatbuffers;
+use std::mem;
+use std::cmp::Ordering;
+use self::flatbuffers::{EndianScalar, Follow};
+use super::*;
+pub enum MonsterOffset {}
+#[derive(Copy, Clone, PartialEq)]
+
+pub struct Monster<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for Monster<'a> {
+ type Inner = Monster<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self { _tab: flatbuffers::Table { buf, loc } }
+ }
+}
+
+impl<'a> Monster<'a> {
+ pub const VT_POS: flatbuffers::VOffsetT = 4;
+ pub const VT_MANA: flatbuffers::VOffsetT = 6;
+ pub const VT_HP: flatbuffers::VOffsetT = 8;
+ pub const VT_NAME: flatbuffers::VOffsetT = 10;
+ pub const VT_INVENTORY: flatbuffers::VOffsetT = 14;
+ pub const VT_COLOR: flatbuffers::VOffsetT = 16;
+ pub const VT_WEAPONS: flatbuffers::VOffsetT = 18;
+ pub const VT_EQUIPPED_TYPE: flatbuffers::VOffsetT = 20;
+ pub const VT_EQUIPPED: flatbuffers::VOffsetT = 22;
+ pub const VT_PATH: flatbuffers::VOffsetT = 24;
+
+ pub const fn get_fully_qualified_name() -> &'static str {
+ "MyGame.Sample.Monster"
+ }
+
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ Monster { _tab: table }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args MonsterArgs<'args>
+ ) -> flatbuffers::WIPOffset<Monster<'bldr>> {
+ let mut builder = MonsterBuilder::new(_fbb);
+ if let Some(x) = args.path { builder.add_path(x); }
+ if let Some(x) = args.equipped { builder.add_equipped(x); }
+ if let Some(x) = args.weapons { builder.add_weapons(x); }
+ if let Some(x) = args.inventory { builder.add_inventory(x); }
+ if let Some(x) = args.name { builder.add_name(x); }
+ if let Some(x) = args.pos { builder.add_pos(x); }
+ builder.add_hp(args.hp);
+ builder.add_mana(args.mana);
+ builder.add_equipped_type(args.equipped_type);
+ builder.add_color(args.color);
+ builder.finish()
+ }
+
+ pub fn unpack(&self) -> MonsterT {
+ let pos = self.pos().map(|x| {
+ x.unpack()
+ });
+ let mana = self.mana();
+ let hp = self.hp();
+ let name = self.name().map(|x| {
+ x.to_string()
+ });
+ let inventory = self.inventory().map(|x| {
+ x.to_vec()
+ });
+ let color = self.color();
+ let weapons = self.weapons().map(|x| {
+ x.iter().map(|t| t.unpack()).collect()
+ });
+ let equipped = match self.equipped_type() {
+ Equipment::NONE => EquipmentT::NONE,
+ Equipment::Weapon => EquipmentT::Weapon(Box::new(
+ self.equipped_as_weapon()
+ .expect("Invalid union table, expected `Equipment::Weapon`.")
+ .unpack()
+ )),
+ _ => EquipmentT::NONE,
+ };
+ let path = self.path().map(|x| {
+ x.iter().map(|t| t.unpack()).collect()
+ });
+ MonsterT {
+ pos,
+ mana,
+ hp,
+ name,
+ inventory,
+ color,
+ weapons,
+ equipped,
+ path,
+ }
+ }
+
+ #[inline]
+ pub fn pos(&self) -> Option<&'a Vec3> {
+ self._tab.get::<Vec3>(Monster::VT_POS, None)
+ }
+ #[inline]
+ pub fn mana(&self) -> i16 {
+ self._tab.get::<i16>(Monster::VT_MANA, Some(150)).unwrap()
+ }
+ #[inline]
+ pub fn hp(&self) -> i16 {
+ self._tab.get::<i16>(Monster::VT_HP, Some(100)).unwrap()
+ }
+ #[inline]
+ pub fn name(&self) -> Option<&'a str> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(Monster::VT_NAME, None)
+ }
+ #[inline]
+ pub fn inventory(&self) -> Option<&'a [u8]> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(Monster::VT_INVENTORY, None).map(|v| v.safe_slice())
+ }
+ #[inline]
+ pub fn color(&self) -> Color {
+ self._tab.get::<Color>(Monster::VT_COLOR, Some(Color::Blue)).unwrap()
+ }
+ #[inline]
+ pub fn weapons(&self) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Weapon<'a>>>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Weapon>>>>(Monster::VT_WEAPONS, None)
+ }
+ #[inline]
+ pub fn equipped_type(&self) -> Equipment {
+ self._tab.get::<Equipment>(Monster::VT_EQUIPPED_TYPE, Some(Equipment::NONE)).unwrap()
+ }
+ #[inline]
+ pub fn equipped(&self) -> Option<flatbuffers::Table<'a>> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(Monster::VT_EQUIPPED, None)
+ }
+ #[inline]
+ pub fn path(&self) -> Option<&'a [Vec3]> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, Vec3>>>(Monster::VT_PATH, None).map(|v| v.safe_slice())
+ }
+ #[inline]
+ #[allow(non_snake_case)]
+ pub fn equipped_as_weapon(&self) -> Option<Weapon<'a>> {
+ if self.equipped_type() == Equipment::Weapon {
+ self.equipped().map(Weapon::init_from_table)
+ } else {
+ None
+ }
+ }
+
+}
+
+impl flatbuffers::Verifiable for Monster<'_> {
+ #[inline]
+ fn run_verifier(
+ v: &mut flatbuffers::Verifier, pos: usize
+ ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
+ use self::flatbuffers::Verifiable;
+ v.visit_table(pos)?
+ .visit_field::<Vec3>("pos", Self::VT_POS, false)?
+ .visit_field::<i16>("mana", Self::VT_MANA, false)?
+ .visit_field::<i16>("hp", Self::VT_HP, false)?
+ .visit_field::<flatbuffers::ForwardsUOffset<&str>>("name", Self::VT_NAME, false)?
+ .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u8>>>("inventory", Self::VT_INVENTORY, false)?
+ .visit_field::<Color>("color", Self::VT_COLOR, false)?
+ .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<Weapon>>>>("weapons", Self::VT_WEAPONS, false)?
+ .visit_union::<Equipment, _>("equipped_type", Self::VT_EQUIPPED_TYPE, "equipped", Self::VT_EQUIPPED, false, |key, v, pos| {
+ match key {
+ Equipment::Weapon => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Weapon>>("Equipment::Weapon", pos),
+ _ => Ok(()),
+ }
+ })?
+ .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, Vec3>>>("path", Self::VT_PATH, false)?
+ .finish();
+ Ok(())
+ }
+}
+pub struct MonsterArgs<'a> {
+ pub pos: Option<&'a Vec3>,
+ pub mana: i16,
+ pub hp: i16,
+ pub name: Option<flatbuffers::WIPOffset<&'a str>>,
+ pub inventory: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>,
+ pub color: Color,
+ pub weapons: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<Weapon<'a>>>>>,
+ pub equipped_type: Equipment,
+ pub equipped: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
+ pub path: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, Vec3>>>,
+}
+impl<'a> Default for MonsterArgs<'a> {
+ #[inline]
+ fn default() -> Self {
+ MonsterArgs {
+ pos: None,
+ mana: 150,
+ hp: 100,
+ name: None,
+ inventory: None,
+ color: Color::Blue,
+ weapons: None,
+ equipped_type: Equipment::NONE,
+ equipped: None,
+ path: None,
+ }
+ }
+}
+pub struct MonsterBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> MonsterBuilder<'a, 'b> {
+ #[inline]
+ pub fn add_pos(&mut self, pos: &Vec3) {
+ self.fbb_.push_slot_always::<&Vec3>(Monster::VT_POS, pos);
+ }
+ #[inline]
+ pub fn add_mana(&mut self, mana: i16) {
+ self.fbb_.push_slot::<i16>(Monster::VT_MANA, mana, 150);
+ }
+ #[inline]
+ pub fn add_hp(&mut self, hp: i16) {
+ self.fbb_.push_slot::<i16>(Monster::VT_HP, hp, 100);
+ }
+ #[inline]
+ pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_NAME, name);
+ }
+ #[inline]
+ pub fn add_inventory(&mut self, inventory: flatbuffers::WIPOffset<flatbuffers::Vector<'b , u8>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_INVENTORY, inventory);
+ }
+ #[inline]
+ pub fn add_color(&mut self, color: Color) {
+ self.fbb_.push_slot::<Color>(Monster::VT_COLOR, color, Color::Blue);
+ }
+ #[inline]
+ pub fn add_weapons(&mut self, weapons: flatbuffers::WIPOffset<flatbuffers::Vector<'b , flatbuffers::ForwardsUOffset<Weapon<'b >>>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_WEAPONS, weapons);
+ }
+ #[inline]
+ pub fn add_equipped_type(&mut self, equipped_type: Equipment) {
+ self.fbb_.push_slot::<Equipment>(Monster::VT_EQUIPPED_TYPE, equipped_type, Equipment::NONE);
+ }
+ #[inline]
+ pub fn add_equipped(&mut self, equipped: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_EQUIPPED, equipped);
+ }
+ #[inline]
+ pub fn add_path(&mut self, path: flatbuffers::WIPOffset<flatbuffers::Vector<'b , Vec3>>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Monster::VT_PATH, path);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> MonsterBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ MonsterBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<Monster<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+impl std::fmt::Debug for Monster<'_> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let mut ds = f.debug_struct("Monster");
+ ds.field("pos", &self.pos());
+ ds.field("mana", &self.mana());
+ ds.field("hp", &self.hp());
+ ds.field("name", &self.name());
+ ds.field("inventory", &self.inventory());
+ ds.field("color", &self.color());
+ ds.field("weapons", &self.weapons());
+ ds.field("equipped_type", &self.equipped_type());
+ match self.equipped_type() {
+ Equipment::Weapon => {
+ if let Some(x) = self.equipped_as_weapon() {
+ ds.field("equipped", &x)
+ } else {
+ ds.field("equipped", &"InvalidFlatbuffer: Union discriminant does not match value.")
+ }
+ },
+ _ => {
+ let x: Option<()> = None;
+ ds.field("equipped", &x)
+ },
+ };
+ ds.field("path", &self.path());
+ ds.finish()
+ }
+}
+#[non_exhaustive]
+#[derive(Debug, Clone, PartialEq)]
+pub struct MonsterT {
+ pub pos: Option<Vec3T>,
+ pub mana: i16,
+ pub hp: i16,
+ pub name: Option<String>,
+ pub inventory: Option<Vec<u8>>,
+ pub color: Color,
+ pub weapons: Option<Vec<WeaponT>>,
+ pub equipped: EquipmentT,
+ pub path: Option<Vec<Vec3T>>,
+}
+impl Default for MonsterT {
+ fn default() -> Self {
+ Self {
+ pos: None,
+ mana: 150,
+ hp: 100,
+ name: None,
+ inventory: None,
+ color: Color::Blue,
+ weapons: None,
+ equipped: EquipmentT::NONE,
+ path: None,
+ }
+ }
+}
+impl MonsterT {
+ pub fn pack<'b>(
+ &self,
+ _fbb: &mut flatbuffers::FlatBufferBuilder<'b>
+ ) -> flatbuffers::WIPOffset<Monster<'b>> {
+ let pos_tmp = self.pos.as_ref().map(|x| x.pack());
+ let pos = pos_tmp.as_ref();
+ let mana = self.mana;
+ let hp = self.hp;
+ let name = self.name.as_ref().map(|x|{
+ _fbb.create_string(x)
+ });
+ let inventory = self.inventory.as_ref().map(|x|{
+ _fbb.create_vector(x)
+ });
+ let color = self.color;
+ let weapons = self.weapons.as_ref().map(|x|{
+ let w: Vec<_> = x.iter().map(|t| t.pack(_fbb)).collect();_fbb.create_vector(&w)
+ });
+ let equipped_type = self.equipped.equipment_type();
+ let equipped = self.equipped.pack(_fbb);
+ let path = self.path.as_ref().map(|x|{
+ let w: Vec<_> = x.iter().map(|t| t.pack()).collect();_fbb.create_vector(&w)
+ });
+ Monster::create(_fbb, &MonsterArgs{
+ pos,
+ mana,
+ hp,
+ name,
+ inventory,
+ color,
+ weapons,
+ equipped_type,
+ equipped,
+ path,
+ })
+ }
+}
+#[inline]
+#[deprecated(since="2.0.0", note="Deprecated in favor of `root_as...` methods.")]
+pub fn get_root_as_monster<'a>(buf: &'a [u8]) -> Monster<'a> {
+ unsafe { flatbuffers::root_unchecked::<Monster<'a>>(buf) }
+}
+
+#[inline]
+#[deprecated(since="2.0.0", note="Deprecated in favor of `root_as...` methods.")]
+pub fn get_size_prefixed_root_as_monster<'a>(buf: &'a [u8]) -> Monster<'a> {
+ unsafe { flatbuffers::size_prefixed_root_unchecked::<Monster<'a>>(buf) }
+}
+
+#[inline]
+/// Verifies that a buffer of bytes contains a `Monster`
+/// and returns it.
+/// Note that verification is still experimental and may not
+/// catch every error, or be maximally performant. For the
+/// previous, unchecked, behavior use
+/// `root_as_monster_unchecked`.
+pub fn root_as_monster(buf: &[u8]) -> Result<Monster, flatbuffers::InvalidFlatbuffer> {
+ flatbuffers::root::<Monster>(buf)
+}
+#[inline]
+/// Verifies that a buffer of bytes contains a size prefixed
+/// `Monster` and returns it.
+/// Note that verification is still experimental and may not
+/// catch every error, or be maximally performant. For the
+/// previous, unchecked, behavior use
+/// `size_prefixed_root_as_monster_unchecked`.
+pub fn size_prefixed_root_as_monster(buf: &[u8]) -> Result<Monster, flatbuffers::InvalidFlatbuffer> {
+ flatbuffers::size_prefixed_root::<Monster>(buf)
+}
+#[inline]
+/// Verifies, with the given options, that a buffer of bytes
+/// contains a `Monster` and returns it.
+/// Note that verification is still experimental and may not
+/// catch every error, or be maximally performant. For the
+/// previous, unchecked, behavior use
+/// `root_as_monster_unchecked`.
+pub fn root_as_monster_with_opts<'b, 'o>(
+ opts: &'o flatbuffers::VerifierOptions,
+ buf: &'b [u8],
+) -> Result<Monster<'b>, flatbuffers::InvalidFlatbuffer> {
+ flatbuffers::root_with_opts::<Monster<'b>>(opts, buf)
+}
+#[inline]
+/// Verifies, with the given verifier options, that a buffer of
+/// bytes contains a size prefixed `Monster` and returns
+/// it. Note that verification is still experimental and may not
+/// catch every error, or be maximally performant. For the
+/// previous, unchecked, behavior use
+/// `root_as_monster_unchecked`.
+pub fn size_prefixed_root_as_monster_with_opts<'b, 'o>(
+ opts: &'o flatbuffers::VerifierOptions,
+ buf: &'b [u8],
+) -> Result<Monster<'b>, flatbuffers::InvalidFlatbuffer> {
+ flatbuffers::size_prefixed_root_with_opts::<Monster<'b>>(opts, buf)
+}
+#[inline]
+/// Assumes, without verification, that a buffer of bytes contains a Monster and returns it.
+/// # Safety
+/// Callers must trust the given bytes do indeed contain a valid `Monster`.
+pub unsafe fn root_as_monster_unchecked(buf: &[u8]) -> Monster {
+ flatbuffers::root_unchecked::<Monster>(buf)
+}
+#[inline]
+/// Assumes, without verification, that a buffer of bytes contains a size prefixed Monster and returns it.
+/// # Safety
+/// Callers must trust the given bytes do indeed contain a valid size prefixed `Monster`.
+pub unsafe fn size_prefixed_root_as_monster_unchecked(buf: &[u8]) -> Monster {
+ flatbuffers::size_prefixed_root_unchecked::<Monster>(buf)
+}
+#[inline]
+pub fn finish_monster_buffer<'a, 'b>(
+ fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ root: flatbuffers::WIPOffset<Monster<'a>>) {
+ fbb.finish(root, None);
+}
+
+#[inline]
+pub fn finish_size_prefixed_monster_buffer<'a, 'b>(fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<Monster<'a>>) {
+ fbb.finish_size_prefixed(root, None);
+}
diff --git a/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/vec_3_generated.rs b/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/vec_3_generated.rs
new file mode 100644
index 00000000000..33805b3f90a
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/vec_3_generated.rs
@@ -0,0 +1,184 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+extern crate flatbuffers;
+use std::mem;
+use std::cmp::Ordering;
+use self::flatbuffers::{EndianScalar, Follow};
+use super::*;
+// struct Vec3, aligned to 4
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq)]
+pub struct Vec3(pub [u8; 12]);
+impl Default for Vec3 {
+ fn default() -> Self {
+ Self([0; 12])
+ }
+}
+impl std::fmt::Debug for Vec3 {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ f.debug_struct("Vec3")
+ .field("x", &self.x())
+ .field("y", &self.y())
+ .field("z", &self.z())
+ .finish()
+ }
+}
+
+impl flatbuffers::SimpleToVerifyInSlice for Vec3 {}
+impl flatbuffers::SafeSliceAccess for Vec3 {}
+impl<'a> flatbuffers::Follow<'a> for Vec3 {
+ type Inner = &'a Vec3;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ <&'a Vec3>::follow(buf, loc)
+ }
+}
+impl<'a> flatbuffers::Follow<'a> for &'a Vec3 {
+ type Inner = &'a Vec3;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ flatbuffers::follow_cast_ref::<Vec3>(buf, loc)
+ }
+}
+impl<'b> flatbuffers::Push for Vec3 {
+ type Output = Vec3;
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(self as *const Vec3 as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+impl<'b> flatbuffers::Push for &'b Vec3 {
+ type Output = Vec3;
+
+ #[inline]
+ fn push(&self, dst: &mut [u8], _rest: &[u8]) {
+ let src = unsafe {
+ ::std::slice::from_raw_parts(*self as *const Vec3 as *const u8, Self::size())
+ };
+ dst.copy_from_slice(src);
+ }
+}
+
+impl<'a> flatbuffers::Verifiable for Vec3 {
+ #[inline]
+ fn run_verifier(
+ v: &mut flatbuffers::Verifier, pos: usize
+ ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
+ use self::flatbuffers::Verifiable;
+ v.in_buffer::<Self>(pos)
+ }
+}
+impl<'a> Vec3 {
+ #[allow(clippy::too_many_arguments)]
+ pub fn new(
+ x: f32,
+ y: f32,
+ z: f32,
+ ) -> Self {
+ let mut s = Self([0; 12]);
+ s.set_x(x);
+ s.set_y(y);
+ s.set_z(z);
+ s
+ }
+
+ pub const fn get_fully_qualified_name() -> &'static str {
+ "MyGame.Sample.Vec3"
+ }
+
+ pub fn x(&self) -> f32 {
+ let mut mem = core::mem::MaybeUninit::<f32>::uninit();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ self.0[0..].as_ptr(),
+ mem.as_mut_ptr() as *mut u8,
+ core::mem::size_of::<f32>(),
+ );
+ mem.assume_init()
+ }.from_little_endian()
+ }
+
+ pub fn set_x(&mut self, x: f32) {
+ let x_le = x.to_little_endian();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ &x_le as *const f32 as *const u8,
+ self.0[0..].as_mut_ptr(),
+ core::mem::size_of::<f32>(),
+ );
+ }
+ }
+
+ pub fn y(&self) -> f32 {
+ let mut mem = core::mem::MaybeUninit::<f32>::uninit();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ self.0[4..].as_ptr(),
+ mem.as_mut_ptr() as *mut u8,
+ core::mem::size_of::<f32>(),
+ );
+ mem.assume_init()
+ }.from_little_endian()
+ }
+
+ pub fn set_y(&mut self, x: f32) {
+ let x_le = x.to_little_endian();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ &x_le as *const f32 as *const u8,
+ self.0[4..].as_mut_ptr(),
+ core::mem::size_of::<f32>(),
+ );
+ }
+ }
+
+ pub fn z(&self) -> f32 {
+ let mut mem = core::mem::MaybeUninit::<f32>::uninit();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ self.0[8..].as_ptr(),
+ mem.as_mut_ptr() as *mut u8,
+ core::mem::size_of::<f32>(),
+ );
+ mem.assume_init()
+ }.from_little_endian()
+ }
+
+ pub fn set_z(&mut self, x: f32) {
+ let x_le = x.to_little_endian();
+ unsafe {
+ core::ptr::copy_nonoverlapping(
+ &x_le as *const f32 as *const u8,
+ self.0[8..].as_mut_ptr(),
+ core::mem::size_of::<f32>(),
+ );
+ }
+ }
+
+ pub fn unpack(&self) -> Vec3T {
+ Vec3T {
+ x: self.x(),
+ y: self.y(),
+ z: self.z(),
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Default)]
+pub struct Vec3T {
+ pub x: f32,
+ pub y: f32,
+ pub z: f32,
+}
+impl Vec3T {
+ pub fn pack(&self) -> Vec3 {
+ Vec3::new(
+ self.x,
+ self.y,
+ self.z,
+ )
+ }
+}
+
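Vec3 above is stored as 12 raw little-endian bytes; each accessor copies its field out with ptr::copy_nonoverlapping and converts endianness, while Vec3T is the owned object-API mirror. A small round-trip sketch, assuming the my_game::sample module path used by the sample imports:

use my_game::sample::{Vec3, Vec3T};

fn vec3_round_trip() {
    let v = Vec3::new(1.0, 2.0, 3.0); // writes the three f32s into the [u8; 12] backing store
    assert_eq!(v.y(), 2.0);           // accessors read back in native endianness
    let owned: Vec3T = v.unpack();    // object-API copy with plain f32 fields
    assert_eq!(owned.pack(), v);      // pack() reconstructs the byte-level struct
}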
diff --git a/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/weapon_generated.rs b/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/weapon_generated.rs
new file mode 100644
index 00000000000..e2cede9d666
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/samples/rust_generated/my_game/sample/weapon_generated.rs
@@ -0,0 +1,156 @@
+// automatically generated by the FlatBuffers compiler, do not modify
+extern crate flatbuffers;
+use std::mem;
+use std::cmp::Ordering;
+use self::flatbuffers::{EndianScalar, Follow};
+use super::*;
+pub enum WeaponOffset {}
+#[derive(Copy, Clone, PartialEq)]
+
+pub struct Weapon<'a> {
+ pub _tab: flatbuffers::Table<'a>,
+}
+
+impl<'a> flatbuffers::Follow<'a> for Weapon<'a> {
+ type Inner = Weapon<'a>;
+ #[inline]
+ fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
+ Self { _tab: flatbuffers::Table { buf, loc } }
+ }
+}
+
+impl<'a> Weapon<'a> {
+ pub const VT_NAME: flatbuffers::VOffsetT = 4;
+ pub const VT_DAMAGE: flatbuffers::VOffsetT = 6;
+
+ pub const fn get_fully_qualified_name() -> &'static str {
+ "MyGame.Sample.Weapon"
+ }
+
+ #[inline]
+ pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
+ Weapon { _tab: table }
+ }
+ #[allow(unused_mut)]
+ pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
+ _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
+ args: &'args WeaponArgs<'args>
+ ) -> flatbuffers::WIPOffset<Weapon<'bldr>> {
+ let mut builder = WeaponBuilder::new(_fbb);
+ if let Some(x) = args.name { builder.add_name(x); }
+ builder.add_damage(args.damage);
+ builder.finish()
+ }
+
+ pub fn unpack(&self) -> WeaponT {
+ let name = self.name().map(|x| {
+ x.to_string()
+ });
+ let damage = self.damage();
+ WeaponT {
+ name,
+ damage,
+ }
+ }
+
+ #[inline]
+ pub fn name(&self) -> Option<&'a str> {
+ self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(Weapon::VT_NAME, None)
+ }
+ #[inline]
+ pub fn damage(&self) -> i16 {
+ self._tab.get::<i16>(Weapon::VT_DAMAGE, Some(0)).unwrap()
+ }
+}
+
+impl flatbuffers::Verifiable for Weapon<'_> {
+ #[inline]
+ fn run_verifier(
+ v: &mut flatbuffers::Verifier, pos: usize
+ ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
+ use self::flatbuffers::Verifiable;
+ v.visit_table(pos)?
+ .visit_field::<flatbuffers::ForwardsUOffset<&str>>("name", Self::VT_NAME, false)?
+ .visit_field::<i16>("damage", Self::VT_DAMAGE, false)?
+ .finish();
+ Ok(())
+ }
+}
+pub struct WeaponArgs<'a> {
+ pub name: Option<flatbuffers::WIPOffset<&'a str>>,
+ pub damage: i16,
+}
+impl<'a> Default for WeaponArgs<'a> {
+ #[inline]
+ fn default() -> Self {
+ WeaponArgs {
+ name: None,
+ damage: 0,
+ }
+ }
+}
+pub struct WeaponBuilder<'a: 'b, 'b> {
+ fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
+ start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
+}
+impl<'a: 'b, 'b> WeaponBuilder<'a, 'b> {
+ #[inline]
+ pub fn add_name(&mut self, name: flatbuffers::WIPOffset<&'b str>) {
+ self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Weapon::VT_NAME, name);
+ }
+ #[inline]
+ pub fn add_damage(&mut self, damage: i16) {
+ self.fbb_.push_slot::<i16>(Weapon::VT_DAMAGE, damage, 0);
+ }
+ #[inline]
+ pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> WeaponBuilder<'a, 'b> {
+ let start = _fbb.start_table();
+ WeaponBuilder {
+ fbb_: _fbb,
+ start_: start,
+ }
+ }
+ #[inline]
+ pub fn finish(self) -> flatbuffers::WIPOffset<Weapon<'a>> {
+ let o = self.fbb_.end_table(self.start_);
+ flatbuffers::WIPOffset::new(o.value())
+ }
+}
+
+impl std::fmt::Debug for Weapon<'_> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let mut ds = f.debug_struct("Weapon");
+ ds.field("name", &self.name());
+ ds.field("damage", &self.damage());
+ ds.finish()
+ }
+}
+#[non_exhaustive]
+#[derive(Debug, Clone, PartialEq)]
+pub struct WeaponT {
+ pub name: Option<String>,
+ pub damage: i16,
+}
+impl Default for WeaponT {
+ fn default() -> Self {
+ Self {
+ name: None,
+ damage: 0,
+ }
+ }
+}
+impl WeaponT {
+ pub fn pack<'b>(
+ &self,
+ _fbb: &mut flatbuffers::FlatBufferBuilder<'b>
+ ) -> flatbuffers::WIPOffset<Weapon<'b>> {
+ let name = self.name.as_ref().map(|x|{
+ _fbb.create_string(x)
+ });
+ let damage = self.damage;
+ Weapon::create(_fbb, &WeaponArgs{
+ name,
+ damage,
+ })
+ }
+}
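
Weapon above follows the usual table pattern: WeaponArgs plus WeaponBuilder for serialization and WeaponT for the object API. A short write/read sketch; flatbuffers::root and the my_game::sample path are assumptions, chosen to match the Verifiable impl generated above:

use my_game::sample::{Weapon, WeaponArgs, WeaponT};

fn weapon_round_trip() {
    let mut fbb = flatbuffers::FlatBufferBuilder::with_capacity(64);
    let name = fbb.create_string("Sword");
    let sword = Weapon::create(&mut fbb, &WeaponArgs { name: Some(name), damage: 3 });
    fbb.finish(sword, None);

    // Verify and follow the root table, then convert to the owned object-API type.
    let weapon = flatbuffers::root::<Weapon>(fbb.finished_data()).unwrap();
    let owned: WeaponT = weapon.unpack();
    assert_eq!(owned.damage, 3);
    assert_eq!(owned.name.as_deref(), Some("Sword"));
}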
diff --git a/chromium/third_party/flatbuffers/src/samples/sample_binary.rs b/chromium/third_party/flatbuffers/src/samples/sample_binary.rs
index 6972e7ff077..2f010b8d3cf 100644
--- a/chromium/third_party/flatbuffers/src/samples/sample_binary.rs
+++ b/chromium/third_party/flatbuffers/src/samples/sample_binary.rs
@@ -19,10 +19,9 @@ extern crate flatbuffers;
// import the generated code
#[allow(dead_code, unused_imports)]
-#[path = "./monster_generated.rs"]
#[allow(clippy::approx_constant)] // We use low precision PI as a default value.
-mod monster_generated;
-pub use monster_generated::my_game::sample::{Color, Equipment,
+mod rust_generated;
+pub use rust_generated::my_game::sample::{Color, Equipment,
Monster, MonsterArgs,
Vec3,
Weapon, WeaponArgs};
@@ -33,7 +32,7 @@ pub use monster_generated::my_game::sample::{Color, Equipment,
fn main() {
// Build up a serialized buffer algorithmically.
// Initialize it with a capacity of 1024 bytes.
- let mut builder = flatbuffers::FlatBufferBuilder::new_with_capacity(1024);
+ let mut builder = flatbuffers::FlatBufferBuilder::with_capacity(1024);
// Serialize some weapons for the Monster: A 'sword' and an 'axe'.
let weapon_one_name = builder.create_string("Sword");
diff --git a/chromium/third_party/flatbuffers/src/samples/sample_flexbuffers.rs b/chromium/third_party/flatbuffers/src/samples/sample_flexbuffers.rs
index 237dbf0b0f7..074a20017b2 100644
--- a/chromium/third_party/flatbuffers/src/samples/sample_flexbuffers.rs
+++ b/chromium/third_party/flatbuffers/src/samples/sample_flexbuffers.rs
@@ -16,7 +16,6 @@ extern crate flexbuffers;
use flexbuffers::{BitWidth, Builder, Reader, ReaderError};
-
// In this Example we're creating a monster that corresponds to the following JSON:
// {
// "coins": [5, 10, 25, 25, 25, 100],
diff --git a/chromium/third_party/flatbuffers/src/scripts/check-generate-code.sh b/chromium/third_party/flatbuffers/src/scripts/check-generate-code.sh
index 1f2d84d4d56..4cf27423105 100755
--- a/chromium/third_party/flatbuffers/src/scripts/check-generate-code.sh
+++ b/chromium/third_party/flatbuffers/src/scripts/check-generate-code.sh
@@ -15,6 +15,17 @@
# limitations under the License.
set -e
+if ! git diff --quiet; then
+ echo >&2
+ echo "ERROR: ********************************************************" >&2
+ echo "ERROR: The following differences were found after building." >&2
+ echo "ERROR: Perhaps there is a difference in the flags for the" >&2
+ echo "ERROR: CMakeLists.txt vs the tests/generate_code.sh script?" >&2
+ echo "ERROR: ********************************************************" >&2
+ echo >&2
+ git diff --binary --exit-code
+fi
+
cd tests
./generate_code.sh
cd ..
diff --git a/chromium/third_party/flatbuffers/src/scripts/generate_code.py b/chromium/third_party/flatbuffers/src/scripts/generate_code.py
new file mode 100755
index 00000000000..13968c5ece0
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/scripts/generate_code.py
@@ -0,0 +1,381 @@
+#!/usr/bin/env python3
+#
+# Copyright 2021 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import filecmp
+import glob
+import platform
+import shutil
+import subprocess
+import sys
+from pathlib import Path
+
+# Get the path where this script is located so we can invoke the script from
+# any directory and have the paths work correctly.
+script_path = Path(__file__).parent.resolve()
+
+# Get the root path as an absolute path, so all derived paths are absolute.
+root_path = script_path.parent.absolute()
+
+# Get the location of the flatc executable, reading from the first command line
+# argument or defaulting to default names.
+flatc_exe = Path(
+ ("flatc" if not platform.system() == "Windows" else "flatc.exe")
+ if len(sys.argv) <= 1
+ else sys.argv[1]
+)
+
+# Find and assert flatc compiler is present.
+if root_path in flatc_exe.parents:
+ flatc_exe = flatc_exe.relative_to(root_path)
+flatc_path = Path(root_path, flatc_exe)
+assert flatc_path.exists(), "Cannot find the flatc compiler " + str(flatc_path)
+
+# Specify the other paths that will be referenced
+tests_path = Path(root_path, "tests")
+samples_path = Path(root_path, "samples")
+reflection_path = Path(root_path, "reflection")
+
+# Execute the flatc compiler with the specified parameters
+def flatc(
+ options, schema, prefix=None, include=None, data=None, cwd=tests_path
+):
+ cmd = [str(flatc_path)] + options
+ if prefix:
+ cmd += ["-o"] + [prefix]
+ if include:
+ cmd += ["-I"] + [include]
+ cmd += [schema] if isinstance(schema, str) else schema
+ if data:
+ cmd += [data] if isinstance(data, str) else data
+ subprocess.run(cmd, cwd=cwd)
+
+
+# Glob a pattern relative to file path
+def glob(path, pattern):
+ return [str(p) for p in path.glob(pattern)]
+
+
+# flatc options that are shared
+BASE_OPTS = ["--reflect-names", "--gen-mutable", "--gen-object-api"]
+NO_INCL_OPTS = BASE_OPTS + ["--no-includes"]
+
+# Language specific options
+CS_OPTS = ["--csharp", "--cs-gen-json-serializer"]
+CPP_OPTS = [
+ "--cpp",
+ "--gen-compare",
+ "--cpp-ptr-type",
+ "flatbuffers::unique_ptr",
+]
+CPP_17_OPTS = NO_INCL_OPTS + [
+ "--cpp",
+ "--cpp-std",
+ "c++17",
+ "--cpp-static-reflection",
+ "--gen-object-api",
+]
+RUST_OPTS = BASE_OPTS + ["--rust", "--gen-all", "--gen-name-strings"]
+TS_OPTS = ["--ts", "--gen-name-strings"]
+LOBSTER_OPTS = ["--lobster"]
+SWIFT_OPTS = ["--swift", "--gen-json-emit", "--bfbs-filenames", str(tests_path)]
+JAVA_OPTS = ["--java"]
+KOTLIN_OPTS = ["--kotlin"]
+PHP_OPTS = ["--php"]
+DART_OPTS = ["--dart"]
+PYTHON_OPTS = ["--python"]
+BINARY_OPTS = ["-b", "--schema", "--bfbs-comments", "--bfbs-builtins"]
+
+# Basic Usage
+
+flatc(
+ NO_INCL_OPTS
+ + CPP_OPTS
+ + CS_OPTS
+ + TS_OPTS
+ + [
+ "--binary",
+ "--java",
+ "--kotlin",
+ "--dart",
+ "--go",
+ "--lobster",
+ "--lua",
+ "--php",
+ ],
+ schema="monster_test.fbs",
+ include="include_test",
+ data="monsterdata_test.json",
+)
+
+flatc(
+ NO_INCL_OPTS + CPP_OPTS + ["--grpc"],
+ schema="monster_test.fbs",
+ include="include_test",
+ data="monsterdata_test.json",
+)
+
+flatc(
+ RUST_OPTS,
+ schema="monster_test.fbs",
+ include="include_test",
+ prefix="monster_test",
+ data="monsterdata_test.json",
+)
+
+flatc(
+ options=BASE_OPTS + ["--python"],
+ schema="monster_test.fbs",
+ include="include_test",
+ data="monsterdata_test.json",
+)
+
+# For Rust we currently generate two independent schemas, with namespace_test2
+# duplicating the types in namespace_test1
+flatc(
+ RUST_OPTS,
+ prefix="namespace_test",
+ schema=[
+ "namespace_test/namespace_test1.fbs",
+ "namespace_test/namespace_test2.fbs",
+ ],
+)
+
+flatc(
+ BASE_OPTS
+ + CPP_OPTS
+ + CS_OPTS
+ + TS_OPTS
+ + JAVA_OPTS
+ + KOTLIN_OPTS
+ + PHP_OPTS,
+ prefix="union_vector",
+ schema="union_vector/union_vector.fbs",
+)
+
+flatc(
+ BASE_OPTS + TS_OPTS + ["--gen-name-strings", "--gen-mutable"],
+ include="include_test",
+ schema="monster_test.fbs",
+)
+
+flatc(
+ BASE_OPTS + TS_OPTS + ["-b"],
+ include="include_test",
+ schema="monster_test.fbs",
+ data="unicode_test.json",
+)
+
+flatc(
+ BASE_OPTS + TS_OPTS + ["--gen-name-strings"],
+ prefix="union_vector",
+ schema="union_vector/union_vector.fbs",
+)
+
+flatc(
+ RUST_OPTS,
+ prefix="include_test1",
+ include="include_test",
+ schema="include_test/include_test1.fbs",
+)
+
+flatc(
+ RUST_OPTS,
+ prefix="include_test2",
+ include="include_test",
+ schema="include_test/sub/include_test2.fbs",
+)
+
+flatc(
+ BINARY_OPTS + ["--bfbs-filenames", str(tests_path)],
+ include="include_test",
+ schema="monster_test.fbs",
+)
+
+flatc(
+ CPP_OPTS
+ + NO_INCL_OPTS
+ + [
+ "--bfbs-comments",
+ "--bfbs-builtins",
+ "--bfbs-gen-embed",
+ "--bfbs-filenames",
+ str(tests_path),
+ ],
+ include="include_test",
+ schema="monster_test.fbs",
+)
+
+flatc(
+ BINARY_OPTS + ["--bfbs-filenames", str(tests_path)],
+ include="include_test",
+ schema="arrays_test.fbs",
+)
+
+flatc(
+ ["--jsonschema", "--schema"],
+ include="include_test",
+ schema="monster_test.fbs",
+)
+
+flatc(
+ CPP_OPTS + CS_OPTS + NO_INCL_OPTS + JAVA_OPTS + KOTLIN_OPTS + PYTHON_OPTS,
+ schema="monster_extra.fbs",
+ data="monsterdata_extra.json",
+)
+
+flatc(
+ CPP_OPTS
+ + CS_OPTS
+ + NO_INCL_OPTS
+ + JAVA_OPTS
+ + ["--jsonschema", "--scoped-enums"],
+ schema="arrays_test.fbs",
+)
+
+flatc(
+ RUST_OPTS,
+ prefix="arrays_test",
+ schema="arrays_test.fbs",
+)
+
+flatc(
+ BASE_OPTS + PYTHON_OPTS,
+ schema="arrays_test.fbs",
+)
+
+flatc(
+ DART_OPTS + ["--gen-object-api"],
+ schema="monster_extra.fbs",
+)
+
+
+# Optional Scalars
+optional_scalars_schema = "optional_scalars.fbs"
+flatc(
+ ["--java", "--kotlin", "--lobster", "--ts"], schema=optional_scalars_schema
+)
+
+flatc(["--csharp", "--gen-object-api"], schema=optional_scalars_schema)
+
+flatc(RUST_OPTS, prefix="optional_scalars", schema=optional_scalars_schema)
+
+flatc(NO_INCL_OPTS + CPP_OPTS, schema=optional_scalars_schema)
+
+# Generate string/vector default code for tests
+flatc(RUST_OPTS, prefix="more_defaults", schema="more_defaults.fbs")
+
+# Generate the schema evolution tests
+flatc(
+ CPP_OPTS + ["--scoped-enums"],
+ prefix="evolution_test",
+ schema=glob(tests_path, "evolution_test/evolution_v*.fbs"),
+)
+
+# Generate the keywords tests
+flatc(BASE_OPTS + CS_OPTS, schema="keyword_test.fbs")
+flatc(RUST_OPTS, prefix="keyword_test", schema="keyword_test.fbs")
+flatc(
+ BASE_OPTS + CS_OPTS + ["--cs-global-alias", "--gen-onefile"],
+ prefix="nested_namespace_test",
+ schema=glob(tests_path, "nested_namespace_test/nested_namespace_test*.fbs"),
+)
+
+# Swift Tests
+swift_prefix = "FlatBuffers.Test.Swift/Tests/FlatBuffers.Test.SwiftTests"
+flatc(
+ SWIFT_OPTS + NO_INCL_OPTS + ["--grpc"],
+ schema="monster_test.fbs",
+ include="include_test",
+ prefix=swift_prefix,
+)
+flatc(
+ SWIFT_OPTS + BASE_OPTS,
+ schema="union_vector/union_vector.fbs",
+ prefix=swift_prefix,
+)
+flatc(SWIFT_OPTS, schema="optional_scalars.fbs", prefix=swift_prefix)
+flatc(
+ SWIFT_OPTS + ["--gen-object-api"],
+ schema="more_defaults.fbs",
+ prefix=swift_prefix,
+)
+
+# --filename-suffix and --filename-ext tests
+flatc(
+ CPP_OPTS
+ + NO_INCL_OPTS
+ + ["--filename-suffix", "_suffix", "--filename-ext", "hpp"],
+ include="include_test",
+ schema="monster_test.fbs",
+)
+orig_monster_file = Path(tests_path, "monster_test_generated.h")
+new_monster_file = Path(tests_path, "monster_test_suffix.hpp")
+assert (
+ new_monster_file.exists()
+), "filename suffix option did not produce a file"
+assert filecmp.cmp(
+ orig_monster_file, new_monster_file
+), "filename suffix option did not produce identical results"
+new_monster_file.unlink()
+
+# The c++17 flag requires Clang 6, GCC 7, MSVC 2017 (_MSC_VER >= 1914) or higher.
+cpp_17_prefix = "cpp17/generated_cpp17"
+flatc(
+ CPP_17_OPTS,
+ schema="monster_test.fbs",
+ include="include_test",
+ prefix=cpp_17_prefix,
+)
+flatc(
+ CPP_17_OPTS,
+ schema="optional_scalars.fbs",
+ prefix=cpp_17_prefix,
+)
+flatc(
+ CPP_17_OPTS,
+ schema="union_vector/union_vector.fbs",
+ prefix=cpp_17_prefix,
+)
+
+# Sample files
+samples_schema = "monster.fbs"
+flatc(
+ BASE_OPTS + CPP_OPTS + LOBSTER_OPTS, schema=samples_schema, cwd=samples_path
+)
+flatc(
+ RUST_OPTS, prefix="rust_generated", schema=samples_schema, cwd=samples_path
+)
+flatc(
+ BINARY_OPTS + ["--bfbs-filenames", str(samples_path)],
+ schema=samples_schema,
+ cwd=samples_path,
+)
+
+# Reflection
+temp_dir = ".tmp"
+flatc(
+ ["-c", "--cpp-std", "c++0x", "--no-prefix"],
+ prefix=temp_dir,
+ schema="reflection.fbs",
+ cwd=reflection_path,
+)
+new_reflection_file = Path(reflection_path, temp_dir, "reflection_generated.h")
+original_reflection_file = Path(
+ root_path, "include/flatbuffers/reflection_generated.h"
+)
+if not filecmp.cmp(new_reflection_file, original_reflection_file):
+ shutil.move(new_reflection_file, original_reflection_file)
+shutil.rmtree(Path(reflection_path, temp_dir))
diff --git a/chromium/third_party/flatbuffers/src/src/BUILD.bazel b/chromium/third_party/flatbuffers/src/src/BUILD.bazel
index d41d0fc528c..a1cad077fbd 100644
--- a/chromium/third_party/flatbuffers/src/src/BUILD.bazel
+++ b/chromium/third_party/flatbuffers/src/src/BUILD.bazel
@@ -17,6 +17,15 @@ cc_library(
"util.cpp",
],
hdrs = ["//:public_headers"],
+ linkopts = select({
+ # TODO: Bazel uses `clang` instead of `clang++` to link
+ # C++ code on BSD. Temporarily adding these linker flags while
+ # we wait for Bazel to resolve
+ # https://github.com/bazelbuild/bazel/issues/12023.
+ "//:platform_freebsd": ["-lm"],
+ "//:platform_openbsd": ["-lm"],
+ "//conditions:default": [],
+ }),
strip_include_prefix = "/include",
visibility = ["//:__pkg__"],
)
diff --git a/chromium/third_party/flatbuffers/src/src/code_generators.cpp b/chromium/third_party/flatbuffers/src/src/code_generators.cpp
index 745406ba95b..38e3b81c651 100644
--- a/chromium/third_party/flatbuffers/src/src/code_generators.cpp
+++ b/chromium/third_party/flatbuffers/src/src/code_generators.cpp
@@ -60,7 +60,7 @@ void CodeWriter::operator+=(std::string text) {
// Update the text to everything after the }}.
text = text.substr(end + 2);
}
- if (!text.empty() && string_back(text) == '\\') {
+ if (!text.empty() && text.back() == '\\') {
text.pop_back();
ignore_ident_ = true;
stream_ << text;
@@ -314,14 +314,10 @@ std::string SimpleFloatConstantGenerator::NaN(float v) const {
return this->NaN(static_cast<double>(v));
}
-std::string JavaCSharpMakeRule(const Parser &parser, const std::string &path,
+std::string JavaCSharpMakeRule(const bool java, const Parser &parser,
+ const std::string &path,
const std::string &file_name) {
- FLATBUFFERS_ASSERT(parser.opts.lang == IDLOptions::kJava ||
- parser.opts.lang == IDLOptions::kCSharp);
-
- std::string file_extension =
- (parser.opts.lang == IDLOptions::kJava) ? ".java" : ".cs";
-
+ const std::string file_extension = java ? ".java" : ".cs";
std::string make_rule;
for (auto it = parser.enums_.vec.begin(); it != parser.enums_.vec.end();
@@ -350,6 +346,15 @@ std::string JavaCSharpMakeRule(const Parser &parser, const std::string &path,
return make_rule;
}
+std::string JavaMakeRule(const Parser &parser, const std::string &path,
+ const std::string &file_name) {
+ return JavaCSharpMakeRule(true, parser, path, file_name);
+}
+std::string CSharpMakeRule(const Parser &parser, const std::string &path,
+ const std::string &file_name) {
+ return JavaCSharpMakeRule(false, parser, path, file_name);
+}
+
std::string BinaryFileName(const Parser &parser, const std::string &path,
const std::string &file_name) {
auto ext = parser.file_extension_.length() ? parser.file_extension_ : "bin";
diff --git a/chromium/third_party/flatbuffers/src/src/flatc.cpp b/chromium/third_party/flatbuffers/src/src/flatc.cpp
index 9afca6881fe..ae2aa5548c3 100644
--- a/chromium/third_party/flatbuffers/src/src/flatc.cpp
+++ b/chromium/third_party/flatbuffers/src/src/flatc.cpp
@@ -113,6 +113,7 @@ std::string FlatCompiler::GetUsageString(const char *program_name) const {
" If the language uses a single file for output (by default\n"
" the case for C++ and JS), all code will end up in this one\n"
" file.\n"
+ " --gen-json-emit Generates encoding code which emits Flatbuffers into JSON\n"
" --cpp-include Adds an #include in generated file.\n"
" --cpp-ptr-type T Set object API pointer type (default std::unique_ptr).\n"
" --cpp-str-type T Set object API string type (default std::string).\n"
@@ -179,6 +180,7 @@ std::string FlatCompiler::GetUsageString(const char *program_name) const {
" --flexbuffers Used with \"binary\" and \"json\" options, it generates\n"
" data using schema-less FlexBuffers.\n"
" --no-warnings Inhibit all warning messages.\n"
+ " --cs-global-alias Prepend \"global::\" to all user generated csharp classes and structs.\n"
"FILEs may be schemas (must end in .fbs), binary schemas (must end in .bfbs),\n"
"or JSON files (conforming to preceding schema). FILEs after the -- must be\n"
"binary flatbuffer format files.\n"
@@ -307,6 +309,8 @@ int FlatCompiler::Compile(int argc, const char **argv) {
opts.java_checkerframework = true;
} else if (arg == "--gen-generated") {
opts.gen_generated = true;
+ } else if (arg == "--gen-json-emit") {
+ opts.gen_json_coders = true;
} else if (arg == "--object-prefix") {
if (++argi >= argc) Error("missing prefix following: " + arg, true);
opts.object_prefix = argv[argi];
@@ -391,6 +395,8 @@ int FlatCompiler::Compile(int argc, const char **argv) {
opts.cpp_std = arg.substr(std::string("--cpp-std=").size());
} else if (arg == "--cpp-static-reflection") {
opts.cpp_static_reflection = true;
+ } else if (arg == "--cs-global-alias") {
+ opts.cs_global_alias = true;
} else {
for (size_t i = 0; i < params_.num_generators; ++i) {
if (arg == params_.generators[i].generator_opt_long ||
@@ -482,16 +488,17 @@ int FlatCompiler::Compile(int argc, const char **argv) {
contents.length() != strlen(contents.c_str())) {
Error("input file appears to be binary: " + filename, true);
}
- if (is_schema) {
+ if (is_schema || is_binary_schema) {
// If we're processing multiple schemas, make sure to start each
// one from scratch. If it depends on previous schemas it must do
// so explicitly using an include.
parser.reset(new flatbuffers::Parser(opts));
}
+ // Try to parse the file contents (binary schema/flexbuffer/textual
+ // schema)
if (is_binary_schema) {
LoadBinarySchema(*parser.get(), filename, contents);
- }
- if (opts.use_flexbuffers) {
+ } else if (opts.use_flexbuffers) {
if (opts.lang_to_generate == IDLOptions::kJson) {
parser->flex_root_ = flexbuffers::GetRoot(
reinterpret_cast<const uint8_t *>(contents.c_str()),
@@ -512,7 +519,7 @@ int FlatCompiler::Compile(int argc, const char **argv) {
}
if ((is_schema || is_binary_schema) && !conform_to_schema.empty()) {
auto err = parser->ConformTo(conform_parser);
- if (!err.empty()) Error("schemas don\'t conform: " + err);
+ if (!err.empty()) Error("schemas don\'t conform: " + err, false);
}
if (schema_binary || opts.binary_schema_gen_embed) {
parser->Serialize();
@@ -526,7 +533,6 @@ int FlatCompiler::Compile(int argc, const char **argv) {
flatbuffers::StripPath(flatbuffers::StripExtension(filename));
for (size_t i = 0; i < params_.num_generators; ++i) {
- parser->opts.lang = params_.generators[i].lang;
if (generator_enabled[i]) {
if (!print_make_rules) {
flatbuffers::EnsureDirExists(output_path);
@@ -577,6 +583,9 @@ int FlatCompiler::Compile(int argc, const char **argv) {
// in any files coming up next.
parser->MarkGenerated();
}
+ if (opts.lang_to_generate & IDLOptions::kRust && !parser->opts.one_file) {
+ GenerateRustModuleRootFile(*parser, output_path);
+ }
return 0;
}
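
The GenerateRustModuleRootFile call added above makes flatc emit a module-root file for multi-file Rust output, which is why sample_binary.rs above could switch from an explicit #[path] include to a plain `mod rust_generated;`. A hypothetical illustration of the kind of root file this step produces (the real generated contents may differ):

// rust_generated/mod.rs (illustrative only, not the exact generated output)
pub mod my_game {
    pub mod sample {
        mod monster_generated;
        pub use self::monster_generated::*;
        mod weapon_generated;
        pub use self::weapon_generated::*;
        mod vec_3_generated;
        pub use self::vec_3_generated::*;
        // ...one mod/pub use pair per generated file (color, equipment, ...)
    }
}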
diff --git a/chromium/third_party/flatbuffers/src/src/flatc_main.cpp b/chromium/third_party/flatbuffers/src/src/flatc_main.cpp
index b1966660cab..90c0e08076a 100644
--- a/chromium/third_party/flatbuffers/src/src/flatc_main.cpp
+++ b/chromium/third_party/flatbuffers/src/src/flatc_main.cpp
@@ -23,15 +23,15 @@ static void Warn(const flatbuffers::FlatCompiler *flatc,
const std::string &warn, bool show_exe_name) {
(void)flatc;
if (show_exe_name) { printf("%s: ", g_program_name); }
- printf("warning: %s\n", warn.c_str());
+ fprintf(stderr, "warning: %s\n", warn.c_str());
}
static void Error(const flatbuffers::FlatCompiler *flatc,
const std::string &err, bool usage, bool show_exe_name) {
if (show_exe_name) { printf("%s: ", g_program_name); }
- printf("error: %s\n", err.c_str());
+ fprintf(stderr, "error: %s\n", err.c_str());
if (usage && flatc) {
- printf("%s", flatc->GetUsageString(g_program_name).c_str());
+ fprintf(stderr, "%s", flatc->GetUsageString(g_program_name).c_str());
}
exit(1);
}
@@ -69,8 +69,7 @@ int main(int argc, const char *argv[]) {
"Generate Go files for tables/structs", nullptr },
{ flatbuffers::GenerateJava, "-j", "--java", "Java", true,
flatbuffers::GenerateJavaGRPC, flatbuffers::IDLOptions::kJava,
- "Generate Java classes for tables/structs",
- flatbuffers::JavaCSharpMakeRule },
+ "Generate Java classes for tables/structs", flatbuffers::JavaMakeRule },
{ flatbuffers::GenerateDart, "-d", "--dart", "Dart", true, nullptr,
flatbuffers::IDLOptions::kDart,
"Generate Dart classes for tables/structs", flatbuffers::DartMakeRule },
@@ -79,8 +78,7 @@ int main(int argc, const char *argv[]) {
"Generate TypeScript code for tables/structs", flatbuffers::TSMakeRule },
{ flatbuffers::GenerateCSharp, "-n", "--csharp", "C#", true, nullptr,
flatbuffers::IDLOptions::kCSharp,
- "Generate C# classes for tables/structs",
- flatbuffers::JavaCSharpMakeRule },
+ "Generate C# classes for tables/structs", flatbuffers::CSharpMakeRule },
{ flatbuffers::GeneratePython, "-p", "--python", "Python", true,
flatbuffers::GeneratePythonGRPC, flatbuffers::IDLOptions::kPython,
"Generate Python files for tables/structs", nullptr },
diff --git a/chromium/third_party/flatbuffers/src/src/idl_gen_cpp.cpp b/chromium/third_party/flatbuffers/src/src/idl_gen_cpp.cpp
index c80ec90f25b..39d09786f91 100644
--- a/chromium/third_party/flatbuffers/src/src/idl_gen_cpp.cpp
+++ b/chromium/third_party/flatbuffers/src/src/idl_gen_cpp.cpp
@@ -505,6 +505,17 @@ class CppGenerator : public BaseGenerator {
code_ += " return flatbuffers::GetMutableRoot<{{STRUCT_NAME}}>(buf);";
code_ += "}";
code_ += "";
+
+ code_ += "inline \\";
+ code_ +=
+ "{{CPP_NAME}} "
+ "*{{NULLABLE_EXT}}GetMutableSizePrefixed{{STRUCT_NAME}}(void "
+ "*buf) {";
+ code_ +=
+ " return "
+ "flatbuffers::GetMutableSizePrefixedRoot<{{CPP_NAME}}>(buf);";
+ code_ += "}";
+ code_ += "";
}
if (parser_.file_identifier_.length()) {
@@ -691,7 +702,9 @@ class CppGenerator : public BaseGenerator {
}
case BASE_TYPE_UNION:
// fall through
- default: { return "void"; }
+ default: {
+ return "void";
+ }
}
}
@@ -929,10 +942,12 @@ class CppGenerator : public BaseGenerator {
}
std::string UnionVectorVerifySignature(const EnumDef &enum_def) {
- return "bool Verify" + Name(enum_def) + "Vector" +
+ auto name = Name(enum_def);
+ auto type = opts_.scoped_enums ? name : "uint8_t";
+ return "bool Verify" + name + "Vector" +
"(flatbuffers::Verifier &verifier, " +
"const flatbuffers::Vector<flatbuffers::Offset<void>> *values, " +
- "const flatbuffers::Vector<uint8_t> *types)";
+ "const flatbuffers::Vector<" + type + "> *types)";
}
std::string UnionUnPackSignature(const EnumDef &enum_def, bool inclass) {
@@ -1031,10 +1046,9 @@ class CppGenerator : public BaseGenerator {
? bt - BASE_TYPE_UTYPE + ET_UTYPE
: ET_SEQUENCE;
int ref_idx = -1;
- std::string ref_name =
- type.struct_def
- ? WrapInNameSpace(*type.struct_def)
- : type.enum_def ? WrapInNameSpace(*type.enum_def) : "";
+ std::string ref_name = type.struct_def ? WrapInNameSpace(*type.struct_def)
+ : type.enum_def ? WrapInNameSpace(*type.enum_def)
+ : "";
if (!ref_name.empty()) {
auto rit = type_refs.begin();
for (; rit != type_refs.end(); ++rit) {
@@ -1298,11 +1312,30 @@ class CppGenerator : public BaseGenerator {
}
if (opts_.generate_object_based_api && enum_def.is_union) {
- // Generate a union type
+ // Generate a union type and a trait type for it.
code_.SetValue("NAME", Name(enum_def));
FLATBUFFERS_ASSERT(enum_def.Lookup("NONE"));
code_.SetValue("NONE", GetEnumValUse(enum_def, *enum_def.Lookup("NONE")));
+ if (!enum_def.uses_multiple_type_instances) {
+ for (auto it = enum_def.Vals().begin(); it != enum_def.Vals().end();
+ ++it) {
+ const auto &ev = **it;
+
+ if (it == enum_def.Vals().begin()) {
+ code_ += "template<typename T> struct {{NAME}}UnionTraits {";
+ } else {
+ auto name = GetUnionElement(ev, true, opts_);
+ code_ += "template<> struct {{NAME}}UnionTraits<" + name + "> {";
+ }
+
+ auto value = GetEnumValUse(enum_def, ev);
+ code_ += " static const {{ENUM_NAME}} enum_value = " + value + ";";
+ code_ += "};";
+ code_ += "";
+ }
+ }
+
code_ += "struct {{NAME}}Union {";
code_ += " {{NAME}} type;";
code_ += " void *value;";
@@ -1326,18 +1359,15 @@ class CppGenerator : public BaseGenerator {
code_ += " void Reset();";
code_ += "";
if (!enum_def.uses_multiple_type_instances) {
- code_ += "#ifndef FLATBUFFERS_CPP98_STL";
code_ += " template <typename T>";
code_ += " void Set(T&& val) {";
- code_ += " using RT = typename std::remove_reference<T>::type;";
+ code_ += " typedef typename std::remove_reference<T>::type RT;";
code_ += " Reset();";
- code_ +=
- " type = {{NAME}}Traits<typename RT::TableType>::enum_value;";
+ code_ += " type = {{NAME}}UnionTraits<RT>::enum_value;";
code_ += " if (type != {{NONE}}) {";
code_ += " value = new RT(std::forward<T>(val));";
code_ += " }";
code_ += " }";
- code_ += "#endif // FLATBUFFERS_CPP98_STL";
code_ += "";
}
code_ += " " + UnionUnPackSignature(enum_def, true) + ";";
@@ -1483,6 +1513,7 @@ class CppGenerator : public BaseGenerator {
if (opts_.generate_object_based_api) {
// Generate union Unpack() and Pack() functions.
code_ += "inline " + UnionUnPackSignature(enum_def, false) + " {";
+ code_ += " (void)resolver;";
code_ += " switch (type) {";
for (auto it = enum_def.Vals().begin(); it != enum_def.Vals().end();
++it) {
@@ -1513,6 +1544,7 @@ class CppGenerator : public BaseGenerator {
code_ += "";
code_ += "inline " + UnionPackSignature(enum_def, false) + " {";
+ code_ += " (void)_rehasher;";
code_ += " switch (type) {";
for (auto it = enum_def.Vals().begin(); it != enum_def.Vals().end();
++it) {
@@ -1681,6 +1713,8 @@ class CppGenerator : public BaseGenerator {
} else {
return "0";
}
+ } else if (IsStruct(type) && (field.value.constant == "0")) {
+ return "nullptr";
} else {
return GenDefaultConstant(field);
}
@@ -1965,9 +1999,19 @@ class CppGenerator : public BaseGenerator {
}
default: break;
}
+
+ auto nfn = GetNestedFlatBufferName(field);
+ if (!nfn.empty()) {
+ code_.SetValue("CPP_NAME", nfn);
+ // FIXME: file_identifier.
+ code_ += "{{PRE}}verifier.VerifyNestedFlatBuffer<{{CPP_NAME}}>"
+ "({{NAME}}(), nullptr)\\";
+ }
+ break;
+ }
+ default: {
break;
}
- default: { break; }
}
}
@@ -2152,14 +2196,8 @@ class CppGenerator : public BaseGenerator {
// };
//
void GenFieldNames(const StructDef &struct_def) {
- auto non_deprecated_field_count = std::count_if(
- struct_def.fields.vec.begin(), struct_def.fields.vec.end(),
- [](const FieldDef *field) { return !field->deprecated; });
code_ += " static constexpr std::array<\\";
- code_.SetValue(
- "FIELD_COUNT",
- std::to_string(static_cast<long long>(non_deprecated_field_count)));
- code_ += "const char *, {{FIELD_COUNT}}> field_names = {\\";
+ code_ += "const char *, fields_number> field_names = {\\";
if (struct_def.fields.vec.empty()) {
code_ += "};";
return;
@@ -2181,7 +2219,7 @@ class CppGenerator : public BaseGenerator {
}
void GenFieldsNumber(const StructDef &struct_def) {
- auto non_deprecated_field_count = std::count_if(
+ const auto non_deprecated_field_count = std::count_if(
struct_def.fields.vec.begin(), struct_def.fields.vec.end(),
[](const FieldDef *field) { return !field->deprecated; });
code_.SetValue(
@@ -2205,9 +2243,9 @@ class CppGenerator : public BaseGenerator {
code_ +=
" static constexpr auto fully_qualified_name = "
"\"{{FULLY_QUALIFIED_NAME}}\";";
+ GenFieldsNumber(struct_def);
GenFieldNames(struct_def);
GenFieldTypeHelper(struct_def);
- GenFieldsNumber(struct_def);
}
code_ += "};";
code_ += "";
@@ -2228,15 +2266,25 @@ class CppGenerator : public BaseGenerator {
code_.SetValue("FIELD_VALUE",
GenUnderlyingCast(field, false, "_" + Name(field)));
- code_ +=
- " bool mutate_{{FIELD_NAME}}({{FIELD_TYPE}} "
- "_{{FIELD_NAME}}) {";
+ code_ += " bool mutate_{{FIELD_NAME}}({{FIELD_TYPE}} _{{FIELD_NAME}}\\";
if (false == field.IsScalarOptional()) {
code_.SetValue("DEFAULT_VALUE", GenDefaultConstant(field));
+ code_.SetValue(
+ "INTERFACE_DEFAULT_VALUE",
+ GenUnderlyingCast(field, true, GenDefaultConstant(field)));
+
+ // GenUnderlyingCast for a bool field generates 0 != 0,
+ // so the type has to be checked and the appropriate default chosen.
+ if (IsBool(field.value.type.base_type)) {
+ code_ += " = {{DEFAULT_VALUE}}) {";
+ } else {
+ code_ += " = {{INTERFACE_DEFAULT_VALUE}}) {";
+ }
code_ +=
" return {{SET_FN}}({{OFFSET_NAME}}, {{FIELD_VALUE}}, "
"{{DEFAULT_VALUE}});";
} else {
+ code_ += ") {";
code_ += " return {{SET_FN}}({{OFFSET_NAME}}, {{FIELD_VALUE}});";
}
code_ += " }";
@@ -2254,6 +2302,21 @@ class CppGenerator : public BaseGenerator {
}
}
+ std::string GetNestedFlatBufferName(const FieldDef &field) {
+ auto nested = field.attributes.Lookup("nested_flatbuffer");
+ if (!nested) return "";
+ std::string qualified_name = nested->constant;
+ auto nested_root = parser_.LookupStruct(nested->constant);
+ if (nested_root == nullptr) {
+ qualified_name =
+ parser_.current_namespace_->GetFullyQualifiedName(nested->constant);
+ nested_root = parser_.LookupStruct(qualified_name);
+ }
+ FLATBUFFERS_ASSERT(nested_root); // Guaranteed to exist by parser.
+ (void)nested_root;
+ return TranslateNameSpace(qualified_name);
+ }
+
// Generate an accessor struct, builder structs & function for a table.
void GenTable(const StructDef &struct_def) {
if (opts_.generate_object_based_api) { GenNativeTable(struct_def); }
@@ -2317,19 +2380,9 @@ class CppGenerator : public BaseGenerator {
GenTableFieldGetter(field);
if (opts_.mutable_buffer) { GenTableFieldSetter(field); }
- auto nested = field.attributes.Lookup("nested_flatbuffer");
- if (nested) {
- std::string qualified_name = nested->constant;
- auto nested_root = parser_.LookupStruct(nested->constant);
- if (nested_root == nullptr) {
- qualified_name = parser_.current_namespace_->GetFullyQualifiedName(
- nested->constant);
- nested_root = parser_.LookupStruct(qualified_name);
- }
- FLATBUFFERS_ASSERT(nested_root); // Guaranteed to exist by parser.
- (void)nested_root;
- code_.SetValue("CPP_NAME", TranslateNameSpace(qualified_name));
-
+ auto nfn = GetNestedFlatBufferName(field);
+ if (!nfn.empty()) {
+ code_.SetValue("CPP_NAME", nfn);
code_ += " const {{CPP_NAME}} *{{FIELD_NAME}}_nested_root() const {";
code_ +=
" return "
@@ -2763,9 +2816,19 @@ class CppGenerator : public BaseGenerator {
code += "/* else do nothing */";
}
} else {
+ const bool is_pointer =
+ field.value.type.VectorType().base_type == BASE_TYPE_STRUCT &&
+ !IsStruct(field.value.type.VectorType());
+ if (is_pointer) {
+ code += "if(_o->" + name + "[_i]" + ") { ";
+ code += indexing + "->UnPackTo(_o->" + name +
+ "[_i].get(), _resolver);";
+ code += " } else { ";
+ }
code += "_o->" + name + "[_i]" + access + " = ";
code += GenUnpackVal(field.value.type.VectorType(), indexing, true,
field);
+ if (is_pointer) { code += "; }"; }
}
code += "; } }";
}
@@ -2812,8 +2875,17 @@ class CppGenerator : public BaseGenerator {
} else {
// Generate code for assigning the value, of the form:
// _o->field = value;
+ const bool is_pointer =
+ field.value.type.base_type == BASE_TYPE_STRUCT &&
+ !IsStruct(field.value.type);
+ if (is_pointer) {
+ code += "{ if(_o->" + Name(field) + ") { ";
+ code += "_e->UnPackTo(_o->" + Name(field) + ".get(), _resolver);";
+ code += " } else { ";
+ }
code += "_o->" + Name(field) + " = ";
code += GenUnpackVal(field.value.type, "_e", false, field) + ";";
+ if (is_pointer) { code += " } }"; }
}
break;
}
@@ -2941,10 +3013,15 @@ class CppGenerator : public BaseGenerator {
}
case BASE_TYPE_UTYPE: {
value = StripUnionType(value);
- code += "_fbb.CreateVector<uint8_t>(" + value +
- ".size(), [](size_t i, _VectorArgs *__va) { "
- "return static_cast<uint8_t>(__va->_" +
- value + "[i].type); }, &_va)";
+ auto type = opts_.scoped_enums ? Name(*field.value.type.enum_def)
+ : "uint8_t";
+ auto enum_value = "__va->_" + value + "[i].type";
+ if (!opts_.scoped_enums)
+ enum_value = "static_cast<uint8_t>(" + enum_value + ")";
+
+ code += "_fbb.CreateVector<" + type + ">(" + value +
+ ".size(), [](size_t i, _VectorArgs *__va) { return " +
+ enum_value + "; }, &_va)";
break;
}
default: {
diff --git a/chromium/third_party/flatbuffers/src/src/idl_gen_csharp.cpp b/chromium/third_party/flatbuffers/src/src/idl_gen_csharp.cpp
index 681ab6d642f..4639e4995a5 100644
--- a/chromium/third_party/flatbuffers/src/src/idl_gen_csharp.cpp
+++ b/chromium/third_party/flatbuffers/src/src/idl_gen_csharp.cpp
@@ -16,15 +16,13 @@
// independent from idl_parser, since this code is not needed for most clients
+#include <unordered_set>
+
#include "flatbuffers/code_generators.h"
#include "flatbuffers/flatbuffers.h"
#include "flatbuffers/idl.h"
#include "flatbuffers/util.h"
-#if defined(FLATBUFFERS_CPP98_STL)
-# include <cctype>
-#endif // defined(FLATBUFFERS_CPP98_STL)
-
namespace flatbuffers {
static TypedFloatConstantGenerator CSharpFloatGen("Double.", "Single.", "NaN",
@@ -46,8 +44,100 @@ class CSharpGenerator : public BaseGenerator {
public:
CSharpGenerator(const Parser &parser, const std::string &path,
const std::string &file_name)
- : BaseGenerator(parser, path, file_name, "", ".", "cs"),
- cur_name_space_(nullptr) {}
+ : BaseGenerator(parser, path, file_name,
+ parser.opts.cs_global_alias ? "global::" : "", ".", "cs"),
+ cur_name_space_(nullptr) {
+ // clang-format off
+
+ // List of keywords retrieved from here:
+ // https://docs.microsoft.com/en-us/dotnet/csharp/language-reference/keywords/
+
+ // One per line so that comparisons to that list are easier
+
+ static const char *const keywords[] = {
+ "abstract",
+ "as",
+ "base",
+ "bool",
+ "break",
+ "byte",
+ "case",
+ "catch",
+ "char",
+ "checked",
+ "class",
+ "const",
+ "continue",
+ "decimal",
+ "default",
+ "delegate",
+ "do",
+ "double",
+ "else",
+ "enum",
+ "event",
+ "explicit",
+ "extern",
+ "false",
+ "finally",
+ "fixed",
+ "float",
+ "for",
+ "foreach",
+ "goto",
+ "if",
+ "implicit",
+ "in",
+ "int",
+ "interface",
+ "internal",
+ "is",
+ "lock",
+ "long",
+ "namespace",
+ "new",
+ "null",
+ "object",
+ "operator",
+ "out",
+ "override",
+ "params",
+ "private",
+ "protected",
+ "public",
+ "readonly",
+ "ref",
+ "return",
+ "sbyte",
+ "sealed",
+ "short",
+ "sizeof",
+ "stackalloc",
+ "static",
+ "string",
+ "struct",
+ "switch",
+ "this",
+ "throw",
+ "true",
+ "try",
+ "typeof",
+ "uint",
+ "ulong",
+ "unchecked",
+ "unsafe",
+ "ushort",
+ "using",
+ "virtual",
+ "void",
+ "volatile",
+ "while",
+ nullptr,
+ // clang-format on
+ };
+
+ for (auto kw = keywords; *kw; kw++) keywords_.insert(*kw);
+ }
CSharpGenerator &operator=(const CSharpGenerator &);
@@ -65,7 +155,7 @@ class CSharpGenerator : public BaseGenerator {
one_file_code += enumcode;
} else {
if (!SaveType(enum_def.name, *enum_def.defined_namespace, enumcode,
- false))
+ false, parser_.opts))
return false;
}
}
@@ -81,22 +171,45 @@ class CSharpGenerator : public BaseGenerator {
one_file_code += declcode;
} else {
if (!SaveType(struct_def.name, *struct_def.defined_namespace, declcode,
- true))
+ true, parser_.opts))
return false;
}
}
if (parser_.opts.one_file) {
return SaveType(file_name_, *parser_.current_namespace_, one_file_code,
- true);
+ true, parser_.opts);
}
return true;
}
+ private:
+ std::unordered_set<std::string> keywords_;
+
+ std::string EscapeKeyword(const std::string &name) const {
+ return keywords_.find(name) == keywords_.end() ? name : "@" + name;
+ }
+
+ std::string Name(const FieldDef &field) const {
+ std::string name = MakeCamel(field.name, true);
+ return EscapeKeyword(name);
+ }
+
+ std::string Name(const Definition &def) const {
+ return EscapeKeyword(def.name);
+ }
+
+ std::string NamespacedName(const Definition &def) const {
+ return WrapInNameSpace(def.defined_namespace, Name(def));
+ }
+
+ std::string Name(const EnumVal &ev) const { return EscapeKeyword(ev.name); }
+
// Save out the generated code for a single class while adding
// declaration boilerplate.
bool SaveType(const std::string &defname, const Namespace &ns,
- const std::string &classcode, bool needs_includes) const {
+ const std::string &classcode, bool needs_includes,
+ const IDLOptions &options) const {
if (!classcode.length()) return true;
std::string code =
@@ -117,7 +230,10 @@ class CSharpGenerator : public BaseGenerator {
}
code += classcode;
if (!namespace_name.empty()) { code += "\n}\n"; }
- auto filename = NamespaceDir(ns) + defname + ".cs";
+ auto filename = NamespaceDir(ns) + defname;
+ if (options.one_file) { filename += options.filename_suffix; }
+ filename +=
+ options.filename_extension.empty() ? ".cs" : options.filename_extension;
return SaveFile(filename.c_str(), code, false);
}
@@ -134,9 +250,9 @@ class CSharpGenerator : public BaseGenerator {
// clang-format on
if (enableLangOverrides) {
- if (IsEnum(type)) return WrapInNameSpace(*type.enum_def);
+ if (IsEnum(type)) return NamespacedName(*type.enum_def);
if (type.base_type == BASE_TYPE_STRUCT) {
- return "Offset<" + WrapInNameSpace(*type.struct_def) + ">";
+ return "Offset<" + NamespacedName(*type.struct_def) + ">";
}
}
@@ -151,7 +267,7 @@ class CSharpGenerator : public BaseGenerator {
switch (type.base_type) {
case BASE_TYPE_STRING: return "string";
case BASE_TYPE_VECTOR: return GenTypeGet(type.VectorType());
- case BASE_TYPE_STRUCT: return WrapInNameSpace(*type.struct_def);
+ case BASE_TYPE_STRUCT: return NamespacedName(*type.struct_def);
case BASE_TYPE_UNION: return "TTable";
default: return "Table";
}
@@ -165,12 +281,12 @@ class CSharpGenerator : public BaseGenerator {
}
std::string GenOffsetType(const StructDef &struct_def) const {
- return "Offset<" + WrapInNameSpace(struct_def) + ">";
+ return "Offset<" + NamespacedName(struct_def) + ">";
}
std::string GenOffsetConstruct(const StructDef &struct_def,
const std::string &variable_name) const {
- return "new Offset<" + WrapInNameSpace(struct_def) + ">(" + variable_name +
+ return "new Offset<" + NamespacedName(struct_def) + ">(" + variable_name +
")";
}
@@ -179,7 +295,7 @@ class CSharpGenerator : public BaseGenerator {
if (IsSeries(type)) {
return DestinationCast(type.VectorType());
} else {
- if (IsEnum(type)) return "(" + WrapInNameSpace(*type.enum_def) + ")";
+ if (IsEnum(type)) return "(" + NamespacedName(*type.enum_def) + ")";
}
return "";
}
@@ -190,17 +306,19 @@ class CSharpGenerator : public BaseGenerator {
// would be cast down to int before being put onto the buffer. In C#, one must
// directly cast an Enum to its underlying type, which is essential before
// putting it onto the buffer.
- std::string SourceCast(const Type &type) const {
+ std::string SourceCast(const Type &type,
+ const bool isOptional = false) const {
if (IsSeries(type)) {
return SourceCast(type.VectorType());
} else {
- if (IsEnum(type)) return "(" + GenTypeBasic(type, false) + ")";
+ if (IsEnum(type))
+ return "(" + GenTypeBasic(type, false) + (isOptional ? "?" : "") + ")";
}
return "";
}
- std::string SourceCastBasic(const Type &type) const {
- return IsScalar(type.base_type) ? SourceCast(type) : "";
+ std::string SourceCastBasic(const Type &type, const bool isOptional) const {
+ return IsScalar(type.base_type) ? SourceCast(type, isOptional) : "";
}
std::string GenEnumDefaultValue(const FieldDef &field) const {
@@ -208,7 +326,7 @@ class CSharpGenerator : public BaseGenerator {
FLATBUFFERS_ASSERT(value.type.enum_def);
auto &enum_def = *value.type.enum_def;
auto enum_val = enum_def.FindByValue(value.constant);
- return enum_val ? (WrapInNameSpace(enum_def) + "." + enum_val->name)
+ return enum_val ? (NamespacedName(enum_def) + "." + Name(*enum_val))
: value.constant;
}
@@ -252,7 +370,7 @@ class CSharpGenerator : public BaseGenerator {
switch (value.type.base_type) {
case BASE_TYPE_STRING: return "default(StringOffset)";
case BASE_TYPE_STRUCT:
- return "default(Offset<" + WrapInNameSpace(*value.type.struct_def) +
+ return "default(Offset<" + NamespacedName(*value.type.struct_def) +
">)";
case BASE_TYPE_VECTOR: return "default(VectorOffset)";
default: break;
@@ -293,14 +411,14 @@ class CSharpGenerator : public BaseGenerator {
} else {
code += "public ";
}
- code += "enum " + enum_def.name;
+ code += "enum " + Name(enum_def);
code += " : " + GenTypeBasic(enum_def.underlying_type, false);
code += "\n{\n";
for (auto it = enum_def.Vals().begin(); it != enum_def.Vals().end(); ++it) {
auto &ev = **it;
GenComment(ev.doc_comment, code_ptr, &comment_config, " ");
code += " ";
- code += ev.name + " = ";
+ code += Name(ev) + " = ";
code += enum_def.ToString(ev);
code += ",\n";
}
@@ -395,7 +513,8 @@ class CSharpGenerator : public BaseGenerator {
// don't clash, and to make it obvious these arguments are constructing
// a nested struct, prefix the name with the field name.
GenStructArgs(*field_type.struct_def, code_ptr,
- (nameprefix + (field.name + "_")).c_str(), array_cnt);
+ (nameprefix + (EscapeKeyword(field.name) + "_")).c_str(),
+ array_cnt);
} else {
code += ", ";
code += GenTypeBasic(type);
@@ -407,7 +526,7 @@ class CSharpGenerator : public BaseGenerator {
}
code += " ";
code += nameprefix;
- code += MakeCamel(field.name, true);
+ code += Name(field);
}
}
}
@@ -454,7 +573,7 @@ class CSharpGenerator : public BaseGenerator {
code += indent + " builder.Put";
code += GenMethod(type) + "(";
code += SourceCast(type);
- auto argname = nameprefix + MakeCamel(field.name, true);
+ auto argname = nameprefix + Name(field);
code += argname;
size_t array_cnt = index + (IsArray(field_type) ? 1 : 0);
if (array_cnt > 0) {
@@ -618,7 +737,8 @@ class CSharpGenerator : public BaseGenerator {
std::string dest_mask = "";
std::string dest_cast = DestinationCast(field.value.type);
std::string src_cast = SourceCast(field.value.type);
- std::string field_name_camel = MakeCamel(field.name, true);
+ std::string field_name_camel = Name(field);
+ if (field_name_camel == struct_def.name) { field_name_camel += "_"; }
std::string method_start =
" public " + type_name_dest + optional + " " + field_name_camel;
std::string obj = "(new " + type_name + "())";
@@ -745,8 +865,7 @@ class CSharpGenerator : public BaseGenerator {
HasUnionStringValue(*vectortype.enum_def)) {
code += member_suffix;
code += "}\n";
- code += " public string " + MakeCamel(field.name, true) +
- "AsString(int j)";
+ code += " public string " + Name(field) + "AsString(int j)";
code += offset_prefix + GenGetter(Type(BASE_TYPE_STRING));
code += "(" + index + ") : null";
}
@@ -759,8 +878,7 @@ class CSharpGenerator : public BaseGenerator {
if (HasUnionStringValue(*field.value.type.enum_def)) {
code += member_suffix;
code += "}\n";
- code += " public string " + MakeCamel(field.name, true) +
- "AsString()";
+ code += " public string " + Name(field) + "AsString()";
code += offset_prefix + GenGetter(Type(BASE_TYPE_STRING));
code += "(o + __p.bb_pos) : null";
}
@@ -795,7 +913,7 @@ class CSharpGenerator : public BaseGenerator {
code += member_suffix;
code += "}\n";
if (IsVector(field.value.type)) {
- code += " public int " + MakeCamel(field.name, true);
+ code += " public int " + Name(field);
code += "Length";
code += " { get";
code += offset_prefix;
@@ -810,9 +928,9 @@ class CSharpGenerator : public BaseGenerator {
for (auto kit = fields.begin(); kit != fields.end(); ++kit) {
auto &key_field = **kit;
if (key_field.key) {
- auto qualified_name = WrapInNameSpace(sd);
+ auto qualified_name = NamespacedName(sd);
code += " public " + qualified_name + "? ";
- code += MakeCamel(field.name, true) + "ByKey(";
+ code += Name(field) + "ByKey(";
code += GenTypeGet(key_field.value.type) + " key)";
code += offset_prefix;
code += qualified_name + ".__lookup_by_key(";
@@ -831,7 +949,7 @@ class CSharpGenerator : public BaseGenerator {
code += "#if ENABLE_SPAN_T\n";
code += " public Span<" + GenTypeBasic(field.value.type.VectorType()) +
"> Get";
- code += MakeCamel(field.name, true);
+ code += Name(field);
code += "Bytes() { return ";
code += "__p.__vector_as_span<" +
GenTypeBasic(field.value.type.VectorType()) + ">(";
@@ -841,7 +959,7 @@ class CSharpGenerator : public BaseGenerator {
code += "); }\n";
code += "#else\n";
code += " public ArraySegment<byte>? Get";
- code += MakeCamel(field.name, true);
+ code += Name(field);
code += "Bytes() { return ";
code += "__p.__vector_as_arraysegment(";
code += NumToString(field.value.offset);
@@ -852,7 +970,7 @@ class CSharpGenerator : public BaseGenerator {
code += " public ";
code += GenTypeBasic(field.value.type.VectorType());
code += "[] Get";
- code += MakeCamel(field.name, true);
+ code += Name(field);
code += "Array() { ";
if (IsEnum(field.value.type.VectorType())) {
// Since __vector_as_array does not work for enum types,
@@ -881,9 +999,9 @@ class CSharpGenerator : public BaseGenerator {
}
// generate object accessors if is nested_flatbuffer
if (field.nested_flatbuffer) {
- auto nested_type_name = WrapInNameSpace(*field.nested_flatbuffer);
+ auto nested_type_name = NamespacedName(*field.nested_flatbuffer);
auto nested_method_name =
- MakeCamel(field.name, true) + "As" + field.nested_flatbuffer->name;
+ Name(field) + "As" + field.nested_flatbuffer->name;
auto get_nested_method_name = nested_method_name;
get_nested_method_name = "Get" + nested_method_name;
conditional_cast = "(" + nested_type_name + "?)";
@@ -904,15 +1022,16 @@ class CSharpGenerator : public BaseGenerator {
is_series ? field.value.type.VectorType() : field.value.type;
// Boolean parameters have to be explicitly converted to byte
// representation.
- auto setter_parameter = underlying_type.base_type == BASE_TYPE_BOOL
- ? "(byte)(" + field.name + " ? 1 : 0)"
- : field.name;
+ auto setter_parameter =
+ underlying_type.base_type == BASE_TYPE_BOOL
+ ? "(byte)(" + EscapeKeyword(field.name) + " ? 1 : 0)"
+ : EscapeKeyword(field.name);
auto mutator_prefix = MakeCamel("mutate", true);
// A vector mutator also needs the index of the vector element it should
// mutate.
auto mutator_params = (is_series ? "(int j, " : "(") +
- GenTypeGet(underlying_type) + " " + field.name +
- ") { ";
+ GenTypeGet(underlying_type) + " " +
+ EscapeKeyword(field.name) + ") { ";
auto setter_index =
is_series
? "__p." +
@@ -926,7 +1045,7 @@ class CSharpGenerator : public BaseGenerator {
if (IsScalar(underlying_type.base_type) && !IsUnion(field.value.type)) {
code += " public ";
code += struct_def.fixed ? "void " : "bool ";
- code += mutator_prefix + MakeCamel(field.name, true);
+ code += mutator_prefix + Name(field);
code += mutator_params;
if (struct_def.fixed) {
code += GenSetter(underlying_type) + "(" + setter_index + ", ";
@@ -1002,13 +1121,13 @@ class CSharpGenerator : public BaseGenerator {
field.value.type.struct_def->defined_namespace,
GenTypeName_ObjectAPI(field.value.type.struct_def->name, opts));
code += " ";
- code += field.name;
+ code += EscapeKeyword(field.name);
code += " = null";
} else {
code += GenTypeBasic(field.value.type);
if (field.IsScalarOptional()) { code += "?"; }
code += " ";
- code += field.name;
+ code += EscapeKeyword(field.name);
if (!IsScalar(field.value.type.base_type)) code += "Offset";
code += " = ";
@@ -1028,13 +1147,13 @@ class CSharpGenerator : public BaseGenerator {
size == SizeOf(field.value.type.base_type))) {
code += " " + struct_def.name + ".";
code += "Add";
- code += MakeCamel(field.name) + "(builder, ";
+ code += Name(field) + "(builder, ";
if (IsStruct(field.value.type) &&
opts.generate_object_based_api) {
code += GenTypePointer(field.value.type) + ".Pack(builder, " +
- field.name + ")";
+ EscapeKeyword(field.name) + ")";
} else {
- code += field.name;
+ code += EscapeKeyword(field.name);
if (!IsScalar(field.value.type.base_type)) code += "Offset";
}
@@ -1062,17 +1181,17 @@ class CSharpGenerator : public BaseGenerator {
if (field.deprecated) continue;
if (field.key) key_field = &field;
code += " public static void Add";
- code += MakeCamel(field.name);
+ code += Name(field);
code += "(FlatBufferBuilder builder, ";
code += GenTypeBasic(field.value.type);
auto argname = MakeCamel(field.name, false);
if (!IsScalar(field.value.type.base_type)) argname += "Offset";
if (field.IsScalarOptional()) { code += "?"; }
- code += " " + argname + ") { builder.Add";
+ code += " " + EscapeKeyword(argname) + ") { builder.Add";
code += GenMethod(field.value.type) + "(";
code += NumToString(it - struct_def.fields.vec.begin()) + ", ";
- code += SourceCastBasic(field.value.type);
- code += argname;
+ code += SourceCastBasic(field.value.type, field.IsScalarOptional());
+ code += EscapeKeyword(argname);
if (!IsScalar(field.value.type.base_type) &&
field.value.type.base_type != BASE_TYPE_UNION) {
code += ".Value";
@@ -1093,7 +1212,7 @@ class CSharpGenerator : public BaseGenerator {
field_has_create_set.insert(&field);
code += " public static VectorOffset ";
code += "Create";
- code += MakeCamel(field.name);
+ code += Name(field);
code += "Vector(FlatBufferBuilder builder, ";
code += GenTypeBasic(vector_type) + "[] data) ";
code += "{ builder.StartVector(";
@@ -1105,7 +1224,11 @@ class CSharpGenerator : public BaseGenerator {
code += "Add";
code += GenMethod(vector_type);
code += "(";
- code += SourceCastBasic(vector_type);
+ // At the moment there is no support for vectors of optional enums,
+ // e.g. for an enum type SomeEnum there is no way to define
+ // `SomeEnum?[] enums` in a FlatBuffers schema, so isOptional = false.
+ code += SourceCastBasic(vector_type, false);
code += "data[i]";
if (vector_type.base_type == BASE_TYPE_STRUCT ||
IsString(vector_type))
@@ -1115,7 +1238,7 @@ class CSharpGenerator : public BaseGenerator {
code += " public static VectorOffset ";
code += "Create";
- code += MakeCamel(field.name);
+ code += Name(field);
code += "VectorBlock(FlatBufferBuilder builder, ";
code += GenTypeBasic(vector_type) + "[] data) ";
code += "{ builder.StartVector(";
@@ -1127,7 +1250,7 @@ class CSharpGenerator : public BaseGenerator {
// Generate a method to start a vector, data to be added manually
// after.
code += " public static void Start";
- code += MakeCamel(field.name);
+ code += Name(field);
code += "Vector(FlatBufferBuilder builder, int numElems) ";
code += "{ builder.StartVector(";
code += NumToString(elem_size);
@@ -1215,7 +1338,7 @@ class CSharpGenerator : public BaseGenerator {
GenPackUnPack_ObjectAPI(struct_def, code_ptr, opts, struct_has_create,
field_has_create_set);
}
- code += "};\n\n";
+ code += "}\n\n";
if (opts.generate_object_based_api) {
GenStruct_ObjectAPI(struct_def, code_ptr, opts);
@@ -1305,19 +1428,27 @@ class CSharpGenerator : public BaseGenerator {
code += " }\n\n";
// As<T>
code += " public T As<T>() where T : class { return this.Value as T; }\n";
- // As
+ // As, From
for (auto it = enum_def.Vals().begin(); it != enum_def.Vals().end(); ++it) {
auto &ev = **it;
if (ev.union_type.base_type == BASE_TYPE_NONE) continue;
auto type_name = GenTypeGet_ObjectAPI(ev.union_type, opts);
- if (ev.union_type.base_type == BASE_TYPE_STRUCT &&
- ev.union_type.struct_def->attributes.Lookup("private")) {
- code += " internal ";
- } else {
- code += " public ";
- }
- code += type_name + " As" + ev.name + "() { return this.As<" + type_name +
- ">(); }\n";
+ std::string accessibility =
+ (ev.union_type.base_type == BASE_TYPE_STRUCT &&
+ ev.union_type.struct_def->attributes.Lookup("private"))
+ ? "internal"
+ : "public";
+ // As
+ code += " " + accessibility + " " + type_name + " As" + ev.name +
+ "() { return this.As<" + type_name + ">(); }\n";
+ // From
+ auto lower_ev_name = ev.name;
+ std::transform(lower_ev_name.begin(), lower_ev_name.end(),
+ lower_ev_name.begin(), CharToLower);
+ code += " " + accessibility + " static " + union_name + " From" +
+ ev.name + "(" + type_name + " _" + lower_ev_name +
+ ") { return new " + union_name + "{ Type = " + Name(enum_def) +
+ "." + Name(ev) + ", Value = _" + lower_ev_name + " }; }\n";
}
code += "\n";
// Pack()
@@ -1329,7 +1460,7 @@ class CSharpGenerator : public BaseGenerator {
if (ev.union_type.base_type == BASE_TYPE_NONE) {
code += " default: return 0;\n";
} else {
- code += " case " + enum_def.name + "." + ev.name + ": return ";
+ code += " case " + Name(enum_def) + "." + Name(ev) + ": return ";
if (IsString(ev.union_type)) {
code += "builder.CreateString(_o.As" + ev.name + "()).Value;\n";
} else {
@@ -1414,7 +1545,7 @@ class CSharpGenerator : public BaseGenerator {
code += " default: break;\n";
} else {
auto type_name = GenTypeGet_ObjectAPI(ev.union_type, opts);
- code += " case " + enum_def.name + "." + ev.name +
+ code += " case " + Name(enum_def) + "." + Name(ev) +
": _o.Value = serializer.Deserialize<" + type_name +
">(reader); break;\n";
}
@@ -1450,7 +1581,7 @@ class CSharpGenerator : public BaseGenerator {
} else {
code += indent + varialbe_name + " = new ";
}
- code += WrapInNameSpace(enum_def) + "Union();\n";
+ code += NamespacedName(enum_def) + "Union();\n";
code += indent + varialbe_name + ".Type = this." + camel_name + "Type" +
type_suffix + ";\n";
code +=
@@ -1461,7 +1592,7 @@ class CSharpGenerator : public BaseGenerator {
if (ev.union_type.base_type == BASE_TYPE_NONE) {
code += indent + " default: break;\n";
} else {
- code += indent + " case " + WrapInNameSpace(enum_def) + "." + ev.name +
+ code += indent + " case " + NamespacedName(enum_def) + "." + ev.name +
":\n";
code += indent + " " + varialbe_name + ".Value = this." + camel_name;
if (IsString(ev.union_type)) {
@@ -1499,7 +1630,7 @@ class CSharpGenerator : public BaseGenerator {
it != struct_def.fields.vec.end(); ++it) {
auto &field = **it;
if (field.deprecated) continue;
- auto camel_name = MakeCamel(field.name);
+ auto camel_name = Name(field);
auto start = " _o." + camel_name + " = ";
switch (field.value.type.base_type) {
case BASE_TYPE_STRUCT: {
@@ -1515,11 +1646,10 @@ class CSharpGenerator : public BaseGenerator {
case BASE_TYPE_ARRAY: {
auto type_name = GenTypeGet_ObjectAPI(field.value.type, opts);
auto length_str = NumToString(field.value.type.fixed_length);
- auto unpack_method = field.value.type.struct_def == nullptr
- ? ""
- : field.value.type.struct_def->fixed
- ? ".UnPack()"
- : "?.UnPack()";
+ auto unpack_method = field.value.type.struct_def == nullptr ? ""
+ : field.value.type.struct_def->fixed
+ ? ".UnPack()"
+ : "?.UnPack()";
code += start + "new " + type_name.substr(0, type_name.length() - 1) +
length_str + "];\n";
code += " for (var _j = 0; _j < " + length_str + "; ++_j) { _o." +
@@ -1574,7 +1704,7 @@ class CSharpGenerator : public BaseGenerator {
it != struct_def.fields.vec.end(); ++it) {
auto &field = **it;
if (field.deprecated) continue;
- auto camel_name = MakeCamel(field.name);
+ auto camel_name = Name(field);
// pre
switch (field.value.type.base_type) {
case BASE_TYPE_STRUCT: {
@@ -1628,12 +1758,12 @@ class CSharpGenerator : public BaseGenerator {
break;
case BASE_TYPE_UTYPE:
property_name = camel_name.substr(0, camel_name.size() - 4);
- array_type = WrapInNameSpace(*field.value.type.enum_def);
+ array_type = NamespacedName(*field.value.type.enum_def);
to_array = "_o." + property_name + "[_j].Type";
break;
case BASE_TYPE_UNION:
array_type = "int";
- to_array = WrapInNameSpace(*field.value.type.enum_def) +
+ to_array = NamespacedName(*field.value.type.enum_def) +
"Union.Pack(builder, _o." + property_name + "[_j])";
break;
default: gen_for_loop = false; break;
@@ -1688,7 +1818,7 @@ class CSharpGenerator : public BaseGenerator {
}
case BASE_TYPE_UNION: {
code += " var _" + field.name + "_type = _o." + camel_name +
- " == null ? " + WrapInNameSpace(*field.value.type.enum_def) +
+ " == null ? " + NamespacedName(*field.value.type.enum_def) +
".NONE : " + "_o." + camel_name + ".Type;\n";
code +=
" var _" + field.name + " = _o." + camel_name +
@@ -1707,7 +1837,7 @@ class CSharpGenerator : public BaseGenerator {
it != struct_def.fields.vec.end(); ++it) {
auto &field = **it;
if (field.deprecated) continue;
- auto camel_name = MakeCamel(field.name);
+ auto camel_name = Name(field);
switch (field.value.type.base_type) {
case BASE_TYPE_STRUCT: {
if (struct_def.fixed) {
@@ -1761,7 +1891,7 @@ class CSharpGenerator : public BaseGenerator {
it != struct_def.fields.vec.end(); ++it) {
auto &field = **it;
if (field.deprecated) continue;
- auto camel_name = MakeCamel(field.name);
+ auto camel_name = Name(field);
switch (field.value.type.base_type) {
case BASE_TYPE_STRUCT: {
if (field.value.type.struct_def->fixed) {
@@ -1857,7 +1987,7 @@ class CSharpGenerator : public BaseGenerator {
}
code += "] = _o";
for (size_t i = 0, j = 0; i < array_lengths.size(); ++i) {
- code += "." + MakeCamel(array_lengths[i].name);
+ code += "." + MakeCamel(array_lengths[i].name, true);
if (array_lengths[i].length <= 0) continue;
code += "[idx" + NumToString(j++) + "]";
}
@@ -1868,7 +1998,7 @@ class CSharpGenerator : public BaseGenerator {
} else {
code += "_o";
for (size_t i = 0; i < array_lengths.size(); ++i) {
- code += "." + MakeCamel(array_lengths[i].name);
+ code += "." + MakeCamel(array_lengths[i].name, true);
}
code += ";";
}
@@ -1911,13 +2041,13 @@ class CSharpGenerator : public BaseGenerator {
type_name.replace(type_name.length() - type_name_length,
type_name_length, new_type_name);
} else if (type.element == BASE_TYPE_UNION) {
- type_name = WrapInNameSpace(*type.enum_def) + "Union";
+ type_name = NamespacedName(*type.enum_def) + "Union";
}
break;
}
case BASE_TYPE_UNION: {
- type_name = WrapInNameSpace(*type.enum_def) + "Union";
+ type_name = NamespacedName(*type.enum_def) + "Union";
break;
}
default: break;
@@ -1961,10 +2091,10 @@ class CSharpGenerator : public BaseGenerator {
if (field.value.type.element == BASE_TYPE_UTYPE) continue;
auto type_name = GenTypeGet_ObjectAPI(field.value.type, opts);
if (field.IsScalarOptional()) type_name += "?";
- auto camel_name = MakeCamel(field.name, true);
+ auto camel_name = Name(field);
if (opts.cs_gen_json_serializer) {
if (IsUnion(field.value.type)) {
- auto utype_name = WrapInNameSpace(*field.value.type.enum_def);
+ auto utype_name = NamespacedName(*field.value.type.enum_def);
code +=
" [Newtonsoft.Json.JsonProperty(\"" + field.name + "_type\")]\n";
if (IsVector(field.value.type)) {
@@ -2027,7 +2157,7 @@ class CSharpGenerator : public BaseGenerator {
if (field.deprecated) continue;
if (field.value.type.base_type == BASE_TYPE_UTYPE) continue;
if (field.value.type.element == BASE_TYPE_UTYPE) continue;
- code += " this." + MakeCamel(field.name) + " = ";
+ code += " this." + Name(field) + " = ";
auto type_name = GenTypeGet_ObjectAPI(field.value.type, opts);
if (IsScalar(field.value.type.base_type)) {
code += GenDefaultValue(field) + ";\n";
diff --git a/chromium/third_party/flatbuffers/src/src/idl_gen_dart.cpp b/chromium/third_party/flatbuffers/src/src/idl_gen_dart.cpp
index 7fefd87baaa..bf820976379 100644
--- a/chromium/third_party/flatbuffers/src/src/idl_gen_dart.cpp
+++ b/chromium/third_party/flatbuffers/src/src/idl_gen_dart.cpp
@@ -233,8 +233,7 @@ class DartGenerator : public BaseGenerator {
code += " factory " + name + ".fromValue(int value) {\n";
code += " final result = values[value];\n";
code += " if (result == null) {\n";
- code +=
- " throw new StateError('Invalid value $value for bit flag enum ";
+ code += " throw StateError('Invalid value $value for bit flag enum ";
code += name + "');\n";
code += " }\n";
@@ -265,8 +264,8 @@ class DartGenerator : public BaseGenerator {
if (it != enum_def.Vals().begin()) { code += '\n'; }
GenDocComment(ev.doc_comment, &code, "", " ");
}
- code += " static const " + name + " " + ev.name + " = ";
- code += "const " + name + "._(" + enum_def.ToString(ev) + ");\n";
+ code += " static const " + name + " " + ev.name + " = " + name + "._(" +
+ enum_def.ToString(ev) + ");\n";
}
code += " static const Map<int, " + name + "> values = {\n";
@@ -277,8 +276,8 @@ class DartGenerator : public BaseGenerator {
}
code += "};\n\n";
- code += " static const " + _kFb + ".Reader<" + name +
- "> reader = const _" + name + "Reader();\n\n";
+ code += " static const " + _kFb + ".Reader<" + name + "> reader = _" +
+ name + "Reader();\n\n";
code += " @override\n";
code += " String toString() {\n";
code += " return '" + name + "{value: $value}';\n";
@@ -301,7 +300,7 @@ class DartGenerator : public BaseGenerator {
code += " @override\n";
code +=
" " + name + " read(" + _kFb + ".BufferContext bc, int offset) =>\n";
- code += " new " + name + ".fromValue(const " + _kFb + "." +
+ code += " " + name + ".fromValue(const " + _kFb + "." +
GenType(enum_def.underlying_type) + "Reader().read(bc, offset));\n";
code += "}\n\n";
}
@@ -330,23 +329,33 @@ class DartGenerator : public BaseGenerator {
std::string GenReaderTypeName(const Type &type, Namespace *current_namespace,
const FieldDef &def,
- bool parent_is_vector = false) {
+ bool parent_is_vector = false, bool lazy = true,
+ bool constConstruct = true) {
+ std::string prefix = (constConstruct ? "const " : "") + _kFb;
if (type.base_type == BASE_TYPE_BOOL) {
- return "const " + _kFb + ".BoolReader()";
+ return prefix + ".BoolReader()";
} else if (IsVector(type)) {
- return "const " + _kFb + ".ListReader<" +
+ if (!type.VectorType().enum_def) {
+ if (type.VectorType().base_type == BASE_TYPE_CHAR) {
+ return prefix + ".Int8ListReader(" + (lazy ? ")" : "lazy: false)");
+ }
+ if (type.VectorType().base_type == BASE_TYPE_UCHAR) {
+ return prefix + ".Uint8ListReader(" + (lazy ? ")" : "lazy: false)");
+ }
+ }
+ return prefix + ".ListReader<" +
GenDartTypeName(type.VectorType(), current_namespace, def) + ">(" +
- GenReaderTypeName(type.VectorType(), current_namespace, def,
- true) +
- ")";
+ GenReaderTypeName(type.VectorType(), current_namespace, def, true,
+ true, false) +
+ (lazy ? ")" : ", lazy: false)");
} else if (IsString(type)) {
- return "const " + _kFb + ".StringReader()";
+ return prefix + ".StringReader()";
}
if (IsScalar(type.base_type)) {
if (type.enum_def && parent_is_vector) {
return GenDartTypeName(type, current_namespace, def) + ".reader";
}
- return "const " + _kFb + "." + GenType(type) + "Reader()";
+ return prefix + "." + GenType(type) + "Reader()";
} else {
return GenDartTypeName(type, current_namespace, def) + ".reader";
}
@@ -390,6 +399,15 @@ class DartGenerator : public BaseGenerator {
}
}
+ std::string GenDartTypeName(const Type &type, Namespace *current_namespace,
+ const FieldDef &def, bool nullable,
+ std::string struct_type_suffix) {
+ std::string typeName =
+ GenDartTypeName(type, current_namespace, def, struct_type_suffix);
+ if (nullable && typeName != "dynamic") typeName += "?";
+ return typeName;
+ }
+
static const std::string MaybeWrapNamespace(const std::string &type_name,
Namespace *current_ns,
const FieldDef &field) {
@@ -435,15 +453,15 @@ class DartGenerator : public BaseGenerator {
code += " " + object_name + "._(this._bc, this._bcOffset);\n";
if (!struct_def.fixed) {
code += " factory " + object_name + "(List<int> bytes) {\n";
- code += " " + _kFb + ".BufferContext rootRef = new " + _kFb +
- ".BufferContext.fromBytes(bytes);\n";
+ code +=
+ " final rootRef = " + _kFb + ".BufferContext.fromBytes(bytes);\n";
code += " return reader.read(rootRef, 0);\n";
code += " }\n";
}
code += "\n";
code += " static const " + _kFb + ".Reader<" + object_name +
- "> reader = const " + reader_name + "();\n\n";
+ "> reader = " + reader_name + "();\n\n";
code += " final " + _kFb + ".BufferContext _bc;\n";
code += " final int _bcOffset;\n\n";
@@ -495,7 +513,7 @@ class DartGenerator : public BaseGenerator {
GenDocComment(struct_def.doc_comment, &code, "");
std::string class_name = struct_def.name + "T";
- code += "class " + class_name + " {\n";
+ code += "class " + class_name + " implements " + _kFb + ".Packable {\n";
std::string constructor_args;
for (auto it = non_deprecated_fields.begin();
@@ -503,14 +521,13 @@ class DartGenerator : public BaseGenerator {
const FieldDef &field = *it->second;
std::string field_name = MakeCamel(field.name, false);
- std::string type_name = GenDartTypeName(
- field.value.type, struct_def.defined_namespace, field, "T");
-
std::string defaultValue = getDefaultValue(field.value);
- bool isNullable = defaultValue.empty() && !struct_def.fixed;
+ std::string type_name =
+ GenDartTypeName(field.value.type, struct_def.defined_namespace, field,
+ defaultValue.empty() && !struct_def.fixed, "T");
GenDocComment(field.doc_comment, &code, "", " ");
- code += " " + type_name + (isNullable ? "? " : " ") + field_name + ";\n";
+ code += " " + type_name + " " + field_name + ";\n";
if (!constructor_args.empty()) constructor_args += ",\n";
constructor_args += " ";
@@ -556,18 +573,32 @@ class DartGenerator : public BaseGenerator {
std::string field_name = MakeCamel(field.name, false);
if (!constructor_args.empty()) constructor_args += ",\n";
- constructor_args += " " + field_name + ": " + field_name;
+ constructor_args += " " + field_name + ": ";
const Type &type = field.value.type;
- bool isNullable =
- getDefaultValue(field.value).empty() && !struct_def.fixed;
+ std::string defaultValue = getDefaultValue(field.value);
+ bool isNullable = defaultValue.empty() && !struct_def.fixed;
std::string nullableValueAccessOperator = isNullable ? "?" : "";
if (type.base_type == BASE_TYPE_STRUCT) {
- constructor_args += nullableValueAccessOperator + ".unpack()";
- } else if (type.base_type == BASE_TYPE_VECTOR &&
- type.VectorType().base_type == BASE_TYPE_STRUCT) {
constructor_args +=
- nullableValueAccessOperator + ".map((e) => e.unpack()).toList()";
+ field_name + nullableValueAccessOperator + ".unpack()";
+ } else if (type.base_type == BASE_TYPE_VECTOR) {
+ if (type.VectorType().base_type == BASE_TYPE_STRUCT) {
+ constructor_args += field_name + nullableValueAccessOperator +
+ ".map((e) => e.unpack()).toList()";
+ } else {
+ constructor_args +=
+ GenReaderTypeName(field.value.type, struct_def.defined_namespace,
+ field, false, false);
+ constructor_args += ".vTableGet";
+ std::string offset = NumToString(field.value.offset);
+ constructor_args +=
+ isNullable
+ ? "Nullable(_bc, _bcOffset, " + offset + ")"
+ : "(_bc, _bcOffset, " + offset + ", " + defaultValue + ")";
+ }
+ } else {
+ constructor_args += field_name;
}
}
@@ -584,6 +615,7 @@ class DartGenerator : public BaseGenerator {
const std::vector<std::pair<int, FieldDef *>> &non_deprecated_fields) {
std::string code;
+ code += " @override\n";
code += " int pack(fb.Builder fbBuilder) {\n";
code += GenObjectBuilderImplementation(struct_def, non_deprecated_fields,
false, true);
@@ -633,15 +665,15 @@ class DartGenerator : public BaseGenerator {
auto &field = *pair.second;
std::string field_name = MakeCamel(field.name, false);
- std::string type_name = GenDartTypeName(
- field.value.type, struct_def.defined_namespace, field);
-
- GenDocComment(field.doc_comment, &code, "", " ");
std::string defaultValue = getDefaultValue(field.value);
bool isNullable = defaultValue.empty() && !struct_def.fixed;
+ std::string type_name =
+ GenDartTypeName(field.value.type, struct_def.defined_namespace, field,
+ isNullable, "");
+
+ GenDocComment(field.doc_comment, &code, "", " ");
- code += " " + type_name + (isNullable ? "?" : "");
- code += " get " + field_name;
+ code += " " + type_name + " get " + field_name;
if (field.value.type.base_type == BASE_TYPE_UNION) {
code += " {\n";
code += " switch (" + field_name + "Type?.value) {\n";
@@ -758,7 +790,7 @@ class DartGenerator : public BaseGenerator {
}
code += " @override\n";
code += " " + impl_name +
- " createObject(fb.BufferContext bc, int offset) => \n new " +
+ " createObject(fb.BufferContext bc, int offset) => \n " +
impl_name + "._(bc, offset);\n";
code += "}\n\n";
}
@@ -771,7 +803,7 @@ class DartGenerator : public BaseGenerator {
auto &builder_name = *builder_name_ptr;
code += "class " + builder_name + " {\n";
- code += " " + builder_name + "(this.fbBuilder) {}\n\n";
+ code += " " + builder_name + "(this.fbBuilder);\n\n";
code += " final " + _kFb + ".Builder fbBuilder;\n\n";
if (struct_def.fixed) {
@@ -835,7 +867,8 @@ class DartGenerator : public BaseGenerator {
auto &code = *code_ptr;
code += " void begin() {\n";
- code += " fbBuilder.startTable();\n";
+ code += " fbBuilder.startTable(" +
+ NumToString(non_deprecated_fields.size()) + ");\n";
code += " }\n\n";
for (auto it = non_deprecated_fields.begin();
@@ -888,9 +921,8 @@ class DartGenerator : public BaseGenerator {
code += " final " +
GenDartTypeName(field.value.type, struct_def.defined_namespace,
- field, "ObjectBuilder") +
- (struct_def.fixed ? "" : "?") + " _" +
- MakeCamel(field.name, false) + ";\n";
+ field, !struct_def.fixed, "ObjectBuilder") +
+ " _" + MakeCamel(field.name, false) + ";\n";
}
code += "\n";
code += " " + builder_name + "(";
@@ -905,9 +937,8 @@ class DartGenerator : public BaseGenerator {
code += " ";
code += (struct_def.fixed ? "required " : "") +
GenDartTypeName(field.value.type, struct_def.defined_namespace,
- field, "ObjectBuilder") +
- (struct_def.fixed ? "" : "?") + " " +
- MakeCamel(field.name, false) + ",\n";
+ field, !struct_def.fixed, "ObjectBuilder") +
+ " " + MakeCamel(field.name, false) + ",\n";
}
code += " })\n";
code += " : ";
@@ -937,10 +968,10 @@ class DartGenerator : public BaseGenerator {
code += " /// Convenience method to serialize to byte list.\n";
code += " @override\n";
code += " Uint8List toBytes([String? fileIdentifier]) {\n";
- code += " " + _kFb + ".Builder fbBuilder = new ";
- code += _kFb + ".Builder();\n";
- code += " int offset = finish(fbBuilder);\n";
- code += " return fbBuilder.finish(offset, fileIdentifier);\n";
+ code += " final fbBuilder = " + _kFb +
+ ".Builder(deduplicateTables: false);\n";
+ code += " fbBuilder.finish(finish(fbBuilder), fileIdentifier);\n";
+ code += " return fbBuilder.buffer;\n";
code += " }\n";
code += "}\n";
}
@@ -965,10 +996,10 @@ class DartGenerator : public BaseGenerator {
if (pack && IsVector(field.value.type) &&
field.value.type.VectorType().base_type == BASE_TYPE_STRUCT &&
field.value.type.struct_def->fixed) {
- code += " int? " + offset_name + " = null;\n";
- code += " if (" + field_name + "?.isNotEmpty == true) {\n";
+ code += " int? " + offset_name + ";\n";
+ code += " if (" + field_name + " != null) {\n";
code +=
- " " + field_name + "!.forEach((e) => e.pack(fbBuilder));\n";
+ " for (var e in " + field_name + "!) { e.pack(fbBuilder); }\n";
code += " " + MakeCamel(field.name, false) +
"Offset = fbBuilder.endStructVector(" + field_name +
"!.length);\n";
@@ -978,20 +1009,20 @@ class DartGenerator : public BaseGenerator {
code += " final int? " + offset_name;
if (IsVector(field.value.type)) {
- code += " = " + field_name + "?.isNotEmpty == true\n";
- code += " ? fbBuilder.writeList";
+ code += " = " + field_name + " == null ? null\n";
+ code += " : fbBuilder.writeList";
switch (field.value.type.VectorType().base_type) {
case BASE_TYPE_STRING:
- code += "(" + field_name +
- "!.map((b) => fbBuilder.writeString(b)!).toList())";
+ code +=
+ "(" + field_name + "!.map(fbBuilder.writeString).toList());\n";
break;
case BASE_TYPE_STRUCT:
if (field.value.type.struct_def->fixed) {
- code += "OfStructs(" + field_name + "!)";
+ code += "OfStructs(" + field_name + "!);\n";
} else {
code += "(" + field_name + "!.map((b) => b." +
(pack ? "pack" : "getOrCreateOffset") +
- "(fbBuilder)).toList())";
+ "(fbBuilder)).toList());\n";
}
break;
default:
@@ -1000,11 +1031,11 @@ class DartGenerator : public BaseGenerator {
if (field.value.type.enum_def) {
code += ".map((f) => f.value).toList()";
}
- code += ")";
+ code += ");\n";
}
- code += "\n : null;\n";
} else if (IsString(field.value.type)) {
- code += " = fbBuilder.writeString(" + field_name + ");\n";
+ code += " = " + field_name + " == null ? null\n";
+ code += " : fbBuilder.writeString(" + field_name + "!);\n";
} else {
code += " = " + field_name + "?." +
(pack ? "pack" : "getOrCreateOffset") + "(fbBuilder);\n";
@@ -1056,7 +1087,8 @@ class DartGenerator : public BaseGenerator {
const std::vector<std::pair<int, FieldDef *>> &non_deprecated_fields,
bool prependUnderscore = true, bool pack = false) {
std::string code;
- code += " fbBuilder.startTable();\n";
+ code += " fbBuilder.startTable(" +
+ NumToString(non_deprecated_fields.size()) + ");\n";
for (auto it = non_deprecated_fields.begin();
it != non_deprecated_fields.end(); ++it) {
@@ -1099,8 +1131,6 @@ bool GenerateDart(const Parser &parser, const std::string &path,
std::string DartMakeRule(const Parser &parser, const std::string &path,
const std::string &file_name) {
- assert(parser.opts.lang <= IDLOptions::kMAX);
-
auto filebase =
flatbuffers::StripPath(flatbuffers::StripExtension(file_name));
dart::DartGenerator generator(parser, path, file_name);
diff --git a/chromium/third_party/flatbuffers/src/src/idl_gen_grpc.cpp b/chromium/third_party/flatbuffers/src/src/idl_gen_grpc.cpp
index 9aea745d4e8..8cb8ef79822 100644
--- a/chromium/third_party/flatbuffers/src/src/idl_gen_grpc.cpp
+++ b/chromium/third_party/flatbuffers/src/src/idl_gen_grpc.cpp
@@ -191,7 +191,7 @@ class FlatBufPrinter : public grpc_generator::Printer {
}
}
- void SetIndentationSize(const int size) {
+ void SetIndentationSize(const size_t size) {
FLATBUFFERS_ASSERT(str_->empty());
indentation_size_ = size;
}
@@ -199,15 +199,15 @@ class FlatBufPrinter : public grpc_generator::Printer {
void Indent() { indent_++; }
void Outdent() {
+ FLATBUFFERS_ASSERT(indent_ > 0);
indent_--;
- FLATBUFFERS_ASSERT(indent_ >= 0);
}
private:
std::string *str_;
char escape_char_;
- int indent_;
- int indentation_size_;
+ size_t indent_;
+ size_t indentation_size_;
char indentation_type_;
};
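The Outdent() change above is subtle: once indent_ is a size_t, the old post-decrement check `indent_ >= 0` is vacuously true, and decrementing past zero wraps around rather than going negative, so the assert has to run before the decrement. A minimal standalone C++ sketch of that behavior (illustrative only, not part of this patch; the Counter name is made up):

    #include <cassert>
    #include <cstddef>
    #include <cstdio>

    struct Counter {
      std::size_t indent_ = 0;
      void Indent() { indent_++; }
      void Outdent() {
        // With an unsigned counter, `indent_ >= 0` after the decrement can
        // never fail; decrementing zero silently wraps to SIZE_MAX instead.
        // Checking `indent_ > 0` *before* the decrement catches the misuse.
        assert(indent_ > 0 && "Outdent() called more times than Indent()");
        indent_--;
      }
    };

    int main() {
      Counter c;
      c.Indent();
      c.Outdent();      // fine: 1 -> 0
      // c.Outdent();   // would trip the assert instead of wrapping around
      std::printf("indent = %zu\n", c.indent_);
      return 0;
    }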
diff --git a/chromium/third_party/flatbuffers/src/src/idl_gen_java.cpp b/chromium/third_party/flatbuffers/src/src/idl_gen_java.cpp
index e333ca0ba1c..01dce4a3b4e 100644
--- a/chromium/third_party/flatbuffers/src/src/idl_gen_java.cpp
+++ b/chromium/third_party/flatbuffers/src/src/idl_gen_java.cpp
@@ -21,10 +21,6 @@
#include "flatbuffers/idl.h"
#include "flatbuffers/util.h"
-#if defined(FLATBUFFERS_CPP98_STL)
-# include <cctype>
-#endif // defined(FLATBUFFERS_CPP98_STL)
-
namespace flatbuffers {
namespace java {
@@ -355,7 +351,7 @@ class JavaGenerator : public BaseGenerator {
code += ";\n";
}
- // Generate a generate string table for enum values.
+ // Generate a string table for enum values.
// Problem is, if values are very sparse that could generate really big
// tables. Ideally in that case we generate a map lookup instead, but for
// the moment we simply don't output a table at all.
@@ -363,7 +359,9 @@ class JavaGenerator : public BaseGenerator {
// Average distance between values above which we consider a table
// "too sparse". Change at will.
static const uint64_t kMaxSparseness = 5;
- if (range / static_cast<uint64_t>(enum_def.size()) < kMaxSparseness) {
+ if (range / static_cast<uint64_t>(enum_def.size()) < kMaxSparseness &&
+ GenTypeBasic(DestinationType(enum_def.underlying_type, false)) !=
+ "long") {
code += "\n public static final String";
code += "[] names = { ";
auto val = enum_def.Vals().front();
diff --git a/chromium/third_party/flatbuffers/src/src/idl_gen_json_schema.cpp b/chromium/third_party/flatbuffers/src/src/idl_gen_json_schema.cpp
index 2e1cef65af0..9ea37aec17e 100644
--- a/chromium/third_party/flatbuffers/src/src/idl_gen_json_schema.cpp
+++ b/chromium/third_party/flatbuffers/src/src/idl_gen_json_schema.cpp
@@ -130,7 +130,9 @@ std::string GenType(const Type &type) {
return union_type_string;
}
case BASE_TYPE_UTYPE: return GenTypeRef(type.enum_def);
- default: { return GenBaseType(type); }
+ default: {
+ return GenBaseType(type);
+ }
}
}
@@ -174,9 +176,10 @@ class JsonSchemaGenerator : public BaseGenerator {
// remove leading and trailing spaces from comment line
const auto start = std::find_if(comment_line.begin(), comment_line.end(),
[](char c) { return !isspace(c); });
- const auto end = std::find_if(comment_line.rbegin(), comment_line.rend(),
- [](char c) { return !isspace(c); })
- .base();
+ const auto end =
+ std::find_if(comment_line.rbegin(), comment_line.rend(), [](char c) {
+ return !isspace(c);
+ }).base();
if (start < end) {
comment.append(start, end);
} else {
@@ -198,7 +201,10 @@ class JsonSchemaGenerator : public BaseGenerator {
bool generate() {
code_ = "";
- if (parser_.root_struct_def_ == nullptr) { return false; }
+ if (parser_.root_struct_def_ == nullptr) {
+ std::cerr << "Error: Binary schema not generated, no root struct found\n";
+ return false;
+ }
code_ += "{" + NewLine();
code_ += Indent(1) +
"\"$schema\": \"https://json-schema.org/draft/2019-09/schema\"," +
diff --git a/chromium/third_party/flatbuffers/src/src/idl_gen_kotlin.cpp b/chromium/third_party/flatbuffers/src/src/idl_gen_kotlin.cpp
index 4c76f1f991a..f2f135b89a2 100644
--- a/chromium/third_party/flatbuffers/src/src/idl_gen_kotlin.cpp
+++ b/chromium/third_party/flatbuffers/src/src/idl_gen_kotlin.cpp
@@ -23,9 +23,6 @@
#include "flatbuffers/flatbuffers.h"
#include "flatbuffers/idl.h"
#include "flatbuffers/util.h"
-#if defined(FLATBUFFERS_CPP98_STL)
-# include <cctype>
-#endif // defined(FLATBUFFERS_CPP98_STL)
namespace flatbuffers {
@@ -303,15 +300,15 @@ class KotlinGenerator : public BaseGenerator {
}
writer += ")";
});
- GenerateFunOneLine(writer, "name", "e: Int", "String",
- [&]() {
- writer += "names[e\\";
- if (enum_def.MinValue()->IsNonZero())
- writer += " - " + enum_def.MinValue()->name +
- ".toInt()\\";
- writer += "]";
- },
- parser_.opts.gen_jvmstatic);
+ GenerateFunOneLine(
+ writer, "name", "e: Int", "String",
+ [&]() {
+ writer += "names[e\\";
+ if (enum_def.MinValue()->IsNonZero())
+ writer += " - " + enum_def.MinValue()->name + ".toInt()\\";
+ writer += "]";
+ },
+ parser_.opts.gen_jvmstatic);
}
});
writer.DecrementIdentLevel();
@@ -425,7 +422,8 @@ class KotlinGenerator : public BaseGenerator {
(nameprefix + (field.name + "_")).c_str());
} else {
writer.SetValue("type", GenMethod(field.value.type));
- writer.SetValue("argname", nameprefix + MakeCamel(field.name, false));
+ writer.SetValue("argname",
+ nameprefix + MakeCamel(Esc(field.name), false));
writer.SetValue("cast", CastToSigned(field.value.type));
writer += "builder.put{{type}}({{argname}}{{cast}})";
}
@@ -625,9 +623,10 @@ class KotlinGenerator : public BaseGenerator {
auto id = identifier.length() > 0 ? ", \"" + identifier + "\"" : "";
auto params = "builder: FlatBufferBuilder, offset: Int";
auto method_name = "finish" + Esc(struct_def.name) + "Buffer";
- GenerateFunOneLine(writer, method_name, params, "",
- [&]() { writer += "builder.finish(offset" + id + ")"; },
- options.gen_jvmstatic);
+ GenerateFunOneLine(
+ writer, method_name, params, "",
+ [&]() { writer += "builder.finish(offset" + id + ")"; },
+ options.gen_jvmstatic);
}
void GenerateEndStructMethod(StructDef &struct_def, CodeWriter &writer,
@@ -638,21 +637,21 @@ class KotlinGenerator : public BaseGenerator {
auto returns = "Int";
auto field_vec = struct_def.fields.vec;
- GenerateFun(writer, name, params, returns,
- [&]() {
- writer += "val o = builder.endTable()";
- writer.IncrementIdentLevel();
- for (auto it = field_vec.begin(); it != field_vec.end();
- ++it) {
- auto &field = **it;
- if (field.deprecated || !field.IsRequired()) { continue; }
- writer.SetValue("offset", NumToString(field.value.offset));
- writer += "builder.required(o, {{offset}})";
- }
- writer.DecrementIdentLevel();
- writer += "return o";
- },
- options.gen_jvmstatic);
+ GenerateFun(
+ writer, name, params, returns,
+ [&]() {
+ writer += "val o = builder.endTable()";
+ writer.IncrementIdentLevel();
+ for (auto it = field_vec.begin(); it != field_vec.end(); ++it) {
+ auto &field = **it;
+ if (field.deprecated || !field.IsRequired()) { continue; }
+ writer.SetValue("offset", NumToString(field.value.offset));
+ writer += "builder.required(o, {{offset}})";
+ }
+ writer.DecrementIdentLevel();
+ writer += "return o";
+ },
+ options.gen_jvmstatic);
}
// Generate a method to create a vector from a Kotlin array.
@@ -667,18 +666,18 @@ class KotlinGenerator : public BaseGenerator {
writer.SetValue("root", GenMethod(vector_type));
writer.SetValue("cast", CastToSigned(vector_type));
- GenerateFun(writer, method_name, params, "Int",
- [&]() {
- writer +=
- "builder.startVector({{size}}, data.size, {{align}})";
- writer += "for (i in data.size - 1 downTo 0) {";
- writer.IncrementIdentLevel();
- writer += "builder.add{{root}}(data[i]{{cast}})";
- writer.DecrementIdentLevel();
- writer += "}";
- writer += "return builder.endVector()";
- },
- options.gen_jvmstatic);
+ GenerateFun(
+ writer, method_name, params, "Int",
+ [&]() {
+ writer += "builder.startVector({{size}}, data.size, {{align}})";
+ writer += "for (i in data.size - 1 downTo 0) {";
+ writer.IncrementIdentLevel();
+ writer += "builder.add{{root}}(data[i]{{cast}})";
+ writer.DecrementIdentLevel();
+ writer += "}";
+ writer += "return builder.endVector()";
+ },
+ options.gen_jvmstatic);
}
void GenerateStartVectorField(FieldDef &field, CodeWriter &writer,
@@ -704,21 +703,21 @@ class KotlinGenerator : public BaseGenerator {
auto field_type = GenTypeBasic(field.value.type.base_type);
auto secondArg = MakeCamel(Esc(field.name), false) + ": " + field_type;
- GenerateFunOneLine(writer, "add" + MakeCamel(Esc(field.name), true),
- "builder: FlatBufferBuilder, " + secondArg, "",
- [&]() {
- auto method = GenMethod(field.value.type);
- writer.SetValue("field_name",
- MakeCamel(Esc(field.name), false));
- writer.SetValue("method_name", method);
- writer.SetValue("pos", field_pos);
- writer.SetValue("default", GenFBBDefaultValue(field));
- writer.SetValue("cast", GenFBBValueCast(field));
-
- writer += "builder.add{{method_name}}({{pos}}, \\";
- writer += "{{field_name}}{{cast}}, {{default}})";
- },
- options.gen_jvmstatic);
+ GenerateFunOneLine(
+ writer, "add" + MakeCamel(Esc(field.name), true),
+ "builder: FlatBufferBuilder, " + secondArg, "",
+ [&]() {
+ auto method = GenMethod(field.value.type);
+ writer.SetValue("field_name", MakeCamel(Esc(field.name), false));
+ writer.SetValue("method_name", method);
+ writer.SetValue("pos", field_pos);
+ writer.SetValue("default", GenFBBDefaultValue(field));
+ writer.SetValue("cast", GenFBBValueCast(field));
+
+ writer += "builder.add{{method_name}}({{pos}}, \\";
+ writer += "{{field_name}}{{cast}}, {{default}})";
+ },
+ options.gen_jvmstatic);
}
static std::string ToSignedType(const Type &type) {
@@ -1014,8 +1013,9 @@ class KotlinGenerator : public BaseGenerator {
break;
default: found = "{{bbgetter}}({{index}}){{ucast}}";
}
- OffsetWrapper(writer, offset_val, [&]() { writer += found; },
- [&]() { writer += not_found; });
+ OffsetWrapper(
+ writer, offset_val, [&]() { writer += found; },
+ [&]() { writer += not_found; });
});
break;
}
@@ -1167,13 +1167,13 @@ class KotlinGenerator : public BaseGenerator {
if (struct_def.fixed) {
writer += "{{bbsetter}}({{index}}, {{params}}{{cast}})";
} else {
- OffsetWrapper(writer, offset_val,
- [&]() {
- writer +=
- "{{bbsetter}}({{index}}, {{params}}{{cast}})";
- writer += "true";
- },
- [&]() { writer += "false"; });
+ OffsetWrapper(
+ writer, offset_val,
+ [&]() {
+ writer += "{{bbsetter}}({{index}}, {{params}}{{cast}})";
+ writer += "true";
+ },
+ [&]() { writer += "false"; });
}
};
@@ -1316,12 +1316,13 @@ class KotlinGenerator : public BaseGenerator {
const IDLOptions options) {
// create a struct constructor function
auto params = StructConstructorParams(struct_def);
- GenerateFun(code, "create" + Esc(struct_def.name), params, "Int",
- [&]() {
- GenStructBody(struct_def, code, "");
- code += "return builder.offset()";
- },
- options.gen_jvmstatic);
+ GenerateFun(
+ code, "create" + Esc(struct_def.name), params, "Int",
+ [&]() {
+ GenStructBody(struct_def, code, "");
+ code += "return builder.offset()";
+ },
+ options.gen_jvmstatic);
}
static std::string StructConstructorParams(const StructDef &struct_def,
diff --git a/chromium/third_party/flatbuffers/src/src/idl_gen_python.cpp b/chromium/third_party/flatbuffers/src/src/idl_gen_python.cpp
index 62e6d84c8b6..430aefad444 100644
--- a/chromium/third_party/flatbuffers/src/src/idl_gen_python.cpp
+++ b/chromium/third_party/flatbuffers/src/src/idl_gen_python.cpp
@@ -444,7 +444,7 @@ class PythonGenerator : public BaseGenerator {
code += Indent + Indent + Indent;
code += "from " + qualified_name + " import " + unqualified_name + "\n";
code += Indent + Indent + Indent + "return " + unqualified_name;
- code += ".GetRootAs";
+ code += ".GetRootAs" + unqualified_name;
code += "(self._tab.Bytes, self._tab.Vector(o))\n";
code += Indent + Indent + "return 0\n";
code += "\n";
@@ -555,23 +555,27 @@ class PythonGenerator : public BaseGenerator {
// Get the value of a table's starting offset.
void GetStartOfTable(const StructDef &struct_def, std::string *code_ptr) {
auto &code = *code_ptr;
- code += "def Start(builder): ";
+
+ // Generate method with struct name.
+ code += "def " + NormalizedName(struct_def) + "Start(builder): ";
code += "builder.StartObject(";
code += NumToString(struct_def.fields.vec.size());
code += ")\n";
- // Add alias with the old name.
- code += "def " + NormalizedName(struct_def) + "Start(builder):\n";
- code += Indent +
- "\"\"\"This method is deprecated. Please switch to Start.\"\"\"\n";
- code += Indent + "return Start(builder)\n";
+ // Generate method without struct name.
+ code += "def Start(builder):\n";
+ code +=
+ Indent + "return " + NormalizedName(struct_def) + "Start(builder)\n";
}
// Set the value of a table's field.
void BuildFieldOfTable(const StructDef &struct_def, const FieldDef &field,
const size_t offset, std::string *code_ptr) {
auto &code = *code_ptr;
- code += "def Add" + MakeCamel(NormalizedName(field));
+
+ // Generate method with struct name.
+ code += "def " + NormalizedName(struct_def) + "Add" +
+ MakeCamel(NormalizedName(field));
code += "(builder, ";
code += MakeCamel(NormalizedName(field), false);
code += "): ";
@@ -591,27 +595,25 @@ class PythonGenerator : public BaseGenerator {
: field.value.constant;
code += ")\n";
- // Add alias with the old name.
- code += "def " + NormalizedName(struct_def) + "Add" +
- MakeCamel(NormalizedName(field));
+ // Generate method without struct name.
+ code += "def Add" + MakeCamel(NormalizedName(field));
code += "(builder, ";
code += MakeCamel(NormalizedName(field), false);
code += "):\n";
- code += Indent + "\"\"\"This method is deprecated. Please switch to Add";
- code += MakeCamel(NormalizedName(field)) + ".\"\"\"\n";
- code += Indent + "return Add" + MakeCamel(NormalizedName(field));
+ code += Indent + "return " + NormalizedName(struct_def) + "Add" +
+ MakeCamel(NormalizedName(field));
code += "(builder, ";
code += MakeCamel(NormalizedName(field), false);
code += ")\n";
-
- // Add alias with the old name.
}
// Set the value of one of the members of a table's vector.
void BuildVectorOfTable(const StructDef &struct_def, const FieldDef &field,
std::string *code_ptr) {
auto &code = *code_ptr;
- code += "def Start";
+
+ // Generate method with struct name.
+ code += "def " + NormalizedName(struct_def) + "Start";
code += MakeCamel(NormalizedName(field));
code += "Vector(builder, numElems): return builder.StartVector(";
auto vector_type = field.value.type.VectorType();
@@ -621,13 +623,11 @@ class PythonGenerator : public BaseGenerator {
code += ", numElems, " + NumToString(alignment);
code += ")\n";
- // Add alias with the old name.
- code += "def " + NormalizedName(struct_def) + "Start";
+ // Generate method without struct name.
+ code += "def Start";
code += MakeCamel(NormalizedName(field));
code += "Vector(builder, numElems):\n";
- code += Indent +
- "\"\"\"This method is deprecated. Please switch to Start.\"\"\"\n";
- code += Indent + "return Start";
+ code += Indent + "return " + NormalizedName(struct_def) + "Start";
code += MakeCamel(NormalizedName(field));
code += "Vector(builder, numElems)\n";
}
@@ -635,7 +635,8 @@ class PythonGenerator : public BaseGenerator {
// Set the value of one of the members of a table's vector and fills in the
// elements from a bytearray. This is for simplifying the use of nested
// flatbuffers.
- void BuildVectorOfTableFromBytes(const FieldDef &field,
+ void BuildVectorOfTableFromBytes(const StructDef &struct_def,
+ const FieldDef &field,
std::string *code_ptr) {
auto nested = field.attributes.Lookup("nested_flatbuffer");
if (!nested) { return; } // There is no nested flatbuffer.
@@ -652,7 +653,11 @@ class PythonGenerator : public BaseGenerator {
(void)nested_root;
auto &code = *code_ptr;
- code += "def MakeVectorFromBytes(builder, bytes):\n";
+
+ // Generate method with struct and field name.
+ code += "def " + NormalizedName(struct_def) + "Make";
+ code += MakeCamel(NormalizedName(field));
+ code += "VectorFromBytes(builder, bytes):\n";
code += Indent + "builder.StartVector(";
auto vector_type = field.value.type.VectorType();
auto alignment = InlineAlignment(vector_type);
@@ -665,29 +670,26 @@ class PythonGenerator : public BaseGenerator {
code += " = bytes\n";
code += Indent + "return builder.EndVector()\n";
- // Add alias with the old name.
- code += "def Make" + MakeCamel(NormalizedName(field));
- code += "VectorFromBytes(builder, bytes):\n";
- code += Indent + "builder.StartVector(";
- code += NumToString(elem_size);
- code += ", len(bytes), " + NumToString(alignment);
- code += ")\n";
- code += Indent + "builder.head = builder.head - len(bytes)\n";
- code += Indent + "builder.Bytes[builder.head : builder.head + len(bytes)]";
- code += " = bytes\n";
- code += Indent + "return builder.EndVector()\n";
+ // Generate method without struct and field name.
+ code += "def Make" + MakeCamel(NormalizedName(field)) +
+ "VectorFromBytes(builder, bytes):\n";
+ code += Indent + "return " + NormalizedName(struct_def) + "Make" +
+ MakeCamel(NormalizedName(field)) +
+ "VectorFromBytes(builder, bytes)\n";
}
// Get the offset of the end of a table.
void GetEndOffsetOnTable(const StructDef &struct_def, std::string *code_ptr) {
auto &code = *code_ptr;
- code += "def End(builder): return builder.EndObject()\n";
- // Add alias with the old name.
- code += "def " + NormalizedName(struct_def) + "End(builder):\n";
- code += Indent +
- "\"\"\"This method is deprecated. Please switch to End.\"\"\"\n";
- code += Indent + "return End(builder)";
+ // Generate method with struct name.
+ code += "def " + NormalizedName(struct_def) + "End";
+ code += "(builder): ";
+ code += "return builder.EndObject()\n";
+
+ // Generate method without struct name.
+ code += "def End(builder):\n";
+ code += Indent + "return " + NormalizedName(struct_def) + "End(builder)";
}
// Generate the receiver for function signatures.
@@ -765,7 +767,7 @@ class PythonGenerator : public BaseGenerator {
BuildFieldOfTable(struct_def, field, offset, code_ptr);
if (IsVector(field.value.type)) {
BuildVectorOfTable(struct_def, field, code_ptr);
- BuildVectorOfTableFromBytes(field, code_ptr);
+ BuildVectorOfTableFromBytes(struct_def, field, code_ptr);
}
}
@@ -1294,8 +1296,9 @@ class PythonGenerator : public BaseGenerator {
code_prefix +=
GenIndents(2) + "if self." + field_instance_name + " is not None:";
if (field.value.type.struct_def->fixed) {
- code_prefix += GenIndents(3) + "Start" + field_accessor_name +
- "Vector(builder, len(self." + field_instance_name + "))";
+ code_prefix += GenIndents(3) + struct_name + "Start" +
+ field_accessor_name + "Vector(builder, len(self." +
+ field_instance_name + "))";
code_prefix += GenIndents(3) + "for i in reversed(range(len(self." +
field_instance_name + "))):";
code_prefix +=
@@ -1311,8 +1314,9 @@ class PythonGenerator : public BaseGenerator {
code_prefix += GenIndents(4) + field_instance_name + "list.append(self." +
field_instance_name + "[i].Pack(builder))";
- code_prefix += GenIndents(3) + "Start" + field_accessor_name +
- "Vector(builder, len(self." + field_instance_name + "))";
+ code_prefix += GenIndents(3) + struct_name + "Start" +
+ field_accessor_name + "Vector(builder, len(self." +
+ field_instance_name + "))";
code_prefix += GenIndents(3) + "for i in reversed(range(len(self." +
field_instance_name + "))):";
code_prefix += GenIndents(4) + "builder.PrependUOffsetTRelative" + "(" +
@@ -1323,8 +1327,8 @@ class PythonGenerator : public BaseGenerator {
// Adds the field into the struct.
code += GenIndents(2) + "if self." + field_instance_name + " is not None:";
- code += GenIndents(3) + "Add" + field_accessor_name + "(builder, " +
- field_instance_name + ")";
+ code += GenIndents(3) + struct_name + "Add" + field_accessor_name +
+ "(builder, " + field_instance_name + ")";
}
void GenPackForScalarVectorFieldHelper(const StructDef &struct_def,
@@ -1336,7 +1340,7 @@ class PythonGenerator : public BaseGenerator {
auto struct_name = NormalizedName(struct_def);
auto vectortype = field.value.type.VectorType();
- code += GenIndents(indents) + "Start" + field_accessor_name +
+ code += GenIndents(indents) + struct_name + "Start" + field_accessor_name +
"Vector(builder, len(self." + field_instance_name + "))";
code += GenIndents(indents) + "for i in reversed(range(len(self." +
field_instance_name + "))):";
@@ -1373,8 +1377,8 @@ class PythonGenerator : public BaseGenerator {
// Adds the field into the struct.
code += GenIndents(2) + "if self." + field_instance_name + " is not None:";
- code += GenIndents(3) + "Add" + field_accessor_name + "(builder, " +
- field_instance_name + ")";
+ code += GenIndents(3) + struct_name + "Add" + field_accessor_name +
+ "(builder, " + field_instance_name + ")";
// Creates the field.
code_prefix +=
@@ -1436,8 +1440,8 @@ class PythonGenerator : public BaseGenerator {
GenIndents(2) + "if self." + field_instance_name + " is not None:";
}
- code += GenIndents(3) + "Add" + field_accessor_name + "(builder, " +
- field_instance_name + ")";
+ code += GenIndents(3) + struct_name + "Add" + field_accessor_name +
+ "(builder, " + field_instance_name + ")";
}
void GenPackForUnionField(const StructDef &struct_def, const FieldDef &field,
@@ -1456,8 +1460,8 @@ class PythonGenerator : public BaseGenerator {
code_prefix += GenIndents(3) + field_instance_name + " = self." +
field_instance_name + ".Pack(builder)";
code += GenIndents(2) + "if self." + field_instance_name + " is not None:";
- code += GenIndents(3) + "Add" + field_accessor_name + "(builder, " +
- field_instance_name + ")";
+ code += GenIndents(3) + struct_name + "Add" + field_accessor_name +
+ "(builder, " + field_instance_name + ")";
}
void GenPackForTable(const StructDef &struct_def, std::string *code_ptr) {
@@ -1468,7 +1472,7 @@ class PythonGenerator : public BaseGenerator {
GenReceiverForObjectAPI(struct_def, code_ptr);
code_base += "Pack(self, builder):";
- code += GenIndents(2) + "Start(builder)";
+ code += GenIndents(2) + struct_name + "Start(builder)";
for (auto it = struct_def.fields.vec.begin();
it != struct_def.fields.vec.end(); ++it) {
auto &field = **it;
@@ -1507,21 +1511,22 @@ class PythonGenerator : public BaseGenerator {
")";
code += GenIndents(2) + "if self." + field_instance_name +
" is not None:";
- code += GenIndents(3) + "Add" + field_accessor_name + "(builder, " +
- field_instance_name + ")";
+ code += GenIndents(3) + struct_name + "Add" + field_accessor_name +
+ "(builder, " + field_instance_name + ")";
break;
}
default:
// Generates code for scalar values. If the value equals to the
// default value, builder will automatically ignore it. So we don't
// need to check the value ahead.
- code += GenIndents(2) + "Add" + field_accessor_name +
+ code += GenIndents(2) + struct_name + "Add" + field_accessor_name +
"(builder, self." + field_instance_name + ")";
break;
}
}
- code += GenIndents(2) + struct_instance_name + " = " + "End(builder)";
+ code += GenIndents(2) + struct_instance_name + " = " + struct_name +
+ "End(builder)";
code += GenIndents(2) + "return " + struct_instance_name;
code_base += code_prefix + code;
@@ -1595,6 +1600,8 @@ class PythonGenerator : public BaseGenerator {
// Creates an union object based on union type.
void GenUnionCreator(const EnumDef &enum_def, std::string *code_ptr) {
+ if (enum_def.generated) return;
+
auto &code = *code_ptr;
auto union_name = MakeUpperCamel(enum_def);
diff --git a/chromium/third_party/flatbuffers/src/src/idl_gen_rust.cpp b/chromium/third_party/flatbuffers/src/src/idl_gen_rust.cpp
index 6614be74af2..813a4c199bf 100644
--- a/chromium/third_party/flatbuffers/src/src/idl_gen_rust.cpp
+++ b/chromium/third_party/flatbuffers/src/src/idl_gen_rust.cpp
@@ -51,6 +51,10 @@ std::string MakeUpper(const std::string &in) {
return s;
}
+std::string UnionTypeFieldName(const FieldDef &field) {
+ return MakeSnakeCase(field.name + "_type");
+}
+
// Encapsulate all logical field types in this enum. This allows us to write
// field logic based on type switches, instead of branches on the properties
// set on the Type.
@@ -178,31 +182,9 @@ FullType GetFullType(const Type &type) {
return ftBool;
}
-// If the second parameter is false then wrap the first with Option<...>
-std::string WrapInOptionIfNotRequired(std::string s, bool required) {
- if (required) {
- return s;
- } else {
- return "Option<" + s + ">";
- }
-}
-
-// If the second parameter is false then add .unwrap()
-std::string AddUnwrapIfRequired(std::string s, bool required) {
- if (required) {
- return s + ".unwrap()";
- } else {
- return s;
- }
-}
-
bool IsBitFlagsEnum(const EnumDef &enum_def) {
return enum_def.attributes.Lookup("bit_flags") != nullptr;
}
-bool IsBitFlagsEnum(const FieldDef &field) {
- EnumDef *ed = field.value.type.enum_def;
- return ed && IsBitFlagsEnum(*ed);
-}
// TableArgs make required non-scalars "Option<_>".
// TODO(cneo): Rework how we do defaults and stuff.
@@ -210,6 +192,63 @@ bool IsOptionalToBuilder(const FieldDef &field) {
return field.IsOptional() || !IsScalar(field.value.type.base_type);
}
+bool GenerateRustModuleRootFile(const Parser &parser,
+ const std::string &output_dir) {
+ // We gather the symbols into a tree of namespaces (which are rust mods) and
+ // generate a file that gathers them all.
+ struct Module {
+ std::map<std::string, Module> sub_modules;
+ std::vector<std::string> generated_files;
+ // Add a symbol into the tree.
+ void Insert(const Definition *s, const std::string suffix) {
+ const Definition &symbol = *s;
+ Module *current_module = this;
+ for (auto it = symbol.defined_namespace->components.begin();
+ it != symbol.defined_namespace->components.end(); it++) {
+ std::string ns_component = MakeSnakeCase(*it);
+ current_module = &current_module->sub_modules[ns_component];
+ }
+ current_module->generated_files.push_back(MakeSnakeCase(symbol.name) +
+ suffix);
+ }
+ // Recursively create the importer file.
+ void GenerateImports(CodeWriter &code) {
+ for (auto it = sub_modules.begin(); it != sub_modules.end(); it++) {
+ code += "pub mod " + it->first + " {";
+ code.IncrementIdentLevel();
+ code += "use super::*;";
+ it->second.GenerateImports(code);
+ code.DecrementIdentLevel();
+ code += "} // " + it->first;
+ }
+ for (auto it = generated_files.begin(); it != generated_files.end();
+ it++) {
+ code += "mod " + *it + ";";
+ code += "pub use self::" + *it + "::*;";
+ }
+ }
+ };
+ Module root_module;
+ for (auto it = parser.enums_.vec.begin(); it != parser.enums_.vec.end();
+ it++) {
+ root_module.Insert(*it, parser.opts.filename_suffix);
+ }
+ for (auto it = parser.structs_.vec.begin(); it != parser.structs_.vec.end();
+ it++) {
+ root_module.Insert(*it, parser.opts.filename_suffix);
+ }
+ CodeWriter code(" ");
+ // TODO(caspern): Move generated warning out of BaseGenerator.
+ code +=
+ "// Automatically generated by the Flatbuffers compiler. "
+ "Do not modify.";
+ root_module.GenerateImports(code);
+ const bool success =
+ SaveFile((output_dir + "mod.rs").c_str(), code.ToString(), false);
+ code.Clear();
+ return success;
+}
+
namespace rust {
class RustGenerator : public BaseGenerator {
@@ -320,13 +359,80 @@ class RustGenerator : public BaseGenerator {
"ENUM_MAX",
"ENUM_MIN",
"ENUM_VALUES",
+ // clang-format on
};
for (auto kw = keywords; *kw; kw++) keywords_.insert(*kw);
+ code_.SetPadding(" ");
+ }
+
+ bool generate() {
+ if (parser_.opts.one_file) {
+ return GenerateOneFile();
+ } else {
+ return GenerateIndividualFiles();
+ }
+ }
+
+ template<typename T>
+ bool GenerateSymbols(const SymbolTable<T> &symbols,
+ std::function<void(const T &)> gen_symbol) {
+ for (auto it = symbols.vec.begin(); it != symbols.vec.end(); it++) {
+ const T &symbol = **it;
+ if (symbol.generated) continue;
+ code_.Clear();
+ code_ += "// " + std::string(FlatBuffersGeneratedWarning());
+ code_ += "extern crate flatbuffers;";
+ code_ += "use std::mem;";
+ code_ += "use std::cmp::Ordering;";
+ code_ += "use self::flatbuffers::{EndianScalar, Follow};";
+ code_ += "use super::*;";
+ cur_name_space_ = symbol.defined_namespace;
+ gen_symbol(symbol);
+ std::stringstream file_path;
+ file_path << path_;
+ // Create filepath.
+ if (symbol.defined_namespace)
+ for (auto i = symbol.defined_namespace->components.begin();
+ i != symbol.defined_namespace->components.end(); i++) {
+ file_path << MakeSnakeCase(*i) << kPathSeparator;
+ EnsureDirExists(file_path.str());
+ }
+ file_path << MakeSnakeCase(symbol.name) << parser_.opts.filename_suffix
+ << ".rs";
+ const bool save_success =
+ SaveFile(file_path.str().c_str(), code_.ToString(),
+ /*binary=*/false);
+ if (!save_success) return false;
+ }
+ return true;
+ }
+
+ bool GenerateIndividualFiles() {
+ code_.Clear();
+ // Don't bother with imports. Use absolute paths everywhere.
+ return GenerateSymbols<EnumDef>(
+ parser_.enums_, [&](const EnumDef &e) { this->GenEnum(e); }) &&
+ GenerateSymbols<StructDef>(
+ parser_.structs_, [&](const StructDef &s) {
+ if (s.fixed) {
+ this->GenStruct(s);
+ } else {
+ this->GenTable(s);
+ if (this->parser_.opts.generate_object_based_api) {
+ this->GenTableObject(s);
+ }
+ }
+ if (this->parser_.root_struct_def_ == &s) {
+ this->GenRootTableFuncs(s);
+ }
+ });
}
+ // Generates code organized by .fbs files. This is broken legacy behavior
+ // that does not work with multiple fbs files with shared namespaces.
// Iterate through all definitions we haven't generated code for (enums,
// structs, and tables) and output them to a single file.
- bool generate() {
+ bool GenerateOneFile() {
code_.Clear();
code_ += "// " + std::string(FlatBuffersGeneratedWarning()) + "\n\n";
@@ -496,32 +602,25 @@ class RustGenerator : public BaseGenerator {
// example: f(A, D::E) -> super::D::E
// does not include leaf object (typically a struct type).
- size_t i = 0;
std::stringstream stream;
-
- auto s = src->components.begin();
- auto d = dst->components.begin();
- for (;;) {
- if (s == src->components.end()) { break; }
- if (d == dst->components.end()) { break; }
- if (*s != *d) { break; }
- ++s;
- ++d;
- ++i;
- }
-
- for (; s != src->components.end(); ++s) { stream << "super::"; }
- for (; d != dst->components.end(); ++d) {
- stream << MakeSnakeCase(*d) + "::";
- }
+ size_t common = 0;
+ std::vector<std::string> s, d;
+ if (src) s = src->components;
+ if (dst) d = dst->components;
+ while (common < s.size() && common < d.size() && s[common] == d[common])
+ common++;
+ // If src namespace is empty, this must be an absolute path.
+ for (size_t i = common; i < s.size(); i++) stream << "super::";
+ for (size_t i = common; i < d.size(); i++)
+ stream << MakeSnakeCase(d[i]) + "::";
return stream.str();
}
// Generate a comment from the schema.
void GenComment(const std::vector<std::string> &dc, const char *prefix = "") {
- std::string text;
- ::flatbuffers::GenComment(dc, &text, nullptr, prefix);
- code_ += text + "\\";
+ for (auto it = dc.begin(); it != dc.end(); it++) {
+ code_ += std::string(prefix) + "///" + *it;
+ }
}
// Return a Rust type from the table in idl.h.
@@ -618,7 +717,9 @@ class RustGenerator : public BaseGenerator {
const auto &ev = **it;
code_.SetValue("VARIANT", Name(ev));
code_.SetValue("VALUE", enum_def.ToString(ev));
+ code_.IncrementIdentLevel();
cb(ev);
+ code_.DecrementIdentLevel();
}
}
void ForAllEnumValues(const EnumDef &enum_def, std::function<void()> cb) {
@@ -655,8 +756,8 @@ class RustGenerator : public BaseGenerator {
code_ += " #[derive(Default)]";
code_ += " pub struct {{ENUM_NAME}}: {{BASE_TYPE}} {";
ForAllEnumValues1(enum_def, [&](const EnumVal &ev) {
- this->GenComment(ev.doc_comment, " ");
- code_ += " const {{VARIANT}} = {{VALUE}};";
+ this->GenComment(ev.doc_comment, " ");
+ code_ += " const {{VARIANT}} = {{VALUE}};";
});
code_ += " }";
code_ += " }";
@@ -686,7 +787,7 @@ class RustGenerator : public BaseGenerator {
code_ += "pub const ENUM_VALUES_{{ENUM_NAME_CAPS}}: [{{ENUM_NAME}}; " +
num_fields + "] = [";
ForAllEnumValues1(enum_def, [&](const EnumVal &ev) {
- code_ += " " + GetEnumValue(enum_def, ev) + ",";
+ code_ += GetEnumValue(enum_def, ev) + ",";
});
code_ += "];";
code_ += "";
@@ -703,21 +804,21 @@ class RustGenerator : public BaseGenerator {
code_ += "#[allow(non_upper_case_globals)]";
code_ += "impl {{ENUM_NAME}} {";
ForAllEnumValues1(enum_def, [&](const EnumVal &ev) {
- this->GenComment(ev.doc_comment, " ");
- code_ += " pub const {{VARIANT}}: Self = Self({{VALUE}});";
+ this->GenComment(ev.doc_comment);
+ code_ += "pub const {{VARIANT}}: Self = Self({{VALUE}});";
});
code_ += "";
// Generate Associated constants
code_ += " pub const ENUM_MIN: {{BASE_TYPE}} = {{ENUM_MIN_BASE_VALUE}};";
code_ += " pub const ENUM_MAX: {{BASE_TYPE}} = {{ENUM_MAX_BASE_VALUE}};";
code_ += " pub const ENUM_VALUES: &'static [Self] = &[";
- ForAllEnumValues(enum_def, [&]() { code_ += " Self::{{VARIANT}},"; });
+ ForAllEnumValues(enum_def, [&]() { code_ += " Self::{{VARIANT}},"; });
code_ += " ];";
code_ += " /// Returns the variant's name or \"\" if unknown.";
code_ += " pub fn variant_name(self) -> Option<&'static str> {";
code_ += " match self {";
ForAllEnumValues(enum_def, [&]() {
- code_ += " Self::{{VARIANT}} => Some(\"{{VARIANT}}\"),";
+ code_ += " Self::{{VARIANT}} => Some(\"{{VARIANT}}\"),";
});
code_ += " _ => None,";
code_ += " }";
@@ -813,7 +914,9 @@ class RustGenerator : public BaseGenerator {
code_.SetValue("U_ELEMENT_NAME", MakeSnakeCase(Name(enum_val)));
code_.SetValue("U_ELEMENT_TABLE_TYPE",
NamespacedNativeName(*enum_val.union_type.struct_def));
+ code_.IncrementIdentLevel();
cb();
+ code_.DecrementIdentLevel();
}
}
void GenUnionObject(const EnumDef &enum_def) {
@@ -822,12 +925,14 @@ class RustGenerator : public BaseGenerator {
code_.SetValue("NATIVE_NAME", NativeName(enum_def));
// Generate native union.
+ code_ += "#[allow(clippy::upper_case_acronyms)]"; // NONE's spelling is
+ // intended.
code_ += "#[non_exhaustive]";
code_ += "#[derive(Debug, Clone, PartialEq)]";
code_ += "pub enum {{NATIVE_NAME}} {";
code_ += " NONE,";
ForAllUnionObjectVariantsBesidesNone(enum_def, [&] {
- code_ += " {{NATIVE_VARIANT}}(Box<{{U_ELEMENT_TABLE_TYPE}}>),";
+ code_ += "{{NATIVE_VARIANT}}(Box<{{U_ELEMENT_TABLE_TYPE}}>),";
});
code_ += "}";
// Generate Default (NONE).
@@ -847,7 +952,7 @@ class RustGenerator : public BaseGenerator {
code_ += " Self::NONE => {{ENUM_NAME}}::NONE,";
ForAllUnionObjectVariantsBesidesNone(enum_def, [&] {
code_ +=
- " Self::{{NATIVE_VARIANT}}(_) => {{ENUM_NAME}}::"
+ " Self::{{NATIVE_VARIANT}}(_) => {{ENUM_NAME}}::"
"{{VARIANT_NAME}},";
});
code_ += " }";
@@ -860,9 +965,8 @@ class RustGenerator : public BaseGenerator {
code_ += " match self {";
code_ += " Self::NONE => None,";
ForAllUnionObjectVariantsBesidesNone(enum_def, [&] {
- code_ +=
- " Self::{{NATIVE_VARIANT}}(v) => "
- "Some(v.pack(fbb).as_union_value()),";
+ code_ += " Self::{{NATIVE_VARIANT}}(v) => \\";
+ code_ += "Some(v.pack(fbb).as_union_value()),";
});
code_ += " }";
code_ += " }";
@@ -871,44 +975,44 @@ class RustGenerator : public BaseGenerator {
ForAllUnionObjectVariantsBesidesNone(enum_def, [&] {
// Move accessor.
code_ +=
- " /// If the union variant matches, return the owned "
+ "/// If the union variant matches, return the owned "
"{{U_ELEMENT_TABLE_TYPE}}, setting the union to NONE.";
code_ +=
- " pub fn take_{{U_ELEMENT_NAME}}(&mut self) -> "
+ "pub fn take_{{U_ELEMENT_NAME}}(&mut self) -> "
"Option<Box<{{U_ELEMENT_TABLE_TYPE}}>> {";
- code_ += " if let Self::{{NATIVE_VARIANT}}(_) = self {";
- code_ += " let v = std::mem::replace(self, Self::NONE);";
- code_ += " if let Self::{{NATIVE_VARIANT}}(w) = v {";
- code_ += " Some(w)";
- code_ += " } else {";
- code_ += " unreachable!()";
- code_ += " }";
+ code_ += " if let Self::{{NATIVE_VARIANT}}(_) = self {";
+ code_ += " let v = std::mem::replace(self, Self::NONE);";
+ code_ += " if let Self::{{NATIVE_VARIANT}}(w) = v {";
+ code_ += " Some(w)";
code_ += " } else {";
- code_ += " None";
+ code_ += " unreachable!()";
code_ += " }";
+ code_ += " } else {";
+ code_ += " None";
code_ += " }";
+ code_ += "}";
// Immutable reference accessor.
code_ +=
- " /// If the union variant matches, return a reference to the "
+ "/// If the union variant matches, return a reference to the "
"{{U_ELEMENT_TABLE_TYPE}}.";
code_ +=
- " pub fn as_{{U_ELEMENT_NAME}}(&self) -> "
+ "pub fn as_{{U_ELEMENT_NAME}}(&self) -> "
"Option<&{{U_ELEMENT_TABLE_TYPE}}> {";
code_ +=
- " if let Self::{{NATIVE_VARIANT}}(v) = self "
+ " if let Self::{{NATIVE_VARIANT}}(v) = self "
"{ Some(v.as_ref()) } else { None }";
- code_ += " }";
+ code_ += "}";
// Mutable reference accessor.
code_ +=
- " /// If the union variant matches, return a mutable reference"
+ "/// If the union variant matches, return a mutable reference"
" to the {{U_ELEMENT_TABLE_TYPE}}.";
code_ +=
- " pub fn as_{{U_ELEMENT_NAME}}_mut(&mut self) -> "
+ "pub fn as_{{U_ELEMENT_NAME}}_mut(&mut self) -> "
"Option<&mut {{U_ELEMENT_TABLE_TYPE}}> {";
code_ +=
- " if let Self::{{NATIVE_VARIANT}}(v) = self "
+ " if let Self::{{NATIVE_VARIANT}}(v) = self "
"{ Some(v.as_mut()) } else { None }";
- code_ += " }";
+ code_ += "}";
});
code_ += "}"; // End union methods impl.
}
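Note: the object-API union accessors above are likewise unindented in the templates, with `IncrementIdentLevel` supplying the spacing. Assuming a hypothetical union `Equipment { Weapon }`, the generated native type and its accessors come out roughly like this self-contained sketch (`WeaponT` is a stand-in for the object-API table type):

```rust
// Sketch of the emitted native union type and its move/ref/mut accessors.
#[derive(Debug, Clone, PartialEq)]
pub struct WeaponT { pub damage: i16 }

#[allow(clippy::upper_case_acronyms)] // NONE's spelling is intended.
#[non_exhaustive]
#[derive(Debug, Clone, PartialEq)]
pub enum EquipmentT {
  NONE,
  Weapon(Box<WeaponT>),
}

impl EquipmentT {
  /// If the union variant matches, return the owned WeaponT, setting the union to NONE.
  pub fn take_weapon(&mut self) -> Option<Box<WeaponT>> {
    if let Self::Weapon(_) = self {
      let v = std::mem::replace(self, Self::NONE);
      if let Self::Weapon(w) = v { Some(w) } else { unreachable!() }
    } else {
      None
    }
  }
  /// If the union variant matches, return a reference to the WeaponT.
  pub fn as_weapon(&self) -> Option<&WeaponT> {
    if let Self::Weapon(v) = self { Some(v.as_ref()) } else { None }
  }
  /// If the union variant matches, return a mutable reference to the WeaponT.
  pub fn as_weapon_mut(&mut self) -> Option<&mut WeaponT> {
    if let Self::Weapon(v) = self { Some(v.as_mut()) } else { None }
  }
}
```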
@@ -1478,10 +1582,11 @@ class RustGenerator : public BaseGenerator {
// Generates a fully-qualified name getter for use with --gen-name-strings
void GenFullyQualifiedNameGetter(const StructDef &struct_def,
const std::string &name) {
- code_ += " pub const fn get_fully_qualified_name() -> &'static str {";
- code_ += " \"" +
- struct_def.defined_namespace->GetFullyQualifiedName(name) + "\"";
- code_ += " }";
+ const std::string fully_qualified_name =
+ struct_def.defined_namespace->GetFullyQualifiedName(name);
+ code_ += " pub const fn get_fully_qualified_name() -> &'static str {";
+ code_ += " \"" + fully_qualified_name + "\"";
+ code_ += " }";
code_ += "";
}
@@ -1516,7 +1621,10 @@ class RustGenerator : public BaseGenerator {
code_.SetValue("OFFSET_VALUE", NumToString(field.value.offset));
code_.SetValue("FIELD_NAME", Name(field));
code_.SetValue("BLDR_DEF_VAL", GetDefaultValue(field, kBuilder));
+ code_.SetValue("DISCRIMINANT", UnionTypeFieldName(field));
+ code_.IncrementIdentLevel();
cb(field);
+ code_.DecrementIdentLevel();
};
const auto &fields = struct_def.fields.vec;
if (reversed) {
@@ -1545,41 +1653,47 @@ class RustGenerator : public BaseGenerator {
code_ += "}";
code_ += "";
code_ += "impl<'a> flatbuffers::Follow<'a> for {{STRUCT_NAME}}<'a> {";
- code_ += " type Inner = {{STRUCT_NAME}}<'a>;";
- code_ += " #[inline]";
- code_ += " fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {";
- code_ += " Self { _tab: flatbuffers::Table { buf, loc } }";
- code_ += " }";
+ code_ += " type Inner = {{STRUCT_NAME}}<'a>;";
+ code_ += " #[inline]";
+ code_ += " fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {";
+ code_ += " Self { _tab: flatbuffers::Table { buf, loc } }";
+ code_ += " }";
code_ += "}";
code_ += "";
code_ += "impl<'a> {{STRUCT_NAME}}<'a> {";
+ // Generate field id constants.
+ ForAllTableFields(struct_def, [&](const FieldDef &unused) {
+ (void)unused;
+ code_ +=
+ "pub const {{OFFSET_NAME}}: flatbuffers::VOffsetT = "
+ "{{OFFSET_VALUE}};";
+ });
+ code_ += "";
+
if (parser_.opts.generate_name_strings) {
GenFullyQualifiedNameGetter(struct_def, struct_def.name);
}
- code_ += " #[inline]";
+ code_ += " #[inline]";
code_ +=
- " pub fn init_from_table(table: flatbuffers::Table<'a>) -> "
+ " pub fn init_from_table(table: flatbuffers::Table<'a>) -> "
"Self {";
- code_ += " {{STRUCT_NAME}} { _tab: table }";
- code_ += " }";
+ code_ += " {{STRUCT_NAME}} { _tab: table }";
+ code_ += " }";
// Generate a convenient create* function that uses the above builder
// to create a table in one function call.
code_.SetValue("MAYBE_US", struct_def.fields.vec.size() == 0 ? "_" : "");
code_.SetValue("MAYBE_LT",
TableBuilderArgsNeedsLifetime(struct_def) ? "<'args>" : "");
- code_ += " #[allow(unused_mut)]";
- code_ += " pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(";
- code_ +=
- " _fbb: "
- "&'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,";
- code_ +=
- " {{MAYBE_US}}args: &'args {{STRUCT_NAME}}Args{{MAYBE_LT}})"
- " -> flatbuffers::WIPOffset<{{STRUCT_NAME}}<'bldr>> {";
+ code_ += " #[allow(unused_mut)]";
+ code_ += " pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(";
+ code_ += " _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,";
+ code_ += " {{MAYBE_US}}args: &'args {{STRUCT_NAME}}Args{{MAYBE_LT}}";
+ code_ += " ) -> flatbuffers::WIPOffset<{{STRUCT_NAME}}<'bldr>> {";
- code_ += " let mut builder = {{STRUCT_NAME}}Builder::new(_fbb);";
+ code_ += " let mut builder = {{STRUCT_NAME}}Builder::new(_fbb);";
for (size_t size = struct_def.sortbysize ? sizeof(largest_scalar_t) : 1;
size; size /= 2) {
ForAllTableFields(
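Note: the hunk above moves the generated field-id (`VT_*`) constants to the top of the table impl, ahead of the accessors and verifier that refer to them, and reflows the `create` signature. For a hypothetical table `Monster`, the emitted frame looks roughly like the following sketch (assumes the `flatbuffers` crate of this version; field names and offsets are illustrative):

```rust
// Sketch of the generated table frame for a hypothetical `Monster` table.
pub struct Monster<'a> {
  pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for Monster<'a> {
  type Inner = Monster<'a>;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    Self { _tab: flatbuffers::Table { buf, loc } }
  }
}

impl<'a> Monster<'a> {
  // Field id constants now come first so later code can refer to them.
  pub const VT_HP: flatbuffers::VOffsetT = 4;
  pub const VT_NAME: flatbuffers::VOffsetT = 6;

  #[inline]
  pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
    Monster { _tab: table }
  }
}
```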
@@ -1590,23 +1704,23 @@ class RustGenerator : public BaseGenerator {
return;
if (IsOptionalToBuilder(field)) {
code_ +=
- " if let Some(x) = args.{{FIELD_NAME}} "
+ " if let Some(x) = args.{{FIELD_NAME}} "
"{ builder.add_{{FIELD_NAME}}(x); }";
} else {
- code_ += " builder.add_{{FIELD_NAME}}(args.{{FIELD_NAME}});";
+ code_ += " builder.add_{{FIELD_NAME}}(args.{{FIELD_NAME}});";
}
},
/*reverse=*/true);
}
- code_ += " builder.finish()";
- code_ += " }";
+ code_ += " builder.finish()";
+ code_ += " }";
code_ += "";
// Generate Object API Packer function.
if (parser_.opts.generate_object_based_api) {
// TODO(cneo): Replace more for loops with ForAllX stuff.
// TODO(cneo): Manage indentation with IncrementIdentLevel?
code_.SetValue("OBJECT_NAME", NativeName(struct_def));
- code_ += " pub fn unpack(&self) -> {{OBJECT_NAME}} {";
+ code_ += " pub fn unpack(&self) -> {{OBJECT_NAME}} {";
ForAllObjectTableFields(struct_def, [&](const FieldDef &field) {
const Type &type = field.value.type;
switch (GetFullType(type)) {
@@ -1614,7 +1728,7 @@ class RustGenerator : public BaseGenerator {
case ftBool:
case ftFloat:
case ftEnumKey: {
- code_ += " let {{FIELD_NAME}} = self.{{FIELD_NAME}}();";
+ code_ += " let {{FIELD_NAME}} = self.{{FIELD_NAME}}();";
return;
}
case ftUnionKey: return;
@@ -1623,27 +1737,22 @@ class RustGenerator : public BaseGenerator {
code_.SetValue("ENUM_NAME", WrapInNameSpace(enum_def));
code_.SetValue("NATIVE_ENUM_NAME", NamespacedNativeName(enum_def));
code_ +=
- " let {{FIELD_NAME}} = match "
- "self.{{FIELD_NAME}}_type() {";
- code_ +=
- " {{ENUM_NAME}}::NONE =>"
- " {{NATIVE_ENUM_NAME}}::NONE,";
+ " let {{FIELD_NAME}} = match self.{{FIELD_NAME}}_type() {";
+ code_ += " {{ENUM_NAME}}::NONE => {{NATIVE_ENUM_NAME}}::NONE,";
ForAllUnionObjectVariantsBesidesNone(enum_def, [&] {
code_ +=
- " {{ENUM_NAME}}::{{VARIANT_NAME}} => "
+ " {{ENUM_NAME}}::{{VARIANT_NAME}} => "
"{{NATIVE_ENUM_NAME}}::{{NATIVE_VARIANT}}(Box::new(";
+ code_ += " self.{{FIELD_NAME}}_as_{{U_ELEMENT_NAME}}()";
code_ +=
- " self.{{FIELD_NAME}}_as_"
- "{{U_ELEMENT_NAME}}()";
- code_ +=
- " .expect(\"Invalid union table, "
+ " .expect(\"Invalid union table, "
"expected `{{ENUM_NAME}}::{{VARIANT_NAME}}`.\")";
- code_ += " .unpack()";
- code_ += " )),";
+ code_ += " .unpack()";
+ code_ += " )),";
});
// Maybe we shouldn't throw away unknown discriminants?
- code_ += " _ => {{NATIVE_ENUM_NAME}}::NONE,";
- code_ += " };";
+ code_ += " _ => {{NATIVE_ENUM_NAME}}::NONE,";
+ code_ += " };";
return;
}
// The rest of the types need special handling based on if the field
@@ -1698,32 +1807,25 @@ class RustGenerator : public BaseGenerator {
}
}
if (field.IsOptional()) {
- code_ += " let {{FIELD_NAME}} = self.{{FIELD_NAME}}().map(|x| {";
- code_ += " {{EXPR}}";
- code_ += " });";
+ code_ += " let {{FIELD_NAME}} = self.{{FIELD_NAME}}().map(|x| {";
+ code_ += " {{EXPR}}";
+ code_ += " });";
} else {
- code_ += " let {{FIELD_NAME}} = {";
- code_ += " let x = self.{{FIELD_NAME}}();";
- code_ += " {{EXPR}}";
- code_ += " };";
+ code_ += " let {{FIELD_NAME}} = {";
+ code_ += " let x = self.{{FIELD_NAME}}();";
+ code_ += " {{EXPR}}";
+ code_ += " };";
}
});
- code_ += " {{OBJECT_NAME}} {";
+ code_ += " {{OBJECT_NAME}} {";
ForAllObjectTableFields(struct_def, [&](const FieldDef &field) {
if (field.value.type.base_type == BASE_TYPE_UTYPE) return;
- code_ += " {{FIELD_NAME}},";
+ code_ += " {{FIELD_NAME}},";
});
- code_ += " }";
code_ += " }";
+ code_ += " }";
}
- // Generate field id constants.
- ForAllTableFields(struct_def, [&](const FieldDef &unused) {
- (void)unused;
- code_ +=
- " pub const {{OFFSET_NAME}}: flatbuffers::VOffsetT = "
- "{{OFFSET_VALUE}};";
- });
if (struct_def.fields.vec.size() > 0) code_ += "";
// Generate the accessors. Each has one of two forms:
@@ -1741,11 +1843,11 @@ class RustGenerator : public BaseGenerator {
code_.SetValue("RETURN_TYPE",
GenTableAccessorFuncReturnType(field, "'a"));
- this->GenComment(field.doc_comment, " ");
- code_ += " #[inline]";
- code_ += " pub fn {{FIELD_NAME}}(&self) -> {{RETURN_TYPE}} {";
- code_ += " " + GenTableAccessorFuncBody(field, "'a");
- code_ += " }";
+ this->GenComment(field.doc_comment);
+ code_ += "#[inline]";
+ code_ += "pub fn {{FIELD_NAME}}(&self) -> {{RETURN_TYPE}} {";
+ code_ += " " + GenTableAccessorFuncBody(field, "'a");
+ code_ += "}";
// Generate a comparison function for this field if it is a key.
if (field.key) { GenKeyFieldMethods(field); }
@@ -1763,38 +1865,37 @@ class RustGenerator : public BaseGenerator {
FLATBUFFERS_ASSERT(nested_root); // Guaranteed to exist by parser.
code_.SetValue("NESTED", WrapInNameSpace(*nested_root));
- code_ += " pub fn {{FIELD_NAME}}_nested_flatbuffer(&'a self) -> \\";
+ code_ += "pub fn {{FIELD_NAME}}_nested_flatbuffer(&'a self) -> \\";
if (field.IsRequired()) {
code_ += "{{NESTED}}<'a> {";
- code_ += " let data = self.{{FIELD_NAME}}();";
- code_ += " use flatbuffers::Follow;";
+ code_ += " let data = self.{{FIELD_NAME}}();";
+ code_ += " use flatbuffers::Follow;";
code_ +=
- " <flatbuffers::ForwardsUOffset<{{NESTED}}<'a>>>"
+ " <flatbuffers::ForwardsUOffset<{{NESTED}}<'a>>>"
"::follow(data, 0)";
} else {
code_ += "Option<{{NESTED}}<'a>> {";
- code_ += " self.{{FIELD_NAME}}().map(|data| {";
- code_ += " use flatbuffers::Follow;";
+ code_ += " self.{{FIELD_NAME}}().map(|data| {";
+ code_ += " use flatbuffers::Follow;";
code_ +=
- " <flatbuffers::ForwardsUOffset<{{NESTED}}<'a>>>"
+ " <flatbuffers::ForwardsUOffset<{{NESTED}}<'a>>>"
"::follow(data, 0)";
- code_ += " })";
+ code_ += " })";
}
- code_ += " }";
+ code_ += "}";
}
});
// Explicit specializations for union accessors
ForAllTableFields(struct_def, [&](const FieldDef &field) {
if (field.value.type.base_type != BASE_TYPE_UNION) return;
- code_.SetValue("FIELD_TYPE_FIELD_NAME", field.name);
ForAllUnionVariantsBesidesNone(
*field.value.type.enum_def, [&](const EnumVal &unused) {
(void)unused;
- code_ += " #[inline]";
- code_ += " #[allow(non_snake_case)]";
+ code_ += "#[inline]";
+ code_ += "#[allow(non_snake_case)]";
code_ +=
- " pub fn {{FIELD_NAME}}_as_{{U_ELEMENT_NAME}}(&self) -> "
+ "pub fn {{FIELD_NAME}}_as_{{U_ELEMENT_NAME}}(&self) -> "
"Option<{{U_ELEMENT_TABLE_TYPE}}<'a>> {";
// If the user defined schemas name a field that clashes with a
// language reserved word, flatc will try to escape the field name
@@ -1808,24 +1909,22 @@ class RustGenerator : public BaseGenerator {
//
// To avoid this problem the type field name is used unescaped here:
code_ +=
- " if self.{{FIELD_TYPE_FIELD_NAME}}_type() == "
- "{{U_ELEMENT_ENUM_TYPE}} {";
+ " if self.{{DISCRIMINANT}}() == {{U_ELEMENT_ENUM_TYPE}} {";
// The following logic is not tested in the integration test,
// as of April 10, 2020
if (field.IsRequired()) {
- code_ += " let u = self.{{FIELD_NAME}}();";
- code_ +=
- " Some({{U_ELEMENT_TABLE_TYPE}}::init_from_table(u))";
+ code_ += " let u = self.{{FIELD_NAME}}();";
+ code_ += " Some({{U_ELEMENT_TABLE_TYPE}}::init_from_table(u))";
} else {
code_ +=
- " self.{{FIELD_NAME}}().map("
+ " self.{{FIELD_NAME}}().map("
"{{U_ELEMENT_TABLE_TYPE}}::init_from_table)";
}
- code_ += " } else {";
- code_ += " None";
- code_ += " }";
+ code_ += " } else {";
+ code_ += " None";
code_ += " }";
+ code_ += "}";
code_ += "";
});
});
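Note: one behavioral change in the accessor hunk above is that the union variant accessors now test `self.{{DISCRIMINANT}}()`, the raw `<field>_type()` getter captured per field, instead of rebuilding the name from the possibly keyword-escaped value field. The shape of the emitted accessor, shown with hypothetical stand-in types rather than the real flatbuffers table wrapper, is roughly:

```rust
// Self-contained sketch of the emitted union-variant accessor shape.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Equipment(pub u8);
#[allow(non_upper_case_globals)]
impl Equipment {
  pub const NONE: Self = Self(0);
  pub const Weapon: Self = Self(1);
}

pub struct Weapon { pub damage: i16 }

pub struct Monster {
  equipped_type: Equipment,
  equipped: Option<Weapon>,
}

impl Monster {
  pub fn equipped_type(&self) -> Equipment { self.equipped_type }
  pub fn equipped(&self) -> Option<&Weapon> { self.equipped.as_ref() }

  // The discriminant check uses the raw `<field>_type()` getter, even if the
  // value field's own name had to be escaped as a Rust keyword.
  #[inline]
  #[allow(non_snake_case)]
  pub fn equipped_as_Weapon(&self) -> Option<&Weapon> {
    if self.equipped_type() == Equipment::Weapon {
      self.equipped()
    } else {
      None
    }
  }
}
```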
@@ -1850,29 +1949,34 @@ class RustGenerator : public BaseGenerator {
// All types besides unions.
code_.SetValue("TY", FollowType(field.value.type, "'_"));
code_ +=
- "\n .visit_field::<{{TY}}>(&\"{{FIELD_NAME}}\", "
+ "\n .visit_field::<{{TY}}>(\"{{FIELD_NAME}}\", "
"Self::{{OFFSET_NAME}}, {{IS_REQ}})?\\";
return;
}
// Unions.
- EnumDef &union_def = *field.value.type.enum_def;
+ const EnumDef &union_def = *field.value.type.enum_def;
code_.SetValue("UNION_TYPE", WrapInNameSpace(union_def));
+ // TODO: Use the same function that generates the _type field for
+ // consistency. We do not call Name() because it inconsistently
+ // escapes keywords.
+ code_.SetValue("UNION_TYPE_OFFSET_NAME",
+ "VT_" + MakeUpper(field.name + "_type"));
code_ +=
"\n .visit_union::<{{UNION_TYPE}}, _>("
- "&\"{{FIELD_NAME}}_type\", Self::{{OFFSET_NAME}}_TYPE, "
- "&\"{{FIELD_NAME}}\", Self::{{OFFSET_NAME}}, {{IS_REQ}}, "
+ "\"{{FIELD_NAME}}_type\", Self::{{UNION_TYPE_OFFSET_NAME}}, "
+ "\"{{FIELD_NAME}}\", Self::{{OFFSET_NAME}}, {{IS_REQ}}, "
"|key, v, pos| {";
- code_ += " match key {";
+ code_ += " match key {";
ForAllUnionVariantsBesidesNone(union_def, [&](const EnumVal &unused) {
(void)unused;
code_ +=
- " {{U_ELEMENT_ENUM_TYPE}} => v.verify_union_variant::"
+ " {{U_ELEMENT_ENUM_TYPE}} => v.verify_union_variant::"
"<flatbuffers::ForwardsUOffset<{{U_ELEMENT_TABLE_TYPE}}>>("
"\"{{U_ELEMENT_ENUM_TYPE}}\", pos),";
});
- code_ += " _ => Ok(()),";
- code_ += " }";
- code_ += " })?\\";
+ code_ += " _ => Ok(()),";
+ code_ += " }";
+ code_ += " })?\\";
});
code_ += "\n .finish();";
code_ += " Ok(())";
@@ -1885,21 +1989,21 @@ class RustGenerator : public BaseGenerator {
code_ += "pub struct {{STRUCT_NAME}}Args{{MAYBE_LT}} {";
ForAllTableFields(struct_def, [&](const FieldDef &field) {
code_.SetValue("PARAM_TYPE", TableBuilderArgsDefnType(field, "'a"));
- code_ += " pub {{FIELD_NAME}}: {{PARAM_TYPE}},";
+ code_ += " pub {{FIELD_NAME}}: {{PARAM_TYPE}},";
});
code_ += "}";
// Generate an impl of Default for the *Args type:
code_ += "impl<'a> Default for {{STRUCT_NAME}}Args{{MAYBE_LT}} {";
- code_ += " #[inline]";
- code_ += " fn default() -> Self {";
- code_ += " {{STRUCT_NAME}}Args {";
+ code_ += " #[inline]";
+ code_ += " fn default() -> Self {";
+ code_ += " {{STRUCT_NAME}}Args {";
ForAllTableFields(struct_def, [&](const FieldDef &field) {
- code_ += " {{FIELD_NAME}}: {{BLDR_DEF_VAL}},\\";
+ code_ += " {{FIELD_NAME}}: {{BLDR_DEF_VAL}},\\";
code_ += field.IsRequired() ? " // required field" : "";
});
- code_ += " }";
code_ += " }";
+ code_ += " }";
code_ += "}";
// Generate a builder struct:
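Note: for the Args struct and its `Default` impl emitted just above, a hypothetical `Monster { hp: short = 100; name: string (required); }` table yields roughly the following (in real output `name` is a `flatbuffers::WIPOffset`; it is simplified to `&str` here to keep the sketch self-contained):

```rust
// Sketch of the generated Args struct and its Default impl (simplified types).
pub struct MonsterArgs<'a> {
  pub hp: i16,
  pub name: Option<&'a str>, // real code: Option<flatbuffers::WIPOffset<...>>
}

impl<'a> Default for MonsterArgs<'a> {
  #[inline]
  fn default() -> Self {
    MonsterArgs {
      hp: 100,
      name: None, // required field
    }
  }
}
```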
@@ -1929,18 +2033,18 @@ class RustGenerator : public BaseGenerator {
code_.SetValue("FIELD_OFFSET", Name(struct_def) + "::" + offset);
code_.SetValue("FIELD_TYPE", TableBuilderArgsAddFuncType(field, "'b "));
code_.SetValue("FUNC_BODY", TableBuilderArgsAddFuncBody(field));
- code_ += " #[inline]";
+ code_ += "#[inline]";
code_ +=
- " pub fn add_{{FIELD_NAME}}(&mut self, {{FIELD_NAME}}: "
+ "pub fn add_{{FIELD_NAME}}(&mut self, {{FIELD_NAME}}: "
"{{FIELD_TYPE}}) {";
if (is_scalar && !field.IsOptional()) {
code_ +=
- " {{FUNC_BODY}}({{FIELD_OFFSET}}, {{FIELD_NAME}}, "
+ " {{FUNC_BODY}}({{FIELD_OFFSET}}, {{FIELD_NAME}}, "
"{{BLDR_DEF_VAL}});";
} else {
- code_ += " {{FUNC_BODY}}({{FIELD_OFFSET}}, {{FIELD_NAME}});";
+ code_ += " {{FUNC_BODY}}({{FIELD_OFFSET}}, {{FIELD_NAME}});";
}
- code_ += " }";
+ code_ += "}";
});
// Struct initializer (all fields required);
@@ -1966,7 +2070,7 @@ class RustGenerator : public BaseGenerator {
ForAllTableFields(struct_def, [&](const FieldDef &field) {
if (!field.IsRequired()) return;
code_ +=
- " self.fbb_.required(o, {{STRUCT_NAME}}::{{OFFSET_NAME}},"
+ " self.fbb_.required(o, {{STRUCT_NAME}}::{{OFFSET_NAME}},"
"\"{{FIELD_NAME}}\");";
});
code_ += " flatbuffers::WIPOffset::new(o.value())";
@@ -1983,35 +2087,33 @@ class RustGenerator : public BaseGenerator {
if (GetFullType(field.value.type) == ftUnionValue) {
// Generate a match statement to handle unions properly.
code_.SetValue("KEY_TYPE", GenTableAccessorFuncReturnType(field, ""));
- code_.SetValue("FIELD_TYPE_FIELD_NAME", field.name);
code_.SetValue("UNION_ERR",
"&\"InvalidFlatbuffer: Union discriminant"
" does not match value.\"");
- code_ += " match self.{{FIELD_NAME}}_type() {";
+ code_ += " match self.{{DISCRIMINANT}}() {";
ForAllUnionVariantsBesidesNone(
*field.value.type.enum_def, [&](const EnumVal &unused) {
(void)unused;
- code_ += " {{U_ELEMENT_ENUM_TYPE}} => {";
+ code_ += " {{U_ELEMENT_ENUM_TYPE}} => {";
code_ +=
- " if let Some(x) = "
- "self.{{FIELD_TYPE_FIELD_NAME}}_as_"
+ " if let Some(x) = "
+ "self.{{FIELD_NAME}}_as_"
"{{U_ELEMENT_NAME}}() {";
- code_ += " ds.field(\"{{FIELD_NAME}}\", &x)";
- code_ += " } else {";
- code_ +=
- " ds.field(\"{{FIELD_NAME}}\", {{UNION_ERR}})";
- code_ += " }";
- code_ += " },";
+ code_ += " ds.field(\"{{FIELD_NAME}}\", &x)";
+ code_ += " } else {";
+ code_ += " ds.field(\"{{FIELD_NAME}}\", {{UNION_ERR}})";
+ code_ += " }";
+ code_ += " },";
});
- code_ += " _ => {";
- code_ += " let x: Option<()> = None;";
- code_ += " ds.field(\"{{FIELD_NAME}}\", &x)";
- code_ += " },";
- code_ += " };";
+ code_ += " _ => {";
+ code_ += " let x: Option<()> = None;";
+ code_ += " ds.field(\"{{FIELD_NAME}}\", &x)";
+ code_ += " },";
+ code_ += " };";
} else {
// Most fields.
- code_ += " ds.field(\"{{FIELD_NAME}}\", &self.{{FIELD_NAME}}());";
+ code_ += " ds.field(\"{{FIELD_NAME}}\", &self.{{FIELD_NAME}}());";
}
});
code_ += " ds.finish()";
@@ -2031,7 +2133,7 @@ class RustGenerator : public BaseGenerator {
// Union objects combine both the union discriminant and value, so we
// skip making a field for the discriminant.
if (field.value.type.base_type == BASE_TYPE_UTYPE) return;
- code_ += " pub {{FIELD_NAME}}: {{FIELD_OBJECT_TYPE}},";
+ code_ += "pub {{FIELD_NAME}}: {{FIELD_OBJECT_TYPE}},";
});
code_ += "}";
@@ -2041,7 +2143,7 @@ class RustGenerator : public BaseGenerator {
ForAllObjectTableFields(table, [&](const FieldDef &field) {
if (field.value.type.base_type == BASE_TYPE_UTYPE) return;
std::string default_value = GetDefaultValue(field, kObject);
- code_ += " {{FIELD_NAME}}: " + default_value + ",";
+ code_ += " {{FIELD_NAME}}: " + default_value + ",";
});
code_ += " }";
code_ += " }";
@@ -2065,7 +2167,7 @@ class RustGenerator : public BaseGenerator {
case ftBool:
case ftFloat:
case ftEnumKey: {
- code_ += " let {{FIELD_NAME}} = self.{{FIELD_NAME}};";
+ code_ += " let {{FIELD_NAME}} = self.{{FIELD_NAME}};";
return;
}
case ftUnionKey: return; // Generate union type with union value.
@@ -2073,9 +2175,9 @@ class RustGenerator : public BaseGenerator {
code_.SetValue("SNAKE_CASE_ENUM_NAME",
MakeSnakeCase(Name(*field.value.type.enum_def)));
code_ +=
- " let {{FIELD_NAME}}_type = "
+ " let {{FIELD_NAME}}_type = "
"self.{{FIELD_NAME}}.{{SNAKE_CASE_ENUM_NAME}}_type();";
- code_ += " let {{FIELD_NAME}} = self.{{FIELD_NAME}}.pack(_fbb);";
+ code_ += " let {{FIELD_NAME}} = self.{{FIELD_NAME}}.pack(_fbb);";
return;
}
// The rest of the types require special casing around optionalness
@@ -2088,14 +2190,13 @@ class RustGenerator : public BaseGenerator {
// Hold the struct in a variable so we can reference it.
if (field.IsRequired()) {
code_ +=
- " let {{FIELD_NAME}}_tmp = "
- "Some(self.{{FIELD_NAME}}.pack());";
+ " let {{FIELD_NAME}}_tmp = Some(self.{{FIELD_NAME}}.pack());";
} else {
code_ +=
- " let {{FIELD_NAME}}_tmp = self.{{FIELD_NAME}}"
+ " let {{FIELD_NAME}}_tmp = self.{{FIELD_NAME}}"
".as_ref().map(|x| x.pack());";
}
- code_ += " let {{FIELD_NAME}} = {{FIELD_NAME}}_tmp.as_ref();";
+ code_ += " let {{FIELD_NAME}} = {{FIELD_NAME}}_tmp.as_ref();";
return;
}
@@ -2149,7 +2250,7 @@ class RustGenerator : public BaseGenerator {
code_ += " {{STRUCT_NAME}}::create(_fbb, &{{STRUCT_NAME}}Args{";
ForAllObjectTableFields(table, [&](const FieldDef &field) {
(void)field; // Unused.
- code_ += " {{FIELD_NAME}},";
+ code_ += " {{FIELD_NAME}},";
});
code_ += " })";
code_ += " }";
@@ -2163,21 +2264,23 @@ class RustGenerator : public BaseGenerator {
if (field.deprecated) continue;
code_.SetValue("FIELD_NAME", Name(field));
code_.SetValue("FIELD_OBJECT_TYPE", ObjectFieldType(field, true));
+ code_.IncrementIdentLevel();
cb(field);
+ code_.DecrementIdentLevel();
}
}
void MapNativeTableField(const FieldDef &field, const std::string &expr) {
if (field.IsOptional()) {
- code_ += " let {{FIELD_NAME}} = self.{{FIELD_NAME}}.as_ref().map(|x|{";
- code_ += " " + expr;
- code_ += " });";
+ code_ += " let {{FIELD_NAME}} = self.{{FIELD_NAME}}.as_ref().map(|x|{";
+ code_ += " " + expr;
+ code_ += " });";
} else {
// For some reason Args has optional types for required fields.
// TODO(cneo): Fix this... but its a breaking change?
- code_ += " let {{FIELD_NAME}} = Some({";
- code_ += " let x = &self.{{FIELD_NAME}};";
- code_ += " " + expr;
- code_ += " });";
+ code_ += " let {{FIELD_NAME}} = Some({";
+ code_ += " let x = &self.{{FIELD_NAME}};";
+ code_ += " " + expr;
+ code_ += " });";
}
}
@@ -2187,21 +2290,22 @@ class RustGenerator : public BaseGenerator {
FLATBUFFERS_ASSERT(field.key);
code_.SetValue("KEY_TYPE", GenTableAccessorFuncReturnType(field, ""));
+ code_.SetValue("REF", IsString(field.value.type) ? "" : "&");
- code_ += " #[inline]";
+ code_ += "#[inline]";
code_ +=
- " pub fn key_compare_less_than(&self, o: &{{STRUCT_NAME}}) -> "
- " bool {";
- code_ += " self.{{FIELD_NAME}}() < o.{{FIELD_NAME}}()";
- code_ += " }";
+ "pub fn key_compare_less_than(&self, o: &{{STRUCT_NAME}}) -> "
+ "bool {";
+ code_ += " self.{{FIELD_NAME}}() < o.{{FIELD_NAME}}()";
+ code_ += "}";
code_ += "";
- code_ += " #[inline]";
+ code_ += "#[inline]";
code_ +=
- " pub fn key_compare_with_value(&self, val: {{KEY_TYPE}}) -> "
- " ::std::cmp::Ordering {";
- code_ += " let key = self.{{FIELD_NAME}}();";
- code_ += " key.cmp(&val)";
- code_ += " }";
+ "pub fn key_compare_with_value(&self, val: {{KEY_TYPE}}) -> "
+ "::std::cmp::Ordering {";
+ code_ += " let key = self.{{FIELD_NAME}}();";
+ code_ += " key.cmp({{REF}}val)";
+ code_ += "}";
}
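Note: the key-method hunk above introduces the `{{REF}}` placeholder so that `key_compare_with_value` compiles for string keys as well as scalars: a `&str` key emits `key.cmp(val)`, while scalar keys keep `key.cmp(&val)`. A simplified, self-contained sketch for a hypothetical string-keyed `Monster.name`:

```rust
// Plain struct standing in for the generated table wrapper; only the shape of
// the emitted key-comparison methods matters here.
pub struct Monster<'a> { name: &'a str }

impl<'a> Monster<'a> {
  pub fn name(&self) -> &'a str { self.name }

  #[inline]
  pub fn key_compare_less_than(&self, o: &Monster) -> bool {
    self.name() < o.name()
  }

  #[inline]
  pub fn key_compare_with_value(&self, val: &str) -> ::std::cmp::Ordering {
    let key = self.name();
    key.cmp(val) // REF is "" for string keys; a scalar key would emit key.cmp(&val)
  }
}
```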
// Generate functions for accessing the root table object. This function
@@ -2427,7 +2531,9 @@ class RustGenerator : public BaseGenerator {
code_.SetValue(
"REF",
IsStruct(field.value.type) || IsArray(field.value.type) ? "&" : "");
+ code_.IncrementIdentLevel();
cb(field);
+ code_.DecrementIdentLevel();
const size_t size = InlineSize(field.value.type);
offset_to_field += size + field.padding;
}
@@ -2466,7 +2572,7 @@ class RustGenerator : public BaseGenerator {
code_ += " f.debug_struct(\"{{STRUCT_NAME}}\")";
ForAllStructFields(struct_def, [&](const FieldDef &unused) {
(void)unused;
- code_ += " .field(\"{{FIELD_NAME}}\", &self.{{FIELD_NAME}}())";
+ code_ += " .field(\"{{FIELD_NAME}}\", &self.{{FIELD_NAME}}())";
});
code_ += " .finish()";
code_ += " }";
@@ -2537,13 +2643,13 @@ class RustGenerator : public BaseGenerator {
code_ += " pub fn new(";
ForAllStructFields(struct_def, [&](const FieldDef &unused) {
(void)unused;
- code_ += " {{FIELD_NAME}}: {{REF}}{{FIELD_TYPE}},";
+ code_ += " {{FIELD_NAME}}: {{REF}}{{FIELD_TYPE}},";
});
code_ += " ) -> Self {";
code_ += " let mut s = Self([0; {{STRUCT_SIZE}}]);";
ForAllStructFields(struct_def, [&](const FieldDef &unused) {
(void)unused;
- code_ += " s.set_{{FIELD_NAME}}({{REF}}{{FIELD_NAME}});";
+ code_ += " s.set_{{FIELD_NAME}}({{FIELD_NAME}});";
});
code_ += " s";
code_ += " }";
@@ -2555,12 +2661,12 @@ class RustGenerator : public BaseGenerator {
// Generate accessor methods for the struct.
ForAllStructFields(struct_def, [&](const FieldDef &field) {
- this->GenComment(field.doc_comment, " ");
+ this->GenComment(field.doc_comment);
// Getter.
if (IsStruct(field.value.type)) {
- code_ += " pub fn {{FIELD_NAME}}(&self) -> &{{FIELD_TYPE}} {";
+ code_ += "pub fn {{FIELD_NAME}}(&self) -> &{{FIELD_TYPE}} {";
code_ +=
- " unsafe {"
+ " unsafe {"
" &*(self.0[{{FIELD_OFFSET}}..].as_ptr() as *const"
" {{FIELD_TYPE}}) }";
} else if (IsArray(field.value.type)) {
@@ -2568,30 +2674,31 @@ class RustGenerator : public BaseGenerator {
NumToString(field.value.type.fixed_length));
code_.SetValue("ARRAY_ITEM", GetTypeGet(field.value.type.VectorType()));
code_ +=
- " pub fn {{FIELD_NAME}}(&'a self) -> "
+ "pub fn {{FIELD_NAME}}(&'a self) -> "
"flatbuffers::Array<'a, {{ARRAY_ITEM}}, {{ARRAY_SIZE}}> {";
- code_ += " flatbuffers::Array::follow(&self.0, {{FIELD_OFFSET}})";
+ code_ += " flatbuffers::Array::follow(&self.0, {{FIELD_OFFSET}})";
} else {
- code_ += " pub fn {{FIELD_NAME}}(&self) -> {{FIELD_TYPE}} {";
+ code_ += "pub fn {{FIELD_NAME}}(&self) -> {{FIELD_TYPE}} {";
code_ +=
- " let mut mem = core::mem::MaybeUninit::"
+ " let mut mem = core::mem::MaybeUninit::"
"<{{FIELD_TYPE}}>::uninit();";
- code_ += " unsafe {";
- code_ += " core::ptr::copy_nonoverlapping(";
- code_ += " self.0[{{FIELD_OFFSET}}..].as_ptr(),";
- code_ += " mem.as_mut_ptr() as *mut u8,";
- code_ += " core::mem::size_of::<{{FIELD_TYPE}}>(),";
- code_ += " );";
- code_ += " mem.assume_init()";
- code_ += " }.from_little_endian()";
- }
- code_ += " }\n";
+ code_ += " unsafe {";
+ code_ += " core::ptr::copy_nonoverlapping(";
+ code_ += " self.0[{{FIELD_OFFSET}}..].as_ptr(),";
+ code_ += " mem.as_mut_ptr() as *mut u8,";
+ code_ += " core::mem::size_of::<{{FIELD_TYPE}}>(),";
+ code_ += " );";
+ code_ += " mem.assume_init()";
+ code_ += " }.from_little_endian()";
+ }
+ code_ += "}\n";
// Setter.
if (IsStruct(field.value.type)) {
code_.SetValue("FIELD_SIZE", NumToString(InlineSize(field.value.type)));
- code_ += " pub fn set_{{FIELD_NAME}}(&mut self, x: &{{FIELD_TYPE}}) {";
+ code_ += "#[allow(clippy::identity_op)]"; // If FIELD_OFFSET=0.
+ code_ += "pub fn set_{{FIELD_NAME}}(&mut self, x: &{{FIELD_TYPE}}) {";
code_ +=
- " self.0[{{FIELD_OFFSET}}..{{FIELD_OFFSET}}+{{FIELD_SIZE}}]"
+ " self.0[{{FIELD_OFFSET}}..{{FIELD_OFFSET}} + {{FIELD_SIZE}}]"
".copy_from_slice(&x.0)";
} else if (IsArray(field.value.type)) {
if (GetFullType(field.value.type) == ftArrayOfBuiltin) {
@@ -2601,36 +2708,35 @@ class RustGenerator : public BaseGenerator {
"ARRAY_ITEM_SIZE",
NumToString(InlineSize(field.value.type.VectorType())));
code_ +=
- " pub fn set_{{FIELD_NAME}}(&mut self, items: &{{FIELD_TYPE}}) "
+ "pub fn set_{{FIELD_NAME}}(&mut self, items: &{{FIELD_TYPE}}) "
"{";
code_ +=
- " flatbuffers::emplace_scalar_array(&mut self.0, "
+ " flatbuffers::emplace_scalar_array(&mut self.0, "
"{{FIELD_OFFSET}}, items);";
} else {
code_.SetValue("FIELD_SIZE",
NumToString(InlineSize(field.value.type)));
- code_ +=
- " pub fn set_{{FIELD_NAME}}(&mut self, x: &{{FIELD_TYPE}}) {";
- code_ += " unsafe {";
- code_ += " std::ptr::copy(";
- code_ += " x.as_ptr() as *const u8,";
- code_ += " self.0.as_mut_ptr().add({{FIELD_OFFSET}}),";
- code_ += " {{FIELD_SIZE}},";
- code_ += " );";
- code_ += " }";
+ code_ += "pub fn set_{{FIELD_NAME}}(&mut self, x: &{{FIELD_TYPE}}) {";
+ code_ += " unsafe {";
+ code_ += " std::ptr::copy(";
+ code_ += " x.as_ptr() as *const u8,";
+ code_ += " self.0.as_mut_ptr().add({{FIELD_OFFSET}}),";
+ code_ += " {{FIELD_SIZE}},";
+ code_ += " );";
+ code_ += " }";
}
} else {
- code_ += " pub fn set_{{FIELD_NAME}}(&mut self, x: {{FIELD_TYPE}}) {";
- code_ += " let x_le = x.to_little_endian();";
- code_ += " unsafe {";
- code_ += " core::ptr::copy_nonoverlapping(";
- code_ += " &x_le as *const {{FIELD_TYPE}} as *const u8,";
- code_ += " self.0[{{FIELD_OFFSET}}..].as_mut_ptr(),";
- code_ += " core::mem::size_of::<{{FIELD_TYPE}}>(),";
- code_ += " );";
- code_ += " }";
- }
- code_ += " }\n";
+ code_ += "pub fn set_{{FIELD_NAME}}(&mut self, x: {{FIELD_TYPE}}) {";
+ code_ += " let x_le = x.to_little_endian();";
+ code_ += " unsafe {";
+ code_ += " core::ptr::copy_nonoverlapping(";
+ code_ += " &x_le as *const {{FIELD_TYPE}} as *const u8,";
+ code_ += " self.0[{{FIELD_OFFSET}}..].as_mut_ptr(),";
+ code_ += " core::mem::size_of::<{{FIELD_TYPE}}>(),";
+ code_ += " );";
+ code_ += " }";
+ }
+ code_ += "}\n";
// Generate a comparison function for this field if it is a key.
if (field.key) { GenKeyFieldMethods(field); }
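Note: the fixed-struct getter and setter templates above copy bytes through `copy_nonoverlapping` instead of dereferencing a possibly unaligned pointer, and the struct-field setter gains `#[allow(clippy::identity_op)]` because the emitted slice range can start at offset 0. For a hypothetical `Vec3 { x: f32; ... }` backed by a byte array, the shape is roughly as below; the real output additionally calls `to_little_endian`/`from_little_endian` from the flatbuffers crate, which is a no-op on little-endian targets:

```rust
// Self-contained sketch of the emitted scalar getter/setter shape.
pub struct Vec3(pub [u8; 12]);

impl Vec3 {
  pub fn x(&self) -> f32 {
    // Unaligned-safe read: copy the field's bytes into a MaybeUninit.
    let mut mem = core::mem::MaybeUninit::<f32>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<f32>(),
      );
      mem.assume_init()
    }
  }

  pub fn set_x(&mut self, x: f32) {
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x as *const f32 as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<f32>(),
      );
    }
  }
}
```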
@@ -2645,15 +2751,15 @@ class RustGenerator : public BaseGenerator {
if (IsArray(field.value.type)) {
if (GetFullType(field.value.type) == ftArrayOfStruct) {
code_ +=
- " {{FIELD_NAME}}: { let {{FIELD_NAME}} = "
+ " {{FIELD_NAME}}: { let {{FIELD_NAME}} = "
"self.{{FIELD_NAME}}(); flatbuffers::array_init(|i| "
"{{FIELD_NAME}}.get(i).unpack()) },";
} else {
- code_ += " {{FIELD_NAME}}: self.{{FIELD_NAME}}().into(),";
+ code_ += " {{FIELD_NAME}}: self.{{FIELD_NAME}}().into(),";
}
} else {
std::string unpack = IsStruct(field.value.type) ? ".unpack()" : "";
- code_ += " {{FIELD_NAME}}: self.{{FIELD_NAME}}()" + unpack + ",";
+ code_ += " {{FIELD_NAME}}: self.{{FIELD_NAME}}()" + unpack + ",";
}
});
code_ += " }";
@@ -2670,7 +2776,7 @@ class RustGenerator : public BaseGenerator {
code_ += "pub struct {{NATIVE_STRUCT_NAME}} {";
ForAllStructFields(struct_def, [&](const FieldDef &field) {
(void)field; // unused.
- code_ += " pub {{FIELD_NAME}}: {{FIELD_OBJECT_TYPE}},";
+ code_ += "pub {{FIELD_NAME}}: {{FIELD_OBJECT_TYPE}},";
});
code_ += "}";
// The `pack` method that turns the native struct into its Flatbuffers
@@ -2680,17 +2786,17 @@ class RustGenerator : public BaseGenerator {
code_ += " {{STRUCT_NAME}}::new(";
ForAllStructFields(struct_def, [&](const FieldDef &field) {
if (IsStruct(field.value.type)) {
- code_ += " &self.{{FIELD_NAME}}.pack(),";
+ code_ += " &self.{{FIELD_NAME}}.pack(),";
} else if (IsArray(field.value.type)) {
if (GetFullType(field.value.type) == ftArrayOfStruct) {
code_ +=
- " &flatbuffers::array_init(|i| "
+ " &flatbuffers::array_init(|i| "
"self.{{FIELD_NAME}}[i].pack()),";
} else {
- code_ += " &self.{{FIELD_NAME}},";
+ code_ += " &self.{{FIELD_NAME}},";
}
} else {
- code_ += " self.{{FIELD_NAME}},";
+ code_ += " self.{{FIELD_NAME}},";
}
});
code_ += " )";
diff --git a/chromium/third_party/flatbuffers/src/src/idl_gen_swift.cpp b/chromium/third_party/flatbuffers/src/src/idl_gen_swift.cpp
index 762cd291f72..620e7a3a858 100644
--- a/chromium/third_party/flatbuffers/src/src/idl_gen_swift.cpp
+++ b/chromium/third_party/flatbuffers/src/src/idl_gen_swift.cpp
@@ -188,7 +188,8 @@ class SwiftGenerator : public BaseGenerator {
GenComment(struct_def.doc_comment);
code_.SetValue("STRUCTNAME", NameWrappedInNameSpace(struct_def));
code_ +=
- "{{ACCESS_TYPE}} struct {{STRUCTNAME}}: NativeStruct, Verifiable\\";
+ "{{ACCESS_TYPE}} struct {{STRUCTNAME}}: NativeStruct, Verifiable, "
+ "FlatbuffersInitializable\\";
if (parser_.opts.generate_object_based_api) code_ += ", NativeObject\\";
code_ += " {";
code_ += "";
@@ -229,6 +230,7 @@ class SwiftGenerator : public BaseGenerator {
constructor += name + ": " + type;
}
code_ += "";
+ BuildStructConstructor(struct_def);
BuildObjectConstructor(main_constructor, constructor);
BuildObjectConstructor(base_constructor, "");
@@ -262,6 +264,36 @@ class SwiftGenerator : public BaseGenerator {
code_ += "}";
Outdent();
code_ += "}\n";
+ if (parser_.opts.gen_json_coders) GenerateJSONEncodingAPIs(struct_def);
+ }
+
+ void BuildStructConstructor(const StructDef &struct_def) {
+ code_ += "{{ACCESS_TYPE}} init(_ bb: ByteBuffer, o: Int32) {";
+ Indent();
+ code_ += "let {{ACCESS}} = Struct(bb: bb, position: o)";
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ auto &field = **it;
+ if (field.deprecated) continue;
+ auto name = Name(field);
+ auto type = field.value.type;
+ code_.SetValue("VALUENAME", name);
+ code_.SetValue("VALUETYPE", GenType(type));
+ code_.SetValue("OFFSET", NumToString(field.value.offset));
+ if (IsScalar(type.base_type)) {
+ if (IsEnum(type))
+ code_.SetValue("VALUETYPE", GenTypeBasic(field.value.type, false));
+ code_ +=
+ "_{{VALUENAME}} = {{ACCESS}}.readBuffer(of: {{VALUETYPE}}.self, "
+ "at: {{OFFSET}})";
+ } else {
+ code_ +=
+ "_{{VALUENAME}} = {{VALUETYPE}}({{ACCESS}}.bb, o: "
+ "{{ACCESS}}.postion + {{OFFSET}})";
+ }
+ }
+ Outdent();
+ code_ += "}\n";
}
void GenMutableStructReader(const StructDef &struct_def) {
@@ -378,7 +410,6 @@ class SwiftGenerator : public BaseGenerator {
void GenTable(const StructDef &struct_def) {
auto is_private_access = struct_def.attributes.Lookup("private");
code_.SetValue("ACCESS_TYPE", is_private_access ? "internal" : "public");
-
GenObjectHeader(struct_def);
GenTableAccessors(struct_def);
GenTableReader(struct_def);
@@ -389,6 +420,7 @@ class SwiftGenerator : public BaseGenerator {
GenerateVerifier(struct_def);
Outdent();
code_ += "}\n";
+ if (parser_.opts.gen_json_coders) GenerateJSONEncodingAPIs(struct_def);
}
// Generates the reader for swift
@@ -846,6 +878,166 @@ class SwiftGenerator : public BaseGenerator {
}
}
+ void GenerateCodingKeys(const StructDef &struct_def) {
+ code_ += "enum CodingKeys: String, CodingKey {";
+ Indent();
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ auto &field = **it;
+ if (field.deprecated) continue;
+ auto name = Name(field);
+
+ code_.SetValue("RAWVALUENAME", field.name);
+ code_.SetValue("VALUENAME", name);
+ code_ += "case {{VALUENAME}} = \"{{RAWVALUENAME}}\"";
+ }
+ Outdent();
+ code_ += "}";
+ }
+
+ void GenerateEncoderUnionBody(const FieldDef &field) {
+ EnumDef &union_def = *field.value.type.enum_def;
+ auto is_vector = field.value.type.base_type == BASE_TYPE_VECTOR ||
+ field.value.type.base_type == BASE_TYPE_ARRAY;
+ if (field.value.type.base_type == BASE_TYPE_UTYPE ||
+ (is_vector &&
+ field.value.type.VectorType().base_type == BASE_TYPE_UTYPE))
+ return;
+ if (is_vector) {
+ code_ +=
+ "var enumsEncoder = container.nestedUnkeyedContainer(forKey: "
+ ".{{VALUENAME}}Type)";
+ code_ +=
+ "var contentEncoder = container.nestedUnkeyedContainer(forKey: "
+ ".{{VALUENAME}})";
+ code_ += "for index in 0..<{{VALUENAME}}Count {";
+ Indent();
+ code_ +=
+ "guard let type = {{VALUENAME}}Type(at: index) else { continue }";
+ code_ += "try enumsEncoder.encode(type)";
+ code_ += "switch type {";
+ for (auto it = union_def.Vals().begin(); it != union_def.Vals().end();
+ ++it) {
+ const auto &ev = **it;
+
+ auto name = Name(ev);
+ auto type = GenType(ev.union_type);
+ code_.SetValue("KEY", name);
+ code_.SetValue("VALUETYPE", type);
+ if (ev.union_type.base_type == BASE_TYPE_NONE) { continue; }
+ code_ += "case .{{KEY}}:";
+ Indent();
+ code_ += "let _v = {{VALUENAME}}(at: index, type: {{VALUETYPE}}.self)";
+ code_ += "try contentEncoder.encode(_v)";
+ Outdent();
+ }
+ code_ += "default: break;";
+ code_ += "}";
+ Outdent();
+ code_ += "}";
+ return;
+ }
+
+ code_ += "switch {{VALUENAME}}Type {";
+ for (auto it = union_def.Vals().begin(); it != union_def.Vals().end();
+ ++it) {
+ const auto &ev = **it;
+
+ auto name = Name(ev);
+ auto type = GenType(ev.union_type);
+ code_.SetValue("KEY", name);
+ code_.SetValue("VALUETYPE", type);
+ if (ev.union_type.base_type == BASE_TYPE_NONE) { continue; }
+ code_ += "case .{{KEY}}:";
+ Indent();
+ code_ += "let _v = {{VALUENAME}}(type: {{VALUETYPE}}.self)";
+ code_ += "try container.encodeIfPresent(_v, forKey: .{{VALUENAME}})";
+ Outdent();
+ }
+ code_ += "default: break;";
+ code_ += "}";
+ }
+
+ void GenerateEncoderBody(const StructDef &struct_def) {
+ code_ += "var container = encoder.container(keyedBy: CodingKeys.self)";
+ for (auto it = struct_def.fields.vec.begin();
+ it != struct_def.fields.vec.end(); ++it) {
+ auto &field = **it;
+ if (field.deprecated) continue;
+ auto name = Name(field);
+ auto type = field.value.type;
+
+ auto is_non_union_vector =
+ (field.value.type.base_type == BASE_TYPE_ARRAY ||
+ field.value.type.base_type == BASE_TYPE_VECTOR) &&
+ field.value.type.VectorType().base_type != BASE_TYPE_UTYPE;
+
+ code_.SetValue("RAWVALUENAME", field.name);
+ code_.SetValue("VALUENAME", name);
+ code_.SetValue("CONSTANT", field.value.constant);
+ bool should_indent = true;
+ if (is_non_union_vector) {
+ code_ += "if {{VALUENAME}}Count > 0 {";
+ } else if (IsEnum(type) && !field.IsOptional()) {
+ code_.SetValue("CONSTANT", GenEnumDefaultValue(field));
+ code_ += "if {{VALUENAME}} != {{CONSTANT}} {";
+ } else if (IsScalar(type.base_type) && !IsEnum(type) &&
+ !IsBool(type.base_type) && !field.IsOptional()) {
+ code_ += "if {{VALUENAME}} != {{CONSTANT}} {";
+ } else if (IsBool(type.base_type) && !field.IsOptional()) {
+ std::string default_value =
+ "0" == field.value.constant ? "false" : "true";
+ code_.SetValue("CONSTANT", default_value);
+ code_ += "if {{VALUENAME}} != {{CONSTANT}} {";
+ } else {
+ should_indent = false;
+ }
+ if (should_indent) Indent();
+
+ if (IsUnion(type) && !IsEnum(type)) {
+ GenerateEncoderUnionBody(field);
+ } else if (is_non_union_vector &&
+ (!IsScalar(type.VectorType().base_type) ||
+ IsEnum(type.VectorType()))) {
+ code_ +=
+ "var contentEncoder = container.nestedUnkeyedContainer(forKey: "
+ ".{{VALUENAME}})";
+ code_ += "for index in 0..<{{VALUENAME}}Count {";
+ Indent();
+ code_ += "guard let type = {{VALUENAME}}(at: index) else { continue }";
+ code_ += "try contentEncoder.encode(type)";
+ Outdent();
+ code_ += "}";
+ } else {
+ code_ +=
+ "try container.encodeIfPresent({{VALUENAME}}, forKey: "
+ ".{{VALUENAME}})";
+ }
+ if (should_indent) Outdent();
+
+ if (is_non_union_vector ||
+ (IsScalar(type.base_type) && !field.IsOptional())) {
+ code_ += "}";
+ }
+ }
+ }
+
+ void GenerateJSONEncodingAPIs(const StructDef &struct_def) {
+ code_ += "extension {{STRUCTNAME}}: Encodable {";
+ Indent();
+ code_ += "";
+ if (struct_def.fields.vec.empty() == false) GenerateCodingKeys(struct_def);
+
+ code_ += "public func encode(to encoder: Encoder) throws {";
+ Indent();
+ if (struct_def.fields.vec.empty() == false) GenerateEncoderBody(struct_def);
+ Outdent();
+ code_ += "}";
+ Outdent();
+ code_ += "}";
+ code_ += "";
+ }
+
void GenerateVerifier(const StructDef &struct_def) {
code_ +=
"public static func verify<T>(_ verifier: inout Verifier, at position: "
@@ -1002,6 +1194,8 @@ class SwiftGenerator : public BaseGenerator {
AddMinOrMaxEnumValue(Name(*enum_def.MinValue()), "min");
Outdent();
code_ += "}\n";
+ if (parser_.opts.gen_json_coders) EnumEncoder(enum_def);
+ code_ += "";
if (parser_.opts.generate_object_based_api && enum_def.is_union) {
code_ += "{{ACCESS_TYPE}} struct {{ENUM_NAME}}Union {";
Indent();
@@ -1026,6 +1220,27 @@ class SwiftGenerator : public BaseGenerator {
}
}
+ void EnumEncoder(const EnumDef &enum_def) {
+ code_ += "extension {{ENUM_NAME}}: Encodable {";
+ Indent();
+ code_ += "{{ACCESS_TYPE}} func encode(to encoder: Encoder) throws {";
+ Indent();
+ code_ += "var container = encoder.singleValueContainer()";
+ code_ += "switch self {";
+ for (auto it = enum_def.Vals().begin(); it != enum_def.Vals().end(); ++it) {
+ const auto &ev = **it;
+ auto name = Name(ev);
+ code_.SetValue("KEY", name);
+ code_.SetValue("RAWKEY", ev.name);
+ code_ += "case .{{KEY}}: try container.encode(\"{{RAWKEY}}\")";
+ }
+ code_ += "}";
+ Outdent();
+ code_ += "}";
+ Outdent();
+ code_ += "}";
+ }
+
// MARK: - Object API
void GenerateObjectAPIExtensionHeader(std::string name) {
diff --git a/chromium/third_party/flatbuffers/src/src/idl_gen_text.cpp b/chromium/third_party/flatbuffers/src/src/idl_gen_text.cpp
index 903c41ecdb5..f32243c8353 100644
--- a/chromium/third_party/flatbuffers/src/src/idl_gen_text.cpp
+++ b/chromium/third_party/flatbuffers/src/src/idl_gen_text.cpp
@@ -264,12 +264,12 @@ struct JsonPrinter {
FLATBUFFERS_ASSERT(IsStruct(fd.value.type) || IsArray(fd.value.type));
val = reinterpret_cast<const Struct *>(table)->GetStruct<const void *>(
fd.value.offset);
- } else if (fd.flexbuffer) {
+ } else if (fd.flexbuffer && opts.json_nested_flexbuffers) {
auto vec = table->GetPointer<const Vector<uint8_t> *>(fd.value.offset);
auto root = flexbuffers::GetRoot(vec->data(), vec->size());
root.ToString(true, opts.strict_json, text);
return true;
- } else if (fd.nested_flatbuffer) {
+ } else if (fd.nested_flatbuffer && opts.json_nested_flatbuffers) {
auto vec = table->GetPointer<const Vector<uint8_t> *>(fd.value.offset);
auto root = GetRoot<Table>(vec->data());
return GenStruct(*fd.nested_flatbuffer, root, indent);
diff --git a/chromium/third_party/flatbuffers/src/src/idl_gen_ts.cpp b/chromium/third_party/flatbuffers/src/src/idl_gen_ts.cpp
index bd48d630d24..d4a6353228e 100644
--- a/chromium/third_party/flatbuffers/src/src/idl_gen_ts.cpp
+++ b/chromium/third_party/flatbuffers/src/src/idl_gen_ts.cpp
@@ -583,7 +583,8 @@ class TsGenerator : public BaseGenerator {
std::string fileName) {
ImportDefinition import;
import.name = import_name;
- import.import_statement = "import " + import_name + " from '" + fileName + "';";
+ import.import_statement =
+ "import " + import_name + " from '" + fileName + "';";
imports.insert(std::make_pair(import_name, import));
}
@@ -1602,8 +1603,6 @@ bool GenerateTS(const Parser &parser, const std::string &path,
std::string TSMakeRule(const Parser &parser, const std::string &path,
const std::string &file_name) {
- FLATBUFFERS_ASSERT(parser.opts.lang <= IDLOptions::kMAX);
-
std::string filebase =
flatbuffers::StripPath(flatbuffers::StripExtension(file_name));
ts::TsGenerator generator(parser, path, file_name);
diff --git a/chromium/third_party/flatbuffers/src/src/idl_parser.cpp b/chromium/third_party/flatbuffers/src/src/idl_parser.cpp
index 720e803237e..4e18e192ad6 100644
--- a/chromium/third_party/flatbuffers/src/src/idl_parser.cpp
+++ b/chromium/third_party/flatbuffers/src/src/idl_parser.cpp
@@ -20,6 +20,7 @@
#include <string>
#include <utility>
+#include "flatbuffers/base.h"
#include "flatbuffers/idl.h"
#include "flatbuffers/util.h"
@@ -812,8 +813,7 @@ CheckedError Parser::ParseField(StructDef &struct_def) {
"or in structs.");
if (IsString(type) || IsVector(type)) {
advanced_features_ |= reflection::DefaultVectorsAndStrings;
- if (field->value.constant != "0" && field->value.constant != "null" &&
- !SupportsDefaultVectorsAndStrings()) {
+ if (field->value.constant != "0" && !SupportsDefaultVectorsAndStrings()) {
return Error(
"Default values for strings and vectors are not supported in one "
"of the specified programming languages");
@@ -1627,7 +1627,7 @@ CheckedError Parser::ParseArray(Value &array) {
auto length = array.type.fixed_length;
uoffset_t count = 0;
auto err = ParseVectorDelimiters(count, [&](uoffset_t &) -> CheckedError {
- vector_emplace_back(&stack, Value());
+ stack.emplace_back(Value());
auto &val = stack.back();
val.type = type;
if (IsStruct(type)) {
@@ -2479,8 +2479,7 @@ bool Parser::SupportsDefaultVectorsAndStrings() const {
}
bool Parser::SupportsAdvancedUnionFeatures() const {
- return opts.lang_to_generate != 0 &&
- (opts.lang_to_generate &
+ return (opts.lang_to_generate &
~(IDLOptions::kCpp | IDLOptions::kTs | IDLOptions::kPhp |
IDLOptions::kJava | IDLOptions::kCSharp | IDLOptions::kKotlin |
IDLOptions::kBinary | IDLOptions::kSwift)) == 0;
@@ -3256,7 +3255,7 @@ CheckedError Parser::ParseRoot(const char *source, const char **include_paths,
for (auto val_it = enum_def.Vals().begin();
val_it != enum_def.Vals().end(); ++val_it) {
auto &val = **val_it;
- if (!SupportsAdvancedUnionFeatures() &&
+ if (!(opts.lang_to_generate != 0 && SupportsAdvancedUnionFeatures()) &&
(IsStruct(val.union_type) || IsString(val.union_type)))
return Error(
"only tables can be union elements in the generated language: " +
@@ -3320,7 +3319,7 @@ CheckedError Parser::DoParse(const char *source, const char **include_paths,
ECHECK(ParseProtoDecl());
} else if (IsIdent("native_include")) {
NEXT();
- vector_emplace_back(&native_included_files_, attribute_);
+ native_included_files_.emplace_back(attribute_);
EXPECT(kTokenStringConstant);
EXPECT(';');
} else if (IsIdent("include") || (opts.proto_mode && IsIdent("import"))) {
@@ -3406,9 +3405,9 @@ CheckedError Parser::DoParse(const char *source, const char **include_paths,
NEXT();
file_identifier_ = attribute_;
EXPECT(kTokenStringConstant);
- if (file_identifier_.length() != FlatBufferBuilder::kFileIdentifierLength)
+ if (file_identifier_.length() != flatbuffers::kFileIdentifierLength)
return Error("file_identifier must be exactly " +
- NumToString(FlatBufferBuilder::kFileIdentifierLength) +
+ NumToString(flatbuffers::kFileIdentifierLength) +
" characters");
EXPECT(';');
} else if (IsIdent("file_extension")) {
diff --git a/chromium/third_party/flatbuffers/src/src/reflection.cpp b/chromium/third_party/flatbuffers/src/src/reflection.cpp
index 2dedcb4f18d..0af8994386b 100644
--- a/chromium/third_party/flatbuffers/src/src/reflection.cpp
+++ b/chromium/third_party/flatbuffers/src/src/reflection.cpp
@@ -180,7 +180,7 @@ class ResizeContext {
std::vector<uint8_t> *flatbuf,
const reflection::Object *root_table = nullptr)
: schema_(schema),
- startptr_(vector_data(*flatbuf) + start),
+ startptr_(flatbuf->data() + start),
delta_(delta),
buf_(*flatbuf),
dag_check_(flatbuf->size() / sizeof(uoffset_t), false) {
@@ -188,8 +188,8 @@ class ResizeContext {
delta_ = (delta_ + mask) & ~mask;
if (!delta_) return; // We can't shrink by less than largest_scalar_t.
// Now change all the offsets by delta_.
- auto root = GetAnyRoot(vector_data(buf_));
- Straddle<uoffset_t, 1>(vector_data(buf_), root, vector_data(buf_));
+ auto root = GetAnyRoot(buf_.data());
+ Straddle<uoffset_t, 1>(buf_.data(), root, buf_.data());
ResizeTable(root_table ? *root_table : *schema.root_table(), root);
// We can now add or remove bytes at start.
if (delta_ > 0)
@@ -217,7 +217,7 @@ class ResizeContext {
// will straddle and which won't.
uint8_t &DagCheck(const void *offsetloc) {
auto dag_idx = reinterpret_cast<const uoffset_t *>(offsetloc) -
- reinterpret_cast<const uoffset_t *>(vector_data(buf_));
+ reinterpret_cast<const uoffset_t *>(buf_.data());
return dag_check_[dag_idx];
}
@@ -309,19 +309,19 @@ void SetString(const reflection::Schema &schema, const std::string &val,
const reflection::Object *root_table) {
auto delta = static_cast<int>(val.size()) - static_cast<int>(str->size());
auto str_start = static_cast<uoffset_t>(
- reinterpret_cast<const uint8_t *>(str) - vector_data(*flatbuf));
+ reinterpret_cast<const uint8_t *>(str) - flatbuf->data());
auto start = str_start + static_cast<uoffset_t>(sizeof(uoffset_t));
if (delta) {
// Clear the old string, since we don't want parts of it remaining.
- memset(vector_data(*flatbuf) + start, 0, str->size());
+ memset(flatbuf->data() + start, 0, str->size());
// Different size, we must expand (or contract).
ResizeContext(schema, start, delta, flatbuf, root_table);
// Set the new length.
- WriteScalar(vector_data(*flatbuf) + str_start,
+ WriteScalar(flatbuf->data() + str_start,
static_cast<uoffset_t>(val.size()));
}
// Copy new data. Safe because we created the right amount of space.
- memcpy(vector_data(*flatbuf) + start, val.c_str(), val.size() + 1);
+ memcpy(flatbuf->data() + start, val.c_str(), val.size() + 1);
}
uint8_t *ResizeAnyVector(const reflection::Schema &schema, uoffset_t newsize,
@@ -330,25 +330,26 @@ uint8_t *ResizeAnyVector(const reflection::Schema &schema, uoffset_t newsize,
const reflection::Object *root_table) {
auto delta_elem = static_cast<int>(newsize) - static_cast<int>(num_elems);
auto delta_bytes = delta_elem * static_cast<int>(elem_size);
- auto vec_start =
- reinterpret_cast<const uint8_t *>(vec) - vector_data(*flatbuf);
- auto start = static_cast<uoffset_t>(vec_start + sizeof(uoffset_t) +
- elem_size * num_elems);
+ auto vec_start = reinterpret_cast<const uint8_t *>(vec) - flatbuf->data();
+ auto start = static_cast<uoffset_t>(vec_start) +
+ static_cast<uoffset_t>(sizeof(uoffset_t)) +
+ elem_size * num_elems;
if (delta_bytes) {
if (delta_elem < 0) {
// Clear elements we're throwing away, since some might remain in the
// buffer.
auto size_clear = -delta_elem * elem_size;
- memset(vector_data(*flatbuf) + start - size_clear, 0, size_clear);
+ memset(flatbuf->data() + start - size_clear, 0, size_clear);
}
ResizeContext(schema, start, delta_bytes, flatbuf, root_table);
- WriteScalar(vector_data(*flatbuf) + vec_start, newsize); // Length field.
+ WriteScalar(flatbuf->data() + vec_start, newsize); // Length field.
// Set new elements to 0.. this can be overwritten by the caller.
if (delta_elem > 0) {
- memset(vector_data(*flatbuf) + start, 0, delta_elem * elem_size);
+ memset(flatbuf->data() + start, 0,
+ static_cast<size_t>(delta_elem) * elem_size);
}
}
- return vector_data(*flatbuf) + start;
+ return flatbuf->data() + start;
}
const uint8_t *AddFlatBuffer(std::vector<uint8_t> &flatbuf,
@@ -363,7 +364,7 @@ const uint8_t *AddFlatBuffer(std::vector<uint8_t> &flatbuf,
// Insert the entire FlatBuffer minus the root pointer.
flatbuf.insert(flatbuf.end(), newbuf + sizeof(uoffset_t), newbuf + newlen);
auto root_offset = ReadScalar<uoffset_t>(newbuf) - sizeof(uoffset_t);
- return vector_data(flatbuf) + insertion_point + root_offset;
+ return flatbuf.data() + insertion_point + root_offset;
}
void CopyInline(FlatBufferBuilder &fbb, const reflection::Field &fielddef,
diff --git a/chromium/third_party/flatbuffers/src/src/util.cpp b/chromium/third_party/flatbuffers/src/src/util.cpp
index 1a2b5f091fc..7b6378f52dc 100644
--- a/chromium/third_party/flatbuffers/src/src/util.cpp
+++ b/chromium/third_party/flatbuffers/src/src/util.cpp
@@ -158,7 +158,7 @@ std::string ConCatPathFileName(const std::string &path,
const std::string &filename) {
std::string filepath = path;
if (filepath.length()) {
- char &filepath_last_character = string_back(filepath);
+ char &filepath_last_character = filepath.back();
if (filepath_last_character == kPathSeparatorWindows) {
filepath_last_character = kPathSeparator;
} else if (filepath_last_character != kPathSeparator) {
@@ -273,8 +273,7 @@ ClassicLocale ClassicLocale::instance_;
std::string RemoveStringQuotes(const std::string &s) {
auto ch = *s.c_str();
- return ((s.size() >= 2) && (ch == '\"' || ch == '\'') &&
- (ch == string_back(s)))
+ return ((s.size() >= 2) && (ch == '\"' || ch == '\'') && (ch == s.back()))
? s.substr(1, s.length() - 2)
: s;
}
diff --git a/chromium/third_party/flatbuffers/src/swift.swiftformat b/chromium/third_party/flatbuffers/src/swift.swiftformat
index 3f4c45d14fb..b198b9292af 100644
--- a/chromium/third_party/flatbuffers/src/swift.swiftformat
+++ b/chromium/third_party/flatbuffers/src/swift.swiftformat
@@ -17,10 +17,11 @@
--typeattributes prev-line # wrapAttributes
# rules
---rules todos,anyObjectProtocol,redundantParens,redundantReturn,redundantSelf,sortedImports,strongifiedSelf,trailingCommas,trailingSpace,wrapArguments,wrapMultilineStatementBraces,indent,wrapAttributes,void,fileHeader
+--rules wrap,todos,anyObjectProtocol,redundantParens,redundantReturn,redundantSelf,sortedImports,strongifiedSelf,trailingCommas,trailingSpace,wrapArguments,wrapMultilineStatementBraces,indent,wrapAttributes,void,fileHeader
--disable trailingclosures
--exclude **/*_generated.swift
+--exclude **/swift_code_*.swift
--exclude **/*.grpc.swift
--header "/*\n * Copyright {year} Google Inc. All rights reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */"
\ No newline at end of file
diff --git a/chromium/third_party/flatbuffers/src/swift/Package.swift b/chromium/third_party/flatbuffers/src/swift/Package.swift
index 5d4c7cc2ac2..d2d2d5c6f07 100644
--- a/chromium/third_party/flatbuffers/src/swift/Package.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Package.swift
@@ -31,5 +31,6 @@ let package = Package(
targets: [
.target(
name: "FlatBuffers",
- dependencies: []),
+ dependencies: [],
+ exclude: ["Documentation.docc/Resources/code/swift"]),
])
diff --git a/chromium/third_party/flatbuffers/src/swift/Package@swift-5.5.swift b/chromium/third_party/flatbuffers/src/swift/Package@swift-5.5.swift
new file mode 100644
index 00000000000..3cfdcf6024d
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Package@swift-5.5.swift
@@ -0,0 +1,36 @@
+// swift-tools-version:5.5
+/*
+ * Copyright 2020 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import PackageDescription
+
+let package = Package(
+ name: "FlatBuffers",
+ platforms: [
+ .iOS(.v11),
+ .macOS(.v10_14),
+ ],
+ products: [
+ .library(
+ name: "FlatBuffers",
+ targets: ["FlatBuffers"]),
+ ],
+ targets: [
+ .target(
+ name: "FlatBuffers",
+ dependencies: []),
+ ])
+
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/ByteBuffer.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/ByteBuffer.swift
index c658d46799a..f0ba5d09993 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/ByteBuffer.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/ByteBuffer.swift
@@ -164,7 +164,10 @@ public struct ByteBuffer {
/// Constructor that creates a Flatbuffer from unsafe memory region without copying
/// - Parameter assumingMemoryBound: The unsafe memory region
/// - Parameter capacity: The size of the given memory region
- public init(assumingMemoryBound memory: UnsafeMutableRawPointer, capacity: Int) {
+ public init(
+ assumingMemoryBound memory: UnsafeMutableRawPointer,
+ capacity: Int)
+ {
_storage = Storage(memory: memory, capacity: capacity, unowned: true)
_writerSize = capacity
}
@@ -244,7 +247,10 @@ public struct ByteBuffer {
@usableFromInline
mutating func push(string str: String, len: Int) {
ensureSpace(size: len)
- if str.utf8.withContiguousStorageIfAvailable({ self.push(bytes: $0, len: len) }) != nil {
+ if str.utf8
+ .withContiguousStorageIfAvailable({ self.push(bytes: $0, len: len) }) !=
+ nil
+ {
} else {
let utf8View = str.utf8
for c in utf8View.reversed() {
@@ -304,7 +310,9 @@ public struct ByteBuffer {
/// - Parameter size: size of the `VTable`
@inline(__always)
mutating func pop(_ size: Int) {
- assert((_writerSize &- size) > 0, "New size should NOT be a negative number")
+ assert(
+ (_writerSize &- size) > 0,
+ "New size should NOT be a negative number")
memset(_storage.memory.advanced(by: writerIndex), 0, _writerSize &- size)
_writerSize = size
}
@@ -341,7 +349,8 @@ public struct ByteBuffer {
assert(
index + count <= _storage.capacity,
"Reading out of bounds is illegal")
- let start = _storage.memory.advanced(by: index).assumingMemoryBound(to: T.self)
+ let start = _storage.memory.advanced(by: index)
+ .assumingMemoryBound(to: T.self)
let array = UnsafeBufferPointer(start: start, count: count)
return Array(array)
}
@@ -359,7 +368,8 @@ public struct ByteBuffer {
assert(
index + count <= _storage.capacity,
"Reading out of bounds is illegal")
- let start = _storage.memory.advanced(by: index).assumingMemoryBound(to: UInt8.self)
+ let start = _storage.memory.advanced(by: index)
+ .assumingMemoryBound(to: UInt8.self)
let bufprt = UnsafeBufferPointer(start: start, count: count)
return String(bytes: Array(bufprt), encoding: type)
}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Constants.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Constants.swift
index 50a58178a6b..8e643fdf899 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Constants.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Constants.swift
@@ -21,7 +21,8 @@ import Foundation
#endif
/// A boolean to see if the system is littleEndian
-let isLitteEndian = CFByteOrderGetCurrent() == Int(CFByteOrderLittleEndian.rawValue)
+let isLitteEndian = CFByteOrderGetCurrent() ==
+ Int(CFByteOrderLittleEndian.rawValue)
/// Constant for the file id length
let FileIdLength = 4
/// Type aliases
@@ -30,7 +31,8 @@ public typealias UOffset = UInt32
public typealias SOffset = Int32
public typealias VOffset = UInt16
/// Maximum size for a buffer
-public let FlatBufferMaxSize = UInt32.max << ((MemoryLayout<SOffset>.size * 8 - 1) - 1)
+public let FlatBufferMaxSize = UInt32
+ .max << ((MemoryLayout<SOffset>.size * 8 - 1) - 1)
/// Protocol that All Scalars should conform to
///
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Documentation.md b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Documentation.md
new file mode 100644
index 00000000000..a1510808ebf
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Documentation.md
@@ -0,0 +1,22 @@
+# ``FlatBuffers``
+
+FlatBuffers: Memory Efficient Serialization Library
+
+## Overview
+
+- Access to serialized data without parsing/unpacking - What sets FlatBuffers apart is that it represents hierarchical data in a flat binary buffer in such a way that it can still be accessed directly without parsing/unpacking, while also still supporting data structure evolution (forwards/backwards compatibility).
+- Memory efficiency and speed - The only memory needed to access your data is that of the buffer. It requires 0 additional allocations (in C++, other languages may vary). FlatBuffers is also very suitable for use with mmap (or streaming), requiring only part of the buffer to be in memory. Access is close to the speed of raw struct access with only one extra indirection (a kind of vtable) to allow for format evolution and optional fields. It is aimed at projects where spending time and space (many memory allocations) to be able to access or construct serialized data is undesirable, such as in games or any other performance sensitive applications. See the benchmarks for details.
+- Flexible - Optional fields mean not only do you get great forwards and backwards compatibility (increasingly important for long-lived games: you don't have to update all data with each new version!), but also a lot of choice in what data you write and what data you don't, and how you design data structures.
+- Tiny code footprint - Small amounts of generated code, and just a single small header as the minimum dependency, which is very easy to integrate. Again, see the benchmark section for details.
+- Strongly typed - Errors happen at compile time rather than at run time, sparing you from manually writing repetitive and error-prone checks. Useful code can be generated for you.
+
+## Topics
+
+### Read this first
+
+- <doc:Tutorial_Table_of_Contents>
+
+### Where to start
+
+- ``FlatBufferBuilder``
+- ``ByteBuffer``
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_1.fbs b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_1.fbs
new file mode 100644
index 00000000000..8b137891791
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_1.fbs
@@ -0,0 +1 @@
+
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_2.fbs b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_2.fbs
new file mode 100644
index 00000000000..a43897845ef
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_2.fbs
@@ -0,0 +1 @@
+enum Color:byte { red, green, blue }
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_3.fbs b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_3.fbs
new file mode 100644
index 00000000000..d31a29cd029
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_3.fbs
@@ -0,0 +1,6 @@
+enum Color:byte { red, green, blue }
+
+struct Vec3 {
+ x:float;
+ y:float;
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_4.fbs b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_4.fbs
new file mode 100644
index 00000000000..51f7bb1aa60
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_4.fbs
@@ -0,0 +1,12 @@
+enum Color:byte { red, green, blue }
+
+struct Vec3 {
+ x:float;
+ y:float;
+}
+
+table Monster {
+ pos:Vec3;
+  color:Color = blue;
+}
+
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_5.fbs b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_5.fbs
new file mode 100644
index 00000000000..8d0b72956b0
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_5.fbs
@@ -0,0 +1,18 @@
+enum Color:byte { red, green, blue }
+
+struct Vec3 {
+ x:float;
+ y:float;
+}
+
+table Monster {
+ pos:Vec3;
+  color:Color = blue;
+
+ mana:short = 150;
+ hp:short = 100;
+ name:string;
+ equipped:Equipment;
+ weapons:[Weapon];
+ path:[Vec3];
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_6.fbs b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_6.fbs
new file mode 100644
index 00000000000..10c3eaf67d8
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_6.fbs
@@ -0,0 +1,25 @@
+enum Color:byte { red, green, blue }
+
+union Equipment { Weapon } // Optionally add more tables.
+
+struct Vec3 {
+ x:float;
+ y:float;
+}
+
+table Monster {
+ pos:Vec3;
+  color:Color = blue;
+
+ mana:short = 150;
+ hp:short = 100;
+ name:string;
+ equipped:Equipment;
+ weapons:[Weapon];
+ path:[Vec3];
+}
+
+table Weapon {
+ name:string;
+ damage:short;
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_7.fbs b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_7.fbs
new file mode 100644
index 00000000000..b4dde6ced60
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/fbs/monster_step_7.fbs
@@ -0,0 +1,27 @@
+enum Color:byte { red, green, blue }
+
+union Equipment { Weapon } // Optionally add more tables.
+
+struct Vec3 {
+ x:float;
+ y:float;
+}
+
+table Monster {
+ pos:Vec3;
+  color:Color = blue;
+
+ mana:short = 150;
+ hp:short = 100;
+ name:string;
+ equipped:Equipment;
+ weapons:[Weapon];
+ path:[Vec3];
+}
+
+table Weapon {
+ name:string;
+ damage:short;
+}
+
+root_type Monster; // flatc --swift monster.fbs
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_1.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_1.swift
new file mode 100644
index 00000000000..fecc4ab4499
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_1.swift
@@ -0,0 +1 @@
+import Foundation
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_10.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_10.swift
new file mode 100644
index 00000000000..51d4fbfcdb6
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_10.swift
@@ -0,0 +1,71 @@
+import FlatBuffers
+import Foundation
+
+func run() {
+ // create a `FlatBufferBuilder`, which will be used to serialize objects
+  var builder = FlatBufferBuilder(initialSize: 1024)
+
+ let weapon1Name = builder.create(string: "Sword")
+ let weapon2Name = builder.create(string: "Axe")
+
+ // start creating the weapon by calling startWeapon
+ let weapon1Start = Weapon.startWeapon(&builder)
+ Weapon.add(name: weapon1Name, &builder)
+ Weapon.add(damage: 3, &builder)
+ // end the object by passing the start point for the weapon 1
+ let sword = Weapon.endWeapon(&builder, start: weapon1Start)
+
+ let weapon2Start = Weapon.startWeapon(&builder)
+ Weapon.add(name: weapon2Name, &builder)
+ Weapon.add(damage: 5, &builder)
+ let axe = Weapon.endWeapon(&builder, start: weapon2Start)
+
+ // Create a FlatBuffer `vector` that contains offsets to the sword and axe
+ // we created above.
+ let weaponsOffset = builder.createVector(ofOffsets: [sword, axe])
+
+ // Name of the Monster.
+ let name = builder.create(string: "Orc")
+
+  let pathOffset = builder.createVector(ofStructs: [
+ Vec3(x: 0, y: 0),
+ Vec3(x: 5, y: 5),
+ ])
+
+ // startVector(len, elementSize: MemoryLayout<Offset>.size)
+ // for o in offsets.reversed() {
+ // push(element: o)
+ // }
+ // endVector(len: len)
+
+ let orc = Monster.createMonster(
+ &builder,
+ pos: Vec3(x: 1, y: 2),
+ hp: 300,
+ nameOffset: name,
+ color: .red,
+ weaponsVectorOffset: weaponsOffset,
+ equippedType: .weapon,
+ equippedOffset: axe,
+ pathOffset: pathOffset)
+
+ // let start = Monster.startMonster(&builder)
+ // Monster.add(pos: Vec3(x: 1, y: 2), &builder)
+ // Monster.add(hp: 300, &builder)
+ // Monster.add(name: name, &builder)
+ // Monster.add(color: .red, &builder)
+ // Monster.addVectorOf(weapons: weaponsOffset, &builder)
+ // Monster.add(equippedType: .weapon, &builder)
+  // Monster.addVectorOf(paths: pathOffset, &builder)
+ // Monster.add(equipped: axe, &builder)
+ // var orc = Monster.endMonster(&builder, start: start)
+
+ // Call `finish(offset:)` to instruct the builder that this monster is complete.
+ builder.finish(offset: orc)
+ // This must be called after `finish()`.
+ // `sizedByteArray` returns the finished buf of type [UInt8].
+ let buf = builder.sizedByteArray
+
+  // or you can use `builder.data` to get the finished bytes as a Data object
+  let bufData = builder.data
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_11.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_11.swift
new file mode 100644
index 00000000000..3ed7ea2425d
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_11.swift
@@ -0,0 +1,11 @@
+import FlatBuffers
+import Foundation
+
+func run() {
+ // create a ByteBuffer(:) from an [UInt8] or Data()
+  let buf: [UInt8] = [] // Get your data
+
+ // Get an accessor to the root object inside the buffer.
+ let monster: Monster = try! getCheckedRoot(byteBuffer: ByteBuffer(bytes: buf))
+ // let monster: Monster = getRoot(byteBuffer: ByteBuffer(bytes: buf))
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_12.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_12.swift
new file mode 100644
index 00000000000..895653ebf3d
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_12.swift
@@ -0,0 +1,19 @@
+import FlatBuffers
+import Foundation
+
+func run() {
+ // create a ByteBuffer(:) from an [UInt8] or Data()
+  let buf: [UInt8] = [] // Get your data
+
+ // Get an accessor to the root object inside the buffer.
+ let monster: Monster = try! getCheckedRoot(byteBuffer: ByteBuffer(bytes: buf))
+ // let monster: Monster = getRoot(byteBuffer: ByteBuffer(bytes: buf))
+
+ let hp = monster.hp
+ let mana = monster.mana
+ let name = monster.name // returns an optional string
+
+ let pos = monster.pos
+ let x = pos.x
+ let y = pos.y
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_13.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_13.swift
new file mode 100644
index 00000000000..7aac982cf81
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_13.swift
@@ -0,0 +1,26 @@
+import FlatBuffers
+import Foundation
+
+func run() {
+ // create a ByteBuffer(:) from an [UInt8] or Data()
+  let buf: [UInt8] = [] // Get your data
+
+ // Get an accessor to the root object inside the buffer.
+ let monster: Monster = try! getCheckedRoot(byteBuffer: ByteBuffer(bytes: buf))
+ // let monster: Monster = getRoot(byteBuffer: ByteBuffer(bytes: buf))
+
+ let hp = monster.hp
+ let mana = monster.mana
+ let name = monster.name // returns an optional string
+
+ let pos = monster.pos
+ let x = pos.x
+ let y = pos.y
+
+ // Get and check if the monster has an equipped item
+ if monster.equippedType == .weapon {
+    let _weapon = monster.equipped(type: Weapon.self)
+    let name = _weapon?.name // should return "Axe"
+    let dmg = _weapon?.damage // should return 5
+ }
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_2.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_2.swift
new file mode 100644
index 00000000000..ddd066e7a82
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_2.swift
@@ -0,0 +1,2 @@
+import FlatBuffers
+import Foundation
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_3.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_3.swift
new file mode 100644
index 00000000000..bacdaa55133
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_3.swift
@@ -0,0 +1,7 @@
+import FlatBuffers
+import Foundation
+
+func run() {
+ // create a `FlatBufferBuilder`, which will be used to serialize objects
+  var builder = FlatBufferBuilder(initialSize: 1024)
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_4.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_4.swift
new file mode 100644
index 00000000000..87546993bf5
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_4.swift
@@ -0,0 +1,10 @@
+import FlatBuffers
+import Foundation
+
+func run() {
+ // create a `FlatBufferBuilder`, which will be used to serialize objects
+  var builder = FlatBufferBuilder(initialSize: 1024)
+
+ let weapon1Name = builder.create(string: "Sword")
+ let weapon2Name = builder.create(string: "Axe")
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_5.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_5.swift
new file mode 100644
index 00000000000..12e0d4ca660
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_5.swift
@@ -0,0 +1,22 @@
+import FlatBuffers
+import Foundation
+
+func run() {
+ // create a `FlatBufferBuilder`, which will be used to serialize objects
+  var builder = FlatBufferBuilder(initialSize: 1024)
+
+ let weapon1Name = builder.create(string: "Sword")
+ let weapon2Name = builder.create(string: "Axe")
+
+ // start creating the weapon by calling startWeapon
+ let weapon1Start = Weapon.startWeapon(&builder)
+ Weapon.add(name: weapon1Name, &builder)
+ Weapon.add(damage: 3, &builder)
+ // end the object by passing the start point for the weapon 1
+ let sword = Weapon.endWeapon(&builder, start: weapon1Start)
+
+ let weapon2Start = Weapon.startWeapon(&builder)
+ Weapon.add(name: weapon2Name, &builder)
+ Weapon.add(damage: 5, &builder)
+ let axe = Weapon.endWeapon(&builder, start: weapon2Start)
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_6.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_6.swift
new file mode 100644
index 00000000000..bfb4f7e5157
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_6.swift
@@ -0,0 +1,26 @@
+import FlatBuffers
+import Foundation
+
+func run() {
+ // create a `FlatBufferBuilder`, which will be used to serialize objects
+  var builder = FlatBufferBuilder(initialSize: 1024)
+
+ let weapon1Name = builder.create(string: "Sword")
+ let weapon2Name = builder.create(string: "Axe")
+
+ // start creating the weapon by calling startWeapon
+ let weapon1Start = Weapon.startWeapon(&builder)
+ Weapon.add(name: weapon1Name, &builder)
+ Weapon.add(damage: 3, &builder)
+ // end the object by passing the start point for the weapon 1
+ let sword = Weapon.endWeapon(&builder, start: weapon1Start)
+
+ let weapon2Start = Weapon.startWeapon(&builder)
+ Weapon.add(name: weapon2Name, &builder)
+ Weapon.add(damage: 5, &builder)
+ let axe = Weapon.endWeapon(&builder, start: weapon2Start)
+
+ // Create a FlatBuffer `vector` that contains offsets to the sword and axe
+ // we created above.
+ let weaponsOffset = builder.createVector(ofOffsets: [sword, axe])
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_7.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_7.swift
new file mode 100644
index 00000000000..97264b018da
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_7.swift
@@ -0,0 +1,29 @@
+import FlatBuffers
+import Foundation
+
+func run() {
+ // create a `FlatBufferBuilder`, which will be used to serialize objects
+  var builder = FlatBufferBuilder(initialSize: 1024)
+
+ let weapon1Name = builder.create(string: "Sword")
+ let weapon2Name = builder.create(string: "Axe")
+
+ // start creating the weapon by calling startWeapon
+ let weapon1Start = Weapon.startWeapon(&builder)
+ Weapon.add(name: weapon1Name, &builder)
+ Weapon.add(damage: 3, &builder)
+ // end the object by passing the start point for the weapon 1
+ let sword = Weapon.endWeapon(&builder, start: weapon1Start)
+
+ let weapon2Start = Weapon.startWeapon(&builder)
+ Weapon.add(name: weapon2Name, &builder)
+ Weapon.add(damage: 5, &builder)
+ let axe = Weapon.endWeapon(&builder, start: weapon2Start)
+
+ // Create a FlatBuffer `vector` that contains offsets to the sword and axe
+ // we created above.
+ let weaponsOffset = builder.createVector(ofOffsets: [sword, axe])
+
+ // Name of the Monster.
+ let name = builder.create(string: "Orc")
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_8.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_8.swift
new file mode 100644
index 00000000000..a0c2819809f
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_8.swift
@@ -0,0 +1,40 @@
+import FlatBuffers
+import Foundation
+
+func run() {
+ // create a `FlatBufferBuilder`, which will be used to serialize objects
+  var builder = FlatBufferBuilder(initialSize: 1024)
+
+ let weapon1Name = builder.create(string: "Sword")
+ let weapon2Name = builder.create(string: "Axe")
+
+ // start creating the weapon by calling startWeapon
+ let weapon1Start = Weapon.startWeapon(&builder)
+ Weapon.add(name: weapon1Name, &builder)
+ Weapon.add(damage: 3, &builder)
+ // end the object by passing the start point for the weapon 1
+ let sword = Weapon.endWeapon(&builder, start: weapon1Start)
+
+ let weapon2Start = Weapon.startWeapon(&builder)
+ Weapon.add(name: weapon2Name, &builder)
+ Weapon.add(damage: 5, &builder)
+ let axe = Weapon.endWeapon(&builder, start: weapon2Start)
+
+ // Create a FlatBuffer `vector` that contains offsets to the sword and axe
+ // we created above.
+ let weaponsOffset = builder.createVector(ofOffsets: [sword, axe])
+
+ // Name of the Monster.
+ let name = builder.create(string: "Orc")
+
+  let pathOffset = builder.createVector(ofStructs: [
+ Vec3(x: 0, y: 0),
+ Vec3(x: 5, y: 5),
+ ])
+
+ // startVector(len, elementSize: MemoryLayout<Offset>.size)
+ // for o in offsets.reversed() {
+ // push(element: o)
+ // }
+ // endVector(len: len)
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_9.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_9.swift
new file mode 100644
index 00000000000..51ce8fd2c2b
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/code/swift/swift_code_9.swift
@@ -0,0 +1,62 @@
+import FlatBuffers
+import Foundation
+
+func run() {
+ // create a `FlatBufferBuilder`, which will be used to serialize objects
+  var builder = FlatBufferBuilder(initialSize: 1024)
+
+ let weapon1Name = builder.create(string: "Sword")
+ let weapon2Name = builder.create(string: "Axe")
+
+ // start creating the weapon by calling startWeapon
+ let weapon1Start = Weapon.startWeapon(&builder)
+ Weapon.add(name: weapon1Name, &builder)
+ Weapon.add(damage: 3, &builder)
+ // end the object by passing the start point for the weapon 1
+ let sword = Weapon.endWeapon(&builder, start: weapon1Start)
+
+ let weapon2Start = Weapon.startWeapon(&builder)
+ Weapon.add(name: weapon2Name, &builder)
+ Weapon.add(damage: 5, &builder)
+ let axe = Weapon.endWeapon(&builder, start: weapon2Start)
+
+ // Create a FlatBuffer `vector` that contains offsets to the sword and axe
+ // we created above.
+ let weaponsOffset = builder.createVector(ofOffsets: [sword, axe])
+
+ // Name of the Monster.
+ let name = builder.create(string: "Orc")
+
+  let pathOffset = builder.createVector(ofStructs: [
+ Vec3(x: 0, y: 0),
+ Vec3(x: 5, y: 5),
+ ])
+
+ // startVector(len, elementSize: MemoryLayout<Offset>.size)
+ // for o in offsets.reversed() {
+ // push(element: o)
+ // }
+ // endVector(len: len)
+
+ let orc = Monster.createMonster(
+ &builder,
+ pos: Vec3(x: 1, y: 2),
+ hp: 300,
+ nameOffset: name,
+ color: .red,
+ weaponsVectorOffset: weaponsOffset,
+ equippedType: .weapon,
+ equippedOffset: axe,
+ pathOffset: pathOffset)
+
+ // let start = Monster.startMonster(&builder)
+ // Monster.add(pos: Vec3(x: 1, y: 2), &builder)
+ // Monster.add(hp: 300, &builder)
+ // Monster.add(name: name, &builder)
+ // Monster.add(color: .red, &builder)
+ // Monster.addVectorOf(weapons: weaponsOffset, &builder)
+ // Monster.add(equippedType: .weapon, &builder)
+  // Monster.addVectorOf(paths: pathOffset, &builder)
+ // Monster.add(equipped: axe, &builder)
+ // var orc = Monster.endMonster(&builder, start: start)
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/images/tutorial_cover_image_1.png b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/images/tutorial_cover_image_1.png
new file mode 100644
index 00000000000..0e64fe60148
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Resources/images/tutorial_cover_image_1.png
Binary files differ
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/Tutorial_Table_of_Contents.tutorial b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/Tutorial_Table_of_Contents.tutorial
new file mode 100644
index 00000000000..009116fc4c8
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/Tutorial_Table_of_Contents.tutorial
@@ -0,0 +1,14 @@
+@Tutorials(name: "Starting with FlatBuffers") {
+ @Intro(title: "Starting with FlatBuffers") {
+ FlatBuffers is an efficient cross platform serialization library for C++,
+ C#, C, Go, Java, Kotlin, JavaScript, Lobster, Lua, TypeScript, PHP, Python, Rust and Swift.
+ It was originally created at Google for game development and other performance-critical applications.
+ }
+ @Chapter(name: "Generating your code") {
+ Start by generating your first FlatBuffers objects.
+ @Image(source: tutorial_cover_image_1.png, alt: "A code structure for a base struct in flatbuffers")
+ @TutorialReference(tutorial: "doc:creating_flatbuffer_schema")
+ @TutorialReference(tutorial: "doc:create_your_first_buffer")
+ @TutorialReference(tutorial: "doc:reading_bytebuffer")
+ }
+}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/create_your_first_buffer.tutorial b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/create_your_first_buffer.tutorial
new file mode 100644
index 00000000000..2f8089f7d3d
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/create_your_first_buffer.tutorial
@@ -0,0 +1,72 @@
+@Tutorial(time: 5) {
+ @Intro(title: "After having our code generated") {
+    After generating the code from the previous section, we will now start creating our monster object.
+ We will create a monster object called orc.
+ }
+
+ @Section(title: "Building your first buffer") {
+ @ContentAndMedia {}
+ @Steps {
+ @Step {
+ Starting with a new file, we will create our very first Flatbuffer.
+ @Code(name: "ViewController.swift", file: "swift_code_1.swift")
+ }
+ @Step {
+ First, we need to import ``FlatBuffers``
+ @Code(name: "ViewController.swift", file: "swift_code_2.swift")
+ }
+ @Step {
+ We need to create an instance of the `FlatBufferBuilder`, which will contain the buffer as it grows.
+ You can pass an initial size of the buffer (here 1024 bytes), which will grow automatically if needed.
+ @Code(name: "ViewController.swift", file: "swift_code_3.swift")
+ }
+ @Step {
+ After creating the builder, we can start serializing our data. Before we make our orc Monster,
+        let's create some Weapons: a Sword and an Axe. However, we will start by naming our weapons `Sword` and `Axe`.
+ @Code(name: "ViewController.swift", file: "swift_code_4.swift")
+ }
+ @Step {
+        After naming the weapons, we will create two weapon objects with the damage that each weapon is going to deal.
+        That's done by calling the `start` method on each table you are creating, in this case `startWeapon`,
+        and finishing by calling `end`.
+ @Code(name: "ViewController.swift", file: "swift_code_5.swift")
+ }
+ @Step {
+        We will take our serialized data (Sword and Axe) and serialize their offsets as a vector of tables into our `ByteBuffer`,
+        so we can reference them later on from our Monster object.
+ @Code(name: "ViewController.swift", file: "swift_code_6.swift")
+ }
+ @Step {
+ We will add our Monster name as a string value just like we did with the weapons.
+ @Code(name: "ViewController.swift", file: "swift_code_7.swift")
+ }
+
+ @Step {
+        We will create a path that our monster will use while roaming in its den. To create a vector of paths we use
+        `createVector(ofStructs: [])`, which takes a native `Swift` struct that has been padded to fit the `FlatBuffers` standards.
+
+        There are usually two ways of creating vectors in `FlatBuffers`, which you can see in the commented-out code.
+        There are also multiple convenience methods that cover all the bases,
+        so that you don't have to create a vector manually with `start` and `end`.
+ @Code(name: "ViewController.swift", file: "swift_code_8.swift")
+ }
+
+ @Step {
+        Now we serialize our data into our `Monster` object. Again, there are two ways of doing this: by calling the `create` method or
+        by serializing the objects yourself. What we added to our Monster are the `Equipped Type` and the `Equipped` union itself, which
+        allows the Monster to have the `Axe` as its equipped weapon.
+
+        Important: Unlike structs, you should not nest tables or other objects,
+        which is why we created all the `strings/vectors/tables` that this monster refers to before calling `start`.
+        If you try to create any of them between `start` and `end`, you will get an `assert`.
+ @Code(name: "ViewController.swift", file: "swift_code_9.swift")
+ }
+
+ @Step {
+        Finally, you can finalize the buffer by calling `builder.finish` and get the byte array from the buffer.
+ @Code(name: "ViewController.swift", file: "swift_code_10.swift")
+ }
+
+ }
+ }
+ }
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/creating_flatbuffer_schema.tutorial b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/creating_flatbuffer_schema.tutorial
new file mode 100644
index 00000000000..0fcd362ef9e
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/creating_flatbuffer_schema.tutorial
@@ -0,0 +1,47 @@
+@Tutorial(time: 2) {
+ @Intro(title: "Creating a schema") {
+    You will need to have the FlatBuffers compiler installed on your device.
+ }
+
+ @Section(title: "Creating a schema") {
+ @ContentAndMedia {}
+ @Steps {
+ @Step {
+        Start by creating a new empty file called `monster.fbs`. We want to create a Monster table that contains
+ position, color, and basic information about the monster.
+ @Code(name: "monster.fbs", file: "monster_step_1.fbs")
+ }
+ @Step {
+        We will start by adding our Color object. We will be using an enum to represent this object.
+ @Code(name: "monster.fbs", file: "monster_step_2.fbs")
+ }
+ @Step {
+        We will add a position object and use a struct to represent that type of data, where we need the monster's
+        x and y positions.
+ @Code(name: "monster.fbs", file: "monster_step_3.fbs")
+ }
+ @Step {
+        Then we will create our Monster object of type table. This will contain the current position of our
+        monster and its color.
+ @Code(name: "monster.fbs", file: "monster_step_4.fbs")
+ }
+ @Step {
+        Our Monster is still missing mana, hp, a name, an equipped Weapon, weapons, and a path. We will add these
+        fields to our table with a proper data type for each. For example, weapons and path would each be a vector of data.
+ @Code(name: "monster.fbs", file: "monster_step_5.fbs")
+ }
+ @Step {
+        We are now missing two data types, `Weapon` and `Equipment`. Since Equipment can be a weapon, we will use
+        a `Union` that can contain all the equipment that you would want your monster to have. The weapon itself simply
+        has a name and an amount of damage.
+ @Code(name: "monster.fbs", file: "monster_step_6.fbs")
+ }
+ @Step {
+        To finalize our schema, we add a root type of type Monster,
+        then run the command `flatc --swift monster.fbs`.
+        Note: Make sure to add the generated file to your Xcode project.
+ @Code(name: "monster.fbs", file: "monster_step_7.fbs")
+ }
+ }
+ }
+ }
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/reading_bytebuffer.tutorial b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/reading_bytebuffer.tutorial
new file mode 100644
index 00000000000..2c4609f7e3c
--- /dev/null
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Documentation.docc/Tutorials/reading_bytebuffer.tutorial
@@ -0,0 +1,27 @@
+@Tutorial(time: 2) {
+ @Intro(title: "Reading ByteBuffers") {
+    After creating our ByteBuffer, we can now read it.
+ }
+
+ @Section(title: "Reading your first buffer") {
+ @ContentAndMedia {}
+ @Steps {
+ @Step {
+        After fetching the data from disk or network, you need to access it. That can be done
+        by simply calling `getCheckedRoot`, which verifies the buffer so that you don't read from a corrupt one.
+        However, if you are sure that the data is 100% correct, you can simply call `getRoot`.
+ @Code(name: "ViewController.swift", file: "swift_code_11.swift")
+ }
+ @Step {
+        Now that we have a Monster object, all the fields can be accessed by simply reading them. Note: deprecated fields will not
+        show up.
+ @Code(name: "ViewController.swift", file: "swift_code_12.swift")
+ }
+ @Step {
+        And you can access union types as easily as this.
+ @Code(name: "ViewController.swift", file: "swift_code_13.swift")
+ }
+ }
+ }
+ }
+
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Enum.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Enum.swift
index f8cbebb1917..efb698b1e37 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Enum.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Enum.swift
@@ -17,7 +17,8 @@
import Foundation
/// Enum is a protocol that all flatbuffers enums should conform to
-/// Since it allows us to get the actual `ByteSize` and `Value`
+/// It allows us to get the actual `ByteSize` and `Value` from
+/// a Swift enum.
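+///
+/// A hedged sketch of what a conforming enum looks like (the names are
+/// illustrative only; generated code produces the real conformances):
+/// ```swift
+/// enum Color: Int8, Enum {
+///   typealias T = Int8
+///   static var byteSize: Int { MemoryLayout<Int8>.size }
+///   var value: Int8 { rawValue }
+///   case red, green, blue
+/// }
+/// ```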
public protocol Enum {
/// associatedtype that the type of the enum should conform to
associatedtype T: Scalar & Verifiable
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBufferBuilder.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBufferBuilder.swift
index 71b71399232..f9ae83b1dc5 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBufferBuilder.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBufferBuilder.swift
@@ -16,9 +16,16 @@
import Foundation
-/// `FlatBufferBuilder` builds a `FlatBuffer` through manipulating its internal state.
-/// This is done by creating a `ByteBuffer` that hosts the incoming data and
-/// has a hardcoded growth limit of `2GiB` which is set by the Flatbuffers standards
+/// ``FlatBufferBuilder`` builds a `FlatBuffer` through manipulating its internal state.
+///
+/// This is done by creating a ``ByteBuffer`` that hosts the incoming data and
+/// has a hardcoded growth limit of `2GiB` which is set by the Flatbuffers standards.
+///
+/// ```swift
+/// var builder = FlatBufferBuilder()
+/// ```
+/// The builder should always be created as a variable, since it will be passed by reference into the writers.
+///
@frozen
public struct FlatBufferBuilder {
@@ -47,21 +54,30 @@ public struct FlatBufferBuilder {
/// Gives a read access to the buffer's size
public var size: UOffset { _bb.size }
+
/// Data representation of the buffer
+ ///
+ /// Should only be used after ``finish(offset:addPrefix:)`` is called
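+  ///
+  /// A minimal usage sketch (`root` and `url` are illustrative placeholders):
+  /// ```swift
+  /// builder.finish(offset: root)
+  /// try builder.data.write(to: url)
+  /// ```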
public var data: Data {
assert(finished, "Data shouldn't be called before finish()")
return Data(
bytes: _bb.memory.advanced(by: _bb.writerIndex),
count: _bb.capacity &- _bb.writerIndex)
}
- /// Get's the fully sized buffer stored in memory
+
+ /// Returns the underlying bytes in the ``ByteBuffer``
+ ///
+ /// Note: This should be used with caution.
public var fullSizedByteArray: [UInt8] {
let ptr = UnsafeBufferPointer(
start: _bb.memory.assumingMemoryBound(to: UInt8.self),
count: _bb.capacity)
return Array(ptr)
}
- /// Returns the written size of the buffer
+
+  /// Returns the bytes written into the ``ByteBuffer``
+ ///
+ /// Should only be used after ``finish(offset:addPrefix:)`` is called
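+  ///
+  /// A short sketch of handing off the finished bytes (`send(_:)` is a
+  /// placeholder for whatever transport is used):
+  /// ```swift
+  /// builder.finish(offset: root)
+  /// let bytes: [UInt8] = builder.sizedByteArray
+  /// send(Data(bytes))
+  /// ```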
public var sizedByteArray: [UInt8] {
assert(finished, "Data shouldn't be called before finish()")
let cp = _bb.capacity &- _bb.writerIndex
@@ -71,10 +87,17 @@ public struct FlatBufferBuilder {
let ptr = UnsafeBufferPointer(start: start, count: cp)
return Array(ptr)
}
- /// Returns the buffer
+
+ /// Returns the original ``ByteBuffer``
+ ///
+  /// Returns the current buffer that was just created,
+  /// with the offsets and data written to it.
public var buffer: ByteBuffer { _bb }
- /// Returns A sized Buffer from the readable bytes
+ /// Returns a newly created sized ``ByteBuffer``
+ ///
+  /// Returns a new buffer that is sized to the data written
+  /// to the main buffer.
public var sizedBuffer: ByteBuffer {
assert(finished, "Data shouldn't be called before finish()")
return ByteBuffer(
@@ -84,20 +107,28 @@ public struct FlatBufferBuilder {
// MARK: - Init
- /// initialize the buffer with a size
+ /// Initialize the buffer with a size
/// - Parameters:
/// - initialSize: Initial size for the buffer
/// - force: Allows default to be serialized into the buffer
- public init(initialSize: Int32 = 1024, serializeDefaults force: Bool = false) {
+ ///
+  /// This initializes a new builder with an `initialSize` that is used to allocate
+  /// a new ``ByteBuffer``. ``FlatBufferBuilder`` doesn't serialize defaults by default;
+  /// however, the builder can be forced to by passing true for `serializeDefaults`.
+ public init(
+ initialSize: Int32 = 1024,
+ serializeDefaults force: Bool = false)
+ {
assert(initialSize > 0, "Size should be greater than zero!")
guard isLitteEndian else {
- fatalError("Reading/Writing a buffer in big endian machine is not supported on swift")
+ fatalError(
+ "Reading/Writing a buffer in big endian machine is not supported on swift")
}
serializeDefaults = force
_bb = ByteBuffer(initialSize: Int(initialSize))
}
- /// Clears the buffer and the builder from it's data
+ /// Clears the builder and the buffer from the written data.
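+  ///
+  /// A small sketch of reusing one builder for several buffers instead of
+  /// allocating a new one each time:
+  /// ```swift
+  /// builder.finish(offset: root)
+  /// let firstMessage = builder.sizedByteArray
+  /// builder.clear() // the builder can now build the next buffer
+  /// ```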
mutating public func clear() {
_minAlignment = 0
isNested = false
@@ -113,6 +144,9 @@ public struct FlatBufferBuilder {
/// - Parameters:
/// - table: offset for the table
/// - fields: Array of all the important fields to be serialized
+ ///
+  /// *NOTE: Never call this function directly; it is only supposed to be called
+  /// by the generated code.*
mutating public func require(table: Offset, fields: [Int32]) {
for field in fields {
let start = _bb.capacity &- Int(table.o)
@@ -129,6 +163,23 @@ public struct FlatBufferBuilder {
/// - offset: Offset of the table
/// - fileId: Takes the fileId
/// - prefix: if false it wont add the size of the buffer
+ ///
+ /// ``finish(offset:fileId:addPrefix:)`` should be called at the end of creating
+ /// a table
+ /// ```swift
+ /// var root = SomeObject
+ /// .createObject(&builder,
+ /// name: nameOffset)
+ /// builder.finish(
+ /// offset: root,
+ /// fileId: "ax1a",
+ /// addPrefix: true)
+ /// ```
+  /// The file id is appended at the end of the written bytes before
+  /// finishing the buffer.
+  ///
+  /// If `addPrefix` is true, the written bytes will also
+  /// include the size of the current buffer.
mutating public func finish(
offset: Offset,
fileId: String,
@@ -147,6 +198,19 @@ public struct FlatBufferBuilder {
/// - Parameters:
/// - offset: Offset of the table
/// - prefix: if false it wont add the size of the buffer
+ ///
+ /// ``finish(offset:addPrefix:)`` should be called at the end of creating
+ /// a table
+ /// ```swift
+ /// var root = SomeObject
+ /// .createObject(&builder,
+ /// name: nameOffset)
+ /// builder.finish(
+ /// offset: root,
+ /// addPrefix: true)
+ /// ```
+ /// If `addPrefix` is true, the written bytes would
+ /// include the size of the current buffer.
mutating public func finish(
offset: Offset,
addPrefix prefix: Bool = false)
@@ -160,10 +224,15 @@ public struct FlatBufferBuilder {
finished = true
}
- /// starttable will let the builder know, that a new object is being serialized.
+  /// ``startTable(with:)`` will let the builder know that a new object is being serialized.
///
- /// The function will fatalerror if called while there is another object being serialized
+  /// The function will `fatalError` if called while there is another object being serialized.
+ /// ```swift
+ /// let start = Monster
+ /// .startMonster(&fbb)
+ /// ```
/// - Parameter numOfFields: Number of elements to be written to the buffer
+ /// - Returns: Offset of the newly started table
mutating public func startTable(with numOfFields: Int) -> UOffset {
notNested()
isNested = true
@@ -171,11 +240,14 @@ public struct FlatBufferBuilder {
return _bb.size
}
- /// Endtable will let the builder know that the object that's written to it is completed
+  /// ``endTable(at:)`` will let the ``FlatBufferBuilder`` know that the
+  /// object that's written to it is completed.
+  ///
+  /// This is called after all the elements are serialized;
+  /// it will add the current vtable into the ``ByteBuffer``.
+  /// The function will `fatalError` if it is called
+  /// without ``startTable(with:)``, or if the object has exceeded the limit of 2GB.
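+  ///
+  /// A rough sketch of the pairing that generated code normally emits:
+  /// ```swift
+  /// let start = builder.startTable(with: 3)
+  /// // ... add the table's fields here ...
+  /// let root = builder.endTable(at: start)
+  /// ```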
///
- /// This would be called after all the elements are serialized, it will add the vtable into the buffer.
- /// it will fatalError in case the object is called without starttable, or the object has exceeded the limit of
- /// 2GB,
/// - Parameter startOffset:Start point of the object written
/// - returns: The root of the table
mutating public func endTable(at startOffset: UOffset) -> UOffset {
@@ -239,7 +311,7 @@ public struct FlatBufferBuilder {
// MARK: - Builds Buffer
- /// asserts to see if the object is not nested
+ /// Asserts to see if the object is not nested
@usableFromInline
mutating internal func notNested() {
assert(!isNested, "Object serialization must not be nested")
@@ -259,7 +331,10 @@ public struct FlatBufferBuilder {
/// - bufSize: Current size of the buffer + the offset of the object to be written
/// - elementSize: Element size
@inline(__always)
- mutating internal func padding(bufSize: UInt32, elementSize: UInt32) -> UInt32 {
+ mutating internal func padding(
+ bufSize: UInt32,
+ elementSize: UInt32) -> UInt32
+ {
((~bufSize) &+ 1) & (elementSize - 1)
}
@@ -304,7 +379,18 @@ public struct FlatBufferBuilder {
// MARK: - Inserting Vectors
- /// Starts a vector of length and Element size
+ /// ``startVector(_:elementSize:)`` creates a new vector within buffer
+ ///
+ /// The function checks if there is a current object being written, if
+ /// the check passes it creates a buffer alignment of `length * elementSize`
+ /// ```swift
+ /// builder.startVector(
+ /// int32Values.count, elementSize: 4)
+ /// ```
+ ///
+ /// - Parameters:
+ /// - len: Length of vector to be created
+ /// - elementSize: Size of object type to be written
mutating public func startVector(_ len: Int, elementSize: Int) {
notNested()
isNested = true
@@ -312,46 +398,102 @@ public struct FlatBufferBuilder {
preAlign(len: len &* elementSize, alignment: elementSize)
}
- /// Ends the vector of at length
+ /// ``endVector(len:)`` ends the currently created vector
+ ///
+  /// Calling ``endVector(len:)`` requires the length of the current
+  /// vector. The length is pushed to indicate the count of elements
+  /// within the vector. If ``endVector(len:)`` is called without
+ /// ``startVector(_:elementSize:)`` it asserts.
+ ///
+ /// ```swift
+ /// let vectorOffset = builder.
+ /// endVector(len: int32Values.count)
+ /// ```
///
- /// The current function will fatalError if startVector is called before serializing the vector
/// - Parameter len: Length of the buffer
+ /// - Returns: Returns the current ``Offset`` in the ``ByteBuffer``
mutating public func endVector(len: Int) -> Offset {
assert(isNested, "Calling endVector without calling startVector")
isNested = false
return Offset(offset: push(element: Int32(len)))
}
- /// Creates a vector of type Scalar in the buffer
+ /// Creates a vector of type ``Scalar`` into the ``ByteBuffer``
+ ///
+ /// ``createVector(_:)-4swl0`` writes a vector of type Scalars into
+  /// ``ByteBuffer``. This is a convenience method instead of calling
+ /// ``startVector(_:elementSize:)`` and then ``endVector(len:)``
+ /// ```swift
+ /// let vectorOffset = builder.
+ /// createVector([1, 2, 3, 4])
+ /// ```
+ ///
+ /// The underlying implementation simply calls ``createVector(_:size:)-4lhrv``
+ ///
/// - Parameter elements: elements to be written into the buffer
- /// - returns: Offset of the vector
+ /// - returns: ``Offset`` of the vector
mutating public func createVector<T: Scalar>(_ elements: [T]) -> Offset {
createVector(elements, size: elements.count)
}
/// Creates a vector of type Scalar in the buffer
+ ///
+ /// ``createVector(_:)-4swl0`` writes a vector of type Scalars into
+  /// ``ByteBuffer``. This is a convenience method instead of calling
+ /// ``startVector(_:elementSize:)`` and then ``endVector(len:)``
+ /// ```swift
+ /// let vectorOffset = builder.
+ /// createVector([1, 2, 3, 4], size: 4)
+ /// ```
+ ///
/// - Parameter elements: Elements to be written into the buffer
/// - Parameter size: Count of elements
- /// - returns: Offset of the vector
- mutating public func createVector<T: Scalar>(_ elements: [T], size: Int) -> Offset {
+ /// - returns: ``Offset`` of the vector
+ mutating public func createVector<T: Scalar>(
+ _ elements: [T],
+ size: Int) -> Offset
+ {
let size = size
startVector(size, elementSize: MemoryLayout<T>.size)
_bb.push(elements: elements)
return endVector(len: size)
}
- /// Creates a vector of type Enums in the buffer
+ /// Creates a vector of type ``Enum`` into the ``ByteBuffer``
+ ///
+ /// ``createVector(_:)-9h189`` writes a vector of type ``Enum`` into
+  /// ``ByteBuffer``. This is a convenience method instead of calling
+ /// ``startVector(_:elementSize:)`` and then ``endVector(len:)``
+ /// ```swift
+ /// let vectorOffset = builder.
+ /// createVector([.swift, .cpp])
+ /// ```
+ ///
+ /// The underlying implementation simply calls ``createVector(_:size:)-7cx6z``
+ ///
/// - Parameter elements: elements to be written into the buffer
- /// - returns: Offset of the vector
+ /// - returns: ``Offset`` of the vector
mutating public func createVector<T: Enum>(_ elements: [T]) -> Offset {
createVector(elements, size: elements.count)
}
- /// Creates a vector of type Enums in the buffer
+ /// Creates a vector of type ``Enum`` into the ``ByteBuffer``
+ ///
+ /// ``createVector(_:)-9h189`` writes a vector of type ``Enum`` into
+  /// ``ByteBuffer``. This is a convenience method instead of calling
+ /// ``startVector(_:elementSize:)`` and then ``endVector(len:)``
+ /// ```swift
+ /// let vectorOffset = builder.
+ /// createVector([.swift, .cpp])
+ /// ```
+ ///
/// - Parameter elements: Elements to be written into the buffer
/// - Parameter size: Count of elements
- /// - returns: Offset of the vector
- mutating public func createVector<T: Enum>(_ elements: [T], size: Int) -> Offset {
+ /// - returns: ``Offset`` of the vector
+ mutating public func createVector<T: Enum>(
+ _ elements: [T],
+ size: Int) -> Offset
+ {
let size = size
startVector(size, elementSize: T.byteSize)
for e in elements.reversed() {
@@ -360,18 +502,42 @@ public struct FlatBufferBuilder {
return endVector(len: size)
}
- /// Creates a vector of type Offsets in the buffer
- /// - Parameter offsets:Array of offsets of type T
- /// - returns: Offset of the vector
+ /// Creates a vector of already written offsets
+ ///
+ /// ``createVector(ofOffsets:)`` creates a vector of ``Offset`` into
+  /// ``ByteBuffer``. This is a convenience method instead of calling
+ /// ``startVector(_:elementSize:)`` and then ``endVector(len:)``.
+ ///
+ /// The underlying implementation simply calls ``createVector(ofOffsets:len:)``
+ ///
+ /// ```swift
+ /// let namesOffsets = builder.
+ /// createVector(ofOffsets: [name1, name2])
+ /// ```
+ /// - Parameter offsets: Array of offsets of type ``Offset``
+ /// - returns: ``Offset`` of the vector
mutating public func createVector(ofOffsets offsets: [Offset]) -> Offset {
createVector(ofOffsets: offsets, len: offsets.count)
}
- /// Creates a vector of type Offsets in the buffer
- /// - Parameter elements: Array of offsets of type T
+ /// Creates a vector of already written offsets
+ ///
+ /// ``createVector(ofOffsets:)`` creates a vector of ``Offset`` into
+  /// ``ByteBuffer``. This is a convenience method instead of calling
+ /// ``startVector(_:elementSize:)`` and then ``endVector(len:)``
+ ///
+ /// ```swift
+ /// let namesOffsets = builder.
+ /// createVector(ofOffsets: [name1, name2])
+ /// ```
+ ///
+ /// - Parameter offsets: Array of offsets of type ``Offset``
/// - Parameter size: Count of elements
- /// - returns: Offset of the vector
- mutating public func createVector(ofOffsets offsets: [Offset], len: Int) -> Offset {
+ /// - returns: ``Offset`` of the vector
+ mutating public func createVector(
+ ofOffsets offsets: [Offset],
+ len: Int) -> Offset
+ {
startVector(len, elementSize: MemoryLayout<Offset>.size)
for o in offsets.reversed() {
push(element: o)
@@ -379,9 +545,21 @@ public struct FlatBufferBuilder {
return endVector(len: len)
}
- /// Creates a vector of Strings
- /// - Parameter str: a vector of strings that will be written into the buffer
- /// - returns: Offset of the vector
+ /// Creates a vector of strings
+ ///
+ /// ``createVector(ofStrings:)`` creates a vector of `String` into
+  /// ``ByteBuffer``. This is a convenience method: instead of manually
+  /// creating the string offsets, you simply pass the strings to this function
+  /// and it writes them into the ``ByteBuffer``.
+  /// After that it calls ``createVector(ofOffsets:)``.
+ ///
+ /// ```swift
+ /// let namesOffsets = builder.
+ /// createVector(ofStrings: ["Name", "surname"])
+ /// ```
+ ///
+  /// - Parameter str: Array of strings
+ /// - returns: ``Offset`` of the vector
mutating public func createVector(ofStrings str: [String]) -> Offset {
var offsets: [Offset] = []
for s in str {
@@ -390,10 +568,22 @@ public struct FlatBufferBuilder {
return createVector(ofOffsets: offsets)
}
- /// Creates a vector of `Native swift structs` which were padded to flatbuffers standards
- /// - Parameter structs: A vector of structs
- /// - Returns: offset of the vector
- mutating public func createVector<T: NativeStruct>(ofStructs structs: [T]) -> Offset {
+ /// Creates a vector of type ``NativeStruct``.
+ ///
+  /// Any Swift struct in the generated code should conform to
+  /// ``NativeStruct``, since the generated Swift structs are padded
+  /// to the `FlatBuffers` standards.
+ ///
+ /// ```swift
+ /// let offsets = builder.
+ /// createVector(ofStructs: [NativeStr(num: 1), NativeStr(num: 2)])
+ /// ```
+ ///
+ /// - Parameter structs: A vector of ``NativeStruct``
+ /// - Returns: ``Offset`` of the vector
+ mutating public func createVector<T: NativeStruct>(ofStructs structs: [T])
+ -> Offset
+ {
startVector(
structs.count * MemoryLayout<T>.size,
elementSize: MemoryLayout<T>.alignment)
@@ -405,11 +595,21 @@ public struct FlatBufferBuilder {
// MARK: - Inserting Structs
- /// Fills the buffer with a native struct that's build and padded according to flatbuffers standards
+ /// Writes a ``NativeStruct`` into the ``ByteBuffer``
+ ///
+  /// Adds a native struct that's built and padded according
+  /// to `FlatBuffers` standards, with a predefined position.
+ ///
+ /// ```swift
+ /// let offset = builder.create(
+ /// struct: NativeStr(num: 1),
+ /// position: 10)
+ /// ```
+ ///
/// - Parameters:
- /// - s: `Native swift` struct to insert
+ /// - s: ``NativeStruct`` to be inserted into the ``ByteBuffer``
/// - position: The predefined position of the object
- /// - Returns: offset of written struct
+ /// - Returns: ``Offset`` of written struct
@discardableResult
mutating public func create<T: NativeStruct>(
struct s: T, position: VOffset) -> Offset
@@ -421,10 +621,20 @@ public struct FlatBufferBuilder {
return offset
}
- /// Fills the buffer with a native struct that's build and padded according to flatbuffers standards
+ /// Writes a ``NativeStruct`` into the ``ByteBuffer``
+ ///
+  /// Adds a native struct that's built and padded according
+ /// to `FlatBuffers` standards, directly into the buffer without
+ /// a predefined position.
+ ///
+ /// ```swift
+ /// let offset = builder.create(
+ /// struct: NativeStr(num: 1))
+ /// ```
+ ///
/// - Parameters:
- /// - s: `Native swift` struct to insert
- /// - Returns: offset of written struct
+ /// - s: ``NativeStruct`` to be inserted into the ``ByteBuffer``
+ /// - Returns: ``Offset`` of written struct
@discardableResult
mutating public func create<T: NativeStruct>(
struct s: T) -> Offset
@@ -437,9 +647,18 @@ public struct FlatBufferBuilder {
// MARK: - Inserting Strings
- /// Insets a string into the buffer using UTF8
+  /// Inserts a string into the buffer of type `UTF8`
+  ///
+  /// Adds a Swift string into ``ByteBuffer`` by encoding it
+  /// using `UTF8`
+ ///
+ /// ```swift
+ /// let nameOffset = builder
+ /// .create(string: "welcome")
+ /// ```
+ ///
/// - Parameter str: String to be serialized
- /// - returns: The strings offset in the buffer
+ /// - returns: ``Offset`` of inserted string
mutating public func create(string str: String?) -> Offset {
guard let str = str else { return Offset() }
let len = str.utf8.count
@@ -451,11 +670,25 @@ public struct FlatBufferBuilder {
return Offset(offset: _bb.size)
}
- /// Inserts a shared string to the buffer
+  /// Inserts a shared string into the buffer of type `UTF8`
+  ///
+  /// Adds a Swift string into ``ByteBuffer`` by encoding it
+  /// using `UTF8`. The function will check if the string
+  /// is already written to the ``ByteBuffer``
+ ///
+ /// ```swift
+ /// let nameOffset = builder
+ /// .createShared(string: "welcome")
+ ///
+ ///
+ /// let secondOffset = builder
+ /// .createShared(string: "welcome")
+ ///
+ /// assert(nameOffset.o == secondOffset.o)
+ /// ```
///
- /// The function checks the stringOffsetmap if it's seen a similar string before
/// - Parameter str: String to be serialized
- /// - returns: The strings offset in the buffer
+ /// - returns: ``Offset`` of inserted string
mutating public func createShared(string str: String?) -> Offset {
guard let str = str else { return Offset() }
if let offset = stringOffsetMap[str] {
@@ -468,18 +701,22 @@ public struct FlatBufferBuilder {
// MARK: - Inseting offsets
- /// Adds the offset of an object into the buffer
+ /// Writes the ``Offset`` of an already written table
+ ///
+ /// Writes the ``Offset`` of a table if not empty into the
+ /// ``ByteBuffer``
+ ///
/// - Parameters:
- /// - offset: Offset of another object to be written
- /// - position: The predefined position of the object
+ /// - offset: ``Offset`` of another object to be written
+ /// - position: The predefined position of the object
mutating public func add(offset: Offset, at position: VOffset) {
if offset.isEmpty { return }
add(element: refer(to: offset.o), def: 0, at: position)
}
- /// Pushes a value of type offset into the buffer
- /// - Parameter o: Offset
- /// - returns: Position of the offset
+ /// Pushes a value of type ``Offset`` into the ``ByteBuffer``
+ /// - Parameter o: ``Offset``
+ /// - returns: Current position of the ``Offset``
@discardableResult
mutating public func push(element o: Offset) -> UOffset {
push(element: refer(to: o.o))
@@ -487,18 +724,42 @@ public struct FlatBufferBuilder {
// MARK: - Inserting Scalars to Buffer
- /// Adds a value into the buffer of type Scalar
+ /// Writes a ``Scalar`` value into ``ByteBuffer``
+ ///
+  /// ``add(element:def:at:)`` takes in a default value, the current value,
+  /// and the position within the `VTable`. The element will not
+  /// be serialized if it is the same as the default value and
+  /// `serializeDefaults` is equal to false.
+ ///
+  /// If serializing defaults is important, initialize the builder with
+  /// ``init(initialSize:serializeDefaults:)``, passing true for `serializeDefaults`.
+ ///
+ /// ```swift
+ /// // Adds 10 to the buffer
+  /// builder.add(element: Int(10), def: 1, at: 12)
+ /// ```
+ ///
+ /// *NOTE: Never call this manually*
///
/// - Parameters:
/// - element: Element to insert
/// - def: Default value for that element
/// - position: The predefined position of the element
- mutating public func add<T: Scalar>(element: T, def: T, at position: VOffset) {
+ mutating public func add<T: Scalar>(
+ element: T,
+ def: T,
+ at position: VOffset)
+ {
if element == def && !serializeDefaults { return }
track(offset: push(element: element), at: position)
}
- /// Adds a value into the buffer of type optional Scalar
+  /// Writes an optional ``Scalar`` value into ``ByteBuffer``
+ ///
+ /// Takes an optional value to be written into the ``ByteBuffer``
+ ///
+ /// *NOTE: Never call this manually*
+ ///
/// - Parameters:
/// - element: Optional element of type scalar
/// - position: The predefined position of the element
@@ -507,7 +768,10 @@ public struct FlatBufferBuilder {
track(offset: push(element: element), at: position)
}
- /// Pushes the values into the buffer
+  /// Pushes a value of type ``Scalar`` into the ``ByteBuffer``
+ ///
+ /// *NOTE: Never call this manually*
+ ///
/// - Parameter element: Element to insert
/// - returns: Postion of the Element
@discardableResult
@@ -556,7 +820,9 @@ extension FlatBufferBuilder: CustomDebugStringConvertible {
/// Creates the memory to store the buffer in
@usableFromInline
init() {
- memory = UnsafeMutableRawBufferPointer.allocate(byteCount: 0, alignment: 0)
+ memory = UnsafeMutableRawBufferPointer.allocate(
+ byteCount: 0,
+ alignment: 0)
}
deinit {
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBufferObject.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBufferObject.swift
index cea3911eb32..df8ad8dbcfc 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBufferObject.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBufferObject.swift
@@ -23,6 +23,8 @@ public protocol NativeStruct {}
/// FlatbuffersInitializable is a protocol that allows any object to be
/// Initialized from a ByteBuffer
public protocol FlatbuffersInitializable {
+ /// Any flatbuffers object that conforms to this protocol can be
+ /// initialized through this initializer
init(_ bb: ByteBuffer, o: Int32)
}
@@ -31,26 +33,32 @@ public protocol FlatBufferObject: FlatbuffersInitializable {
var __buffer: ByteBuffer! { get }
}
-/// `ObjectAPIPacker` is a protocol that allows object to pack and unpack from a
-/// `NativeObject` to a flatbuffers Object and vice versa.
+/// ``ObjectAPIPacker`` is a protocol that allows an object to pack and unpack from a
+/// ``NativeObject`` to a flatbuffers Object and vice versa.
public protocol ObjectAPIPacker {
/// associatedtype to the object that should be unpacked.
associatedtype T
- /// `pack` tries packs the variables of a native Object into the `ByteBuffer` by using
- /// the FlatBufferBuilder
+ /// ``pack(_:obj:)-3ptws`` packs the variables of a native Object into the `ByteBuffer` by using
+ /// a FlatBufferBuilder
/// - Parameters:
/// - builder: FlatBufferBuilder that will host incoming data
/// - obj: Object of associatedtype to the current implementer
+ ///
+ /// ``pack(_:obj:)-3ptws`` can be called by passing in an already initialized ``FlatBufferBuilder``
+ /// or it can be called by using the public API that will create a new ``FlatBufferBuilder``
static func pack(_ builder: inout FlatBufferBuilder, obj: inout T?) -> Offset
- /// `pack` packs the variables of a native Object into the `ByteBuffer` by using
+ /// ``pack(_:obj:)-20ipk`` packs the variables of a native Object into the `ByteBuffer` by using
/// the FlatBufferBuilder
/// - Parameters:
/// - builder: FlatBufferBuilder that will host incoming data
/// - obj: Object of associatedtype to the current implementer
+ ///
+ /// ``pack(_:obj:)-20ipk`` can be called by passing in an already initialized ``FlatBufferBuilder``
+ /// or it can be called by using the public API that will create a new ``FlatBufferBuilder``
static func pack(_ builder: inout FlatBufferBuilder, obj: inout T) -> Offset
- /// `Unpack` unpacks a flatbuffers object into a `NativeObject`
+ /// ``unpack()`` unpacks a ``FlatBuffers`` object into a native Swift object.
mutating func unpack() -> T
}
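As a hedged usage sketch, `String` (which conforms to `ObjectAPIPacker` further down in this patch) can be packed through the protocol's entry points:

```swift
import FlatBuffers

var builder = FlatBufferBuilder(initialSize: 64)
var name: String? = "flatbuffers"

// The optional overload forwards to the non-optional one, which in turn
// calls builder.create(string:).
let offset = String.pack(&builder, obj: &name)
```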
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBuffersUtils.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBuffersUtils.swift
index 2ab68fd2615..dc5f78595b0 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBuffersUtils.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatBuffersUtils.swift
@@ -29,6 +29,8 @@ public enum FlatBuffersUtils {
/// creates a new buffer use `readPrefixedSizeCheckedRoot` instead
/// unless a completely new buffer is required
/// - Parameter bb: Flatbuffer object
+ ///
+ ///
public static func removeSizePrefix(bb: ByteBuffer) -> ByteBuffer {
bb.duplicate(removing: MemoryLayout<Int32>.size)
}
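A short hedged sketch of the helper above; `prefixedBuffer` is a hypothetical size-prefixed ``ByteBuffer`` received elsewhere, not a name from this patch:

```swift
import FlatBuffers

// `prefixedBuffer` is assumed to start with an Int32 size prefix.
func readWithoutPrefix(prefixedBuffer: ByteBuffer) -> ByteBuffer {
  // Drops the leading Int32 size prefix by duplicating the buffer without it,
  // so the result can be read with the non-prefixed root readers.
  FlatBuffersUtils.removeSizePrefix(bb: prefixedBuffer)
}
```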
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatbuffersErrors.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatbuffersErrors.swift
index 97188e9e828..74c06b9aef3 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatbuffersErrors.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/FlatbuffersErrors.swift
@@ -53,7 +53,10 @@ public enum FlatbuffersErrors: Error, Equatable {
fieldName: String)
case apparentSizeTooLarge
- public static func == (lhs: FlatbuffersErrors, rhs: FlatbuffersErrors) -> Bool {
+ public static func == (
+ lhs: FlatbuffersErrors,
+ rhs: FlatbuffersErrors) -> Bool
+ {
lhs.localizedDescription == rhs.localizedDescription
}
}
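A hedged sketch of what the `Equatable` conformance above gives callers (the case compared is the one visible in this hunk):

```swift
import FlatBuffers

let lhs: FlatbuffersErrors = .apparentSizeTooLarge
let rhs: FlatbuffersErrors = .apparentSizeTooLarge

// Equality is defined over the localized descriptions of the two errors.
assert(lhs == rhs)
```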
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Message.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Message.swift
index 76b7a622b28..eb0bad972b4 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Message.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Message.swift
@@ -38,10 +38,12 @@ public struct Message<T: FlatBufferObject>: FlatBufferGRPCMessage {
public var object: T {
T.init(
buffer,
- o: Int32(buffer.read(def: UOffset.self, position: buffer.reader)) + Int32(buffer.reader))
+ o: Int32(buffer.read(def: UOffset.self, position: buffer.reader)) +
+ Int32(buffer.reader))
}
- public var rawPointer: UnsafeMutableRawPointer { buffer.memory.advanced(by: buffer.reader) }
+ public var rawPointer: UnsafeMutableRawPointer {
+ buffer.memory.advanced(by: buffer.reader) }
public var size: Int { Int(buffer.size) }
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Mutable.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Mutable.swift
index 60f0f12237a..f77945cf0b6 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Mutable.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Mutable.swift
@@ -16,17 +16,17 @@
import Foundation
-/// Mutable is a protocol that allows us to mutate Scalar values within the buffer
+/// Mutable is a protocol that allows us to mutate Scalar values within a ``ByteBuffer``
public protocol Mutable {
/// makes Flatbuffer accessed within the Protocol
var bb: ByteBuffer { get }
- /// makes position of the table/struct accessed within the Protocol
+ /// makes the position of the ``Table``/``struct`` accessible within the Protocol
var postion: Int32 { get }
}
extension Mutable {
- /// Mutates the memory in the buffer, this is only called from the access function of table and structs
+ /// Mutates the memory in the buffer; this is only called from the access functions of ``Table`` and ``struct``
/// - Parameters:
/// - value: New value to be inserted to the buffer
/// - index: index of the Element
@@ -39,7 +39,7 @@ extension Mutable {
extension Mutable where Self == Table {
- /// Mutates a value by calling mutate with respect to the position in the table
+ /// Mutates a value by calling mutate with respect to the position in a ``Table``
/// - Parameters:
/// - value: New value to be inserted to the buffer
/// - index: index of the Element
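As a hedged illustration: generated tables expose `mutate...` accessors that route through this protocol. `Monster` and its `mutate(hp:)` accessor are assumptions borrowed from the usual tutorial schema, not names from this patch:

```swift
import FlatBuffers

// `buffer` is assumed to hold a finished Monster table.
func bumpHp(buffer: inout ByteBuffer) {
  var monster: Monster = getRoot(byteBuffer: &buffer)
  // The generated accessor calls Mutable.mutate(_:index:) on the underlying bytes.
  _ = monster.mutate(hp: 99)
}
```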
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/NativeObject.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/NativeObject.swift
index 724c2ebafd5..bc896e637c6 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/NativeObject.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/NativeObject.swift
@@ -26,7 +26,9 @@ extension NativeObject {
/// Serialize is a helper function that serializes the data from the Object API to a bytebuffer directly th
/// - Parameter type: Type of the Flatbuffer object
/// - Returns: returns the encoded sized ByteBuffer
- public func serialize<T: ObjectAPIPacker>(type: T.Type) -> ByteBuffer where T.T == Self {
+ public func serialize<T: ObjectAPIPacker>(type: T.Type) -> ByteBuffer
+ where T.T == Self
+ {
var builder = FlatBufferBuilder(initialSize: 1024)
return serialize(builder: &builder, type: type.self)
}
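A hedged sketch of the helper above; `MonsterT` (a generated object-API type) and `Monster` (its packer) are assumptions, not names from this patch:

```swift
import FlatBuffers

// Allocates a 1024-byte builder internally, packs the native object,
// and returns the finished, sized ByteBuffer.
func encode(native: MonsterT) -> ByteBuffer {
  native.serialize(type: Monster.self)
}
```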
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Root.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Root.swift
index 8891cafb318..4d883b7b8c1 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Root.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Root.swift
@@ -23,6 +23,9 @@ import Foundation
/// - options: Verifier options
/// - Throws: FlatbuffersErrors
/// - Returns: Returns a valid, checked Flatbuffers object
+///
+/// ``getPrefixedSizeCheckedRoot(byteBuffer:options:)`` skips the size prefix at the start of
+/// the ``ByteBuffer`` and verifies the buffer by calling ``getCheckedRoot(byteBuffer:options:)``
public func getPrefixedSizeCheckedRoot<T: FlatBufferObject & Verifiable>(
byteBuffer: inout ByteBuffer,
options: VerifierOptions = .init()) throws -> T
@@ -35,7 +38,12 @@ public func getPrefixedSizeCheckedRoot<T: FlatBufferObject & Verifiable>(
/// Returns a `NON-Checked` flatbuffers object
/// - Parameter byteBuffer: Buffer that contains data
/// - Returns: Returns a Flatbuffers object
-public func getPrefixedSizeRoot<T: FlatBufferObject>(byteBuffer: inout ByteBuffer) -> T {
+///
+/// ``getPrefixedSizeRoot(byteBuffer:)`` skips the size prefix at the start of
+/// the ``ByteBuffer`` and then calls ``getRoot(byteBuffer:)``
+public func getPrefixedSizeRoot<T: FlatBufferObject>(byteBuffer: inout ByteBuffer)
+ -> T
+{
byteBuffer.skipPrefix()
return getRoot(byteBuffer: &byteBuffer)
@@ -47,6 +55,10 @@ public func getPrefixedSizeRoot<T: FlatBufferObject>(byteBuffer: inout ByteBuffe
/// - options: Verifier options
/// - Throws: FlatbuffersErrors
/// - Returns: Returns a valid, checked Flatbuffers object
+///
+/// ``getCheckedRoot(byteBuffer:options:)`` takes in a ``ByteBuffer`` and verifies
+/// it by creating a ``Verifier`` and checking that all the `Bytes` are correctly aligned
+/// and within the ``ByteBuffer`` range.
public func getCheckedRoot<T: FlatBufferObject & Verifiable>(
byteBuffer: inout ByteBuffer,
options: VerifierOptions = .init()) throws -> T
@@ -55,7 +67,8 @@ public func getCheckedRoot<T: FlatBufferObject & Verifiable>(
try ForwardOffset<T>.verify(&verifier, at: 0, of: T.self)
return T.init(
byteBuffer,
- o: Int32(byteBuffer.read(def: UOffset.self, position: byteBuffer.reader)) + Int32(byteBuffer.reader))
+ o: Int32(byteBuffer.read(def: UOffset.self, position: byteBuffer.reader)) +
+ Int32(byteBuffer.reader))
}
/// Returns a `NON-Checked` flatbuffers object
@@ -64,5 +77,6 @@ public func getCheckedRoot<T: FlatBufferObject & Verifiable>(
public func getRoot<T: FlatBufferObject>(byteBuffer: inout ByteBuffer) -> T {
T.init(
byteBuffer,
- o: Int32(byteBuffer.read(def: UOffset.self, position: byteBuffer.reader)) + Int32(byteBuffer.reader))
+ o: Int32(byteBuffer.read(def: UOffset.self, position: byteBuffer.reader)) +
+ Int32(byteBuffer.reader))
}
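A hedged end-to-end sketch of the checked readers above; `Monster` is again an assumed generated type that is both `FlatBufferObject` and `Verifiable`:

```swift
import FlatBuffers

func decode(buffer: inout ByteBuffer) {
  do {
    // Verifies alignment and bounds before handing back the root table.
    let monster: Monster = try getCheckedRoot(byteBuffer: &buffer)
    print(monster)
  } catch let error as FlatbuffersErrors {
    print("verification failed: \(error)")
  } catch {
    print("unexpected error: \(error)")
  }
}
```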
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/String+extension.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/String+extension.swift
index 5a16a81b66e..2f3168d2214 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/String+extension.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/String+extension.swift
@@ -47,7 +47,9 @@ extension String: Verifiable {
if !verifier._options._ignoreMissingNullTerminators && !isNullTerminated {
let str = verifier._buffer.readString(at: range.start, count: range.count)
- throw FlatbuffersErrors.missingNullTerminator(position: position, str: str)
+ throw FlatbuffersErrors.missingNullTerminator(
+ position: position,
+ str: str)
}
}
}
@@ -69,12 +71,18 @@ extension String: FlatbuffersInitializable {
extension String: ObjectAPIPacker {
- public static func pack(_ builder: inout FlatBufferBuilder, obj: inout String?) -> Offset {
+ public static func pack(
+ _ builder: inout FlatBufferBuilder,
+ obj: inout String?) -> Offset
+ {
guard var obj = obj else { return Offset() }
return pack(&builder, obj: &obj)
}
- public static func pack(_ builder: inout FlatBufferBuilder, obj: inout String) -> Offset {
+ public static func pack(
+ _ builder: inout FlatBufferBuilder,
+ obj: inout String) -> Offset
+ {
builder.create(string: obj)
}
@@ -86,7 +94,9 @@ extension String: ObjectAPIPacker {
extension String: NativeObject {
- public func serialize<T: ObjectAPIPacker>(type: T.Type) -> ByteBuffer where T.T == Self {
+ public func serialize<T: ObjectAPIPacker>(type: T.Type) -> ByteBuffer
+ where T.T == Self
+ {
fatalError("serialize should never be called from string directly")
}
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Table.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Table.swift
index 34efedd5f4d..ff501fc9b9f 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Table.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Table.swift
@@ -34,7 +34,8 @@ public struct Table {
/// - Note: This will `CRASH` if read on a big endian machine
public init(bb: ByteBuffer, position: Int32 = 0) {
guard isLitteEndian else {
- fatalError("Reading/Writing a buffer in big endian machine is not supported on swift")
+ fatalError(
+ "Reading/Writing a buffer in big endian machine is not supported on swift")
}
self.bb = bb
postion = position
@@ -46,9 +47,10 @@ public struct Table {
/// - Returns: offset of field within buffer
public func offset(_ o: Int32) -> Int32 {
let vtable = postion - bb.read(def: Int32.self, position: Int(postion))
- return o < bb.read(def: VOffset.self, position: Int(vtable)) ? Int32(bb.read(
- def: Int16.self,
- position: Int(vtable + o))) : 0
+ return o < bb
+ .read(def: VOffset.self, position: Int(vtable)) ? Int32(bb.read(
+ def: Int16.self,
+ position: Int(vtable + o))) : 0
}
/// Gets the indirect offset of the current stored object
@@ -163,7 +165,11 @@ public struct Table {
/// - vOffset: Field offset within a vtable
/// - fbb: ByteBuffer
/// - Returns: an position of a field
- static public func offset(_ o: Int32, vOffset: Int32, fbb: ByteBuffer) -> Int32 {
+ static public func offset(
+ _ o: Int32,
+ vOffset: Int32,
+ fbb: ByteBuffer) -> Int32
+ {
let vTable = Int32(fbb.capacity) - o
return vTable + Int32(fbb.read(
def: Int16.self,
@@ -178,7 +184,11 @@ public struct Table {
/// - off2: second offset to compare
/// - fbb: Bytebuffer
/// - Returns: returns the difference between
- static public func compare(_ off1: Int32, _ off2: Int32, fbb: ByteBuffer) -> Int32 {
+ static public func compare(
+ _ off1: Int32,
+ _ off2: Int32,
+ fbb: ByteBuffer) -> Int32
+ {
let memorySize = Int32(MemoryLayout<Int32>.size)
let _off1 = off1 + fbb.read(def: Int32.self, position: Int(off1))
let _off2 = off2 + fbb.read(def: Int32.self, position: Int(off2))
@@ -203,7 +213,11 @@ public struct Table {
/// - key: bytes array to compare to
/// - fbb: Bytebuffer
/// - Returns: returns the difference between
- static public func compare(_ off1: Int32, _ key: [Byte], fbb: ByteBuffer) -> Int32 {
+ static public func compare(
+ _ off1: Int32,
+ _ key: [Byte],
+ fbb: ByteBuffer) -> Int32
+ {
let memorySize = Int32(MemoryLayout<Int32>.size)
let _off1 = off1 + fbb.read(def: Int32.self, position: Int(off1))
let len1 = fbb.read(def: Int32.self, position: Int(_off1))
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/TableVerifier.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/TableVerifier.swift
index 6749b6f06ba..42a37f2888c 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/TableVerifier.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/TableVerifier.swift
@@ -64,8 +64,7 @@ public struct TableVerifier {
/// Reading the offset for the field needs to be read.
let offset: VOffset = try _verifier.getValue(
- at: Int(clamping: _vtable &+ Int(field))
- )
+ at: Int(clamping: _vtable &+ Int(field)))
if offset > 0 {
return Int(clamping: _position &+ Int(offset))
@@ -116,7 +115,8 @@ public struct TableVerifier {
unionKeyName: String,
fieldName: String,
required: Bool,
- completion: @escaping (inout Verifier, T, Int) throws -> Void) throws where T: UnionEnum
+ completion: @escaping (inout Verifier, T, Int) throws -> Void) throws
+ where T: UnionEnum
{
let keyPos = try dereference(key)
let valPos = try dereference(field)
@@ -170,7 +170,8 @@ public struct TableVerifier {
unionKeyName: String,
fieldName: String,
required: Bool,
- completion: @escaping (inout Verifier, T, Int) throws -> Void) throws where T: UnionEnum
+ completion: @escaping (inout Verifier, T, Int) throws -> Void) throws
+ where T: UnionEnum
{
let keyVectorPosition = try dereference(key)
let offsetVectorPosition = try dereference(field)
diff --git a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Verifier.swift b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Verifier.swift
index e465b140dcf..6f65ce702ff 100644
--- a/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Verifier.swift
+++ b/chromium/third_party/flatbuffers/src/swift/Sources/FlatBuffers/Verifier.swift
@@ -176,14 +176,18 @@ public struct Verifier {
let reportedOverflow: (partialValue: UInt32, overflow: Bool)
if offset > 0 {
- reportedOverflow = _int32Position.subtractingReportingOverflow(offset.magnitude)
+ reportedOverflow = _int32Position
+ .subtractingReportingOverflow(offset.magnitude)
} else {
- reportedOverflow = _int32Position.addingReportingOverflow(offset.magnitude)
+ reportedOverflow = _int32Position
+ .addingReportingOverflow(offset.magnitude)
}
/// since `subtractingReportingOverflow` & `addingReportingOverflow` returns true,
/// if there is overflow we return failure
- if reportedOverflow.overflow || reportedOverflow.partialValue > _buffer.capacity {
+ if reportedOverflow.overflow || reportedOverflow.partialValue > _buffer
+ .capacity
+ {
throw FlatbuffersErrors.signedOffsetOutOfBounds(
offset: Int(offset),
position: position)
diff --git a/chromium/third_party/flatbuffers/src/ts/flexbuffers.ts b/chromium/third_party/flatbuffers/src/ts/flexbuffers.ts
index fa518ecda0e..b8c722d1e65 100644
--- a/chromium/third_party/flatbuffers/src/ts/flexbuffers.ts
+++ b/chromium/third_party/flatbuffers/src/ts/flexbuffers.ts
@@ -7,7 +7,7 @@ export function builder(): Builder {
return new Builder();
}
-export function toObject(buffer: Uint8Array): unknown {
+export function toObject(buffer: ArrayBuffer): unknown {
return toReference(buffer).toObject();
}
diff --git a/chromium/third_party/flatbuffers/src/ts/flexbuffers/reference.ts b/chromium/third_party/flatbuffers/src/ts/flexbuffers/reference.ts
index a93c7431b8b..bfcd927c2c4 100644
--- a/chromium/third_party/flatbuffers/src/ts/flexbuffers/reference.ts
+++ b/chromium/third_party/flatbuffers/src/ts/flexbuffers/reference.ts
@@ -6,7 +6,7 @@ import { Long } from '../long';
import { fromUTF8Array } from './flexbuffers-util';
import { BitWidth } from './bit-width';
-export function toReference(buffer: Uint8Array): Reference {
+export function toReference(buffer: ArrayBuffer): Reference {
const len = buffer.byteLength;
if (len < 3) {