diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..a53e410 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,14 @@ +version: 2 +updates: + - package-ecosystem: "cargo" + directory: "rust/" + schedule: + interval: "monthly" + - package-ecosystem: "pip" + directory: "python/" + schedule: + interval: "monthly" + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" diff --git a/.github/workflows/Lint-and-test.yml b/.github/workflows/Lint-and-test.yml new file mode 100644 index 0000000..b2849e3 --- /dev/null +++ b/.github/workflows/Lint-and-test.yml @@ -0,0 +1,64 @@ +# This workflow will install Python dependencies, run tests and lint with a single version of Python +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python + +on: + workflow_call: + push: + branches: [ "master" ] + pull_request: + branches: [ "master" ] + +jobs: + python-tests: + strategy: + matrix: + version: ['3.12', '3.13', '3.14'] + os: ["ubuntu-latest", "windows-latest"] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v6 + - name: Install uv and set the python version + uses: astral-sh/setup-uv@v7 + with: + python-version: ${{ matrix.version }} + - name: Install dependencies + run: uv sync --all-extras --dev + working-directory: python + - name: Test with pytest + run: uv run pytest tests + working-directory: python + rust-tests: + strategy: + matrix: + os: ["ubuntu-latest", "windows-latest"] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v6 + - name: install stable rust + run: rustup install stable + working-directory: rust + - name: Format check + run: cargo fmt --check + working-directory: rust + - name: Clippy + run: cargo clippy -- --deny warnings + working-directory: rust + - name: Tests + run: cargo test + working-directory: rust + - name: Package + run: cargo package + working-directory: rust + results: + if: ${{ 
always() }} + runs-on: ubuntu-latest + name: Final Results + needs: [python-tests, rust-tests] + steps: + - run: exit 1 + # see https://stackoverflow.com/a/67532120/4907315 + if: >- + ${{ + contains(needs.*.result, 'failure') + || contains(needs.*.result, 'cancelled') + }} diff --git a/.github/workflows/lint-and-test-nightly.yml b/.github/workflows/lint-and-test-nightly.yml new file mode 100644 index 0000000..5119ad4 --- /dev/null +++ b/.github/workflows/lint-and-test-nightly.yml @@ -0,0 +1,9 @@ +name: lint-and-test-nightly +on: + schedule: + - cron: "0 0 * * *" + workflow_dispatch: + +jobs: + lint-and-test-nightly: + uses: ./.github/workflows/Lint-and-test.yml diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..5326564 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,132 @@ +name: Publish +on: push +jobs: + py-build: + if: github.ref_type == 'tag' + name: build distribution + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v6 + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: "3.13" + - name: Install pypa/build + run: >- + python3 -m + pip install + build + --user + - name: Build a binary wheel and a source tarball + run: python3 -m build + working-directory: python + - name: Store the distribution packages + uses: actions/upload-artifact@v6 + with: + name: python-package-distributions + path: python/dist/ + rust-build: + if: github.ref_type == 'tag' + name: build distribution + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v6 + - name: install stable rust + run: rustup install stable + - name: Format check + run: cargo fmt --check + working-directory: rust + - name: Clippy + run: cargo clippy -- --deny warnings + working-directory: rust + - name: Tests + run: cargo test + working-directory: rust + - name: set version + run: | + export VERSION=${{ github.ref_name }} + sed -i "s/0.0.0/$VERSION/g" Cargo.toml + working-directory: rust + - name: publish + run: cargo publish --dry-run --allow-dirty + working-directory: rust + env: + CARGO_REGISTRY_TOKEN: ${{ 
secrets.CARGO_REGISTRY_TOKEN }} + + publish-to-pypi: + name: >- + Publish Python distribution to PyPI + if: github.ref_type == 'tag' + needs: [py-build, rust-build] # Don't publish anything until both python & rust builds pass... + runs-on: ubuntu-latest + environment: + name: release + url: https://pypi.org/p/isis-streaming-data-types + permissions: + id-token: write # IMPORTANT: mandatory for trusted publishing + steps: + - name: Download all the dists + uses: actions/download-artifact@v7 + with: + name: python-package-distributions + path: python/dist/ + - name: Publish distribution to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + packages-dir: python/dist/ + + publish-to-crates-io: + name: >- + Publish Rust distribution to crates.io + runs-on: ubuntu-latest + if: github.ref_type == 'tag' + needs: [py-build, rust-build] # Don't publish anything until both python & rust builds pass... + steps: + - name: publish + run: cargo publish --allow-dirty # FIXME(review): this job never checks out the repo, installs a Rust toolchain, sets the version, or cds into rust/ — mirror rust-build's setup steps or this publish will fail + env: + CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} + + github-release: + name: >- + Sign the Python distribution with Sigstore + and upload them to GitHub Release + needs: [py-build, publish-to-pypi, rust-build, publish-to-crates-io] + runs-on: ubuntu-latest + + permissions: + contents: write # IMPORTANT: mandatory for making GitHub Releases + id-token: write # IMPORTANT: mandatory for sigstore + + steps: + - name: Download all the dists + uses: actions/download-artifact@v7 + with: + name: python-package-distributions + path: python/dist/ + - name: Sign the dists with Sigstore + uses: sigstore/gh-action-sigstore-python@v3.2.0 + with: + inputs: >- + ./python/dist/*.tar.gz + ./python/dist/*.whl + - name: Create GitHub Release + env: + GITHUB_TOKEN: ${{ github.token }} + run: >- + gh release create + '${{ github.ref_name }}' + --repo '${{ github.repository }}' + --notes "" + - name: Upload artifact signatures to GitHub Release + env: + GITHUB_TOKEN: ${{ github.token }} + # Upload to GitHub Release 
using the `gh` CLI. + # `dist/` contains the built packages, and the + # sigstore-produced signatures and certificates. + run: >- + gh release upload + '${{ github.ref_name }}' python/dist/** + --repo '${{ github.repository }}' diff --git a/.gitignore b/.gitignore index 9e6822f..d836711 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,139 @@ -#directories +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +# Translations +*.mo +*.pot + +# macOS artifacts +*.DS_Store + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# PyCharm +.idea + +# VSCode +.vscode + +python/src/streaming_data_types/_version.py diff --git a/CMakeLists.txt b/CMakeLists.txt deleted file mode 100644 index 70b6e15..0000000 --- a/CMakeLists.txt +++ /dev/null @@ -1,37 +0,0 @@ -# Just a cmake example of generating the flatbuffer headers. -# Personally, I like to generate these headers from the projects -# which use them. - -# Tries to locate flatc with find_program. - -cmake_minimum_required(VERSION 2.8.11) -project(streaming-data-types) - -if(EXISTS "${CMAKE_BINARY_DIR}/conanbuildinfo.cmake") - include("${CMAKE_BINARY_DIR}/conanbuildinfo.cmake") - conan_basic_setup(SKIP_RPATH NO_OUTPUT_DIRS) -endif() - -find_program(FLATC flatc) -message("** Found flatc: ${FLATC}") - -set(flatbuffers_generated_headers "") -set(schemas_subdir "schemas") -file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${schemas_subdir}") -file(GLOB_RECURSE flatbuffers_schemata RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}/schemas" "schemas/*.fbs") - -foreach (f0 ${flatbuffers_schemata}) - string(REGEX REPLACE "\\.fbs$" "" s0 ${f0}) - set(fbs "${schemas_subdir}/${s0}.fbs") - set(fbh "${schemas_subdir}/${s0}_generated.h") - add_custom_command( - OUTPUT "${fbh}" - COMMAND ${FLATC} --cpp --gen-mutable --gen-name-strings --scoped-enums "${CMAKE_CURRENT_SOURCE_DIR}/${fbs}" - DEPENDS "${fbs}" - WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${schemas_subdir}" - COMMENT "Process ${fbs} using ${FLATC}" - ) - list(APPEND flatbuffers_generated_headers "${CMAKE_CURRENT_BINARY_DIR}/${fbh}") -endforeach() - -add_custom_target(flatbuffers_generate 
ALL DEPENDS ${flatbuffers_generated_headers}) diff --git a/LICENSE b/LICENSE index c5bd7b6..79c6c4d 100644 --- a/LICENSE +++ b/LICENSE @@ -20,4 +20,4 @@ BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED -OF THE POSSIBILITY OF SUCH DAMAGE. +OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index 6a5a132..0000000 --- a/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,18 +0,0 @@ -### Description of Work - -*Add a description of the changes here. The aim is provide information to help the approvers review and approve the PR.* - -### Issue - -*If there is an associated issue, write 'Closes #XXX'* - -### Developer Checklist - -- [ ] If there are new schema in this PR I have added them to the list in README.md -- [ ] If there are breaking changes to a schema, I have used a new file identifier and updated the list in README.md -- [ ] There is some documentation here or in the flat buffer file on the use case for this data, including which component is intended to send the data and/or which is the intended receiver. - -## Approval Criteria - -This PR should not be merged until the ECDC Group Leader (acting or permanent) has given their explicit approval in the comments section. -SCIPP/DRAM should also be consulted on changes which may affect them. diff --git a/README.md b/README.md index 3a24ff1..6f64417 100644 --- a/README.md +++ b/README.md @@ -1,103 +1,55 @@ # Streaming Data Types -[![DOI](https://zenodo.org/badge/81330954.svg)](https://zenodo.org/badge/latestdoi/81330954) - -FlatBuffers is the format chosen for the ESS messaging system. 
- -We would like to be able to read any message in the system at any time, -therefore: - -All schemas that we use for the transmission of data are collected in this -repository. - -The names of the schema files in this repository are prefixed by their unique -4-character `file_identifier`. This `file_identifier` must be set in the -schema definition file as: -``` -file_identifier = "abcd"; -``` - -The file identifiers (also called "schema id") must be unique on the network. -The `root_type` should include the schema version number. For example, the f144 schema has -the `root_type`: -``` -root_type f144_LogData; -``` - -The naming convention for new identifiers and a table of existing identifiers follows later in this README. -Please add your own (new schema) with file identifier to that table. - - -## Backwards compatibility - -Please, avoid changes which break binary compatibility. FlatBuffers documentation contains good information about how to maintain binary compatibility. If you need to make breaking changes to schemas that are not under development, acquire a new schema id. - -Schemas that are under development should be clearly marked as such in the schema file and in the **Schema ids** below to warn users of possible loss of backwards compatibility. - -## Not enough file identifiers available? - -If you feel that you may need a lot of schema ids, you can use a single schema -and work with the flat buffers union data type in your root element. - - -## Schema coding standard - -* Completely new schemas should have an ID comprising of two characters plus 00, e.g. hs00 -* When updating an existing schema with a breaking change then the new schema should have the same ID but with the number incremented, e.g. hs00 -> hs01 - * For older schema which don't end with two numbers, propose a new name which matches the convention. -* Prefix your schema files in this repository with the chosen schema id to more easily prevent id collision. 
-* Tables should use *UpperCamelCase*. -* Fields should use *snake_case*. -* Try to keep names consistent with equivalent fields in existing schema, e.g.: - * `timestamp` for timestamp - * `source_name` for a string indicating origin/source of data in flatbuffer - * `service_id` for a string indicating the name of the service that created the flatbuffer -* Do not use unsigned integers unless required for your application. - +FlatBuffers is the format chosen for the ISIS data streaming system, derived from the +[ESS messaging system](https://github.com/ess-dmsc/streaming-data-types). ## Schema ids | ID | File name | Description | |------|----------------------------------|-----------------------------------------------------------------------------------------------| -| f140 | `f140_general.fbs ` | [OBSOLETE] Can encode an arbitrary EPICS PV | -| f141 | `f141_ntarraydouble.fbs ` | [OBSOLETE] A simple array of double, testing file writing | -| f142 | `f142_logdata.fbs ` | (DEPRECATED) For log data, for example forwarded EPICS PV update [superseded by f144] | -| f143 | `f143_structure.fbs ` | [OBSOLETE] Arbitrary nested data | | f144 | `f144_logdata.fbs ` | Controls related log data, typically from EPICS or NICOS. 
Note: not to be used for array data | -| ev42 | `ev42_events.fbs ` | Multi-institution neutron event data for a single pulse | -| ev43 | `ev43_events.fbs ` | Multi-institution neutron event data from multiple pulses | | ev44 | `ev44_events.fbs ` | Multi-institution neutron event data for both single and multiple pulses | -| an44 | `an44_events.fbs ` | ANSTO-specific variant of ev44 | -| ar51 | `ar51_readout_data.fbs ` | Streaming raw ESS detector readout data | -| is84 | `is84_isis_events.fbs ` | ISIS specific addition to event messages | -| ba57 | `ba57_run_info.fbs ` | [OBSOLETE] Run start/stop information for Mantid [superseded by pl72] | | df12 | `df12_det_spec_map.fbs ` | Detector-spectrum map for Mantid | -| senv | `senv_data.fbs ` | (DEPRECATED) Used for storing for waveforms from DG ADC readout system. | | se00 | `se00_data.fbs ` | Used for storing arrays with optional timestamps, for example waveform data. Replaces _senv_. | -| NDAr | `NDAr_NDArray_schema.fbs ` | (DEPRECATED) Holds binary blob of data with n dimensions | -| ADAr | `ADAr_area_detector_array.fbs ` | (DEPRECATED) EPICS area detector array data [superseded by ad00] | | ad00 | `ad00_area_detector_array.fbs ` | EPICS area detector array data | -| mo01 | `mo01_nmx.fbs ` | Daquiri monitor data: pre-binned histograms, raw hits and NMX tracks | -| ns10 | `ns10_cache_entry.fbs ` | (DEPRECATED) NICOS cache entry | -| ns11 | `ns11_typed_cache_entry.fbs ` | (DEPRECATED) NICOS cache entry with typed data (not used) | -| hs00 | `hs00_event_histogram.fbs ` | (DEPRECATED) Event histogram stored in n dim array | | hs01 | `hs01_event_histogram.fbs ` | Event histogram stored in n dim array | -| dtdb | `dtdb_adc_pulse_debug.fbs ` | Debug fields that can be added to the ev42 schema | -| ep00 | `ep00_epics_connection_info.fbs` | (DEPRECATED) Status of the EPICS connection | | ep01 | `ep01_epics_connection.fbs ` | Status or event of EPICS connection. 
Replaces _ep00_ | | json | `json_json.fbs ` | Carries a JSON payload | -| tdct | `tdct_timestamps.fbs ` | Timestamps from a device (e.g. a chopper) | | pl72 | `pl72_run_start.fbs ` | File writing, run start message for file writer and Mantid | | 6s4t | `6s4t_run_stop.fbs ` | File writing, run stop message for file writer and Mantid | | answ | `answ_action_response.fbs ` | Holds the result of a command to the filewriter | | wrdn | `wrdn_finished_writing.fbs ` | Message from the filewriter when it is done writing a file | | x5f2 | `x5f2_status.fbs ` | Status update and heartbeat message for any software | -| rf5k | `rf5k_forwarder_config.fbs ` | (DEPRECATED) Configuration update for Forwarder [superseded by fc00] | | fc00 | `fc00_forwarder_config.fbs ` | Configuration update for Forwarder | | al00 | `al00_alarm.fbs ` | Generic alarm schema for EPICS, NICOS, etc. | | da00 | `da00_dataarray.fbs ` | Pseudo-scipp DataArray with time-dependent and constant Variables | | un00 | `un00_units.fbs ` | Engineering units update | -## Useful information: -- [Have CMake download and compile schema](documentation/cmakeCompileSchema.md) -- [Time formats we use and how to convert between them](documentation/timestamps.md) +## Adding new schemas + +### Add `.fbs` file to `schemas directory + +Check `ess-streaming-data-types` first; attempt not to diverge without reason. If a new schema is really needed, then +add a new `.fbs` schema in the `schemas` directory. + +Note: to generate code from `.fbs` schemas, you will need the `flatc` tool installed. It can be acquired from +https://github.com/google/flatbuffers/releases . + +### Adding Python bindings + +Python bindings have low-level code (autogenerated by `flatc`) in the `fbschemas` directory, but **also** +manually-written convenience serializers and deserializers in the top-level of the python module. 
+ +When adding or modifying a schema: +- The low-level code must be generated by manually calling `flatc --python schemas\some_schema.fbs` and adding + the resulting generated code to `python/src/streaming_data_types/fbschemas`. +- Manually-written serializers & deserializers will need to be updated, and added to the lists in `__init__.py`. + +### Rust bindings + +Rust bindings have low-level code in `flatbuffers_generated`, and a small high-level wrapper +to deserialize any message. + +When adding or modifying a schema: +- The low-level code must be generated by calling `generate_rust_bindings.py` +- The wrapper (defined in `lib.rs`) will need to be updated with the new schema. diff --git a/conanfile.txt b/conanfile.txt deleted file mode 100644 index 243fd63..0000000 --- a/conanfile.txt +++ /dev/null @@ -1,8 +0,0 @@ -[requires] -flatbuffers/1.12.0 - -[build_requires] -flatc/1.12.0 - -[generators] -virtualrunenv diff --git a/documentation/cmakeCompileSchema.md b/documentation/cmakeCompileSchema.md deleted file mode 100644 index 0cc9ff5..0000000 --- a/documentation/cmakeCompileSchema.md +++ /dev/null @@ -1,19 +0,0 @@ -# Automatic downloading and compilation of schema files - -It is possible to have CMake download the schema files and generate C++ header files from those schema files automatically. Instructions are as follows. - -1. You will need to download the files `DownloadProject.cmake` and `DownloadProject.CMakeLists.cmake.in` from the following [page](https://gist.github.com/SkyToGround/b458ecbef74e11c880a4774058c6f560) and put them in a CMake modules project of your project repository (e.g. `cmake_modules`). -2. Set the CMake modules directory in your `CMakeLists.txt` file, (e.g. `set(CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake_modules)`). -3. The code for actually downloading and compiling the schemas follows. Modify it to suit your needs. 
- -```CMake -include(DownloadProject) -download_project(PROJ streaming-data-types GIT_REPOSITORY https://github.com/ess-dmsc/streaming-data-types.git GIT_TAG master) -file(GLOB fb_schemata_files "${streaming-data-types_SOURCE_DIR}/schemas/*.fbs") -set(fb_header_INC "${PROJECT_SOURCE_DIR}/src/schemas") -find_program(flatc flatc PATHS "$ENV{flatc}" "$ENV{HOME}/.tools" "/opt/local/flatbuffers") -foreach (fb_file ${fb_schemata_files}) - message(STATUS "Generating header file for ${fb_file}.") - execute_process(COMMAND ${flatc} --cpp --gen-mutable --gen-name-strings --scoped-enums -o ${fb_header_INC} ${fb_file}) -endforeach() -``` diff --git a/generate_rust_bindings.py b/generate_rust_bindings.py new file mode 100644 index 0000000..800134b --- /dev/null +++ b/generate_rust_bindings.py @@ -0,0 +1,51 @@ +import os +import shutil +import subprocess + + +def to_rust_file_name(schema: str): + name, ext = schema.rsplit(".", 1) + return f"{name}.rs" + + +def to_rust_mod_name(schema: str): + name, ext = schema.rsplit(".", 1) + return f"{name[5:]}_{name[0:4]}" + + +def generate_rust_bindings(): + shutil.rmtree("rust/src/flatbuffers_generated/", ignore_errors=True) + os.makedirs("rust/src/flatbuffers_generated/") + + for schema in os.listdir("schemas"): + if not schema.endswith(".fbs"): + continue + subprocess.run( + [ + "flatc", + "--rust", + "-o", + os.path.join("rust", "src", "flatbuffers_generated"), + "--filename-suffix", + "", + "--gen-all", + os.path.join("schemas", schema), + ], + check=True, + ) + + with open("rust/src/flatbuffers_generated/mod.rs", "a") as f: + f.writelines( + [ + f'#[path = "{to_rust_file_name(schema)}"]\n', + f"pub mod {to_rust_mod_name(schema)};\n", + ] + ) + + +def main(): + generate_rust_bindings() + + +if __name__ == "__main__": + main() diff --git a/python/pyproject.toml b/python/pyproject.toml new file mode 100644 index 0000000..180dcdd --- /dev/null +++ b/python/pyproject.toml @@ -0,0 +1,99 @@ +[build-system] +requires = ["setuptools", "setuptools_scm>=8"] +build-backend 
= "setuptools.build_meta" + +[project] +name = "isis_streaming_data_types" +dynamic = ["version"] +description = "Python utilities for handling ISIS streamed data" +readme = "README.md" +requires-python = ">=3.12" +license-files = ["LICENSE"] + +authors = [ + {name = "ISIS Experiment Controls", email = "ISISExperimentControls@stfc.ac.uk" } +] +maintainers = [ + {name = "ISIS Experiment Controls", email = "ISISExperimentControls@stfc.ac.uk" } +] + +# Classifiers help users find your project by categorizing it. +# +# For a list of valid classifiers, see https://pypi.org/classifiers/ +classifiers = [ + # How mature is this project? Common values are + # 3 - Alpha + # 4 - Beta + # 5 - Production/Stable + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + + # Specify the Python versions you support here. In particular, ensure + # that you indicate you support Python 3. These classifiers are *not* + # checked by "pip install". See instead "requires-python" key in this file. 
+ "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Programming Language :: Python :: 3 :: Only", +] + +dependencies = [ + "flatbuffers", + "numpy>2" +] + +[project.optional-dependencies] +dev = [ + "ruff>=0.8", + "pyright", + "pytest", + "pytest-cov", +] + +[project.urls] +"Homepage" = "https://github.com/isiscomputinggroup/isis_streaming_data_types" +"Bug Reports" = "https://github.com/isiscomputinggroup/isis_streaming_data_types/issues" +"Source" = "https://github.com/isiscomputinggroup/isis_streaming_data_types" + +[tool.pytest.ini_options] +testpaths = "tests" +asyncio_mode = "auto" +addopts = "--cov --cov-report=html -vv" + +[tool.coverage.run] +branch = true +source = ["src"] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "if TYPE_CHECKING:", + "if typing.TYPE_CHECKING:", + "@abstractmethod", +] + +[tool.coverage.html] +directory = "coverage_html_report" + +[tool.pyright] +include = ["src", "tests"] +reportConstantRedefinition = true +reportDeprecated = true +reportInconsistentConstructor = true +reportMissingParameterType = true +reportMissingTypeArgument = true +reportUnnecessaryCast = true +reportUnnecessaryComparison = true +reportUnnecessaryContains = true +reportUnnecessaryIsInstance = true +reportUntypedBaseClass = true +reportUntypedClassDecorator = true +reportUntypedFunctionDecorator = true + +[tool.setuptools_scm] +version_file = "src/streaming_data_types/_version.py" +relative_to = "pyproject.toml" +root = ".." 
+ + diff --git a/python/src/streaming_data_types/__init__.py b/python/src/streaming_data_types/__init__.py new file mode 100644 index 0000000..cbcb897 --- /dev/null +++ b/python/src/streaming_data_types/__init__.py @@ -0,0 +1,59 @@ +from streaming_data_types._version import version +from streaming_data_types.action_response_answ import deserialise_answ, serialise_answ +from streaming_data_types.alarm_al00 import deserialise_al00, serialise_al00 +from streaming_data_types.area_detector_ad00 import deserialise_ad00, serialise_ad00 +from streaming_data_types.dataarray_da00 import deserialise_da00, serialise_da00 +from streaming_data_types.epics_connection_ep01 import deserialise_ep01, serialise_ep01 +from streaming_data_types.eventdata_ev44 import deserialise_ev44, serialise_ev44 +from streaming_data_types.finished_writing_wrdn import deserialise_wrdn, serialise_wrdn +from streaming_data_types.forwarder_config_update_fc00 import ( + deserialise_fc00, + serialise_fc00, +) +from streaming_data_types.histogram_hs01 import deserialise_hs01, serialise_hs01 +from streaming_data_types.json_json import deserialise_json, serialise_json +from streaming_data_types.logdata_f144 import deserialise_f144, serialise_f144 +from streaming_data_types.run_start_pl72 import deserialise_pl72, serialise_pl72 +from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t +from streaming_data_types.status_x5f2 import deserialise_x5f2, serialise_x5f2 +from streaming_data_types.units_un00 import serialise_un00, deserialise_un00 + +__version__ = version + + +SERIALISERS = { + "ev44": serialise_ev44, + "hs01": serialise_hs01, + "f144": serialise_f144, + "pl72": serialise_pl72, + "6s4t": serialise_6s4t, + "x5f2": serialise_x5f2, + "ep01": serialise_ep01, + "fc00": serialise_fc00, + "answ": serialise_answ, + "wrdn": serialise_wrdn, + "al00": serialise_al00, + "json": serialise_json, + "ad00": serialise_ad00, + "da00": serialise_da00, + "un00": serialise_un00, +} + + +DESERIALISERS 
= { + "ev44": deserialise_ev44, + "hs01": deserialise_hs01, + "f144": deserialise_f144, + "pl72": deserialise_pl72, + "6s4t": deserialise_6s4t, + "x5f2": deserialise_x5f2, + "ep01": deserialise_ep01, + "fc00": deserialise_fc00, + "answ": deserialise_answ, + "wrdn": deserialise_wrdn, + "al00": deserialise_al00, + "json": deserialise_json, + "ad00": deserialise_ad00, + "da00": deserialise_da00, + "un00": deserialise_un00, +} diff --git a/python/src/streaming_data_types/action_response_answ.py b/python/src/streaming_data_types/action_response_answ.py new file mode 100644 index 0000000..5fc4706 --- /dev/null +++ b/python/src/streaming_data_types/action_response_answ.py @@ -0,0 +1,81 @@ +from datetime import datetime, timezone +from typing import NamedTuple, Union + +import flatbuffers + +import streaming_data_types.fbschemas.action_response_answ.ActionResponse as ActionResponse +from streaming_data_types.fbschemas.action_response_answ.ActionOutcome import ( + ActionOutcome, +) +from streaming_data_types.fbschemas.action_response_answ.ActionType import ActionType +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"answ" + + +def serialise_answ( + service_id: str, + job_id: str, + command_id: str, + action: ActionType, + outcome: ActionOutcome, + message: str, + status_code: int, + stop_time: datetime, +) -> bytes: + builder = flatbuffers.Builder(500) + builder.ForceDefaults(True) + service_id_offset = builder.CreateString(service_id) + job_id_offset = builder.CreateString(job_id) + message_offset = builder.CreateString(message) + command_id_offset = builder.CreateString(command_id) + + ActionResponse.ActionResponseStart(builder) + ActionResponse.ActionResponseAddServiceId(builder, service_id_offset) + ActionResponse.ActionResponseAddJobId(builder, job_id_offset) + ActionResponse.ActionResponseAddAction(builder, action) + ActionResponse.ActionResponseAddOutcome(builder, outcome) + ActionResponse.ActionResponseAddMessage(builder, 
message_offset) + ActionResponse.ActionResponseAddCommandId(builder, command_id_offset) + ActionResponse.ActionResponseAddStatusCode(builder, status_code) + ActionResponse.ActionResponseAddStopTime(builder, int(stop_time.timestamp() * 1000)) + + out_message = ActionResponse.ActionResponseEnd(builder) + builder.Finish(out_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +Response = NamedTuple( + "Response", + ( + ("service_id", str), + ("job_id", str), + ("command_id", str), + ("action", ActionType), + ("outcome", ActionOutcome), + ("message", str), + ("status_code", int), + ("stop_time", datetime), + ), +) + + +def deserialise_answ(buffer: Union[bytearray, bytes]): + check_schema_identifier(buffer, FILE_IDENTIFIER) + answ_message = ActionResponse.ActionResponse.GetRootAsActionResponse(buffer, 0) + max_time = datetime( + year=3001, month=1, day=1, hour=0, minute=0, second=0 + ).timestamp() + used_timestamp = answ_message.StopTime() / 1000 + if used_timestamp > max_time: + used_timestamp = max_time + return Response( + service_id=answ_message.ServiceId().decode("utf-8"), + job_id=answ_message.JobId().decode("utf-8"), + command_id=answ_message.CommandId().decode("utf-8"), + action=answ_message.Action(), + outcome=answ_message.Outcome(), + message=answ_message.Message().decode("utf-8"), + status_code=answ_message.StatusCode(), + stop_time=datetime.fromtimestamp(used_timestamp, tz=timezone.utc), + ) diff --git a/python/src/streaming_data_types/alarm_al00.py b/python/src/streaming_data_types/alarm_al00.py new file mode 100644 index 0000000..f779052 --- /dev/null +++ b/python/src/streaming_data_types/alarm_al00.py @@ -0,0 +1,65 @@ +from collections import namedtuple +from enum import Enum + +import flatbuffers + +import streaming_data_types.fbschemas.alarm_al00.Alarm as Alarm +import streaming_data_types.fbschemas.alarm_al00.Severity as FBSeverity +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"al00" + 
+AlarmInfo = namedtuple("AlarmInfo", ("source", "timestamp_ns", "severity", "message")) + + +class Severity(Enum): + OK = 0 + MINOR = 1 + MAJOR = 2 + INVALID = 3 + + +_enum_to_severity = { + Severity.OK: FBSeverity.Severity.OK, + Severity.MINOR: FBSeverity.Severity.MINOR, + Severity.MAJOR: FBSeverity.Severity.MAJOR, + Severity.INVALID: FBSeverity.Severity.INVALID, +} + +_severity_to_enum = { + FBSeverity.Severity.OK: Severity.OK, + FBSeverity.Severity.MINOR: Severity.MINOR, + FBSeverity.Severity.MAJOR: Severity.MAJOR, + FBSeverity.Severity.INVALID: Severity.INVALID, +} + + +def deserialise_al00(buffer) -> AlarmInfo: + check_schema_identifier(buffer, FILE_IDENTIFIER) + alarm = Alarm.Alarm.GetRootAsAlarm(buffer, 0) + + return AlarmInfo( + alarm.SourceName().decode("utf-8") if alarm.SourceName() else "", + alarm.Timestamp(), + _severity_to_enum[alarm.Severity()], + alarm.Message().decode("utf-8") if alarm.Message() else "", + ) + + +def serialise_al00( + source: str, timestamp_ns: int, severity: Severity, message: str +) -> bytes: + builder = flatbuffers.Builder(128) + + message_offset = builder.CreateString(message) + source_offset = builder.CreateString(source) + + Alarm.AlarmStart(builder) + Alarm.AlarmAddSourceName(builder, source_offset) + Alarm.AlarmAddTimestamp(builder, timestamp_ns) + Alarm.AlarmAddSeverity(builder, _enum_to_severity[severity]) + Alarm.AlarmAddMessage(builder, message_offset) + alarm = Alarm.AlarmEnd(builder) + + builder.Finish(alarm, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) diff --git a/python/src/streaming_data_types/area_detector_ad00.py b/python/src/streaming_data_types/area_detector_ad00.py new file mode 100644 index 0000000..cf1a734 --- /dev/null +++ b/python/src/streaming_data_types/area_detector_ad00.py @@ -0,0 +1,200 @@ +from struct import pack +from typing import List, NamedTuple, Union + +import flatbuffers +import numpy as np + +import streaming_data_types.fbschemas.area_detector_ad00.Attribute as 
ADArAttribute +from streaming_data_types.fbschemas.area_detector_ad00 import ad00_ADArray +from streaming_data_types.fbschemas.area_detector_ad00.DType import DType +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"ad00" + + +class Attribute: + def __init__( + self, + name: str, + description: str, + source: str, + data: Union[np.ndarray, str, int, float], + ): + self.name = name + self.description = description + self.source = source + self.data = data + + def __eq__(self, other): + data_is_equal = type(self.data) == type(other.data) # noqa: E721 + if type(self.data) is np.ndarray: + data_is_equal = data_is_equal and np.array_equal(self.data, other.data) + else: + data_is_equal = data_is_equal and self.data == other.data + return ( + self.name == other.name + and self.description == other.description + and self.source == other.source + and data_is_equal + ) + + +def serialise_ad00( + source_name: str, + unique_id: int, + timestamp_ns: int, + data: Union[np.ndarray, str], + attributes: List[Attribute] = [], +) -> bytes: + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + + type_map = { + np.dtype("uint8"): DType.uint8, + np.dtype("int8"): DType.int8, + np.dtype("uint16"): DType.uint16, + np.dtype("int16"): DType.int16, + np.dtype("uint32"): DType.uint32, + np.dtype("int32"): DType.int32, + np.dtype("uint64"): DType.uint64, + np.dtype("int64"): DType.int64, + np.dtype("float32"): DType.float32, + np.dtype("float64"): DType.float64, + } + + if type(data) is str: + data = np.frombuffer(data.encode(), np.uint8) + data_type = DType.c_string + else: + data_type = type_map[data.dtype] + + # Build dims + dims_offset = builder.CreateNumpyVector(np.asarray(data.shape)) + + # Build data + data_offset = builder.CreateNumpyVector(data.flatten().view(np.uint8)) + + source_name_offset = builder.CreateString(source_name) + + temp_attributes = [] + for item in attributes: + if type(item.data) is np.ndarray: + attr_data_type = 
type_map[item.data.dtype] + attr_data = item.data + elif type(item.data) is str: + attr_data_type = DType.c_string + attr_data = np.frombuffer(item.data.encode(), np.uint8) + elif type(item.data) is int: + attr_data_type = DType.int64 + attr_data = np.frombuffer(pack("q", item.data), np.uint8) + elif type(item.data) is float: + attr_data_type = DType.float64 + attr_data = np.frombuffer(pack("d", item.data), np.uint8) + attr_name_offset = builder.CreateString(item.name) + attr_desc_offset = builder.CreateString(item.description) + attr_src_offset = builder.CreateString(item.source) + attr_data_offset = builder.CreateNumpyVector(attr_data.flatten().view(np.uint8)) + ADArAttribute.AttributeStart(builder) + ADArAttribute.AttributeAddName(builder, attr_name_offset) + ADArAttribute.AttributeAddDescription(builder, attr_desc_offset) + ADArAttribute.AttributeAddSource(builder, attr_src_offset) + ADArAttribute.AttributeAddDataType(builder, attr_data_type) + ADArAttribute.AttributeAddData(builder, attr_data_offset) + attr_offset = ADArAttribute.AttributeEnd(builder) + temp_attributes.append(attr_offset) + + ad00_ADArray.ad00_ADArrayStartAttributesVector(builder, len(attributes)) + for item in reversed(temp_attributes): + builder.PrependUOffsetTRelative(item) + attributes_offset = builder.EndVector() + + # Build the actual buffer + ad00_ADArray.ad00_ADArrayStart(builder) + ad00_ADArray.ad00_ADArrayAddSourceName(builder, source_name_offset) + ad00_ADArray.ad00_ADArrayAddDataType(builder, data_type) + ad00_ADArray.ad00_ADArrayAddDimensions(builder, dims_offset) + ad00_ADArray.ad00_ADArrayAddId(builder, unique_id) + ad00_ADArray.ad00_ADArrayAddData(builder, data_offset) + ad00_ADArray.ad00_ADArrayAddTimestamp(builder, timestamp_ns) + ad00_ADArray.ad00_ADArrayAddAttributes(builder, attributes_offset) + array_message = ad00_ADArray.ad00_ADArrayEnd(builder) + + builder.Finish(array_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +ADArray = NamedTuple( 
+ "ADArray", + ( + ("source_name", str), + ("unique_id", int), + ("timestamp_ns", int), + ("dimensions", np.ndarray), + ("data", np.ndarray), + ("attributes", List[Attribute]), + ), +) + + +def get_payload_data(fb_arr) -> np.ndarray: + return get_data(fb_arr).reshape(fb_arr.DimensionsAsNumpy()) + + +def get_data(fb_arr) -> np.ndarray: + """ + Converts the data array into the correct type. + """ + raw_data = fb_arr.DataAsNumpy() + type_map = { + DType.uint8: np.uint8, + DType.int8: np.int8, + DType.uint16: np.uint16, + DType.int16: np.int16, + DType.uint32: np.uint32, + DType.int32: np.int32, + DType.uint64: np.uint64, + DType.int64: np.int64, + DType.float32: np.float32, + DType.float64: np.float64, + } + return raw_data.view(type_map[fb_arr.DataType()]) + + +def deserialise_ad00(buffer: Union[bytearray, bytes]) -> ADArray: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + ad_array = ad00_ADArray.ad00_ADArray.GetRootAsad00_ADArray(buffer, 0) + unique_id = ad_array.Id() + if ad_array.DataType() == DType.c_string: + data = ad_array.DataAsNumpy().tobytes().decode() + else: + data = get_payload_data(ad_array) + + attributes_list = [] + for i in range(ad_array.AttributesLength()): + attribute_ptr = ad_array.Attributes(i) + if attribute_ptr.DataType() == DType.c_string: + attr_data = attribute_ptr.DataAsNumpy().tobytes().decode() + else: + attr_data = get_data(attribute_ptr) + temp_attribute = Attribute( + name=attribute_ptr.Name().decode(), + description=attribute_ptr.Description().decode(), + source=attribute_ptr.Source().decode(), + data=attr_data, + ) + if type(temp_attribute.data) is np.ndarray and len(temp_attribute.data) == 1: + if np.issubdtype(temp_attribute.data.dtype, np.floating): + temp_attribute.data = float(temp_attribute.data[0]) + elif np.issubdtype(temp_attribute.data.dtype, np.integer): + temp_attribute.data = int(temp_attribute.data[0]) + attributes_list.append(temp_attribute) + + return ADArray( + source_name=ad_array.SourceName().decode(), + 
unique_id=unique_id, + timestamp_ns=ad_array.Timestamp(), + dimensions=tuple(ad_array.DimensionsAsNumpy()), + data=data, + attributes=attributes_list, + ) diff --git a/python/src/streaming_data_types/dataarray_da00.py b/python/src/streaming_data_types/dataarray_da00.py new file mode 100644 index 0000000..b97ebda --- /dev/null +++ b/python/src/streaming_data_types/dataarray_da00.py @@ -0,0 +1,229 @@ +from dataclasses import dataclass +from struct import pack +from typing import List, NamedTuple, Tuple, Union + +import flatbuffers +import numpy as np + +import streaming_data_types.fbschemas.dataarray_da00.da00_Variable as VariableBuffer +from streaming_data_types.fbschemas.dataarray_da00 import da00_DataArray +from streaming_data_types.fbschemas.dataarray_da00.da00_dtype import da00_dtype +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"da00" + + +def get_dtype(data: Union[np.ndarray, str, float, int]): + if isinstance(data, np.ndarray): + type_map = { + np.dtype(x): d + for x, d in ( + ("int8", da00_dtype.int8), + ("int16", da00_dtype.int16), + ("int32", da00_dtype.int32), + ("int64", da00_dtype.int64), + ("uint8", da00_dtype.uint8), + ("uint16", da00_dtype.uint16), + ("uint32", da00_dtype.uint32), + ("uint64", da00_dtype.uint64), + ("float32", da00_dtype.float32), + ("float64", da00_dtype.float64), + ) + } + return type_map[data.dtype] + if isinstance(data, str): + return da00_dtype.c_string + if isinstance(data, float): + return da00_dtype.float64 + if isinstance(data, int): + return da00_dtype.int64 + raise RuntimeError(f"Unsupported data type {type(data)} in get_dtype") + + +def to_buffer(data: Union[np.ndarray, str, float, int]): + if isinstance(data, np.ndarray): + return data + if isinstance(data, str): + return np.frombuffer(data.encode(), np.uint8) + if isinstance(data, int): + return np.frombuffer(pack("q", data), np.uint8) + if isinstance(data, float): + return np.frombuffer(pack("d", data), np.uint8) + raise 
RuntimeError(f"Unsupported data type {type(data)} in to_buffer") + + +def from_buffer(fb_array) -> np.ndarray: + """Convert a flatbuffer array into the correct type""" + raw_data = fb_array.DataAsNumpy() + type_map = { + d: np.dtype(x) + for x, d in ( + ("int8", da00_dtype.int8), + ("int16", da00_dtype.int16), + ("int32", da00_dtype.int32), + ("int64", da00_dtype.int64), + ("uint8", da00_dtype.uint8), + ("uint16", da00_dtype.uint16), + ("uint32", da00_dtype.uint32), + ("uint64", da00_dtype.uint64), + ("float32", da00_dtype.float32), + ("float64", da00_dtype.float64), + ) + } + dtype = fb_array.DataType() + if da00_dtype.c_string == dtype: + return raw_data.tobytes().decode() + return raw_data.view(type_map[fb_array.DataType()]) + + +def create_optional_string(builder, string: Union[str, None]): + return None if string is None else builder.CreateString(string) + + +@dataclass +class Variable: + name: str + data: Union[np.ndarray, str] + axes: Union[List[str], None] = None + shape: Union[Tuple[int, ...], None] = None + unit: Union[str, None] = None + label: Union[str, None] = None + source: Union[str, None] = None + + def __post_init__(self): + # Calculate the shape when used, e.g., interactively + # -- but allow to read it back from the buffered object too + if self.axes is None: + self.axes = [] + if self.shape is None: + self.shape = to_buffer(self.data).shape + + def __eq__(self, other): + if not isinstance(other, Variable): + return False + same_data = type(self.data) == type(other.data) # noqa: E721 + if isinstance(self.data, np.ndarray): + same_data &= np.array_equal(self.data, other.data) + else: + same_data &= self.data == other.data + same_axes = len(self.axes) == len(other.axes) and all( + a == b for a, b in zip(self.axes, other.axes) + ) + return ( + same_data + and same_axes + and self.name == other.name + and self.unit == other.unit + and self.label == other.label + and self.source == other.source + and self.shape == other.shape + ) + + def pack(self, 
builder): + source_offset = create_optional_string(builder, self.source) + label_offset = create_optional_string(builder, self.label) + unit_offset = create_optional_string(builder, self.unit) + name_offset = builder.CreateString(self.name) + buf = to_buffer(self.data) + shape_offset = builder.CreateNumpyVector(np.asarray(buf.shape)) + data_offset = builder.CreateNumpyVector(buf.flatten().view(np.uint8)) + + temp_axes = [builder.CreateString(x) for x in self.axes] + VariableBuffer.StartAxesVector(builder, len(temp_axes)) + for dim in reversed(temp_axes): + builder.PrependUOffsetTRelative(dim) + axes_offset = builder.EndVector() + + VariableBuffer.Start(builder) + VariableBuffer.AddName(builder, name_offset) + if unit_offset is not None: + VariableBuffer.AddUnit(builder, unit_offset) + if label_offset is not None: + VariableBuffer.AddLabel(builder, label_offset) + if source_offset is not None: + VariableBuffer.AddSource(builder, source_offset) + VariableBuffer.AddDataType(builder, get_dtype(self.data)) + VariableBuffer.AddAxes(builder, axes_offset) + VariableBuffer.AddShape(builder, shape_offset) + VariableBuffer.AddData(builder, data_offset) + return VariableBuffer.End(builder) + + @classmethod + def unpack(cls, b: VariableBuffer): + data = from_buffer(b) + axes = [b.Axes(i).decode() for i in range(b.AxesLength())] + if len(axes): + data = data.reshape(b.ShapeAsNumpy()) + elif b.DataType() != da00_dtype.c_string and np.prod(data.shape) == 1: + data = data.item() + + unit = None if b.Unit() is None else b.Unit().decode() + label = None if b.Label() is None else b.Label().decode() + source = None if b.Source() is None else b.Source().decode() + name = b.Name().decode() + # the buffered shape is NOT the shape of the numpy array in all cases + buffered_shape = tuple(b.ShapeAsNumpy()) + return cls( + name=name, + unit=unit, + label=label, + source=source, + axes=axes, + data=data, + shape=buffered_shape, + ) + + +def insert_variable_list(starter, builder, objects: 
List[Variable]): + temp = [obj.pack(builder) for obj in objects] + starter(builder, len(temp)) + for obj in reversed(temp): + builder.PrependUOffsetTRelative(obj) + return builder.EndVector() + + +def serialise_da00( + source_name: str, + timestamp_ns: int, + data: List[Variable], +) -> bytes: + if not data: + raise RuntimeError("data must contain at least one Variable") + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + + data_offset = insert_variable_list(da00_DataArray.StartDataVector, builder, data) + source_name_offset = builder.CreateString(source_name) + + # Build the actual buffer + da00_DataArray.Start(builder) + da00_DataArray.AddSourceName(builder, source_name_offset) + da00_DataArray.AddTimestamp(builder, timestamp_ns) + da00_DataArray.AddData(builder, data_offset) + array_message = da00_DataArray.End(builder) + + builder.Finish(array_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +da00_DataArray_t = NamedTuple( + "da00_DataArray", + ( + ("source_name", str), + ("timestamp_ns", int), + ("data", List[Variable]), + ), +) + + +def deserialise_da00(buffer: Union[bytearray, bytes]) -> da00_DataArray: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + da00 = da00_DataArray.da00_DataArray.GetRootAs(buffer, offset=0) + data = [Variable.unpack(da00.Data(j)) for j in range(da00.DataLength())] + + return da00_DataArray_t( + source_name=da00.SourceName().decode(), + timestamp_ns=da00.Timestamp(), + data=data, + ) diff --git a/python/src/streaming_data_types/epics_connection_ep01.py b/python/src/streaming_data_types/epics_connection_ep01.py new file mode 100644 index 0000000..62bb1c1 --- /dev/null +++ b/python/src/streaming_data_types/epics_connection_ep01.py @@ -0,0 +1,105 @@ +from collections import namedtuple +from enum import Enum +from typing import Optional, Union + +import flatbuffers + +from streaming_data_types.fbschemas.epics_connection_ep01 import EpicsPVConnectionInfo +from 
streaming_data_types.fbschemas.epics_connection_ep01.ConnectionInfo import ( + ConnectionInfo as FBConnectionInfo, +) +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"ep01" + + +class ConnectionInfo(Enum): + UNKNOWN = 0 + NEVER_CONNECTED = 1 + CONNECTED = 2 + DISCONNECTED = 3 + DESTROYED = 4 + CANCELLED = 5 + FINISHED = 6 + REMOTE_ERROR = 7 + + +_enum_to_status = { + ConnectionInfo.UNKNOWN: FBConnectionInfo.UNKNOWN, + ConnectionInfo.NEVER_CONNECTED: FBConnectionInfo.NEVER_CONNECTED, + ConnectionInfo.CONNECTED: FBConnectionInfo.CONNECTED, + ConnectionInfo.DISCONNECTED: FBConnectionInfo.DISCONNECTED, + ConnectionInfo.DESTROYED: FBConnectionInfo.DESTROYED, + ConnectionInfo.CANCELLED: FBConnectionInfo.CANCELLED, + ConnectionInfo.FINISHED: FBConnectionInfo.FINISHED, + ConnectionInfo.REMOTE_ERROR: FBConnectionInfo.REMOTE_ERROR, +} + +_status_to_enum = { + FBConnectionInfo.UNKNOWN: ConnectionInfo.UNKNOWN, + FBConnectionInfo.NEVER_CONNECTED: ConnectionInfo.NEVER_CONNECTED, + FBConnectionInfo.CONNECTED: ConnectionInfo.CONNECTED, + FBConnectionInfo.DISCONNECTED: ConnectionInfo.DISCONNECTED, + FBConnectionInfo.DESTROYED: ConnectionInfo.DESTROYED, + FBConnectionInfo.CANCELLED: ConnectionInfo.CANCELLED, + FBConnectionInfo.FINISHED: ConnectionInfo.FINISHED, + FBConnectionInfo.REMOTE_ERROR: ConnectionInfo.REMOTE_ERROR, +} + + +def serialise_ep01( + timestamp_ns: int, + status: ConnectionInfo, + source_name: str, + service_id: Optional[str] = None, +) -> bytes: + builder = flatbuffers.Builder(136) + builder.ForceDefaults(True) + + if service_id is not None: + service_id_offset = builder.CreateString(service_id) + source_name_offset = builder.CreateString(source_name) + + EpicsPVConnectionInfo.EpicsPVConnectionInfoStart(builder) + if service_id is not None: + EpicsPVConnectionInfo.EpicsPVConnectionInfoAddServiceId( + builder, service_id_offset + ) + EpicsPVConnectionInfo.EpicsPVConnectionInfoAddSourceName( + builder, source_name_offset + ) + 
EpicsPVConnectionInfo.EpicsPVConnectionInfoAddStatus( + builder, _enum_to_status[status] + ) + EpicsPVConnectionInfo.EpicsPVConnectionInfoAddTimestamp(builder, timestamp_ns) + + end = EpicsPVConnectionInfo.EpicsPVConnectionInfoEnd(builder) + builder.Finish(end, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +EpicsPVConnection = namedtuple( + "EpicsPVConnection", ("timestamp", "status", "source_name", "service_id") +) + + +def deserialise_ep01(buffer: Union[bytearray, bytes]) -> EpicsPVConnection: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + epics_connection = ( + EpicsPVConnectionInfo.EpicsPVConnectionInfo.GetRootAsEpicsPVConnectionInfo( + buffer, 0 + ) + ) + + source_name = ( + epics_connection.SourceName() if epics_connection.SourceName() else b"" + ) + service_id = epics_connection.ServiceId() if epics_connection.ServiceId() else b"" + + return EpicsPVConnection( + timestamp=epics_connection.Timestamp(), + status=_status_to_enum[epics_connection.Status()], + source_name=source_name.decode(), + service_id=service_id.decode(), + ) diff --git a/python/src/streaming_data_types/eventdata_ev44.py b/python/src/streaming_data_types/eventdata_ev44.py new file mode 100644 index 0000000..bb48f2d --- /dev/null +++ b/python/src/streaming_data_types/eventdata_ev44.py @@ -0,0 +1,89 @@ +from collections import namedtuple + +import flatbuffers +import numpy as np + +import streaming_data_types.fbschemas.eventdata_ev44.Event44Message as Event44Message +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"ev44" + + +EventData = namedtuple( + "EventData", + ( + "source_name", + "message_id", + "reference_time", + "reference_time_index", + "time_of_flight", + "pixel_id", + ), +) + + +def deserialise_ev44(buffer): + """ + Deserialise FlatBuffer ev44. + + :param buffer: The FlatBuffers buffer. + :return: The deserialised data. 
+ """ + check_schema_identifier(buffer, FILE_IDENTIFIER) + + event = Event44Message.Event44Message.GetRootAs(buffer, 0) + + return EventData( + event.SourceName().decode("utf-8"), + event.MessageId(), + event.ReferenceTimeAsNumpy(), + event.ReferenceTimeIndexAsNumpy(), + event.TimeOfFlightAsNumpy(), + event.PixelIdAsNumpy(), + ) + + +def serialise_ev44( + source_name, + message_id, + reference_time, + reference_time_index, + time_of_flight, + pixel_id, +): + """ + Serialise event data as an ev44 FlatBuffers message. + + :param source_name: + :param message_id: + :param reference_time: + :param reference_time_index: + :param time_of_flight: + :param pixel_id: + :return: + """ + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + + source = builder.CreateString(source_name) + ref_time_data = builder.CreateNumpyVector( + np.asarray(reference_time).astype(np.int64) + ) + ref_time_index_data = builder.CreateNumpyVector( + np.asarray(reference_time_index).astype(np.int32) + ) + tof_data = builder.CreateNumpyVector(np.asarray(time_of_flight).astype(np.int32)) + pixel_id_data = builder.CreateNumpyVector(np.asarray(pixel_id).astype(np.int32)) + + Event44Message.Event44MessageStart(builder) + Event44Message.Event44MessageAddReferenceTime(builder, ref_time_data) + Event44Message.Event44MessageAddReferenceTimeIndex(builder, ref_time_index_data) + Event44Message.Event44MessageAddTimeOfFlight(builder, tof_data) + Event44Message.Event44MessageAddPixelId(builder, pixel_id_data) + Event44Message.Event44MessageAddMessageId(builder, message_id) + Event44Message.Event44MessageAddSourceName(builder, source) + + data = Event44Message.Event44MessageEnd(builder) + builder.Finish(data, file_identifier=FILE_IDENTIFIER) + + return bytes(builder.Output()) diff --git a/python/src/streaming_data_types/exceptions.py b/python/src/streaming_data_types/exceptions.py new file mode 100644 index 0000000..0efb440 --- /dev/null +++ b/python/src/streaming_data_types/exceptions.py @@ -0,0 
+1,10 @@ +class StreamingDataTypesException(Exception): + pass + + +class WrongSchemaException(StreamingDataTypesException): + pass + + +class ShortBufferException(StreamingDataTypesException): + pass diff --git a/python/src/streaming_data_types/fbschemas/__init__.py b/python/src/streaming_data_types/fbschemas/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py b/python/src/streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py new file mode 100644 index 0000000..c7f4d3a --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/action_response_answ/ActionOutcome.py @@ -0,0 +1,8 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class ActionOutcome(object): + Success = 0 + Failure = 1 diff --git a/python/src/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py b/python/src/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py new file mode 100644 index 0000000..92c5280 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/action_response_answ/ActionResponse.py @@ -0,0 +1,135 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class ActionResponse(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsActionResponse(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ActionResponse() + x.Init(buf, n + offset) + return x + + @classmethod + def ActionResponseBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x61\x6E\x73\x77", size_prefixed=size_prefixed + ) + + # ActionResponse + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ActionResponse + def ServiceId(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # ActionResponse + def JobId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # ActionResponse + def Action(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # ActionResponse + def Outcome(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # ActionResponse + def StatusCode(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # ActionResponse + def StopTime(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + # ActionResponse + def Message(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # ActionResponse + def CommandId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def ActionResponseStart(builder): + builder.StartObject(8) + + +def ActionResponseAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def ActionResponseAddJobId(builder, jobId): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0 + ) + + +def 
ActionResponseAddAction(builder, action): + builder.PrependInt8Slot(2, action, 0) + + +def ActionResponseAddOutcome(builder, outcome): + builder.PrependInt8Slot(3, outcome, 0) + + +def ActionResponseAddStatusCode(builder, statusCode): + builder.PrependInt32Slot(4, statusCode, 0) + + +def ActionResponseAddStopTime(builder, stopTime): + builder.PrependUint64Slot(5, stopTime, 0) + + +def ActionResponseAddMessage(builder, message): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0 + ) + + +def ActionResponseAddCommandId(builder, commandId): + builder.PrependUOffsetTRelativeSlot( + 7, flatbuffers.number_types.UOffsetTFlags.py_type(commandId), 0 + ) + + +def ActionResponseEnd(builder): + return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/action_response_answ/ActionType.py b/python/src/streaming_data_types/fbschemas/action_response_answ/ActionType.py new file mode 100644 index 0000000..7120bb0 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/action_response_answ/ActionType.py @@ -0,0 +1,8 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class ActionType(object): + StartJob = 0 + SetStopTime = 1 diff --git a/python/src/streaming_data_types/fbschemas/action_response_answ/__init__.py b/python/src/streaming_data_types/fbschemas/action_response_answ/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/alarm_al00/Alarm.py b/python/src/streaming_data_types/fbschemas/alarm_al00/Alarm.py new file mode 100644 index 0000000..d292d46 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/alarm_al00/Alarm.py @@ -0,0 +1,76 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class Alarm(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsAlarm(cls, buf, offset): + n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Alarm() + x.Init(buf, n + offset) + return x + + # Alarm + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Alarm + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Alarm + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # Alarm + def Severity(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos) + return 0 + + # Alarm + def Message(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def AlarmStart(builder): + builder.StartObject(4) + + +def AlarmAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def AlarmAddTimestamp(builder, timestamp): + builder.PrependInt64Slot(1, timestamp, 0) + + +def AlarmAddSeverity(builder, severity): + builder.PrependInt16Slot(2, severity, 0) + + +def AlarmAddMessage(builder, message): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0 + ) + + +def AlarmEnd(builder): + return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/alarm_al00/Severity.py b/python/src/streaming_data_types/fbschemas/alarm_al00/Severity.py new file mode 100644 index 0000000..8571963 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/alarm_al00/Severity.py @@ -0,0 +1,10 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class Severity(object): + 
OK = 0 + MINOR = 1 + MAJOR = 2 + INVALID = 3 diff --git a/python/src/streaming_data_types/fbschemas/alarm_al00/__init__.py b/python/src/streaming_data_types/fbschemas/alarm_al00/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/area_detector_ad00/Attribute.py b/python/src/streaming_data_types/fbschemas/area_detector_ad00/Attribute.py new file mode 100644 index 0000000..11300bb --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/area_detector_ad00/Attribute.py @@ -0,0 +1,164 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class Attribute(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Attribute() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsAttribute(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def AttributeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x61\x64\x30\x30", size_prefixed=size_prefixed + ) + + # Attribute + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Attribute + def Name(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Attribute + def Description(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Attribute + def Source(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Attribute + def DataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # Attribute + def Data(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) + return 0 + + # Attribute + def DataAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # Attribute + def DataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Attribute + def DataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + return o == 0 + + +def AttributeStart(builder): + builder.StartObject(5) + 
+ +def Start(builder): + AttributeStart(builder) + + +def AttributeAddName(builder, name): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0 + ) + + +def AddName(builder: flatbuffers.Builder, name: int): + AttributeAddName(builder, name) + + +def AttributeAddDescription(builder, description): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(description), 0 + ) + + +def AddDescription(builder: flatbuffers.Builder, description: int): + AttributeAddDescription(builder, description) + + +def AttributeAddSource(builder, source): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0 + ) + + +def AddSource(builder: flatbuffers.Builder, source: int): + AttributeAddSource(builder, source) + + +def AttributeAddDataType(builder, dataType): + builder.PrependInt8Slot(3, dataType, 0) + + +def AddDataType(builder: flatbuffers.Builder, dataType: int): + AttributeAddDataType(builder, dataType) + + +def AttributeAddData(builder, data): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 + ) + + +def AddData(builder: flatbuffers.Builder, data: int): + AttributeAddData(builder, data) + + +def AttributeStartDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def StartDataVector(builder, numElems: int) -> int: + return AttributeStartDataVector(builder, numElems) + + +def AttributeEnd(builder): + return builder.EndObject() + + +def End(builder): + return AttributeEnd(builder) diff --git a/python/src/streaming_data_types/fbschemas/area_detector_ad00/DType.py b/python/src/streaming_data_types/fbschemas/area_detector_ad00/DType.py new file mode 100644 index 0000000..22098af --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/area_detector_ad00/DType.py @@ -0,0 +1,17 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class 
DType(object): + int8 = 0 + uint8 = 1 + int16 = 2 + uint16 = 3 + int32 = 4 + uint32 = 5 + int64 = 6 + uint64 = 7 + float32 = 8 + float64 = 9 + c_string = 10 diff --git a/python/src/streaming_data_types/fbschemas/area_detector_ad00/__init__.py b/python/src/streaming_data_types/fbschemas/area_detector_ad00/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/area_detector_ad00/ad00_ADArray.py b/python/src/streaming_data_types/fbschemas/area_detector_ad00/ad00_ADArray.py new file mode 100644 index 0000000..0d32e2b --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/area_detector_ad00/ad00_ADArray.py @@ -0,0 +1,252 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class ad00_ADArray(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ad00_ADArray() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsad00_ADArray(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def ad00_ADArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x61\x64\x30\x30", size_prefixed=size_prefixed + ) + + # ad00_ADArray + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ad00_ADArray + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # ad00_ADArray + def Id(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # ad00_ADArray + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # ad00_ADArray + def Dimensions(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # ad00_ADArray + def DimensionsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # ad00_ADArray + def DimensionsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ad00_ADArray + def DimensionsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + return o == 0 + + # ad00_ADArray + def DataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return 
self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # ad00_ADArray + def Data(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) + return 0 + + # ad00_ADArray + def DataAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # ad00_ADArray + def DataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ad00_ADArray + def DataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + # ad00_ADArray + def Attributes(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + from .Attribute import Attribute + + obj = Attribute() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # ad00_ADArray + def AttributesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ad00_ADArray + def AttributesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + return o == 0 + + +def ad00_ADArrayStart(builder): + builder.StartObject(7) + + +def Start(builder): + ad00_ADArrayStart(builder) + + +def ad00_ADArrayAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def AddSourceName(builder: flatbuffers.Builder, sourceName: int): + ad00_ADArrayAddSourceName(builder, sourceName) + + +def 
ad00_ADArrayAddId(builder, id): + builder.PrependInt32Slot(1, id, 0) + + +def AddId(builder: flatbuffers.Builder, id: int): + ad00_ADArrayAddId(builder, id) + + +def ad00_ADArrayAddTimestamp(builder, timestamp): + builder.PrependInt64Slot(2, timestamp, 0) + + +def AddTimestamp(builder: flatbuffers.Builder, timestamp: int): + ad00_ADArrayAddTimestamp(builder, timestamp) + + +def ad00_ADArrayAddDimensions(builder, dimensions): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(dimensions), 0 + ) + + +def AddDimensions(builder: flatbuffers.Builder, dimensions: int): + ad00_ADArrayAddDimensions(builder, dimensions) + + +def ad00_ADArrayStartDimensionsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def StartDimensionsVector(builder, numElems: int) -> int: + return ad00_ADArrayStartDimensionsVector(builder, numElems) + + +def ad00_ADArrayAddDataType(builder, dataType): + builder.PrependInt8Slot(4, dataType, 0) + + +def AddDataType(builder: flatbuffers.Builder, dataType: int): + ad00_ADArrayAddDataType(builder, dataType) + + +def ad00_ADArrayAddData(builder, data): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 + ) + + +def AddData(builder: flatbuffers.Builder, data: int): + ad00_ADArrayAddData(builder, data) + + +def ad00_ADArrayStartDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def StartDataVector(builder, numElems: int) -> int: + return ad00_ADArrayStartDataVector(builder, numElems) + + +def ad00_ADArrayAddAttributes(builder, attributes): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(attributes), 0 + ) + + +def AddAttributes(builder: flatbuffers.Builder, attributes: int): + ad00_ADArrayAddAttributes(builder, attributes) + + +def ad00_ADArrayStartAttributesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartAttributesVector(builder, 
numElems: int) -> int: + return ad00_ADArrayStartAttributesVector(builder, numElems) + + +def ad00_ADArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return ad00_ADArrayEnd(builder) diff --git a/python/src/streaming_data_types/fbschemas/dataarray_da00/__init__.py b/python/src/streaming_data_types/fbschemas/dataarray_da00/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/dataarray_da00/da00_DataArray.py b/python/src/streaming_data_types/fbschemas/dataarray_da00/da00_DataArray.py new file mode 100644 index 0000000..2cad228 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/dataarray_da00/da00_DataArray.py @@ -0,0 +1,128 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class da00_DataArray(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = da00_DataArray() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsda00_DataArray(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def da00_DataArrayBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x64\x61\x30\x30", size_prefixed=size_prefixed + ) + + # da00_DataArray + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # da00_DataArray + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # da00_DataArray + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # da00_DataArray + def Data(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + from streaming_data_types.fbschemas.dataarray_da00.da00_Variable import ( + da00_Variable, + ) + + obj = da00_Variable() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # da00_DataArray + def DataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # da00_DataArray + def DataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + +def da00_DataArrayStart(builder): + builder.StartObject(3) + + +def Start(builder): + return da00_DataArrayStart(builder) + + +def da00_DataArrayAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def AddSourceName(builder, sourceName): + return da00_DataArrayAddSourceName(builder, sourceName) + + +def da00_DataArrayAddTimestamp(builder, timestamp): + 
builder.PrependInt64Slot(1, timestamp, 0) + + +def AddTimestamp(builder, timestamp): + return da00_DataArrayAddTimestamp(builder, timestamp) + + +def da00_DataArrayAddData(builder, data): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 + ) + + +def AddData(builder, data): + return da00_DataArrayAddData(builder, data) + + +def da00_DataArrayStartDataVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartDataVector(builder, numElems): + return da00_DataArrayStartDataVector(builder, numElems) + + +def da00_DataArrayEnd(builder): + return builder.EndObject() + + +def End(builder): + return da00_DataArrayEnd(builder) diff --git a/python/src/streaming_data_types/fbschemas/dataarray_da00/da00_Variable.py b/python/src/streaming_data_types/fbschemas/dataarray_da00/da00_Variable.py new file mode 100644 index 0000000..1d0ad66 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/dataarray_da00/da00_Variable.py @@ -0,0 +1,269 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class da00_Variable(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = da00_Variable() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsda00_Variable(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def da00_VariableBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x64\x61\x30\x30", size_prefixed=size_prefixed + ) + + # da00_Variable + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # da00_Variable + def Name(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # da00_Variable + def Unit(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # da00_Variable + def Label(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # da00_Variable + def Source(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # da00_Variable + def DataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # da00_Variable + def Axes(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.String( + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4) + ) + return "" + + # da00_Variable + def AxesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # da00_Variable + def AxesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + # da00_Variable + def Shape(self, j): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # da00_Variable + def ShapeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # da00_Variable + def ShapeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # da00_Variable + def ShapeIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + return o == 0 + + # da00_Variable + def Data(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Uint8Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1), + ) + return 0 + + # da00_Variable + def DataAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # da00_Variable + def DataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # da00_Variable + def DataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + return o == 0 + + +def da00_VariableStart(builder): + builder.StartObject(8) + + +def Start(builder): + return da00_VariableStart(builder) + + +def da00_VariableAddName(builder, name): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0 + ) + + +def AddName(builder, name): + return da00_VariableAddName(builder, name) + + +def da00_VariableAddUnit(builder, 
unit): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(unit), 0 + ) + + +def AddUnit(builder, unit): + return da00_VariableAddUnit(builder, unit) + + +def da00_VariableAddLabel(builder, label): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(label), 0 + ) + + +def AddLabel(builder, label): + return da00_VariableAddLabel(builder, label) + + +def da00_VariableAddSource(builder, source): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0 + ) + + +def AddSource(builder, source): + return da00_VariableAddSource(builder, source) + + +def da00_VariableAddDataType(builder, dataType): + builder.PrependInt8Slot(4, dataType, 0) + + +def AddDataType(builder, dataType): + return da00_VariableAddDataType(builder, dataType) + + +def da00_VariableAddAxes(builder, axes): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(axes), 0 + ) + + +def AddAxes(builder, axes): + return da00_VariableAddAxes(builder, axes) + + +def da00_VariableStartAxesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartAxesVector(builder, numElems): + return da00_VariableStartAxesVector(builder, numElems) + + +def da00_VariableAddShape(builder, shape): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(shape), 0 + ) + + +def AddShape(builder, shape): + return da00_VariableAddShape(builder, shape) + + +def da00_VariableStartShapeVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def StartShapeVector(builder, numElems): + return da00_VariableStartShapeVector(builder, numElems) + + +def da00_VariableAddData(builder, data): + builder.PrependUOffsetTRelativeSlot( + 7, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 + ) + + +def AddData(builder, data): + return da00_VariableAddData(builder, data) + + +def 
da00_VariableStartDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + + +def StartDataVector(builder, numElems): + return da00_VariableStartDataVector(builder, numElems) + + +def da00_VariableEnd(builder): + return builder.EndObject() + + +def End(builder): + return da00_VariableEnd(builder) diff --git a/python/src/streaming_data_types/fbschemas/dataarray_da00/da00_dtype.py b/python/src/streaming_data_types/fbschemas/dataarray_da00/da00_dtype.py new file mode 100644 index 0000000..1caf3fe --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/dataarray_da00/da00_dtype.py @@ -0,0 +1,18 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class da00_dtype(object): + none = 0 + int8 = 1 + uint8 = 2 + int16 = 3 + uint16 = 4 + int32 = 5 + uint32 = 6 + int64 = 7 + uint64 = 8 + float32 = 9 + float64 = 10 + c_string = 11 diff --git a/python/src/streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py b/python/src/streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py new file mode 100644 index 0000000..b8033c6 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/epics_connection_ep01/ConnectionInfo.py @@ -0,0 +1,14 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class ConnectionInfo(object): + UNKNOWN = 0 + NEVER_CONNECTED = 1 + CONNECTED = 2 + DISCONNECTED = 3 + DESTROYED = 4 + CANCELLED = 5 + FINISHED = 6 + REMOTE_ERROR = 7 diff --git a/python/src/streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py b/python/src/streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py new file mode 100644 index 0000000..28084db --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/epics_connection_ep01/EpicsPVConnectionInfo.py @@ -0,0 +1,114 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat 
import import_numpy + +np = import_numpy() + + +class EpicsPVConnectionInfo(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = EpicsPVConnectionInfo() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsEpicsPVConnectionInfo(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def EpicsPVConnectionInfoBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x65\x70\x30\x31", size_prefixed=size_prefixed + ) + + # EpicsPVConnectionInfo + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # EpicsPVConnectionInfo + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # EpicsPVConnectionInfo + def Status(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos) + return 0 + + # EpicsPVConnectionInfo + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # EpicsPVConnectionInfo + def ServiceId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def EpicsPVConnectionInfoStart(builder): + builder.StartObject(4) + + +def Start(builder): + return EpicsPVConnectionInfoStart(builder) + + +def EpicsPVConnectionInfoAddTimestamp(builder, timestamp): + builder.PrependInt64Slot(0, timestamp, 0) + + +def AddTimestamp(builder, timestamp): + return EpicsPVConnectionInfoAddTimestamp(builder, timestamp) 
+ + +def EpicsPVConnectionInfoAddStatus(builder, status): + builder.PrependInt16Slot(1, status, 0) + + +def AddStatus(builder, status): + return EpicsPVConnectionInfoAddStatus(builder, status) + + +def EpicsPVConnectionInfoAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def AddSourceName(builder, sourceName): + return EpicsPVConnectionInfoAddSourceName(builder, sourceName) + + +def EpicsPVConnectionInfoAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def AddServiceId(builder, serviceId): + return EpicsPVConnectionInfoAddServiceId(builder, serviceId) + + +def EpicsPVConnectionInfoEnd(builder): + return builder.EndObject() + + +def End(builder): + return EpicsPVConnectionInfoEnd(builder) diff --git a/python/src/streaming_data_types/fbschemas/epics_connection_ep01/__init__.py b/python/src/streaming_data_types/fbschemas/epics_connection_ep01/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py b/python/src/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py new file mode 100644 index 0000000..a9fc017 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/eventdata_ev44/Event44Message.py @@ -0,0 +1,274 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class Event44Message(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Event44Message() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsEvent44Message(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def Event44MessageBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x65\x76\x34\x34", size_prefixed=size_prefixed + ) + + # Event44Message + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Event44Message + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Event44Message + def MessageId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # Event44Message + def ReferenceTime(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # Event44Message + def ReferenceTimeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # Event44Message + def ReferenceTimeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Event44Message + def ReferenceTimeIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # Event44Message + def ReferenceTimeIndex(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # Event44Message + def 
ReferenceTimeIndexAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Event44Message + def ReferenceTimeIndexLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Event44Message + def ReferenceTimeIndexIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + return o == 0 + + # Event44Message + def TimeOfFlight(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # Event44Message + def TimeOfFlightAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Event44Message + def TimeOfFlightLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Event44Message + def TimeOfFlightIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + return o == 0 + + # Event44Message + def PixelId(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # Event44Message + def PixelIdAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Event44Message + def PixelIdLength(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Event44Message + def PixelIdIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + +def Event44MessageStart(builder): + builder.StartObject(6) + + +def Start(builder): + return Event44MessageStart(builder) + + +def Event44MessageAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0 + ) + + +def AddSourceName(builder, sourceName): + return Event44MessageAddSourceName(builder, sourceName) + + +def Event44MessageAddMessageId(builder, messageId): + builder.PrependInt64Slot(1, messageId, 0) + + +def AddMessageId(builder, messageId): + return Event44MessageAddMessageId(builder, messageId) + + +def Event44MessageAddReferenceTime(builder, referenceTime): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTime), 0 + ) + + +def AddReferenceTime(builder, referenceTime): + return Event44MessageAddReferenceTime(builder, referenceTime) + + +def Event44MessageStartReferenceTimeVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def StartReferenceTimeVector(builder, numElems): + return Event44MessageStartReferenceTimeVector(builder, numElems) + + +def Event44MessageAddReferenceTimeIndex(builder, referenceTimeIndex): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(referenceTimeIndex), 0 + ) + + +def AddReferenceTimeIndex(builder, referenceTimeIndex): + return Event44MessageAddReferenceTimeIndex(builder, referenceTimeIndex) + + +def Event44MessageStartReferenceTimeIndexVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartReferenceTimeIndexVector(builder, numElems): + return Event44MessageStartReferenceTimeIndexVector(builder, numElems) + + +def 
Event44MessageAddTimeOfFlight(builder, timeOfFlight): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(timeOfFlight), 0 + ) + + +def AddTimeOfFlight(builder, timeOfFlight): + return Event44MessageAddTimeOfFlight(builder, timeOfFlight) + + +def Event44MessageStartTimeOfFlightVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartTimeOfFlightVector(builder, numElems): + return Event44MessageStartTimeOfFlightVector(builder, numElems) + + +def Event44MessageAddPixelId(builder, pixelId): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(pixelId), 0 + ) + + +def AddPixelId(builder, pixelId): + return Event44MessageAddPixelId(builder, pixelId) + + +def Event44MessageStartPixelIdVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def StartPixelIdVector(builder, numElems): + return Event44MessageStartPixelIdVector(builder, numElems) + + +def Event44MessageEnd(builder): + return builder.EndObject() + + +def End(builder): + return Event44MessageEnd(builder) diff --git a/python/src/streaming_data_types/fbschemas/eventdata_ev44/__init__.py b/python/src/streaming_data_types/fbschemas/eventdata_ev44/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py b/python/src/streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py new file mode 100644 index 0000000..a7f2996 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/finished_writing_wrdn/FinishedWriting.py @@ -0,0 +1,115 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class FinishedWriting(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsFinishedWriting(cls, buf, offset): + n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = FinishedWriting() + x.Init(buf, n + offset) + return x + + @classmethod + def FinishedWritingBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x77\x72\x64\x6E", size_prefixed=size_prefixed + ) + + # FinishedWriting + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # FinishedWriting + def ServiceId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # FinishedWriting + def JobId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # FinishedWriting + def ErrorEncountered(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool( + self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos) + ) + return False + + # FinishedWriting + def FileName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # FinishedWriting + def Metadata(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # FinishedWriting + def Message(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def FinishedWritingStart(builder): + builder.StartObject(6) + + +def FinishedWritingAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def FinishedWritingAddJobId(builder, jobId): + builder.PrependUOffsetTRelativeSlot( + 1, 
flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0 + ) + + +def FinishedWritingAddErrorEncountered(builder, errorEncountered): + builder.PrependBoolSlot(2, errorEncountered, 0) + + +def FinishedWritingAddFileName(builder, fileName): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(fileName), 0 + ) + + +def FinishedWritingAddMetadata(builder, metadata): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(metadata), 0 + ) + + +def FinishedWritingAddMessage(builder, message): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(message), 0 + ) + + +def FinishedWritingEnd(builder): + return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/finished_writing_wrdn/__init__.py b/python/src/streaming_data_types/fbschemas/finished_writing_wrdn/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/Protocol.py b/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/Protocol.py new file mode 100644 index 0000000..d3260a6 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/Protocol.py @@ -0,0 +1,8 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class Protocol(object): + PVA = 0 + CA = 1 + FAKE = 2 diff --git a/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/Stream.py b/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/Stream.py new file mode 100644 index 0000000..642420c --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/Stream.py @@ -0,0 +1,86 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Stream(object): + __slots__ = ['_tab'] + 
+ @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Stream() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsStream(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def StreamBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x63\x30\x30", size_prefixed=size_prefixed) + + # Stream + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Stream + def Channel(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Stream + def Schema(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Stream + def Topic(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Stream + def Protocol(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) + return 0 + + # Stream + def Periodic(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def StreamStart(builder): builder.StartObject(5) +def Start(builder): + return StreamStart(builder) +def StreamAddChannel(builder, channel): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(channel), 0) +def AddChannel(builder, channel): + return StreamAddChannel(builder, channel) +def StreamAddSchema(builder, schema): builder.PrependUOffsetTRelativeSlot(1, 
flatbuffers.number_types.UOffsetTFlags.py_type(schema), 0) +def AddSchema(builder, schema): + return StreamAddSchema(builder, schema) +def StreamAddTopic(builder, topic): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(topic), 0) +def AddTopic(builder, topic): + return StreamAddTopic(builder, topic) +def StreamAddProtocol(builder, protocol): builder.PrependUint16Slot(3, protocol, 0) +def AddProtocol(builder, protocol): + return StreamAddProtocol(builder, protocol) +def StreamAddPeriodic(builder, periodic): builder.PrependInt32Slot(4, periodic, 0) +def AddPeriodic(builder, periodic): + return StreamAddPeriodic(builder, periodic) +def StreamEnd(builder): return builder.EndObject() +def End(builder): + return StreamEnd(builder) \ No newline at end of file diff --git a/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/UpdateType.py b/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/UpdateType.py new file mode 100644 index 0000000..b349dbb --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/UpdateType.py @@ -0,0 +1,9 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class UpdateType(object): + ADD = 0 + REMOVE = 1 + REMOVEALL = 2 + REPLACE = 3 diff --git a/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/__init__.py b/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/fc00_ConfigUpdate.py b/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/fc00_ConfigUpdate.py new file mode 100644 index 0000000..dbc6f6c --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/forwarder_config_update_fc00/fc00_ConfigUpdate.py @@ -0,0 +1,77 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# 
namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class fc00_ConfigUpdate(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = fc00_ConfigUpdate() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsfc00_ConfigUpdate(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def fc00_ConfigUpdateBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x66\x63\x30\x30", size_prefixed=size_prefixed) + + # fc00_ConfigUpdate + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # fc00_ConfigUpdate + def ConfigChange(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) + return 0 + + # fc00_ConfigUpdate + def Streams(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + from .Stream import Stream + obj = Stream() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # fc00_ConfigUpdate + def StreamsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # fc00_ConfigUpdate + def StreamsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + +def fc00_ConfigUpdateStart(builder): builder.StartObject(2) +def Start(builder): + return fc00_ConfigUpdateStart(builder) +def fc00_ConfigUpdateAddConfigChange(builder, configChange): builder.PrependUint16Slot(0, configChange, 0) +def AddConfigChange(builder, 
configChange): + return fc00_ConfigUpdateAddConfigChange(builder, configChange) +def fc00_ConfigUpdateAddStreams(builder, streams): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(streams), 0) +def AddStreams(builder, streams): + return fc00_ConfigUpdateAddStreams(builder, streams) +def fc00_ConfigUpdateStartStreamsVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def StartStreamsVector(builder, numElems): + return fc00_ConfigUpdateStartStreamsVector(builder, numElems) +def fc00_ConfigUpdateEnd(builder): return builder.EndObject() +def End(builder): + return fc00_ConfigUpdateEnd(builder) \ No newline at end of file diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs01/Array.py b/python/src/streaming_data_types/fbschemas/histogram_hs01/Array.py new file mode 100644 index 0000000..c1ac39d --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/histogram_hs01/Array.py @@ -0,0 +1,11 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + + +class Array(object): + NONE = 0 + ArrayInt = 1 + ArrayLong = 2 + ArrayDouble = 3 + ArrayFloat = 4 diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py b/python/src/streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py new file mode 100644 index 0000000..47a10d5 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/histogram_hs01/ArrayDouble.py @@ -0,0 +1,77 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class ArrayDouble(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayDouble(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayDouble() + x.Init(buf, n + offset) + return x + + @classmethod + def ArrayDoubleBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + 
return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed + ) + + # ArrayDouble + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayDouble + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Float64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # ArrayDouble + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float64Flags, o) + return 0 + + # ArrayDouble + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayDouble + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def ArrayDoubleStart(builder): + builder.StartObject(1) + + +def ArrayDoubleAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayDoubleStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def ArrayDoubleEnd(builder): + return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py b/python/src/streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py new file mode 100644 index 0000000..c1e79de --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/histogram_hs01/ArrayFloat.py @@ -0,0 +1,77 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class ArrayFloat(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayFloat(cls, buf, 
offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayFloat() + x.Init(buf, n + offset) + return x + + @classmethod + def ArrayFloatBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed + ) + + # ArrayFloat + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayFloat + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Float32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # ArrayFloat + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # ArrayFloat + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayFloat + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def ArrayFloatStart(builder): + builder.StartObject(1) + + +def ArrayFloatAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayFloatStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def ArrayFloatEnd(builder): + return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py b/python/src/streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py new file mode 100644 index 0000000..9ff65ae --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/histogram_hs01/ArrayInt.py @@ -0,0 +1,77 @@ +# automatically generated by the FlatBuffers compiler, do not modify + 
+# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class ArrayInt(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayInt(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayInt() + x.Init(buf, n + offset) + return x + + @classmethod + def ArrayIntBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed + ) + + # ArrayInt + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayInt + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # ArrayInt + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # ArrayInt + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayInt + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def ArrayIntStart(builder): + builder.StartObject(1) + + +def ArrayIntAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayIntStartValueVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def ArrayIntEnd(builder): + return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py b/python/src/streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py new file mode 100644 index 
0000000..9d96446 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/histogram_hs01/ArrayLong.py @@ -0,0 +1,77 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class ArrayLong(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsArrayLong(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayLong() + x.Init(buf, n + offset) + return x + + @classmethod + def ArrayLongBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed + ) + + # ArrayLong + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayLong + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int64Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8), + ) + return 0 + + # ArrayLong + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # ArrayLong + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ArrayLong + def ValueIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + +def ArrayLongStart(builder): + builder.StartObject(1) + + +def ArrayLongAddValue(builder, value): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0 + ) + + +def ArrayLongStartValueVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + + +def ArrayLongEnd(builder): + return 
builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py b/python/src/streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py new file mode 100644 index 0000000..9c5631d --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/histogram_hs01/DimensionMetaData.py @@ -0,0 +1,102 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class DimensionMetaData(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsDimensionMetaData(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = DimensionMetaData() + x.Init(buf, n + offset) + return x + + @classmethod + def DimensionMetaDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x68\x73\x30\x31", size_prefixed=size_prefixed + ) + + # DimensionMetaData + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # DimensionMetaData + def Length(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # DimensionMetaData + def Unit(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # DimensionMetaData + def Label(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # DimensionMetaData + def BinBoundariesType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # DimensionMetaData + def BinBoundaries(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + from flatbuffers.table import Table + + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + +def DimensionMetaDataStart(builder): + builder.StartObject(5) + + +def DimensionMetaDataAddLength(builder, length): + builder.PrependInt32Slot(0, length, 0) + + +def DimensionMetaDataAddUnit(builder, unit): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(unit), 0 + ) + + +def DimensionMetaDataAddLabel(builder, label): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(label), 0 + ) + + +def DimensionMetaDataAddBinBoundariesType(builder, binBoundariesType): + builder.PrependUint8Slot(3, binBoundariesType, 0) + + +def DimensionMetaDataAddBinBoundaries(builder, binBoundaries): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(binBoundaries), 0 + ) + + +def DimensionMetaDataEnd(builder): + return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py b/python/src/streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py new file mode 100644 index 0000000..78117ab --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/histogram_hs01/EventHistogram.py @@ -0,0 +1,257 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class EventHistogram(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsEventHistogram(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = EventHistogram() + x.Init(buf, n + offset) + return x + + @classmethod + def EventHistogramBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x68\x73\x30\x31", 
size_prefixed=size_prefixed + ) + + # EventHistogram + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # EventHistogram + def Source(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # EventHistogram + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # EventHistogram + def DimMetadata(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + from .DimensionMetaData import DimensionMetaData + + obj = DimensionMetaData() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # EventHistogram + def DimMetadataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # EventHistogram + def DimMetadataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # EventHistogram + def LastMetadataTimestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # EventHistogram + def CurrentShape(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # EventHistogram + def CurrentShapeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return 
self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # EventHistogram + def CurrentShapeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # EventHistogram + def CurrentShapeIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + return o == 0 + + # EventHistogram + def Offset(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # EventHistogram + def OffsetAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # EventHistogram + def OffsetLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # EventHistogram + def OffsetIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + # EventHistogram + def DataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # EventHistogram + def Data(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + from flatbuffers.table import Table + + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + # EventHistogram + def ErrorsType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # EventHistogram + def Errors(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) + if o != 0: + from flatbuffers.table import Table + + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + # EventHistogram + def Info(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def EventHistogramStart(builder): + builder.StartObject(11) + + +def EventHistogramAddSource(builder, source): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(source), 0 + ) + + +def EventHistogramAddTimestamp(builder, timestamp): + builder.PrependInt64Slot(1, timestamp, 0) + + +def EventHistogramAddDimMetadata(builder, dimMetadata): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(dimMetadata), 0 + ) + + +def EventHistogramStartDimMetadataVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def EventHistogramAddLastMetadataTimestamp(builder, lastMetadataTimestamp): + builder.PrependInt64Slot(3, lastMetadataTimestamp, 0) + + +def EventHistogramAddCurrentShape(builder, currentShape): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(currentShape), 0 + ) + + +def EventHistogramStartCurrentShapeVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def EventHistogramAddOffset(builder, offset): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(offset), 0 + ) + + +def EventHistogramStartOffsetVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def EventHistogramAddDataType(builder, dataType): + builder.PrependUint8Slot(6, dataType, 0) + + +def EventHistogramAddData(builder, data): + builder.PrependUOffsetTRelativeSlot( + 7, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0 + ) + + +def EventHistogramAddErrorsType(builder, errorsType): + 
builder.PrependUint8Slot(8, errorsType, 0) + + +def EventHistogramAddErrors(builder, errors): + builder.PrependUOffsetTRelativeSlot( + 9, flatbuffers.number_types.UOffsetTFlags.py_type(errors), 0 + ) + + +def EventHistogramAddInfo(builder, info): + builder.PrependUOffsetTRelativeSlot( + 10, flatbuffers.number_types.UOffsetTFlags.py_type(info), 0 + ) + + +def EventHistogramEnd(builder): + return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/histogram_hs01/__init__.py b/python/src/streaming_data_types/fbschemas/histogram_hs01/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/json_json/JsonData.py b/python/src/streaming_data_types/fbschemas/json_json/JsonData.py new file mode 100644 index 0000000..cb4d4c6 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/json_json/JsonData.py @@ -0,0 +1,67 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class JsonData(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = JsonData() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsJsonData(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + + @classmethod + def JsonDataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x6A\x73\x6F\x6E", size_prefixed=size_prefixed + ) + + # JsonData + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # JsonData + def Json(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def JsonDataStart(builder): + builder.StartObject(1) + + +def Start(builder): + JsonDataStart(builder) + + +def JsonDataAddJson(builder, json): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(json), 0 + ) + + +def AddJson(builder: flatbuffers.Builder, json: int): + JsonDataAddJson(builder, json) + + +def JsonDataEnd(builder): + return builder.EndObject() + + +def End(builder): + return JsonDataEnd(builder) diff --git a/python/src/streaming_data_types/fbschemas/json_json/__init__.py b/python/src/streaming_data_types/fbschemas/json_json/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py new file mode 100644 index 0000000..3efb547 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayByte.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class ArrayByte(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsArrayByte(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayByte() + x.Init(buf, n + offset) + return x + + # ArrayByte + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayByte + def Value(self, j): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # ArrayByte + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int8Flags, o) + return 0 + + # ArrayByte + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + +def ArrayByteStart(builder): builder.StartObject(1) +def ArrayByteAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayByteStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def ArrayByteEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py new file mode 100644 index 0000000..f92a478 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayDouble.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class ArrayDouble(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsArrayDouble(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayDouble() + x.Init(buf, n + offset) + return x + + # ArrayDouble + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayDouble + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 
+ + # ArrayDouble + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float64Flags, o) + return 0 + + # ArrayDouble + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + +def ArrayDoubleStart(builder): builder.StartObject(1) +def ArrayDoubleAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayDoubleStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def ArrayDoubleEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py new file mode 100644 index 0000000..584197d --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayFloat.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class ArrayFloat(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsArrayFloat(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayFloat() + x.Init(buf, n + offset) + return x + + # ArrayFloat + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayFloat + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # ArrayFloat + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # 
ArrayFloat + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + +def ArrayFloatStart(builder): builder.StartObject(1) +def ArrayFloatAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayFloatStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def ArrayFloatEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py new file mode 100644 index 0000000..58b658a --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayInt.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class ArrayInt(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsArrayInt(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayInt() + x.Init(buf, n + offset) + return x + + # ArrayInt + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayInt + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # ArrayInt + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # ArrayInt + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + +def ArrayIntStart(builder): builder.StartObject(1) +def ArrayIntAddValue(builder, value): 
builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayIntStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def ArrayIntEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py new file mode 100644 index 0000000..bad5528 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayLong.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class ArrayLong(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsArrayLong(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayLong() + x.Init(buf, n + offset) + return x + + # ArrayLong + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayLong + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # ArrayLong + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # ArrayLong + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + +def ArrayLongStart(builder): builder.StartObject(1) +def ArrayLongAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayLongStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def ArrayLongEnd(builder): return builder.EndObject() diff 
--git a/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py new file mode 100644 index 0000000..424e613 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayShort.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class ArrayShort(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsArrayShort(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayShort() + x.Init(buf, n + offset) + return x + + # ArrayShort + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayShort + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) + return 0 + + # ArrayShort + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int16Flags, o) + return 0 + + # ArrayShort + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + +def ArrayShortStart(builder): builder.StartObject(1) +def ArrayShortAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayShortStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) +def ArrayShortEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py new file mode 100644 index 0000000..a5c9ca2 --- /dev/null +++ 
b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayUByte.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class ArrayUByte(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsArrayUByte(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayUByte() + x.Init(buf, n + offset) + return x + + # ArrayUByte + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayUByte + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # ArrayUByte + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # ArrayUByte + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + +def ArrayUByteStart(builder): builder.StartObject(1) +def ArrayUByteAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayUByteStartValueVector(builder, numElems): return builder.StartVector(1, numElems, 1) +def ArrayUByteEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py new file mode 100644 index 0000000..63fc129 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayUInt.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class ArrayUInt(object): + __slots__ = 
['_tab'] + + @classmethod + def GetRootAsArrayUInt(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayUInt() + x.Init(buf, n + offset) + return x + + # ArrayUInt + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayUInt + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # ArrayUInt + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # ArrayUInt + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + +def ArrayUIntStart(builder): builder.StartObject(1) +def ArrayUIntAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayUIntStartValueVector(builder, numElems): return builder.StartVector(4, numElems, 4) +def ArrayUIntEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py new file mode 100644 index 0000000..56f4c46 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayULong.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class ArrayULong(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsArrayULong(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayULong() + x.Init(buf, n + offset) + return x + + # ArrayULong + def Init(self, buf, pos): + 
self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayULong + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # ArrayULong + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o) + return 0 + + # ArrayULong + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + +def ArrayULongStart(builder): builder.StartObject(1) +def ArrayULongAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayULongStartValueVector(builder, numElems): return builder.StartVector(8, numElems, 8) +def ArrayULongEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py new file mode 100644 index 0000000..07b928e --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/ArrayUShort.py @@ -0,0 +1,46 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class ArrayUShort(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsArrayUShort(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ArrayUShort() + x.Init(buf, n + offset) + return x + + # ArrayUShort + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ArrayUShort + def Value(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return 
self._tab.Get(flatbuffers.number_types.Uint16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) + return 0 + + # ArrayUShort + def ValueAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint16Flags, o) + return 0 + + # ArrayUShort + def ValueLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + +def ArrayUShortStart(builder): builder.StartObject(1) +def ArrayUShortAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def ArrayUShortStartValueVector(builder, numElems): return builder.StartVector(2, numElems, 2) +def ArrayUShortEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/Byte.py b/python/src/streaming_data_types/fbschemas/logdata_f144/Byte.py new file mode 100644 index 0000000..03b1f1d --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/Byte.py @@ -0,0 +1,30 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class Byte(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsByte(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Byte() + x.Init(buf, n + offset) + return x + + # Byte + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Byte + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def ByteStart(builder): builder.StartObject(1) +def ByteAddValue(builder, value): builder.PrependInt8Slot(0, value, 0) +def ByteEnd(builder): return builder.EndObject() diff --git 
a/python/src/streaming_data_types/fbschemas/logdata_f144/Double.py b/python/src/streaming_data_types/fbschemas/logdata_f144/Double.py new file mode 100644 index 0000000..912557e --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/Double.py @@ -0,0 +1,30 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class Double(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsDouble(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Double() + x.Init(buf, n + offset) + return x + + # Double + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Double + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float64Flags, o + self._tab.Pos) + return 0.0 + +def DoubleStart(builder): builder.StartObject(1) +def DoubleAddValue(builder, value): builder.PrependFloat64Slot(0, value, 0.0) +def DoubleEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/Float.py b/python/src/streaming_data_types/fbschemas/logdata_f144/Float.py new file mode 100644 index 0000000..e99f662 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/Float.py @@ -0,0 +1,30 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class Float(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsFloat(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Float() + x.Init(buf, n + offset) + return x + + # Float + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Float + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + 
self._tab.Pos) + return 0.0 + +def FloatStart(builder): builder.StartObject(1) +def FloatAddValue(builder, value): builder.PrependFloat32Slot(0, value, 0.0) +def FloatEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/Int.py b/python/src/streaming_data_types/fbschemas/logdata_f144/Int.py new file mode 100644 index 0000000..ae7a4f3 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/Int.py @@ -0,0 +1,30 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class Int(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsInt(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Int() + x.Init(buf, n + offset) + return x + + # Int + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Int + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def IntStart(builder): builder.StartObject(1) +def IntAddValue(builder, value): builder.PrependInt32Slot(0, value, 0) +def IntEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/Long.py b/python/src/streaming_data_types/fbschemas/logdata_f144/Long.py new file mode 100644 index 0000000..3708376 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/Long.py @@ -0,0 +1,30 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class Long(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsLong(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Long() + x.Init(buf, n + offset) + return x + + # Long + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Long + def 
Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + +def LongStart(builder): builder.StartObject(1) +def LongAddValue(builder, value): builder.PrependInt64Slot(0, value, 0) +def LongEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/Short.py b/python/src/streaming_data_types/fbschemas/logdata_f144/Short.py new file mode 100644 index 0000000..116cfc2 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/Short.py @@ -0,0 +1,30 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class Short(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsShort(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Short() + x.Init(buf, n + offset) + return x + + # Short + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Short + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos) + return 0 + +def ShortStart(builder): builder.StartObject(1) +def ShortAddValue(builder, value): builder.PrependInt16Slot(0, value, 0) +def ShortEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/UByte.py b/python/src/streaming_data_types/fbschemas/logdata_f144/UByte.py new file mode 100644 index 0000000..483b5f9 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/UByte.py @@ -0,0 +1,30 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class UByte(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsUByte(cls, buf, offset): + n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UByte() + x.Init(buf, n + offset) + return x + + # UByte + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UByte + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + +def UByteStart(builder): builder.StartObject(1) +def UByteAddValue(builder, value): builder.PrependUint8Slot(0, value, 0) +def UByteEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/UInt.py b/python/src/streaming_data_types/fbschemas/logdata_f144/UInt.py new file mode 100644 index 0000000..91be34a --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/UInt.py @@ -0,0 +1,30 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class UInt(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsUInt(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UInt() + x.Init(buf, n + offset) + return x + + # UInt + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UInt + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 + +def UIntStart(builder): builder.StartObject(1) +def UIntAddValue(builder, value): builder.PrependUint32Slot(0, value, 0) +def UIntEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/ULong.py b/python/src/streaming_data_types/fbschemas/logdata_f144/ULong.py new file mode 100644 index 0000000..3db8717 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/ULong.py @@ -0,0 +1,30 @@ +# automatically generated by the 
FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class ULong(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsULong(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ULong() + x.Init(buf, n + offset) + return x + + # ULong + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ULong + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 + +def ULongStart(builder): builder.StartObject(1) +def ULongAddValue(builder, value): builder.PrependUint64Slot(0, value, 0) +def ULongEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/UShort.py b/python/src/streaming_data_types/fbschemas/logdata_f144/UShort.py new file mode 100644 index 0000000..92b6ab8 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/UShort.py @@ -0,0 +1,30 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class UShort(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsUShort(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = UShort() + x.Init(buf, n + offset) + return x + + # UShort + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # UShort + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint16Flags, o + self._tab.Pos) + return 0 + +def UShortStart(builder): builder.StartObject(1) +def UShortAddValue(builder, value): builder.PrependUint16Slot(0, value, 0) +def UShortEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/Value.py 
b/python/src/streaming_data_types/fbschemas/logdata_f144/Value.py new file mode 100644 index 0000000..ae593ef --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/Value.py @@ -0,0 +1,27 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +class Value(object): + NONE = 0 + Byte = 1 + UByte = 2 + Short = 3 + UShort = 4 + Int = 5 + UInt = 6 + Long = 7 + ULong = 8 + Float = 9 + Double = 10 + ArrayByte = 11 + ArrayUByte = 12 + ArrayShort = 13 + ArrayUShort = 14 + ArrayInt = 15 + ArrayUInt = 16 + ArrayLong = 17 + ArrayULong = 18 + ArrayFloat = 19 + ArrayDouble = 20 + diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/__init__.py b/python/src/streaming_data_types/fbschemas/logdata_f144/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/logdata_f144/f144_LogData.py b/python/src/streaming_data_types/fbschemas/logdata_f144/f144_LogData.py new file mode 100644 index 0000000..6af1e50 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/logdata_f144/f144_LogData.py @@ -0,0 +1,57 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + +class f144_LogData(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAsf144_LogData(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = f144_LogData() + x.Init(buf, n + offset) + return x + + # f144_LogData + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # f144_LogData + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # f144_LogData + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # 
f144_LogData + def ValueType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # f144_LogData + def Value(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + +def f144_LogDataStart(builder): builder.StartObject(4) +def f144_LogDataAddSourceName(builder, sourceName): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) +def f144_LogDataAddTimestamp(builder, timestamp): builder.PrependInt64Slot(1, timestamp, 0) +def f144_LogDataAddValueType(builder, valueType): builder.PrependUint8Slot(2, valueType, 0) +def f144_LogDataAddValue(builder, value): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0) +def f144_LogDataEnd(builder): return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/run_start_pl72/RunStart.py b/python/src/streaming_data_types/fbschemas/run_start_pl72/RunStart.py new file mode 100644 index 0000000..d6e86fd --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/run_start_pl72/RunStart.py @@ -0,0 +1,211 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class RunStart(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsRunStart(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = RunStart() + x.Init(buf, n + offset) + return x + + @classmethod + def RunStartBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x70\x6C\x37\x32", size_prefixed=size_prefixed + ) + + # RunStart + def 
Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # RunStart + def StartTime(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + # RunStart + def StopTime(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + # RunStart + def RunName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def InstrumentName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def NexusStructure(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def JobId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def Broker(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def ServiceId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def Filename(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def NPeriods(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, o + 
self._tab.Pos + ) + return 1 + + # RunStart + def DetectorSpectrumMap(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) + if o != 0: + x = self._tab.Indirect(o + self._tab.Pos) + from .SpectraDetectorMapping import SpectraDetectorMapping + + obj = SpectraDetectorMapping() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # RunStart + def Metadata(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStart + def ControlTopic(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def RunStartStart(builder): + builder.StartObject(13) + + +def RunStartAddStartTime(builder, startTime): + builder.PrependUint64Slot(0, startTime, 0) + + +def RunStartAddStopTime(builder, stopTime): + builder.PrependUint64Slot(1, stopTime, 0) + + +def RunStartAddRunName(builder, runName): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(runName), 0 + ) + + +def RunStartAddInstrumentName(builder, instrumentName): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(instrumentName), 0 + ) + + +def RunStartAddNexusStructure(builder, nexusStructure): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(nexusStructure), 0 + ) + + +def RunStartAddJobId(builder, jobId): + builder.PrependUOffsetTRelativeSlot( + 5, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0 + ) + + +def RunStartAddBroker(builder, broker): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(broker), 0 + ) + + +def RunStartAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot( + 7, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def RunStartAddFilename(builder, filename): + 
builder.PrependUOffsetTRelativeSlot( + 8, flatbuffers.number_types.UOffsetTFlags.py_type(filename), 0 + ) + + +def RunStartAddNPeriods(builder, nPeriods): + builder.PrependUint32Slot(9, nPeriods, 1) + + +def RunStartAddDetectorSpectrumMap(builder, detectorSpectrumMap): + builder.PrependUOffsetTRelativeSlot( + 10, flatbuffers.number_types.UOffsetTFlags.py_type(detectorSpectrumMap), 0 + ) + + +def RunStartAddMetadata(builder, metadata): + builder.PrependUOffsetTRelativeSlot( + 11, flatbuffers.number_types.UOffsetTFlags.py_type(metadata), 0 + ) + + +def RunStartAddControlTopic(builder, controlTopic): + builder.PrependUOffsetTRelativeSlot( + 12, flatbuffers.number_types.UOffsetTFlags.py_type(controlTopic), 0 + ) + + +def RunStartEnd(builder): + return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/run_start_pl72/SpectraDetectorMapping.py b/python/src/streaming_data_types/fbschemas/run_start_pl72/SpectraDetectorMapping.py new file mode 100644 index 0000000..37a2ae6 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/run_start_pl72/SpectraDetectorMapping.py @@ -0,0 +1,109 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers + + +class SpectraDetectorMapping(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsSpectraDetectorMapping(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SpectraDetectorMapping() + x.Init(buf, n + offset) + return x + + # SpectraDetectorMapping + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SpectraDetectorMapping + def Spectrum(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # SpectraDetectorMapping + def SpectrumAsNumpy(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # SpectraDetectorMapping + def SpectrumLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SpectraDetectorMapping + def DetectorId(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get( + flatbuffers.number_types.Int32Flags, + a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4), + ) + return 0 + + # SpectraDetectorMapping + def DetectorIdAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # SpectraDetectorMapping + def DetectorIdLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SpectraDetectorMapping + def NSpectra(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + +def SpectraDetectorMappingStart(builder): + builder.StartObject(3) + + +def SpectraDetectorMappingAddSpectrum(builder, spectrum): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(spectrum), 0 + ) + + +def SpectraDetectorMappingStartSpectrumVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def SpectraDetectorMappingAddDetectorId(builder, detectorId): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(detectorId), 0 + ) + + +def SpectraDetectorMappingStartDetectorIdVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + + +def 
SpectraDetectorMappingAddNSpectra(builder, nSpectra): + builder.PrependInt32Slot(2, nSpectra, 0) + + +def SpectraDetectorMappingEnd(builder): + return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/run_start_pl72/__init__.py b/python/src/streaming_data_types/fbschemas/run_start_pl72/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py b/python/src/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py new file mode 100644 index 0000000..55faf97 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/run_stop_6s4t/RunStop.py @@ -0,0 +1,102 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class RunStop(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsRunStop(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = RunStop() + x.Init(buf, n + offset) + return x + + @classmethod + def RunStopBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x36\x73\x34\x74", size_prefixed=size_prefixed + ) + + # RunStop + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # RunStop + def StopTime(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint64Flags, o + self._tab.Pos + ) + return 0 + + # RunStop + def RunName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStop + def JobId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStop + def ServiceId(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # RunStop + def CommandId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def RunStopStart(builder): + builder.StartObject(5) + + +def RunStopAddStopTime(builder, stopTime): + builder.PrependUint64Slot(0, stopTime, 0) + + +def RunStopAddRunName(builder, runName): + builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(runName), 0 + ) + + +def RunStopAddJobId(builder, jobId): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(jobId), 0 + ) + + +def RunStopAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def RunStopAddCommandId(builder, commandId): + builder.PrependUOffsetTRelativeSlot( + 4, flatbuffers.number_types.UOffsetTFlags.py_type(commandId), 0 + ) + + +def RunStopEnd(builder): + return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/run_stop_6s4t/__init__.py b/python/src/streaming_data_types/fbschemas/run_stop_6s4t/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/status_x5f2/Status.py b/python/src/streaming_data_types/fbschemas/status_x5f2/Status.py new file mode 100644 index 0000000..24eee0f --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/status_x5f2/Status.py @@ -0,0 +1,128 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy + +np = import_numpy() + + +class Status(object): + __slots__ = ["_tab"] + + @classmethod + def GetRootAsStatus(cls, buf, offset): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = 
Status() + x.Init(buf, n + offset) + return x + + @classmethod + def StatusBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier( + buf, offset, b"\x78\x35\x66\x32", size_prefixed=size_prefixed + ) + + # Status + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Status + def SoftwareName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Status + def SoftwareVersion(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Status + def ServiceId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Status + def HostName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Status + def ProcessId(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, o + self._tab.Pos + ) + return 0 + + # Status + def UpdateInterval(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get( + flatbuffers.number_types.Uint32Flags, o + self._tab.Pos + ) + return 0 + + # Status + def StatusJson(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + +def StatusStart(builder): + builder.StartObject(7) + + +def StatusAddSoftwareName(builder, softwareName): + builder.PrependUOffsetTRelativeSlot( + 0, flatbuffers.number_types.UOffsetTFlags.py_type(softwareName), 0 + ) + + +def StatusAddSoftwareVersion(builder, softwareVersion): + 
builder.PrependUOffsetTRelativeSlot( + 1, flatbuffers.number_types.UOffsetTFlags.py_type(softwareVersion), 0 + ) + + +def StatusAddServiceId(builder, serviceId): + builder.PrependUOffsetTRelativeSlot( + 2, flatbuffers.number_types.UOffsetTFlags.py_type(serviceId), 0 + ) + + +def StatusAddHostName(builder, hostName): + builder.PrependUOffsetTRelativeSlot( + 3, flatbuffers.number_types.UOffsetTFlags.py_type(hostName), 0 + ) + + +def StatusAddProcessId(builder, processId): + builder.PrependUint32Slot(4, processId, 0) + + +def StatusAddUpdateInterval(builder, updateInterval): + builder.PrependUint32Slot(5, updateInterval, 0) + + +def StatusAddStatusJson(builder, statusJson): + builder.PrependUOffsetTRelativeSlot( + 6, flatbuffers.number_types.UOffsetTFlags.py_type(statusJson), 0 + ) + + +def StatusEnd(builder): + return builder.EndObject() diff --git a/python/src/streaming_data_types/fbschemas/status_x5f2/__init__.py b/python/src/streaming_data_types/fbschemas/status_x5f2/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/fbschemas/units_un00/Units.py b/python/src/streaming_data_types/fbschemas/units_un00/Units.py new file mode 100644 index 0000000..c2316d0 --- /dev/null +++ b/python/src/streaming_data_types/fbschemas/units_un00/Units.py @@ -0,0 +1,80 @@ +# automatically generated by the FlatBuffers compiler, do not modify + +# namespace: + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +class Units(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Units() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUnits(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def UnitsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x75\x6E\x30\x30", size_prefixed=size_prefixed) + + # Units + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Units + def SourceName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Units + def Timestamp(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # Units + def Units(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + +def UnitsStart(builder): + builder.StartObject(3) + +def Start(builder): + UnitsStart(builder) + +def UnitsAddSourceName(builder, sourceName): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0) + +def AddSourceName(builder, sourceName): + UnitsAddSourceName(builder, sourceName) + +def UnitsAddTimestamp(builder, timestamp): + builder.PrependInt64Slot(1, timestamp, 0) + +def AddTimestamp(builder, timestamp): + UnitsAddTimestamp(builder, timestamp) + +def UnitsAddUnits(builder, units): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(units), 0) + +def AddUnits(builder, units): + UnitsAddUnits(builder, units) + +def UnitsEnd(builder): + return builder.EndObject() + +def End(builder): + return UnitsEnd(builder) diff --git a/python/src/streaming_data_types/fbschemas/units_un00/__init__.py b/python/src/streaming_data_types/fbschemas/units_un00/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/src/streaming_data_types/finished_writing_wrdn.py 
b/python/src/streaming_data_types/finished_writing_wrdn.py new file mode 100644 index 0000000..e17737c --- /dev/null +++ b/python/src/streaming_data_types/finished_writing_wrdn.py @@ -0,0 +1,84 @@ +from typing import NamedTuple, Optional, Union + +import flatbuffers + +from streaming_data_types.fbschemas.finished_writing_wrdn import FinishedWriting +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"wrdn" + + +def serialise_wrdn( + service_id: str, + job_id: str, + error_encountered: bool, + file_name: str, + metadata: Optional[str] = None, + message: Optional[str] = None, +) -> bytes: + builder = flatbuffers.Builder(500) + builder.ForceDefaults(True) + + service_id_offset = builder.CreateString(service_id) + job_id_offset = builder.CreateString(job_id) + file_name_offset = builder.CreateString(file_name) + if metadata is not None: + metadata_offset = builder.CreateString(metadata) + if message is not None: + message_offset = builder.CreateString(message) + + # Build the actual buffer + FinishedWriting.FinishedWritingStart(builder) + FinishedWriting.FinishedWritingAddServiceId(builder, service_id_offset) + FinishedWriting.FinishedWritingAddJobId(builder, job_id_offset) + FinishedWriting.FinishedWritingAddErrorEncountered(builder, error_encountered) + FinishedWriting.FinishedWritingAddFileName(builder, file_name_offset) + if metadata: + FinishedWriting.FinishedWritingAddMetadata(builder, metadata_offset) + if message: + FinishedWriting.FinishedWritingAddMessage(builder, message_offset) + + finished_writing_message = FinishedWriting.FinishedWritingEnd(builder) + + builder.Finish(finished_writing_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +WritingFinished = NamedTuple( + "FinishedWriting", + ( + ("service_id", str), + ("job_id", str), + ("error_encountered", bool), + ("file_name", str), + ("metadata", Optional[str]), + ("message", Optional[str]), + ), +) + + +def deserialise_wrdn(buffer: 
Union[bytearray, bytes]) -> WritingFinished:
+    check_schema_identifier(buffer, FILE_IDENTIFIER)
+
+    finished_writing = FinishedWriting.FinishedWriting.GetRootAsFinishedWriting(
+        buffer, 0
+    )
+    service_id = finished_writing.ServiceId()
+    job_id = finished_writing.JobId()
+    has_error = finished_writing.ErrorEncountered()
+    file_name = finished_writing.FileName() if finished_writing.FileName() else b""
+    metadata = (
+        finished_writing.Metadata().decode() if finished_writing.Metadata() else None
+    )
+    message = (
+        finished_writing.Message().decode() if finished_writing.Message() else None
+    )
+
+    return WritingFinished(
+        service_id=service_id.decode(),
+        job_id=job_id.decode(),
+        error_encountered=has_error,
+        file_name=file_name.decode(),
+        metadata=metadata,
+        message=message,
+    )
diff --git a/python/src/streaming_data_types/forwarder_config_update_fc00.py b/python/src/streaming_data_types/forwarder_config_update_fc00.py
new file mode 100644
index 0000000..2c2d355
--- /dev/null
+++ b/python/src/streaming_data_types/forwarder_config_update_fc00.py
@@ -0,0 +1,125 @@
+from collections import namedtuple
+from typing import List, Union
+
+import flatbuffers
+from flatbuffers.packer import struct as flatbuffer_struct
+
+from streaming_data_types.fbschemas.forwarder_config_update_fc00 import (
+    Protocol,
+    Stream,
+    UpdateType,
+    fc00_ConfigUpdate,
+)
+from streaming_data_types.utils import check_schema_identifier
+
+FILE_IDENTIFIER = b"fc00"
+
+ConfigurationUpdate = namedtuple("ConfigurationUpdate", ("config_change", "streams"))
+
+StreamInfo = namedtuple(
+    "StreamInfo", ("channel", "schema", "topic", "protocol", "periodic")
+)
+
+
+def deserialise_fc00(buffer: Union[bytearray, bytes]) -> ConfigurationUpdate:
+    """
+    Deserialise FlatBuffer fc00.
+
+    :param buffer: The FlatBuffers buffer.
+    :return: The deserialised data.
+ """ + check_schema_identifier(buffer, FILE_IDENTIFIER) + + config_message = fc00_ConfigUpdate.fc00_ConfigUpdate.GetRootAsfc00_ConfigUpdate( + buffer, 0 + ) + + streams = [] + try: + for i in range(config_message.StreamsLength()): + stream_message = config_message.Streams(i) + streams.append( + StreamInfo( + ( + stream_message.Channel().decode("utf-8") + if stream_message.Channel() + else "" + ), + ( + stream_message.Schema().decode("utf-8") + if stream_message.Schema() + else "" + ), + ( + stream_message.Topic().decode("utf-8") + if stream_message.Topic() + else "" + ), + stream_message.Protocol(), + stream_message.Periodic() if stream_message.Periodic() else 0, + ) + ) + except flatbuffer_struct.error: + pass # No streams in buffer + + return ConfigurationUpdate(config_message.ConfigChange(), streams) + + +def serialise_stream( + builder: flatbuffers.Builder, + protocol: Protocol, + channel_offset: int, + schema_offset: int, + topic_offset: int, + periodic_offset: int, +) -> int: + Stream.StreamStart(builder) + Stream.StreamAddProtocol(builder, protocol) + Stream.StreamAddTopic(builder, topic_offset) + Stream.StreamAddSchema(builder, schema_offset) + Stream.StreamAddChannel(builder, channel_offset) + Stream.StreamAddPeriodic(builder, periodic_offset) + return Stream.StreamEnd(builder) + + +def serialise_fc00(config_change: UpdateType, streams: List[StreamInfo]) -> bytes: + """ + Serialise config update message as an fc00 FlatBuffers message. 
+ + :param config_change: + :param streams: channel, schema and output topic configurations + :return: + """ + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + + if streams: + # We have to use multiple loops/list comprehensions here because we cannot create strings after we have + # called StreamStart and cannot create streams after we have called StartVector + stream_field_offsets = [ + ( + builder.CreateString(stream.channel), + builder.CreateString(stream.schema), + builder.CreateString(stream.topic), + ) + for stream in streams + ] + stream_offsets = [ + serialise_stream(builder, stream.protocol, *stream_fields, stream.periodic) + for stream, stream_fields in zip(streams, stream_field_offsets) + ] + + fc00_ConfigUpdate.fc00_ConfigUpdateStartStreamsVector(builder, len(streams)) + for stream_offset in stream_offsets: + builder.PrependUOffsetTRelative(stream_offset) + streams_offset = builder.EndVector() + + # Build the actual buffer + fc00_ConfigUpdate.fc00_ConfigUpdateStart(builder) + if streams: + fc00_ConfigUpdate.fc00_ConfigUpdateAddStreams(builder, streams_offset) + fc00_ConfigUpdate.fc00_ConfigUpdateAddConfigChange(builder, config_change) + data = fc00_ConfigUpdate.fc00_ConfigUpdateEnd(builder) + + builder.Finish(data, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) diff --git a/python/src/streaming_data_types/histogram_hs01.py b/python/src/streaming_data_types/histogram_hs01.py new file mode 100644 index 0000000..160585f --- /dev/null +++ b/python/src/streaming_data_types/histogram_hs01.py @@ -0,0 +1,231 @@ +import flatbuffers +import numpy + +import streaming_data_types.fbschemas.histogram_hs01.ArrayDouble as ArrayDouble +import streaming_data_types.fbschemas.histogram_hs01.ArrayFloat as ArrayFloat +import streaming_data_types.fbschemas.histogram_hs01.ArrayInt as ArrayInt +import streaming_data_types.fbschemas.histogram_hs01.ArrayLong as ArrayLong +import streaming_data_types.fbschemas.histogram_hs01.DimensionMetaData 
as DimensionMetaData
+import streaming_data_types.fbschemas.histogram_hs01.EventHistogram as EventHistogram
+from streaming_data_types.fbschemas.histogram_hs01.Array import Array
+from streaming_data_types.utils import check_schema_identifier
+
+FILE_IDENTIFIER = b"hs01"
+
+
+_array_for_type = {
+    Array.ArrayInt: ArrayInt.ArrayInt(),
+    Array.ArrayLong: ArrayLong.ArrayLong(),
+    Array.ArrayFloat: ArrayFloat.ArrayFloat(),
+}
+
+
+def _create_array_object_for_type(array_type):
+    return _array_for_type.get(array_type, ArrayDouble.ArrayDouble())
+
+
+def deserialise_hs01(buffer):
+    """
+    Deserialise flatbuffer hs01 into a histogram.
+
+    :param buffer:
+    :return: dict of histogram information
+    """
+    check_schema_identifier(buffer, FILE_IDENTIFIER)
+    event_hist = EventHistogram.EventHistogram.GetRootAsEventHistogram(buffer, 0)
+
+    dims = []
+    for i in range(event_hist.DimMetadataLength()):
+        bins_fb = _create_array_object_for_type(
+            event_hist.DimMetadata(i).BinBoundariesType()
+        )
+
+        # Get bins
+        bins_offset = event_hist.DimMetadata(i).BinBoundaries()
+        bins_fb.Init(bins_offset.Bytes, bins_offset.Pos)
+        bin_boundaries = bins_fb.ValueAsNumpy()
+
+        hist_info = {
+            "length": event_hist.DimMetadata(i).Length(),
+            "bin_boundaries": bin_boundaries,
+            "unit": event_hist.DimMetadata(i).Unit().decode("utf-8")
+            if event_hist.DimMetadata(i).Unit()
+            else "",
+            "label": event_hist.DimMetadata(i).Label().decode("utf-8")
+            if event_hist.DimMetadata(i).Label()
+            else "",
+        }
+        dims.append(hist_info)
+
+    metadata_timestamp = event_hist.LastMetadataTimestamp()
+
+    data_fb = _create_array_object_for_type(event_hist.DataType())
+    data_offset = event_hist.Data()
+    data_fb.Init(data_offset.Bytes, data_offset.Pos)
+    shape = event_hist.CurrentShapeAsNumpy().tolist()
+    data = data_fb.ValueAsNumpy().reshape(shape)
+
+    # Get the errors
+    errors_offset = event_hist.Errors()
+    if errors_offset:
+        errors_fb = _create_array_object_for_type(event_hist.ErrorsType())
+
errors_fb.Init(errors_offset.Bytes, errors_offset.Pos) + errors = errors_fb.ValueAsNumpy().reshape(shape) + else: + errors = [] + + hist = { + "source": event_hist.Source().decode("utf-8") if event_hist.Source() else "", + "timestamp": event_hist.Timestamp(), + "current_shape": shape, + "dim_metadata": dims, + "data": data, + "errors": errors, + "last_metadata_timestamp": metadata_timestamp, + "info": event_hist.Info().decode("utf-8") if event_hist.Info() else "", + } + return hist + + +def _serialise_metadata(builder, length, edges, unit, label): + unit_offset = builder.CreateString(unit) + label_offset = builder.CreateString(label) + + bins_offset, bin_type = _serialise_array(builder, edges) + + DimensionMetaData.DimensionMetaDataStart(builder) + DimensionMetaData.DimensionMetaDataAddLength(builder, length) + DimensionMetaData.DimensionMetaDataAddBinBoundaries(builder, bins_offset) + DimensionMetaData.DimensionMetaDataAddBinBoundariesType(builder, bin_type) + DimensionMetaData.DimensionMetaDataAddLabel(builder, label_offset) + DimensionMetaData.DimensionMetaDataAddUnit(builder, unit_offset) + return DimensionMetaData.DimensionMetaDataEnd(builder) + + +def serialise_hs01(histogram): + """ + Serialise a histogram as an hs01 FlatBuffers message. + + If arrays are provided as numpy arrays with type np.int32, np.int64, np.float32 + or np.float64 then type is preserved in output buffer. + + :param histogram: A dictionary containing the histogram to serialise. 
+ """ + source_offset = None + info_offset = None + + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + if "source" in histogram: + source_offset = builder.CreateString(histogram["source"]) + if "info" in histogram: + info_offset = builder.CreateString(histogram["info"]) + + # Build shape array + shape_offset = builder.CreateNumpyVector( + numpy.array(histogram["current_shape"]).astype(numpy.int32) + ) + + # Build dimensions metadata + metadata = [] + for meta in histogram["dim_metadata"]: + unit = "" if "unit" not in meta else meta["unit"] + label = "" if "label" not in meta else meta["label"] + metadata.append( + _serialise_metadata( + builder, meta["length"], meta["bin_boundaries"], unit, label + ) + ) + + rank = len(histogram["current_shape"]) + EventHistogram.EventHistogramStartDimMetadataVector(builder, rank) + # FlatBuffers builds arrays backwards + for m in reversed(metadata): + builder.PrependUOffsetTRelative(m) + metadata_vector = builder.EndVector() + + # Build the data + data_offset, data_type = _serialise_array(builder, histogram["data"]) + + errors_offset = None + if "errors" in histogram: + errors_offset, error_type = _serialise_array(builder, histogram["errors"]) + + # Build the actual buffer + EventHistogram.EventHistogramStart(builder) + if info_offset: + EventHistogram.EventHistogramAddInfo(builder, info_offset) + EventHistogram.EventHistogramAddData(builder, data_offset) + EventHistogram.EventHistogramAddCurrentShape(builder, shape_offset) + EventHistogram.EventHistogramAddDimMetadata(builder, metadata_vector) + EventHistogram.EventHistogramAddTimestamp(builder, histogram["timestamp"]) + if source_offset: + EventHistogram.EventHistogramAddSource(builder, source_offset) + EventHistogram.EventHistogramAddDataType(builder, data_type) + if errors_offset: + EventHistogram.EventHistogramAddErrors(builder, errors_offset) + EventHistogram.EventHistogramAddErrorsType(builder, error_type) + if "last_metadata_timestamp" in histogram: + 
EventHistogram.EventHistogramAddLastMetadataTimestamp( + builder, histogram["last_metadata_timestamp"] + ) + hist_message = EventHistogram.EventHistogramEnd(builder) + + builder.Finish(hist_message, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) + + +def _serialise_array(builder, data): + flattened_data = numpy.asarray(data).flatten() + + # Carefully preserve explicitly supported types + if numpy.issubdtype(flattened_data.dtype, numpy.int32): + return _serialise_int32(builder, flattened_data) + if numpy.issubdtype(flattened_data.dtype, numpy.int64): + return _serialise_int64(builder, flattened_data) + if numpy.issubdtype(flattened_data.dtype, numpy.float32): + return _serialise_float(builder, flattened_data) + if numpy.issubdtype(flattened_data.dtype, numpy.float64): + return _serialise_double(builder, flattened_data) + + # Otherwise if it looks like an int then use int64, or use double as last resort + if numpy.issubdtype(flattened_data.dtype, numpy.int64): + return _serialise_int64(builder, flattened_data) + + return _serialise_double(builder, flattened_data) + + +def _serialise_float(builder, flattened_data): + data_type = Array.ArrayFloat + data_vector = builder.CreateNumpyVector(flattened_data) + ArrayFloat.ArrayFloatStart(builder) + ArrayFloat.ArrayFloatAddValue(builder, data_vector) + data_offset = ArrayFloat.ArrayFloatEnd(builder) + return data_offset, data_type + + +def _serialise_double(builder, flattened_data): + data_type = Array.ArrayDouble + data_vector = builder.CreateNumpyVector(flattened_data) + ArrayDouble.ArrayDoubleStart(builder) + ArrayDouble.ArrayDoubleAddValue(builder, data_vector) + data_offset = ArrayDouble.ArrayDoubleEnd(builder) + return data_offset, data_type + + +def _serialise_int32(builder, flattened_data): + data_type = Array.ArrayInt + data_vector = builder.CreateNumpyVector(flattened_data) + ArrayInt.ArrayIntStart(builder) + ArrayInt.ArrayIntAddValue(builder, data_vector) + data_offset = 
ArrayInt.ArrayIntEnd(builder) + return data_offset, data_type + + +def _serialise_int64(builder, flattened_data): + data_type = Array.ArrayLong + data_vector = builder.CreateNumpyVector(flattened_data) + ArrayLong.ArrayLongStart(builder) + ArrayLong.ArrayLongAddValue(builder, data_vector) + data_offset = ArrayLong.ArrayLongEnd(builder) + return data_offset, data_type diff --git a/python/src/streaming_data_types/json_json.py b/python/src/streaming_data_types/json_json.py new file mode 100644 index 0000000..826b95c --- /dev/null +++ b/python/src/streaming_data_types/json_json.py @@ -0,0 +1,24 @@ +import flatbuffers + +import streaming_data_types.fbschemas.json_json.JsonData as JsonData +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"json" + + +def deserialise_json(buffer) -> str: + check_schema_identifier(buffer, FILE_IDENTIFIER) + return JsonData.JsonData.GetRootAsJsonData(buffer, 0).Json().decode("utf-8") + + +def serialise_json(json_str) -> bytes: + builder = flatbuffers.Builder(128) + + offset = builder.CreateString(json_str) + + JsonData.JsonDataStart(builder) + JsonData.AddJson(builder, offset) + result = JsonData.JsonDataEnd(builder) + + builder.Finish(result, file_identifier=FILE_IDENTIFIER) + return bytes(builder.Output()) diff --git a/python/src/streaming_data_types/logdata_f144.py b/python/src/streaming_data_types/logdata_f144.py new file mode 100644 index 0000000..4df2b3c --- /dev/null +++ b/python/src/streaming_data_types/logdata_f144.py @@ -0,0 +1,300 @@ +from collections import namedtuple +from typing import Any, NamedTuple, Union + +import flatbuffers +import numpy as np + +from streaming_data_types.fbschemas.logdata_f144 import f144_LogData +from streaming_data_types.fbschemas.logdata_f144.ArrayByte import ( + ArrayByte, + ArrayByteAddValue, + ArrayByteEnd, + ArrayByteStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayDouble import ( + ArrayDouble, + ArrayDoubleAddValue, + ArrayDoubleEnd, + 
ArrayDoubleStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayFloat import ( + ArrayFloat, + ArrayFloatAddValue, + ArrayFloatEnd, + ArrayFloatStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayInt import ( + ArrayInt, + ArrayIntAddValue, + ArrayIntEnd, + ArrayIntStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayLong import ( + ArrayLong, + ArrayLongAddValue, + ArrayLongEnd, + ArrayLongStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayShort import ( + ArrayShort, + ArrayShortAddValue, + ArrayShortEnd, + ArrayShortStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayUByte import ( + ArrayUByte, + ArrayUByteAddValue, + ArrayUByteEnd, + ArrayUByteStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayUInt import ( + ArrayUInt, + ArrayUIntAddValue, + ArrayUIntEnd, + ArrayUIntStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayULong import ( + ArrayULong, + ArrayULongAddValue, + ArrayULongEnd, + ArrayULongStart, +) +from streaming_data_types.fbschemas.logdata_f144.ArrayUShort import ( + ArrayUShort, + ArrayUShortAddValue, + ArrayUShortEnd, + ArrayUShortStart, +) +from streaming_data_types.fbschemas.logdata_f144.Byte import ( + Byte, + ByteAddValue, + ByteEnd, + ByteStart, +) +from streaming_data_types.fbschemas.logdata_f144.Double import ( + Double, + DoubleAddValue, + DoubleEnd, + DoubleStart, +) +from streaming_data_types.fbschemas.logdata_f144.Float import ( + Float, + FloatAddValue, + FloatEnd, + FloatStart, +) +from streaming_data_types.fbschemas.logdata_f144.Int import ( + Int, + IntAddValue, + IntEnd, + IntStart, +) +from streaming_data_types.fbschemas.logdata_f144.Long import ( + Long, + LongAddValue, + LongEnd, + LongStart, +) +from streaming_data_types.fbschemas.logdata_f144.Short import ( + Short, + ShortAddValue, + ShortEnd, + ShortStart, +) +from streaming_data_types.fbschemas.logdata_f144.UByte import ( + UByte, + UByteAddValue, + UByteEnd, + UByteStart, +) +from 
streaming_data_types.fbschemas.logdata_f144.UInt import ( + UInt, + UIntAddValue, + UIntEnd, + UIntStart, +) +from streaming_data_types.fbschemas.logdata_f144.ULong import ( + ULong, + ULongAddValue, + ULongEnd, + ULongStart, +) +from streaming_data_types.fbschemas.logdata_f144.UShort import ( + UShort, + UShortAddValue, + UShortEnd, + UShortStart, +) +from streaming_data_types.fbschemas.logdata_f144.Value import Value +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"f144" + +SerialiserFunctions = namedtuple( + "SerialiserFunctionMap", + ("StartFunction", "AddValueFunction", "EndFunction", "value_type_enum"), +) + + +def _serialise_value( + builder: flatbuffers.Builder, value: Any, function_map: SerialiserFunctions +): + function_map.StartFunction(builder) + function_map.AddValueFunction(builder, value) + return function_map.EndFunction(builder) + + +_map_scalar_type_to_serialiser = { + np.dtype("byte"): SerialiserFunctions(ByteStart, ByteAddValue, ByteEnd, Value.Byte), + np.dtype("ubyte"): SerialiserFunctions( + UByteStart, UByteAddValue, UByteEnd, Value.UByte + ), + np.dtype("int16"): SerialiserFunctions( + ShortStart, ShortAddValue, ShortEnd, Value.Short + ), + np.dtype("uint16"): SerialiserFunctions( + UShortStart, UShortAddValue, UShortEnd, Value.UShort + ), + np.dtype("int32"): SerialiserFunctions(IntStart, IntAddValue, IntEnd, Value.Int), + np.dtype("uint32"): SerialiserFunctions( + UIntStart, UIntAddValue, UIntEnd, Value.UInt + ), + np.dtype("int64"): SerialiserFunctions( + LongStart, LongAddValue, LongEnd, Value.Long + ), + np.dtype("uint64"): SerialiserFunctions( + ULongStart, ULongAddValue, ULongEnd, Value.ULong + ), + np.dtype("float32"): SerialiserFunctions( + FloatStart, FloatAddValue, FloatEnd, Value.Float + ), + np.dtype("float64"): SerialiserFunctions( + DoubleStart, DoubleAddValue, DoubleEnd, Value.Double + ), +} + +_map_array_type_to_serialiser = { + np.dtype("byte"): SerialiserFunctions( + ArrayByteStart, 
ArrayByteAddValue, ArrayByteEnd, Value.ArrayByte + ), + np.dtype("int16"): SerialiserFunctions( + ArrayShortStart, ArrayShortAddValue, ArrayShortEnd, Value.ArrayShort + ), + np.dtype("int32"): SerialiserFunctions( + ArrayIntStart, ArrayIntAddValue, ArrayIntEnd, Value.ArrayInt + ), + np.dtype("int64"): SerialiserFunctions( + ArrayLongStart, ArrayLongAddValue, ArrayLongEnd, Value.ArrayLong + ), + np.dtype("ubyte"): SerialiserFunctions( + ArrayUByteStart, ArrayUByteAddValue, ArrayUByteEnd, Value.ArrayUByte + ), + np.dtype("uint16"): SerialiserFunctions( + ArrayUShortStart, ArrayUShortAddValue, ArrayUShortEnd, Value.ArrayUShort + ), + np.dtype("uint32"): SerialiserFunctions( + ArrayUIntStart, ArrayUIntAddValue, ArrayUIntEnd, Value.ArrayUInt + ), + np.dtype("uint64"): SerialiserFunctions( + ArrayULongStart, ArrayULongAddValue, ArrayULongEnd, Value.ArrayULong + ), + np.dtype("float32"): SerialiserFunctions( + ArrayFloatStart, ArrayFloatAddValue, ArrayFloatEnd, Value.ArrayFloat + ), + np.dtype("float64"): SerialiserFunctions( + ArrayDoubleStart, ArrayDoubleAddValue, ArrayDoubleEnd, Value.ArrayDouble + ), +} + + +def serialise_f144( + source_name: str, + value: Any, + timestamp_unix_ns: int = 0, +) -> bytes: + builder = flatbuffers.Builder(1024) + source_name_offset = builder.CreateString(source_name) + value = np.array(value) + if value.ndim == 1: + try: + c_func_map = _map_array_type_to_serialiser[value.dtype] + value_offset = _serialise_value( + builder, builder.CreateNumpyVector(value), c_func_map + ) + value_type = c_func_map.value_type_enum + except KeyError: + raise NotImplementedError( + f"f144 flatbuffer does not support values of type {value.dtype}." + ) + elif value.ndim == 0: + try: + c_func_map = _map_scalar_type_to_serialiser[value.dtype] + value_offset = _serialise_value(builder, value, c_func_map) + value_type = c_func_map.value_type_enum + except KeyError: + raise NotImplementedError( + f"f144 flatbuffer does not support values of type {value.dtype}." 
def deserialise_f144(buffer: Union[bytearray, bytes]) -> ExtractedLogData:
    """
    Deserialise an f144 log-data FlatBuffers message.

    :param buffer: The raw FlatBuffers message.
    :return: An ExtractedLogData named tuple (source_name, value,
        timestamp_unix_ns).
    :raises WrongSchemaException: if the buffer is not an f144 message.
    """
    check_schema_identifier(buffer, FILE_IDENTIFIER)
    message = f144_LogData.f144_LogData.GetRootAsf144_LogData(buffer, 0)

    # Optional source name comes back as bytes or None.
    raw_source = message.SourceName()
    source = raw_source if raw_source else b""

    # Resolve the value union: look up the concrete table type from the
    # type enum, then initialise it over the union's bytes.
    union_table = message.Value()
    concrete = _map_fb_enum_to_type[message.ValueType()]()
    concrete.Init(union_table.Bytes, union_table.Pos)
    # Array tables expose ValueAsNumpy; scalar tables only have Value().
    extracted = (
        concrete.ValueAsNumpy()
        if hasattr(concrete, "ValueAsNumpy")
        else concrete.Value()
    )

    return ExtractedLogData(
        source_name=source.decode(),
        value=extracted,
        timestamp_unix_ns=message.Timestamp(),
    )
def serialise_pl72(
    job_id: str,
    filename: str,
    start_time: Union[int, datetime, None] = None,
    stop_time: Union[int, datetime, None] = None,
    run_name: str = "test_run",
    nexus_structure: str = "{}",
    service_id: str = "",
    instrument_name: str = "",
    broker: str = "",
    metadata: str = "{}",
    detector_spectrum_map: Optional[DetectorSpectrumMap] = None,
    control_topic: str = "",
) -> bytes:
    """
    Serialise a run-start message as a pl72 FlatBuffers message.

    :param job_id: Identifier for the file-writing job.
    :param filename: Name of the file to be written.
    :param start_time: Run start as ms since epoch or a datetime;
        defaults to "now" when None.
    :param stop_time: Run stop as ms since epoch or a datetime;
        defaults to 0 (no stop time) when None.
    :param run_name: Human-readable run name.
    :param nexus_structure: JSON description of the NeXus file structure.
    :param service_id: Identifier of the service the message targets.
    :param instrument_name: Name of the instrument.
    :param broker: Kafka broker address.
    :param metadata: JSON metadata string.
    :param detector_spectrum_map: Optional detector-spectrum mapping.
    :param control_topic: Kafka topic for control messages.
    :return: The serialised FlatBuffers message.
    """
    builder = flatbuffers.Builder(512)
    builder.ForceDefaults(True)

    # Use isinstance rather than `type(...) is datetime` so datetime
    # subclasses (e.g. pandas.Timestamp) are also converted to ms.
    if isinstance(start_time, datetime):
        start_time = int(start_time.timestamp() * 1000)
    elif start_time is None:
        start_time = int(time.time() * 1000)
    if service_id is None:
        service_id = ""
    if isinstance(stop_time, datetime):
        stop_time = int(stop_time.timestamp() * 1000)
    elif stop_time is None:
        stop_time = 0

    # All strings must be created before the table is started.
    service_id_offset = builder.CreateString(service_id)
    broker_offset = builder.CreateString(broker)
    job_id_offset = builder.CreateString(job_id)
    nexus_structure_offset = builder.CreateString(nexus_structure)
    instrument_name_offset = builder.CreateString(instrument_name)
    run_name_offset = builder.CreateString(run_name)
    filename_offset = builder.CreateString(filename)
    metadata_offset = builder.CreateString(metadata)
    control_topic_offset = builder.CreateString(control_topic)

    # Build detector-spectrum map
    if detector_spectrum_map is not None:
        spectrum_map_offset = builder.CreateNumpyVector(
            np.asarray(detector_spectrum_map.spectrum_numbers).astype(np.int32)
        )
        det_id_map_offset = builder.CreateNumpyVector(
            np.asarray(detector_spectrum_map.detector_ids).astype(np.int32)
        )
        SpectraDetectorMapping.SpectraDetectorMappingStart(builder)
        SpectraDetectorMapping.SpectraDetectorMappingAddSpectrum(
            builder, spectrum_map_offset
        )
        SpectraDetectorMapping.SpectraDetectorMappingAddDetectorId(
            builder, det_id_map_offset
        )
        SpectraDetectorMapping.SpectraDetectorMappingAddNSpectra(
            builder, detector_spectrum_map.n_spectra
        )
        detector_spectrum_map_offset = SpectraDetectorMapping.SpectraDetectorMappingEnd(
            builder
        )

    # Build the actual buffer
    RunStart.RunStartStart(builder)
    RunStart.RunStartAddServiceId(builder, service_id_offset)
    RunStart.RunStartAddBroker(builder, broker_offset)
    RunStart.RunStartAddJobId(builder, job_id_offset)
    RunStart.RunStartAddNexusStructure(builder, nexus_structure_offset)
    RunStart.RunStartAddInstrumentName(builder, instrument_name_offset)
    RunStart.RunStartAddRunName(builder, run_name_offset)
    RunStart.RunStartAddStopTime(builder, stop_time)
    RunStart.RunStartAddStartTime(builder, start_time)
    RunStart.RunStartAddFilename(builder, filename_offset)
    RunStart.RunStartAddNPeriods(builder, 1)
    RunStart.RunStartAddMetadata(builder, metadata_offset)
    if detector_spectrum_map is not None:
        RunStart.RunStartAddDetectorSpectrumMap(builder, detector_spectrum_map_offset)
    RunStart.RunStartAddControlTopic(builder, control_topic_offset)

    run_start_message = RunStart.RunStartEnd(builder)

    builder.Finish(run_start_message, file_identifier=FILE_IDENTIFIER)
    return bytes(builder.Output())
"" + detector_spectrum_map: Optional[DetectorSpectrumMap] = None + control_topic: str = "" + + +def deserialise_pl72(buffer: Union[bytearray, bytes]) -> RunStartInfo: + check_schema_identifier(buffer, FILE_IDENTIFIER) + + run_start = RunStart.RunStart.GetRootAsRunStart(buffer, 0) + service_id = run_start.ServiceId() if run_start.ServiceId() else b"" + broker = run_start.Broker() if run_start.Broker() else b"" + job_id = run_start.JobId() if run_start.JobId() else b"" + filename = run_start.Filename() if run_start.Filename() else b"" + nexus_structure = run_start.NexusStructure() if run_start.NexusStructure() else b"" + instrument_name = run_start.InstrumentName() if run_start.InstrumentName() else b"" + run_name = run_start.RunName() if run_start.RunName() else b"" + metadata = run_start.Metadata() if run_start.Metadata() else b"" + control_topic = run_start.ControlTopic() if run_start.ControlTopic() else b"" + + detector_spectrum_map = None + det_spec_map_buf = run_start.DetectorSpectrumMap() + if det_spec_map_buf is not None: + detector_spectrum_map = DetectorSpectrumMap( + det_spec_map_buf.SpectrumAsNumpy(), + det_spec_map_buf.DetectorIdAsNumpy(), + det_spec_map_buf.NSpectra(), + ) + + return RunStartInfo( + job_id=job_id.decode(), + filename=filename.decode(), + start_time=run_start.StartTime(), + stop_time=run_start.StopTime(), + run_name=run_name.decode(), + nexus_structure=nexus_structure.decode(), + service_id=service_id.decode(), + instrument_name=instrument_name.decode(), + broker=broker.decode(), + metadata=metadata.decode(), + detector_spectrum_map=detector_spectrum_map, + control_topic=control_topic.decode(), + ) diff --git a/python/src/streaming_data_types/run_stop_6s4t.py b/python/src/streaming_data_types/run_stop_6s4t.py new file mode 100644 index 0000000..fadf4ef --- /dev/null +++ b/python/src/streaming_data_types/run_stop_6s4t.py @@ -0,0 +1,76 @@ +from datetime import datetime +from typing import NamedTuple, Union + +import flatbuffers + +from 
def deserialise_6s4t(buffer: Union[bytearray, bytes]) -> RunStopInfo:
    """
    Deserialise a 6s4t run-stop FlatBuffers message.

    :param buffer: The raw FlatBuffers message.
    :return: A RunStopInfo named tuple with the decoded fields.
    :raises WrongSchemaException: if the buffer is not a 6s4t message.
    """
    check_schema_identifier(buffer, FILE_IDENTIFIER)

    run_stop = RunStop.RunStop.GetRootAsRunStop(buffer, 0)
    # Optional string fields are None when absent; fall back to b"" so
    # the decode() calls below are always safe.
    service_id = run_stop.ServiceId() if run_stop.ServiceId() else b""
    job_id = run_stop.JobId() if run_stop.JobId() else b""
    run_name = run_stop.RunName() if run_stop.RunName() else b""
    stop_time = run_stop.StopTime()
    # Bug fix: command_id was the only string field left unguarded, so a
    # message without a command_id crashed with AttributeError on
    # None.decode(). Guard it like the other fields.
    command_id = run_stop.CommandId() if run_stop.CommandId() else b""

    return RunStopInfo(
        stop_time=stop_time,
        run_name=run_name.decode(),
        job_id=job_id.decode(),
        service_id=service_id.decode(),
        command_id=command_id.decode(),
    )
+ + :param software_name: + :param software_version: + :param service_id: + :param host_name: + :param process_id: + :param update_interval: + :param status_json: + :return: + """ + + builder = flatbuffers.Builder(1024) + builder.ForceDefaults(True) + + software_name = builder.CreateString(software_name) + software_version = builder.CreateString(software_version) + service_id = builder.CreateString(service_id) + host_name = builder.CreateString(host_name) + status_json = builder.CreateString(status_json) + + # Build the actual buffer + Status.StatusStart(builder) + + Status.StatusAddSoftwareName(builder, software_name) + Status.StatusAddSoftwareVersion(builder, software_version) + Status.StatusAddServiceId(builder, service_id) + Status.StatusAddHostName(builder, host_name) + Status.StatusAddProcessId(builder, process_id) + Status.StatusAddUpdateInterval(builder, update_interval) + Status.StatusAddStatusJson(builder, status_json) + + data = Status.StatusEnd(builder) + builder.Finish(data, file_identifier=FILE_IDENTIFIER) + + return bytes(builder.Output()) diff --git a/python/src/streaming_data_types/units_un00.py b/python/src/streaming_data_types/units_un00.py new file mode 100644 index 0000000..746f1d8 --- /dev/null +++ b/python/src/streaming_data_types/units_un00.py @@ -0,0 +1,41 @@ +from collections import namedtuple +from typing import Optional + +import flatbuffers + +from streaming_data_types.fbschemas.units_un00 import Units +from streaming_data_types.utils import check_schema_identifier + +FILE_IDENTIFIER = b"un00" + +UnitInfo = namedtuple("UnitInfo", ("source", "timestamp_ns", "units")) + + +def deserialise_un00(buffer) -> UnitInfo: + check_schema_identifier(buffer, FILE_IDENTIFIER) + units = Units.Units.GetRootAsUnits(buffer, 0) + + return UnitInfo( + units.SourceName().decode("utf-8") if units.SourceName() else "", + units.Timestamp(), + units.Units().decode("utf-8") if units.Units() is not None else None, + ) + + +def serialise_un00( + source: str, 
def get_schema(buffer) -> str:
    """
    Extract the schema code embedded in the buffer.

    :param buffer: The raw buffer of the FlatBuffers message.
    :return: The four-character schema identifier.
    :raises ShortBufferException: if the buffer is too short to contain
        an identifier (fewer than 8 bytes).
    """
    if len(buffer) < 8:
        raise ShortBufferException("Could not retrieve schema as buffer too short")
    # The file identifier occupies bytes 4-7 of a FlatBuffers message.
    return buffer[4:8].decode("utf-8")


def check_schema_identifier(buffer, expected_identifer: bytes):
    """
    Check the schema code embedded in the buffer matches an expected identifier.

    :param buffer: The raw buffer of the FlatBuffers message.
    :param expected_identifer: The expected flatbuffer identifier.
        (Parameter name kept as-is for backward compatibility with
        keyword-argument callers.)
    :raises ShortBufferException: if the buffer is too short.
    :raises WrongSchemaException: if the identifiers do not match.
    """
    # Extract once instead of calling get_schema twice (the original
    # re-read and re-decoded the buffer to build the error message).
    schema = get_schema(buffer)
    if schema != expected_identifer.decode():
        raise WrongSchemaException(
            f"Incorrect schema: expected {expected_identifer} but got {schema}"
        )
b/python/tests/example_buffers/ev42.bin new file mode 100644 index 0000000..783f737 Binary files /dev/null and b/python/tests/example_buffers/ev42.bin differ diff --git a/python/tests/example_buffers/f144.bin b/python/tests/example_buffers/f144.bin new file mode 100644 index 0000000..a91be71 Binary files /dev/null and b/python/tests/example_buffers/f144.bin differ diff --git a/python/tests/example_buffers/hs01.bin b/python/tests/example_buffers/hs01.bin new file mode 100644 index 0000000..4875ec0 Binary files /dev/null and b/python/tests/example_buffers/hs01.bin differ diff --git a/python/tests/test_6s4t.py b/python/tests/test_6s4t.py new file mode 100644 index 0000000..772bb61 --- /dev/null +++ b/python/tests/test_6s4t.py @@ -0,0 +1,39 @@ +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t + + +class TestSerialisation6s4t: + original_entry = { + "job_id": "some_key", + "stop_time": 578214, + "run_name": "test_run", + "service_id": "filewriter1", + "command_id": "some command id", + } + + def test_serialises_and_deserialises_6s4t_message_correctly(self): + buf = serialise_6s4t(**self.original_entry) + deserialised_tuple = deserialise_6s4t(buf) + + assert deserialised_tuple.job_id == self.original_entry["job_id"] + assert deserialised_tuple.stop_time == self.original_entry["stop_time"] + assert deserialised_tuple.run_name == self.original_entry["run_name"] + assert deserialised_tuple.service_id == self.original_entry["service_id"] + assert deserialised_tuple.command_id == self.original_entry["command_id"] + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_6s4t(**self.original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_6s4t(buf) + + def 
test_schema_type_is_in_global_serialisers_list(self): + assert "6s4t" in SERIALISERS + assert "6s4t" in DESERIALISERS diff --git a/python/tests/test_ad00.py b/python/tests/test_ad00.py new file mode 100644 index 0000000..cde2658 --- /dev/null +++ b/python/tests/test_ad00.py @@ -0,0 +1,105 @@ +import time + +import numpy as np +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.area_detector_ad00 import ( + Attribute, + deserialise_ad00, + serialise_ad00, +) +from streaming_data_types.exceptions import WrongSchemaException + + +class TestSerialisationAD00: + def test_serialises_and_deserialises_ad00_int_array(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_entry = { + "source_name": "some source name", + "unique_id": 754, + "data": np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), + "timestamp_ns": time.time_ns(), + "attributes": [ + Attribute("name1", "desc1", "src1", "value"), + Attribute("name2", "desc2", "src2", 11), + Attribute("name3", "desc3", "src3", 3.14), + Attribute("name4", "desc4", "src4", np.linspace(0, 10)), + ], + } + + buf = serialise_ad00(**original_entry) + entry = deserialise_ad00(buf) + + assert entry.unique_id == original_entry["unique_id"] + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp_ns == original_entry["timestamp_ns"] + assert np.array_equal(entry.dimensions, original_entry["data"].shape) + assert np.array_equal(entry.data.shape, entry.dimensions) # Sanity check + assert np.array_equal(entry.data, original_entry["data"]) + assert entry.data.dtype == original_entry["data"].dtype + assert len(entry.attributes) == len(original_entry["attributes"]) + for i in range(len(entry.attributes)): + assert entry.attributes[i] == original_entry["attributes"][i] + + def test_serialises_and_deserialises_ad00_float_array(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some other source name", + "unique_id": 789679, + "data": np.array([[1.1, 2.2, 3.3], [4.4, 5.5, 6.6]], dtype=np.float32), + "timestamp_ns": time.time_ns(), + } + + buf = serialise_ad00(**original_entry) + entry = deserialise_ad00(buf) + + assert entry.unique_id == original_entry["unique_id"] + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp_ns == original_entry["timestamp_ns"] + assert np.array_equal(entry.data, original_entry["data"]) + assert entry.data.dtype == original_entry["data"].dtype + + def test_serialises_and_deserialises_ad00_string(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_entry = { + "source_name": "some source name", + "unique_id": 754, + "data": "hi, this is a string", + "timestamp_ns": time.time_ns(), + } + + buf = serialise_ad00(**original_entry) + entry = deserialise_ad00(buf) + + assert entry.unique_id == original_entry["unique_id"] + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp_ns == original_entry["timestamp_ns"] + assert entry.data == original_entry["data"] + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "source_name": "some source name", + "unique_id": 754, + "data": np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), + "timestamp_ns": time.time_ns(), + } + + buf = serialise_ad00(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_ad00(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "ad00" in SERIALISERS + assert "ad00" in DESERIALISERS diff --git a/python/tests/test_al00.py b/python/tests/test_al00.py new file mode 100644 index 0000000..51cb731 --- /dev/null +++ b/python/tests/test_al00.py @@ -0,0 +1,46 @@ +import pathlib +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from 
streaming_data_types.alarm_al00 import Severity, deserialise_al00, serialise_al00 +from streaming_data_types.exceptions import WrongSchemaException + + +class TestSerialisationAl00: + def test_serialises_and_deserialises_al00_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. + """ + buf = serialise_al00("some_source", 1234567890, Severity.MAJOR, "Some message") + entry = deserialise_al00(buf) + + assert entry.source == "some_source" + assert entry.timestamp_ns == 1234567890 + assert entry.severity == Severity.MAJOR + assert entry.message == "Some message" + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_al00("some_source", 1234567890, Severity.MAJOR, "Some message") + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_al00(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "al00" in SERIALISERS + assert "al00" in DESERIALISERS + + def test_converts_real_buffer(self): + file_path = pathlib.Path(__file__).parent / "example_buffers" / "al00.bin" + with open(file_path, "rb") as file: + buffer = file.read() + + result = deserialise_al00(buffer) + + assert result.source == "det_image2" + assert result.severity == Severity.OK + assert result.message == "" + assert result.timestamp_ns == 1668605811532484096 diff --git a/python/tests/test_answ.py b/python/tests/test_answ.py new file mode 100644 index 0000000..b911770 --- /dev/null +++ b/python/tests/test_answ.py @@ -0,0 +1,82 @@ +from datetime import datetime, timezone + +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.action_response_answ import ( + ActionOutcome, + ActionType, + deserialise_answ, + serialise_answ, +) +from streaming_data_types.exceptions import WrongSchemaException + + +class TestSerialisationAnsw: + def test_serialise_and_deserialise_answ_message(self): + """ + Round-trip to check 
what we serialise is what we get back. + """ + original_entry = { + "service_id": "some_service_id_1234", + "job_id": "some_job_id_abcdef", + "command_id": "some command id", + "action": ActionType.SetStopTime, + "outcome": ActionOutcome.Failure, + "message": "some random error message", + "status_code": 123456789, + "stop_time": datetime( + year=2021, + month=2, + day=12, + hour=2, + minute=12, + second=12, + tzinfo=timezone.utc, + ), + } + + buf = serialise_answ(**original_entry) + entry = deserialise_answ(buf) + + assert entry.service_id == original_entry["service_id"] + assert entry.command_id == original_entry["command_id"] + assert entry.job_id == original_entry["job_id"] + assert entry.message == original_entry["message"] + assert entry.action == original_entry["action"] + assert entry.outcome == original_entry["outcome"] + assert entry.status_code == original_entry["status_code"] + assert entry.stop_time == original_entry["stop_time"] + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "service_id": "some_service_id_1234", + "job_id": "some_job_id_abcdef", + "command_id": "some command id", + "action": ActionType.SetStopTime, + "outcome": ActionOutcome.Failure, + "message": "some random error message", + "status_code": 123456789, + "stop_time": datetime( + year=2021, + month=2, + day=12, + hour=2, + minute=12, + second=12, + tzinfo=timezone.utc, + ), + } + + buf = serialise_answ(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_answ(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "answ" in SERIALISERS + assert "answ" in DESERIALISERS diff --git a/python/tests/test_da00.py b/python/tests/test_da00.py new file mode 100644 index 0000000..64ca8c4 --- /dev/null +++ b/python/tests/test_da00.py @@ -0,0 +1,182 @@ +import time + +import numpy as np +import pytest + +from streaming_data_types import DESERIALISERS, 
SERIALISERS +from streaming_data_types.dataarray_da00 import ( + Variable, + deserialise_da00, + serialise_da00, +) +from streaming_data_types.exceptions import WrongSchemaException + + +def test_serialises_and_deserialises_da00_int_array(): + """ + Round-trip to check what we serialise is what we get back. + """ + original_entry = { + "source_name": "some source name", + "timestamp_ns": time.time_ns(), + "data": [ + Variable( + name="data", + unit="counts", + axes=["time", "x", "y"], + data=np.array([[[1, 2, 3], [3, 4, 5]]], dtype=np.uint64), + ), + Variable( + name="time", + unit="hours", + label="elapsed clock time", + axes=["time"], + data=np.array([13, 21], dtype=np.float32), + ), + Variable( + name="x", + unit="m", + label="Position", + axes=["x"], + data=np.array([-1, 0, 1], dtype=np.float32), + ), + Variable( + name="y", + unit="m", + label="Position", + axes=["y"], + data=np.array([0, 2, 4, 6], dtype=np.float32), + ), + Variable(name="name1", data="value", label="desc1", source="src1"), + Variable(name="name2", data=11, label="desc2", source="src2"), + Variable(name="name3", data=3.14, label="desc3", source="src3"), + Variable( + name="name4", data=np.linspace(0, 10), label="desc4", source="src4" + ), + Variable( + name="name5", + data=np.array([[1, 2], [3, 4]]), + axes=["a", "b"], + label="desc5", + source="src5", + ), + ], + } + + buf = serialise_da00(**original_entry) + entry = deserialise_da00(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp_ns == original_entry["timestamp_ns"] + assert len(entry.data) == len(original_entry["data"]) + for a, b in zip(entry.data, original_entry["data"]): + assert a == b + + +def test_serialises_and_deserialises_da00_float_array(): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some other source name", + "data": [ + Variable( + name="data", + axes=["x", "time", "y"], + data=np.array([[[1.1, 2.2, 3.3]], [[4.4, 5.5, 6.6]]], dtype=np.float32), + ), + Variable( + name="errors", axes=["y"], data=np.array([1, 2, 3], dtype=np.int8) + ), + Variable( + name="y", + unit="m", + label="Position", + axes=["y"], + data=np.array([0, 2, 4, 6], dtype=np.float64), + ), + Variable( + name="time", + unit="hours", + label="elapsed clock time", + axes=["time"], + data=np.array([13, 21], dtype=np.uint32), + ), + Variable( + name="x", + unit="m", + label="Position", + axes=["x"], + data=np.array([-1, 0, 1], dtype=np.int8), + ), + ], + "timestamp_ns": time.time_ns(), + } + + buf = serialise_da00(**original_entry) + entry = deserialise_da00(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp_ns == original_entry["timestamp_ns"] + assert len(entry.data) == len(original_entry["data"]) + for a, b in zip(entry.data, original_entry["data"]): + assert a == b + + +def test_serialises_and_deserialises_da00_string(): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some source name", + "data": [Variable(data="hi, this is a string", axes=[], name="the_string")], + "timestamp_ns": time.time_ns(), + } + + buf = serialise_da00(**original_entry) + entry = deserialise_da00(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.timestamp_ns == original_entry["timestamp_ns"] + assert len(entry.data) == len(original_entry["data"]) + for a, b in zip(entry.data, original_entry["data"]): + assert a == b + + +def test_no_variables_throws(): + original_entry = { + "source_name": "some source name", + "data": [], + "timestamp_ns": time.time_ns(), + } + + with pytest.raises(RuntimeError): + serialise_da00(**original_entry) + + +def test_if_buffer_has_wrong_id_then_throws(): + original_entry = { + "source_name": "some source name", + "data": [ + Variable( + name="data", + axes=["x", "y"], + data=np.array([[1, 2, 3], [3, 4, 5]], dtype=np.uint64), + ) + ], + "timestamp_ns": time.time_ns(), + } + + buf = serialise_da00(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_da00(buf) + + +def test_da00_schema_type_is_in_global_serialisers_list(): + assert "da00" in SERIALISERS + assert "da00" in DESERIALISERS diff --git a/python/tests/test_ep01.py b/python/tests/test_ep01.py new file mode 100644 index 0000000..a39e515 --- /dev/null +++ b/python/tests/test_ep01.py @@ -0,0 +1,41 @@ +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.epics_connection_ep01 import ( + ConnectionInfo, + deserialise_ep01, + serialise_ep01, +) +from streaming_data_types.exceptions import WrongSchemaException + + +class TestSerialisationEp01: + original_entry = { + "timestamp_ns": 1593620746000000000, + "status": ConnectionInfo.DISCONNECTED, + "source_name": "test_source", + "service_id": "test_service", + } + + def 
test_serialises_and_deserialises_ep01_message_correctly(self): + buf = serialise_ep01(**self.original_entry) + deserialised_tuple = deserialise_ep01(buf) + + assert deserialised_tuple.timestamp == self.original_entry["timestamp_ns"] + assert deserialised_tuple.status == self.original_entry["status"] + assert deserialised_tuple.source_name == self.original_entry["source_name"] + assert deserialised_tuple.service_id == self.original_entry["service_id"] + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_ep01(**self.original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_ep01(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "ep01" in SERIALISERS + assert "ep01" in DESERIALISERS diff --git a/python/tests/test_ev44.py b/python/tests/test_ev44.py new file mode 100644 index 0000000..92cd699 --- /dev/null +++ b/python/tests/test_ev44.py @@ -0,0 +1,114 @@ +import numpy as np +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.eventdata_ev44 import deserialise_ev44, serialise_ev44 +from streaming_data_types.exceptions import WrongSchemaException + + +class TestSerialisationEv44: + def test_serialises_and_deserialises_ev44_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "reference_time": [ + 1618573589123781958, + 1618573590133830371, + 1618573593677164112, + 1618573594185190549, + 1618573596217316066, + 1618573596725363109, + 1618573601295720976, + 1618573601799761445, + 1618573607354064836, + ], + "reference_time_index": [2, 4, 5, 7], + "time_of_flight": [100, 200, 300, 400, 500, 600, 700, 800, 900], + "pixel_id": [10, 20, 30, 40, 50, 60, 70, 80, 90], + } + + buf = serialise_ev44(**original_entry) + entry = deserialise_ev44(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.message_id == original_entry["message_id"] + assert np.array_equal(entry.reference_time, original_entry["reference_time"]) + assert np.array_equal( + entry.reference_time_index, original_entry["reference_time_index"] + ) + assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) + assert np.array_equal(entry.pixel_id, original_entry["pixel_id"]) + + def test_serialises_and_deserialises_ev44_message_correctly_for_numpy_arrays(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "reference_time": np.array( + [ + 1618573589123781958, + 1618573590133830371, + 1618573593677164112, + 1618573594185190549, + 1618573596217316066, + 1618573596725363109, + 1618573601295720976, + 1618573601799761445, + 1618573607354064836, + ] + ), + "reference_time_index": np.array([2, 4, 5, 7]), + "time_of_flight": np.array([100, 200, 300, 400, 500, 600, 700, 800, 900]), + "pixel_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), + } + + buf = serialise_ev44(**original_entry) + entry = deserialise_ev44(buf) + + assert entry.source_name == original_entry["source_name"] + assert entry.message_id == original_entry["message_id"] + assert np.array_equal(entry.reference_time, original_entry["reference_time"]) + assert np.array_equal( + entry.reference_time_index, original_entry["reference_time_index"] + ) + assert np.array_equal(entry.time_of_flight, original_entry["time_of_flight"]) + assert np.array_equal(entry.pixel_id, original_entry["pixel_id"]) + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "source_name": "some_source", + "message_id": 123456, + "reference_time": np.array( + [ + 1618573589123781958, + 1618573590133830371, + 1618573593677164112, + 1618573594185190549, + 1618573596217316066, + 1618573596725363109, + 1618573601295720976, + 1618573601799761445, + 1618573607354064836, + ] + ), + "reference_time_index": np.array([2, 4, 5, 7]), + "time_of_flight": np.array([100, 200, 300, 400, 500, 600, 700, 800, 900]), + "pixel_id": np.array([10, 20, 30, 40, 50, 60, 70, 80, 90]), + } + buf = serialise_ev44(**original_entry) + + # Manually introduce error in id. 
+ buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_ev44(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "ev44" in SERIALISERS + assert "ev44" in DESERIALISERS diff --git a/python/tests/test_f144.py b/python/tests/test_f144.py new file mode 100644 index 0000000..38983df --- /dev/null +++ b/python/tests/test_f144.py @@ -0,0 +1,169 @@ +import numpy as np +import pathlib +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.logdata_f144 import deserialise_f144, serialise_f144 + + +class TestSerialisationF144: + original_entry = { + "source_name": "some_source", + "value": 578214, + "timestamp_unix_ns": 1585332414000000000, + } + + def test_serialises_and_deserialises_integer_f144_message_correctly(self): + buf = serialise_f144(**self.original_entry) + deserialised_tuple = deserialise_f144(buf) + + assert deserialised_tuple.source_name == self.original_entry["source_name"] + assert deserialised_tuple.value == self.original_entry["value"] + assert ( + deserialised_tuple.timestamp_unix_ns + == self.original_entry["timestamp_unix_ns"] + ) + + def test_serialises_and_deserialises_byte_f144_message_correctly(self): + byte_log = { + "source_name": "some_source", + "value": 0x7F, + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**byte_log) + deserialised_tuple = deserialise_f144(buf) + + assert deserialised_tuple.source_name == byte_log["source_name"] + assert deserialised_tuple.value == byte_log["value"] + assert deserialised_tuple.timestamp_unix_ns == byte_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_float_f144_message_correctly(self): + float_log = { + "source_name": "some_source", + "value": 1.234, + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**float_log) + deserialised_tuple = deserialise_f144(buf) + + assert 
deserialised_tuple.source_name == float_log["source_name"] + assert deserialised_tuple.value == float_log["value"] + assert deserialised_tuple.timestamp_unix_ns == float_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_scalar_ndarray_f144_message_correctly(self): + numpy_log = { + "source_name": "some_source", + "value": np.array(42), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**numpy_log) + deserialised_tuple = deserialise_f144(buf) + + assert deserialised_tuple.source_name == numpy_log["source_name"] + assert deserialised_tuple.value == np.array(numpy_log["value"]) + assert deserialised_tuple.timestamp_unix_ns == numpy_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_native_list_correctly(self): + list_log = { + "source_name": "some_source", + "value": [1, 2, 3], + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**list_log) + deserialised_tuple = deserialise_f144(buf) + + assert deserialised_tuple.source_name == list_log["source_name"] + # Array values are output as numpy array + assert np.array_equal(deserialised_tuple.value, np.array(list_log["value"])) + assert deserialised_tuple.timestamp_unix_ns == list_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_numpy_array_integers_correctly(self): + array_log = { + "source_name": "some_source", + "value": np.array([1, 2, 3]), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**array_log) + deserialised_tuple = deserialise_f144(buf) + + assert deserialised_tuple.source_name == array_log["source_name"] + assert np.array_equal(deserialised_tuple.value, array_log["value"]) + assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] + + def test_serialises_and_deserialises_numpy_array_preserves_byte_type_correctly( + self, + ): + array_log = { + "source_name": "some_source", + "value": np.array([1, 2, 3], dtype=np.uint8), + "timestamp_unix_ns": 1585332414000000000, + } + buf = 
serialise_f144(**array_log) + deserialised_tuple = deserialise_f144(buf) + + assert np.array_equal(deserialised_tuple.value, array_log["value"]) + assert deserialised_tuple.value.dtype == array_log["value"].dtype + + def test_serialises_and_deserialises_numpy_array_preserves_integer_type_correctly( + self, + ): + array_log = { + "source_name": "some_source", + "value": np.array([1, 2, 3], dtype=np.uint16), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**array_log) + deserialised_tuple = deserialise_f144(buf) + + assert np.array_equal(deserialised_tuple.value, array_log["value"]) + assert deserialised_tuple.value.dtype == array_log["value"].dtype + + def test_serialises_and_deserialises_numpy_array_floats_correctly(self): + array_log = { + "source_name": "some_source", + "value": np.array([1.1, 2.2, 3.3]), + "timestamp_unix_ns": 1585332414000000000, + } + buf = serialise_f144(**array_log) + deserialised_tuple = deserialise_f144(buf) + + assert deserialised_tuple.source_name == array_log["source_name"] + assert np.allclose(deserialised_tuple.value, array_log["value"]) + assert deserialised_tuple.timestamp_unix_ns == array_log["timestamp_unix_ns"] + + def test_raises_not_implemented_error_when_trying_to_serialise_numpy_complex_number_type( + self, + ): + complex_log = { + "source_name": "some_source", + "value": complex(3, 4), + "timestamp_unix_ns": 1585332414000000000, + } + with pytest.raises(NotImplementedError): + serialise_f144(**complex_log) + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_f144(**self.original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_f144(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "f144" in SERIALISERS + assert "f144" in DESERIALISERS + + def test_converts_real_buffer(self): + file_path = pathlib.Path(__file__).parent / "example_buffers" / "f144.bin" + with 
open(file_path, "rb") as file: + buffer = file.read() + + result = deserialise_f144(buffer) + + assert result.source_name == "t_julabo" + assert result.timestamp_unix_ns == 1666004422815024128 + assert result.value == 19 diff --git a/python/tests/test_fc00.py b/python/tests/test_fc00.py new file mode 100644 index 0000000..49127ce --- /dev/null +++ b/python/tests/test_fc00.py @@ -0,0 +1,62 @@ +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.fbschemas.forwarder_config_update_fc00.UpdateType import ( + UpdateType, +) +from streaming_data_types.forwarder_config_update_fc00 import ( + Protocol, + StreamInfo, + deserialise_fc00, + serialise_fc00, +) + + +class TestSerialisationRf5k: + def test_serialises_and_deserialises_fc00_message_with_streams_correctly(self): + """ + Round-trip to check what we serialise is what we get back. + """ + stream_1 = StreamInfo("channel1", "f144", "topic1", Protocol.Protocol.PVA, 0) + stream_2 = StreamInfo("channel2", "TdcTime", "topic2", Protocol.Protocol.CA, 0) + stream_3 = StreamInfo("channel3", "f144", "topic3", Protocol.Protocol.PVA, 1) + original_entry = { + "config_change": UpdateType.ADD, + "streams": [stream_1, stream_2, stream_3], + } + + buf = serialise_fc00(**original_entry) + entry = deserialise_fc00(buf) + + assert entry.config_change == original_entry["config_change"] + assert stream_1 in entry.streams + assert stream_2 in entry.streams + assert stream_3 in entry.streams + + def test_serialises_and_deserialises_fc00_message_without_streams_correctly(self): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_entry = {"config_change": UpdateType.REMOVEALL, "streams": []} + + buf = serialise_fc00(**original_entry) + entry = deserialise_fc00(buf) + + assert entry.config_change == original_entry["config_change"] + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = {"config_change": UpdateType.REMOVEALL, "streams": []} + + buf = serialise_fc00(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_fc00(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "fc00" in SERIALISERS + assert "fc00" in DESERIALISERS diff --git a/python/tests/test_hs01.py b/python/tests/test_hs01.py new file mode 100644 index 0000000..348bfb9 --- /dev/null +++ b/python/tests/test_hs01.py @@ -0,0 +1,384 @@ +import pathlib +import numpy as np +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.histogram_hs01 import deserialise_hs01, serialise_hs01 + + +def create_test_data_with_type(numpy_type): + return { + "source": "some_source", + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]).astype(numpy_type), + } + ], + "last_metadata_timestamp": 123456, + "data": np.array([1, 2, 3, 4, 5]).astype(numpy_type), + "errors": np.array([5, 4, 3, 2, 1]).astype(numpy_type), + "info": "info_string", + } + + +class TestSerialisationHs01: + def _check_metadata_for_one_dimension(self, data, original_data): + assert np.array_equal(data["bin_boundaries"], original_data["bin_boundaries"]) + assert data["length"] == original_data["length"] + assert data["unit"] == original_data["unit"] + assert data["label"] == original_data["label"] + + def test_serialises_and_deserialises_hs01_message_correctly_for_full_1d_data(self): + """ + 
Round-trip to check what we serialise is what we get back. + """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), + } + ], + "last_metadata_timestamp": 123456, + "data": np.array([1.0, 2.0, 3.0, 4.0, 5.0]), + "errors": np.array([5.0, 4.0, 3.0, 2.0, 1.0]), + "info": "info_string", + } + + buf = serialise_hs01(original_hist) + hist = deserialise_hs01(buf) + + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) + + def test_serialises_and_deserialises_hs01_message_correctly_for_minimal_1d_data( + self, + ): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_hist = { + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), + } + ], + "data": np.array([1.0, 2.0, 3.0, 4.0, 5.0]), + } + buf = serialise_hs01(original_hist) + + hist = deserialise_hs01(buf) + assert hist["source"] == "" + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert len(hist["errors"]) == 0 + assert hist["info"] == "" + + def test_serialises_and_deserialises_hs01_message_correctly_for_full_2d_data(self): + """ + Round-trip to check what we serialise is what we get back. + """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [2, 5], + "dim_metadata": [ + { + "length": 2, + "unit": "b", + "label": "y", + "bin_boundaries": np.array([10.0, 11.0, 12.0]), + }, + { + "length": 5, + "unit": "m", + "label": "x", + "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), + }, + ], + "last_metadata_timestamp": 123456, + "data": np.array([[1.0, 2.0, 3.0, 4.0, 5.0], [6.0, 7.0, 8.0, 9.0, 10.0]]), + "errors": np.array([[5.0, 4.0, 3.0, 2.0, 1.0], [10.0, 9.0, 8.0, 7.0, 6.0]]), + "info": "info_string", + } + buf = serialise_hs01(original_hist) + + hist = deserialise_hs01(buf) + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + self._check_metadata_for_one_dimension( + hist["dim_metadata"][1], original_hist["dim_metadata"][1] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert 
np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) + + def test_if_buffer_has_wrong_id_then_throws(self): + original_hist = { + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0.0, 1.0, 2.0, 3.0, 4.0, 5.0]), + } + ], + "data": np.array([1.0, 2.0, 3.0, 4.0, 5.0]), + } + buf = serialise_hs01(original_hist) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_hs01(buf) + + def test_serialises_and_deserialises_hs01_message_correctly_for_int_array_data( + self, + ): + """ + Round-trip to check what we serialise is what we get back. + """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [5], + "dim_metadata": [ + { + "length": 5, + "unit": "m", + "label": "some_label", + "bin_boundaries": np.array([0, 1, 2, 3, 4, 5]), + } + ], + "last_metadata_timestamp": 123456, + "data": np.array([1, 2, 3, 4, 5]), + "errors": np.array([5, 4, 3, 2, 1]), + "info": "info_string", + } + + buf = serialise_hs01(original_hist) + hist = deserialise_hs01(buf) + + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) + + def test_serialise_and_deserialise_hs01_message_returns_int32_type(self): + original_hist = create_test_data_with_type(np.int32) + + 
buf = serialise_hs01(original_hist) + hist = deserialise_hs01(buf) + + assert np.issubdtype( + hist["dim_metadata"][0]["bin_boundaries"].dtype, + original_hist["dim_metadata"][0]["bin_boundaries"].dtype, + ) + assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) + assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) + + def test_serialise_and_deserialise_hs01_message_returns_int64_type(self): + original_hist = create_test_data_with_type(np.int64) + + buf = serialise_hs01(original_hist) + hist = deserialise_hs01(buf) + + assert np.issubdtype( + hist["dim_metadata"][0]["bin_boundaries"].dtype, + original_hist["dim_metadata"][0]["bin_boundaries"].dtype, + ) + assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) + assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) + + def test_serialise_and_deserialise_hs01_message_returns_float32_type(self): + original_hist = create_test_data_with_type(np.float32) + + buf = serialise_hs01(original_hist) + hist = deserialise_hs01(buf) + + assert np.issubdtype( + hist["dim_metadata"][0]["bin_boundaries"].dtype, + original_hist["dim_metadata"][0]["bin_boundaries"].dtype, + ) + assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) + assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) + + def test_serialise_and_deserialise_hs01_message_returns_float64_type(self): + original_hist = create_test_data_with_type(np.float64) + + buf = serialise_hs01(original_hist) + hist = deserialise_hs01(buf) + + assert np.issubdtype( + hist["dim_metadata"][0]["bin_boundaries"].dtype, + original_hist["dim_metadata"][0]["bin_boundaries"].dtype, + ) + assert np.issubdtype(hist["data"].dtype, original_hist["data"].dtype) + assert np.issubdtype(hist["errors"].dtype, original_hist["errors"].dtype) + + def test_serialises_and_deserialises_hs01_message_correctly_when_float_input_is_not_ndarray( + self, + ): + """ + Round-trip to check what we serialise 
is what we get back. + """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [2, 5], + "dim_metadata": [ + { + "length": 2, + "unit": "b", + "label": "y", + "bin_boundaries": [10.0, 11.0, 12.0], + }, + { + "length": 5, + "unit": "m", + "label": "x", + "bin_boundaries": [0.0, 1.0, 2.0, 3.0, 4.0, 5.0], + }, + ], + "last_metadata_timestamp": 123456, + "data": [[1.0, 2.0, 3.0, 4.0, 5.0], [6.0, 7.0, 8.0, 9.0, 10.0]], + "errors": [[5.0, 4.0, 3.0, 2.0, 1.0], [10.0, 9.0, 8.0, 7.0, 6.0]], + "info": "info_string", + } + buf = serialise_hs01(original_hist) + + hist = deserialise_hs01(buf) + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + self._check_metadata_for_one_dimension( + hist["dim_metadata"][1], original_hist["dim_metadata"][1] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) + + def test_serialises_and_deserialises_hs01_message_correctly_when_int_input_is_not_ndarray( + self, + ): + """ + Round-trip to check what we serialise is what we get back. 
+ """ + original_hist = { + "source": "some_source", + "timestamp": 123456, + "current_shape": [2, 5], + "dim_metadata": [ + { + "length": 2, + "unit": "b", + "label": "y", + "bin_boundaries": [10, 11, 12], + }, + { + "length": 5, + "unit": "m", + "label": "x", + "bin_boundaries": [0, 1, 2, 3, 4, 5], + }, + ], + "last_metadata_timestamp": 123456, + "data": [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]], + "errors": [[5, 4, 3, 2, 1], [10, 9, 8, 7, 6]], + "info": "info_string", + } + buf = serialise_hs01(original_hist) + + hist = deserialise_hs01(buf) + assert hist["source"] == original_hist["source"] + assert hist["timestamp"] == original_hist["timestamp"] + assert hist["current_shape"] == original_hist["current_shape"] + self._check_metadata_for_one_dimension( + hist["dim_metadata"][0], original_hist["dim_metadata"][0] + ) + self._check_metadata_for_one_dimension( + hist["dim_metadata"][1], original_hist["dim_metadata"][1] + ) + assert np.array_equal(hist["data"], original_hist["data"]) + assert np.array_equal(hist["errors"], original_hist["errors"]) + assert hist["info"] == original_hist["info"] + assert ( + hist["last_metadata_timestamp"] == original_hist["last_metadata_timestamp"] + ) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "hs01" in SERIALISERS + assert "hs01" in DESERIALISERS + + def test_converts_real_buffer(self): + file_path = pathlib.Path(__file__).parent / "example_buffers" / "hs01.bin" + with open(file_path, "rb") as file: + buffer = file.read() + + result = deserialise_hs01(buffer) + + assert result['current_shape'] == [64, 200] + assert result['source'] == 'just-bin-it' + assert result['timestamp'] == 1668605515930621000 + assert len(result['data']) == 64 + assert result['data'][0][0] == 0 + assert result['data'][~0][~0] == 0 + assert len(result['dim_metadata'][0]['bin_boundaries']) == 65 + assert result['dim_metadata'][0]['bin_boundaries'][0] == 0 + assert result['dim_metadata'][0]['bin_boundaries'][64] == 64 + assert 
result['info'] == '{"id": "nicos-det_image1-1668605510", "start": 1668605510775, "stop": 1668605515775, "state": "FINISHED"}' diff --git a/python/tests/test_json.py b/python/tests/test_json.py new file mode 100644 index 0000000..9b3e39e --- /dev/null +++ b/python/tests/test_json.py @@ -0,0 +1,34 @@ +import json + +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.json_json import deserialise_json, serialise_json + + +class TestSerialisationJson: + def test_serialises_and_deserialises_json_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. + """ + json_str = json.dumps(["foo", "bar"]) + buf = serialise_json(json_str) + entry = deserialise_json(buf) + + assert entry == json_str + + def test_if_buffer_has_wrong_id_then_throws(self): + json_str = json.dumps(["foo", "bar"]) + buf = serialise_json(json_str) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_json(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "json" in SERIALISERS + assert "json" in DESERIALISERS diff --git a/python/tests/test_pl72.py b/python/tests/test_pl72.py new file mode 100644 index 0000000..eab7d78 --- /dev/null +++ b/python/tests/test_pl72.py @@ -0,0 +1,76 @@ +import numpy as np +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.run_start_pl72 import ( + DetectorSpectrumMap, + deserialise_pl72, + serialise_pl72, +) + + +class TestSerialisationPl72: + original_entry = { + "job_id": "some_key", + "filename": "test_file.nxs", + "start_time": 567890, + "stop_time": 578214, + "run_name": "test_run", + "nexus_structure": "{}", + "service_id": "filewriter1", + "instrument_name": "LOKI", + "broker": "localhost:9092", + 
"metadata": "{3:1}", + "detector_spectrum_map": DetectorSpectrumMap( + np.array([4, 5, 6]), np.array([0, 1, 2]), 3 + ), + "control_topic": "some_topic_name", + } + + def test_serialises_and_deserialises_pl72_message_correctly(self): + buf = serialise_pl72(**self.original_entry) + deserialised_tuple = deserialise_pl72(buf) + + assert deserialised_tuple.job_id == self.original_entry["job_id"] + assert deserialised_tuple.filename == self.original_entry["filename"] + assert deserialised_tuple.start_time == self.original_entry["start_time"] + assert deserialised_tuple.stop_time == self.original_entry["stop_time"] + assert deserialised_tuple.run_name == self.original_entry["run_name"] + assert ( + deserialised_tuple.nexus_structure == self.original_entry["nexus_structure"] + ) + assert deserialised_tuple.service_id == self.original_entry["service_id"] + assert ( + deserialised_tuple.instrument_name == self.original_entry["instrument_name"] + ) + assert deserialised_tuple.broker == self.original_entry["broker"] + assert deserialised_tuple.metadata == self.original_entry["metadata"] + + assert ( + deserialised_tuple.detector_spectrum_map.n_spectra + == self.original_entry["detector_spectrum_map"].n_spectra + ) + assert np.array_equal( + deserialised_tuple.detector_spectrum_map.spectrum_numbers, + self.original_entry["detector_spectrum_map"].spectrum_numbers, + ) + assert np.array_equal( + deserialised_tuple.detector_spectrum_map.detector_ids, + self.original_entry["detector_spectrum_map"].detector_ids, + ) + assert deserialised_tuple.control_topic == self.original_entry["control_topic"] + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_pl72(**self.original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_pl72(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "pl72" in SERIALISERS + assert "pl72" in DESERIALISERS diff --git 
a/python/tests/test_un00.py b/python/tests/test_un00.py new file mode 100644 index 0000000..c59f688 --- /dev/null +++ b/python/tests/test_un00.py @@ -0,0 +1,43 @@ +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.units_un00 import deserialise_un00, serialise_un00 + + +class TestSerialisationUn00: + def test_serialises_and_deserialises_un00_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. + """ + buf = serialise_un00("some_source", 1234567890, "Some unit") + entry = deserialise_un00(buf) + + assert entry.source == "some_source" + assert entry.timestamp_ns == 1234567890 + assert entry.units == "Some unit" + + def test_serialises_and_deserialises_un00_message_correctly_with_none_as_unit(self): + """ + Round-trip to check what we serialise is what we get back with None specified as a unit. + """ + buf = serialise_un00("some_source", 1234567890, None) + entry = deserialise_un00(buf) + + assert entry.source == "some_source" + assert entry.timestamp_ns == 1234567890 + assert entry.units is None + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_un00("some_source", 1234567890, "Some unit") + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_un00(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "un00" in SERIALISERS + assert "un00" in DESERIALISERS diff --git a/python/tests/test_utils.py b/python/tests/test_utils.py new file mode 100644 index 0000000..6c33224 --- /dev/null +++ b/python/tests/test_utils.py @@ -0,0 +1,10 @@ +import pytest + +from streaming_data_types.exceptions import ShortBufferException +from streaming_data_types.utils import check_schema_identifier + + +def test_schema_check_throws_if_buffer_too_short(): + short_buffer = b"1234567" + with 
pytest.raises(ShortBufferException): + check_schema_identifier(short_buffer, b"1234") diff --git a/python/tests/test_wrdn.py b/python/tests/test_wrdn.py new file mode 100644 index 0000000..d85bc56 --- /dev/null +++ b/python/tests/test_wrdn.py @@ -0,0 +1,54 @@ +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.finished_writing_wrdn import deserialise_wrdn, serialise_wrdn + + +class TestEncoder(object): + def test_serialise_and_deserialise_wrdn_message(self): + """ + Round-trip to check what we serialise is what we get back. + """ + + original_entry = { + "service_id": "some_service_id_1234", + "job_id": "some_job_id_abcdef", + "error_encountered": True, + "file_name": "somefile.nxs", + "metadata": '{"hello":4}', + "message": "some random error message", + } + + buf = serialise_wrdn(**original_entry) + entry = deserialise_wrdn(buf) + + assert entry.service_id == original_entry["service_id"] + assert entry.job_id == original_entry["job_id"] + assert entry.error_encountered == original_entry["error_encountered"] + assert entry.file_name == original_entry["file_name"] + assert entry.metadata == original_entry["metadata"] + assert entry.message == original_entry["message"] + + def test_if_buffer_has_wrong_id_then_throws(self): + original_entry = { + "service_id": "some_service_id_1234", + "job_id": "some_job_id_abcdef", + "error_encountered": True, + "file_name": "somefile.nxs", + "metadata": '{"hello":4}', + "message": "some random error message", + } + + buf = serialise_wrdn(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_wrdn(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "wrdn" in SERIALISERS + assert "wrdn" in DESERIALISERS diff --git a/python/tests/test_x52f.py b/python/tests/test_x52f.py new file mode 100644 index 
0000000..cbb3fc3 --- /dev/null +++ b/python/tests/test_x52f.py @@ -0,0 +1,46 @@ +import pytest + +from streaming_data_types import DESERIALISERS, SERIALISERS +from streaming_data_types.exceptions import WrongSchemaException +from streaming_data_types.status_x5f2 import deserialise_x5f2, serialise_x5f2 + +original_entry = { + "software_name": "nicos/test", + "software_version": "1.0.0", + "service_id": "1a2b3c", + "host_name": "localhost", + "process_id": 1234, + "update_interval": 0, + "status_json": '{"content" : "log_or_status_message"}', +} + + +class TestSerialisationX52f: + def test_serialises_and_deserialises_x5f2_message_correctly(self): + """ + Round-trip to check what we serialise is what we get back. + """ + buf = serialise_x5f2(**original_entry) + entry = deserialise_x5f2(buf) + + assert entry.software_name == original_entry["software_name"] + assert entry.software_version == original_entry["software_version"] + assert entry.service_id == original_entry["service_id"] + assert entry.host_name == original_entry["host_name"] + assert entry.process_id == original_entry["process_id"] + assert entry.update_interval == original_entry["update_interval"] + assert entry.status_json == original_entry["status_json"] + + def test_if_buffer_has_wrong_id_then_throws(self): + buf = serialise_x5f2(**original_entry) + + # Manually hack the id + buf = bytearray(buf) + buf[4:8] = b"1234" + + with pytest.raises(WrongSchemaException): + deserialise_x5f2(buf) + + def test_schema_type_is_in_global_serialisers_list(self): + assert "x5f2" in SERIALISERS + assert "x5f2" in DESERIALISERS diff --git a/rust/Cargo.lock b/rust/Cargo.lock new file mode 100644 index 0000000..a9f9a21 --- /dev/null +++ b/rust/Cargo.lock @@ -0,0 +1,41 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "flatbuffers" +version = "25.12.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35f6839d7b3b98adde531effaf34f0c2badc6f4735d26fe74709d8e513a96ef3" +dependencies = [ + "bitflags", + "rustc_version", +] + +[[package]] +name = "isis_streaming_data_types" +version = "0.0.0" +dependencies = [ + "flatbuffers", +] + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" diff --git a/rust/Cargo.toml b/rust/Cargo.toml new file mode 100644 index 0000000..625cac1 --- /dev/null +++ b/rust/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "isis_streaming_data_types" +version = "0.0.0" +edition = "2024" +description = "Streaming data types for the ISIS Neutron & Muon Source" +license-file = "../LICENSE" +documentation = "https://github.com/isisComputingGroup/streaming-data-types" +homepage = "https://github.com/isisComputingGroup/streaming-data-types" +repository = "https://github.com/isisComputingGroup/streaming-data-types" + +[dependencies] +flatbuffers = "*" \ No newline at end of file diff --git a/rust/src/flatbuffers_generated/6s4t_run_stop.rs b/rust/src/flatbuffers_generated/6s4t_run_stop.rs new file mode 100644 index 0000000..506c531 --- /dev/null +++ b/rust/src/flatbuffers_generated/6s4t_run_stop.rs @@ -0,0 +1,252 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum 
RunStopOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct RunStop<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for RunStop<'a> { + type Inner = RunStop<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> RunStop<'a> { + pub const VT_STOP_TIME: ::flatbuffers::VOffsetT = 4; + pub const VT_RUN_NAME: ::flatbuffers::VOffsetT = 6; + pub const VT_JOB_ID: ::flatbuffers::VOffsetT = 8; + pub const VT_SERVICE_ID: ::flatbuffers::VOffsetT = 10; + pub const VT_COMMAND_ID: ::flatbuffers::VOffsetT = 12; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + RunStop { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args RunStopArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = RunStopBuilder::new(_fbb); + builder.add_stop_time(args.stop_time); + if let Some(x) = args.command_id { builder.add_command_id(x); } + if let Some(x) = args.service_id { builder.add_service_id(x); } + if let Some(x) = args.job_id { builder.add_job_id(x); } + if let Some(x) = args.run_name { builder.add_run_name(x); } + builder.finish() + } + + + #[inline] + pub fn stop_time(&self) -> u64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(RunStop::VT_STOP_TIME, Some(0)).unwrap()} + } + #[inline] + pub fn run_name(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStop::VT_RUN_NAME, None)} + } + #[inline] + pub fn job_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + 
// which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStop::VT_JOB_ID, None)} + } + #[inline] + pub fn service_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStop::VT_SERVICE_ID, None)} + } + #[inline] + pub fn command_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStop::VT_COMMAND_ID, None)} + } +} + +impl ::flatbuffers::Verifiable for RunStop<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("stop_time", Self::VT_STOP_TIME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("run_name", Self::VT_RUN_NAME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("job_id", Self::VT_JOB_ID, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("service_id", Self::VT_SERVICE_ID, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("command_id", Self::VT_COMMAND_ID, false)? 
+ .finish(); + Ok(()) + } +} +pub struct RunStopArgs<'a> { + pub stop_time: u64, + pub run_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub job_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub service_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub command_id: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for RunStopArgs<'a> { + #[inline] + fn default() -> Self { + RunStopArgs { + stop_time: 0, + run_name: None, + job_id: None, + service_id: None, + command_id: None, + } + } +} + +pub struct RunStopBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> RunStopBuilder<'a, 'b, A> { + #[inline] + pub fn add_stop_time(&mut self, stop_time: u64) { + self.fbb_.push_slot::(RunStop::VT_STOP_TIME, stop_time, 0); + } + #[inline] + pub fn add_run_name(&mut self, run_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStop::VT_RUN_NAME, run_name); + } + #[inline] + pub fn add_job_id(&mut self, job_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStop::VT_JOB_ID, job_id); + } + #[inline] + pub fn add_service_id(&mut self, service_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStop::VT_SERVICE_ID, service_id); + } + #[inline] + pub fn add_command_id(&mut self, command_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStop::VT_COMMAND_ID, command_id); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> RunStopBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + RunStopBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = 
self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for RunStop<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("RunStop"); + ds.field("stop_time", &self.stop_time()); + ds.field("run_name", &self.run_name()); + ds.field("job_id", &self.job_id()); + ds.field("service_id", &self.service_id()); + ds.field("command_id", &self.command_id()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `RunStop` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_run_stop_unchecked`. +pub fn root_as_run_stop(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `RunStop` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_run_stop_unchecked`. +pub fn size_prefixed_root_as_run_stop(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `RunStop` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_run_stop_unchecked`. 
+pub fn root_as_run_stop_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `RunStop` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_run_stop_unchecked`. +pub fn size_prefixed_root_as_run_stop_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a RunStop and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `RunStop`. +pub unsafe fn root_as_run_stop_unchecked(buf: &[u8]) -> RunStop<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed RunStop and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `RunStop`. 
+pub unsafe fn size_prefixed_root_as_run_stop_unchecked(buf: &[u8]) -> RunStop<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const RUN_STOP_IDENTIFIER: &str = "6s4t"; + +#[inline] +pub fn run_stop_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, RUN_STOP_IDENTIFIER, false) +} + +#[inline] +pub fn run_stop_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, RUN_STOP_IDENTIFIER, true) +} + +#[inline] +pub fn finish_run_stop_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(RUN_STOP_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_run_stop_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(RUN_STOP_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/ad00_area_detector_array.rs b/rust/src/flatbuffers_generated/ad00_area_detector_array.rs new file mode 100644 index 0000000..6b87d1b --- /dev/null +++ b/rust/src/flatbuffers_generated/ad00_area_detector_array.rs @@ -0,0 +1,575 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_DTYPE: i8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_DTYPE: i8 = 10; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. 
This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_DTYPE: [DType; 11] = [ + DType::int8, + DType::uint8, + DType::int16, + DType::uint16, + DType::int32, + DType::uint32, + DType::int64, + DType::uint64, + DType::float32, + DType::float64, + DType::c_string, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct DType(pub i8); +#[allow(non_upper_case_globals)] +impl DType { + pub const int8: Self = Self(0); + pub const uint8: Self = Self(1); + pub const int16: Self = Self(2); + pub const uint16: Self = Self(3); + pub const int32: Self = Self(4); + pub const uint32: Self = Self(5); + pub const int64: Self = Self(6); + pub const uint64: Self = Self(7); + pub const float32: Self = Self(8); + pub const float64: Self = Self(9); + pub const c_string: Self = Self(10); + + pub const ENUM_MIN: i8 = 0; + pub const ENUM_MAX: i8 = 10; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::int8, + Self::uint8, + Self::int16, + Self::uint16, + Self::int32, + Self::uint32, + Self::int64, + Self::uint64, + Self::float32, + Self::float64, + Self::c_string, + ]; + /// Returns the variant's name or "" if unknown. 
+ pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::int8 => Some("int8"), + Self::uint8 => Some("uint8"), + Self::int16 => Some("int16"), + Self::uint16 => Some("uint16"), + Self::int32 => Some("int32"), + Self::uint32 => Some("uint32"), + Self::int64 => Some("int64"), + Self::uint64 => Some("uint64"), + Self::float32 => Some("float32"), + Self::float64 => Some("float64"), + Self::c_string => Some("c_string"), + _ => None, + } + } +} +impl ::core::fmt::Debug for DType { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for DType { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for DType { + type Output = DType; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for DType { + type Scalar = i8; + #[inline] + fn to_little_endian(self) -> i8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i8) -> Self { + let b = i8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for DType { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + i8::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for DType {} +pub enum AttributeOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Attribute<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Attribute<'a> { + type Inner = Attribute<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { 
::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Attribute<'a> { + pub const VT_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_DESCRIPTION: ::flatbuffers::VOffsetT = 6; + pub const VT_SOURCE: ::flatbuffers::VOffsetT = 8; + pub const VT_DATA_TYPE: ::flatbuffers::VOffsetT = 10; + pub const VT_DATA: ::flatbuffers::VOffsetT = 12; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Attribute { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args AttributeArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = AttributeBuilder::new(_fbb); + if let Some(x) = args.data { builder.add_data(x); } + if let Some(x) = args.source { builder.add_source(x); } + if let Some(x) = args.description { builder.add_description(x); } + if let Some(x) = args.name { builder.add_name(x); } + builder.add_data_type(args.data_type); + builder.finish() + } + + + #[inline] + pub fn name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Attribute::VT_NAME, None).unwrap()} + } + #[inline] + pub fn description(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Attribute::VT_DESCRIPTION, None)} + } + #[inline] + pub fn source(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Attribute::VT_SOURCE, None)} + } + #[inline] + pub fn data_type(&self) -> DType { + // Safety: + // Created from valid Table for this object + // which contains a valid value 
in this slot + unsafe { self._tab.get::(Attribute::VT_DATA_TYPE, Some(DType::int8)).unwrap()} + } + #[inline] + pub fn data(&self) -> ::flatbuffers::Vector<'a, u8> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u8>>>(Attribute::VT_DATA, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Attribute<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("name", Self::VT_NAME, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("description", Self::VT_DESCRIPTION, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source", Self::VT_SOURCE, false)? + .visit_field::("data_type", Self::VT_DATA_TYPE, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u8>>>("data", Self::VT_DATA, true)? 
+ .finish(); + Ok(()) + } +} +pub struct AttributeArgs<'a> { + pub name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub description: Option<::flatbuffers::WIPOffset<&'a str>>, + pub source: Option<::flatbuffers::WIPOffset<&'a str>>, + pub data_type: DType, + pub data: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u8>>>, +} +impl<'a> Default for AttributeArgs<'a> { + #[inline] + fn default() -> Self { + AttributeArgs { + name: None, // required field + description: None, + source: None, + data_type: DType::int8, + data: None, // required field + } + } +} + +pub struct AttributeBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> AttributeBuilder<'a, 'b, A> { + #[inline] + pub fn add_name(&mut self, name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Attribute::VT_NAME, name); + } + #[inline] + pub fn add_description(&mut self, description: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Attribute::VT_DESCRIPTION, description); + } + #[inline] + pub fn add_source(&mut self, source: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Attribute::VT_SOURCE, source); + } + #[inline] + pub fn add_data_type(&mut self, data_type: DType) { + self.fbb_.push_slot::(Attribute::VT_DATA_TYPE, data_type, DType::int8); + } + #[inline] + pub fn add_data(&mut self, data: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u8>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Attribute::VT_DATA, data); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> AttributeBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + AttributeBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub 
fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Attribute::VT_NAME,"name"); + self.fbb_.required(o, Attribute::VT_DATA,"data"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Attribute<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Attribute"); + ds.field("name", &self.name()); + ds.field("description", &self.description()); + ds.field("source", &self.source()); + ds.field("data_type", &self.data_type()); + ds.field("data", &self.data()); + ds.finish() + } +} +pub enum ad00_ADArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ad00_ADArray<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ad00_ADArray<'a> { + type Inner = ad00_ADArray<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ad00_ADArray<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_ID: ::flatbuffers::VOffsetT = 6; + pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 8; + pub const VT_DIMENSIONS: ::flatbuffers::VOffsetT = 10; + pub const VT_DATA_TYPE: ::flatbuffers::VOffsetT = 12; + pub const VT_DATA: ::flatbuffers::VOffsetT = 14; + pub const VT_ATTRIBUTES: ::flatbuffers::VOffsetT = 16; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ad00_ADArray { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ad00_ADArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ad00_ADArrayBuilder::new(_fbb); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.attributes { builder.add_attributes(x); } + if let Some(x) = args.data { 
builder.add_data(x); } + if let Some(x) = args.dimensions { builder.add_dimensions(x); } + builder.add_id(args.id); + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.add_data_type(args.data_type); + builder.finish() + } + + + #[inline] + pub fn source_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(ad00_ADArray::VT_SOURCE_NAME, None).unwrap()} + } + #[inline] + pub fn id(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ad00_ADArray::VT_ID, Some(0)).unwrap()} + } + #[inline] + pub fn timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ad00_ADArray::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn dimensions(&self) -> ::flatbuffers::Vector<'a, i64> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(ad00_ADArray::VT_DIMENSIONS, None).unwrap()} + } + #[inline] + pub fn data_type(&self) -> DType { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ad00_ADArray::VT_DATA_TYPE, Some(DType::int8)).unwrap()} + } + #[inline] + pub fn data(&self) -> ::flatbuffers::Vector<'a, u8> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u8>>>(ad00_ADArray::VT_DATA, None).unwrap()} + } + #[inline] + pub fn attributes(&self) -> Option<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>> { + // Safety: + // Created from valid Table 
for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>(ad00_ADArray::VT_ATTRIBUTES, None)} + } +} + +impl ::flatbuffers::Verifiable for ad00_ADArray<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)? + .visit_field::("id", Self::VT_ID, false)? + .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("dimensions", Self::VT_DIMENSIONS, true)? + .visit_field::("data_type", Self::VT_DATA_TYPE, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u8>>>("data", Self::VT_DATA, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, ::flatbuffers::ForwardsUOffset>>>("attributes", Self::VT_ATTRIBUTES, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ad00_ADArrayArgs<'a> { + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub id: i32, + pub timestamp: i64, + pub dimensions: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, + pub data_type: DType, + pub data: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u8>>>, + pub attributes: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>>, +} +impl<'a> Default for ad00_ADArrayArgs<'a> { + #[inline] + fn default() -> Self { + ad00_ADArrayArgs { + source_name: None, // required field + id: 0, + timestamp: 0, + dimensions: None, // required field + data_type: DType::int8, + data: None, // required field + attributes: None, + } + } +} + +pub struct ad00_ADArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ad00_ADArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ad00_ADArray::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_id(&mut self, id: i32) { + self.fbb_.push_slot::(ad00_ADArray::VT_ID, id, 0); + } + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(ad00_ADArray::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub fn add_dimensions(&mut self, dimensions: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ad00_ADArray::VT_DIMENSIONS, dimensions); + } + #[inline] + pub fn add_data_type(&mut self, data_type: DType) { + self.fbb_.push_slot::(ad00_ADArray::VT_DATA_TYPE, data_type, DType::int8); + } + #[inline] + pub fn add_data(&mut self, data: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u8>>) { + 
self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ad00_ADArray::VT_DATA, data); + } + #[inline] + pub fn add_attributes(&mut self, attributes: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , ::flatbuffers::ForwardsUOffset>>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ad00_ADArray::VT_ATTRIBUTES, attributes); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ad00_ADArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ad00_ADArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, ad00_ADArray::VT_SOURCE_NAME,"source_name"); + self.fbb_.required(o, ad00_ADArray::VT_DIMENSIONS,"dimensions"); + self.fbb_.required(o, ad00_ADArray::VT_DATA,"data"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ad00_ADArray<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ad00_ADArray"); + ds.field("source_name", &self.source_name()); + ds.field("id", &self.id()); + ds.field("timestamp", &self.timestamp()); + ds.field("dimensions", &self.dimensions()); + ds.field("data_type", &self.data_type()); + ds.field("data", &self.data()); + ds.field("attributes", &self.attributes()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `ad00_ADArray` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_ad_00_adarray_unchecked`. +pub fn root_as_ad_00_adarray(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `ad00_ADArray` and returns it. 
+/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_ad_00_adarray_unchecked`. +pub fn size_prefixed_root_as_ad_00_adarray(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `ad00_ADArray` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_ad_00_adarray_unchecked`. +pub fn root_as_ad_00_adarray_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `ad00_ADArray` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_ad_00_adarray_unchecked`. +pub fn size_prefixed_root_as_ad_00_adarray_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a ad00_ADArray and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `ad00_ADArray`. +pub unsafe fn root_as_ad_00_adarray_unchecked(buf: &[u8]) -> ad00_ADArray<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed ad00_ADArray and returns it. 
+/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `ad00_ADArray`. +pub unsafe fn size_prefixed_root_as_ad_00_adarray_unchecked(buf: &[u8]) -> ad00_ADArray<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const AD_00_ADARRAY_IDENTIFIER: &str = "ad00"; + +#[inline] +pub fn ad_00_adarray_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, AD_00_ADARRAY_IDENTIFIER, false) +} + +#[inline] +pub fn ad_00_adarray_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, AD_00_ADARRAY_IDENTIFIER, true) +} + +#[inline] +pub fn finish_ad_00_adarray_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(AD_00_ADARRAY_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_ad_00_adarray_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(AD_00_ADARRAY_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/al00_alarm.rs b/rust/src/flatbuffers_generated/al00_alarm.rs new file mode 100644 index 0000000..c81667e --- /dev/null +++ b/rust/src/flatbuffers_generated/al00_alarm.rs @@ -0,0 +1,327 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_SEVERITY: i16 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_SEVERITY: i16 = 3; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. 
This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_SEVERITY: [Severity; 4] = [ + Severity::OK, + Severity::MINOR, + Severity::MAJOR, + Severity::INVALID, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct Severity(pub i16); +#[allow(non_upper_case_globals)] +impl Severity { + pub const OK: Self = Self(0); + pub const MINOR: Self = Self(1); + pub const MAJOR: Self = Self(2); + pub const INVALID: Self = Self(3); + + pub const ENUM_MIN: i16 = 0; + pub const ENUM_MAX: i16 = 3; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::OK, + Self::MINOR, + Self::MAJOR, + Self::INVALID, + ]; + /// Returns the variant's name or "" if unknown. + pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::OK => Some("OK"), + Self::MINOR => Some("MINOR"), + Self::MAJOR => Some("MAJOR"), + Self::INVALID => Some("INVALID"), + _ => None, + } + } +} +impl ::core::fmt::Debug for Severity { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for Severity { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for Severity { + type Output = Severity; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for Severity { + type Scalar = i16; + #[inline] + fn to_little_endian(self) -> i16 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i16) -> Self { + let b = i16::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for Severity { + #[inline] + fn 
run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + i16::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for Severity {} +pub enum AlarmOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Alarm<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Alarm<'a> { + type Inner = Alarm<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Alarm<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 6; + pub const VT_SEVERITY: ::flatbuffers::VOffsetT = 8; + pub const VT_MESSAGE: ::flatbuffers::VOffsetT = 10; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Alarm { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args AlarmArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = AlarmBuilder::new(_fbb); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.message { builder.add_message(x); } + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.add_severity(args.severity); + builder.finish() + } + + + #[inline] + pub fn source_name(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Alarm::VT_SOURCE_NAME, None)} + } + #[inline] + pub fn timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Alarm::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn severity(&self) -> Severity { 
+ // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Alarm::VT_SEVERITY, Some(Severity::OK)).unwrap()} + } + #[inline] + pub fn message(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Alarm::VT_MESSAGE, None)} + } +} + +impl ::flatbuffers::Verifiable for Alarm<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, false)? + .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? + .visit_field::("severity", Self::VT_SEVERITY, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("message", Self::VT_MESSAGE, false)? + .finish(); + Ok(()) + } +} +pub struct AlarmArgs<'a> { + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub timestamp: i64, + pub severity: Severity, + pub message: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for AlarmArgs<'a> { + #[inline] + fn default() -> Self { + AlarmArgs { + source_name: None, + timestamp: 0, + severity: Severity::OK, + message: None, + } + } +} + +pub struct AlarmBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> AlarmBuilder<'a, 'b, A> { + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Alarm::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(Alarm::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub 
fn add_severity(&mut self, severity: Severity) { + self.fbb_.push_slot::(Alarm::VT_SEVERITY, severity, Severity::OK); + } + #[inline] + pub fn add_message(&mut self, message: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Alarm::VT_MESSAGE, message); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> AlarmBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + AlarmBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Alarm<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Alarm"); + ds.field("source_name", &self.source_name()); + ds.field("timestamp", &self.timestamp()); + ds.field("severity", &self.severity()); + ds.field("message", &self.message()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `Alarm` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_alarm_unchecked`. +pub fn root_as_alarm(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `Alarm` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_alarm_unchecked`. +pub fn size_prefixed_root_as_alarm(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `Alarm` and returns it. 
+/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_alarm_unchecked`. +pub fn root_as_alarm_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `Alarm` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_alarm_unchecked`. +pub fn size_prefixed_root_as_alarm_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a Alarm and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `Alarm`. +pub unsafe fn root_as_alarm_unchecked(buf: &[u8]) -> Alarm<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed Alarm and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `Alarm`. 
+pub unsafe fn size_prefixed_root_as_alarm_unchecked(buf: &[u8]) -> Alarm<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const ALARM_IDENTIFIER: &str = "al00"; + +#[inline] +pub fn alarm_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, ALARM_IDENTIFIER, false) +} + +#[inline] +pub fn alarm_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, ALARM_IDENTIFIER, true) +} + +#[inline] +pub fn finish_alarm_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(ALARM_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_alarm_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(ALARM_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/answ_action_response.rs b/rust/src/flatbuffers_generated/answ_action_response.rs new file mode 100644 index 0000000..ea7c1c3 --- /dev/null +++ b/rust/src/flatbuffers_generated/answ_action_response.rs @@ -0,0 +1,473 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_ACTION_TYPE: i8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_ACTION_TYPE: i8 = 1; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. 
This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_ACTION_TYPE: [ActionType; 2] = [ + ActionType::StartJob, + ActionType::SetStopTime, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct ActionType(pub i8); +#[allow(non_upper_case_globals)] +impl ActionType { + pub const StartJob: Self = Self(0); + pub const SetStopTime: Self = Self(1); + + pub const ENUM_MIN: i8 = 0; + pub const ENUM_MAX: i8 = 1; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::StartJob, + Self::SetStopTime, + ]; + /// Returns the variant's name or "" if unknown. + pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::StartJob => Some("StartJob"), + Self::SetStopTime => Some("SetStopTime"), + _ => None, + } + } +} +impl ::core::fmt::Debug for ActionType { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for ActionType { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for ActionType { + type Output = ActionType; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for ActionType { + type Scalar = i8; + #[inline] + fn to_little_endian(self) -> i8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i8) -> Self { + let b = i8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for ActionType { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + i8::run_verifier(v, 
pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for ActionType {} +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_ACTION_OUTCOME: i8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_ACTION_OUTCOME: i8 = 1; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_ACTION_OUTCOME: [ActionOutcome; 2] = [ + ActionOutcome::Success, + ActionOutcome::Failure, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct ActionOutcome(pub i8); +#[allow(non_upper_case_globals)] +impl ActionOutcome { + pub const Success: Self = Self(0); + pub const Failure: Self = Self(1); + + pub const ENUM_MIN: i8 = 0; + pub const ENUM_MAX: i8 = 1; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::Success, + Self::Failure, + ]; + /// Returns the variant's name or "" if unknown. 
+ pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::Success => Some("Success"), + Self::Failure => Some("Failure"), + _ => None, + } + } +} +impl ::core::fmt::Debug for ActionOutcome { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for ActionOutcome { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for ActionOutcome { + type Output = ActionOutcome; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for ActionOutcome { + type Scalar = i8; + #[inline] + fn to_little_endian(self) -> i8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i8) -> Self { + let b = i8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for ActionOutcome { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + i8::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for ActionOutcome {} +pub enum ActionResponseOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ActionResponse<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ActionResponse<'a> { + type Inner = ActionResponse<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ActionResponse<'a> { + pub const VT_SERVICE_ID: ::flatbuffers::VOffsetT = 4; + pub const VT_JOB_ID: ::flatbuffers::VOffsetT = 6; + pub const VT_ACTION: 
::flatbuffers::VOffsetT = 8; + pub const VT_OUTCOME: ::flatbuffers::VOffsetT = 10; + pub const VT_STATUS_CODE: ::flatbuffers::VOffsetT = 12; + pub const VT_STOP_TIME: ::flatbuffers::VOffsetT = 14; + pub const VT_MESSAGE: ::flatbuffers::VOffsetT = 16; + pub const VT_COMMAND_ID: ::flatbuffers::VOffsetT = 18; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ActionResponse { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ActionResponseArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ActionResponseBuilder::new(_fbb); + builder.add_stop_time(args.stop_time); + if let Some(x) = args.command_id { builder.add_command_id(x); } + if let Some(x) = args.message { builder.add_message(x); } + builder.add_status_code(args.status_code); + if let Some(x) = args.job_id { builder.add_job_id(x); } + if let Some(x) = args.service_id { builder.add_service_id(x); } + builder.add_outcome(args.outcome); + builder.add_action(args.action); + builder.finish() + } + + + #[inline] + pub fn service_id(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(ActionResponse::VT_SERVICE_ID, None).unwrap()} + } + #[inline] + pub fn job_id(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(ActionResponse::VT_JOB_ID, None).unwrap()} + } + #[inline] + pub fn action(&self) -> ActionType { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ActionResponse::VT_ACTION, Some(ActionType::StartJob)).unwrap()} + } + #[inline] + pub fn 
outcome(&self) -> ActionOutcome { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ActionResponse::VT_OUTCOME, Some(ActionOutcome::Success)).unwrap()} + } + #[inline] + pub fn status_code(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ActionResponse::VT_STATUS_CODE, Some(0)).unwrap()} + } + #[inline] + pub fn stop_time(&self) -> u64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ActionResponse::VT_STOP_TIME, Some(0)).unwrap()} + } + #[inline] + pub fn message(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(ActionResponse::VT_MESSAGE, None)} + } + #[inline] + pub fn command_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(ActionResponse::VT_COMMAND_ID, None)} + } +} + +impl ::flatbuffers::Verifiable for ActionResponse<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("service_id", Self::VT_SERVICE_ID, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("job_id", Self::VT_JOB_ID, true)? + .visit_field::("action", Self::VT_ACTION, false)? + .visit_field::("outcome", Self::VT_OUTCOME, false)? + .visit_field::("status_code", Self::VT_STATUS_CODE, false)? + .visit_field::("stop_time", Self::VT_STOP_TIME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("message", Self::VT_MESSAGE, false)? 
+ .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("command_id", Self::VT_COMMAND_ID, false)? + .finish(); + Ok(()) + } +} +pub struct ActionResponseArgs<'a> { + pub service_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub job_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub action: ActionType, + pub outcome: ActionOutcome, + pub status_code: i32, + pub stop_time: u64, + pub message: Option<::flatbuffers::WIPOffset<&'a str>>, + pub command_id: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for ActionResponseArgs<'a> { + #[inline] + fn default() -> Self { + ActionResponseArgs { + service_id: None, // required field + job_id: None, // required field + action: ActionType::StartJob, + outcome: ActionOutcome::Success, + status_code: 0, + stop_time: 0, + message: None, + command_id: None, + } + } +} + +pub struct ActionResponseBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ActionResponseBuilder<'a, 'b, A> { + #[inline] + pub fn add_service_id(&mut self, service_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ActionResponse::VT_SERVICE_ID, service_id); + } + #[inline] + pub fn add_job_id(&mut self, job_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ActionResponse::VT_JOB_ID, job_id); + } + #[inline] + pub fn add_action(&mut self, action: ActionType) { + self.fbb_.push_slot::(ActionResponse::VT_ACTION, action, ActionType::StartJob); + } + #[inline] + pub fn add_outcome(&mut self, outcome: ActionOutcome) { + self.fbb_.push_slot::(ActionResponse::VT_OUTCOME, outcome, ActionOutcome::Success); + } + #[inline] + pub fn add_status_code(&mut self, status_code: i32) { + self.fbb_.push_slot::(ActionResponse::VT_STATUS_CODE, status_code, 0); + } + 
#[inline] + pub fn add_stop_time(&mut self, stop_time: u64) { + self.fbb_.push_slot::(ActionResponse::VT_STOP_TIME, stop_time, 0); + } + #[inline] + pub fn add_message(&mut self, message: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ActionResponse::VT_MESSAGE, message); + } + #[inline] + pub fn add_command_id(&mut self, command_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ActionResponse::VT_COMMAND_ID, command_id); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ActionResponseBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ActionResponseBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, ActionResponse::VT_SERVICE_ID,"service_id"); + self.fbb_.required(o, ActionResponse::VT_JOB_ID,"job_id"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ActionResponse<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ActionResponse"); + ds.field("service_id", &self.service_id()); + ds.field("job_id", &self.job_id()); + ds.field("action", &self.action()); + ds.field("outcome", &self.outcome()); + ds.field("status_code", &self.status_code()); + ds.field("stop_time", &self.stop_time()); + ds.field("message", &self.message()); + ds.field("command_id", &self.command_id()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `ActionResponse` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_action_response_unchecked`. 
+pub fn root_as_action_response(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `ActionResponse` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_action_response_unchecked`. +pub fn size_prefixed_root_as_action_response(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `ActionResponse` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_action_response_unchecked`. +pub fn root_as_action_response_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `ActionResponse` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_action_response_unchecked`. +pub fn size_prefixed_root_as_action_response_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a ActionResponse and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `ActionResponse`. 
+pub unsafe fn root_as_action_response_unchecked(buf: &[u8]) -> ActionResponse<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed ActionResponse and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `ActionResponse`. +pub unsafe fn size_prefixed_root_as_action_response_unchecked(buf: &[u8]) -> ActionResponse<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const ACTION_RESPONSE_IDENTIFIER: &str = "answ"; + +#[inline] +pub fn action_response_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, ACTION_RESPONSE_IDENTIFIER, false) +} + +#[inline] +pub fn action_response_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, ACTION_RESPONSE_IDENTIFIER, true) +} + +#[inline] +pub fn finish_action_response_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(ACTION_RESPONSE_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_action_response_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(ACTION_RESPONSE_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/da00_dataarray.rs b/rust/src/flatbuffers_generated/da00_dataarray.rs new file mode 100644 index 0000000..1e79f6c --- /dev/null +++ b/rust/src/flatbuffers_generated/da00_dataarray.rs @@ -0,0 +1,562 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. 
This will no longer be generated in 2021.")] +pub const ENUM_MIN_DA_00_DTYPE: i8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_DA_00_DTYPE: i8 = 11; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_DA_00_DTYPE: [da00_dtype; 12] = [ + da00_dtype::none, + da00_dtype::int8, + da00_dtype::uint8, + da00_dtype::int16, + da00_dtype::uint16, + da00_dtype::int32, + da00_dtype::uint32, + da00_dtype::int64, + da00_dtype::uint64, + da00_dtype::float32, + da00_dtype::float64, + da00_dtype::c_string, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct da00_dtype(pub i8); +#[allow(non_upper_case_globals)] +impl da00_dtype { + pub const none: Self = Self(0); + pub const int8: Self = Self(1); + pub const uint8: Self = Self(2); + pub const int16: Self = Self(3); + pub const uint16: Self = Self(4); + pub const int32: Self = Self(5); + pub const uint32: Self = Self(6); + pub const int64: Self = Self(7); + pub const uint64: Self = Self(8); + pub const float32: Self = Self(9); + pub const float64: Self = Self(10); + pub const c_string: Self = Self(11); + + pub const ENUM_MIN: i8 = 0; + pub const ENUM_MAX: i8 = 11; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::none, + Self::int8, + Self::uint8, + Self::int16, + Self::uint16, + Self::int32, + Self::uint32, + Self::int64, + Self::uint64, + Self::float32, + Self::float64, + Self::c_string, + ]; + /// Returns the variant's name or "" if unknown. 
+ pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::none => Some("none"), + Self::int8 => Some("int8"), + Self::uint8 => Some("uint8"), + Self::int16 => Some("int16"), + Self::uint16 => Some("uint16"), + Self::int32 => Some("int32"), + Self::uint32 => Some("uint32"), + Self::int64 => Some("int64"), + Self::uint64 => Some("uint64"), + Self::float32 => Some("float32"), + Self::float64 => Some("float64"), + Self::c_string => Some("c_string"), + _ => None, + } + } +} +impl ::core::fmt::Debug for da00_dtype { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for da00_dtype { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for da00_dtype { + type Output = da00_dtype; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for da00_dtype { + type Scalar = i8; + #[inline] + fn to_little_endian(self) -> i8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i8) -> Self { + let b = i8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for da00_dtype { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + i8::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for da00_dtype {} +pub enum da00_VariableOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct da00_Variable<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for da00_Variable<'a> { + type Inner = da00_Variable<'a>; + #[inline] + 
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> da00_Variable<'a> { + pub const VT_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_UNIT: ::flatbuffers::VOffsetT = 6; + pub const VT_LABEL: ::flatbuffers::VOffsetT = 8; + pub const VT_SOURCE: ::flatbuffers::VOffsetT = 10; + pub const VT_DATA_TYPE: ::flatbuffers::VOffsetT = 12; + pub const VT_AXES: ::flatbuffers::VOffsetT = 14; + pub const VT_SHAPE: ::flatbuffers::VOffsetT = 16; + pub const VT_DATA: ::flatbuffers::VOffsetT = 18; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + da00_Variable { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args da00_VariableArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = da00_VariableBuilder::new(_fbb); + if let Some(x) = args.data { builder.add_data(x); } + if let Some(x) = args.shape { builder.add_shape(x); } + if let Some(x) = args.axes { builder.add_axes(x); } + if let Some(x) = args.source { builder.add_source(x); } + if let Some(x) = args.label { builder.add_label(x); } + if let Some(x) = args.unit { builder.add_unit(x); } + if let Some(x) = args.name { builder.add_name(x); } + builder.add_data_type(args.data_type); + builder.finish() + } + + + #[inline] + pub fn name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(da00_Variable::VT_NAME, None).unwrap()} + } + #[inline] + pub fn unit(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(da00_Variable::VT_UNIT, None)} + } + 
#[inline] + pub fn label(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(da00_Variable::VT_LABEL, None)} + } + #[inline] + pub fn source(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(da00_Variable::VT_SOURCE, None)} + } + #[inline] + pub fn data_type(&self) -> da00_dtype { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(da00_Variable::VT_DATA_TYPE, Some(da00_dtype::none)).unwrap()} + } + #[inline] + pub fn axes(&self) -> Option<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset<&'a str>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset<&'a str>>>>(da00_Variable::VT_AXES, None)} + } + #[inline] + pub fn shape(&self) -> ::flatbuffers::Vector<'a, i64> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(da00_Variable::VT_SHAPE, None).unwrap()} + } + #[inline] + pub fn data(&self) -> ::flatbuffers::Vector<'a, u8> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u8>>>(da00_Variable::VT_DATA, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for da00_Variable<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? 
+ .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("name", Self::VT_NAME, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("unit", Self::VT_UNIT, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("label", Self::VT_LABEL, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source", Self::VT_SOURCE, false)? + .visit_field::("data_type", Self::VT_DATA_TYPE, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, ::flatbuffers::ForwardsUOffset<&'_ str>>>>("axes", Self::VT_AXES, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("shape", Self::VT_SHAPE, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u8>>>("data", Self::VT_DATA, true)? + .finish(); + Ok(()) + } +} +pub struct da00_VariableArgs<'a> { + pub name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub unit: Option<::flatbuffers::WIPOffset<&'a str>>, + pub label: Option<::flatbuffers::WIPOffset<&'a str>>, + pub source: Option<::flatbuffers::WIPOffset<&'a str>>, + pub data_type: da00_dtype, + pub axes: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset<&'a str>>>>, + pub shape: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, + pub data: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u8>>>, +} +impl<'a> Default for da00_VariableArgs<'a> { + #[inline] + fn default() -> Self { + da00_VariableArgs { + name: None, // required field + unit: None, + label: None, + source: None, + data_type: da00_dtype::none, + axes: None, + shape: None, // required field + data: None, // required field + } + } +} + +pub struct da00_VariableBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> da00_VariableBuilder<'a, 'b, A> { + #[inline] + pub fn 
add_name(&mut self, name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_NAME, name); + } + #[inline] + pub fn add_unit(&mut self, unit: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_UNIT, unit); + } + #[inline] + pub fn add_label(&mut self, label: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_LABEL, label); + } + #[inline] + pub fn add_source(&mut self, source: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_SOURCE, source); + } + #[inline] + pub fn add_data_type(&mut self, data_type: da00_dtype) { + self.fbb_.push_slot::(da00_Variable::VT_DATA_TYPE, data_type, da00_dtype::none); + } + #[inline] + pub fn add_axes(&mut self, axes: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , ::flatbuffers::ForwardsUOffset<&'b str>>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_AXES, axes); + } + #[inline] + pub fn add_shape(&mut self, shape: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_SHAPE, shape); + } + #[inline] + pub fn add_data(&mut self, data: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u8>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_Variable::VT_DATA, data); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> da00_VariableBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + da00_VariableBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, da00_Variable::VT_NAME,"name"); + self.fbb_.required(o, da00_Variable::VT_SHAPE,"shape"); + self.fbb_.required(o, 
da00_Variable::VT_DATA,"data"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for da00_Variable<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("da00_Variable"); + ds.field("name", &self.name()); + ds.field("unit", &self.unit()); + ds.field("label", &self.label()); + ds.field("source", &self.source()); + ds.field("data_type", &self.data_type()); + ds.field("axes", &self.axes()); + ds.field("shape", &self.shape()); + ds.field("data", &self.data()); + ds.finish() + } +} +pub enum da00_DataArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct da00_DataArray<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for da00_DataArray<'a> { + type Inner = da00_DataArray<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> da00_DataArray<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 6; + pub const VT_DATA: ::flatbuffers::VOffsetT = 8; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + da00_DataArray { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args da00_DataArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = da00_DataArrayBuilder::new(_fbb); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.data { builder.add_data(x); } + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.finish() + } + + + #[inline] + pub fn source_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { 
self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(da00_DataArray::VT_SOURCE_NAME, None).unwrap()} + } + #[inline] + pub fn timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(da00_DataArray::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn data(&self) -> ::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>(da00_DataArray::VT_DATA, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for da00_DataArray<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)? + .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, ::flatbuffers::ForwardsUOffset>>>("data", Self::VT_DATA, true)? 
+ .finish(); + Ok(()) + } +} +pub struct da00_DataArrayArgs<'a> { + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub timestamp: i64, + pub data: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>>, +} +impl<'a> Default for da00_DataArrayArgs<'a> { + #[inline] + fn default() -> Self { + da00_DataArrayArgs { + source_name: None, // required field + timestamp: 0, + data: None, // required field + } + } +} + +pub struct da00_DataArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> da00_DataArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_DataArray::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(da00_DataArray::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub fn add_data(&mut self, data: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , ::flatbuffers::ForwardsUOffset>>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(da00_DataArray::VT_DATA, data); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> da00_DataArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + da00_DataArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, da00_DataArray::VT_SOURCE_NAME,"source_name"); + self.fbb_.required(o, da00_DataArray::VT_DATA,"data"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for da00_DataArray<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = 
f.debug_struct("da00_DataArray"); + ds.field("source_name", &self.source_name()); + ds.field("timestamp", &self.timestamp()); + ds.field("data", &self.data()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `da00_DataArray` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_da_00_data_array_unchecked`. +pub fn root_as_da_00_data_array(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `da00_DataArray` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_da_00_data_array_unchecked`. +pub fn size_prefixed_root_as_da_00_data_array(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `da00_DataArray` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_da_00_data_array_unchecked`. +pub fn root_as_da_00_data_array_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `da00_DataArray` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_da_00_data_array_unchecked`. 
+pub fn size_prefixed_root_as_da_00_data_array_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a da00_DataArray and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `da00_DataArray`. +pub unsafe fn root_as_da_00_data_array_unchecked(buf: &[u8]) -> da00_DataArray<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed da00_DataArray and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `da00_DataArray`. +pub unsafe fn size_prefixed_root_as_da_00_data_array_unchecked(buf: &[u8]) -> da00_DataArray<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const DA_00_DATA_ARRAY_IDENTIFIER: &str = "da00"; + +#[inline] +pub fn da_00_data_array_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, DA_00_DATA_ARRAY_IDENTIFIER, false) +} + +#[inline] +pub fn da_00_data_array_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, DA_00_DATA_ARRAY_IDENTIFIER, true) +} + +#[inline] +pub fn finish_da_00_data_array_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(DA_00_DATA_ARRAY_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_da_00_data_array_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(DA_00_DATA_ARRAY_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/df12_det_spec_map.rs 
b/rust/src/flatbuffers_generated/df12_det_spec_map.rs new file mode 100644 index 0000000..06399f8 --- /dev/null +++ b/rust/src/flatbuffers_generated/df12_det_spec_map.rs @@ -0,0 +1,218 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum SpectraDetectorMappingOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct SpectraDetectorMapping<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for SpectraDetectorMapping<'a> { + type Inner = SpectraDetectorMapping<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> SpectraDetectorMapping<'a> { + pub const VT_SPECTRUM: ::flatbuffers::VOffsetT = 4; + pub const VT_DETECTOR_ID: ::flatbuffers::VOffsetT = 6; + pub const VT_N_SPECTRA: ::flatbuffers::VOffsetT = 8; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + SpectraDetectorMapping { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args SpectraDetectorMappingArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = SpectraDetectorMappingBuilder::new(_fbb); + builder.add_n_spectra(args.n_spectra); + if let Some(x) = args.detector_id { builder.add_detector_id(x); } + if let Some(x) = args.spectrum { builder.add_spectrum(x); } + builder.finish() + } + + + #[inline] + pub fn spectrum(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(SpectraDetectorMapping::VT_SPECTRUM, None)} + } + #[inline] + pub fn detector_id(&self) -> Option<::flatbuffers::Vector<'a, i32>> 
{ + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(SpectraDetectorMapping::VT_DETECTOR_ID, None)} + } + #[inline] + pub fn n_spectra(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(SpectraDetectorMapping::VT_N_SPECTRA, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for SpectraDetectorMapping<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("spectrum", Self::VT_SPECTRUM, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("detector_id", Self::VT_DETECTOR_ID, false)? + .visit_field::("n_spectra", Self::VT_N_SPECTRA, false)? 
+ .finish(); + Ok(()) + } +} +pub struct SpectraDetectorMappingArgs<'a> { + pub spectrum: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub detector_id: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub n_spectra: i32, +} +impl<'a> Default for SpectraDetectorMappingArgs<'a> { + #[inline] + fn default() -> Self { + SpectraDetectorMappingArgs { + spectrum: None, + detector_id: None, + n_spectra: 0, + } + } +} + +pub struct SpectraDetectorMappingBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> SpectraDetectorMappingBuilder<'a, 'b, A> { + #[inline] + pub fn add_spectrum(&mut self, spectrum: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(SpectraDetectorMapping::VT_SPECTRUM, spectrum); + } + #[inline] + pub fn add_detector_id(&mut self, detector_id: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(SpectraDetectorMapping::VT_DETECTOR_ID, detector_id); + } + #[inline] + pub fn add_n_spectra(&mut self, n_spectra: i32) { + self.fbb_.push_slot::(SpectraDetectorMapping::VT_N_SPECTRA, n_spectra, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> SpectraDetectorMappingBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + SpectraDetectorMappingBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for SpectraDetectorMapping<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("SpectraDetectorMapping"); + 
ds.field("spectrum", &self.spectrum()); + ds.field("detector_id", &self.detector_id()); + ds.field("n_spectra", &self.n_spectra()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `SpectraDetectorMapping` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_spectra_detector_mapping_unchecked`. +pub fn root_as_spectra_detector_mapping(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `SpectraDetectorMapping` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_spectra_detector_mapping_unchecked`. +pub fn size_prefixed_root_as_spectra_detector_mapping(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `SpectraDetectorMapping` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_spectra_detector_mapping_unchecked`. +pub fn root_as_spectra_detector_mapping_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `SpectraDetectorMapping` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. 
For the +/// previous, unchecked, behavior use +/// `root_as_spectra_detector_mapping_unchecked`. +pub fn size_prefixed_root_as_spectra_detector_mapping_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a SpectraDetectorMapping and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `SpectraDetectorMapping`. +pub unsafe fn root_as_spectra_detector_mapping_unchecked(buf: &[u8]) -> SpectraDetectorMapping<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed SpectraDetectorMapping and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `SpectraDetectorMapping`. +pub unsafe fn size_prefixed_root_as_spectra_detector_mapping_unchecked(buf: &[u8]) -> SpectraDetectorMapping<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const SPECTRA_DETECTOR_MAPPING_IDENTIFIER: &str = "df12"; + +#[inline] +pub fn spectra_detector_mapping_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, SPECTRA_DETECTOR_MAPPING_IDENTIFIER, false) +} + +#[inline] +pub fn spectra_detector_mapping_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, SPECTRA_DETECTOR_MAPPING_IDENTIFIER, true) +} + +#[inline] +pub fn finish_spectra_detector_mapping_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(SPECTRA_DETECTOR_MAPPING_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_spectra_detector_mapping_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut 
::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(SPECTRA_DETECTOR_MAPPING_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/ep01_epics_connection.rs b/rust/src/flatbuffers_generated/ep01_epics_connection.rs new file mode 100644 index 0000000..6937886 --- /dev/null +++ b/rust/src/flatbuffers_generated/ep01_epics_connection.rs @@ -0,0 +1,344 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_CONNECTION_INFO: i16 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_CONNECTION_INFO: i16 = 7; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_CONNECTION_INFO: [ConnectionInfo; 8] = [ + ConnectionInfo::UNKNOWN, + ConnectionInfo::NEVER_CONNECTED, + ConnectionInfo::CONNECTED, + ConnectionInfo::DISCONNECTED, + ConnectionInfo::DESTROYED, + ConnectionInfo::CANCELLED, + ConnectionInfo::FINISHED, + ConnectionInfo::REMOTE_ERROR, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct ConnectionInfo(pub i16); +#[allow(non_upper_case_globals)] +impl ConnectionInfo { + pub const UNKNOWN: Self = Self(0); + pub const NEVER_CONNECTED: Self = Self(1); + pub const CONNECTED: Self = Self(2); + pub const DISCONNECTED: Self = Self(3); + pub const DESTROYED: Self = Self(4); + pub const CANCELLED: Self = Self(5); + pub const FINISHED: Self = Self(6); + pub const REMOTE_ERROR: Self = Self(7); + + pub const ENUM_MIN: i16 = 0; + pub const ENUM_MAX: i16 = 7; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::UNKNOWN, + Self::NEVER_CONNECTED, + Self::CONNECTED, 
+ Self::DISCONNECTED, + Self::DESTROYED, + Self::CANCELLED, + Self::FINISHED, + Self::REMOTE_ERROR, + ]; + /// Returns the variant's name or "" if unknown. + pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::UNKNOWN => Some("UNKNOWN"), + Self::NEVER_CONNECTED => Some("NEVER_CONNECTED"), + Self::CONNECTED => Some("CONNECTED"), + Self::DISCONNECTED => Some("DISCONNECTED"), + Self::DESTROYED => Some("DESTROYED"), + Self::CANCELLED => Some("CANCELLED"), + Self::FINISHED => Some("FINISHED"), + Self::REMOTE_ERROR => Some("REMOTE_ERROR"), + _ => None, + } + } +} +impl ::core::fmt::Debug for ConnectionInfo { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for ConnectionInfo { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for ConnectionInfo { + type Output = ConnectionInfo; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for ConnectionInfo { + type Scalar = i16; + #[inline] + fn to_little_endian(self) -> i16 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i16) -> Self { + let b = i16::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for ConnectionInfo { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + i16::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for ConnectionInfo {} +pub enum EpicsPVConnectionInfoOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct EpicsPVConnectionInfo<'a> { + pub _tab: 
::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for EpicsPVConnectionInfo<'a> { + type Inner = EpicsPVConnectionInfo<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> EpicsPVConnectionInfo<'a> { + pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 4; + pub const VT_STATUS: ::flatbuffers::VOffsetT = 6; + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 8; + pub const VT_SERVICE_ID: ::flatbuffers::VOffsetT = 10; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + EpicsPVConnectionInfo { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args EpicsPVConnectionInfoArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = EpicsPVConnectionInfoBuilder::new(_fbb); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.service_id { builder.add_service_id(x); } + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.add_status(args.status); + builder.finish() + } + + + #[inline] + pub fn timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(EpicsPVConnectionInfo::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn status(&self) -> ConnectionInfo { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(EpicsPVConnectionInfo::VT_STATUS, Some(ConnectionInfo::UNKNOWN)).unwrap()} + } + #[inline] + pub fn source_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { 
self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(EpicsPVConnectionInfo::VT_SOURCE_NAME, None).unwrap()} + } + #[inline] + pub fn service_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(EpicsPVConnectionInfo::VT_SERVICE_ID, None)} + } +} + +impl ::flatbuffers::Verifiable for EpicsPVConnectionInfo<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? + .visit_field::("status", Self::VT_STATUS, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("service_id", Self::VT_SERVICE_ID, false)? + .finish(); + Ok(()) + } +} +pub struct EpicsPVConnectionInfoArgs<'a> { + pub timestamp: i64, + pub status: ConnectionInfo, + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub service_id: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for EpicsPVConnectionInfoArgs<'a> { + #[inline] + fn default() -> Self { + EpicsPVConnectionInfoArgs { + timestamp: 0, + status: ConnectionInfo::UNKNOWN, + source_name: None, // required field + service_id: None, + } + } +} + +pub struct EpicsPVConnectionInfoBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> EpicsPVConnectionInfoBuilder<'a, 'b, A> { + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(EpicsPVConnectionInfo::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub fn add_status(&mut self, status: ConnectionInfo) { + 
self.fbb_.push_slot::(EpicsPVConnectionInfo::VT_STATUS, status, ConnectionInfo::UNKNOWN); + } + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EpicsPVConnectionInfo::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_service_id(&mut self, service_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EpicsPVConnectionInfo::VT_SERVICE_ID, service_id); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> EpicsPVConnectionInfoBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + EpicsPVConnectionInfoBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, EpicsPVConnectionInfo::VT_SOURCE_NAME,"source_name"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for EpicsPVConnectionInfo<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("EpicsPVConnectionInfo"); + ds.field("timestamp", &self.timestamp()); + ds.field("status", &self.status()); + ds.field("source_name", &self.source_name()); + ds.field("service_id", &self.service_id()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `EpicsPVConnectionInfo` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_epics_pvconnection_info_unchecked`. +pub fn root_as_epics_pvconnection_info(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `EpicsPVConnectionInfo` and returns it. 
+/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_epics_pvconnection_info_unchecked`. +pub fn size_prefixed_root_as_epics_pvconnection_info(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `EpicsPVConnectionInfo` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_epics_pvconnection_info_unchecked`. +pub fn root_as_epics_pvconnection_info_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `EpicsPVConnectionInfo` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_epics_pvconnection_info_unchecked`. +pub fn size_prefixed_root_as_epics_pvconnection_info_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a EpicsPVConnectionInfo and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `EpicsPVConnectionInfo`. 
+pub unsafe fn root_as_epics_pvconnection_info_unchecked(buf: &[u8]) -> EpicsPVConnectionInfo<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed EpicsPVConnectionInfo and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `EpicsPVConnectionInfo`. +pub unsafe fn size_prefixed_root_as_epics_pvconnection_info_unchecked(buf: &[u8]) -> EpicsPVConnectionInfo<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const EPICS_PVCONNECTION_INFO_IDENTIFIER: &str = "ep01"; + +#[inline] +pub fn epics_pvconnection_info_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, EPICS_PVCONNECTION_INFO_IDENTIFIER, false) +} + +#[inline] +pub fn epics_pvconnection_info_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, EPICS_PVCONNECTION_INFO_IDENTIFIER, true) +} + +#[inline] +pub fn finish_epics_pvconnection_info_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(EPICS_PVCONNECTION_INFO_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_epics_pvconnection_info_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(EPICS_PVCONNECTION_INFO_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/ev44_events.rs b/rust/src/flatbuffers_generated/ev44_events.rs new file mode 100644 index 0000000..fa18ab4 --- /dev/null +++ b/rust/src/flatbuffers_generated/ev44_events.rs @@ -0,0 +1,272 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum Event44MessageOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct 
Event44Message<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Event44Message<'a> { + type Inner = Event44Message<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Event44Message<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_MESSAGE_ID: ::flatbuffers::VOffsetT = 6; + pub const VT_REFERENCE_TIME: ::flatbuffers::VOffsetT = 8; + pub const VT_REFERENCE_TIME_INDEX: ::flatbuffers::VOffsetT = 10; + pub const VT_TIME_OF_FLIGHT: ::flatbuffers::VOffsetT = 12; + pub const VT_PIXEL_ID: ::flatbuffers::VOffsetT = 14; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Event44Message { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args Event44MessageArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = Event44MessageBuilder::new(_fbb); + builder.add_message_id(args.message_id); + if let Some(x) = args.pixel_id { builder.add_pixel_id(x); } + if let Some(x) = args.time_of_flight { builder.add_time_of_flight(x); } + if let Some(x) = args.reference_time_index { builder.add_reference_time_index(x); } + if let Some(x) = args.reference_time { builder.add_reference_time(x); } + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.finish() + } + + + #[inline] + pub fn source_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Event44Message::VT_SOURCE_NAME, None).unwrap()} + } + #[inline] + pub fn message_id(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this 
slot + unsafe { self._tab.get::(Event44Message::VT_MESSAGE_ID, Some(0)).unwrap()} + } + #[inline] + pub fn reference_time(&self) -> ::flatbuffers::Vector<'a, i64> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(Event44Message::VT_REFERENCE_TIME, None).unwrap()} + } + #[inline] + pub fn reference_time_index(&self) -> ::flatbuffers::Vector<'a, i32> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(Event44Message::VT_REFERENCE_TIME_INDEX, None).unwrap()} + } + #[inline] + pub fn time_of_flight(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(Event44Message::VT_TIME_OF_FLIGHT, None)} + } + #[inline] + pub fn pixel_id(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(Event44Message::VT_PIXEL_ID, None)} + } +} + +impl ::flatbuffers::Verifiable for Event44Message<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)? + .visit_field::("message_id", Self::VT_MESSAGE_ID, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("reference_time", Self::VT_REFERENCE_TIME, true)? 
+ .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("reference_time_index", Self::VT_REFERENCE_TIME_INDEX, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("time_of_flight", Self::VT_TIME_OF_FLIGHT, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("pixel_id", Self::VT_PIXEL_ID, false)? + .finish(); + Ok(()) + } +} +pub struct Event44MessageArgs<'a> { + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub message_id: i64, + pub reference_time: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, + pub reference_time_index: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub time_of_flight: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub pixel_id: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, +} +impl<'a> Default for Event44MessageArgs<'a> { + #[inline] + fn default() -> Self { + Event44MessageArgs { + source_name: None, // required field + message_id: 0, + reference_time: None, // required field + reference_time_index: None, // required field + time_of_flight: None, + pixel_id: None, + } + } +} + +pub struct Event44MessageBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> Event44MessageBuilder<'a, 'b, A> { + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Event44Message::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_message_id(&mut self, message_id: i64) { + self.fbb_.push_slot::(Event44Message::VT_MESSAGE_ID, message_id, 0); + } + #[inline] + pub fn add_reference_time(&mut self, reference_time: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + 
self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Event44Message::VT_REFERENCE_TIME, reference_time); + } + #[inline] + pub fn add_reference_time_index(&mut self, reference_time_index: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Event44Message::VT_REFERENCE_TIME_INDEX, reference_time_index); + } + #[inline] + pub fn add_time_of_flight(&mut self, time_of_flight: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Event44Message::VT_TIME_OF_FLIGHT, time_of_flight); + } + #[inline] + pub fn add_pixel_id(&mut self, pixel_id: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Event44Message::VT_PIXEL_ID, pixel_id); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> Event44MessageBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + Event44MessageBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Event44Message::VT_SOURCE_NAME,"source_name"); + self.fbb_.required(o, Event44Message::VT_REFERENCE_TIME,"reference_time"); + self.fbb_.required(o, Event44Message::VT_REFERENCE_TIME_INDEX,"reference_time_index"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Event44Message<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Event44Message"); + ds.field("source_name", &self.source_name()); + ds.field("message_id", &self.message_id()); + ds.field("reference_time", &self.reference_time()); + ds.field("reference_time_index", &self.reference_time_index()); + ds.field("time_of_flight", &self.time_of_flight()); + ds.field("pixel_id", &self.pixel_id()); + ds.finish() + } +} +#[inline] +/// Verifies that a 
buffer of bytes contains a `Event44Message` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_event_44_message_unchecked`. +pub fn root_as_event_44_message(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `Event44Message` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_event_44_message_unchecked`. +pub fn size_prefixed_root_as_event_44_message(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `Event44Message` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_event_44_message_unchecked`. +pub fn root_as_event_44_message_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `Event44Message` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_event_44_message_unchecked`. 
+pub fn size_prefixed_root_as_event_44_message_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a Event44Message and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `Event44Message`. +pub unsafe fn root_as_event_44_message_unchecked(buf: &[u8]) -> Event44Message<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed Event44Message and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `Event44Message`. +pub unsafe fn size_prefixed_root_as_event_44_message_unchecked(buf: &[u8]) -> Event44Message<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const EVENT_44_MESSAGE_IDENTIFIER: &str = "ev44"; + +#[inline] +pub fn event_44_message_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, EVENT_44_MESSAGE_IDENTIFIER, false) +} + +#[inline] +pub fn event_44_message_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, EVENT_44_MESSAGE_IDENTIFIER, true) +} + +#[inline] +pub fn finish_event_44_message_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(EVENT_44_MESSAGE_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_event_44_message_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(EVENT_44_MESSAGE_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/f144_logdata.rs 
b/rust/src/flatbuffers_generated/f144_logdata.rs new file mode 100644 index 0000000..6e05b3a --- /dev/null +++ b/rust/src/flatbuffers_generated/f144_logdata.rs @@ -0,0 +1,2767 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_VALUE: u8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_VALUE: u8 = 20; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_VALUE: [Value; 21] = [ + Value::NONE, + Value::Byte, + Value::UByte, + Value::Short, + Value::UShort, + Value::Int, + Value::UInt, + Value::Long, + Value::ULong, + Value::Float, + Value::Double, + Value::ArrayByte, + Value::ArrayUByte, + Value::ArrayShort, + Value::ArrayUShort, + Value::ArrayInt, + Value::ArrayUInt, + Value::ArrayLong, + Value::ArrayULong, + Value::ArrayFloat, + Value::ArrayDouble, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct Value(pub u8); +#[allow(non_upper_case_globals)] +impl Value { + pub const NONE: Self = Self(0); + pub const Byte: Self = Self(1); + pub const UByte: Self = Self(2); + pub const Short: Self = Self(3); + pub const UShort: Self = Self(4); + pub const Int: Self = Self(5); + pub const UInt: Self = Self(6); + pub const Long: Self = Self(7); + pub const ULong: Self = Self(8); + pub const Float: Self = Self(9); + pub const Double: Self = Self(10); + pub const ArrayByte: Self = Self(11); + pub const ArrayUByte: Self = Self(12); + pub const ArrayShort: Self = Self(13); + pub const ArrayUShort: Self = Self(14); + pub const ArrayInt: Self = Self(15); + pub const ArrayUInt: Self = Self(16); + pub const ArrayLong: Self 
= Self(17); + pub const ArrayULong: Self = Self(18); + pub const ArrayFloat: Self = Self(19); + pub const ArrayDouble: Self = Self(20); + + pub const ENUM_MIN: u8 = 0; + pub const ENUM_MAX: u8 = 20; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::NONE, + Self::Byte, + Self::UByte, + Self::Short, + Self::UShort, + Self::Int, + Self::UInt, + Self::Long, + Self::ULong, + Self::Float, + Self::Double, + Self::ArrayByte, + Self::ArrayUByte, + Self::ArrayShort, + Self::ArrayUShort, + Self::ArrayInt, + Self::ArrayUInt, + Self::ArrayLong, + Self::ArrayULong, + Self::ArrayFloat, + Self::ArrayDouble, + ]; + /// Returns the variant's name or "" if unknown. + pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::NONE => Some("NONE"), + Self::Byte => Some("Byte"), + Self::UByte => Some("UByte"), + Self::Short => Some("Short"), + Self::UShort => Some("UShort"), + Self::Int => Some("Int"), + Self::UInt => Some("UInt"), + Self::Long => Some("Long"), + Self::ULong => Some("ULong"), + Self::Float => Some("Float"), + Self::Double => Some("Double"), + Self::ArrayByte => Some("ArrayByte"), + Self::ArrayUByte => Some("ArrayUByte"), + Self::ArrayShort => Some("ArrayShort"), + Self::ArrayUShort => Some("ArrayUShort"), + Self::ArrayInt => Some("ArrayInt"), + Self::ArrayUInt => Some("ArrayUInt"), + Self::ArrayLong => Some("ArrayLong"), + Self::ArrayULong => Some("ArrayULong"), + Self::ArrayFloat => Some("ArrayFloat"), + Self::ArrayDouble => Some("ArrayDouble"), + _ => None, + } + } +} +impl ::core::fmt::Debug for Value { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for Value { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push 
for Value { + type Output = Value; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for Value { + type Scalar = u8; + #[inline] + fn to_little_endian(self) -> u8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: u8) -> Self { + let b = u8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for Value { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + u8::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for Value {} +pub struct ValueUnionTableOffset {} + +pub enum ByteOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Byte<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Byte<'a> { + type Inner = Byte<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Byte<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Byte { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ByteArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ByteBuilder::new(_fbb); + builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> i8 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Byte::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Byte<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) 
-> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ByteArgs { + pub value: i8, +} +impl<'a> Default for ByteArgs { + #[inline] + fn default() -> Self { + ByteArgs { + value: 0, + } + } +} + +pub struct ByteBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ByteBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: i8) { + self.fbb_.push_slot::(Byte::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ByteBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ByteBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Byte<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Byte"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UByteOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UByte<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UByte<'a> { + type Inner = UByte<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UByte<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + UByte { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, 
A>, + args: &'args UByteArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UByteBuilder::new(_fbb); + builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> u8 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(UByte::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UByte<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct UByteArgs { + pub value: u8, +} +impl<'a> Default for UByteArgs { + #[inline] + fn default() -> Self { + UByteArgs { + value: 0, + } + } +} + +pub struct UByteBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UByteBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: u8) { + self.fbb_.push_slot::(UByte::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UByteBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UByteBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UByte<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UByte"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ShortOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Short<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Short<'a> { + type Inner = 
Short<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Short<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Short { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ShortArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ShortBuilder::new(_fbb); + builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> i16 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Short::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Short<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ShortArgs { + pub value: i16, +} +impl<'a> Default for ShortArgs { + #[inline] + fn default() -> Self { + ShortArgs { + value: 0, + } + } +} + +pub struct ShortBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ShortBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: i16) { + self.fbb_.push_slot::(Short::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ShortBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ShortBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Short<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Short"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UShortOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UShort<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UShort<'a> { + type Inner = UShort<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UShort<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + UShort { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UShortArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = 
UShortBuilder::new(_fbb); + builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> u16 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(UShort::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UShort<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct UShortArgs { + pub value: u16, +} +impl<'a> Default for UShortArgs { + #[inline] + fn default() -> Self { + UShortArgs { + value: 0, + } + } +} + +pub struct UShortBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UShortBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: u16) { + self.fbb_.push_slot::(UShort::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UShortBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UShortBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UShort<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UShort"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum IntOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Int<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Int<'a> { + type Inner = Int<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> 
Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Int<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Int { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args IntArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = IntBuilder::new(_fbb); + builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Int::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Int<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct IntArgs { + pub value: i32, +} +impl<'a> Default for IntArgs { + #[inline] + fn default() -> Self { + IntArgs { + value: 0, + } + } +} + +pub struct IntBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> IntBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: i32) { + self.fbb_.push_slot::(Int::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> IntBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + IntBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Int<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Int"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UIntOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UInt<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UInt<'a> { + type Inner = UInt<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UInt<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + UInt { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UIntArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UIntBuilder::new(_fbb); + 
builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> u32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(UInt::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UInt<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct UIntArgs { + pub value: u32, +} +impl<'a> Default for UIntArgs { + #[inline] + fn default() -> Self { + UIntArgs { + value: 0, + } + } +} + +pub struct UIntBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UIntBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: u32) { + self.fbb_.push_slot::(UInt::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UIntBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UIntBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UInt<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UInt"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum LongOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Long<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Long<'a> { + type Inner = Long<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { 
::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Long<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Long { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args LongArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = LongBuilder::new(_fbb); + builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Long::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Long<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct LongArgs { + pub value: i64, +} +impl<'a> Default for LongArgs { + #[inline] + fn default() -> Self { + LongArgs { + value: 0, + } + } +} + +pub struct LongBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> LongBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: i64) { + self.fbb_.push_slot::(Long::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> LongBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + LongBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Long<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Long"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ULongOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ULong<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ULong<'a> { + type Inner = ULong<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ULong<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ULong { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ULongArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ULongBuilder::new(_fbb); + 
builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> u64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(ULong::VT_VALUE, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for ULong<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ULongArgs { + pub value: u64, +} +impl<'a> Default for ULongArgs { + #[inline] + fn default() -> Self { + ULongArgs { + value: 0, + } + } +} + +pub struct ULongBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ULongBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: u64) { + self.fbb_.push_slot::(ULong::VT_VALUE, value, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ULongBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ULongBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ULong<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ULong"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum FloatOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Float<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Float<'a> { + type Inner = Float<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { 
::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Float<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Float { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args FloatArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = FloatBuilder::new(_fbb); + builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> f32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Float::VT_VALUE, Some(0.0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Float<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct FloatArgs { + pub value: f32, +} +impl<'a> Default for FloatArgs { + #[inline] + fn default() -> Self { + FloatArgs { + value: 0.0, + } + } +} + +pub struct FloatBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> FloatBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: f32) { + self.fbb_.push_slot::(Float::VT_VALUE, value, 0.0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> FloatBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + FloatBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Float<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Float"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum DoubleOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Double<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Double<'a> { + type Inner = Double<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Double<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Double { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args DoubleArgs + ) -> ::flatbuffers::WIPOffset> { + let mut builder = 
DoubleBuilder::new(_fbb); + builder.add_value(args.value); + builder.finish() + } + + + #[inline] + pub fn value(&self) -> f64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Double::VT_VALUE, Some(0.0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Double<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct DoubleArgs { + pub value: f64, +} +impl<'a> Default for DoubleArgs { + #[inline] + fn default() -> Self { + DoubleArgs { + value: 0.0, + } + } +} + +pub struct DoubleBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> DoubleBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: f64) { + self.fbb_.push_slot::(Double::VT_VALUE, value, 0.0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> DoubleBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + DoubleBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Double<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Double"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayByteOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayByte<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayByte<'a> { + type Inner = ArrayByte<'a>; + #[inline] + unsafe fn follow(buf: &'a 
[u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayByte<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayByte { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayByteArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayByteBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, i8>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i8>>>(ArrayByte::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayByte<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i8>>>("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ArrayByteArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i8>>>, +} +impl<'a> Default for ArrayByteArgs<'a> { + #[inline] + fn default() -> Self { + ArrayByteArgs { + value: None, + } + } +} + +pub struct ArrayByteBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayByteBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i8>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayByte::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayByteBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayByteBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayByte<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayByte"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayUByteOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayUByte<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayUByte<'a> { + type Inner = ArrayUByte<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayUByte<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayUByte { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 
'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayUByteArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayUByteBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, u8>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u8>>>(ArrayUByte::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayUByte<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u8>>>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ArrayUByteArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u8>>>, +} +impl<'a> Default for ArrayUByteArgs<'a> { + #[inline] + fn default() -> Self { + ArrayUByteArgs { + value: None, + } + } +} + +pub struct ArrayUByteBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayUByteBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u8>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayUByte::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayUByteBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayUByteBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> 
::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayUByte<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayUByte"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayShortOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayShort<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayShort<'a> { + type Inner = ArrayShort<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayShort<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayShort { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayShortArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayShortBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, i16>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i16>>>(ArrayShort::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayShort<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i16>>>("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ArrayShortArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i16>>>, +} +impl<'a> Default for ArrayShortArgs<'a> { + #[inline] + fn default() -> Self { + ArrayShortArgs { + value: None, + } + } +} + +pub struct ArrayShortBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayShortBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i16>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayShort::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayShortBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayShortBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayShort<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayShort"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayUShortOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayUShort<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayUShort<'a> { + type Inner = ArrayUShort<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayUShort<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayUShort { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 
'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayUShortArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayUShortBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, u16>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u16>>>(ArrayUShort::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayUShort<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u16>>>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ArrayUShortArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u16>>>, +} +impl<'a> Default for ArrayUShortArgs<'a> { + #[inline] + fn default() -> Self { + ArrayUShortArgs { + value: None, + } + } +} + +pub struct ArrayUShortBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayUShortBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u16>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayUShort::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayUShortBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayUShortBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn 
finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayUShort<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayUShort"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayIntOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayInt<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayInt<'a> { + type Inner = ArrayInt<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayInt<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayInt { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayIntArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayIntBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(ArrayInt::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayInt<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ArrayIntArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, +} +impl<'a> Default for ArrayIntArgs<'a> { + #[inline] + fn default() -> Self { + ArrayIntArgs { + value: None, + } + } +} + +pub struct ArrayIntBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayIntBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayInt::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayIntBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayIntBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayInt<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayInt"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayUIntOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayUInt<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayUInt<'a> { + type Inner = ArrayUInt<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayUInt<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayUInt { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, 
A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayUIntArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayUIntBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, u32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u32>>>(ArrayUInt::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayUInt<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u32>>>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ArrayUIntArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u32>>>, +} +impl<'a> Default for ArrayUIntArgs<'a> { + #[inline] + fn default() -> Self { + ArrayUIntArgs { + value: None, + } + } +} + +pub struct ArrayUIntBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayUIntBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayUInt::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayUIntBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayUIntBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = 
self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayUInt<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayUInt"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayLongOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayLong<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayLong<'a> { + type Inner = ArrayLong<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayLong<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayLong { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayLongArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayLongBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, i64>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(ArrayLong::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayLong<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ArrayLongArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, +} +impl<'a> Default for ArrayLongArgs<'a> { + #[inline] + fn default() -> Self { + ArrayLongArgs { + value: None, + } + } +} + +pub struct ArrayLongBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayLongBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayLong::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayLongBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayLongBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayLong<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayLong"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayULongOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayULong<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayULong<'a> { + type Inner = ArrayULong<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayULong<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayULong { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 
'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayULongArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayULongBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, u64>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u64>>>(ArrayULong::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayULong<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u64>>>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ArrayULongArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u64>>>, +} +impl<'a> Default for ArrayULongArgs<'a> { + #[inline] + fn default() -> Self { + ArrayULongArgs { + value: None, + } + } +} + +pub struct ArrayULongBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayULongBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayULong::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayULongBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayULongBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> 
::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayULong<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayULong"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayFloatOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayFloat<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayFloat<'a> { + type Inner = ArrayFloat<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayFloat<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayFloat { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayFloatArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayFloatBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, f32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, f32>>>(ArrayFloat::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayFloat<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, f32>>>("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ArrayFloatArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, f32>>>, +} +impl<'a> Default for ArrayFloatArgs<'a> { + #[inline] + fn default() -> Self { + ArrayFloatArgs { + value: None, + } + } +} + +pub struct ArrayFloatBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayFloatBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , f32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayFloat::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayFloatBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayFloatBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayFloat<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayFloat"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayDoubleOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayDouble<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayDouble<'a> { + type Inner = ArrayDouble<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayDouble<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayDouble { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 
'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayDoubleArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayDoubleBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, f64>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, f64>>>(ArrayDouble::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayDouble<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, f64>>>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ArrayDoubleArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, f64>>>, +} +impl<'a> Default for ArrayDoubleArgs<'a> { + #[inline] + fn default() -> Self { + ArrayDoubleArgs { + value: None, + } + } +} + +pub struct ArrayDoubleBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayDoubleBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , f64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayDouble::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayDoubleBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayDoubleBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn 
finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayDouble<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayDouble"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum f144_LogDataOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct f144_LogData<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for f144_LogData<'a> { + type Inner = f144_LogData<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> f144_LogData<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 6; + pub const VT_VALUE_TYPE: ::flatbuffers::VOffsetT = 8; + pub const VT_VALUE: ::flatbuffers::VOffsetT = 10; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + f144_LogData { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args f144_LogDataArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = f144_LogDataBuilder::new(_fbb); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.value { builder.add_value(x); } + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.add_value_type(args.value_type); + builder.finish() + } + + + #[inline] + pub fn source_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(f144_LogData::VT_SOURCE_NAME, None).unwrap()} + } + #[inline] + pub fn timestamp(&self) -> i64 { + // 
Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(f144_LogData::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn value_type(&self) -> Value { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(f144_LogData::VT_VALUE_TYPE, Some(Value::NONE)).unwrap()} + } + #[inline] + pub fn value(&self) -> ::flatbuffers::Table<'a> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Table<'a>>>(f144_LogData::VT_VALUE, None).unwrap()} + } + #[inline] + #[allow(non_snake_case)] + pub fn value_as_byte(&self) -> Option> { + if self.value_type() == Value::Byte { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Byte::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_ubyte(&self) -> Option> { + if self.value_type() == Value::UByte { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { UByte::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_short(&self) -> Option> { + if self.value_type() == Value::Short { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Short::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_ushort(&self) -> Option> { + if self.value_type() == Value::UShort { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { 
UShort::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_int(&self) -> Option> { + if self.value_type() == Value::Int { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Int::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_uint(&self) -> Option> { + if self.value_type() == Value::UInt { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { UInt::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_long(&self) -> Option> { + if self.value_type() == Value::Long { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Long::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_ulong(&self) -> Option> { + if self.value_type() == Value::ULong { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ULong::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_float(&self) -> Option> { + if self.value_type() == Value::Float { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Float::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_double(&self) -> Option> { + if self.value_type() == Value::Double { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Double::init_from_table(u) }) + } 
else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_byte(&self) -> Option> { + if self.value_type() == Value::ArrayByte { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayByte::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_ubyte(&self) -> Option> { + if self.value_type() == Value::ArrayUByte { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayUByte::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_short(&self) -> Option> { + if self.value_type() == Value::ArrayShort { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayShort::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_ushort(&self) -> Option> { + if self.value_type() == Value::ArrayUShort { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayUShort::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_int(&self) -> Option> { + if self.value_type() == Value::ArrayInt { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayInt::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_uint(&self) -> Option> { + if self.value_type() == Value::ArrayUInt { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in 
this slot + Some(unsafe { ArrayUInt::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_long(&self) -> Option> { + if self.value_type() == Value::ArrayLong { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayLong::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_ulong(&self) -> Option> { + if self.value_type() == Value::ArrayULong { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayULong::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_float(&self) -> Option> { + if self.value_type() == Value::ArrayFloat { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayFloat::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn value_as_array_double(&self) -> Option> { + if self.value_type() == Value::ArrayDouble { + let u = self.value(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { ArrayDouble::init_from_table(u) }) + } else { + None + } + } + +} + +impl ::flatbuffers::Verifiable for f144_LogData<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)? + .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? 
+ .visit_union::("value_type", Self::VT_VALUE_TYPE, "value", Self::VT_VALUE, true, |key, v, pos| { + match key { + Value::Byte => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::Byte", pos), + Value::UByte => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::UByte", pos), + Value::Short => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::Short", pos), + Value::UShort => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::UShort", pos), + Value::Int => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::Int", pos), + Value::UInt => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::UInt", pos), + Value::Long => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::Long", pos), + Value::ULong => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ULong", pos), + Value::Float => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::Float", pos), + Value::Double => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::Double", pos), + Value::ArrayByte => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayByte", pos), + Value::ArrayUByte => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayUByte", pos), + Value::ArrayShort => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayShort", pos), + Value::ArrayUShort => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayUShort", pos), + Value::ArrayInt => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayInt", pos), + Value::ArrayUInt => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayUInt", pos), + Value::ArrayLong => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayLong", pos), + Value::ArrayULong => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayULong", pos), + Value::ArrayFloat => 
v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayFloat", pos), + Value::ArrayDouble => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Value::ArrayDouble", pos), + _ => Ok(()), + } + })? + .finish(); + Ok(()) + } +} +pub struct f144_LogDataArgs<'a> { + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub timestamp: i64, + pub value_type: Value, + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>>, +} +impl<'a> Default for f144_LogDataArgs<'a> { + #[inline] + fn default() -> Self { + f144_LogDataArgs { + source_name: None, // required field + timestamp: 0, + value_type: Value::NONE, + value: None, // required field + } + } +} + +pub struct f144_LogDataBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> f144_LogDataBuilder<'a, 'b, A> { + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(f144_LogData::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(f144_LogData::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub fn add_value_type(&mut self, value_type: Value) { + self.fbb_.push_slot::(f144_LogData::VT_VALUE_TYPE, value_type, Value::NONE); + } + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(f144_LogData::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> f144_LogDataBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + f144_LogDataBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = 
self.fbb_.end_table(self.start_); + self.fbb_.required(o, f144_LogData::VT_SOURCE_NAME,"source_name"); + self.fbb_.required(o, f144_LogData::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for f144_LogData<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("f144_LogData"); + ds.field("source_name", &self.source_name()); + ds.field("timestamp", &self.timestamp()); + ds.field("value_type", &self.value_type()); + match self.value_type() { + Value::Byte => { + if let Some(x) = self.value_as_byte() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::UByte => { + if let Some(x) = self.value_as_ubyte() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::Short => { + if let Some(x) = self.value_as_short() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::UShort => { + if let Some(x) = self.value_as_ushort() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::Int => { + if let Some(x) = self.value_as_int() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::UInt => { + if let Some(x) = self.value_as_uint() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::Long => { + if let Some(x) = self.value_as_long() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ULong => { + if let Some(x) = self.value_as_ulong() { + ds.field("value", &x) + } else { + ds.field("value", 
&"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::Float => { + if let Some(x) = self.value_as_float() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::Double => { + if let Some(x) = self.value_as_double() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayByte => { + if let Some(x) = self.value_as_array_byte() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayUByte => { + if let Some(x) = self.value_as_array_ubyte() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayShort => { + if let Some(x) = self.value_as_array_short() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayUShort => { + if let Some(x) = self.value_as_array_ushort() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayInt => { + if let Some(x) = self.value_as_array_int() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayUInt => { + if let Some(x) = self.value_as_array_uint() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayLong => { + if let Some(x) = self.value_as_array_long() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayULong => { + if let Some(x) = self.value_as_array_ulong() { + ds.field("value", &x) + } else { + ds.field("value", 
&"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayFloat => { + if let Some(x) = self.value_as_array_float() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Value::ArrayDouble => { + if let Some(x) = self.value_as_array_double() { + ds.field("value", &x) + } else { + ds.field("value", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + _ => { + let x: Option<()> = None; + ds.field("value", &x) + }, + }; + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `f144_LogData` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_f_144_log_data_unchecked`. +pub fn root_as_f_144_log_data(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `f144_LogData` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_f_144_log_data_unchecked`. +pub fn size_prefixed_root_as_f_144_log_data(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `f144_LogData` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_f_144_log_data_unchecked`. 
+pub fn root_as_f_144_log_data_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `f144_LogData` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_f_144_log_data_unchecked`. +pub fn size_prefixed_root_as_f_144_log_data_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a f144_LogData and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `f144_LogData`. +pub unsafe fn root_as_f_144_log_data_unchecked(buf: &[u8]) -> f144_LogData<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed f144_LogData and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `f144_LogData`. 
+pub unsafe fn size_prefixed_root_as_f_144_log_data_unchecked(buf: &[u8]) -> f144_LogData<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const F_144_LOG_DATA_IDENTIFIER: &str = "f144"; + +#[inline] +pub fn f_144_log_data_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, F_144_LOG_DATA_IDENTIFIER, false) +} + +#[inline] +pub fn f_144_log_data_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, F_144_LOG_DATA_IDENTIFIER, true) +} + +#[inline] +pub fn finish_f_144_log_data_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(F_144_LOG_DATA_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_f_144_log_data_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(F_144_LOG_DATA_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/fc00_forwarder_config.rs b/rust/src/flatbuffers_generated/fc00_forwarder_config.rs new file mode 100644 index 0000000..144abb0 --- /dev/null +++ b/rust/src/flatbuffers_generated/fc00_forwarder_config.rs @@ -0,0 +1,545 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_UPDATE_TYPE: u16 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_UPDATE_TYPE: u16 = 3; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. 
This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_UPDATE_TYPE: [UpdateType; 4] = [ + UpdateType::ADD, + UpdateType::REMOVE, + UpdateType::REMOVEALL, + UpdateType::REPLACE, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct UpdateType(pub u16); +#[allow(non_upper_case_globals)] +impl UpdateType { + pub const ADD: Self = Self(0); + pub const REMOVE: Self = Self(1); + pub const REMOVEALL: Self = Self(2); + pub const REPLACE: Self = Self(3); + + pub const ENUM_MIN: u16 = 0; + pub const ENUM_MAX: u16 = 3; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::ADD, + Self::REMOVE, + Self::REMOVEALL, + Self::REPLACE, + ]; + /// Returns the variant's name or "" if unknown. + pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::ADD => Some("ADD"), + Self::REMOVE => Some("REMOVE"), + Self::REMOVEALL => Some("REMOVEALL"), + Self::REPLACE => Some("REPLACE"), + _ => None, + } + } +} +impl ::core::fmt::Debug for UpdateType { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for UpdateType { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for UpdateType { + type Output = UpdateType; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for UpdateType { + type Scalar = u16; + #[inline] + fn to_little_endian(self) -> u16 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: u16) -> Self { + let b = u16::from_le(v); + Self(b) + } +} + +impl<'a> 
::flatbuffers::Verifiable for UpdateType { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + u16::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for UpdateType {} +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_PROTOCOL: u16 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_PROTOCOL: u16 = 2; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_PROTOCOL: [Protocol; 3] = [ + Protocol::PVA, + Protocol::CA, + Protocol::FAKE, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct Protocol(pub u16); +#[allow(non_upper_case_globals)] +impl Protocol { + pub const PVA: Self = Self(0); + pub const CA: Self = Self(1); + pub const FAKE: Self = Self(2); + + pub const ENUM_MIN: u16 = 0; + pub const ENUM_MAX: u16 = 2; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::PVA, + Self::CA, + Self::FAKE, + ]; + /// Returns the variant's name or "" if unknown. 
+ pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::PVA => Some("PVA"), + Self::CA => Some("CA"), + Self::FAKE => Some("FAKE"), + _ => None, + } + } +} +impl ::core::fmt::Debug for Protocol { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for Protocol { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for Protocol { + type Output = Protocol; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for Protocol { + type Scalar = u16; + #[inline] + fn to_little_endian(self) -> u16 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: u16) -> Self { + let b = u16::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for Protocol { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + u16::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for Protocol {} +pub enum StreamOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Stream<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Stream<'a> { + type Inner = Stream<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Stream<'a> { + pub const VT_CHANNEL: ::flatbuffers::VOffsetT = 4; + pub const VT_SCHEMA: ::flatbuffers::VOffsetT = 6; + pub const VT_TOPIC: ::flatbuffers::VOffsetT = 8; + pub const VT_PROTOCOL: 
::flatbuffers::VOffsetT = 10; + pub const VT_PERIODIC: ::flatbuffers::VOffsetT = 12; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Stream { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args StreamArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = StreamBuilder::new(_fbb); + builder.add_periodic(args.periodic); + if let Some(x) = args.topic { builder.add_topic(x); } + if let Some(x) = args.schema { builder.add_schema(x); } + if let Some(x) = args.channel { builder.add_channel(x); } + builder.add_protocol(args.protocol); + builder.finish() + } + + + #[inline] + pub fn channel(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Stream::VT_CHANNEL, None)} + } + #[inline] + pub fn schema(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Stream::VT_SCHEMA, None)} + } + #[inline] + pub fn topic(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Stream::VT_TOPIC, None)} + } + #[inline] + pub fn protocol(&self) -> Protocol { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Stream::VT_PROTOCOL, Some(Protocol::PVA)).unwrap()} + } + #[inline] + pub fn periodic(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Stream::VT_PERIODIC, 
Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Stream<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("channel", Self::VT_CHANNEL, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("schema", Self::VT_SCHEMA, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("topic", Self::VT_TOPIC, false)? + .visit_field::("protocol", Self::VT_PROTOCOL, false)? + .visit_field::("periodic", Self::VT_PERIODIC, false)? + .finish(); + Ok(()) + } +} +pub struct StreamArgs<'a> { + pub channel: Option<::flatbuffers::WIPOffset<&'a str>>, + pub schema: Option<::flatbuffers::WIPOffset<&'a str>>, + pub topic: Option<::flatbuffers::WIPOffset<&'a str>>, + pub protocol: Protocol, + pub periodic: i32, +} +impl<'a> Default for StreamArgs<'a> { + #[inline] + fn default() -> Self { + StreamArgs { + channel: None, + schema: None, + topic: None, + protocol: Protocol::PVA, + periodic: 0, + } + } +} + +pub struct StreamBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> StreamBuilder<'a, 'b, A> { + #[inline] + pub fn add_channel(&mut self, channel: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Stream::VT_CHANNEL, channel); + } + #[inline] + pub fn add_schema(&mut self, schema: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Stream::VT_SCHEMA, schema); + } + #[inline] + pub fn add_topic(&mut self, topic: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Stream::VT_TOPIC, topic); + } + #[inline] + pub fn add_protocol(&mut self, protocol: Protocol) { + 
self.fbb_.push_slot::(Stream::VT_PROTOCOL, protocol, Protocol::PVA); + } + #[inline] + pub fn add_periodic(&mut self, periodic: i32) { + self.fbb_.push_slot::(Stream::VT_PERIODIC, periodic, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> StreamBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + StreamBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Stream<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Stream"); + ds.field("channel", &self.channel()); + ds.field("schema", &self.schema()); + ds.field("topic", &self.topic()); + ds.field("protocol", &self.protocol()); + ds.field("periodic", &self.periodic()); + ds.finish() + } +} +pub enum fc00_ConfigUpdateOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct fc00_ConfigUpdate<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for fc00_ConfigUpdate<'a> { + type Inner = fc00_ConfigUpdate<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> fc00_ConfigUpdate<'a> { + pub const VT_CONFIG_CHANGE: ::flatbuffers::VOffsetT = 4; + pub const VT_STREAMS: ::flatbuffers::VOffsetT = 6; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + fc00_ConfigUpdate { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args fc00_ConfigUpdateArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = fc00_ConfigUpdateBuilder::new(_fbb); + if let Some(x) = args.streams { builder.add_streams(x); } 
+ builder.add_config_change(args.config_change); + builder.finish() + } + + + #[inline] + pub fn config_change(&self) -> UpdateType { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(fc00_ConfigUpdate::VT_CONFIG_CHANGE, Some(UpdateType::ADD)).unwrap()} + } + #[inline] + pub fn streams(&self) -> Option<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>(fc00_ConfigUpdate::VT_STREAMS, None)} + } +} + +impl ::flatbuffers::Verifiable for fc00_ConfigUpdate<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("config_change", Self::VT_CONFIG_CHANGE, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, ::flatbuffers::ForwardsUOffset>>>("streams", Self::VT_STREAMS, false)? 
+ .finish(); + Ok(()) + } +} +pub struct fc00_ConfigUpdateArgs<'a> { + pub config_change: UpdateType, + pub streams: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>>, +} +impl<'a> Default for fc00_ConfigUpdateArgs<'a> { + #[inline] + fn default() -> Self { + fc00_ConfigUpdateArgs { + config_change: UpdateType::ADD, + streams: None, + } + } +} + +pub struct fc00_ConfigUpdateBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> fc00_ConfigUpdateBuilder<'a, 'b, A> { + #[inline] + pub fn add_config_change(&mut self, config_change: UpdateType) { + self.fbb_.push_slot::(fc00_ConfigUpdate::VT_CONFIG_CHANGE, config_change, UpdateType::ADD); + } + #[inline] + pub fn add_streams(&mut self, streams: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , ::flatbuffers::ForwardsUOffset>>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(fc00_ConfigUpdate::VT_STREAMS, streams); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> fc00_ConfigUpdateBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + fc00_ConfigUpdateBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for fc00_ConfigUpdate<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("fc00_ConfigUpdate"); + ds.field("config_change", &self.config_change()); + ds.field("streams", &self.streams()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `fc00_ConfigUpdate` +/// and returns it. 
+/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_fc_00_config_update_unchecked`. +pub fn root_as_fc_00_config_update(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `fc00_ConfigUpdate` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_fc_00_config_update_unchecked`. +pub fn size_prefixed_root_as_fc_00_config_update(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `fc00_ConfigUpdate` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_fc_00_config_update_unchecked`. +pub fn root_as_fc_00_config_update_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `fc00_ConfigUpdate` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_fc_00_config_update_unchecked`. 
+pub fn size_prefixed_root_as_fc_00_config_update_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a fc00_ConfigUpdate and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `fc00_ConfigUpdate`. +pub unsafe fn root_as_fc_00_config_update_unchecked(buf: &[u8]) -> fc00_ConfigUpdate<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed fc00_ConfigUpdate and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `fc00_ConfigUpdate`. +pub unsafe fn size_prefixed_root_as_fc_00_config_update_unchecked(buf: &[u8]) -> fc00_ConfigUpdate<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const FC_00_CONFIG_UPDATE_IDENTIFIER: &str = "fc00"; + +#[inline] +pub fn fc_00_config_update_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, FC_00_CONFIG_UPDATE_IDENTIFIER, false) +} + +#[inline] +pub fn fc_00_config_update_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, FC_00_CONFIG_UPDATE_IDENTIFIER, true) +} + +#[inline] +pub fn finish_fc_00_config_update_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(FC_00_CONFIG_UPDATE_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_fc_00_config_update_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(FC_00_CONFIG_UPDATE_IDENTIFIER)); +} diff --git 
a/rust/src/flatbuffers_generated/hs01_event_histogram.rs b/rust/src/flatbuffers_generated/hs01_event_histogram.rs new file mode 100644 index 0000000..ed032bf --- /dev/null +++ b/rust/src/flatbuffers_generated/hs01_event_histogram.rs @@ -0,0 +1,1301 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_ARRAY: u8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_ARRAY: u8 = 4; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_ARRAY: [Array; 5] = [ + Array::NONE, + Array::ArrayInt, + Array::ArrayLong, + Array::ArrayDouble, + Array::ArrayFloat, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct Array(pub u8); +#[allow(non_upper_case_globals)] +impl Array { + pub const NONE: Self = Self(0); + pub const ArrayInt: Self = Self(1); + pub const ArrayLong: Self = Self(2); + pub const ArrayDouble: Self = Self(3); + pub const ArrayFloat: Self = Self(4); + + pub const ENUM_MIN: u8 = 0; + pub const ENUM_MAX: u8 = 4; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::NONE, + Self::ArrayInt, + Self::ArrayLong, + Self::ArrayDouble, + Self::ArrayFloat, + ]; + /// Returns the variant's name or "" if unknown. 
+ pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::NONE => Some("NONE"), + Self::ArrayInt => Some("ArrayInt"), + Self::ArrayLong => Some("ArrayLong"), + Self::ArrayDouble => Some("ArrayDouble"), + Self::ArrayFloat => Some("ArrayFloat"), + _ => None, + } + } +} +impl ::core::fmt::Debug for Array { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for Array { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for Array { + type Output = Array; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for Array { + type Scalar = u8; + #[inline] + fn to_little_endian(self) -> u8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: u8) -> Self { + let b = u8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for Array { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + u8::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for Array {} +pub struct ArrayUnionTableOffset {} + +pub enum ArrayIntOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayInt<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayInt<'a> { + type Inner = ArrayInt<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayInt<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + 
pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayInt { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayIntArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayIntBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(ArrayInt::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayInt<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ArrayIntArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, +} +impl<'a> Default for ArrayIntArgs<'a> { + #[inline] + fn default() -> Self { + ArrayIntArgs { + value: None, + } + } +} + +pub struct ArrayIntBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayIntBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayInt::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayIntBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayIntBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayInt<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayInt"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayLongOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayLong<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayLong<'a> { + type Inner = ArrayLong<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayLong<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayLong { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, 
A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayLongArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayLongBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, i64>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(ArrayLong::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayLong<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ArrayLongArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, +} +impl<'a> Default for ArrayLongArgs<'a> { + #[inline] + fn default() -> Self { + ArrayLongArgs { + value: None, + } + } +} + +pub struct ArrayLongBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayLongBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayLong::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayLongBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayLongBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = 
self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayLong<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayLong"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayDoubleOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayDouble<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayDouble<'a> { + type Inner = ArrayDouble<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayDouble<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayDouble { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayDoubleArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayDoubleBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, f64>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, f64>>>(ArrayDouble::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayDouble<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, f64>>>("value", Self::VT_VALUE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct ArrayDoubleArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, f64>>>, +} +impl<'a> Default for ArrayDoubleArgs<'a> { + #[inline] + fn default() -> Self { + ArrayDoubleArgs { + value: None, + } + } +} + +pub struct ArrayDoubleBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayDoubleBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , f64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayDouble::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayDoubleBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayDoubleBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayDouble<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayDouble"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum ArrayFloatOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct ArrayFloat<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for ArrayFloat<'a> { + type Inner = ArrayFloat<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> ArrayFloat<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + ArrayFloat { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 
'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args ArrayFloatArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = ArrayFloatBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> Option<::flatbuffers::Vector<'a, f32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, f32>>>(ArrayFloat::VT_VALUE, None)} + } +} + +impl ::flatbuffers::Verifiable for ArrayFloat<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, f32>>>("value", Self::VT_VALUE, false)? + .finish(); + Ok(()) + } +} +pub struct ArrayFloatArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, f32>>>, +} +impl<'a> Default for ArrayFloatArgs<'a> { + #[inline] + fn default() -> Self { + ArrayFloatArgs { + value: None, + } + } +} + +pub struct ArrayFloatBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> ArrayFloatBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , f32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(ArrayFloat::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> ArrayFloatBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + ArrayFloatBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> 
::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for ArrayFloat<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("ArrayFloat"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum DimensionMetaDataOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct DimensionMetaData<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for DimensionMetaData<'a> { + type Inner = DimensionMetaData<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> DimensionMetaData<'a> { + pub const VT_LENGTH: ::flatbuffers::VOffsetT = 4; + pub const VT_UNIT: ::flatbuffers::VOffsetT = 6; + pub const VT_LABEL: ::flatbuffers::VOffsetT = 8; + pub const VT_BIN_BOUNDARIES_TYPE: ::flatbuffers::VOffsetT = 10; + pub const VT_BIN_BOUNDARIES: ::flatbuffers::VOffsetT = 12; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + DimensionMetaData { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args DimensionMetaDataArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = DimensionMetaDataBuilder::new(_fbb); + if let Some(x) = args.bin_boundaries { builder.add_bin_boundaries(x); } + if let Some(x) = args.label { builder.add_label(x); } + if let Some(x) = args.unit { builder.add_unit(x); } + builder.add_length(args.length); + builder.add_bin_boundaries_type(args.bin_boundaries_type); + builder.finish() + } + + + #[inline] + pub fn length(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { 
self._tab.get::(DimensionMetaData::VT_LENGTH, Some(0)).unwrap()} + } + #[inline] + pub fn unit(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(DimensionMetaData::VT_UNIT, None)} + } + #[inline] + pub fn label(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(DimensionMetaData::VT_LABEL, None)} + } + #[inline] + pub fn bin_boundaries_type(&self) -> Array { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(DimensionMetaData::VT_BIN_BOUNDARIES_TYPE, Some(Array::NONE)).unwrap()} + } + #[inline] + pub fn bin_boundaries(&self) -> Option<::flatbuffers::Table<'a>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Table<'a>>>(DimensionMetaData::VT_BIN_BOUNDARIES, None)} + } + #[inline] + #[allow(non_snake_case)] + pub fn bin_boundaries_as_array_int(&self) -> Option> { + if self.bin_boundaries_type() == Array::ArrayInt { + self.bin_boundaries().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayInt::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn bin_boundaries_as_array_long(&self) -> Option> { + if self.bin_boundaries_type() == Array::ArrayLong { + self.bin_boundaries().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayLong::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn 
bin_boundaries_as_array_double(&self) -> Option> { + if self.bin_boundaries_type() == Array::ArrayDouble { + self.bin_boundaries().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayDouble::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn bin_boundaries_as_array_float(&self) -> Option> { + if self.bin_boundaries_type() == Array::ArrayFloat { + self.bin_boundaries().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayFloat::init_from_table(t) } + }) + } else { + None + } + } + +} + +impl ::flatbuffers::Verifiable for DimensionMetaData<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("length", Self::VT_LENGTH, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("unit", Self::VT_UNIT, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("label", Self::VT_LABEL, false)? + .visit_union::("bin_boundaries_type", Self::VT_BIN_BOUNDARIES_TYPE, "bin_boundaries", Self::VT_BIN_BOUNDARIES, false, |key, v, pos| { + match key { + Array::ArrayInt => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayInt", pos), + Array::ArrayLong => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayLong", pos), + Array::ArrayDouble => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayDouble", pos), + Array::ArrayFloat => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayFloat", pos), + _ => Ok(()), + } + })? 
+ .finish(); + Ok(()) + } +} +pub struct DimensionMetaDataArgs<'a> { + pub length: i32, + pub unit: Option<::flatbuffers::WIPOffset<&'a str>>, + pub label: Option<::flatbuffers::WIPOffset<&'a str>>, + pub bin_boundaries_type: Array, + pub bin_boundaries: Option<::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>>, +} +impl<'a> Default for DimensionMetaDataArgs<'a> { + #[inline] + fn default() -> Self { + DimensionMetaDataArgs { + length: 0, + unit: None, + label: None, + bin_boundaries_type: Array::NONE, + bin_boundaries: None, + } + } +} + +pub struct DimensionMetaDataBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> DimensionMetaDataBuilder<'a, 'b, A> { + #[inline] + pub fn add_length(&mut self, length: i32) { + self.fbb_.push_slot::(DimensionMetaData::VT_LENGTH, length, 0); + } + #[inline] + pub fn add_unit(&mut self, unit: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(DimensionMetaData::VT_UNIT, unit); + } + #[inline] + pub fn add_label(&mut self, label: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(DimensionMetaData::VT_LABEL, label); + } + #[inline] + pub fn add_bin_boundaries_type(&mut self, bin_boundaries_type: Array) { + self.fbb_.push_slot::(DimensionMetaData::VT_BIN_BOUNDARIES_TYPE, bin_boundaries_type, Array::NONE); + } + #[inline] + pub fn add_bin_boundaries(&mut self, bin_boundaries: ::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(DimensionMetaData::VT_BIN_BOUNDARIES, bin_boundaries); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> DimensionMetaDataBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + DimensionMetaDataBuilder { + fbb_: _fbb, + 
start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for DimensionMetaData<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("DimensionMetaData"); + ds.field("length", &self.length()); + ds.field("unit", &self.unit()); + ds.field("label", &self.label()); + ds.field("bin_boundaries_type", &self.bin_boundaries_type()); + match self.bin_boundaries_type() { + Array::ArrayInt => { + if let Some(x) = self.bin_boundaries_as_array_int() { + ds.field("bin_boundaries", &x) + } else { + ds.field("bin_boundaries", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayLong => { + if let Some(x) = self.bin_boundaries_as_array_long() { + ds.field("bin_boundaries", &x) + } else { + ds.field("bin_boundaries", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayDouble => { + if let Some(x) = self.bin_boundaries_as_array_double() { + ds.field("bin_boundaries", &x) + } else { + ds.field("bin_boundaries", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayFloat => { + if let Some(x) = self.bin_boundaries_as_array_float() { + ds.field("bin_boundaries", &x) + } else { + ds.field("bin_boundaries", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + _ => { + let x: Option<()> = None; + ds.field("bin_boundaries", &x) + }, + }; + ds.finish() + } +} +pub enum EventHistogramOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct EventHistogram<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for EventHistogram<'a> { + type Inner = EventHistogram<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> 
EventHistogram<'a> { + pub const VT_SOURCE: ::flatbuffers::VOffsetT = 4; + pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 6; + pub const VT_DIM_METADATA: ::flatbuffers::VOffsetT = 8; + pub const VT_LAST_METADATA_TIMESTAMP: ::flatbuffers::VOffsetT = 10; + pub const VT_CURRENT_SHAPE: ::flatbuffers::VOffsetT = 12; + pub const VT_OFFSET: ::flatbuffers::VOffsetT = 14; + pub const VT_DATA_TYPE: ::flatbuffers::VOffsetT = 16; + pub const VT_DATA: ::flatbuffers::VOffsetT = 18; + pub const VT_ERRORS_TYPE: ::flatbuffers::VOffsetT = 20; + pub const VT_ERRORS: ::flatbuffers::VOffsetT = 22; + pub const VT_INFO: ::flatbuffers::VOffsetT = 24; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + EventHistogram { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args EventHistogramArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = EventHistogramBuilder::new(_fbb); + builder.add_last_metadata_timestamp(args.last_metadata_timestamp); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.info { builder.add_info(x); } + if let Some(x) = args.errors { builder.add_errors(x); } + if let Some(x) = args.data { builder.add_data(x); } + if let Some(x) = args.offset { builder.add_offset(x); } + if let Some(x) = args.current_shape { builder.add_current_shape(x); } + if let Some(x) = args.dim_metadata { builder.add_dim_metadata(x); } + if let Some(x) = args.source { builder.add_source(x); } + builder.add_errors_type(args.errors_type); + builder.add_data_type(args.data_type); + builder.finish() + } + + + #[inline] + pub fn source(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(EventHistogram::VT_SOURCE, None)} + } + 
#[inline] + pub fn timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(EventHistogram::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn dim_metadata(&self) -> Option<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>(EventHistogram::VT_DIM_METADATA, None)} + } + #[inline] + pub fn last_metadata_timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(EventHistogram::VT_LAST_METADATA_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn current_shape(&self) -> ::flatbuffers::Vector<'a, i32> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(EventHistogram::VT_CURRENT_SHAPE, None).unwrap()} + } + #[inline] + pub fn offset(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(EventHistogram::VT_OFFSET, None)} + } + #[inline] + pub fn data_type(&self) -> Array { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(EventHistogram::VT_DATA_TYPE, Some(Array::NONE)).unwrap()} + } + #[inline] + pub fn data(&self) -> Option<::flatbuffers::Table<'a>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { 
self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Table<'a>>>(EventHistogram::VT_DATA, None)} + } + #[inline] + pub fn errors_type(&self) -> Array { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(EventHistogram::VT_ERRORS_TYPE, Some(Array::NONE)).unwrap()} + } + #[inline] + pub fn errors(&self) -> Option<::flatbuffers::Table<'a>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Table<'a>>>(EventHistogram::VT_ERRORS, None)} + } + #[inline] + pub fn info(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(EventHistogram::VT_INFO, None)} + } + #[inline] + #[allow(non_snake_case)] + pub fn data_as_array_int(&self) -> Option> { + if self.data_type() == Array::ArrayInt { + self.data().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayInt::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn data_as_array_long(&self) -> Option> { + if self.data_type() == Array::ArrayLong { + self.data().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayLong::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn data_as_array_double(&self) -> Option> { + if self.data_type() == Array::ArrayDouble { + self.data().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayDouble::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn 
data_as_array_float(&self) -> Option> { + if self.data_type() == Array::ArrayFloat { + self.data().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayFloat::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn errors_as_array_int(&self) -> Option> { + if self.errors_type() == Array::ArrayInt { + self.errors().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayInt::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn errors_as_array_long(&self) -> Option> { + if self.errors_type() == Array::ArrayLong { + self.errors().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayLong::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn errors_as_array_double(&self) -> Option> { + if self.errors_type() == Array::ArrayDouble { + self.errors().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayDouble::init_from_table(t) } + }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn errors_as_array_float(&self) -> Option> { + if self.errors_type() == Array::ArrayFloat { + self.errors().map(|t| { + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + unsafe { ArrayFloat::init_from_table(t) } + }) + } else { + None + } + } + +} + +impl ::flatbuffers::Verifiable for EventHistogram<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source", Self::VT_SOURCE, false)? 
+ .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, ::flatbuffers::ForwardsUOffset>>>("dim_metadata", Self::VT_DIM_METADATA, false)? + .visit_field::("last_metadata_timestamp", Self::VT_LAST_METADATA_TIMESTAMP, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("current_shape", Self::VT_CURRENT_SHAPE, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("offset", Self::VT_OFFSET, false)? + .visit_union::("data_type", Self::VT_DATA_TYPE, "data", Self::VT_DATA, false, |key, v, pos| { + match key { + Array::ArrayInt => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayInt", pos), + Array::ArrayLong => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayLong", pos), + Array::ArrayDouble => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayDouble", pos), + Array::ArrayFloat => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayFloat", pos), + _ => Ok(()), + } + })? + .visit_union::("errors_type", Self::VT_ERRORS_TYPE, "errors", Self::VT_ERRORS, false, |key, v, pos| { + match key { + Array::ArrayInt => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayInt", pos), + Array::ArrayLong => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayLong", pos), + Array::ArrayDouble => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayDouble", pos), + Array::ArrayFloat => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("Array::ArrayFloat", pos), + _ => Ok(()), + } + })? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("info", Self::VT_INFO, false)? 
+ .finish(); + Ok(()) + } +} +pub struct EventHistogramArgs<'a> { + pub source: Option<::flatbuffers::WIPOffset<&'a str>>, + pub timestamp: i64, + pub dim_metadata: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, ::flatbuffers::ForwardsUOffset>>>>, + pub last_metadata_timestamp: i64, + pub current_shape: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub offset: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub data_type: Array, + pub data: Option<::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>>, + pub errors_type: Array, + pub errors: Option<::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>>, + pub info: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for EventHistogramArgs<'a> { + #[inline] + fn default() -> Self { + EventHistogramArgs { + source: None, + timestamp: 0, + dim_metadata: None, + last_metadata_timestamp: 0, + current_shape: None, // required field + offset: None, + data_type: Array::NONE, + data: None, + errors_type: Array::NONE, + errors: None, + info: None, + } + } +} + +pub struct EventHistogramBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> EventHistogramBuilder<'a, 'b, A> { + #[inline] + pub fn add_source(&mut self, source: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_SOURCE, source); + } + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(EventHistogram::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub fn add_dim_metadata(&mut self, dim_metadata: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , ::flatbuffers::ForwardsUOffset>>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_DIM_METADATA, dim_metadata); + } + #[inline] + pub 
fn add_last_metadata_timestamp(&mut self, last_metadata_timestamp: i64) { + self.fbb_.push_slot::(EventHistogram::VT_LAST_METADATA_TIMESTAMP, last_metadata_timestamp, 0); + } + #[inline] + pub fn add_current_shape(&mut self, current_shape: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_CURRENT_SHAPE, current_shape); + } + #[inline] + pub fn add_offset(&mut self, offset: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_OFFSET, offset); + } + #[inline] + pub fn add_data_type(&mut self, data_type: Array) { + self.fbb_.push_slot::(EventHistogram::VT_DATA_TYPE, data_type, Array::NONE); + } + #[inline] + pub fn add_data(&mut self, data: ::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_DATA, data); + } + #[inline] + pub fn add_errors_type(&mut self, errors_type: Array) { + self.fbb_.push_slot::(EventHistogram::VT_ERRORS_TYPE, errors_type, Array::NONE); + } + #[inline] + pub fn add_errors(&mut self, errors: ::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_ERRORS, errors); + } + #[inline] + pub fn add_info(&mut self, info: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(EventHistogram::VT_INFO, info); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> EventHistogramBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + EventHistogramBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, EventHistogram::VT_CURRENT_SHAPE,"current_shape"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl 
::core::fmt::Debug for EventHistogram<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("EventHistogram"); + ds.field("source", &self.source()); + ds.field("timestamp", &self.timestamp()); + ds.field("dim_metadata", &self.dim_metadata()); + ds.field("last_metadata_timestamp", &self.last_metadata_timestamp()); + ds.field("current_shape", &self.current_shape()); + ds.field("offset", &self.offset()); + ds.field("data_type", &self.data_type()); + match self.data_type() { + Array::ArrayInt => { + if let Some(x) = self.data_as_array_int() { + ds.field("data", &x) + } else { + ds.field("data", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayLong => { + if let Some(x) = self.data_as_array_long() { + ds.field("data", &x) + } else { + ds.field("data", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayDouble => { + if let Some(x) = self.data_as_array_double() { + ds.field("data", &x) + } else { + ds.field("data", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayFloat => { + if let Some(x) = self.data_as_array_float() { + ds.field("data", &x) + } else { + ds.field("data", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + _ => { + let x: Option<()> = None; + ds.field("data", &x) + }, + }; + ds.field("errors_type", &self.errors_type()); + match self.errors_type() { + Array::ArrayInt => { + if let Some(x) = self.errors_as_array_int() { + ds.field("errors", &x) + } else { + ds.field("errors", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayLong => { + if let Some(x) = self.errors_as_array_long() { + ds.field("errors", &x) + } else { + ds.field("errors", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayDouble => { + if let Some(x) = self.errors_as_array_double() { + ds.field("errors", &x) + } else { + 
ds.field("errors", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + Array::ArrayFloat => { + if let Some(x) = self.errors_as_array_float() { + ds.field("errors", &x) + } else { + ds.field("errors", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + _ => { + let x: Option<()> = None; + ds.field("errors", &x) + }, + }; + ds.field("info", &self.info()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `EventHistogram` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_event_histogram_unchecked`. +pub fn root_as_event_histogram(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `EventHistogram` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_event_histogram_unchecked`. +pub fn size_prefixed_root_as_event_histogram(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `EventHistogram` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_event_histogram_unchecked`. 
+pub fn root_as_event_histogram_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `EventHistogram` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_event_histogram_unchecked`. +pub fn size_prefixed_root_as_event_histogram_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a EventHistogram and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `EventHistogram`. +pub unsafe fn root_as_event_histogram_unchecked(buf: &[u8]) -> EventHistogram<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed EventHistogram and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `EventHistogram`. 
+pub unsafe fn size_prefixed_root_as_event_histogram_unchecked(buf: &[u8]) -> EventHistogram<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const EVENT_HISTOGRAM_IDENTIFIER: &str = "hs01"; + +#[inline] +pub fn event_histogram_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, EVENT_HISTOGRAM_IDENTIFIER, false) +} + +#[inline] +pub fn event_histogram_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, EVENT_HISTOGRAM_IDENTIFIER, true) +} + +#[inline] +pub fn finish_event_histogram_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(EVENT_HISTOGRAM_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_event_histogram_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(EVENT_HISTOGRAM_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/json_json.rs b/rust/src/flatbuffers_generated/json_json.rs new file mode 100644 index 0000000..1e77e5b --- /dev/null +++ b/rust/src/flatbuffers_generated/json_json.rs @@ -0,0 +1,184 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum JsonDataOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct JsonData<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for JsonData<'a> { + type Inner = JsonData<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> JsonData<'a> { + pub const VT_JSON: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + JsonData { _tab: table } + } + #[allow(unused_mut)] + 
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args JsonDataArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = JsonDataBuilder::new(_fbb); + if let Some(x) = args.json { builder.add_json(x); } + builder.finish() + } + + + #[inline] + pub fn json(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(JsonData::VT_JSON, None)} + } +} + +impl ::flatbuffers::Verifiable for JsonData<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("json", Self::VT_JSON, false)? + .finish(); + Ok(()) + } +} +pub struct JsonDataArgs<'a> { + pub json: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for JsonDataArgs<'a> { + #[inline] + fn default() -> Self { + JsonDataArgs { + json: None, + } + } +} + +pub struct JsonDataBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> JsonDataBuilder<'a, 'b, A> { + #[inline] + pub fn add_json(&mut self, json: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(JsonData::VT_JSON, json); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> JsonDataBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + JsonDataBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl 
::core::fmt::Debug for JsonData<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("JsonData"); + ds.field("json", &self.json()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `JsonData` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_json_data_unchecked`. +pub fn root_as_json_data(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `JsonData` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_json_data_unchecked`. +pub fn size_prefixed_root_as_json_data(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `JsonData` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_json_data_unchecked`. +pub fn root_as_json_data_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `JsonData` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_json_data_unchecked`. 
+pub fn size_prefixed_root_as_json_data_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a JsonData and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `JsonData`. +pub unsafe fn root_as_json_data_unchecked(buf: &[u8]) -> JsonData<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed JsonData and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `JsonData`. +pub unsafe fn size_prefixed_root_as_json_data_unchecked(buf: &[u8]) -> JsonData<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const JSON_DATA_IDENTIFIER: &str = "json"; + +#[inline] +pub fn json_data_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, JSON_DATA_IDENTIFIER, false) +} + +#[inline] +pub fn json_data_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, JSON_DATA_IDENTIFIER, true) +} + +#[inline] +pub fn finish_json_data_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(JSON_DATA_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_json_data_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(JSON_DATA_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/mod.rs b/rust/src/flatbuffers_generated/mod.rs new file mode 100644 index 0000000..b664214 --- /dev/null +++ b/rust/src/flatbuffers_generated/mod.rs @@ -0,0 +1,54 @@ 
+#[path = "6s4t_run_stop.rs"] +pub mod run_stop_6s4t; + +#[path = "ad00_area_detector_array.rs"] +pub mod area_detector_array_ad00; + +#[path = "al00_alarm.rs"] +pub mod alarm_al00; + +#[path = "answ_action_response.rs"] +pub mod action_response_answ; + +#[path = "da00_dataarray.rs"] +pub mod dataarray_da00; + +#[path = "df12_det_spec_map.rs"] +pub mod det_spec_map_df12; + +#[path = "ep01_epics_connection.rs"] +pub mod epics_connection_ep01; + +#[path = "ev44_events.rs"] +pub mod events_ev44; + +#[path = "f144_logdata.rs"] +pub mod logdata_f144; + +#[path = "fc00_forwarder_config.rs"] +pub mod forwarder_config_fc00; + +#[path = "hs01_event_histogram.rs"] +pub mod event_histogram_hs01; + +#[path = "json_json.rs"] +pub mod json_json; + +#[path = "pl72_run_start.rs"] +pub mod run_start_pl72; + +#[path = "pu00_pulse_metadata.rs"] +pub mod pulse_metadata_pu00; + +#[path = "se00_data.rs"] +pub mod data_se00; + +#[path = "un00_units.rs"] +pub mod units_un00; + +#[path = "wrdn_finished_writing.rs"] +pub mod finished_writing_wrdn; + +#[path = "x5f2_status.rs"] +pub mod status_x5f2; + diff --git a/rust/src/flatbuffers_generated/pl72_run_start.rs b/rust/src/flatbuffers_generated/pl72_run_start.rs new file mode 100644 index 0000000..412df29 --- /dev/null +++ b/rust/src/flatbuffers_generated/pl72_run_start.rs @@ -0,0 +1,518 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum SpectraDetectorMappingOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct SpectraDetectorMapping<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for SpectraDetectorMapping<'a> { + type Inner = SpectraDetectorMapping<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> SpectraDetectorMapping<'a> { + pub const VT_SPECTRUM: ::flatbuffers::VOffsetT = 4; + pub const VT_DETECTOR_ID: 
::flatbuffers::VOffsetT = 6; + pub const VT_N_SPECTRA: ::flatbuffers::VOffsetT = 8; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + SpectraDetectorMapping { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args SpectraDetectorMappingArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = SpectraDetectorMappingBuilder::new(_fbb); + builder.add_n_spectra(args.n_spectra); + if let Some(x) = args.detector_id { builder.add_detector_id(x); } + if let Some(x) = args.spectrum { builder.add_spectrum(x); } + builder.finish() + } + + + #[inline] + pub fn spectrum(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(SpectraDetectorMapping::VT_SPECTRUM, None)} + } + #[inline] + pub fn detector_id(&self) -> Option<::flatbuffers::Vector<'a, i32>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(SpectraDetectorMapping::VT_DETECTOR_ID, None)} + } + #[inline] + pub fn n_spectra(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(SpectraDetectorMapping::VT_N_SPECTRA, Some(0)).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for SpectraDetectorMapping<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("spectrum", Self::VT_SPECTRUM, false)? 
+ .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("detector_id", Self::VT_DETECTOR_ID, false)? + .visit_field::("n_spectra", Self::VT_N_SPECTRA, false)? + .finish(); + Ok(()) + } +} +pub struct SpectraDetectorMappingArgs<'a> { + pub spectrum: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub detector_id: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, + pub n_spectra: i32, +} +impl<'a> Default for SpectraDetectorMappingArgs<'a> { + #[inline] + fn default() -> Self { + SpectraDetectorMappingArgs { + spectrum: None, + detector_id: None, + n_spectra: 0, + } + } +} + +pub struct SpectraDetectorMappingBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> SpectraDetectorMappingBuilder<'a, 'b, A> { + #[inline] + pub fn add_spectrum(&mut self, spectrum: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(SpectraDetectorMapping::VT_SPECTRUM, spectrum); + } + #[inline] + pub fn add_detector_id(&mut self, detector_id: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(SpectraDetectorMapping::VT_DETECTOR_ID, detector_id); + } + #[inline] + pub fn add_n_spectra(&mut self, n_spectra: i32) { + self.fbb_.push_slot::(SpectraDetectorMapping::VT_N_SPECTRA, n_spectra, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> SpectraDetectorMappingBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + SpectraDetectorMappingBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for 
SpectraDetectorMapping<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("SpectraDetectorMapping"); + ds.field("spectrum", &self.spectrum()); + ds.field("detector_id", &self.detector_id()); + ds.field("n_spectra", &self.n_spectra()); + ds.finish() + } +} +pub enum RunStartOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct RunStart<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for RunStart<'a> { + type Inner = RunStart<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> RunStart<'a> { + pub const VT_START_TIME: ::flatbuffers::VOffsetT = 4; + pub const VT_STOP_TIME: ::flatbuffers::VOffsetT = 6; + pub const VT_RUN_NAME: ::flatbuffers::VOffsetT = 8; + pub const VT_INSTRUMENT_NAME: ::flatbuffers::VOffsetT = 10; + pub const VT_NEXUS_STRUCTURE: ::flatbuffers::VOffsetT = 12; + pub const VT_JOB_ID: ::flatbuffers::VOffsetT = 14; + pub const VT_BROKER: ::flatbuffers::VOffsetT = 16; + pub const VT_SERVICE_ID: ::flatbuffers::VOffsetT = 18; + pub const VT_FILENAME: ::flatbuffers::VOffsetT = 20; + pub const VT_N_PERIODS: ::flatbuffers::VOffsetT = 22; + pub const VT_DETECTOR_SPECTRUM_MAP: ::flatbuffers::VOffsetT = 24; + pub const VT_METADATA: ::flatbuffers::VOffsetT = 26; + pub const VT_CONTROL_TOPIC: ::flatbuffers::VOffsetT = 28; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + RunStart { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args RunStartArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = RunStartBuilder::new(_fbb); + builder.add_stop_time(args.stop_time); + builder.add_start_time(args.start_time); + if let Some(x) = 
args.control_topic { builder.add_control_topic(x); } + if let Some(x) = args.metadata { builder.add_metadata(x); } + if let Some(x) = args.detector_spectrum_map { builder.add_detector_spectrum_map(x); } + builder.add_n_periods(args.n_periods); + if let Some(x) = args.filename { builder.add_filename(x); } + if let Some(x) = args.service_id { builder.add_service_id(x); } + if let Some(x) = args.broker { builder.add_broker(x); } + if let Some(x) = args.job_id { builder.add_job_id(x); } + if let Some(x) = args.nexus_structure { builder.add_nexus_structure(x); } + if let Some(x) = args.instrument_name { builder.add_instrument_name(x); } + if let Some(x) = args.run_name { builder.add_run_name(x); } + builder.finish() + } + + + #[inline] + pub fn start_time(&self) -> u64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(RunStart::VT_START_TIME, Some(0)).unwrap()} + } + #[inline] + pub fn stop_time(&self) -> u64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(RunStart::VT_STOP_TIME, Some(0)).unwrap()} + } + #[inline] + pub fn run_name(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_RUN_NAME, None)} + } + #[inline] + pub fn instrument_name(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_INSTRUMENT_NAME, None)} + } + #[inline] + pub fn nexus_structure(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_NEXUS_STRUCTURE, None)} + 
} + #[inline] + pub fn job_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_JOB_ID, None)} + } + #[inline] + pub fn broker(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_BROKER, None)} + } + #[inline] + pub fn service_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_SERVICE_ID, None)} + } + #[inline] + pub fn filename(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_FILENAME, None)} + } + #[inline] + pub fn n_periods(&self) -> u32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(RunStart::VT_N_PERIODS, Some(1)).unwrap()} + } + #[inline] + pub fn detector_spectrum_map(&self) -> Option> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset>(RunStart::VT_DETECTOR_SPECTRUM_MAP, None)} + } + #[inline] + pub fn metadata(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_METADATA, None)} + } + #[inline] + pub fn control_topic(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { 
self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(RunStart::VT_CONTROL_TOPIC, None)} + } +} + +impl ::flatbuffers::Verifiable for RunStart<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::("start_time", Self::VT_START_TIME, false)? + .visit_field::("stop_time", Self::VT_STOP_TIME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("run_name", Self::VT_RUN_NAME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("instrument_name", Self::VT_INSTRUMENT_NAME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("nexus_structure", Self::VT_NEXUS_STRUCTURE, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("job_id", Self::VT_JOB_ID, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("broker", Self::VT_BROKER, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("service_id", Self::VT_SERVICE_ID, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("filename", Self::VT_FILENAME, false)? + .visit_field::("n_periods", Self::VT_N_PERIODS, false)? + .visit_field::<::flatbuffers::ForwardsUOffset>("detector_spectrum_map", Self::VT_DETECTOR_SPECTRUM_MAP, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("metadata", Self::VT_METADATA, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("control_topic", Self::VT_CONTROL_TOPIC, false)? 
+ .finish(); + Ok(()) + } +} +pub struct RunStartArgs<'a> { + pub start_time: u64, + pub stop_time: u64, + pub run_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub instrument_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub nexus_structure: Option<::flatbuffers::WIPOffset<&'a str>>, + pub job_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub broker: Option<::flatbuffers::WIPOffset<&'a str>>, + pub service_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub filename: Option<::flatbuffers::WIPOffset<&'a str>>, + pub n_periods: u32, + pub detector_spectrum_map: Option<::flatbuffers::WIPOffset>>, + pub metadata: Option<::flatbuffers::WIPOffset<&'a str>>, + pub control_topic: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for RunStartArgs<'a> { + #[inline] + fn default() -> Self { + RunStartArgs { + start_time: 0, + stop_time: 0, + run_name: None, + instrument_name: None, + nexus_structure: None, + job_id: None, + broker: None, + service_id: None, + filename: None, + n_periods: 1, + detector_spectrum_map: None, + metadata: None, + control_topic: None, + } + } +} + +pub struct RunStartBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> RunStartBuilder<'a, 'b, A> { + #[inline] + pub fn add_start_time(&mut self, start_time: u64) { + self.fbb_.push_slot::(RunStart::VT_START_TIME, start_time, 0); + } + #[inline] + pub fn add_stop_time(&mut self, stop_time: u64) { + self.fbb_.push_slot::(RunStart::VT_STOP_TIME, stop_time, 0); + } + #[inline] + pub fn add_run_name(&mut self, run_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_RUN_NAME, run_name); + } + #[inline] + pub fn add_instrument_name(&mut self, instrument_name: ::flatbuffers::WIPOffset<&'b str>) { + 
self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_INSTRUMENT_NAME, instrument_name); + } + #[inline] + pub fn add_nexus_structure(&mut self, nexus_structure: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_NEXUS_STRUCTURE, nexus_structure); + } + #[inline] + pub fn add_job_id(&mut self, job_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_JOB_ID, job_id); + } + #[inline] + pub fn add_broker(&mut self, broker: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_BROKER, broker); + } + #[inline] + pub fn add_service_id(&mut self, service_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_SERVICE_ID, service_id); + } + #[inline] + pub fn add_filename(&mut self, filename: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_FILENAME, filename); + } + #[inline] + pub fn add_n_periods(&mut self, n_periods: u32) { + self.fbb_.push_slot::(RunStart::VT_N_PERIODS, n_periods, 1); + } + #[inline] + pub fn add_detector_spectrum_map(&mut self, detector_spectrum_map: ::flatbuffers::WIPOffset>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset>(RunStart::VT_DETECTOR_SPECTRUM_MAP, detector_spectrum_map); + } + #[inline] + pub fn add_metadata(&mut self, metadata: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_METADATA, metadata); + } + #[inline] + pub fn add_control_topic(&mut self, control_topic: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(RunStart::VT_CONTROL_TOPIC, control_topic); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> RunStartBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + 
RunStartBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for RunStart<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("RunStart"); + ds.field("start_time", &self.start_time()); + ds.field("stop_time", &self.stop_time()); + ds.field("run_name", &self.run_name()); + ds.field("instrument_name", &self.instrument_name()); + ds.field("nexus_structure", &self.nexus_structure()); + ds.field("job_id", &self.job_id()); + ds.field("broker", &self.broker()); + ds.field("service_id", &self.service_id()); + ds.field("filename", &self.filename()); + ds.field("n_periods", &self.n_periods()); + ds.field("detector_spectrum_map", &self.detector_spectrum_map()); + ds.field("metadata", &self.metadata()); + ds.field("control_topic", &self.control_topic()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `RunStart` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_run_start_unchecked`. +pub fn root_as_run_start(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `RunStart` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_run_start_unchecked`. +pub fn size_prefixed_root_as_run_start(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `RunStart` and returns it. 
+/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_run_start_unchecked`. +pub fn root_as_run_start_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `RunStart` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_run_start_unchecked`. +pub fn size_prefixed_root_as_run_start_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a RunStart and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `RunStart`. +pub unsafe fn root_as_run_start_unchecked(buf: &[u8]) -> RunStart<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed RunStart and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `RunStart`. 
+pub unsafe fn size_prefixed_root_as_run_start_unchecked(buf: &[u8]) -> RunStart<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const RUN_START_IDENTIFIER: &str = "pl72"; + +#[inline] +pub fn run_start_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, RUN_START_IDENTIFIER, false) +} + +#[inline] +pub fn run_start_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, RUN_START_IDENTIFIER, true) +} + +#[inline] +pub fn finish_run_start_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(RUN_START_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_run_start_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(RUN_START_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/pu00_pulse_metadata.rs b/rust/src/flatbuffers_generated/pu00_pulse_metadata.rs new file mode 100644 index 0000000..81487a2 --- /dev/null +++ b/rust/src/flatbuffers_generated/pu00_pulse_metadata.rs @@ -0,0 +1,270 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum Pu00MessageOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Pu00Message<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Pu00Message<'a> { + type Inner = Pu00Message<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Pu00Message<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_MESSAGE_ID: ::flatbuffers::VOffsetT = 6; + pub const VT_REFERENCE_TIME: ::flatbuffers::VOffsetT = 8; + pub const VT_VETOS: ::flatbuffers::VOffsetT 
= 10; + pub const VT_PERIOD_NUMBER: ::flatbuffers::VOffsetT = 12; + pub const VT_PROTON_CHARGE: ::flatbuffers::VOffsetT = 14; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Pu00Message { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args Pu00MessageArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = Pu00MessageBuilder::new(_fbb); + builder.add_reference_time(args.reference_time); + builder.add_message_id(args.message_id); + if let Some(x) = args.proton_charge { builder.add_proton_charge(x); } + if let Some(x) = args.period_number { builder.add_period_number(x); } + if let Some(x) = args.vetos { builder.add_vetos(x); } + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.finish() + } + + + #[inline] + pub fn source_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Pu00Message::VT_SOURCE_NAME, None).unwrap()} + } + #[inline] + pub fn message_id(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Pu00Message::VT_MESSAGE_ID, Some(0)).unwrap()} + } + #[inline] + pub fn reference_time(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Pu00Message::VT_REFERENCE_TIME, Some(0)).unwrap()} + } + #[inline] + pub fn vetos(&self) -> Option { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Pu00Message::VT_VETOS, None)} + } + #[inline] + pub fn period_number(&self) -> Option { + // Safety: + // Created from valid 
Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Pu00Message::VT_PERIOD_NUMBER, None)} + } + #[inline] + pub fn proton_charge(&self) -> Option { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Pu00Message::VT_PROTON_CHARGE, None)} + } +} + +impl ::flatbuffers::Verifiable for Pu00Message<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)? + .visit_field::("message_id", Self::VT_MESSAGE_ID, false)? + .visit_field::("reference_time", Self::VT_REFERENCE_TIME, false)? + .visit_field::("vetos", Self::VT_VETOS, false)? + .visit_field::("period_number", Self::VT_PERIOD_NUMBER, false)? + .visit_field::("proton_charge", Self::VT_PROTON_CHARGE, false)? + .finish(); + Ok(()) + } +} +pub struct Pu00MessageArgs<'a> { + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub message_id: i64, + pub reference_time: i64, + pub vetos: Option, + pub period_number: Option, + pub proton_charge: Option, +} +impl<'a> Default for Pu00MessageArgs<'a> { + #[inline] + fn default() -> Self { + Pu00MessageArgs { + source_name: None, // required field + message_id: 0, + reference_time: 0, + vetos: None, + period_number: None, + proton_charge: None, + } + } +} + +pub struct Pu00MessageBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> Pu00MessageBuilder<'a, 'b, A> { + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Pu00Message::VT_SOURCE_NAME, source_name); + 
} + #[inline] + pub fn add_message_id(&mut self, message_id: i64) { + self.fbb_.push_slot::(Pu00Message::VT_MESSAGE_ID, message_id, 0); + } + #[inline] + pub fn add_reference_time(&mut self, reference_time: i64) { + self.fbb_.push_slot::(Pu00Message::VT_REFERENCE_TIME, reference_time, 0); + } + #[inline] + pub fn add_vetos(&mut self, vetos: u32) { + self.fbb_.push_slot_always::(Pu00Message::VT_VETOS, vetos); + } + #[inline] + pub fn add_period_number(&mut self, period_number: u32) { + self.fbb_.push_slot_always::(Pu00Message::VT_PERIOD_NUMBER, period_number); + } + #[inline] + pub fn add_proton_charge(&mut self, proton_charge: f32) { + self.fbb_.push_slot_always::(Pu00Message::VT_PROTON_CHARGE, proton_charge); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> Pu00MessageBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + Pu00MessageBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Pu00Message::VT_SOURCE_NAME,"source_name"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Pu00Message<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Pu00Message"); + ds.field("source_name", &self.source_name()); + ds.field("message_id", &self.message_id()); + ds.field("reference_time", &self.reference_time()); + ds.field("vetos", &self.vetos()); + ds.field("period_number", &self.period_number()); + ds.field("proton_charge", &self.proton_charge()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `Pu00Message` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_pu_00_message_unchecked`. 
+pub fn root_as_pu_00_message(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `Pu00Message` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_pu_00_message_unchecked`. +pub fn size_prefixed_root_as_pu_00_message(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `Pu00Message` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_pu_00_message_unchecked`. +pub fn root_as_pu_00_message_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `Pu00Message` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_pu_00_message_unchecked`. +pub fn size_prefixed_root_as_pu_00_message_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a Pu00Message and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `Pu00Message`. 
+pub unsafe fn root_as_pu_00_message_unchecked(buf: &[u8]) -> Pu00Message<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed Pu00Message and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `Pu00Message`. +pub unsafe fn size_prefixed_root_as_pu_00_message_unchecked(buf: &[u8]) -> Pu00Message<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const PU_00_MESSAGE_IDENTIFIER: &str = "pu00"; + +#[inline] +pub fn pu_00_message_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, PU_00_MESSAGE_IDENTIFIER, false) +} + +#[inline] +pub fn pu_00_message_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, PU_00_MESSAGE_IDENTIFIER, true) +} + +#[inline] +pub fn finish_pu_00_message_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(PU_00_MESSAGE_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_pu_00_message_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(PU_00_MESSAGE_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/se00_data.rs b/rust/src/flatbuffers_generated/se00_data.rs new file mode 100644 index 0000000..808b7f8 --- /dev/null +++ b/rust/src/flatbuffers_generated/se00_data.rs @@ -0,0 +1,1734 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_LOCATION: i8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. 
This will no longer be generated in 2021.")] +pub const ENUM_MAX_LOCATION: i8 = 3; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_LOCATION: [Location; 4] = [ + Location::Unknown, + Location::Start, + Location::Middle, + Location::End, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct Location(pub i8); +#[allow(non_upper_case_globals)] +impl Location { + pub const Unknown: Self = Self(0); + pub const Start: Self = Self(1); + pub const Middle: Self = Self(2); + pub const End: Self = Self(3); + + pub const ENUM_MIN: i8 = 0; + pub const ENUM_MAX: i8 = 3; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::Unknown, + Self::Start, + Self::Middle, + Self::End, + ]; + /// Returns the variant's name or "" if unknown. + pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::Unknown => Some("Unknown"), + Self::Start => Some("Start"), + Self::Middle => Some("Middle"), + Self::End => Some("End"), + _ => None, + } + } +} +impl ::core::fmt::Debug for Location { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for Location { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for Location { + type Output = Location; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for Location { + type Scalar = i8; + #[inline] + fn to_little_endian(self) -> i8 { + self.0.to_le() + } + #[inline] + 
#[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: i8) -> Self { + let b = i8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for Location { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + i8::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice for Location {} +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MIN_VALUE_UNION: u8 = 0; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +pub const ENUM_MAX_VALUE_UNION: u8 = 10; +#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")] +#[allow(non_camel_case_types)] +pub const ENUM_VALUES_VALUE_UNION: [ValueUnion; 11] = [ + ValueUnion::NONE, + ValueUnion::Int8Array, + ValueUnion::UInt8Array, + ValueUnion::Int16Array, + ValueUnion::UInt16Array, + ValueUnion::Int32Array, + ValueUnion::UInt32Array, + ValueUnion::Int64Array, + ValueUnion::UInt64Array, + ValueUnion::DoubleArray, + ValueUnion::FloatArray, +]; + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[repr(transparent)] +pub struct ValueUnion(pub u8); +#[allow(non_upper_case_globals)] +impl ValueUnion { + pub const NONE: Self = Self(0); + pub const Int8Array: Self = Self(1); + pub const UInt8Array: Self = Self(2); + pub const Int16Array: Self = Self(3); + pub const UInt16Array: Self = Self(4); + pub const Int32Array: Self = Self(5); + pub const UInt32Array: Self = Self(6); + pub const Int64Array: Self = Self(7); + pub const UInt64Array: Self = Self(8); + pub const DoubleArray: Self = Self(9); + pub const FloatArray: Self = Self(10); + + pub const ENUM_MIN: u8 = 0; + pub const ENUM_MAX: u8 = 10; + pub const ENUM_VALUES: &'static [Self] = &[ + Self::NONE, + Self::Int8Array, + Self::UInt8Array, + 
Self::Int16Array, + Self::UInt16Array, + Self::Int32Array, + Self::UInt32Array, + Self::Int64Array, + Self::UInt64Array, + Self::DoubleArray, + Self::FloatArray, + ]; + /// Returns the variant's name or "" if unknown. + pub fn variant_name(self) -> Option<&'static str> { + match self { + Self::NONE => Some("NONE"), + Self::Int8Array => Some("Int8Array"), + Self::UInt8Array => Some("UInt8Array"), + Self::Int16Array => Some("Int16Array"), + Self::UInt16Array => Some("UInt16Array"), + Self::Int32Array => Some("Int32Array"), + Self::UInt32Array => Some("UInt32Array"), + Self::Int64Array => Some("Int64Array"), + Self::UInt64Array => Some("UInt64Array"), + Self::DoubleArray => Some("DoubleArray"), + Self::FloatArray => Some("FloatArray"), + _ => None, + } + } +} +impl ::core::fmt::Debug for ValueUnion { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + if let Some(name) = self.variant_name() { + f.write_str(name) + } else { + f.write_fmt(format_args!("", self.0)) + } + } +} +impl<'a> ::flatbuffers::Follow<'a> for ValueUnion { + type Inner = Self; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + let b = unsafe { ::flatbuffers::read_scalar_at::(buf, loc) }; + Self(b) + } +} + +impl ::flatbuffers::Push for ValueUnion { + type Output = ValueUnion; + #[inline] + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe { ::flatbuffers::emplace_scalar::(dst, self.0) }; + } +} + +impl ::flatbuffers::EndianScalar for ValueUnion { + type Scalar = u8; + #[inline] + fn to_little_endian(self) -> u8 { + self.0.to_le() + } + #[inline] + #[allow(clippy::wrong_self_convention)] + fn from_little_endian(v: u8) -> Self { + let b = u8::from_le(v); + Self(b) + } +} + +impl<'a> ::flatbuffers::Verifiable for ValueUnion { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + u8::run_verifier(v, pos) + } +} + +impl ::flatbuffers::SimpleToVerifyInSlice 
for ValueUnion {} +pub struct ValueUnionUnionTableOffset {} + +pub enum Int8ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Int8Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Int8Array<'a> { + type Inner = Int8Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Int8Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Int8Array { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args Int8ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = Int8ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, i8> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i8>>>(Int8Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Int8Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i8>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct Int8ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i8>>>, +} +impl<'a> Default for Int8ArrayArgs<'a> { + #[inline] + fn default() -> Self { + Int8ArrayArgs { + value: None, // required field + } + } +} + +pub struct Int8ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> Int8ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i8>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Int8Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> Int8ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + Int8ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Int8Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Int8Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Int8Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UInt8ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UInt8Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UInt8Array<'a> { + type Inner = UInt8Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UInt8Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + UInt8Array { _tab: table } 
+ } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UInt8ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UInt8ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, u8> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u8>>>(UInt8Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UInt8Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u8>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct UInt8ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u8>>>, +} +impl<'a> Default for UInt8ArrayArgs<'a> { + #[inline] + fn default() -> Self { + UInt8ArrayArgs { + value: None, // required field + } + } +} + +pub struct UInt8ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UInt8ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u8>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(UInt8Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UInt8ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UInt8ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, UInt8Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UInt8Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UInt8Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum Int16ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Int16Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Int16Array<'a> { + type Inner = Int16Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Int16Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Int16Array { 
_tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args Int16ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = Int16ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, i16> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i16>>>(Int16Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Int16Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i16>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct Int16ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i16>>>, +} +impl<'a> Default for Int16ArrayArgs<'a> { + #[inline] + fn default() -> Self { + Int16ArrayArgs { + value: None, // required field + } + } +} + +pub struct Int16ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> Int16ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i16>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Int16Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> Int16ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + Int16ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Int16Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Int16Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Int16Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UInt16ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UInt16Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UInt16Array<'a> { + type Inner = UInt16Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UInt16Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + 
UInt16Array { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UInt16ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UInt16ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, u16> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u16>>>(UInt16Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UInt16Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u16>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct UInt16ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u16>>>, +} +impl<'a> Default for UInt16ArrayArgs<'a> { + #[inline] + fn default() -> Self { + UInt16ArrayArgs { + value: None, // required field + } + } +} + +pub struct UInt16ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UInt16ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u16>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(UInt16Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UInt16ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UInt16ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, UInt16Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UInt16Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UInt16Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum Int32ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Int32Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Int32Array<'a> { + type Inner = Int32Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Int32Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + 
Int32Array { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args Int32ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = Int32ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, i32> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i32>>>(Int32Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Int32Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i32>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct Int32ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i32>>>, +} +impl<'a> Default for Int32ArrayArgs<'a> { + #[inline] + fn default() -> Self { + Int32ArrayArgs { + value: None, // required field + } + } +} + +pub struct Int32ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> Int32ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Int32Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> Int32ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + Int32ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Int32Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Int32Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Int32Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UInt32ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UInt32Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UInt32Array<'a> { + type Inner = UInt32Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UInt32Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + 
UInt32Array { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UInt32ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UInt32ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, u32> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u32>>>(UInt32Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UInt32Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u32>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct UInt32ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u32>>>, +} +impl<'a> Default for UInt32ArrayArgs<'a> { + #[inline] + fn default() -> Self { + UInt32ArrayArgs { + value: None, // required field + } + } +} + +pub struct UInt32ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UInt32ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(UInt32Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UInt32ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UInt32ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, UInt32Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UInt32Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UInt32Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum Int64ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Int64Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Int64Array<'a> { + type Inner = Int64Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Int64Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + 
Int64Array { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args Int64ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = Int64ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, i64> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(Int64Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for Int64Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct Int64ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, +} +impl<'a> Default for Int64ArrayArgs<'a> { + #[inline] + fn default() -> Self { + Int64ArrayArgs { + value: None, // required field + } + } +} + +pub struct Int64ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> Int64ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Int64Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> Int64ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + Int64ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Int64Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Int64Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Int64Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum UInt64ArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct UInt64Array<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for UInt64Array<'a> { + type Inner = UInt64Array<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> UInt64Array<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + 
UInt64Array { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UInt64ArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UInt64ArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, u64> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, u64>>>(UInt64Array::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for UInt64Array<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, u64>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct UInt64ArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, u64>>>, +} +impl<'a> Default for UInt64ArrayArgs<'a> { + #[inline] + fn default() -> Self { + UInt64ArrayArgs { + value: None, // required field + } + } +} + +pub struct UInt64ArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UInt64ArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , u64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(UInt64Array::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UInt64ArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UInt64ArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, UInt64Array::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for UInt64Array<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("UInt64Array"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum DoubleArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct DoubleArray<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for DoubleArray<'a> { + type Inner = DoubleArray<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> DoubleArray<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { 
+ DoubleArray { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args DoubleArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = DoubleArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, f64> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, f64>>>(DoubleArray::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for DoubleArray<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, f64>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct DoubleArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, f64>>>, +} +impl<'a> Default for DoubleArrayArgs<'a> { + #[inline] + fn default() -> Self { + DoubleArrayArgs { + value: None, // required field + } + } +} + +pub struct DoubleArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> DoubleArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , f64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(DoubleArray::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> DoubleArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + DoubleArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, DoubleArray::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for DoubleArray<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("DoubleArray"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum FloatArrayOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct FloatArray<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for FloatArray<'a> { + type Inner = FloatArray<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> FloatArray<'a> { + pub const VT_VALUE: ::flatbuffers::VOffsetT = 4; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + 
FloatArray { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args FloatArrayArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = FloatArrayBuilder::new(_fbb); + if let Some(x) = args.value { builder.add_value(x); } + builder.finish() + } + + + #[inline] + pub fn value(&self) -> ::flatbuffers::Vector<'a, f32> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, f32>>>(FloatArray::VT_VALUE, None).unwrap()} + } +} + +impl ::flatbuffers::Verifiable for FloatArray<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, f32>>>("value", Self::VT_VALUE, true)? 
+ .finish(); + Ok(()) + } +} +pub struct FloatArrayArgs<'a> { + pub value: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, f32>>>, +} +impl<'a> Default for FloatArrayArgs<'a> { + #[inline] + fn default() -> Self { + FloatArrayArgs { + value: None, // required field + } + } +} + +pub struct FloatArrayBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> FloatArrayBuilder<'a, 'b, A> { + #[inline] + pub fn add_value(&mut self, value: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , f32>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(FloatArray::VT_VALUE, value); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> FloatArrayBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + FloatArrayBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, FloatArray::VT_VALUE,"value"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for FloatArray<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("FloatArray"); + ds.field("value", &self.value()); + ds.finish() + } +} +pub enum se00_SampleEnvironmentDataOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct se00_SampleEnvironmentData<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for se00_SampleEnvironmentData<'a> { + type Inner = se00_SampleEnvironmentData<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> se00_SampleEnvironmentData<'a> { + pub const VT_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_CHANNEL: 
::flatbuffers::VOffsetT = 6; + pub const VT_PACKET_TIMESTAMP: ::flatbuffers::VOffsetT = 8; + pub const VT_TIME_DELTA: ::flatbuffers::VOffsetT = 10; + pub const VT_TIMESTAMP_LOCATION: ::flatbuffers::VOffsetT = 12; + pub const VT_VALUES_TYPE: ::flatbuffers::VOffsetT = 14; + pub const VT_VALUES: ::flatbuffers::VOffsetT = 16; + pub const VT_TIMESTAMPS: ::flatbuffers::VOffsetT = 18; + pub const VT_MESSAGE_COUNTER: ::flatbuffers::VOffsetT = 20; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + se00_SampleEnvironmentData { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args se00_SampleEnvironmentDataArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = se00_SampleEnvironmentDataBuilder::new(_fbb); + builder.add_message_counter(args.message_counter); + builder.add_time_delta(args.time_delta); + builder.add_packet_timestamp(args.packet_timestamp); + if let Some(x) = args.timestamps { builder.add_timestamps(x); } + if let Some(x) = args.values { builder.add_values(x); } + builder.add_channel(args.channel); + if let Some(x) = args.name { builder.add_name(x); } + builder.add_values_type(args.values_type); + builder.add_timestamp_location(args.timestamp_location); + builder.finish() + } + + + #[inline] + pub fn name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(se00_SampleEnvironmentData::VT_NAME, None).unwrap()} + } + #[inline] + pub fn channel(&self) -> i32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(se00_SampleEnvironmentData::VT_CHANNEL, Some(0)).unwrap()} + } + #[inline] + pub fn packet_timestamp(&self) -> i64 { + // 
Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(se00_SampleEnvironmentData::VT_PACKET_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn time_delta(&self) -> f64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(se00_SampleEnvironmentData::VT_TIME_DELTA, Some(0.0)).unwrap()} + } + #[inline] + pub fn timestamp_location(&self) -> Location { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(se00_SampleEnvironmentData::VT_TIMESTAMP_LOCATION, Some(Location::Unknown)).unwrap()} + } + #[inline] + pub fn values_type(&self) -> ValueUnion { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(se00_SampleEnvironmentData::VT_VALUES_TYPE, Some(ValueUnion::NONE)).unwrap()} + } + #[inline] + pub fn values(&self) -> ::flatbuffers::Table<'a> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Table<'a>>>(se00_SampleEnvironmentData::VT_VALUES, None).unwrap()} + } + #[inline] + pub fn timestamps(&self) -> Option<::flatbuffers::Vector<'a, i64>> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'a, i64>>>(se00_SampleEnvironmentData::VT_TIMESTAMPS, None)} + } + #[inline] + pub fn message_counter(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(se00_SampleEnvironmentData::VT_MESSAGE_COUNTER, Some(0)).unwrap()} + } + #[inline] + #[allow(non_snake_case)] + pub fn values_as_int_8_array(&self) -> Option> { 
+ if self.values_type() == ValueUnion::Int8Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Int8Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_uint_8_array(&self) -> Option> { + if self.values_type() == ValueUnion::UInt8Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { UInt8Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_int_16_array(&self) -> Option> { + if self.values_type() == ValueUnion::Int16Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Int16Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_uint_16_array(&self) -> Option> { + if self.values_type() == ValueUnion::UInt16Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { UInt16Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_int_32_array(&self) -> Option> { + if self.values_type() == ValueUnion::Int32Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Int32Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_uint_32_array(&self) -> Option> { + if self.values_type() == ValueUnion::UInt32Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { 
UInt32Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_int_64_array(&self) -> Option> { + if self.values_type() == ValueUnion::Int64Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { Int64Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_uint_64_array(&self) -> Option> { + if self.values_type() == ValueUnion::UInt64Array { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { UInt64Array::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_double_array(&self) -> Option> { + if self.values_type() == ValueUnion::DoubleArray { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { DoubleArray::init_from_table(u) }) + } else { + None + } + } + + #[inline] + #[allow(non_snake_case)] + pub fn values_as_float_array(&self) -> Option> { + if self.values_type() == ValueUnion::FloatArray { + let u = self.values(); + // Safety: + // Created from a valid Table for this object + // Which contains a valid union in this slot + Some(unsafe { FloatArray::init_from_table(u) }) + } else { + None + } + } + +} + +impl ::flatbuffers::Verifiable for se00_SampleEnvironmentData<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("name", Self::VT_NAME, true)? + .visit_field::("channel", Self::VT_CHANNEL, false)? + .visit_field::("packet_timestamp", Self::VT_PACKET_TIMESTAMP, false)? + .visit_field::("time_delta", Self::VT_TIME_DELTA, false)? 
+ .visit_field::("timestamp_location", Self::VT_TIMESTAMP_LOCATION, false)? + .visit_union::("values_type", Self::VT_VALUES_TYPE, "values", Self::VT_VALUES, true, |key, v, pos| { + match key { + ValueUnion::Int8Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::Int8Array", pos), + ValueUnion::UInt8Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::UInt8Array", pos), + ValueUnion::Int16Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::Int16Array", pos), + ValueUnion::UInt16Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::UInt16Array", pos), + ValueUnion::Int32Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::Int32Array", pos), + ValueUnion::UInt32Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::UInt32Array", pos), + ValueUnion::Int64Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::Int64Array", pos), + ValueUnion::UInt64Array => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::UInt64Array", pos), + ValueUnion::DoubleArray => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::DoubleArray", pos), + ValueUnion::FloatArray => v.verify_union_variant::<::flatbuffers::ForwardsUOffset>("ValueUnion::FloatArray", pos), + _ => Ok(()), + } + })? + .visit_field::<::flatbuffers::ForwardsUOffset<::flatbuffers::Vector<'_, i64>>>("timestamps", Self::VT_TIMESTAMPS, false)? + .visit_field::("message_counter", Self::VT_MESSAGE_COUNTER, false)? 
+ .finish(); + Ok(()) + } +} +pub struct se00_SampleEnvironmentDataArgs<'a> { + pub name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub channel: i32, + pub packet_timestamp: i64, + pub time_delta: f64, + pub timestamp_location: Location, + pub values_type: ValueUnion, + pub values: Option<::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>>, + pub timestamps: Option<::flatbuffers::WIPOffset<::flatbuffers::Vector<'a, i64>>>, + pub message_counter: i64, +} +impl<'a> Default for se00_SampleEnvironmentDataArgs<'a> { + #[inline] + fn default() -> Self { + se00_SampleEnvironmentDataArgs { + name: None, // required field + channel: 0, + packet_timestamp: 0, + time_delta: 0.0, + timestamp_location: Location::Unknown, + values_type: ValueUnion::NONE, + values: None, // required field + timestamps: None, + message_counter: 0, + } + } +} + +pub struct se00_SampleEnvironmentDataBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> se00_SampleEnvironmentDataBuilder<'a, 'b, A> { + #[inline] + pub fn add_name(&mut self, name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(se00_SampleEnvironmentData::VT_NAME, name); + } + #[inline] + pub fn add_channel(&mut self, channel: i32) { + self.fbb_.push_slot::(se00_SampleEnvironmentData::VT_CHANNEL, channel, 0); + } + #[inline] + pub fn add_packet_timestamp(&mut self, packet_timestamp: i64) { + self.fbb_.push_slot::(se00_SampleEnvironmentData::VT_PACKET_TIMESTAMP, packet_timestamp, 0); + } + #[inline] + pub fn add_time_delta(&mut self, time_delta: f64) { + self.fbb_.push_slot::(se00_SampleEnvironmentData::VT_TIME_DELTA, time_delta, 0.0); + } + #[inline] + pub fn add_timestamp_location(&mut self, timestamp_location: Location) { + 
self.fbb_.push_slot::(se00_SampleEnvironmentData::VT_TIMESTAMP_LOCATION, timestamp_location, Location::Unknown); + } + #[inline] + pub fn add_values_type(&mut self, values_type: ValueUnion) { + self.fbb_.push_slot::(se00_SampleEnvironmentData::VT_VALUES_TYPE, values_type, ValueUnion::NONE); + } + #[inline] + pub fn add_values(&mut self, values: ::flatbuffers::WIPOffset<::flatbuffers::UnionWIPOffset>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(se00_SampleEnvironmentData::VT_VALUES, values); + } + #[inline] + pub fn add_timestamps(&mut self, timestamps: ::flatbuffers::WIPOffset<::flatbuffers::Vector<'b , i64>>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(se00_SampleEnvironmentData::VT_TIMESTAMPS, timestamps); + } + #[inline] + pub fn add_message_counter(&mut self, message_counter: i64) { + self.fbb_.push_slot::(se00_SampleEnvironmentData::VT_MESSAGE_COUNTER, message_counter, 0); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> se00_SampleEnvironmentDataBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + se00_SampleEnvironmentDataBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, se00_SampleEnvironmentData::VT_NAME,"name"); + self.fbb_.required(o, se00_SampleEnvironmentData::VT_VALUES,"values"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for se00_SampleEnvironmentData<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("se00_SampleEnvironmentData"); + ds.field("name", &self.name()); + ds.field("channel", &self.channel()); + ds.field("packet_timestamp", &self.packet_timestamp()); + ds.field("time_delta", &self.time_delta()); + ds.field("timestamp_location", &self.timestamp_location()); + ds.field("values_type", &self.values_type()); + match self.values_type() { + 
ValueUnion::Int8Array => { + if let Some(x) = self.values_as_int_8_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::UInt8Array => { + if let Some(x) = self.values_as_uint_8_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::Int16Array => { + if let Some(x) = self.values_as_int_16_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::UInt16Array => { + if let Some(x) = self.values_as_uint_16_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::Int32Array => { + if let Some(x) = self.values_as_int_32_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::UInt32Array => { + if let Some(x) = self.values_as_uint_32_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::Int64Array => { + if let Some(x) = self.values_as_int_64_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::UInt64Array => { + if let Some(x) = self.values_as_uint_64_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::DoubleArray => { + if let Some(x) = self.values_as_double_array() { + ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + ValueUnion::FloatArray => { + if let Some(x) = self.values_as_float_array() { + 
ds.field("values", &x) + } else { + ds.field("values", &"InvalidFlatbuffer: Union discriminant does not match value.") + } + }, + _ => { + let x: Option<()> = None; + ds.field("values", &x) + }, + }; + ds.field("timestamps", &self.timestamps()); + ds.field("message_counter", &self.message_counter()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `se00_SampleEnvironmentData` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_se_00_sample_environment_data_unchecked`. +pub fn root_as_se_00_sample_environment_data(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `se00_SampleEnvironmentData` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_se_00_sample_environment_data_unchecked`. +pub fn size_prefixed_root_as_se_00_sample_environment_data(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `se00_SampleEnvironmentData` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_se_00_sample_environment_data_unchecked`. 
+pub fn root_as_se_00_sample_environment_data_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `se00_SampleEnvironmentData` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_se_00_sample_environment_data_unchecked`. +pub fn size_prefixed_root_as_se_00_sample_environment_data_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a se00_SampleEnvironmentData and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `se00_SampleEnvironmentData`. +pub unsafe fn root_as_se_00_sample_environment_data_unchecked(buf: &[u8]) -> se00_SampleEnvironmentData<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed se00_SampleEnvironmentData and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `se00_SampleEnvironmentData`. 
+pub unsafe fn size_prefixed_root_as_se_00_sample_environment_data_unchecked(buf: &[u8]) -> se00_SampleEnvironmentData<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const SE_00_SAMPLE_ENVIRONMENT_DATA_IDENTIFIER: &str = "se00"; + +#[inline] +pub fn se_00_sample_environment_data_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, SE_00_SAMPLE_ENVIRONMENT_DATA_IDENTIFIER, false) +} + +#[inline] +pub fn se_00_sample_environment_data_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, SE_00_SAMPLE_ENVIRONMENT_DATA_IDENTIFIER, true) +} + +#[inline] +pub fn finish_se_00_sample_environment_data_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(SE_00_SAMPLE_ENVIRONMENT_DATA_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_se_00_sample_environment_data_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(SE_00_SAMPLE_ENVIRONMENT_DATA_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/un00_units.rs b/rust/src/flatbuffers_generated/un00_units.rs new file mode 100644 index 0000000..c5adb82 --- /dev/null +++ b/rust/src/flatbuffers_generated/un00_units.rs @@ -0,0 +1,219 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum UnitsOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Units<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Units<'a> { + type Inner = Units<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Units<'a> { + pub const VT_SOURCE_NAME: ::flatbuffers::VOffsetT = 4; 
+ pub const VT_TIMESTAMP: ::flatbuffers::VOffsetT = 6; + pub const VT_UNITS: ::flatbuffers::VOffsetT = 8; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Units { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args UnitsArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = UnitsBuilder::new(_fbb); + builder.add_timestamp(args.timestamp); + if let Some(x) = args.units { builder.add_units(x); } + if let Some(x) = args.source_name { builder.add_source_name(x); } + builder.finish() + } + + + #[inline] + pub fn source_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Units::VT_SOURCE_NAME, None).unwrap()} + } + #[inline] + pub fn timestamp(&self) -> i64 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Units::VT_TIMESTAMP, Some(0)).unwrap()} + } + #[inline] + pub fn units(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Units::VT_UNITS, None)} + } +} + +impl ::flatbuffers::Verifiable for Units<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("source_name", Self::VT_SOURCE_NAME, true)? + .visit_field::("timestamp", Self::VT_TIMESTAMP, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("units", Self::VT_UNITS, false)? 
+ .finish(); + Ok(()) + } +} +pub struct UnitsArgs<'a> { + pub source_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub timestamp: i64, + pub units: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for UnitsArgs<'a> { + #[inline] + fn default() -> Self { + UnitsArgs { + source_name: None, // required field + timestamp: 0, + units: None, + } + } +} + +pub struct UnitsBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> UnitsBuilder<'a, 'b, A> { + #[inline] + pub fn add_source_name(&mut self, source_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Units::VT_SOURCE_NAME, source_name); + } + #[inline] + pub fn add_timestamp(&mut self, timestamp: i64) { + self.fbb_.push_slot::(Units::VT_TIMESTAMP, timestamp, 0); + } + #[inline] + pub fn add_units(&mut self, units: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Units::VT_UNITS, units); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> UnitsBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + UnitsBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, Units::VT_SOURCE_NAME,"source_name"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Units<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Units"); + ds.field("source_name", &self.source_name()); + ds.field("timestamp", &self.timestamp()); + ds.field("units", &self.units()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `Units` +/// and returns it. 
+/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_units_unchecked`. +pub fn root_as_units(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `Units` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_units_unchecked`. +pub fn size_prefixed_root_as_units(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `Units` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_units_unchecked`. +pub fn root_as_units_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `Units` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_units_unchecked`. +pub fn size_prefixed_root_as_units_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a Units and returns it. 
+/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `Units`. +pub unsafe fn root_as_units_unchecked(buf: &[u8]) -> Units<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed Units and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `Units`. +pub unsafe fn size_prefixed_root_as_units_unchecked(buf: &[u8]) -> Units<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const UNITS_IDENTIFIER: &str = "un00"; + +#[inline] +pub fn units_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, UNITS_IDENTIFIER, false) +} + +#[inline] +pub fn units_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, UNITS_IDENTIFIER, true) +} + +#[inline] +pub fn finish_units_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(UNITS_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_units_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(UNITS_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/wrdn_finished_writing.rs b/rust/src/flatbuffers_generated/wrdn_finished_writing.rs new file mode 100644 index 0000000..5a7dd17 --- /dev/null +++ b/rust/src/flatbuffers_generated/wrdn_finished_writing.rs @@ -0,0 +1,272 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum FinishedWritingOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct FinishedWriting<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for FinishedWriting<'a> { + type Inner = 
FinishedWriting<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> FinishedWriting<'a> { + pub const VT_SERVICE_ID: ::flatbuffers::VOffsetT = 4; + pub const VT_JOB_ID: ::flatbuffers::VOffsetT = 6; + pub const VT_ERROR_ENCOUNTERED: ::flatbuffers::VOffsetT = 8; + pub const VT_FILE_NAME: ::flatbuffers::VOffsetT = 10; + pub const VT_METADATA: ::flatbuffers::VOffsetT = 12; + pub const VT_MESSAGE: ::flatbuffers::VOffsetT = 14; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + FinishedWriting { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args FinishedWritingArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = FinishedWritingBuilder::new(_fbb); + if let Some(x) = args.message { builder.add_message(x); } + if let Some(x) = args.metadata { builder.add_metadata(x); } + if let Some(x) = args.file_name { builder.add_file_name(x); } + if let Some(x) = args.job_id { builder.add_job_id(x); } + if let Some(x) = args.service_id { builder.add_service_id(x); } + builder.add_error_encountered(args.error_encountered); + builder.finish() + } + + + #[inline] + pub fn service_id(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(FinishedWriting::VT_SERVICE_ID, None).unwrap()} + } + #[inline] + pub fn job_id(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(FinishedWriting::VT_JOB_ID, None).unwrap()} + } + #[inline] + pub fn error_encountered(&self) -> bool { + // Safety: + // 
Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(FinishedWriting::VT_ERROR_ENCOUNTERED, Some(false)).unwrap()} + } + #[inline] + pub fn file_name(&self) -> &'a str { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(FinishedWriting::VT_FILE_NAME, None).unwrap()} + } + #[inline] + pub fn metadata(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(FinishedWriting::VT_METADATA, None)} + } + #[inline] + pub fn message(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(FinishedWriting::VT_MESSAGE, None)} + } +} + +impl ::flatbuffers::Verifiable for FinishedWriting<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("service_id", Self::VT_SERVICE_ID, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("job_id", Self::VT_JOB_ID, true)? + .visit_field::("error_encountered", Self::VT_ERROR_ENCOUNTERED, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("file_name", Self::VT_FILE_NAME, true)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("metadata", Self::VT_METADATA, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("message", Self::VT_MESSAGE, false)? 
+ .finish(); + Ok(()) + } +} +pub struct FinishedWritingArgs<'a> { + pub service_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub job_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub error_encountered: bool, + pub file_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub metadata: Option<::flatbuffers::WIPOffset<&'a str>>, + pub message: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for FinishedWritingArgs<'a> { + #[inline] + fn default() -> Self { + FinishedWritingArgs { + service_id: None, // required field + job_id: None, // required field + error_encountered: false, + file_name: None, // required field + metadata: None, + message: None, + } + } +} + +pub struct FinishedWritingBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> FinishedWritingBuilder<'a, 'b, A> { + #[inline] + pub fn add_service_id(&mut self, service_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(FinishedWriting::VT_SERVICE_ID, service_id); + } + #[inline] + pub fn add_job_id(&mut self, job_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(FinishedWriting::VT_JOB_ID, job_id); + } + #[inline] + pub fn add_error_encountered(&mut self, error_encountered: bool) { + self.fbb_.push_slot::(FinishedWriting::VT_ERROR_ENCOUNTERED, error_encountered, false); + } + #[inline] + pub fn add_file_name(&mut self, file_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(FinishedWriting::VT_FILE_NAME, file_name); + } + #[inline] + pub fn add_metadata(&mut self, metadata: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(FinishedWriting::VT_METADATA, metadata); + } + #[inline] + pub fn 
add_message(&mut self, message: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(FinishedWriting::VT_MESSAGE, message); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> FinishedWritingBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + FinishedWritingBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + self.fbb_.required(o, FinishedWriting::VT_SERVICE_ID,"service_id"); + self.fbb_.required(o, FinishedWriting::VT_JOB_ID,"job_id"); + self.fbb_.required(o, FinishedWriting::VT_FILE_NAME,"file_name"); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for FinishedWriting<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("FinishedWriting"); + ds.field("service_id", &self.service_id()); + ds.field("job_id", &self.job_id()); + ds.field("error_encountered", &self.error_encountered()); + ds.field("file_name", &self.file_name()); + ds.field("metadata", &self.metadata()); + ds.field("message", &self.message()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `FinishedWriting` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_finished_writing_unchecked`. +pub fn root_as_finished_writing(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `FinishedWriting` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_finished_writing_unchecked`. 
+pub fn size_prefixed_root_as_finished_writing(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `FinishedWriting` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_finished_writing_unchecked`. +pub fn root_as_finished_writing_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `FinishedWriting` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_finished_writing_unchecked`. +pub fn size_prefixed_root_as_finished_writing_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a FinishedWriting and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `FinishedWriting`. +pub unsafe fn root_as_finished_writing_unchecked(buf: &[u8]) -> FinishedWriting<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed FinishedWriting and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `FinishedWriting`. 
+pub unsafe fn size_prefixed_root_as_finished_writing_unchecked(buf: &[u8]) -> FinishedWriting<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const FINISHED_WRITING_IDENTIFIER: &str = "wrdn"; + +#[inline] +pub fn finished_writing_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, FINISHED_WRITING_IDENTIFIER, false) +} + +#[inline] +pub fn finished_writing_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, FINISHED_WRITING_IDENTIFIER, true) +} + +#[inline] +pub fn finish_finished_writing_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(FINISHED_WRITING_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_finished_writing_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(FINISHED_WRITING_IDENTIFIER)); +} diff --git a/rust/src/flatbuffers_generated/x5f2_status.rs b/rust/src/flatbuffers_generated/x5f2_status.rs new file mode 100644 index 0000000..03ec509 --- /dev/null +++ b/rust/src/flatbuffers_generated/x5f2_status.rs @@ -0,0 +1,286 @@ +// automatically generated by the FlatBuffers compiler, do not modify +// @generated + +extern crate alloc; + +pub enum StatusOffset {} +#[derive(Copy, Clone, PartialEq)] + +pub struct Status<'a> { + pub _tab: ::flatbuffers::Table<'a>, +} + +impl<'a> ::flatbuffers::Follow<'a> for Status<'a> { + type Inner = Status<'a>; + #[inline] + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + Self { _tab: unsafe { ::flatbuffers::Table::new(buf, loc) } } + } +} + +impl<'a> Status<'a> { + pub const VT_SOFTWARE_NAME: ::flatbuffers::VOffsetT = 4; + pub const VT_SOFTWARE_VERSION: ::flatbuffers::VOffsetT = 6; + pub const VT_SERVICE_ID: ::flatbuffers::VOffsetT = 8; + pub const 
VT_HOST_NAME: ::flatbuffers::VOffsetT = 10; + pub const VT_PROCESS_ID: ::flatbuffers::VOffsetT = 12; + pub const VT_UPDATE_INTERVAL: ::flatbuffers::VOffsetT = 14; + pub const VT_STATUS_JSON: ::flatbuffers::VOffsetT = 16; + + #[inline] + pub unsafe fn init_from_table(table: ::flatbuffers::Table<'a>) -> Self { + Status { _tab: table } + } + #[allow(unused_mut)] + pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: ::flatbuffers::Allocator + 'bldr>( + _fbb: &'mut_bldr mut ::flatbuffers::FlatBufferBuilder<'bldr, A>, + args: &'args StatusArgs<'args> + ) -> ::flatbuffers::WIPOffset> { + let mut builder = StatusBuilder::new(_fbb); + if let Some(x) = args.status_json { builder.add_status_json(x); } + builder.add_update_interval(args.update_interval); + builder.add_process_id(args.process_id); + if let Some(x) = args.host_name { builder.add_host_name(x); } + if let Some(x) = args.service_id { builder.add_service_id(x); } + if let Some(x) = args.software_version { builder.add_software_version(x); } + if let Some(x) = args.software_name { builder.add_software_name(x); } + builder.finish() + } + + + #[inline] + pub fn software_name(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Status::VT_SOFTWARE_NAME, None)} + } + #[inline] + pub fn software_version(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Status::VT_SOFTWARE_VERSION, None)} + } + #[inline] + pub fn service_id(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Status::VT_SERVICE_ID, None)} + } + #[inline] + pub fn host_name(&self) -> Option<&'a str> { + // Safety: + 
// Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Status::VT_HOST_NAME, None)} + } + #[inline] + pub fn process_id(&self) -> u32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Status::VT_PROCESS_ID, Some(0)).unwrap()} + } + #[inline] + pub fn update_interval(&self) -> u32 { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::(Status::VT_UPDATE_INTERVAL, Some(0)).unwrap()} + } + #[inline] + pub fn status_json(&self) -> Option<&'a str> { + // Safety: + // Created from valid Table for this object + // which contains a valid value in this slot + unsafe { self._tab.get::<::flatbuffers::ForwardsUOffset<&str>>(Status::VT_STATUS_JSON, None)} + } +} + +impl ::flatbuffers::Verifiable for Status<'_> { + #[inline] + fn run_verifier( + v: &mut ::flatbuffers::Verifier, pos: usize + ) -> Result<(), ::flatbuffers::InvalidFlatbuffer> { + v.visit_table(pos)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("software_name", Self::VT_SOFTWARE_NAME, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("software_version", Self::VT_SOFTWARE_VERSION, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("service_id", Self::VT_SERVICE_ID, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("host_name", Self::VT_HOST_NAME, false)? + .visit_field::("process_id", Self::VT_PROCESS_ID, false)? + .visit_field::("update_interval", Self::VT_UPDATE_INTERVAL, false)? + .visit_field::<::flatbuffers::ForwardsUOffset<&str>>("status_json", Self::VT_STATUS_JSON, false)? 
+ .finish(); + Ok(()) + } +} +pub struct StatusArgs<'a> { + pub software_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub software_version: Option<::flatbuffers::WIPOffset<&'a str>>, + pub service_id: Option<::flatbuffers::WIPOffset<&'a str>>, + pub host_name: Option<::flatbuffers::WIPOffset<&'a str>>, + pub process_id: u32, + pub update_interval: u32, + pub status_json: Option<::flatbuffers::WIPOffset<&'a str>>, +} +impl<'a> Default for StatusArgs<'a> { + #[inline] + fn default() -> Self { + StatusArgs { + software_name: None, + software_version: None, + service_id: None, + host_name: None, + process_id: 0, + update_interval: 0, + status_json: None, + } + } +} + +pub struct StatusBuilder<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> { + fbb_: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + start_: ::flatbuffers::WIPOffset<::flatbuffers::TableUnfinishedWIPOffset>, +} +impl<'a: 'b, 'b, A: ::flatbuffers::Allocator + 'a> StatusBuilder<'a, 'b, A> { + #[inline] + pub fn add_software_name(&mut self, software_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Status::VT_SOFTWARE_NAME, software_name); + } + #[inline] + pub fn add_software_version(&mut self, software_version: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Status::VT_SOFTWARE_VERSION, software_version); + } + #[inline] + pub fn add_service_id(&mut self, service_id: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Status::VT_SERVICE_ID, service_id); + } + #[inline] + pub fn add_host_name(&mut self, host_name: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Status::VT_HOST_NAME, host_name); + } + #[inline] + pub fn add_process_id(&mut self, process_id: u32) { + self.fbb_.push_slot::(Status::VT_PROCESS_ID, process_id, 0); + } + #[inline] + pub fn add_update_interval(&mut self, update_interval: u32) { + 
self.fbb_.push_slot::(Status::VT_UPDATE_INTERVAL, update_interval, 0); + } + #[inline] + pub fn add_status_json(&mut self, status_json: ::flatbuffers::WIPOffset<&'b str>) { + self.fbb_.push_slot_always::<::flatbuffers::WIPOffset<_>>(Status::VT_STATUS_JSON, status_json); + } + #[inline] + pub fn new(_fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>) -> StatusBuilder<'a, 'b, A> { + let start = _fbb.start_table(); + StatusBuilder { + fbb_: _fbb, + start_: start, + } + } + #[inline] + pub fn finish(self) -> ::flatbuffers::WIPOffset> { + let o = self.fbb_.end_table(self.start_); + ::flatbuffers::WIPOffset::new(o.value()) + } +} + +impl ::core::fmt::Debug for Status<'_> { + fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + let mut ds = f.debug_struct("Status"); + ds.field("software_name", &self.software_name()); + ds.field("software_version", &self.software_version()); + ds.field("service_id", &self.service_id()); + ds.field("host_name", &self.host_name()); + ds.field("process_id", &self.process_id()); + ds.field("update_interval", &self.update_interval()); + ds.field("status_json", &self.status_json()); + ds.finish() + } +} +#[inline] +/// Verifies that a buffer of bytes contains a `Status` +/// and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_status_unchecked`. +pub fn root_as_status(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root::(buf) +} +#[inline] +/// Verifies that a buffer of bytes contains a size prefixed +/// `Status` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `size_prefixed_root_as_status_unchecked`. 
+pub fn size_prefixed_root_as_status(buf: &[u8]) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root::(buf) +} +#[inline] +/// Verifies, with the given options, that a buffer of bytes +/// contains a `Status` and returns it. +/// Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_status_unchecked`. +pub fn root_as_status_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::root_with_opts::>(opts, buf) +} +#[inline] +/// Verifies, with the given verifier options, that a buffer of +/// bytes contains a size prefixed `Status` and returns +/// it. Note that verification is still experimental and may not +/// catch every error, or be maximally performant. For the +/// previous, unchecked, behavior use +/// `root_as_status_unchecked`. +pub fn size_prefixed_root_as_status_with_opts<'b, 'o>( + opts: &'o ::flatbuffers::VerifierOptions, + buf: &'b [u8], +) -> Result, ::flatbuffers::InvalidFlatbuffer> { + ::flatbuffers::size_prefixed_root_with_opts::>(opts, buf) +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a Status and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid `Status`. +pub unsafe fn root_as_status_unchecked(buf: &[u8]) -> Status<'_> { + unsafe { ::flatbuffers::root_unchecked::(buf) } +} +#[inline] +/// Assumes, without verification, that a buffer of bytes contains a size prefixed Status and returns it. +/// # Safety +/// Callers must trust the given bytes do indeed contain a valid size prefixed `Status`. 
+pub unsafe fn size_prefixed_root_as_status_unchecked(buf: &[u8]) -> Status<'_> { + unsafe { ::flatbuffers::size_prefixed_root_unchecked::(buf) } +} +pub const STATUS_IDENTIFIER: &str = "x5f2"; + +#[inline] +pub fn status_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, STATUS_IDENTIFIER, false) +} + +#[inline] +pub fn status_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool { + ::flatbuffers::buffer_has_identifier(buf, STATUS_IDENTIFIER, true) +} + +#[inline] +pub fn finish_status_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>( + fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, + root: ::flatbuffers::WIPOffset>) { + fbb.finish(root, Some(STATUS_IDENTIFIER)); +} + +#[inline] +pub fn finish_size_prefixed_status_buffer<'a, 'b, A: ::flatbuffers::Allocator + 'a>(fbb: &'b mut ::flatbuffers::FlatBufferBuilder<'a, A>, root: ::flatbuffers::WIPOffset>) { + fbb.finish_size_prefixed(root, Some(STATUS_IDENTIFIER)); +} diff --git a/rust/src/lib.rs b/rust/src/lib.rs new file mode 100644 index 0000000..065212b --- /dev/null +++ b/rust/src/lib.rs @@ -0,0 +1,168 @@ +use crate::flatbuffers_generated::action_response_answ::{ActionResponse, root_as_action_response}; +use crate::flatbuffers_generated::alarm_al00::{Alarm, root_as_alarm}; +use crate::flatbuffers_generated::area_detector_array_ad00::{ad00_ADArray, root_as_ad_00_adarray}; +use crate::flatbuffers_generated::data_se00::{ + root_as_se_00_sample_environment_data, se00_SampleEnvironmentData, +}; +use crate::flatbuffers_generated::dataarray_da00::{da00_DataArray, root_as_da_00_data_array}; +use crate::flatbuffers_generated::det_spec_map_df12::{ + SpectraDetectorMapping, root_as_spectra_detector_mapping, +}; +use crate::flatbuffers_generated::epics_connection_ep01::{ + EpicsPVConnectionInfo, root_as_epics_pvconnection_info, +}; +use crate::flatbuffers_generated::event_histogram_hs01::{EventHistogram, root_as_event_histogram}; +use 
crate::flatbuffers_generated::events_ev44::{Event44Message, root_as_event_44_message}; +use crate::flatbuffers_generated::finished_writing_wrdn::{ + FinishedWriting, root_as_finished_writing, +}; +use crate::flatbuffers_generated::forwarder_config_fc00::{ + fc00_ConfigUpdate, root_as_fc_00_config_update, +}; +use crate::flatbuffers_generated::json_json::{JsonData, root_as_json_data}; +use crate::flatbuffers_generated::logdata_f144::{f144_LogData, root_as_f_144_log_data}; +use crate::flatbuffers_generated::run_start_pl72::{RunStart, root_as_run_start}; +use crate::flatbuffers_generated::run_stop_6s4t::{RunStop, root_as_run_stop}; +use crate::flatbuffers_generated::status_x5f2::{Status, root_as_status}; +use crate::flatbuffers_generated::units_un00::{Units, root_as_units}; +use flatbuffers::InvalidFlatbuffer; + +#[allow(clippy::all)] +#[rustfmt::skip] +#[allow(dead_code, unused, non_snake_case, non_camel_case_types, non_upper_case_globals)] +pub mod flatbuffers_generated; + +/// Enum containing all possible messages currently supported by +/// `deserialize_message`. 
+#[derive(Debug, Clone, PartialEq)] +pub enum DeserializedMessage<'a> { + EventDataEv44(Event44Message<'a>), + AreaDetectorAd00(ad00_ADArray<'a>), + RunStartPl72(RunStart<'a>), + RunStop6s4t(RunStop<'a>), + LogDataF144(f144_LogData<'a>), + DetSpecMapDf12(SpectraDetectorMapping<'a>), + SenvSe00(se00_SampleEnvironmentData<'a>), + HistogramHs01(EventHistogram<'a>), + EpicsConnectionEp01(EpicsPVConnectionInfo<'a>), + JsonDataJson(JsonData<'a>), + ActionResponseAnsw(ActionResponse<'a>), + FinishedWritingWrdn(FinishedWriting<'a>), + StatusX5f2(Status<'a>), + ForwarderConfigFc00(fc00_ConfigUpdate<'a>), + AlarmAl00(Alarm<'a>), + DataArrayDa00(da00_DataArray<'a>), + UnitsUn00(Units<'a>), +} + +/// Error raised from `deserialize_message` describing why a message +/// cannot be deserialized +#[derive(Debug, Eq, PartialEq)] +pub enum DeserializationError { + UnsupportedSchema(String), + InvalidFlatbuffer(InvalidFlatbuffer), +} + +impl From for DeserializationError { + fn from(value: InvalidFlatbuffer) -> Self { + DeserializationError::InvalidFlatbuffer(value) + } +} + +/// Get the schema ID from a message. +pub fn get_schema_id(data: &[u8]) -> Option<&[u8]> { + data.get(4..8) +} + +/// Deserialize an arbitrary message from Kafka. +/// +/// Returns `Ok(DeserializedMessage)` if the message type is understood by +/// this function and the message deserialized correctly, or `Err` otherwise. 
+pub fn deserialize_message(data: &[u8]) -> Result<DeserializedMessage, DeserializationError> { + match get_schema_id(data) { + Some(b"ev44") => Ok(DeserializedMessage::EventDataEv44( + root_as_event_44_message(data)?, + )), + Some(b"ad00") => Ok(DeserializedMessage::AreaDetectorAd00( + root_as_ad_00_adarray(data)?, + )), + Some(b"pl72") => Ok(DeserializedMessage::RunStartPl72(root_as_run_start(data)?)), + Some(b"6s4t") => Ok(DeserializedMessage::RunStop6s4t(root_as_run_stop(data)?)), + Some(b"f144") => Ok(DeserializedMessage::LogDataF144(root_as_f_144_log_data( + data, + )?)), + Some(b"df12") => Ok(DeserializedMessage::DetSpecMapDf12( + root_as_spectra_detector_mapping(data)?, + )), + Some(b"se00") => Ok(DeserializedMessage::SenvSe00( + root_as_se_00_sample_environment_data(data)?, + )), + Some(b"hs01") => Ok(DeserializedMessage::HistogramHs01(root_as_event_histogram( + data, + )?)), + Some(b"ep01") => Ok(DeserializedMessage::EpicsConnectionEp01( + root_as_epics_pvconnection_info(data)?, + )), + Some(b"json") => Ok(DeserializedMessage::JsonDataJson(root_as_json_data(data)?)), + Some(b"answ") => Ok(DeserializedMessage::ActionResponseAnsw( + root_as_action_response(data)?, + )), + Some(b"wrdn") => Ok(DeserializedMessage::FinishedWritingWrdn( + root_as_finished_writing(data)?, + )), + Some(b"x5f2") => Ok(DeserializedMessage::StatusX5f2(root_as_status(data)?)), + Some(b"fc00") => Ok(DeserializedMessage::ForwarderConfigFc00( + root_as_fc_00_config_update(data)?, + )), + Some(b"al00") => Ok(DeserializedMessage::AlarmAl00(root_as_alarm(data)?)), + Some(b"da00") => Ok(DeserializedMessage::DataArrayDa00( + root_as_da_00_data_array(data)?, + )), + Some(b"un00") => Ok(DeserializedMessage::UnitsUn00(root_as_units(data)?)), + _ => Err(DeserializationError::UnsupportedSchema( + "Unknown message type passed to deserialize".to_owned(), + )), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::flatbuffers_generated::units_un00::{UnitsArgs, finish_units_buffer}; + use 
flatbuffers::FlatBufferBuilder; + + #[test] + fn test_deserialize_message() { + let mut fbb = FlatBufferBuilder::new(); + let un00_args = UnitsArgs { + source_name: Some(fbb.create_string("Hello")), + timestamp: 0, + units: Some(fbb.create_string("World")), + }; + let un00 = Units::create(&mut fbb, &un00_args); + finish_units_buffer(&mut fbb, un00); + + let deserialized = deserialize_message(fbb.finished_data()); + + match deserialized { + Ok(DeserializedMessage::UnitsUn00(msg)) => { + assert_eq!(msg.source_name(), "Hello"); + assert_eq!(msg.timestamp(), 0); + assert_eq!(msg.units(), Some("World")); + } + _ => panic!("Failed to deserialize message to correct type"), + } + } + + #[test] + fn test_fail_deserialize_message() { + let deserialized = deserialize_message(b"\0\0\0\0\0\0\0\0\0\0\0\0"); + + assert_eq!( + deserialized, + Err(DeserializationError::UnsupportedSchema( + "Unknown message type passed to deserialize".to_owned() + )) + ); + } +} diff --git a/schemas/ADAr_area_detector_array.fbs b/schemas/ADAr_area_detector_array.fbs deleted file mode 100644 index e9ec1cc..0000000 --- a/schemas/ADAr_area_detector_array.fbs +++ /dev/null @@ -1,26 +0,0 @@ - -// A flatbuffer schema for holding EPICS area detector updates - -file_identifier "ADAr"; - -enum DType:byte { int8, uint8, int16, uint16, int32, uint32, int64, uint64, float32, float64, c_string } - -table Attribute { - name: string (required); // Name of attribute - description: string; // Description of attribute - source: string; // EPICS PV name or DRV_INFO string of attribute - data_type: DType; // The type of the data (value) in this attribute - data: [ubyte] (required); // The data/value of the attribute -} - -table ADArray { - source_name: string (required); // Source name of array - id: int; // Unique id to this particular NDArray - timestamp: ulong; // Timestamp in nanoseconds since UNIX epoch - dimensions: [ulong] (required); // Dimensions of the array - data_type: DType; // The type of the data stored 
in the array - data: [ubyte] (required); // Elements in the array - attributes: [Attribute]; // Extra metadata about the array -} - -root_type ADArray; diff --git a/schemas/NDAr_NDArray_schema.fbs b/schemas/NDAr_NDArray_schema.fbs deleted file mode 100644 index 8f6e078..0000000 --- a/schemas/NDAr_NDArray_schema.fbs +++ /dev/null @@ -1,45 +0,0 @@ - -// NOTE: THIS SCHEMA HAS BEEN DEPRECATED AND WILL BE REMOVED SOON - -namespace FB_Tables; - -file_identifier "NDAr"; - -enum DType : byte { Int8, Uint8, Int16, Uint16, Int32, Uint32, Int64, Uint64, Float32, Float64, c_string } - -struct epicsTimeStamp { - secPastEpoch : int; - nsec : int; -} - -table NDAttribute { -pName: - string; -pDescription: - string; -pSource: - string; -dataType: - DType; -pData: - [ubyte]; -} - -table NDArray { -id: - int; -timeStamp: - double; -epicsTS: - epicsTimeStamp; -dims: - [ulong]; -dataType: - DType; -pData: - [ubyte]; -pAttributeList: - [NDAttribute]; -} - -root_type NDArray; diff --git a/schemas/amo0_psi_sinq.fbs b/schemas/amo0_psi_sinq.fbs deleted file mode 100644 index f0e59e8..0000000 --- a/schemas/amo0_psi_sinq.fbs +++ /dev/null @@ -1,11 +0,0 @@ - -file_identifier "amo0"; - -table EventMessage { - source_name : string; // used to be htype - message_id : ulong; // pid - pulse_time : ulong; // ts - time_of_flight : [uint]; // timestamp - detector_id : [uint]; // data -} -root_type EventMessage; diff --git a/schemas/an44_events.fbs b/schemas/an44_events.fbs deleted file mode 100644 index 00a792f..0000000 --- a/schemas/an44_events.fbs +++ /dev/null @@ -1,23 +0,0 @@ -// Schema for ansto variant of neutron detection event data - -file_identifier "an44"; - -table an44_EventMessage { - source_name : string (required); // Field identifying the producer type, for example detector type - message_id : long; // Consecutive numbers, to detect missing or unordered messages - reference_time : [long] (required); // Nanoseconds since Unix epoch (1 Jan 1970) - // If pulse times are available in the 
acquisition system, this field holds - // those timestamps. Holds wall time otherwise. - reference_time_index : [int] (required); // Index into the time_of_flight array for the start of the neutron events linked - // to the corresponding pulse/reference time. - // reference_time_index and reference_time are the same length. - time_of_flight : [int]; // Nanoseconds - // Time of flight for each event if pulse time is available. If not, a - // (positive) offset from the wall time stored in the `reference_time`. - // Cannot be empty if events are being sent. - pixel_id : [int]; // Identifiers that represent the positions of the events in the detector(s). - // Can be empty even when events are sent if the pixel_id is implicit (e.g. single-pixel beam monitor). - weight : [short]; // Records a weight for the matching neutron event if present, otherwise is empty -} - -root_type an44_EventMessage; diff --git a/schemas/ar51_readout_data.fbs b/schemas/ar51_readout_data.fbs deleted file mode 100644 index c565120..0000000 --- a/schemas/ar51_readout_data.fbs +++ /dev/null @@ -1,12 +0,0 @@ -// Schema for arbitrary binary buffer data -// Developed for streaming raw ESS Readout Payload - -file_identifier "ar51"; - -table RawReadoutMessage { - source_name : string (required); // Field identifying the producer type, for example detector type - message_id : long; // Consecutive numbers, to detect missing or unordered messages - raw_data : [ubyte]; // UDP payload buffer, unsigned bytes -} - -root_type RawReadoutMessage; diff --git a/schemas/ba57_run_info.fbs b/schemas/ba57_run_info.fbs deleted file mode 100644 index 1b1df8e..0000000 --- a/schemas/ba57_run_info.fbs +++ /dev/null @@ -1,23 +0,0 @@ -// Run start/stop information for Mantid - -file_identifier "ba57"; - -table RunStart { - start_time : ulong; // nanoseconds since Unix epoch (1 Jan 1970) - run_number : int; // ID for the run - instrument_name : string; // Name of the instrument - n_periods : int; // Number of periods (ISIS 
only) -} - -table RunStop { - stop_time : ulong; // nanoseconds since Unix epoch (1 Jan 1970) - run_number : int; // ID for the run -} - -union InfoTypes { RunStart, RunStop } - -table RunInfo { - info_type : InfoTypes; -} - -root_type RunInfo; diff --git a/schemas/dtdb_adc_pulse_debug.fbs b/schemas/dtdb_adc_pulse_debug.fbs deleted file mode 100644 index a591243..0000000 --- a/schemas/dtdb_adc_pulse_debug.fbs +++ /dev/null @@ -1,14 +0,0 @@ -// Schema for transmitting additonal (debug) event information. -// All fields are optional. - -file_identifier "dtdb"; - -table AdcPulseDebug { - amplitude : [uint32]; // Amplitude of the pulse above bkg. - peak_area : [uint32]; // Area under the curve of the pulse. - background : [uint32]; // Background level of pulses. - threshold_time : [uint64]; // Timestamp in (ns) UNIX epoch when the pulse - // passed the threshold on the rising edge - peak_time : [uint64]; // Timestamp in (ns) UNIX epoch when the pulse - // reached its peak value -} diff --git a/schemas/ep00_epics_connection_info.fbs b/schemas/ep00_epics_connection_info.fbs deleted file mode 100644 index 188b11f..0000000 --- a/schemas/ep00_epics_connection_info.fbs +++ /dev/null @@ -1,23 +0,0 @@ -// Represent events about the underlying EPICS connection. 
- -file_identifier "ep00"; - -enum EventType: ushort { - UNKNOWN, - NEVER_CONNECTED, - CONNECTED, - DISCONNECTED, - DESTROYED, -} - -table EpicsConnectionInfo { - // Nanoseconds since UNIX epoch - timestamp: ulong; - type: EventType; - // The channel name, called `source_name` to stay in sync with `f142` - source_name: string; - // Identifies the client which has observed the event - service_id: string; -} - -root_type EpicsConnectionInfo; diff --git a/schemas/ev42_events.fbs b/schemas/ev42_events.fbs deleted file mode 100644 index e18e21f..0000000 --- a/schemas/ev42_events.fbs +++ /dev/null @@ -1,22 +0,0 @@ -// Schema for neutron detection event data - -include "is84_isis_events.fbs"; -include "dtdb_adc_pulse_debug.fbs"; - -file_identifier "ev42"; - -union FacilityData { ISISData, AdcPulseDebug } - -table EventMessage { - source_name : string; // optional field identifying the producer type, for example detector type - message_id : ulong; // consecutive numbers, to detect missing or unordered messages - pulse_time : ulong; // Nanoseconds since Unix epoch (1 Jan 1970) - // If a pulse time is available in the aquisition system, this field holds - // that timestamp. Holds wall time otherwise. - time_of_flight : [uint]; // Nanoseconds - // Time of flight for each event if pulse time is available. If not, a - // (positive) offset from the wall time stored in the `pulse_time`. - detector_id : [uint]; // Identifiers that represent the positions of the events in the detector(s). 
- facility_specific_data : FacilityData; // optional field -} -root_type EventMessage; diff --git a/schemas/ev43_events.fbs b/schemas/ev43_events.fbs deleted file mode 100644 index 6caa531..0000000 --- a/schemas/ev43_events.fbs +++ /dev/null @@ -1,18 +0,0 @@ -// Schema for neutron detection event data with multiple pulse events - -file_identifier "ev43"; - -table Event43Message { - source_name : string; // optional field identifying the producer type, for example detector type - message_id : ulong; // consecutive numbers, to detect missing or unordered messages - pulse_time : [ulong]; // Nanoseconds since Unix epoch (1 Jan 1970) - // If pulse times are available in the aquisition system, this field holds - // those timestamps. Holds wall time otherwise. - pulse_index : [uint]; // Index into the array for the start of the neutron events linked to the - // corresponding pulse time. Pulse index and pulse time are the same length. - time_of_flight : [uint]; // Nanoseconds - // Time of flight for each event if pulse time is available. If not, a - // (positive) offset from the wall time stored in the `pulse_time`. - detector_id : [uint]; // Identifiers that represent the positions of the events in the detector(s). -} -root_type Event43Message; diff --git a/schemas/f140_general.fbs b/schemas/f140_general.fbs deleted file mode 100644 index fb82515..0000000 --- a/schemas/f140_general.fbs +++ /dev/null @@ -1,164 +0,0 @@ -// General schema which allows any PVStructure to be forwarded as a flatbuffer. -// Generality comes at a price: More overhead during construction in terms of space -// and cpu, more work for the receiver of the flatbuffer to access. 
- -// file_identifier "\\xf1\\x40"; - -file_identifier "f140"; - -namespace BrightnESS.FlatBufs.f140_general; - -table pvByte { - v: byte; -} - -table pvUByte { - v: ubyte; -} - -table pvShort { - v: short; -} - -table pvUShort { - v: ushort; -} - -table pvInt { - v: int; -} - -table pvUInt { - v: uint; -} - -table pvLong { - v: long; -} - -table pvULong { - v: ulong; -} - -table pvFloat { - v: float; -} - -table pvDouble { - v: double; -} - -table pvString { - v: string; -} - - - - -table pvByte_a { - v: [byte]; -} - -table pvShort_a { - v: [short]; -} - -table pvInt_a { - v: [int]; -} - -table pvLong_a { - v: [long]; -} - -table pvUByte_a { - v: [ubyte]; -} - -table pvUShort_a { - v: [ushort]; -} - -table pvUInt_a { - v: [uint]; -} - -table pvULong_a { - v: [ulong]; -} - -table pvFloat_a { - v: [float]; -} - -table pvDouble_a { - v: [double]; -} - -table pvString_a { - v: [string]; -} - - -union F { - pvByte, - pvShort, - pvInt, - pvLong, - pvUByte, - pvUShort, - pvUInt, - pvULong, - - pvFloat, - pvDouble, - - pvString, - - pvByte_a, - pvShort_a, - pvInt_a, - pvLong_a, - pvUByte_a, - pvUShort_a, - pvUInt_a, - pvULong_a, - - pvFloat_a, - pvDouble_a, - - pvString_a, - - Obj, - Obj_a, -} - - -table ObjM { - k: string; - v: F; -} - -table Obj { - ms: [ObjM]; -} - -table Obj_a { - v: [Obj]; -} - -struct fwdinfo_t { - seq: ulong; - ts_data: ulong; - ts_fwd: ulong; - fwdix: ubyte; -} - -table PV { - n: string; - v: F; - fwdinfo: fwdinfo_t; -} - -// Root must be a table -root_type PV; diff --git a/schemas/f141_epics_nt.fbs b/schemas/f141_epics_nt.fbs deleted file mode 100644 index 6bbca00..0000000 --- a/schemas/f141_epics_nt.fbs +++ /dev/null @@ -1,84 +0,0 @@ -file_identifier "f141"; - -namespace BrightnESS.FlatBufs.f141_epics_nt; - -struct timeStamp_t { - secondsPastEpoch: ulong; - nanoseconds: int; -} - -table NTScalarByte { value: byte; } -table NTScalarUByte { value: ubyte; } -table NTScalarShort { value: short; } -table NTScalarUShort { value: ushort; } -table 
NTScalarInt { value: int; } -table NTScalarUInt { value: uint; } -table NTScalarLong { value: long; } -table NTScalarULong { value: ulong; } -table NTScalarFloat { value: float; } -table NTScalarDouble { value: double; } - -table NTScalarArrayByte { value: [ byte]; } -table NTScalarArrayUByte { value: [ubyte]; } -table NTScalarArrayShort { value: [ short]; } -table NTScalarArrayUShort { value: [ushort]; } -table NTScalarArrayInt { value: [ int]; } -table NTScalarArrayUInt { value: [uint]; } -table NTScalarArrayLong { value: [ long]; } -table NTScalarArrayULong { value: [ulong]; } -table NTScalarArrayFloat { value: [ float]; } -table NTScalarArrayDouble { value: [ double]; } - -union PV { - NTScalarByte, - NTScalarUByte, - NTScalarShort, - NTScalarUShort, - NTScalarInt, - NTScalarUInt, - NTScalarLong, - NTScalarULong, - NTScalarFloat, - NTScalarDouble, - NTScalarArrayByte, - NTScalarArrayUByte, - NTScalarArrayShort, - NTScalarArrayUShort, - NTScalarArrayInt, - NTScalarArrayUInt, - NTScalarArrayLong, - NTScalarArrayULong, - NTScalarArrayFloat, - NTScalarArrayDouble -} - -struct fwdinfo_t { - seq: ulong; - ts_data: ulong; - ts_fwd: ulong; - fwdix: ubyte; - teamid: ulong; -} - -table fwdinfo_2_t { - seq_data: ulong; - seq_fwd: ulong; - ts_data: ulong; - ts_fwd: ulong; - fwdix: uint; - teamid: ulong; -} - -union fwdinfo_u { - fwdinfo_2_t, -} - -table EpicsPV { - name: string; - pv: PV; - timeStamp: timeStamp_t; - fwdinfo: fwdinfo_t; - fwdinfo2: fwdinfo_u; -} - -root_type EpicsPV; diff --git a/schemas/f142_logdata.fbs b/schemas/f142_logdata.fbs deleted file mode 100644 index 6c883f5..0000000 --- a/schemas/f142_logdata.fbs +++ /dev/null @@ -1,98 +0,0 @@ -// Log data, for example "slow" sample environment measurements -// -// Typical producers and consumers: -// Produced by EPICS forwarder from EPICS PV -// Produced by NeXus-Streamer from NXlogs -// Consumed by NeXus file writer -> NXLog -// Consumed by Mantid -> Workspace log - -file_identifier "f142"; - -table Byte { 
value: byte; } -table UByte { value: ubyte; } -table Short { value: short; } -table UShort { value: ushort; } -table Int { value: int; } -table UInt { value: uint; } -table Long { value: long; } -table ULong { value: ulong; } -table Float { value: float; } -table Double { value: double; } - -table ArrayByte { value: [ byte]; } -table ArrayUByte { value: [ubyte]; } -table ArrayShort { value: [ short]; } -table ArrayUShort { value: [ushort]; } -table ArrayInt { value: [ int]; } -table ArrayUInt { value: [uint]; } -table ArrayLong { value: [ long]; } -table ArrayULong { value: [ulong]; } -table ArrayFloat { value: [ float]; } -table ArrayDouble { value: [ double]; } - -union Value { - Byte, - UByte, - Short, - UShort, - Int, - UInt, - Long, - ULong, - Float, - Double, - ArrayByte, - ArrayUByte, - ArrayShort, - ArrayUShort, - ArrayInt, - ArrayUInt, - ArrayLong, - ArrayULong, - ArrayFloat, - ArrayDouble, -} - -enum AlarmStatus: ushort { - NO_ALARM, - READ, - WRITE, - HIHI, - HIGH, - LOLO, - LOW, - STATE, - COS, - COMM, - TIMED, - HWLIMIT, - CALC, - SCAN, - LINK, - SOFT, - BAD_SUB, - UDF, - DISABLE, - SIMM, - READ_ACCESS, - WRITE_ACCESS, - NO_CHANGE -} - -enum AlarmSeverity: ushort { - MINOR, - MAJOR, - NO_ALARM, - INVALID, - NO_CHANGE -} - -table LogData { - source_name: string; // identify source on multiplexed topics, e.g. 
PV name if from EPICS - value: Value; // may be scalar or array - timestamp: ulong; // nanoseconds past epoch (1 Jan 1970), zero reserved for invalid timestamp - status: AlarmStatus = NO_CHANGE; // details of EPICS alarm, default being NO_CHANGE: file writer only records changes - severity: AlarmSeverity = NO_CHANGE; // severity of current EPICS alarm status, default of NO_CHANGE should be used if status has value of NO_CHANGE -} - -root_type LogData; diff --git a/schemas/f143_structure.fbs b/schemas/f143_structure.fbs deleted file mode 100644 index a3af0bb..0000000 --- a/schemas/f143_structure.fbs +++ /dev/null @@ -1,77 +0,0 @@ -// General schema which allows any EPICS structure to be forwarded as a flatbuffer. -// Generality comes at a price: More overhead during construction in terms of space -// and cpu, more work for the receiver of the flatbuffer to access. - -include "fwdi_forwarder_internal.fbs"; - -file_identifier "f143"; - -namespace f143_structure; - -table Byte { value: byte; } -table UByte { value: ubyte; } -table Short { value: short; } -table UShort { value: ushort; } -table Int { value: int; } -table UInt { value: uint; } -table Long { value: long; } -table ULong { value: ulong; } -table Float { value: float; } -table Double { value: double; } -table String { value: string; } - -table ArrayByte { value: [ byte]; } -table ArrayUByte { value: [ubyte]; } -table ArrayShort { value: [ short]; } -table ArrayUShort { value: [ushort]; } -table ArrayInt { value: [ int]; } -table ArrayUInt { value: [uint]; } -table ArrayLong { value: [ long]; } -table ArrayULong { value: [ulong]; } -table ArrayFloat { value: [ float]; } -table ArrayDouble { value: [ double]; } -table ArrayString { value: [ string]; } - -union Value { - Byte, - Short, - Int, - Long, - UByte, - UShort, - UInt, - ULong, - Float, - Double, - String, - Obj, - ArrayByte, - ArrayShort, - ArrayInt, - ArrayLong, - ArrayUByte, - ArrayUShort, - ArrayUInt, - ArrayULong, - ArrayFloat, - ArrayDouble, - 
ArrayString, - ArrayObj, -} - -table ObjM { - k: string; - v: Value; -} - -table Obj { value: [ObjM]; } -table ArrayObj { value: [Obj]; } - -table Structure { - name: string; - value: Value; - timestamp: ulong; - fwdinfo: forwarder_internal; -} - -root_type Structure; diff --git a/schemas/fwdi_forwarder_internal.fbs b/schemas/fwdi_forwarder_internal.fbs deleted file mode 100644 index 2ddf61f..0000000 --- a/schemas/fwdi_forwarder_internal.fbs +++ /dev/null @@ -1,15 +0,0 @@ -file_identifier "fwdi"; - -// optional, currently only used by forwarder -table fwdinfo_1_t { - seq_data: ulong; - seq_fwd: ulong; - ts_data: ulong; - ts_fwd: ulong; - fwdix: uint; - teamid: ulong; -} - -union forwarder_internal { - fwdinfo_1_t, -} diff --git a/schemas/hs00_event_histogram.fbs b/schemas/hs00_event_histogram.fbs deleted file mode 100644 index 01d6463..0000000 --- a/schemas/hs00_event_histogram.fbs +++ /dev/null @@ -1,49 +0,0 @@ -// General schema for histogram - -file_identifier "hs00"; - -table ArrayUInt { value: [uint]; } -table ArrayULong { value: [ulong]; } -table ArrayDouble { value: [double]; } -table ArrayFloat { value: [float]; } - -// Union of allowed data types for the arrays -union Array { - ArrayUInt, - ArrayULong, - ArrayDouble, - ArrayFloat, -} - -// Meta information for one dimension -table DimensionMetaData { - length: uint; // Length of the full histogram along this dimension - unit: string; // Unit - label: string; // Label - bin_boundaries: Array; // Boundary information (should be of length: DimensionMetaData.length+1) -} - -// Represents a n-dimensional histogram -// Subsets of histogram are also supported -table EventHistogram { - source: string; // Source name - timestamp: ulong; // Timestamp (in ns, after unix epoch) - dim_metadata: [DimensionMetaData]; // Meta data for each dimension - last_metadata_timestamp: ulong; // Timestamp (ns, after unix epoch) when the last metadata information was written - current_shape: [uint] (required); // Shape of the 
current data in each dimension - offset: [uint]; // Offset giving the starting index in each dimension - data: Array; // Data represented in RowMajor order (C Style), filled with 0 if missing - errors: Array; // Errors in calculation of histogram data (same size as data) - info: string; // Additional information (Integrated/Processed) -} - -// The "current_shape" and "offset" fields can be used to define a slice of a -// larger histogram. This allows breaking a large histogram into multiple messages. -// For example the dim_metadata could look like this: -// dim_metadata=[DimensionMetaData(label="x", length=10, ...), DimensionMetaData(label="y", length=10, ...)] -// and each row could be sent as a separate message by using: -// current_shape=[10, 1] and offset=[0, 0] in the 1st message -// current_shape=[10, 1] and offset=[0, 1] in the 2nd message -// and so on. - -root_type EventHistogram; diff --git a/schemas/is84_isis_events.fbs b/schemas/is84_isis_events.fbs deleted file mode 100644 index 81afb65..0000000 --- a/schemas/is84_isis_events.fbs +++ /dev/null @@ -1,11 +0,0 @@ -// Schema for ISIS specific fields to be added to neutron event messages - -file_identifier "is84"; - -enum RunState : byte { SETUP=0, RUNNING=1 } - -table ISISData { - period_number : uint; - run_state : RunState; // current instrument run state - proton_charge : float; // at ESS this will likely come through EPICS forwarder instead -} diff --git a/schemas/mo01_nmx.fbs b/schemas/mo01_nmx.fbs deleted file mode 100644 index d65a3bb..0000000 --- a/schemas/mo01_nmx.fbs +++ /dev/null @@ -1,57 +0,0 @@ -// Schema for event-formation-unit detector monitoring, such as ADC and -// channel histograms, particle tracks or 'Hits'. -// Sent periodically (typically once per second or so) by the -// detector pipelines to Kafka and consumed by Daquiri for visualisation. -// Useful for debugging, commissioning and testing but not necessarily an -// essential service provided when ESS becomes fully operational. 
- -file_identifier "mo01"; - -// GEMHist is used by gdgem, sonde, multiblade for adc and channel histograms in HistSerializer.cpp -// https://github.com/ess-dmsc/event-formation-unit/blob/master/prototype2/common/HistSerializer.cpp -// GEMTrack is used by gdgem for particle tracks in TrackSerializer.cpp -// https://github.com/ess-dmsc/event-formation-unit/blob/master/prototype2/gdgem/nmx/TrackSerializer.cpp -// MONHit is used by multigrid for streaming readouts in ReadoutSerializer.cpp -// https://github.com/ess-dmsc/event-formation-unit/blob/master/prototype2/common/ReadoutSerializer.cpp - - -union DataField { GEMHist, GEMTrack, MONHit } - -// used for GEMTrack -table pos { - time : ushort; // Arbitrary units, could be ns or clock ticks - strip: ushort; // An index along one axis - adc : ushort; // ADC value from digitiser or other intensity/weight -} - -// -table MONHit { - plane : [ushort]; // A coordinate dimension (x, y, z) or detector dimension (wires, strips) - time : [uint]; // Arbitrary units, could be ns or clock ticks - channel : [ushort]; // a channels representing a 'position' along the plane - adc : [ushort]; // ADC value from digitiser or other intensity/weight -} - -table GEMTrack { - time_offset : ulong; // - xtrack : [pos]; // Particle track projection on x - ytrack : [pos]; // Particle track projection on y - xpos : double; // Calculated neutron entry position, x-coord - ypos : double; // Calculated neutron entry position, y-coord -} - -table GEMHist { - xstrips : [uint]; // Histogram counts along x-coord - ystrips : [uint]; // Histogram counts along y-coord - xspectrum : [uint]; - yspectrum : [uint]; - cluster_spectrum : [uint]; - bin_width : uint; -} - -table MonitorMessage { - source_name : string; - data : DataField; -} - -root_type MonitorMessage; diff --git a/schemas/ns10_cache_entry.fbs b/schemas/ns10_cache_entry.fbs deleted file mode 100644 index b03dd1d..0000000 --- a/schemas/ns10_cache_entry.fbs +++ /dev/null @@ -1,15 +0,0 @@ - 
-file_identifier "ns10"; - -/// pylint: skip-file -table CacheEntry { - key:string; // key for this entry (usually nicos/device/parameter) - time:double; // time (in seconds after epoch) when this entry was set - ttl:double; // time to live (in seconds after time field of this entry) - expired:bool = false; // already expired (manually or using ttl), supersedes ttl - // Value for the key. - // The value can be numerical types, strings, list, tuple, dictionaries, sets - value:string; -} - -root_type CacheEntry; diff --git a/schemas/ns11_typed_cache_entry.fbs b/schemas/ns11_typed_cache_entry.fbs deleted file mode 100644 index 207f336..0000000 --- a/schemas/ns11_typed_cache_entry.fbs +++ /dev/null @@ -1,48 +0,0 @@ -file_identifier "ns11"; - -table Bool { value: bool; } -table Long { value: long; } -table Double { value: double; } -table String { value: string; } -table Object { value: string; } // Python object represented as string - -enum ArrayType : byte { - ListType = 0, - TupleType, - SetType -} - -union Value { - Object, - Bool, - Long, - Double, - String, - Dict, - Array -} - -table DictMapping { - k: Value; - v: Value; -} - -table Dict { value: [DictMapping]; } - -table ArrayElement { v: Value; } - -table Array { - value: [ArrayElement]; - array_type: ArrayType; -} - -/// pylint: skip-file -table TypedCacheEntry { - key: string; // key for this entry (usually nicos/device/parameter) - time: double; // time (in seconds after epoch) when this entry was set - ttl: double; // time to live (in seconds after time field of this entry). NOT TO BE USED OUTSIDE OF NICOS! - expired: bool = false; // already expired (manually or using ttl), supersedes ttl. NOT TO BE USED OUTSIDE OF NICOS! 
- value: Value; -} - -root_type TypedCacheEntry; diff --git a/schemas/pu00_pulse_metadata.fbs b/schemas/pu00_pulse_metadata.fbs new file mode 100644 index 0000000..2d975f0 --- /dev/null +++ b/schemas/pu00_pulse_metadata.fbs @@ -0,0 +1,17 @@ +// Schema for neutron pulse information + +file_identifier "pu00"; + +table Pu00Message { + source_name : string (required); // Field identifying the producer type, for example detector type + message_id : long; // Consecutive numbers, to detect missing or unordered messages. + reference_time : long; // Nanoseconds since Unix epoch (1 Jan 1970) + // If pulse times are available in the acquisition system, this field holds + // those timestamps. Holds wall time otherwise. + + vetos : uint = null; // Veto bitmask for this frame, if present + period_number : uint = null; // Period number into which this pulse was collected, if present + proton_charge : float = null; // Proton charge for this frame (uAh per frame), if present +} + +root_type Pu00Message; diff --git a/schemas/rf5k_forwarder_config.fbs b/schemas/rf5k_forwarder_config.fbs deleted file mode 100644 index 2c5c29e..0000000 --- a/schemas/rf5k_forwarder_config.fbs +++ /dev/null @@ -1,40 +0,0 @@ -// Forwarder Configuration Update -// Add or remove channels from a Forwarder configuration -// -// Typical producers and consumers: -// Produced by NICOS -// Consumed by Forwarder - -file_identifier "rf5k"; - -enum UpdateType: ushort { - ADD, - REMOVE, - REMOVEALL -} - -enum Protocol: ushort { - PVA, // EPICS PV access - CA, // EPICS channel access - FAKE // Forwarder generates fake updates, frequency configurable with command line argument -} - -table Stream { - // If config_change=ADD then all fields of Stream must be populated. - // If config_change=REMOVE then at least one of the string fields must be populated, - // and the Forwarder will remove any streams which match all of the populated string fields. - // "populated" here means supplying a non-empty string for the field. 
- // Wildcards '?' (single character) and '*' (multi-character) can be used to match topic or channel name, - // Wildcards cannot be used to match schema as they are valid characters in schema identifiers. - channel: string; // Name of the EPICS channel/pv (e.g. "MYIOC:VALUE1") - schema: string; // Identify the output format for updates from the named channel (e.g. "f142" or "tdct") - topic: string; // Name of the output topic for updates from the named channel (e.g. "LOKI_motionControl") - protocol: Protocol = PVA; // Protocol for channel, EPICS PV access by default -} - -table ConfigUpdate { - config_change: UpdateType; // Type of config change, add streams, remove streams or remove all streams - streams: [Stream]; // Details what should be forwarded where, empty if config_change=REMOVEALL -} - -root_type ConfigUpdate; diff --git a/schemas/senv_data.fbs b/schemas/senv_data.fbs deleted file mode 100644 index 82af240..0000000 --- a/schemas/senv_data.fbs +++ /dev/null @@ -1,42 +0,0 @@ -//Used to transmit fast sample environment data -// NOTE: THIS SCHEMA HAS BEEN DEPRECATED - -file_identifier "senv"; - -enum Location : byte { Unknown = 0, Start, Middle, End } - -table Int8Array { value: [ byte] (required); } -table UInt8Array { value: [ubyte] (required); } -table Int16Array { value: [ short] (required); } -table UInt16Array { value: [ushort] (required); } -table Int32Array { value: [ int] (required); } -table UInt32Array { value: [uint] (required); } -table Int64Array { value: [ long] (required); } -table UInt64Array { value: [ulong] (required); } - -union ValueUnion { - Int8Array, - UInt8Array, - Int16Array, - UInt16Array, - Int32Array, - UInt32Array, - Int64Array, - UInt64Array -} - -table SampleEnvironmentData { - Name: string (required); // Name of the device/source of the data. - Channel: int; // Can be used to store the ADC channel number. Should be set to -1 if not used. 
- PacketTimestamp: ulong; // The timestamp (in nanoseconds since UNIX epoch) of the first sample in the value vector. - TimeDelta: double; // Time in nanoseconds between samples. Available for "compression" of the schema. Should - // be set to <= 0 if not used. - TimestampLocation: Location; // Relevant when the delta time between two consecutive timestamps is long in comparison - // to the resolution of the timestamp. For example, when using oversampling. - // middle or end of the samples that were summed to produce each oversampled sample. - Values: ValueUnion (required); // The sample values. - Timestamps: [ulong]; // OPTIONAL (nanosecond) timestamps of each individual sample. - MessageCounter: ulong; // Monotonically increasing counter. -} - -root_type SampleEnvironmentData; diff --git a/schemas/tdct_timestamps.fbs b/schemas/tdct_timestamps.fbs deleted file mode 100644 index 3e30061..0000000 --- a/schemas/tdct_timestamps.fbs +++ /dev/null @@ -1,9 +0,0 @@ -file_identifier "tdct"; - -table timestamp { - name: string (required); // Name of the device (e.g. "Chopper_3"). - timestamps: [ulong] (required); // Timestamps in the form of nano seconds since UNIX epoch. - sequence_counter: ulong; // Monotonically increasing counter. -} - -root_type timestamp;