diff --git a/.gitignore b/.gitignore index 7b8c4c5..1992cb8 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,4 @@ -/cpp/generated/ /cpp/build/ -/python/petsird/ __pycache__/ # Common editor backups diff --git a/cpp/CMakeLists.txt b/cpp/CMakeLists.txt index 16eae3f..7841b8e 100644 --- a/cpp/CMakeLists.txt +++ b/cpp/CMakeLists.txt @@ -16,3 +16,19 @@ add_executable(petsird_analysis petsird_analysis.cpp) target_link_libraries(petsird_analysis petsird_generated) add_subdirectory(generated) + +# PETSIRD Library (libpetsird) +add_library(petsird SHARED $) +target_link_libraries(petsird petsird_generated) +install(TARGETS petsird DESTINATION lib) + +# Set the target include directory where headers will be installed +set(INSTALL_INCLUDE_DIR "include/petsird") + +# Collect all header files in the current directory and subdirectories +file(GLOB_RECURSE HEADER_FILES "*.h") + +# Specify the installation directory for header files +install(FILES ${HEADER_FILES} + DESTINATION ${INSTALL_INCLUDE_DIR} +) diff --git a/cpp/generated/CMakeLists.txt b/cpp/generated/CMakeLists.txt new file mode 100644 index 0000000..087b224 --- /dev/null +++ b/cpp/generated/CMakeLists.txt @@ -0,0 +1,40 @@ +# This file was generated by the "yardl" tool. DO NOT EDIT. + +# To opt out of generating this file, set cpp.generateCMakeLists to false in the _package.yml file. 
+ +# To use the object library defined in this file, add the following to your CMakeLists.txt file: +# target_link_libraries( petsird_generated) +# add_subdirectory() + +set(HOWARD_HINNANT_DATE_MINIMUM_VERSION "3.0.0") +find_package(date ${HOWARD_HINNANT_DATE_MINIMUM_VERSION} REQUIRED) + +if(VCPKG_TARGET_TRIPLET) + set(HDF5_CXX_LIBRARIES hdf5::hdf5_cpp-shared) +else() + set(HDF5_CXX_LIBRARIES hdf5::hdf5_cpp) +endif() + +set(HDF5_MINIMUM_VERSION "1.10.5") +find_package(HDF5 ${HDF5_MINIMUM_VERSION} REQUIRED COMPONENTS C CXX) + +set(XTENSOR_MINIMUM_VERSION "0.21.10") +find_package(xtensor ${XTENSOR_MINIMUM_VERSION} REQUIRED) + +set(NLOHMANN_JSON_MINIMUM_VERSION "3.11.1") +find_package(nlohmann_json ${NLOHMANN_JSON_MINIMUM_VERSION} REQUIRED) +add_library(petsird_generated OBJECT + protocols.cc + types.cc + ndjson/protocols.cc + binary/protocols.cc + hdf5/protocols.cc +) + +target_link_libraries(petsird_generated + PUBLIC ${HDF5_C_LIBRARIES} + PUBLIC ${HDF5_CXX_LIBRARIES} + PUBLIC xtensor + PUBLIC date::date + PUBLIC nlohmann_json::nlohmann_json +) diff --git a/cpp/generated/binary/protocols.cc b/cpp/generated/binary/protocols.cc new file mode 100644 index 0000000..08f9814 --- /dev/null +++ b/cpp/generated/binary/protocols.cc @@ -0,0 +1,1158 @@ +// This file was generated by the "yardl" tool. DO NOT EDIT. + +#include "protocols.h" + +#include + +#include "../yardl/detail/binary/coded_stream.h" +#include "../yardl/detail/binary/serializers.h" + +namespace yardl::binary { +#ifndef _MSC_VER +// Values of offsetof() are only used if types are standard-layout. 
+#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Winvalid-offsetof" +#endif + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::CoincidenceEvent; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::detector_ids) + sizeof(__T__::tof_idx) + sizeof(__T__::energy_indices))) && + offsetof(__T__, detector_ids) < offsetof(__T__, tof_idx) && offsetof(__T__, tof_idx) < offsetof(__T__, energy_indices); +}; + +template +struct IsTriviallySerializable> { + using __T__ = petsird::SolidVolume; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::shape) + sizeof(__T__::material_id))) && + offsetof(__T__, shape) < offsetof(__T__, material_id); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::Coordinate; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::c))); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::BoxShape; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::corners))); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::AnnulusShape; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::inner_radius) + sizeof(__T__::outer_radius) + sizeof(__T__::thickness) + sizeof(__T__::angular_range))) && + offsetof(__T__, inner_radius) < offsetof(__T__, outer_radius) && offsetof(__T__, outer_radius) < offsetof(__T__, 
thickness) && offsetof(__T__, thickness) < offsetof(__T__, angular_range); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::RigidTransformation; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::matrix))); +}; + +template +struct IsTriviallySerializable> { + using __T__ = petsird::ReplicatedObject; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::object) + sizeof(__T__::transforms) + sizeof(__T__::ids))) && + offsetof(__T__, object) < offsetof(__T__, transforms) && offsetof(__T__, transforms) < offsetof(__T__, ids); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::DetectorModule; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::detecting_elements) + sizeof(__T__::detecting_element_ids) + sizeof(__T__::non_detecting_elements))) && + offsetof(__T__, detecting_elements) < offsetof(__T__, detecting_element_ids) && offsetof(__T__, detecting_element_ids) < offsetof(__T__, non_detecting_elements); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::ScannerGeometry; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::replicated_modules) + sizeof(__T__::ids) + sizeof(__T__::non_detecting_volumes))) && + offsetof(__T__, replicated_modules) < offsetof(__T__, ids) && offsetof(__T__, ids) < offsetof(__T__, non_detecting_volumes); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::Subject; + static 
constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::name) + sizeof(__T__::id))) && + offsetof(__T__, name) < offsetof(__T__, id); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::Institution; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::name) + sizeof(__T__::address))) && + offsetof(__T__, name) < offsetof(__T__, address); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::ExamInformation; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::subject) + sizeof(__T__::institution) + sizeof(__T__::protocol) + sizeof(__T__::start_of_acquisition))) && + offsetof(__T__, subject) < offsetof(__T__, institution) && offsetof(__T__, institution) < offsetof(__T__, protocol) && offsetof(__T__, protocol) < offsetof(__T__, start_of_acquisition); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::Direction; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::c))); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::DirectionMatrix; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::matrix))); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::Atom; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::mass_number) + 
sizeof(__T__::atomic_number))) && + offsetof(__T__, mass_number) < offsetof(__T__, atomic_number); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::BulkMaterial; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::id) + sizeof(__T__::name) + sizeof(__T__::density) + sizeof(__T__::atoms) + sizeof(__T__::mass_fractions))) && + offsetof(__T__, id) < offsetof(__T__, name) && offsetof(__T__, name) < offsetof(__T__, density) && offsetof(__T__, density) < offsetof(__T__, atoms) && offsetof(__T__, atoms) < offsetof(__T__, mass_fractions); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::ScannerInformation; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::model_name) + sizeof(__T__::scanner_geometry) + sizeof(__T__::bulk_materials) + sizeof(__T__::gantry_alignment) + sizeof(__T__::tof_bin_edges) + sizeof(__T__::tof_resolution) + sizeof(__T__::energy_bin_edges) + sizeof(__T__::energy_resolution_at_511) + sizeof(__T__::event_time_block_duration) + sizeof(__T__::coincidence_policy))) && + offsetof(__T__, model_name) < offsetof(__T__, scanner_geometry) && offsetof(__T__, scanner_geometry) < offsetof(__T__, bulk_materials) && offsetof(__T__, bulk_materials) < offsetof(__T__, gantry_alignment) && offsetof(__T__, gantry_alignment) < offsetof(__T__, tof_bin_edges) && offsetof(__T__, 
tof_bin_edges) < offsetof(__T__, tof_resolution) && offsetof(__T__, tof_resolution) < offsetof(__T__, energy_bin_edges) && offsetof(__T__, energy_bin_edges) < offsetof(__T__, energy_resolution_at_511) && offsetof(__T__, energy_resolution_at_511) < offsetof(__T__, event_time_block_duration) && offsetof(__T__, event_time_block_duration) < offsetof(__T__, coincidence_policy); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::Header; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::scanner) + sizeof(__T__::exam))) && + offsetof(__T__, scanner) < offsetof(__T__, exam); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::TripleEvent; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::detector_ids) + sizeof(__T__::tof_indices) + sizeof(__T__::energy_indices))) && + offsetof(__T__, detector_ids) < offsetof(__T__, tof_indices) && offsetof(__T__, tof_indices) < offsetof(__T__, energy_indices); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::EventTimeBlock; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::start) + sizeof(__T__::prompt_events) + sizeof(__T__::delayed_events) + sizeof(__T__::triple_events))) && + offsetof(__T__, start) < offsetof(__T__, prompt_events) && offsetof(__T__, prompt_events) < offsetof(__T__, delayed_events) && offsetof(__T__, delayed_events) < offsetof(__T__, triple_events); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::ExternalSignalTimeBlock; + static 
constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::start) + sizeof(__T__::signal_id) + sizeof(__T__::signal_values))) && + offsetof(__T__, start) < offsetof(__T__, signal_id) && offsetof(__T__, signal_id) < offsetof(__T__, signal_values); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::BedMovementTimeBlock; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::start) + sizeof(__T__::transform))) && + offsetof(__T__, start) < offsetof(__T__, transform); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::GantryMovementTimeBlock; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::start) + sizeof(__T__::transform))) && + offsetof(__T__, start) < offsetof(__T__, transform); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::ExternalSignalType; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::type) + sizeof(__T__::description) + sizeof(__T__::id))) && + offsetof(__T__, type) < offsetof(__T__, description) && offsetof(__T__, description) < offsetof(__T__, id); +}; + +template <> +struct IsTriviallySerializable { + using __T__ = petsird::TimeInterval; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::start) + sizeof(__T__::stop))) && + offsetof(__T__, start) < offsetof(__T__, stop); +}; + +template <> +struct 
IsTriviallySerializable { + using __T__ = petsird::TimeFrameInformation; + static constexpr bool value = + std::is_standard_layout_v<__T__> && + IsTriviallySerializable::value && + (sizeof(__T__) == (sizeof(__T__::time_frames))); +}; + +#ifndef _MSC_VER +#pragma GCC diagnostic pop // #pragma GCC diagnostic ignored "-Winvalid-offsetof" +#endif +} //namespace yardl::binary + +namespace { +template WriteT0, typename T1, yardl::binary::Writer WriteT1> +void WriteUnion(yardl::binary::CodedOutputStream& stream, std::variant const& value) { + yardl::binary::WriteInteger(stream, value.index()); + switch (value.index()) { + case 0: { + T0 const& v = std::get<0>(value); + WriteT0(stream, v); + break; + } + case 1: { + T1 const& v = std::get<1>(value); + WriteT1(stream, v); + break; + } + default: throw std::runtime_error("Invalid union index."); + } +} + +template ReadT0, typename T1, yardl::binary::Reader ReadT1> +void ReadUnion(yardl::binary::CodedInputStream& stream, std::variant& value) { + size_t index; + yardl::binary::ReadInteger(stream, index); + switch (index) { + case 0: { + T0 v; + ReadT0(stream, v); + value = std::move(v); + break; + } + case 1: { + T1 v; + ReadT1(stream, v); + value = std::move(v); + break; + } + default: throw std::runtime_error("Invalid union index."); + } +} + +template WriteT0, typename T1, yardl::binary::Writer WriteT1, typename T2, yardl::binary::Writer WriteT2, typename T3, yardl::binary::Writer WriteT3> +void WriteUnion(yardl::binary::CodedOutputStream& stream, std::variant const& value) { + yardl::binary::WriteInteger(stream, value.index()); + switch (value.index()) { + case 0: { + T0 const& v = std::get<0>(value); + WriteT0(stream, v); + break; + } + case 1: { + T1 const& v = std::get<1>(value); + WriteT1(stream, v); + break; + } + case 2: { + T2 const& v = std::get<2>(value); + WriteT2(stream, v); + break; + } + case 3: { + T3 const& v = std::get<3>(value); + WriteT3(stream, v); + break; + } + default: throw 
std::runtime_error("Invalid union index."); + } +} + +template ReadT0, typename T1, yardl::binary::Reader ReadT1, typename T2, yardl::binary::Reader ReadT2, typename T3, yardl::binary::Reader ReadT3> +void ReadUnion(yardl::binary::CodedInputStream& stream, std::variant& value) { + size_t index; + yardl::binary::ReadInteger(stream, index); + switch (index) { + case 0: { + T0 v; + ReadT0(stream, v); + value = std::move(v); + break; + } + case 1: { + T1 v; + ReadT1(stream, v); + value = std::move(v); + break; + } + case 2: { + T2 v; + ReadT2(stream, v); + value = std::move(v); + break; + } + case 3: { + T3 v; + ReadT3(stream, v); + value = std::move(v); + break; + } + default: throw std::runtime_error("Invalid union index."); + } +} +} // namespace + +namespace petsird::binary { +namespace { +[[maybe_unused]] void WriteCoincidenceEvent(yardl::binary::CodedOutputStream& stream, petsird::CoincidenceEvent const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteArray(stream, value.detector_ids); + yardl::binary::WriteInteger(stream, value.tof_idx); + yardl::binary::WriteArray(stream, value.energy_indices); +} + +[[maybe_unused]] void ReadCoincidenceEvent(yardl::binary::CodedInputStream& stream, petsird::CoincidenceEvent& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadArray(stream, value.detector_ids); + yardl::binary::ReadInteger(stream, value.tof_idx); + yardl::binary::ReadArray(stream, value.energy_indices); +} + +template WriteShape> +[[maybe_unused]] void WriteSolidVolume(yardl::binary::CodedOutputStream& stream, petsird::SolidVolume const& value) { + if constexpr (yardl::binary::IsTriviallySerializable>::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + WriteShape(stream, value.shape); + 
yardl::binary::WriteInteger(stream, value.material_id); +} + +template ReadShape> +[[maybe_unused]] void ReadSolidVolume(yardl::binary::CodedInputStream& stream, petsird::SolidVolume& value) { + if constexpr (yardl::binary::IsTriviallySerializable>::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + ReadShape(stream, value.shape); + yardl::binary::ReadInteger(stream, value.material_id); +} + +[[maybe_unused]] void WriteCoordinate(yardl::binary::CodedOutputStream& stream, petsird::Coordinate const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteFixedNDArray(stream, value.c); +} + +[[maybe_unused]] void ReadCoordinate(yardl::binary::CodedInputStream& stream, petsird::Coordinate& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadFixedNDArray(stream, value.c); +} + +[[maybe_unused]] void WriteBoxShape(yardl::binary::CodedOutputStream& stream, petsird::BoxShape const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteArray(stream, value.corners); +} + +[[maybe_unused]] void ReadBoxShape(yardl::binary::CodedInputStream& stream, petsird::BoxShape& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadArray(stream, value.corners); +} + +[[maybe_unused]] void WriteBoxSolidVolume(yardl::binary::CodedOutputStream& stream, petsird::BoxSolidVolume const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + petsird::binary::WriteSolidVolume(stream, value); +} + 
+[[maybe_unused]] void ReadBoxSolidVolume(yardl::binary::CodedInputStream& stream, petsird::BoxSolidVolume& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + petsird::binary::ReadSolidVolume(stream, value); +} + +[[maybe_unused]] void WriteAnnulusShape(yardl::binary::CodedOutputStream& stream, petsird::AnnulusShape const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteFloatingPoint(stream, value.inner_radius); + yardl::binary::WriteFloatingPoint(stream, value.outer_radius); + yardl::binary::WriteFloatingPoint(stream, value.thickness); + yardl::binary::WriteArray(stream, value.angular_range); +} + +[[maybe_unused]] void ReadAnnulusShape(yardl::binary::CodedInputStream& stream, petsird::AnnulusShape& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadFloatingPoint(stream, value.inner_radius); + yardl::binary::ReadFloatingPoint(stream, value.outer_radius); + yardl::binary::ReadFloatingPoint(stream, value.thickness); + yardl::binary::ReadArray(stream, value.angular_range); +} + +[[maybe_unused]] void WriteGeometricShape(yardl::binary::CodedOutputStream& stream, petsird::GeometricShape const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + WriteUnion(stream, value); +} + +[[maybe_unused]] void ReadGeometricShape(yardl::binary::CodedInputStream& stream, petsird::GeometricShape& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + ReadUnion(stream, value); +} + +[[maybe_unused]] void 
WriteGenericSolidVolume(yardl::binary::CodedOutputStream& stream, petsird::GenericSolidVolume const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + petsird::binary::WriteSolidVolume(stream, value); +} + +[[maybe_unused]] void ReadGenericSolidVolume(yardl::binary::CodedInputStream& stream, petsird::GenericSolidVolume& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + petsird::binary::ReadSolidVolume(stream, value); +} + +[[maybe_unused]] void WriteRigidTransformation(yardl::binary::CodedOutputStream& stream, petsird::RigidTransformation const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteFixedNDArray(stream, value.matrix); +} + +[[maybe_unused]] void ReadRigidTransformation(yardl::binary::CodedInputStream& stream, petsird::RigidTransformation& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadFixedNDArray(stream, value.matrix); +} + +template WriteT> +[[maybe_unused]] void WriteReplicatedObject(yardl::binary::CodedOutputStream& stream, petsird::ReplicatedObject const& value) { + if constexpr (yardl::binary::IsTriviallySerializable>::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + WriteT(stream, value.object); + yardl::binary::WriteVector(stream, value.transforms); + yardl::binary::WriteVector(stream, value.ids); +} + +template ReadT> +[[maybe_unused]] void ReadReplicatedObject(yardl::binary::CodedInputStream& stream, petsird::ReplicatedObject& value) { + if constexpr (yardl::binary::IsTriviallySerializable>::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + 
return; + } + + ReadT(stream, value.object); + yardl::binary::ReadVector(stream, value.transforms); + yardl::binary::ReadVector(stream, value.ids); +} + +[[maybe_unused]] void WriteReplicatedBoxSolidVolume(yardl::binary::CodedOutputStream& stream, petsird::ReplicatedBoxSolidVolume const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + petsird::binary::WriteReplicatedObject(stream, value); +} + +[[maybe_unused]] void ReadReplicatedBoxSolidVolume(yardl::binary::CodedInputStream& stream, petsird::ReplicatedBoxSolidVolume& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + petsird::binary::ReadReplicatedObject(stream, value); +} + +[[maybe_unused]] void WriteReplicatedGenericSolidVolume(yardl::binary::CodedOutputStream& stream, petsird::ReplicatedGenericSolidVolume const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + petsird::binary::WriteReplicatedObject(stream, value); +} + +[[maybe_unused]] void ReadReplicatedGenericSolidVolume(yardl::binary::CodedInputStream& stream, petsird::ReplicatedGenericSolidVolume& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + petsird::binary::ReadReplicatedObject(stream, value); +} + +[[maybe_unused]] void WriteDetectorModule(yardl::binary::CodedOutputStream& stream, petsird::DetectorModule const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteVector(stream, value.detecting_elements); + yardl::binary::WriteVector(stream, value.detecting_element_ids); + yardl::binary::WriteVector(stream, 
value.non_detecting_elements); +} + +[[maybe_unused]] void ReadDetectorModule(yardl::binary::CodedInputStream& stream, petsird::DetectorModule& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadVector(stream, value.detecting_elements); + yardl::binary::ReadVector(stream, value.detecting_element_ids); + yardl::binary::ReadVector(stream, value.non_detecting_elements); +} + +[[maybe_unused]] void WriteReplicatedDetectorModule(yardl::binary::CodedOutputStream& stream, petsird::ReplicatedDetectorModule const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + petsird::binary::WriteReplicatedObject(stream, value); +} + +[[maybe_unused]] void ReadReplicatedDetectorModule(yardl::binary::CodedInputStream& stream, petsird::ReplicatedDetectorModule& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + petsird::binary::ReadReplicatedObject(stream, value); +} + +[[maybe_unused]] void WriteScannerGeometry(yardl::binary::CodedOutputStream& stream, petsird::ScannerGeometry const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteVector(stream, value.replicated_modules); + yardl::binary::WriteVector(stream, value.ids); + yardl::binary::WriteOptional, yardl::binary::WriteVector>(stream, value.non_detecting_volumes); +} + +[[maybe_unused]] void ReadScannerGeometry(yardl::binary::CodedInputStream& stream, petsird::ScannerGeometry& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadVector(stream, value.replicated_modules); + 
yardl::binary::ReadVector(stream, value.ids); + yardl::binary::ReadOptional, yardl::binary::ReadVector>(stream, value.non_detecting_volumes); +} + +[[maybe_unused]] void WriteSubject(yardl::binary::CodedOutputStream& stream, petsird::Subject const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteOptional(stream, value.name); + yardl::binary::WriteString(stream, value.id); +} + +[[maybe_unused]] void ReadSubject(yardl::binary::CodedInputStream& stream, petsird::Subject& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadOptional(stream, value.name); + yardl::binary::ReadString(stream, value.id); +} + +[[maybe_unused]] void WriteInstitution(yardl::binary::CodedOutputStream& stream, petsird::Institution const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteString(stream, value.name); + yardl::binary::WriteString(stream, value.address); +} + +[[maybe_unused]] void ReadInstitution(yardl::binary::CodedInputStream& stream, petsird::Institution& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadString(stream, value.name); + yardl::binary::ReadString(stream, value.address); +} + +[[maybe_unused]] void WriteExamInformation(yardl::binary::CodedOutputStream& stream, petsird::ExamInformation const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + petsird::binary::WriteSubject(stream, value.subject); + petsird::binary::WriteInstitution(stream, value.institution); + 
yardl::binary::WriteOptional(stream, value.protocol); + yardl::binary::WriteOptional(stream, value.start_of_acquisition); +} + +[[maybe_unused]] void ReadExamInformation(yardl::binary::CodedInputStream& stream, petsird::ExamInformation& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + petsird::binary::ReadSubject(stream, value.subject); + petsird::binary::ReadInstitution(stream, value.institution); + yardl::binary::ReadOptional(stream, value.protocol); + yardl::binary::ReadOptional(stream, value.start_of_acquisition); +} + +[[maybe_unused]] void WriteDirection(yardl::binary::CodedOutputStream& stream, petsird::Direction const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteFixedNDArray(stream, value.c); +} + +[[maybe_unused]] void ReadDirection(yardl::binary::CodedInputStream& stream, petsird::Direction& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadFixedNDArray(stream, value.c); +} + +[[maybe_unused]] void WriteDirectionMatrix(yardl::binary::CodedOutputStream& stream, petsird::DirectionMatrix const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteFixedNDArray(stream, value.matrix); +} + +[[maybe_unused]] void ReadDirectionMatrix(yardl::binary::CodedInputStream& stream, petsird::DirectionMatrix& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadFixedNDArray(stream, value.matrix); +} + +[[maybe_unused]] void WriteAtom(yardl::binary::CodedOutputStream& stream, petsird::Atom const& 
value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteInteger(stream, value.mass_number); + yardl::binary::WriteInteger(stream, value.atomic_number); +} + +[[maybe_unused]] void ReadAtom(yardl::binary::CodedInputStream& stream, petsird::Atom& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadInteger(stream, value.mass_number); + yardl::binary::ReadInteger(stream, value.atomic_number); +} + +[[maybe_unused]] void WriteBulkMaterial(yardl::binary::CodedOutputStream& stream, petsird::BulkMaterial const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteInteger(stream, value.id); + yardl::binary::WriteString(stream, value.name); + yardl::binary::WriteFloatingPoint(stream, value.density); + yardl::binary::WriteVector(stream, value.atoms); + yardl::binary::WriteVector(stream, value.mass_fractions); +} + +[[maybe_unused]] void ReadBulkMaterial(yardl::binary::CodedInputStream& stream, petsird::BulkMaterial& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadInteger(stream, value.id); + yardl::binary::ReadString(stream, value.name); + yardl::binary::ReadFloatingPoint(stream, value.density); + yardl::binary::ReadVector(stream, value.atoms); + yardl::binary::ReadVector(stream, value.mass_fractions); +} + +[[maybe_unused]] void WriteScannerInformation(yardl::binary::CodedOutputStream& stream, petsird::ScannerInformation const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + 
yardl::binary::WriteString(stream, value.model_name); + petsird::binary::WriteScannerGeometry(stream, value.scanner_geometry); + yardl::binary::WriteVector(stream, value.bulk_materials); + yardl::binary::WriteOptional(stream, value.gantry_alignment); + yardl::binary::WriteNDArray(stream, value.tof_bin_edges); + yardl::binary::WriteFloatingPoint(stream, value.tof_resolution); + yardl::binary::WriteNDArray(stream, value.energy_bin_edges); + yardl::binary::WriteFloatingPoint(stream, value.energy_resolution_at_511); + yardl::binary::WriteInteger(stream, value.event_time_block_duration); + yardl::binary::WriteEnum(stream, value.coincidence_policy); +} + +[[maybe_unused]] void ReadScannerInformation(yardl::binary::CodedInputStream& stream, petsird::ScannerInformation& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadString(stream, value.model_name); + petsird::binary::ReadScannerGeometry(stream, value.scanner_geometry); + yardl::binary::ReadVector(stream, value.bulk_materials); + yardl::binary::ReadOptional(stream, value.gantry_alignment); + yardl::binary::ReadNDArray(stream, value.tof_bin_edges); + yardl::binary::ReadFloatingPoint(stream, value.tof_resolution); + yardl::binary::ReadNDArray(stream, value.energy_bin_edges); + yardl::binary::ReadFloatingPoint(stream, value.energy_resolution_at_511); + yardl::binary::ReadInteger(stream, value.event_time_block_duration); + yardl::binary::ReadEnum(stream, value.coincidence_policy); +} + +[[maybe_unused]] void WriteHeader(yardl::binary::CodedOutputStream& stream, petsird::Header const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + petsird::binary::WriteScannerInformation(stream, value.scanner); + yardl::binary::WriteOptional(stream, value.exam); +} + +[[maybe_unused]] void 
ReadHeader(yardl::binary::CodedInputStream& stream, petsird::Header& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + petsird::binary::ReadScannerInformation(stream, value.scanner); + yardl::binary::ReadOptional(stream, value.exam); +} + +[[maybe_unused]] void WriteTripleEvent(yardl::binary::CodedOutputStream& stream, petsird::TripleEvent const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteArray(stream, value.detector_ids); + yardl::binary::WriteArray(stream, value.tof_indices); + yardl::binary::WriteArray(stream, value.energy_indices); +} + +[[maybe_unused]] void ReadTripleEvent(yardl::binary::CodedInputStream& stream, petsird::TripleEvent& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadArray(stream, value.detector_ids); + yardl::binary::ReadArray(stream, value.tof_indices); + yardl::binary::ReadArray(stream, value.energy_indices); +} + +[[maybe_unused]] void WriteEventTimeBlock(yardl::binary::CodedOutputStream& stream, petsird::EventTimeBlock const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteInteger(stream, value.start); + yardl::binary::WriteVector(stream, value.prompt_events); + yardl::binary::WriteOptional, yardl::binary::WriteVector>(stream, value.delayed_events); + yardl::binary::WriteOptional, yardl::binary::WriteVector>(stream, value.triple_events); +} + +[[maybe_unused]] void ReadEventTimeBlock(yardl::binary::CodedInputStream& stream, petsird::EventTimeBlock& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, 
value); + return; + } + + yardl::binary::ReadInteger(stream, value.start); + yardl::binary::ReadVector(stream, value.prompt_events); + yardl::binary::ReadOptional, yardl::binary::ReadVector>(stream, value.delayed_events); + yardl::binary::ReadOptional, yardl::binary::ReadVector>(stream, value.triple_events); +} + +[[maybe_unused]] void WriteExternalSignalTimeBlock(yardl::binary::CodedOutputStream& stream, petsird::ExternalSignalTimeBlock const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteInteger(stream, value.start); + yardl::binary::WriteInteger(stream, value.signal_id); + yardl::binary::WriteVector(stream, value.signal_values); +} + +[[maybe_unused]] void ReadExternalSignalTimeBlock(yardl::binary::CodedInputStream& stream, petsird::ExternalSignalTimeBlock& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadInteger(stream, value.start); + yardl::binary::ReadInteger(stream, value.signal_id); + yardl::binary::ReadVector(stream, value.signal_values); +} + +[[maybe_unused]] void WriteBedMovementTimeBlock(yardl::binary::CodedOutputStream& stream, petsird::BedMovementTimeBlock const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteInteger(stream, value.start); + petsird::binary::WriteRigidTransformation(stream, value.transform); +} + +[[maybe_unused]] void ReadBedMovementTimeBlock(yardl::binary::CodedInputStream& stream, petsird::BedMovementTimeBlock& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadInteger(stream, value.start); + petsird::binary::ReadRigidTransformation(stream, 
value.transform); +} + +[[maybe_unused]] void WriteGantryMovementTimeBlock(yardl::binary::CodedOutputStream& stream, petsird::GantryMovementTimeBlock const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteInteger(stream, value.start); + petsird::binary::WriteRigidTransformation(stream, value.transform); +} + +[[maybe_unused]] void ReadGantryMovementTimeBlock(yardl::binary::CodedInputStream& stream, petsird::GantryMovementTimeBlock& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadInteger(stream, value.start); + petsird::binary::ReadRigidTransformation(stream, value.transform); +} + +[[maybe_unused]] void WriteTimeBlock(yardl::binary::CodedOutputStream& stream, petsird::TimeBlock const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + WriteUnion(stream, value); +} + +[[maybe_unused]] void ReadTimeBlock(yardl::binary::CodedInputStream& stream, petsird::TimeBlock& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + ReadUnion(stream, value); +} + +[[maybe_unused]] void WriteExternalSignalType(yardl::binary::CodedOutputStream& stream, petsird::ExternalSignalType const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteEnum(stream, value.type); + yardl::binary::WriteString(stream, value.description); + yardl::binary::WriteInteger(stream, value.id); +} + +[[maybe_unused]] void ReadExternalSignalType(yardl::binary::CodedInputStream& stream, petsird::ExternalSignalType& value) { + if constexpr 
(yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadEnum(stream, value.type); + yardl::binary::ReadString(stream, value.description); + yardl::binary::ReadInteger(stream, value.id); +} + +[[maybe_unused]] void WriteTimeInterval(yardl::binary::CodedOutputStream& stream, petsird::TimeInterval const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteInteger(stream, value.start); + yardl::binary::WriteInteger(stream, value.stop); +} + +[[maybe_unused]] void ReadTimeInterval(yardl::binary::CodedInputStream& stream, petsird::TimeInterval& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadInteger(stream, value.start); + yardl::binary::ReadInteger(stream, value.stop); +} + +[[maybe_unused]] void WriteTimeFrameInformation(yardl::binary::CodedOutputStream& stream, petsird::TimeFrameInformation const& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::WriteTriviallySerializable(stream, value); + return; + } + + yardl::binary::WriteVector(stream, value.time_frames); +} + +[[maybe_unused]] void ReadTimeFrameInformation(yardl::binary::CodedInputStream& stream, petsird::TimeFrameInformation& value) { + if constexpr (yardl::binary::IsTriviallySerializable::value) { + yardl::binary::ReadTriviallySerializable(stream, value); + return; + } + + yardl::binary::ReadVector(stream, value.time_frames); +} + +} // namespace + +void PETSIRDWriter::WriteHeaderImpl(petsird::Header const& value) { + petsird::binary::WriteHeader(stream_, value); +} + +void PETSIRDWriter::WriteTimeBlocksImpl(petsird::TimeBlock const& value) { + yardl::binary::WriteBlock(stream_, value); +} + +void PETSIRDWriter::WriteTimeBlocksImpl(std::vector 
const& values) { + if (!values.empty()) { + yardl::binary::WriteVector(stream_, values); + } +} + +void PETSIRDWriter::EndTimeBlocksImpl() { + yardl::binary::WriteInteger(stream_, 0U); +} + +void PETSIRDWriter::Flush() { + stream_.Flush(); +} + +void PETSIRDWriter::CloseImpl() { + stream_.Flush(); +} + +void PETSIRDReader::ReadHeaderImpl(petsird::Header& value) { + petsird::binary::ReadHeader(stream_, value); +} + +bool PETSIRDReader::ReadTimeBlocksImpl(petsird::TimeBlock& value) { + bool read_block_successful = false; + read_block_successful = yardl::binary::ReadBlock(stream_, current_block_remaining_, value); + return read_block_successful; +} + +bool PETSIRDReader::ReadTimeBlocksImpl(std::vector& values) { + yardl::binary::ReadBlocksIntoVector(stream_, current_block_remaining_, values); + return current_block_remaining_ != 0; +} + +void PETSIRDReader::CloseImpl() { + stream_.VerifyFinished(); +} + +} // namespace petsird::binary + diff --git a/cpp/generated/binary/protocols.h b/cpp/generated/binary/protocols.h new file mode 100644 index 0000000..1c4df4c --- /dev/null +++ b/cpp/generated/binary/protocols.h @@ -0,0 +1,61 @@ +// This file was generated by the "yardl" tool. DO NOT EDIT. + +#pragma once +#include +#include +#include +#include +#include +#include + +#include "../protocols.h" +#include "../yardl/detail/binary/reader_writer.h" + +namespace petsird::binary { +// Binary writer for the PETSIRD protocol. 
+// Definition of the stream of data +class PETSIRDWriter : public petsird::PETSIRDWriterBase, yardl::binary::BinaryWriter { + public: + PETSIRDWriter(std::ostream& stream, Version version = Version::Current) + : yardl::binary::BinaryWriter(stream, petsird::PETSIRDWriterBase::SchemaFromVersion(version)), version_(version) {} + + PETSIRDWriter(std::string file_name, Version version = Version::Current) + : yardl::binary::BinaryWriter(file_name, petsird::PETSIRDWriterBase::SchemaFromVersion(version)), version_(version) {} + + void Flush() override; + + protected: + void WriteHeaderImpl(petsird::Header const& value) override; + void WriteTimeBlocksImpl(petsird::TimeBlock const& value) override; + void WriteTimeBlocksImpl(std::vector const& values) override; + void EndTimeBlocksImpl() override; + void CloseImpl() override; + + Version version_; +}; + +// Binary reader for the PETSIRD protocol. +// Definition of the stream of data +class PETSIRDReader : public petsird::PETSIRDReaderBase, yardl::binary::BinaryReader { + public: + PETSIRDReader(std::istream& stream) + : yardl::binary::BinaryReader(stream), version_(petsird::PETSIRDReaderBase::VersionFromSchema(schema_read_)) {} + + PETSIRDReader(std::string file_name) + : yardl::binary::BinaryReader(file_name), version_(petsird::PETSIRDReaderBase::VersionFromSchema(schema_read_)) {} + + Version GetVersion() { return version_; } + + protected: + void ReadHeaderImpl(petsird::Header& value) override; + bool ReadTimeBlocksImpl(petsird::TimeBlock& value) override; + bool ReadTimeBlocksImpl(std::vector& values) override; + void CloseImpl() override; + + Version version_; + + private: + size_t current_block_remaining_ = 0; +}; + +} // namespace petsird::binary diff --git a/cpp/generated/hdf5/protocols.cc b/cpp/generated/hdf5/protocols.cc new file mode 100644 index 0000000..14aa125 --- /dev/null +++ b/cpp/generated/hdf5/protocols.cc @@ -0,0 +1,879 @@ +// This file was generated by the "yardl" tool. DO NOT EDIT. 
+ +#include "protocols.h" + +#include "../yardl/detail/hdf5/io.h" +#include "../yardl/detail/hdf5/ddl.h" +#include "../yardl/detail/hdf5/inner_types.h" + +namespace { +template +class InnerUnion2 { + public: + InnerUnion2() : type_index_(-1) {} + InnerUnion2(std::variant const& v) : type_index_(static_cast(v.index())) { + Init(v); + } + + InnerUnion2(std::variant const& v) : type_index_(static_cast(v.index()) - 1) { + Init(v); + } + + InnerUnion2(InnerUnion2 const& v) = delete; + + InnerUnion2 operator=(InnerUnion2 const&) = delete; + + ~InnerUnion2() { + switch (type_index_) { + case 0: + value0_.~TInner0(); + break; + case 1: + value1_.~TInner1(); + break; + } + } + + void ToOuter(std::variant& o) const { + ToOuterImpl(o); + } + + void ToOuter(std::variant& o) const { + ToOuterImpl(o); + } + + int8_t type_index_; + union { + char empty0_[sizeof(TInner0)]{}; + TInner0 value0_; + }; + union { + char empty1_[sizeof(TInner1)]{}; + TInner1 value1_; + }; + + private: + template + void Init(T const& v) { + constexpr size_t offset = GetOuterVariantOffset>>(); + switch (type_index_) { + case 0: + new (&value0_) TInner0(std::get<0 + offset>(v)); + return; + case 1: + new (&value1_) TInner1(std::get<1 + offset>(v)); + return; + } + } + + template + void ToOuterImpl(TVariant& o) const { + constexpr size_t offset = GetOuterVariantOffset(); + switch (type_index_) { + case -1: + if constexpr (offset == 1) { + o.template emplace<0>(std::monostate{}); + return; + } + case 0: + o.template emplace<0 + offset>(); + yardl::hdf5::ToOuter(value0_, std::get<0 + offset>(o)); + return; + case 1: + o.template emplace<1 + offset>(); + yardl::hdf5::ToOuter(value1_, std::get<1 + offset>(o)); + return; + } + throw std::runtime_error("unrecognized type variant type index " + std::to_string(type_index_)); + } + + template + static constexpr size_t GetOuterVariantOffset() { + constexpr bool has_monostate = std::is_same_v>; + if constexpr (has_monostate) { + return 1; + } + return 0; + } +}; + 
+template +H5::CompType InnerUnion2Ddl(bool nullable, H5::DataType const& t0, std::string const& tag0, H5::DataType const& t1, std::string const& tag1) { + using UnionType = ::InnerUnion2; + H5::CompType rtn(sizeof(UnionType)); + rtn.insertMember("$type", HOFFSET(UnionType, type_index_), yardl::hdf5::UnionTypeEnumDdl(nullable, tag0, tag1)); + rtn.insertMember(tag0, HOFFSET(UnionType, value0_), t0); + rtn.insertMember(tag1, HOFFSET(UnionType, value1_), t1); + return rtn; +} + +template +class InnerUnion4 { + public: + InnerUnion4() : type_index_(-1) {} + InnerUnion4(std::variant const& v) : type_index_(static_cast(v.index())) { + Init(v); + } + + InnerUnion4(std::variant const& v) : type_index_(static_cast(v.index()) - 1) { + Init(v); + } + + InnerUnion4(InnerUnion4 const& v) = delete; + + InnerUnion4 operator=(InnerUnion4 const&) = delete; + + ~InnerUnion4() { + switch (type_index_) { + case 0: + value0_.~TInner0(); + break; + case 1: + value1_.~TInner1(); + break; + case 2: + value2_.~TInner2(); + break; + case 3: + value3_.~TInner3(); + break; + } + } + + void ToOuter(std::variant& o) const { + ToOuterImpl(o); + } + + void ToOuter(std::variant& o) const { + ToOuterImpl(o); + } + + int8_t type_index_; + union { + char empty0_[sizeof(TInner0)]{}; + TInner0 value0_; + }; + union { + char empty1_[sizeof(TInner1)]{}; + TInner1 value1_; + }; + union { + char empty2_[sizeof(TInner2)]{}; + TInner2 value2_; + }; + union { + char empty3_[sizeof(TInner3)]{}; + TInner3 value3_; + }; + + private: + template + void Init(T const& v) { + constexpr size_t offset = GetOuterVariantOffset>>(); + switch (type_index_) { + case 0: + new (&value0_) TInner0(std::get<0 + offset>(v)); + return; + case 1: + new (&value1_) TInner1(std::get<1 + offset>(v)); + return; + case 2: + new (&value2_) TInner2(std::get<2 + offset>(v)); + return; + case 3: + new (&value3_) TInner3(std::get<3 + offset>(v)); + return; + } + } + + template + void ToOuterImpl(TVariant& o) const { + constexpr size_t offset 
= GetOuterVariantOffset(); + switch (type_index_) { + case -1: + if constexpr (offset == 1) { + o.template emplace<0>(std::monostate{}); + return; + } + case 0: + o.template emplace<0 + offset>(); + yardl::hdf5::ToOuter(value0_, std::get<0 + offset>(o)); + return; + case 1: + o.template emplace<1 + offset>(); + yardl::hdf5::ToOuter(value1_, std::get<1 + offset>(o)); + return; + case 2: + o.template emplace<2 + offset>(); + yardl::hdf5::ToOuter(value2_, std::get<2 + offset>(o)); + return; + case 3: + o.template emplace<3 + offset>(); + yardl::hdf5::ToOuter(value3_, std::get<3 + offset>(o)); + return; + } + throw std::runtime_error("unrecognized type variant type index " + std::to_string(type_index_)); + } + + template + static constexpr size_t GetOuterVariantOffset() { + constexpr bool has_monostate = std::is_same_v>; + if constexpr (has_monostate) { + return 1; + } + return 0; + } +}; + +template +H5::CompType InnerUnion4Ddl(bool nullable, H5::DataType const& t0, std::string const& tag0, H5::DataType const& t1, std::string const& tag1, H5::DataType const& t2, std::string const& tag2, H5::DataType const& t3, std::string const& tag3) { + using UnionType = ::InnerUnion4; + H5::CompType rtn(sizeof(UnionType)); + rtn.insertMember("$type", HOFFSET(UnionType, type_index_), yardl::hdf5::UnionTypeEnumDdl(nullable, tag0, tag1, tag2, tag3)); + rtn.insertMember(tag0, HOFFSET(UnionType, value0_), t0); + rtn.insertMember(tag1, HOFFSET(UnionType, value1_), t1); + rtn.insertMember(tag2, HOFFSET(UnionType, value2_), t2); + rtn.insertMember(tag3, HOFFSET(UnionType, value3_), t3); + return rtn; +} +} + +namespace petsird::hdf5 { +namespace { +[[maybe_unused]] H5::EnumType GetCoincidencePolicyHdf5Ddl() { + H5::EnumType t(H5::PredType::NATIVE_INT32); + int32_t i = 0; + t.insert("rejectMultiples", &i); + i = 1; + t.insert("storeMultiplesAsPairs", &i); + i = 2; + t.insert("other", &i); + return t; +} + +[[maybe_unused]] H5::EnumType GetExternalSignalTypeEnumHdf5Ddl() { + H5::EnumType 
t(H5::PredType::NATIVE_INT32); + int32_t i = 0; + t.insert("ecgTrace", &i); + i = 1; + t.insert("ecgTrigger", &i); + i = 2; + t.insert("respTrace", &i); + i = 3; + t.insert("respTrigger", &i); + i = 4; + t.insert("otherMotionSignal", &i); + i = 5; + t.insert("otherMotionTrigger", &i); + i = 6; + t.insert("externalSync", &i); + i = 7; + t.insert("other", &i); + return t; +} + +template +struct _Inner_SolidVolume { + _Inner_SolidVolume() {} + _Inner_SolidVolume(petsird::SolidVolume const& o) + : shape(o.shape), + material_id(o.material_id) { + } + + void ToOuter (petsird::SolidVolume& o) const { + yardl::hdf5::ToOuter(shape, o.shape); + yardl::hdf5::ToOuter(material_id, o.material_id); + } + + _Shape_Inner shape; + uint32_t material_id; +}; + +template +struct _Inner_ReplicatedObject { + _Inner_ReplicatedObject() {} + _Inner_ReplicatedObject(petsird::ReplicatedObject const& o) + : object(o.object), + transforms(o.transforms), + ids(o.ids) { + } + + void ToOuter (petsird::ReplicatedObject& o) const { + yardl::hdf5::ToOuter(object, o.object); + yardl::hdf5::ToOuter(transforms, o.transforms); + yardl::hdf5::ToOuter(ids, o.ids); + } + + _T_Inner object; + yardl::hdf5::InnerVlen transforms; + yardl::hdf5::InnerVlen ids; +}; + +struct _Inner_DetectorModule { + _Inner_DetectorModule() {} + _Inner_DetectorModule(petsird::DetectorModule const& o) + : detecting_elements(o.detecting_elements), + detecting_element_ids(o.detecting_element_ids), + non_detecting_elements(o.non_detecting_elements) { + } + + void ToOuter (petsird::DetectorModule& o) const { + yardl::hdf5::ToOuter(detecting_elements, o.detecting_elements); + yardl::hdf5::ToOuter(detecting_element_ids, o.detecting_element_ids); + yardl::hdf5::ToOuter(non_detecting_elements, o.non_detecting_elements); + } + + yardl::hdf5::InnerVlen, petsird::BoxSolidVolume>, petsird::ReplicatedBoxSolidVolume> detecting_elements; + yardl::hdf5::InnerVlen detecting_element_ids; + yardl::hdf5::InnerVlen, petsird::GeometricShape>, 
petsird::GenericSolidVolume>, petsird::ReplicatedGenericSolidVolume> non_detecting_elements; +}; + +struct _Inner_ScannerGeometry { + _Inner_ScannerGeometry() {} + _Inner_ScannerGeometry(petsird::ScannerGeometry const& o) + : replicated_modules(o.replicated_modules), + ids(o.ids), + non_detecting_volumes(o.non_detecting_volumes) { + } + + void ToOuter (petsird::ScannerGeometry& o) const { + yardl::hdf5::ToOuter(replicated_modules, o.replicated_modules); + yardl::hdf5::ToOuter(ids, o.ids); + yardl::hdf5::ToOuter(non_detecting_volumes, o.non_detecting_volumes); + } + + yardl::hdf5::InnerVlen, petsird::ReplicatedDetectorModule> replicated_modules; + yardl::hdf5::InnerVlen ids; + yardl::hdf5::InnerOptional, petsird::GeometricShape>, petsird::GenericSolidVolume>, std::vector> non_detecting_volumes; +}; + +struct _Inner_Subject { + _Inner_Subject() {} + _Inner_Subject(petsird::Subject const& o) + : name(o.name), + id(o.id) { + } + + void ToOuter (petsird::Subject& o) const { + yardl::hdf5::ToOuter(name, o.name); + yardl::hdf5::ToOuter(id, o.id); + } + + yardl::hdf5::InnerOptional name; + yardl::hdf5::InnerVlenString id; +}; + +struct _Inner_Institution { + _Inner_Institution() {} + _Inner_Institution(petsird::Institution const& o) + : name(o.name), + address(o.address) { + } + + void ToOuter (petsird::Institution& o) const { + yardl::hdf5::ToOuter(name, o.name); + yardl::hdf5::ToOuter(address, o.address); + } + + yardl::hdf5::InnerVlenString name; + yardl::hdf5::InnerVlenString address; +}; + +struct _Inner_ExamInformation { + _Inner_ExamInformation() {} + _Inner_ExamInformation(petsird::ExamInformation const& o) + : subject(o.subject), + institution(o.institution), + protocol(o.protocol), + start_of_acquisition(o.start_of_acquisition) { + } + + void ToOuter (petsird::ExamInformation& o) const { + yardl::hdf5::ToOuter(subject, o.subject); + yardl::hdf5::ToOuter(institution, o.institution); + yardl::hdf5::ToOuter(protocol, o.protocol); + 
yardl::hdf5::ToOuter(start_of_acquisition, o.start_of_acquisition); + } + + petsird::hdf5::_Inner_Subject subject; + petsird::hdf5::_Inner_Institution institution; + yardl::hdf5::InnerOptional protocol; + yardl::hdf5::InnerOptional start_of_acquisition; +}; + +struct _Inner_BulkMaterial { + _Inner_BulkMaterial() {} + _Inner_BulkMaterial(petsird::BulkMaterial const& o) + : id(o.id), + name(o.name), + density(o.density), + atoms(o.atoms), + mass_fractions(o.mass_fractions) { + } + + void ToOuter (petsird::BulkMaterial& o) const { + yardl::hdf5::ToOuter(id, o.id); + yardl::hdf5::ToOuter(name, o.name); + yardl::hdf5::ToOuter(density, o.density); + yardl::hdf5::ToOuter(atoms, o.atoms); + yardl::hdf5::ToOuter(mass_fractions, o.mass_fractions); + } + + uint32_t id; + yardl::hdf5::InnerVlenString name; + float density; + yardl::hdf5::InnerVlen atoms; + yardl::hdf5::InnerVlen mass_fractions; +}; + +struct _Inner_ScannerInformation { + _Inner_ScannerInformation() {} + _Inner_ScannerInformation(petsird::ScannerInformation const& o) + : model_name(o.model_name), + scanner_geometry(o.scanner_geometry), + bulk_materials(o.bulk_materials), + gantry_alignment(o.gantry_alignment), + tof_bin_edges(o.tof_bin_edges), + tof_resolution(o.tof_resolution), + energy_bin_edges(o.energy_bin_edges), + energy_resolution_at_511(o.energy_resolution_at_511), + event_time_block_duration(o.event_time_block_duration), + coincidence_policy(o.coincidence_policy) { + } + + void ToOuter (petsird::ScannerInformation& o) const { + yardl::hdf5::ToOuter(model_name, o.model_name); + yardl::hdf5::ToOuter(scanner_geometry, o.scanner_geometry); + yardl::hdf5::ToOuter(bulk_materials, o.bulk_materials); + yardl::hdf5::ToOuter(gantry_alignment, o.gantry_alignment); + yardl::hdf5::ToOuter(tof_bin_edges, o.tof_bin_edges); + yardl::hdf5::ToOuter(tof_resolution, o.tof_resolution); + yardl::hdf5::ToOuter(energy_bin_edges, o.energy_bin_edges); + yardl::hdf5::ToOuter(energy_resolution_at_511, o.energy_resolution_at_511); 
+ yardl::hdf5::ToOuter(event_time_block_duration, o.event_time_block_duration); + yardl::hdf5::ToOuter(coincidence_policy, o.coincidence_policy); + } + + yardl::hdf5::InnerVlenString model_name; + petsird::hdf5::_Inner_ScannerGeometry scanner_geometry; + yardl::hdf5::InnerVlen bulk_materials; + yardl::hdf5::InnerOptional gantry_alignment; + yardl::hdf5::InnerVlen tof_bin_edges; + float tof_resolution; + yardl::hdf5::InnerVlen energy_bin_edges; + float energy_resolution_at_511; + uint32_t event_time_block_duration; + petsird::CoincidencePolicy coincidence_policy; +}; + +struct _Inner_Header { + _Inner_Header() {} + _Inner_Header(petsird::Header const& o) + : scanner(o.scanner), + exam(o.exam) { + } + + void ToOuter (petsird::Header& o) const { + yardl::hdf5::ToOuter(scanner, o.scanner); + yardl::hdf5::ToOuter(exam, o.exam); + } + + petsird::hdf5::_Inner_ScannerInformation scanner; + yardl::hdf5::InnerOptional exam; +}; + +struct _Inner_EventTimeBlock { + _Inner_EventTimeBlock() {} + _Inner_EventTimeBlock(petsird::EventTimeBlock const& o) + : start(o.start), + prompt_events(o.prompt_events), + delayed_events(o.delayed_events), + triple_events(o.triple_events) { + } + + void ToOuter (petsird::EventTimeBlock& o) const { + yardl::hdf5::ToOuter(start, o.start); + yardl::hdf5::ToOuter(prompt_events, o.prompt_events); + yardl::hdf5::ToOuter(delayed_events, o.delayed_events); + yardl::hdf5::ToOuter(triple_events, o.triple_events); + } + + uint32_t start; + yardl::hdf5::InnerVlen prompt_events; + yardl::hdf5::InnerOptional, std::vector> delayed_events; + yardl::hdf5::InnerOptional, std::vector> triple_events; +}; + +struct _Inner_ExternalSignalTimeBlock { + _Inner_ExternalSignalTimeBlock() {} + _Inner_ExternalSignalTimeBlock(petsird::ExternalSignalTimeBlock const& o) + : start(o.start), + signal_id(o.signal_id), + signal_values(o.signal_values) { + } + + void ToOuter (petsird::ExternalSignalTimeBlock& o) const { + yardl::hdf5::ToOuter(start, o.start); + 
yardl::hdf5::ToOuter(signal_id, o.signal_id); + yardl::hdf5::ToOuter(signal_values, o.signal_values); + } + + uint32_t start; + uint32_t signal_id; + yardl::hdf5::InnerVlen signal_values; +}; + +struct _Inner_ExternalSignalType { + _Inner_ExternalSignalType() {} + _Inner_ExternalSignalType(petsird::ExternalSignalType const& o) + : type(o.type), + description(o.description), + id(o.id) { + } + + void ToOuter (petsird::ExternalSignalType& o) const { + yardl::hdf5::ToOuter(type, o.type); + yardl::hdf5::ToOuter(description, o.description); + yardl::hdf5::ToOuter(id, o.id); + } + + petsird::ExternalSignalTypeEnum type; + yardl::hdf5::InnerVlenString description; + uint32_t id; +}; + +struct _Inner_TimeFrameInformation { + _Inner_TimeFrameInformation() {} + _Inner_TimeFrameInformation(petsird::TimeFrameInformation const& o) + : time_frames(o.time_frames) { + } + + void ToOuter (petsird::TimeFrameInformation& o) const { + yardl::hdf5::ToOuter(time_frames, o.time_frames); + } + + yardl::hdf5::InnerVlen time_frames; +}; + +[[maybe_unused]] H5::CompType GetCoincidenceEventHdf5Ddl() { + using RecordType = petsird::CoincidenceEvent; + H5::CompType t(sizeof(RecordType)); + t.insertMember("detectorIds", HOFFSET(RecordType, detector_ids), yardl::hdf5::FixedVectorDdl(H5::PredType::NATIVE_UINT32, 2)); + t.insertMember("tofIdx", HOFFSET(RecordType, tof_idx), H5::PredType::NATIVE_UINT32); + t.insertMember("energyIndices", HOFFSET(RecordType, energy_indices), yardl::hdf5::FixedVectorDdl(H5::PredType::NATIVE_UINT32, 2)); + return t; +} + +template +[[maybe_unused]] H5::CompType GetSolidVolumeHdf5Ddl(H5::DataType const& Shape_type) { + using RecordType = petsird::hdf5::_Inner_SolidVolume<_Shape_Inner, Shape>; + H5::CompType t(sizeof(RecordType)); + t.insertMember("shape", HOFFSET(RecordType, shape), Shape_type); + t.insertMember("materialId", HOFFSET(RecordType, material_id), H5::PredType::NATIVE_UINT32); + return t; +} + +[[maybe_unused]] H5::CompType GetCoordinateHdf5Ddl() { + using 
RecordType = petsird::Coordinate; + H5::CompType t(sizeof(RecordType)); + t.insertMember("c", HOFFSET(RecordType, c), yardl::hdf5::FixedNDArrayDdl(H5::PredType::NATIVE_FLOAT, {3})); + return t; +} + +[[maybe_unused]] H5::CompType GetBoxShapeHdf5Ddl() { + using RecordType = petsird::BoxShape; + H5::CompType t(sizeof(RecordType)); + t.insertMember("corners", HOFFSET(RecordType, corners), yardl::hdf5::FixedVectorDdl(petsird::hdf5::GetCoordinateHdf5Ddl(), 8)); + return t; +} + +[[maybe_unused]] H5::CompType GetAnnulusShapeHdf5Ddl() { + using RecordType = petsird::AnnulusShape; + H5::CompType t(sizeof(RecordType)); + t.insertMember("innerRadius", HOFFSET(RecordType, inner_radius), H5::PredType::NATIVE_FLOAT); + t.insertMember("outerRadius", HOFFSET(RecordType, outer_radius), H5::PredType::NATIVE_FLOAT); + t.insertMember("thickness", HOFFSET(RecordType, thickness), H5::PredType::NATIVE_FLOAT); + t.insertMember("angularRange", HOFFSET(RecordType, angular_range), yardl::hdf5::FixedVectorDdl(H5::PredType::NATIVE_FLOAT, 2)); + return t; +} + +[[maybe_unused]] H5::CompType GetRigidTransformationHdf5Ddl() { + using RecordType = petsird::RigidTransformation; + H5::CompType t(sizeof(RecordType)); + t.insertMember("matrix", HOFFSET(RecordType, matrix), yardl::hdf5::FixedNDArrayDdl(H5::PredType::NATIVE_FLOAT, {3, 4})); + return t; +} + +template +[[maybe_unused]] H5::CompType GetReplicatedObjectHdf5Ddl(H5::DataType const& T_type) { + using RecordType = petsird::hdf5::_Inner_ReplicatedObject<_T_Inner, T>; + H5::CompType t(sizeof(RecordType)); + t.insertMember("object", HOFFSET(RecordType, object), T_type); + t.insertMember("transforms", HOFFSET(RecordType, transforms), yardl::hdf5::InnerVlenDdl(petsird::hdf5::GetRigidTransformationHdf5Ddl())); + t.insertMember("ids", HOFFSET(RecordType, ids), yardl::hdf5::InnerVlenDdl(H5::PredType::NATIVE_UINT32)); + return t; +} + +[[maybe_unused]] H5::CompType GetDetectorModuleHdf5Ddl() { + using RecordType = petsird::hdf5::_Inner_DetectorModule; 
+ H5::CompType t(sizeof(RecordType)); + t.insertMember("detectingElements", HOFFSET(RecordType, detecting_elements), yardl::hdf5::InnerVlenDdl(petsird::hdf5::GetReplicatedObjectHdf5Ddl, petsird::BoxSolidVolume>(petsird::hdf5::GetSolidVolumeHdf5Ddl(petsird::hdf5::GetBoxShapeHdf5Ddl())))); + t.insertMember("detectingElementIds", HOFFSET(RecordType, detecting_element_ids), yardl::hdf5::InnerVlenDdl(H5::PredType::NATIVE_UINT32)); + t.insertMember("nonDetectingElements", HOFFSET(RecordType, non_detecting_elements), yardl::hdf5::InnerVlenDdl(petsird::hdf5::GetReplicatedObjectHdf5Ddl, petsird::GeometricShape>, petsird::GenericSolidVolume>(petsird::hdf5::GetSolidVolumeHdf5Ddl<::InnerUnion2, petsird::GeometricShape>(::InnerUnion2Ddl(false, petsird::hdf5::GetBoxShapeHdf5Ddl(), "BoxShape", petsird::hdf5::GetAnnulusShapeHdf5Ddl(), "AnnulusShape"))))); + return t; +} + +[[maybe_unused]] H5::CompType GetScannerGeometryHdf5Ddl() { + using RecordType = petsird::hdf5::_Inner_ScannerGeometry; + H5::CompType t(sizeof(RecordType)); + t.insertMember("replicatedModules", HOFFSET(RecordType, replicated_modules), yardl::hdf5::InnerVlenDdl(petsird::hdf5::GetReplicatedObjectHdf5Ddl(petsird::hdf5::GetDetectorModuleHdf5Ddl()))); + t.insertMember("ids", HOFFSET(RecordType, ids), yardl::hdf5::InnerVlenDdl(H5::PredType::NATIVE_UINT32)); + t.insertMember("nonDetectingVolumes", HOFFSET(RecordType, non_detecting_volumes), yardl::hdf5::OptionalTypeDdl, petsird::GeometricShape>, petsird::GenericSolidVolume>, std::vector>(yardl::hdf5::InnerVlenDdl(petsird::hdf5::GetSolidVolumeHdf5Ddl<::InnerUnion2, petsird::GeometricShape>(::InnerUnion2Ddl(false, petsird::hdf5::GetBoxShapeHdf5Ddl(), "BoxShape", petsird::hdf5::GetAnnulusShapeHdf5Ddl(), "AnnulusShape"))))); + return t; +} + +[[maybe_unused]] H5::CompType GetSubjectHdf5Ddl() { + using RecordType = petsird::hdf5::_Inner_Subject; + H5::CompType t(sizeof(RecordType)); + t.insertMember("name", HOFFSET(RecordType, name), 
yardl::hdf5::OptionalTypeDdl(yardl::hdf5::InnerVlenStringDdl())); + t.insertMember("id", HOFFSET(RecordType, id), yardl::hdf5::InnerVlenStringDdl()); + return t; +} + +[[maybe_unused]] H5::CompType GetInstitutionHdf5Ddl() { + using RecordType = petsird::hdf5::_Inner_Institution; + H5::CompType t(sizeof(RecordType)); + t.insertMember("name", HOFFSET(RecordType, name), yardl::hdf5::InnerVlenStringDdl()); + t.insertMember("address", HOFFSET(RecordType, address), yardl::hdf5::InnerVlenStringDdl()); + return t; +} + +[[maybe_unused]] H5::CompType GetExamInformationHdf5Ddl() { + using RecordType = petsird::hdf5::_Inner_ExamInformation; + H5::CompType t(sizeof(RecordType)); + t.insertMember("subject", HOFFSET(RecordType, subject), petsird::hdf5::GetSubjectHdf5Ddl()); + t.insertMember("institution", HOFFSET(RecordType, institution), petsird::hdf5::GetInstitutionHdf5Ddl()); + t.insertMember("protocol", HOFFSET(RecordType, protocol), yardl::hdf5::OptionalTypeDdl(yardl::hdf5::InnerVlenStringDdl())); + t.insertMember("startOfAcquisition", HOFFSET(RecordType, start_of_acquisition), yardl::hdf5::OptionalTypeDdl(yardl::hdf5::DateTimeTypeDdl())); + return t; +} + +[[maybe_unused]] H5::CompType GetDirectionHdf5Ddl() { + using RecordType = petsird::Direction; + H5::CompType t(sizeof(RecordType)); + t.insertMember("c", HOFFSET(RecordType, c), yardl::hdf5::FixedNDArrayDdl(H5::PredType::NATIVE_FLOAT, {3})); + return t; +} + +[[maybe_unused]] H5::CompType GetDirectionMatrixHdf5Ddl() { + using RecordType = petsird::DirectionMatrix; + H5::CompType t(sizeof(RecordType)); + t.insertMember("matrix", HOFFSET(RecordType, matrix), yardl::hdf5::FixedNDArrayDdl(H5::PredType::NATIVE_FLOAT, {3, 3})); + return t; +} + +[[maybe_unused]] H5::CompType GetAtomHdf5Ddl() { + using RecordType = petsird::Atom; + H5::CompType t(sizeof(RecordType)); + t.insertMember("massNumber", HOFFSET(RecordType, mass_number), H5::PredType::NATIVE_UINT32); + t.insertMember("atomicNumber", HOFFSET(RecordType, 
atomic_number), H5::PredType::NATIVE_UINT32); + return t; +} + +[[maybe_unused]] H5::CompType GetBulkMaterialHdf5Ddl() { + using RecordType = petsird::hdf5::_Inner_BulkMaterial; + H5::CompType t(sizeof(RecordType)); + t.insertMember("id", HOFFSET(RecordType, id), H5::PredType::NATIVE_UINT32); + t.insertMember("name", HOFFSET(RecordType, name), yardl::hdf5::InnerVlenStringDdl()); + t.insertMember("density", HOFFSET(RecordType, density), H5::PredType::NATIVE_FLOAT); + t.insertMember("atoms", HOFFSET(RecordType, atoms), yardl::hdf5::InnerVlenDdl(petsird::hdf5::GetAtomHdf5Ddl())); + t.insertMember("massFractions", HOFFSET(RecordType, mass_fractions), yardl::hdf5::InnerVlenDdl(H5::PredType::NATIVE_FLOAT)); + return t; +} + +[[maybe_unused]] H5::CompType GetScannerInformationHdf5Ddl() { + using RecordType = petsird::hdf5::_Inner_ScannerInformation; + H5::CompType t(sizeof(RecordType)); + t.insertMember("modelName", HOFFSET(RecordType, model_name), yardl::hdf5::InnerVlenStringDdl()); + t.insertMember("scannerGeometry", HOFFSET(RecordType, scanner_geometry), petsird::hdf5::GetScannerGeometryHdf5Ddl()); + t.insertMember("bulkMaterials", HOFFSET(RecordType, bulk_materials), yardl::hdf5::InnerVlenDdl(petsird::hdf5::GetBulkMaterialHdf5Ddl())); + t.insertMember("gantryAlignment", HOFFSET(RecordType, gantry_alignment), yardl::hdf5::OptionalTypeDdl(petsird::hdf5::GetRigidTransformationHdf5Ddl())); + t.insertMember("tofBinEdges", HOFFSET(RecordType, tof_bin_edges), yardl::hdf5::InnerVlenDdl(H5::PredType::NATIVE_FLOAT)); + t.insertMember("tofResolution", HOFFSET(RecordType, tof_resolution), H5::PredType::NATIVE_FLOAT); + t.insertMember("energyBinEdges", HOFFSET(RecordType, energy_bin_edges), yardl::hdf5::InnerVlenDdl(H5::PredType::NATIVE_FLOAT)); + t.insertMember("energyResolutionAt511", HOFFSET(RecordType, energy_resolution_at_511), H5::PredType::NATIVE_FLOAT); + t.insertMember("eventTimeBlockDuration", HOFFSET(RecordType, event_time_block_duration), H5::PredType::NATIVE_UINT32); 
+ t.insertMember("coincidencePolicy", HOFFSET(RecordType, coincidence_policy), petsird::hdf5::GetCoincidencePolicyHdf5Ddl()); + return t; +} + +[[maybe_unused]] H5::CompType GetHeaderHdf5Ddl() { + using RecordType = petsird::hdf5::_Inner_Header; + H5::CompType t(sizeof(RecordType)); + t.insertMember("scanner", HOFFSET(RecordType, scanner), petsird::hdf5::GetScannerInformationHdf5Ddl()); + t.insertMember("exam", HOFFSET(RecordType, exam), yardl::hdf5::OptionalTypeDdl(petsird::hdf5::GetExamInformationHdf5Ddl())); + return t; +} + +[[maybe_unused]] H5::CompType GetTripleEventHdf5Ddl() { + using RecordType = petsird::TripleEvent; + H5::CompType t(sizeof(RecordType)); + t.insertMember("detectorIds", HOFFSET(RecordType, detector_ids), yardl::hdf5::FixedVectorDdl(H5::PredType::NATIVE_UINT32, 3)); + t.insertMember("tofIndices", HOFFSET(RecordType, tof_indices), yardl::hdf5::FixedVectorDdl(H5::PredType::NATIVE_UINT32, 2)); + t.insertMember("energyIndices", HOFFSET(RecordType, energy_indices), yardl::hdf5::FixedVectorDdl(H5::PredType::NATIVE_UINT32, 3)); + return t; +} + +[[maybe_unused]] H5::CompType GetEventTimeBlockHdf5Ddl() { + using RecordType = petsird::hdf5::_Inner_EventTimeBlock; + H5::CompType t(sizeof(RecordType)); + t.insertMember("start", HOFFSET(RecordType, start), H5::PredType::NATIVE_UINT32); + t.insertMember("promptEvents", HOFFSET(RecordType, prompt_events), yardl::hdf5::InnerVlenDdl(petsird::hdf5::GetCoincidenceEventHdf5Ddl())); + t.insertMember("delayedEvents", HOFFSET(RecordType, delayed_events), yardl::hdf5::OptionalTypeDdl, std::vector>(yardl::hdf5::InnerVlenDdl(petsird::hdf5::GetCoincidenceEventHdf5Ddl()))); + t.insertMember("tripleEvents", HOFFSET(RecordType, triple_events), yardl::hdf5::OptionalTypeDdl, std::vector>(yardl::hdf5::InnerVlenDdl(petsird::hdf5::GetTripleEventHdf5Ddl()))); + return t; +} + +[[maybe_unused]] H5::CompType GetExternalSignalTimeBlockHdf5Ddl() { + using RecordType = petsird::hdf5::_Inner_ExternalSignalTimeBlock; + H5::CompType 
t(sizeof(RecordType)); + t.insertMember("start", HOFFSET(RecordType, start), H5::PredType::NATIVE_UINT32); + t.insertMember("signalID", HOFFSET(RecordType, signal_id), H5::PredType::NATIVE_UINT32); + t.insertMember("signalValues", HOFFSET(RecordType, signal_values), yardl::hdf5::InnerVlenDdl(H5::PredType::NATIVE_FLOAT)); + return t; +} + +[[maybe_unused]] H5::CompType GetBedMovementTimeBlockHdf5Ddl() { + using RecordType = petsird::BedMovementTimeBlock; + H5::CompType t(sizeof(RecordType)); + t.insertMember("start", HOFFSET(RecordType, start), H5::PredType::NATIVE_UINT32); + t.insertMember("transform", HOFFSET(RecordType, transform), petsird::hdf5::GetRigidTransformationHdf5Ddl()); + return t; +} + +[[maybe_unused]] H5::CompType GetGantryMovementTimeBlockHdf5Ddl() { + using RecordType = petsird::GantryMovementTimeBlock; + H5::CompType t(sizeof(RecordType)); + t.insertMember("start", HOFFSET(RecordType, start), H5::PredType::NATIVE_UINT32); + t.insertMember("transform", HOFFSET(RecordType, transform), petsird::hdf5::GetRigidTransformationHdf5Ddl()); + return t; +} + +[[maybe_unused]] H5::CompType GetExternalSignalTypeHdf5Ddl() { + using RecordType = petsird::hdf5::_Inner_ExternalSignalType; + H5::CompType t(sizeof(RecordType)); + t.insertMember("type", HOFFSET(RecordType, type), petsird::hdf5::GetExternalSignalTypeEnumHdf5Ddl()); + t.insertMember("description", HOFFSET(RecordType, description), yardl::hdf5::InnerVlenStringDdl()); + t.insertMember("id", HOFFSET(RecordType, id), H5::PredType::NATIVE_UINT32); + return t; +} + +[[maybe_unused]] H5::CompType GetTimeIntervalHdf5Ddl() { + using RecordType = petsird::TimeInterval; + H5::CompType t(sizeof(RecordType)); + t.insertMember("start", HOFFSET(RecordType, start), H5::PredType::NATIVE_UINT32); + t.insertMember("stop", HOFFSET(RecordType, stop), H5::PredType::NATIVE_UINT32); + return t; +} + +[[maybe_unused]] H5::CompType GetTimeFrameInformationHdf5Ddl() { + using RecordType = 
petsird::hdf5::_Inner_TimeFrameInformation; + H5::CompType t(sizeof(RecordType)); + t.insertMember("timeFrames", HOFFSET(RecordType, time_frames), yardl::hdf5::InnerVlenDdl(petsird::hdf5::GetTimeIntervalHdf5Ddl())); + return t; +} + +} // namespace + +PETSIRDWriter::PETSIRDWriter(std::string path) + : yardl::hdf5::Hdf5Writer::Hdf5Writer(path, "PETSIRD", schema_) { +} + +void PETSIRDWriter::WriteHeaderImpl(petsird::Header const& value) { + yardl::hdf5::WriteScalarDataset(group_, "header", petsird::hdf5::GetHeaderHdf5Ddl(), value); +} + +void PETSIRDWriter::WriteTimeBlocksImpl(petsird::TimeBlock const& value) { + if (!timeBlocks_dataset_state_) { + timeBlocks_dataset_state_ = std::make_unique>(group_, "timeBlocks", false, std::make_tuple(petsird::hdf5::GetEventTimeBlockHdf5Ddl(), "EventTimeBlock", static_cast(std::max(sizeof(::InnerUnion4), sizeof(std::variant)))), std::make_tuple(petsird::hdf5::GetExternalSignalTimeBlockHdf5Ddl(), "ExternalSignalTimeBlock", static_cast(std::max(sizeof(::InnerUnion4), sizeof(std::variant)))), std::make_tuple(petsird::hdf5::GetBedMovementTimeBlockHdf5Ddl(), "BedMovementTimeBlock", static_cast(std::max(sizeof(::InnerUnion4), sizeof(std::variant)))), std::make_tuple(petsird::hdf5::GetGantryMovementTimeBlockHdf5Ddl(), "GantryMovementTimeBlock", static_cast(std::max(sizeof(::InnerUnion4), sizeof(std::variant))))); + } + + std::visit( + [&](auto const& arg) { + using T = std::decay_t; + if constexpr (std::is_same_v) { + timeBlocks_dataset_state_->Append(static_cast(value.index()), arg); + } else if constexpr (std::is_same_v) { + timeBlocks_dataset_state_->Append(static_cast(value.index()), arg); + } else if constexpr (std::is_same_v) { + timeBlocks_dataset_state_->Append(static_cast(value.index()), arg); + } else if constexpr (std::is_same_v) { + timeBlocks_dataset_state_->Append(static_cast(value.index()), arg); + } else { + static_assert(yardl::hdf5::always_false_v, "non-exhaustive visitor!"); + } + }, + value); +} + +void 
PETSIRDWriter::EndTimeBlocksImpl() { + if (!timeBlocks_dataset_state_) { + timeBlocks_dataset_state_ = std::make_unique>(group_, "timeBlocks", false, std::make_tuple(petsird::hdf5::GetEventTimeBlockHdf5Ddl(), "EventTimeBlock", static_cast(std::max(sizeof(::InnerUnion4), sizeof(std::variant)))), std::make_tuple(petsird::hdf5::GetExternalSignalTimeBlockHdf5Ddl(), "ExternalSignalTimeBlock", static_cast(std::max(sizeof(::InnerUnion4), sizeof(std::variant)))), std::make_tuple(petsird::hdf5::GetBedMovementTimeBlockHdf5Ddl(), "BedMovementTimeBlock", static_cast(std::max(sizeof(::InnerUnion4), sizeof(std::variant)))), std::make_tuple(petsird::hdf5::GetGantryMovementTimeBlockHdf5Ddl(), "GantryMovementTimeBlock", static_cast(std::max(sizeof(::InnerUnion4), sizeof(std::variant))))); + } + + timeBlocks_dataset_state_.reset(); +} + +void PETSIRDWriter::Flush() { + if (timeBlocks_dataset_state_) { + timeBlocks_dataset_state_->Flush(); + } +} + +PETSIRDReader::PETSIRDReader(std::string path) + : yardl::hdf5::Hdf5Reader::Hdf5Reader(path, "PETSIRD", schema_) { +} + +void PETSIRDReader::ReadHeaderImpl(petsird::Header& value) { + yardl::hdf5::ReadScalarDataset(group_, "header", petsird::hdf5::GetHeaderHdf5Ddl(), value); +} + +bool PETSIRDReader::ReadTimeBlocksImpl(petsird::TimeBlock& value) { + if (!timeBlocks_dataset_state_) { + timeBlocks_dataset_state_ = std::make_unique>(group_, "timeBlocks", false, std::make_tuple(petsird::hdf5::GetEventTimeBlockHdf5Ddl(), "EventTimeBlock", static_cast(std::max(sizeof(::InnerUnion4), sizeof(std::variant)))), std::make_tuple(petsird::hdf5::GetExternalSignalTimeBlockHdf5Ddl(), "ExternalSignalTimeBlock", static_cast(std::max(sizeof(::InnerUnion4), sizeof(std::variant)))), std::make_tuple(petsird::hdf5::GetBedMovementTimeBlockHdf5Ddl(), "BedMovementTimeBlock", static_cast(std::max(sizeof(::InnerUnion4), sizeof(std::variant)))), std::make_tuple(petsird::hdf5::GetGantryMovementTimeBlockHdf5Ddl(), "GantryMovementTimeBlock", 
static_cast(std::max(sizeof(::InnerUnion4), sizeof(std::variant))))); + } + + auto [has_result, type_index, reader] = timeBlocks_dataset_state_->ReadIndex(); + if (!has_result) { + timeBlocks_dataset_state_.reset(); + return false; + } + + switch (type_index) { + case 0: { + petsird::EventTimeBlock& ref = value.emplace<0>(); + reader->Read(ref); + break; + } + case 1: { + petsird::ExternalSignalTimeBlock& ref = value.emplace<1>(); + reader->Read(ref); + break; + } + case 2: { + petsird::BedMovementTimeBlock& ref = value.emplace<2>(); + reader->Read(ref); + break; + } + case 3: { + petsird::GantryMovementTimeBlock& ref = value.emplace<3>(); + reader->Read(ref); + break; + } + } + + return true; +} + +} // namespace petsird::hdf5 + diff --git a/cpp/generated/hdf5/protocols.h b/cpp/generated/hdf5/protocols.h new file mode 100644 index 0000000..688564c --- /dev/null +++ b/cpp/generated/hdf5/protocols.h @@ -0,0 +1,49 @@ +// This file was generated by the "yardl" tool. DO NOT EDIT. + +#pragma once +#include +#include +#include +#include +#include + +#include "../protocols.h" +#include "../yardl/detail/hdf5/io.h" + +namespace petsird::hdf5 { +// HDF5 writer for the PETSIRD protocol. +// Definition of the stream of data +class PETSIRDWriter : public petsird::PETSIRDWriterBase, public yardl::hdf5::Hdf5Writer { + public: + PETSIRDWriter(std::string path); + + protected: + void WriteHeaderImpl(petsird::Header const& value) override; + + void WriteTimeBlocksImpl(petsird::TimeBlock const& value) override; + + void EndTimeBlocksImpl() override; + + public: + void Flush() override; + + private: + std::unique_ptr> timeBlocks_dataset_state_; +}; + +// HDF5 reader for the PETSIRD protocol. 
+// Definition of the stream of data +class PETSIRDReader : public petsird::PETSIRDReaderBase, public yardl::hdf5::Hdf5Reader { + public: + PETSIRDReader(std::string path); + + void ReadHeaderImpl(petsird::Header& value) override; + + bool ReadTimeBlocksImpl(petsird::TimeBlock& value) override; + + private: + std::unique_ptr> timeBlocks_dataset_state_; +}; + +} // namespace petsird + diff --git a/cpp/generated/ndjson/protocols.cc b/cpp/generated/ndjson/protocols.cc new file mode 100644 index 0000000..c1d021a --- /dev/null +++ b/cpp/generated/ndjson/protocols.cc @@ -0,0 +1,914 @@ +// This file was generated by the "yardl" tool. DO NOT EDIT. + +#include "../yardl/detail/ndjson/serializers.h" +#include "protocols.h" + +namespace petsird { +using ordered_json = nlohmann::ordered_json; + +void to_json(ordered_json& j, petsird::CoincidenceEvent const& value); +void from_json(ordered_json const& j, petsird::CoincidenceEvent& value); + +template +void to_json(ordered_json& j, petsird::SolidVolume const& value); +template +void from_json(ordered_json const& j, petsird::SolidVolume& value); + +void to_json(ordered_json& j, petsird::Coordinate const& value); +void from_json(ordered_json const& j, petsird::Coordinate& value); + +void to_json(ordered_json& j, petsird::BoxShape const& value); +void from_json(ordered_json const& j, petsird::BoxShape& value); + +void to_json(ordered_json& j, petsird::AnnulusShape const& value); +void from_json(ordered_json const& j, petsird::AnnulusShape& value); + +void to_json(ordered_json& j, petsird::RigidTransformation const& value); +void from_json(ordered_json const& j, petsird::RigidTransformation& value); + +template +void to_json(ordered_json& j, petsird::ReplicatedObject const& value); +template +void from_json(ordered_json const& j, petsird::ReplicatedObject& value); + +void to_json(ordered_json& j, petsird::DetectorModule const& value); +void from_json(ordered_json const& j, petsird::DetectorModule& value); + +void 
to_json(ordered_json& j, petsird::ScannerGeometry const& value); +void from_json(ordered_json const& j, petsird::ScannerGeometry& value); + +void to_json(ordered_json& j, petsird::Subject const& value); +void from_json(ordered_json const& j, petsird::Subject& value); + +void to_json(ordered_json& j, petsird::Institution const& value); +void from_json(ordered_json const& j, petsird::Institution& value); + +void to_json(ordered_json& j, petsird::ExamInformation const& value); +void from_json(ordered_json const& j, petsird::ExamInformation& value); + +void to_json(ordered_json& j, petsird::Direction const& value); +void from_json(ordered_json const& j, petsird::Direction& value); + +void to_json(ordered_json& j, petsird::DirectionMatrix const& value); +void from_json(ordered_json const& j, petsird::DirectionMatrix& value); + +void to_json(ordered_json& j, petsird::Atom const& value); +void from_json(ordered_json const& j, petsird::Atom& value); + +void to_json(ordered_json& j, petsird::BulkMaterial const& value); +void from_json(ordered_json const& j, petsird::BulkMaterial& value); + +void to_json(ordered_json& j, petsird::CoincidencePolicy const& value); +void from_json(ordered_json const& j, petsird::CoincidencePolicy& value); + +void to_json(ordered_json& j, petsird::ScannerInformation const& value); +void from_json(ordered_json const& j, petsird::ScannerInformation& value); + +void to_json(ordered_json& j, petsird::Header const& value); +void from_json(ordered_json const& j, petsird::Header& value); + +void to_json(ordered_json& j, petsird::TripleEvent const& value); +void from_json(ordered_json const& j, petsird::TripleEvent& value); + +void to_json(ordered_json& j, petsird::EventTimeBlock const& value); +void from_json(ordered_json const& j, petsird::EventTimeBlock& value); + +void to_json(ordered_json& j, petsird::ExternalSignalTimeBlock const& value); +void from_json(ordered_json const& j, petsird::ExternalSignalTimeBlock& value); + +void to_json(ordered_json& 
j, petsird::BedMovementTimeBlock const& value); +void from_json(ordered_json const& j, petsird::BedMovementTimeBlock& value); + +void to_json(ordered_json& j, petsird::GantryMovementTimeBlock const& value); +void from_json(ordered_json const& j, petsird::GantryMovementTimeBlock& value); + +void to_json(ordered_json& j, petsird::ExternalSignalTypeEnum const& value); +void from_json(ordered_json const& j, petsird::ExternalSignalTypeEnum& value); + +void to_json(ordered_json& j, petsird::ExternalSignalType const& value); +void from_json(ordered_json const& j, petsird::ExternalSignalType& value); + +void to_json(ordered_json& j, petsird::TimeInterval const& value); +void from_json(ordered_json const& j, petsird::TimeInterval& value); + +void to_json(ordered_json& j, petsird::TimeFrameInformation const& value); +void from_json(ordered_json const& j, petsird::TimeFrameInformation& value); + +} // namespace petsird + +NLOHMANN_JSON_NAMESPACE_BEGIN + +template <> +struct adl_serializer> { + static void to_json(ordered_json& j, std::variant const& value) { + switch (value.index()) { + case 0: + j = ordered_json{ {"BoxShape", std::get(value)} }; + break; + case 1: + j = ordered_json{ {"AnnulusShape", std::get(value)} }; + break; + default: + throw std::runtime_error("Invalid union value"); + } + } + + static void from_json(ordered_json const& j, std::variant& value) { + auto it = j.begin(); + std::string tag = it.key(); + if (tag == "BoxShape") { + value = it.value().get(); + return; + } + if (tag == "AnnulusShape") { + value = it.value().get(); + return; + } + } +}; + +template <> +struct adl_serializer> { + static void to_json(ordered_json& j, std::variant const& value) { + switch (value.index()) { + case 0: + j = ordered_json{ {"EventTimeBlock", std::get(value)} }; + break; + case 1: + j = ordered_json{ {"ExternalSignalTimeBlock", std::get(value)} }; + break; + case 2: + j = ordered_json{ {"BedMovementTimeBlock", std::get(value)} }; + break; + case 3: + j = ordered_json{ 
{"GantryMovementTimeBlock", std::get(value)} }; + break; + default: + throw std::runtime_error("Invalid union value"); + } + } + + static void from_json(ordered_json const& j, std::variant& value) { + auto it = j.begin(); + std::string tag = it.key(); + if (tag == "EventTimeBlock") { + value = it.value().get(); + return; + } + if (tag == "ExternalSignalTimeBlock") { + value = it.value().get(); + return; + } + if (tag == "BedMovementTimeBlock") { + value = it.value().get(); + return; + } + if (tag == "GantryMovementTimeBlock") { + value = it.value().get(); + return; + } + } +}; + +NLOHMANN_JSON_NAMESPACE_END + +namespace petsird { +using ordered_json = nlohmann::ordered_json; + +void to_json(ordered_json& j, petsird::CoincidenceEvent const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.detector_ids)) { + j.push_back({"detectorIds", value.detector_ids}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.tof_idx)) { + j.push_back({"tofIdx", value.tof_idx}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.energy_indices)) { + j.push_back({"energyIndices", value.energy_indices}); + } +} + +void from_json(ordered_json const& j, petsird::CoincidenceEvent& value) { + if (auto it = j.find("detectorIds"); it != j.end()) { + it->get_to(value.detector_ids); + } + if (auto it = j.find("tofIdx"); it != j.end()) { + it->get_to(value.tof_idx); + } + if (auto it = j.find("energyIndices"); it != j.end()) { + it->get_to(value.energy_indices); + } +} + +template +void to_json(ordered_json& j, petsird::SolidVolume const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.shape)) { + j.push_back({"shape", value.shape}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.material_id)) { + j.push_back({"materialId", value.material_id}); + } +} + +template +void from_json(ordered_json const& j, petsird::SolidVolume& value) { + if (auto it = j.find("shape"); it != j.end()) { + 
it->get_to(value.shape); + } + if (auto it = j.find("materialId"); it != j.end()) { + it->get_to(value.material_id); + } +} + +void to_json(ordered_json& j, petsird::Coordinate const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.c)) { + j.push_back({"c", value.c}); + } +} + +void from_json(ordered_json const& j, petsird::Coordinate& value) { + if (auto it = j.find("c"); it != j.end()) { + it->get_to(value.c); + } +} + +void to_json(ordered_json& j, petsird::BoxShape const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.corners)) { + j.push_back({"corners", value.corners}); + } +} + +void from_json(ordered_json const& j, petsird::BoxShape& value) { + if (auto it = j.find("corners"); it != j.end()) { + it->get_to(value.corners); + } +} + +void to_json(ordered_json& j, petsird::AnnulusShape const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.inner_radius)) { + j.push_back({"innerRadius", value.inner_radius}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.outer_radius)) { + j.push_back({"outerRadius", value.outer_radius}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.thickness)) { + j.push_back({"thickness", value.thickness}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.angular_range)) { + j.push_back({"angularRange", value.angular_range}); + } +} + +void from_json(ordered_json const& j, petsird::AnnulusShape& value) { + if (auto it = j.find("innerRadius"); it != j.end()) { + it->get_to(value.inner_radius); + } + if (auto it = j.find("outerRadius"); it != j.end()) { + it->get_to(value.outer_radius); + } + if (auto it = j.find("thickness"); it != j.end()) { + it->get_to(value.thickness); + } + if (auto it = j.find("angularRange"); it != j.end()) { + it->get_to(value.angular_range); + } +} + +void to_json(ordered_json& j, petsird::RigidTransformation const& value) { + j = ordered_json::object(); + 
if (yardl::ndjson::ShouldSerializeFieldValue(value.matrix)) { + j.push_back({"matrix", value.matrix}); + } +} + +void from_json(ordered_json const& j, petsird::RigidTransformation& value) { + if (auto it = j.find("matrix"); it != j.end()) { + it->get_to(value.matrix); + } +} + +template +void to_json(ordered_json& j, petsird::ReplicatedObject const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.object)) { + j.push_back({"object", value.object}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.transforms)) { + j.push_back({"transforms", value.transforms}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.ids)) { + j.push_back({"ids", value.ids}); + } +} + +template +void from_json(ordered_json const& j, petsird::ReplicatedObject& value) { + if (auto it = j.find("object"); it != j.end()) { + it->get_to(value.object); + } + if (auto it = j.find("transforms"); it != j.end()) { + it->get_to(value.transforms); + } + if (auto it = j.find("ids"); it != j.end()) { + it->get_to(value.ids); + } +} + +void to_json(ordered_json& j, petsird::DetectorModule const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.detecting_elements)) { + j.push_back({"detectingElements", value.detecting_elements}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.detecting_element_ids)) { + j.push_back({"detectingElementIds", value.detecting_element_ids}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.non_detecting_elements)) { + j.push_back({"nonDetectingElements", value.non_detecting_elements}); + } +} + +void from_json(ordered_json const& j, petsird::DetectorModule& value) { + if (auto it = j.find("detectingElements"); it != j.end()) { + it->get_to(value.detecting_elements); + } + if (auto it = j.find("detectingElementIds"); it != j.end()) { + it->get_to(value.detecting_element_ids); + } + if (auto it = j.find("nonDetectingElements"); it != j.end()) { + 
it->get_to(value.non_detecting_elements); + } +} + +void to_json(ordered_json& j, petsird::ScannerGeometry const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.replicated_modules)) { + j.push_back({"replicatedModules", value.replicated_modules}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.ids)) { + j.push_back({"ids", value.ids}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.non_detecting_volumes)) { + j.push_back({"nonDetectingVolumes", value.non_detecting_volumes}); + } +} + +void from_json(ordered_json const& j, petsird::ScannerGeometry& value) { + if (auto it = j.find("replicatedModules"); it != j.end()) { + it->get_to(value.replicated_modules); + } + if (auto it = j.find("ids"); it != j.end()) { + it->get_to(value.ids); + } + if (auto it = j.find("nonDetectingVolumes"); it != j.end()) { + it->get_to(value.non_detecting_volumes); + } +} + +void to_json(ordered_json& j, petsird::Subject const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.name)) { + j.push_back({"name", value.name}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.id)) { + j.push_back({"id", value.id}); + } +} + +void from_json(ordered_json const& j, petsird::Subject& value) { + if (auto it = j.find("name"); it != j.end()) { + it->get_to(value.name); + } + if (auto it = j.find("id"); it != j.end()) { + it->get_to(value.id); + } +} + +void to_json(ordered_json& j, petsird::Institution const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.name)) { + j.push_back({"name", value.name}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.address)) { + j.push_back({"address", value.address}); + } +} + +void from_json(ordered_json const& j, petsird::Institution& value) { + if (auto it = j.find("name"); it != j.end()) { + it->get_to(value.name); + } + if (auto it = j.find("address"); it != j.end()) { + it->get_to(value.address); 
+ } +} + +void to_json(ordered_json& j, petsird::ExamInformation const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.subject)) { + j.push_back({"subject", value.subject}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.institution)) { + j.push_back({"institution", value.institution}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.protocol)) { + j.push_back({"protocol", value.protocol}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.start_of_acquisition)) { + j.push_back({"startOfAcquisition", value.start_of_acquisition}); + } +} + +void from_json(ordered_json const& j, petsird::ExamInformation& value) { + if (auto it = j.find("subject"); it != j.end()) { + it->get_to(value.subject); + } + if (auto it = j.find("institution"); it != j.end()) { + it->get_to(value.institution); + } + if (auto it = j.find("protocol"); it != j.end()) { + it->get_to(value.protocol); + } + if (auto it = j.find("startOfAcquisition"); it != j.end()) { + it->get_to(value.start_of_acquisition); + } +} + +void to_json(ordered_json& j, petsird::Direction const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.c)) { + j.push_back({"c", value.c}); + } +} + +void from_json(ordered_json const& j, petsird::Direction& value) { + if (auto it = j.find("c"); it != j.end()) { + it->get_to(value.c); + } +} + +void to_json(ordered_json& j, petsird::DirectionMatrix const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.matrix)) { + j.push_back({"matrix", value.matrix}); + } +} + +void from_json(ordered_json const& j, petsird::DirectionMatrix& value) { + if (auto it = j.find("matrix"); it != j.end()) { + it->get_to(value.matrix); + } +} + +void to_json(ordered_json& j, petsird::Atom const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.mass_number)) { + j.push_back({"massNumber", value.mass_number}); + 
} + if (yardl::ndjson::ShouldSerializeFieldValue(value.atomic_number)) { + j.push_back({"atomicNumber", value.atomic_number}); + } +} + +void from_json(ordered_json const& j, petsird::Atom& value) { + if (auto it = j.find("massNumber"); it != j.end()) { + it->get_to(value.mass_number); + } + if (auto it = j.find("atomicNumber"); it != j.end()) { + it->get_to(value.atomic_number); + } +} + +void to_json(ordered_json& j, petsird::BulkMaterial const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.id)) { + j.push_back({"id", value.id}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.name)) { + j.push_back({"name", value.name}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.density)) { + j.push_back({"density", value.density}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.atoms)) { + j.push_back({"atoms", value.atoms}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.mass_fractions)) { + j.push_back({"massFractions", value.mass_fractions}); + } +} + +void from_json(ordered_json const& j, petsird::BulkMaterial& value) { + if (auto it = j.find("id"); it != j.end()) { + it->get_to(value.id); + } + if (auto it = j.find("name"); it != j.end()) { + it->get_to(value.name); + } + if (auto it = j.find("density"); it != j.end()) { + it->get_to(value.density); + } + if (auto it = j.find("atoms"); it != j.end()) { + it->get_to(value.atoms); + } + if (auto it = j.find("massFractions"); it != j.end()) { + it->get_to(value.mass_fractions); + } +} + +namespace { +std::unordered_map const __CoincidencePolicy_values = { + {"rejectMultiples", petsird::CoincidencePolicy::kRejectMultiples}, + {"storeMultiplesAsPairs", petsird::CoincidencePolicy::kStoreMultiplesAsPairs}, + {"other", petsird::CoincidencePolicy::kOther}, +}; +} //namespace + +void to_json(ordered_json& j, petsird::CoincidencePolicy const& value) { + switch (value) { + case petsird::CoincidencePolicy::kRejectMultiples: + j = 
"rejectMultiples"; + break; + case petsird::CoincidencePolicy::kStoreMultiplesAsPairs: + j = "storeMultiplesAsPairs"; + break; + case petsird::CoincidencePolicy::kOther: + j = "other"; + break; + default: + using underlying_type = typename std::underlying_type::type; + j = static_cast(value); + break; + } +} + +void from_json(ordered_json const& j, petsird::CoincidencePolicy& value) { + if (j.is_string()) { + auto symbol = j.get(); + if (auto res = __CoincidencePolicy_values.find(symbol); res != __CoincidencePolicy_values.end()) { + value = res->second; + return; + } + throw std::runtime_error("Invalid enum value '" + symbol + "' for enum petsird::CoincidencePolicy"); + } + using underlying_type = typename std::underlying_type::type; + value = static_cast(j.get()); +} + +void to_json(ordered_json& j, petsird::ScannerInformation const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.model_name)) { + j.push_back({"modelName", value.model_name}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.scanner_geometry)) { + j.push_back({"scannerGeometry", value.scanner_geometry}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.bulk_materials)) { + j.push_back({"bulkMaterials", value.bulk_materials}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.gantry_alignment)) { + j.push_back({"gantryAlignment", value.gantry_alignment}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.tof_bin_edges)) { + j.push_back({"tofBinEdges", value.tof_bin_edges}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.tof_resolution)) { + j.push_back({"tofResolution", value.tof_resolution}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.energy_bin_edges)) { + j.push_back({"energyBinEdges", value.energy_bin_edges}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.energy_resolution_at_511)) { + j.push_back({"energyResolutionAt511", value.energy_resolution_at_511}); + } + if 
(yardl::ndjson::ShouldSerializeFieldValue(value.event_time_block_duration)) { + j.push_back({"eventTimeBlockDuration", value.event_time_block_duration}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.coincidence_policy)) { + j.push_back({"coincidencePolicy", value.coincidence_policy}); + } +} + +void from_json(ordered_json const& j, petsird::ScannerInformation& value) { + if (auto it = j.find("modelName"); it != j.end()) { + it->get_to(value.model_name); + } + if (auto it = j.find("scannerGeometry"); it != j.end()) { + it->get_to(value.scanner_geometry); + } + if (auto it = j.find("bulkMaterials"); it != j.end()) { + it->get_to(value.bulk_materials); + } + if (auto it = j.find("gantryAlignment"); it != j.end()) { + it->get_to(value.gantry_alignment); + } + if (auto it = j.find("tofBinEdges"); it != j.end()) { + it->get_to(value.tof_bin_edges); + } + if (auto it = j.find("tofResolution"); it != j.end()) { + it->get_to(value.tof_resolution); + } + if (auto it = j.find("energyBinEdges"); it != j.end()) { + it->get_to(value.energy_bin_edges); + } + if (auto it = j.find("energyResolutionAt511"); it != j.end()) { + it->get_to(value.energy_resolution_at_511); + } + if (auto it = j.find("eventTimeBlockDuration"); it != j.end()) { + it->get_to(value.event_time_block_duration); + } + if (auto it = j.find("coincidencePolicy"); it != j.end()) { + it->get_to(value.coincidence_policy); + } +} + +void to_json(ordered_json& j, petsird::Header const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.scanner)) { + j.push_back({"scanner", value.scanner}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.exam)) { + j.push_back({"exam", value.exam}); + } +} + +void from_json(ordered_json const& j, petsird::Header& value) { + if (auto it = j.find("scanner"); it != j.end()) { + it->get_to(value.scanner); + } + if (auto it = j.find("exam"); it != j.end()) { + it->get_to(value.exam); + } +} + +void to_json(ordered_json& j, 
petsird::TripleEvent const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.detector_ids)) { + j.push_back({"detectorIds", value.detector_ids}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.tof_indices)) { + j.push_back({"tofIndices", value.tof_indices}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.energy_indices)) { + j.push_back({"energyIndices", value.energy_indices}); + } +} + +void from_json(ordered_json const& j, petsird::TripleEvent& value) { + if (auto it = j.find("detectorIds"); it != j.end()) { + it->get_to(value.detector_ids); + } + if (auto it = j.find("tofIndices"); it != j.end()) { + it->get_to(value.tof_indices); + } + if (auto it = j.find("energyIndices"); it != j.end()) { + it->get_to(value.energy_indices); + } +} + +void to_json(ordered_json& j, petsird::EventTimeBlock const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.start)) { + j.push_back({"start", value.start}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.prompt_events)) { + j.push_back({"promptEvents", value.prompt_events}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.delayed_events)) { + j.push_back({"delayedEvents", value.delayed_events}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.triple_events)) { + j.push_back({"tripleEvents", value.triple_events}); + } +} + +void from_json(ordered_json const& j, petsird::EventTimeBlock& value) { + if (auto it = j.find("start"); it != j.end()) { + it->get_to(value.start); + } + if (auto it = j.find("promptEvents"); it != j.end()) { + it->get_to(value.prompt_events); + } + if (auto it = j.find("delayedEvents"); it != j.end()) { + it->get_to(value.delayed_events); + } + if (auto it = j.find("tripleEvents"); it != j.end()) { + it->get_to(value.triple_events); + } +} + +void to_json(ordered_json& j, petsird::ExternalSignalTimeBlock const& value) { + j = ordered_json::object(); + if 
(yardl::ndjson::ShouldSerializeFieldValue(value.start)) { + j.push_back({"start", value.start}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.signal_id)) { + j.push_back({"signalID", value.signal_id}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.signal_values)) { + j.push_back({"signalValues", value.signal_values}); + } +} + +void from_json(ordered_json const& j, petsird::ExternalSignalTimeBlock& value) { + if (auto it = j.find("start"); it != j.end()) { + it->get_to(value.start); + } + if (auto it = j.find("signalID"); it != j.end()) { + it->get_to(value.signal_id); + } + if (auto it = j.find("signalValues"); it != j.end()) { + it->get_to(value.signal_values); + } +} + +void to_json(ordered_json& j, petsird::BedMovementTimeBlock const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.start)) { + j.push_back({"start", value.start}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.transform)) { + j.push_back({"transform", value.transform}); + } +} + +void from_json(ordered_json const& j, petsird::BedMovementTimeBlock& value) { + if (auto it = j.find("start"); it != j.end()) { + it->get_to(value.start); + } + if (auto it = j.find("transform"); it != j.end()) { + it->get_to(value.transform); + } +} + +void to_json(ordered_json& j, petsird::GantryMovementTimeBlock const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.start)) { + j.push_back({"start", value.start}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.transform)) { + j.push_back({"transform", value.transform}); + } +} + +void from_json(ordered_json const& j, petsird::GantryMovementTimeBlock& value) { + if (auto it = j.find("start"); it != j.end()) { + it->get_to(value.start); + } + if (auto it = j.find("transform"); it != j.end()) { + it->get_to(value.transform); + } +} + +namespace { +std::unordered_map const __ExternalSignalTypeEnum_values = { + {"ecgTrace", 
petsird::ExternalSignalTypeEnum::kEcgTrace}, + {"ecgTrigger", petsird::ExternalSignalTypeEnum::kEcgTrigger}, + {"respTrace", petsird::ExternalSignalTypeEnum::kRespTrace}, + {"respTrigger", petsird::ExternalSignalTypeEnum::kRespTrigger}, + {"otherMotionSignal", petsird::ExternalSignalTypeEnum::kOtherMotionSignal}, + {"otherMotionTrigger", petsird::ExternalSignalTypeEnum::kOtherMotionTrigger}, + {"externalSync", petsird::ExternalSignalTypeEnum::kExternalSync}, + {"other", petsird::ExternalSignalTypeEnum::kOther}, +}; +} //namespace + +void to_json(ordered_json& j, petsird::ExternalSignalTypeEnum const& value) { + switch (value) { + case petsird::ExternalSignalTypeEnum::kEcgTrace: + j = "ecgTrace"; + break; + case petsird::ExternalSignalTypeEnum::kEcgTrigger: + j = "ecgTrigger"; + break; + case petsird::ExternalSignalTypeEnum::kRespTrace: + j = "respTrace"; + break; + case petsird::ExternalSignalTypeEnum::kRespTrigger: + j = "respTrigger"; + break; + case petsird::ExternalSignalTypeEnum::kOtherMotionSignal: + j = "otherMotionSignal"; + break; + case petsird::ExternalSignalTypeEnum::kOtherMotionTrigger: + j = "otherMotionTrigger"; + break; + case petsird::ExternalSignalTypeEnum::kExternalSync: + j = "externalSync"; + break; + case petsird::ExternalSignalTypeEnum::kOther: + j = "other"; + break; + default: + using underlying_type = typename std::underlying_type::type; + j = static_cast(value); + break; + } +} + +void from_json(ordered_json const& j, petsird::ExternalSignalTypeEnum& value) { + if (j.is_string()) { + auto symbol = j.get(); + if (auto res = __ExternalSignalTypeEnum_values.find(symbol); res != __ExternalSignalTypeEnum_values.end()) { + value = res->second; + return; + } + throw std::runtime_error("Invalid enum value '" + symbol + "' for enum petsird::ExternalSignalTypeEnum"); + } + using underlying_type = typename std::underlying_type::type; + value = static_cast(j.get()); +} + +void to_json(ordered_json& j, petsird::ExternalSignalType const& value) { + j = 
ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.type)) { + j.push_back({"type", value.type}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.description)) { + j.push_back({"description", value.description}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.id)) { + j.push_back({"id", value.id}); + } +} + +void from_json(ordered_json const& j, petsird::ExternalSignalType& value) { + if (auto it = j.find("type"); it != j.end()) { + it->get_to(value.type); + } + if (auto it = j.find("description"); it != j.end()) { + it->get_to(value.description); + } + if (auto it = j.find("id"); it != j.end()) { + it->get_to(value.id); + } +} + +void to_json(ordered_json& j, petsird::TimeInterval const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.start)) { + j.push_back({"start", value.start}); + } + if (yardl::ndjson::ShouldSerializeFieldValue(value.stop)) { + j.push_back({"stop", value.stop}); + } +} + +void from_json(ordered_json const& j, petsird::TimeInterval& value) { + if (auto it = j.find("start"); it != j.end()) { + it->get_to(value.start); + } + if (auto it = j.find("stop"); it != j.end()) { + it->get_to(value.stop); + } +} + +void to_json(ordered_json& j, petsird::TimeFrameInformation const& value) { + j = ordered_json::object(); + if (yardl::ndjson::ShouldSerializeFieldValue(value.time_frames)) { + j.push_back({"timeFrames", value.time_frames}); + } +} + +void from_json(ordered_json const& j, petsird::TimeFrameInformation& value) { + if (auto it = j.find("timeFrames"); it != j.end()) { + it->get_to(value.time_frames); + } +} + +} // namespace petsird + +namespace petsird::ndjson { +void PETSIRDWriter::WriteHeaderImpl(petsird::Header const& value) { + ordered_json json_value = value; + yardl::ndjson::WriteProtocolValue(stream_, "header", json_value);} + +void PETSIRDWriter::WriteTimeBlocksImpl(petsird::TimeBlock const& value) { + ordered_json json_value = value; + 
yardl::ndjson::WriteProtocolValue(stream_, "timeBlocks", json_value);} + +void PETSIRDWriter::Flush() { + stream_.flush(); +} + +void PETSIRDWriter::CloseImpl() { + stream_.flush(); +} + +void PETSIRDReader::ReadHeaderImpl(petsird::Header& value) { + yardl::ndjson::ReadProtocolValue(stream_, line_, "header", true, unused_step_, value); +} + +bool PETSIRDReader::ReadTimeBlocksImpl(petsird::TimeBlock& value) { + return yardl::ndjson::ReadProtocolValue(stream_, line_, "timeBlocks", false, unused_step_, value); +} + +void PETSIRDReader::CloseImpl() { + VerifyFinished(); +} + +} // namespace petsird::ndjson + diff --git a/cpp/generated/ndjson/protocols.h b/cpp/generated/ndjson/protocols.h new file mode 100644 index 0000000..592e9a8 --- /dev/null +++ b/cpp/generated/ndjson/protocols.h @@ -0,0 +1,54 @@ +// This file was generated by the "yardl" tool. DO NOT EDIT. + +#pragma once +#include +#include +#include +#include +#include +#include + +#include "../yardl/detail/ndjson/reader_writer.h" +#include "../protocols.h" + +namespace petsird::ndjson { +// NDJSON writer for the PETSIRD protocol. +// Definition of the stream of data +class PETSIRDWriter : public petsird::PETSIRDWriterBase, yardl::ndjson::NDJsonWriter { + public: + PETSIRDWriter(std::ostream& stream) + : yardl::ndjson::NDJsonWriter(stream, schema_) { + } + + PETSIRDWriter(std::string file_name) + : yardl::ndjson::NDJsonWriter(file_name, schema_) { + } + + void Flush() override; + + protected: + void WriteHeaderImpl(petsird::Header const& value) override; + void WriteTimeBlocksImpl(petsird::TimeBlock const& value) override; + void EndTimeBlocksImpl() override {} + void CloseImpl() override; +}; + +// NDJSON reader for the PETSIRD protocol. 
+// Definition of the stream of data +class PETSIRDReader : public petsird::PETSIRDReaderBase, yardl::ndjson::NDJsonReader { + public: + PETSIRDReader(std::istream& stream) + : yardl::ndjson::NDJsonReader(stream, schema_) { + } + + PETSIRDReader(std::string file_name) + : yardl::ndjson::NDJsonReader(file_name, schema_) { + } + + protected: + void ReadHeaderImpl(petsird::Header& value) override; + bool ReadTimeBlocksImpl(petsird::TimeBlock& value) override; + void CloseImpl() override; +}; + +} // namespace petsird::ndjson diff --git a/cpp/generated/protocols.cc b/cpp/generated/protocols.cc new file mode 100644 index 0000000..53a8035 --- /dev/null +++ b/cpp/generated/protocols.cc @@ -0,0 +1,208 @@ +// This file was generated by the "yardl" tool. DO NOT EDIT. + +#include "protocols.h" + +#ifdef _MSC_VER +#define unlikely(x) x +#else +#define unlikely(x) __builtin_expect((x), 0) +#endif + +namespace petsird { +namespace { +void PETSIRDWriterBaseInvalidState(uint8_t attempted, [[maybe_unused]] bool end, uint8_t current) { + std::string expected_method; + switch (current) { + case 0: expected_method = "WriteHeader()"; break; + case 1: expected_method = "WriteTimeBlocks() or EndTimeBlocks()"; break; + } + std::string attempted_method; + switch (attempted) { + case 0: attempted_method = "WriteHeader()"; break; + case 1: attempted_method = end ? 
"EndTimeBlocks()" : "WriteTimeBlocks()"; break; + case 2: attempted_method = "Close()"; break; + } + throw std::runtime_error("Expected call to " + expected_method + " but received call to " + attempted_method + " instead."); +} + +void PETSIRDReaderBaseInvalidState(uint8_t attempted, uint8_t current) { + auto f = [](uint8_t i) -> std::string { + switch (i/2) { + case 0: return "ReadHeader()"; + case 1: return "ReadTimeBlocks()"; + case 2: return "Close()"; + default: return ""; + } + }; + throw std::runtime_error("Expected call to " + f(current) + " but received call to " + f(attempted) + " instead."); +} + +} // namespace + +std::string PETSIRDWriterBase::schema_ = R"({"protocol":{"name":"PETSIRD","sequence":[{"name":"header","type":"PETSIRD.Header"},{"name":"timeBlocks","type":{"stream":{"items":"PETSIRD.TimeBlock"}}}]},"types":[{"name":"AnnulusShape","fields":[{"name":"innerRadius","type":"float32"},{"name":"outerRadius","type":"float32"},{"name":"thickness","type":"float32"},{"name":"angularRange","type":{"vector":{"items":"float32","length":2}}}]},{"name":"Atom","fields":[{"name":"massNumber","type":"uint32"},{"name":"atomicNumber","type":"uint32"}]},{"name":"BedMovementTimeBlock","fields":[{"name":"start","type":"uint32"},{"name":"transform","type":"PETSIRD.RigidTransformation"}]},{"name":"BoxShape","fields":[{"name":"corners","type":{"vector":{"items":"PETSIRD.Coordinate","length":8}}}]},{"name":"BoxSolidVolume","type":{"name":"PETSIRD.SolidVolume","typeArguments":["PETSIRD.BoxShape"]}},{"name":"BulkMaterial","fields":[{"name":"id","type":"uint32"},{"name":"name","type":"string"},{"name":"density","type":"float32"},{"name":"atoms","type":{"vector":{"items":"PETSIRD.Atom"}}},{"name":"massFractions","type":{"vector":{"items":"float32"}}}]},{"name":"CoincidenceEvent","fields":[{"name":"detectorIds","type":{"vector":{"items":"uint32","length":2}}},{"name":"tofIdx","type":"uint32"},{"name":"energyIndices","type":{"vector":{"items":"uint32","length":2}}}]},{"name"
:"CoincidencePolicy","values":[{"symbol":"rejectMultiples","value":0},{"symbol":"storeMultiplesAsPairs","value":1},{"symbol":"other","value":2}]},{"name":"Coordinate","fields":[{"name":"c","type":{"array":{"items":"float32","dimensions":[{"length":3}]}}}]},{"name":"DetectorModule","fields":[{"name":"detectingElements","type":{"vector":{"items":"PETSIRD.ReplicatedBoxSolidVolume"}}},{"name":"detectingElementIds","type":{"vector":{"items":"uint32"}}},{"name":"nonDetectingElements","type":{"vector":{"items":"PETSIRD.ReplicatedGenericSolidVolume"}}}]},{"name":"EventTimeBlock","fields":[{"name":"start","type":"uint32"},{"name":"promptEvents","type":{"vector":{"items":"PETSIRD.CoincidenceEvent"}}},{"name":"delayedEvents","type":[null,{"vector":{"items":"PETSIRD.CoincidenceEvent"}}]},{"name":"tripleEvents","type":[null,{"vector":{"items":"PETSIRD.TripleEvent"}}]}]},{"name":"ExamInformation","fields":[{"name":"subject","type":"PETSIRD.Subject"},{"name":"institution","type":"PETSIRD.Institution"},{"name":"protocol","type":[null,"string"]},{"name":"startOfAcquisition","type":[null,"datetime"]}]},{"name":"ExternalSignalTimeBlock","fields":[{"name":"start","type":"uint32"},{"name":"signalID","type":"uint32"},{"name":"signalValues","type":{"vector":{"items":"float32"}}}]},{"name":"GantryMovementTimeBlock","fields":[{"name":"start","type":"uint32"},{"name":"transform","type":"PETSIRD.RigidTransformation"}]},{"name":"GenericSolidVolume","type":{"name":"PETSIRD.SolidVolume","typeArguments":["PETSIRD.GeometricShape"]}},{"name":"GeometricShape","type":[{"tag":"BoxShape","type":"PETSIRD.BoxShape"},{"tag":"AnnulusShape","type":"PETSIRD.AnnulusShape"}]},{"name":"Header","fields":[{"name":"scanner","type":"PETSIRD.ScannerInformation"},{"name":"exam","type":[null,"PETSIRD.ExamInformation"]}]},{"name":"Institution","fields":[{"name":"name","type":"string"},{"name":"address","type":"string"}]},{"name":"ReplicatedBoxSolidVolume","type":{"name":"PETSIRD.ReplicatedObject","typeArguments":["PETS
IRD.BoxSolidVolume"]}},{"name":"ReplicatedDetectorModule","type":{"name":"PETSIRD.ReplicatedObject","typeArguments":["PETSIRD.DetectorModule"]}},{"name":"ReplicatedGenericSolidVolume","type":{"name":"PETSIRD.ReplicatedObject","typeArguments":["PETSIRD.GenericSolidVolume"]}},{"name":"ReplicatedObject","typeParameters":["T"],"fields":[{"name":"object","type":"T"},{"name":"transforms","type":{"vector":{"items":"PETSIRD.RigidTransformation"}}},{"name":"ids","type":{"vector":{"items":"uint32"}}}]},{"name":"RigidTransformation","fields":[{"name":"matrix","type":{"array":{"items":"float32","dimensions":[{"length":3},{"length":4}]}}}]},{"name":"ScannerGeometry","fields":[{"name":"replicatedModules","type":{"vector":{"items":"PETSIRD.ReplicatedDetectorModule"}}},{"name":"ids","type":{"vector":{"items":"uint32"}}},{"name":"nonDetectingVolumes","type":[null,{"vector":{"items":"PETSIRD.GenericSolidVolume"}}]}]},{"name":"ScannerInformation","fields":[{"name":"modelName","type":"string"},{"name":"scannerGeometry","type":"PETSIRD.ScannerGeometry"},{"name":"bulkMaterials","type":{"vector":{"items":"PETSIRD.BulkMaterial"}}},{"name":"gantryAlignment","type":[null,"PETSIRD.RigidTransformation"]},{"name":"tofBinEdges","type":{"array":{"items":"float32","dimensions":1}}},{"name":"tofResolution","type":"float32"},{"name":"energyBinEdges","type":{"array":{"items":"float32","dimensions":1}}},{"name":"energyResolutionAt511","type":"float32"},{"name":"eventTimeBlockDuration","type":"uint32"},{"name":"coincidencePolicy","type":"PETSIRD.CoincidencePolicy"}]},{"name":"SolidVolume","typeParameters":["Shape"],"fields":[{"name":"shape","type":"Shape"},{"name":"materialId","type":"uint32"}]},{"name":"Subject","fields":[{"name":"name","type":[null,"string"]},{"name":"id","type":"string"}]},{"name":"TimeBlock","type":[{"tag":"EventTimeBlock","type":"PETSIRD.EventTimeBlock"},{"tag":"ExternalSignalTimeBlock","type":"PETSIRD.ExternalSignalTimeBlock"},{"tag":"BedMovementTimeBlock","type":"PETSIRD.BedMove
mentTimeBlock"},{"tag":"GantryMovementTimeBlock","type":"PETSIRD.GantryMovementTimeBlock"}]},{"name":"TripleEvent","fields":[{"name":"detectorIds","type":{"vector":{"items":"uint32","length":3}}},{"name":"tofIndices","type":{"vector":{"items":"uint32","length":2}}},{"name":"energyIndices","type":{"vector":{"items":"uint32","length":3}}}]}]})"; + +std::vector PETSIRDWriterBase::previous_schemas_ = { +}; + +std::string PETSIRDWriterBase::SchemaFromVersion(Version version) { + switch (version) { + case Version::Current: return PETSIRDWriterBase::schema_; break; + default: throw std::runtime_error("The version does not correspond to any schema supported by protocol PETSIRD."); + } + +} +void PETSIRDWriterBase::WriteHeader(petsird::Header const& value) { + if (unlikely(state_ != 0)) { + PETSIRDWriterBaseInvalidState(0, false, state_); + } + + WriteHeaderImpl(value); + state_ = 1; +} + +void PETSIRDWriterBase::WriteTimeBlocks(petsird::TimeBlock const& value) { + if (unlikely(state_ != 1)) { + PETSIRDWriterBaseInvalidState(1, false, state_); + } + + WriteTimeBlocksImpl(value); +} + +void PETSIRDWriterBase::WriteTimeBlocks(std::vector const& values) { + if (unlikely(state_ != 1)) { + PETSIRDWriterBaseInvalidState(1, false, state_); + } + + WriteTimeBlocksImpl(values); +} + +void PETSIRDWriterBase::EndTimeBlocks() { + if (unlikely(state_ != 1)) { + PETSIRDWriterBaseInvalidState(1, true, state_); + } + + EndTimeBlocksImpl(); + state_ = 2; +} + +// fallback implementation +void PETSIRDWriterBase::WriteTimeBlocksImpl(std::vector const& values) { + for (auto const& v : values) { + WriteTimeBlocksImpl(v); + } +} + +void PETSIRDWriterBase::Close() { + if (unlikely(state_ != 2)) { + PETSIRDWriterBaseInvalidState(2, false, state_); + } + + CloseImpl(); +} + +std::string PETSIRDReaderBase::schema_ = PETSIRDWriterBase::schema_; + +std::vector PETSIRDReaderBase::previous_schemas_ = PETSIRDWriterBase::previous_schemas_; + +Version PETSIRDReaderBase::VersionFromSchema(std::string const& 
schema) { + if (schema == PETSIRDWriterBase::schema_) { + return Version::Current; + } + throw std::runtime_error("The schema does not match any version supported by protocol PETSIRD."); +} +void PETSIRDReaderBase::ReadHeader(petsird::Header& value) { + if (unlikely(state_ != 0)) { + PETSIRDReaderBaseInvalidState(0, state_); + } + + ReadHeaderImpl(value); + state_ = 2; +} + +bool PETSIRDReaderBase::ReadTimeBlocks(petsird::TimeBlock& value) { + if (unlikely(state_ != 2)) { + if (state_ == 3) { + state_ = 4; + return false; + } + PETSIRDReaderBaseInvalidState(2, state_); + } + + bool result = ReadTimeBlocksImpl(value); + if (!result) { + state_ = 4; + } + return result; +} + +bool PETSIRDReaderBase::ReadTimeBlocks(std::vector& values) { + if (values.capacity() == 0) { + throw std::runtime_error("vector must have a nonzero capacity."); + } + if (unlikely(state_ != 2)) { + if (state_ == 3) { + state_ = 4; + values.clear(); + return false; + } + PETSIRDReaderBaseInvalidState(2, state_); + } + + if (!ReadTimeBlocksImpl(values)) { + state_ = 3; + return values.size() > 0; + } + return true; +} + +// fallback implementation +bool PETSIRDReaderBase::ReadTimeBlocksImpl(std::vector& values) { + size_t i = 0; + while (true) { + if (i == values.size()) { + values.resize(i + 1); + } + if (!ReadTimeBlocksImpl(values[i])) { + values.resize(i); + return false; + } + i++; + if (i == values.capacity()) { + return true; + } + } +} + +void PETSIRDReaderBase::Close() { + if (unlikely(state_ != 4)) { + if (state_ == 3) { + state_ = 4; + } else { + PETSIRDReaderBaseInvalidState(4, state_); + } + } + + CloseImpl(); +} +void PETSIRDReaderBase::CopyTo(PETSIRDWriterBase& writer, size_t time_blocks_buffer_size) { + { + petsird::Header value; + ReadHeader(value); + writer.WriteHeader(value); + } + if (time_blocks_buffer_size > 1) { + std::vector values; + values.reserve(time_blocks_buffer_size); + while(ReadTimeBlocks(values)) { + writer.WriteTimeBlocks(values); + } + writer.EndTimeBlocks(); + 
} else { + petsird::TimeBlock value; + while(ReadTimeBlocks(value)) { + writer.WriteTimeBlocks(value); + } + writer.EndTimeBlocks(); + } +} +} // namespace petsird diff --git a/cpp/generated/protocols.h b/cpp/generated/protocols.h new file mode 100644 index 0000000..fabafe7 --- /dev/null +++ b/cpp/generated/protocols.h @@ -0,0 +1,89 @@ +// This file was generated by the "yardl" tool. DO NOT EDIT. + +#pragma once +#include "types.h" + +namespace petsird { +enum class Version { + Current +}; +// Abstract writer for the PETSIRD protocol. +// Definition of the stream of data +class PETSIRDWriterBase { + public: + // Ordinal 0. + void WriteHeader(petsird::Header const& value); + + // Ordinal 1. + // Call this method for each element of the `timeBlocks` stream, then call `EndTimeBlocks() when done.` + void WriteTimeBlocks(petsird::TimeBlock const& value); + + // Ordinal 1. + // Call this method to write many values to the `timeBlocks` stream, then call `EndTimeBlocks()` when done. + void WriteTimeBlocks(std::vector const& values); + + // Marks the end of the `timeBlocks` stream. + void EndTimeBlocks(); + + // Optionaly close this writer before destructing. Validates that all steps were completed. + void Close(); + + virtual ~PETSIRDWriterBase() = default; + + // Flushes all buffered data. + virtual void Flush() {} + + protected: + virtual void WriteHeaderImpl(petsird::Header const& value) = 0; + virtual void WriteTimeBlocksImpl(petsird::TimeBlock const& value) = 0; + virtual void WriteTimeBlocksImpl(std::vector const& value); + virtual void EndTimeBlocksImpl() = 0; + virtual void CloseImpl() {} + + static std::string schema_; + + static std::vector previous_schemas_; + + static std::string SchemaFromVersion(Version version); + + private: + uint8_t state_ = 0; + + friend class PETSIRDReaderBase; +}; + +// Abstract reader for the PETSIRD protocol. +// Definition of the stream of data +class PETSIRDReaderBase { + public: + // Ordinal 0. 
+ void ReadHeader(petsird::Header& value); + + // Ordinal 1. + [[nodiscard]] bool ReadTimeBlocks(petsird::TimeBlock& value); + + // Ordinal 1. + [[nodiscard]] bool ReadTimeBlocks(std::vector& values); + + // Optionaly close this writer before destructing. Validates that all steps were completely read. + void Close(); + + void CopyTo(PETSIRDWriterBase& writer, size_t time_blocks_buffer_size = 1); + + virtual ~PETSIRDReaderBase() = default; + + protected: + virtual void ReadHeaderImpl(petsird::Header& value) = 0; + virtual bool ReadTimeBlocksImpl(petsird::TimeBlock& value) = 0; + virtual bool ReadTimeBlocksImpl(std::vector& values); + virtual void CloseImpl() {} + static std::string schema_; + + static std::vector previous_schemas_; + + static Version VersionFromSchema(const std::string& schema); + + private: + uint8_t state_ = 0; +}; +} // namespace petsird diff --git a/cpp/generated/types.cc b/cpp/generated/types.cc new file mode 100644 index 0000000..ce5cab8 --- /dev/null +++ b/cpp/generated/types.cc @@ -0,0 +1,6 @@ +// This file was generated by the "yardl" tool. DO NOT EDIT. + +#include "types.h" +namespace petsird { +} // namespace petsird + diff --git a/cpp/generated/types.h b/cpp/generated/types.h new file mode 100644 index 0000000..6f0ecd7 --- /dev/null +++ b/cpp/generated/types.h @@ -0,0 +1,579 @@ +// This file was generated by the "yardl" tool. DO NOT EDIT. + +#pragma once +#include +#include +#include +#include +#include +#include + +#include "yardl/yardl.h" + +namespace petsird { +// All information about a coincidence event specified as identifiers or indices (i.e. discretized). Indices start from 0. +// TODO: this might take up too much space, so some/all of these could be combined in a single index if necessary. 
+struct CoincidenceEvent { + // the pair of detector elements + std::array detector_ids{}; + // an index into the tofBinEdges field in the ScannerInformation + uint32_t tof_idx{}; + // a pair of indices into the energyBinEdges field in the ScannerInformation + std::array energy_indices{}; + + bool operator==(const CoincidenceEvent& other) const { + return detector_ids == other.detector_ids && + tof_idx == other.tof_idx && + energy_indices == other.energy_indices; + } + + bool operator!=(const CoincidenceEvent& other) const { + return !(*this == other); + } +}; + +// A shape filled with a uniform material +template +struct SolidVolume { + Shape shape{}; + // identifier referring to `ScannerInformation.bulkMaterials` list + uint32_t material_id{}; + + bool operator==(const SolidVolume& other) const { + return shape == other.shape && + material_id == other.material_id; + } + + bool operator!=(const SolidVolume& other) const { + return !(*this == other); + } +}; + +// 3D coordinates (in mm) +struct Coordinate { + yardl::FixedNDArray c{}; + + bool operator==(const Coordinate& other) const { + return c == other.c; + } + + bool operator!=(const Coordinate& other) const { + return !(*this == other); + } +}; + +// A box-shape specified by 8 corners (e.g. cuboid, wedge, etc.) +// TODO need to think about a clear definition of planes +// We do not want to have to check about intersection planes +// Potential mechanisms: +// - lexicographical ordering of corner coordinates? +// - first 4 coordinates give first plane, 5th and 6th need to define plane with first 2, etc. +struct BoxShape { + std::array corners{}; + + bool operator==(const BoxShape& other) const { + return corners == other.corners; + } + + bool operator!=(const BoxShape& other) const { + return !(*this == other); + } +}; + +using BoxSolidVolume = petsird::SolidVolume; + +// Annulus of certain thickness centered at [0,0,0] and oriented along the [0,0,1] axis +// in radians. 
An angle of 0 corresponds to the [1,0,0] axis, Pi/2 corresponds to the [0,1,0] axis. +struct AnnulusShape { + // inner radius (in mm) + float inner_radius{}; + // outer radius (in mm) + float outer_radius{}; + // thickness of the annulus, i.e. length along the axis (in mm) + float thickness{}; + // start-stop angle (in radians) + std::array angular_range{}; + + bool operator==(const AnnulusShape& other) const { + return inner_radius == other.inner_radius && + outer_radius == other.outer_radius && + thickness == other.thickness && + angular_range == other.angular_range; + } + + bool operator!=(const AnnulusShape& other) const { + return !(*this == other); + } +}; + +// Union of all possible shapes +using GeometricShape = std::variant; + +using GenericSolidVolume = petsird::SolidVolume; + +// Rigid transformation, encoded via homogenous transformation +// transformed_coord = matrix * [c, 1] (where [c,1] is a column vector) +// with `c` of type `Coordinate` +struct RigidTransformation { + yardl::FixedNDArray matrix{}; + + bool operator==(const RigidTransformation& other) const { + return matrix == other.matrix; + } + + bool operator!=(const RigidTransformation& other) const { + return !(*this == other); + } +}; + +// A list of identical objects at different locations +template +struct ReplicatedObject { + T object{}; + // list of transforms + // constraint: length >= 1 + std::vector transforms{}; + // list of unique ids for every replicated solid volume + // constraint: size(transforms) == size(ids) + std::vector ids{}; + + yardl::Size NumberOfObjects() const { + return transforms.size(); + } + + bool operator==(const ReplicatedObject& other) const { + return object == other.object && + transforms == other.transforms && + ids == other.ids; + } + + bool operator!=(const ReplicatedObject& other) const { + return !(*this == other); + } +}; + +// A list of identical SolidVolumes at different locations +using ReplicatedBoxSolidVolume = petsird::ReplicatedObject; + +// A 
list of identical SolidVolumes at different locations +using ReplicatedGenericSolidVolume = petsird::ReplicatedObject; + +// Top-level detector structure, consisting of one or more lists of detecting elements (or "crystals") +// This allows having different types of detecting elements (e.g. for phoswich detectors) +// TODO this could be made into a hierarchical structure +struct DetectorModule { + std::vector detecting_elements{}; + // list of unique ids for every replicated solid volume + // constraint: size(detectingElements) == size(detectingElementsIds) + std::vector detecting_element_ids{}; + // optional list describing shielding/optical reflectors etc + std::vector non_detecting_elements{}; + + bool operator==(const DetectorModule& other) const { + return detecting_elements == other.detecting_elements && + detecting_element_ids == other.detecting_element_ids && + non_detecting_elements == other.non_detecting_elements; + } + + bool operator!=(const DetectorModule& other) const { + return !(*this == other); + } +}; + +// A list of identical modules at different locations +using ReplicatedDetectorModule = petsird::ReplicatedObject; + +// Full definition of the geometry of the scanner, consisting of +// one of more types of modules replicated in space and (optional) other structures (e.g. 
side-shielding) +struct ScannerGeometry { + // list of different types of replicated modules + std::vector replicated_modules{}; + // list of unique ids for every replicated module + // constraint: size(replicated_modules) == size(ids) + std::vector ids{}; + // shielding etc + std::optional> non_detecting_volumes{}; + + bool operator==(const ScannerGeometry& other) const { + return replicated_modules == other.replicated_modules && + ids == other.ids && + non_detecting_volumes == other.non_detecting_volumes; + } + + bool operator!=(const ScannerGeometry& other) const { + return !(*this == other); + } +}; + +struct Subject { + std::optional name{}; + std::string id{}; + + bool operator==(const Subject& other) const { + return name == other.name && + id == other.id; + } + + bool operator!=(const Subject& other) const { + return !(*this == other); + } +}; + +struct Institution { + std::string name{}; + std::string address{}; + + bool operator==(const Institution& other) const { + return name == other.name && + address == other.address; + } + + bool operator!=(const Institution& other) const { + return !(*this == other); + } +}; + +// Items describing the exam (incomplete) +struct ExamInformation { + petsird::Subject subject{}; + petsird::Institution institution{}; + std::optional protocol{}; + std::optional start_of_acquisition{}; + + bool operator==(const ExamInformation& other) const { + return subject == other.subject && + institution == other.institution && + protocol == other.protocol && + start_of_acquisition == other.start_of_acquisition; + } + + bool operator!=(const ExamInformation& other) const { + return !(*this == other); + } +}; + +// 3D direction vector (normalized to 1) +struct Direction { + yardl::FixedNDArray c{}; + + bool operator==(const Direction& other) const { + return c == other.c; + } + + bool operator!=(const Direction& other) const { + return !(*this == other); + } +}; + +// Orthonormal matrix +// direction_of_first_axis = matrix * [1, 0 ,0] 
(as a column vector) +struct DirectionMatrix { + yardl::FixedNDArray matrix{}; + + bool operator==(const DirectionMatrix& other) const { + return matrix == other.matrix; + } + + bool operator!=(const DirectionMatrix& other) const { + return !(*this == other); + } +}; + +// Atom definition in terms of Z and A +struct Atom { + // A + uint32_t mass_number{}; + // Z + uint32_t atomic_number{}; + + bool operator==(const Atom& other) const { + return mass_number == other.mass_number && + atomic_number == other.atomic_number; + } + + bool operator!=(const Atom& other) const { + return !(*this == other); + } +}; + +// Specification of materials used in the scanner. +// TODO agree with vendors if this information can be supplied and to what accuracy +// Ideally this list should be reasonably accurate to be useful for Monte Carlo simulations, but can be approximate. +struct BulkMaterial { + // unique id that can be used to refer to the material in voxelised maps etc + uint32_t id{}; + // informative string, not standardised. + // Expected examples: + // detecting: BGO, LSO, LYSO, LaBr, GAGG, plastic + // non-detecting: tungsten, lead + std::string name{}; + // density of the material + // Units: g/cc + float density{}; + // List of atoms + std::vector atoms{}; + // List of massFractions for the atoms. + // constraint: sum of massFractions should be 1 + // constraint: size(atoms) == size(massFractions) + std::vector mass_fractions{}; + + bool operator==(const BulkMaterial& other) const { + return id == other.id && + name == other.name && + density == other.density && + atoms == other.atoms && + mass_fractions == other.mass_fractions; + } + + bool operator!=(const BulkMaterial& other) const { + return !(*this == other); + } +}; + +// Type definition for how to encode how the scanner handles multiple coincidences when recording the prompts. 
+// Due to various effects (such as high count rate, prompt gammas), it is possible that multiple single +// events are detected within the coincidence window. This type encodes some different ways +// that this multiple events are handled, and recorded in the coincidence stream. +enum class CoincidencePolicy { + // multiples will be rejected + kRejectMultiples = 0, + // multiples will be stored as a sequence of pairs, e.g. a triple leads to 3 pairs + kStoreMultiplesAsPairs = 1, + // other options, to be listed in the future + kOther = 2, +}; + +struct ScannerInformation { + std::string model_name{}; + // Geometric information for all detector elements + // All coordinates are in the PET gantry coordinate system. + petsird::ScannerGeometry scanner_geometry{}; + // List of materials present in the scanner geometry. The `material_id`s there will refer to the + // identifiers in this list below. + std::vector bulk_materials{}; + // Fixed transformation to reference location for this scanner. + // This field can be used to encode alignment with the CT or MRI gantry for instance. + // The transformation should convert from the PET gantry coordinate system to the reference. + // An empty field implies the identity transformation. + std::optional gantry_alignment{}; + // Edge information for TOF bins in mm (given as from first to last edge, so there is one more edge than the number of bins) + // 0 corresponds to the same arrival time. Negative numbers indicate that the first detector detected first. + // For instance, a coincidence event is stored as 2 detectorIds, denoting the arrival time at the first + // detector t1 and the arrival time at the second detector t2, we store (t1-t2)*c/2. 
+ // Note: for non-TOF scanners, this defines the coincidence window + // TODO: this currently assumes equal size for each TOF bin, but some scanners "stretch" TOF bins depending on length of LOR + yardl::NDArray tof_bin_edges{}; + // TOF resolution (as FWHM) in mm + // Scanner coincidence timing resolution (CTR) without tof-binning + float tof_resolution{}; + // Edge information for energy windows in keV (given as from first to last edge, so there is one more edge than the number of bins) + yardl::NDArray energy_bin_edges{}; + // FWHM of photopeak for incoming gamma of 511 keV, expressed as a ratio w.r.t. 511 + float energy_resolution_at_511{}; + // duration of each event time block in ms + uint32_t event_time_block_duration{}; + // Encode how the scanner handles multiple coincidences + petsird::CoincidencePolicy coincidence_policy{}; + + yardl::Size NumberOfTOFBins() const { + return yardl::size(tof_bin_edges) - 1ULL; + } + + yardl::Size NumberOfEnergyBins() const { + return yardl::size(energy_bin_edges) - 1ULL; + } + + bool operator==(const ScannerInformation& other) const { + return model_name == other.model_name && + scanner_geometry == other.scanner_geometry && + bulk_materials == other.bulk_materials && + gantry_alignment == other.gantry_alignment && + tof_bin_edges == other.tof_bin_edges && + tof_resolution == other.tof_resolution && + energy_bin_edges == other.energy_bin_edges && + energy_resolution_at_511 == other.energy_resolution_at_511 && + event_time_block_duration == other.event_time_block_duration && + coincidence_policy == other.coincidence_policy; + } + + bool operator!=(const ScannerInformation& other) const { + return !(*this == other); + } +}; + +struct Header { + petsird::ScannerInformation scanner{}; + std::optional exam{}; + + bool operator==(const Header& other) const { + return scanner == other.scanner && + exam == other.exam; + } + + bool operator!=(const Header& other) const { + return !(*this == other); + } +}; + +// All information 
about a triple event specified as identifiers or indices (i.e. discretized). +struct TripleEvent { + std::array detector_ids{}; + // timing differences (converted to mm) w.r.t. first event, stored as + // indices into the tofBinEdges field in the ScannerInformation + // Note: only 2, corresponding to the arrival time differences of the second and third detectorId + // listed w.r.t. the first detectorId + std::array tof_indices{}; + // indices for each single event into the energyBinEdges field in the ScannerInformation + std::array energy_indices{}; + + bool operator==(const TripleEvent& other) const { + return detector_ids == other.detector_ids && + tof_indices == other.tof_indices && + energy_indices == other.energy_indices; + } + + bool operator!=(const TripleEvent& other) const { + return !(*this == other); + } +}; + +struct EventTimeBlock { + // start time since ExamInformation.startOfAcquisition in ms + // Note: duration is given by ScannerInformation.eventTimeBlockDuration + uint32_t start{}; + // TODO encode end time? 
+ // list of prompts in this time block + // TODO might be better to use !array + std::vector prompt_events{}; + // optional list of delayed coincidences in this time block + std::optional> delayed_events{}; + // optional list of triple coincidences in this time block + std::optional> triple_events{}; + + bool operator==(const EventTimeBlock& other) const { + return start == other.start && + prompt_events == other.prompt_events && + delayed_events == other.delayed_events && + triple_events == other.triple_events; + } + + bool operator!=(const EventTimeBlock& other) const { + return !(*this == other); + } +}; + +struct ExternalSignalTimeBlock { + // start time since ExamInformation.startOfAcquisition in ms + uint32_t start{}; + // refer to ExternalSignalType.id + uint32_t signal_id{}; + // Note for triggers, this field is to be ignored + std::vector signal_values{}; + + bool operator==(const ExternalSignalTimeBlock& other) const { + return start == other.start && + signal_id == other.signal_id && + signal_values == other.signal_values; + } + + bool operator!=(const ExternalSignalTimeBlock& other) const { + return !(*this == other); + } +}; + +struct BedMovementTimeBlock { + // start time since ExamInformation.startOfAcquisition in ms + uint32_t start{}; + petsird::RigidTransformation transform{}; + + bool operator==(const BedMovementTimeBlock& other) const { + return start == other.start && + transform == other.transform; + } + + bool operator!=(const BedMovementTimeBlock& other) const { + return !(*this == other); + } +}; + +struct GantryMovementTimeBlock { + // start time since ExamInformation.startOfAcquisition in ms + uint32_t start{}; + petsird::RigidTransformation transform{}; + + bool operator==(const GantryMovementTimeBlock& other) const { + return start == other.start && + transform == other.transform; + } + + bool operator!=(const GantryMovementTimeBlock& other) const { + return !(*this == other); + } +}; + +// types of timeBlocks +// TODO more types could 
be needed +using TimeBlock = std::variant; + +enum class ExternalSignalTypeEnum { + kEcgTrace = 0, + kEcgTrigger = 1, + kRespTrace = 2, + kRespTrigger = 3, + kOtherMotionSignal = 4, + kOtherMotionTrigger = 5, + kExternalSync = 6, + // other options, to be listed in the future + kOther = 7, +}; + +struct ExternalSignalType { + petsird::ExternalSignalTypeEnum type{}; + std::string description{}; + uint32_t id{}; + + bool operator==(const ExternalSignalType& other) const { + return type == other.type && + description == other.description && + id == other.id; + } + + bool operator!=(const ExternalSignalType& other) const { + return !(*this == other); + } +}; + +// Time interval in milliseconds since start of acquisition +struct TimeInterval { + uint32_t start{}; + uint32_t stop{}; + + bool operator==(const TimeInterval& other) const { + return start == other.start && + stop == other.stop; + } + + bool operator!=(const TimeInterval& other) const { + return !(*this == other); + } +}; + +// A sequence of time intervals (could be consecutive) +struct TimeFrameInformation { + std::vector time_frames{}; + + yardl::Size NumberOfTimeFrames() const { + return time_frames.size(); + } + + bool operator==(const TimeFrameInformation& other) const { + return time_frames == other.time_frames; + } + + bool operator!=(const TimeFrameInformation& other) const { + return !(*this == other); + } +}; + +} // namespace petsird + diff --git a/cpp/generated/yardl/detail/binary/coded_stream.h b/cpp/generated/yardl/detail/binary/coded_stream.h new file mode 100644 index 0000000..7d439d2 --- /dev/null +++ b/cpp/generated/yardl/detail/binary/coded_stream.h @@ -0,0 +1,358 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +#pragma once + +#include +#include +#include +#include +#include +#include + +namespace yardl::binary { + +static int const MAX_VARINT32_BYTES = 5; +static int const MAX_VARINT64_BYTES = 10; + +/** + * An exception thrown when EOF is reached prematurely. + */ +class EndOfStreamException : public std::exception { + public: + char const* what() const noexcept override { return "Unexpected end of stream"; } +}; + +/** + * A buffered output stream that provides methods for writing integers in a + * compact form. Unsigned integers written using Protobuf "varint" encoding, + * and signed integers are first converted to unsigned using Protobuf's + * "zig-zag" encoding. + */ +class CodedOutputStream { + public: + CodedOutputStream(std::ostream& stream, size_t buffer_size = 65536) + : stream_(stream), + buffer_(buffer_size), + buffer_ptr_(buffer_.data()), + buffer_end_ptr_(buffer_ptr_ + buffer_.size()) { + } + + ~CodedOutputStream() { + Flush(); + } + + template && sizeof(T) == 1, bool> = true> + void WriteByte(T const& v) { + if (RemainingBufferSpace() == 0) { + FlushBuffer(); + } + + *buffer_ptr_++ = v; + } + + void WriteVarInt32(uint32_t const& value) { + if (RemainingBufferSpace() < MAX_VARINT32_BYTES) { + FlushBuffer(); + } + + WriteVarInt(value); + } + + void WriteVarInt32(int32_t const& value) { + WriteVarInt32(ZigZagEncode32(value)); + } + + template && + sizeof(T) == 8 && + std::is_unsigned_v, + bool> = true> + void WriteVarInt64(T const& value) { + if (RemainingBufferSpace() < MAX_VARINT64_BYTES) { + FlushBuffer(); + } + + WriteVarInt(value); + } + + void WriteVarInt64(int64_t const& value) { + WriteVarInt64(ZigZagEncode64(value)); + } + + template , bool> = true> + void WriteFixedInteger(T const& value) { + if (RemainingBufferSpace() < sizeof(value)) { + FlushBuffer(); + } + +#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ + memcpy(buffer_ptr_, &value, sizeof(value)); +#else + static_assert(false, "Unsupported byte order"); +#endif + + buffer_ptr_ += 
sizeof(value); + } + + void WriteBytes(void const* data, size_t size_in_bytes) { + while (true) { + const size_t remaining_buffer_space = RemainingBufferSpace(); + if (remaining_buffer_space >= size_in_bytes) { + memcpy(buffer_ptr_, data, size_in_bytes); + buffer_ptr_ += size_in_bytes; + break; + } + + if (remaining_buffer_space > 0) { + memcpy(buffer_ptr_, data, remaining_buffer_space); + buffer_ptr_ += remaining_buffer_space; + data = static_cast(data) + remaining_buffer_space; + size_in_bytes -= remaining_buffer_space; + } + + FlushBuffer(); + } + } + + void Flush() { + FlushBuffer(); + stream_.flush(); + } + + private: + size_t RemainingBufferSpace() { + assert(buffer_ptr_ <= buffer_end_ptr_); + return buffer_end_ptr_ - buffer_ptr_; + } + + template && std::is_unsigned_v, bool> = true> + void WriteVarInt(T value) { + while (value > 0x7F) { + *buffer_ptr_++ = static_cast(value) | 0x80; + value >>= 7; + } + + *buffer_ptr_++ = static_cast(value); + } + + static uint32_t ZigZagEncode32(int32_t v) { + return (static_cast(v) << 1) ^ static_cast(v >> 31); + } + + static uint64_t ZigZagEncode64(int64_t v) { + return (static_cast(v) << 1) ^ static_cast(v >> 63); + } + + void FlushBuffer() { + if (buffer_ptr_ == buffer_.data()) { + return; + } + + stream_.write(reinterpret_cast(const_cast(buffer_.data())), + buffer_ptr_ - buffer_.data()); + buffer_ptr_ = buffer_.data(); + if (stream_.bad()) { + throw std::runtime_error("Failed to write to stream"); + } + } + + std::ostream& stream_; + std::vector buffer_; + uint8_t* buffer_ptr_; + uint8_t* buffer_end_ptr_; +}; + +/** + * A buffered input stream that provides methods reading data written + * using a CodedOutputStream. 
+ */ +class CodedInputStream { + public: + CodedInputStream(std::istream& stream, size_t buffer_size = 65536) + : stream_(stream), + buffer_(buffer_size), + buffer_ptr_(buffer_.data()), + buffer_end_ptr_(buffer_ptr_) { + } + + public: + template && sizeof(T) == 1, bool> = true> + void ReadByte(T& v) { + if (buffer_ptr_ == buffer_end_ptr_) { + FillBuffer(); + } + v = *buffer_ptr_++; + } + + template , bool> = true> + void ReadFixedInteger(T& value) { + if (RemainingBufferSpace() < sizeof(value)) { + ReadFixedIntegerSlow(value); + } else { + ReadFixedIntegerFastFromArray(value, buffer_ptr_); + } + } + + void ReadVarInt32(uint32_t& value) { + if (RemainingBufferSpace() < MAX_VARINT32_BYTES) { + ReadVarIntegerSlow(value); + } else { + ReadVarIntegerFastFromArray(value, buffer_ptr_); + } + } + + void ReadVarInt32(int32_t& value) { + uint32_t v; + ReadVarInt32(v); + value = ZigZagDecode32(v); + } + + template && + sizeof(T) == 8 && + std::is_unsigned_v, + bool> = true> + void ReadVarInt64(T& value) { + if (RemainingBufferSpace() < MAX_VARINT64_BYTES) { + ReadVarIntegerSlow(value); + } else { + ReadVarIntegerFastFromArray(value, buffer_ptr_); + } + } + + void ReadVarInt64(int64_t& value) { + uint64_t v; + ReadVarInt64(v); + value = ZigZagDecode64(v); + } + + void ReadBytes(void* data, size_t size_in_bytes) { + uint8_t* uint8_data = static_cast(data); + while (size_in_bytes > 0) { + if (buffer_ptr_ == buffer_end_ptr_) { + FillBuffer(); + } + + size_t bytes_to_copy = std::min( + size_in_bytes, + static_cast(buffer_end_ptr_ - buffer_ptr_)); + memcpy(uint8_data, buffer_ptr_, bytes_to_copy); + buffer_ptr_ += bytes_to_copy; + uint8_data += bytes_to_copy; + size_in_bytes -= bytes_to_copy; + } + } + + void VerifyFinished() { + if (at_eof_) { + if (buffer_ptr_ == buffer_end_ptr_) { + return; + } + } else { + if (buffer_ptr_ == buffer_end_ptr_) { + FillBuffer(); + if (at_eof_ && buffer_ptr_ == buffer_end_ptr_) { + return; + } + } + } + + throw std::runtime_error("Stream was not 
completely read"); + } + + private: + template , bool> = true> + static void ReadFixedIntegerFastFromArray(T& value, uint8_t*& local_buffer_ptr) { +#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ + memcpy(&value, local_buffer_ptr, sizeof(value)); +#else + static_assert(false, "Unsupported byte order"); +#endif + + local_buffer_ptr += sizeof(value); + } + + template , bool> = true> + void ReadFixedIntegerSlow(T& value) { + if (buffer_ptr_ == buffer_end_ptr_) { + FillBuffer(); + ReadFixedIntegerFastFromArray(value, buffer_ptr_); + return; + } + + uint8_t bytes[sizeof(T)]; + ReadBytes(bytes, sizeof(T)); + uint8_t* bytes_ptr = bytes; + ReadFixedIntegerFastFromArray(value, bytes_ptr); + } + + template , bool> = true> + static void ReadVarIntegerFastFromArray(T& value, uint8_t*& local_buffer_ptr) { + value = 0; + int shift = 0; + while (true) { + uint8_t byte = *local_buffer_ptr++; + value |= static_cast(byte & 0x7F) << shift; + if ((byte & 0x80) == 0) { + break; + } + shift += 7; + } + } + + template , bool> = true> + void ReadVarIntegerSlow(T& value) { + if (buffer_ptr_ == buffer_end_ptr_) { + FillBuffer(); + ReadVarIntegerFastFromArray(value, buffer_ptr_); + return; + } + + value = 0; + int shift = 0; + while (true) { + if (buffer_ptr_ == buffer_end_ptr_) { + FillBuffer(); + } + uint8_t byte = *buffer_ptr_++; + value |= static_cast(byte & 0x7F) << shift; + if ((byte & 0x80) == 0) { + break; + } + shift += 7; + } + } + + static int32_t ZigZagDecode32(uint32_t n) { + return static_cast((n >> 1) ^ (~(n & 1) + 1)); + } + + static int64_t ZigZagDecode64(uint64_t n) { + return static_cast((n >> 1) ^ (~(n & 1) + 1)); + } + + size_t FillBuffer() { + if (at_eof_) { + throw EndOfStreamException(); + } + + stream_.read(reinterpret_cast(buffer_.data()), buffer_.size()); + at_eof_ = stream_.eof(); + auto bytes_read = stream_.gcount(); + buffer_ptr_ = buffer_.data(); + buffer_end_ptr_ = buffer_ptr_ + bytes_read; + return bytes_read; + } + + size_t RemainingBufferSpace() { + 
assert(buffer_ptr_ <= buffer_end_ptr_); + return buffer_end_ptr_ - buffer_ptr_; + } + + std::istream& stream_; + std::vector buffer_; + uint8_t* buffer_ptr_; + uint8_t* buffer_end_ptr_; + bool at_eof_ = false; +}; + +} // namespace yardl::binary diff --git a/cpp/generated/yardl/detail/binary/header.h b/cpp/generated/yardl/detail/binary/header.h new file mode 100644 index 0000000..9d0e8e2 --- /dev/null +++ b/cpp/generated/yardl/detail/binary/header.h @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +#pragma once + +#include "serializers.h" + +namespace yardl::binary { +static inline std::array MAGIC_BYTES = {'y', 'a', 'r', 'd', 'l'}; +static inline uint32_t kBinaryFormatVersionNumber = 1; + +inline void WriteHeader(CodedOutputStream& w, std::string const& schema) { + w.WriteBytes(MAGIC_BYTES.data(), MAGIC_BYTES.size()); + w.WriteFixedInteger(kBinaryFormatVersionNumber); + yardl::binary::WriteString(w, schema); +} + +inline std::string ReadHeader(CodedInputStream& r) { + std::array magic_bytes{}; + r.ReadBytes(magic_bytes.data(), magic_bytes.size()); + if (magic_bytes != MAGIC_BYTES) { + throw std::runtime_error("Data in the stream is not in the expected format."); + } + + uint32_t version_number; + r.ReadFixedInteger(version_number); + if (version_number != kBinaryFormatVersionNumber) { + throw std::runtime_error( + "Data in the stream is not in the expected format. Unsupported version."); + } + + std::string actual_schema; + yardl::binary::ReadString(r, actual_schema); + return actual_schema; +} + +} // namespace yardl::binary diff --git a/cpp/generated/yardl/detail/binary/reader_writer.h b/cpp/generated/yardl/detail/binary/reader_writer.h new file mode 100644 index 0000000..1f81118 --- /dev/null +++ b/cpp/generated/yardl/detail/binary/reader_writer.h @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +#pragma once + +#include +#include + +#include "header.h" + +namespace yardl::binary { +class BinaryWriter { + protected: + BinaryWriter(std::ostream& stream, std::string const& schema) + : stream_(stream) { + WriteHeader(stream_, schema); + } + + BinaryWriter(std::string file_name, std::string const& schema) + : owned_file_stream_(open_file(file_name)), stream_(*owned_file_stream_) { + WriteHeader(stream_, schema); + } + + private: + static std::unique_ptr open_file(std::string filename) { + auto file_stream = std::make_unique(filename, std::ios::binary | std::ios::out); + if (!file_stream->good()) { + throw std::runtime_error("Failed to open file for writing."); + } + + return file_stream; + } + + private: + std::unique_ptr owned_file_stream_{}; + + protected: + yardl::binary::CodedOutputStream stream_; +}; + +class BinaryReader { + protected: + BinaryReader(std::istream& stream) + : stream_(stream) { + schema_read_ = ReadHeader(stream_); + } + + BinaryReader(std::string file_name) + : owned_file_stream_(open_file(file_name)), stream_(*owned_file_stream_) { + schema_read_ = ReadHeader(stream_); + } + + private: + static std::unique_ptr open_file(std::string filename) { + auto file_stream = std::make_unique(filename, std::ios::binary | std::ios::in); + if (!file_stream->good()) { + throw std::runtime_error("Failed to open file for reading."); + } + + return file_stream; + } + + private: + std::unique_ptr owned_file_stream_{}; + + protected: + yardl::binary::CodedInputStream stream_; + std::string schema_read_; +}; + +} // namespace yardl::binary diff --git a/cpp/generated/yardl/detail/binary/serializers.h b/cpp/generated/yardl/detail/binary/serializers.h new file mode 100644 index 0000000..f188295 --- /dev/null +++ b/cpp/generated/yardl/detail/binary/serializers.h @@ -0,0 +1,486 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +#include "../../yardl.h" +#include "coded_stream.h" + +namespace yardl::binary { + +/** + * @brief Function pointer type for writing a single value to a stream. + * + * @tparam T the type of the value to write + */ +template +using Writer = void (*)(CodedOutputStream& stream, T const& value); + +/** + * @brief Function pointer type for reading a single value from a stream. + * + * @tparam T the type of the value to read + */ +template +using Reader = void (*)(CodedInputStream& stream, T& value); + +/** + * If T can be serialized using a simple memcpy, provides the member + * constant value equal to true. Otherwise value is false. + */ +template +struct IsTriviallySerializable + : std::false_type { +}; + +template +struct IsTriviallySerializable && + sizeof(T) == 1>> + : std::true_type { +}; + +#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ +template +struct IsTriviallySerializable>> + : std::true_type { +}; + +template +struct IsTriviallySerializable> || + std::is_same_v>>> + : std::true_type { +}; +#endif + +template +struct IsTriviallySerializable, + typename std::enable_if_t::value>> + : std::true_type { +}; + +template +struct IsTriviallySerializable, + typename std::enable_if_t::value>> + : std::true_type { +}; + +template +inline void WriteTriviallySerializable(CodedOutputStream& stream, T const& value) { + static_assert(IsTriviallySerializable::value, "T must be trivially serializable"); + stream.WriteBytes(reinterpret_cast(std::addressof(value)), sizeof(value)); +} + +template +inline void ReadTriviallySerializable(CodedInputStream& stream, T& value) { + static_assert(IsTriviallySerializable::value, "T must be trivially serializable"); + stream.ReadBytes(reinterpret_cast(std::addressof(value)), sizeof(value)); +} + +template && sizeof(T) == 1, bool> = true> +inline void WriteInteger(CodedOutputStream& stream, T const& value) { + stream.WriteByte(value); +} + +template 
&& sizeof(T) == 1, bool> = true> +inline void ReadInteger(CodedInputStream& stream, T& value) { + stream.ReadByte(value); +} + +template && sizeof(T) == 2, bool> = true> +inline void WriteInteger(CodedOutputStream& stream, T const& value) { + if constexpr (std::is_signed_v) { + stream.WriteVarInt32(static_cast(value)); + } else { + stream.WriteVarInt32(static_cast(value)); + } +} + +template && sizeof(T) == 2, bool> = true> +inline void ReadInteger(CodedInputStream& stream, T& value) { + if constexpr (std::is_signed_v) { + int32_t val; + stream.ReadVarInt32(val); + value = val; + } else { + uint32_t val; + stream.ReadVarInt32(val); + value = val; + } +} + +template && sizeof(T) == 4, bool> = true> +inline void WriteInteger(CodedOutputStream& stream, T const& value) { + stream.WriteVarInt32(value); +} + +template && sizeof(T) == 4, bool> = true> +inline void ReadInteger(CodedInputStream& stream, T& value) { + stream.ReadVarInt32(value); +} + +template && sizeof(T) == 8, bool> = true> +inline void WriteInteger(CodedOutputStream& stream, T const& value) { + stream.WriteVarInt64(value); +} + +template && sizeof(T) == 8, bool> = true> +inline void ReadInteger(CodedInputStream& stream, T& value) { + stream.ReadVarInt64(value); +} + +template || + std::is_same_v> || + std::is_same_v>, + bool> = true> +inline void WriteFloatingPoint(CodedOutputStream& stream, T const& value) { + WriteTriviallySerializable(stream, value); +} + +template || + std::is_same_v> || + std::is_same_v>, + bool> = true> +inline void ReadFloatingPoint(CodedInputStream& stream, T& value) { + ReadTriviallySerializable(stream, value); +} + +inline void WriteString(CodedOutputStream& stream, std::string const& value) { + WriteInteger(stream, value.size()); + stream.WriteBytes(value.data(), value.size()); +} + +inline void ReadString(CodedInputStream& stream, std::string& value) { + size_t size; + ReadInteger(stream, size); + value.resize(size); + stream.ReadBytes(value.data(), size); +} + +inline void 
WriteDate(CodedOutputStream& stream, yardl::Date const& value) { + auto days = value.time_since_epoch().count(); + WriteInteger(stream, days); +} + +inline void ReadDate(CodedInputStream& stream, yardl::Date& value) { + int64_t days; + ReadInteger(stream, days); + value = yardl::Date(date::days(days)); +} + +inline void WriteTime(CodedOutputStream& stream, yardl::Time const& value) { + WriteInteger(stream, value.count()); +} + +inline void ReadTime(CodedInputStream& stream, yardl::Time& value) { + int64_t count; + ReadInteger(stream, count); + value = yardl::Time(count); +} + +inline void WriteDateTime(CodedOutputStream& stream, yardl::DateTime const& value) { + auto ns = value.time_since_epoch().count(); + WriteInteger(stream, ns); +} + +inline void ReadDateTime(CodedInputStream& stream, yardl::DateTime& value) { + int64_t ns; + ReadInteger(stream, ns); + value = yardl::DateTime{ + std::chrono::time_point(std::chrono::nanoseconds(ns))}; +} + +template WriteElement> +inline void WriteOptional(CodedOutputStream& stream, std::optional const& value) { + stream.WriteByte(value.has_value()); + if (value.has_value()) { + WriteElement(stream, value.value()); + } +} + +template ReadElement> +inline void ReadOptional(CodedInputStream& stream, std::optional& value) { + bool has_value; + stream.ReadByte(has_value); + if (has_value) { + T tmp; + ReadElement(stream, tmp); + value = std::move(tmp); + } else { + value = std::nullopt; + } +} + +template WriteElement> +inline void WriteVector(CodedOutputStream& stream, std::vector const& value) { + WriteInteger(stream, value.size()); + + if constexpr (IsTriviallySerializable::value) { + stream.WriteBytes(value.data(), value.size() * sizeof(T)); + return; + } + + for (auto const& element : value) { + WriteElement(stream, element); + } +} + +template ReadElement> +inline void ReadVector(CodedInputStream& stream, std::vector& value) { + uint64_t size; + ReadInteger(stream, size); + value.resize(size); + + if constexpr 
(IsTriviallySerializable::value) { + stream.ReadBytes(value.data(), value.size() * sizeof(T)); + return; + } + + for (size_t i = 0; i < size; i++) { + ReadElement(stream, value[i]); + } +} + +template WriteElement, size_t N> +inline void WriteArray(CodedOutputStream& stream, std::array const& value) { + if constexpr (IsTriviallySerializable::value) { + stream.WriteBytes(value.data(), value.size() * sizeof(T)); + return; + } + + for (size_t i = 0; i < N; i++) { + WriteElement(stream, value[i]); + } +} + +template ReadElement, size_t N> +inline void ReadArray(CodedInputStream& stream, std::array& value) { + if constexpr (IsTriviallySerializable::value) { + stream.ReadBytes(value.data(), value.size() * sizeof(T)); + return; + } + + for (size_t i = 0; i < N; i++) { + ReadElement(stream, value[i]); + } +} + +template WriteElement> +inline void WriteDynamicNDArray(CodedOutputStream& stream, yardl::DynamicNDArray const& value) { + auto shape = yardl::shape(value); + WriteInteger(stream, shape.size()); + for (auto const& dim : shape) { + WriteInteger(stream, dim); + } + + if constexpr (IsTriviallySerializable::value) { + stream.WriteBytes(yardl::dataptr(value), yardl::size(value) * sizeof(T)); + return; + } + + for (auto const& element : value) { + WriteElement(stream, element); + } +} + +template ReadElement> +inline void ReadDynamicNDArray(CodedInputStream& stream, yardl::DynamicNDArray& value) { + std::vector shape; + ReadVector(stream, shape); + yardl::resize(value, shape); + + if constexpr (IsTriviallySerializable::value) { + stream.ReadBytes(yardl::dataptr(value), yardl::size(value) * sizeof(T)); + return; + } + + for (auto& element : value) { + ReadElement(stream, element); + } +} + +template WriteElement, size_t N> +inline void WriteNDArray(CodedOutputStream& stream, yardl::NDArray const& value) { + for (auto const& dim : yardl::shape(value)) { + WriteInteger(stream, dim); + } + + if constexpr (IsTriviallySerializable::value) { + 
stream.WriteBytes(yardl::dataptr(value), yardl::size(value) * sizeof(T)); + return; + } + + for (auto const& element : value) { + WriteElement(stream, element); + } +} + +template ReadElement, size_t N> +inline void ReadNDArray(CodedInputStream& stream, yardl::NDArray& value) { + std::array shape; + ReadArray(stream, shape); + yardl::resize(value, shape); + + if constexpr (IsTriviallySerializable::value) { + stream.ReadBytes(yardl::dataptr(value), yardl::size(value) * sizeof(T)); + return; + } + + for (auto& element : value) { + ReadElement(stream, element); + } +} + +template WriteElement, size_t... Dims> +inline void WriteFixedNDArray(CodedOutputStream& stream, + yardl::FixedNDArray const& value) { + if constexpr (IsTriviallySerializable::value) { + stream.WriteBytes(yardl::dataptr(value), yardl::size(value) * sizeof(T)); + return; + } + + for (auto const& element : value) { + WriteElement(stream, element); + } +} + +template ReadElement, size_t... Dims> +inline void ReadFixedNDArray(CodedInputStream& stream, yardl::FixedNDArray& value) { + if constexpr (IsTriviallySerializable::value) { + stream.ReadBytes(yardl::dataptr(value), yardl::size(value) * sizeof(T)); + return; + } + + for (auto& element : value) { + ReadElement(stream, element); + } +} + +template WriteKey, Writer WriteValue> +inline void WriteMap(CodedOutputStream& stream, std::unordered_map const& value) { + WriteInteger(stream, value.size()); + for (auto const& [key, value] : value) { + WriteKey(stream, key); + WriteValue(stream, value); + } +} + +template ReadKey, Reader ReadValue> +inline void ReadMap(CodedInputStream& stream, std::unordered_map& value) { + uint64_t size; + ReadInteger(stream, size); + + for (size_t i = 0; i < size; i++) { + TKey k; + ReadKey(stream, k); + TValue v; + ReadValue(stream, v); + value.emplace(std::move(k), std::move(v)); + } +} + +inline void WriteMonostate([[maybe_unused]] CodedOutputStream& stream, + [[maybe_unused]] std::monostate const& value) { +} + +inline void 
ReadMonostate([[maybe_unused]] CodedInputStream& stream, + [[maybe_unused]] std::monostate& value) { +} + +template +inline void WriteEnum(CodedOutputStream& stream, T const& value) { + using underlying_type = std::underlying_type_t; + underlying_type underlying_value = static_cast(value); + yardl::binary::WriteInteger(stream, underlying_value); +} + +template +inline void ReadEnum(CodedInputStream& stream, T& value) { + using underlying_type = std::underlying_type_t; + underlying_type underlying_value; + yardl::binary::ReadInteger(stream, underlying_value); + value = static_cast(underlying_value); +} + +template +inline void WriteFlags(CodedOutputStream& stream, T const& value) { + yardl::binary::WriteInteger(stream, value.Value()); +} + +template +inline void ReadFlags(CodedInputStream& stream, T& value) { + using underlying_type = typename T::value_type; + underlying_type underlying_value; + yardl::binary::ReadInteger(stream, underlying_value); + value = underlying_value; +} + +template WriteElement> +inline void WriteBlock(CodedOutputStream& stream, T const& source) { + WriteInteger(stream, 1U); + WriteElement(stream, source); +} + +template ReadElement> +inline bool ReadBlock(CodedInputStream& stream, size_t& current_block_remaining, T& destination) { + if (current_block_remaining == 0) { + ReadInteger(stream, current_block_remaining); + if (current_block_remaining == 0) { + return false; + } + } + + ReadElement(stream, destination); + current_block_remaining--; + return true; +} + +template ReadElement> +inline void ReadBlocksIntoVector(CodedInputStream& stream, size_t& current_block_remaining, std::vector& destination) { + if (current_block_remaining == 0) { + ReadInteger(stream, current_block_remaining); + } + + size_t offset = 0; + size_t remaining_capacity = destination.capacity(); + while (current_block_remaining > 0) { + size_t read_count = std::min(current_block_remaining, remaining_capacity); + if (read_count + offset > destination.size()) { + 
destination.resize(offset + read_count); + } + + if constexpr (IsTriviallySerializable::value) { + stream.ReadBytes(destination.data() + offset, read_count * sizeof(T)); + } else { + for (size_t i = 0; i < read_count; i++) { + ReadElement(stream, destination[offset + i]); + } + } + + current_block_remaining -= read_count; + offset += read_count; + remaining_capacity -= read_count; + + if (current_block_remaining == 0) { + ReadInteger(stream, current_block_remaining); + } + + if (remaining_capacity == 0) { + return; + } + } + + if (remaining_capacity > 0) { + destination.resize(offset); + } +} +} // namespace yardl::binary diff --git a/cpp/generated/yardl/detail/hdf5/ddl.h b/cpp/generated/yardl/detail/hdf5/ddl.h new file mode 100644 index 0000000..8bf8d5c --- /dev/null +++ b/cpp/generated/yardl/detail/hdf5/ddl.h @@ -0,0 +1,195 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "../../yardl.h" +#include "inner_types.h" + +// Helper functions for defining HDF5 datatypes. + +namespace yardl::hdf5 { + +/** + * @brief Returns the HDF5 type for yardl::Size. + */ +static inline H5::PredType const& SizeTypeDdl() { + static_assert(sizeof(hsize_t) == sizeof(size_t)); + static_assert(std::is_signed_v == std::is_signed_v); + return H5::PredType::NATIVE_HSIZE; +} + +/** + * @brief Creates an HDF5 type for dates. These are stored as an integer number + * of days since the epoch. + */ +static inline H5::DataType DateTypeDdl() { + static_assert(sizeof(yardl::Date) == sizeof(int32_t)); + static_assert(std::is_same_v); + return H5::PredType::NATIVE_INT32; +} + +/** + * @brief Creates an HDF5 type for times. These are stored as an int32 number + * of nanoseconds since midnight. 
+ */ +static inline H5::DataType TimeTypeDdl() { + static_assert(sizeof(yardl::Time) == sizeof(int64_t)); + static_assert(std::is_same_v); + return H5::PredType::NATIVE_INT64; +} + +/** + * @brief Creates an HDF5 type for datetimes. These are stored as an int64 + * number of nanoseconds since the epoch, ignoring leap seconds + */ +static inline H5::DataType DateTimeTypeDdl() { + static_assert(sizeof(yardl::DateTime) == sizeof(int64_t)); + static_assert(std::is_same_v); + return H5::PredType::NATIVE_INT64; +} + +/** + * @brief Creates an HDF5 optional type. + */ +template +H5::CompType OptionalTypeDdl(H5::DataType const& value_type) { + using InnerOptionalType = InnerOptional; + H5::CompType type(sizeof(InnerOptionalType)); + type.insertMember("has_value", HOFFSET(InnerOptionalType, has_value), H5::PredType::NATIVE_HBOOL); + type.insertMember("value", HOFFSET(InnerOptionalType, value), value_type); + return type; +} + +/** + * @brief Creates an HDF5 v-len data type. + */ +static inline H5::VarLenType InnerVlenDdl(H5::DataType const& element_type) { + return H5::VarLenType(element_type); +} + +/** + * @brief Creates an HDF5 data type for an std::unordered_map + */ +template +static inline H5::VarLenType InnerMapDdl(H5::DataType const& key_type, H5::DataType const& value_type) { + using InnerMapPairType = std::pair; + auto pair_type = H5::CompType(sizeof(InnerMapPairType)); + pair_type.insertMember("key", HOFFSET(InnerMapPairType, first), key_type); + pair_type.insertMember("value", HOFFSET(InnerMapPairType, second), value_type); + return H5::VarLenType(pair_type); +} + +/** + * @brief Creates a compund datatype for std::complex + */ +template +H5::CompType ComplexTypeDdl() { + H5::DataType inner_type; + if constexpr (std::is_same_v) { + inner_type = H5::PredType::NATIVE_FLOAT; + } else { + static_assert(std::is_same_v, "Unsupported type parameter"); + inner_type = H5::PredType::NATIVE_DOUBLE; + } + + H5::CompType type(sizeof(std::complex)); + 
type.insertMember("real", 0, inner_type); + type.insertMember("imaginary", sizeof(T), inner_type); + return type; +} + +/** + * @brief Creates a variable-length string datatype. + */ +static inline H5::StrType InnerVlenStringDdl() { + static_assert(sizeof(InnerVlenString) == sizeof(char*)); + H5::StrType type(0, H5T_VARIABLE); + type.setCset(H5T_CSET_UTF8); + return type; +} + +/** + * @brief Creates a datatype for a fixed-length vector. + */ +static inline H5::ArrayType FixedVectorDdl(H5::DataType const& element_type, hsize_t length) { + hsize_t size = length; + return H5::ArrayType(element_type, 1, &size); +} + +/** + * @brief Creates a datatype for a fixed-size NDArray + */ +static inline H5::ArrayType FixedNDArrayDdl(H5::DataType const& element_type, + std::initializer_list dimensions) { + return H5::ArrayType(element_type, static_cast(dimensions.size()), std::data(dimensions)); +} + +/** + * @brief Creates a datatype for an NDArray with a known number of dimensions. + */ +template +H5::CompType NDArrayDdl(H5::DataType const& element_type) { + using ArrayType = InnerNdArray; + H5::CompType compType(sizeof(ArrayType)); + hsize_t dims = N; + compType.insertMember("dimensions", HOFFSET(ArrayType, dimensions_), + H5::ArrayType(H5::PredType::NATIVE_UINT64, 1, &dims)); + compType.insertMember("data", HOFFSET(ArrayType, data_), H5::VarLenType(element_type)); + + assert(H5::PredType::NATIVE_UINT64.getSize() == sizeof(size_t)); + return compType; +} + +/** + * @brief Creates a datatype for DynamicNDArray (unknown number of dimensions) + */ +template +H5::CompType DynamicNDArrayDdl(H5::DataType const& element_type) { + using ArrayType = InnerDynamicNdArray; + H5::CompType compType(sizeof(ArrayType)); + compType.insertMember("dimensions", HOFFSET(ArrayType, dimensions_), + H5::VarLenType(H5::PredType::NATIVE_UINT64)); + compType.insertMember("data", HOFFSET(ArrayType, data_), H5::VarLenType(element_type)); + return compType; +} + +template +H5::EnumType 
UnionTypeEnumDdl(bool nullable, Tags const&... labels) { + H5::EnumType type_enum(H5::PredType::NATIVE_INT8); + int8_t type_value = -1; + if (nullable) { + type_enum.insert("null", &type_value); + } + + ((type_value++, type_enum.insert(labels, &type_value)), ...); + + return type_enum; +} + +struct IndexEntry { + int8_t type_; + uint64_t offset_; +}; + +static inline H5::CompType UnionIndexDatasetElementTypeDdl(H5::EnumType type_enum) { + H5::CompType element_type(sizeof(IndexEntry)); + element_type.insertMember("type", HOFFSET(IndexEntry, type_), type_enum); + element_type.insertMember("offset", HOFFSET(IndexEntry, offset_), H5::PredType::NATIVE_UINT64); + return element_type; +} + +} // namespace yardl::hdf5 diff --git a/cpp/generated/yardl/detail/hdf5/inner_types.h b/cpp/generated/yardl/detail/hdf5/inner_types.h new file mode 100644 index 0000000..30f730d --- /dev/null +++ b/cpp/generated/yardl/detail/hdf5/inner_types.h @@ -0,0 +1,513 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "../../yardl.h" + +// HDF5 cannot handle c++ containers like std::vector, so we make use of the +// templates in this file to represent data in a form that is compatible with HDF5. +// If the normal "outer" type is trivially copyable and does not contain +// unsupported constructs, is does not need an inner type. +// The types below generally have a constructor that takes a corresponding +// "outer" type, and override the conversion opertator back to that type. +namespace yardl::hdf5 { + +/** + * If TInner is the same as TOuter, assigns outer to inner. + * Otherwise, calls inner.ToOuter(outer). 
+ */ +template +static inline void ToOuter(TInner const& inner, TOuter& outer) { + if constexpr (std::is_same_v) { + outer = inner; + } else { + inner.ToOuter(outer); + } +} + +static inline void* MallocOrThrow(size_t size) { + auto res = std::malloc(size); + if (!res) { + throw std::bad_alloc{}; + } + + return res; +} + +/** + * @brief An HDF5-compatible representation of std::optional. T + * std::optional does not document the inner layout so we we can't use it directly. + */ +template +struct InnerOptional { + InnerOptional() { + } + + InnerOptional(std::optional const& o) { + if (o.has_value()) { + has_value = true; + if constexpr (std::is_same_v) { + value = o.value(); + } else { + new (&value) TInner(o.value()); + } + } + } + + ~InnerOptional() { + if (has_value) { + value.~TInner(); + } + } + + void ToOuter(std::optional& o) const { + if (has_value) { + if constexpr (std::is_same_v) { + o.emplace(value); + } else { + o.emplace(); + value.ToOuter(*o); + } + } else { + o.reset(); + } + } + + union { + char empty[sizeof(TInner)]{}; + TInner value; + }; + + bool has_value = false; +}; + +/** + * @brief An HDF5-compatible representation of variable-length + * std::vector and NDArray + */ +template +struct InnerVlen : public hvl_t { + InnerVlen() : hvl_t{0, nullptr} { + } + + InnerVlen(std::vector const& v) + : hvl_t{v.size(), MallocOrThrow(v.size() * sizeof(TInner))} { + if constexpr (std::is_same_v) { + // TODO: we could avoid this copy by having separate read/write types + std::memcpy(p, const_cast(v.data()), len * sizeof(TInner)); + } else { + for (size_t i = 0; i < len; i++) { + auto dest = (TInner*)p + i; + new (dest) TInner(v[i]); + } + } + } + + InnerVlen(NDArray const& o) + : hvl_t{yardl::size(o), MallocOrThrow(yardl::size(o) * sizeof(TInner))} { + if constexpr (std::is_same_v) { + static_assert(std::is_trivially_copyable_v); + std::memcpy(p, yardl::dataptr(o), len * sizeof(TInner)); + } else { + auto o_iter = std::begin(o); + auto p_ptr = static_cast(p); 
+ for (size_t i = 0; i < len; i++) { + new (p_ptr++) TInner(*o_iter++); + } + } + } + + InnerVlen(InnerVlen const&) = delete; + + ~InnerVlen() { + if (p != nullptr) { + if constexpr (!std::is_trivially_destructible_v) { + for (size_t i = 0; i < len; i++) { + auto inner_object = (TInner*)p + i; + inner_object->~TInner(); + } + } + + free(p); + p = nullptr; + len = 0; + } + } + + InnerVlen& operator=(InnerVlen const&) = delete; + + void ToOuter(std::vector& v) const { + v.resize(len); + if (len > 0) { + if constexpr (std::is_same_v) { + static_assert(std::is_trivially_copyable_v); + std::memcpy(v.data(), p, len * sizeof(TOuter)); + } else { + TInner* inner_objects = static_cast(p); + auto v_iter = v.begin(); + for (size_t i = 0; i < len; i++) { + TInner& inner_object = inner_objects[i]; + inner_object.ToOuter(*v_iter++); + } + } + } + } + + void ToOuter(NDArray& o) const { + std::array shape = {len}; + yardl::resize(o, shape); + if (len > 0) { + if constexpr (std::is_same_v) { + static_assert(std::is_trivially_copyable_v); + std::memcpy(yardl::dataptr(o), static_cast(p), len * sizeof(TOuter)); + } else { + TInner* inner_objects = static_cast(p); + auto o_iter = std::begin(o); + for (size_t i = 0; i < len; i++) { + TInner& inner_object = inner_objects[i]; + inner_object.ToOuter(*o_iter++); + } + } + } + } +}; + +/** + * @brief An HDF5-compatible representation of variable-length std::string + */ +struct InnerVlenString { + InnerVlenString() : c_str(nullptr) {} + InnerVlenString(std::string s) { + c_str = static_cast(MallocOrThrow((s.size() + 1) * sizeof(std::string::value_type))); + std::memcpy(c_str, s.c_str(), (s.size() + 1) * sizeof(std::string::value_type)); + } + + ~InnerVlenString() { + free(c_str); + c_str = nullptr; + } + + InnerVlenString(InnerVlenString const&) = delete; + + InnerVlenString& operator=(InnerVlenString const& other) = delete; + + void ToOuter(std::string& s) const { + s = c_str; + } + + char* c_str; +}; + +/** + * @brief An HDF5-compatible 
representation of a fixed-size vector (std::array). + */ +template +class InnerFixedVector : public std::array { + public: + InnerFixedVector() {} + InnerFixedVector(std::array const& o) { + for (size_t i = 0; i < N; i++) { + new (&(*this)[i]) TInner(o[i]); + } + } + + void ToOuter(std::array& o) const { + if constexpr (std::is_same_v) { + static_assert(std::is_trivially_copyable_v); + std::memcpy(o.data(), this->data(), N * sizeof(TOuter)); + } else { + for (size_t i = 0; i < N; i++) { + ((*this)[i]).ToOuter(o[i]); + } + } + } +}; + +/** + * @brief An HDF5-compatible representation a fixed-size multidimensional array. + */ +template +class InnerFixedNdArray : public yardl::FixedNDArray { + public: + InnerFixedNdArray() {} + InnerFixedNdArray(yardl::FixedNDArray const& o) { + auto o_iter = std::begin(o); + TInner* i_data_ptr = yardl::dataptr(*this); + for (size_t i = 0; i < length; i++) { + new (i_data_ptr++) TInner(*o_iter++); + } + } + + template , + TFixedNDArray>, + bool> = true> + void ToOuter(TFixedNDArray& o) const { + if constexpr (std::is_same_v) { + static_assert(std::is_trivially_copyable_v); + std::memcpy(yardl::dataptr(o), yardl::dataptr(*this), length * sizeof(TOuter)); + } else { + auto o_iter = std::begin(o); + auto i_iter = std::begin(*this); + for (size_t i = 0; i < length; i++) { + (*i_iter++).ToOuter(*o_iter++); + } + } + } + + private: + static constexpr size_t length = (Dims * ...); +}; + +/** + * @brief An HDF5-compatible representation of an NDArray with a + * known number of dimensions + */ +template +struct InnerNdArray { + InnerNdArray() : dimensions_{}, data_{0, nullptr} {} + + InnerNdArray(NDArray const& o) + : dimensions_(yardl::shape(o)), data_{yardl::size(o), malloc(yardl::size(o) * sizeof(TInner))} { + if constexpr (std::is_same_v) { + std::memcpy(data_.p, yardl::dataptr(o), data_.len * sizeof(TInner)); + } else { + auto o_iter = std::begin(o); + auto p = static_cast(data_.p); + for (size_t i = 0; i < data_.len; i++) { + new (p++) 
TInner(*o_iter++); + } + } + } + + InnerNdArray(InnerNdArray const&) = delete; + + ~InnerNdArray() { + if (data_.p != nullptr) { + if constexpr (!std::is_trivially_destructible_v) { + for (size_t i = 0; i < data_.len; i++) { + auto inner_object = static_cast(data_.p) + i; + inner_object->~TInner(); + } + } + + free(data_.p); + data_.p = nullptr; + data_.len = 0; + } + } + + InnerNdArray& operator=(InnerNdArray const&) = delete; + + template , TNDArray>, bool> = true> + void ToOuter(TNDArray& rtn) const { + yardl::resize(rtn, dimensions_); + if (data_.len > 0) { + if constexpr (std::is_same_v) { + static_assert(std::is_trivially_copyable_v); + std::memcpy(yardl::dataptr(rtn), static_cast(data_.p), data_.len * sizeof(TInner)); + } else { + TInner* inner_objects = static_cast(data_.p); + auto rtn_iter = std::begin(rtn); + for (size_t i = 0; i < data_.len; i++) { + TInner& inner_object = inner_objects[i]; + inner_object.ToOuter(*rtn_iter++); + } + } + } + } + + std::array dimensions_; + hvl_t data_; +}; + +/** + * @brief An HDF5-compatible representation of a DynamicNDArray (unknown number of dimensions). 
+ */ +template +struct InnerDynamicNdArray { + InnerDynamicNdArray() : dimensions_{0, nullptr}, data_{0, nullptr} {} + + InnerDynamicNdArray(DynamicNDArray const& o) + : dimensions_{yardl::dimension(o), MallocOrThrow(yardl::dimension(o) * sizeof(size_t))}, + data_{yardl::size(o), MallocOrThrow(yardl::size(o) * sizeof(TInner))} { + memcpy(dimensions_.p, yardl::shape(o).data(), yardl::dimension(o) * sizeof(size_t)); + if constexpr (std::is_same_v) { + std::memcpy(data_.p, yardl::dataptr(o), data_.len * sizeof(TInner)); + } else { + auto o_iter = std::begin(o); + auto p = static_cast(data_.p); + for (size_t i = 0; i < data_.len; i++) { + new (p++) TInner(*o_iter++); + } + } + } + + InnerDynamicNdArray(InnerDynamicNdArray const&) = delete; + + ~InnerDynamicNdArray() { + if (dimensions_.p != nullptr) { + free(dimensions_.p); + dimensions_ = hvl_t{}; + } + + if (data_.p != nullptr) { + if constexpr (!std::is_trivially_destructible_v) { + for (size_t i = 0; i < data_.len; i++) { + auto inner_object = static_cast(data_.p) + i; + inner_object->~TInner(); + } + } + + free(data_.p); + data_.p = nullptr; + data_.len = 0; + } + } + + InnerDynamicNdArray& operator=(InnerDynamicNdArray const&) = delete; + + template , TDynamicNDArray>, bool> = true> + void ToOuter(TDynamicNDArray& rtn) const { + std::vector dims(static_cast(dimensions_.p), + static_cast(dimensions_.p) + dimensions_.len); + yardl::resize(rtn, dims); + if (data_.len > 0) { + if constexpr (std::is_same_v) { + static_assert(std::is_trivially_copyable_v); + std::memcpy(yardl::dataptr(rtn), static_cast(data_.p), data_.len * sizeof(TInner)); + } else { + TInner* inner_objects = static_cast(data_.p); + auto rtn_iter = std::begin(rtn); + for (size_t i = 0; i < data_.len; i++) { + TInner& inner_object = inner_objects[i]; + inner_object.ToOuter(*rtn_iter++); + } + } + } + } + + hvl_t dimensions_; + hvl_t data_; +}; + +/** + * @brief An HDF5-compatible representation of std::unordered_map. 
+ */ +template +struct InnerMap : public hvl_t { + using pair_type = std::pair; + + InnerMap() : hvl_t{0, nullptr} { + } + + InnerMap(std::unordered_map const& map) + : hvl_t{map.size(), MallocOrThrow(map.size() * sizeof(pair_type))} { + int i = 0; + for (auto const& [k, v] : map) { + auto dest = (pair_type*)p + i++; + new (&(dest->first)) TInnerKey(k); + new (&(dest->second)) TInnerValue(v); + } + } + + InnerMap(InnerMap const&) = delete; + + ~InnerMap() { + if (p != nullptr) { + if constexpr (!std::is_trivially_destructible_v) { + for (size_t i = 0; i < len; i++) { + auto inner_object = (pair_type*)p + i; + inner_object->~pair_type(); + } + } + + free(p); + p = nullptr; + len = 0; + } + } + + InnerMap& operator=(InnerMap const&) = delete; + + void ToOuter(std::unordered_map& map) const { + if (len > 0) { + pair_type* inner_objects = static_cast(p); + for (size_t i = 0; i < len; i++) { + pair_type& inner_object = inner_objects[i]; + TOuterKey k; + if constexpr (std::is_same_v) { + k = inner_object.first; + } else { + inner_object.first.ToOuter(k); + } + + TOuterValue v; + if constexpr (std::is_same_v) { + v = inner_object.second; + } else { + inner_object.second.ToOuter(v); + } + map.emplace(std::move(k), std::move(v)); + } + } + } +}; + +template +class InnerTypeBuffer { + public: + InnerTypeBuffer(size_t size) : data_(size * sizeof(TInner)) { + } + + InnerTypeBuffer(std::vector const& o) : data_(o.size() * sizeof(TInner)) { + static_assert(!std::is_same_v, "InnerTypeBuffer should only be used for type conversion"); + auto p = reinterpret_cast(data_.data()); + for (size_t i = 0; i < o.size(); i++) { + new (p + i) TInner(o[i]); + } + } + + ~InnerTypeBuffer() { + if constexpr (!std::is_trivially_destructible_v) { + auto p = reinterpret_cast(data_.data()); + size_t count = data_.size() / sizeof(TInner); + for (size_t i = 0; i < count; i++) { + (p + i)->~TInner(); + } + } + } + + TInner* data() { + return reinterpret_cast(data_.data()); + } + + TInner const* begin() 
const { + return reinterpret_cast(data_.data()); + } + + TInner const* end() const { + return begin() + data_.size() / sizeof(TInner); + } + + private: + std::vector data_; +}; + +} // namespace yardl::hdf5 diff --git a/cpp/generated/yardl/detail/hdf5/io.h b/cpp/generated/yardl/detail/hdf5/io.h new file mode 100644 index 0000000..87d0093 --- /dev/null +++ b/cpp/generated/yardl/detail/hdf5/io.h @@ -0,0 +1,419 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +#pragma once + +#include +#include +#include +#include +#include +#include + +#include + +#include "ddl.h" + +namespace yardl::hdf5 { +// To be incremented when we make a structural change to the +// way we store data in an HDF5 file +static const inline uint32_t kHdf5FormatVersionNumber = 1; +static const inline std::string kHdf5FormatVersionNumberAttributeName = "$yardl_format_version"; + +static const inline std::string kSchemaDatasetName = "$yardl_schema"; + +template +static inline void WriteScalarDataset(H5::Group const& group, std::string name, + H5::DataType const& hdf5_type, TOuter const& value) { + H5::DataSpace dataspace; + auto dataset = group.createDataSet(name, hdf5_type, dataspace); + + if constexpr (std::is_same_v) { + dataset.write(&value, hdf5_type); + } else { + TInner inner_value(value); + dataset.write(&inner_value, hdf5_type); + } +} + +template +static inline void ReadScalarDataset(H5::Group const& group, std::string name, + H5::DataType const& hdf5_type, TOuter& value) { + auto dataset = group.openDataSet(name); + H5::DataSpace dataspace; + if constexpr (std::is_same_v) { + dataset.read(&value, hdf5_type, dataspace, dataspace); + } else { + TInner inner_value; + dataset.read(&inner_value, hdf5_type, dataspace, dataspace); + inner_value.ToOuter(value); + } +} + +class TransferPropertiesContainer { + public: + TransferPropertiesContainer(size_t buffer_size) + : buffer_(buffer_size), background_buffer_(buffer_size) { + properties_.setBuffer(buffer_size, 
buffer_.data(), background_buffer_.data()); + } + + TransferPropertiesContainer(TransferPropertiesContainer const&) = delete; + + H5::DSetMemXferPropList const& properties() const { + return properties_; + } + + private: + H5::DSetMemXferPropList properties_{}; + std::vector buffer_; + std::vector background_buffer_; +}; + +class DatasetWriter { + public: + DatasetWriter(H5::Group const& group, std::string name, + H5::DataType const& element_type, size_t conversion_buffer_size) + : dataset_(CreateDataset(group, name, element_type)), element_type_(element_type) { + hsize_t rows_at_a_time = 1; + mem_space_ = H5::DataSpace(1, &rows_at_a_time, NULL); + if (conversion_buffer_size > 0) { + transfer_properties_.emplace(conversion_buffer_size); + } + } + + template + hsize_t Append(TOuter const& value) { + hsize_t new_size = offset_ + 1; + dataset_.extend(&new_size); + + H5::DataSpace fileSpace = dataset_.getSpace(); + fileSpace.selectElements(H5S_SELECT_SET, 1, &offset_); + + if constexpr (std::is_same_v) { + dataset_.write(&value, element_type_, mem_space_, fileSpace); + } else { + TInner inner_value(value); + dataset_.write(&inner_value, element_type_, mem_space_, fileSpace, transfer_properties()); + } + + return offset_++; + } + + template + hsize_t AppendBatch(std::vector const& value) { + hsize_t new_size = offset_ + value.size(); + dataset_.extend(&new_size); + H5::DataSpace fileSpace = dataset_.getSpace(); + + hsize_t new_row_count = value.size(); + fileSpace.selectHyperslab(H5S_SELECT_SET, &new_row_count, &offset_); + H5::DataSpace mem_space(1, &new_row_count, NULL); + + if constexpr (std::is_same_v) { + dataset_.write(value.data(), element_type_, mem_space, fileSpace); + } else { + InnerTypeBuffer inner_value(value); + dataset_.write(inner_value.data(), element_type_, mem_space, fileSpace, transfer_properties()); + } + + auto initial_offset = offset_; + offset_ += value.size(); + return initial_offset; + } + + private: + static H5::DataSet + 
CreateDataset(H5::Group const& group, std::string name, + H5::DataType const& element_type) { + hsize_t dims = 0; + hsize_t maxdims = H5S_UNLIMITED; + + // Because we do not know the dataset size up-front, we need to enable chunking. + // Based on some testing, 8MB chunk size seems to have good throughput, + // but we could make this a tunable parameter in the future. + hsize_t chunk_dims = std::max(static_cast(1), + static_cast(8 * 1024 * 1024 / element_type.getSize())); + + H5::DataSpace dataspace(1, &dims, &maxdims); + + H5::DSetCreatPropList prop; + prop.setChunk(1, &chunk_dims); + return group.createDataSet(name, element_type, dataspace, prop); + } + + H5::DSetMemXferPropList const& transfer_properties() const { + return transfer_properties_.has_value() + ? transfer_properties_.value().properties() + : H5::DSetMemXferPropList::DEFAULT; + } + + H5::DataSet dataset_; + H5::DataType element_type_; + hsize_t offset_{}; + H5::DataSpace mem_space_; + std::optional transfer_properties_; +}; + +class DatasetReader { + public: + DatasetReader(H5::Group const& group, std::string name, + H5::DataType const& element_type, size_t conversion_buffer_size = 0) + : dataset_(group.openDataSet(name)), + element_type_(element_type), + filespace_(dataset_.getSpace()) { + filespace_.getSimpleExtentDims(&total_rows_); + if (conversion_buffer_size > 0) { + transfer_properties_.emplace(std::max(static_cast(conversion_buffer_size), + dataset_.getDataType().getSize())); + } + } + + template + bool Read(TOuter& value) { + if (offset_ >= total_rows_) { + return false; + } + + hsize_t rows_at_a_time = 1; + H5::DataSpace memspace(1, &rows_at_a_time, NULL); + filespace_.selectElements(H5S_SELECT_SET, 1, &offset_); + + if constexpr (std::is_same_v) { + dataset_.read(&value, element_type_, memspace, filespace_); + } else { + TInner inner_value; + dataset_.read(&inner_value, element_type_, memspace, filespace_, transfer_properties()); + inner_value.ToOuter(value); + } + + offset_++; + return 
true; + } + + template + bool ReadBatch(std::vector& values) { + if (offset_ >= total_rows_) { + return false; + } + + hsize_t rows_to_read = std::min(total_rows_ - offset_, static_cast(values.capacity())); + if (rows_to_read != values.size()) { + values.resize(rows_to_read); + } + + H5::DataSpace memspace(1, &rows_to_read, NULL); + filespace_.selectHyperslab(H5S_SELECT_SET, &rows_to_read, &offset_); + + if constexpr (std::is_same_v) { + dataset_.read(values.data(), element_type_, memspace, filespace_); + } else { + InnerTypeBuffer inner_value(values.size()); + dataset_.read(inner_value.data(), element_type_, memspace, filespace_, transfer_properties()); + + auto i_iter = inner_value.begin(); + auto o_iter = values.begin(); + + while (i_iter != inner_value.end()) { + i_iter->ToOuter(*o_iter); + ++i_iter; + ++o_iter; + } + } + + offset_ += rows_to_read; + return offset_ < total_rows_; + } + + private: + H5::DSetMemXferPropList const& transfer_properties() const { + return transfer_properties_.has_value() + ? transfer_properties_.value().properties() + : H5::DSetMemXferPropList::DEFAULT; + } + + H5::DataSet dataset_; + H5::DataType element_type_; + H5::DataSpace filespace_; + hsize_t offset_{}; + hsize_t total_rows_; + std::optional transfer_properties_; +}; + +template +class UnionDatasetWriter { + public: + template + UnionDatasetWriter(H5::Group const& parent_group, std::string name, + bool nullable, TypeLabelBufferSizeTuples const&... 
type_label_buffer_size_tuples) + : group_(CreateGroup(parent_group, name)), + index_writer_( + group_, "$index", + UnionIndexDatasetElementTypeDdl( + UnionTypeEnumDdl(nullable, std::get<1>(type_label_buffer_size_tuples)...)), + 0), + writers_{DatasetWriter{group_, + std::get<1>(type_label_buffer_size_tuples), + std::get<0>(type_label_buffer_size_tuples), + std::get<2>(type_label_buffer_size_tuples)}...} { + index_entry_buffer_.reserve(8192); + } + + ~UnionDatasetWriter() { + Flush(); + } + + void Flush() { + if (!index_entry_buffer_.empty()) { + index_writer_.AppendBatch(index_entry_buffer_); + index_entry_buffer_.clear(); + } + } + + template + void Append(int8_t type, TElementOuter const& value) { + uint64_t offset; + if (type == -1) { + offset = 0; + } else { + DatasetWriter& ds = writers_[type]; + offset = static_cast(ds.Append(value)); + } + + index_entry_buffer_.push_back({type, offset}); + if (index_entry_buffer_.size() == index_entry_buffer_.capacity()) { + Flush(); + } + } + + private: + static H5::Group CreateGroup(H5::Group const& parent_group, std::string group_name) { + if (parent_group.nameExists(group_name)) { + throw std::runtime_error("Unable to create group '" + group_name + + "' for protocol because it already exists."); + } + return parent_group.createGroup(group_name); + } + + H5::Group group_; + DatasetWriter index_writer_; + std::vector index_entry_buffer_; + std::array writers_; +}; + +template +class UnionDatasetReader { + public: + template + UnionDatasetReader(H5::Group const& parent_group, std::string name, + bool nullable, TypeLabelBufferSizeTuples const&... 
type_label_buffer_size_tuples) + : group_(parent_group.openGroup(name)), + index_reader_(group_, "$index", + UnionIndexDatasetElementTypeDdl( + UnionTypeEnumDdl(nullable, std::get<1>(type_label_buffer_size_tuples)...))), + readers_{DatasetReader{group_, + std::get<1>(type_label_buffer_size_tuples), + std::get<0>(type_label_buffer_size_tuples), + std::get<2>(type_label_buffer_size_tuples)}...} { + index_entry_buffer_.reserve(8192); + } + + std::tuple ReadIndex() { + if (index_entry_buffer_offset_ == index_entry_buffer_.size()) { + if (!index_reader_has_more_) { + return {false, 0, nullptr}; + } + + index_reader_has_more_ = index_reader_.ReadBatch(index_entry_buffer_); + index_entry_buffer_offset_ = 0; + + if (index_entry_buffer_.empty()) { + return {false, 0, nullptr}; + } + } + + IndexEntry const& entry = index_entry_buffer_[index_entry_buffer_offset_++]; + + if (entry.type_ < 0) { + return {true, entry.type_, nullptr}; + } + + return {true, entry.type_, &readers_[entry.type_]}; + } + + H5::Group group_; + DatasetReader index_reader_; + std::vector index_entry_buffer_; + size_t index_entry_buffer_offset_{}; + bool index_reader_has_more_{true}; + std::array readers_; +}; + +class Hdf5Writer { + public: + Hdf5Writer(std::string const& path, std::string const& group_name, std::string const& schema) + : file_(path, H5F_ACC_CREAT | H5F_ACC_RDWR), + group_(CreateGroup(file_, group_name)) { + WriteScalarDataset(group_, kSchemaDatasetName, InnerVlenStringDdl(), schema); + } + + private: + static H5::Group CreateGroup(H5::H5File const& file, std::string group_name) { + if (file.nameExists(group_name)) { + throw std::runtime_error("Unable to create group '" + group_name + + "' for protocol because it already exists."); + } + H5::Group group = file.createGroup(group_name); + + uint32_t version = kHdf5FormatVersionNumber; + group.createAttribute(kHdf5FormatVersionNumberAttributeName, H5::PredType::NATIVE_INT, H5::DataSpace()) + .write(H5::PredType::NATIVE_UINT32, &version); + 
+ return group; + } + + protected: + H5::H5File file_; + H5::Group group_; +}; + +class Hdf5Reader { + public: + Hdf5Reader(std::string path, std::string group_name, std::string const& expected_schema) + : file_(path, H5F_ACC_RDONLY), group_(OpenGroup(file_, group_name)) { + std::string actual_schema; + ReadScalarDataset(group_, kSchemaDatasetName, InnerVlenStringDdl(), actual_schema); + if (actual_schema != expected_schema) { + throw std::runtime_error( + "The schema of the data to be read is not compatible with the current protocol."); + } + } + + private: + static H5::Group OpenGroup(H5::H5File const& file, std::string group_name) { + if (!file.nameExists(group_name)) { + throw std::runtime_error("Unable to open group '" + group_name + + "' for protocol because it does not exist."); + } + + H5::Group group = file.openGroup(group_name); + if (group.attrExists(kHdf5FormatVersionNumberAttributeName)) { + uint32_t version; + group.openAttribute(kHdf5FormatVersionNumberAttributeName) + .read(H5::PredType::NATIVE_UINT32, &version); + if (version == kHdf5FormatVersionNumber) { + return group; + } + } + + throw std::runtime_error("The data in the HDF5 file is not in the expected format."); + } + + protected: + H5::H5File file_; + H5::Group group_; +}; + +template +inline constexpr bool always_false_v = false; + +} // namespace yardl::hdf5 diff --git a/cpp/generated/yardl/detail/ndarray/impl.h b/cpp/generated/yardl/detail/ndarray/impl.h new file mode 100644 index 0000000..53a966b --- /dev/null +++ b/cpp/generated/yardl/detail/ndarray/impl.h @@ -0,0 +1,117 @@ +#pragma once + +#include + +#include +#include +#include +#include + +namespace yardl { + +/** + * @brief A multidimensional array where all dimension sizes + * are known at compile-time. 
+ * + * @tparam T the element type + * @tparam Dims the array dimensions + */ +template +using FixedNDArray = xt::xtensor_fixed, + xt::layout_type::row_major, false>; + +/** + * @brief A multidimensional array where the number of dimensions + * is known at compile-time + * + * @tparam T the element type + * @tparam N the number of dimensions + */ +template +using NDArray = xt::xtensor; + +/** + * @brief A multidimensional array where the number of dimensions + * is not known at compile-time + * + * @tparam T the element type + */ +template +using DynamicNDArray = xt::xarray; + +/**** FixedNDArray Implementation ****/ +template +constexpr size_t size(FixedNDArray const& arr) { return arr.size(); } + +template +constexpr size_t dimension(FixedNDArray const& arr) { return arr.dimension(); } + +template +constexpr std::array shape(FixedNDArray const& arr) { return arr.shape(); } + +template +constexpr size_t shape(FixedNDArray const& arr, size_t dim) { return arr.shape(dim); } + +template +constexpr T* dataptr(FixedNDArray& arr) { return arr.data(); } +template +constexpr T const* dataptr(FixedNDArray const& arr) { return arr.data(); } + +template +constexpr T const& at(FixedNDArray const& arr, Args... idx) { return arr.at(idx...); } + +/**** NDArray Implementation ****/ +template +size_t size(NDArray const& arr) { return arr.size(); } + +template +size_t dimension(NDArray const& arr) { return arr.dimension(); } + +template +std::array shape(NDArray const& arr) { return arr.shape(); } + +template +size_t shape(NDArray const& arr, size_t dim) { return arr.shape(dim); } + +template +void resize(NDArray& arr, std::array const& shape) { arr.resize(shape, true); } + +template +T* dataptr(NDArray& arr) { return arr.data(); } +template +T const* dataptr(NDArray const& arr) { return arr.data(); } + +template +T const& at(NDArray const& arr, Args... 
idx) { return arr.at(idx...); } + +/**** DynamicNDArray Implementation ****/ +template +size_t size(DynamicNDArray const& arr) { return arr.size(); } + +template +size_t dimension(DynamicNDArray const& arr) { return arr.dimension(); } + +template +std::vector shape(DynamicNDArray const& arr) { + // Xtensor xarray.shape() is an xt::svector, not a std::vector + auto shape = arr.shape(); + std::vector vshape; + std::copy(shape.begin(), shape.end(), std::back_inserter(vshape)); + return vshape; +} + +template +size_t shape(DynamicNDArray const& arr, size_t dim) { return arr.shape(dim); } + +template +void resize(DynamicNDArray& arr, std::vector const& shape) { arr.resize(shape, true); } + +template +T* dataptr(DynamicNDArray& arr) { return arr.data(); } +template +T const* dataptr(DynamicNDArray const& arr) { return arr.data(); } + +template +T const& at(DynamicNDArray const& arr, Args... idx) { return arr.at(idx...); } + +} // namespace yardl diff --git a/cpp/generated/yardl/detail/ndjson/header.h b/cpp/generated/yardl/detail/ndjson/header.h new file mode 100644 index 0000000..9279c0d --- /dev/null +++ b/cpp/generated/yardl/detail/ndjson/header.h @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +#pragma once + +#include +#include + +#include + +namespace yardl::ndjson { +using ordered_json = nlohmann::ordered_json; + +static inline uint32_t kNDJsonFormatVersionNumber = 1; + +inline void WriteHeader(std::ostream& stream, std::string& schema) { + auto parsed_schema = ordered_json::parse(schema); + + ordered_json metadata = {{"yardl", {{"version", kNDJsonFormatVersionNumber}, {"schema", parsed_schema}}}}; + stream << metadata << "\n"; +} + +inline ordered_json ReadHeader(std::istream& stream) { + std::string line; + std::getline(stream, line); + try { + ordered_json actual_header_json = ordered_json::parse(line); + actual_header_json = actual_header_json.at("yardl"); + if (actual_header_json["version"] != kNDJsonFormatVersionNumber) { + throw std::runtime_error( + "Unsupported Yardl NDJSON format version."); + } + + return actual_header_json.at("schema"); + } catch (ordered_json::exception const&) { + throw std::runtime_error( + "Data in the stream is not in the expected Yardl NDJSON format."); + } +} + +inline void ReadAndValidateHeader(std::istream& stream, std::string& expected_schema) { + ordered_json expected_schema_json = ordered_json::parse(expected_schema); + ordered_json actual_schema_json = ReadHeader(stream); + if (expected_schema_json != actual_schema_json) { + throw std::runtime_error( + "The schema of the data to be read is not compatible with the current protocol."); + } +} +} // namespace yardl::ndjson diff --git a/cpp/generated/yardl/detail/ndjson/reader_writer.h b/cpp/generated/yardl/detail/ndjson/reader_writer.h new file mode 100644 index 0000000..f952113 --- /dev/null +++ b/cpp/generated/yardl/detail/ndjson/reader_writer.h @@ -0,0 +1,81 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +#pragma once + +#include + +#include + +#include "header.h" + +namespace yardl::ndjson { +using ordered_json = nlohmann::ordered_json; + +class NDJsonWriter { + protected: + NDJsonWriter(std::ostream& stream, std::string& schema) + : stream_(stream) { + WriteHeader(stream_, schema); + } + + NDJsonWriter(std::string file_name, std::string& schema) + : owned_file_stream_(open_file(file_name)), stream_(*owned_file_stream_) { + WriteHeader(stream_, schema); + } + + private: + static std::unique_ptr open_file(std::string filename) { + auto file_stream = std::make_unique(filename, std::ios::out); + if (!file_stream->good()) { + throw std::runtime_error("Failed to open file for writing."); + } + + return file_stream; + } + + private: + std::unique_ptr owned_file_stream_{}; + + protected: + std::ostream& stream_; +}; + +class NDJsonReader { + protected: + NDJsonReader(std::istream& stream, std::string& schema) + : stream_(stream) { + ReadAndValidateHeader(stream_, schema); + } + + NDJsonReader(std::string file_name, std::string& schema) + : owned_file_stream_(open_file(file_name)), stream_(*owned_file_stream_) { + ReadAndValidateHeader(stream_, schema); + } + + void VerifyFinished() { + if (unused_step_ || stream_.peek() != EOF) { + throw std::runtime_error("The stream was not read to completion."); + } + } + + private: + static std::unique_ptr open_file(std::string filename) { + auto file_stream = std::make_unique(filename, std::ios::in); + if (!file_stream->good()) { + throw std::runtime_error("Failed to open file for reading."); + } + + return file_stream; + } + + private: + std::unique_ptr owned_file_stream_{}; + + protected: + std::istream& stream_; + std::string line_{}; + std::optional unused_step_{}; +}; + +} // namespace yardl::ndjson diff --git a/cpp/generated/yardl/detail/ndjson/serializers.h b/cpp/generated/yardl/detail/ndjson/serializers.h new file mode 100644 index 0000000..3249558 --- /dev/null +++ b/cpp/generated/yardl/detail/ndjson/serializers.h @@ -0,0 
+1,242 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "../../yardl.h" + +namespace yardl::ndjson { + +using ordered_json = nlohmann::ordered_json; + +inline void WriteProtocolValue(std::ostream& stream, std::string const& stepName, ordered_json const& value) { + stream << ordered_json{{stepName, value}} << '\n'; +} + +template +inline bool ReadProtocolValue(std::istream& stream, std::string& line, std::string const& stepName, bool required, std::optional& unused_step, T& value) { + ordered_json* json_value; + if (unused_step) { + try { + ordered_json& v = unused_step->at(stepName); + json_value = &v; + } catch (ordered_json::out_of_range&) { + if (required) { + unused_step.reset(); + throw std::runtime_error("encountered unexpected protocol value"); + } + return false; + } + + json_value->get_to(value); + unused_step.reset(); + return true; + } + + if (!std::getline(stream, line)) { + if (!required) { + return false; + } + throw std::runtime_error("missing protocol step " + stepName); + } + + ordered_json parsed_step = ordered_json::parse(line); + try { + ordered_json& v = parsed_step.at(stepName); + json_value = &v; + } catch (ordered_json::out_of_range&) { + if (required) { + throw std::runtime_error("encountered unexpected protocol value"); + } + unused_step.emplace(std::move(parsed_step)); + return false; + } + + json_value->get_to(value); + return true; +} + +template +constexpr bool ShouldSerializeFieldValue(T const&) { + return true; +} + +template +constexpr bool ShouldSerializeFieldValue(std::optional const& value) { + return value.has_value(); +} + +template +constexpr bool ShouldSerializeFieldValue(std::variant const& var) { + if constexpr (std::is_same_v>>) { + return var.index() != 0; + } + + return true; +} + +} // namespace yardl::ndjson + +NLOHMANN_JSON_NAMESPACE_BEGIN + +template +struct 
adl_serializer> { + static void to_json(ordered_json& j, std::optional const& value) { + if (value) { + j = *value; + } else { + j = nullptr; + } + } + + static void from_json(ordered_json const& j, std::optional& value) { + if (j.is_null()) { + value = std::nullopt; + } else { + value = j.get(); + } + } +}; + +template <> +struct adl_serializer { + static void to_json(ordered_json& j, [[maybe_unused]] std::monostate const& value) { + j = nullptr; + } + + static void from_json(ordered_json const& j, [[maybe_unused]] std::monostate& value) { + if (!j.is_null()) { + throw std::runtime_error("expected null"); + } + } +}; + +template <> +struct adl_serializer { + static void to_json(ordered_json& j, yardl::Date const& value) { + j = date::format("%F", value); + } + + static void from_json(ordered_json const& j, yardl::Date& value) { + std::stringstream ss{j.get()}; + ss >> date::parse("%F", value); + if (ss.fail()) { + throw std::runtime_error("invalid date format"); + } + } +}; + +template <> +struct adl_serializer { + static void to_json(ordered_json& j, yardl::Time const& value) { + j = date::format("%T", value); + } + + static void from_json(ordered_json const& j, yardl::Time& value) { + std::stringstream ss{j.get()}; + ss >> date::parse("%T", value); + if (ss.fail()) { + throw std::runtime_error("invalid time format"); + } + } +}; + +template <> +struct adl_serializer { + static void to_json(ordered_json& j, yardl::DateTime const& value) { + j = date::format("%FT%T", value); + } + + static void from_json(ordered_json const& j, yardl::DateTime& value) { + std::stringstream ss{j.get()}; + ss >> date::parse("%FT%T", value); + if (ss.fail()) { + throw std::runtime_error("invalid datetime format"); + } + } +}; + +template +struct adl_serializer> { + static void to_json(ordered_json& j, std::complex const& value) { + j = ordered_json::array({value.real(), value.imag()}); + } + + static void from_json(ordered_json const& j, std::complex& value) { + value = 
std::complex{j.at(0).get(), j.at(1).get()}; + } +}; + +template +struct adl_serializer> { + static void to_json(ordered_json& j, yardl::DynamicNDArray const& value) { + auto shape = yardl::shape(value); + auto data_array = ordered_json::array(); + for (auto const& v : value) { + data_array.push_back(v); + } + j = ordered_json{{"shape", shape}, {"data", data_array}}; + } + + static void from_json(ordered_json const& j, yardl::DynamicNDArray& value) { + yardl::resize(value, j.at("shape").get>()); + auto data_array = j.at("data").get>(); + size_t i = 0; + for (auto& element : value) { + element = data_array[i++]; + } + } +}; + +template +struct adl_serializer> { + static void to_json(ordered_json& j, yardl::NDArray const& value) { + auto shape = yardl::shape(value); + auto data_array = ordered_json::array(); + for (auto const& v : value) { + data_array.push_back(v); + } + j = ordered_json{{"shape", shape}, {"data", data_array}}; + } + + static void from_json(ordered_json const& j, yardl::NDArray& value) { + yardl::resize(value, j.at("shape").get>()); + auto data_array = j.at("data").get>(); + size_t i = 0; + for (auto& element : value) { + element = data_array[i++]; + } + } +}; + +template +struct adl_serializer> { + static void to_json(ordered_json& j, yardl::FixedNDArray const& value) { + auto data_array = ordered_json::array(); + for (auto const& v : value) { + data_array.push_back(v); + } + j = data_array; + } + + static void from_json(ordered_json const& j, yardl::FixedNDArray& value) { + auto data_array = j.get>(); + size_t i = 0; + for (auto& element : value) { + element = data_array[i++]; + } + } +}; + +NLOHMANN_JSON_NAMESPACE_END diff --git a/cpp/generated/yardl/yardl.h b/cpp/generated/yardl/yardl.h new file mode 100644 index 0000000..d0318e3 --- /dev/null +++ b/cpp/generated/yardl/yardl.h @@ -0,0 +1,144 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +#pragma once + +#include + +#include + +#include "detail/ndarray/impl.h" + +namespace yardl { + +/** + * @brief Represents a date as a number of days since the epoch. + */ +using Date = date::local_days; + +/** + * @brief Represents a time of day as the number of nanoseconds since midnight. + */ +using Time = std::chrono::duration; + +/** + * @brief Represents a datetime as the number of nanoseconds since the epoch. + */ +using DateTime = std::chrono::time_point; + +/** + * @brief The same as size_t when it is 64 bits, otherwise uint64_t. + */ +using Size = std::conditional_t; + +/** + * @brief A base template for generated flags classes + + * @tparam TValue the underlying integral type + * @tparam TDerived the derived flags class + */ +template +struct BaseFlags { + static_assert(std::is_integral_v, "TValue must be an integral type"); + + public: + BaseFlags() = default; + BaseFlags(TValue value) : value_(value) {} + BaseFlags(TDerived const& other) : value_(other.value_) {}; + + using value_type = TValue; + + [[nodiscard]] TDerived operator|(TDerived rhs) const { + return TDerived(value_ | rhs.value_); + } + + [[nodiscard]] TDerived operator&(TDerived rhs) const { + return TDerived(value_ & rhs.value_); + } + + [[nodiscard]] TDerived operator^(TDerived rhs) const { + return TDerived(value_ ^ rhs.value_); + } + + TDerived operator~() const { + return TDerived(~value_); + } + + TDerived& operator=(TDerived const& rhs) { + value_ = rhs.value_; + return static_cast(*this); + } + + TDerived& operator=(TValue rhs) { + value_ = rhs; + return static_cast(*this); + } + + TDerived& operator|=(TDerived rhs) { + value_ |= rhs.value_; + return static_cast(*this); + } + + TDerived& operator&=(TDerived rhs) { + value_ &= rhs.value_; + return static_cast(*this); + } + + TDerived& operator^=(TDerived rhs) { + value_ ^= rhs.value_; + return static_cast(*this); + } + + bool operator==(TDerived rhs) const { + return value_ == rhs.value_; + } + + bool operator==(TValue rhs) const 
{ + return value_ == rhs; + } + + bool operator!=(TDerived rhs) const { + return value_ != rhs.value_; + } + + bool operator!=(TValue rhs) const { + return value_ != rhs; + } + + [[nodiscard]] bool HasFlags(TDerived flag) const { + return (value_ & flag.value_) == flag.value_; + } + + void SetFlags(TDerived flag) { + value_ |= flag.value_; + } + + void UnsetFlags(TDerived flag) { + value_ &= ~flag.value_; + } + + void Clear() { + value_ = 0; + } + + [[nodiscard]] TDerived WithFlags(TDerived flag) const { + return *this | flag; + } + + [[nodiscard]] TDerived WithoutFlags(TDerived flag) const { + return *this & ~flag; + } + + [[nodiscard]] explicit operator TValue() const { + return value_; + } + + [[nodiscard]] TValue Value() const { + return value_; + } + + private: + TValue value_{}; +}; + +} // namespace yardl diff --git a/cpp/petsird.h b/cpp/petsird.h new file mode 100644 index 0000000..fc48d09 --- /dev/null +++ b/cpp/petsird.h @@ -0,0 +1,2 @@ +#pragma once +#include "generated/binary/protocols.h" diff --git a/cpp_test/CMakeLists.txt b/cpp_test/CMakeLists.txt new file mode 100644 index 0000000..688e25f --- /dev/null +++ b/cpp_test/CMakeLists.txt @@ -0,0 +1,12 @@ +cmake_minimum_required(VERSION 3.12.0) # older would work, but could give warnings on policy CMP0074 +project(petsird_test VERSION 0.2.0) + +set(CMAKE_CXX_STANDARD 17) + +if(WIN32) + add_compile_options(/W3 /WX) +else() + add_compile_options(-Wall -Wextra -pedantic) +endif() + +find_package(petsird REQUIRED) diff --git a/python/petsird/__init__.py b/python/petsird/__init__.py new file mode 100644 index 0000000..cab9704 --- /dev/null +++ b/python/petsird/__init__.py @@ -0,0 +1,71 @@ +# This file was generated by the "yardl" tool. DO NOT EDIT. 
+ +# pyright: reportUnusedImport=false +from typing import Tuple as _Tuple +import re as _re +import numpy as _np + +_MIN_NUMPY_VERSION = (1, 22, 0) + +def _parse_version(version: str) -> _Tuple[int, ...]: + try: + return tuple(map(int, version.split("."))) + except ValueError: + # ignore any prerelease suffix + version = _re.sub(r"[^0-9.]", "", version) + return tuple(map(int, version.split("."))) + +if _parse_version(_np.__version__) < _MIN_NUMPY_VERSION: + raise ImportError(f"Your installed numpy version is {_np.__version__}, but version >= {'.'.join(str(i) for i in _MIN_NUMPY_VERSION)} is required.") + +from .yardl_types import * +from .types import ( + AnnulusShape, + Atom, + BedMovementTimeBlock, + BoxShape, + BoxSolidVolume, + BulkMaterial, + CoincidenceEvent, + CoincidencePolicy, + Coordinate, + DetectorModule, + Direction, + DirectionMatrix, + EventTimeBlock, + ExamInformation, + ExternalSignalTimeBlock, + ExternalSignalType, + ExternalSignalTypeEnum, + GantryMovementTimeBlock, + GenericSolidVolume, + GeometricShape, + Header, + Institution, + ReplicatedBoxSolidVolume, + ReplicatedDetectorModule, + ReplicatedGenericSolidVolume, + ReplicatedObject, + RigidTransformation, + ScannerGeometry, + ScannerInformation, + SolidVolume, + Subject, + TimeBlock, + TimeFrameInformation, + TimeInterval, + TripleEvent, + get_dtype, +) +from .protocols import ( + PETSIRDReaderBase, + PETSIRDWriterBase, +) +from .binary import ( + BinaryPETSIRDReader, + BinaryPETSIRDWriter, +) +from .ndjson import ( + NDJsonPETSIRDReader, + NDJsonPETSIRDWriter, +) diff --git a/python/petsird/_binary.py b/python/petsird/_binary.py new file mode 100644 index 0000000..8c3c7d8 --- /dev/null +++ b/python/petsird/_binary.py @@ -0,0 +1,1345 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+ +# pyright: reportUnnecessaryIsInstance=false + +import datetime +from enum import Enum +from io import BufferedIOBase, BufferedReader, BytesIO +from typing import ( + BinaryIO, + Iterable, + Protocol, + TypeVar, + Generic, + Any, + Optional, + Tuple, + cast, +) +from abc import ABC, abstractmethod +import struct +import sys +import numpy as np + +from numpy.lib import recfunctions +import numpy.typing as npt + +from .yardl_types import * + +if sys.byteorder != "little": + raise RuntimeError("Only little-endian systems are currently supported") + +MAGIC_BYTES: bytes = b"yardl" +CURRENT_BINARY_FORMAT_VERSION: int = 1 + +INT8_MIN: int = np.iinfo(np.int8).min +INT8_MAX: int = np.iinfo(np.int8).max + +UINT8_MAX: int = np.iinfo(np.uint8).max + +INT16_MIN: int = np.iinfo(np.int16).min +INT16_MAX: int = np.iinfo(np.int16).max + +UINT16_MAX: int = np.iinfo(np.uint16).max + +INT32_MIN: int = np.iinfo(np.int32).min +INT32_MAX: int = np.iinfo(np.int32).max + +UINT32_MAX: int = np.iinfo(np.uint32).max + +INT64_MIN: int = np.iinfo(np.int64).min +INT64_MAX: int = np.iinfo(np.int64).max + +UINT64_MAX: int = np.iinfo(np.uint64).max + + +class BinaryProtocolWriter(ABC): + def __init__(self, stream: Union[BinaryIO, str], schema: str) -> None: + self._stream = CodedOutputStream(stream) + self._stream.write_bytes(MAGIC_BYTES) + write_fixed_int32(self._stream, CURRENT_BINARY_FORMAT_VERSION) + string_serializer.write(self._stream, schema) + + def _close(self) -> None: + self._stream.close() + + def _end_stream(self) -> None: + self._stream.ensure_capacity(1) + self._stream.write_byte_no_check(0) + + +class BinaryProtocolReader(ABC): + def __init__( + self, + stream: Union[BufferedReader, BytesIO, BinaryIO, str], + expected_schema: Optional[str], + ) -> None: + self._stream = CodedInputStream(stream) + magic_bytes = self._stream.read_view(len(MAGIC_BYTES)) + if magic_bytes != MAGIC_BYTES: # pyright: ignore [reportUnnecessaryComparison] + raise RuntimeError("Invalid magic bytes") + + 
version = read_fixed_int32(self._stream) + if version != CURRENT_BINARY_FORMAT_VERSION: + raise RuntimeError("Invalid binary format version") + + self._schema = string_serializer.read(self._stream) + if expected_schema and self._schema != expected_schema: + raise RuntimeError("Invalid schema") + + def _close(self) -> None: + self._stream.close() + + +class CodedOutputStream: + def __init__( + self, stream: Union[BinaryIO, str], *, buffer_size: int = 65536 + ) -> None: + if isinstance(stream, str): + self._stream = cast(BinaryIO, open(stream, "wb")) + self._owns_stream = True + else: + self._stream = stream + self._owns_stream = False + + self._buffer = bytearray(buffer_size) + self._offset = 0 + + def close(self) -> None: + self.flush() + if self._owns_stream: + self._stream.close() + + def ensure_capacity(self, size: int) -> None: + if (len(self._buffer) - self._offset) < size: + self.flush() + + def flush(self) -> None: + if self._offset > 0: + self._stream.write(self._buffer[: self._offset]) + self._stream.flush() + self._offset = 0 + + def write(self, formatter: struct.Struct, *args: Any) -> None: + size = formatter.size + if (len(self._buffer) - self._offset) < size: + self.flush() + + formatter.pack_into(self._buffer, self._offset, *args) + self._offset += size + + def write_bytes(self, data: Union[bytes, bytearray]) -> None: + if len(data) > (len(self._buffer) - self._offset): + self.flush() + self._stream.write(data) + else: + self._buffer[self._offset : self._offset + len(data)] = data + self._offset += len(data) + + def write_bytes_directly(self, data: Union[bytes, bytearray, memoryview]) -> None: + self.flush() + self._stream.write(data) + + def write_byte_no_check(self, value: int) -> None: + assert 0 <= value <= UINT8_MAX + self._buffer[self._offset] = value + self._offset += 1 + + def write_unsigned_varint( + self, + value: Union[int, np.uint8, np.uint16, np.uint32, np.uint64], + ) -> None: + if (len(self._buffer) - self._offset) < 10: + self.flush() 
+ + int_val = int(value) # bitwise ops not supported on numpy types + + while True: + if int_val < 0x80: + self.write_byte_no_check(int_val) + return + + self.write_byte_no_check((int_val & 0x7F) | 0x80) + int_val >>= 7 + + def zigzag_encode( + self, + value: Union[int, np.int8, np.int16, np.int32, np.int64], + ) -> int: + int_val = int(value) + return (int_val << 1) ^ (int_val >> 63) + + def write_signed_varint( + self, + value: Union[int, np.int8, np.int16, np.int32, np.int64], + ) -> None: + self.write_unsigned_varint(self.zigzag_encode(value)) + + +class CodedInputStream: + def __init__( + self, + stream: Union[BufferedReader, BytesIO, BinaryIO, str], + *, + buffer_size: int = 65536, + ) -> None: + if isinstance(stream, str): + self._stream = open(stream, "rb") + self._owns_stream = True + else: + if not isinstance(stream, BufferedIOBase): + self._stream = BufferedReader(stream) # type: ignore + else: + self._stream = stream + self._owns_stream = False + + self._last_read_count = 0 + self._buffer = bytearray(buffer_size) + self._view = memoryview(self._buffer) + self._offset = 0 + self._at_end = False + + def close(self) -> None: + if self._owns_stream: + self._stream.close() + + def read(self, formatter: struct.Struct) -> tuple[Any, ...]: + if self._last_read_count - self._offset < formatter.size: + self._fill_buffer(formatter.size) + + result = formatter.unpack_from(self._buffer, self._offset) + self._offset += formatter.size + return result + + def read_byte(self) -> int: + if self._last_read_count - self._offset < 1: + self._fill_buffer(1) + + result = self._buffer[self._offset] + self._offset += 1 + return result + + def read_unsigned_varint(self) -> int: + result = 0 + shift = 0 + while True: + if self._last_read_count - self._offset < 1: + self._fill_buffer(1) + + byte = self._buffer[self._offset] + self._offset += 1 + result |= (byte & 0x7F) << shift + if byte < 0x80: + return result + shift += 7 + + def zigzag_decode(self, value: int) -> int: + return 
(value >> 1) ^ -(value & 1) + + def read_signed_varint(self) -> int: + return self.zigzag_decode(self.read_unsigned_varint()) + + def read_view(self, count: int) -> memoryview: + if count <= (self._last_read_count - self._offset): + res = self._view[self._offset : self._offset + count] + self._offset += count + return res + + if count > len(self._buffer): + local_buf = bytearray(count) + local_view = memoryview(local_buf) + remaining = self._last_read_count - self._offset + local_view[:remaining] = self._view[self._offset : self._last_read_count] + self._offset = self._last_read_count + if self._stream.readinto(local_view[remaining:]) < count - remaining: + raise EOFError("Unexpected EOF") + return local_view + + self._fill_buffer(count) + result = self._view[self._offset : self._offset + count] + self._offset += count + return result + + def read_bytearray(self, count: int) -> bytearray: + if count <= (self._last_read_count - self._offset): + res = bytearray(self._view[self._offset : self._offset + count]) + self._offset += count + return res + + if count > len(self._buffer): + local_buf = bytearray(count) + local_view = memoryview(local_buf) + remaining = self._last_read_count - self._offset + local_view[:remaining] = self._view[self._offset : self._last_read_count] + self._offset = self._last_read_count + if self._stream.readinto(local_view[remaining:]) < count - remaining: + raise EOFError("Unexpected EOF") + return local_buf + + self._fill_buffer(count) + result = self._view[self._offset : self._offset + count] + self._offset += count + return bytearray(result) + + def _fill_buffer(self, min_count: int = 0) -> None: + remaining = self._last_read_count - self._offset + if remaining > 0: + remaining_view = memoryview(self._buffer)[ + self._offset : self._offset + remaining + 1 + ] + self._buffer[:remaining] = remaining_view + + slice = memoryview(self._buffer)[remaining:] + self._last_read_count = self._stream.readinto(slice) + remaining + self._offset = 0 + if 
self._last_read_count == 0: + self._at_end = True + if min_count > 0 and (self._last_read_count) < min_count: + raise EOFError("Unexpected EOF") + + +T = TypeVar("T") +T_NP = TypeVar("T_NP", bound=np.generic) + + +class TypeSerializer(Generic[T, T_NP], ABC): + def __init__(self, dtype: npt.DTypeLike) -> None: + self._dtype: np.dtype[Any] = np.dtype(dtype) + + def overall_dtype(self) -> np.dtype[Any]: + return self._dtype + + def struct_format_str(self) -> Optional[str]: + return None + + @abstractmethod + def write(self, stream: CodedOutputStream, value: T) -> None: + raise NotImplementedError + + @abstractmethod + def write_numpy(self, stream: CodedOutputStream, value: T_NP) -> None: + raise NotImplementedError + + @abstractmethod + def read(self, stream: CodedInputStream) -> T: + raise NotImplementedError + + @abstractmethod + def read_numpy(self, stream: CodedInputStream) -> T_NP: + raise NotImplementedError + + def is_trivially_serializable(self) -> bool: + return False + + +class StructSerializer(TypeSerializer[T, T_NP]): + def __init__(self, numpy_type: type, format_string: str) -> None: + super().__init__(numpy_type) + self._struct = struct.Struct(format_string) + self._numpy_type = numpy_type + + def write(self, stream: CodedOutputStream, value: T) -> None: + stream.write(self._struct, value) + + def write_numpy(self, stream: CodedOutputStream, value: T_NP) -> None: + stream.write(self._struct, value) + + def read(self, stream: CodedInputStream) -> T: + return cast(T, stream.read(self._struct)[0]) + + def read_numpy(self, stream: CodedInputStream) -> T_NP: + return cast(T_NP, self._numpy_type(stream.read(self._struct)[0])) + + def struct_format_str(self) -> str: + return self._struct.format + + +class BoolSerializer(StructSerializer[bool, np.bool_]): + def __init__(self) -> None: + super().__init__(np.bool_, " bool: + return super().read(stream) + + def read_numpy(self, stream: CodedInputStream) -> np.bool_: + return super().read_numpy(stream) + + 
+bool_serializer = BoolSerializer() + + +class Int8Serializer(StructSerializer[Int8, np.int8]): + def __init__(self) -> None: + super().__init__(np.int8, " Int8: + return super().read(stream) + + def is_trivially_serializable(self) -> bool: + return True + + +int8_serializer = Int8Serializer() + + +class UInt8Serializer(StructSerializer[UInt8, np.uint8]): + def __init__(self) -> None: + super().__init__(np.uint8, " UInt8: + return super().read(stream) + + def is_trivially_serializable(self) -> bool: + return True + + +uint8_serializer = UInt8Serializer() + + +class Int16Serializer(TypeSerializer[Int16, np.int16]): + def __init__(self) -> None: + super().__init__(np.int16) + + def write(self, stream: CodedOutputStream, value: Int16) -> None: + if isinstance(value, int): + if value < INT16_MIN or value > INT16_MAX: + raise ValueError( + f"Value {value} is outside the range of a signed 16-bit integer" + ) + elif not isinstance(value, cast(type, np.int16)): + raise ValueError(f"Value is not a signed 16-bit integer: {value}") + + stream.write_signed_varint(value) + + def write_numpy(self, stream: CodedOutputStream, value: np.int16) -> None: + stream.write_signed_varint(value) + + def read(self, stream: CodedInputStream) -> Int16: + return stream.read_signed_varint() + + def read_numpy(self, stream: CodedInputStream) -> np.int16: + return np.int16(stream.read_signed_varint()) + + +int16_serializer = Int16Serializer() + + +class UInt16Serializer(TypeSerializer[UInt16, np.uint16]): + def __init__(self) -> None: + super().__init__(np.uint16) + + def write(self, stream: CodedOutputStream, value: UInt16) -> None: + if isinstance(value, int): + if value < 0 or value > UINT16_MAX: + raise ValueError( + f"Value {value} is outside the range of an unsigned 16-bit integer" + ) + elif not isinstance(value, cast(type, np.uint16)): + raise ValueError(f"Value is not an unsigned 16-bit integer: {value}") + + stream.write_unsigned_varint(value) + + def write_numpy(self, stream: 
CodedOutputStream, value: np.uint16) -> None: + stream.write_unsigned_varint(value) + + def read(self, stream: CodedInputStream) -> UInt16: + return stream.read_unsigned_varint() + + def read_numpy(self, stream: CodedInputStream) -> np.uint16: + return np.uint16(stream.read_unsigned_varint()) + + +uint16_serializer = UInt16Serializer() + + +class Int32Serializer(TypeSerializer[Int32, np.int32]): + def __init__(self) -> None: + super().__init__(np.int32) + + def write(self, stream: CodedOutputStream, value: Int32) -> None: + if isinstance(value, int): + if value < INT32_MIN or value > INT32_MAX: + raise ValueError( + f"Value {value} is outside the range of a signed 32-bit integer" + ) + elif not isinstance(value, cast(type, np.int32)): + raise ValueError(f"Value is not a signed 32-bit integer: {value}") + + stream.write_signed_varint(value) + + def write_numpy(self, stream: CodedOutputStream, value: np.int32) -> None: + stream.write_signed_varint(value) + + def read(self, stream: CodedInputStream) -> Int32: + return stream.read_signed_varint() + + def read_numpy(self, stream: CodedInputStream) -> np.int32: + return np.int32(stream.read_signed_varint()) + + +int32_serializer = Int32Serializer() + + +class UInt32Serializer(TypeSerializer[UInt32, np.uint32]): + def __init__(self) -> None: + super().__init__(np.uint32) + + def write(self, stream: CodedOutputStream, value: UInt32) -> None: + if isinstance(value, int): + if value < 0 or value > UINT32_MAX: + raise ValueError( + f"Value {value} is outside the range of an unsigned 32-bit integer" + ) + elif not isinstance(value, cast(type, np.uint32)): + raise ValueError(f"Value is not an unsigned 32-bit integer: {value}") + + stream.write_unsigned_varint(value) + + def write_numpy(self, stream: CodedOutputStream, value: np.uint32) -> None: + stream.write_unsigned_varint(value) + + def read(self, stream: CodedInputStream) -> UInt32: + return stream.read_unsigned_varint() + + def read_numpy(self, stream: CodedInputStream) -> 
np.uint32: + return np.uint32(stream.read_unsigned_varint()) + + +uint32_serializer = UInt32Serializer() + + +class Int64Serializer(TypeSerializer[Int64, np.int64]): + def __init__(self) -> None: + super().__init__(np.int64) + + def write(self, stream: CodedOutputStream, value: Int64) -> None: + if isinstance(value, int): + if value < INT64_MIN or value > INT64_MAX: + raise ValueError( + f"Value {value} is outside the range of a signed 64-bit integer" + ) + elif not isinstance(value, cast(type, np.int64)): + raise ValueError(f"Value is not a signed 64-bit integer: {value}") + + stream.write_signed_varint(value) + + def write_numpy(self, stream: CodedOutputStream, value: np.int64) -> None: + stream.write_signed_varint(value) + + def read(self, stream: CodedInputStream) -> Int64: + return stream.read_signed_varint() + + def read_numpy(self, stream: CodedInputStream) -> np.int64: + return np.int64(stream.read_signed_varint()) + + +int64_serializer = Int64Serializer() + + +class UInt64Serializer(TypeSerializer[UInt64, np.uint64]): + def __init__(self) -> None: + super().__init__(np.uint64) + + def write(self, stream: CodedOutputStream, value: UInt64) -> None: + if isinstance(value, int): + if value < 0 or value > UINT64_MAX: + raise ValueError( + f"Value {value} is outside the range of an unsigned 64-bit integer" + ) + elif not isinstance(value, cast(type, np.uint64)): + raise ValueError(f"Value is not an unsigned 64-bit integer: {value}") + + stream.write_unsigned_varint(value) + + def write_numpy(self, stream: CodedOutputStream, value: np.uint64) -> None: + stream.write_unsigned_varint(value) + + def read(self, stream: CodedInputStream) -> UInt64: + return stream.read_unsigned_varint() + + def read_numpy(self, stream: CodedInputStream) -> np.uint64: + return np.uint64(stream.read_unsigned_varint()) + + +uint64_serializer = UInt64Serializer() + + +class SizeSerializer(TypeSerializer[Size, np.uint64]): + def __init__(self) -> None: + super().__init__(np.uint64) + + def 
write(self, stream: CodedOutputStream, value: Size) -> None: + if isinstance(value, int): + if value < 0 or value > UINT64_MAX: + raise ValueError( + f"Value {value} is outside the range of an unsigned 64-bit integer" + ) + elif not isinstance(value, cast(type, np.uint64)): + raise ValueError(f"Value is not an unsigned 64-bit integer: {value}") + + stream.write_unsigned_varint(value) + + def write_numpy(self, stream: CodedOutputStream, value: np.uint64) -> None: + stream.write_unsigned_varint(value) + + def read(self, stream: CodedInputStream) -> Size: + return stream.read_unsigned_varint() + + def read_numpy(self, stream: CodedInputStream) -> np.uint64: + return np.uint64(stream.read_unsigned_varint()) + + +size_serializer = SizeSerializer() + + +class Float32Serializer(StructSerializer[Float32, np.float32]): + def __init__(self) -> None: + super().__init__(np.float32, " Float32: + return super().read(stream) + + def is_trivially_serializable(self) -> bool: + return True + + +float32_serializer = Float32Serializer() + + +class Float64Serializer(StructSerializer[Float64, np.float64]): + def __init__(self) -> None: + super().__init__(np.float64, " Float64: + return super().read(stream) + + def is_trivially_serializable(self) -> bool: + return True + + +float64_serializer = Float64Serializer() + + +class Complex32Serializer(StructSerializer[ComplexFloat, np.complex64]): + def __init__(self) -> None: + super().__init__(np.complex64, " None: + stream.write(self._struct, value.real, value.imag) + + def write_numpy(self, stream: CodedOutputStream, value: np.complex64) -> None: + stream.write(self._struct, value.real, value.imag) + + def read(self, stream: CodedInputStream) -> ComplexFloat: + return ComplexFloat(*stream.read(self._struct)) + + def read_numpy(self, stream: CodedInputStream) -> np.complex64: + real, imag = stream.read(self._struct) + return np.complex64(complex(real, imag)) + + def is_trivially_serializable(self) -> bool: + return True + + 
+complexfloat32_serializer = Complex32Serializer() + + +class Complex64Serializer(StructSerializer[ComplexDouble, np.complex128]): + def __init__(self) -> None: + super().__init__(np.complex128, " None: + stream.write(self._struct, value.real, value.imag) + + def write_numpy(self, stream: CodedOutputStream, value: np.complex128) -> None: + stream.write(self._struct, value.real, value.imag) + + def read(self, stream: CodedInputStream) -> ComplexDouble: + return ComplexDouble(*stream.read(self._struct)) + + def read_numpy(self, stream: CodedInputStream) -> np.complex128: + real, imag = stream.read(self._struct) + return np.complex128(complex(real, imag)) + + def is_trivially_serializable(self) -> bool: + return True + + +complexfloat64_serializer = Complex64Serializer() + + +class StringSerializer(TypeSerializer[str, np.object_]): + def __init__(self) -> None: + super().__init__(np.object_) + + def write(self, stream: CodedOutputStream, value: str) -> None: + b = value.encode("utf-8") + stream.write_unsigned_varint(len(b)) + stream.write_bytes(b) + + def write_numpy(self, stream: CodedOutputStream, value: np.object_) -> None: + self.write(stream, cast(str, value)) + + def read(self, stream: CodedInputStream) -> str: + length = stream.read_unsigned_varint() + view = stream.read_view(length) + return str(view, "utf-8") + + def read_numpy(self, stream: CodedInputStream) -> np.object_: + return np.object_(self.read(stream)) + + +string_serializer = StringSerializer() + +EPOCH_ORDINAL_DAYS = datetime.date(1970, 1, 1).toordinal() +DATETIME_DAYS_DTYPE = np.dtype("datetime64[D]") + + +class DateSerializer(TypeSerializer[datetime.date, np.datetime64]): + def __init__(self) -> None: + super().__init__(DATETIME_DAYS_DTYPE) + + def write(self, stream: CodedOutputStream, value: datetime.date) -> None: + if isinstance(value, datetime.date): + stream.write_signed_varint(value.toordinal() - EPOCH_ORDINAL_DAYS) + else: + if not isinstance(value, np.datetime64): + raise ValueError( + 
f"Expected datetime.date or numpy.datetime64, got {type(value)}" + ) + + self.write_numpy(stream, value) + + def write_numpy(self, stream: CodedOutputStream, value: np.datetime64) -> None: + if value.dtype == DATETIME_DAYS_DTYPE: + stream.write_signed_varint(value.astype(np.int32)) + else: + stream.write_signed_varint( + value.astype(DATETIME_DAYS_DTYPE).astype(np.int32) + ) + + def read(self, stream: CodedInputStream) -> datetime.date: + days_since_epoch = stream.read_signed_varint() + return datetime.date.fromordinal(days_since_epoch + EPOCH_ORDINAL_DAYS) + + def read_numpy(self, stream: CodedInputStream) -> np.datetime64: + days_since_epoch = stream.read_signed_varint() + return np.datetime64(days_since_epoch, "D") + + +date_serializer = DateSerializer() + +TIMEDELTA_NANOSECONDS_DTYPE = np.dtype("timedelta64[ns]") + + +class TimeSerializer(TypeSerializer[Time, np.timedelta64]): + def __init__(self) -> None: + super().__init__(TIMEDELTA_NANOSECONDS_DTYPE) + + def write(self, stream: CodedOutputStream, value: Time) -> None: + if isinstance(value, Time): + self.write_numpy(stream, value.numpy_value) + elif isinstance(value, datetime.time): + self.write_numpy(stream, Time.from_time(value).numpy_value) + else: + if not isinstance(value, np.timedelta64): + raise ValueError( + f"Expected a Time, datetime.time or np.timedelta64, got {type(value)}" + ) + + self.write_numpy(stream, value) + + def write_numpy(self, stream: CodedOutputStream, value: np.timedelta64) -> None: + if value.dtype == TIMEDELTA_NANOSECONDS_DTYPE: + stream.write_signed_varint(value.astype(np.int64)) + else: + stream.write_signed_varint( + value.astype(DATETIME_NANOSECONDS_DTYPE).astype(np.int64) + ) + + def read(self, stream: CodedInputStream) -> Time: + nanoseconds_since_midnight = stream.read_signed_varint() + return Time(nanoseconds_since_midnight) + + def read_numpy(self, stream: CodedInputStream) -> np.timedelta64: + nanoseconds_since_midnight = stream.read_signed_varint() + return 
np.timedelta64(nanoseconds_since_midnight, "ns") + + +time_serializer = TimeSerializer() + +DATETIME_NANOSECONDS_DTYPE = np.dtype("datetime64[ns]") +EPOCH_DATETIME = datetime.datetime.utcfromtimestamp(0) + + +class DateTimeSerializer(TypeSerializer[DateTime, np.datetime64]): + def __init__(self) -> None: + super().__init__(DATETIME_NANOSECONDS_DTYPE) + + def write(self, stream: CodedOutputStream, value: DateTime) -> None: + if isinstance(value, DateTime): + self.write_numpy(stream, value.numpy_value) + elif isinstance(value, datetime.datetime): + self.write_numpy(stream, DateTime.from_datetime(value).numpy_value) + else: + if not isinstance(value, np.datetime64): + raise ValueError( + f"Expected datetime.datetime or numpy.datetime64, got {type(value)}" + ) + + self.write_numpy(stream, value) + + def write_numpy(self, stream: CodedOutputStream, value: np.datetime64) -> None: + if value.dtype == DATETIME_NANOSECONDS_DTYPE: + stream.write_signed_varint(value.astype(np.int64)) + else: + stream.write_signed_varint( + value.astype(DATETIME_NANOSECONDS_DTYPE).astype(np.int64) + ) + + def read(self, stream: CodedInputStream) -> DateTime: + nanoseconds_since_epoch = stream.read_signed_varint() + return DateTime(nanoseconds_since_epoch) + + def read_numpy(self, stream: CodedInputStream) -> np.datetime64: + nanoseconds_since_epoch = stream.read_signed_varint() + return np.datetime64(nanoseconds_since_epoch, "ns") + + +datetime_serializer = DateTimeSerializer() + + +class NoneSerializer(TypeSerializer[None, Any]): + def __init__(self) -> None: + super().__init__(np.object_) + + def write(self, stream: CodedOutputStream, value: None) -> None: + pass + + def write_numpy(self, stream: CodedOutputStream, value: Any) -> None: + pass + + def read(self, stream: CodedInputStream) -> None: + return None + + def read_numpy(self, stream: CodedInputStream) -> Any: + return np.object_() + + +none_serializer = NoneSerializer() + +TEnum = TypeVar("TEnum", bound=Enum) + + +class 
EnumSerializer(Generic[TEnum, T, T_NP], TypeSerializer[TEnum, T_NP]): + def __init__( + self, integer_serializer: TypeSerializer[T, T_NP], enum_type: type + ) -> None: + super().__init__(integer_serializer.overall_dtype()) + self._integer_serializer = integer_serializer + self._enum_type = enum_type + + def write(self, stream: CodedOutputStream, value: TEnum) -> None: + self._integer_serializer.write(stream, value.value) + + def write_numpy(self, stream: CodedOutputStream, value: T_NP) -> None: + return self._integer_serializer.write_numpy(stream, value) + + def read(self, stream: CodedInputStream) -> TEnum: + int_value = self._integer_serializer.read(stream) + return self._enum_type(int_value) + + def read_numpy(self, stream: CodedInputStream) -> T_NP: + return self._integer_serializer.read_numpy(stream) + + def is_trivially_serializable(self) -> bool: + return self._integer_serializer.is_trivially_serializable() + + +class OptionalSerializer(Generic[T, T_NP], TypeSerializer[Optional[T], np.void]): + def __init__(self, element_serializer: TypeSerializer[T, T_NP]) -> None: + super().__init__( + np.dtype( + [("has_value", np.bool_), ("value", element_serializer.overall_dtype())] + ) + ) + self._element_serializer = element_serializer + self._none = cast(np.void, np.zeros((), dtype=self.overall_dtype())[()]) + + def write(self, stream: CodedOutputStream, value: Optional[T]) -> None: + stream.ensure_capacity(1) + if value is None: + stream.write_byte_no_check(0) + else: + stream.write_byte_no_check(1) + self._element_serializer.write(stream, value) + + def write_numpy(self, stream: CodedOutputStream, value: np.void) -> None: + stream.ensure_capacity(1) + if not value["has_value"]: + stream.write_byte_no_check(0) + else: + stream.write_byte_no_check(1) + self._element_serializer.write_numpy(stream, value["value"]) + + def read(self, stream: CodedInputStream) -> Optional[T]: + has_value = stream.read_byte() + if has_value == 0: + return None + else: + return 
self._element_serializer.read(stream) + + def read_numpy(self, stream: CodedInputStream) -> np.void: + has_value = stream.read_byte() + if has_value == 0: + return self._none + else: + return cast(np.void, (True, self._element_serializer.read_numpy(stream))) + + def is_trivially_serializable(self) -> bool: + return super().is_trivially_serializable() + + +class UnionCaseProtocol(Protocol): + index: int + value: Any + + +class UnionSerializer(TypeSerializer[T, np.object_]): + def __init__( + self, + union_type: type, + cases: list[Optional[tuple[type, TypeSerializer[Any, Any]]]], + ) -> None: + super().__init__(np.object_) + self._union_type = union_type + self._cases = cases + self._offset = 1 if cases[0] is None else 0 + + def write(self, stream: CodedOutputStream, value: T) -> None: + if value is None: + if self._cases[0] is None: + stream.write_byte_no_check(0) + return + else: + raise ValueError("None is not a valid for this union type") + + if not isinstance(value, self._union_type): + raise ValueError( + f"Expected union value of type {self._union_type} but got {type(value)}" + ) + + union_value = cast(UnionCaseProtocol, value) + + tag_index = union_value.index + self._offset + stream.ensure_capacity(1) + stream.write_byte_no_check(tag_index) + type_case = self._cases[tag_index] + assert type_case is not None + type_case[1].write(stream, union_value.value) + + def write_numpy(self, stream: CodedOutputStream, value: np.object_) -> None: + self.write(stream, cast(T, value)) + + def read(self, stream: CodedInputStream) -> T: + case_index = stream.read_byte() + if case_index == 0 and self._offset == 1: + return None # type: ignore + case_type, case_serializer = self._cases[case_index] # type: ignore + return case_type(case_serializer.read(stream)) # type: ignore + + def read_numpy(self, stream: CodedInputStream) -> np.object_: + return self.read(stream) # type: ignore + + +class StreamSerializer(TypeSerializer[Iterable[T], Any]): + def __init__(self, 
element_serializer: TypeSerializer[T, T_NP]) -> None: + super().__init__(np.object_) + self._element_serializer = element_serializer + + def write(self, stream: CodedOutputStream, value: Iterable[T]) -> None: + # Note that the final 0 is missing and will be added before the next protocol step + # or the protocol is closed. + if isinstance(value, list) and len(value) > 0: + stream.write_unsigned_varint(len(value)) + for element in value: + self._element_serializer.write(stream, element) + else: + for element in value: + stream.write_byte_no_check(1) + self._element_serializer.write(stream, element) + + def write_numpy(self, stream: CodedOutputStream, value: Any) -> None: + raise NotImplementedError() + + def read(self, stream: CodedInputStream) -> Iterable[T]: + while (i := stream.read_unsigned_varint()) > 0: + for _ in range(i): + yield self._element_serializer.read(stream) + + def read_numpy(self, stream: CodedInputStream) -> np.object_: + raise NotImplementedError() + + +class FixedVectorSerializer(Generic[T, T_NP], TypeSerializer[list[T], np.object_]): + def __init__( + self, element_serializer: TypeSerializer[T, T_NP], length: int + ) -> None: + super().__init__(np.dtype((element_serializer.overall_dtype(), length))) + self.element_serializer = element_serializer + self._length = length + + def write(self, stream: CodedOutputStream, value: list[T]) -> None: + if len(value) != self._length: + raise ValueError( + f"Expected a list of length {self._length}, got {len(value)}" + ) + for element in value: + self.element_serializer.write(stream, element) + + def write_numpy(self, stream: CodedOutputStream, value: np.object_) -> None: + raise NotImplementedError("Internal error: expected this to be a subarray") + + def read(self, stream: CodedInputStream) -> list[T]: + return [self.element_serializer.read(stream) for _ in range(self._length)] + + def read_numpy(self, stream: CodedInputStream) -> np.object_: + raise NotImplementedError("Internal error: expected this to 
be a subarray") + + def is_trivially_serializable(self) -> bool: + return self.element_serializer.is_trivially_serializable() + + +class VectorSerializer(Generic[T, T_NP], TypeSerializer[list[T], np.object_]): + def __init__(self, element_serializer: TypeSerializer[T, T_NP]) -> None: + super().__init__(np.object_) + self._element_serializer = element_serializer + + def write(self, stream: CodedOutputStream, value: list[T]) -> None: + stream.write_unsigned_varint(len(value)) + for element in value: + self._element_serializer.write(stream, element) + + def write_numpy(self, stream: CodedOutputStream, value: np.object_) -> None: + if not isinstance(value, list): + raise ValueError(f"Expected a list, got {type(value)}") + + stream.write_unsigned_varint(len(value)) + for element in cast(list[T], value): + self._element_serializer.write(stream, element) + + def read(self, stream: CodedInputStream) -> list[T]: + length = stream.read_unsigned_varint() + return [self._element_serializer.read(stream) for _ in range(length)] + + def read_numpy(self, stream: CodedInputStream) -> np.object_: + return np.object_(self.read(stream)) + + +TKey = TypeVar("TKey") +TKey_NP = TypeVar("TKey_NP", bound=np.generic) +TValue = TypeVar("TValue") +TValue_NP = TypeVar("TValue_NP", bound=np.generic) + + +class MapSerializer( + Generic[TKey, TKey_NP, TValue, TValue_NP], + TypeSerializer[dict[TKey, TValue], np.object_], +): + def __init__( + self, + key_serializer: TypeSerializer[TKey, TKey_NP], + value_serializer: TypeSerializer[TValue, TValue_NP], + ) -> None: + super().__init__(np.object_) + self._key_serializer = key_serializer + self._value_serializer = value_serializer + + def write(self, stream: CodedOutputStream, value: dict[TKey, TValue]) -> None: + stream.write_unsigned_varint(len(value)) + for k, v in value.items(): + self._key_serializer.write(stream, k) + self._value_serializer.write(stream, v) + + def write_numpy(self, stream: CodedOutputStream, value: np.object_) -> None: + 
self.write(stream, cast(dict[TKey, TValue], value)) + + def read(self, stream: CodedInputStream) -> dict[TKey, TValue]: + length = stream.read_unsigned_varint() + return { + self._key_serializer.read(stream): self._value_serializer.read(stream) + for _ in range(length) + } + + def read_numpy(self, stream: CodedInputStream) -> np.object_: + return np.object_(self.read(stream)) + + +class NDArraySerializerBase( + Generic[T, T_NP], TypeSerializer[npt.NDArray[Any], np.object_] +): + def __init__( + self, + overall_dtype: npt.DTypeLike, + element_serializer: TypeSerializer[T, T_NP], + dtype: npt.DTypeLike, + ) -> None: + super().__init__(overall_dtype) + self.element_serializer = element_serializer + + ( + self._array_dtype, + self._subarray_shape, + ) = NDArraySerializerBase._get_dtype_and_subarray_shape( + dtype + if isinstance(dtype, np.dtype) + else np.dtype(dtype) # pyright: ignore [reportUnknownArgumentType] + ) + if self._subarray_shape == (): + self._subarray_shape = None + else: + if isinstance(element_serializer, FixedNDArraySerializer) or isinstance( + element_serializer, FixedVectorSerializer + ): + self.element_serializer = cast( + TypeSerializer[T, T_NP], + element_serializer.element_serializer, # pyright: ignore [reportUnknownMemberType] + ) + + @staticmethod + def _get_dtype_and_subarray_shape( + dtype: np.dtype[Any], + ) -> tuple[np.dtype[Any], tuple[int, ...]]: + if dtype.subdtype is None: + return dtype, () + subres = NDArraySerializerBase._get_dtype_and_subarray_shape(dtype.subdtype[0]) + return (subres[0], dtype.subdtype[1] + subres[1]) + + def _write_data(self, stream: CodedOutputStream, value: npt.NDArray[Any]) -> None: + if value.dtype != self._array_dtype: + # see if it's the same dtype but packed, not aligned + packed_dtype = recfunctions.repack_fields(self._array_dtype, align=False, recurse=True) # type: ignore + if packed_dtype != value.dtype: + if packed_dtype == self._array_dtype: + message = f"Expected dtype {self._array_dtype}, got 
{value.dtype}" + else: + message = f"Expected dtype {self._array_dtype} or {packed_dtype}, got {value.dtype}" + + raise ValueError(message) + + if self._is_current_array_trivially_serializable(value): + stream.write_bytes_directly(value.data) + else: + for element in value.flat: + self.element_serializer.write_numpy(stream, element) + + def _read_data( + self, stream: CodedInputStream, shape: tuple[int, ...] + ) -> npt.NDArray[Any]: + flat_length = int(np.prod(shape)) # type: ignore + + if self.element_serializer.is_trivially_serializable(): + flat_byte_length = flat_length * self._array_dtype.itemsize + byte_array = stream.read_bytearray(flat_byte_length) + return np.frombuffer(byte_array, dtype=self._array_dtype).reshape(shape) + + result: npt.NDArray[T_NP] = np.ndarray((flat_length,), dtype=self._array_dtype) + for i in range(flat_length): + result[i] = self.element_serializer.read_numpy(stream) + + return result.reshape(shape) + + def _is_current_array_trivially_serializable(self, value: npt.NDArray[Any]) -> bool: + return ( + self.element_serializer.is_trivially_serializable() + and value.flags.c_contiguous + and ( + self._array_dtype.fields is None + or all(f != "" for f in self._array_dtype.fields) + ) + ) + + +class DynamicNDArraySerializer(NDArraySerializerBase[T, T_NP]): + def __init__( + self, + element_serializer: TypeSerializer[T, T_NP], + ) -> None: + super().__init__( + np.object_, element_serializer, element_serializer.overall_dtype() + ) + + def write(self, stream: CodedOutputStream, value: npt.NDArray[Any]) -> None: + if self._subarray_shape is None: + stream.write_unsigned_varint(value.ndim) + for dim in value.shape: + stream.write_unsigned_varint(dim) + else: + if len(value.shape) < len(self._subarray_shape) or ( + value.shape[-len(self._subarray_shape) :] != self._subarray_shape + ): + raise ValueError( + f"The array is required to have shape (..., {(', '.join((str(i) for i in self._subarray_shape)))})" + ) + 
stream.write_unsigned_varint(value.ndim - len(self._subarray_shape)) + for dim in value.shape[: -len(self._subarray_shape)]: + stream.write_unsigned_varint(dim) + + self._write_data(stream, value) + + def write_numpy(self, stream: CodedOutputStream, value: np.object_) -> None: + self.write(stream, cast(npt.NDArray[Any], value)) + + def read(self, stream: CodedInputStream) -> npt.NDArray[Any]: + if self._subarray_shape is None: + ndims = stream.read_unsigned_varint() + shape = tuple(stream.read_unsigned_varint() for _ in range(ndims)) + else: + ndims = stream.read_unsigned_varint() + shape = ( + tuple(stream.read_unsigned_varint() for _ in range(ndims)) + + self._subarray_shape + ) + return self._read_data(stream, shape) + + def read_numpy(self, stream: CodedInputStream) -> np.object_: + return cast(np.object_, self.read(stream)) + + +class NDArraySerializer(Generic[T, T_NP], NDArraySerializerBase[T, T_NP]): + def __init__( + self, + element_serializer: TypeSerializer[T, T_NP], + ndims: int, + ) -> None: + super().__init__( + np.object_, element_serializer, element_serializer.overall_dtype() + ) + self._ndims = ndims + + def write(self, stream: CodedOutputStream, value: npt.NDArray[Any]) -> None: + if self._subarray_shape is None: + if value.ndim != self._ndims: + raise ValueError(f"Expected {self._ndims} dimensions, got {value.ndim}") + + for dim in value.shape: + stream.write_unsigned_varint(dim) + else: + total_dims = len(self._subarray_shape) + self._ndims + if value.ndim != total_dims: + raise ValueError(f"Expected {total_dims} dimensions, got {value.ndim}") + + if value.shape[-len(self._subarray_shape) :] != self._subarray_shape: + raise ValueError( + f"The array is required to have shape (..., {(', '.join((str(i) for i in self._subarray_shape)))})" + ) + + for dim in value.shape[: -len(self._subarray_shape)]: + stream.write_unsigned_varint(dim) + + self._write_data(stream, value) + + def write_numpy(self, stream: CodedOutputStream, value: np.object_) -> None: 
+ self.write(stream, cast(npt.NDArray[Any], value)) + + def read(self, stream: CodedInputStream) -> npt.NDArray[Any]: + shape = tuple(stream.read_unsigned_varint() for _ in range(self._ndims)) + if self._subarray_shape is not None: + shape += self._subarray_shape + + return self._read_data(stream, shape) + + def read_numpy(self, stream: CodedInputStream) -> np.object_: + return cast(np.object_, self.read(stream)) + + +class FixedNDArraySerializer(Generic[T, T_NP], NDArraySerializerBase[T, T_NP]): + def __init__( + self, + element_serializer: TypeSerializer[T, T_NP], + shape: tuple[int, ...], + ) -> None: + dtype = element_serializer.overall_dtype() + super().__init__(np.dtype((dtype, shape)), element_serializer, dtype) + self._shape = shape + + def write(self, stream: CodedOutputStream, value: npt.NDArray[Any]) -> None: + required_shape = ( + self._shape + if self._subarray_shape is None + else self._shape + self._subarray_shape + ) + if value.shape != required_shape: + raise ValueError(f"Expected shape {required_shape}, got {value.shape}") + + self._write_data(stream, value) + + def write_numpy(self, stream: CodedOutputStream, value: np.object_) -> None: + self.write(stream, cast(npt.NDArray[Any], value)) + + def read(self, stream: CodedInputStream) -> npt.NDArray[Any]: + full_shape = ( + self._shape + if self._subarray_shape is None + else self._shape + self._subarray_shape + ) + return self._read_data(stream, full_shape) + + def read_numpy(self, stream: CodedInputStream) -> np.object_: + return cast(np.object_, self.read(stream)) + + def is_trivially_serializable(self) -> bool: + return self.element_serializer.is_trivially_serializable() + + +class RecordSerializer(TypeSerializer[T, np.void]): + def __init__( + self, field_serializers: list[Tuple[str, TypeSerializer[Any, Any]]] + ) -> None: + super().__init__( + np.dtype( + [ + (name, serializer.overall_dtype()) + for name, serializer in field_serializers + ], + align=True, + ) + ) + + self._field_serializers = 
field_serializers + + def is_trivially_serializable(self) -> bool: + return all( + serializer.is_trivially_serializable() + for _, serializer in self._field_serializers + ) + + def _write(self, stream: CodedOutputStream, *values: Any) -> None: + for i, (_, serializer) in enumerate(self._field_serializers): + serializer.write(stream, values[i]) + + def _read(self, stream: CodedInputStream) -> tuple[Any, ...]: + return tuple( + serializer.read(stream) for _, serializer in self._field_serializers + ) + + def read_numpy(self, stream: CodedInputStream) -> np.void: + return cast(np.void, self._read(stream)) + + +# Only used in the header +int32_struct = struct.Struct(" None: + if value < INT32_MIN or value > INT32_MAX: + raise ValueError( + f"Value {value} is outside the range of a signed 32-bit integer" + ) + stream.write(int32_struct, value) + + +def read_fixed_int32(stream: CodedInputStream) -> int: + return stream.read(int32_struct)[0] diff --git a/python/petsird/_dtypes.py b/python/petsird/_dtypes.py new file mode 100644 index 0000000..5841788 --- /dev/null +++ b/python/petsird/_dtypes.py @@ -0,0 +1,89 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +import datetime +from types import GenericAlias +import sys + +if sys.version_info >= (3, 10): + from types import UnionType + +from typing import Any, Callable, Union, cast, get_args, get_origin +import numpy as np +from . 
import yardl_types as yardl + + +def make_get_dtype_func( + dtype_map: dict[ + Union[type, GenericAlias], + Union[np.dtype[Any], Callable[[tuple[type, ...]], np.dtype[Any]]], + ] +) -> Callable[[Union[type, GenericAlias]], np.dtype[Any]]: + dtype_map[bool] = np.dtype(np.bool_) + dtype_map[yardl.Int8] = np.dtype(np.int8) + dtype_map[yardl.UInt8] = np.dtype(np.uint8) + dtype_map[yardl.Int16] = np.dtype(np.int16) + dtype_map[yardl.UInt16] = np.dtype(np.uint16) + dtype_map[yardl.Int32] = np.dtype(np.int32) + dtype_map[yardl.UInt32] = np.dtype(np.uint32) + dtype_map[yardl.Int64] = np.dtype(np.int64) + dtype_map[yardl.UInt64] = np.dtype(np.uint64) + dtype_map[yardl.Size] = np.dtype(np.uint64) + dtype_map[yardl.Float32] = np.dtype(np.float32) + dtype_map[yardl.Float64] = np.dtype(np.float64) + dtype_map[yardl.ComplexFloat] = np.dtype(np.complex64) + dtype_map[yardl.ComplexDouble] = np.dtype(np.complex128) + dtype_map[datetime.date] = np.dtype("datetime64[D]") + dtype_map[yardl.Time] = np.dtype("timedelta64[ns]") + dtype_map[yardl.DateTime] = np.dtype("datetime64[ns]") + dtype_map[str] = np.dtype(np.object_) + + # Add the Python types to the dictionary too, but these may not be + # correct since they map to several dtypes + dtype_map[int] = np.dtype(np.int64) + dtype_map[float] = np.dtype(np.float64) + dtype_map[complex] = np.dtype(np.complex128) + + dtype_map[list] = lambda type_args: np.dtype(np.object_) + dtype_map[dict] = lambda type_args: np.dtype(np.object_) + dtype_map[np.ndarray] = lambda type_args: np.dtype(np.object_) # type: ignore + + def get_dtype_impl( + dtype_map: dict[ + Union[type, GenericAlias], + Union[np.dtype[Any], Callable[[tuple[type, ...]], np.dtype[Any]]], + ], + t: Union[type, GenericAlias], + ) -> np.dtype[Any]: + # Check dtype map for this type first + if (res := dtype_map.get(t, None)) is not None: + if callable(res): + raise RuntimeError(f"Generic type arguments not provided for {t}") + else: + return res + + origin = get_origin(t) + + if 
origin == Union or ( + sys.version_info >= (3, 10) and isinstance(t, UnionType) + ): + return _get_union_dtype(get_args(t)) + + # Here, t is either invalid (no dtype registered) + # or t is a types.GenericAlias with type arguments specified + if origin is not None and (res := dtype_map.get(origin, None)) is not None: + if callable(res): + return res(get_args(t)) + + raise RuntimeError(f"Cannot find dtype for {t}") + + def _get_union_dtype(args: tuple[type, ...]) -> np.dtype[Any]: + if len(args) == 2 and args[1] == cast(type, type(None)): + # This is an optional type + inner_type = get_dtype_impl(dtype_map, args[0]) + return np.dtype( + [("has_value", np.bool_), ("value", inner_type)], align=True + ) + return np.dtype(np.object_) + + return lambda t: get_dtype_impl(dtype_map, t) diff --git a/python/petsird/_ndjson.py b/python/petsird/_ndjson.py new file mode 100644 index 0000000..7cb67c1 --- /dev/null +++ b/python/petsird/_ndjson.py @@ -0,0 +1,1194 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+ +# pyright: reportUnnecessaryIsInstance=false +# pyright: reportUnknownArgumentType=false +# pyright: reportUnknownVariableType=false + +from abc import ABC, abstractmethod +import datetime +from enum import IntFlag +import io +import json +from typing import Any, Generic, Optional, TextIO, TypeVar, Union, cast + +import numpy as np +import numpy.typing as npt +from numpy.lib import recfunctions + +from .yardl_types import * + +CURRENT_NDJSON_FORMAT_VERSION: int = 1 + +INT8_MIN: int = np.iinfo(np.int8).min +INT8_MAX: int = np.iinfo(np.int8).max + +UINT8_MAX: int = np.iinfo(np.uint8).max + +INT16_MIN: int = np.iinfo(np.int16).min +INT16_MAX: int = np.iinfo(np.int16).max + +UINT16_MAX: int = np.iinfo(np.uint16).max + +INT32_MIN: int = np.iinfo(np.int32).min +INT32_MAX: int = np.iinfo(np.int32).max + +UINT32_MAX: int = np.iinfo(np.uint32).max + +INT64_MIN: int = np.iinfo(np.int64).min +INT64_MAX: int = np.iinfo(np.int64).max + +UINT64_MAX: int = np.iinfo(np.uint64).max + +MISSING_SENTINEL = object() + + +class NDJsonProtocolWriter(ABC): + def __init__(self, stream: Union[TextIO, str], schema: str) -> None: + if isinstance(stream, str): + self._stream = open(stream, "w", encoding="utf-8") + self._owns_stream = True + else: + self._stream = stream + self._owns_stream = False + + self._write_json_line( + { + "yardl": { + "version": CURRENT_NDJSON_FORMAT_VERSION, + "schema": json.loads(schema), + }, + }, + ) + + def _close(self) -> None: + if self._owns_stream: + self._stream.close() + + def _end_stream(self) -> None: + pass + + def _write_json_line(self, value: object) -> None: + json.dump( + value, + self._stream, + ensure_ascii=False, + separators=(",", ":"), + check_circular=False, + ) + self._stream.write("\n") + + +class NDJsonProtocolReader: + def __init__( + self, stream: Union[io.BufferedReader, TextIO, str], schema: str + ) -> None: + if isinstance(stream, str): + self._stream = open(stream, "r", encoding="utf-8") + self._owns_stream = True + else: + 
self._stream = stream + self._owns_stream = False + + self._unused_value: Optional[dict[str, object]] = None + + line = self._stream.readline() + try: + header_json = json.loads(line) + except json.JSONDecodeError: + raise ValueError( + "Data in the stream is not in the expected Yardl NDJSON format." + ) + + if not isinstance(header_json, dict) or not "yardl" in header_json: + raise ValueError( + "Data in the stream is not in the expected Yardl NDJSON format." + ) + + header_json = header_json["yardl"] + if not isinstance(header_json, dict): + raise ValueError( + "Data in the stream is not in the expected Yardl NDJSON format." + ) + + if ( + header_json.get("version") # pyright: ignore [reportUnknownMemberType] + != CURRENT_NDJSON_FORMAT_VERSION + ): + raise ValueError("Unsupported yardl version.") + + if header_json.get( # pyright: ignore [reportUnknownMemberType] + "schema" + ) != json.loads(schema): + raise ValueError( + "The schema of the data to be read is not compatible with the current protocol." + ) + + def _close(self) -> None: + if self._owns_stream: + self._stream.close() + + def _read_json_line(self, stepName: str, required: bool) -> object: + missing = MISSING_SENTINEL + if self._unused_value is not None: + if (value := self._unused_value.get(stepName, missing)) is not missing: + self._unused_value = None + return value + if required: + raise ValueError(f"Expected protocol step '{stepName}' not found.") + return MISSING_SENTINEL + + line = self._stream.readline() + if line == "": + if not required: + return MISSING_SENTINEL + raise ValueError( + f"Encountered EOF but expected to find protocol step '{stepName}'." 
+ ) + + json_object = json.loads(line) + if (value := json_object.get(stepName, missing)) is not MISSING_SENTINEL: + return value + + if not required: + self._unused_value = json_object + return MISSING_SENTINEL + + raise ValueError(f"Expected protocol step '{stepName}' not found.") + + +T = TypeVar("T") +T_NP = TypeVar("T_NP", bound=np.generic) + + +class JsonConverter(Generic[T, T_NP], ABC): + def __init__(self, dtype: npt.DTypeLike) -> None: + self._dtype: np.dtype[Any] = np.dtype(dtype) + + def overall_dtype(self) -> np.dtype[Any]: + return self._dtype + + @abstractmethod + def to_json(self, value: T) -> object: + raise NotImplementedError + + @abstractmethod + def numpy_to_json(self, value: T_NP) -> object: + raise NotImplementedError + + @abstractmethod + def from_json(self, json_object: object) -> T: + raise NotImplementedError + + @abstractmethod + def from_json_to_numpy(self, json_object: object) -> T_NP: + raise NotImplementedError + + def supports_none(self) -> bool: + return False + + +class BoolConverter(JsonConverter[bool, np.bool_]): + def __init__(self) -> None: + super().__init__(np.bool_) + + def to_json(self, value: bool) -> object: + if not isinstance(value, bool): + raise TypeError(f"Expected a bool but got {type(value)}") + + return value + + def numpy_to_json(self, value: np.bool_) -> object: + return bool(value) + + def from_json(self, json_object: object) -> bool: + return bool(json_object) + + def from_json_to_numpy(self, json_object: object) -> np.bool_: + return np.bool_(json_object) + + +bool_converter = BoolConverter() + + +class Int8Converter(JsonConverter[int, np.int8]): + def __init__(self) -> None: + super().__init__(np.int8) + + def to_json(self, value: int) -> object: + if not isinstance(value, int): + raise ValueError(f"Value in not a signed 8-bit integer: {value}") + if value < INT8_MIN or value > INT8_MAX: + raise ValueError( + f"Value {value} is outside the range of a signed 8-bit integer" + ) + + return value + + def 
numpy_to_json(self, value: np.int8) -> object: + return int(value) + + def from_json(self, json_object: object) -> int: + return cast(int, json_object) + + def from_json_to_numpy(self, json_object: object) -> np.int8: + return np.int8(cast(int, json_object)) + + +int8_converter = Int8Converter() + + +class UInt8Converter(JsonConverter[int, np.uint8]): + def __init__(self) -> None: + super().__init__(np.uint8) + + def to_json(self, value: int) -> object: + if not isinstance(value, int): + raise ValueError(f"Value in not an unsigned 8-bit integer: {value}") + if value < 0 or value > UINT8_MAX: + raise ValueError( + f"Value {value} is outside the range of an unsigned 8-bit integer" + ) + + return value + + def numpy_to_json(self, value: np.uint8) -> object: + return int(value) + + def from_json(self, json_object: object) -> int: + return cast(int, json_object) + + def from_json_to_numpy(self, json_object: object) -> np.uint8: + return np.uint8(cast(int, json_object)) + + +uint8_converter = UInt8Converter() + + +class Int16Converter(JsonConverter[int, np.int16]): + def __init__(self) -> None: + super().__init__(np.int16) + + def to_json(self, value: int) -> object: + if not isinstance(value, int): + raise ValueError(f"Value in not a signed 16-bit integer: {value}") + if value < INT16_MIN or value > INT16_MAX: + raise ValueError( + f"Value {value} is outside the range of a signed 16-bit integer" + ) + + return value + + def numpy_to_json(self, value: np.int16) -> object: + return int(value) + + def from_json(self, json_object: object) -> int: + return cast(int, json_object) + + def from_json_to_numpy(self, json_object: object) -> np.int16: + return np.int16(cast(int, json_object)) + + +int16_converter = Int16Converter() + + +class UInt16Converter(JsonConverter[int, np.uint16]): + def __init__(self) -> None: + super().__init__(np.uint16) + + def to_json(self, value: int) -> object: + if not isinstance(value, int): + raise ValueError(f"Value in not an unsigned 16-bit 
integer: {value}") + if value < 0 or value > UINT16_MAX: + raise ValueError( + f"Value {value} is outside the range of an unsigned 16-bit integer" + ) + + return value + + def numpy_to_json(self, value: np.uint16) -> object: + return int(value) + + def from_json(self, json_object: object) -> int: + return cast(int, json_object) + + def from_json_to_numpy(self, json_object: object) -> np.uint16: + return np.uint16(cast(int, json_object)) + + +uint16_converter = UInt16Converter() + + +class Int32Converter(JsonConverter[int, np.int32]): + def __init__(self) -> None: + super().__init__(np.int32) + + def to_json(self, value: int) -> object: + if not isinstance(value, int): + raise ValueError(f"Value in not a signed 32-bit integer: {value}") + if value < INT32_MIN or value > INT32_MAX: + raise ValueError( + f"Value {value} is outside the range of a signed 32-bit integer" + ) + + return value + + def numpy_to_json(self, value: np.int32) -> object: + return int(value) + + def from_json(self, json_object: object) -> int: + return cast(int, json_object) + + def from_json_to_numpy(self, json_object: object) -> np.int32: + return np.int32(cast(int, json_object)) + + +int32_converter = Int32Converter() + + +class UInt32Converter(JsonConverter[int, np.uint32]): + def __init__(self) -> None: + super().__init__(np.uint32) + + def to_json(self, value: int) -> object: + if not isinstance(value, int): + raise ValueError(f"Value in not an unsigned 32-bit integer: {value}") + if value < 0 or value > UINT32_MAX: + raise ValueError( + f"Value {value} is outside the range of an unsigned 32-bit integer" + ) + + return value + + def numpy_to_json(self, value: np.uint32) -> object: + return int(value) + + def from_json(self, json_object: object) -> int: + return cast(int, json_object) + + def from_json_to_numpy(self, json_object: object) -> np.uint32: + return np.uint32(cast(int, json_object)) + + +uint32_converter = UInt32Converter() + + +class Int64Converter(JsonConverter[int, np.int64]): 
+ def __init__(self) -> None: + super().__init__(np.int64) + + def to_json(self, value: int) -> object: + if not isinstance(value, int): + raise ValueError(f"Value in not a signed 64-bit integer: {value}") + if value < INT64_MIN or value > INT64_MAX: + raise ValueError( + f"Value {value} is outside the range of a signed 64-bit integer" + ) + + return value + + def numpy_to_json(self, value: np.int64) -> object: + return int(value) + + def from_json(self, json_object: object) -> int: + return cast(int, json_object) + + def from_json_to_numpy(self, json_object: object) -> np.int64: + return np.int64(cast(int, json_object)) + + +int64_converter = Int64Converter() + + +class UInt64Converter(JsonConverter[int, np.uint64]): + def __init__(self) -> None: + super().__init__(np.uint64) + + def to_json(self, value: int) -> object: + if not isinstance(value, int): + raise ValueError(f"Value in not an unsigned 64-bit integer: {value}") + if value < 0 or value > UINT64_MAX: + raise ValueError( + f"Value {value} is outside the range of an unsigned 64-bit integer" + ) + + return value + + def numpy_to_json(self, value: np.uint64) -> object: + return int(value) + + def from_json(self, json_object: object) -> int: + return cast(int, json_object) + + def from_json_to_numpy(self, json_object: object) -> np.uint64: + return np.uint64(cast(int, json_object)) + + +uint64_converter = UInt64Converter() + + +class SizeConverter(JsonConverter[int, np.uint64]): + def __init__(self) -> None: + super().__init__(np.uint64) + + def to_json(self, value: int) -> object: + if not isinstance(value, int): + raise ValueError(f"Value in not an unsigned 64-bit integer: {value}") + if value < 0 or value > UINT64_MAX: + raise ValueError( + f"Value {value} is outside the range of an unsigned 64-bit integer" + ) + + return value + + def numpy_to_json(self, value: np.uint64) -> object: + return int(value) + + def from_json(self, json_object: object) -> int: + return cast(int, json_object) + + def 
from_json_to_numpy(self, json_object: object) -> np.uint64: + return np.uint64(cast(int, json_object)) + + +size_converter = SizeConverter() + + +class Float32Converter(JsonConverter[float, np.float32]): + def __init__(self) -> None: + super().__init__(np.float32) + + def to_json(self, value: float) -> object: + if not isinstance(value, float): + raise ValueError(f"Value in not a 32-bit float: {value}") + + return value + + def numpy_to_json(self, value: np.float32) -> object: + return float(value) + + def from_json(self, json_object: object) -> float: + return cast(float, json_object) + + def from_json_to_numpy(self, json_object: object) -> np.float32: + return np.float32(cast(float, json_object)) + + +float32_converter = Float32Converter() + + +class Float64Converter(JsonConverter[float, np.float64]): + def __init__(self) -> None: + super().__init__(np.float64) + + def to_json(self, value: float) -> object: + if not isinstance(value, float): + raise ValueError(f"Value in not a 64-bit float: {value}") + + return value + + def numpy_to_json(self, value: np.float64) -> object: + return float(value) + + def from_json(self, json_object: object) -> float: + return cast(float, json_object) + + def from_json_to_numpy(self, json_object: object) -> np.float64: + return np.float64(cast(float, json_object)) + + +float64_converter = Float64Converter() + + +class Complex32Converter(JsonConverter[complex, np.complex64]): + def __init__(self) -> None: + super().__init__(np.complex64) + + def to_json(self, value: complex) -> object: + if not isinstance(value, complex): + raise ValueError(f"Value in not a 32-bit complex value: {value}") + + return [value.real, value.imag] + + def numpy_to_json(self, value: np.complex64) -> object: + return [float(value.real), float(value.imag)] + + def from_json(self, json_object: object) -> complex: + if not isinstance(json_object, list) or len(json_object) != 2: + raise ValueError(f"Expected a list of two floating-point numbers.") + + return 
complex(json_object[0], json_object[1]) + + def from_json_to_numpy(self, json_object: object) -> np.complex64: + return np.complex64(self.from_json(json_object)) + + +complexfloat32_converter = Complex32Converter() + + +class Complex64Converter(JsonConverter[complex, np.complex128]): + def __init__(self) -> None: + super().__init__(np.complex128) + + def to_json(self, value: complex) -> object: + if not isinstance(value, complex): + raise ValueError(f"Value in not a 64-bit complex value: {value}") + + return [value.real, value.imag] + + def numpy_to_json(self, value: np.complex128) -> object: + return [float(value.real), float(value.imag)] + + def from_json(self, json_object: object) -> complex: + if not isinstance(json_object, list) or len(json_object) != 2: + raise ValueError(f"Expected a list of two floating-point numbers.") + + return complex(json_object[0], json_object[1]) + + def from_json_to_numpy(self, json_object: object) -> np.complex128: + return np.complex128(self.from_json(json_object)) + + +complexfloat64_converter = Complex64Converter() + + +class StringConverter(JsonConverter[str, np.object_]): + def __init__(self) -> None: + super().__init__(np.object_) + + def to_json(self, value: str) -> object: + if not isinstance(value, str): + raise ValueError(f"Value in not a string: {value}") + return value + + def numpy_to_json(self, value: np.object_) -> object: + return self.to_json(cast(str, value)) + + def from_json(self, json_object: object) -> str: + return cast(str, json_object) + + def from_json_to_numpy(self, json_object: object) -> np.object_: + return np.object_(json_object) + + +string_converter = StringConverter() + + +class DateConverter(JsonConverter[datetime.date, np.datetime64]): + def __init__(self) -> None: + super().__init__(np.datetime64) + + def to_json(self, value: datetime.date) -> object: + if not isinstance(value, datetime.date): + raise ValueError(f"Value in not a date: {value}") + return value.isoformat() + + def 
numpy_to_json(self, value: np.datetime64) -> object: + return str(value.astype("datetime64[D]")) + + def from_json(self, json_object: object) -> datetime.date: + return datetime.date.fromisoformat(cast(str, json_object)) + + def from_json_to_numpy(self, json_object: object) -> np.datetime64: + return np.datetime64(cast(str, json_object), "D") + + +date_converter = DateConverter() + + +class TimeConverter(JsonConverter[Time, np.timedelta64]): + def __init__(self) -> None: + super().__init__(np.timedelta64) + + def to_json(self, value: Time) -> object: + if isinstance(value, Time): + return str(value) + elif isinstance(value, datetime.time): + return value.isoformat() + + raise ValueError(f"Value in not a time: {value}") + + def numpy_to_json(self, value: np.timedelta64) -> object: + return str(Time(value)) + + def from_json(self, json_object: object) -> Time: + return Time.parse(cast(str, json_object)) + + def from_json_to_numpy(self, json_object: object) -> np.timedelta64: + return self.from_json(json_object).numpy_value + + +time_converter = TimeConverter() + + +class DateTimeConverter(JsonConverter[DateTime, np.datetime64]): + def __init__(self) -> None: + super().__init__(np.datetime64) + + def to_json(self, value: DateTime) -> object: + if isinstance(value, DateTime): + return str(value) + elif isinstance(value, datetime.datetime): + return value.isoformat() + + raise ValueError(f"Value in not a datetime: {value}") + + def numpy_to_json(self, value: np.datetime64) -> object: + return str(value) + + def from_json(self, json_object: object) -> DateTime: + return DateTime.parse(cast(str, json_object)) + + def from_json_to_numpy(self, json_object: object) -> np.datetime64: + return self.from_json(json_object).numpy_value + + +datetime_converter = DateTimeConverter() + +TEnum = TypeVar("TEnum", bound=OutOfRangeEnum) + + +class EnumConverter(Generic[TEnum, T_NP], JsonConverter[TEnum, T_NP]): + def __init__( + self, + enum_type: type[TEnum], + numpy_type: type, + 
name_to_value: dict[str, TEnum], + value_to_name: dict[TEnum, str], + ) -> None: + super().__init__(numpy_type) + self._enum_type = enum_type + self._name_to_value = name_to_value + self._value_to_name = value_to_name + + def to_json(self, value: TEnum) -> object: + if not isinstance(value, self._enum_type): + raise ValueError(f"Value in not an enum or not the right type: {value}") + if value.name == "": + return value.value + + return self._value_to_name[value] + + def numpy_to_json(self, value: T_NP) -> object: + return self.to_json(self._enum_type(value)) + + def from_json(self, json_object: object) -> TEnum: + if isinstance(json_object, int): + return self._enum_type(json_object) + + return self._name_to_value[cast(str, json_object)] + + def from_json_to_numpy(self, json_object: object) -> T_NP: + return self.from_json(json_object).value + + +TFlag = TypeVar("TFlag", bound=IntFlag) + + +class FlagsConverter(Generic[TFlag, T_NP], JsonConverter[TFlag, T_NP]): + def __init__( + self, + enum_type: type[TFlag], + numpy_type: type, + name_to_value: dict[str, TFlag], + value_to_name: dict[TFlag, str], + ) -> None: + super().__init__(numpy_type) + self._enum_type = enum_type + self._name_to_value = name_to_value + self._value_to_name = value_to_name + self._zero_enum = enum_type(0) + self._zero_json = ( + [value_to_name[self._zero_enum]] if self._zero_enum in value_to_name else [] + ) + + def to_json(self, value: TFlag) -> object: + if not isinstance(value, self._enum_type): + raise ValueError(f"Value in not an enum or not the right type: {value}") + if value.value == 0: + return self._zero_json + + remaining_int_value = value.value + result: list[str] = [] + for enum_value in self._value_to_name: + if enum_value.value == 0: + continue + if enum_value.value & remaining_int_value == enum_value.value: + result.append(self._value_to_name[enum_value]) + remaining_int_value &= ~enum_value.value + if remaining_int_value == 0: + break + + if remaining_int_value == 0: + return 
result + + return value.value + + def numpy_to_json(self, value: T_NP) -> object: + return self.to_json(self._enum_type(int(value))) # type: ignore + + def from_json(self, json_object: object) -> TFlag: + if isinstance(json_object, int): + return self._enum_type(json_object) + + assert isinstance(json_object, list) + res = self._zero_enum + + for name in json_object: + res |= self._name_to_value[name] + + return res + + def from_json_to_numpy(self, json_object: object) -> T_NP: + return self.from_json(json_object).value # type: ignore + + +class OptionalConverter(Generic[T, T_NP], JsonConverter[Optional[T], np.void]): + def __init__(self, element_converter: JsonConverter[T, T_NP]) -> None: + super().__init__( + np.dtype( + [("has_value", np.bool_), ("value", element_converter.overall_dtype())] + ) + ) + self._element_converter = element_converter + self._none = cast(np.void, np.zeros((), dtype=self.overall_dtype())[()]) + + def to_json(self, value: Optional[T]) -> object: + if value is None: + return None + return self._element_converter.to_json(value) + + def numpy_to_json(self, value: np.void) -> object: + if value["has_value"]: + return self._element_converter.numpy_to_json(value["value"]) + return None + + def from_json(self, json_object: object) -> Optional[T]: + if json_object is None: + return None + return self._element_converter.from_json(json_object) + + def from_json_to_numpy(self, json_object: object) -> np.void: + if json_object is None: + return self._none + return (True, self._element_converter.from_json_to_numpy(json_object)) # type: ignore + + def supports_none(self) -> bool: + return True + + +class UnionConverter(JsonConverter[T, np.object_]): + def __init__( + self, + union_type: type, + cases: list[Optional[tuple[type, JsonConverter[Any, Any], list[type]]]], + simple: bool, + ) -> None: + super().__init__(np.object_) + self._union_type = union_type + self._cases = cases + self._simple = simple + self._offset = 1 if cases[0] is None else 0 + if 
self._simple: + self._json_type_to_case_index = { + json_type: case_index + for (case_index, case) in enumerate(cases) + if case is not None + for json_type in case[2] + } + else: + self.tag_to_case_index: dict[str, int] = { + case[0].tag: case_index # type: ignore + for (case_index, case) in enumerate(cases) + if case is not None + } + + def to_json(self, value: T) -> object: + if value is None: + if self._cases[0] is None: + return None + else: + raise ValueError("None is not a valid for this union type") + + if not isinstance(value, self._union_type): + raise ValueError(f"Value in not a union or not the right type: {value}") + + tag_index = value.index + self._offset # type: ignore + inner_json_value = self._cases[tag_index][1].to_json(value.value) # type: ignore + + if self._simple: + return inner_json_value + else: + return {value.tag: inner_json_value} # type: ignore + + def numpy_to_json(self, value: np.object_) -> object: + return self.to_json(cast(T, value)) + + def from_json(self, json_object: object) -> T: + if json_object is None: + if self._cases[0] is None: + return None # type: ignore + else: + raise ValueError("None is not a valid for this union type") + + if self._simple: + idx = self._json_type_to_case_index[type(json_object)] + case = self._cases[idx] + return case[0](case[1].from_json(json_object)) # type: ignore + else: + assert isinstance(json_object, dict) + tag, inner_json_object = next(iter(json_object.items())) + case = self._cases[self.tag_to_case_index[tag]] + return case[0](case[1].from_json(inner_json_object)) # type: ignore + + def from_json_to_numpy(self, json_object: object) -> np.object_: + return self.from_json(json_object) # type: ignore + + def supports_none(self) -> bool: + return self._cases[0] is None + + +class VectorConverter(Generic[T, T_NP], JsonConverter[list[T], np.object_]): + def __init__(self, element_converter: JsonConverter[T, T_NP]) -> None: + super().__init__(np.object_) + self._element_converter = 
element_converter + + def to_json(self, value: list[T]) -> object: + if not isinstance(value, list): + raise ValueError(f"Value in not a list: {value}") + return [self._element_converter.to_json(v) for v in value] + + def numpy_to_json(self, value: object) -> object: + if isinstance(value, list): + return [self._element_converter.to_json(v) for v in value] + + if not isinstance(value, np.ndarray): + raise ValueError(f"Value in not a list or ndarray: {value}") + + if value.ndim != 1: + raise ValueError(f"Value in not a 1-dimensional ndarray: {value}") + + return [self._element_converter.numpy_to_json(v) for v in value] + + def from_json(self, json_object: object) -> list[T]: + if not isinstance(json_object, list): + raise ValueError(f"Value in not a list: {json_object}") + return [self._element_converter.from_json(v) for v in json_object] + + def from_json_to_numpy(self, json_object: object) -> np.object_: + return cast(np.object_, self.from_json(json_object)) + + +class FixedVectorConverter(Generic[T, T_NP], JsonConverter[list[T], np.object_]): + def __init__(self, element_converter: JsonConverter[T, T_NP], length: int) -> None: + super().__init__(np.dtype((element_converter.overall_dtype(), length))) + self._element_converter = element_converter + self._length = length + + def to_json(self, value: list[T]) -> object: + if not isinstance(value, list): + raise ValueError(f"Value in not a list: {value}") + if len(value) != self._length: + raise ValueError(f"Value in not a list of length {self._length}: {value}") + return [self._element_converter.to_json(v) for v in value] + + def numpy_to_json(self, value: np.object_) -> object: + if not isinstance(value, np.ndarray): + raise ValueError(f"Value in not an ndarray: {value}") + if value.shape != (self._length,): + raise ValueError(f"Value does not have expected shape of {self._length}") + + return [self._element_converter.numpy_to_json(v) for v in value] + + def from_json(self, json_object: object) -> list[T]: + if not 
isinstance(json_object, list): + raise ValueError(f"Value in not a list: {json_object}") + if len(json_object) != self._length: + raise ValueError( + f"Value in not a list of length {self._length}: {json_object}" + ) + return [self._element_converter.from_json(v) for v in json_object] + + def from_json_to_numpy(self, json_object: object) -> np.object_: + if not isinstance(json_object, list): + raise ValueError(f"Value in not a list: {json_object}") + if len(json_object) != self._length: + raise ValueError( + f"Value in not a list of length {self._length}: {json_object}" + ) + return cast( + np.object_, + [self._element_converter.from_json_to_numpy(v) for v in json_object], + ) + + +TKey = TypeVar("TKey") +TKey_NP = TypeVar("TKey_NP", bound=np.generic) +TValue = TypeVar("TValue") +TValue_NP = TypeVar("TValue_NP", bound=np.generic) + + +class MapConverter( + Generic[TKey, TKey_NP, TValue, TValue_NP], + JsonConverter[dict[TKey, TValue], np.object_], +): + def __init__( + self, + key_converter: JsonConverter[TKey, TKey_NP], + value_converter: JsonConverter[TValue, TValue_NP], + ) -> None: + super().__init__(np.object_) + self._key_converter = key_converter + self._value_converter = value_converter + + def to_json(self, value: dict[TKey, TValue]) -> object: + if not isinstance(value, dict): + raise ValueError(f"Value in not a dict: {value}") + + if isinstance(self._key_converter, StringConverter): + return { + cast(str, k): self._value_converter.to_json(v) for k, v in value.items() + } + + return [ + [self._key_converter.to_json(k), self._value_converter.to_json(v)] + for k, v in value.items() + ] + + def numpy_to_json(self, value: np.object_) -> object: + return self.to_json(cast(dict[TKey, TValue], value)) + + def from_json(self, json_object: object) -> dict[TKey, TValue]: + if isinstance(self._key_converter, StringConverter): + if not isinstance(json_object, dict): + raise ValueError(f"Value in not a dict: {json_object}") + + return { + cast(TKey, k): 
self._value_converter.from_json(v) + for k, v in json_object.items() + } + + if not isinstance(json_object, list): + raise ValueError(f"Value in not a list: {json_object}") + + return { + self._key_converter.from_json(k): self._value_converter.from_json(v) + for [k, v] in json_object + } + + def from_json_to_numpy(self, json_object: object) -> np.object_: + return cast(np.object_, self.from_json(json_object)) + + +class NDArrayConverterBase( + Generic[T, T_NP], JsonConverter[npt.NDArray[Any], np.object_] +): + def __init__( + self, + overall_dtype: npt.DTypeLike, + element_converter: JsonConverter[T, T_NP], + dtype: npt.DTypeLike, + ) -> None: + super().__init__(overall_dtype) + self._element_converter = element_converter + + ( + self._array_dtype, + self._subarray_shape, + ) = NDArrayConverterBase._get_dtype_and_subarray_shape( + dtype if isinstance(dtype, np.dtype) else np.dtype(dtype) + ) + if self._subarray_shape == (): + self._subarray_shape = None + + @staticmethod + def _get_dtype_and_subarray_shape( + dtype: np.dtype[Any], + ) -> tuple[np.dtype[Any], tuple[int, ...]]: + if dtype.subdtype is None: + return dtype, () + subres = NDArrayConverterBase._get_dtype_and_subarray_shape(dtype.subdtype[0]) + return (subres[0], dtype.subdtype[1] + subres[1]) + + def check_dtype(self, input_dtype: npt.DTypeLike): + if input_dtype != self._array_dtype: + # see if it's the same dtype but packed, not aligned + packed_dtype = recfunctions.repack_fields(self._array_dtype, align=False, recurse=True) # type: ignore + if packed_dtype != input_dtype: + if packed_dtype == self._array_dtype: + message = f"Expected dtype {self._array_dtype}, got {input_dtype}" + else: + message = f"Expected dtype {self._array_dtype} or {packed_dtype}, got {input_dtype}" + + raise ValueError(message) + + def _read( + self, shape: tuple[int, ...], json_object: list[object] + ) -> npt.NDArray[Any]: + subarray_shape_not_none = ( + () if self._subarray_shape is None else self._subarray_shape + ) + + 
        # --- continuation of NDArrayConverterBase._read from the previous
        # --- chunk: flatten the outer dimensions, fill element-by-element,
        # --- then reshape to the requested shape (plus any subarray shape).
        partially_flattened_shape = (np.prod(shape),) + subarray_shape_not_none  # type: ignore
        result = np.ndarray(partially_flattened_shape, dtype=self._array_dtype)
        for i in range(partially_flattened_shape[0]):
            result[i] = self._element_converter.from_json_to_numpy(json_object[i])

        return result.reshape(shape + subarray_shape_not_none)


class FixedNDArrayConverter(Generic[T, T_NP], NDArrayConverterBase[T, T_NP]):
    """Converts an ndarray with a compile-time-fixed shape to/from a flat JSON list."""

    def __init__(
        self,
        element_converter: JsonConverter[T, T_NP],
        shape: tuple[int, ...],
    ) -> None:
        dtype = element_converter.overall_dtype()
        super().__init__(np.dtype((dtype, shape)), element_converter, dtype)
        self._shape = shape

    def to_json(self, value: npt.NDArray[Any]) -> object:
        if not isinstance(value, np.ndarray):
            raise ValueError(f"Value in not an ndarray: {value}")

        self.check_dtype(value.dtype)

        # The fixed shape is extended by the element subarray shape, if any.
        required_shape = (
            self._shape
            if self._subarray_shape is None
            else self._shape + self._subarray_shape
        )

        if value.shape != required_shape:
            raise ValueError(f"Expected shape {required_shape}, got {value.shape}")

        if self._subarray_shape is None:
            return [self._element_converter.numpy_to_json(v) for v in value.flat]

        # Group trailing subarray dims so each JSON element is one subarray.
        reshaped = value.reshape((-1,) + self._subarray_shape)
        return [self._element_converter.numpy_to_json(v) for v in reshaped]

    def numpy_to_json(self, value: np.object_) -> object:
        return self.to_json(cast(npt.NDArray[Any], value))

    def from_json(self, json_object: object) -> npt.NDArray[Any]:
        if not isinstance(json_object, list):
            raise ValueError(f"Value in not a list: {json_object}")

        return self._read(self._shape, json_object)

    def from_json_to_numpy(self, json_object: object) -> np.object_:
        return cast(np.object_, self.from_json(json_object))


class DynamicNDArrayConverter(NDArrayConverterBase[T, T_NP]):
    """Converts an ndarray of arbitrary rank to/from {"shape": ..., "data": [...]} JSON."""

    def __init__(
        self,
        element_serializer: JsonConverter[T, T_NP],
    ) -> None:
        super().__init__(
            np.object_, element_serializer, element_serializer.overall_dtype()
        )

    def to_json(self, value: npt.NDArray[Any]) -> object:
        if not isinstance(value, np.ndarray):
            raise ValueError(f"Value in not an ndarray: {value}")

        self.check_dtype(value.dtype)

        if self._subarray_shape is None:
            return {
                "shape": value.shape,
                "data": [self._element_converter.numpy_to_json(v) for v in value.flat],
            }

        # With subarray elements, the trailing dims must match exactly and are
        # excluded from the reported "shape".
        if len(value.shape) < len(self._subarray_shape) or (
            value.shape[-len(self._subarray_shape) :] != self._subarray_shape
        ):
            raise ValueError(
                f"The array is required to have shape (..., {(', '.join((str(i) for i in self._subarray_shape)))})"
            )

        reshaped = value.reshape((-1,) + self._subarray_shape)
        return {
            "shape": value.shape[: -len(self._subarray_shape)],
            "data": [self._element_converter.numpy_to_json(v) for v in reshaped],
        }

    def numpy_to_json(self, value: np.object_) -> object:
        return self.to_json(cast(npt.NDArray[Any], value))

    def from_json(self, json_object: object) -> npt.NDArray[Any]:
        if not isinstance(json_object, dict):
            raise ValueError(f"Value in not a dict: {json_object}")

        if "shape" not in json_object or "data" not in json_object:
            raise ValueError(f"Value in not a dict with shape and data: {json_object}")

        shape = tuple(json_object["shape"])
        data = json_object["data"]

        return self._read(shape, data)

    def from_json_to_numpy(self, json_object: object) -> np.object_:
        return cast(np.object_, self.from_json(json_object))


class NDArrayConverter(Generic[T, T_NP], NDArrayConverterBase[T, T_NP]):
    """Converts an ndarray with a fixed number of dimensions (but dynamic sizes)
    to/from {"shape": ..., "data": [...]} JSON."""

    def __init__(
        self,
        element_converter: JsonConverter[T, T_NP],
        ndims: int,
    ) -> None:
        super().__init__(
            np.object_, element_converter, element_converter.overall_dtype()
        )
        self._ndims = ndims

    def to_json(self, value: npt.NDArray[Any]) -> object:
        if not isinstance(value, np.ndarray):
            raise ValueError(f"Value in not an ndarray: {value}")

        self.check_dtype(value.dtype)

        if self._subarray_shape is None:
            if value.ndim != self._ndims:
                raise ValueError(f"Expected {self._ndims} dimensions, got {value.ndim}")

            return {
                "shape": value.shape,
                "data": [self._element_converter.numpy_to_json(v) for v in value.flat],
            }

        # Subarray elements contribute extra trailing dimensions beyond ndims.
        total_dims = len(self._subarray_shape) + self._ndims
        if value.ndim != total_dims:
            raise ValueError(f"Expected {total_dims} dimensions, got {value.ndim}")

        if value.shape[-len(self._subarray_shape) :] != self._subarray_shape:
            raise ValueError(
                f"The array is required to have shape (..., {(', '.join((str(i) for i in self._subarray_shape)))})"
            )

        reshaped = value.reshape((-1,) + self._subarray_shape)
        return {
            "shape": value.shape[: -len(self._subarray_shape)],
            "data": [self._element_converter.numpy_to_json(v) for v in reshaped],
        }

    def numpy_to_json(self, value: np.object_) -> object:
        return self.to_json(cast(npt.NDArray[Any], value))

    def from_json(self, json_object: object) -> npt.NDArray[Any]:
        if not isinstance(json_object, dict):
            raise ValueError(f"Value in not a dict: {json_object}")

        if "shape" not in json_object or "data" not in json_object:
            raise ValueError(f"Value in not a dict with shape and data: {json_object}")

        shape = tuple(json_object["shape"])
        data = json_object["data"]

        return self._read(shape, data)

    def from_json_to_numpy(self, json_object: object) -> np.object_:
        return cast(np.object_, self.from_json(json_object))
diff --git a/python/petsird/binary.py b/python/petsird/binary.py
new file mode 100644
index 0000000..7f4e2d3
--- /dev/null
+++ b/python/petsird/binary.py
@@ -0,0 +1,524 @@
# This file was generated by the "yardl" tool. DO NOT EDIT.
# pyright: reportUnusedClass=false
# pyright: reportUnusedImport=false
# pyright: reportUnknownArgumentType=false
# pyright: reportUnknownMemberType=false
# pyright: reportUnknownVariableType=false

import collections.abc
import io
import typing

import numpy as np
import numpy.typing as npt

from .types import *

from .protocols import *
from . import _binary
from . import yardl_types as yardl

class BinaryPETSIRDWriter(_binary.BinaryProtocolWriter, PETSIRDWriterBase):
    """Binary writer for the PETSIRD protocol.

    Definition of the stream of data
    """


    def __init__(self, stream: typing.Union[typing.BinaryIO, str]) -> None:
        # `stream` may be an already-open binary stream or a file path.
        PETSIRDWriterBase.__init__(self)
        _binary.BinaryProtocolWriter.__init__(self, stream, PETSIRDWriterBase.schema)

    def _write_header(self, value: Header) -> None:
        HeaderSerializer().write(self._stream, value)

    def _write_time_blocks(self, value: collections.abc.Iterable[TimeBlock]) -> None:
        # TimeBlock is a tagged union; each case gets its own record serializer.
        _binary.StreamSerializer(_binary.UnionSerializer(TimeBlock, [(TimeBlock.EventTimeBlock, EventTimeBlockSerializer()), (TimeBlock.ExternalSignalTimeBlock, ExternalSignalTimeBlockSerializer()), (TimeBlock.BedMovementTimeBlock, BedMovementTimeBlockSerializer()), (TimeBlock.GantryMovementTimeBlock, GantryMovementTimeBlockSerializer())])).write(self._stream, value)


class BinaryPETSIRDReader(_binary.BinaryProtocolReader, PETSIRDReaderBase):
    """Binary reader for the PETSIRD protocol.

    Definition of the stream of data
    """
    # NOTE(review): the generator emitted "Binary writer" in this docstring
    # (copy-paste of the writer's template); corrected to "reader" here — the
    # upstream fix belongs in the yardl template.


    def __init__(self, stream: typing.Union[io.BufferedReader, io.BytesIO, typing.BinaryIO, str]) -> None:
        PETSIRDReaderBase.__init__(self)
        _binary.BinaryProtocolReader.__init__(self, stream, PETSIRDReaderBase.schema)

    def _read_header(self) -> Header:
        return HeaderSerializer().read(self._stream)

    def _read_time_blocks(self) -> collections.abc.Iterable[TimeBlock]:
        # Mirror of _write_time_blocks: same union case order is required for
        # the binary format to round-trip.
        return _binary.StreamSerializer(_binary.UnionSerializer(TimeBlock, [(TimeBlock.EventTimeBlock, EventTimeBlockSerializer()), (TimeBlock.ExternalSignalTimeBlock, ExternalSignalTimeBlockSerializer()), (TimeBlock.BedMovementTimeBlock, BedMovementTimeBlockSerializer()), (TimeBlock.GantryMovementTimeBlock, GantryMovementTimeBlockSerializer())])).read(self._stream)

class CoincidenceEventSerializer(_binary.RecordSerializer[CoincidenceEvent]):
    """Binary record serializer for CoincidenceEvent (field order defines the wire format)."""

    def __init__(self) -> None:
        super().__init__([("detector_ids", _binary.FixedVectorSerializer(_binary.uint32_serializer, 2)), ("tof_idx", _binary.uint32_serializer), ("energy_indices", _binary.FixedVectorSerializer(_binary.uint32_serializer, 2))])

    def write(self, stream: _binary.CodedOutputStream, value: CoincidenceEvent) -> None:
        # A np.void value is the structured-dtype representation of the record.
        if isinstance(value, np.void):
            self.write_numpy(stream, value)
            return
        self._write(stream, value.detector_ids, value.tof_idx, value.energy_indices)

    def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None:
        self._write(stream, value['detector_ids'], value['tof_idx'], value['energy_indices'])

    def read(self, stream: _binary.CodedInputStream) -> CoincidenceEvent:
        field_values = self._read(stream)
        return CoincidenceEvent(detector_ids=field_values[0], tof_idx=field_values[1], energy_indices=field_values[2])


class SolidVolumeSerializer(typing.Generic[Shape, Shape_NP], _binary.RecordSerializer[SolidVolume[Shape]]):
    """Binary record serializer for SolidVolume, generic over the shape type."""

    def __init__(self, shape_serializer: _binary.TypeSerializer[Shape, Shape_NP]) -> None:
        # (continues in the next chunk)
        super().__init__([("shape",
shape_serializer), ("material_id", _binary.uint32_serializer)]) + + def write(self, stream: _binary.CodedOutputStream, value: SolidVolume[Shape]) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.shape, value.material_id) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['shape'], value['material_id']) + + def read(self, stream: _binary.CodedInputStream) -> SolidVolume[Shape]: + field_values = self._read(stream) + return SolidVolume[Shape](shape=field_values[0], material_id=field_values[1]) + + +class CoordinateSerializer(_binary.RecordSerializer[Coordinate]): + def __init__(self) -> None: + super().__init__([("c", _binary.FixedNDArraySerializer(_binary.float32_serializer, (3,)))]) + + def write(self, stream: _binary.CodedOutputStream, value: Coordinate) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.c) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['c']) + + def read(self, stream: _binary.CodedInputStream) -> Coordinate: + field_values = self._read(stream) + return Coordinate(c=field_values[0]) + + +class BoxShapeSerializer(_binary.RecordSerializer[BoxShape]): + def __init__(self) -> None: + super().__init__([("corners", _binary.FixedVectorSerializer(CoordinateSerializer(), 8))]) + + def write(self, stream: _binary.CodedOutputStream, value: BoxShape) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.corners) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['corners']) + + def read(self, stream: _binary.CodedInputStream) -> BoxShape: + field_values = self._read(stream) + return BoxShape(corners=field_values[0]) + + +class AnnulusShapeSerializer(_binary.RecordSerializer[AnnulusShape]): 
+ def __init__(self) -> None: + super().__init__([("inner_radius", _binary.float32_serializer), ("outer_radius", _binary.float32_serializer), ("thickness", _binary.float32_serializer), ("angular_range", _binary.FixedVectorSerializer(_binary.float32_serializer, 2))]) + + def write(self, stream: _binary.CodedOutputStream, value: AnnulusShape) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.inner_radius, value.outer_radius, value.thickness, value.angular_range) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['inner_radius'], value['outer_radius'], value['thickness'], value['angular_range']) + + def read(self, stream: _binary.CodedInputStream) -> AnnulusShape: + field_values = self._read(stream) + return AnnulusShape(inner_radius=field_values[0], outer_radius=field_values[1], thickness=field_values[2], angular_range=field_values[3]) + + +class RigidTransformationSerializer(_binary.RecordSerializer[RigidTransformation]): + def __init__(self) -> None: + super().__init__([("matrix", _binary.FixedNDArraySerializer(_binary.float32_serializer, (3, 4,)))]) + + def write(self, stream: _binary.CodedOutputStream, value: RigidTransformation) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.matrix) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['matrix']) + + def read(self, stream: _binary.CodedInputStream) -> RigidTransformation: + field_values = self._read(stream) + return RigidTransformation(matrix=field_values[0]) + + +class ReplicatedObjectSerializer(typing.Generic[T, T_NP], _binary.RecordSerializer[ReplicatedObject[T]]): + def __init__(self, t_serializer: _binary.TypeSerializer[T, T_NP]) -> None: + super().__init__([("object", t_serializer), ("transforms", _binary.VectorSerializer(RigidTransformationSerializer())), 
("ids", _binary.VectorSerializer(_binary.uint32_serializer))]) + + def write(self, stream: _binary.CodedOutputStream, value: ReplicatedObject[T]) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.object, value.transforms, value.ids) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['object'], value['transforms'], value['ids']) + + def read(self, stream: _binary.CodedInputStream) -> ReplicatedObject[T]: + field_values = self._read(stream) + return ReplicatedObject[T](object=field_values[0], transforms=field_values[1], ids=field_values[2]) + + +class DetectorModuleSerializer(_binary.RecordSerializer[DetectorModule]): + def __init__(self) -> None: + super().__init__([("detecting_elements", _binary.VectorSerializer(ReplicatedObjectSerializer(SolidVolumeSerializer(BoxShapeSerializer())))), ("detecting_element_ids", _binary.VectorSerializer(_binary.uint32_serializer)), ("non_detecting_elements", _binary.VectorSerializer(ReplicatedObjectSerializer(SolidVolumeSerializer(_binary.UnionSerializer(GeometricShape, [(GeometricShape.BoxShape, BoxShapeSerializer()), (GeometricShape.AnnulusShape, AnnulusShapeSerializer())])))))]) + + def write(self, stream: _binary.CodedOutputStream, value: DetectorModule) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.detecting_elements, value.detecting_element_ids, value.non_detecting_elements) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['detecting_elements'], value['detecting_element_ids'], value['non_detecting_elements']) + + def read(self, stream: _binary.CodedInputStream) -> DetectorModule: + field_values = self._read(stream) + return DetectorModule(detecting_elements=field_values[0], detecting_element_ids=field_values[1], non_detecting_elements=field_values[2]) + + +class 
ScannerGeometrySerializer(_binary.RecordSerializer[ScannerGeometry]): + def __init__(self) -> None: + super().__init__([("replicated_modules", _binary.VectorSerializer(ReplicatedObjectSerializer(DetectorModuleSerializer()))), ("ids", _binary.VectorSerializer(_binary.uint32_serializer)), ("non_detecting_volumes", _binary.OptionalSerializer(_binary.VectorSerializer(SolidVolumeSerializer(_binary.UnionSerializer(GeometricShape, [(GeometricShape.BoxShape, BoxShapeSerializer()), (GeometricShape.AnnulusShape, AnnulusShapeSerializer())])))))]) + + def write(self, stream: _binary.CodedOutputStream, value: ScannerGeometry) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.replicated_modules, value.ids, value.non_detecting_volumes) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['replicated_modules'], value['ids'], value['non_detecting_volumes']) + + def read(self, stream: _binary.CodedInputStream) -> ScannerGeometry: + field_values = self._read(stream) + return ScannerGeometry(replicated_modules=field_values[0], ids=field_values[1], non_detecting_volumes=field_values[2]) + + +class SubjectSerializer(_binary.RecordSerializer[Subject]): + def __init__(self) -> None: + super().__init__([("name", _binary.OptionalSerializer(_binary.string_serializer)), ("id", _binary.string_serializer)]) + + def write(self, stream: _binary.CodedOutputStream, value: Subject) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.name, value.id) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['name'], value['id']) + + def read(self, stream: _binary.CodedInputStream) -> Subject: + field_values = self._read(stream) + return Subject(name=field_values[0], id=field_values[1]) + + +class InstitutionSerializer(_binary.RecordSerializer[Institution]): + def 
__init__(self) -> None: + super().__init__([("name", _binary.string_serializer), ("address", _binary.string_serializer)]) + + def write(self, stream: _binary.CodedOutputStream, value: Institution) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.name, value.address) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['name'], value['address']) + + def read(self, stream: _binary.CodedInputStream) -> Institution: + field_values = self._read(stream) + return Institution(name=field_values[0], address=field_values[1]) + + +class ExamInformationSerializer(_binary.RecordSerializer[ExamInformation]): + def __init__(self) -> None: + super().__init__([("subject", SubjectSerializer()), ("institution", InstitutionSerializer()), ("protocol", _binary.OptionalSerializer(_binary.string_serializer)), ("start_of_acquisition", _binary.OptionalSerializer(_binary.datetime_serializer))]) + + def write(self, stream: _binary.CodedOutputStream, value: ExamInformation) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.subject, value.institution, value.protocol, value.start_of_acquisition) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['subject'], value['institution'], value['protocol'], value['start_of_acquisition']) + + def read(self, stream: _binary.CodedInputStream) -> ExamInformation: + field_values = self._read(stream) + return ExamInformation(subject=field_values[0], institution=field_values[1], protocol=field_values[2], start_of_acquisition=field_values[3]) + + +class DirectionSerializer(_binary.RecordSerializer[Direction]): + def __init__(self) -> None: + super().__init__([("c", _binary.FixedNDArraySerializer(_binary.float32_serializer, (3,)))]) + + def write(self, stream: _binary.CodedOutputStream, value: Direction) -> None: + if 
isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.c) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['c']) + + def read(self, stream: _binary.CodedInputStream) -> Direction: + field_values = self._read(stream) + return Direction(c=field_values[0]) + + +class DirectionMatrixSerializer(_binary.RecordSerializer[DirectionMatrix]): + def __init__(self) -> None: + super().__init__([("matrix", _binary.FixedNDArraySerializer(_binary.float32_serializer, (3, 3,)))]) + + def write(self, stream: _binary.CodedOutputStream, value: DirectionMatrix) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.matrix) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['matrix']) + + def read(self, stream: _binary.CodedInputStream) -> DirectionMatrix: + field_values = self._read(stream) + return DirectionMatrix(matrix=field_values[0]) + + +class AtomSerializer(_binary.RecordSerializer[Atom]): + def __init__(self) -> None: + super().__init__([("mass_number", _binary.uint32_serializer), ("atomic_number", _binary.uint32_serializer)]) + + def write(self, stream: _binary.CodedOutputStream, value: Atom) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.mass_number, value.atomic_number) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['mass_number'], value['atomic_number']) + + def read(self, stream: _binary.CodedInputStream) -> Atom: + field_values = self._read(stream) + return Atom(mass_number=field_values[0], atomic_number=field_values[1]) + + +class BulkMaterialSerializer(_binary.RecordSerializer[BulkMaterial]): + def __init__(self) -> None: + super().__init__([("id", _binary.uint32_serializer), ("name", _binary.string_serializer), 
("density", _binary.float32_serializer), ("atoms", _binary.VectorSerializer(AtomSerializer())), ("mass_fractions", _binary.VectorSerializer(_binary.float32_serializer))]) + + def write(self, stream: _binary.CodedOutputStream, value: BulkMaterial) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.id, value.name, value.density, value.atoms, value.mass_fractions) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['id'], value['name'], value['density'], value['atoms'], value['mass_fractions']) + + def read(self, stream: _binary.CodedInputStream) -> BulkMaterial: + field_values = self._read(stream) + return BulkMaterial(id=field_values[0], name=field_values[1], density=field_values[2], atoms=field_values[3], mass_fractions=field_values[4]) + + +class ScannerInformationSerializer(_binary.RecordSerializer[ScannerInformation]): + def __init__(self) -> None: + super().__init__([("model_name", _binary.string_serializer), ("scanner_geometry", ScannerGeometrySerializer()), ("bulk_materials", _binary.VectorSerializer(BulkMaterialSerializer())), ("gantry_alignment", _binary.OptionalSerializer(RigidTransformationSerializer())), ("tof_bin_edges", _binary.NDArraySerializer(_binary.float32_serializer, 1)), ("tof_resolution", _binary.float32_serializer), ("energy_bin_edges", _binary.NDArraySerializer(_binary.float32_serializer, 1)), ("energy_resolution_at_511", _binary.float32_serializer), ("event_time_block_duration", _binary.uint32_serializer), ("coincidence_policy", _binary.EnumSerializer(_binary.int32_serializer, CoincidencePolicy))]) + + def write(self, stream: _binary.CodedOutputStream, value: ScannerInformation) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.model_name, value.scanner_geometry, value.bulk_materials, value.gantry_alignment, value.tof_bin_edges, value.tof_resolution, 
value.energy_bin_edges, value.energy_resolution_at_511, value.event_time_block_duration, value.coincidence_policy) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['model_name'], value['scanner_geometry'], value['bulk_materials'], value['gantry_alignment'], value['tof_bin_edges'], value['tof_resolution'], value['energy_bin_edges'], value['energy_resolution_at_511'], value['event_time_block_duration'], value['coincidence_policy']) + + def read(self, stream: _binary.CodedInputStream) -> ScannerInformation: + field_values = self._read(stream) + return ScannerInformation(model_name=field_values[0], scanner_geometry=field_values[1], bulk_materials=field_values[2], gantry_alignment=field_values[3], tof_bin_edges=field_values[4], tof_resolution=field_values[5], energy_bin_edges=field_values[6], energy_resolution_at_511=field_values[7], event_time_block_duration=field_values[8], coincidence_policy=field_values[9]) + + +class HeaderSerializer(_binary.RecordSerializer[Header]): + def __init__(self) -> None: + super().__init__([("scanner", ScannerInformationSerializer()), ("exam", _binary.OptionalSerializer(ExamInformationSerializer()))]) + + def write(self, stream: _binary.CodedOutputStream, value: Header) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.scanner, value.exam) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['scanner'], value['exam']) + + def read(self, stream: _binary.CodedInputStream) -> Header: + field_values = self._read(stream) + return Header(scanner=field_values[0], exam=field_values[1]) + + +class TripleEventSerializer(_binary.RecordSerializer[TripleEvent]): + def __init__(self) -> None: + super().__init__([("detector_ids", _binary.FixedVectorSerializer(_binary.uint32_serializer, 3)), ("tof_indices", _binary.FixedVectorSerializer(_binary.uint32_serializer, 2)), 
("energy_indices", _binary.FixedVectorSerializer(_binary.uint32_serializer, 3))]) + + def write(self, stream: _binary.CodedOutputStream, value: TripleEvent) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.detector_ids, value.tof_indices, value.energy_indices) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['detector_ids'], value['tof_indices'], value['energy_indices']) + + def read(self, stream: _binary.CodedInputStream) -> TripleEvent: + field_values = self._read(stream) + return TripleEvent(detector_ids=field_values[0], tof_indices=field_values[1], energy_indices=field_values[2]) + + +class EventTimeBlockSerializer(_binary.RecordSerializer[EventTimeBlock]): + def __init__(self) -> None: + super().__init__([("start", _binary.uint32_serializer), ("prompt_events", _binary.VectorSerializer(CoincidenceEventSerializer())), ("delayed_events", _binary.OptionalSerializer(_binary.VectorSerializer(CoincidenceEventSerializer()))), ("triple_events", _binary.OptionalSerializer(_binary.VectorSerializer(TripleEventSerializer())))]) + + def write(self, stream: _binary.CodedOutputStream, value: EventTimeBlock) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.start, value.prompt_events, value.delayed_events, value.triple_events) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['start'], value['prompt_events'], value['delayed_events'], value['triple_events']) + + def read(self, stream: _binary.CodedInputStream) -> EventTimeBlock: + field_values = self._read(stream) + return EventTimeBlock(start=field_values[0], prompt_events=field_values[1], delayed_events=field_values[2], triple_events=field_values[3]) + + +class ExternalSignalTimeBlockSerializer(_binary.RecordSerializer[ExternalSignalTimeBlock]): + def __init__(self) -> None: 
+ super().__init__([("start", _binary.uint32_serializer), ("signal_id", _binary.uint32_serializer), ("signal_values", _binary.VectorSerializer(_binary.float32_serializer))]) + + def write(self, stream: _binary.CodedOutputStream, value: ExternalSignalTimeBlock) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.start, value.signal_id, value.signal_values) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['start'], value['signal_id'], value['signal_values']) + + def read(self, stream: _binary.CodedInputStream) -> ExternalSignalTimeBlock: + field_values = self._read(stream) + return ExternalSignalTimeBlock(start=field_values[0], signal_id=field_values[1], signal_values=field_values[2]) + + +class BedMovementTimeBlockSerializer(_binary.RecordSerializer[BedMovementTimeBlock]): + def __init__(self) -> None: + super().__init__([("start", _binary.uint32_serializer), ("transform", RigidTransformationSerializer())]) + + def write(self, stream: _binary.CodedOutputStream, value: BedMovementTimeBlock) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.start, value.transform) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['start'], value['transform']) + + def read(self, stream: _binary.CodedInputStream) -> BedMovementTimeBlock: + field_values = self._read(stream) + return BedMovementTimeBlock(start=field_values[0], transform=field_values[1]) + + +class GantryMovementTimeBlockSerializer(_binary.RecordSerializer[GantryMovementTimeBlock]): + def __init__(self) -> None: + super().__init__([("start", _binary.uint32_serializer), ("transform", RigidTransformationSerializer())]) + + def write(self, stream: _binary.CodedOutputStream, value: GantryMovementTimeBlock) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, 
value) + return + self._write(stream, value.start, value.transform) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['start'], value['transform']) + + def read(self, stream: _binary.CodedInputStream) -> GantryMovementTimeBlock: + field_values = self._read(stream) + return GantryMovementTimeBlock(start=field_values[0], transform=field_values[1]) + + +class ExternalSignalTypeSerializer(_binary.RecordSerializer[ExternalSignalType]): + def __init__(self) -> None: + super().__init__([("type", _binary.EnumSerializer(_binary.int32_serializer, ExternalSignalTypeEnum)), ("description", _binary.string_serializer), ("id", _binary.uint32_serializer)]) + + def write(self, stream: _binary.CodedOutputStream, value: ExternalSignalType) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.type, value.description, value.id) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['type'], value['description'], value['id']) + + def read(self, stream: _binary.CodedInputStream) -> ExternalSignalType: + field_values = self._read(stream) + return ExternalSignalType(type=field_values[0], description=field_values[1], id=field_values[2]) + + +class TimeIntervalSerializer(_binary.RecordSerializer[TimeInterval]): + def __init__(self) -> None: + super().__init__([("start", _binary.uint32_serializer), ("stop", _binary.uint32_serializer)]) + + def write(self, stream: _binary.CodedOutputStream, value: TimeInterval) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.start, value.stop) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['start'], value['stop']) + + def read(self, stream: _binary.CodedInputStream) -> TimeInterval: + field_values = self._read(stream) + return 
TimeInterval(start=field_values[0], stop=field_values[1]) + + +class TimeFrameInformationSerializer(_binary.RecordSerializer[TimeFrameInformation]): + def __init__(self) -> None: + super().__init__([("time_frames", _binary.VectorSerializer(TimeIntervalSerializer()))]) + + def write(self, stream: _binary.CodedOutputStream, value: TimeFrameInformation) -> None: + if isinstance(value, np.void): + self.write_numpy(stream, value) + return + self._write(stream, value.time_frames) + + def write_numpy(self, stream: _binary.CodedOutputStream, value: np.void) -> None: + self._write(stream, value['time_frames']) + + def read(self, stream: _binary.CodedInputStream) -> TimeFrameInformation: + field_values = self._read(stream) + return TimeFrameInformation(time_frames=field_values[0]) + + diff --git a/python/petsird/ndjson.py b/python/petsird/ndjson.py new file mode 100644 index 0000000..b04264a --- /dev/null +++ b/python/petsird/ndjson.py @@ -0,0 +1,1358 @@ +# This file was generated by the "yardl" tool. DO NOT EDIT. + +# pyright: reportUnusedClass=false +# pyright: reportUnusedImport=false +# pyright: reportUnknownArgumentType=false +# pyright: reportUnknownMemberType=false +# pyright: reportUnknownVariableType=false + +import collections.abc +import io +import typing + +import numpy as np +import numpy.typing as npt + +from .types import * + +from .protocols import * +from . import _ndjson +from . 
class CoincidenceEventConverter(_ndjson.JsonConverter[CoincidenceEvent, np.void]):
    """JSON <-> CoincidenceEvent converter (detector_ids, tof_idx, energy_indices)."""

    def __init__(self) -> None:
        self._detector_ids_converter = _ndjson.FixedVectorConverter(_ndjson.uint32_converter, 2)
        self._tof_idx_converter = _ndjson.uint32_converter
        self._energy_indices_converter = _ndjson.FixedVectorConverter(_ndjson.uint32_converter, 2)
        super().__init__(np.dtype([
            ("detector_ids", self._detector_ids_converter.overall_dtype()),
            ("tof_idx", self._tof_idx_converter.overall_dtype()),
            ("energy_indices", self._energy_indices_converter.overall_dtype()),
        ]))

    def to_json(self, value: CoincidenceEvent) -> object:
        if not isinstance(value, CoincidenceEvent):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'CoincidenceEvent' instance")
        json_object = {}
        json_object["detectorIds"] = self._detector_ids_converter.to_json(value.detector_ids)
        json_object["tofIdx"] = self._tof_idx_converter.to_json(value.tof_idx)
        json_object["energyIndices"] = self._energy_indices_converter.to_json(value.energy_indices)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        json_object["detectorIds"] = self._detector_ids_converter.numpy_to_json(value["detector_ids"])
        json_object["tofIdx"] = self._tof_idx_converter.numpy_to_json(value["tof_idx"])
        json_object["energyIndices"] = self._energy_indices_converter.numpy_to_json(value["energy_indices"])
        return json_object

    def from_json(self, json_object: object) -> CoincidenceEvent:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return CoincidenceEvent(
            detector_ids=self._detector_ids_converter.from_json(json_object["detectorIds"]),
            tof_idx=self._tof_idx_converter.from_json(json_object["tofIdx"]),
            energy_indices=self._energy_indices_converter.from_json(json_object["energyIndices"]),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._detector_ids_converter.from_json_to_numpy(json_object["detectorIds"]),
            self._tof_idx_converter.from_json_to_numpy(json_object["tofIdx"]),
            self._energy_indices_converter.from_json_to_numpy(json_object["energyIndices"]),
        )  # type:ignore


class SolidVolumeConverter(typing.Generic[Shape, Shape_NP], _ndjson.JsonConverter[SolidVolume[Shape], np.void]):
    """Generic JSON <-> SolidVolume[Shape] converter; shape converter is injected."""

    def __init__(self, shape_converter: _ndjson.JsonConverter[Shape, Shape_NP]) -> None:
        self._shape_converter = shape_converter
        # Optional-shape handling: omit the key when the converter allows None.
        self._shape_supports_none = self._shape_converter.supports_none()
        self._material_id_converter = _ndjson.uint32_converter
        super().__init__(np.dtype([
            ("shape", self._shape_converter.overall_dtype()),
            ("material_id", self._material_id_converter.overall_dtype()),
        ]))

    def to_json(self, value: SolidVolume[Shape]) -> object:
        if not isinstance(value, SolidVolume):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'SolidVolume[Shape]' instance")
        json_object = {}
        if not self._shape_supports_none or value.shape is not None:
            json_object["shape"] = self._shape_converter.to_json(value.shape)
        json_object["materialId"] = self._material_id_converter.to_json(value.material_id)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        if not self._shape_supports_none or value["shape"] is not None:
            json_object["shape"] = self._shape_converter.numpy_to_json(value["shape"])
        json_object["materialId"] = self._material_id_converter.numpy_to_json(value["material_id"])
        return json_object

    def from_json(self, json_object: object) -> SolidVolume[Shape]:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return SolidVolume[Shape](
            shape=self._shape_converter.from_json(json_object.get("shape") if self._shape_supports_none else json_object["shape"]),
            material_id=self._material_id_converter.from_json(json_object["materialId"]),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._shape_converter.from_json_to_numpy(json_object.get("shape") if self._shape_supports_none else json_object["shape"]),
            self._material_id_converter.from_json_to_numpy(json_object["materialId"]),
        )  # type:ignore


class CoordinateConverter(_ndjson.JsonConverter[Coordinate, np.void]):
    """JSON <-> Coordinate converter; `c` is a fixed float32 array of length 3."""

    def __init__(self) -> None:
        self._c_converter = _ndjson.FixedNDArrayConverter(_ndjson.float32_converter, (3,))
        super().__init__(np.dtype([
            ("c", self._c_converter.overall_dtype()),
        ]))

    def to_json(self, value: Coordinate) -> object:
        if not isinstance(value, Coordinate):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'Coordinate' instance")
        json_object = {}
        json_object["c"] = self._c_converter.to_json(value.c)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        json_object["c"] = self._c_converter.numpy_to_json(value["c"])
        return json_object

    def from_json(self, json_object: object) -> Coordinate:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return Coordinate(
            c=self._c_converter.from_json(json_object["c"]),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._c_converter.from_json_to_numpy(json_object["c"]),
        )  # type:ignore
class BoxShapeConverter(_ndjson.JsonConverter[BoxShape, np.void]):
    """JSON <-> BoxShape converter; a box is 8 corner Coordinates."""

    def __init__(self) -> None:
        self._corners_converter = _ndjson.FixedVectorConverter(CoordinateConverter(), 8)
        super().__init__(np.dtype([
            ("corners", self._corners_converter.overall_dtype()),
        ]))

    def to_json(self, value: BoxShape) -> object:
        if not isinstance(value, BoxShape):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'BoxShape' instance")
        json_object = {}
        json_object["corners"] = self._corners_converter.to_json(value.corners)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        json_object["corners"] = self._corners_converter.numpy_to_json(value["corners"])
        return json_object

    def from_json(self, json_object: object) -> BoxShape:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return BoxShape(
            corners=self._corners_converter.from_json(json_object["corners"]),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._corners_converter.from_json_to_numpy(json_object["corners"]),
        )  # type:ignore


class AnnulusShapeConverter(_ndjson.JsonConverter[AnnulusShape, np.void]):
    """JSON <-> AnnulusShape converter (radii, thickness, angular range)."""

    def __init__(self) -> None:
        self._inner_radius_converter = _ndjson.float32_converter
        self._outer_radius_converter = _ndjson.float32_converter
        self._thickness_converter = _ndjson.float32_converter
        self._angular_range_converter = _ndjson.FixedVectorConverter(_ndjson.float32_converter, 2)
        super().__init__(np.dtype([
            ("inner_radius", self._inner_radius_converter.overall_dtype()),
            ("outer_radius", self._outer_radius_converter.overall_dtype()),
            ("thickness", self._thickness_converter.overall_dtype()),
            ("angular_range", self._angular_range_converter.overall_dtype()),
        ]))

    def to_json(self, value: AnnulusShape) -> object:
        if not isinstance(value, AnnulusShape):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'AnnulusShape' instance")
        json_object = {}
        json_object["innerRadius"] = self._inner_radius_converter.to_json(value.inner_radius)
        json_object["outerRadius"] = self._outer_radius_converter.to_json(value.outer_radius)
        json_object["thickness"] = self._thickness_converter.to_json(value.thickness)
        json_object["angularRange"] = self._angular_range_converter.to_json(value.angular_range)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        json_object["innerRadius"] = self._inner_radius_converter.numpy_to_json(value["inner_radius"])
        json_object["outerRadius"] = self._outer_radius_converter.numpy_to_json(value["outer_radius"])
        json_object["thickness"] = self._thickness_converter.numpy_to_json(value["thickness"])
        json_object["angularRange"] = self._angular_range_converter.numpy_to_json(value["angular_range"])
        return json_object

    def from_json(self, json_object: object) -> AnnulusShape:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return AnnulusShape(
            inner_radius=self._inner_radius_converter.from_json(json_object["innerRadius"]),
            outer_radius=self._outer_radius_converter.from_json(json_object["outerRadius"]),
            thickness=self._thickness_converter.from_json(json_object["thickness"]),
            angular_range=self._angular_range_converter.from_json(json_object["angularRange"]),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._inner_radius_converter.from_json_to_numpy(json_object["innerRadius"]),
            self._outer_radius_converter.from_json_to_numpy(json_object["outerRadius"]),
            self._thickness_converter.from_json_to_numpy(json_object["thickness"]),
            self._angular_range_converter.from_json_to_numpy(json_object["angularRange"]),
        )  # type:ignore


class RigidTransformationConverter(_ndjson.JsonConverter[RigidTransformation, np.void]):
    """JSON <-> RigidTransformation converter; matrix is a fixed 3x4 float32 array."""

    def __init__(self) -> None:
        self._matrix_converter = _ndjson.FixedNDArrayConverter(_ndjson.float32_converter, (3, 4,))
        super().__init__(np.dtype([
            ("matrix", self._matrix_converter.overall_dtype()),
        ]))

    def to_json(self, value: RigidTransformation) -> object:
        if not isinstance(value, RigidTransformation):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'RigidTransformation' instance")
        json_object = {}
        json_object["matrix"] = self._matrix_converter.to_json(value.matrix)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        json_object["matrix"] = self._matrix_converter.numpy_to_json(value["matrix"])
        return json_object

    def from_json(self, json_object: object) -> RigidTransformation:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return RigidTransformation(
            matrix=self._matrix_converter.from_json(json_object["matrix"]),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._matrix_converter.from_json_to_numpy(json_object["matrix"]),
        )  # type:ignore


class ReplicatedObjectConverter(typing.Generic[T, T_NP], _ndjson.JsonConverter[ReplicatedObject[T], np.void]):
    """Generic JSON <-> ReplicatedObject[T] converter: one object plus per-replica transforms and ids."""

    def __init__(self, t_converter: _ndjson.JsonConverter[T, T_NP]) -> None:
        self._object_converter = t_converter
        # Omit the "object" key in JSON when the inner converter treats None as absent.
        self._object_supports_none = self._object_converter.supports_none()
        self._transforms_converter = _ndjson.VectorConverter(RigidTransformationConverter())
        self._ids_converter = _ndjson.VectorConverter(_ndjson.uint32_converter)
        super().__init__(np.dtype([
            ("object", self._object_converter.overall_dtype()),
            ("transforms", self._transforms_converter.overall_dtype()),
            ("ids", self._ids_converter.overall_dtype()),
        ]))

    def to_json(self, value: ReplicatedObject[T]) -> object:
        if not isinstance(value, ReplicatedObject):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'ReplicatedObject[T]' instance")
        json_object = {}
        if not self._object_supports_none or value.object is not None:
            json_object["object"] = self._object_converter.to_json(value.object)
        json_object["transforms"] = self._transforms_converter.to_json(value.transforms)
        json_object["ids"] = self._ids_converter.to_json(value.ids)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        if not self._object_supports_none or value["object"] is not None:
            json_object["object"] = self._object_converter.numpy_to_json(value["object"])
        json_object["transforms"] = self._transforms_converter.numpy_to_json(value["transforms"])
        json_object["ids"] = self._ids_converter.numpy_to_json(value["ids"])
        return json_object

    def from_json(self, json_object: object) -> ReplicatedObject[T]:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return ReplicatedObject[T](
            object=self._object_converter.from_json(json_object.get("object") if self._object_supports_none else json_object["object"]),
            transforms=self._transforms_converter.from_json(json_object["transforms"]),
            ids=self._ids_converter.from_json(json_object["ids"]),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._object_converter.from_json_to_numpy(json_object.get("object") if self._object_supports_none else json_object["object"]),
            self._transforms_converter.from_json_to_numpy(json_object["transforms"]),
            self._ids_converter.from_json_to_numpy(json_object["ids"]),
        )  # type:ignore
class DetectorModuleConverter(_ndjson.JsonConverter[DetectorModule, np.void]):
    """JSON <-> DetectorModule converter (detecting/non-detecting elements and their ids)."""

    def __init__(self) -> None:
        self._detecting_elements_converter = _ndjson.VectorConverter(ReplicatedObjectConverter(SolidVolumeConverter(BoxShapeConverter())))
        self._detecting_element_ids_converter = _ndjson.VectorConverter(_ndjson.uint32_converter)
        # Non-detecting elements may be box- or annulus-shaped, encoded as a tagged union.
        self._non_detecting_elements_converter = _ndjson.VectorConverter(ReplicatedObjectConverter(SolidVolumeConverter(_ndjson.UnionConverter(GeometricShape, [(GeometricShape.BoxShape, BoxShapeConverter(), [dict]), (GeometricShape.AnnulusShape, AnnulusShapeConverter(), [dict])], False))))
        super().__init__(np.dtype([
            ("detecting_elements", self._detecting_elements_converter.overall_dtype()),
            ("detecting_element_ids", self._detecting_element_ids_converter.overall_dtype()),
            ("non_detecting_elements", self._non_detecting_elements_converter.overall_dtype()),
        ]))

    def to_json(self, value: DetectorModule) -> object:
        if not isinstance(value, DetectorModule):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'DetectorModule' instance")
        json_object = {}
        json_object["detectingElements"] = self._detecting_elements_converter.to_json(value.detecting_elements)
        json_object["detectingElementIds"] = self._detecting_element_ids_converter.to_json(value.detecting_element_ids)
        json_object["nonDetectingElements"] = self._non_detecting_elements_converter.to_json(value.non_detecting_elements)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        json_object["detectingElements"] = self._detecting_elements_converter.numpy_to_json(value["detecting_elements"])
        json_object["detectingElementIds"] = self._detecting_element_ids_converter.numpy_to_json(value["detecting_element_ids"])
        json_object["nonDetectingElements"] = self._non_detecting_elements_converter.numpy_to_json(value["non_detecting_elements"])
        return json_object

    def from_json(self, json_object: object) -> DetectorModule:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return DetectorModule(
            detecting_elements=self._detecting_elements_converter.from_json(json_object["detectingElements"]),
            detecting_element_ids=self._detecting_element_ids_converter.from_json(json_object["detectingElementIds"]),
            non_detecting_elements=self._non_detecting_elements_converter.from_json(json_object["nonDetectingElements"]),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._detecting_elements_converter.from_json_to_numpy(json_object["detectingElements"]),
            self._detecting_element_ids_converter.from_json_to_numpy(json_object["detectingElementIds"]),
            self._non_detecting_elements_converter.from_json_to_numpy(json_object["nonDetectingElements"]),
        )  # type:ignore


class ScannerGeometryConverter(_ndjson.JsonConverter[ScannerGeometry, np.void]):
    """JSON <-> ScannerGeometry converter; non_detecting_volumes is optional."""

    def __init__(self) -> None:
        self._replicated_modules_converter = _ndjson.VectorConverter(ReplicatedObjectConverter(DetectorModuleConverter()))
        self._ids_converter = _ndjson.VectorConverter(_ndjson.uint32_converter)
        self._non_detecting_volumes_converter = _ndjson.OptionalConverter(_ndjson.VectorConverter(SolidVolumeConverter(_ndjson.UnionConverter(GeometricShape, [(GeometricShape.BoxShape, BoxShapeConverter(), [dict]), (GeometricShape.AnnulusShape, AnnulusShapeConverter(), [dict])], False))))
        super().__init__(np.dtype([
            ("replicated_modules", self._replicated_modules_converter.overall_dtype()),
            ("ids", self._ids_converter.overall_dtype()),
            ("non_detecting_volumes", self._non_detecting_volumes_converter.overall_dtype()),
        ]))

    def to_json(self, value: ScannerGeometry) -> object:
        if not isinstance(value, ScannerGeometry):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'ScannerGeometry' instance")
        json_object = {}
        json_object["replicatedModules"] = self._replicated_modules_converter.to_json(value.replicated_modules)
        json_object["ids"] = self._ids_converter.to_json(value.ids)
        # Optional field: key is omitted entirely when the value is None.
        if value.non_detecting_volumes is not None:
            json_object["nonDetectingVolumes"] = self._non_detecting_volumes_converter.to_json(value.non_detecting_volumes)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        json_object["replicatedModules"] = self._replicated_modules_converter.numpy_to_json(value["replicated_modules"])
        json_object["ids"] = self._ids_converter.numpy_to_json(value["ids"])
        if (field_val := value["non_detecting_volumes"]) is not None:
            json_object["nonDetectingVolumes"] = self._non_detecting_volumes_converter.numpy_to_json(field_val)
        return json_object

    def from_json(self, json_object: object) -> ScannerGeometry:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return ScannerGeometry(
            replicated_modules=self._replicated_modules_converter.from_json(json_object["replicatedModules"]),
            ids=self._ids_converter.from_json(json_object["ids"]),
            non_detecting_volumes=self._non_detecting_volumes_converter.from_json(json_object.get("nonDetectingVolumes")),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._replicated_modules_converter.from_json_to_numpy(json_object["replicatedModules"]),
            self._ids_converter.from_json_to_numpy(json_object["ids"]),
            self._non_detecting_volumes_converter.from_json_to_numpy(json_object.get("nonDetectingVolumes")),
        )  # type:ignore
class SubjectConverter(_ndjson.JsonConverter[Subject, np.void]):
    """JSON <-> Subject converter; name is optional, id is required."""

    def __init__(self) -> None:
        self._name_converter = _ndjson.OptionalConverter(_ndjson.string_converter)
        self._id_converter = _ndjson.string_converter
        super().__init__(np.dtype([
            ("name", self._name_converter.overall_dtype()),
            ("id", self._id_converter.overall_dtype()),
        ]))

    def to_json(self, value: Subject) -> object:
        if not isinstance(value, Subject):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'Subject' instance")
        json_object = {}
        if value.name is not None:
            json_object["name"] = self._name_converter.to_json(value.name)
        json_object["id"] = self._id_converter.to_json(value.id)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        if (field_val := value["name"]) is not None:
            json_object["name"] = self._name_converter.numpy_to_json(field_val)
        json_object["id"] = self._id_converter.numpy_to_json(value["id"])
        return json_object

    def from_json(self, json_object: object) -> Subject:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return Subject(
            name=self._name_converter.from_json(json_object.get("name")),
            id=self._id_converter.from_json(json_object["id"]),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._name_converter.from_json_to_numpy(json_object.get("name")),
            self._id_converter.from_json_to_numpy(json_object["id"]),
        )  # type:ignore


class InstitutionConverter(_ndjson.JsonConverter[Institution, np.void]):
    """JSON <-> Institution converter (name, address)."""

    def __init__(self) -> None:
        self._name_converter = _ndjson.string_converter
        self._address_converter = _ndjson.string_converter
        super().__init__(np.dtype([
            ("name", self._name_converter.overall_dtype()),
            ("address", self._address_converter.overall_dtype()),
        ]))

    def to_json(self, value: Institution) -> object:
        if not isinstance(value, Institution):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'Institution' instance")
        json_object = {}
        json_object["name"] = self._name_converter.to_json(value.name)
        json_object["address"] = self._address_converter.to_json(value.address)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        json_object["name"] = self._name_converter.numpy_to_json(value["name"])
        json_object["address"] = self._address_converter.numpy_to_json(value["address"])
        return json_object

    def from_json(self, json_object: object) -> Institution:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return Institution(
            name=self._name_converter.from_json(json_object["name"]),
            address=self._address_converter.from_json(json_object["address"]),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._name_converter.from_json_to_numpy(json_object["name"]),
            self._address_converter.from_json_to_numpy(json_object["address"]),
        )  # type:ignore


class ExamInformationConverter(_ndjson.JsonConverter[ExamInformation, np.void]):
    """JSON <-> ExamInformation converter; protocol and start_of_acquisition are optional."""

    def __init__(self) -> None:
        self._subject_converter = SubjectConverter()
        self._institution_converter = InstitutionConverter()
        self._protocol_converter = _ndjson.OptionalConverter(_ndjson.string_converter)
        self._start_of_acquisition_converter = _ndjson.OptionalConverter(_ndjson.datetime_converter)
        super().__init__(np.dtype([
            ("subject", self._subject_converter.overall_dtype()),
            ("institution", self._institution_converter.overall_dtype()),
            ("protocol", self._protocol_converter.overall_dtype()),
            ("start_of_acquisition", self._start_of_acquisition_converter.overall_dtype()),
        ]))

    def to_json(self, value: ExamInformation) -> object:
        if not isinstance(value, ExamInformation):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'ExamInformation' instance")
        json_object = {}
        json_object["subject"] = self._subject_converter.to_json(value.subject)
        json_object["institution"] = self._institution_converter.to_json(value.institution)
        if value.protocol is not None:
            json_object["protocol"] = self._protocol_converter.to_json(value.protocol)
        if value.start_of_acquisition is not None:
            json_object["startOfAcquisition"] = self._start_of_acquisition_converter.to_json(value.start_of_acquisition)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        json_object["subject"] = self._subject_converter.numpy_to_json(value["subject"])
        json_object["institution"] = self._institution_converter.numpy_to_json(value["institution"])
        if (field_val := value["protocol"]) is not None:
            json_object["protocol"] = self._protocol_converter.numpy_to_json(field_val)
        if (field_val := value["start_of_acquisition"]) is not None:
            json_object["startOfAcquisition"] = self._start_of_acquisition_converter.numpy_to_json(field_val)
        return json_object

    def from_json(self, json_object: object) -> ExamInformation:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return ExamInformation(
            subject=self._subject_converter.from_json(json_object["subject"]),
            institution=self._institution_converter.from_json(json_object["institution"]),
            protocol=self._protocol_converter.from_json(json_object.get("protocol")),
            start_of_acquisition=self._start_of_acquisition_converter.from_json(json_object.get("startOfAcquisition")),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._subject_converter.from_json_to_numpy(json_object["subject"]),
            self._institution_converter.from_json_to_numpy(json_object["institution"]),
            self._protocol_converter.from_json_to_numpy(json_object.get("protocol")),
            self._start_of_acquisition_converter.from_json_to_numpy(json_object.get("startOfAcquisition")),
        )  # type:ignore
class DirectionConverter(_ndjson.JsonConverter[Direction, np.void]):
    """JSON <-> Direction converter; `c` is a fixed float32 array of length 3."""

    def __init__(self) -> None:
        self._c_converter = _ndjson.FixedNDArrayConverter(_ndjson.float32_converter, (3,))
        super().__init__(np.dtype([
            ("c", self._c_converter.overall_dtype()),
        ]))

    def to_json(self, value: Direction) -> object:
        if not isinstance(value, Direction):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'Direction' instance")
        json_object = {}
        json_object["c"] = self._c_converter.to_json(value.c)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        json_object["c"] = self._c_converter.numpy_to_json(value["c"])
        return json_object

    def from_json(self, json_object: object) -> Direction:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return Direction(
            c=self._c_converter.from_json(json_object["c"]),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._c_converter.from_json_to_numpy(json_object["c"]),
        )  # type:ignore


class DirectionMatrixConverter(_ndjson.JsonConverter[DirectionMatrix, np.void]):
    """JSON <-> DirectionMatrix converter; matrix is a fixed 3x3 float32 array."""

    def __init__(self) -> None:
        self._matrix_converter = _ndjson.FixedNDArrayConverter(_ndjson.float32_converter, (3, 3,))
        super().__init__(np.dtype([
            ("matrix", self._matrix_converter.overall_dtype()),
        ]))

    def to_json(self, value: DirectionMatrix) -> object:
        if not isinstance(value, DirectionMatrix):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'DirectionMatrix' instance")
        json_object = {}
        json_object["matrix"] = self._matrix_converter.to_json(value.matrix)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        json_object["matrix"] = self._matrix_converter.numpy_to_json(value["matrix"])
        return json_object

    def from_json(self, json_object: object) -> DirectionMatrix:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return DirectionMatrix(
            matrix=self._matrix_converter.from_json(json_object["matrix"]),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._matrix_converter.from_json_to_numpy(json_object["matrix"]),
        )  # type:ignore


class AtomConverter(_ndjson.JsonConverter[Atom, np.void]):
    """JSON <-> Atom converter (mass_number, atomic_number)."""

    def __init__(self) -> None:
        self._mass_number_converter = _ndjson.uint32_converter
        self._atomic_number_converter = _ndjson.uint32_converter
        super().__init__(np.dtype([
            ("mass_number", self._mass_number_converter.overall_dtype()),
            ("atomic_number", self._atomic_number_converter.overall_dtype()),
        ]))

    def to_json(self, value: Atom) -> object:
        if not isinstance(value, Atom):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'Atom' instance")
        json_object = {}
        json_object["massNumber"] = self._mass_number_converter.to_json(value.mass_number)
        json_object["atomicNumber"] = self._atomic_number_converter.to_json(value.atomic_number)
        return json_object

    def numpy_to_json(self, value: np.void) -> object:
        if not isinstance(value, np.void):  # pyright: ignore [reportUnnecessaryIsInstance]
            raise TypeError("Expected 'np.void' instance")
        json_object = {}
        json_object["massNumber"] = self._mass_number_converter.numpy_to_json(value["mass_number"])
        json_object["atomicNumber"] = self._atomic_number_converter.numpy_to_json(value["atomic_number"])
        return json_object

    def from_json(self, json_object: object) -> Atom:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return Atom(
            mass_number=self._mass_number_converter.from_json(json_object["massNumber"]),
            atomic_number=self._atomic_number_converter.from_json(json_object["atomicNumber"]),
        )

    def from_json_to_numpy(self, json_object: object) -> np.void:
        if not isinstance(json_object, dict):
            raise TypeError("Expected 'dict' instance")
        return (
            self._mass_number_converter.from_json_to_numpy(json_object["massNumber"]),
            self._atomic_number_converter.from_json_to_numpy(json_object["atomicNumber"]),
        )  # type:ignore
self._mass_number_converter.overall_dtype()), + ("atomic_number", self._atomic_number_converter.overall_dtype()), + ])) + + def to_json(self, value: Atom) -> object: + if not isinstance(value, Atom): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'Atom' instance") + json_object = {} + + json_object["massNumber"] = self._mass_number_converter.to_json(value.mass_number) + json_object["atomicNumber"] = self._atomic_number_converter.to_json(value.atomic_number) + return json_object + + def numpy_to_json(self, value: np.void) -> object: + if not isinstance(value, np.void): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'np.void' instance") + json_object = {} + + json_object["massNumber"] = self._mass_number_converter.numpy_to_json(value["mass_number"]) + json_object["atomicNumber"] = self._atomic_number_converter.numpy_to_json(value["atomic_number"]) + return json_object + + def from_json(self, json_object: object) -> Atom: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return Atom( + mass_number=self._mass_number_converter.from_json(json_object["massNumber"],), + atomic_number=self._atomic_number_converter.from_json(json_object["atomicNumber"],), + ) + + def from_json_to_numpy(self, json_object: object) -> np.void: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ( + self._mass_number_converter.from_json_to_numpy(json_object["massNumber"]), + self._atomic_number_converter.from_json_to_numpy(json_object["atomicNumber"]), + ) # type:ignore + + +class BulkMaterialConverter(_ndjson.JsonConverter[BulkMaterial, np.void]): + def __init__(self) -> None: + self._id_converter = _ndjson.uint32_converter + self._name_converter = _ndjson.string_converter + self._density_converter = _ndjson.float32_converter + self._atoms_converter = _ndjson.VectorConverter(AtomConverter()) + self._mass_fractions_converter = 
_ndjson.VectorConverter(_ndjson.float32_converter) + super().__init__(np.dtype([ + ("id", self._id_converter.overall_dtype()), + ("name", self._name_converter.overall_dtype()), + ("density", self._density_converter.overall_dtype()), + ("atoms", self._atoms_converter.overall_dtype()), + ("mass_fractions", self._mass_fractions_converter.overall_dtype()), + ])) + + def to_json(self, value: BulkMaterial) -> object: + if not isinstance(value, BulkMaterial): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'BulkMaterial' instance") + json_object = {} + + json_object["id"] = self._id_converter.to_json(value.id) + json_object["name"] = self._name_converter.to_json(value.name) + json_object["density"] = self._density_converter.to_json(value.density) + json_object["atoms"] = self._atoms_converter.to_json(value.atoms) + json_object["massFractions"] = self._mass_fractions_converter.to_json(value.mass_fractions) + return json_object + + def numpy_to_json(self, value: np.void) -> object: + if not isinstance(value, np.void): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'np.void' instance") + json_object = {} + + json_object["id"] = self._id_converter.numpy_to_json(value["id"]) + json_object["name"] = self._name_converter.numpy_to_json(value["name"]) + json_object["density"] = self._density_converter.numpy_to_json(value["density"]) + json_object["atoms"] = self._atoms_converter.numpy_to_json(value["atoms"]) + json_object["massFractions"] = self._mass_fractions_converter.numpy_to_json(value["mass_fractions"]) + return json_object + + def from_json(self, json_object: object) -> BulkMaterial: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return BulkMaterial( + id=self._id_converter.from_json(json_object["id"],), + name=self._name_converter.from_json(json_object["name"],), + density=self._density_converter.from_json(json_object["density"],), + 
atoms=self._atoms_converter.from_json(json_object["atoms"],), + mass_fractions=self._mass_fractions_converter.from_json(json_object["massFractions"],), + ) + + def from_json_to_numpy(self, json_object: object) -> np.void: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ( + self._id_converter.from_json_to_numpy(json_object["id"]), + self._name_converter.from_json_to_numpy(json_object["name"]), + self._density_converter.from_json_to_numpy(json_object["density"]), + self._atoms_converter.from_json_to_numpy(json_object["atoms"]), + self._mass_fractions_converter.from_json_to_numpy(json_object["massFractions"]), + ) # type:ignore + + +coincidence_policy_name_to_value_map = { + "rejectMultiples": CoincidencePolicy.REJECT_MULTIPLES, + "storeMultiplesAsPairs": CoincidencePolicy.STORE_MULTIPLES_AS_PAIRS, + "other": CoincidencePolicy.OTHER, +} +coincidence_policy_value_to_name_map = {v: n for n, v in coincidence_policy_name_to_value_map.items()} + +class ScannerInformationConverter(_ndjson.JsonConverter[ScannerInformation, np.void]): + def __init__(self) -> None: + self._model_name_converter = _ndjson.string_converter + self._scanner_geometry_converter = ScannerGeometryConverter() + self._bulk_materials_converter = _ndjson.VectorConverter(BulkMaterialConverter()) + self._gantry_alignment_converter = _ndjson.OptionalConverter(RigidTransformationConverter()) + self._tof_bin_edges_converter = _ndjson.NDArrayConverter(_ndjson.float32_converter, 1) + self._tof_resolution_converter = _ndjson.float32_converter + self._energy_bin_edges_converter = _ndjson.NDArrayConverter(_ndjson.float32_converter, 1) + self._energy_resolution_at_511_converter = _ndjson.float32_converter + self._event_time_block_duration_converter = _ndjson.uint32_converter + self._coincidence_policy_converter = _ndjson.EnumConverter(CoincidencePolicy, np.int32, coincidence_policy_name_to_value_map, coincidence_policy_value_to_name_map) + super().__init__(np.dtype([ + 
("model_name", self._model_name_converter.overall_dtype()), + ("scanner_geometry", self._scanner_geometry_converter.overall_dtype()), + ("bulk_materials", self._bulk_materials_converter.overall_dtype()), + ("gantry_alignment", self._gantry_alignment_converter.overall_dtype()), + ("tof_bin_edges", self._tof_bin_edges_converter.overall_dtype()), + ("tof_resolution", self._tof_resolution_converter.overall_dtype()), + ("energy_bin_edges", self._energy_bin_edges_converter.overall_dtype()), + ("energy_resolution_at_511", self._energy_resolution_at_511_converter.overall_dtype()), + ("event_time_block_duration", self._event_time_block_duration_converter.overall_dtype()), + ("coincidence_policy", self._coincidence_policy_converter.overall_dtype()), + ])) + + def to_json(self, value: ScannerInformation) -> object: + if not isinstance(value, ScannerInformation): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'ScannerInformation' instance") + json_object = {} + + json_object["modelName"] = self._model_name_converter.to_json(value.model_name) + json_object["scannerGeometry"] = self._scanner_geometry_converter.to_json(value.scanner_geometry) + json_object["bulkMaterials"] = self._bulk_materials_converter.to_json(value.bulk_materials) + if value.gantry_alignment is not None: + json_object["gantryAlignment"] = self._gantry_alignment_converter.to_json(value.gantry_alignment) + json_object["tofBinEdges"] = self._tof_bin_edges_converter.to_json(value.tof_bin_edges) + json_object["tofResolution"] = self._tof_resolution_converter.to_json(value.tof_resolution) + json_object["energyBinEdges"] = self._energy_bin_edges_converter.to_json(value.energy_bin_edges) + json_object["energyResolutionAt511"] = self._energy_resolution_at_511_converter.to_json(value.energy_resolution_at_511) + json_object["eventTimeBlockDuration"] = self._event_time_block_duration_converter.to_json(value.event_time_block_duration) + json_object["coincidencePolicy"] = 
self._coincidence_policy_converter.to_json(value.coincidence_policy) + return json_object + + def numpy_to_json(self, value: np.void) -> object: + if not isinstance(value, np.void): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'np.void' instance") + json_object = {} + + json_object["modelName"] = self._model_name_converter.numpy_to_json(value["model_name"]) + json_object["scannerGeometry"] = self._scanner_geometry_converter.numpy_to_json(value["scanner_geometry"]) + json_object["bulkMaterials"] = self._bulk_materials_converter.numpy_to_json(value["bulk_materials"]) + if (field_val := value["gantry_alignment"]) is not None: + json_object["gantryAlignment"] = self._gantry_alignment_converter.numpy_to_json(field_val) + json_object["tofBinEdges"] = self._tof_bin_edges_converter.numpy_to_json(value["tof_bin_edges"]) + json_object["tofResolution"] = self._tof_resolution_converter.numpy_to_json(value["tof_resolution"]) + json_object["energyBinEdges"] = self._energy_bin_edges_converter.numpy_to_json(value["energy_bin_edges"]) + json_object["energyResolutionAt511"] = self._energy_resolution_at_511_converter.numpy_to_json(value["energy_resolution_at_511"]) + json_object["eventTimeBlockDuration"] = self._event_time_block_duration_converter.numpy_to_json(value["event_time_block_duration"]) + json_object["coincidencePolicy"] = self._coincidence_policy_converter.numpy_to_json(value["coincidence_policy"]) + return json_object + + def from_json(self, json_object: object) -> ScannerInformation: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ScannerInformation( + model_name=self._model_name_converter.from_json(json_object["modelName"],), + scanner_geometry=self._scanner_geometry_converter.from_json(json_object["scannerGeometry"],), + bulk_materials=self._bulk_materials_converter.from_json(json_object["bulkMaterials"],), + 
gantry_alignment=self._gantry_alignment_converter.from_json(json_object.get("gantryAlignment")), + tof_bin_edges=self._tof_bin_edges_converter.from_json(json_object["tofBinEdges"],), + tof_resolution=self._tof_resolution_converter.from_json(json_object["tofResolution"],), + energy_bin_edges=self._energy_bin_edges_converter.from_json(json_object["energyBinEdges"],), + energy_resolution_at_511=self._energy_resolution_at_511_converter.from_json(json_object["energyResolutionAt511"],), + event_time_block_duration=self._event_time_block_duration_converter.from_json(json_object["eventTimeBlockDuration"],), + coincidence_policy=self._coincidence_policy_converter.from_json(json_object["coincidencePolicy"],), + ) + + def from_json_to_numpy(self, json_object: object) -> np.void: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ( + self._model_name_converter.from_json_to_numpy(json_object["modelName"]), + self._scanner_geometry_converter.from_json_to_numpy(json_object["scannerGeometry"]), + self._bulk_materials_converter.from_json_to_numpy(json_object["bulkMaterials"]), + self._gantry_alignment_converter.from_json_to_numpy(json_object.get("gantryAlignment")), + self._tof_bin_edges_converter.from_json_to_numpy(json_object["tofBinEdges"]), + self._tof_resolution_converter.from_json_to_numpy(json_object["tofResolution"]), + self._energy_bin_edges_converter.from_json_to_numpy(json_object["energyBinEdges"]), + self._energy_resolution_at_511_converter.from_json_to_numpy(json_object["energyResolutionAt511"]), + self._event_time_block_duration_converter.from_json_to_numpy(json_object["eventTimeBlockDuration"]), + self._coincidence_policy_converter.from_json_to_numpy(json_object["coincidencePolicy"]), + ) # type:ignore + + +class HeaderConverter(_ndjson.JsonConverter[Header, np.void]): + def __init__(self) -> None: + self._scanner_converter = ScannerInformationConverter() + self._exam_converter = 
_ndjson.OptionalConverter(ExamInformationConverter()) + super().__init__(np.dtype([ + ("scanner", self._scanner_converter.overall_dtype()), + ("exam", self._exam_converter.overall_dtype()), + ])) + + def to_json(self, value: Header) -> object: + if not isinstance(value, Header): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'Header' instance") + json_object = {} + + json_object["scanner"] = self._scanner_converter.to_json(value.scanner) + if value.exam is not None: + json_object["exam"] = self._exam_converter.to_json(value.exam) + return json_object + + def numpy_to_json(self, value: np.void) -> object: + if not isinstance(value, np.void): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'np.void' instance") + json_object = {} + + json_object["scanner"] = self._scanner_converter.numpy_to_json(value["scanner"]) + if (field_val := value["exam"]) is not None: + json_object["exam"] = self._exam_converter.numpy_to_json(field_val) + return json_object + + def from_json(self, json_object: object) -> Header: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return Header( + scanner=self._scanner_converter.from_json(json_object["scanner"],), + exam=self._exam_converter.from_json(json_object.get("exam")), + ) + + def from_json_to_numpy(self, json_object: object) -> np.void: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ( + self._scanner_converter.from_json_to_numpy(json_object["scanner"]), + self._exam_converter.from_json_to_numpy(json_object.get("exam")), + ) # type:ignore + + +class TripleEventConverter(_ndjson.JsonConverter[TripleEvent, np.void]): + def __init__(self) -> None: + self._detector_ids_converter = _ndjson.FixedVectorConverter(_ndjson.uint32_converter, 3) + self._tof_indices_converter = _ndjson.FixedVectorConverter(_ndjson.uint32_converter, 2) + self._energy_indices_converter = 
_ndjson.FixedVectorConverter(_ndjson.uint32_converter, 3) + super().__init__(np.dtype([ + ("detector_ids", self._detector_ids_converter.overall_dtype()), + ("tof_indices", self._tof_indices_converter.overall_dtype()), + ("energy_indices", self._energy_indices_converter.overall_dtype()), + ])) + + def to_json(self, value: TripleEvent) -> object: + if not isinstance(value, TripleEvent): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'TripleEvent' instance") + json_object = {} + + json_object["detectorIds"] = self._detector_ids_converter.to_json(value.detector_ids) + json_object["tofIndices"] = self._tof_indices_converter.to_json(value.tof_indices) + json_object["energyIndices"] = self._energy_indices_converter.to_json(value.energy_indices) + return json_object + + def numpy_to_json(self, value: np.void) -> object: + if not isinstance(value, np.void): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'np.void' instance") + json_object = {} + + json_object["detectorIds"] = self._detector_ids_converter.numpy_to_json(value["detector_ids"]) + json_object["tofIndices"] = self._tof_indices_converter.numpy_to_json(value["tof_indices"]) + json_object["energyIndices"] = self._energy_indices_converter.numpy_to_json(value["energy_indices"]) + return json_object + + def from_json(self, json_object: object) -> TripleEvent: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return TripleEvent( + detector_ids=self._detector_ids_converter.from_json(json_object["detectorIds"],), + tof_indices=self._tof_indices_converter.from_json(json_object["tofIndices"],), + energy_indices=self._energy_indices_converter.from_json(json_object["energyIndices"],), + ) + + def from_json_to_numpy(self, json_object: object) -> np.void: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ( + self._detector_ids_converter.from_json_to_numpy(json_object["detectorIds"]), + 
self._tof_indices_converter.from_json_to_numpy(json_object["tofIndices"]), + self._energy_indices_converter.from_json_to_numpy(json_object["energyIndices"]), + ) # type:ignore + + +class EventTimeBlockConverter(_ndjson.JsonConverter[EventTimeBlock, np.void]): + def __init__(self) -> None: + self._start_converter = _ndjson.uint32_converter + self._prompt_events_converter = _ndjson.VectorConverter(CoincidenceEventConverter()) + self._delayed_events_converter = _ndjson.OptionalConverter(_ndjson.VectorConverter(CoincidenceEventConverter())) + self._triple_events_converter = _ndjson.OptionalConverter(_ndjson.VectorConverter(TripleEventConverter())) + super().__init__(np.dtype([ + ("start", self._start_converter.overall_dtype()), + ("prompt_events", self._prompt_events_converter.overall_dtype()), + ("delayed_events", self._delayed_events_converter.overall_dtype()), + ("triple_events", self._triple_events_converter.overall_dtype()), + ])) + + def to_json(self, value: EventTimeBlock) -> object: + if not isinstance(value, EventTimeBlock): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'EventTimeBlock' instance") + json_object = {} + + json_object["start"] = self._start_converter.to_json(value.start) + json_object["promptEvents"] = self._prompt_events_converter.to_json(value.prompt_events) + if value.delayed_events is not None: + json_object["delayedEvents"] = self._delayed_events_converter.to_json(value.delayed_events) + if value.triple_events is not None: + json_object["tripleEvents"] = self._triple_events_converter.to_json(value.triple_events) + return json_object + + def numpy_to_json(self, value: np.void) -> object: + if not isinstance(value, np.void): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'np.void' instance") + json_object = {} + + json_object["start"] = self._start_converter.numpy_to_json(value["start"]) + json_object["promptEvents"] = self._prompt_events_converter.numpy_to_json(value["prompt_events"]) 
+ if (field_val := value["delayed_events"]) is not None: + json_object["delayedEvents"] = self._delayed_events_converter.numpy_to_json(field_val) + if (field_val := value["triple_events"]) is not None: + json_object["tripleEvents"] = self._triple_events_converter.numpy_to_json(field_val) + return json_object + + def from_json(self, json_object: object) -> EventTimeBlock: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return EventTimeBlock( + start=self._start_converter.from_json(json_object["start"],), + prompt_events=self._prompt_events_converter.from_json(json_object["promptEvents"],), + delayed_events=self._delayed_events_converter.from_json(json_object.get("delayedEvents")), + triple_events=self._triple_events_converter.from_json(json_object.get("tripleEvents")), + ) + + def from_json_to_numpy(self, json_object: object) -> np.void: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ( + self._start_converter.from_json_to_numpy(json_object["start"]), + self._prompt_events_converter.from_json_to_numpy(json_object["promptEvents"]), + self._delayed_events_converter.from_json_to_numpy(json_object.get("delayedEvents")), + self._triple_events_converter.from_json_to_numpy(json_object.get("tripleEvents")), + ) # type:ignore + + +class ExternalSignalTimeBlockConverter(_ndjson.JsonConverter[ExternalSignalTimeBlock, np.void]): + def __init__(self) -> None: + self._start_converter = _ndjson.uint32_converter + self._signal_id_converter = _ndjson.uint32_converter + self._signal_values_converter = _ndjson.VectorConverter(_ndjson.float32_converter) + super().__init__(np.dtype([ + ("start", self._start_converter.overall_dtype()), + ("signal_id", self._signal_id_converter.overall_dtype()), + ("signal_values", self._signal_values_converter.overall_dtype()), + ])) + + def to_json(self, value: ExternalSignalTimeBlock) -> object: + if not isinstance(value, ExternalSignalTimeBlock): # pyright: ignore 
[reportUnnecessaryIsInstance] + raise TypeError("Expected 'ExternalSignalTimeBlock' instance") + json_object = {} + + json_object["start"] = self._start_converter.to_json(value.start) + json_object["signalID"] = self._signal_id_converter.to_json(value.signal_id) + json_object["signalValues"] = self._signal_values_converter.to_json(value.signal_values) + return json_object + + def numpy_to_json(self, value: np.void) -> object: + if not isinstance(value, np.void): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'np.void' instance") + json_object = {} + + json_object["start"] = self._start_converter.numpy_to_json(value["start"]) + json_object["signalID"] = self._signal_id_converter.numpy_to_json(value["signal_id"]) + json_object["signalValues"] = self._signal_values_converter.numpy_to_json(value["signal_values"]) + return json_object + + def from_json(self, json_object: object) -> ExternalSignalTimeBlock: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ExternalSignalTimeBlock( + start=self._start_converter.from_json(json_object["start"],), + signal_id=self._signal_id_converter.from_json(json_object["signalID"],), + signal_values=self._signal_values_converter.from_json(json_object["signalValues"],), + ) + + def from_json_to_numpy(self, json_object: object) -> np.void: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ( + self._start_converter.from_json_to_numpy(json_object["start"]), + self._signal_id_converter.from_json_to_numpy(json_object["signalID"]), + self._signal_values_converter.from_json_to_numpy(json_object["signalValues"]), + ) # type:ignore + + +class BedMovementTimeBlockConverter(_ndjson.JsonConverter[BedMovementTimeBlock, np.void]): + def __init__(self) -> None: + self._start_converter = _ndjson.uint32_converter + self._transform_converter = RigidTransformationConverter() + super().__init__(np.dtype([ + ("start", 
self._start_converter.overall_dtype()), + ("transform", self._transform_converter.overall_dtype()), + ])) + + def to_json(self, value: BedMovementTimeBlock) -> object: + if not isinstance(value, BedMovementTimeBlock): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'BedMovementTimeBlock' instance") + json_object = {} + + json_object["start"] = self._start_converter.to_json(value.start) + json_object["transform"] = self._transform_converter.to_json(value.transform) + return json_object + + def numpy_to_json(self, value: np.void) -> object: + if not isinstance(value, np.void): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'np.void' instance") + json_object = {} + + json_object["start"] = self._start_converter.numpy_to_json(value["start"]) + json_object["transform"] = self._transform_converter.numpy_to_json(value["transform"]) + return json_object + + def from_json(self, json_object: object) -> BedMovementTimeBlock: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return BedMovementTimeBlock( + start=self._start_converter.from_json(json_object["start"],), + transform=self._transform_converter.from_json(json_object["transform"],), + ) + + def from_json_to_numpy(self, json_object: object) -> np.void: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ( + self._start_converter.from_json_to_numpy(json_object["start"]), + self._transform_converter.from_json_to_numpy(json_object["transform"]), + ) # type:ignore + + +class GantryMovementTimeBlockConverter(_ndjson.JsonConverter[GantryMovementTimeBlock, np.void]): + def __init__(self) -> None: + self._start_converter = _ndjson.uint32_converter + self._transform_converter = RigidTransformationConverter() + super().__init__(np.dtype([ + ("start", self._start_converter.overall_dtype()), + ("transform", self._transform_converter.overall_dtype()), + ])) + + def to_json(self, value: 
GantryMovementTimeBlock) -> object: + if not isinstance(value, GantryMovementTimeBlock): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'GantryMovementTimeBlock' instance") + json_object = {} + + json_object["start"] = self._start_converter.to_json(value.start) + json_object["transform"] = self._transform_converter.to_json(value.transform) + return json_object + + def numpy_to_json(self, value: np.void) -> object: + if not isinstance(value, np.void): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'np.void' instance") + json_object = {} + + json_object["start"] = self._start_converter.numpy_to_json(value["start"]) + json_object["transform"] = self._transform_converter.numpy_to_json(value["transform"]) + return json_object + + def from_json(self, json_object: object) -> GantryMovementTimeBlock: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return GantryMovementTimeBlock( + start=self._start_converter.from_json(json_object["start"],), + transform=self._transform_converter.from_json(json_object["transform"],), + ) + + def from_json_to_numpy(self, json_object: object) -> np.void: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ( + self._start_converter.from_json_to_numpy(json_object["start"]), + self._transform_converter.from_json_to_numpy(json_object["transform"]), + ) # type:ignore + + +external_signal_type_enum_name_to_value_map = { + "ecgTrace": ExternalSignalTypeEnum.ECG_TRACE, + "ecgTrigger": ExternalSignalTypeEnum.ECG_TRIGGER, + "respTrace": ExternalSignalTypeEnum.RESP_TRACE, + "respTrigger": ExternalSignalTypeEnum.RESP_TRIGGER, + "otherMotionSignal": ExternalSignalTypeEnum.OTHER_MOTION_SIGNAL, + "otherMotionTrigger": ExternalSignalTypeEnum.OTHER_MOTION_TRIGGER, + "externalSync": ExternalSignalTypeEnum.EXTERNAL_SYNC, + "other": ExternalSignalTypeEnum.OTHER, +} +external_signal_type_enum_value_to_name_map = {v: n for n, 
v in external_signal_type_enum_name_to_value_map.items()} + +class ExternalSignalTypeConverter(_ndjson.JsonConverter[ExternalSignalType, np.void]): + def __init__(self) -> None: + self._type_converter = _ndjson.EnumConverter(ExternalSignalTypeEnum, np.int32, external_signal_type_enum_name_to_value_map, external_signal_type_enum_value_to_name_map) + self._description_converter = _ndjson.string_converter + self._id_converter = _ndjson.uint32_converter + super().__init__(np.dtype([ + ("type", self._type_converter.overall_dtype()), + ("description", self._description_converter.overall_dtype()), + ("id", self._id_converter.overall_dtype()), + ])) + + def to_json(self, value: ExternalSignalType) -> object: + if not isinstance(value, ExternalSignalType): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'ExternalSignalType' instance") + json_object = {} + + json_object["type"] = self._type_converter.to_json(value.type) + json_object["description"] = self._description_converter.to_json(value.description) + json_object["id"] = self._id_converter.to_json(value.id) + return json_object + + def numpy_to_json(self, value: np.void) -> object: + if not isinstance(value, np.void): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'np.void' instance") + json_object = {} + + json_object["type"] = self._type_converter.numpy_to_json(value["type"]) + json_object["description"] = self._description_converter.numpy_to_json(value["description"]) + json_object["id"] = self._id_converter.numpy_to_json(value["id"]) + return json_object + + def from_json(self, json_object: object) -> ExternalSignalType: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ExternalSignalType( + type=self._type_converter.from_json(json_object["type"],), + description=self._description_converter.from_json(json_object["description"],), + id=self._id_converter.from_json(json_object["id"],), + ) + + def 
from_json_to_numpy(self, json_object: object) -> np.void: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ( + self._type_converter.from_json_to_numpy(json_object["type"]), + self._description_converter.from_json_to_numpy(json_object["description"]), + self._id_converter.from_json_to_numpy(json_object["id"]), + ) # type:ignore + + +class TimeIntervalConverter(_ndjson.JsonConverter[TimeInterval, np.void]): + def __init__(self) -> None: + self._start_converter = _ndjson.uint32_converter + self._stop_converter = _ndjson.uint32_converter + super().__init__(np.dtype([ + ("start", self._start_converter.overall_dtype()), + ("stop", self._stop_converter.overall_dtype()), + ])) + + def to_json(self, value: TimeInterval) -> object: + if not isinstance(value, TimeInterval): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'TimeInterval' instance") + json_object = {} + + json_object["start"] = self._start_converter.to_json(value.start) + json_object["stop"] = self._stop_converter.to_json(value.stop) + return json_object + + def numpy_to_json(self, value: np.void) -> object: + if not isinstance(value, np.void): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'np.void' instance") + json_object = {} + + json_object["start"] = self._start_converter.numpy_to_json(value["start"]) + json_object["stop"] = self._stop_converter.numpy_to_json(value["stop"]) + return json_object + + def from_json(self, json_object: object) -> TimeInterval: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return TimeInterval( + start=self._start_converter.from_json(json_object["start"],), + stop=self._stop_converter.from_json(json_object["stop"],), + ) + + def from_json_to_numpy(self, json_object: object) -> np.void: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ( + 
self._start_converter.from_json_to_numpy(json_object["start"]), + self._stop_converter.from_json_to_numpy(json_object["stop"]), + ) # type:ignore + + +class TimeFrameInformationConverter(_ndjson.JsonConverter[TimeFrameInformation, np.void]): + def __init__(self) -> None: + self._time_frames_converter = _ndjson.VectorConverter(TimeIntervalConverter()) + super().__init__(np.dtype([ + ("time_frames", self._time_frames_converter.overall_dtype()), + ])) + + def to_json(self, value: TimeFrameInformation) -> object: + if not isinstance(value, TimeFrameInformation): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'TimeFrameInformation' instance") + json_object = {} + + json_object["timeFrames"] = self._time_frames_converter.to_json(value.time_frames) + return json_object + + def numpy_to_json(self, value: np.void) -> object: + if not isinstance(value, np.void): # pyright: ignore [reportUnnecessaryIsInstance] + raise TypeError("Expected 'np.void' instance") + json_object = {} + + json_object["timeFrames"] = self._time_frames_converter.numpy_to_json(value["time_frames"]) + return json_object + + def from_json(self, json_object: object) -> TimeFrameInformation: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return TimeFrameInformation( + time_frames=self._time_frames_converter.from_json(json_object["timeFrames"],), + ) + + def from_json_to_numpy(self, json_object: object) -> np.void: + if not isinstance(json_object, dict): + raise TypeError("Expected 'dict' instance") + return ( + self._time_frames_converter.from_json_to_numpy(json_object["timeFrames"]), + ) # type:ignore + + +class NDJsonPETSIRDWriter(_ndjson.NDJsonProtocolWriter, PETSIRDWriterBase): + """NDJson writer for the PETSIRD protocol. 
+ + Definition of the stream of data + """ + + + def __init__(self, stream: typing.Union[typing.TextIO, str]) -> None: + PETSIRDWriterBase.__init__(self) + _ndjson.NDJsonProtocolWriter.__init__(self, stream, PETSIRDWriterBase.schema) + + def _write_header(self, value: Header) -> None: + converter = HeaderConverter() + json_value = converter.to_json(value) + self._write_json_line({"header": json_value}) + + def _write_time_blocks(self, value: collections.abc.Iterable[TimeBlock]) -> None: + converter = _ndjson.UnionConverter(TimeBlock, [(TimeBlock.EventTimeBlock, EventTimeBlockConverter(), [dict]), (TimeBlock.ExternalSignalTimeBlock, ExternalSignalTimeBlockConverter(), [dict]), (TimeBlock.BedMovementTimeBlock, BedMovementTimeBlockConverter(), [dict]), (TimeBlock.GantryMovementTimeBlock, GantryMovementTimeBlockConverter(), [dict])], False) + for item in value: + json_item = converter.to_json(item) + self._write_json_line({"timeBlocks": json_item}) + + +class NDJsonPETSIRDReader(_ndjson.NDJsonProtocolReader, PETSIRDReaderBase): + """NDJson reader for the PETSIRD protocol. 
+ + Definition of the stream of data + """ + + + def __init__(self, stream: typing.Union[io.BufferedReader, typing.TextIO, str]) -> None: + PETSIRDReaderBase.__init__(self) + _ndjson.NDJsonProtocolReader.__init__(self, stream, PETSIRDReaderBase.schema) + + def _read_header(self) -> Header: + json_object = self._read_json_line("header", True) + converter = HeaderConverter() + return converter.from_json(json_object) + + def _read_time_blocks(self) -> collections.abc.Iterable[TimeBlock]: + converter = _ndjson.UnionConverter(TimeBlock, [(TimeBlock.EventTimeBlock, EventTimeBlockConverter(), [dict]), (TimeBlock.ExternalSignalTimeBlock, ExternalSignalTimeBlockConverter(), [dict]), (TimeBlock.BedMovementTimeBlock, BedMovementTimeBlockConverter(), [dict]), (TimeBlock.GantryMovementTimeBlock, GantryMovementTimeBlockConverter(), [dict])], False) + while (json_object := self._read_json_line("timeBlocks", False)) is not _ndjson.MISSING_SENTINEL: + yield converter.from_json(json_object) + diff --git a/python/petsird/protocols.py b/python/petsird/protocols.py new file mode 100644 index 0000000..b623de3 --- /dev/null +++ b/python/petsird/protocols.py @@ -0,0 +1,186 @@ +# This file was generated by the "yardl" tool. DO NOT EDIT. + +# pyright: reportUnusedImport=false + +import abc +import collections.abc +import datetime +import typing + +import numpy as np +import numpy.typing as npt + +from .types import * +from .yardl_types import ProtocolError +from . import yardl_types as yardl + +class PETSIRDWriterBase(abc.ABC): + """Abstract writer for the PETSIRD protocol. 
+ + Definition of the stream of data + """ + + + def __init__(self) -> None: + self._state = 0 + + schema = r"""{"protocol":{"name":"PETSIRD","sequence":[{"name":"header","type":"PETSIRD.Header"},{"name":"timeBlocks","type":{"stream":{"items":"PETSIRD.TimeBlock"}}}]},"types":[{"name":"AnnulusShape","fields":[{"name":"innerRadius","type":"float32"},{"name":"outerRadius","type":"float32"},{"name":"thickness","type":"float32"},{"name":"angularRange","type":{"vector":{"items":"float32","length":2}}}]},{"name":"Atom","fields":[{"name":"massNumber","type":"uint32"},{"name":"atomicNumber","type":"uint32"}]},{"name":"BedMovementTimeBlock","fields":[{"name":"start","type":"uint32"},{"name":"transform","type":"PETSIRD.RigidTransformation"}]},{"name":"BoxShape","fields":[{"name":"corners","type":{"vector":{"items":"PETSIRD.Coordinate","length":8}}}]},{"name":"BoxSolidVolume","type":{"name":"PETSIRD.SolidVolume","typeArguments":["PETSIRD.BoxShape"]}},{"name":"BulkMaterial","fields":[{"name":"id","type":"uint32"},{"name":"name","type":"string"},{"name":"density","type":"float32"},{"name":"atoms","type":{"vector":{"items":"PETSIRD.Atom"}}},{"name":"massFractions","type":{"vector":{"items":"float32"}}}]},{"name":"CoincidenceEvent","fields":[{"name":"detectorIds","type":{"vector":{"items":"uint32","length":2}}},{"name":"tofIdx","type":"uint32"},{"name":"energyIndices","type":{"vector":{"items":"uint32","length":2}}}]},{"name":"CoincidencePolicy","values":[{"symbol":"rejectMultiples","value":0},{"symbol":"storeMultiplesAsPairs","value":1},{"symbol":"other","value":2}]},{"name":"Coordinate","fields":[{"name":"c","type":{"array":{"items":"float32","dimensions":[{"length":3}]}}}]},{"name":"DetectorModule","fields":[{"name":"detectingElements","type":{"vector":{"items":"PETSIRD.ReplicatedBoxSolidVolume"}}},{"name":"detectingElementIds","type":{"vector":{"items":"uint32"}}},{"name":"nonDetectingElements","type":{"vector":{"items":"PETSIRD.ReplicatedGenericSolidVolume"}}}]},{"name":"Event
TimeBlock","fields":[{"name":"start","type":"uint32"},{"name":"promptEvents","type":{"vector":{"items":"PETSIRD.CoincidenceEvent"}}},{"name":"delayedEvents","type":[null,{"vector":{"items":"PETSIRD.CoincidenceEvent"}}]},{"name":"tripleEvents","type":[null,{"vector":{"items":"PETSIRD.TripleEvent"}}]}]},{"name":"ExamInformation","fields":[{"name":"subject","type":"PETSIRD.Subject"},{"name":"institution","type":"PETSIRD.Institution"},{"name":"protocol","type":[null,"string"]},{"name":"startOfAcquisition","type":[null,"datetime"]}]},{"name":"ExternalSignalTimeBlock","fields":[{"name":"start","type":"uint32"},{"name":"signalID","type":"uint32"},{"name":"signalValues","type":{"vector":{"items":"float32"}}}]},{"name":"GantryMovementTimeBlock","fields":[{"name":"start","type":"uint32"},{"name":"transform","type":"PETSIRD.RigidTransformation"}]},{"name":"GenericSolidVolume","type":{"name":"PETSIRD.SolidVolume","typeArguments":["PETSIRD.GeometricShape"]}},{"name":"GeometricShape","type":[{"tag":"BoxShape","type":"PETSIRD.BoxShape"},{"tag":"AnnulusShape","type":"PETSIRD.AnnulusShape"}]},{"name":"Header","fields":[{"name":"scanner","type":"PETSIRD.ScannerInformation"},{"name":"exam","type":[null,"PETSIRD.ExamInformation"]}]},{"name":"Institution","fields":[{"name":"name","type":"string"},{"name":"address","type":"string"}]},{"name":"ReplicatedBoxSolidVolume","type":{"name":"PETSIRD.ReplicatedObject","typeArguments":["PETSIRD.BoxSolidVolume"]}},{"name":"ReplicatedDetectorModule","type":{"name":"PETSIRD.ReplicatedObject","typeArguments":["PETSIRD.DetectorModule"]}},{"name":"ReplicatedGenericSolidVolume","type":{"name":"PETSIRD.ReplicatedObject","typeArguments":["PETSIRD.GenericSolidVolume"]}},{"name":"ReplicatedObject","typeParameters":["T"],"fields":[{"name":"object","type":"T"},{"name":"transforms","type":{"vector":{"items":"PETSIRD.RigidTransformation"}}},{"name":"ids","type":{"vector":{"items":"uint32"}}}]},{"name":"RigidTransformation","fields":[{"name":"matrix","type":{"arr
ay":{"items":"float32","dimensions":[{"length":3},{"length":4}]}}}]},{"name":"ScannerGeometry","fields":[{"name":"replicatedModules","type":{"vector":{"items":"PETSIRD.ReplicatedDetectorModule"}}},{"name":"ids","type":{"vector":{"items":"uint32"}}},{"name":"nonDetectingVolumes","type":[null,{"vector":{"items":"PETSIRD.GenericSolidVolume"}}]}]},{"name":"ScannerInformation","fields":[{"name":"modelName","type":"string"},{"name":"scannerGeometry","type":"PETSIRD.ScannerGeometry"},{"name":"bulkMaterials","type":{"vector":{"items":"PETSIRD.BulkMaterial"}}},{"name":"gantryAlignment","type":[null,"PETSIRD.RigidTransformation"]},{"name":"tofBinEdges","type":{"array":{"items":"float32","dimensions":1}}},{"name":"tofResolution","type":"float32"},{"name":"energyBinEdges","type":{"array":{"items":"float32","dimensions":1}}},{"name":"energyResolutionAt511","type":"float32"},{"name":"eventTimeBlockDuration","type":"uint32"},{"name":"coincidencePolicy","type":"PETSIRD.CoincidencePolicy"}]},{"name":"SolidVolume","typeParameters":["Shape"],"fields":[{"name":"shape","type":"Shape"},{"name":"materialId","type":"uint32"}]},{"name":"Subject","fields":[{"name":"name","type":[null,"string"]},{"name":"id","type":"string"}]},{"name":"TimeBlock","type":[{"tag":"EventTimeBlock","type":"PETSIRD.EventTimeBlock"},{"tag":"ExternalSignalTimeBlock","type":"PETSIRD.ExternalSignalTimeBlock"},{"tag":"BedMovementTimeBlock","type":"PETSIRD.BedMovementTimeBlock"},{"tag":"GantryMovementTimeBlock","type":"PETSIRD.GantryMovementTimeBlock"}]},{"name":"TripleEvent","fields":[{"name":"detectorIds","type":{"vector":{"items":"uint32","length":3}}},{"name":"tofIndices","type":{"vector":{"items":"uint32","length":2}}},{"name":"energyIndices","type":{"vector":{"items":"uint32","length":3}}}]}]}""" + + def close(self) -> None: + if self._state == 3: + try: + self._end_stream() + return + finally: + self._close() + self._close() + if self._state != 4: + expected_method = self._state_to_method_name((self._state + 1) 
& ~1)
            raise ProtocolError(f"Protocol writer closed before all steps were called. Expected to call to '{expected_method}'.")

    def __enter__(self):
        return self

    def __exit__(self, exc_type: typing.Optional[type[BaseException]], exc: typing.Optional[BaseException], traceback: object) -> None:
        # Close on scope exit, but never mask an exception already raised
        # inside the with-block with a secondary close() error.
        try:
            self.close()
        except Exception as e:
            if exc is None:
                raise e

    def write_header(self, value: Header) -> None:
        """Ordinal 0"""

        if self._state != 0:
            self._raise_unexpected_state(0)

        self._write_header(value)
        self._state = 2

    def write_time_blocks(self, value: collections.abc.Iterable[TimeBlock]) -> None:
        """Ordinal 1"""

        # `& ~1` clears the low bit, so the stream step is accepted in state
        # 2 or 3 — i.e. write_time_blocks may be called more than once.
        if self._state & ~1 != 2:
            self._raise_unexpected_state(2)

        self._write_time_blocks(value)
        self._state = 3

    @abc.abstractmethod
    def _write_header(self, value: Header) -> None:
        raise NotImplementedError()

    @abc.abstractmethod
    def _write_time_blocks(self, value: collections.abc.Iterable[TimeBlock]) -> None:
        raise NotImplementedError()

    @abc.abstractmethod
    def _close(self) -> None:
        pass

    @abc.abstractmethod
    def _end_stream(self) -> None:
        pass

    def _raise_unexpected_state(self, actual: int) -> None:
        expected_method = self._state_to_method_name(self._state)
        actual_method = self._state_to_method_name(actual)
        raise ProtocolError(f"Expected to call to '{expected_method}' but received call to '{actual_method}'.")

    def _state_to_method_name(self, state: int) -> str:
        if state == 0:
            return 'write_header'
        if state == 2:
            return 'write_time_blocks'
        return ""

class PETSIRDReaderBase(abc.ABC):
    """Abstract reader for the PETSIRD protocol.

    Definition of the stream of data
    """


    def __init__(self) -> None:
        # Protocol state machine: 0 = expect header, 2 = expect time blocks,
        # 3 = stream iterable handed out but not yet exhausted, 4 = complete.
        self._state = 0

    def close(self) -> None:
        self._close()
        if self._state != 4:
            # An odd state means the iterable returned by the previous step
            # was only partially consumed.
            if self._state % 2 == 1:
                previous_method = self._state_to_method_name(self._state - 1)
                raise ProtocolError(f"Protocol reader closed before all data was consumed. The iterable returned by '{previous_method}' was not fully consumed.")
            else:
                expected_method = self._state_to_method_name(self._state)
                raise ProtocolError(f"Protocol reader closed before all data was consumed. Expected call to '{expected_method}'.")


    # The reader shares the writer's protocol schema string.
    schema = PETSIRDWriterBase.schema

    def __enter__(self):
        return self

    def __exit__(self, exc_type: typing.Optional[type[BaseException]], exc: typing.Optional[BaseException], traceback: object) -> None:
        # Close on scope exit; suppress close() errors if the body already raised.
        try:
            self.close()
        except Exception as e:
            if exc is None:
                raise e

    @abc.abstractmethod
    def _close(self) -> None:
        raise NotImplementedError()

    def read_header(self) -> Header:
        """Ordinal 0"""

        if self._state != 0:
            self._raise_unexpected_state(0)

        value = self._read_header()
        self._state = 2
        return value

    def read_time_blocks(self) -> collections.abc.Iterable[TimeBlock]:
        """Ordinal 1"""

        if self._state != 2:
            self._raise_unexpected_state(2)

        value = self._read_time_blocks()
        self._state = 3
        return self._wrap_iterable(value, 4)

    def copy_to(self, writer: PETSIRDWriterBase) -> None:
        # Stream this reader's contents into a writer, step by step.
        writer.write_header(self.read_header())
        writer.write_time_blocks(self.read_time_blocks())

    @abc.abstractmethod
    def _read_header(self) -> Header:
        raise NotImplementedError()

    @abc.abstractmethod
    def _read_time_blocks(self) -> collections.abc.Iterable[TimeBlock]:
        raise NotImplementedError()

    T = typing.TypeVar('T')
    def _wrap_iterable(self, iterable: collections.abc.Iterable[T], final_state: int) -> collections.abc.Iterable[T]:
        # Generator wrapper: only after the caller fully exhausts the stream
        # is the protocol marked complete (final_state).
        yield from iterable
        self._state = final_state

    def _raise_unexpected_state(self, actual: int) -> None:
        actual_method = self._state_to_method_name(actual)
        if self._state % 2 == 1:
            previous_method = self._state_to_method_name(self._state - 1)
            raise ProtocolError(f"Received call to '{actual_method}' but the iterable returned by '{previous_method}' was not fully consumed.")
        else:
            expected_method = self._state_to_method_name(self._state)
            raise ProtocolError(f"Expected to call to '{expected_method}' but received call to '{actual_method}'.")

    def _state_to_method_name(self, state: int) -> str:
        if state == 0:
            return 'read_header'
        if state == 2:
            return 'read_time_blocks'
        return ""

diff --git a/python/petsird/types.py b/python/petsird/types.py
new file mode 100644
index 0000000..a76f755
--- /dev/null
+++ b/python/petsird/types.py
@@ -0,0 +1,1059 @@
# This file was generated by the "yardl" tool. DO NOT EDIT.

# pyright: reportUnusedImport=false
# pyright: reportUnknownArgumentType=false
# pyright: reportUnknownMemberType=false
# pyright: reportUnknownVariableType=false

import datetime
import enum
import types
import typing

import numpy as np
import numpy.typing as npt

from . import yardl_types as yardl
from . import _dtypes


# Type variables for generic yardl types; *_NP variants are the numpy-dtype-bound versions.
Shape = typing.TypeVar("Shape")
Shape_NP = typing.TypeVar("Shape_NP", bound=np.generic)
T = typing.TypeVar("T")
T_NP = typing.TypeVar("T_NP", bound=np.generic)


class CoincidenceEvent:
    """All information about a coincidence event specified as identifiers or indices (i.e. discretized). Indices start from 0.
    TODO: this might take up too much space, so some/all of these could be combined in a single index if necessary.
    """

    detector_ids: list[yardl.UInt32]
    """the pair of detector elements"""

    tof_idx: yardl.UInt32
    """an index into the tofBinEdges field in the ScannerInformation"""

    energy_indices: list[yardl.UInt32]
    """a pair of indices into the energyBinEdges field in the ScannerInformation"""


    def __init__(self, *,
        detector_ids: typing.Optional[list[yardl.UInt32]] = None,
        tof_idx: yardl.UInt32 = 0,
        energy_indices: typing.Optional[list[yardl.UInt32]] = None,
    ):
        # Defaults are pair-sized zero lists (fixed-length vectors in the schema).
        self.detector_ids = detector_ids if detector_ids is not None else [0] * 2
        self.tof_idx = tof_idx
        self.energy_indices = energy_indices if energy_indices is not None else [0] * 2

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, CoincidenceEvent)
            and self.detector_ids == other.detector_ids
            and self.tof_idx == other.tof_idx
            and self.energy_indices == other.energy_indices
        )

    def __str__(self) -> str:
        return f"CoincidenceEvent(detectorIds={self.detector_ids}, tofIdx={self.tof_idx}, energyIndices={self.energy_indices})"

    def __repr__(self) -> str:
        return f"CoincidenceEvent(detectorIds={repr(self.detector_ids)}, tofIdx={repr(self.tof_idx)}, energyIndices={repr(self.energy_indices)})"


class SolidVolume(typing.Generic[Shape]):
    """A shape filled with a uniform material"""

    shape: Shape
    material_id: yardl.UInt32
    """identifier referring to `ScannerInformation.bulkMaterials` list"""


    def __init__(self, *,
        shape: Shape,
        material_id: yardl.UInt32 = 0,
    ):
        self.shape = shape
        self.material_id = material_id

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, SolidVolume)
            and yardl.structural_equal(self.shape, other.shape)
            and self.material_id == other.material_id
        )

    def __str__(self) -> str:
        return f"SolidVolume(shape={self.shape}, materialId={self.material_id})"

    def __repr__(self) -> str:
        return f"SolidVolume(shape={repr(self.shape)}, materialId={repr(self.material_id)})"


class Coordinate:
    """3D coordinates (in mm)"""

    c: npt.NDArray[np.float32]

    def __init__(self, *,
        c: typing.Optional[npt.NDArray[np.float32]] = None,
    ):
        # Default: zero vector of length 3.
        self.c = c if c is not None else np.zeros((3,), dtype=np.dtype(np.float32))

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, Coordinate)
            and yardl.structural_equal(self.c, other.c)
        )

    def __str__(self) -> str:
        return f"Coordinate(c={self.c})"

    def __repr__(self) -> str:
        return f"Coordinate(c={repr(self.c)})"


class BoxShape:
    """A box-shape specified by 8 corners (e.g. cuboid, wedge, etc.)
    TODO need to think about a clear definition of planes
    We do not want to have to check about intersection planes
    Potential mechanisms:
    - lexicographical ordering of corner coordinates?
    - first 4 coordinates give first plane, 5th and 6th need to define plane with first 2, etc.
    """

    corners: list[Coordinate]

    def __init__(self, *,
        corners: typing.Optional[list[Coordinate]] = None,
    ):
        self.corners = corners if corners is not None else [Coordinate() for _ in range(8)]

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, BoxShape)
            and len(self.corners) == len(other.corners) and all(a == b for a, b in zip(self.corners, other.corners))
        )

    def __str__(self) -> str:
        return f"BoxShape(corners={self.corners})"

    def __repr__(self) -> str:
        return f"BoxShape(corners={repr(self.corners)})"


BoxSolidVolume = SolidVolume[BoxShape]

class AnnulusShape:
    """Annulus of certain thickness centered at [0,0,0] and oriented along the [0,0,1] axis
    in radians. An angle of 0 corresponds to the [1,0,0] axis, Pi/2 corresponds to the [0,1,0] axis.
    """

    inner_radius: yardl.Float32
    """inner radius (in mm)"""

    outer_radius: yardl.Float32
    """outer radius (in mm)"""

    thickness: yardl.Float32
    """thickness of the annulus, i.e. length along the axis (in mm)"""

    angular_range: list[yardl.Float32]
    """start-stop angle (in radians)"""


    def __init__(self, *,
        inner_radius: yardl.Float32 = 0.0,
        outer_radius: yardl.Float32 = 0.0,
        thickness: yardl.Float32 = 0.0,
        angular_range: typing.Optional[list[yardl.Float32]] = None,
    ):
        self.inner_radius = inner_radius
        self.outer_radius = outer_radius
        self.thickness = thickness
        self.angular_range = angular_range if angular_range is not None else [0.0] * 2

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, AnnulusShape)
            and self.inner_radius == other.inner_radius
            and self.outer_radius == other.outer_radius
            and self.thickness == other.thickness
            and self.angular_range == other.angular_range
        )

    def __str__(self) -> str:
        return f"AnnulusShape(innerRadius={self.inner_radius}, outerRadius={self.outer_radius}, thickness={self.thickness}, angularRange={self.angular_range})"

    def __repr__(self) -> str:
        return f"AnnulusShape(innerRadius={repr(self.inner_radius)}, outerRadius={repr(self.outer_radius)}, thickness={repr(self.thickness)}, angularRange={repr(self.angular_range)})"


_T = typing.TypeVar('_T')

class GeometricShape:
    # Tagged union over BoxShape | AnnulusShape; the case classes are created
    # dynamically below as subclasses carrying an `index` and `tag`.
    BoxShape: typing.ClassVar[type["GeometricShapeUnionCase[BoxShape]"]]
    AnnulusShape: typing.ClassVar[type["GeometricShapeUnionCase[AnnulusShape]"]]

class GeometricShapeUnionCase(GeometricShape, yardl.UnionCase[_T]):
    pass

GeometricShape.BoxShape = type("GeometricShape.BoxShape", (GeometricShapeUnionCase,), {"index": 0, "tag": "BoxShape"})
GeometricShape.AnnulusShape = type("GeometricShape.AnnulusShape", (GeometricShapeUnionCase,), {"index": 1, "tag": "AnnulusShape"})
del GeometricShapeUnionCase

GenericSolidVolume = SolidVolume[GeometricShape]

class RigidTransformation:
    """Rigid transformation, encoded via homogenous transformation
    transformed_coord = matrix * [c, 1] (where [c,1] is a column vector)
    with `c` of type `Coordinate`
    """

    matrix: npt.NDArray[np.float32]

    def __init__(self, *,
        matrix: typing.Optional[npt.NDArray[np.float32]] = None,
    ):
        # Default: 3x4 zero matrix (note: NOT the identity transformation).
        self.matrix = matrix if matrix is not None else np.zeros((3, 4,), dtype=np.dtype(np.float32))

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, RigidTransformation)
            and yardl.structural_equal(self.matrix, other.matrix)
        )

    def __str__(self) -> str:
        return f"RigidTransformation(matrix={self.matrix})"

    def __repr__(self) -> str:
        return f"RigidTransformation(matrix={repr(self.matrix)})"


class ReplicatedObject(typing.Generic[T]):
    """A list of identical objects at different locations"""

    object: T
    transforms: list[RigidTransformation]
    """list of transforms
    constraint: length >= 1
    """

    ids: list[yardl.UInt32]
    """list of unique ids for every replicated solid volume
    constraint: size(transforms) == size(ids)
    """


    def __init__(self, *,
        object: T,
        transforms: typing.Optional[list[RigidTransformation]] = None,
        ids: typing.Optional[list[yardl.UInt32]] = None,
    ):
        self.object = object
        self.transforms = transforms if transforms is not None else []
        self.ids = ids if ids is not None else []

    def number_of_objects(self) -> yardl.Size:
        # One replica per transform.
        return len(self.transforms)

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, ReplicatedObject)
            and yardl.structural_equal(self.object, other.object)
            and len(self.transforms) == len(other.transforms) and all(a == b for a, b in zip(self.transforms, other.transforms))
            and self.ids == other.ids
        )

    def __str__(self) -> str:
        return f"ReplicatedObject(object={self.object}, transforms={self.transforms}, ids={self.ids})"

    def __repr__(self) -> str:
        return f"ReplicatedObject(object={repr(self.object)}, transforms={repr(self.transforms)}, ids={repr(self.ids)})"


ReplicatedBoxSolidVolume = ReplicatedObject[BoxSolidVolume]
"""A list of identical SolidVolumes at different locations"""

ReplicatedGenericSolidVolume = ReplicatedObject[GenericSolidVolume]
"""A list of identical SolidVolumes at different locations"""


class DetectorModule:
    """Top-level detector structure, consisting of one or more lists of detecting elements (or "crystals")
    This allows having different types of detecting elements (e.g. for phoswich detectors)
    TODO this could be made into a hierarchical structure
    """

    detecting_elements: list[ReplicatedBoxSolidVolume]
    detecting_element_ids: list[yardl.UInt32]
    """list of unique ids for every replicated solid volume
    constraint: size(detectingElements) == size(detectingElementsIds)
    """

    non_detecting_elements: list[ReplicatedGenericSolidVolume]
    """optional list describing shielding/optical reflectors etc"""


    def __init__(self, *,
        detecting_elements: typing.Optional[list[ReplicatedBoxSolidVolume]] = None,
        detecting_element_ids: typing.Optional[list[yardl.UInt32]] = None,
        non_detecting_elements: typing.Optional[list[ReplicatedGenericSolidVolume]] = None,
    ):
        self.detecting_elements = detecting_elements if detecting_elements is not None else []
        self.detecting_element_ids = detecting_element_ids if detecting_element_ids is not None else []
        self.non_detecting_elements = non_detecting_elements if non_detecting_elements is not None else []

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, DetectorModule)
            and len(self.detecting_elements) == len(other.detecting_elements) and all(a == b for a, b in zip(self.detecting_elements, other.detecting_elements))
            and self.detecting_element_ids == other.detecting_element_ids
            and len(self.non_detecting_elements) == len(other.non_detecting_elements) and all(a == b for a, b in zip(self.non_detecting_elements, other.non_detecting_elements))
        )

    def __str__(self) -> str:
        return f"DetectorModule(detectingElements={self.detecting_elements}, detectingElementIds={self.detecting_element_ids}, nonDetectingElements={self.non_detecting_elements})"

    def __repr__(self) -> str:
        return f"DetectorModule(detectingElements={repr(self.detecting_elements)}, detectingElementIds={repr(self.detecting_element_ids)}, nonDetectingElements={repr(self.non_detecting_elements)})"


ReplicatedDetectorModule = ReplicatedObject[DetectorModule]
"""A list of identical modules at different locations"""


class ScannerGeometry:
    """Full definition of the geometry of the scanner, consisting of
    one of more types of modules replicated in space and (optional) other structures (e.g. side-shielding)
    """

    replicated_modules: list[ReplicatedDetectorModule]
    """list of different types of replicated modules"""

    ids: list[yardl.UInt32]
    """list of unique ids for every replicated module
    constraint: size(replicated_modules) == size(ids)
    """

    non_detecting_volumes: typing.Optional[list[GenericSolidVolume]]
    """shielding etc"""


    def __init__(self, *,
        replicated_modules: typing.Optional[list[ReplicatedDetectorModule]] = None,
        ids: typing.Optional[list[yardl.UInt32]] = None,
        non_detecting_volumes: typing.Optional[list[GenericSolidVolume]] = None,
    ):
        self.replicated_modules = replicated_modules if replicated_modules is not None else []
        self.ids = ids if ids is not None else []
        self.non_detecting_volumes = non_detecting_volumes

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, ScannerGeometry)
            and len(self.replicated_modules) == len(other.replicated_modules) and all(a == b for a, b in zip(self.replicated_modules, other.replicated_modules))
            and self.ids == other.ids
            and (other.non_detecting_volumes is None if self.non_detecting_volumes is None else (other.non_detecting_volumes is not None and len(self.non_detecting_volumes) == len(other.non_detecting_volumes) and all(a == b for a, b in zip(self.non_detecting_volumes, other.non_detecting_volumes))))
        )

    def __str__(self) -> str:
        return f"ScannerGeometry(replicatedModules={self.replicated_modules}, ids={self.ids}, nonDetectingVolumes={self.non_detecting_volumes})"

    def __repr__(self) -> str:
        return f"ScannerGeometry(replicatedModules={repr(self.replicated_modules)}, ids={repr(self.ids)}, nonDetectingVolumes={repr(self.non_detecting_volumes)})"


class Subject:
    name: typing.Optional[str]
    id: str

    def __init__(self, *,
        name: typing.Optional[str] = None,
        id: str = "",
    ):
        self.name = name
        self.id = id

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, Subject)
            and self.name == other.name
            and self.id == other.id
        )

    def __str__(self) -> str:
        return f"Subject(name={self.name}, id={self.id})"

    def __repr__(self) -> str:
        return f"Subject(name={repr(self.name)}, id={repr(self.id)})"


class Institution:
    name: str
    address: str

    def __init__(self, *,
        name: str = "",
        address: str = "",
    ):
        self.name = name
        self.address = address

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, Institution)
            and self.name == other.name
            and self.address == other.address
        )

    def __str__(self) -> str:
        return f"Institution(name={self.name}, address={self.address})"

    def __repr__(self) -> str:
        return f"Institution(name={repr(self.name)}, address={repr(self.address)})"


class ExamInformation:
    """Items describing the exam (incomplete)"""

    subject: Subject
    institution: Institution
    protocol: typing.Optional[str]
    start_of_acquisition: typing.Optional[yardl.DateTime]

    def __init__(self, *,
        subject: typing.Optional[Subject] = None,
        institution: typing.Optional[Institution] = None,
        protocol: typing.Optional[str] = None,
        start_of_acquisition: typing.Optional[yardl.DateTime] = None,
    ):
        self.subject = subject if subject is not None else Subject()
        self.institution = institution if institution is not None else Institution()
        self.protocol = protocol
        self.start_of_acquisition = start_of_acquisition

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, ExamInformation)
            and self.subject == other.subject
            and self.institution == other.institution
            and self.protocol == other.protocol
            and self.start_of_acquisition == other.start_of_acquisition
        )

    def __str__(self) -> str:
        return f"ExamInformation(subject={self.subject}, institution={self.institution}, protocol={self.protocol}, startOfAcquisition={self.start_of_acquisition})"

    def __repr__(self) -> str:
        return f"ExamInformation(subject={repr(self.subject)}, institution={repr(self.institution)}, protocol={repr(self.protocol)}, startOfAcquisition={repr(self.start_of_acquisition)})"


class Direction:
    """3D direction vector (normalized to 1)"""

    c: npt.NDArray[np.float32]

    def __init__(self, *,
        c: typing.Optional[npt.NDArray[np.float32]] = None,
    ):
        self.c = c if c is not None else np.zeros((3,), dtype=np.dtype(np.float32))

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, Direction)
            and yardl.structural_equal(self.c, other.c)
        )

    def __str__(self) -> str:
        return f"Direction(c={self.c})"

    def __repr__(self) -> str:
        return f"Direction(c={repr(self.c)})"


class DirectionMatrix:
    """Orthonormal matrix
    direction_of_first_axis = matrix * [1, 0 ,0] (as a column vector)
    """

    matrix: npt.NDArray[np.float32]

    def __init__(self, *,
        matrix: typing.Optional[npt.NDArray[np.float32]] = None,
    ):
        self.matrix = matrix if matrix is not None else np.zeros((3, 3,), dtype=np.dtype(np.float32))

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, DirectionMatrix)
            and yardl.structural_equal(self.matrix, other.matrix)
        )

    def __str__(self) -> str:
        return f"DirectionMatrix(matrix={self.matrix})"

    def __repr__(self) -> str:
        return f"DirectionMatrix(matrix={repr(self.matrix)})"


class Atom:
    """Atom definition in terms of Z and A"""

    mass_number: yardl.UInt32
    """A"""

    atomic_number: yardl.UInt32
    """Z"""


    def __init__(self, *,
        mass_number: yardl.UInt32 = 0,
        atomic_number: yardl.UInt32 = 0,
    ):
        self.mass_number = mass_number
        self.atomic_number = atomic_number

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, Atom)
            and self.mass_number == other.mass_number
            and self.atomic_number == other.atomic_number
        )

    def __str__(self) -> str:
        return f"Atom(massNumber={self.mass_number}, atomicNumber={self.atomic_number})"

    def __repr__(self) -> str:
        return f"Atom(massNumber={repr(self.mass_number)}, atomicNumber={repr(self.atomic_number)})"


class BulkMaterial:
    """Specification of materials used in the scanner.
    TODO agree with vendors if this information can be supplied and to what accuracy
    Ideally this list should be reasonably accurate to be useful for Monte Carlo simulations, but can be approximate.
    """

    id: yardl.UInt32
    """unique id that can be used to refer to the material in voxelised maps etc"""

    name: str
    """informative string, not standardised.
    Expected examples:
    detecting: BGO, LSO, LYSO, LaBr, GAGG, plastic
    non-detecting: tungsten, lead
    """

    density: yardl.Float32
    """density of the material
    Units: g/cc
    """

    atoms: list[Atom]
    """List of atoms"""

    mass_fractions: list[yardl.Float32]
    """List of massFractions for the atoms.
    constraint: sum of massFractions should be 1
    constraint: size(atoms) == size(massFractions)
    """


    def __init__(self, *,
        id: yardl.UInt32 = 0,
        name: str = "",
        density: yardl.Float32 = 0.0,
        atoms: typing.Optional[list[Atom]] = None,
        mass_fractions: typing.Optional[list[yardl.Float32]] = None,
    ):
        self.id = id
        self.name = name
        self.density = density
        self.atoms = atoms if atoms is not None else []
        self.mass_fractions = mass_fractions if mass_fractions is not None else []

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, BulkMaterial)
            and self.id == other.id
            and self.name == other.name
            and self.density == other.density
            and self.atoms == other.atoms
            and self.mass_fractions == other.mass_fractions
        )

    def __str__(self) -> str:
        return f"BulkMaterial(id={self.id}, name={self.name}, density={self.density}, atoms={self.atoms}, massFractions={self.mass_fractions})"

    def __repr__(self) -> str:
        return f"BulkMaterial(id={repr(self.id)}, name={repr(self.name)}, density={repr(self.density)}, atoms={repr(self.atoms)}, massFractions={repr(self.mass_fractions)})"


class CoincidencePolicy(yardl.OutOfRangeEnum):
    """Type definition for how to encode how the scanner handles multiple coincidences when recording the prompts.
    Due to various effects (such as high count rate, prompt gammas), it is possible that multiple single
    events are detected within the coincidence window. This type encodes some different ways
    that these multiple events are handled and recorded in the coincidence stream.
    """

    REJECT_MULTIPLES = 0
    """multiples will be rejected"""

    STORE_MULTIPLES_AS_PAIRS = 1
    """multiples will be stored as a sequence of pairs, e.g. a triple leads to 3 pairs"""

    OTHER = 2
    """other options, to be listed in the future"""


class ScannerInformation:
    model_name: str
    scanner_geometry: ScannerGeometry
    """Geometric information for all detector elements
    All coordinates are in the PET gantry coordinate system.
    """

    bulk_materials: list[BulkMaterial]
    """List of materials present in the scanner geometry. The `material_id`s there will refer to the
    identifiers in this list below.
    """

    gantry_alignment: typing.Optional[RigidTransformation]
    """Fixed transformation to reference location for this scanner.
    This field can be used to encode alignment with the CT or MRI gantry for instance.
    The transformation should convert from the PET gantry coordinate system to the reference.
    An empty field implies the identity transformation.
    """

    tof_bin_edges: npt.NDArray[np.float32]
    """Edge information for TOF bins in mm (given as from first to last edge, so there is one more edge than the number of bins)
    0 corresponds to the same arrival time. Negative numbers indicate that the first detector detected first.
    For instance, a coincidence event is stored as 2 detectorIds, denoting the arrival time at the first
    detector t1 and the arrival time at the second detector t2, we store (t1-t2)*c/2.
    Note: for non-TOF scanners, this defines the coincidence window
    TODO: this currently assumes equal size for each TOF bin, but some scanners "stretch" TOF bins depending on length of LOR
    """

    tof_resolution: yardl.Float32
    """TOF resolution (as FWHM) in mm
    Scanner coincidence timing resolution (CTR) without tof-binning
    """

    energy_bin_edges: npt.NDArray[np.float32]
    """Edge information for energy windows in keV (given as from first to last edge, so there is one more edge than the number of bins)"""

    energy_resolution_at_511: yardl.Float32
    """FWHM of photopeak for incoming gamma of 511 keV, expressed as a ratio w.r.t. 511"""

    event_time_block_duration: yardl.UInt32
    """duration of each event time block in ms"""

    coincidence_policy: CoincidencePolicy
    """Encode how the scanner handles multiple coincidences"""


    def __init__(self, *,
        model_name: str = "",
        scanner_geometry: typing.Optional[ScannerGeometry] = None,
        bulk_materials: typing.Optional[list[BulkMaterial]] = None,
        gantry_alignment: typing.Optional[RigidTransformation] = None,
        tof_bin_edges: typing.Optional[npt.NDArray[np.float32]] = None,
        tof_resolution: yardl.Float32 = 0.0,
        energy_bin_edges: typing.Optional[npt.NDArray[np.float32]] = None,
        energy_resolution_at_511: yardl.Float32 = 0.0,
        event_time_block_duration: yardl.UInt32 = 0,
        coincidence_policy: CoincidencePolicy = CoincidencePolicy.REJECT_MULTIPLES,
    ):
        self.model_name = model_name
        self.scanner_geometry = scanner_geometry if scanner_geometry is not None else ScannerGeometry()
        self.bulk_materials = bulk_materials if bulk_materials is not None else []
        self.gantry_alignment = gantry_alignment
        # Bin-edge arrays default to empty float32 arrays (no bins defined).
        self.tof_bin_edges = tof_bin_edges if tof_bin_edges is not None else np.zeros((0), dtype=np.dtype(np.float32))
        self.tof_resolution = tof_resolution
        self.energy_bin_edges = energy_bin_edges if energy_bin_edges is not None else np.zeros((0), dtype=np.dtype(np.float32))
        self.energy_resolution_at_511 = energy_resolution_at_511
        self.event_time_block_duration = event_time_block_duration
        self.coincidence_policy = coincidence_policy

    def number_of_tof_bins(self) -> yardl.Size:
        # One more edge than bins, so bins = edges - 1 (the empty default yields -1).
        return self.tof_bin_edges.size - 1

    def number_of_energy_bins(self) -> yardl.Size:
        # One more edge than bins, so bins = edges - 1 (the empty default yields -1).
        return self.energy_bin_edges.size - 1

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, ScannerInformation)
            and self.model_name == other.model_name
            and self.scanner_geometry == other.scanner_geometry
            and self.bulk_materials == other.bulk_materials
            and (other.gantry_alignment is None if self.gantry_alignment is None else (other.gantry_alignment is not None and self.gantry_alignment == other.gantry_alignment))
            and yardl.structural_equal(self.tof_bin_edges, other.tof_bin_edges)
            and self.tof_resolution == other.tof_resolution
            and yardl.structural_equal(self.energy_bin_edges, other.energy_bin_edges)
            and self.energy_resolution_at_511 == other.energy_resolution_at_511
            and self.event_time_block_duration == other.event_time_block_duration
            and self.coincidence_policy == other.coincidence_policy
        )

    def __str__(self) -> str:
        return f"ScannerInformation(modelName={self.model_name}, scannerGeometry={self.scanner_geometry}, bulkMaterials={self.bulk_materials}, gantryAlignment={self.gantry_alignment}, tofBinEdges={self.tof_bin_edges}, tofResolution={self.tof_resolution}, energyBinEdges={self.energy_bin_edges}, energyResolutionAt511={self.energy_resolution_at_511}, eventTimeBlockDuration={self.event_time_block_duration}, coincidencePolicy={self.coincidence_policy})"

    def __repr__(self) -> str:
        return f"ScannerInformation(modelName={repr(self.model_name)}, scannerGeometry={repr(self.scanner_geometry)}, bulkMaterials={repr(self.bulk_materials)}, gantryAlignment={repr(self.gantry_alignment)}, tofBinEdges={repr(self.tof_bin_edges)}, tofResolution={repr(self.tof_resolution)}, energyBinEdges={repr(self.energy_bin_edges)}, energyResolutionAt511={repr(self.energy_resolution_at_511)}, eventTimeBlockDuration={repr(self.event_time_block_duration)}, coincidencePolicy={repr(self.coincidence_policy)})"


class Header:
    scanner: ScannerInformation
    exam: typing.Optional[ExamInformation]

    def __init__(self, *,
        scanner: typing.Optional[ScannerInformation] = None,
        exam: typing.Optional[ExamInformation] = None,
    ):
        self.scanner = scanner if scanner is not None else ScannerInformation()
        self.exam = exam

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, Header)
            and self.scanner == other.scanner
            and self.exam == other.exam
        )

    def __str__(self) 
-> str: + return f"Header(scanner={self.scanner}, exam={self.exam})" + + def __repr__(self) -> str: + return f"Header(scanner={repr(self.scanner)}, exam={repr(self.exam)})" + + +class TripleEvent: + """All information about a triple event specified as identifiers or indices (i.e. discretized).""" + + detector_ids: list[yardl.UInt32] + tof_indices: list[yardl.UInt32] + """timing differences (converted to mm) w.r.t. first event, stored as + indices into the tofBinEdges field in the ScannerInformation + Note: only 2, corresponding to the arrival time differences of the second and third detectorId + listed w.r.t. the first detectorId + """ + + energy_indices: list[yardl.UInt32] + """indices for each single event into the energyBinEdges field in the ScannerInformation""" + + + def __init__(self, *, + detector_ids: typing.Optional[list[yardl.UInt32]] = None, + tof_indices: typing.Optional[list[yardl.UInt32]] = None, + energy_indices: typing.Optional[list[yardl.UInt32]] = None, + ): + self.detector_ids = detector_ids if detector_ids is not None else [0] * 3 + self.tof_indices = tof_indices if tof_indices is not None else [0] * 2 + self.energy_indices = energy_indices if energy_indices is not None else [0] * 3 + + def __eq__(self, other: object) -> bool: + return ( + isinstance(other, TripleEvent) + and self.detector_ids == other.detector_ids + and self.tof_indices == other.tof_indices + and self.energy_indices == other.energy_indices + ) + + def __str__(self) -> str: + return f"TripleEvent(detectorIds={self.detector_ids}, tofIndices={self.tof_indices}, energyIndices={self.energy_indices})" + + def __repr__(self) -> str: + return f"TripleEvent(detectorIds={repr(self.detector_ids)}, tofIndices={repr(self.tof_indices)}, energyIndices={repr(self.energy_indices)})" + + +class EventTimeBlock: + start: yardl.UInt32 + """start time since ExamInformation.startOfAcquisition in ms + Note: duration is given by ScannerInformation.eventTimeBlockDuration + """ + + prompt_events: 
list[CoincidenceEvent] + """TODO encode end time? + list of prompts in this time block + TODO might be better to use !array + """ + + delayed_events: typing.Optional[list[CoincidenceEvent]] + """optional list of delayed coincidences in this time block""" + + triple_events: typing.Optional[list[TripleEvent]] + """optional list of triple coincidences in this time block""" + + + def __init__(self, *, + start: yardl.UInt32 = 0, + prompt_events: typing.Optional[list[CoincidenceEvent]] = None, + delayed_events: typing.Optional[list[CoincidenceEvent]] = None, + triple_events: typing.Optional[list[TripleEvent]] = None, + ): + self.start = start + self.prompt_events = prompt_events if prompt_events is not None else [] + self.delayed_events = delayed_events + self.triple_events = triple_events + + def __eq__(self, other: object) -> bool: + return ( + isinstance(other, EventTimeBlock) + and self.start == other.start + and self.prompt_events == other.prompt_events + and self.delayed_events == other.delayed_events + and self.triple_events == other.triple_events + ) + + def __str__(self) -> str: + return f"EventTimeBlock(start={self.start}, promptEvents={self.prompt_events}, delayedEvents={self.delayed_events}, tripleEvents={self.triple_events})" + + def __repr__(self) -> str: + return f"EventTimeBlock(start={repr(self.start)}, promptEvents={repr(self.prompt_events)}, delayedEvents={repr(self.delayed_events)}, tripleEvents={repr(self.triple_events)})" + + +class ExternalSignalTimeBlock: + start: yardl.UInt32 + """start time since ExamInformation.startOfAcquisition in ms""" + + signal_id: yardl.UInt32 + """refer to ExternalSignalType.id""" + + signal_values: list[yardl.Float32] + """Note for triggers, this field is to be ignored""" + + + def __init__(self, *, + start: yardl.UInt32 = 0, + signal_id: yardl.UInt32 = 0, + signal_values: typing.Optional[list[yardl.Float32]] = None, + ): + self.start = start + self.signal_id = signal_id + self.signal_values = signal_values if 
signal_values is not None else [] + + def __eq__(self, other: object) -> bool: + return ( + isinstance(other, ExternalSignalTimeBlock) + and self.start == other.start + and self.signal_id == other.signal_id + and self.signal_values == other.signal_values + ) + + def __str__(self) -> str: + return f"ExternalSignalTimeBlock(start={self.start}, signalID={self.signal_id}, signalValues={self.signal_values})" + + def __repr__(self) -> str: + return f"ExternalSignalTimeBlock(start={repr(self.start)}, signalID={repr(self.signal_id)}, signalValues={repr(self.signal_values)})" + + +class BedMovementTimeBlock: + start: yardl.UInt32 + """start time since ExamInformation.startOfAcquisition in ms""" + + transform: RigidTransformation + + def __init__(self, *, + start: yardl.UInt32 = 0, + transform: typing.Optional[RigidTransformation] = None, + ): + self.start = start + self.transform = transform if transform is not None else RigidTransformation() + + def __eq__(self, other: object) -> bool: + return ( + isinstance(other, BedMovementTimeBlock) + and self.start == other.start + and self.transform == other.transform + ) + + def __str__(self) -> str: + return f"BedMovementTimeBlock(start={self.start}, transform={self.transform})" + + def __repr__(self) -> str: + return f"BedMovementTimeBlock(start={repr(self.start)}, transform={repr(self.transform)})" + + +class GantryMovementTimeBlock: + start: yardl.UInt32 + """start time since ExamInformation.startOfAcquisition in ms""" + + transform: RigidTransformation + + def __init__(self, *, + start: yardl.UInt32 = 0, + transform: typing.Optional[RigidTransformation] = None, + ): + self.start = start + self.transform = transform if transform is not None else RigidTransformation() + + def __eq__(self, other: object) -> bool: + return ( + isinstance(other, GantryMovementTimeBlock) + and self.start == other.start + and self.transform == other.transform + ) + + def __str__(self) -> str: + return f"GantryMovementTimeBlock(start={self.start}, 
transform={self.transform})" + + def __repr__(self) -> str: + return f"GantryMovementTimeBlock(start={repr(self.start)}, transform={repr(self.transform)})" + + +class TimeBlock: + EventTimeBlock: typing.ClassVar[type["TimeBlockUnionCase[EventTimeBlock]"]] + ExternalSignalTimeBlock: typing.ClassVar[type["TimeBlockUnionCase[ExternalSignalTimeBlock]"]] + BedMovementTimeBlock: typing.ClassVar[type["TimeBlockUnionCase[BedMovementTimeBlock]"]] + GantryMovementTimeBlock: typing.ClassVar[type["TimeBlockUnionCase[GantryMovementTimeBlock]"]] + +class TimeBlockUnionCase(TimeBlock, yardl.UnionCase[_T]): + pass + +TimeBlock.EventTimeBlock = type("TimeBlock.EventTimeBlock", (TimeBlockUnionCase,), {"index": 0, "tag": "EventTimeBlock"}) +TimeBlock.ExternalSignalTimeBlock = type("TimeBlock.ExternalSignalTimeBlock", (TimeBlockUnionCase,), {"index": 1, "tag": "ExternalSignalTimeBlock"}) +TimeBlock.BedMovementTimeBlock = type("TimeBlock.BedMovementTimeBlock", (TimeBlockUnionCase,), {"index": 2, "tag": "BedMovementTimeBlock"}) +TimeBlock.GantryMovementTimeBlock = type("TimeBlock.GantryMovementTimeBlock", (TimeBlockUnionCase,), {"index": 3, "tag": "GantryMovementTimeBlock"}) +del TimeBlockUnionCase + +class ExternalSignalTypeEnum(yardl.OutOfRangeEnum): + ECG_TRACE = 0 + ECG_TRIGGER = 1 + RESP_TRACE = 2 + RESP_TRIGGER = 3 + OTHER_MOTION_SIGNAL = 4 + OTHER_MOTION_TRIGGER = 5 + EXTERNAL_SYNC = 6 + OTHER = 7 + """other options, to be listed in the future""" + + +class ExternalSignalType: + type: ExternalSignalTypeEnum + description: str + id: yardl.UInt32 + + def __init__(self, *, + type: ExternalSignalTypeEnum = ExternalSignalTypeEnum.ECG_TRACE, + description: str = "", + id: yardl.UInt32 = 0, + ): + self.type = type + self.description = description + self.id = id + + def __eq__(self, other: object) -> bool: + return ( + isinstance(other, ExternalSignalType) + and self.type == other.type + and self.description == other.description + and self.id == other.id + ) + + def __str__(self) -> str: 
+ return f"ExternalSignalType(type={self.type}, description={self.description}, id={self.id})" + + def __repr__(self) -> str: + return f"ExternalSignalType(type={repr(self.type)}, description={repr(self.description)}, id={repr(self.id)})" + + +class TimeInterval: + """Time interval in milliseconds since start of acquisition""" + + start: yardl.UInt32 + stop: yardl.UInt32 + + def __init__(self, *, + start: yardl.UInt32 = 0, + stop: yardl.UInt32 = 0, + ): + self.start = start + self.stop = stop + + def __eq__(self, other: object) -> bool: + return ( + isinstance(other, TimeInterval) + and self.start == other.start + and self.stop == other.stop + ) + + def __str__(self) -> str: + return f"TimeInterval(start={self.start}, stop={self.stop})" + + def __repr__(self) -> str: + return f"TimeInterval(start={repr(self.start)}, stop={repr(self.stop)})" + + +class TimeFrameInformation: + """A sequence of time intervals (could be consecutive)""" + + time_frames: list[TimeInterval] + + def __init__(self, *, + time_frames: typing.Optional[list[TimeInterval]] = None, + ): + self.time_frames = time_frames if time_frames is not None else [] + + def number_of_time_frames(self) -> yardl.Size: + return len(self.time_frames) + + def __eq__(self, other: object) -> bool: + return ( + isinstance(other, TimeFrameInformation) + and self.time_frames == other.time_frames + ) + + def __str__(self) -> str: + return f"TimeFrameInformation(timeFrames={self.time_frames})" + + def __repr__(self) -> str: + return f"TimeFrameInformation(timeFrames={repr(self.time_frames)})" + + +def _mk_get_dtype(): + dtype_map: dict[typing.Union[type, types.GenericAlias], typing.Union[np.dtype[typing.Any], typing.Callable[[tuple[type, ...]], np.dtype[typing.Any]]]] = {} + get_dtype = _dtypes.make_get_dtype_func(dtype_map) + + dtype_map.setdefault(CoincidenceEvent, np.dtype([('detector_ids', np.dtype(np.uint32), (2,)), ('tof_idx', np.dtype(np.uint32)), ('energy_indices', np.dtype(np.uint32), (2,))], align=True)) + 
dtype_map.setdefault(SolidVolume, lambda type_args: np.dtype([('shape', get_dtype(type_args[0])), ('material_id', np.dtype(np.uint32))], align=True)) + dtype_map.setdefault(Coordinate, np.dtype([('c', np.dtype(np.float32), (3,))], align=True)) + dtype_map.setdefault(BoxShape, np.dtype([('corners', get_dtype(Coordinate), (8,))], align=True)) + dtype_map.setdefault(BoxSolidVolume, get_dtype(types.GenericAlias(SolidVolume, (BoxShape,)))) + dtype_map.setdefault(AnnulusShape, np.dtype([('inner_radius', np.dtype(np.float32)), ('outer_radius', np.dtype(np.float32)), ('thickness', np.dtype(np.float32)), ('angular_range', np.dtype(np.float32), (2,))], align=True)) + dtype_map.setdefault(GeometricShape, np.dtype(np.object_)) + dtype_map.setdefault(GenericSolidVolume, get_dtype(types.GenericAlias(SolidVolume, (GeometricShape,)))) + dtype_map.setdefault(RigidTransformation, np.dtype([('matrix', np.dtype(np.float32), (3, 4,))], align=True)) + dtype_map.setdefault(ReplicatedObject, lambda type_args: np.dtype([('object', get_dtype(type_args[0])), ('transforms', np.dtype(np.object_)), ('ids', np.dtype(np.object_))], align=True)) + dtype_map.setdefault(ReplicatedBoxSolidVolume, get_dtype(types.GenericAlias(ReplicatedObject, (BoxSolidVolume,)))) + dtype_map.setdefault(ReplicatedGenericSolidVolume, get_dtype(types.GenericAlias(ReplicatedObject, (GenericSolidVolume,)))) + dtype_map.setdefault(DetectorModule, np.dtype([('detecting_elements', np.dtype(np.object_)), ('detecting_element_ids', np.dtype(np.object_)), ('non_detecting_elements', np.dtype(np.object_))], align=True)) + dtype_map.setdefault(ReplicatedDetectorModule, get_dtype(types.GenericAlias(ReplicatedObject, (DetectorModule,)))) + dtype_map.setdefault(ScannerGeometry, np.dtype([('replicated_modules', np.dtype(np.object_)), ('ids', np.dtype(np.object_)), ('non_detecting_volumes', np.dtype([('has_value', np.dtype(np.bool_)), ('value', np.dtype(np.object_))], align=True))], align=True)) + dtype_map.setdefault(Subject, 
np.dtype([('name', np.dtype([('has_value', np.dtype(np.bool_)), ('value', np.dtype(np.object_))], align=True)), ('id', np.dtype(np.object_))], align=True)) + dtype_map.setdefault(Institution, np.dtype([('name', np.dtype(np.object_)), ('address', np.dtype(np.object_))], align=True)) + dtype_map.setdefault(ExamInformation, np.dtype([('subject', get_dtype(Subject)), ('institution', get_dtype(Institution)), ('protocol', np.dtype([('has_value', np.dtype(np.bool_)), ('value', np.dtype(np.object_))], align=True)), ('start_of_acquisition', np.dtype([('has_value', np.dtype(np.bool_)), ('value', np.dtype(np.datetime64))], align=True))], align=True)) + dtype_map.setdefault(Direction, np.dtype([('c', np.dtype(np.float32), (3,))], align=True)) + dtype_map.setdefault(DirectionMatrix, np.dtype([('matrix', np.dtype(np.float32), (3, 3,))], align=True)) + dtype_map.setdefault(Atom, np.dtype([('mass_number', np.dtype(np.uint32)), ('atomic_number', np.dtype(np.uint32))], align=True)) + dtype_map.setdefault(BulkMaterial, np.dtype([('id', np.dtype(np.uint32)), ('name', np.dtype(np.object_)), ('density', np.dtype(np.float32)), ('atoms', np.dtype(np.object_)), ('mass_fractions', np.dtype(np.object_))], align=True)) + dtype_map.setdefault(CoincidencePolicy, np.dtype(np.int32)) + dtype_map.setdefault(ScannerInformation, np.dtype([('model_name', np.dtype(np.object_)), ('scanner_geometry', get_dtype(ScannerGeometry)), ('bulk_materials', np.dtype(np.object_)), ('gantry_alignment', np.dtype([('has_value', np.dtype(np.bool_)), ('value', get_dtype(RigidTransformation))], align=True)), ('tof_bin_edges', np.dtype(np.object_)), ('tof_resolution', np.dtype(np.float32)), ('energy_bin_edges', np.dtype(np.object_)), ('energy_resolution_at_511', np.dtype(np.float32)), ('event_time_block_duration', np.dtype(np.uint32)), ('coincidence_policy', get_dtype(CoincidencePolicy))], align=True)) + dtype_map.setdefault(Header, np.dtype([('scanner', get_dtype(ScannerInformation)), ('exam', np.dtype([('has_value', 
np.dtype(np.bool_)), ('value', get_dtype(ExamInformation))], align=True))], align=True)) + dtype_map.setdefault(TripleEvent, np.dtype([('detector_ids', np.dtype(np.uint32), (3,)), ('tof_indices', np.dtype(np.uint32), (2,)), ('energy_indices', np.dtype(np.uint32), (3,))], align=True)) + dtype_map.setdefault(EventTimeBlock, np.dtype([('start', np.dtype(np.uint32)), ('prompt_events', np.dtype(np.object_)), ('delayed_events', np.dtype([('has_value', np.dtype(np.bool_)), ('value', np.dtype(np.object_))], align=True)), ('triple_events', np.dtype([('has_value', np.dtype(np.bool_)), ('value', np.dtype(np.object_))], align=True))], align=True)) + dtype_map.setdefault(ExternalSignalTimeBlock, np.dtype([('start', np.dtype(np.uint32)), ('signal_id', np.dtype(np.uint32)), ('signal_values', np.dtype(np.object_))], align=True)) + dtype_map.setdefault(BedMovementTimeBlock, np.dtype([('start', np.dtype(np.uint32)), ('transform', get_dtype(RigidTransformation))], align=True)) + dtype_map.setdefault(GantryMovementTimeBlock, np.dtype([('start', np.dtype(np.uint32)), ('transform', get_dtype(RigidTransformation))], align=True)) + dtype_map.setdefault(TimeBlock, np.dtype(np.object_)) + dtype_map.setdefault(ExternalSignalTypeEnum, np.dtype(np.int32)) + dtype_map.setdefault(ExternalSignalType, np.dtype([('type', get_dtype(ExternalSignalTypeEnum)), ('description', np.dtype(np.object_)), ('id', np.dtype(np.uint32))], align=True)) + dtype_map.setdefault(TimeInterval, np.dtype([('start', np.dtype(np.uint32)), ('stop', np.dtype(np.uint32))], align=True)) + dtype_map.setdefault(TimeFrameInformation, np.dtype([('time_frames', np.dtype(np.object_))], align=True)) + + return get_dtype + +get_dtype = _mk_get_dtype() + diff --git a/python/petsird/yardl_types.py b/python/petsird/yardl_types.py new file mode 100644 index 0000000..40693d2 --- /dev/null +++ b/python/petsird/yardl_types.py @@ -0,0 +1,303 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+ +# pyright: reportUnknownArgumentType=false +# pyright: reportUnknownVariableType=false + +from abc import ABC +from enum import Enum +from typing import Annotated, Generic, TypeVar, Union +import numpy as np +import datetime +import time + + +class ProtocolError(Exception): + """Raised when the contract of a protocol is not respected.""" + + pass + + +class OutOfRangeEnum(Enum): + """Enum that allows values outside of the its defined values.""" + + @classmethod + def _missing_(cls, value: object): + if not isinstance(value, int): + return None + + obj = object.__new__(cls) + obj._value_ = value + obj._name_ = "" + return obj + + def __eq__(self, other: object): + return isinstance(other, self.__class__) and self.value == other.value + + def __hash__(self) -> int: + return hash(self.value) + + def __str__(self) -> str: + if self._name_ != "": + return super().__str__() + + return f"{self.__class__.__name__}({self.value})" + + def __repr__(self) -> str: + if self._name_ != "": + return super().__repr__() + + return f"<{self.__class__.__name__}: {self.value}>" + + +class DateTime: + """A basic datetime with nanosecond precision, always in UTC.""" + + def __init__(self, nanoseconds_from_epoch: Union[int, np.datetime64] = 0): + if isinstance(nanoseconds_from_epoch, np.datetime64): + if nanoseconds_from_epoch.dtype != "datetime64[ns]": + self._value = np.datetime64(nanoseconds_from_epoch, "ns") + else: + self._value = nanoseconds_from_epoch + else: + self._value = np.datetime64(nanoseconds_from_epoch, "ns") + + @property + def numpy_value(self) -> np.datetime64: + return self._value + + def to_datetime(self) -> datetime.datetime: + return datetime.datetime.utcfromtimestamp(self._value.astype(int) / 1e9) + + @staticmethod + def from_components( + year: int, + month: int, + day: int, + hour: int = 0, + minute: int = 0, + second: int = 0, + nanosecond: int = 0, + ) -> "DateTime": + if not 0 <= nanosecond <= 999_999_999: + raise ValueError("nanosecond must be in 
0..999_999_999", nanosecond) + + return DateTime( + int(datetime.datetime(year, month, day, hour, minute, second).timestamp()) + * 1_000_000_000 + + nanosecond + ) + + @staticmethod + def from_datetime(dt: datetime.datetime) -> "DateTime": + return DateTime(round(dt.timestamp() * 1e6) * 1000) + + @staticmethod + def parse(s: str) -> "DateTime": + return DateTime(np.datetime64(s, "ns")) + + @staticmethod + def now() -> "DateTime": + return DateTime(time.time_ns()) + + def __str__(self) -> str: + return str(self._value) + + def __repr__(self) -> str: + return f"DateTime({self})" + + def __eq__(self, other: object) -> bool: + return ( + self._value == other._value + if isinstance(other, DateTime) + else (isinstance(other, np.datetime64) and self._value == other) + ) + + def __hash__(self) -> int: + return hash(self._value) + + +class Time: + """A basic time of day with nanosecond precision. It is not timezone-aware and is meant + to represent a wall clock time. + """ + + _NANOSECONDS_PER_DAY = 24 * 60 * 60 * 1_000_000_000 + + def __init__(self, nanoseconds_since_midnight: Union[int, np.timedelta64] = 0): + if isinstance(nanoseconds_since_midnight, np.timedelta64): + if nanoseconds_since_midnight.dtype != "timedelta64[ns]": + self._value = np.timedelta64(nanoseconds_since_midnight, "ns") + nanoseconds_since_midnight = nanoseconds_since_midnight.astype(int) + else: + self._value = nanoseconds_since_midnight + else: + self._value = np.timedelta64(nanoseconds_since_midnight, "ns") + + if ( + nanoseconds_since_midnight < 0 + or nanoseconds_since_midnight >= Time._NANOSECONDS_PER_DAY + ): + raise ValueError( + "TimeOfDay must be between 00:00:00 and 23:59:59.999999999" + ) + + @property + def numpy_value(self) -> np.timedelta64: + return self._value + + @staticmethod + def from_components( + hour: int, minute: int, second: int = 0, nanosecond: int = 0 + ) -> "Time": + if not 0 <= hour <= 23: + raise ValueError("hour must be in 0..23", hour) + if not 0 <= minute <= 59: + 
raise ValueError("minute must be in 0..59", minute) + if not 0 <= second <= 59: + raise ValueError("second must be in 0..59", second) + if not 0 <= nanosecond <= 999_999_999: + raise ValueError("nanosecond must be in 0..999_999_999", nanosecond) + + return Time( + hour * 3_600_000_000_000 + + minute * 60_000_000_000 + + second * 1_000_000_000 + + nanosecond + ) + + @staticmethod + def from_time(t: datetime.time) -> "Time": + return Time( + t.hour * 3_600_000_000_000 + + t.minute * 60_000_000_000 + + t.second * 1_000_000_000 + + t.microsecond * 1_000 + ) + + @staticmethod + def parse(s: str) -> "Time": + components = s.split(":") + if len(components) == 2: + hour = int(components[0]) + minute = int(components[1]) + return Time(hour * 3_600_000_000_000 + minute * 60_000_000_000) + if len(components) == 3: + hour = int(components[0]) + minute = int(components[1]) + second_components = components[2].split(".") + if len(second_components) <= 2: + second = int(second_components[0]) + if len(second_components) == 2: + fraction = int(second_components[1].ljust(9, "0")[:9]) + else: + fraction = 0 + return Time( + hour * 3_600_000_000_000 + + minute * 60_000_000_000 + + second * 1_000_000_000 + + fraction + ) + + raise ValueError("TimeOfDay must be in the format HH:MM:SS[.fffffffff]") + + def __str__(self) -> str: + nanoseconds_since_midnight = self._value.astype(int) + hours, r = divmod(nanoseconds_since_midnight, 3_600_000_000_000) + minutes, r = divmod(r, 60_000_000_000) + seconds, nanoseconds = divmod(r, 1_000_000_000) + if nanoseconds == 0: + return f"{hours:02}:{minutes:02}:{seconds:02}" + + return f"{hours:02}:{minutes:02}:{seconds:02}.{str(nanoseconds).rjust(9, '0').rstrip('0')}" + + def __repr__(self) -> str: + return f"Time({self})" + + def __eq__(self, other: object) -> bool: + return ( + self._value == other._value + if isinstance(other, Time) + else (isinstance(other, np.timedelta64) and self._value == other) + ) + + +Int8 = Annotated[int, "Int8"] +UInt8 = 
Annotated[int, "UInt8"] +Int16 = Annotated[int, "Int16"] +UInt16 = Annotated[int, "UInt16"] +Int32 = Annotated[int, "Int32"] +UInt32 = Annotated[int, "UInt32"] +Int64 = Annotated[int, "Int64"] +UInt64 = Annotated[int, "UInt64"] +Size = Annotated[int, "Size"] +Float32 = Annotated[float, "Float32"] +Float64 = Annotated[float, "Float64"] +ComplexFloat = Annotated[complex, "ComplexFloat"] +ComplexDouble = Annotated[complex, "ComplexDouble"] + + +def structural_equal(a: object, b: object) -> bool: + if a is None: + return b is None + + if isinstance(a, list): + if not isinstance(b, list): + if isinstance(b, np.ndarray): + return b.shape == (len(a),) and all( + structural_equal(x, y) for x, y in zip(a, b) + ) + return False + return len(a) == len(b) and all(structural_equal(x, y) for x, y in zip(a, b)) + + if isinstance(a, np.ndarray): + if not isinstance(b, np.ndarray): + if isinstance(b, list): + return a.shape == (len(b),) and all( + structural_equal(x, y) for x, y in zip(a, b) + ) + return False + if a.dtype.hasobject: # pyright: ignore [reportUnknownMemberType] + return ( + a.dtype == b.dtype # pyright: ignore [reportUnknownMemberType] + and a.shape == b.shape + and all(structural_equal(x, y) for x, y in zip(a, b)) + ) + return np.array_equal(a, b) + + if isinstance(a, np.void): + if not isinstance(b, np.void): + return b == a + return a.dtype == b.dtype and all( + structural_equal(x, y) + for x, y in zip(a, b) # pyright: ignore [reportGeneralTypeIssues] + ) + + if isinstance(b, np.void): + return a == b + + return a == b + + +_T = TypeVar("_T") + + +class UnionCase(ABC, Generic[_T]): + index: int + tag: str + + def __init__(self, value: _T) -> None: + self.value = value + + def __str__(self) -> str: + return str(self.value) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.value})" + + def __eq__(self, other: object) -> bool: + # Note we could codegen a more efficient version of this that does not + # (always) call structural_equal + return 
type(self) == type(other) and structural_equal( + self.value, other.value # pyright: ignore + )