FERS 1.0.0
The Flexible Extensible Radar Simulator
Loading...
Searching...
No Matches
hdf5_handler.cpp
Go to the documentation of this file.
1// SPDX-License-Identifier: GPL-2.0-only
2//
3// Copyright (c) 2006-2008 Marc Brooker and Michael Inggs
4// Copyright (c) 2008-present FERS Contributors (see AUTHORS.md).
5//
6// See the GNU GPLv2 LICENSE file in the FERS project root for more information.
7
8/**
9 * @file hdf5_handler.cpp
10 * @brief Source file for HDF5 data export and import functions.
11 */
12
13#include "hdf5_handler.h"
14
15#include <algorithm>
16#include <complex>
17#include <filesystem>
18#include <format>
19#include <highfive/highfive.hpp>
20#include <stdexcept>
21
22#include "core/logging.h"
23#include "core/parameters.h"
24
25using logging::Level;
26
27namespace serial
28{
30
31 void writeOutputFileMetadataAttributes(HighFive::File& file, const core::OutputFileMetadata& metadata)
32 {
33 file.createAttribute("fers_metadata_schema_version", 1U);
34 file.createAttribute("fers_metadata_json", core::outputFileMetadataToJsonString(metadata));
35 file.createAttribute("receiver_id", static_cast<unsigned long long>(metadata.receiver_id));
36 file.createAttribute("receiver_name", metadata.receiver_name);
37 file.createAttribute("data_mode", metadata.mode);
38 file.createAttribute("total_samples", static_cast<unsigned long long>(metadata.total_samples));
39 file.createAttribute("sample_start", static_cast<unsigned long long>(metadata.sample_start));
40 file.createAttribute("sample_end_exclusive", static_cast<unsigned long long>(metadata.sample_end_exclusive));
41 }
42
43 void readPulseData(const std::string& name, std::vector<ComplexType>& data)
44 {
45 std::scoped_lock lock(hdf5_global_mutex);
46
47 if (!std::filesystem::exists(name))
48 {
49 LOG(Level::FATAL, "File '{}' not found", name);
50 throw std::runtime_error("File " + name + " not found.");
51 }
52
53 LOG(Level::TRACE, "Opening file '{}'", name);
54 const HighFive::File file(name, HighFive::File::ReadOnly);
55
56 // Helper lambda to open group and read dataset
57 auto read_dataset = [&file](const std::string& groupName, std::vector<double>& buffer) -> size_t
58 {
59 const auto group = file.getGroup("/" + groupName);
60
61 const auto dataset = group.getDataSet("value");
62
63 const auto dimensions = dataset.getSpace().getDimensions();
64 const auto size = dimensions[0];
65
66 buffer.resize(size);
67 dataset.read(buffer);
68
69 return size;
70 };
71
72 LOG(Level::TRACE, "Reading dataset 'I' from file '{}'", name);
73 std::vector<double> buffer_i;
74 const auto size = read_dataset("I", buffer_i);
75
76 std::vector<double> buffer_q;
77 LOG(Level::TRACE, "Reading dataset 'Q' from file '{}'", name);
78 if (read_dataset("Q", buffer_q) != size)
79 {
80 LOG(Level::FATAL, "Dataset 'Q' is not the same size as dataset 'I' in file '{}'", name);
81 throw std::runtime_error(R"(Dataset "Q" is not the same size as dataset "I" in file )" + name);
82 }
83
84 data.resize(size);
85 for (size_t i = 0; i < size; ++i)
86 {
87 data[i] = ComplexType(buffer_i[i], buffer_q[i]);
88 }
89 LOG(Level::TRACE, "Read dataset successfully");
90 }
91
/**
 * @brief Adds a chunk of data to an HDF5 file.
 *
 * Splits the complex samples into two real datasets, "chunk_NNNNNN_I" and
 * "chunk_NNNNNN_Q", and stamps each with time/rate/fullscale attributes
 * (plus per-chunk sample-window attributes when @p metadata is supplied).
 *
 * @param fileHDF5  Open, writable HDF5 file handle.
 * @param data      Complex samples to store.
 * @param time      Start time of this chunk (attribute "time").
 * @param fullscale Full-scale value of this chunk (attribute "fullscale").
 * @param count     Chunk sequence number used to build the dataset names.
 * @param metadata  Optional per-chunk sample-window metadata; may be nullptr.
 * @throws std::runtime_error On any HDF5 write or attribute failure.
 */
void addChunkToFile(HighFive::File& file, const std::vector<ComplexType>& data, const RealType time,
                    const RealType fullscale, const unsigned count, const core::PulseChunkMetadata* metadata)
{
	// The HDF5 C library is not thread-safe; serialise all access.
	std::scoped_lock lock(hdf5_global_mutex);

	const std::size_t size = data.size();

	const std::string base_chunk_name = "chunk_" + std::format("{:06}", count);
	const std::string i_chunk_name = base_chunk_name + "_I";
	const std::string q_chunk_name = base_chunk_name + "_Q";

	// De-interleave the complex samples into separate I and Q vectors.
	std::vector<RealType> i(size), q(size);
	std::ranges::transform(data, i.begin(), [](const ComplexType& c) { return c.real(); });
	std::ranges::transform(data, q.begin(), [](const ComplexType& c) { return c.imag(); });

	// Create and fill one dataset, returning the handle so the caller can
	// attach attributes without re-opening it via file.getDataSet().
	auto write_chunk = [&](const std::string& chunkName, const std::vector<RealType>& chunkData) -> HighFive::DataSet
	{
		try
		{
			HighFive::DataSet dataset =
				file.createDataSet<RealType>(chunkName, HighFive::DataSpace::From(chunkData));
			dataset.write(chunkData);
			return dataset;
		}
		catch (const HighFive::Exception& err)
		{
			LOG(Level::FATAL, "Error while writing data to HDF5 file: {}", err.what());
			throw std::runtime_error("Error while writing data to HDF5 file: " + chunkName + " - " + err.what());
		}
	};

	auto set_chunk_attributes = [&](HighFive::DataSet& dataset, const std::string& chunkName)
	{
		try
		{
			dataset.createAttribute("time", time);
			dataset.createAttribute("rate", params::rate());
			dataset.createAttribute("fullscale", fullscale);
			if (metadata != nullptr)
			{
				dataset.createAttribute("chunk_index", metadata->chunk_index);
				dataset.createAttribute("sample_count", static_cast<unsigned long long>(metadata->sample_count));
				dataset.createAttribute("sample_start", static_cast<unsigned long long>(metadata->sample_start));
				dataset.createAttribute("sample_end_exclusive",
				                        static_cast<unsigned long long>(metadata->sample_end_exclusive));
			}
		}
		catch (const HighFive::Exception& err)
		{
			LOG(Level::FATAL, "Error while setting attributes on chunk: {}", err.what());
			throw std::runtime_error("Error while setting attributes on chunk: " + chunkName + " - " + err.what());
		}
	};

	// Same ordering as before: write both chunks, then stamp both.
	auto dataset_i = write_chunk(i_chunk_name, i);
	auto dataset_q = write_chunk(q_chunk_name, q);

	set_chunk_attributes(dataset_i, i_chunk_name);
	set_chunk_attributes(dataset_q, q_chunk_name);
}
152
/**
 * @brief Reads a 2D pattern dataset from an HDF5 file.
 *
 * @param name        Path of the HDF5 file to read.
 * @param datasetName Name of the dataset inside the file.
 * @return The dataset contents as a row-major vector of rows.
 * @throws std::runtime_error If the dataset is not two-dimensional or the
 *         HDF5 library reports an error.
 */
std::vector<std::vector<RealType>> readPattern(const std::string& name, const std::string& datasetName)
{
	// The HDF5 C library is not thread-safe; serialise all access.
	std::scoped_lock lock(hdf5_global_mutex);
	try
	{
		LOG(Level::TRACE, "Reading dataset '{}' from file '{}'", datasetName, name);
		const HighFive::File h5_file(name, HighFive::File::ReadOnly);

		const auto pattern_set = h5_file.getDataSet(datasetName);
		const auto shape = pattern_set.getSpace().getDimensions();

		// Only rank-2 datasets are valid antenna/pattern tables.
		if (shape.size() != 2)
		{
			LOG(Level::FATAL, "Invalid dataset dimensions for '{}' in file '{}'", datasetName, name);
			throw std::runtime_error(
				std::format(R"(Invalid dataset dimensions for "{}" in file "{}")", datasetName, name));
		}

		const auto rows = shape[0];
		const auto cols = shape[1];
		LOG(Level::TRACE, "Reading dataset with dimensions {}x{}", rows, cols);

		std::vector<std::vector<RealType>> pattern(rows, std::vector<RealType>(cols));
		pattern_set.read(pattern);

		LOG(Level::TRACE, "Read dataset successfully");

		return pattern;
	}
	catch (const HighFive::Exception& err)
	{
		// The rank-check runtime_error above is not a HighFive::Exception
		// and passes through this handler unchanged.
		LOG(Level::FATAL, "Error handling HDF5 file: {}", err.what());
		throw std::runtime_error("Error handling HDF5 file: " + std::string(err.what()));
	}
}
188}
double RealType
Type for real numbers.
Definition config.h:27
std::complex< RealType > ComplexType
Type for complex numbers.
Definition config.h:35
Header file for HDF5 data export and import functions.
Header file for the logging system.
#define LOG(level,...)
Definition logging.h:19
std::string outputFileMetadataToJsonString(const OutputFileMetadata &metadata)
RealType rate() noexcept
Get the rendering sample rate.
Definition parameters.h:121
std::mutex hdf5_global_mutex
Global mutex to protect all HDF5 C-library calls, which are not thread-safe.
void writeOutputFileMetadataAttributes(HighFive::File &file, const core::OutputFileMetadata &metadata)
Writes additive FERS output metadata attributes to an open HDF5 file.
void addChunkToFile(HighFive::File &file, const std::vector< ComplexType > &data, const RealType time, const RealType fullscale, const unsigned count, const core::PulseChunkMetadata *metadata)
Adds a chunk of data to an HDF5 file.
void readPulseData(const std::string &name, std::vector< ComplexType > &data)
Reads pulse data from an HDF5 file.
std::vector< std::vector< RealType > > readPattern(const std::string &name, const std::string &datasetName)
Reads a 2D pattern dataset from an HDF5 file.
Defines the Parameters struct and provides methods for managing simulation parameters.
std::uint64_t core::OutputFileMetadata::sample_end_exclusive
std::uint64_t core::PulseChunkMetadata::sample_end_exclusive