22template <
typename T,
typename =
void>
24 inline static DataSet
dump(File& file,
25 const std::string& path,
27 const DumpOptions& options) {
28 DataSet dataset = initScalarDataset(file, path, data, options);
30 if (options.flush()) {
36 inline static T
load(
const File& file,
const std::string& path) {
37 DataSet dataset = file.getDataSet(path);
44 const std::string& path,
45 const std::string& key,
47 const DumpOptions& options) {
48 Attribute attribute = initScalarAttribute(file, path, key, data, options);
49 attribute.write(data);
50 if (options.flush()) {
57 const std::string& path,
58 const std::string& key) {
59 DataSet dataset = file.getDataSet(path);
60 Attribute attribute = dataset.getAttribute(key);
66 inline static DataSet dump_extend(File& file,
67 const std::string& path,
69 const std::vector<size_t>& idx,
70 const DumpOptions& options) {
71 std::vector<size_t> ones(idx.size(), 1);
73 if (file.exist(path)) {
74 DataSet dataset = file.getDataSet(path);
75 std::vector<size_t> dims = dataset.getDimensions();
76 std::vector<size_t> shape = dims;
77 if (dims.size() != idx.size()) {
81 "H5Easy::dump: Dimension of the index and the existing field do not match");
83 for (
size_t i = 0; i < dims.size(); ++i) {
84 shape[i] = std::max(dims[i], idx[i] + 1);
87 dataset.resize(shape);
89 dataset.select(idx, ones).write(data);
90 if (options.flush()) {
96 std::vector<size_t> shape = idx;
98 std::vector<size_t> unlim_shape(idx.size(), unlim);
99 std::vector<hsize_t> chunks(idx.size(), 10);
100 if (options.isChunked()) {
101 chunks = options.getChunkSize();
102 if (chunks.size() != idx.size()) {
103 throw error(file, path,
"H5Easy::dump: Incorrect dimension ChunkSize");
106 for (
size_t& i: shape) {
111 props.add(Chunking(chunks));
112 DataSet dataset = file.createDataSet(path, dataspace, AtomicType<T>(), props, {},
true);
113 dataset.select(idx, ones).write(data);
114 if (options.flush()) {
120 inline static T load_part(
const File& file,
121 const std::string& path,
122 const std::vector<size_t>& idx) {
123 std::vector<size_t> ones(idx.size(), 1);
124 DataSet dataset = file.getDataSet(path);
126 dataset.select(idx, ones).read(data);
static const size_t UNLIMITED
Magic value to specify that a DataSpace can grow without limit.
Definition H5DataSpace.hpp:49
PropertyList< PropertyType::DATASET_CREATE > DataSetCreateProps
Definition H5PropertyList.hpp:201
Read/dump DataSets or Attributes using a minimalistic syntax. To this end, the functions are templated...
Definition H5Easy.hpp:59
DataSet dump(File &file, const std::string &path, const T &data, DumpMode mode=DumpMode::Create)
Write object (templated) to a (new) DataSet in an open HDF5 file.
Definition H5Easy_public.hpp:99
T loadAttribute(const File &file, const std::string &path, const std::string &key)
Load an Attribute in an open HDF5 file to an object (templated).
Definition H5Easy_public.hpp:166
Attribute dumpAttribute(File &file, const std::string &path, const std::string &key, const T &data, DumpMode mode=DumpMode::Create)
Write object (templated) to a (new) Attribute in an open HDF5 file.
Definition H5Easy_public.hpp:148
T load(const File &file, const std::string &path, const std::vector< size_t > &idx)
Load entry {i, j, ...} from a DataSet in an open HDF5 file to a scalar.
Definition H5Easy_public.hpp:138