Skip to content
Snippets Groups Projects
Commit 9370903a authored by Stéphane Del Pino's avatar Stéphane Del Pino
Browse files

git subrepo clone --force --branch=main git@github.com:highfive-devs/HighFive.git packages/HighFive

subrepo:
  subdir:   "packages/HighFive"
  merged:   "0e308c244"
upstream:
  origin:   "git@github.com:highfive-devs/HighFive.git"
  branch:   "main"
  commit:   "0e308c244"
git-subrepo:
  version:  "0.4.9"
  origin:   "git@github.com:ingydotnet/git-subrepo.git"
  commit:   "30db3b8"
parent 22327aeb
Branches
No related tags found
1 merge request!203git subrepo pull packages/HighFive
This commit is part of merge request !203. Comments created here will be created in the context of that merge request.
...@@ -7,9 +7,9 @@ jobs: ...@@ -7,9 +7,9 @@ jobs:
if: github.event.pull_request.merged == true if: github.event.pull_request.merged == true
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Trigger integration tests on BlueBrain/HighFive-testing - name: Trigger integration tests on highfive-devs/bbp-integration
run: | run: |
curl -X POST https://api.github.com/repos/BlueBrain/HighFive-testing/dispatches \ curl -X POST https://api.github.com/repos/highfive-devs/bbp-integration/dispatches \
-H 'Accept: application/vnd.github.everest-preview+json' \ -H 'Accept: application/vnd.github.everest-preview+json' \
-u ${{ secrets.ACCESS_TOKEN }} \ -u ${{ secrets.ACCESS_TOKEN }} \
--data '{"event_type": "merge", "client_payload": { "repository": "'"$GITHUB_REPOSITORY"'" }}' --data '{"event_type": "merge", "client_payload": { "repository": "'"$GITHUB_REPOSITORY"'" }}'
...@@ -4,9 +4,9 @@ ...@@ -4,9 +4,9 @@
; git-subrepo command. See https://github.com/ingydotnet/git-subrepo#readme ; git-subrepo command. See https://github.com/ingydotnet/git-subrepo#readme
; ;
[subrepo] [subrepo]
remote = git@github.com:BlueBrain/HighFive.git remote = git@github.com:highfive-devs/HighFive.git
branch = master branch = main
commit = dfc06537fdb20f5e572c20bc6febd22dd5d08246 commit = 0e308c244ef32d847a2f11c38878f419a8df8543
parent = fa321c14d4206f8183fc9029ee9bd0d216ce700b parent = 22327aeb7598d4ba70991c7e466d73a2a11a291d
method = merge method = merge
cmdver = 0.4.9 cmdver = 0.4.9
...@@ -4,11 +4,6 @@ cmake_policy(VERSION 3.13) ...@@ -4,11 +4,6 @@ cmake_policy(VERSION 3.13)
project(HighFive VERSION 3.0.0) project(HighFive VERSION 3.0.0)
set(HIGHFIVE_VERSION_PRERELEASE 2) set(HIGHFIVE_VERSION_PRERELEASE 2)
message(WARNING "=================================================================\n"
"HighFive development moved to:\n"
" github.com/highfive-devs/highfive\n"
"=================================================================")
# Configure HighFive # Configure HighFive
# ------------------ # ------------------
option(HIGHFIVE_VERBOSE "Set logging level to verbose." OFF) option(HIGHFIVE_VERBOSE "Set logging level to verbose." OFF)
......
> [!WARNING] > [!NOTE]
> The Blue Brain Project concluded in December 2024, so the HighFive development has ceased under the BlueBrain GitHub organization. > HighFive was originally developed and maintained at
> > https://github.com/BlueBrain/HighFive. To continue maintenance of HighFive as
> The development of HighFive will continue at: > an independent open-source code without support from BBP or EPFL, some (one)
> https://github.com/highfive-devs/highfive > of the developers decided to create this repository.
*Note:* In preparation of `v3` of HighFive, we've started merging breaking
changes into the main branch. More information can be found at:
https://github.com/BlueBrain/HighFive/issues/864
# HighFive - HDF5 header-only C++ Library # HighFive - HDF5 header-only C++ Library
[![Doxygen -> gh-pages](https://github.com/highfive-devs/highfive/workflows/gh-pages/badge.svg?branch=master)](https://BlueBrain.github.io/HighFive/actions/workflows/gh-pages.yml?query=branch%3Amaster)
[![codecov](https://codecov.io/gh/BlueBrain/HighFive/branch/master/graph/badge.svg?token=UBKxHEn7RS)](https://codecov.io/gh/BlueBrain/HighFive)
[![Integration Tests](https://github.com/highfive-devs/bbp-integration/actions/workflows/integration.yml/badge.svg)](https://github.com/highfive-devs/bbp-integration/actions/workflows/integration.yml)
[![Zenodo](https://zenodo.org/badge/47755262.svg)](https://zenodo.org/doi/10.5281/zenodo.10679422)
Documentation: https://bluebrain.github.io/HighFive/ Documentation: https://bluebrain.github.io/HighFive/
## Brief ## Brief
...@@ -43,51 +52,255 @@ It integrates nicely with other CMake projects by defining (and exporting) a Hig ...@@ -43,51 +52,255 @@ It integrates nicely with other CMake projects by defining (and exporting) a Hig
- xtensor (optional) - xtensor (optional)
- half (optional) - half (optional)
The releases for versions 2.x.y and two prereleases of v3 can be found at: ### Versioning & Code Stability
* https://github.com/BlueBrain/HighFive/releases We use semantic versioning. Currently, we're preparing `v3` which contains a
* https://zenodo.org/doi/10.5281/zenodo.10679422 limited set of breaking changes required to eliminate undesirable behaviour or
modernize outdated patterns. We provide a
[Migration Guide](https://bluebrain.github.io/HighFive/md__2home_2runner_2work_2_high_five_2_high_five_2doc_2migration__guide.html),
please report any missing or incorrect information to help others make the
switch more easily.
- `v2.x.y` are stable and any API breaking changes are considered bugs. There's
likely not going to be very many releases of the `v2` line once `v3` is stable.
- `v3.0.0-beta?` are pre-releases of `v3.0.0`. We predict that one more
breaking change might happen: the string handling is confusing to some of the
maintainers and the default encoding is inconsistent (and will likely be made
consistent).
For codes that either use `std::string` when dealing with strings, or that
don't use strings with HDF5 at all, we don't currently have any additional
breaking changes planned for 3.0.0.
The state of HighFive immediately before preparing it for archival is:
* https://github.com/BlueBrain/HighFive/tree/v3.0.0-beta2
All future development and new releases can be found at: ### Known flaws
* https://github.com/highfive-devs/highfive - HighFive is not thread-safe. At best it has the same limitations as the HDF5 library. However, HighFive objects modify their members without protecting these writes. Users have reported that HighFive is not thread-safe even when using the threadsafe HDF5 library, e.g., https://github.com/BlueBrain/HighFive/discussions/675.
- Eigen support in core HighFive was broken until v3.0. See https://github.com/BlueBrain/HighFive/issues/532. H5Easy was not
affected.
- The support of fixed length strings isn't ideal.
## Example
## Examples
#### Write a std::vector<int> to 1D HDF5 dataset and read it back
```c++ ```c++
#include <highfive/highfive.hpp>
using namespace HighFive; using namespace HighFive;
File file("foo.h5", File::Truncate); std::string filename = "/tmp/new_file.h5";
{ {
// We create an empty HDF5 file, by truncating an existing
// file if required:
File file(filename, File::Truncate);
std::vector<int> data(50, 1); std::vector<int> data(50, 1);
file.createDataSet("grp/data", data); file.createDataSet("grp/data", data);
} }
{ {
// We open the file as read-only:
File file(filename, File::ReadOnly);
auto dataset = file.getDataSet("grp/data"); auto dataset = file.getDataSet("grp/data");
// Read back, automatically allocating: // Read back, with allocating:
auto data = dataset.read<std::vector<int>>(); auto data = dataset.read<std::vector<int>>();
// Alternatively, if `data` has the correct // Because `data` has the correct size, this will
// size, without reallocation: // not cause `data` to be reallocated:
dataset.read(data); dataset.read(data);
} }
``` ```
# Funding & Acknowledgment **Note:** As of 2.8.0, one can use `highfive/highfive.hpp` to include
everything HighFive. Prior to 2.8.0 one would include `highfive/H5File.hpp`.
**Note:** For advanced usecases the dataset can be created without immediately
writing to it. This is common in MPI-IO related patterns, or when growing a
dataset over the course of a simulation.
#### Write a 2 dimensional C double float array to a 2D HDF5 dataset
See [create_dataset_double.cpp](https://github.com/highfive-devs/highfive/blob/master/src/examples/create_dataset_double.cpp)
#### Write and read a matrix of double float (boost::ublas) to a 2D HDF5 dataset
See [boost_ublas_double.cpp](https://github.com/highfive-devs/highfive/blob/master/src/examples/boost_ublas_double.cpp)
#### Write and read a subset of a 2D double dataset
See [select_partial_dataset_cpp11.cpp](https://github.com/highfive-devs/highfive/blob/master/src/examples/select_partial_dataset_cpp11.cpp)
#### Create, write and list HDF5 attributes
See [create_attribute_string_integer.cpp](https://github.com/highfive-devs/highfive/blob/master/src/examples/create_attribute_string_integer.cpp)
#### And others
See [src/examples/](https://github.com/highfive-devs/highfive/blob/master/src/examples/) subdirectory for more info.
### H5Easy
For several 'standard' use cases the [highfive/H5Easy.hpp](include/highfive/H5Easy.hpp) interface is available. It allows:
* Reading/writing in a single line of:
- scalars (to/from an extendible DataSet),
- strings,
- vectors (of standard types),
- [Eigen::Matrix](http://eigen.tuxfamily.org) (optional),
- [xt::xarray](https://github.com/QuantStack/xtensor) and [xt::xtensor](https://github.com/QuantStack/xtensor)
(optional).
- [cv::Mat_](https://docs.opencv.org/master/df/dfc/classcv_1_1Mat__.html)
(optional).
* Getting in a single line:
- the size of a DataSet,
- the shape of a DataSet.
#### Example
```cpp
#include <highfive/H5Easy.hpp>
int main() {
H5Easy::File file("example.h5", H5Easy::File::Overwrite);
int A = ...;
H5Easy::dump(file, "/path/to/A", A);
A = H5Easy::load<int>(file, "/path/to/A");
}
```
whereby the `int` type of this example can be replaced by any of the above
types. See [easy_load_dump.cpp](src/examples/easy_load_dump.cpp) for more
details.
**Note:** Classes such as `H5Easy::File` are just short for the regular
`HighFive` classes (in this case `HighFive::File`). They can thus be used
interchangeably.
The development of this software was supported by funding to the Blue Brain Project, a research center of the École polytechnique fédérale de Lausanne (EPFL), from the Swiss government's ETH Board of the Swiss Federal Institutes of Technology. ## CMake integration
There's two common paths of integrating HighFive into a CMake based project.
The first is to "vendor" HighFive, the second is to install HighFive as a
normal C++ library. Since HighFive makes choices about how to integrate HDF5,
sometimes following the third Bailout Approach is needed.
Regular HDF5 CMake variables can be used. Interesting variables include:
* `HDF5_USE_STATIC_LIBRARIES` to link statically against the HDF5 library.
* `HDF5_PREFER_PARALLEL` to prefer pHDF5.
* `HDF5_IS_PARALLEL` to check if HDF5 is parallel.
Please consult `tests/cmake_integration` for examples of how to write libraries
or applications using HighFive.
### Vendoring HighFive
In this approach the HighFive sources are included in a subdirectory of the
project (typically as a git submodule), for example in `third_party/HighFive`.
The project's `CMakeLists.txt` adds the following lines:
```cmake
add_subdirectory(third_party/HighFive)
target_link_libraries(foo HighFive)
```
**Note:** `add_subdirectory(third_party/HighFive)` will search and "link" HDF5
but won't search or link any optional dependencies such as Boost.
### Regular Installation of HighFive
Alternatively, HighFive can be installed and "found" like regular software.
The project's `CMakeLists.txt` should add the following:
```cmake
find_package(HighFive REQUIRED)
target_link_libraries(foo HighFive)
```
**Note:** `find_package(HighFive)` will search for HDF5. "Linking" to
`HighFive` includes linking with HDF5. The two commands will not search for or
"link" to optional dependencies such as Boost.
### Bailout Approach
To prevent HighFive from searching or "linking" to HDF5 the project's
`CMakeLists.txt` should contain the following:
```cmake
# Prevent HighFive CMake code from searching for HDF5:
set(HIGHFIVE_FIND_HDF5 Off)
# Then "find" HighFive as usual:
find_package(HighFive REQUIRED)
# alternatively, when vendoring:
# add_subdirectory(third_party/HighFive)
# Finally, use the target `HighFive::Include` which
# doesn't add a dependency on HDF5.
target_link_libraries(foo HighFive::Include)
# Proceed to find and link HDF5 as required.
```
### Optional Dependencies
HighFive does not attempt to find or "link" to any optional dependencies, such
as Boost, Eigen, etc. Any project using HighFive with any of the optional
dependencies must include the respective header:
```
#include <highfive/boost.hpp>
#include <highfive/eigen.hpp>
```
and add the required CMake code to find and link against the dependencies. For
Boost the required lines might be
```
find_package(Boost REQUIRED)
target_link_libraries(foo PUBLIC Boost::headers)
```
# Questions?
Please first check if your question/issue has been answered/reported at
[BlueBrain/HighFive](https://github.com/BlueBrain/HighFive).
Do you have questions on how to use HighFive? Would you like to share an interesting example or
discuss HighFive features? Head over to the [Discussions](https://github.com/highfive-devs/highfive/discussions)
forum and join the community.
For bugs and issues please use [Issues](https://github.com/highfive-devs/highfive/issues).
# Funding & Acknowledgment
HighFive releases are uploaded to Zenodo. If you wish to cite HighFive in a HighFive releases are uploaded to Zenodo. If you wish to cite HighFive in a
scientific publication you can use the DOIs for the scientific publication you can use the DOIs for the
[Zenodo records](https://zenodo.org/doi/10.5281/zenodo.10679422). [Zenodo records](https://zenodo.org/doi/10.5281/zenodo.10679422).
Copyright © 2015-2024 Blue Brain Project/EPFL ## Blue Brain Project Era: 2015 - 2024
HighFive was created and maintained as part of the BBP from 2015 until Dec 2024
(when BBP closed) at [BlueBrain/HighFive](https://github.com/BlueBrain/HighFive).
Please consult its README for funding information by the Blue Brain Project or EPFL.
### License ## Post Blue Brain Project: 2025 - present
One of the main contributors to
[BlueBrain/HighFive](https://github.com/BlueBrain/HighFive) wanted to keep the
project alive past the end of BBP. This repository was created to provide a
seamless continuation of HighFive, and prevent fracturing or capturing of the
project.
This repository is not supported by the Blue Brain Project or EPFL.
# License & Copyright
Boost Software License 1.0 Boost Software License 1.0
Copyright © 2015-2024 Blue Brain Project/EPFL
...@@ -85,7 +85,7 @@ inline haddr_t h5d_get_offset(hid_t dset_id) { ...@@ -85,7 +85,7 @@ inline haddr_t h5d_get_offset(hid_t dset_id) {
inline herr_t h5d_set_extent(hid_t dset_id, const hsize_t size[]) { inline herr_t h5d_set_extent(hid_t dset_id, const hsize_t size[]) {
herr_t err = H5Dset_extent(dset_id, size); herr_t err = H5Dset_extent(dset_id, size);
if (H5Dset_extent(dset_id, size) < 0) { if (err < 0) {
HDF5ErrMapper::ToException<DataSetException>("Could not resize dataset."); HDF5ErrMapper::ToException<DataSetException>("Could not resize dataset.");
} }
......
...@@ -91,6 +91,30 @@ inline DataSet initScalarDataset(File& file, ...@@ -91,6 +91,30 @@ inline DataSet initScalarDataset(File& file,
throw dump_error(file, path); throw dump_error(file, path);
} }
// Dispatch `f` onto the HDF5 object located at `path`: the object's type is
// resolved first, then `f` is invoked with either a Group or a DataSet, so
// attribute operations work uniformly on both kinds of objects. Any other
// object type is rejected with an H5Easy error.
// NOTE(review): presumably the root path "/" resolves to ObjectType::Group
// and is handled by the Group branch — confirm against getObjectType().
template <class File, class F>
auto apply_attr_func_impl(File& file, const std::string& path, F f) {
auto type = file.getObjectType(path);
if (type == ObjectType::Group) {
auto group = file.getGroup(path);
return f(group);
} else if (type == ObjectType::Dataset) {
auto dataset = file.getDataSet(path);
return f(dataset);
} else {
// Neither a group nor a dataset (e.g. a named datatype): not supported.
throw error(file, path, "path is not the root, a group or a dataset.");
}
}
// Const overload: forwards to the shared implementation so read-only
// attribute access (e.g. loadAttribute) can operate on a `const` file.
template <class F>
auto apply_attr_func(const H5Easy::File& file, const std::string& path, F f) {
return apply_attr_func_impl(file, path, f);
}
// Mutable overload: forwards to the shared implementation so attribute
// creation/overwrite (e.g. dumpAttribute) can modify the file.
template <class F>
auto apply_attr_func(H5Easy::File& file, const std::string& path, F f) {
return apply_attr_func_impl(file, path, f);
}
// get a opened Attribute: nd-array // get a opened Attribute: nd-array
template <class T> template <class T>
inline Attribute initAttribute(File& file, inline Attribute initAttribute(File& file,
...@@ -98,17 +122,11 @@ inline Attribute initAttribute(File& file, ...@@ -98,17 +122,11 @@ inline Attribute initAttribute(File& file,
const std::string& key, const std::string& key,
const std::vector<size_t>& shape, const std::vector<size_t>& shape,
const DumpOptions& options) { const DumpOptions& options) {
if (!file.exist(path)) { auto get_attribute = [&](auto& obj) {
throw error(file, path, "H5Easy::dumpAttribute: DataSet does not exist"); if (!obj.hasAttribute(key)) {
} return obj.template createAttribute<T>(key, DataSpace(shape));
if (file.getObjectType(path) != ObjectType::Dataset) {
throw error(file, path, "H5Easy::dumpAttribute: path not a DataSet");
}
DataSet dataset = file.getDataSet(path);
if (!dataset.hasAttribute(key)) {
return dataset.createAttribute<T>(key, DataSpace(shape));
} else if (options.overwrite()) { } else if (options.overwrite()) {
Attribute attribute = dataset.getAttribute(key); Attribute attribute = obj.getAttribute(key);
DataSpace dataspace = attribute.getSpace(); DataSpace dataspace = attribute.getSpace();
if (dataspace.getDimensions() != shape) { if (dataspace.getDimensions() != shape) {
throw error(file, path, "H5Easy::dumpAttribute: Inconsistent dimensions"); throw error(file, path, "H5Easy::dumpAttribute: Inconsistent dimensions");
...@@ -118,6 +136,13 @@ inline Attribute initAttribute(File& file, ...@@ -118,6 +136,13 @@ inline Attribute initAttribute(File& file,
throw error(file, throw error(file,
path, path,
"H5Easy: Attribute exists, overwrite with H5Easy::DumpMode::Overwrite."); "H5Easy: Attribute exists, overwrite with H5Easy::DumpMode::Overwrite.");
};
if (!file.exist(path)) {
throw error(file, path, "H5Easy::dumpAttribute: path does not exist");
}
return apply_attr_func(file, path, get_attribute);
} }
// get a opened Attribute: scalar // get a opened Attribute: scalar
...@@ -127,17 +152,11 @@ inline Attribute initScalarAttribute(File& file, ...@@ -127,17 +152,11 @@ inline Attribute initScalarAttribute(File& file,
const std::string& key, const std::string& key,
const T& data, const T& data,
const DumpOptions& options) { const DumpOptions& options) {
if (!file.exist(path)) { auto get_attribute = [&](auto& obj) {
throw error(file, path, "H5Easy::dumpAttribute: DataSet does not exist"); if (!obj.hasAttribute(key)) {
} return obj.template createAttribute<T>(key, DataSpace::From(data));
if (file.getObjectType(path) != ObjectType::Dataset) {
throw error(file, path, "H5Easy::dumpAttribute: path not a DataSet");
}
DataSet dataset = file.getDataSet(path);
if (!dataset.hasAttribute(key)) {
return dataset.createAttribute<T>(key, DataSpace::From(data));
} else if (options.overwrite()) { } else if (options.overwrite()) {
Attribute attribute = dataset.getAttribute(key); Attribute attribute = obj.getAttribute(key);
DataSpace dataspace = attribute.getSpace(); DataSpace dataspace = attribute.getSpace();
if (dataspace.getElementCount() != 1) { if (dataspace.getElementCount() != 1) {
throw error(file, path, "H5Easy::dumpAttribute: Existing field not a scalar"); throw error(file, path, "H5Easy::dumpAttribute: Existing field not a scalar");
...@@ -147,6 +166,13 @@ inline Attribute initScalarAttribute(File& file, ...@@ -147,6 +166,13 @@ inline Attribute initScalarAttribute(File& file,
throw error(file, throw error(file,
path, path,
"H5Easy: Attribute exists, overwrite with H5Easy::DumpMode::Overwrite."); "H5Easy: Attribute exists, overwrite with H5Easy::DumpMode::Overwrite.");
};
if (!file.exist(path)) {
throw error(file, path, "H5Easy::dumpAttribute: path does not exist");
}
apply_attr_func(file, path, get_attribute);
} }
} // namespace detail } // namespace detail
......
...@@ -57,9 +57,11 @@ struct default_io_impl { ...@@ -57,9 +57,11 @@ struct default_io_impl {
inline static T loadAttribute(const File& file, inline static T loadAttribute(const File& file,
const std::string& path, const std::string& path,
const std::string& key) { const std::string& key) {
DataSet dataset = file.getDataSet(path); auto read_attribute = [&key](const auto& obj) {
Attribute attribute = dataset.getAttribute(key); return obj.getAttribute(key).template read<T>();
return attribute.read<T>(); };
return apply_attr_func(file, path, read_attribute);
} }
}; };
......
...@@ -166,27 +166,49 @@ TEST_CASE("H5Easy_vector3d") { ...@@ -166,27 +166,49 @@ TEST_CASE("H5Easy_vector3d") {
CHECK(a == a_r); CHECK(a == a_r);
} }
TEST_CASE("H5Easy_Attribute_scalar") { void check_attribute(H5Easy::File& file, const std::string& path) {
H5Easy::File file("h5easy_attribute_scalar.h5", H5Easy::File::Overwrite);
double a = 1.2345; double a = 1.2345;
int b = 12345; int b = 12345;
std::string c = "12345"; std::string c = "12345";
std::vector<double> d = {1.1, 2.2, 3.3};
H5Easy::dump(file, "/path/to/a", a); H5Easy::dumpAttribute(file, path, "a", -a);
H5Easy::dumpAttribute(file, "/path/to/a", "a", a); H5Easy::dumpAttribute(file, path, "a", a, H5Easy::DumpMode::Overwrite);
H5Easy::dumpAttribute(file, "/path/to/a", "a", a, H5Easy::DumpMode::Overwrite); H5Easy::dumpAttribute(file, path, "b", b);
H5Easy::dumpAttribute(file, "/path/to/a", "b", b); H5Easy::dumpAttribute(file, path, "c", c);
H5Easy::dumpAttribute(file, "/path/to/a", "c", c); H5Easy::dumpAttribute(file, path, "d", d);
double a_r = H5Easy::loadAttribute<double>(file, "/path/to/a", "a"); double a_r = H5Easy::loadAttribute<double>(file, path, "a");
int b_r = H5Easy::loadAttribute<int>(file, "/path/to/a", "b"); int b_r = H5Easy::loadAttribute<int>(file, path, "b");
std::string c_r = H5Easy::loadAttribute<std::string>(file, "/path/to/a", "c"); std::string c_r = H5Easy::loadAttribute<std::string>(file, path, "c");
std::vector<double> d_r = H5Easy::loadAttribute<std::vector<double>>(file, path, "d");
CHECK(a == a_r); CHECK(a == a_r);
CHECK(b == b_r); CHECK(b == b_r);
CHECK(c == c_r); CHECK(c == c_r);
REQUIRE(d.size() == d_r.size());
for (size_t i = 0; i < d.size(); ++i) {
REQUIRE(d[i] == d_r[i]);
} }
}
// Verifies that H5Easy attributes can be dumped to and loaded from every
// supported attachment point: a dataset, a group, and the file's root group.
// Each SECTION runs with a fresh file (Catch2 re-executes the setup per
// section), exercising `check_attribute` defined above.
TEST_CASE("H5Easy_Attribute_scalar") {
H5Easy::File file("h5easy_attribute_scalar.h5", H5Easy::File::Overwrite);
std::string path = "/path/to/x";
// Create a dataset so attributes can be attached at `path`; its parent
// groups ("/path", "/path/to") are created implicitly.
H5Easy::dump(file, path, 1.0);
SECTION("dataset") {
check_attribute(file, path);
}
SECTION("group") {
check_attribute(file, "/path");
}
SECTION("root") {
check_attribute(file, "/");
}
}
#ifdef HIGHFIVE_TEST_XTENSOR #ifdef HIGHFIVE_TEST_XTENSOR
TEST_CASE("H5Easy_extend1d") { TEST_CASE("H5Easy_extend1d") {
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment