diff --git a/packages/HighFive/.github/ISSUE_TEMPLATE/bug_report.md b/packages/HighFive/.github/ISSUE_TEMPLATE/bug_report.md index 2558a56c406bc2f7bd2bebb6f71a7b8a0f156049..1b1ef3c350ff1d6acc23d6fcd5d42bd070f67c9d 100644 --- a/packages/HighFive/.github/ISSUE_TEMPLATE/bug_report.md +++ b/packages/HighFive/.github/ISSUE_TEMPLATE/bug_report.md @@ -8,24 +8,30 @@ assignees: '' --- **Describe the bug** -A clear and concise description of what the bug is. +Please give a clear and concise description of the bug. For us to be able to +fix the bug we need to be able to reproduce it. As a result, if you provide a +reproducer, i.e. code that exhibits the bug (incl. build instructions) but is +stripped of all unessential complexity, the chance of getting the issue fixed +is much higher. -**To Reproduce** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error +**Version Information** + - HighFive: + - Compiler: + - [optional] OS: + - [optional] CMake: + - [optional] HDF5: -**Expected behavior** -A clear and concise description of what you expected to happen. +**Style Guide** +1. Please paste text as text and not as a screen shot. +2. If in doubt paste too much output rather than too little, i.e. don't be too + scared of a large wall of text. Especially, if it's a compiler error. + (Anything past the first error is largely uninformative and can be safely + stripped.) +3. Please strip all boilerplate. -**Stacktrace** -If applicable, add a stacktrace and error messages to help explain your problem. +**Markdown summary** +Some include `code` and a block: +``` +void foo(); +``` -**Desktop (please complete the following information):** - - OS: [e.g. ubuntu 20.10, macos 10.15] - - Version [e.g. master branch] - -**Additional context** -Add any other context about the problem here. diff --git a/packages/HighFive/.github/ISSUE_TEMPLATE/build_failure.md b/packages/HighFive/.github/ISSUE_TEMPLATE/build_failure.md new file mode 100644 index 0000000000000000000000000000000000000000..18b280c6199d349982902b7385b350c28004686c --- /dev/null +++ b/packages/HighFive/.github/ISSUE_TEMPLATE/build_failure.md @@ -0,0 +1,44 @@ +--- +name: Build failure +about: Report an issue with the build-system. +title: '' +labels: '' +assignees: '' + +--- + +**Bug Description** +Unfortunately, build failures tend to be highly specific and there's many +things that could go wrong on both our and your side. Therefore, a reproducer +is essential. You could try the minimal setup found here: +https://github.com/BlueBrain/HighFive/blob/master/doc/installation.md#manually-install-highfive + +from there you can work upwards by adding complexity until you reproduce the +issue. + +Once you have a reproducer, please paste it and the exact `cmake` command used to +configure the build and include the output. For the compilation phase please +ensure that the actual compiler invocation is visible, e.g., +``` +$ cmake --build build --verbose +[ 50%] Building CXX object CMakeFiles/dummy.dir/dummy.cpp.o +/usr/bin/c++ ... -isystem ${HIGHFIVE_ROOT}/include -isystem ${HDF5_ROOT}/include ... -c dummy.cpp +``` +and include at least the first error message. (If in doubt include more rather +than less output.) + +**Version Information** + - HighFive: + - Compiler: + - OS: + - CMake: + - HDF5: + +**Style Guide** +1. Please paste text as text and not as a screen shot. +2. If in doubt paste too much output rather than too little, i.e. don't be too + scared of a large wall of text. 
Especially, if it's a compiler error. + (Anything past the first error is largely uninformative and can be safely + stripped.) +3. Please strip all boilerplate. + diff --git a/packages/HighFive/.github/ISSUE_TEMPLATE/feature_request.md b/packages/HighFive/.github/ISSUE_TEMPLATE/feature_request.md index 4ead48053da37e38f6d9dce4a07d6824968ee963..c0b503578bf5ffeec2b2945cced8fe0b2fd17b5f 100644 --- a/packages/HighFive/.github/ISSUE_TEMPLATE/feature_request.md +++ b/packages/HighFive/.github/ISSUE_TEMPLATE/feature_request.md @@ -7,14 +7,3 @@ assignees: '' --- -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context about the feature request here. diff --git a/packages/HighFive/.github/build.sh b/packages/HighFive/.github/build.sh index 4cdad7e3ffdeb5c21a6ef7edb8173c2398f5158d..959f0570cf96b50edfe9b563aab4fbf8f707cc59 100644 --- a/packages/HighFive/.github/build.sh +++ b/packages/HighFive/.github/build.sh @@ -5,6 +5,7 @@ cmake --version set -x export HIGHFIVE_BUILD=$GITHUB_WORKSPACE/build cmake -B $HIGHFIVE_BUILD -S $GITHUB_WORKSPACE \ + -DHIGHFIVE_HAS_WERROR=On \ -DCMAKE_BUILD_TYPE=$BUILD_TYPE \ -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR \ "${CMAKE_OPTIONS[@]}" diff --git a/packages/HighFive/.github/create_submodule_update_pr.sh b/packages/HighFive/.github/create_submodule_update_pr.sh new file mode 100755 index 0000000000000000000000000000000000000000..d43dc1d7720dff9a22c392dbf0aeff778acf7b5c --- /dev/null +++ b/packages/HighFive/.github/create_submodule_update_pr.sh @@ -0,0 +1,70 @@ +#!/usr/bin/env bash + +# Usage: +# $0 PACKAGE_NAME VERSION +# +# Before calling this script, run the commands to update the dependency. If +# there dependency shouldn't update then the script must not modify the repo. +# +# When the repo is in the updated state, run this script. It will commit +# everything and create a PR. +# +# The PR title is `Update ${PACKAGE_NAME} to ${VERSION}` the script checks for +# this string and only creates a new PR if the string isn't the title of an +# existing PR. +# +# PACKAGE_NAME is an identifier of the package, no spaces. Doesn't need to be +# the exact name of the dependency. +# +# VERSION an identifier of the next version of the package, no spaces. This +# variable must be the same if the version if the same and different if +# the version is different. However, it doesn't have to be a `x.y.z` it +# could be a Git SHA, or something else. + +set -eu + +PACKAGE_NAME=$1 +VERSION=$2 +BRANCH=update-${PACKAGE_NAME}-${VERSION} +COMMIT_MESSAGE="Update ${PACKAGE_NAME} to ${VERSION}" + +if [[ -z "${PACKAGE_NAME}" ]] +then + echo "Empty PACKAGE_NAME." + exit -1 +fi + +if [[ -z "${VERSION}" ]] +then + echo "Empty VERSION." + exit -1 +fi + + +# NOTE: In a later runs of CI we will search for PR with this exact +# title. Only if no such PR exists will the script create a +# new PR. +PR_TITLE="Update ${PACKAGE_NAME} to ${VERSION}" + +if [[ -z "$(git status --porcelain)" ]] +then + echo "No differences detected: ${PACKAGE_NAME} is up-to-date." 
+ exit 0 +fi + +if [[ -z "$(gh pr list --state all --search "${PR_TITLE}")" ]] +then + + git checkout -b $BRANCH + git config user.name github-actions + git config user.email github-actions@github.com + git commit -a -m "${COMMIT_MESSAGE}" + + git push -u origin ${BRANCH} + gh pr create \ + --title "${PR_TITLE}" \ + --body "This PR was generated by a Github Actions workflow." + +else + echo "Old PR detected: didn't create a new one." +fi diff --git a/packages/HighFive/.github/pull_request_template.md b/packages/HighFive/.github/pull_request_template.md deleted file mode 100644 index 58abc6c608ca3452deb51b29f4962e33886c1a0c..0000000000000000000000000000000000000000 --- a/packages/HighFive/.github/pull_request_template.md +++ /dev/null @@ -1,25 +0,0 @@ -**Description** - -Please include a summary of the change and which issue is fixed or which feature is added. - -- [ ] Issue 1 fixed -- [ ] Issue 2 fixed -- [ ] Feature 1 added -- [ ] Feature 2 added - -Fixes #(issue) - -**How to test this?** - -Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce if there is no integration test added with this PR. Please also list any relevant details for your test configuration - -```bash -cmake .. -make -j8 -make test -``` - -**Test System** - - OS: [e.g. Ubuntu 20.04] - - Compiler: [e.g. clang 12.0.0] - - Dependency versions: [e.g. hdf5 1.12] diff --git a/packages/HighFive/.github/workflows/check_doxygen_awesome_version.yml b/packages/HighFive/.github/workflows/check_doxygen_awesome_version.yml index 233577ef862b841eb07cbd38c61f4d25a29fe1c8..2e4df2833e75e36ee60b2fd8fd08b07a9f18528f 100644 --- a/packages/HighFive/.github/workflows/check_doxygen_awesome_version.yml +++ b/packages/HighFive/.github/workflows/check_doxygen_awesome_version.yml @@ -17,33 +17,4 @@ jobs: run: | VERSION=$(doc/doxygen-awesome-css/update_doxygen_awesome.sh "$(mktemp -d)") - BRANCH=update-doxygen-awesome-${VERSION} - COMMIT_MESSAGE="Update doxygen-awesome to ${VERSION}" - - # NOTE: In a later runs of CI we will search for PR with this exact - # title. Only if no such PR exists will the script create a - # new PR. - PR_TITLE="[docs] Update doxygen-awesome to ${VERSION}" - - if [[ -z "$(git status --porcelain)" ]] - then - echo "No differences detected: doxygen-awesome is up-to-date." - exit 0 - fi - - if [[ -z "$(gh pr list --state all --search "${PR_TITLE}")" ]] - then - - git checkout -b $BRANCH - git config user.name github-actions - git config user.email github-actions@github.com - git commit -a -m "${COMMIT_MESSAGE}" - - git push -u origin ${BRANCH} - gh pr create \ - --title "${PR_TITLE}" \ - --body "This PR was generated by a Github Actions workflow." - - else - echo "Old PR detected: didn't create a new one." - fi + .github/create_submodule_update_pr.sh doxygen-awesome ${VERSION} diff --git a/packages/HighFive/.github/workflows/ci.yml b/packages/HighFive/.github/workflows/ci.yml index e7f5fca10b9bf26ceb00276ec3dc749e11b0b94c..8730ed4104d319675090a633f66b18041631269a 100644 --- a/packages/HighFive/.github/workflows/ci.yml +++ b/packages/HighFive/.github/workflows/ci.yml @@ -80,7 +80,9 @@ jobs: - name: Test working-directory: ${{github.workspace}}/build - run: ctest -j2 --output-on-failure -C $BUILD_TYPE + run: | + ctest -j2 --output-on-failure -C $BUILD_TYPE + ! 
ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG # Job testing several versions of hdf5 @@ -89,7 +91,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - hdf5_version : [ hdf5-1_8_23, hdf5-1_10_11, hdf5-1_12_2, hdf5-1_14_3 ] + hdf5_version : [ hdf5-1_8_23, hdf5-1_10_11, hdf5-1_12_3, hdf5-1_14_3 ] steps: - uses: actions/checkout@v3 @@ -118,7 +120,9 @@ jobs: - name: Test working-directory: ${{github.workspace}}/build - run: ctest -j2 --output-on-failure -C $BUILD_TYPE + run: | + ctest -j2 --output-on-failure -C $BUILD_TYPE + ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG - name: Examples @@ -156,7 +160,9 @@ jobs: - name: Test working-directory: ${{github.workspace}}/build - run: ctest -j2 --output-on-failure -C $BUILD_TYPE + run: | + ctest -j2 --output-on-failure -C $BUILD_TYPE + ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG - name: Examples working-directory: ${{github.workspace}}/build/src/examples @@ -199,7 +205,9 @@ jobs: - name: Test working-directory: ${{github.workspace}}/build - run: ctest -j2 --output-on-failure -C $BUILD_TYPE + run: | + ctest -j2 --output-on-failure -C $BUILD_TYPE + ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG - name: Examples working-directory: ${{github.workspace}}/build/src/examples @@ -295,7 +303,9 @@ jobs: - name: Test working-directory: ${{github.workspace}}/build - run: ctest -j2 --output-on-failure -C $BUILD_TYPE + run: | + ctest -j2 --output-on-failure -C $BUILD_TYPE + ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG - name: Examples working-directory: ${{github.workspace}}/build/src/examples @@ -348,4 +358,6 @@ jobs: - name: Test working-directory: ${{github.workspace}}/build shell: bash -l {0} - run: ctest -j2 --output-on-failure -C $BUILD_TYPE + run: | + ctest -j2 --output-on-failure -C $BUILD_TYPE + ! ctest --verbose -C $BUILD_TYPE | grep HDF5-DIAG diff --git a/packages/HighFive/.gitrepo b/packages/HighFive/.gitrepo index 14b8f5fba715beda2d02da0beb4c2e267ac16373..7d436d3458ed5f6d35f34ecd46f1c0975d9a2352 100644 --- a/packages/HighFive/.gitrepo +++ b/packages/HighFive/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = git@github.com:BlueBrain/HighFive.git branch = master - commit = 88fcc898970f93a63be8e25760d6b9f33589690f - parent = 88cd46aaec511360705df0fc7e577098877862d0 + commit = 1299c556a992bea8aa03c3b75cca50560a5078b6 + parent = ce26d02b1f92356362dc4daede0babf8d4ac23b6 method = merge cmdver = 0.4.6 diff --git a/packages/HighFive/CMake/HighFiveWarnings.cmake b/packages/HighFive/CMake/HighFiveWarnings.cmake index 8e8ec22019dd3d8f9d46668218db2c7e39994539..16896b6489367481efdf00142df7e3fee7d0ec18 100644 --- a/packages/HighFive/CMake/HighFiveWarnings.cmake +++ b/packages/HighFive/CMake/HighFiveWarnings.cmake @@ -22,7 +22,6 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" -Wformat=2 -Wconversion -Wsign-conversion - -Wno-error=deprecated-declarations ) if(NOT CMAKE_CXX_COMPILER_ID MATCHES "Intel") @@ -33,4 +32,12 @@ if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" -Wdouble-promotion ) endif() + + if(HIGHFIVE_HAS_WERROR) + target_compile_options(HighFiveWarnings + INTERFACE + -Werror + -Wno-error=deprecated-declarations + ) + endif() endif() diff --git a/packages/HighFive/CMakeLists.txt b/packages/HighFive/CMakeLists.txt index af274d9e25ea0ba40cfbd0407531db449be95f67..d592f2d66db1e448fbdf94cb24237070a120ee3a 100644 --- a/packages/HighFive/CMakeLists.txt +++ b/packages/HighFive/CMakeLists.txt @@ -37,6 +37,7 @@ option(HIGHFIVE_BUILD_DOCS "Enable documentation building" ON) option(HIGHFIVE_VERBOSE "Set logging level to verbose." 
OFF)
 option(HIGHFIVE_GLIBCXX_ASSERTIONS "Enable bounds check for STL." OFF)
 option(HIGHFIVE_HAS_CONCEPTS "Print readable compiler errors w/ C++20 concepts" ON)
+option(HIGHFIVE_HAS_WERROR "Convert warnings to errors." OFF)
 
 # Controls if HighFive classes are friends of each other.
 #
diff --git a/packages/HighFive/doc/developer_guide.md b/packages/HighFive/doc/developer_guide.md
index 3017289b5172bbb443829da0daf6bcaa04413e54..fc388f3b5f7449dfa110a74e170b951ffec2dd21 100644
--- a/packages/HighFive/doc/developer_guide.md
+++ b/packages/HighFive/doc/developer_guide.md
@@ -91,3 +91,134 @@ release. Once this is done perform a final round of updates:
 * Update BlueBrain Spack recipe to use the archive and not the Git commit.
 * Update the upstream Spack recipe.
 
+## Writing Tests
+### Generate Multi-Dimensional Test Data
+Input arrays of any dimension and type can be generated using the template
+class `DataGenerator`. For example:
+```
+auto dims = std::vector<size_t>{4, 2};
+auto values = testing::DataGenerator<std::vector<std::array<double, 2>>>::create(dims);
+```
+This generates an `std::vector<std::array<double, 2>>` initialized with
+suitable values.
+
+If "suitable" isn't specific enough, one can specify a callback:
+```
+auto callback = [](const std::vector<size_t>& indices) {
+    return 42.0;
+};
+
+auto values = testing::DataGenerator<std::vector<double>>::create(dims, callback);
+```
+
+The `dims` can be generated via `testing::DataGenerator::default_dims` or by
+using `testing::DataGenerator::sanitize_dims`. Remember that certain
+containers are fixed-size and that we often compute the number of elements by
+multiplying the dims.
+
+### Generate Scalar Test Data
+To generate a single "suitable" element use the template class
+`DefaultValues`, e.g.
+```
+auto default_values = testing::DefaultValues<double>();
+auto x = default_values(indices);
+```
+
+### Accessing Elements
+To access a particular element from an unknown container use the following trait:
+```
+using trait = testing::ContainerTraits<std::vector<std::array<int, 2>>>;
+// auto x = values[1][0];
+auto x = trait::get(values, {1, 0});
+
+// values[1][0] = 42.0;
+trait::set(values, {1, 0}, 42.0);
+```
+
+### Utilities For Multi-Dimensional Arrays
+Use `testing::DataGenerator::allocate` to allocate an array (without filling
+it) and `testing::copy` to copy an array from one type to another. There are
+`testing::ravel`, `testing::unravel` and `testing::flat_size` to compute the
+position in a flat array from a multi-dimensional index, the reverse, and the
+number of elements in the multi-dimensional array.
+
+### Deduplicating DataSet and Attribute
+Due to how HighFive is written, testing `DataSet` and `Attribute` often
+requires duplicating the entire test code, because somewhere a `createDataSet`
+must be replaced with `createAttribute`. Use `testing::AttributeCreateTraits`
+and `testing::DataSetCreateTraits`. For example,
+```
+template<class CreateTraits>
+void check_write(...) {
+    // Same as one of:
+    //   file.createDataSet(name, values);
+    //   file.createAttribute(name, values);
+    CreateTraits::create(file, name, values);
+}
+```
+
+### Test Organization
+#### Multi-Dimensional Arrays
+All tests for reading/writing whole multi-dimensional arrays to datasets or
+attributes belong in `tests/unit/tests_high_five_multi_dimensional.cpp`. This
+includes write/read cycles; checking all the generic edge cases, e.g. empty
+arrays and mismatching sizes; and checking non-reallocation.
+
+Read/write cycles are implemented as two distinct checks: one for writing and
+another for reading. When checking writing, we read with a "trusted"
+multi-dimensional array (a nested `std::vector`), and vice versa when checking
+reading. This matters because certain bugs, like writing a column-major array
+as if it were row-major, can't be caught if one reads it back into a
+column-major array.
+
+Remember, `std::vector<bool>` is very different from all other `std::vector`s.
+
+Every container `template<class T> C;` should at least be checked with all of
+the following `T`s that are supported by the container: `bool`, `double`,
+`std::string`, `std::vector`, `std::array`. The reason is that `bool` and
+`std::string` are special, `double` is just a POD, `std::vector` requires
+dynamic memory allocation, and `std::array` is statically allocated.
+
+Similarly, each container should be put inside an `std::vector` and an
+`std::array`.
+
+#### Scalar Data Set
+Write-read cycles for scalar values should be implemented in
+`tests/unit/tests_high_five_scalar.cpp`.
+
+#### Data Types
+Unit tests checking the `DataType` API go in
+`tests/unit/tests_high_data_type.cpp`.
+
+#### Selections
+Anything selection-related goes in `tests/unit/test_high_five_selection.cpp`.
+This includes things like `ElementSet` and `HyperSlab`.
+
+#### Strings
+Regular write-read cycles for strings are performed along with the other types,
+see above. This should cover compatibility of `std::string` with all
+containers. However, additional testing is required, e.g. character set,
+padding, fixed vs. variable length. These all go in
+`tests/unit/test_high_five_string.cpp`.
+
+#### Specific Tests For Optional Containers
+If containers, e.g. `Eigen::Matrix`, require special checks, those go in files
+called `tests/unit/test_high_five_*.cpp`, where `*` is `eigen` for Eigen.
+
+#### Memory Layout Assumptions
+In HighFive we make assumptions about the memory layout of certain types. For
+example, we assume that
+```
+auto array = std::vector<std::array<double, 2>>(n);
+double* ptr = (double*) array.data();
+```
+is a sensible thing to do. We assume something similar about `bool` and
+`details::Boolean`. These types of tests go into
+`tests/unit/tests_high_five_memory_layout.cpp`.
+
+#### H5Easy
+Anything `H5Easy` related goes in files with the appropriate name.
+
+#### Everything Else
+What's left goes in `tests/unit/test_high_five_base.cpp`. This covers opening
+files, groups, datasets or attributes; checking certain pathological edge
+cases; etc.
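+
+### Combining The Helpers
+As a rough, untested sketch of how the helpers above compose (only the calls
+shown in the previous sections are used; the `REQUIRE` assertion assumes the
+Catch2 macros available in the unit tests):
+```
+template <class CreateTraits>
+void check_constant_write(HighFive::File& file, const std::string& name) {
+    auto dims = std::vector<size_t>{4, 2};
+    auto callback = [](const std::vector<size_t>& /* indices */) {
+        return 42.0;
+    };
+
+    using container_t = std::vector<std::vector<double>>;
+    auto values = testing::DataGenerator<container_t>::create(dims, callback);
+
+    // Creates a DataSet or an Attribute called `name`, depending on `CreateTraits`.
+    CreateTraits::create(file, name, values);
+
+    // Container-agnostic element access, i.e. `values[1][0]`.
+    REQUIRE(testing::ContainerTraits<container_t>::get(values, {1, 0}) == 42.0);
+}
+```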
diff --git a/packages/HighFive/doc/doxygen-awesome-css/doxygen-awesome.css b/packages/HighFive/doc/doxygen-awesome-css/doxygen-awesome.css index 08238977a627938adbae1ea06fb174b4ecaafde3..ac7f0608ec69a90204ed8a599c159d2c5c8054e4 100644 --- a/packages/HighFive/doc/doxygen-awesome-css/doxygen-awesome.css +++ b/packages/HighFive/doc/doxygen-awesome-css/doxygen-awesome.css @@ -80,21 +80,21 @@ html { --toc-max-height: calc(100vh - 2 * var(--spacing-medium) - 85px); /* colors for various content boxes: @warning, @note, @deprecated @bug */ - --warning-color: #f8d1cc; - --warning-color-dark: #b61825; - --warning-color-darker: #75070f; - --note-color: #faf3d8; - --note-color-dark: #f3a600; - --note-color-darker: #5f4204; - --todo-color: #e4f3ff; - --todo-color-dark: #1879C4; - --todo-color-darker: #274a5c; + --warning-color: #faf3d8; + --warning-color-dark: #f3a600; + --warning-color-darker: #5f4204; + --note-color: #e4f3ff; + --note-color-dark: #1879C4; + --note-color-darker: #274a5c; + --todo-color: #e4dafd; + --todo-color-dark: #5b2bdd; + --todo-color-darker: #2a0d72; --deprecated-color: #ecf0f3; --deprecated-color-dark: #5b6269; --deprecated-color-darker: #43454a; - --bug-color: #e4dafd; - --bug-color-dark: #5b2bdd; - --bug-color-darker: #2a0d72; + --bug-color: #f8d1cc; + --bug-color-dark: #b61825; + --bug-color-darker: #75070f; --invariant-color: #d8f1e3; --invariant-color-dark: #44b86f; --invariant-color-darker: #265532; @@ -169,6 +169,8 @@ html { --webkit-scrollbar-size: 7px; --webkit-scrollbar-padding: 4px; --webkit-scrollbar-color: var(--separator-color); + + --animation-duration: .12s } @media screen and (max-width: 767px) { @@ -208,21 +210,21 @@ html { --blockquote-background: #222325; --blockquote-foreground: #7e8c92; - --warning-color: #2e1917; - --warning-color-dark: #ad2617; - --warning-color-darker: #f5b1aa; - --note-color: #3b2e04; - --note-color-dark: #f1b602; - --note-color-darker: #ceb670; - --todo-color: #163750; - --todo-color-dark: #1982D2; - --todo-color-darker: #dcf0fa; + --warning-color: #3b2e04; + --warning-color-dark: #f1b602; + --warning-color-darker: #ceb670; + --note-color: #163750; + --note-color-dark: #1982D2; + --note-color-darker: #dcf0fa; + --todo-color: #2a2536; + --todo-color-dark: #7661b3; + --todo-color-darker: #ae9ed6; --deprecated-color: #2e323b; --deprecated-color-dark: #738396; --deprecated-color-darker: #abb0bd; - --bug-color: #2a2536; - --bug-color-dark: #7661b3; - --bug-color-darker: #ae9ed6; + --bug-color: #2e1917; + --bug-color-dark: #ad2617; + --bug-color-darker: #f5b1aa; --invariant-color: #303a35; --invariant-color-dark: #76ce96; --invariant-color-darker: #cceed5; @@ -269,21 +271,21 @@ html.dark-mode { --blockquote-background: #222325; --blockquote-foreground: #7e8c92; - --warning-color: #2e1917; - --warning-color-dark: #ad2617; - --warning-color-darker: #f5b1aa; - --note-color: #3b2e04; - --note-color-dark: #f1b602; - --note-color-darker: #ceb670; - --todo-color: #163750; - --todo-color-dark: #1982D2; - --todo-color-darker: #dcf0fa; + --warning-color: #3b2e04; + --warning-color-dark: #f1b602; + --warning-color-darker: #ceb670; + --note-color: #163750; + --note-color-dark: #1982D2; + --note-color-darker: #dcf0fa; + --todo-color: #2a2536; + --todo-color-dark: #7661b3; + --todo-color-darker: #ae9ed6; --deprecated-color: #2e323b; --deprecated-color-dark: #738396; --deprecated-color-darker: #abb0bd; - --bug-color: #2a2536; - --bug-color-dark: #7661b3; - --bug-color-darker: #ae9ed6; + --bug-color: #2e1917; + --bug-color-dark: #ad2617; + 
--bug-color-darker: #f5b1aa; --invariant-color: #303a35; --invariant-color-dark: #76ce96; --invariant-color-darker: #cceed5; @@ -316,7 +318,7 @@ body, table, div, p, dl, #nav-tree .label, .title, } h1, h2, h3, h4, h5 { - margin-top: .9em; + margin-top: 1em; font-weight: 600; line-height: initial; } @@ -1174,7 +1176,7 @@ div.toc li a.aboveActive { margin-right: var(--spacing-small); margin-bottom: calc(var(--navigation-font-size) / 4); transform: rotate(-90deg); - transition: transform 0.25s ease-out; + transition: transform var(--animation-duration) ease-out; } div.contents .toc.interactive.open > h3::before { @@ -1231,9 +1233,13 @@ div.fragment, pre.fragment { .contents > div.fragment, .textblock > div.fragment, .textblock > pre.fragment, + .textblock > .tabbed > ul > li > div.fragment, + .textblock > .tabbed > ul > li > pre.fragment, .contents > .doxygen-awesome-fragment-wrapper > div.fragment, .textblock > .doxygen-awesome-fragment-wrapper > div.fragment, - .textblock > .doxygen-awesome-fragment-wrapper > pre.fragment { + .textblock > .doxygen-awesome-fragment-wrapper > pre.fragment, + .textblock > .tabbed > ul > li > .doxygen-awesome-fragment-wrapper > div.fragment, + .textblock > .tabbed > ul > li > .doxygen-awesome-fragment-wrapper > pre.fragment { margin: var(--spacing-medium) calc(0px - var(--spacing-large)); border-radius: 0; border-left: 0; @@ -1323,8 +1329,9 @@ div.fragment span.lineno a { color: var(--fragment-link) !important; } -div.fragment .line:first-child .lineno { +div.fragment > .line:first-child .lineno { box-shadow: -999999px 0px 0 999999px var(--fragment-linenumber-background), -999998px 0px 0 999999px var(--fragment-linenumber-border); + background-color: var(--fragment-linenumber-background) !important; } div.line { @@ -1383,8 +1390,8 @@ dl.todo { color: var(--todo-color-darker); } -dl.todo dt { - color: var(--todo-color-dark); +dl.todo dt a { + color: var(--todo-color-dark) !important; } dl.bug dt a { @@ -1866,7 +1873,7 @@ div.dynheader img[src="closed.png"] { display: block; float: left; margin-left: -10px; - transition: transform 0.25s ease-out; + transition: transform var(--animation-duration) ease-out; } table.memberdecls img { @@ -2344,7 +2351,7 @@ doxygen-awesome-dark-mode-toggle { } doxygen-awesome-dark-mode-toggle > svg { - transition: transform .1s ease-in-out; + transition: transform var(--animation-duration) ease-in-out; } doxygen-awesome-dark-mode-toggle:active > svg { @@ -2429,7 +2436,7 @@ a.anchorlink { text-decoration: none; opacity: .15; display: none; - transition: opacity .1s ease-in-out, color .1s ease-in-out; + transition: opacity var(--animation-duration) ease-in-out, color var(--animation-duration) ease-in-out; } a.anchorlink svg { @@ -2453,15 +2460,10 @@ h2:hover a.anchorlink, h1:hover a.anchorlink, h3:hover a.anchorlink, h4:hover a. Optional tab feature */ -.tabbed { - margin: var(--spacing-medium) auto; -} - .tabbed ul { padding-inline-start: 0px; margin: 0; padding: var(--spacing-small) 0; - border-bottom: 1px solid var(--separator-color); } .tabbed li { @@ -2484,24 +2486,46 @@ h2:hover a.anchorlink, h1:hover a.anchorlink, h3:hover a.anchorlink, h4:hover a. 
flex-direction: row; } +@media screen and (max-width: 767px) { + .tabs-overview-container { + margin: 0 calc(0px - var(--spacing-large)); + } + .tabs-overview { + padding: 0 var(--spacing-large) + } +} + .tabs-overview button.tab-button { color: var(--page-foreground-color); margin: 0; border: none; background: transparent; - padding: var(--spacing-small) 0; + padding: calc(var(--spacing-large) / 2) 0; display: inline-block; font-size: var(--page-font-size); cursor: pointer; box-shadow: 0 1px 0 0 var(--separator-color); position: relative; + + -webkit-tap-highlight-color: transparent; +} + +.tabs-overview button.tab-button .tab-title::before { + display: block; + content: attr(title); + font-weight: 600; + height: 0; + overflow: hidden; + visibility: hidden; } .tabs-overview button.tab-button .tab-title { float: left; white-space: nowrap; - padding: var(--spacing-small) var(--spacing-large); + font-weight: normal; + padding: calc(var(--spacing-large) / 2) var(--spacing-large); border-radius: var(--border-radius-medium); + transition: background-color var(--animation-duration) ease-in-out, font-weight var(--animation-duration) ease-in-out; } .tabs-overview button.tab-button:not(:last-child) .tab-title { @@ -2513,18 +2537,133 @@ h2:hover a.anchorlink, h1:hover a.anchorlink, h3:hover a.anchorlink, h4:hover a. box-shadow: none; } -.tabs-overview button.tab-button.active { - color: var(--primary-color); +.tabs-overview button.tab-button.active .tab-title { + font-weight: 600; } -.tabs-overview button.tab-button.active::after { +.tabs-overview button.tab-button::after { content: ''; display: block; position: absolute; - left: 0px; + left: 0; bottom: 0; - right: 0px; - height: 3px; + right: 0; + height: 0; + width: 0%; + margin: 0 auto; border-radius: var(--border-radius-small) var(--border-radius-small) 0 0; background-color: var(--primary-color); + transition: width var(--animation-duration) ease-in-out, height var(--animation-duration) ease-in-out; +} + +.tabs-overview button.tab-button.active::after { + width: 100%; + box-sizing: border-box; + height: 3px; +} + + +/* + Navigation Buttons +*/ + +.section_buttons:not(:empty) { + margin-top: calc(var(--spacing-large) * 3); +} + +.section_buttons table.markdownTable { + display: block; + width: 100%; +} + +.section_buttons table.markdownTable tbody { + display: table !important; + width: 100%; + box-shadow: none; + border-spacing: 10px; +} + +.section_buttons table.markdownTable td { + padding: 0; +} + +.section_buttons table.markdownTable th { + display: none; +} + +.section_buttons table.markdownTable tr.markdownTableHead { + border: none; +} + +.section_buttons tr th, .section_buttons tr td { + background: none; + border: none; + padding: var(--spacing-large) 0 var(--spacing-small); +} + +.section_buttons a { + display: inline-block; + border: 1px solid var(--separator-color); + border-radius: var(--border-radius-medium); + color: var(--page-secondary-foreground-color) !important; + text-decoration: none; + transition: color var(--animation-duration) ease-in-out, background-color var(--animation-duration) ease-in-out; +} + +.section_buttons a:hover { + color: var(--page-foreground-color) !important; + background-color: var(--odd-color); +} + +.section_buttons tr td.markdownTableBodyLeft a { + padding: var(--spacing-medium) var(--spacing-large) var(--spacing-medium) calc(var(--spacing-large) / 2); +} + +.section_buttons tr td.markdownTableBodyRight a { + padding: var(--spacing-medium) calc(var(--spacing-large) / 2) var(--spacing-medium) 
var(--spacing-large); +} + +.section_buttons tr td.markdownTableBodyLeft a::before, +.section_buttons tr td.markdownTableBodyRight a::after { + color: var(--page-secondary-foreground-color) !important; + display: inline-block; + transition: color .08s ease-in-out, transform .09s ease-in-out; +} + +.section_buttons tr td.markdownTableBodyLeft a::before { + content: '〈'; + padding-right: var(--spacing-large); +} + + +.section_buttons tr td.markdownTableBodyRight a::after { + content: '〉'; + padding-left: var(--spacing-large); +} + + +.section_buttons tr td.markdownTableBodyLeft a:hover::before { + color: var(--page-foreground-color) !important; + transform: translateX(-3px); +} + +.section_buttons tr td.markdownTableBodyRight a:hover::after { + color: var(--page-foreground-color) !important; + transform: translateX(3px); +} + +@media screen and (max-width: 450px) { + .section_buttons a { + width: 100%; + box-sizing: border-box; + } + + .section_buttons tr td:nth-of-type(1).markdownTableBodyLeft a { + border-radius: var(--border-radius-medium) 0 0 var(--border-radius-medium); + border-right: none; + } + + .section_buttons tr td:nth-of-type(2).markdownTableBodyRight a { + border-radius: 0 var(--border-radius-medium) var(--border-radius-medium) 0; + } } diff --git a/packages/HighFive/doc/installation.md b/packages/HighFive/doc/installation.md index 41521bba5e57212e7a7cd08b9423c5fcea655ddd..e9c5b2e6e1884e9e2ea7cc99a88335b2020b6040 100644 --- a/packages/HighFive/doc/installation.md +++ b/packages/HighFive/doc/installation.md @@ -211,10 +211,13 @@ this you type Okay, on to configure, compile and install. The CMake commands are - cmake -DCMAKE_INSTALL_PREFIX=../highfive-v2.7.1 -DHIGHFIVE_USE_BOOST=Off -B build . + cmake -DCMAKE_INSTALL_PREFIX=build/install -DHIGHFIVE_USE_BOOST=Off -B build . cmake --build build --parallel cmake --install build +Later you'd pass the installation directory, i.e. `${PWD}/build/install`, to +`CMAKE_PREFIX_PATH`. + ### Confirming It Works We again need a dummy file called `dummy.cpp` with the following contents diff --git a/packages/HighFive/include/highfive/H5DataSpace.hpp b/packages/HighFive/include/highfive/H5DataSpace.hpp index 95d04dbbbd9052e291483c46d362295ba5c72583..7c7c5860abf93239befb9c77d791e617c9c27136 100644 --- a/packages/HighFive/include/highfive/H5DataSpace.hpp +++ b/packages/HighFive/include/highfive/H5DataSpace.hpp @@ -145,6 +145,24 @@ class DataSpace: public Object { /// \since 1.3 explicit DataSpace(DataspaceType space_type); + /// \brief Create a scalar DataSpace. + /// + /// \code{.cpp} + /// auto dataspace = DataSpace::Scalar(); + /// \endcode + /// + /// \since 2.9 + static DataSpace Scalar(); + + /// \brief Create a null DataSpace. + /// + /// \code{.cpp} + /// auto dataspace = DataSpace::Null(); + /// \endcode + /// + /// \since 2.9 + static DataSpace Null(); + /// \brief Create a copy of the DataSpace which will have different id. 
/// /// \code{.cpp} diff --git a/packages/HighFive/include/highfive/H5DataType.hpp b/packages/HighFive/include/highfive/H5DataType.hpp index 886107961b87e32911702f3aaf3496acc8ce8c26..0d596965fea83fe2a97820be62249254c140af5d 100644 --- a/packages/HighFive/include/highfive/H5DataType.hpp +++ b/packages/HighFive/include/highfive/H5DataType.hpp @@ -19,6 +19,9 @@ #include "bits/string_padding.hpp" #include "H5PropertyList.hpp" +#include "bits/h5_wrapper.hpp" +#include "bits/h5t_wrapper.hpp" + namespace HighFive { @@ -70,7 +73,7 @@ class DataType: public Object { /// \brief Returns the length (in bytes) of this type elements /// /// Notice that the size of variable length sequences may have limited applicability - /// given that it refers to the size of the control structure. For info see + /// given that it refers to the size of the control structure. For info see /// https://support.hdfgroup.org/HDF5/doc/RM/RM_H5T.html#Datatype-GetSize size_t getSize() const; @@ -96,7 +99,6 @@ class DataType: public Object { /// /// \brief Check the DataType was default constructed. - /// Such value might represent auto-detection of the datatype from a buffer /// bool empty() const noexcept; @@ -152,7 +154,7 @@ class FixedLengthStringType: public StringType { /// requires `4*n` bytes. /// /// The string padding is subtle, essentially it's just a hint. A - /// nullterminated string is guaranteed to have one `'\0'` which marks the + /// null-terminated string is guaranteed to have one `'\0'` which marks the /// semantic end of the string. The length of the buffer must be at least /// `size` bytes regardless. HDF5 will read or write `size` bytes, /// irrespective of the when the `\0` occurs. @@ -236,21 +238,16 @@ class CompoundType: public DataType { ss << "hid " << _hid << " does not refer to a compound data type"; throw DataTypeException(ss.str()); } - int result = H5Tget_nmembers(_hid); - if (result < 0) { - throw DataTypeException("Could not get members of compound datatype"); - } - size_t n_members = static_cast<size_t>(result); + size_t n_members = static_cast<size_t>(detail::h5t_get_nmembers(_hid)); members.reserve(n_members); for (unsigned i = 0; i < n_members; i++) { - char* name = H5Tget_member_name(_hid, i); - size_t offset = H5Tget_member_offset(_hid, i); - hid_t member_hid = H5Tget_member_type(_hid, i); + char* name = detail::h5t_get_member_name(_hid, i); + size_t offset = detail::h5t_get_member_offset(_hid, i); + hid_t member_hid = detail::h5t_get_member_type(_hid, i); DataType member_type{member_hid}; members.emplace_back(std::string(name), member_type, offset); - if (H5free_memory(name) < 0) { - throw DataTypeException("Could not free names from the compound datatype"); - } + + detail::h5_free_memory(name); } } diff --git a/packages/HighFive/include/highfive/H5PropertyList.hpp b/packages/HighFive/include/highfive/H5PropertyList.hpp index 53b3c4a1374bdd9184163b209ae27bc84fedb5f3..2368f5ca935580f9b67fc2a48c0045d66d114255 100644 --- a/packages/HighFive/include/highfive/H5PropertyList.hpp +++ b/packages/HighFive/include/highfive/H5PropertyList.hpp @@ -179,6 +179,18 @@ class PropertyList: public PropertyListBase { return static_cast<const PropertyList<T>&>(PropertyListBase::Default()); } + /// Return a property list created via a call to `H5Pcreate`. + /// + /// An empty property is needed when one wants `getId()` to immediately + /// point at a valid HID. This is important when interfacing directly with + /// HDF5 to set properties that haven't been wrapped by HighFive. 
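+    ///
+    /// A sketch of the intended use, assuming a raw HDF5 property call that
+    /// HighFive doesn't wrap (here the file close degree):
+    /// \code{.cpp}
+    /// auto fapl = FileAccessProps::Empty();
+    /// H5Pset_fclose_degree(fapl.getId(), H5F_CLOSE_STRONG);
+    /// \endcode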
+ static PropertyList<T> Empty() { + auto plist = PropertyList<T>(); + plist._initializeIfNeeded(); + + return plist; + } + protected: void _initializeIfNeeded(); }; @@ -229,6 +241,8 @@ class MPIOFileAccess { MPI_Info _info; }; + +#if H5_VERSION_GE(1, 10, 0) /// /// \brief Use collective MPI-IO for metadata read and write. /// @@ -306,6 +320,7 @@ class MPIOCollectiveMetadataWrite { bool collective_; }; +#endif #endif /// diff --git a/packages/HighFive/include/highfive/H5Utility.hpp b/packages/HighFive/include/highfive/H5Utility.hpp index 64ac1e5c890b15899a051eed2d3cb9450286fb02..7a17e14b0a0f0042ff9b5b60d65e5596685b0971 100644 --- a/packages/HighFive/include/highfive/H5Utility.hpp +++ b/packages/HighFive/include/highfive/H5Utility.hpp @@ -9,11 +9,11 @@ #pragma once -#include <H5Epublic.h> #include <functional> #include <string> #include <iostream> +#include "bits/h5e_wrapper.hpp" #include "bits/H5Friends.hpp" namespace HighFive { @@ -25,13 +25,15 @@ class SilenceHDF5 { public: inline SilenceHDF5(bool enable = true) : _client_data(nullptr) { - H5Eget_auto2(H5E_DEFAULT, &_func, &_client_data); - if (enable) - H5Eset_auto2(H5E_DEFAULT, NULL, NULL); + detail::nothrow::h5e_get_auto2(H5E_DEFAULT, &_func, &_client_data); + + if (enable) { + detail::nothrow::h5e_set_auto2(H5E_DEFAULT, nullptr, nullptr); + } } inline ~SilenceHDF5() { - H5Eset_auto2(H5E_DEFAULT, _func, _client_data); + detail::nothrow::h5e_set_auto2(H5E_DEFAULT, _func, _client_data); } private: @@ -119,7 +121,8 @@ inline void default_logging_callback(LogSeverity severity, const std::string& message, const std::string& file, int line) { - std::clog << file << ": " << line << " :: " << to_string(severity) << message << std::endl; + std::clog << file << ": " << line << " [" << to_string(severity) << "] " << message + << std::endl; } /// \brief Obtain a reference to the logger used by HighFive. 
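The reworked `SilenceHDF5` above keeps its RAII contract: the constructor saves the
current HDF5 error handler and optionally mutes it, and the destructor restores it. A
minimal usage sketch (the surrounding function is illustrative only):
```cpp
#include <highfive/H5Utility.hpp>

void probe_without_noise() {
    HighFive::SilenceHDF5 silencer;  // mute HDF5's error-stack printing in this scope
    // ... attempt operations that are allowed to fail quietly ...
}  // destructor restores the previous error handler
```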
diff --git a/packages/HighFive/include/highfive/bits/H5Annotate_traits_misc.hpp b/packages/HighFive/include/highfive/bits/H5Annotate_traits_misc.hpp index 85d2798fe284f53662f607dc58949dcecfca73fa..bf2be8a45369a85c98e5fd499f1850460d2e06c0 100644 --- a/packages/HighFive/include/highfive/bits/H5Annotate_traits_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5Annotate_traits_misc.hpp @@ -16,6 +16,7 @@ #include "H5Attribute_misc.hpp" #include "H5Iterables_misc.hpp" +#include "h5a_wrapper.hpp" namespace HighFive { @@ -23,16 +24,12 @@ template <typename Derivate> inline Attribute AnnotateTraits<Derivate>::createAttribute(const std::string& attribute_name, const DataSpace& space, const DataType& dtype) { - auto attr_id = H5Acreate2(static_cast<Derivate*>(this)->getId(), - attribute_name.c_str(), - dtype.getId(), - space.getId(), - H5P_DEFAULT, - H5P_DEFAULT); - if (attr_id < 0) { - HDF5ErrMapper::ToException<AttributeException>( - std::string("Unable to create the attribute \"") + attribute_name + "\":"); - } + auto attr_id = detail::h5a_create2(static_cast<Derivate*>(this)->getId(), + attribute_name.c_str(), + dtype.getId(), + space.getId(), + H5P_DEFAULT, + H5P_DEFAULT); return detail::make_attribute(attr_id); } @@ -57,30 +54,20 @@ inline Attribute AnnotateTraits<Derivate>::createAttribute(const std::string& at template <typename Derivate> inline void AnnotateTraits<Derivate>::deleteAttribute(const std::string& attribute_name) { - if (H5Adelete(static_cast<const Derivate*>(this)->getId(), attribute_name.c_str()) < 0) { - HDF5ErrMapper::ToException<AttributeException>( - std::string("Unable to delete attribute \"") + attribute_name + "\":"); - } + detail::h5a_delete(static_cast<const Derivate*>(this)->getId(), attribute_name.c_str()); } template <typename Derivate> inline Attribute AnnotateTraits<Derivate>::getAttribute(const std::string& attribute_name) const { - const auto attr_id = - H5Aopen(static_cast<const Derivate*>(this)->getId(), attribute_name.c_str(), H5P_DEFAULT); - if (attr_id < 0) { - HDF5ErrMapper::ToException<AttributeException>( - std::string("Unable to open the attribute \"") + attribute_name + "\":"); - } + const auto attr_id = detail::h5a_open(static_cast<const Derivate*>(this)->getId(), + attribute_name.c_str(), + H5P_DEFAULT); return detail::make_attribute(attr_id); } template <typename Derivate> inline size_t AnnotateTraits<Derivate>::getNumberAttributes() const { - int res = H5Aget_num_attrs(static_cast<const Derivate*>(this)->getId()); - if (res < 0) { - HDF5ErrMapper::ToException<AttributeException>( - std::string("Unable to count attributes in existing group or file")); - } + int res = detail::h5a_get_num_attrs(static_cast<const Derivate*>(this)->getId()); return static_cast<size_t>(res); } @@ -92,27 +79,19 @@ inline std::vector<std::string> AnnotateTraits<Derivate>::listAttributeNames() c size_t num_objs = getNumberAttributes(); names.reserve(num_objs); - if (H5Aiterate2(static_cast<const Derivate*>(this)->getId(), - H5_INDEX_NAME, - H5_ITER_INC, - NULL, - &details::internal_high_five_iterate<H5A_info_t>, - static_cast<void*>(&iterateData)) < 0) { - HDF5ErrMapper::ToException<AttributeException>( - std::string("Unable to list attributes in group")); - } + detail::h5a_iterate2(static_cast<const Derivate*>(this)->getId(), + H5_INDEX_NAME, + H5_ITER_INC, + nullptr, + &details::internal_high_five_iterate<H5A_info_t>, + static_cast<void*>(&iterateData)); return names; } template <typename Derivate> inline bool AnnotateTraits<Derivate>::hasAttribute(const 
std::string& attr_name) const { - int res = H5Aexists(static_cast<const Derivate*>(this)->getId(), attr_name.c_str()); - if (res < 0) { - HDF5ErrMapper::ToException<AttributeException>( - std::string("Unable to check for attribute in group")); - } - return res; + return detail::h5a_exists(static_cast<const Derivate*>(this)->getId(), attr_name.c_str()) > 0; } } // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/H5Attribute_misc.hpp b/packages/HighFive/include/highfive/bits/H5Attribute_misc.hpp index 6516788297819c9f6e318bd588df3ac668ac63fd..33295d40e55a0e8c1ac111699114eafaf4fd0b9c 100644 --- a/packages/HighFive/include/highfive/bits/H5Attribute_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5Attribute_misc.hpp @@ -14,36 +14,35 @@ #include <sstream> #include <string> -#include <H5Apublic.h> #include <H5Ppublic.h> #include "../H5DataSpace.hpp" #include "H5Converter_misc.hpp" #include "H5ReadWrite_misc.hpp" #include "H5Utils.hpp" +#include "h5a_wrapper.hpp" +#include "h5d_wrapper.hpp" namespace HighFive { inline std::string Attribute::getName() const { return details::get_name( - [&](char* buffer, size_t length) { return H5Aget_name(_hid, length, buffer); }); + [&](char* buffer, size_t length) { return detail::h5a_get_name(_hid, length, buffer); }); } inline size_t Attribute::getStorageSize() const { - return static_cast<size_t>(H5Aget_storage_size(_hid)); + return static_cast<size_t>(detail::h5a_get_storage_size(_hid)); } inline DataType Attribute::getDataType() const { DataType res; - res._hid = H5Aget_type(_hid); + res._hid = detail::h5a_get_type(_hid); return res; } inline DataSpace Attribute::getSpace() const { DataSpace space; - if ((space._hid = H5Aget_space(_hid)) < 0) { - HDF5ErrMapper::ToException<AttributeException>("Unable to get DataSpace out of Attribute"); - } + space._hid = detail::h5a_get_space(_hid); return space; } @@ -69,7 +68,7 @@ inline void Attribute::read(T& array) const { if (!details::checkDimensions(mem_space, buffer_info.n_dimensions)) { std::ostringstream ss; - ss << "Impossible to read DataSet of dimensions " << mem_space.getNumberDimensions() + ss << "Impossible to read Attribute of dimensions " << mem_space.getNumberDimensions() << " into arrays of dimensions " << buffer_info.n_dimensions; throw DataSpaceException(ss.str()); } @@ -94,10 +93,10 @@ inline void Attribute::read(T& array) const { if (c == DataTypeClass::VarLen || t.isVariableStr()) { #if H5_VERSION_GE(1, 12, 0) // This one have been created in 1.12.0 - (void) H5Treclaim(t.getId(), mem_space.getId(), H5P_DEFAULT, r.getPointer()); + (void) detail::h5t_reclaim(t.getId(), mem_space.getId(), H5P_DEFAULT, r.getPointer()); #else // This one is deprecated since 1.12.0 - (void) H5Dvlen_reclaim(t.getId(), mem_space.getId(), H5P_DEFAULT, r.getPointer()); + (void) detail::h5d_vlen_reclaim(t.getId(), mem_space.getId(), H5P_DEFAULT, r.getPointer()); #endif } } @@ -107,9 +106,7 @@ inline void Attribute::read(T* array, const DataType& mem_datatype) const { static_assert(!std::is_const<T>::value, "read() requires a non-const structure to read data into"); - if (H5Aread(getId(), mem_datatype.getId(), static_cast<void*>(array)) < 0) { - HDF5ErrMapper::ToException<AttributeException>("Error during HDF5 Read: "); - } + detail::h5a_read(getId(), mem_datatype.getId(), static_cast<void*>(array)); } template <typename T> @@ -147,9 +144,7 @@ inline void Attribute::write(const T& buffer) { template <typename T> inline void Attribute::write_raw(const T* buffer, const DataType& mem_datatype) { - 
if (H5Awrite(getId(), mem_datatype.getId(), buffer) < 0) { - HDF5ErrMapper::ToException<DataSetException>("Error during HDF5 Write: "); - } + detail::h5a_write(getId(), mem_datatype.getId(), buffer); } template <typename T> diff --git a/packages/HighFive/include/highfive/bits/H5Converter_misc.hpp b/packages/HighFive/include/highfive/bits/H5Converter_misc.hpp index 00749d1b6d6157bc745a86005a7a80d6b195fbb3..ed387702fac981ef9b1169d0adca99b25f6cd6b5 100644 --- a/packages/HighFive/include/highfive/bits/H5Converter_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5Converter_misc.hpp @@ -179,9 +179,9 @@ struct StringBuffer { /// internal buffer as needed. /// /// The `length` is the length of the string in bytes. - void assign(char const* data, size_t length, StringPadding padding) { + void assign(char const* data, size_t length, StringPadding pad) { if (buffer.isVariableLengthString()) { - if (padding == StringPadding::NullTerminated) { + if (pad == StringPadding::NullTerminated) { buffer.variable_length_pointers[i] = data; } else { buffer.variable_length_buffer[i] = std::string(data, length); diff --git a/packages/HighFive/include/highfive/bits/H5DataSet_misc.hpp b/packages/HighFive/include/highfive/bits/H5DataSet_misc.hpp index 4411b4c0d42b40714f7e53269d8018e71577dfcb..4817fe00151f869dc6d352087a4d5cd713982365 100644 --- a/packages/HighFive/include/highfive/bits/H5DataSet_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5DataSet_misc.hpp @@ -14,26 +14,24 @@ #include <sstream> #include <string> -#include <H5Dpublic.h> #include <H5Ppublic.h> +#include "h5d_wrapper.hpp" #include "H5Utils.hpp" namespace HighFive { inline uint64_t DataSet::getStorageSize() const { - return H5Dget_storage_size(_hid); + return detail::h5d_get_storage_size(_hid); } inline DataType DataSet::getDataType() const { - return DataType(H5Dget_type(_hid)); + return DataType(detail::h5d_get_type(_hid)); } inline DataSpace DataSet::getSpace() const { DataSpace space; - if ((space._hid = H5Dget_space(_hid)) < 0) { - HDF5ErrMapper::ToException<DataSetException>("Unable to get DataSpace out of DataSet"); - } + space._hid = detail::h5d_get_space(_hid); return space; } @@ -42,11 +40,7 @@ inline DataSpace DataSet::getMemSpace() const { } inline uint64_t DataSet::getOffset() const { - uint64_t addr = H5Dget_offset(_hid); - if (addr == HADDR_UNDEF) { - HDF5ErrMapper::ToException<DataSetException>("Cannot get offset of DataSet."); - } - return addr; + return static_cast<uint64_t>(detail::h5d_get_offset(_hid)); } inline void DataSet::resize(const std::vector<size_t>& dims) { @@ -58,10 +52,7 @@ inline void DataSet::resize(const std::vector<size_t>& dims) { } std::vector<hsize_t> real_dims(dims.begin(), dims.end()); - - if (H5Dset_extent(getId(), real_dims.data()) < 0) { - HDF5ErrMapper::ToException<DataSetException>("Could not resize dataset."); - } + detail::h5d_set_extent(getId(), real_dims.data()); } } // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/H5DataType_misc.hpp b/packages/HighFive/include/highfive/bits/H5DataType_misc.hpp index 8535d617ab443405dbdd88353f52cf4850f8e7d1..e29c99b0ed30c8baf555e685ed5a6dc746c9fa6c 100644 --- a/packages/HighFive/include/highfive/bits/H5DataType_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5DataType_misc.hpp @@ -16,74 +16,13 @@ #endif #include <H5Ppublic.h> -#include <H5Tpublic.h> - -#ifdef H5_USE_HALF_FLOAT -#include <half.hpp> -#endif #include "H5Inspector_misc.hpp" +#include "h5t_wrapper.hpp" +#include "h5i_wrapper.hpp" namespace HighFive { 
-namespace detail { - -inline hid_t h5t_copy(hid_t original) { - auto copy = H5Tcopy(original); - if (copy == H5I_INVALID_HID) { - HDF5ErrMapper::ToException<DataTypeException>("Error copying datatype."); - } - - return copy; -} - -inline hsize_t h5t_get_size(hid_t hid) { - hsize_t size = H5Tget_size(hid); - if (size == 0) { - HDF5ErrMapper::ToException<DataTypeException>("Error getting size of datatype."); - } - - return size; -} - -inline H5T_cset_t h5t_get_cset(hid_t hid) { - auto cset = H5Tget_cset(hid); - if (cset == H5T_CSET_ERROR) { - HDF5ErrMapper::ToException<DataTypeException>("Error getting cset of datatype."); - } - - return cset; -} - -inline H5T_str_t h5t_get_strpad(hid_t hid) { - auto strpad = H5Tget_strpad(hid); - if (strpad == H5T_STR_ERROR) { - HDF5ErrMapper::ToException<DataTypeException>("Error getting strpad of datatype."); - } - - return strpad; -} - -inline void h5t_set_size(hid_t hid, hsize_t size) { - if (H5Tset_size(hid, size) < 0) { - HDF5ErrMapper::ToException<DataTypeException>("Error setting size of datatype."); - } -} - -inline void h5t_set_cset(hid_t hid, H5T_cset_t cset) { - if (H5Tset_cset(hid, cset) < 0) { - HDF5ErrMapper::ToException<DataTypeException>("Error setting cset of datatype."); - } -} - -inline void h5t_set_strpad(hid_t hid, H5T_str_t strpad) { - if (H5Tset_strpad(hid, strpad) < 0) { - HDF5ErrMapper::ToException<DataTypeException>("Error setting strpad of datatype."); - } -} -} // namespace detail - - namespace { // unnamed inline DataTypeClass convert_type_class(const H5T_class_t& tclass); inline std::string type_class_string(DataTypeClass); @@ -95,7 +34,7 @@ inline bool DataType::empty() const noexcept { } inline DataTypeClass DataType::getClass() const { - return convert_type_class(H5Tget_class(_hid)); + return convert_type_class(detail::h5t_get_class(_hid)); } inline size_t DataType::getSize() const { @@ -103,7 +42,7 @@ inline size_t DataType::getSize() const { } inline bool DataType::operator==(const DataType& other) const { - return (H5Tequal(_hid, other._hid) > 0); + return detail::h5t_equal(_hid, other._hid) > 0; } inline bool DataType::operator!=(const DataType& other) const { @@ -111,11 +50,7 @@ inline bool DataType::operator!=(const DataType& other) const { } inline bool DataType::isVariableStr() const { - auto var_value = H5Tis_variable_str(_hid); - if (var_value < 0) { - HDF5ErrMapper::ToException<DataTypeException>("Unable to define datatype size to variable"); - } - return static_cast<bool>(var_value); + return detail::h5t_is_variable_str(_hid) > 0; } inline bool DataType::isFixedLenStr() const { @@ -123,7 +58,7 @@ inline bool DataType::isFixedLenStr() const { } inline bool DataType::isReference() const { - return H5Tequal(_hid, H5T_STD_REF_OBJ) > 0; + return detail::h5t_equal(_hid, H5T_STD_REF_OBJ) > 0; } inline StringType DataType::asStringType() const { @@ -131,8 +66,8 @@ inline StringType DataType::asStringType() const { throw DataTypeException("Invalid conversion to StringType."); } - if (isValid() && H5Iinc_ref(_hid) < 0) { - throw ObjectException("Reference counter increase failure"); + if (isValid()) { + detail::h5i_inc_ref(_hid); } return StringType(_hid); @@ -233,21 +168,6 @@ inline AtomicType<unsigned long long>::AtomicType() { } // half-float, float, double and long double mapping -#ifdef H5_USE_HALF_FLOAT -using float16_t = half_float::half; - -template <> -inline AtomicType<float16_t>::AtomicType() { - _hid = detail::h5t_copy(H5T_NATIVE_FLOAT); - // Sign position, exponent position, exponent size, mantissa position, 
mantissa size - H5Tset_fields(_hid, 15, 10, 5, 0, 10); - // Total datatype size (in bytes) - detail::h5t_set_size(_hid, 2); - // Floating point exponent bias - H5Tset_ebias(_hid, 15); -} -#endif - template <> inline AtomicType<float>::AtomicType() { _hid = detail::h5t_copy(H5T_NATIVE_FLOAT); @@ -373,8 +293,8 @@ inline AtomicType<Reference>::AtomicType() { inline size_t find_first_atomic_member_size(hid_t hid) { // Recursive exit condition - if (H5Tget_class(hid) == H5T_COMPOUND) { - auto number_of_members = H5Tget_nmembers(hid); + if (detail::h5t_get_class(hid) == H5T_COMPOUND) { + auto number_of_members = detail::h5t_get_nmembers(hid); if (number_of_members == -1) { throw DataTypeException("Cannot get members of CompoundType with hid: " + std::to_string(hid)); @@ -384,11 +304,11 @@ inline size_t find_first_atomic_member_size(hid_t hid) { std::to_string(hid)); } - auto member_type = H5Tget_member_type(hid, 0); + auto member_type = detail::h5t_get_member_type(hid, 0); auto size = find_first_atomic_member_size(member_type); - H5Tclose(member_type); + detail::h5t_close(member_type); return size; - } else if (H5Tget_class(hid) == H5T_STRING) { + } else if (detail::h5t_get_class(hid) == H5T_STRING) { return 1; } return detail::h5t_get_size(hid); @@ -448,43 +368,36 @@ inline void CompoundType::create(size_t size) { } // Create the HDF5 type - if ((_hid = H5Tcreate(H5T_COMPOUND, size)) < 0) { - HDF5ErrMapper::ToException<DataTypeException>("Could not create new compound datatype"); - } + _hid = detail::h5t_create(H5T_COMPOUND, size); // Loop over all the members and insert them into the datatype for (const auto& member: members) { - if (H5Tinsert(_hid, member.name.c_str(), member.offset, member.base_type.getId()) < 0) { - HDF5ErrMapper::ToException<DataTypeException>("Could not add new member to datatype"); - } + detail::h5t_insert(_hid, member.name.c_str(), member.offset, member.base_type.getId()); } } #undef _H5_STRUCT_PADDING inline void CompoundType::commit(const Object& object, const std::string& name) const { - H5Tcommit2(object.getId(), name.c_str(), getId(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + detail::h5t_commit2( + object.getId(), name.c_str(), getId(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); } template <typename T> inline void EnumType<T>::create() { // Create the HDF5 type - if ((_hid = H5Tenum_create(AtomicType<typename std::underlying_type<T>::type>{}.getId())) < 0) { - HDF5ErrMapper::ToException<DataTypeException>("Could not create new enum datatype"); - } + _hid = detail::h5t_enum_create(AtomicType<typename std::underlying_type<T>::type>{}.getId()); // Loop over all the members and insert them into the datatype for (const auto& member: members) { - if (H5Tenum_insert(_hid, member.name.c_str(), &(member.value)) < 0) { - HDF5ErrMapper::ToException<DataTypeException>( - "Could not add new member to this enum datatype"); - } + detail::h5t_enum_insert(_hid, member.name.c_str(), &(member.value)); } } template <typename T> inline void EnumType<T>::commit(const Object& object, const std::string& name) const { - H5Tcommit2(object.getId(), name.c_str(), getId(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + detail::h5t_commit2( + object.getId(), name.c_str(), getId(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); } namespace { @@ -598,3 +511,16 @@ inline DataType create_and_check_datatype() { } // namespace HighFive HIGHFIVE_REGISTER_TYPE(HighFive::details::Boolean, HighFive::create_enum_boolean) + +namespace HighFive { + +template <> +inline DataType create_datatype<bool>() { + return 
create_datatype<HighFive::details::Boolean>(); +} + +} // namespace HighFive + +#ifdef H5_USE_HALF_FLOAT +#include <highfive/half_float.hpp> +#endif diff --git a/packages/HighFive/include/highfive/bits/H5Dataspace_misc.hpp b/packages/HighFive/include/highfive/bits/H5Dataspace_misc.hpp index 0fdcacefdb7c5bbcdff0aa5dad6f7d16902d3571..ceae1e5314551f72179b7e757d7ccf6faf67a038 100644 --- a/packages/HighFive/include/highfive/bits/H5Dataspace_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5Dataspace_misc.hpp @@ -17,6 +17,7 @@ #include "H5Utils.hpp" #include "H5Converter_misc.hpp" +#include "h5s_wrapper.hpp" namespace HighFive { @@ -38,9 +39,15 @@ template <class IT, typename> inline DataSpace::DataSpace(const IT begin, const IT end) { std::vector<hsize_t> real_dims(begin, end); - if ((_hid = H5Screate_simple(int(real_dims.size()), real_dims.data(), NULL)) < 0) { - throw DataSpaceException("Impossible to create dataspace"); - } + _hid = detail::h5s_create_simple(int(real_dims.size()), real_dims.data(), nullptr); +} + +inline DataSpace DataSpace::Scalar() { + return DataSpace(DataSpace::dataspace_scalar); +} + +inline DataSpace DataSpace::Null() { + return DataSpace(DataSpace::dataspace_null); } inline DataSpace::DataSpace(const std::vector<size_t>& dims, const std::vector<size_t>& maxdims) { @@ -57,10 +64,8 @@ inline DataSpace::DataSpace(const std::vector<size_t>& dims, const std::vector<s static_cast<hsize_t>(DataSpace::UNLIMITED), H5S_UNLIMITED); - if ((_hid = H5Screate_simple(int(dims.size()), real_dims.data(), real_maxdims.data())) < 0) { - throw DataSpaceException("Impossible to create dataspace"); - } -} // namespace HighFive + _hid = detail::h5s_create_simple(int(dims.size()), real_dims.data(), real_maxdims.data()); +} inline DataSpace::DataSpace(DataSpace::DataspaceType space_type) { H5S_class_t h5_dataspace_type; @@ -77,53 +82,34 @@ inline DataSpace::DataSpace(DataSpace::DataspaceType space_type) { "dataspace_scalar or dataspace_null"); } - if ((_hid = H5Screate(h5_dataspace_type)) < 0) { - throw DataSpaceException("Unable to create dataspace"); - } + _hid = detail::h5s_create(h5_dataspace_type); } inline DataSpace DataSpace::clone() const { DataSpace res; - if ((res._hid = H5Scopy(_hid)) < 0) { - throw DataSpaceException("Unable to copy dataspace"); - } + res._hid = detail::h5s_copy(_hid); return res; } inline size_t DataSpace::getNumberDimensions() const { - const int ndim = H5Sget_simple_extent_ndims(_hid); - if (ndim < 0) { - HDF5ErrMapper::ToException<DataSetException>( - "Unable to get dataspace number of dimensions"); - } - return size_t(ndim); + return static_cast<size_t>(detail::h5s_get_simple_extent_ndims(_hid)); } inline std::vector<size_t> DataSpace::getDimensions() const { std::vector<hsize_t> dims(getNumberDimensions()); if (!dims.empty()) { - if (H5Sget_simple_extent_dims(_hid, dims.data(), NULL) < 0) { - HDF5ErrMapper::ToException<DataSetException>("Unable to get dataspace dimensions"); - } + detail::h5s_get_simple_extent_dims(_hid, dims.data(), nullptr); } return details::to_vector_size_t(std::move(dims)); } inline size_t DataSpace::getElementCount() const { - hssize_t nelements = H5Sget_simple_extent_npoints(_hid); - if (nelements < 0) { - HDF5ErrMapper::ToException<DataSetException>( - "Unable to get number of elements in dataspace"); - } - - return static_cast<size_t>(nelements); + return static_cast<size_t>(detail::h5s_get_simple_extent_npoints(_hid)); } inline std::vector<size_t> DataSpace::getMaxDimensions() const { std::vector<hsize_t> 
maxdims(getNumberDimensions()); - if (H5Sget_simple_extent_dims(_hid, NULL, maxdims.data()) < 0) { - HDF5ErrMapper::ToException<DataSetException>("Unable to get dataspace dimensions"); - } + detail::h5s_get_simple_extent_dims(_hid, nullptr, maxdims.data()); std::replace(maxdims.begin(), maxdims.end(), diff --git a/packages/HighFive/include/highfive/bits/H5Exception_misc.hpp b/packages/HighFive/include/highfive/bits/H5Exception_misc.hpp index f7382f2c2c5fcc01cd6b064ef6991d200a0ed8f9..16ec107e65713587e8f11b423cd101120f648462 100644 --- a/packages/HighFive/include/highfive/bits/H5Exception_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5Exception_misc.hpp @@ -11,7 +11,8 @@ #include <cstdlib> #include <sstream> -#include <H5Epublic.h> +#include "h5_wrapper.hpp" +#include "h5e_wrapper.hpp" namespace HighFive { @@ -21,14 +22,14 @@ struct HDF5ErrMapper { auto** e_iter = static_cast<ExceptionType**>(client_data); (void) n; - const char* major_err = H5Eget_major(err_desc->maj_num); - const char* minor_err = H5Eget_minor(err_desc->min_num); + const char* major_err = detail::nothrow::h5e_get_major(err_desc->maj_num); + const char* minor_err = detail::nothrow::h5e_get_minor(err_desc->min_num); std::ostringstream oss; oss << '(' << major_err << ") " << minor_err; - H5free_memory((void*) major_err); - H5free_memory((void*) minor_err); + detail::nothrow::h5_free_memory((void*) major_err); + detail::nothrow::h5_free_memory((void*) minor_err); auto* e = new ExceptionType(oss.str()); e->_err_major = err_desc->maj_num; @@ -45,8 +46,11 @@ struct HDF5ErrMapper { ExceptionType e(""); ExceptionType* e_iter = &e; - H5Ewalk2(err_stack, H5E_WALK_UPWARD, &HDF5ErrMapper::stackWalk<ExceptionType>, &e_iter); - H5Eclear2(err_stack); + detail::nothrow::h5e_walk2(err_stack, + H5E_WALK_UPWARD, + &HDF5ErrMapper::stackWalk<ExceptionType>, + &e_iter); + detail::nothrow::h5e_clear2(err_stack); const char* next_err_msg = (e.nextException() != NULL) ? 
(e.nextException()->what()) : (""); diff --git a/packages/HighFive/include/highfive/bits/H5File_misc.hpp b/packages/HighFive/include/highfive/bits/H5File_misc.hpp index b90792a71218f9890bfcc51dc2cd78810f4c2968..52ae59516eb83065455cc4632acaa10ec8f64f2e 100644 --- a/packages/HighFive/include/highfive/bits/H5File_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5File_misc.hpp @@ -14,6 +14,7 @@ #include "../H5Utility.hpp" #include "H5Utils.hpp" +#include "h5f_wrapper.hpp" namespace HighFive { @@ -62,7 +63,7 @@ inline File::File(const std::string& filename, if (openOrCreate) silencer.reset(new SilenceHDF5()); - _hid = H5Fopen(filename.c_str(), openMode, fileAccessProps.getId()); + _hid = detail::nothrow::h5f_open(filename.c_str(), openMode, fileAccessProps.getId()); if (isValid()) return; // Done @@ -78,15 +79,14 @@ inline File::File(const std::string& filename, auto fcpl = fileCreateProps.getId(); auto fapl = fileAccessProps.getId(); - if ((_hid = H5Fcreate(filename.c_str(), createMode, fcpl, fapl)) < 0) { - HDF5ErrMapper::ToException<FileException>(std::string("Unable to create file " + filename)); - } + _hid = detail::h5f_create(filename.c_str(), createMode, fcpl, fapl); } inline const std::string& File::getName() const noexcept { if (_filename.empty()) { - _filename = details::get_name( - [this](char* buffer, size_t length) { return H5Fget_name(getId(), buffer, length); }); + _filename = details::get_name([this](char* buffer, size_t length) { + return detail::h5f_get_name(getId(), buffer, length); + }); } return _filename; } @@ -122,27 +122,17 @@ inline hsize_t File::getFileSpacePageSize() const { #endif inline void File::flush() { - if (H5Fflush(_hid, H5F_SCOPE_GLOBAL) < 0) { - HDF5ErrMapper::ToException<FileException>(std::string("Unable to flush file " + getName())); - } + detail::h5f_flush(_hid, H5F_SCOPE_GLOBAL); } inline size_t File::getFileSize() const { hsize_t sizeValue = 0; - if (H5Fget_filesize(_hid, &sizeValue) < 0) { - HDF5ErrMapper::ToException<FileException>( - std::string("Unable to retrieve size of file " + getName())); - } + detail::h5f_get_filesize(_hid, &sizeValue); return static_cast<size_t>(sizeValue); } inline size_t File::getFreeSpace() const { - hssize_t unusedSize = H5Fget_freespace(_hid); - if (unusedSize < 0) { - HDF5ErrMapper::ToException<FileException>( - std::string("Unable to retrieve unused space of file " + getName())); - } - return static_cast<size_t>(unusedSize); + return static_cast<size_t>(detail::h5f_get_freespace(_hid)); } } // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/H5Inspector_decl.hpp b/packages/HighFive/include/highfive/bits/H5Inspector_decl.hpp new file mode 100644 index 0000000000000000000000000000000000000000..434545a60e76f7840847bc838d1ca91af911b17c --- /dev/null +++ b/packages/HighFive/include/highfive/bits/H5Inspector_decl.hpp @@ -0,0 +1,27 @@ +#pragma once + +#include <cstddef> +#include <numeric> +#include <functional> +#include <vector> + +namespace HighFive { + +inline size_t compute_total_size(const std::vector<size_t>& dims) { + return std::accumulate(dims.begin(), dims.end(), size_t{1u}, std::multiplies<size_t>()); +} + +template <typename T> +using unqualified_t = typename std::remove_const<typename std::remove_reference<T>::type>::type; + + +namespace details { + +template <typename T> +struct type_helper; + +template <typename T> +struct inspector; + +} // namespace details +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/H5Inspector_misc.hpp 
b/packages/HighFive/include/highfive/bits/H5Inspector_misc.hpp index 05ed6bc3ec0250b18e0ce4311ff710e703a6fca9..1613f87c3392df4e4c37d706a0eb211c31e86ed6 100644 --- a/packages/HighFive/include/highfive/bits/H5Inspector_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5Inspector_misc.hpp @@ -21,19 +21,10 @@ #include "string_padding.hpp" -#ifdef H5_USE_BOOST -#include <boost/multi_array.hpp> -// starting Boost 1.64, serialization header must come before ublas -#include <boost/serialization/vector.hpp> -#include <boost/numeric/ublas/matrix.hpp> -#endif -#ifdef H5_USE_EIGEN -#include <Eigen/Eigen> -#endif +#include "H5Inspector_decl.hpp" namespace HighFive { - namespace details { inline bool checkDimensions(const std::vector<size_t>& dims, size_t n_dim_requested) { @@ -125,13 +116,6 @@ inline std::vector<size_t> squeezeDimensions(const std::vector<size_t>& dims, } // namespace details -inline size_t compute_total_size(const std::vector<size_t>& dims) { - return std::accumulate(dims.begin(), dims.end(), size_t{1u}, std::multiplies<size_t>()); -} - -template <typename T> -using unqualified_t = typename std::remove_const<typename std::remove_reference<T>::type>::type; - /***** inspector<T> { using type = T @@ -401,19 +385,21 @@ struct inspector<std::vector<T>> { } static hdf5_type* data(type& val) { - return inspector<value_type>::data(val[0]); + return val.empty() ? nullptr : inspector<value_type>::data(val[0]); } static const hdf5_type* data(const type& val) { - return inspector<value_type>::data(val[0]); + return val.empty() ? nullptr : inspector<value_type>::data(val[0]); } template <class It> static void serialize(const type& val, It m) { - size_t subsize = inspector<value_type>::getSizeVal(val[0]); - for (auto&& e: val) { - inspector<value_type>::serialize(e, m); - m += subsize; + if (!val.empty()) { + size_t subsize = inspector<value_type>::getSizeVal(val[0]); + for (auto&& e: val) { + inspector<value_type>::serialize(e, m); + m += subsize; + } } } @@ -514,12 +500,17 @@ struct inspector<std::array<T, N>> { return compute_total_size(dims); } - static void prepare(type& /* val */, const std::vector<size_t>& dims) { + static void prepare(type& val, const std::vector<size_t>& dims) { if (dims[0] > N) { std::ostringstream os; os << "Size of std::array (" << N << ") is too small for dims (" << dims[0] << ")."; throw DataSpaceException(os.str()); } + + std::vector<size_t> next_dims(dims.begin() + 1, dims.end()); + for (auto&& e: val) { + inspector<value_type>::prepare(e, next_dims); + } } static hdf5_type* data(type& val) { @@ -627,232 +618,13 @@ struct inspector<T[N]> { } }; -#ifdef H5_USE_EIGEN -template <typename T, int M, int N> -struct inspector<Eigen::Matrix<T, M, N>> { - using type = Eigen::Matrix<T, M, N>; - using value_type = T; - using base_type = typename inspector<value_type>::base_type; - using hdf5_type = base_type; - - static constexpr size_t ndim = 2; - static constexpr size_t recursive_ndim = ndim + inspector<value_type>::recursive_ndim; - static constexpr bool is_trivially_copyable = std::is_trivially_copyable<value_type>::value && - inspector<value_type>::is_trivially_copyable; - - - static void assert_not_buggy(Eigen::Index nrows, Eigen::Index ncols) { - if (nrows > 1 && ncols > 1) { - throw std::runtime_error( - "HighFive has been broken for Eigen::Matrix. 
Please check " - "https://github.com/BlueBrain/HighFive/issues/532."); - } - } - - static std::vector<size_t> getDimensions(const type& val) { - assert_not_buggy(val.rows(), val.cols()); - - std::vector<size_t> sizes{static_cast<size_t>(val.rows()), static_cast<size_t>(val.cols())}; - auto s = inspector<value_type>::getDimensions(val.data()[0]); - sizes.insert(sizes.end(), s.begin(), s.end()); - return sizes; - } - - static size_t getSizeVal(const type& val) { - return compute_total_size(getDimensions(val)); - } - - static size_t getSize(const std::vector<size_t>& dims) { - return compute_total_size(dims); - } - - static void prepare(type& val, const std::vector<size_t>& dims) { - if (dims[0] != static_cast<size_t>(val.rows()) || - dims[1] != static_cast<size_t>(val.cols())) { - val.resize(static_cast<typename type::Index>(dims[0]), - static_cast<typename type::Index>(dims[1])); - } - - assert_not_buggy(val.rows(), val.cols()); - } - - static hdf5_type* data(type& val) { - assert_not_buggy(val.rows(), val.cols()); - return inspector<value_type>::data(*val.data()); - } - - static const hdf5_type* data(const type& val) { - assert_not_buggy(val.rows(), val.cols()); - return inspector<value_type>::data(*val.data()); - } - - static void serialize(const type& val, hdf5_type* m) { - assert_not_buggy(val.rows(), val.cols()); - std::memcpy(m, val.data(), static_cast<size_t>(val.size()) * sizeof(hdf5_type)); - } - - static void unserialize(const hdf5_type* vec_align, - const std::vector<size_t>& dims, - type& val) { - assert_not_buggy(val.rows(), val.cols()); - if (dims.size() < 2) { - std::ostringstream os; - os << "Impossible to pair DataSet with " << dims.size() - << " dimensions into an eigen-matrix."; - throw DataSpaceException(os.str()); - } - std::memcpy(val.data(), vec_align, compute_total_size(dims) * sizeof(hdf5_type)); - } -}; -#endif +} // namespace details +} // namespace HighFive #ifdef H5_USE_BOOST -template <typename T, size_t Dims> -struct inspector<boost::multi_array<T, Dims>> { - using type = boost::multi_array<T, Dims>; - using value_type = T; - using base_type = typename inspector<value_type>::base_type; - using hdf5_type = typename inspector<value_type>::hdf5_type; - - static constexpr size_t ndim = Dims; - static constexpr size_t recursive_ndim = ndim + inspector<value_type>::recursive_ndim; - static constexpr bool is_trivially_copyable = std::is_trivially_copyable<value_type>::value && - inspector<value_type>::is_trivially_copyable; - - static std::vector<size_t> getDimensions(const type& val) { - std::vector<size_t> sizes; - for (size_t i = 0; i < ndim; ++i) { - sizes.push_back(val.shape()[i]); - } - auto s = inspector<value_type>::getDimensions(val.data()[0]); - sizes.insert(sizes.end(), s.begin(), s.end()); - return sizes; - } - - static size_t getSizeVal(const type& val) { - return compute_total_size(getDimensions(val)); - } - - static size_t getSize(const std::vector<size_t>& dims) { - return compute_total_size(dims); - } - - static void prepare(type& val, const std::vector<size_t>& dims) { - if (dims.size() < ndim) { - std::ostringstream os; - os << "Only '" << dims.size() << "' given but boost::multi_array is of size '" << ndim - << "'."; - throw DataSpaceException(os.str()); - } - boost::array<typename type::index, Dims> ext; - std::copy(dims.begin(), dims.begin() + ndim, ext.begin()); - val.resize(ext); - std::vector<size_t> next_dims(dims.begin() + Dims, dims.end()); - std::size_t size = std::accumulate(dims.begin(), - dims.begin() + Dims, - std::size_t{1}, - 
std::multiplies<size_t>()); - for (size_t i = 0; i < size; ++i) { - inspector<value_type>::prepare(*(val.origin() + i), next_dims); - } - } - - static hdf5_type* data(type& val) { - return inspector<value_type>::data(*val.data()); - } - - static const hdf5_type* data(const type& val) { - return inspector<value_type>::data(*val.data()); - } - - template <class It> - static void serialize(const type& val, It m) { - size_t size = val.num_elements(); - size_t subsize = inspector<value_type>::getSizeVal(*val.origin()); - for (size_t i = 0; i < size; ++i) { - inspector<value_type>::serialize(*(val.origin() + i), m + i * subsize); - } - } - - template <class It> - static void unserialize(It vec_align, const std::vector<size_t>& dims, type& val) { - std::vector<size_t> next_dims(dims.begin() + ndim, dims.end()); - size_t subsize = compute_total_size(next_dims); - for (size_t i = 0; i < val.num_elements(); ++i) { - inspector<value_type>::unserialize(vec_align + i * subsize, - next_dims, - *(val.origin() + i)); - } - } -}; - -template <typename T> -struct inspector<boost::numeric::ublas::matrix<T>> { - using type = boost::numeric::ublas::matrix<T>; - using value_type = unqualified_t<T>; - using base_type = typename inspector<value_type>::base_type; - using hdf5_type = typename inspector<value_type>::hdf5_type; - - static constexpr size_t ndim = 2; - static constexpr size_t recursive_ndim = ndim + inspector<value_type>::recursive_ndim; - static constexpr bool is_trivially_copyable = std::is_trivially_copyable<value_type>::value && - inspector<value_type>::is_trivially_copyable; - - static std::vector<size_t> getDimensions(const type& val) { - std::vector<size_t> sizes{val.size1(), val.size2()}; - auto s = inspector<value_type>::getDimensions(val(0, 0)); - sizes.insert(sizes.end(), s.begin(), s.end()); - return sizes; - } - - static size_t getSizeVal(const type& val) { - return compute_total_size(getDimensions(val)); - } - - static size_t getSize(const std::vector<size_t>& dims) { - return compute_total_size(dims); - } - - static void prepare(type& val, const std::vector<size_t>& dims) { - if (dims.size() < ndim) { - std::ostringstream os; - os << "Impossible to pair DataSet with " << dims.size() << " dimensions into a " << ndim - << " boost::numeric::ublas::matrix"; - throw DataSpaceException(os.str()); - } - val.resize(dims[0], dims[1], false); - } - - static hdf5_type* data(type& val) { - return inspector<value_type>::data(val(0, 0)); - } - - static const hdf5_type* data(const type& val) { - return inspector<value_type>::data(val(0, 0)); - } - - static void serialize(const type& val, hdf5_type* m) { - size_t size = val.size1() * val.size2(); - size_t subsize = inspector<value_type>::getSizeVal(val(0, 0)); - for (size_t i = 0; i < size; ++i) { - inspector<value_type>::serialize(*(&val(0, 0) + i), m + i * subsize); - } - } - - static void unserialize(const hdf5_type* vec_align, - const std::vector<size_t>& dims, - type& val) { - std::vector<size_t> next_dims(dims.begin() + ndim, dims.end()); - size_t subsize = compute_total_size(next_dims); - size_t size = val.size1() * val.size2(); - for (size_t i = 0; i < size; ++i) { - inspector<value_type>::unserialize(vec_align + i * subsize, - next_dims, - *(&val(0, 0) + i)); - } - } -}; +#include <highfive/boost.hpp> #endif -} // namespace details -} // namespace HighFive +#ifdef H5_USE_EIGEN +#include <highfive/eigen.hpp> +#endif diff --git a/packages/HighFive/include/highfive/bits/H5Node_traits.hpp b/packages/HighFive/include/highfive/bits/H5Node_traits.hpp 
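The Boost and Eigen inspector specializations removed above now live in dedicated opt-in headers (`<highfive/boost.hpp>` and `<highfive/eigen.hpp>`), which `H5Inspector_misc.hpp` still pulls in when `H5_USE_BOOST` / `H5_USE_EIGEN` are defined, so existing call sites should keep compiling unchanged. As a rough illustration only (not part of this diff, and assuming the usual `createDataSet(name, data)` overload is unaffected), user code gated on `H5_USE_BOOST` would still look like this:
```
// Minimal sketch, not part of the diff: writing a boost::multi_array through
// HighFive after the inspector split. Assumes H5_USE_BOOST is defined by the
// build system and that the createDataSet(name, data) overload is unchanged.
#include <algorithm>
#include <boost/multi_array.hpp>
#include <highfive/H5File.hpp>

int main() {
    boost::multi_array<double, 2> grid(boost::extents[3][4]);
    std::fill_n(grid.data(), grid.num_elements(), 1.0);

    HighFive::File file("example.h5", HighFive::File::Truncate);
    // inspector<boost::multi_array<T, N>> is now provided by highfive/boost.hpp.
    file.createDataSet("grid", grid);
}
```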
index d53d3f0488f1dc8ec07a645d3f0aaec32e65c3b2..493749beecd0e94203028d071eaffabd44f6f8c0 100644 --- a/packages/HighFive/include/highfive/bits/H5Node_traits.hpp +++ b/packages/HighFive/include/highfive/bits/H5Node_traits.hpp @@ -239,8 +239,7 @@ class NodeTraits { bool _exist(const std::string& node_name, bool raise_errors = true) const; // Opens an arbitrary object to obtain info - Object _open(const std::string& node_name, - const DataSetAccessProps& accessProps = DataSetAccessProps::Default()) const; + Object _open(const std::string& node_name) const; }; diff --git a/packages/HighFive/include/highfive/bits/H5Node_traits_misc.hpp b/packages/HighFive/include/highfive/bits/H5Node_traits_misc.hpp index 2f75ff3111efa3f9c12e94599cf995dfdd0f8ae2..b09bc3190d1136d1385748fa2546c9b1e40a2064 100644 --- a/packages/HighFive/include/highfive/bits/H5Node_traits_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5Node_traits_misc.hpp @@ -12,9 +12,7 @@ #include <vector> #include <H5Apublic.h> -#include <H5Dpublic.h> #include <H5Fpublic.h> -#include <H5Gpublic.h> #include <H5Ppublic.h> #include <H5Tpublic.h> @@ -27,6 +25,11 @@ #include "H5Selection_misc.hpp" #include "H5Slice_traits_misc.hpp" +#include "h5l_wrapper.hpp" +#include "h5g_wrapper.hpp" +#include "h5o_wrapper.hpp" + + namespace HighFive { @@ -39,18 +42,13 @@ inline DataSet NodeTraits<Derivate>::createDataSet(const std::string& dataset_na bool parents) { LinkCreateProps lcpl; lcpl.add(CreateIntermediateGroup(parents)); - const auto hid = H5Dcreate2(static_cast<Derivate*>(this)->getId(), - dataset_name.c_str(), - dtype.getId(), - space.getId(), - lcpl.getId(), - createProps.getId(), - accessProps.getId()); - if (hid < 0) { - HDF5ErrMapper::ToException<DataSetException>( - std::string("Unable to create the dataset \"") + dataset_name + "\":"); - } - return DataSet(hid); + return DataSet(detail::h5d_create2(static_cast<Derivate*>(this)->getId(), + dataset_name.c_str(), + dtype.getId(), + space.getId(), + lcpl.getId(), + createProps.getId(), + accessProps.getId())); } template <typename Derivate> @@ -119,30 +117,20 @@ inline DataSet NodeTraits<Derivate>::createDataSet(const std::string& dataset_na template <typename Derivate> inline DataSet NodeTraits<Derivate>::getDataSet(const std::string& dataset_name, const DataSetAccessProps& accessProps) const { - const auto hid = H5Dopen2(static_cast<const Derivate*>(this)->getId(), - dataset_name.c_str(), - accessProps.getId()); - if (hid < 0) { - HDF5ErrMapper::ToException<DataSetException>(std::string("Unable to open the dataset \"") + - dataset_name + "\":"); - } - return DataSet(hid); + return DataSet(detail::h5d_open2(static_cast<const Derivate*>(this)->getId(), + dataset_name.c_str(), + accessProps.getId())); } template <typename Derivate> inline Group NodeTraits<Derivate>::createGroup(const std::string& group_name, bool parents) { LinkCreateProps lcpl; lcpl.add(CreateIntermediateGroup(parents)); - const auto hid = H5Gcreate2(static_cast<Derivate*>(this)->getId(), - group_name.c_str(), - lcpl.getId(), - H5P_DEFAULT, - H5P_DEFAULT); - if (hid < 0) { - HDF5ErrMapper::ToException<GroupException>(std::string("Unable to create the group \"") + - group_name + "\":"); - } - return detail::make_group(hid); + return detail::make_group(detail::h5g_create2(static_cast<Derivate*>(this)->getId(), + group_name.c_str(), + lcpl.getId(), + H5P_DEFAULT, + H5P_DEFAULT)); } template <typename Derivate> @@ -151,63 +139,46 @@ inline Group NodeTraits<Derivate>::createGroup(const std::string& group_name, bool parents) { 
LinkCreateProps lcpl; lcpl.add(CreateIntermediateGroup(parents)); - const auto hid = H5Gcreate2(static_cast<Derivate*>(this)->getId(), - group_name.c_str(), - lcpl.getId(), - createProps.getId(), - H5P_DEFAULT); - if (hid < 0) { - HDF5ErrMapper::ToException<GroupException>(std::string("Unable to create the group \"") + - group_name + "\":"); - } - return detail::make_group(hid); + return detail::make_group(detail::h5g_create2(static_cast<Derivate*>(this)->getId(), + group_name.c_str(), + lcpl.getId(), + createProps.getId(), + H5P_DEFAULT)); } template <typename Derivate> inline Group NodeTraits<Derivate>::getGroup(const std::string& group_name) const { - const auto hid = - H5Gopen2(static_cast<const Derivate*>(this)->getId(), group_name.c_str(), H5P_DEFAULT); - if (hid < 0) { - HDF5ErrMapper::ToException<GroupException>(std::string("Unable to open the group \"") + - group_name + "\":"); - } - return detail::make_group(hid); + return detail::make_group(detail::h5g_open2(static_cast<const Derivate*>(this)->getId(), + group_name.c_str(), + H5P_DEFAULT)); } template <typename Derivate> inline DataType NodeTraits<Derivate>::getDataType(const std::string& type_name, const DataTypeAccessProps& accessProps) const { - const auto hid = H5Topen2(static_cast<const Derivate*>(this)->getId(), - type_name.c_str(), - accessProps.getId()); - if (hid < 0) { - HDF5ErrMapper::ToException<DataTypeException>( - std::string("Unable to open the datatype \"") + type_name + "\":"); - } - return DataType(hid); + return DataType(detail::h5t_open2(static_cast<const Derivate*>(this)->getId(), + type_name.c_str(), + accessProps.getId())); } template <typename Derivate> inline size_t NodeTraits<Derivate>::getNumberObjects() const { hsize_t res; - if (H5Gget_num_objs(static_cast<const Derivate*>(this)->getId(), &res) < 0) { - HDF5ErrMapper::ToException<GroupException>( - std::string("Unable to count objects in existing group or file")); - } + detail::h5g_get_num_objs(static_cast<const Derivate*>(this)->getId(), &res); return static_cast<size_t>(res); } template <typename Derivate> inline std::string NodeTraits<Derivate>::getObjectName(size_t index) const { return details::get_name([&](char* buffer, size_t length) { - return H5Lget_name_by_idx(static_cast<const Derivate*>(this)->getId(), - ".", - H5_INDEX_NAME, - H5_ITER_INC, - index, - buffer, - length, - H5P_DEFAULT); + return detail::h5l_get_name_by_idx(static_cast<const Derivate*>(this)->getId(), + ".", + H5_INDEX_NAME, + H5_ITER_INC, + index, + buffer, + length, + H5P_DEFAULT); }); } @@ -217,18 +188,14 @@ inline bool NodeTraits<Derivate>::rename(const std::string& src_path, bool parents) const { LinkCreateProps lcpl; lcpl.add(CreateIntermediateGroup(parents)); - herr_t status = H5Lmove(static_cast<const Derivate*>(this)->getId(), - src_path.c_str(), - static_cast<const Derivate*>(this)->getId(), - dst_path.c_str(), - lcpl.getId(), - H5P_DEFAULT); - if (status < 0) { - HDF5ErrMapper::ToException<GroupException>(std::string("Unable to move link to \"") + - dst_path + "\":"); - return false; - } - return true; + herr_t err = detail::h5l_move(static_cast<const Derivate*>(this)->getId(), + src_path.c_str(), + static_cast<const Derivate*>(this)->getId(), + dst_path.c_str(), + lcpl.getId(), + H5P_DEFAULT); + + return err >= 0; } template <typename Derivate> @@ -239,23 +206,21 @@ inline std::vector<std::string> NodeTraits<Derivate>::listObjectNames(IndexType size_t num_objs = getNumberObjects(); names.reserve(num_objs); - if (H5Literate(static_cast<const 
Derivate*>(this)->getId(), - static_cast<H5_index_t>(idx_type), - H5_ITER_INC, - NULL, - &details::internal_high_five_iterate<H5L_info_t>, - static_cast<void*>(&iterateData)) < 0) { - HDF5ErrMapper::ToException<GroupException>(std::string("Unable to list objects in group")); - } - + detail::h5l_iterate(static_cast<const Derivate*>(this)->getId(), + static_cast<H5_index_t>(idx_type), + H5_ITER_INC, + NULL, + &details::internal_high_five_iterate<H5L_info_t>, + static_cast<void*>(&iterateData)); return names; } template <typename Derivate> inline bool NodeTraits<Derivate>::_exist(const std::string& node_name, bool raise_errors) const { SilenceHDF5 silencer{}; - const auto val = - H5Lexists(static_cast<const Derivate*>(this)->getId(), node_name.c_str(), H5P_DEFAULT); + const auto val = detail::nothrow::h5l_exists(static_cast<const Derivate*>(this)->getId(), + node_name.c_str(), + H5P_DEFAULT); if (val < 0) { if (raise_errors) { HDF5ErrMapper::ToException<GroupException>("Invalid link for exist()"); @@ -285,11 +250,7 @@ inline bool NodeTraits<Derivate>::exist(const std::string& group_path) const { template <typename Derivate> inline void NodeTraits<Derivate>::unlink(const std::string& node_name) const { - const herr_t val = - H5Ldelete(static_cast<const Derivate*>(this)->getId(), node_name.c_str(), H5P_DEFAULT); - if (val < 0) { - HDF5ErrMapper::ToException<GroupException>(std::string("Invalid name for unlink() ")); - } + detail::h5l_delete(static_cast<const Derivate*>(this)->getId(), node_name.c_str(), H5P_DEFAULT); } @@ -313,13 +274,14 @@ static inline LinkType _convert_link_type(const H5L_type_t& ltype) noexcept { template <typename Derivate> inline LinkType NodeTraits<Derivate>::getLinkType(const std::string& node_name) const { H5L_info_t linkinfo; - if (H5Lget_info(static_cast<const Derivate*>(this)->getId(), - node_name.c_str(), - &linkinfo, - H5P_DEFAULT) < 0 || - linkinfo.type == H5L_TYPE_ERROR) { - HDF5ErrMapper::ToException<GroupException>(std::string("Unable to obtain info for link ") + - node_name); + detail::h5l_get_info(static_cast<const Derivate*>(this)->getId(), + node_name.c_str(), + &linkinfo, + H5P_DEFAULT); + + if (linkinfo.type == H5L_TYPE_ERROR) { + HDF5ErrMapper::ToException<GroupException>(std::string("Link type of \"") + node_name + + "\" is H5L_TYPE_ERROR"); } return _convert_link_type(linkinfo.type); } @@ -339,14 +301,11 @@ inline void NodeTraits<Derivate>::createSoftLink(const std::string& link_name, if (parents) { linkCreateProps.add(CreateIntermediateGroup{}); } - auto status = H5Lcreate_soft(obj_path.c_str(), - static_cast<const Derivate*>(this)->getId(), - link_name.c_str(), - linkCreateProps.getId(), - linkAccessProps.getId()); - if (status < 0) { - HDF5ErrMapper::ToException<GroupException>(std::string("Unable to create soft link: ")); - } + detail::h5l_create_soft(obj_path.c_str(), + static_cast<const Derivate*>(this)->getId(), + link_name.c_str(), + linkCreateProps.getId(), + linkAccessProps.getId()); } @@ -360,15 +319,12 @@ inline void NodeTraits<Derivate>::createExternalLink(const std::string& link_nam if (parents) { linkCreateProps.add(CreateIntermediateGroup{}); } - auto status = H5Lcreate_external(h5_file.c_str(), - obj_path.c_str(), - static_cast<const Derivate*>(this)->getId(), - link_name.c_str(), - linkCreateProps.getId(), - linkAccessProps.getId()); - if (status < 0) { - HDF5ErrMapper::ToException<GroupException>(std::string("Unable to create external link: ")); - } + detail::h5l_create_external(h5_file.c_str(), + obj_path.c_str(), + 
static_cast<const Derivate*>(this)->getId(), + link_name.c_str(), + linkCreateProps.getId(), + linkAccessProps.getId()); } template <typename Derivate> @@ -383,28 +339,20 @@ inline void NodeTraits<Derivate>::createHardLink(const std::string& link_name, if (parents) { linkCreateProps.add(CreateIntermediateGroup{}); } - auto status = H5Lcreate_hard(target_obj.getId(), - ".", - static_cast<const Derivate*>(this)->getId(), - link_name.c_str(), - linkCreateProps.getId(), - linkAccessProps.getId()); - if (status < 0) { - HDF5ErrMapper::ToException<GroupException>(std::string("Unable to create hard link: ")); - } + detail::h5l_create_hard(target_obj.getId(), + ".", + static_cast<const Derivate*>(this)->getId(), + link_name.c_str(), + linkCreateProps.getId(), + linkAccessProps.getId()); } template <typename Derivate> -inline Object NodeTraits<Derivate>::_open(const std::string& node_name, - const DataSetAccessProps& accessProps) const { - const auto id = H5Oopen(static_cast<const Derivate*>(this)->getId(), - node_name.c_str(), - accessProps.getId()); - if (id < 0) { - HDF5ErrMapper::ToException<GroupException>(std::string("Unable to open \"") + node_name + - "\":"); - } +inline Object NodeTraits<Derivate>::_open(const std::string& node_name) const { + const auto id = detail::h5o_open(static_cast<const Derivate*>(this)->getId(), + node_name.c_str(), + H5P_DEFAULT); return detail::make_object(id); } diff --git a/packages/HighFive/include/highfive/bits/H5Object_misc.hpp b/packages/HighFive/include/highfive/bits/H5Object_misc.hpp index f477d7fdf387a10932e0a8b127c1a6bb52eb7307..c5a1f39998dab4d5a52e50792fff70b5fc001988 100644 --- a/packages/HighFive/include/highfive/bits/H5Object_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5Object_misc.hpp @@ -12,6 +12,7 @@ #include "../H5Exception.hpp" #include "../H5Utility.hpp" +#include "h5i_wrapper.hpp" namespace HighFive { namespace detail { @@ -29,8 +30,8 @@ inline Object::Object(hid_t hid) inline Object::Object(const Object& other) : _hid(other._hid) { - if (other.isValid() && H5Iinc_ref(_hid) < 0) { - throw ObjectException("Reference counter increase failure"); + if (other.isValid()) { + detail::h5i_inc_ref(_hid); } } @@ -41,25 +42,28 @@ inline Object::Object(Object&& other) noexcept inline Object& Object::operator=(const Object& other) { if (this != &other) { - if (isValid()) - H5Idec_ref(_hid); + if ((*this).isValid()) { + detail::h5i_dec_ref(_hid); + } _hid = other._hid; - if (other.isValid() && H5Iinc_ref(_hid) < 0) { - throw ObjectException("Reference counter increase failure"); + if (other.isValid()) { + detail::h5i_inc_ref(_hid); } } return *this; } inline Object::~Object() { - if (isValid() && H5Idec_ref(_hid) < 0) { - HIGHFIVE_LOG_ERROR("HighFive::~Object: reference counter decrease failure"); + if (isValid()) { + if (detail::nothrow::h5i_dec_ref(_hid) < 0) { + HIGHFIVE_LOG_ERROR("Failed to decrease reference count of HID"); + } } } inline bool Object::isValid() const noexcept { - return (_hid != H5I_INVALID_HID) && (H5Iis_valid(_hid) != false); + return (_hid > 0) && (detail::nothrow::h5i_is_valid(_hid) > 0); } inline hid_t Object::getId() const noexcept { @@ -87,11 +91,7 @@ static inline ObjectType _convert_object_type(const H5I_type_t& h5type) { inline ObjectType Object::getType() const { // H5Iget_type is a very lightweight func which extracts the type from the id - H5I_type_t h5type; - if ((h5type = H5Iget_type(_hid)) == H5I_BADID) { - HDF5ErrMapper::ToException<ObjectException>("Invalid hid or object type"); - } - return 
_convert_object_type(h5type); + return _convert_object_type(detail::h5i_get_type(_hid)); } inline ObjectInfo Object::getInfo() const { diff --git a/packages/HighFive/include/highfive/bits/H5Path_traits_misc.hpp b/packages/HighFive/include/highfive/bits/H5Path_traits_misc.hpp index 444e9294bf8f5aea3233b5b4e0005c2ec774b842..acde06d1ee9061e0701f14af3ac6a8297f8b4df6 100644 --- a/packages/HighFive/include/highfive/bits/H5Path_traits_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5Path_traits_misc.hpp @@ -21,20 +21,16 @@ inline PathTraits<Derivate>::PathTraits() { std::is_same<Derivate, Attribute>::value, "PathTraits can only be applied to Group, DataSet and Attribute"); const auto& obj = static_cast<const Derivate&>(*this); - if (!obj.isValid()) { - return; + if (obj.isValid()) { + const hid_t file_id = detail::h5i_get_file_id<PropertyException>(obj.getId()); + _file_obj.reset(new File(file_id)); } - const hid_t file_id = H5Iget_file_id(obj.getId()); - if (file_id < 0) { - HDF5ErrMapper::ToException<PropertyException>("getFile(): Could not obtain file of object"); - } - _file_obj.reset(new File(file_id)); } template <typename Derivate> inline std::string PathTraits<Derivate>::getPath() const { return details::get_name([this](char* buffer, size_t length) { - return H5Iget_name(static_cast<const Derivate&>(*this).getId(), buffer, length); + return detail::h5i_get_name(static_cast<const Derivate&>(*this).getId(), buffer, length); }); } diff --git a/packages/HighFive/include/highfive/bits/H5PropertyList_misc.hpp b/packages/HighFive/include/highfive/bits/H5PropertyList_misc.hpp index cef301e53a826215ea58587e2ac4538171c85c9c..1fa2101f222d901cb88e003612c97b64b2eafc1a 100644 --- a/packages/HighFive/include/highfive/bits/H5PropertyList_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5PropertyList_misc.hpp @@ -8,7 +8,7 @@ */ #pragma once -#include <H5Ppublic.h> +#include "h5p_wrapper.hpp" namespace HighFive { @@ -64,9 +64,7 @@ inline void PropertyList<T>::_initializeIfNeeded() { if (_hid != H5P_DEFAULT) { return; } - if ((_hid = H5Pcreate(convert_plist_type(T))) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Unable to create property list"); - } + _hid = detail::h5p_create(convert_plist_type(T)); } template <PropertyType T> @@ -95,15 +93,11 @@ inline FileSpaceStrategy::FileSpaceStrategy(H5F_fspace_strategy_t strategy, , _threshold(threshold) {} inline FileSpaceStrategy::FileSpaceStrategy(const FileCreateProps& fcpl) { - if (H5Pget_file_space_strategy(fcpl.getId(), &_strategy, &_persist, &_threshold) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Unable to get file space strategy"); - } + detail::h5p_get_file_space_strategy(fcpl.getId(), &_strategy, &_persist, &_threshold); } inline void FileSpaceStrategy::apply(const hid_t list) const { - if (H5Pset_file_space_strategy(list, _strategy, _persist, _threshold) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting file space strategy."); - } + detail::h5p_set_file_space_strategy(list, _strategy, _persist, _threshold); } inline H5F_fspace_strategy_t FileSpaceStrategy::getStrategy() const { @@ -122,15 +116,11 @@ inline FileSpacePageSize::FileSpacePageSize(hsize_t page_size) : _page_size(page_size) {} inline void FileSpacePageSize::apply(const hid_t list) const { - if (H5Pset_file_space_page_size(list, _page_size) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting file space page size."); - } + detail::h5p_set_file_space_page_size(list, _page_size); } inline 
FileSpacePageSize::FileSpacePageSize(const FileCreateProps& fcpl) { - if (H5Pget_file_space_page_size(fcpl.getId(), &_page_size) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Unable to get file space page size"); - } + detail::h5p_get_file_space_page_size(fcpl.getId(), &_page_size); } inline hsize_t FileSpacePageSize::getPageSize() const { @@ -146,15 +136,11 @@ inline PageBufferSize::PageBufferSize(size_t page_buffer_size, , _min_raw(min_raw_percent) {} inline PageBufferSize::PageBufferSize(const FileAccessProps& plist) { - if (H5Pget_page_buffer_size(plist.getId(), &_page_buffer_size, &_min_meta, &_min_raw) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting page buffer size."); - } + detail::h5p_get_page_buffer_size(plist.getId(), &_page_buffer_size, &_min_meta, &_min_raw); } inline void PageBufferSize::apply(const hid_t list) const { - if (H5Pset_page_buffer_size(list, _page_buffer_size, _min_meta, _min_raw) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting page buffer size."); - } + detail::h5p_set_page_buffer_size(list, _page_buffer_size, _min_meta, _min_raw); } inline size_t PageBufferSize::getPageBufferSize() const { @@ -178,11 +164,10 @@ inline MPIOFileAccess::MPIOFileAccess(MPI_Comm comm, MPI_Info info) , _info(info) {} inline void MPIOFileAccess::apply(const hid_t list) const { - if (H5Pset_fapl_mpio(list, _comm, _info) < 0) { - HDF5ErrMapper::ToException<FileException>("Unable to set-up MPIO Driver configuration"); - } + detail::h5p_set_fapl_mpio(list, _comm, _info); } +#if H5_VERSION_GE(1, 10, 0) inline void MPIOCollectiveMetadata::apply(const hid_t plist) const { auto read = MPIOCollectiveMetadataRead{collective_read_}; auto write = MPIOCollectiveMetadataWrite{collective_write_}; @@ -210,9 +195,7 @@ inline bool MPIOCollectiveMetadata::isCollectiveWrite() const { inline void MPIOCollectiveMetadataRead::apply(const hid_t plist) const { - if (H5Pset_all_coll_metadata_ops(plist, collective_) < 0) { - HDF5ErrMapper::ToException<FileException>("Unable to request collective metadata reads"); - } + detail::h5p_set_all_coll_metadata_ops(plist, collective_); } inline bool MPIOCollectiveMetadataRead::isCollective() const { @@ -220,18 +203,14 @@ inline bool MPIOCollectiveMetadataRead::isCollective() const { } inline MPIOCollectiveMetadataRead::MPIOCollectiveMetadataRead(const FileAccessProps& plist) { - if (H5Pget_all_coll_metadata_ops(plist.getId(), &collective_) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error loading MPI metadata read."); - } + detail::h5p_get_all_coll_metadata_ops(plist.getId(), &collective_); } inline MPIOCollectiveMetadataRead::MPIOCollectiveMetadataRead(bool collective) : collective_(collective) {} inline void MPIOCollectiveMetadataWrite::apply(const hid_t plist) const { - if (H5Pset_coll_metadata_write(plist, collective_) < 0) { - HDF5ErrMapper::ToException<FileException>("Unable to request collective metadata writes"); - } + detail::h5p_set_coll_metadata_write(plist, collective_); } inline bool MPIOCollectiveMetadataWrite::isCollective() const { @@ -239,24 +218,21 @@ inline bool MPIOCollectiveMetadataWrite::isCollective() const { } inline MPIOCollectiveMetadataWrite::MPIOCollectiveMetadataWrite(const FileAccessProps& plist) { - if (H5Pget_coll_metadata_write(plist.getId(), &collective_) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error loading MPI metadata write."); - } + detail::h5p_get_coll_metadata_write(plist.getId(), &collective_); } inline 
MPIOCollectiveMetadataWrite::MPIOCollectiveMetadataWrite(bool collective) : collective_(collective) {} #endif +#endif inline FileVersionBounds::FileVersionBounds(H5F_libver_t low, H5F_libver_t high) : _low(low) , _high(high) {} inline FileVersionBounds::FileVersionBounds(const FileAccessProps& fapl) { - if (H5Pget_libver_bounds(fapl.getId(), &_low, &_high) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Unable to access file version bounds"); - } + detail::h5p_get_libver_bounds(fapl.getId(), &_low, &_high); } inline std::pair<H5F_libver_t, H5F_libver_t> FileVersionBounds::getVersion() const { @@ -264,24 +240,18 @@ inline std::pair<H5F_libver_t, H5F_libver_t> FileVersionBounds::getVersion() con } inline void FileVersionBounds::apply(const hid_t list) const { - if (H5Pset_libver_bounds(list, _low, _high) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting file version bounds"); - } + detail::h5p_set_libver_bounds(list, _low, _high); } inline MetadataBlockSize::MetadataBlockSize(hsize_t size) : _size(size) {} inline MetadataBlockSize::MetadataBlockSize(const FileAccessProps& fapl) { - if (H5Pget_meta_block_size(fapl.getId(), &_size) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Unable to access file metadata block size"); - } + detail::h5p_get_meta_block_size(fapl.getId(), &_size); } inline void MetadataBlockSize::apply(const hid_t list) const { - if (H5Pset_meta_block_size(list, _size) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting metadata block size"); - } + detail::h5p_set_meta_block_size(list, _size); } inline hsize_t MetadataBlockSize::getSize() const { @@ -289,9 +259,7 @@ inline hsize_t MetadataBlockSize::getSize() const { } inline void EstimatedLinkInfo::apply(const hid_t hid) const { - if (H5Pset_est_link_info(hid, _entries, _length) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting estimated link info"); - } + detail::h5p_set_est_link_info(hid, _entries, _length); } inline EstimatedLinkInfo::EstimatedLinkInfo(unsigned entries, unsigned length) @@ -299,9 +267,7 @@ inline EstimatedLinkInfo::EstimatedLinkInfo(unsigned entries, unsigned length) , _length(length) {} inline EstimatedLinkInfo::EstimatedLinkInfo(const GroupCreateProps& gcpl) { - if (H5Pget_est_link_info(gcpl.getId(), &_entries, &_length) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Unable to access group link size property"); - } + detail::h5p_get_est_link_info(gcpl.getId(), &_entries, &_length); } inline unsigned EstimatedLinkInfo::getEntries() const { @@ -313,9 +279,7 @@ inline unsigned EstimatedLinkInfo::getNameLength() const { } inline void Chunking::apply(const hid_t hid) const { - if (H5Pset_chunk(hid, static_cast<int>(_dims.size()), _dims.data()) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting chunk property"); - } + detail::h5p_set_chunk(hid, static_cast<int>(_dims.size()), _dims.data()); } inline Chunking::Chunking(const std::vector<hsize_t>& dims) @@ -326,10 +290,8 @@ inline Chunking::Chunking(const std::initializer_list<hsize_t>& items) inline Chunking::Chunking(DataSetCreateProps& plist, size_t max_dims) : _dims(max_dims + 1) { - auto n_loaded = H5Pget_chunk(plist.getId(), static_cast<int>(_dims.size()), _dims.data()); - if (n_loaded < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error getting chunk size"); - } + auto n_loaded = + detail::h5p_get_chunk(plist.getId(), static_cast<int>(_dims.size()), _dims.data()); if (n_loaded >= static_cast<int>(_dims.size())) { *this = Chunking(plist, 8 * 
max_dims); @@ -347,22 +309,22 @@ inline Chunking::Chunking(hsize_t item, Args... args) : Chunking(std::vector<hsize_t>{item, static_cast<hsize_t>(args)...}) {} inline void Deflate::apply(const hid_t hid) const { - if (!H5Zfilter_avail(H5Z_FILTER_DEFLATE) || H5Pset_deflate(hid, _level) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting deflate property"); + if (detail::h5z_filter_avail(H5Z_FILTER_DEFLATE) == 0) { + HDF5ErrMapper::ToException<PropertyException>("Deflate filter unavailable."); } + + detail::h5p_set_deflate(hid, _level); } inline Deflate::Deflate(unsigned int level) : _level(level) {} inline void Szip::apply(const hid_t hid) const { - if (!H5Zfilter_avail(H5Z_FILTER_SZIP)) { - HDF5ErrMapper::ToException<PropertyException>("Error setting szip property"); + if (detail::h5z_filter_avail(H5Z_FILTER_SZIP) == 0) { + HDF5ErrMapper::ToException<PropertyException>("SZIP filter unavailable."); } - if (H5Pset_szip(hid, _options_mask, _pixels_per_block) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting szip property"); - } + detail::h5p_set_szip(hid, _options_mask, _pixels_per_block); } inline Szip::Szip(unsigned int options_mask, unsigned int pixels_per_block) @@ -378,28 +340,22 @@ inline unsigned Szip::getPixelsPerBlock() const { } inline void Shuffle::apply(const hid_t hid) const { - if (!H5Zfilter_avail(H5Z_FILTER_SHUFFLE)) { - HDF5ErrMapper::ToException<PropertyException>("Error setting shuffle property"); + if (detail::h5z_filter_avail(H5Z_FILTER_SHUFFLE) == 0) { + HDF5ErrMapper::ToException<PropertyException>("Shuffle filter unavailable."); } - if (H5Pset_shuffle(hid) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting shuffle property"); - } + detail::h5p_set_shuffle(hid); } inline AllocationTime::AllocationTime(H5D_alloc_time_t alloc_time) : _alloc_time(alloc_time) {} inline AllocationTime::AllocationTime(const DataSetCreateProps& dcpl) { - if (H5Pget_alloc_time(dcpl.getId(), &_alloc_time) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error getting allocation time"); - } + detail::h5p_get_alloc_time(dcpl.getId(), &_alloc_time); } inline void AllocationTime::apply(hid_t dcpl) const { - if (H5Pset_alloc_time(dcpl, _alloc_time) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting allocation time"); - } + detail::h5p_set_alloc_time(dcpl, _alloc_time); } inline H5D_alloc_time_t AllocationTime::getAllocationTime() { @@ -407,15 +363,11 @@ inline H5D_alloc_time_t AllocationTime::getAllocationTime() { } inline Caching::Caching(const DataSetCreateProps& dcpl) { - if (H5Pget_chunk_cache(dcpl.getId(), &_numSlots, &_cacheSize, &_w0) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error getting dataset cache parameters"); - } + detail::h5p_get_chunk_cache(dcpl.getId(), &_numSlots, &_cacheSize, &_w0); } inline void Caching::apply(const hid_t hid) const { - if (H5Pset_chunk_cache(hid, _numSlots, _cacheSize, _w0) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting dataset cache parameters"); - } + detail::h5p_set_chunk_cache(hid, _numSlots, _cacheSize, _w0); } inline Caching::Caching(const size_t numSlots, const size_t cacheSize, const double w0) @@ -444,10 +396,7 @@ inline CreateIntermediateGroup::CreateIntermediateGroup(const ObjectCreateProps& inline void CreateIntermediateGroup::apply(const hid_t hid) const { - if (H5Pset_create_intermediate_group(hid, _create ? 
1 : 0) < 0) { - HDF5ErrMapper::ToException<PropertyException>( - "Error setting property for create intermediate groups"); - } + detail::h5p_set_create_intermediate_group(hid, _create ? 1 : 0); } inline CreateIntermediateGroup::CreateIntermediateGroup(const LinkCreateProps& lcpl) { @@ -456,12 +405,7 @@ inline CreateIntermediateGroup::CreateIntermediateGroup(const LinkCreateProps& l inline void CreateIntermediateGroup::fromPropertyList(hid_t hid) { unsigned c_bool = 0; - if (H5Pget_create_intermediate_group(hid, &c_bool) < 0) { - HDF5ErrMapper::ToException<PropertyException>( - "Error getting property for create intermediate groups"); - } - - _create = bool(c_bool); + _create = bool(detail::h5p_get_create_intermediate_group(hid, &c_bool)); } inline bool CreateIntermediateGroup::isSet() const { @@ -473,17 +417,13 @@ inline UseCollectiveIO::UseCollectiveIO(bool enable) : _enable(enable) {} inline void UseCollectiveIO::apply(const hid_t hid) const { - if (H5Pset_dxpl_mpio(hid, _enable ? H5FD_MPIO_COLLECTIVE : H5FD_MPIO_INDEPENDENT) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting H5Pset_dxpl_mpio."); - } + detail::h5p_set_dxpl_mpio(hid, _enable ? H5FD_MPIO_COLLECTIVE : H5FD_MPIO_INDEPENDENT); } inline UseCollectiveIO::UseCollectiveIO(const DataTransferProps& dxpl) { H5FD_mpio_xfer_t collective; - if (H5Pget_dxpl_mpio(dxpl.getId(), &collective) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error getting H5Pset_dxpl_mpio."); - } + detail::h5p_get_dxpl_mpio(dxpl.getId(), &collective); if (collective != H5FD_MPIO_COLLECTIVE && collective != H5FD_MPIO_INDEPENDENT) { throw std::logic_error("H5Pget_dxpl_mpio returned something strange."); @@ -497,9 +437,7 @@ inline bool UseCollectiveIO::isCollective() const { } inline MpioNoCollectiveCause::MpioNoCollectiveCause(const DataTransferProps& dxpl) { - if (H5Pget_mpio_no_collective_cause(dxpl.getId(), &_local_cause, &_global_cause) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Failed to check mpio_no_collective_cause."); - } + detail::h5p_get_mpio_no_collective_cause(dxpl.getId(), &_local_cause, &_global_cause); } inline bool MpioNoCollectiveCause::wasCollective() const { @@ -532,16 +470,11 @@ inline unsigned LinkCreationOrder::getFlags() const { } inline void LinkCreationOrder::apply(const hid_t hid) const { - if (H5Pset_link_creation_order(hid, _flags) < 0) { - HDF5ErrMapper::ToException<PropertyException>("Error setting LinkCreationOrder."); - } + detail::h5p_set_link_creation_order(hid, _flags); } inline void LinkCreationOrder::fromPropertyList(hid_t hid) { - if (H5Pget_link_creation_order(hid, &_flags) < 0) { - HDF5ErrMapper::ToException<PropertyException>( - "Error getting property for link creation order"); - } + detail::h5p_get_link_creation_order(hid, &_flags); } inline AttributePhaseChange::AttributePhaseChange(unsigned max_compact, unsigned min_dense) @@ -549,10 +482,7 @@ inline AttributePhaseChange::AttributePhaseChange(unsigned max_compact, unsigned , _min_dense(min_dense) {} inline AttributePhaseChange::AttributePhaseChange(const GroupCreateProps& gcpl) { - if (H5Pget_attr_phase_change(gcpl.getId(), &_max_compact, &_min_dense) < 0) { - HDF5ErrMapper::ToException<PropertyException>( - "Error getting property for attribute phase change"); - } + detail::h5p_get_attr_phase_change(gcpl.getId(), &_max_compact, &_min_dense); } inline unsigned AttributePhaseChange::max_compact() const { @@ -564,10 +494,7 @@ inline unsigned AttributePhaseChange::min_dense() const { } inline void 
AttributePhaseChange::apply(hid_t hid) const { - if (H5Pset_attr_phase_change(hid, _max_compact, _min_dense) < 0) { - HDF5ErrMapper::ToException<PropertyException>( - "Error getting property for attribute phase change"); - } + detail::h5p_set_attr_phase_change(hid, _max_compact, _min_dense); } diff --git a/packages/HighFive/include/highfive/bits/H5ReadWrite_misc.hpp b/packages/HighFive/include/highfive/bits/H5ReadWrite_misc.hpp index c8e73617400fa9cec3b727d84fb49f1ac8e74c11..4f6f1578851cc2d72b2584e3737559b8be83aa4b 100644 --- a/packages/HighFive/include/highfive/bits/H5ReadWrite_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5ReadWrite_misc.hpp @@ -76,15 +76,21 @@ inline void enforce_ascii_hack(const DataType& dst, const DataType& src) { // TEMP. CHANGE: Ensure that the character set is properly configured to prevent // converter issues on HDF5 <=v1.12.0 when loading ASCII strings first. // See https://github.com/HDFGroup/hdf5/issues/544 for further information. - if (H5Tget_cset(src.getId()) == H5T_CSET_ASCII) { - H5Tset_cset(dst.getId(), H5T_CSET_ASCII); + + bool is_dst_string = detail::h5t_get_class(dst.getId()) == H5T_STRING; + bool is_src_string = detail::h5t_get_class(src.getId()) == H5T_STRING; + + if (is_dst_string && is_src_string) { + if (detail::h5t_get_cset(src.getId()) == H5T_CSET_ASCII) { + detail::h5t_set_cset(dst.getId(), H5T_CSET_ASCII); + } } } template <> struct string_type_checker<void> { inline static DataType getDataType(const DataType& element_type, const DataType& dtype) { - if (H5Tget_class(element_type.getId()) == H5T_STRING) { + if (detail::h5t_get_class(element_type.getId()) == H5T_STRING) { enforce_ascii_hack(element_type, dtype); } return element_type; diff --git a/packages/HighFive/include/highfive/bits/H5Reference_misc.hpp b/packages/HighFive/include/highfive/bits/H5Reference_misc.hpp index 7c8db36fb6b2482689cd0a73057858427b054fdf..c73deee2a084d4a70883e72f6b2f7fd71070ea34 100644 --- a/packages/HighFive/include/highfive/bits/H5Reference_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5Reference_misc.hpp @@ -16,19 +16,19 @@ #include "../H5Object.hpp" +#include "h5r_wrapper.hpp" + namespace HighFive { inline Reference::Reference(const Object& location, const Object& object) : parent_id(location.getId()) { - obj_name = details::get_name( - [&](char* buffer, size_t length) { return H5Iget_name(object.getId(), buffer, length); }); + obj_name = details::get_name([&](char* buffer, size_t length) { + return detail::h5i_get_name(object.getId(), buffer, length); + }); } inline void Reference::create_ref(hobj_ref_t* refptr) const { - if (H5Rcreate(refptr, parent_id, obj_name.c_str(), H5R_OBJECT, -1) < 0) { - HDF5ErrMapper::ToException<ReferenceException>( - std::string("Unable to create the reference for \"") + obj_name + "\":"); - } + detail::h5r_create(refptr, parent_id, obj_name.c_str(), H5R_OBJECT, -1); } inline ObjectType Reference::getType(const Object& location) const { @@ -51,15 +51,10 @@ inline T Reference::dereference(const Object& location) const { } inline Object Reference::get_ref(const Object& location) const { - hid_t res; #if (H5Rdereference_vers == 2) - if ((res = H5Rdereference(location.getId(), H5P_DEFAULT, H5R_OBJECT, &href)) < 0) { - HDF5ErrMapper::ToException<ReferenceException>("Unable to dereference."); - } + hid_t res = detail::h5r_dereference(location.getId(), H5P_DEFAULT, H5R_OBJECT, &href); #else - if ((res = H5Rdereference(location.getId(), H5R_OBJECT, &href)) < 0) { - HDF5ErrMapper::ToException<ReferenceException>("Unable 
to dereference."); - } + hid_t res = detail::h5r_dereference(location.getId(), H5R_OBJECT, &href); #endif return Object(res); } diff --git a/packages/HighFive/include/highfive/bits/H5Slice_traits.hpp b/packages/HighFive/include/highfive/bits/H5Slice_traits.hpp index 52c52713f02e73a7254720a0cb9b873f8eab681d..c753026c3d20a68cb8c3442c4e32ba69951e053f 100644 --- a/packages/HighFive/include/highfive/bits/H5Slice_traits.hpp +++ b/packages/HighFive/include/highfive/bits/H5Slice_traits.hpp @@ -15,6 +15,7 @@ #include "H5Utils.hpp" #include "../H5PropertyList.hpp" +#include "h5s_wrapper.hpp" namespace HighFive { @@ -174,19 +175,14 @@ class HyperSlab { auto space = space_.clone(); for (const auto& sel: selects) { if (sel.op == Op::None) { - H5Sselect_none(space.getId()); + detail::h5s_select_none(space.getId()); } else { - auto error_code = - H5Sselect_hyperslab(space.getId(), - convert(sel.op), - sel.offset.empty() ? nullptr : sel.offset.data(), - sel.stride.empty() ? nullptr : sel.stride.data(), - sel.count.empty() ? nullptr : sel.count.data(), - sel.block.empty() ? nullptr : sel.block.data()); - - if (error_code < 0) { - HDF5ErrMapper::ToException<DataSpaceException>("Unable to select hyperslab"); - } + detail::h5s_select_hyperslab(space.getId(), + convert(sel.op), + sel.offset.empty() ? nullptr : sel.offset.data(), + sel.stride.empty() ? nullptr : sel.stride.data(), + sel.count.empty() ? nullptr : sel.count.data(), + sel.block.empty() ? nullptr : sel.block.data()); } } return space; diff --git a/packages/HighFive/include/highfive/bits/H5Slice_traits_misc.hpp b/packages/HighFive/include/highfive/bits/H5Slice_traits_misc.hpp index 7b07c9abf9406b214e068aadd7c5a34011eb3d60..dd7e45b1d4682c3da46274695fb477bfceca4bfd 100644 --- a/packages/HighFive/include/highfive/bits/H5Slice_traits_misc.hpp +++ b/packages/HighFive/include/highfive/bits/H5Slice_traits_misc.hpp @@ -15,8 +15,8 @@ #include <sstream> #include <string> -#include <H5Dpublic.h> -#include <H5Ppublic.h> +#include "h5d_wrapper.hpp" +#include "h5s_wrapper.hpp" #include "H5ReadWrite_misc.hpp" #include "H5Converter_misc.hpp" @@ -84,7 +84,7 @@ inline Selection SliceTraits<Derivate>::select(const HyperSlab& hyper_slab) cons auto filespace = slice.getSpace(); filespace = hyper_slab.apply(filespace); - auto n_elements = H5Sget_select_npoints(filespace.getId()); + auto n_elements = detail::h5s_get_select_npoints(filespace.getId()); auto memspace = DataSpace(std::array<size_t, 1>{size_t(n_elements)}); return detail::make_selection(memspace, filespace, details::get_dataset(slice)); @@ -149,9 +149,7 @@ inline Selection SliceTraits<Derivate>::select(const ElementSet& elements) const data = raw_elements.data(); } - if (H5Sselect_elements(space.getId(), H5S_SELECT_SET, num_elements, data) < 0) { - HDF5ErrMapper::ToException<DataSpaceException>("Unable to select elements"); - } + detail::h5s_select_elements(space.getId(), H5S_SELECT_SET, num_elements, data); return detail::make_selection(DataSpace(num_elements), space, details::get_dataset(slice)); } @@ -187,14 +185,6 @@ inline void SliceTraits<Derivate>::read(T& array, const DataTransferProps& xfer_ } auto dims = mem_space.getDimensions(); - if (mem_space.getElementCount() == 0) { - auto effective_dims = details::squeezeDimensions(dims, - details::inspector<T>::recursive_ndim); - - details::inspector<T>::prepare(array, effective_dims); - return; - } - auto r = details::data_converter::get_reader<T>(dims, array, file_datatype); read(r.getPointer(), buffer_info.data_type, xfer_props); // re-arrange results @@ 
-205,10 +195,14 @@ inline void SliceTraits<Derivate>::read(T& array, const DataTransferProps& xfer_ if (c == DataTypeClass::VarLen || t.isVariableStr()) { #if H5_VERSION_GE(1, 12, 0) // This one have been created in 1.12.0 - (void) H5Treclaim(t.getId(), mem_space.getId(), xfer_props.getId(), r.getPointer()); + (void) + detail::h5t_reclaim(t.getId(), mem_space.getId(), xfer_props.getId(), r.getPointer()); #else // This one is deprecated since 1.12.0 - (void) H5Dvlen_reclaim(t.getId(), mem_space.getId(), xfer_props.getId(), r.getPointer()); + (void) detail::h5d_vlen_reclaim(t.getId(), + mem_space.getId(), + xfer_props.getId(), + r.getPointer()); #endif } } @@ -224,14 +218,12 @@ inline void SliceTraits<Derivate>::read(T* array, const auto& slice = static_cast<const Derivate&>(*this); - if (H5Dread(details::get_dataset(slice).getId(), - mem_datatype.getId(), - details::get_memspace_id(slice), - slice.getSpace().getId(), - xfer_props.getId(), - static_cast<void*>(array)) < 0) { - HDF5ErrMapper::ToException<DataSetException>("Error during HDF5 Read."); - } + detail::h5d_read(details::get_dataset(slice).getId(), + mem_datatype.getId(), + details::get_memspace_id(slice), + slice.getSpace().getId(), + xfer_props.getId(), + static_cast<void*>(array)); } template <typename Derivate> @@ -250,10 +242,6 @@ inline void SliceTraits<Derivate>::write(const T& buffer, const DataTransferProp const auto& slice = static_cast<const Derivate&>(*this); const DataSpace& mem_space = slice.getMemSpace(); - if (mem_space.getElementCount() == 0) { - return; - } - auto file_datatype = slice.getDataType(); const details::BufferInfo<T> buffer_info( @@ -280,14 +268,12 @@ inline void SliceTraits<Derivate>::write_raw(const T* buffer, const DataTransferProps& xfer_props) { const auto& slice = static_cast<const Derivate&>(*this); - if (H5Dwrite(details::get_dataset(slice).getId(), - mem_datatype.getId(), - details::get_memspace_id(slice), - slice.getSpace().getId(), - xfer_props.getId(), - static_cast<const void*>(buffer)) < 0) { - HDF5ErrMapper::ToException<DataSetException>("Error during HDF5 Write: "); - } + detail::h5d_write(details::get_dataset(slice).getId(), + mem_datatype.getId(), + details::get_memspace_id(slice), + slice.getSpace().getId(), + xfer_props.getId(), + static_cast<const void*>(buffer)); } template <typename Derivate> diff --git a/packages/HighFive/include/highfive/bits/h5_wrapper.hpp b/packages/HighFive/include/highfive/bits/h5_wrapper.hpp new file mode 100644 index 0000000000000000000000000000000000000000..6f418e3f485efe2f96c7eef062372bd868423b5f --- /dev/null +++ b/packages/HighFive/include/highfive/bits/h5_wrapper.hpp @@ -0,0 +1,19 @@ +#pragma once +#include <H5public.h> + +namespace HighFive { +namespace detail { +inline void h5_free_memory(void* mem) { + if (H5free_memory(mem) < 0) { + throw DataTypeException("Could not free memory allocated by HDF5"); + } +} + +namespace nothrow { +inline herr_t h5_free_memory(void* mem) { + return H5free_memory(mem); +} +} // namespace nothrow + +} // namespace detail +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/h5a_wrapper.hpp b/packages/HighFive/include/highfive/bits/h5a_wrapper.hpp new file mode 100644 index 0000000000000000000000000000000000000000..76f4e56fcf07914a2129a2c78a3a07cc7a3619e3 --- /dev/null +++ b/packages/HighFive/include/highfive/bits/h5a_wrapper.hpp @@ -0,0 +1,131 @@ +#pragma once + +#include <H5Apublic.h> +#include <H5Ipublic.h> + +namespace HighFive { +namespace detail { + +inline hid_t h5a_create2(hid_t loc_id, 
+ char const* const attr_name, + hid_t type_id, + hid_t space_id, + hid_t acpl_id, + hid_t aapl_id) { + auto attr_id = H5Acreate2(loc_id, attr_name, type_id, space_id, acpl_id, aapl_id); + if (attr_id < 0) { + HDF5ErrMapper::ToException<AttributeException>( + std::string("Unable to create the attribute \"") + attr_name + "\":"); + } + + return attr_id; +} + +inline void h5a_delete(hid_t loc_id, char const* const attr_name) { + if (H5Adelete(loc_id, attr_name) < 0) { + HDF5ErrMapper::ToException<AttributeException>( + std::string("Unable to delete attribute \"") + attr_name + "\":"); + } +} + +inline hid_t h5a_open(hid_t loc_id, char const* const attr_name, hid_t aapl_id) { + const auto attr_id = H5Aopen(loc_id, attr_name, aapl_id); + if (attr_id < 0) { + HDF5ErrMapper::ToException<AttributeException>( + std::string("Unable to open the attribute \"") + attr_name + "\":"); + } + + return attr_id; +} + + +inline int h5a_get_num_attrs(hid_t loc_id) { + int res = H5Aget_num_attrs(loc_id); + if (res < 0) { + HDF5ErrMapper::ToException<AttributeException>( + std::string("Unable to count attributes in existing group or file")); + } + + return res; +} + + +inline void h5a_iterate2(hid_t loc_id, + H5_index_t idx_type, + H5_iter_order_t order, + hsize_t* idx, + H5A_operator2_t op, + void* op_data) { + if (H5Aiterate2(loc_id, idx_type, order, idx, op, op_data) < 0) { + HDF5ErrMapper::ToException<AttributeException>(std::string("Failed H5Aiterate2.")); + } +} + +inline int h5a_exists(hid_t obj_id, char const* const attr_name) { + int res = H5Aexists(obj_id, attr_name); + if (res < 0) { + HDF5ErrMapper::ToException<AttributeException>( + std::string("Unable to check for attribute in group")); + } + + return res; +} + +inline ssize_t h5a_get_name(hid_t attr_id, size_t buf_size, char* buf) { + ssize_t name_length = H5Aget_name(attr_id, buf_size, buf); + if (name_length < 0) { + HDF5ErrMapper::ToException<AttributeException>( + std::string("Unable to get name of attribute")); + } + + return name_length; +} + + +inline hid_t h5a_get_space(hid_t attr_id) { + hid_t attr = H5Aget_space(attr_id); + if (attr < 0) { + HDF5ErrMapper::ToException<AttributeException>( + std::string("Unable to get dataspace of attribute")); + } + + return attr; +} + +inline hsize_t h5a_get_storage_size(hid_t attr_id) { + // Docs: + // Returns the amount of storage size allocated for the attribute; + // otherwise returns 0 (zero). 
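+    //
+    // A zero return therefore cannot be distinguished from an error, which is
+    // why this wrapper deliberately does not throw.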
+ return H5Aget_storage_size(attr_id); +} + +inline hid_t h5a_get_type(hid_t attr_id) { + hid_t type_id = H5Aget_type(attr_id); + if (type_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException<AttributeException>( + std::string("Unable to get datatype of attribute")); + } + + return type_id; +} + +inline herr_t h5a_read(hid_t attr_id, hid_t type_id, void* buf) { + herr_t err = H5Aread(attr_id, type_id, buf); + if (err < 0) { + HDF5ErrMapper::ToException<AttributeException>(std::string("Unable to read attribute")); + } + + return err; +} + +inline herr_t h5a_write(hid_t attr_id, hid_t type_id, void const* buf) { + herr_t err = H5Awrite(attr_id, type_id, buf); + if (err < 0) { + HDF5ErrMapper::ToException<AttributeException>(std::string("Unable to write attribute")); + } + + return err; +} + +} // namespace detail +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/h5d_wrapper.hpp b/packages/HighFive/include/highfive/bits/h5d_wrapper.hpp new file mode 100644 index 0000000000000000000000000000000000000000..6de7fa5a3b64140b32c0d1dc6614ad2a945ef613 --- /dev/null +++ b/packages/HighFive/include/highfive/bits/h5d_wrapper.hpp @@ -0,0 +1,125 @@ +#pragma once + +#include <H5Dpublic.h> +#include <H5Ipublic.h> + +namespace HighFive { +namespace detail { + + +#if !H5_VERSION_GE(1, 12, 0) +inline herr_t h5d_vlen_reclaim(hid_t type_id, hid_t space_id, hid_t dxpl_id, void* buf) { + herr_t err = H5Dvlen_reclaim(type_id, space_id, dxpl_id, buf); + if (err < 0) { + throw DataSetException("Failed to reclaim HDF5 internal memory"); + } + + return err; +} +#endif + +inline hsize_t h5d_get_storage_size(hid_t dset_id) { + // Docs: + // H5Dget_storage_size() does not differentiate between 0 (zero), the + // value returned for the storage size of a dataset with no stored values, + // and 0 (zero), the value returned to indicate an error. 
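+    //
+    // Consequently, callers have to treat 0 as either "no stored values" or
+    // "error"; no exception is raised here.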
+ return H5Dget_storage_size(dset_id); +} + +inline hid_t h5d_get_space(hid_t dset_id) { + hid_t dset = H5Dget_space(dset_id); + if (dset == H5I_INVALID_HID) { + HDF5ErrMapper::ToException<DataSetException>( + std::string("Unable to get dataspace of the dataset")); + } + + return dset; +} + +inline hid_t h5d_get_type(hid_t dset_id) { + hid_t type_id = H5Dget_type(dset_id); + if (type_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException<DataSetException>( + std::string("Unable to get datatype of the dataset")); + } + + return type_id; +} + +inline herr_t h5d_read(hid_t dset_id, + hid_t mem_type_id, + hid_t mem_space_id, + hid_t file_space_id, + hid_t dxpl_id, + void* buf) { + herr_t err = H5Dread(dset_id, mem_type_id, mem_space_id, file_space_id, dxpl_id, buf); + if (err < 0) { + HDF5ErrMapper::ToException<DataSetException>(std::string("Unable to read the dataset")); + } + + return err; +} + +inline herr_t h5d_write(hid_t dset_id, + hid_t mem_type_id, + hid_t mem_space_id, + hid_t file_space_id, + hid_t dxpl_id, + const void* buf) { + herr_t err = H5Dwrite(dset_id, mem_type_id, mem_space_id, file_space_id, dxpl_id, buf); + if (err < 0) { + HDF5ErrMapper::ToException<DataSetException>(std::string("Unable to write the dataset")); + } + + return err; +} + +inline haddr_t h5d_get_offset(hid_t dset_id) { + uint64_t addr = H5Dget_offset(dset_id); + if (addr == HADDR_UNDEF) { + HDF5ErrMapper::ToException<DataSetException>("Cannot get offset of DataSet."); + } + return addr; +} + + +inline herr_t h5d_set_extent(hid_t dset_id, const hsize_t size[]) { + herr_t err = H5Dset_extent(dset_id, size); + if (H5Dset_extent(dset_id, size) < 0) { + HDF5ErrMapper::ToException<DataSetException>("Could not resize dataset."); + } + + return err; +} + +inline hid_t h5d_create2(hid_t loc_id, + const char* name, + hid_t type_id, + hid_t space_id, + hid_t lcpl_id, + hid_t dcpl_id, + hid_t dapl_id) { + hid_t dataset_id = H5Dcreate2(loc_id, name, type_id, space_id, lcpl_id, dcpl_id, dapl_id); + + if (dataset_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException<DataSetException>( + std::string("Failed to create the dataset \"") + name + "\":"); + } + + return dataset_id; +} + +inline hid_t h5d_open2(hid_t loc_id, const char* name, hid_t dapl_id) { + hid_t dataset_id = H5Dopen2(loc_id, name, dapl_id); + + if (dataset_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException<DataSetException>(std::string("Unable to open the dataset \"") + + name + "\":"); + } + + return dataset_id; +} + + +} // namespace detail +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/h5e_wrapper.hpp b/packages/HighFive/include/highfive/bits/h5e_wrapper.hpp new file mode 100644 index 0000000000000000000000000000000000000000..168b14b7c3c3a01b80fcd1c97834bf2e3a7f5d90 --- /dev/null +++ b/packages/HighFive/include/highfive/bits/h5e_wrapper.hpp @@ -0,0 +1,39 @@ +#pragma once + +#include <H5Epublic.h> +namespace HighFive { +namespace detail { +namespace nothrow { + + +inline void h5e_get_auto2(hid_t estack_id, H5E_auto2_t* func, void** client_data) { + H5Eget_auto2(estack_id, func, client_data); +} + +inline void h5e_set_auto2(hid_t estack_id, H5E_auto2_t func, void* client_data) { + H5Eset_auto2(estack_id, func, client_data); +} + +inline char* h5e_get_major(H5E_major_t maj) { + return H5Eget_major(maj); +} + +inline char* h5e_get_minor(H5E_minor_t min) { + return H5Eget_minor(min); +} + +inline herr_t h5e_walk2(hid_t err_stack, + H5E_direction_t direction, + H5E_walk2_t func, + void* client_data) { + return 
H5Ewalk2(err_stack, direction, func, client_data); +} + +inline herr_t h5e_clear2(hid_t err_stack) { + return H5Eclear2(err_stack); +} + + +} // namespace nothrow +} // namespace detail +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/h5f_wrapper.hpp b/packages/HighFive/include/highfive/bits/h5f_wrapper.hpp new file mode 100644 index 0000000000000000000000000000000000000000..600534b33e7ef0c6e15e0ea8a579fe068702096b --- /dev/null +++ b/packages/HighFive/include/highfive/bits/h5f_wrapper.hpp @@ -0,0 +1,58 @@ +#pragma once + +#include <H5Fpublic.h> +namespace HighFive { +namespace detail { +namespace nothrow { +inline hid_t h5f_open(const char* filename, unsigned flags, hid_t fapl_id) { + return H5Fopen(filename, flags, fapl_id); +} +} // namespace nothrow + +inline hid_t h5f_create(const char* filename, unsigned flags, hid_t fcpl_id, hid_t fapl_id) { + hid_t file_id = H5Fcreate(filename, flags, fcpl_id, fapl_id); + + if (file_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException<FileException>(std::string("Failed to create file ") + filename); + } + return file_id; +} + +inline ssize_t h5f_get_name(hid_t obj_id, char* name, size_t size) { + ssize_t nread = H5Fget_name(obj_id, name, size); + if (nread < 0) { + HDF5ErrMapper::ToException<FileException>(std::string("Failed to get file from id")); + } + + return nread; +} + +inline herr_t h5f_flush(hid_t object_id, H5F_scope_t scope) { + herr_t err = H5Fflush(object_id, scope); + if (err < 0) { + HDF5ErrMapper::ToException<FileException>(std::string("Failed to flush file")); + } + + return err; +} + +inline herr_t h5f_get_filesize(hid_t file_id, hsize_t* size) { + herr_t err = H5Fget_filesize(file_id, size); + if (err < 0) { + HDF5ErrMapper::ToException<FileException>(std::string("Unable to retrieve size of file")); + } + + return err; +} + +inline hssize_t h5f_get_freespace(hid_t file_id) { + hssize_t free_space = H5Fget_freespace(file_id); + if (free_space < 0) { + HDF5ErrMapper::ToException<FileException>( + std::string("Unable to retrieve unused space of file ")); + } + return free_space; +} + +} // namespace detail +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/h5g_wrapper.hpp b/packages/HighFive/include/highfive/bits/h5g_wrapper.hpp new file mode 100644 index 0000000000000000000000000000000000000000..eb77f998328236409281e47cc4237ce2c78426cd --- /dev/null +++ b/packages/HighFive/include/highfive/bits/h5g_wrapper.hpp @@ -0,0 +1,46 @@ +#pragma once + +#include <H5Dpublic.h> +#include <H5Ipublic.h> + +#include <highfive/H5Exception.hpp> + +namespace HighFive { +namespace detail { + +inline hid_t h5g_create2(hid_t loc_id, + const char* name, + hid_t lcpl_id, + hid_t gcpl_id, + hid_t gapl_id) { + hid_t group_id = H5Gcreate2(loc_id, name, lcpl_id, gcpl_id, gapl_id); + if (group_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException<GroupException>(std::string("Unable to create the group \"") + + name + "\":"); + } + + return group_id; +} + +inline hid_t h5g_open2(hid_t loc_id, const char* name, hid_t gapl_id) { + hid_t group_id = H5Gopen2(loc_id, name, gapl_id); + if (group_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException<GroupException>(std::string("Unable to open the group \"") + + name + "\":"); + } + return group_id; +} + +inline herr_t h5g_get_num_objs(hid_t loc_id, hsize_t* num_objs) { + herr_t err = H5Gget_num_objs(loc_id, num_objs); + if (err < 0) { + HDF5ErrMapper::ToException<GroupException>( + std::string("Unable to count objects in existing group or file")); + } + + 
return err; +} + + +} // namespace detail +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/h5i_wrapper.hpp b/packages/HighFive/include/highfive/bits/h5i_wrapper.hpp new file mode 100644 index 0000000000000000000000000000000000000000..c81290b7412a79706c288696da5ce42d8f6a9229 --- /dev/null +++ b/packages/HighFive/include/highfive/bits/h5i_wrapper.hpp @@ -0,0 +1,79 @@ +#pragma once + +#include <H5Ipublic.h> + +namespace HighFive { +namespace detail { +inline int h5i_inc_ref(hid_t id) { + auto count = H5Iinc_ref(id); + + if (count < 0) { + throw ObjectException("Failed to increase reference count of HID"); + } + + return count; +} + +namespace nothrow { + +inline int h5i_dec_ref(hid_t id) { + return H5Idec_ref(id); +} + +} // namespace nothrow + +inline int h5i_dec_ref(hid_t id) { + int count = H5Idec_ref(id); + if (count < 0) { + throw ObjectException("Failed to decrease reference count of HID"); + } + + return count; +} + +namespace nothrow { +inline htri_t h5i_is_valid(hid_t id) { + return H5Iis_valid(id); +} + +} // namespace nothrow + +inline htri_t h5i_is_valid(hid_t id) { + htri_t tri = H5Iis_valid(id); + if (tri < 0) { + throw ObjectException("Failed to check if HID is valid"); + } + + return tri; +} + +inline H5I_type_t h5i_get_type(hid_t id) { + H5I_type_t type = H5Iget_type(id); + if (type == H5I_BADID) { + HDF5ErrMapper::ToException<ObjectException>("Failed to get type of HID"); + } + + return type; +} + +template <class Exception> +inline hid_t h5i_get_file_id(hid_t id) { + hid_t file_id = H5Iget_file_id(id); + if (file_id < 0) { + HDF5ErrMapper::ToException<Exception>("Failed not obtain file HID of object"); + } + + return file_id; +} + +inline ssize_t h5i_get_name(hid_t id, char* name, size_t size) { + ssize_t n_chars = H5Iget_name(id, name, size); + if (n_chars < 0) { + HDF5ErrMapper::ToException<ObjectException>("Failed to get name of HID."); + } + + return n_chars; +} + +} // namespace detail +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/h5l_wrapper.hpp b/packages/HighFive/include/highfive/bits/h5l_wrapper.hpp new file mode 100644 index 0000000000000000000000000000000000000000..0009fdcfd3cadcd0ff427b65dce2fee0c4f8baa8 --- /dev/null +++ b/packages/HighFive/include/highfive/bits/h5l_wrapper.hpp @@ -0,0 +1,132 @@ +#pragma once + +#include <H5Lpublic.h> + +namespace HighFive { +namespace detail { + +inline herr_t h5l_create_external(const char* file_name, + const char* obj_name, + hid_t link_loc_id, + const char* link_name, + hid_t lcpl_id, + hid_t lapl_id) { + herr_t err = H5Lcreate_external(file_name, obj_name, link_loc_id, link_name, lcpl_id, lapl_id); + if (err < 0) { + HDF5ErrMapper::ToException<GroupException>(std::string("Unable to create external link: ")); + } + + return err; +} + +inline herr_t h5l_create_soft(const char* link_target, + hid_t link_loc_id, + const char* link_name, + hid_t lcpl_id, + hid_t lapl_id) { + herr_t err = H5Lcreate_soft(link_target, link_loc_id, link_name, lcpl_id, lapl_id); + if (err < 0) { + HDF5ErrMapper::ToException<GroupException>(std::string("Unable to create soft link: ")); + } + + return err; +} + +inline herr_t h5l_create_hard(hid_t cur_loc, + const char* cur_name, + hid_t dst_loc, + const char* dst_name, + hid_t lcpl_id, + hid_t lapl_id) { + herr_t err = H5Lcreate_hard(cur_loc, cur_name, dst_loc, dst_name, lcpl_id, lapl_id); + if (err < 0) { + HDF5ErrMapper::ToException<GroupException>(std::string("Unable to create hard link: ")); + } + + return err; +} + +inline herr_t 
h5l_get_info(hid_t loc_id, const char* name, H5L_info_t* linfo, hid_t lapl_id) { + herr_t err = H5Lget_info(loc_id, name, linfo, lapl_id); + if (err < 0) { + HDF5ErrMapper::ToException<GroupException>(std::string("Unable to obtain info for link ")); + } + + return err; +} + +inline herr_t h5l_delete(hid_t loc_id, const char* name, hid_t lapl_id) { + herr_t err = H5Ldelete(loc_id, name, lapl_id); + if (err < 0) { + HDF5ErrMapper::ToException<GroupException>(std::string("Invalid name for unlink() ")); + } + + return err; +} + +inline htri_t h5l_exists(hid_t loc_id, const char* name, hid_t lapl_id) { + htri_t tri = H5Lexists(loc_id, name, lapl_id); + if (tri < 0) { + HDF5ErrMapper::ToException<GroupException>("Invalid link for exist()"); + } + + return tri; +} + +namespace nothrow { + +inline htri_t h5l_exists(hid_t loc_id, const char* name, hid_t lapl_id) { + return H5Lexists(loc_id, name, lapl_id); +} + +} // namespace nothrow + +inline herr_t h5l_iterate(hid_t grp_id, + H5_index_t idx_type, + H5_iter_order_t order, + hsize_t* idx, + H5L_iterate_t op, + void* op_data) { + herr_t err = H5Literate(grp_id, idx_type, order, idx, op, op_data); + if (err < 0) { + HDF5ErrMapper::ToException<GroupException>(std::string("Unable to list objects in group")); + } + return err; +} + +inline herr_t h5l_move(hid_t src_loc, + const char* src_name, + hid_t dst_loc, + const char* dst_name, + hid_t lcpl_id, + hid_t lapl_id) { + herr_t err = H5Lmove(src_loc, src_name, dst_loc, dst_name, lcpl_id, lapl_id); + + if (err < 0) { + HDF5ErrMapper::ToException<GroupException>(std::string("Unable to move link to \"") + + dst_name + "\":"); + } + return err; +} + +inline ssize_t h5l_get_name_by_idx(hid_t loc_id, + const char* group_name, + H5_index_t idx_type, + H5_iter_order_t order, + hsize_t n, + char* name, + size_t size, + hid_t lapl_id) { + ssize_t n_chars = + H5Lget_name_by_idx(loc_id, group_name, idx_type, order, n, name, size, lapl_id); + + if (n_chars < 0) { + HDF5ErrMapper::ToException<GroupException>( + std::string("Unable to obtain link name from index.")); + } + + return n_chars; +} + +} // namespace detail +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/h5o_wrapper.hpp b/packages/HighFive/include/highfive/bits/h5o_wrapper.hpp new file mode 100644 index 0000000000000000000000000000000000000000..75b91bb6af950232f27ad64a2315d021a01a1526 --- /dev/null +++ b/packages/HighFive/include/highfive/bits/h5o_wrapper.hpp @@ -0,0 +1,19 @@ +#pragma once + +#include <H5Ipublic.h> +#include <H5Tpublic.h> + +namespace HighFive { +namespace detail { + +inline hid_t h5o_open(hid_t loc_id, const char* name, hid_t lapl_id) { + hid_t hid = H5Oopen(loc_id, name, lapl_id); + if (hid < 0) { + HDF5ErrMapper::ToException<GroupException>(std::string("Unable to open \"") + name + "\":"); + } + + return hid; +} + +} // namespace detail +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/h5p_wrapper.hpp b/packages/HighFive/include/highfive/bits/h5p_wrapper.hpp new file mode 100644 index 0000000000000000000000000000000000000000..fcfcbd456552749ae811a1e59da99dec776f4d66 --- /dev/null +++ b/packages/HighFive/include/highfive/bits/h5p_wrapper.hpp @@ -0,0 +1,376 @@ +#pragma once + +#include <H5Ipublic.h> +#include <H5Ppublic.h> + +namespace HighFive { +namespace detail { +inline hid_t h5p_create(hid_t cls_id) { + hid_t plist_id = H5Pcreate(cls_id); + if (plist_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException<PropertyException>("Failed to create property list"); + } + + return 
plist_id; +} + +#if H5_VERSION_GE(1, 10, 1) +inline herr_t h5p_set_file_space_strategy(hid_t plist_id, + H5F_fspace_strategy_t strategy, + hbool_t persist, + hsize_t threshold) { + herr_t err = H5Pset_file_space_strategy(plist_id, strategy, persist, threshold); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Unable to get file space strategy"); + } + + return err; +} + +inline herr_t h5p_get_file_space_strategy(hid_t plist_id, + H5F_fspace_strategy_t* strategy, + hbool_t* persist, + hsize_t* threshold) { + herr_t err = H5Pget_file_space_strategy(plist_id, strategy, persist, threshold); + if (err) { + HDF5ErrMapper::ToException<PropertyException>("Error setting file space strategy."); + } + + return err; +} + +inline herr_t h5p_set_file_space_page_size(hid_t plist_id, hsize_t fsp_size) { + herr_t err = H5Pset_file_space_page_size(plist_id, fsp_size); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error setting file space page size."); + } + + return err; +} + +inline herr_t h5p_get_file_space_page_size(hid_t plist_id, hsize_t* fsp_size) { + herr_t err = H5Pget_file_space_page_size(plist_id, fsp_size); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Unable to get file space page size"); + } + + return err; +} + +#ifndef H5_HAVE_PARALLEL +inline herr_t h5p_get_page_buffer_size(hid_t plist_id, + size_t* buf_size, + unsigned* min_meta_perc, + unsigned* min_raw_perc) { + herr_t err = H5Pget_page_buffer_size(plist_id, buf_size, min_meta_perc, min_raw_perc); + + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error setting page buffer size."); + } + + return err; +} + +inline herr_t h5p_set_page_buffer_size(hid_t plist_id, + size_t buf_size, + unsigned min_meta_per, + unsigned min_raw_per) { + herr_t err = H5Pset_page_buffer_size(plist_id, buf_size, min_meta_per, min_raw_per); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error setting page buffer size."); + } + + return err; +} +#endif +#endif + +#ifdef H5_HAVE_PARALLEL +inline herr_t h5p_set_fapl_mpio(hid_t fapl_id, MPI_Comm comm, MPI_Info info) { + herr_t err = H5Pset_fapl_mpio(fapl_id, comm, info); + if (err < 0) { + HDF5ErrMapper::ToException<FileException>("Unable to set-up MPIO Driver configuration"); + } + + return err; +} + +#if H5_VERSION_GE(1, 10, 0) +inline herr_t h5p_set_all_coll_metadata_ops(hid_t plist_id, hbool_t is_collective) { + herr_t err = H5Pset_all_coll_metadata_ops(plist_id, is_collective); + if (err < 0) { + HDF5ErrMapper::ToException<FileException>("Unable to request collective metadata reads"); + } + + return err; +} + +inline herr_t h5p_get_all_coll_metadata_ops(hid_t plist_id, hbool_t* is_collective) { + herr_t err = H5Pget_all_coll_metadata_ops(plist_id, is_collective); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error loading MPI metadata read."); + } + + return err; +} + +inline herr_t h5p_set_coll_metadata_write(hid_t plist_id, hbool_t is_collective) { + herr_t err = H5Pset_coll_metadata_write(plist_id, is_collective); + + if (err < 0) { + HDF5ErrMapper::ToException<FileException>("Unable to request collective metadata writes"); + } + + return err; +} + +inline herr_t h5p_get_coll_metadata_write(hid_t plist_id, hbool_t* is_collective) { + herr_t err = H5Pget_coll_metadata_write(plist_id, is_collective); + + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error loading MPI metadata write."); + } + + return err; +} +#endif +#endif + +inline herr_t h5p_get_libver_bounds(hid_t plist_id, 
H5F_libver_t* low, H5F_libver_t* high) { + herr_t err = H5Pget_libver_bounds(plist_id, low, high); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Unable to access file version bounds"); + } + + return err; +} + +inline herr_t h5p_set_libver_bounds(hid_t plist_id, H5F_libver_t low, H5F_libver_t high) { + herr_t err = H5Pset_libver_bounds(plist_id, low, high); + + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error setting file version bounds"); + } + + return err; +} + +inline herr_t h5p_get_meta_block_size(hid_t fapl_id, hsize_t* size) { + herr_t err = H5Pget_meta_block_size(fapl_id, size); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Unable to access file metadata block size"); + } + + return err; +} + +inline herr_t h5p_set_meta_block_size(hid_t fapl_id, hsize_t size) { + herr_t err = H5Pset_meta_block_size(fapl_id, size); + + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error setting metadata block size"); + } + + return err; +} + +inline herr_t h5p_set_est_link_info(hid_t plist_id, + unsigned est_num_entries, + unsigned est_name_len) { + herr_t err = H5Pset_est_link_info(plist_id, est_num_entries, est_name_len); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error setting estimated link info"); + } + + return err; +} + +inline herr_t h5p_get_est_link_info(hid_t plist_id, + unsigned* est_num_entries, + unsigned* est_name_len) { + herr_t err = H5Pget_est_link_info(plist_id, est_num_entries, est_name_len); + + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Unable to access group link size property"); + } + + return err; +} + +inline herr_t h5p_set_chunk(hid_t plist_id, int ndims, const hsize_t dim[]) { + herr_t err = H5Pset_chunk(plist_id, ndims, dim); + + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error setting chunk property"); + } + + return err; +} + +inline int h5p_get_chunk(hid_t plist_id, int max_ndims, hsize_t dim[]) { + int chunk_dims = H5Pget_chunk(plist_id, max_ndims, dim); + if (chunk_dims < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error getting chunk size"); + } + return chunk_dims; +} + +inline htri_t h5z_filter_avail(H5Z_filter_t id) { + htri_t tri = H5Zfilter_avail(id); + if (tri < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error checking filter availability"); + } + return tri; +} + +inline herr_t h5p_set_deflate(hid_t plist_id, unsigned level) { + herr_t err = H5Pset_deflate(plist_id, level); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error setting deflate property"); + } + return err; +} + +inline herr_t h5p_set_szip(hid_t plist_id, unsigned options_mask, unsigned pixels_per_block) { + herr_t err = H5Pset_szip(plist_id, options_mask, pixels_per_block); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error setting szip property"); + } + return err; +} + +inline herr_t h5p_set_shuffle(hid_t plist_id) { + herr_t err = H5Pset_shuffle(plist_id); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error setting shuffle property"); + } + return err; +} + +inline herr_t h5p_get_alloc_time(hid_t plist_id, H5D_alloc_time_t* alloc_time) { + herr_t err = H5Pget_alloc_time(plist_id, alloc_time); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error getting allocation time"); + } + return err; +} + +inline herr_t h5p_set_alloc_time(hid_t plist_id, H5D_alloc_time_t alloc_time) { + herr_t err = H5Pset_alloc_time(plist_id, alloc_time); + if (err < 0) { + 
HDF5ErrMapper::ToException<PropertyException>("Error setting allocation time"); + } + return err; +} + +inline herr_t h5p_get_chunk_cache(hid_t dapl_id, + size_t* rdcc_nslots, + size_t* rdcc_nbytes, + double* rdcc_w0) { + herr_t err = H5Pget_chunk_cache(dapl_id, rdcc_nslots, rdcc_nbytes, rdcc_w0); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error getting dataset cache parameters"); + } + return err; +} + +inline herr_t h5p_set_chunk_cache(hid_t dapl_id, + size_t rdcc_nslots, + size_t rdcc_nbytes, + double rdcc_w0) { + herr_t err = H5Pset_chunk_cache(dapl_id, rdcc_nslots, rdcc_nbytes, rdcc_w0); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error setting dataset cache parameters"); + } + return err; +} + +inline herr_t h5p_set_create_intermediate_group(hid_t plist_id, unsigned crt_intmd) { + herr_t err = H5Pset_create_intermediate_group(plist_id, crt_intmd); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>( + "Error setting property for create intermediate groups"); + } + return err; +} + +inline herr_t h5p_get_create_intermediate_group(hid_t plist_id, unsigned* crt_intmd) { + herr_t err = H5Pget_create_intermediate_group(plist_id, crt_intmd); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>( + "Error getting property for create intermediate groups"); + } + return err; +} + +#ifdef H5_HAVE_PARALLEL +inline herr_t h5p_set_dxpl_mpio(hid_t dxpl_id, H5FD_mpio_xfer_t xfer_mode) { + herr_t err = H5Pset_dxpl_mpio(dxpl_id, xfer_mode); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error setting H5Pset_dxpl_mpio."); + } + return err; +} + +inline herr_t h5p_get_dxpl_mpio(hid_t dxpl_id, H5FD_mpio_xfer_t* xfer_mode) { + herr_t err = H5Pget_dxpl_mpio(dxpl_id, xfer_mode); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error getting H5Pset_dxpl_mpio."); + } + return err; +} + +inline herr_t h5p_get_mpio_no_collective_cause(hid_t plist_id, + uint32_t* local_no_collective_cause, + uint32_t* global_no_collective_cause) { + herr_t err = H5Pget_mpio_no_collective_cause(plist_id, + local_no_collective_cause, + global_no_collective_cause); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Failed to check mpio_no_collective_cause."); + } + return err; +} + +#endif + +inline herr_t h5p_set_link_creation_order(hid_t plist_id, unsigned crt_order_flags) { + herr_t err = H5Pset_link_creation_order(plist_id, crt_order_flags); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>("Error setting LinkCreationOrder."); + } + return err; +} + +inline herr_t h5p_get_link_creation_order(hid_t plist_id, unsigned* crt_order_flags) { + herr_t err = H5Pget_link_creation_order(plist_id, crt_order_flags); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>( + "Error getting property for link creation order"); + } + return err; +} + +inline herr_t h5p_get_attr_phase_change(hid_t plist_id, + unsigned* max_compact, + unsigned* min_dense) { + herr_t err = H5Pget_attr_phase_change(plist_id, max_compact, min_dense); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>( + "Error getting property for attribute phase change"); + } + return err; +} + +inline herr_t h5p_set_attr_phase_change(hid_t plist_id, unsigned max_compact, unsigned min_dense) { + herr_t err = H5Pset_attr_phase_change(plist_id, max_compact, min_dense); + if (err < 0) { + HDF5ErrMapper::ToException<PropertyException>( + "Error getting property for attribute phase change"); + } + return err; +} + + +} // 
namespace detail +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/h5r_wrapper.hpp b/packages/HighFive/include/highfive/bits/h5r_wrapper.hpp new file mode 100644 index 0000000000000000000000000000000000000000..86552d3950914f21d0e3929bc3c2dead88d56a7f --- /dev/null +++ b/packages/HighFive/include/highfive/bits/h5r_wrapper.hpp @@ -0,0 +1,42 @@ +#pragma once + +#include <H5Rpublic.h> + +namespace HighFive { +namespace detail { +inline herr_t h5r_create(void* ref, + hid_t loc_id, + const char* name, + H5R_type_t ref_type, + hid_t space_id) { + herr_t err = H5Rcreate(ref, loc_id, name, ref_type, space_id); + if (err < 0) { + HDF5ErrMapper::ToException<ReferenceException>( + std::string("Unable to create the reference for \"") + name + "\":"); + } + + return err; +} + +#if (H5Rdereference_vers == 2) +inline hid_t h5r_dereference(hid_t obj_id, hid_t oapl_id, H5R_type_t ref_type, const void* ref) { + hid_t hid = H5Rdereference(obj_id, oapl_id, ref_type, ref); + if (hid < 0) { + HDF5ErrMapper::ToException<ReferenceException>("Unable to dereference."); + } + + return hid; +} +#else +inline hid_t h5r_dereference(hid_t dataset, H5R_type_t ref_type, const void* ref) { + hid_t hid = H5Rdereference(dataset, ref_type, ref); + if (hid < 0) { + HDF5ErrMapper::ToException<ReferenceException>("Unable to dereference."); + } + + return hid; +} +#endif + +} // namespace detail +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/h5s_wrapper.hpp b/packages/HighFive/include/highfive/bits/h5s_wrapper.hpp new file mode 100644 index 0000000000000000000000000000000000000000..03edf80058de0340e19e48a35e2f804f338134b1 --- /dev/null +++ b/packages/HighFive/include/highfive/bits/h5s_wrapper.hpp @@ -0,0 +1,115 @@ +#pragma once + +#include <H5Spublic.h> +namespace HighFive { +namespace detail { + +inline hid_t h5s_create_simple(int rank, const hsize_t dims[], const hsize_t maxdims[]) { + hid_t space_id = H5Screate_simple(rank, dims, maxdims); + if (space_id == H5I_INVALID_HID) { + throw DataSpaceException("Unable to create simple dataspace"); + } + + return space_id; +} + +inline hid_t h5s_create(H5S_class_t type) { + hid_t space_id = H5Screate(type); + + if (space_id == H5I_INVALID_HID) { + throw DataSpaceException("Unable to create dataspace"); + } + + return space_id; +} + +inline hid_t h5s_copy(hid_t space_id) { + hid_t copy_id = H5Scopy(space_id); + + if (copy_id < 0) { + throw DataSpaceException("Unable to copy dataspace"); + } + + return copy_id; +} + +inline herr_t h5s_select_none(hid_t spaceid) { + herr_t err = H5Sselect_none(spaceid); + if (err < 0) { + HDF5ErrMapper::ToException<DataSpaceException>("Unable to select None space"); + } + return err; +} + +inline herr_t h5s_select_hyperslab(hid_t space_id, + H5S_seloper_t op, + const hsize_t start[], + const hsize_t stride[], + const hsize_t count[], + const hsize_t block[]) { + herr_t err = H5Sselect_hyperslab(space_id, op, start, stride, count, block); + if (err < 0) { + HDF5ErrMapper::ToException<DataSpaceException>("Unable to select hyperslab"); + } + return err; +} + +inline hssize_t h5s_get_select_npoints(hid_t spaceid) { + hssize_t n_points = H5Sget_select_npoints(spaceid); + if (n_points < 0) { + HDF5ErrMapper::ToException<DataSpaceException>( + "Unable to get number of points in selection"); + } + return n_points; +} + +inline herr_t h5s_select_elements(hid_t space_id, + H5S_seloper_t op, + size_t num_elem, + const hsize_t* coord) { + herr_t err = H5Sselect_elements(space_id, op, num_elem, coord); + if 
(err < 0) { + HDF5ErrMapper::ToException<DataSpaceException>("Unable to select elements"); + } + return err; +} + +inline int h5s_get_simple_extent_ndims(hid_t space_id) { + int ndim = H5Sget_simple_extent_ndims(space_id); + if (ndim < 0) { + HDF5ErrMapper::ToException<DataSetException>( + "Unable to get number of dimensions of dataspace"); + } + return ndim; +} + +inline herr_t h5s_get_simple_extent_dims(hid_t space_id, hsize_t dims[], hsize_t maxdims[]) { + herr_t err = H5Sget_simple_extent_dims(space_id, dims, maxdims); + if (err < 0) { + HDF5ErrMapper::ToException<DataSetException>("Unable to get dimensions of dataspace"); + } + return err; +} + +inline hssize_t h5s_get_simple_extent_npoints(hid_t space_id) { + hssize_t nelements = H5Sget_simple_extent_npoints(space_id); + if (nelements < 0) { + HDF5ErrMapper::ToException<DataSetException>( + "Unable to get number of elements in dataspace"); + } + + return nelements; +} + +inline H5S_class_t h5s_get_simple_extent_type(hid_t space_id) { + H5S_class_t cls = H5Sget_simple_extent_type(space_id); + if (cls == H5S_NO_CLASS) { + HDF5ErrMapper::ToException<DataSpaceException>("Unable to get class of simple dataspace."); + } + + return cls; +} + + +} // namespace detail +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/bits/h5t_wrapper.hpp b/packages/HighFive/include/highfive/bits/h5t_wrapper.hpp new file mode 100644 index 0000000000000000000000000000000000000000..f2c7bb098f9e5b4a902ca7867de4197563024a6f --- /dev/null +++ b/packages/HighFive/include/highfive/bits/h5t_wrapper.hpp @@ -0,0 +1,230 @@ +#pragma once + +#include <H5Ipublic.h> +#include <H5Tpublic.h> + +namespace HighFive { +namespace detail { + +inline hid_t h5t_copy(hid_t original) { + auto copy = H5Tcopy(original); + if (copy == H5I_INVALID_HID) { + HDF5ErrMapper::ToException<DataTypeException>("Error copying datatype."); + } + + return copy; +} + +inline hsize_t h5t_get_size(hid_t hid) { + hsize_t size = H5Tget_size(hid); + if (size == 0) { + HDF5ErrMapper::ToException<DataTypeException>("Error getting size of datatype."); + } + + return size; +} + +inline H5T_cset_t h5t_get_cset(hid_t hid) { + auto cset = H5Tget_cset(hid); + if (cset == H5T_CSET_ERROR) { + HDF5ErrMapper::ToException<DataTypeException>("Error getting cset of datatype."); + } + + return cset; +} + +inline H5T_str_t h5t_get_strpad(hid_t hid) { + auto strpad = H5Tget_strpad(hid); + if (strpad == H5T_STR_ERROR) { + HDF5ErrMapper::ToException<DataTypeException>("Error getting strpad of datatype."); + } + + return strpad; +} + +inline void h5t_set_size(hid_t hid, hsize_t size) { + if (H5Tset_size(hid, size) < 0) { + HDF5ErrMapper::ToException<DataTypeException>("Error setting size of datatype."); + } +} + +inline void h5t_set_cset(hid_t hid, H5T_cset_t cset) { + if (H5Tset_cset(hid, cset) < 0) { + HDF5ErrMapper::ToException<DataTypeException>("Error setting cset of datatype."); + } +} + +inline void h5t_set_strpad(hid_t hid, H5T_str_t strpad) { + if (H5Tset_strpad(hid, strpad) < 0) { + HDF5ErrMapper::ToException<DataTypeException>("Error setting strpad of datatype."); + } +} + +inline int h5t_get_nmembers(hid_t hid) { + auto result = H5Tget_nmembers(hid); + + if (result < 0) { + throw DataTypeException("Could not get members of compound datatype"); + } + + return result; +} + +inline char* h5t_get_member_name(hid_t type_id, unsigned membno) { + char* name = H5Tget_member_name(type_id, membno); + if (name == nullptr) { + throw DataTypeException("Failed to get member names of compound datatype"); + } 
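+    // The returned string is allocated by HDF5 and must be released with
+    // H5free_memory (wrapped above as detail::h5_free_memory), not with
+    // free() or delete[].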
+ + return name; +} + + +inline size_t h5t_get_member_offset(hid_t type_id, unsigned membno) { + // Note, this function is peculiar. On failure it returns 0, yet 0 is also + // what's returned on failure. + return H5Tget_member_offset(type_id, membno); +} + +inline hid_t h5t_get_member_type(hid_t type_id, unsigned membno) { + hid_t member_id = H5Tget_member_type(type_id, membno); + + if (member_id < 0) { + throw DataTypeException("Failed to get member type of compound datatype"); + } + + return member_id; +} + +#if H5_VERSION_GE(1, 12, 0) +inline herr_t h5t_reclaim(hid_t type_id, hid_t space_id, hid_t plist_id, void* buf) { + herr_t err = H5Treclaim(type_id, space_id, plist_id, buf); + if (err < 0) { + throw DataTypeException("Failed to reclaim HDF5 internal memory"); + } + + return err; +} +#endif + +inline H5T_class_t h5t_get_class(hid_t type_id) { + H5T_class_t class_id = H5Tget_class(type_id); + if (class_id == H5T_NO_CLASS) { + throw DataTypeException("Failed to get class of type"); + } + + return class_id; +} + +inline htri_t h5t_equal(hid_t type1_id, hid_t type2_id) { + htri_t equal = H5Tequal(type1_id, type2_id); + if (equal < 0) { + throw DataTypeException("Failed to compare two datatypes"); + } + + return equal; +} + +inline htri_t h5t_is_variable_str(hid_t type_id) { + htri_t is_variable = H5Tis_variable_str(type_id); + if (is_variable < 0) { + HDF5ErrMapper::ToException<DataTypeException>( + "Failed to check if string is variable length"); + } + return is_variable; +} + +inline herr_t h5t_set_fields(hid_t type_id, + size_t spos, + size_t epos, + size_t esize, + size_t mpos, + size_t msize) { + herr_t err = H5Tset_fields(type_id, spos, epos, esize, mpos, msize); + if (err < 0) { + HDF5ErrMapper::ToException<DataTypeException>( + "Failed to create custom floating point data type"); + } + return err; +} + +inline herr_t h5t_set_ebias(hid_t type_id, size_t ebias) { + herr_t err = H5Tset_ebias(type_id, ebias); + if (err < 0) { + HDF5ErrMapper::ToException<DataTypeException>( + "Failed to exponent bias of floating point data type"); + } + + return err; +} + +inline hid_t h5t_create(H5T_class_t type, size_t size) { + hid_t type_id = H5Tcreate(type, size); + if (type_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException<DataTypeException>("Failed to datatype"); + } + + return type_id; +} + +inline herr_t h5t_insert(hid_t parent_id, const char* name, size_t offset, hid_t member_id) { + herr_t err = H5Tinsert(parent_id, name, offset, member_id); + if (err < 0) { + HDF5ErrMapper::ToException<DataTypeException>("Failed to not add new member to datatype"); + } + + return err; +} + +inline herr_t h5t_commit2(hid_t loc_id, + const char* name, + hid_t type_id, + hid_t lcpl_id, + hid_t tcpl_id, + hid_t tapl_id) { + herr_t err = H5Tcommit2(loc_id, name, type_id, lcpl_id, tcpl_id, tapl_id); + if (err < 0) { + HDF5ErrMapper::ToException<DataTypeException>("Failed to commit datatype"); + } + + return err; +} + +inline herr_t h5t_close(hid_t type_id) { + auto err = H5Tclose(type_id); + if (err < 0) { + HDF5ErrMapper::ToException<DataTypeException>("Failed to close datatype"); + } + + return err; +} + +inline hid_t h5t_enum_create(hid_t base_id) { + hid_t type_id = H5Tenum_create(base_id); + if (type_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException<DataTypeException>("Failed to create new enum datatype"); + } + return type_id; +} + +inline herr_t h5t_enum_insert(hid_t type, const char* name, const void* value) { + herr_t err = H5Tenum_insert(type, name, value); + if (err < 0) { + 
HDF5ErrMapper::ToException<DataTypeException>( + "Failed to add new member to this enum datatype"); + } + return err; +} + +inline hid_t h5t_open2(hid_t loc_id, const char* name, hid_t tapl_id) { + hid_t datatype_id = H5Topen2(loc_id, name, tapl_id); + if (datatype_id == H5I_INVALID_HID) { + HDF5ErrMapper::ToException<DataTypeException>( + std::string("Unable to open the datatype \"") + name + "\":"); + } + + return datatype_id; +} + +} // namespace detail +} // namespace HighFive diff --git a/packages/HighFive/include/highfive/boost.hpp b/packages/HighFive/include/highfive/boost.hpp new file mode 100644 index 0000000000000000000000000000000000000000..8992159a2df1fca5a4bcbefaed2fd5e3dfcf5b2e --- /dev/null +++ b/packages/HighFive/include/highfive/boost.hpp @@ -0,0 +1,164 @@ +#pragma once +#ifdef H5_USE_BOOST + +#include "bits/H5Inspector_decl.hpp" +#include "H5Exception.hpp" + +#include <boost/multi_array.hpp> +// starting Boost 1.64, serialization header must come before ublas +#include <boost/serialization/vector.hpp> +#include <boost/numeric/ublas/matrix.hpp> + +namespace HighFive { +namespace details { + +template <typename T, size_t Dims> +struct inspector<boost::multi_array<T, Dims>> { + using type = boost::multi_array<T, Dims>; + using value_type = T; + using base_type = typename inspector<value_type>::base_type; + using hdf5_type = typename inspector<value_type>::hdf5_type; + + static constexpr size_t ndim = Dims; + static constexpr size_t recursive_ndim = ndim + inspector<value_type>::recursive_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable<value_type>::value && + inspector<value_type>::is_trivially_copyable; + + static std::vector<size_t> getDimensions(const type& val) { + std::vector<size_t> sizes; + for (size_t i = 0; i < ndim; ++i) { + sizes.push_back(val.shape()[i]); + } + auto s = inspector<value_type>::getDimensions(val.data()[0]); + sizes.insert(sizes.end(), s.begin(), s.end()); + return sizes; + } + + static size_t getSizeVal(const type& val) { + return compute_total_size(getDimensions(val)); + } + + static size_t getSize(const std::vector<size_t>& dims) { + return compute_total_size(dims); + } + + static void prepare(type& val, const std::vector<size_t>& dims) { + if (dims.size() < ndim) { + std::ostringstream os; + os << "Only '" << dims.size() << "' given but boost::multi_array is of size '" << ndim + << "'."; + throw DataSpaceException(os.str()); + } + boost::array<typename type::index, Dims> ext; + std::copy(dims.begin(), dims.begin() + ndim, ext.begin()); + val.resize(ext); + std::vector<size_t> next_dims(dims.begin() + Dims, dims.end()); + std::size_t size = std::accumulate(dims.begin(), + dims.begin() + Dims, + std::size_t{1}, + std::multiplies<size_t>()); + for (size_t i = 0; i < size; ++i) { + inspector<value_type>::prepare(*(val.origin() + i), next_dims); + } + } + + static hdf5_type* data(type& val) { + return inspector<value_type>::data(*val.data()); + } + + static const hdf5_type* data(const type& val) { + return inspector<value_type>::data(*val.data()); + } + + template <class It> + static void serialize(const type& val, It m) { + size_t size = val.num_elements(); + size_t subsize = inspector<value_type>::getSizeVal(*val.origin()); + for (size_t i = 0; i < size; ++i) { + inspector<value_type>::serialize(*(val.origin() + i), m + i * subsize); + } + } + + template <class It> + static void unserialize(It vec_align, const std::vector<size_t>& dims, type& val) { + std::vector<size_t> next_dims(dims.begin() + ndim, dims.end()); + 
size_t subsize = compute_total_size(next_dims); + for (size_t i = 0; i < val.num_elements(); ++i) { + inspector<value_type>::unserialize(vec_align + i * subsize, + next_dims, + *(val.origin() + i)); + } + } +}; + +template <typename T> +struct inspector<boost::numeric::ublas::matrix<T>> { + using type = boost::numeric::ublas::matrix<T>; + using value_type = unqualified_t<T>; + using base_type = typename inspector<value_type>::base_type; + using hdf5_type = typename inspector<value_type>::hdf5_type; + + static constexpr size_t ndim = 2; + static constexpr size_t recursive_ndim = ndim + inspector<value_type>::recursive_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable<value_type>::value && + inspector<value_type>::is_trivially_copyable; + + static std::vector<size_t> getDimensions(const type& val) { + std::vector<size_t> sizes{val.size1(), val.size2()}; + auto s = inspector<value_type>::getDimensions(val(0, 0)); + sizes.insert(sizes.end(), s.begin(), s.end()); + return sizes; + } + + static size_t getSizeVal(const type& val) { + return compute_total_size(getDimensions(val)); + } + + static size_t getSize(const std::vector<size_t>& dims) { + return compute_total_size(dims); + } + + static void prepare(type& val, const std::vector<size_t>& dims) { + if (dims.size() < ndim) { + std::ostringstream os; + os << "Impossible to pair DataSet with " << dims.size() << " dimensions into a " << ndim + << " boost::numeric::ublas::matrix"; + throw DataSpaceException(os.str()); + } + val.resize(dims[0], dims[1], false); + } + + static hdf5_type* data(type& val) { + return inspector<value_type>::data(val(0, 0)); + } + + static const hdf5_type* data(const type& val) { + return inspector<value_type>::data(val(0, 0)); + } + + static void serialize(const type& val, hdf5_type* m) { + size_t size = val.size1() * val.size2(); + size_t subsize = inspector<value_type>::getSizeVal(val(0, 0)); + for (size_t i = 0; i < size; ++i) { + inspector<value_type>::serialize(*(&val(0, 0) + i), m + i * subsize); + } + } + + static void unserialize(const hdf5_type* vec_align, + const std::vector<size_t>& dims, + type& val) { + std::vector<size_t> next_dims(dims.begin() + ndim, dims.end()); + size_t subsize = compute_total_size(next_dims); + size_t size = val.size1() * val.size2(); + for (size_t i = 0; i < size; ++i) { + inspector<value_type>::unserialize(vec_align + i * subsize, + next_dims, + *(&val(0, 0) + i)); + } + } +}; + +} // namespace details +} // namespace HighFive + +#endif diff --git a/packages/HighFive/include/highfive/eigen.hpp b/packages/HighFive/include/highfive/eigen.hpp new file mode 100644 index 0000000000000000000000000000000000000000..c47095ddeeea3698ae76366d7c72fb443450241d --- /dev/null +++ b/packages/HighFive/include/highfive/eigen.hpp @@ -0,0 +1,93 @@ +#pragma once +#ifdef H5_USE_EIGEN + +#include "bits/H5Inspector_decl.hpp" +#include "H5Exception.hpp" + +#include <Eigen/Eigen> + + +namespace HighFive { +namespace details { + +template <typename T, int M, int N> +struct inspector<Eigen::Matrix<T, M, N>> { + using type = Eigen::Matrix<T, M, N>; + using value_type = T; + using base_type = typename inspector<value_type>::base_type; + using hdf5_type = base_type; + + static constexpr size_t ndim = 2; + static constexpr size_t recursive_ndim = ndim + inspector<value_type>::recursive_ndim; + static constexpr bool is_trivially_copyable = std::is_trivially_copyable<value_type>::value && + inspector<value_type>::is_trivially_copyable; + + + static void assert_not_buggy(Eigen::Index 
nrows, Eigen::Index ncols) { + if (nrows > 1 && ncols > 1) { + throw std::runtime_error( + "HighFive has been broken for Eigen::Matrix. Please check " + "https://github.com/BlueBrain/HighFive/issues/532."); + } + } + + static std::vector<size_t> getDimensions(const type& val) { + assert_not_buggy(val.rows(), val.cols()); + + std::vector<size_t> sizes{static_cast<size_t>(val.rows()), static_cast<size_t>(val.cols())}; + auto s = inspector<value_type>::getDimensions(val.data()[0]); + sizes.insert(sizes.end(), s.begin(), s.end()); + return sizes; + } + + static size_t getSizeVal(const type& val) { + return compute_total_size(getDimensions(val)); + } + + static size_t getSize(const std::vector<size_t>& dims) { + return compute_total_size(dims); + } + + static void prepare(type& val, const std::vector<size_t>& dims) { + if (dims[0] != static_cast<size_t>(val.rows()) || + dims[1] != static_cast<size_t>(val.cols())) { + val.resize(static_cast<typename type::Index>(dims[0]), + static_cast<typename type::Index>(dims[1])); + } + + assert_not_buggy(val.rows(), val.cols()); + } + + static hdf5_type* data(type& val) { + assert_not_buggy(val.rows(), val.cols()); + return inspector<value_type>::data(*val.data()); + } + + static const hdf5_type* data(const type& val) { + assert_not_buggy(val.rows(), val.cols()); + return inspector<value_type>::data(*val.data()); + } + + static void serialize(const type& val, hdf5_type* m) { + assert_not_buggy(val.rows(), val.cols()); + std::memcpy(m, val.data(), static_cast<size_t>(val.size()) * sizeof(hdf5_type)); + } + + static void unserialize(const hdf5_type* vec_align, + const std::vector<size_t>& dims, + type& val) { + assert_not_buggy(val.rows(), val.cols()); + if (dims.size() < 2) { + std::ostringstream os; + os << "Impossible to pair DataSet with " << dims.size() + << " dimensions into an eigen-matrix."; + throw DataSpaceException(os.str()); + } + std::memcpy(val.data(), vec_align, compute_total_size(dims) * sizeof(hdf5_type)); + } +}; + +} // namespace details +} // namespace HighFive + +#endif diff --git a/packages/HighFive/include/highfive/half_float.hpp b/packages/HighFive/include/highfive/half_float.hpp new file mode 100644 index 0000000000000000000000000000000000000000..998e693ffe37c15b8ebba08f83f13c521f46423b --- /dev/null +++ b/packages/HighFive/include/highfive/half_float.hpp @@ -0,0 +1,21 @@ +#pragma once +#ifdef H5_USE_HALF_FLOAT + +#include <half.hpp> + +namespace HighFive { +using float16_t = half_float::half; + +template <> +inline AtomicType<float16_t>::AtomicType() { + _hid = detail::h5t_copy(H5T_NATIVE_FLOAT); + // Sign position, exponent position, exponent size, mantissa position, mantissa size + detail::h5t_set_fields(_hid, 15, 10, 5, 0, 10); + // Total datatype size (in bytes) + detail::h5t_set_size(_hid, 2); + // Floating point exponent bias + detail::h5t_set_ebias(_hid, 15); +} +} // namespace HighFive + +#endif diff --git a/packages/HighFive/src/examples/create_dataset_half_float.cpp b/packages/HighFive/src/examples/create_dataset_half_float.cpp index 2b720cd187036c98392ed7fd2e9518934ae948ce..837c58704c5d4e4d7a030bd3d784286004ca1030 100644 --- a/packages/HighFive/src/examples/create_dataset_half_float.cpp +++ b/packages/HighFive/src/examples/create_dataset_half_float.cpp @@ -7,8 +7,6 @@ * */ -#ifdef H5_USE_HALF_FLOAT - #include <iostream> #include <string> #include <vector> @@ -45,5 +43,3 @@ int main(void) { return 0; } - -#endif diff --git a/packages/HighFive/tests/test_project_integration.sh 
b/packages/HighFive/tests/test_project_integration.sh index ae88695a0ca1fed8aae2e36276320bf876cb5b4f..95521995269edcc75362c34cfd755ccad37e6888 100644 --- a/packages/HighFive/tests/test_project_integration.sh +++ b/packages/HighFive/tests/test_project_integration.sh @@ -2,50 +2,56 @@ set -xeo pipefail cd "$( dirname "${BASH_SOURCE[0]}")" # cd here -BUILDDIR="${PWD}/build" +BUILD_DIR="${PWD}/build-highfive" ROOT="${PWD}/.." -TESTDIR="${PWD}" +TEST_DIR="${PWD}" +INSTALL_DIR="${BUILD_DIR}/install" test_install() { local project="${1}" - local builddir="${BUILDDIR}/${project}/${2}" + local project_dir="${TEST_DIR}/${project}" + local dep_dir="${TEST_DIR}/${project}/deps/HighFive" shift - shift - ln -sf ../../.. "${TESTDIR}/${project}/deps/HighFive" - rm -rf "${builddir}" - mkdir -p "${builddir}" - pushd "${builddir}" - cmake "${TESTDIR}/${project}" "$@" - cmake --build . --verbose - ctest + + pushd "${project_dir}" + + local build_dir="build" + + ln -sf ../../.. "${dep_dir}" + + cmake "$@" -B "${build_dir}" . + cmake --build "${build_dir}" --verbose + ctest --test-dir "${build_dir}" + + rm -f "${dep_dir}" + rm -rf "${build_dir}" + popd - rm "${TESTDIR}/${project}/deps/HighFive" } -rm -rf "${BUILDDIR}/highfive" -mkdir -p "${BUILDDIR}/highfive" -pushd "${BUILDDIR}/highfive" cmake "${ROOT}" \ -DHIGHFIVE_EXAMPLES=OFF \ -DHIGHFIVE_UNIT_TESTS=OFF \ - -DCMAKE_INSTALL_PREFIX="${PWD}/install" -cmake --build . --target install -popd + -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" \ + -B "${BUILD_DIR}" +cmake --build "${BUILD_DIR}" --target install for project in test_project test_dependent_library; do # Case 1. Base case: include subdirectory - test_install "${project}" subdir + test_install "${project}" # Case 2. We use an install dir and all deps configuration # Install highfive (no tests required) - test_install "${project}" reuse_deps \ + test_install "${project}" \ -DUSE_BUNDLED_HIGHFIVE=NO \ -DHIGHFIVE_USE_INSTALL_DEPS=YES \ - -DCMAKE_PREFIX_PATH="${BUILDDIR}/highfive/install" - # + -DCMAKE_PREFIX_PATH="${INSTALL_DIR}" + # Case 3. We redetect-dependencies - test_install "${project}" install_new_deps \ + test_install "${project}" \ -DUSE_BUNDLED_HIGHFIVE=NO \ -DHIGHFIVE_USE_INSTALL_DEPS=NO \ - -DCMAKE_PREFIX_PATH="${BUILDDIR}/highfive/install" + -DCMAKE_PREFIX_PATH="${INSTALL_DIR}" done + +rm -rf "${BUILD_DIR}" diff --git a/packages/HighFive/tests/unit/CMakeLists.txt b/packages/HighFive/tests/unit/CMakeLists.txt index 3644d117ce11f7464e8f67fd0d360ff871b115fb..b8943067f7bc3055aa871b808ab8cae8d16782f5 100644 --- a/packages/HighFive/tests/unit/CMakeLists.txt +++ b/packages/HighFive/tests/unit/CMakeLists.txt @@ -7,7 +7,7 @@ if(MSVC) endif() ## Base tests -foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types) +foreach(test_name tests_high_five_base tests_high_five_multi_dims tests_high_five_easy test_all_types test_high_five_selection tests_high_five_data_type) add_executable(${test_name} "${test_name}.cpp") target_link_libraries(${test_name} HighFive HighFiveWarnings Catch2::Catch2WithMain) catch_discover_tests(${test_name}) diff --git a/packages/HighFive/tests/unit/create_traits.hpp b/packages/HighFive/tests/unit/create_traits.hpp new file mode 100644 index 0000000000000000000000000000000000000000..959fcdeb1fb887f2085c122fb2ad30f42c4b3cf9 --- /dev/null +++ b/packages/HighFive/tests/unit/create_traits.hpp @@ -0,0 +1,70 @@ +#pragma once + +namespace HighFive { +namespace testing { + +/// \brief Trait for `createAttribute`. 
+/// +/// The point of these is to simplify testing. The typical issue is that we +/// need to write the tests twice, one with `createDataSet` and then again with +/// `createAttribute`. This trait allows us to inject this difference. +struct AttributeCreateTraits { + using type = Attribute; + + template <class Hi5> + static Attribute get(Hi5& hi5, const std::string& name) { + return hi5.getAttribute(name); + } + + + template <class Hi5, class Container> + static Attribute create(Hi5& hi5, const std::string& name, const Container& container) { + return hi5.createAttribute(name, container); + } + + template <class Hi5> + static Attribute create(Hi5& hi5, + const std::string& name, + const DataSpace& dataspace, + const DataType& datatype) { + return hi5.createAttribute(name, dataspace, datatype); + } + + template <class T, class Hi5> + static Attribute create(Hi5& hi5, const std::string& name, const DataSpace& dataspace) { + auto datatype = create_datatype<T>(); + return hi5.template createAttribute<T>(name, dataspace); + } +}; + +/// \brief Trait for `createDataSet`. +struct DataSetCreateTraits { + using type = DataSet; + + template <class Hi5> + static DataSet get(Hi5& hi5, const std::string& name) { + return hi5.getDataSet(name); + } + + template <class Hi5, class Container> + static DataSet create(Hi5& hi5, const std::string& name, const Container& container) { + return hi5.createDataSet(name, container); + } + + template <class Hi5> + static DataSet create(Hi5& hi5, + const std::string& name, + const DataSpace& dataspace, + const DataType& datatype) { + return hi5.createDataSet(name, dataspace, datatype); + } + + template <class T, class Hi5> + static DataSet create(Hi5& hi5, const std::string& name, const DataSpace& dataspace) { + auto datatype = create_datatype<T>(); + return hi5.template createDataSet<T>(name, dataspace); + } +}; + +} // namespace testing +} // namespace HighFive diff --git a/packages/HighFive/tests/unit/data_generator.hpp b/packages/HighFive/tests/unit/data_generator.hpp new file mode 100644 index 0000000000000000000000000000000000000000..9a6712d53208a762a493ab257e8a54feea27cf9a --- /dev/null +++ b/packages/HighFive/tests/unit/data_generator.hpp @@ -0,0 +1,461 @@ +#pragma once + +#include <limits> +#include <numeric> +#include <stdexcept> +#include <type_traits> +#include <vector> +#include <array> + +#ifdef H5_USE_BOOST +#include <boost/multi_array.hpp> +#endif + +#include <highfive/bits/H5Inspector_misc.hpp> + +namespace HighFive { +namespace testing { + +std::vector<size_t> lstrip(const std::vector<size_t>& indices, size_t n) { + std::vector<size_t> subindices(indices.size() - n); + for (size_t i = 0; i < subindices.size(); ++i) { + subindices[i] = indices[i + n]; + } + + return subindices; +} + +size_t ravel(std::vector<size_t>& indices, const std::vector<size_t> dims) { + size_t rank = dims.size(); + size_t linear_index = 0; + size_t ld = 1; + for (size_t kk = 0; kk < rank; ++kk) { + auto k = rank - 1 - kk; + linear_index += indices[k] * ld; + ld *= dims[k]; + } + + return linear_index; +} + +std::vector<size_t> unravel(size_t flat_index, const std::vector<size_t> dims) { + size_t rank = dims.size(); + size_t ld = 1; + std::vector<size_t> indices(rank); + for (size_t kk = 0; kk < rank; ++kk) { + auto k = rank - 1 - kk; + indices[k] = (flat_index / ld) % dims[k]; + ld *= dims[k]; + } + + return indices; +} + +static size_t flat_size(const std::vector<size_t>& dims) { + size_t n = 1; + for (auto d: dims) { + n *= d; + } + + return n; +} + +template <class 
Container, class = void> +struct ContainerTraits; + +// -- Scalar basecases --------------------------------------------------------- +template <class T> +struct ScalarContainerTraits { + using container_type = T; + using base_type = T; + + static void set(container_type& array, std::vector<size_t> /* indices */, base_type value) { + array = value; + } + + static const base_type& get(const container_type& array, std::vector<size_t> /* indices */) { + return array; + } + + static void assign(container_type& dst, const container_type& src) { + dst = src; + } + + static container_type allocate(const std::vector<size_t>& /* dims */) { + return container_type{}; + } + + static void sanitize_dims(std::vector<size_t>& /* dims */, size_t /* axis */) {} +}; + +template <class T> +struct ContainerTraits<T, typename std::enable_if<std::is_floating_point<T>::value>::type> + : public ScalarContainerTraits<T> {}; + +template <class T> +struct ContainerTraits<T, typename std::enable_if<std::is_integral<T>::value>::type> + : public ScalarContainerTraits<T> {}; + +template <> +struct ContainerTraits<std::string>: public ScalarContainerTraits<std::string> {}; + +// -- STL ---------------------------------------------------------------------- +template <> +struct ContainerTraits<std::vector<bool>> { + using container_type = std::vector<bool>; + using value_type = bool; + using base_type = bool; + + static void set(container_type& array, + const std::vector<size_t>& indices, + const base_type& value) { + array[indices[0]] = value; + } + + static base_type get(const container_type& array, const std::vector<size_t>& indices) { + return array[indices[0]]; + } + + static void assign(container_type& dst, const container_type& src) { + dst = src; + } + + static container_type allocate(const std::vector<size_t>& dims) { + container_type array(dims[0]); + return array; + } + + static void sanitize_dims(std::vector<size_t>& dims, size_t axis) { + ContainerTraits<value_type>::sanitize_dims(dims, axis + 1); + } +}; + +template <class Container, class ValueType = typename Container::value_type> +struct STLLikeContainerTraits { + using container_type = Container; + using value_type = ValueType; + using base_type = typename ContainerTraits<value_type>::base_type; + + static void set(container_type& array, + const std::vector<size_t>& indices, + const base_type& value) { + return ContainerTraits<value_type>::set(array[indices[0]], lstrip(indices, 1), value); + } + + static base_type get(const container_type& array, const std::vector<size_t>& indices) { + return ContainerTraits<value_type>::get(array[indices[0]], lstrip(indices, 1)); + } + + static void assign(container_type& dst, const container_type& src) { + dst = src; + } + + static container_type allocate(const std::vector<size_t>& dims) { + container_type array(dims[0]); + for (size_t i = 0; i < dims[0]; ++i) { + auto value = ContainerTraits<value_type>::allocate(lstrip(dims, 1)); + ContainerTraits<value_type>::assign(array[i], value); + } + + return array; + } + + static void sanitize_dims(std::vector<size_t>& dims, size_t axis) { + ContainerTraits<value_type>::sanitize_dims(dims, axis + 1); + } +}; + +template <class T> +struct ContainerTraits<std::vector<T>>: public STLLikeContainerTraits<std::vector<T>> {}; + +template <class T, size_t N> +struct ContainerTraits<std::array<T, N>>: public STLLikeContainerTraits<std::array<T, N>> { + private: + using super = STLLikeContainerTraits<std::array<T, N>>; + + public: + using container_type = typename super::container_type; 
+ using base_type = typename super::base_type; + using value_type = typename super::value_type; + + public: + static container_type allocate(const std::vector<size_t>& dims) { + if (N != dims[0]) { + throw std::runtime_error("broken logic: static and runtime size don't match."); + } + + container_type array; + for (size_t i = 0; i < dims[0]; ++i) { + auto value = ContainerTraits<value_type>::allocate(lstrip(dims, 1)); + ContainerTraits<value_type>::assign(array[i], value); + } + + return array; + } + + static void sanitize_dims(std::vector<size_t>& dims, size_t axis) { + dims[axis] = N; + ContainerTraits<value_type>::sanitize_dims(dims, axis + 1); + } +}; + +// -- Boost ------------------------------------------------------------------- +#ifdef H5_USE_BOOST +template <class T, size_t n> +struct ContainerTraits<boost::multi_array<T, n>> { + using container_type = typename boost::multi_array<T, n>; + using value_type = T; + using base_type = typename ContainerTraits<value_type>::base_type; + + static void set(container_type& array, + const std::vector<size_t>& indices, + const base_type& value) { + auto i = std::vector<size_t>(indices.begin(), indices.begin() + n); + return ContainerTraits<value_type>::set(array(i), lstrip(indices, n), value); + } + + static base_type get(const container_type& array, const std::vector<size_t>& indices) { + auto i = std::vector<size_t>(indices.begin(), indices.begin() + n); + return ContainerTraits<value_type>::get(array(i), lstrip(indices, n)); + } + + static void assign(container_type& dst, const container_type& src) { + auto const* const shape = src.shape(); + dst.resize(std::vector<size_t>(shape, shape + n)); + dst = src; + } + + static container_type allocate(const std::vector<size_t>& dims) { + auto local_dims = std::vector<size_t>(dims.begin(), dims.begin() + n); + container_type array(local_dims); + + size_t n_elements = flat_size(local_dims); + for (size_t i = 0; i < n_elements; ++i) { + auto element = ContainerTraits<value_type>::allocate(lstrip(dims, n)); + set(array, unravel(i, local_dims), element); + } + + return array; + } + + static void sanitize_dims(std::vector<size_t>& dims, size_t axis) { + ContainerTraits<value_type>::sanitize_dims(dims, axis + n); + } +}; + +template <class T> +struct ContainerTraits<boost::numeric::ublas::matrix<T>> { + using container_type = typename boost::numeric::ublas::matrix<T>; + using value_type = T; + using base_type = typename ContainerTraits<value_type>::base_type; + + static void set(container_type& array, + const std::vector<size_t>& indices, + const base_type& value) { + auto i = indices[0]; + auto j = indices[1]; + return ContainerTraits<value_type>::set(array(i, j), lstrip(indices, 2), value); + } + + static base_type get(const container_type& array, const std::vector<size_t>& indices) { + auto i = indices[0]; + auto j = indices[1]; + return ContainerTraits<value_type>::get(array(i, j), lstrip(indices, 2)); + } + + static void assign(container_type& dst, const container_type& src) { + dst = src; + } + + static container_type allocate(const std::vector<size_t>& dims) { + auto local_dims = std::vector<size_t>(dims.begin(), dims.begin() + 2); + container_type array(local_dims[0], local_dims[1]); + + size_t n_elements = flat_size(local_dims); + for (size_t i = 0; i < n_elements; ++i) { + auto indices = unravel(i, local_dims); + auto element = ContainerTraits<value_type>::allocate(lstrip(dims, 2)); + + ContainerTraits<value_type>::assign(array(indices[0], indices[1]), element); + } + + return array; + } + + 
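+    // `sanitize_dims` gives containers with compile-time extents (e.g.
+    // std::array<T, N>) a chance to overwrite the requested size along their
+    // axis. A ublas matrix spans two runtime-sized axes, so it simply forwards
+    // to the element type starting at `axis + 2`.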
static void sanitize_dims(std::vector<size_t>& dims, size_t axis) { + ContainerTraits<value_type>::sanitize_dims(dims, axis + 2); + } +}; + +#endif + +template <class T, class C> +T copy(const C& src, const std::vector<size_t>& dims) { + auto dst = ContainerTraits<T>::allocate(dims); + for (size_t i = 0; i < flat_size(dims); ++i) { + auto indices = unravel(i, dims); + ContainerTraits<T>::set(dst, indices, ContainerTraits<C>::get(src, indices)); + } + + return dst; +} + +template <class T> +T default_real_value(const std::vector<size_t>& indices, T shift, T base, T factor) { + auto value = T(0); + + auto isum = std::accumulate(indices.begin(), indices.end(), size_t(0)); + auto sign = (std::is_signed<T>::value) && (isum % 2 == 1) ? T(-1) : T(1); + + for (size_t k = 0; k < indices.size(); ++k) { + value += T(indices[k]) * T(std::pow(shift, T(k))) * base; + } + + return sign * value * factor; +} + +std::vector<std::string> ascii_alphabet = {"a", "b", "c", "d", "e", "f"}; + +std::string default_string(size_t offset, size_t length, const std::vector<std::string>& alphabet) { + std::string s = ""; + for (size_t k = 0; k < length; ++k) { + s += alphabet[(offset + k) % alphabet.size()]; + } + + return s; +} + +std::string default_fixed_length_ascii_string(const std::vector<size_t>& indices, size_t length) { + auto isum = std::accumulate(indices.begin(), indices.end(), size_t(0)); + return default_string(isum, length, ascii_alphabet); +} + +std::string default_variable_length_ascii_string(const std::vector<size_t>& indices) { + auto isum = std::accumulate(indices.begin(), indices.end(), size_t(0)); + return default_string(isum, isum, ascii_alphabet); +} + +template <class T, class = void> +struct DefaultValues; + +template <class T> +struct DefaultValues<T, typename std::enable_if<std::is_floating_point<T>::value>::type> { + T operator()(const std::vector<size_t>& indices) const { + auto eps = std::numeric_limits<T>::epsilon(); + return default_real_value(indices, T(100.0), T(0.01), T(1.0) + T(8) * eps); + } +}; + +template <class T> +struct DefaultValues<T, typename std::enable_if<std::is_integral<T>::value>::type> { + T operator()(const std::vector<size_t>& indices) const { + return default_real_value(indices, T(100), T(1), T(1)); + } +}; + +template <> +struct DefaultValues<char> { + char operator()(const std::vector<size_t>& indices) const { + auto isum = std::accumulate(indices.begin(), indices.end(), size_t(0)); + return char(isum % size_t(std::numeric_limits<char>::max)); + } +}; + +template <> +struct DefaultValues<unsigned char> { + unsigned char operator()(const std::vector<size_t>& indices) const { + auto isum = std::accumulate(indices.begin(), indices.end(), size_t(0)); + return (unsigned char) (isum % size_t(std::numeric_limits<unsigned char>::max)); + } +}; + +template <> +struct DefaultValues<std::string> { + std::string operator()(const std::vector<size_t>& indices) const { + return default_variable_length_ascii_string(indices); + } +}; + +template <> +struct DefaultValues<bool> { + bool operator()(const std::vector<size_t>& indices) const { + auto isum = std::accumulate(indices.begin(), indices.end(), size_t(0)); + return (isum % 2) == 0; + } +}; + +template <class T, size_t N> +struct MultiDimVector { + using type = std::vector<typename MultiDimVector<T, N - 1>::type>; +}; + +template <class T> +struct MultiDimVector<T, 0> { + using type = T; +}; + +template <class Container> +class DataGenerator { + public: + constexpr static size_t rank = 
details::inspector<Container>::recursive_ndim; + using traits = ContainerTraits<Container>; + using base_type = typename traits::base_type; + using container_type = Container; + + public: + static container_type allocate(const std::vector<size_t>& dims) { + return traits::allocate(dims); + } + + template <class F> + static container_type create(const std::vector<size_t>& dims, F f) { + auto array = allocate(dims); + initialize(array, dims, f); + + return array; + } + + static container_type create(const std::vector<size_t>& dims) { + return create(dims, DefaultValues<typename traits::base_type>()); + } + + static std::vector<size_t> default_dims() { + using difference_type = std::vector<size_t>::difference_type; + std::vector<size_t> oversized{2, 3, 5, 7, 2, 3, 5, 7}; + std::vector<size_t> dims(oversized.begin(), oversized.begin() + difference_type(rank)); + ContainerTraits<Container>::sanitize_dims(dims, /* axis = */ 0); + + return dims; + } + + static void sanitize_dims(std::vector<size_t>& dims) { + ContainerTraits<Container>::sanitize_dims(dims, /* axis = */ 0); + } + + private: + template <class C, class F> + static void initialize(C& array, const std::vector<size_t>& dims, F f) { + std::vector<size_t> indices(dims.size()); + initialize(array, dims, indices, 0, f); + } + + template <class C, class F> + static void initialize(C& array, + const std::vector<size_t>& dims, + std::vector<size_t>& indices, + size_t axis, + F f) { + if (axis == indices.size()) { + auto value = f(indices); + traits::set(array, indices, value); + } else { + for (size_t i = 0; i < dims[axis]; ++i) { + indices[axis] = i; + initialize(array, dims, indices, axis + 1, f); + } + } + } +}; + +} // namespace testing +} // namespace HighFive diff --git a/packages/HighFive/tests/unit/supported_types.hpp b/packages/HighFive/tests/unit/supported_types.hpp new file mode 100644 index 0000000000000000000000000000000000000000..f708303b1dd55f7aced2e14f9b139fcd9b38fbc6 --- /dev/null +++ b/packages/HighFive/tests/unit/supported_types.hpp @@ -0,0 +1,108 @@ + +#pragma once + +#include <type_traits> +#include <vector> +#include <array> +#include <tuple> + +#ifdef H5_USE_BOOST +#include <boost/multi_array.hpp> +#endif + +namespace HighFive { +namespace testing { + +struct type_identity { + template <class T> + using type = T; +}; + +template <class C = type_identity> +struct STDVector { + template <class T> + using type = std::vector<typename C::template type<T>>; +}; + +template <size_t n, class C = type_identity> +struct STDArray { + template <class T> + using type = std::array<typename C::template type<T>, n>; +}; + +#ifdef H5_USE_BOOST +template <size_t n, class C = type_identity> +struct BoostMultiArray { + template <class T> + using type = boost::multi_array<typename C::template type<T>, 4>; +}; + +template <class C = type_identity> +struct BoostUblasMatrix { + template <class T> + using type = boost::numeric::ublas::matrix<typename C::template type<T>>; +}; +#endif + +template <class C, class Tuple> +struct ContainerProduct; + +template <class C, class... ScalarTypes> +struct ContainerProduct<C, std::tuple<ScalarTypes...>> { + using type = std::tuple<typename C::template type<ScalarTypes>...>; +}; + +template <class... Tuples> +struct ConcatenateTuples; + +template <class... Args1, class... Args2, class... Tuples> +struct ConcatenateTuples<std::tuple<Args1...>, std::tuple<Args2...>, Tuples...> { + using type = typename ConcatenateTuples<std::tuple<Args1..., Args2...>, Tuples...>::type; +}; + +template <class... 
Args1> +struct ConcatenateTuples<std::tuple<Args1...>> { + using type = std::tuple<Args1...>; +}; + +// clang-format off +using numeric_scalar_types = std::tuple< + int, + unsigned int, + long, + unsigned long, + unsigned char, + char, + float, + double, + long long, + unsigned long long +>; + +using scalar_types = typename ConcatenateTuples<numeric_scalar_types, std::tuple<bool, std::string>>::type; +using scalar_types_boost = typename ConcatenateTuples<numeric_scalar_types, std::tuple<bool>>::type; + +using supported_array_types = typename ConcatenateTuples< +#ifdef H5_USE_BOOST + typename ContainerProduct<BoostMultiArray<3>, scalar_types_boost>::type, + typename ContainerProduct<STDVector<BoostMultiArray<3>>, scalar_types_boost>::type, + typename ContainerProduct<STDArray<5, BoostMultiArray<3>>, scalar_types_boost>::type, + + typename ContainerProduct<BoostUblasMatrix<>, scalar_types_boost>::type, + typename ContainerProduct<STDVector<BoostUblasMatrix<>>, scalar_types_boost>::type, + typename ContainerProduct<STDArray<5, BoostUblasMatrix<>>, scalar_types_boost>::type, +#endif + typename ContainerProduct<STDVector<>, scalar_types>::type, + typename ContainerProduct<STDVector<STDVector<>>, scalar_types>::type, + typename ContainerProduct<STDVector<STDVector<STDVector<>>>, scalar_types>::type, + typename ContainerProduct<STDVector<STDVector<STDVector<STDVector<>>>>, scalar_types>::type, + typename ContainerProduct<STDArray<3>, scalar_types>::type, + typename ContainerProduct<STDArray<7, STDArray<5>>, scalar_types>::type, + typename ContainerProduct<STDVector<STDArray<5>>, scalar_types>::type, + typename ContainerProduct<STDArray<7, STDVector<>>, scalar_types>::type +>::type; + +// clang-format on + +} // namespace testing +} // namespace HighFive diff --git a/packages/HighFive/tests/unit/test_all_types.cpp b/packages/HighFive/tests/unit/test_all_types.cpp index d74579af6b23176cd99bd9436d690e623fdc5f9b..e772fd1d7d932a1d6905c0b9bec62794f4b693cf 100644 --- a/packages/HighFive/tests/unit/test_all_types.cpp +++ b/packages/HighFive/tests/unit/test_all_types.cpp @@ -7,26 +7,31 @@ * */ #include <string> +#include <sstream> #include <catch2/catch_template_test_macros.hpp> #include <highfive/highfive.hpp> +#include <type_traits> #include "tests_high_five.hpp" +#include "data_generator.hpp" +#include "create_traits.hpp" +#include "supported_types.hpp" using namespace HighFive; TEMPLATE_TEST_CASE("Scalar in DataSet", "[Types]", bool, std::string) { - const std::string FILE_NAME("rw_dataset_" + typeNameHelper<TestType>() + ".h5"); - const std::string DATASET_NAME("dset"); + const std::string file_name("rw_dataset_" + typeNameHelper<TestType>() + ".h5"); + const std::string dataset_name("dset"); TestType t1{}; { // Create a new file using the default property lists. 
- File file(FILE_NAME, File::ReadWrite | File::Create | File::Truncate); + File file(file_name, File::ReadWrite | File::Create | File::Truncate); // Create the dataset DataSet dataset = - file.createDataSet(DATASET_NAME, + file.createDataSet(dataset_name, DataSpace(1), create_datatype<typename details::inspector<TestType>::base_type>()); @@ -36,27 +41,27 @@ TEMPLATE_TEST_CASE("Scalar in DataSet", "[Types]", bool, std::string) { // read it back { - File file(FILE_NAME, File::ReadOnly); + File file(file_name, File::ReadOnly); TestType value; - DataSet dataset = file.getDataSet("/" + DATASET_NAME); + DataSet dataset = file.getDataSet("/" + dataset_name); dataset.read(value); CHECK(t1 == value); } } TEMPLATE_PRODUCT_TEST_CASE("Scalar in std::vector", "[Types]", std::vector, (bool, std::string)) { - const std::string FILE_NAME("rw_dataset_" + typeNameHelper<TestType>() + ".h5"); - const std::string DATASET_NAME("dset"); + const std::string file_name("rw_dataset_" + typeNameHelper<TestType>() + ".h5"); + const std::string dataset_name("dset"); TestType t1(5); { // Create a new file using the default property lists. - File file(FILE_NAME, File::ReadWrite | File::Create | File::Truncate); + File file(file_name, File::ReadWrite | File::Create | File::Truncate); // Create the dataset DataSet dataset = file.createDataSet( - DATASET_NAME, {5}, create_datatype<typename details::inspector<TestType>::base_type>()); + dataset_name, {5}, create_datatype<typename details::inspector<TestType>::base_type>()); // Write into the initial part of the dataset dataset.write(t1); @@ -64,10 +69,10 @@ TEMPLATE_PRODUCT_TEST_CASE("Scalar in std::vector", "[Types]", std::vector, (boo // read it back { - File file(FILE_NAME, File::ReadOnly); + File file(file_name, File::ReadOnly); TestType value; - DataSet dataset = file.getDataSet("/" + DATASET_NAME); + DataSet dataset = file.getDataSet("/" + dataset_name); dataset.read(value); CHECK(t1 == value); CHECK(value.size() == 5); @@ -78,8 +83,8 @@ TEMPLATE_PRODUCT_TEST_CASE("Scalar in std::vector<std::vector>", "[Types]", std::vector, (bool, std::string)) { - const std::string FILE_NAME("rw_dataset_vector_" + typeNameHelper<TestType>() + ".h5"); - const std::string DATASET_NAME("dset"); + const std::string file_name("rw_dataset_vector_" + typeNameHelper<TestType>() + ".h5"); + const std::string dataset_name("dset"); std::vector<TestType> t1(5); for (auto&& e: t1) { e.resize(6); @@ -87,11 +92,11 @@ TEMPLATE_PRODUCT_TEST_CASE("Scalar in std::vector<std::vector>", { // Create a new file using the default property lists. 
- File file(FILE_NAME, File::ReadWrite | File::Create | File::Truncate); + File file(file_name, File::ReadWrite | File::Create | File::Truncate); // Create the dataset DataSet dataset = file.createDataSet( - DATASET_NAME, + dataset_name, {5, 6}, create_datatype<typename details::inspector<std::vector<TestType>>::base_type>()); @@ -101,10 +106,10 @@ TEMPLATE_PRODUCT_TEST_CASE("Scalar in std::vector<std::vector>", // read it back { - File file(FILE_NAME, File::ReadOnly); + File file(file_name, File::ReadOnly); std::vector<TestType> value; - DataSet dataset = file.getDataSet("/" + DATASET_NAME); + DataSet dataset = file.getDataSet("/" + dataset_name); dataset.read(value); CHECK(t1 == value); CHECK(value.size() == 5); @@ -112,17 +117,17 @@ TEMPLATE_PRODUCT_TEST_CASE("Scalar in std::vector<std::vector>", } TEMPLATE_TEST_CASE("Scalar in std::array", "[Types]", bool, std::string) { - const std::string FILE_NAME("rw_dataset_array_" + typeNameHelper<TestType>() + ".h5"); - const std::string DATASET_NAME("dset"); + const std::string file_name("rw_dataset_array_" + typeNameHelper<TestType>() + ".h5"); + const std::string dataset_name("dset"); std::array<TestType, 5> t1{}; { // Create a new file using the default property lists. - File file(FILE_NAME, File::ReadWrite | File::Create | File::Truncate); + File file(file_name, File::ReadWrite | File::Create | File::Truncate); // Create the dataset DataSet dataset = file.createDataSet( - DATASET_NAME, + dataset_name, {5}, create_datatype<typename details::inspector<std::array<TestType, 5>>::base_type>()); @@ -132,10 +137,10 @@ TEMPLATE_TEST_CASE("Scalar in std::array", "[Types]", bool, std::string) { // read it back { - File file(FILE_NAME, File::ReadOnly); + File file(file_name, File::ReadOnly); std::array<TestType, 5> value; - DataSet dataset = file.getDataSet("/" + DATASET_NAME); + DataSet dataset = file.getDataSet("/" + dataset_name); dataset.read(value); CHECK(t1 == value); CHECK(value.size() == 5); @@ -143,17 +148,17 @@ TEMPLATE_TEST_CASE("Scalar in std::array", "[Types]", bool, std::string) { } TEMPLATE_TEST_CASE("Scalar in std::vector<std::array>", "[Types]", bool, std::string) { - const std::string FILE_NAME("rw_dataset_vector_array_" + typeNameHelper<TestType>() + ".h5"); - const std::string DATASET_NAME("dset"); + const std::string file_name("rw_dataset_vector_array_" + typeNameHelper<TestType>() + ".h5"); + const std::string dataset_name("dset"); std::vector<std::array<TestType, 6>> t1(5); { // Create a new file using the default property lists. 
- File file(FILE_NAME, File::ReadWrite | File::Create | File::Truncate); + File file(file_name, File::ReadWrite | File::Create | File::Truncate); // Create the dataset DataSet dataset = file.createDataSet( - DATASET_NAME, + dataset_name, {5, 6}, create_datatype< typename details::inspector<std::vector<std::array<TestType, 5>>>::base_type>()); @@ -164,28 +169,63 @@ TEMPLATE_TEST_CASE("Scalar in std::vector<std::array>", "[Types]", bool, std::st // read it back { - File file(FILE_NAME, File::ReadOnly); + File file(file_name, File::ReadOnly); std::vector<std::array<TestType, 6>> value; - DataSet dataset = file.getDataSet("/" + DATASET_NAME); + DataSet dataset = file.getDataSet("/" + dataset_name); dataset.read(value); CHECK(t1 == value); CHECK(value.size() == 5); } } +TEMPLATE_TEST_CASE("Scalar in std::array<std::vector>", "[Types]", bool, std::string) { + const std::string file_name("rw_dataset_array_vector" + typeNameHelper<TestType>() + ".h5"); + const std::string dataset_name("dset"); + std::array<std::vector<TestType>, 6> t1; + for (auto& tt: t1) { + tt = std::vector<TestType>(5); + } + + { + // Create a new file using the default property lists. + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + + // Create the dataset + DataSet dataset = file.createDataSet( + dataset_name, + {6, 5}, + create_datatype< + typename details::inspector<std::vector<std::array<TestType, 5>>>::base_type>()); + + // Write into the initial part of the dataset + dataset.write(t1); + } + + // read it back + { + File file(file_name, File::ReadOnly); + + std::array<std::vector<TestType>, 6> value; + DataSet dataset = file.getDataSet("/" + dataset_name); + dataset.read(value); + CHECK(t1 == value); + CHECK(value.size() == 6); + } +} + #if HIGHFIVE_CXX_STD >= 17 TEMPLATE_PRODUCT_TEST_CASE("Scalar in std::vector<std::byte>", "[Types]", std::vector, std::byte) { - const std::string FILE_NAME("rw_dataset_vector_" + typeNameHelper<TestType>() + ".h5"); - const std::string DATASET_NAME("dset"); + const std::string file_name("rw_dataset_vector_" + typeNameHelper<TestType>() + ".h5"); + const std::string dataset_name("dset"); TestType t1(5, std::byte(0xCD)); { // Create a new file using the default property lists. 
- File file(FILE_NAME, File::ReadWrite | File::Create | File::Truncate); + File file(file_name, File::ReadWrite | File::Create | File::Truncate); // Create the dataset - DataSet dataset = file.createDataSet(DATASET_NAME, {5}, create_datatype<std::byte>()); + DataSet dataset = file.createDataSet(dataset_name, {5}, create_datatype<std::byte>()); // Write into the initial part of the dataset dataset.write(t1); @@ -193,13 +233,254 @@ TEMPLATE_PRODUCT_TEST_CASE("Scalar in std::vector<std::byte>", "[Types]", std::v // read it back { - File file(FILE_NAME, File::ReadOnly); + File file(file_name, File::ReadOnly); TestType value(5, std::byte(0xCD)); - DataSet dataset = file.getDataSet("/" + DATASET_NAME); + DataSet dataset = file.getDataSet("/" + dataset_name); dataset.read(value); CHECK(t1 == value); CHECK(value.size() == 5); } } #endif + +template <class T, class = void> +struct DiffMessageTrait; + +template <class T> +struct DiffMessageTrait<T, typename std::enable_if<std::is_floating_point<T>::value>::type> { + static std::string diff(T a, T b) { + std::stringstream sstream; + sstream << std::scientific << " delta: " << a - b; + return sstream.str(); + } +}; + +template <class T> +struct DiffMessageTrait<T, typename std::enable_if<!std::is_floating_point<T>::value>::type> { + static std::string diff(T /* a */, T /* b */) { + return ""; + } +}; + +template <class T> +std::string diff_message(T a, T b) { + return DiffMessageTrait<T>::diff(a, b); +} + +template <class Actual, class Expected, class Comp> +void compare_arrays(const Actual& actual, + const Expected& expected, + const std::vector<size_t>& dims, + Comp comp) { + using actual_trait = testing::ContainerTraits<Actual>; + using expected_trait = testing::ContainerTraits<Expected>; + using base_type = typename actual_trait::base_type; + + auto n = testing::flat_size(dims); + + for (size_t i = 0; i < n; ++i) { + auto indices = testing::unravel(i, dims); + base_type actual_value = actual_trait::get(actual, indices); + base_type expected_value = expected_trait::get(expected, indices); + auto c = comp(actual_value, expected_value); + if (!c) { + std::stringstream sstream; + sstream << std::scientific << "i = " << i << ": " << actual_value + << " != " << expected_value << diff_message(actual_value, expected_value); + INFO(sstream.str()); + } + REQUIRE(c); + } +} + +template <class Actual, class Expected> +void compare_arrays(const Actual& actual, + const Expected& expected, + const std::vector<size_t>& dims) { + using base_type = typename testing::ContainerTraits<Actual>::base_type; + compare_arrays(expected, actual, dims, [](base_type a, base_type b) { return a == b; }); +} + +template <class Container, class Expected, class Obj> +void check_read_auto(const Expected& expected, const std::vector<size_t>& dims, const Obj& obj) { + compare_arrays(obj.template read<Container>(), expected, dims); +} + +template <class Container, class Expected, class Obj> +void check_read_preallocated(const Expected& expected, + const std::vector<size_t>& dims, + const Obj& obj) { + auto actual = testing::DataGenerator<Container>::allocate(dims); + obj.read(actual); + + compare_arrays(actual, expected, dims); +} + +template <class Container> +void check_read_regular(const std::string& file_name, const std::vector<size_t>& dims) { + using traits = testing::DataGenerator<Container>; + using base_type = typename traits::base_type; + using reference_type = typename testing::MultiDimVector<base_type, traits::rank>::type; + + auto file = File(file_name, File::Truncate); + 
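+    // The expected values are materialized as a plain nested std::vector
+    // (`reference_type`), so the reference data does not depend on the
+    // container type under test. The same values are then written once as an
+    // attribute and once as a dataset, and each SECTION below reads them back
+    // through a different read API.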
auto expected = testing::copy<reference_type>(traits::create(dims), dims); + + auto dataspace = DataSpace(dims); + auto attr = testing::AttributeCreateTraits::create<base_type>(file, "dset", dataspace); + attr.write(expected); + + auto dset = testing::DataSetCreateTraits::create<base_type>(file, "attr", dataspace); + dset.write(expected); + + + SECTION("dset.read<Container>()") { + check_read_auto<Container>(expected, dims, dset); + } + + SECTION("dset.read(values)") { + check_read_preallocated<Container>(expected, dims, dset); + } + + SECTION("attr.read<Container>()") { + check_read_auto<Container>(expected, dims, attr); + } + + SECTION("attr.read(values)") { + check_read_preallocated<Container>(expected, dims, attr); + } +} + +template <class Container> +void check_read_regular() { + const std::string file_name("rw_read_regular" + typeNameHelper<Container>() + ".h5"); + auto dims = testing::DataGenerator<Container>::default_dims(); + + check_read_regular<Container>(file_name, dims); +} + +TEMPLATE_LIST_TEST_CASE("TestReadRegular", "[read]", testing::supported_array_types) { + check_read_regular<TestType>(); +} + +template <class Container, class Write> +void check_writing(const std::vector<size_t>& dims, Write write) { + using traits = testing::DataGenerator<Container>; + using base_type = typename traits::base_type; + using reference_type = typename testing::MultiDimVector<base_type, traits::rank>::type; + + auto values = testing::DataGenerator<Container>::create(dims); + auto expected = testing::copy<reference_type>(values, dims); + + auto obj = write(values); + + auto actual = testing::DataGenerator<reference_type>::allocate(dims); + obj.read(actual); + + compare_arrays(actual, expected, dims); +} + +template <class CreateTraits, class Container> +void check_write_auto(File& file, const std::string& name, const std::vector<size_t>& dims) { + auto write_auto = [&](const Container& values) { + return CreateTraits::create(file, "auto_" + name, values); + }; + + check_writing<Container>(dims, write_auto); +} + +template <class CreateTraits, class Container> +void check_write_deduce_type(File& file, const std::string& name, const std::vector<size_t>& dims) { + auto write_two_phase_auto = [&](const Container& values) { + using traits = testing::ContainerTraits<Container>; + auto dataspace = DataSpace(dims); + auto h5 = CreateTraits::template create<typename traits::base_type>(file, + "two_phase_auto" + name, + dataspace); + h5.write(values); + return h5; + }; + check_writing<Container>(dims, write_two_phase_auto); +} + +template <class CreateTraits, class Container> +void check_write_manual(File& file, const std::string& name, const std::vector<size_t>& dims) { + auto write_two_phase = [&](const Container& values) { + using traits = testing::ContainerTraits<Container>; + auto datatype = create_datatype<typename traits::base_type>(); + auto dataspace = DataSpace(dims); + auto h5 = CreateTraits::create(file, "two_phase_" + name, dataspace, datatype); + h5.write(values); + return h5; + }; + check_writing<Container>(dims, write_two_phase); +} + +template <class Container> +void check_write_regular(const std::string& file_name, const std::vector<size_t>& dims) { + auto file = File(file_name, File::Truncate); + + SECTION("createDataSet(name, container)") { + check_write_auto<testing::DataSetCreateTraits, Container>(file, "dset", dims); + } + + SECTION("createDataSet(name, container)") { + check_write_deduce_type<testing::DataSetCreateTraits, Container>(file, "dset", dims); + } + + 
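+    // Same target, but created through the fully manual path: an explicit
+    // DataSpace together with an explicitly constructed DataType (see
+    // `check_write_manual`).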
SECTION("createDataSet(name, container)") { + check_write_manual<testing::DataSetCreateTraits, Container>(file, "dset", dims); + } + + SECTION("createAttribute(name, container)") { + check_write_auto<testing::AttributeCreateTraits, Container>(file, "attr", dims); + } + + SECTION("createAttribute(name, container)") { + check_write_deduce_type<testing::AttributeCreateTraits, Container>(file, "attr", dims); + } + + SECTION("createAttribute(name, container)") { + check_write_manual<testing::AttributeCreateTraits, Container>(file, "attr", dims); + } +} + +template <class Container> +void check_write_regular() { + std::string file_name("rw_write_regular" + typeNameHelper<Container>() + ".h5"); + auto dims = testing::DataGenerator<Container>::default_dims(); + check_write_regular<Container>(file_name, dims); +} + +TEMPLATE_LIST_TEST_CASE("TestWriteRegularSTDVector", "[write]", testing::supported_array_types) { + check_write_regular<TestType>(); +} + +TEST_CASE("DataGeneratorDefaultDims", "[internal]") { + SECTION("std::array") { + auto dims = testing::DataGenerator<std::array<double, 3>>::default_dims(); + REQUIRE(dims.size() == 1); + CHECK(dims[0] == 3); + } + + SECTION("std::vector") { + auto dims = testing::DataGenerator<std::vector<double>>::default_dims(); + REQUIRE(dims.size() == 1); + CHECK(dims[0] > 0); + } + + SECTION("std::vector<std::vector>") { + auto dims = testing::DataGenerator<std::vector<std::vector<double>>>::default_dims(); + REQUIRE(dims.size() == 2); + CHECK(dims[0] * dims[1] > 0); + } +} + +TEST_CASE("ravel", "[internal]") { + std::vector<size_t> dims = {2, 4, 5}; + std::vector<size_t> indices = {1, 2, 3}; + size_t flat_index = indices[2] + dims[2] * (indices[1] + dims[1] * indices[0]); + + CHECK(flat_index == testing::ravel(indices, dims)); + CHECK(indices == testing::unravel(flat_index, dims)); +} diff --git a/packages/HighFive/tests/unit/test_high_five_selection.cpp b/packages/HighFive/tests/unit/test_high_five_selection.cpp new file mode 100644 index 0000000000000000000000000000000000000000..e3b91e4cc1b9793e7d69e2beb396865c95f6a0e2 --- /dev/null +++ b/packages/HighFive/tests/unit/test_high_five_selection.cpp @@ -0,0 +1,536 @@ +/* + * Copyright (c), 2017-2023, Blue Brain Project - EPFL + * + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + * + */ +#include <algorithm> +#include <cstdio> +#include <cstdlib> +#include <ctime> +#include <iostream> +#include <map> +#include <memory> +#include <random> +#include <string> +#include <typeinfo> +#include <type_traits> +#include <vector> + +#include <catch2/catch_test_macros.hpp> +#include <catch2/catch_template_test_macros.hpp> +#include <catch2/matchers/catch_matchers_vector.hpp> + +#include <highfive/highfive.hpp> +#include "tests_high_five.hpp" + +using namespace HighFive; +using Catch::Matchers::Equals; + +template <typename T> +void selectionArraySimpleTest() { + typedef typename std::vector<T> Vector; + + std::ostringstream filename; + filename << "h5_rw_select_test_" << typeNameHelper<T>() << "_test.h5"; + + const size_t size_x = 10; + const size_t offset_x = 2, count_x = 5; + + const std::string dataset_name("dset"); + + Vector values(size_x); + + ContentGenerate<T> generator; + std::generate(values.begin(), values.end(), generator); + + // Create a new file using the default property lists. 
+ File file(filename.str(), File::ReadWrite | File::Create | File::Truncate); + + DataSet dataset = file.createDataSet<T>(dataset_name, DataSpace::From(values)); + + dataset.write(values); + + file.flush(); + + // select slice + { + // read it back + Vector result; + std::vector<size_t> offset{offset_x}; + std::vector<size_t> size{count_x}; + + Selection slice = dataset.select(offset, size); + + CHECK(slice.getSpace().getDimensions()[0] == size_x); + CHECK(slice.getMemSpace().getDimensions()[0] == count_x); + + slice.read(result); + + CHECK(result.size() == 5); + + for (size_t i = 0; i < count_x; ++i) { + REQUIRE(values[i + offset_x] == result[i]); + } + } + + // select cherry pick + { + // read it back + Vector result; + std::vector<size_t> ids{1, 3, 4, 7}; + + Selection slice = dataset.select(ElementSet(ids)); + + CHECK(slice.getSpace().getDimensions()[0] == size_x); + CHECK(slice.getMemSpace().getDimensions()[0] == ids.size()); + + slice.read(result); + + CHECK(result.size() == ids.size()); + + for (size_t i = 0; i < ids.size(); ++i) { + const std::size_t id = ids[i]; + REQUIRE(values[id] == result[i]); + } + } +} + +TEST_CASE("selectionArraySimpleString") { + selectionArraySimpleTest<std::string>(); +} + +TEMPLATE_LIST_TEST_CASE("selectionArraySimple", "[template]", dataset_test_types) { + selectionArraySimpleTest<TestType>(); +} + +TEST_CASE("selectionByElementMultiDim") { + const std::string file_name("h5_test_selection_multi_dim.h5"); + // Create a 2-dim dataset + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + std::vector<size_t> dims{3, 3}; + + auto set = file.createDataSet("test", DataSpace(dims), AtomicType<int>()); + int values[3][3] = {{1, 2, 3}, {4, 5, 6}, {7, 8, 9}}; + set.write(values); + + { + int value; + set.select(ElementSet{{1, 1}}).read(value); + CHECK(value == 5); + } + + { + int value[2]; + set.select(ElementSet{0, 0, 2, 2}).read(value); + CHECK(value[0] == 1); + CHECK(value[1] == 9); + } + + { + int value[2]; + set.select(ElementSet{{0, 1}, {1, 2}}).read(value); + CHECK(value[0] == 2); + CHECK(value[1] == 6); + } + + { + SilenceHDF5 silencer; + CHECK_THROWS_AS(set.select(ElementSet{0, 1, 2}), DataSpaceException); + } +} + +template <typename T> +void columnSelectionTest() { + std::ostringstream filename; + filename << "h5_rw_select_column_test_" << typeNameHelper<T>() << "_test.h5"; + + const size_t x_size = 10; + const size_t y_size = 7; + + const std::string dataset_name("dset"); + + T values[x_size][y_size]; + + ContentGenerate<T> generator; + generate2D(values, x_size, y_size, generator); + + // Create a new file using the default property lists. + File file(filename.str(), File::ReadWrite | File::Create | File::Truncate); + + // Create the data space for the dataset. 
+ std::vector<size_t> dims{x_size, y_size}; + + DataSpace dataspace(dims); + // Create a dataset with arbitrary type + DataSet dataset = file.createDataSet<T>(dataset_name, dataspace); + + dataset.write(values); + + file.flush(); + + std::vector<size_t> columns{1, 3, 5}; + + Selection slice = dataset.select(columns); + T result[x_size][3]; + slice.read(result); + + CHECK(slice.getSpace().getDimensions()[0] == x_size); + CHECK(slice.getMemSpace().getDimensions()[0] == x_size); + + for (size_t i = 0; i < 3; ++i) + for (size_t j = 0; j < x_size; ++j) + REQUIRE(result[j][i] == values[j][columns[i]]); +} + +TEMPLATE_LIST_TEST_CASE("columnSelection", "[template]", numerical_test_types) { + columnSelectionTest<TestType>(); +} + +std::vector<std::array<size_t, 2>> global_indices_2d(const std::vector<size_t>& offset, + const std::vector<size_t>& count) { + std::vector<std::array<size_t, 2>> indices; + indices.reserve(count[0] * count[1]); + + for (size_t i = 0; i < count[0]; ++i) { + for (size_t j = 0; j < count[1]; ++j) { + indices.push_back({offset[0] + i, offset[1] + j}); + } + } + + return indices; +} + +std::vector<std::array<size_t, 2>> local_indices_2d(const std::vector<size_t>& count) { + return global_indices_2d({0ul, 0ul}, count); +} + +std::vector<std::array<size_t, 1>> local_indices_1d(const std::vector<size_t>& count) { + std::vector<std::array<size_t, 1>> local_indices; + for (size_t i = 0; i < count[0]; ++i) { + local_indices.push_back({i}); + } + + return local_indices; +} + +struct RegularHyperSlabAnswer { + static RegularHyperSlabAnswer createRegular(const std::vector<size_t>& offset, + const std::vector<size_t>& count) { + return RegularHyperSlabAnswer{global_indices_2d(offset, count), + local_indices_1d({count[0] * count[1]})}; + } + + // These are the selected indices in the + // outer (larger) array. + std::vector<std::array<size_t, 2>> global_indices; + + // These are the selected indices in the compacted (inner) + // array. 
+ std::vector<std::array<size_t, 1>> local_indices; +}; + +struct RegularHyperSlabTestData { + std::string desc; + HyperSlab slab; + RegularHyperSlabAnswer answer; +}; + +std::vector<RegularHyperSlabTestData> make_regular_hyperslab_test_data() { + std::vector<RegularHyperSlabTestData> test_data; + + // The dataset is 10x8, we define the following regular + // hyperslabs: + // x----------------x + // | | + // | x------x e | 1 + // | | a | | + // x-|------|-------x 3 + // | | x-|-------x 4 + // | | | | b | + // | | c-|-------c 5 + // | | b-|-------b 6 + // | | | | c | + // | d----x-d-------x 7 + // | | d | | + // | a------a | 9 + // | | + // ------------------ + // 1 3 4 8 + + std::map<std::string, RegularHyperSlab> slabs; + + slabs["a"] = RegularHyperSlab(/* offset = */ {1ul, 1ul}, + /* count = */ {8ul, 3ul}); + + slabs["b"] = RegularHyperSlab(/* offset = */ {4ul, 3ul}, + /* count = */ {2ul, 5ul}); + + slabs["c"] = RegularHyperSlab(/* offset = */ {5ul, 3ul}, + /* count = */ {2ul, 5ul}); + + slabs["d"] = RegularHyperSlab(/* offset = */ {7ul, 1ul}, + /* count = */ {2ul, 3ul}); + + slabs["e"] = RegularHyperSlab(/* offset = */ {0ul, 0ul}, + /* count = */ {3ul, 8ul}); + + // Union, regular + auto slab_bc_union = HyperSlab(slabs["b"]) | slabs["c"]; + auto answer_bc_union = RegularHyperSlabAnswer::createRegular({4ul, 3ul}, {3ul, 5ul}); + test_data.push_back({"b | c", slab_bc_union, answer_bc_union}); + + // Intersection, always regular + auto slab_ab_cut = HyperSlab(slabs["a"]) & slabs["b"]; + auto answer_ab_cut = RegularHyperSlabAnswer::createRegular({4ul, 3ul}, {2ul, 1ul}); + test_data.push_back({"a & b", slab_ab_cut, answer_ab_cut}); + + // Intersection, always regular + auto slab_bc_cut = HyperSlab(slabs["b"]) & slabs["c"]; + auto answer_bc_cut = RegularHyperSlabAnswer::createRegular({5ul, 3ul}, {1ul, 5ul}); + test_data.push_back({"b & c", slab_bc_cut, answer_bc_cut}); + + // Xor, regular + auto slab_ad_xor = HyperSlab(slabs["a"]) ^ slabs["d"]; + auto answer_ad_xor = RegularHyperSlabAnswer::createRegular({1ul, 1ul}, {6ul, 3ul}); + test_data.push_back({"a ^ b", slab_ad_xor, answer_ad_xor}); + + // (not b) and c, regular + auto slab_bc_nota = HyperSlab(slabs["b"]).notA(slabs["c"]); + auto answer_bc_nota = RegularHyperSlabAnswer::createRegular({6ul, 3ul}, {1ul, 5ul}); + test_data.push_back({"b notA a", slab_bc_nota, answer_bc_nota}); + + // (not c) and b, regular + auto slab_cb_notb = HyperSlab(slabs["c"]).notB(slabs["b"]); + auto answer_cb_notb = RegularHyperSlabAnswer::createRegular({6ul, 3ul}, {1ul, 5ul}); + test_data.push_back({"c notB b", slab_cb_notb, answer_cb_notb}); + + return test_data; +} + +template <class T, size_t x_size, size_t y_size> +File setupHyperSlabFile(T (&values)[x_size][y_size], + const std::string& filename, + const std::string& dataset_name) { + ContentGenerate<T> generator; + generate2D(values, x_size, y_size, generator); + + // Create a new file using the default property lists. + File file(filename, File::ReadWrite | File::Create | File::Truncate); + + // Create the data space for the dataset. 
+ std::vector<size_t> dims{x_size, y_size}; + + DataSpace dataspace(dims); + // Create a dataset with arbitrary type + DataSet dataset = file.createDataSet<T>(dataset_name, dataspace); + + dataset.write(values); + file.flush(); + + return file; +} + +template <typename T> +void regularHyperSlabSelectionTest() { + std::ostringstream filename; + filename << "h5_rw_select_regular_hyperslab_test_" << typeNameHelper<T>() << "_test.h5"; + const std::string dataset_name("dset"); + + const size_t x_size = 10; + const size_t y_size = 8; + + T values[x_size][y_size]; + + auto file = setupHyperSlabFile(values, filename.str(), dataset_name); + auto test_cases = make_regular_hyperslab_test_data(); + + for (const auto& test_case: test_cases) { + SECTION(test_case.desc) { + std::vector<T> result; + + file.getDataSet(dataset_name).select(test_case.slab).read(result); + + auto n_selected = test_case.answer.global_indices.size(); + for (size_t i = 0; i < n_selected; ++i) { + const auto ig = test_case.answer.global_indices[i]; + const auto il = test_case.answer.local_indices[i]; + + REQUIRE(result[il[0]] == values[ig[0]][ig[1]]); + } + } + } +} + +TEMPLATE_LIST_TEST_CASE("hyperSlabSelection", "[template]", numerical_test_types) { + regularHyperSlabSelectionTest<TestType>(); +} + +struct IrregularHyperSlabAnswer { + // These are the selected indices in the outer (larger) array. + std::vector<std::array<size_t, 2>> global_indices; +}; + +struct IrregularHyperSlabTestData { + std::string desc; + HyperSlab slab; + IrregularHyperSlabAnswer answer; +}; + +std::vector<IrregularHyperSlabTestData> make_irregular_hyperslab_test_data() { + // The dataset is 10x8, with two regular hyperslabs: + // x----------------x + // | | + // | bbbb | + // | bbbb | + // | aaaabb | + // | aaaabb | + // | bbbb | + // | bbbb | + // | | + // | | + // | | + // | | + // ------------------ + + auto slabs = std::map<std::string, RegularHyperSlab>{}; + slabs["a"] = RegularHyperSlab{{2ul, 0ul}, {1ul, 2ul}}; + slabs["b"] = RegularHyperSlab{{1ul, 1ul}, {3ul, 2ul}}; + + std::vector<IrregularHyperSlabTestData> test_data; + + // Union, irregular + auto slab_ab_union = HyperSlab(slabs["a"]) | slabs["b"]; + // clang-format off + auto answer_ab_union = IrregularHyperSlabAnswer{{ + {1ul, 1ul}, {1ul, 2ul}, + {2ul, 0ul}, {2ul, 1ul}, {2ul, 2ul}, + {3ul, 1ul}, {3ul, 2ul} + }}; + // clang-format on + test_data.push_back({"a | b", slab_ab_union, answer_ab_union}); + + // xor, irregular + auto slab_ab_xor = HyperSlab(slabs["a"]) ^ slabs["b"]; + // clang-format off + auto answer_ab_xor = IrregularHyperSlabAnswer{{ + {1ul, 1ul}, {1ul, 2ul}, + {2ul, 0ul}, {2ul, 2ul}, + {3ul, 1ul}, {3ul, 2ul} + }}; + // clang-format on + test_data.push_back({"a xor b", slab_ab_xor, answer_ab_xor}); + + // (not a) and e, irregular + auto slab_ab_nota = HyperSlab(slabs["a"]).notA(slabs["b"]); + // clang-format off + auto answer_ab_nota = IrregularHyperSlabAnswer{{ + {1ul, 1ul}, {1ul, 2ul}, + {2ul, 2ul}, + {3ul, 1ul}, {3ul, 2ul} + }}; + // clang-format on + test_data.push_back({"a nota b", slab_ab_nota, answer_ab_nota}); + + // (not a) and e, irregular + auto slab_ba_notb = HyperSlab(slabs["b"]).notB(slabs["a"]); + // clang-format off + auto answer_ba_notb = IrregularHyperSlabAnswer{{ + {1ul, 1ul}, {1ul, 2ul}, + {2ul, 2ul}, + {3ul, 1ul}, {3ul, 2ul} + }}; + // clang-format on + test_data.push_back({"b notb a", slab_ba_notb, answer_ba_notb}); + + return test_data; +} + +template <typename T> +void irregularHyperSlabSelectionReadTest() { + std::ostringstream filename; + filename << 
"h5_write_select_irregular_hyperslab_test_" << typeNameHelper<T>() << "_test.h5"; + + const std::string dataset_name("dset"); + + const size_t x_size = 10; + const size_t y_size = 8; + + T values[x_size][y_size]; + auto file = setupHyperSlabFile(values, filename.str(), dataset_name); + + auto test_cases = make_irregular_hyperslab_test_data(); + + for (const auto& test_case: test_cases) { + SECTION(test_case.desc) { + std::vector<T> result; + + file.getDataSet(dataset_name).select(test_case.slab).read(result); + + auto n_selected = test_case.answer.global_indices.size(); + for (size_t i = 0; i < n_selected; ++i) { + const auto ig = test_case.answer.global_indices[i]; + + REQUIRE(result[i] == values[ig[0]][ig[1]]); + } + } + } +} + +TEMPLATE_LIST_TEST_CASE("irregularHyperSlabSelectionRead", "[template]", numerical_test_types) { + irregularHyperSlabSelectionReadTest<TestType>(); +} + +template <typename T> +void irregularHyperSlabSelectionWriteTest() { + std::ostringstream filename; + filename << "h5_write_select_irregular_hyperslab_test_" << typeNameHelper<T>() << "_test.h5"; + + const std::string dataset_name("dset"); + + const size_t x_size = 10; + const size_t y_size = 8; + + T orig_values[x_size][y_size]; + auto file = setupHyperSlabFile(orig_values, filename.str(), dataset_name); + + auto test_cases = make_irregular_hyperslab_test_data(); + + for (const auto& test_case: test_cases) { + SECTION(test_case.desc) { + auto n_selected = test_case.answer.global_indices.size(); + std::vector<T> changed_values(n_selected); + ContentGenerate<T> gen; + std::generate(changed_values.begin(), changed_values.end(), gen); + + file.getDataSet(dataset_name).select(test_case.slab).write(changed_values); + + T overwritten_values[x_size][y_size]; + file.getDataSet(dataset_name).read(overwritten_values); + + T expected_values[x_size][y_size]; + for (size_t i = 0; i < x_size; ++i) { + for (size_t j = 0; j < y_size; ++j) { + expected_values[i][j] = orig_values[i][j]; + } + } + + for (size_t i = 0; i < n_selected; ++i) { + const auto ig = test_case.answer.global_indices[i]; + expected_values[ig[0]][ig[1]] = changed_values[i]; + } + + for (size_t i = 0; i < x_size; ++i) { + for (size_t j = 0; j < y_size; ++j) { + REQUIRE(expected_values[i][j] == overwritten_values[i][j]); + } + } + } + } +} + +TEMPLATE_LIST_TEST_CASE("irregularHyperSlabSelectionWrite", "[template]", std::tuple<int>) { + irregularHyperSlabSelectionWriteTest<TestType>(); +} diff --git a/packages/HighFive/tests/unit/tests_high_five.hpp b/packages/HighFive/tests/unit/tests_high_five.hpp index 0ebd58c44890e65ddbf4ee149d6c13c6662a15e3..9d259c8d130bad483bae71c282cd9f887cf065d8 100644 --- a/packages/HighFive/tests/unit/tests_high_five.hpp +++ b/packages/HighFive/tests/unit/tests_high_five.hpp @@ -43,8 +43,6 @@ using base_test_types = std::tuple<int, fcomplex>; #ifdef H5_USE_HALF_FLOAT -#include <half.hpp> - using float16_t = half_float::half; using numerical_test_types = decltype(std::tuple_cat(std::declval<base_test_types>(), std::tuple<float16_t>())); diff --git a/packages/HighFive/tests/unit/tests_high_five_base.cpp b/packages/HighFive/tests/unit/tests_high_five_base.cpp index 899170d9328683b2832e592871ff7fac917ea58e..163535b55cdd0875ac7203dfabf39e1ce1f29c69 100644 --- a/packages/HighFive/tests/unit/tests_high_five_base.cpp +++ b/packages/HighFive/tests/unit/tests_high_five_base.cpp @@ -9,6 +9,7 @@ #include <algorithm> #include <cstdio> #include <cstdlib> +#include <cstring> #include <ctime> #include <iostream> #include <map> @@ -16,8 +17,10 @@ 
#include <random> #include <string> #include <typeinfo> +#include <type_traits> #include <vector> + #include <catch2/catch_test_macros.hpp> #include <catch2/catch_template_test_macros.hpp> #include <catch2/matchers/catch_matchers_vector.hpp> @@ -676,25 +679,6 @@ TEST_CASE("Test simple listings") { } } -TEST_CASE("Simple test for type equality") { - AtomicType<double> d_var; - AtomicType<size_t> size_var; - AtomicType<double> d_var_test; - AtomicType<size_t> size_var_cpy(size_var); - AtomicType<int> int_var; - AtomicType<unsigned> uint_var; - - // check different type matching - CHECK(d_var == d_var_test); - CHECK(d_var != size_var); - - // check type copy matching - CHECK(size_var_cpy == size_var); - - // check sign change not matching - CHECK(int_var != uint_var); -} - TEST_CASE("StringType") { SECTION("enshrine-defaults") { auto fixed_length = FixedLengthStringType(32, StringPadding::SpacePadded); @@ -791,11 +775,25 @@ TEST_CASE("DataSpace::getElementCount") { SECTION("null") { auto space = DataSpace(DataSpace::dataspace_null); CHECK(space.getElementCount() == 0); + CHECK(detail::h5s_get_simple_extent_type(space.getId()) == H5S_NULL); + } + + SECTION("null named ctor") { + auto space = DataSpace::Null(); + CHECK(space.getElementCount() == 0); + CHECK(detail::h5s_get_simple_extent_type(space.getId()) == H5S_NULL); } SECTION("scalar") { auto space = DataSpace(DataSpace::dataspace_scalar); CHECK(space.getElementCount() == 1); + CHECK(detail::h5s_get_simple_extent_type(space.getId()) == H5S_SCALAR); + } + + SECTION("scalar named ctor") { + auto space = DataSpace::Scalar(); + CHECK(space.getElementCount() == 1); + CHECK(detail::h5s_get_simple_extent_type(space.getId()) == H5S_SCALAR); } SECTION("simple, empty (1D)") { @@ -1111,511 +1109,6 @@ TEST_CASE("datasetOffset") { CHECK(ds_read.getOffset() > 0); } -template <typename T> -void selectionArraySimpleTest() { - typedef typename std::vector<T> Vector; - - std::ostringstream filename; - filename << "h5_rw_select_test_" << typeNameHelper<T>() << "_test.h5"; - - const size_t size_x = 10; - const size_t offset_x = 2, count_x = 5; - - const std::string dataset_name("dset"); - - Vector values(size_x); - - ContentGenerate<T> generator; - std::generate(values.begin(), values.end(), generator); - - // Create a new file using the default property lists. 
- File file(filename.str(), File::ReadWrite | File::Create | File::Truncate); - - DataSet dataset = file.createDataSet<T>(dataset_name, DataSpace::From(values)); - - dataset.write(values); - - file.flush(); - - // select slice - { - // read it back - Vector result; - std::vector<size_t> offset{offset_x}; - std::vector<size_t> size{count_x}; - - Selection slice = dataset.select(offset, size); - - CHECK(slice.getSpace().getDimensions()[0] == size_x); - CHECK(slice.getMemSpace().getDimensions()[0] == count_x); - - slice.read(result); - - CHECK(result.size() == 5); - - for (size_t i = 0; i < count_x; ++i) { - REQUIRE(values[i + offset_x] == result[i]); - } - } - - // select cherry pick - { - // read it back - Vector result; - std::vector<size_t> ids{1, 3, 4, 7}; - - Selection slice = dataset.select(ElementSet(ids)); - - CHECK(slice.getSpace().getDimensions()[0] == size_x); - CHECK(slice.getMemSpace().getDimensions()[0] == ids.size()); - - slice.read(result); - - CHECK(result.size() == ids.size()); - - for (size_t i = 0; i < ids.size(); ++i) { - const std::size_t id = ids[i]; - REQUIRE(values[id] == result[i]); - } - } -} - -TEST_CASE("selectionArraySimpleString") { - selectionArraySimpleTest<std::string>(); -} - -TEMPLATE_LIST_TEST_CASE("selectionArraySimple", "[template]", dataset_test_types) { - selectionArraySimpleTest<TestType>(); -} - -TEST_CASE("selectionByElementMultiDim") { - const std::string file_name("h5_test_selection_multi_dim.h5"); - // Create a 2-dim dataset - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - std::vector<size_t> dims{3, 3}; - - auto set = file.createDataSet("test", DataSpace(dims), AtomicType<int>()); - int values[3][3] = {{1, 2, 3}, {4, 5, 6}, {7, 8, 9}}; - set.write(values); - - { - int value; - set.select(ElementSet{{1, 1}}).read(value); - CHECK(value == 5); - } - - { - int value[2]; - set.select(ElementSet{0, 0, 2, 2}).read(value); - CHECK(value[0] == 1); - CHECK(value[1] == 9); - } - - { - int value[2]; - set.select(ElementSet{{0, 1}, {1, 2}}).read(value); - CHECK(value[0] == 2); - CHECK(value[1] == 6); - } - - { - SilenceHDF5 silencer; - CHECK_THROWS_AS(set.select(ElementSet{0, 1, 2}), DataSpaceException); - } -} - -template <typename T> -void columnSelectionTest() { - std::ostringstream filename; - filename << "h5_rw_select_column_test_" << typeNameHelper<T>() << "_test.h5"; - - const size_t x_size = 10; - const size_t y_size = 7; - - const std::string dataset_name("dset"); - - T values[x_size][y_size]; - - ContentGenerate<T> generator; - generate2D(values, x_size, y_size, generator); - - // Create a new file using the default property lists. - File file(filename.str(), File::ReadWrite | File::Create | File::Truncate); - - // Create the data space for the dataset. 
- std::vector<size_t> dims{x_size, y_size}; - - DataSpace dataspace(dims); - // Create a dataset with arbitrary type - DataSet dataset = file.createDataSet<T>(dataset_name, dataspace); - - dataset.write(values); - - file.flush(); - - std::vector<size_t> columns{1, 3, 5}; - - Selection slice = dataset.select(columns); - T result[x_size][3]; - slice.read(result); - - CHECK(slice.getSpace().getDimensions()[0] == x_size); - CHECK(slice.getMemSpace().getDimensions()[0] == x_size); - - for (size_t i = 0; i < 3; ++i) - for (size_t j = 0; j < x_size; ++j) - REQUIRE(result[j][i] == values[j][columns[i]]); -} - -TEMPLATE_LIST_TEST_CASE("columnSelection", "[template]", numerical_test_types) { - columnSelectionTest<TestType>(); -} - -std::vector<std::array<size_t, 2>> global_indices_2d(const std::vector<size_t>& offset, - const std::vector<size_t>& count) { - std::vector<std::array<size_t, 2>> indices; - indices.reserve(count[0] * count[1]); - - for (size_t i = 0; i < count[0]; ++i) { - for (size_t j = 0; j < count[1]; ++j) { - indices.push_back({offset[0] + i, offset[1] + j}); - } - } - - return indices; -} - -std::vector<std::array<size_t, 2>> local_indices_2d(const std::vector<size_t>& count) { - return global_indices_2d({0ul, 0ul}, count); -} - -std::vector<std::array<size_t, 1>> local_indices_1d(const std::vector<size_t>& count) { - std::vector<std::array<size_t, 1>> local_indices; - for (size_t i = 0; i < count[0]; ++i) { - local_indices.push_back({i}); - } - - return local_indices; -} - -struct RegularHyperSlabAnswer { - static RegularHyperSlabAnswer createRegular(const std::vector<size_t>& offset, - const std::vector<size_t>& count) { - return RegularHyperSlabAnswer{global_indices_2d(offset, count), - local_indices_1d({count[0] * count[1]})}; - } - - // These are the selected indices in the - // outer (larger) array. - std::vector<std::array<size_t, 2>> global_indices; - - // These are the selected indices in the compacted (inner) - // array. 
- std::vector<std::array<size_t, 1>> local_indices; -}; - -struct RegularHyperSlabTestData { - std::string desc; - HyperSlab slab; - RegularHyperSlabAnswer answer; -}; - -std::vector<RegularHyperSlabTestData> make_regular_hyperslab_test_data() { - std::vector<RegularHyperSlabTestData> test_data; - - // The dataset is 10x8, we define the following regular - // hyperslabs: - // x----------------x - // | | - // | x------x e | 1 - // | | a | | - // x-|------|-------x 3 - // | | x-|-------x 4 - // | | | | b | - // | | c-|-------c 5 - // | | b-|-------b 6 - // | | | | c | - // | d----x-d-------x 7 - // | | d | | - // | a------a | 9 - // | | - // ------------------ - // 1 3 4 8 - - std::map<std::string, RegularHyperSlab> slabs; - - slabs["a"] = RegularHyperSlab(/* offset = */ {1ul, 1ul}, - /* count = */ {8ul, 3ul}); - - slabs["b"] = RegularHyperSlab(/* offset = */ {4ul, 3ul}, - /* count = */ {2ul, 5ul}); - - slabs["c"] = RegularHyperSlab(/* offset = */ {5ul, 3ul}, - /* count = */ {2ul, 5ul}); - - slabs["d"] = RegularHyperSlab(/* offset = */ {7ul, 1ul}, - /* count = */ {2ul, 3ul}); - - slabs["e"] = RegularHyperSlab(/* offset = */ {0ul, 0ul}, - /* count = */ {3ul, 8ul}); - - // Union, regular - auto slab_bc_union = HyperSlab(slabs["b"]) | slabs["c"]; - auto answer_bc_union = RegularHyperSlabAnswer::createRegular({4ul, 3ul}, {3ul, 5ul}); - test_data.push_back({"b | c", slab_bc_union, answer_bc_union}); - - // Intersection, always regular - auto slab_ab_cut = HyperSlab(slabs["a"]) & slabs["b"]; - auto answer_ab_cut = RegularHyperSlabAnswer::createRegular({4ul, 3ul}, {2ul, 1ul}); - test_data.push_back({"a & b", slab_ab_cut, answer_ab_cut}); - - // Intersection, always regular - auto slab_bc_cut = HyperSlab(slabs["b"]) & slabs["c"]; - auto answer_bc_cut = RegularHyperSlabAnswer::createRegular({5ul, 3ul}, {1ul, 5ul}); - test_data.push_back({"b & c", slab_bc_cut, answer_bc_cut}); - - // Xor, regular - auto slab_ad_xor = HyperSlab(slabs["a"]) ^ slabs["d"]; - auto answer_ad_xor = RegularHyperSlabAnswer::createRegular({1ul, 1ul}, {6ul, 3ul}); - test_data.push_back({"a ^ b", slab_ad_xor, answer_ad_xor}); - - // (not b) and c, regular - auto slab_bc_nota = HyperSlab(slabs["b"]).notA(slabs["c"]); - auto answer_bc_nota = RegularHyperSlabAnswer::createRegular({6ul, 3ul}, {1ul, 5ul}); - test_data.push_back({"b notA a", slab_bc_nota, answer_bc_nota}); - - // (not c) and b, regular - auto slab_cb_notb = HyperSlab(slabs["c"]).notB(slabs["b"]); - auto answer_cb_notb = RegularHyperSlabAnswer::createRegular({6ul, 3ul}, {1ul, 5ul}); - test_data.push_back({"c notB b", slab_cb_notb, answer_cb_notb}); - - return test_data; -} - -template <class T, size_t x_size, size_t y_size> -File setupHyperSlabFile(T (&values)[x_size][y_size], - const std::string& filename, - const std::string& dataset_name) { - ContentGenerate<T> generator; - generate2D(values, x_size, y_size, generator); - - // Create a new file using the default property lists. - File file(filename, File::ReadWrite | File::Create | File::Truncate); - - // Create the data space for the dataset. 
- std::vector<size_t> dims{x_size, y_size}; - - DataSpace dataspace(dims); - // Create a dataset with arbitrary type - DataSet dataset = file.createDataSet<T>(dataset_name, dataspace); - - dataset.write(values); - file.flush(); - - return file; -} - -template <typename T> -void regularHyperSlabSelectionTest() { - std::ostringstream filename; - filename << "h5_rw_select_regular_hyperslab_test_" << typeNameHelper<T>() << "_test.h5"; - const std::string dataset_name("dset"); - - const size_t x_size = 10; - const size_t y_size = 8; - - T values[x_size][y_size]; - - auto file = setupHyperSlabFile(values, filename.str(), dataset_name); - auto test_cases = make_regular_hyperslab_test_data(); - - for (const auto& test_case: test_cases) { - SECTION(test_case.desc) { - std::vector<T> result; - - file.getDataSet(dataset_name).select(test_case.slab).read(result); - - auto n_selected = test_case.answer.global_indices.size(); - for (size_t i = 0; i < n_selected; ++i) { - const auto ig = test_case.answer.global_indices[i]; - const auto il = test_case.answer.local_indices[i]; - - REQUIRE(result[il[0]] == values[ig[0]][ig[1]]); - } - } - } -} - -TEMPLATE_LIST_TEST_CASE("hyperSlabSelection", "[template]", numerical_test_types) { - regularHyperSlabSelectionTest<TestType>(); -} - -struct IrregularHyperSlabAnswer { - // These are the selected indices in the outer (larger) array. - std::vector<std::array<size_t, 2>> global_indices; -}; - -struct IrregularHyperSlabTestData { - std::string desc; - HyperSlab slab; - IrregularHyperSlabAnswer answer; -}; - -std::vector<IrregularHyperSlabTestData> make_irregular_hyperslab_test_data() { - // The dataset is 10x8, with two regular hyperslabs: - // x----------------x - // | | - // | bbbb | - // | bbbb | - // | aaaabb | - // | aaaabb | - // | bbbb | - // | bbbb | - // | | - // | | - // | | - // | | - // ------------------ - - auto slabs = std::map<std::string, RegularHyperSlab>{}; - slabs["a"] = RegularHyperSlab{{2ul, 0ul}, {1ul, 2ul}}; - slabs["b"] = RegularHyperSlab{{1ul, 1ul}, {3ul, 2ul}}; - - std::vector<IrregularHyperSlabTestData> test_data; - - // Union, irregular - auto slab_ab_union = HyperSlab(slabs["a"]) | slabs["b"]; - // clang-format off - auto answer_ab_union = IrregularHyperSlabAnswer{{ - {1ul, 1ul}, {1ul, 2ul}, - {2ul, 0ul}, {2ul, 1ul}, {2ul, 2ul}, - {3ul, 1ul}, {3ul, 2ul} - }}; - // clang-format on - test_data.push_back({"a | b", slab_ab_union, answer_ab_union}); - - // xor, irregular - auto slab_ab_xor = HyperSlab(slabs["a"]) ^ slabs["b"]; - // clang-format off - auto answer_ab_xor = IrregularHyperSlabAnswer{{ - {1ul, 1ul}, {1ul, 2ul}, - {2ul, 0ul}, {2ul, 2ul}, - {3ul, 1ul}, {3ul, 2ul} - }}; - // clang-format on - test_data.push_back({"a xor b", slab_ab_xor, answer_ab_xor}); - - // (not a) and e, irregular - auto slab_ab_nota = HyperSlab(slabs["a"]).notA(slabs["b"]); - // clang-format off - auto answer_ab_nota = IrregularHyperSlabAnswer{{ - {1ul, 1ul}, {1ul, 2ul}, - {2ul, 2ul}, - {3ul, 1ul}, {3ul, 2ul} - }}; - // clang-format on - test_data.push_back({"a nota b", slab_ab_nota, answer_ab_nota}); - - // (not a) and e, irregular - auto slab_ba_notb = HyperSlab(slabs["b"]).notB(slabs["a"]); - // clang-format off - auto answer_ba_notb = IrregularHyperSlabAnswer{{ - {1ul, 1ul}, {1ul, 2ul}, - {2ul, 2ul}, - {3ul, 1ul}, {3ul, 2ul} - }}; - // clang-format on - test_data.push_back({"b notb a", slab_ba_notb, answer_ba_notb}); - - return test_data; -} - -template <typename T> -void irregularHyperSlabSelectionReadTest() { - std::ostringstream filename; - filename << 
"h5_write_select_irregular_hyperslab_test_" << typeNameHelper<T>() << "_test.h5"; - - const std::string dataset_name("dset"); - - const size_t x_size = 10; - const size_t y_size = 8; - - T values[x_size][y_size]; - auto file = setupHyperSlabFile(values, filename.str(), dataset_name); - - auto test_cases = make_irregular_hyperslab_test_data(); - - for (const auto& test_case: test_cases) { - SECTION(test_case.desc) { - std::vector<T> result; - - file.getDataSet(dataset_name).select(test_case.slab).read(result); - - auto n_selected = test_case.answer.global_indices.size(); - for (size_t i = 0; i < n_selected; ++i) { - const auto ig = test_case.answer.global_indices[i]; - - REQUIRE(result[i] == values[ig[0]][ig[1]]); - } - } - } -} - -TEMPLATE_LIST_TEST_CASE("irregularHyperSlabSelectionRead", "[template]", numerical_test_types) { - irregularHyperSlabSelectionReadTest<TestType>(); -} - -template <typename T> -void irregularHyperSlabSelectionWriteTest() { - std::ostringstream filename; - filename << "h5_write_select_irregular_hyperslab_test_" << typeNameHelper<T>() << "_test.h5"; - - const std::string dataset_name("dset"); - - const size_t x_size = 10; - const size_t y_size = 8; - - T orig_values[x_size][y_size]; - auto file = setupHyperSlabFile(orig_values, filename.str(), dataset_name); - - auto test_cases = make_irregular_hyperslab_test_data(); - - for (const auto& test_case: test_cases) { - SECTION(test_case.desc) { - auto n_selected = test_case.answer.global_indices.size(); - std::vector<T> changed_values(n_selected); - ContentGenerate<T> gen; - std::generate(changed_values.begin(), changed_values.end(), gen); - - file.getDataSet(dataset_name).select(test_case.slab).write(changed_values); - - T overwritten_values[x_size][y_size]; - file.getDataSet(dataset_name).read(overwritten_values); - - T expected_values[x_size][y_size]; - for (size_t i = 0; i < x_size; ++i) { - for (size_t j = 0; j < y_size; ++j) { - expected_values[i][j] = orig_values[i][j]; - } - } - - for (size_t i = 0; i < n_selected; ++i) { - const auto ig = test_case.answer.global_indices[i]; - expected_values[ig[0]][ig[1]] = changed_values[i]; - } - - for (size_t i = 0; i < x_size; ++i) { - for (size_t j = 0; j < y_size; ++j) { - REQUIRE(expected_values[i][j] == overwritten_values[i][j]); - } - } - } - } -} - -TEMPLATE_LIST_TEST_CASE("irregularHyperSlabSelectionWrite", "[template]", std::tuple<int>) { - irregularHyperSlabSelectionWriteTest<TestType>(); -} template <typename T> void attribute_scalar_rw() { @@ -2508,6 +2001,23 @@ TEST_CASE("HighFivePropertyObjects") { CHECK(plist_g2.isValid()); } +TEST_CASE("HighFivePropertyObjectsQuirks") { + auto pl1 = LinkCreateProps::Default(); + auto pl2 = pl1; + // As usual shallow copying semantics apply: + CHECK(pl1.getId() == pl2.getId()); + + // Then one adds something and the link is broken: + pl2.add(CreateIntermediateGroup(true)); + CHECK(pl1.getId() == H5P_DEFAULT); + + // but once it's not a "special" value, regular shallow copy semantic + // return: + auto pl3 = pl2; + pl3.add(CreateIntermediateGroup(false)); + CHECK(pl3.getId() == pl2.getId()); +} + TEST_CASE("HighFiveLinkCreationOrderProperty") { { // For file const std::string file_name("h5_keep_creation_order_file.h5"); @@ -2565,369 +2075,65 @@ TEST_CASE("HighFiveLinkCreationOrderProperty") { } } -struct CSL1 { - int m1; - int m2; - int m3; -}; - -struct CSL2 { - CSL1 csl1; -}; - -CompoundType create_compound_csl1() { - auto t2 = AtomicType<int>(); - CompoundType t1({{"m1", AtomicType<int>{}}, {"m2", AtomicType<int>{}}, {"m3", 
t2}}); - - return t1; -} - -CompoundType create_compound_csl2() { - CompoundType t1 = create_compound_csl1(); - - CompoundType t2({{"csl1", t1}}); - - return t2; -} - -HIGHFIVE_REGISTER_TYPE(CSL1, create_compound_csl1) -HIGHFIVE_REGISTER_TYPE(CSL2, create_compound_csl2) - -TEST_CASE("HighFiveCompounds") { - const std::string file_name("compounds_test.h5"); - const std::string dataset_name1("/a"); - const std::string dataset_name2("/b"); - - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - - auto t3 = AtomicType<int>(); - CompoundType t1 = create_compound_csl1(); - t1.commit(file, "my_type"); +TEST_CASE("DirectWriteBool") { + SECTION("Basic compatibility") { + CHECK(sizeof(bool) == sizeof(details::Boolean)); - CompoundType t2 = create_compound_csl2(); - t2.commit(file, "my_type2"); + auto n_bytes = 2 * sizeof(details::Boolean); - { // Not nested - auto dataset = file.createDataSet(dataset_name1, DataSpace(2), t1); + auto* const enum_ptr = (details::Boolean*) malloc(n_bytes); + memset(enum_ptr, 187, n_bytes); + enum_ptr[0] = details::Boolean::HighFiveTrue; + enum_ptr[1] = details::Boolean::HighFiveFalse; - std::vector<CSL1> csl = {{1, 1, 1}, {2, 3, 4}}; - dataset.write(csl); + auto* const bool_ptr = (bool*) malloc(n_bytes); + memset(bool_ptr, 68, n_bytes); + bool_ptr[0] = true; + bool_ptr[1] = false; - file.flush(); - - std::vector<CSL1> result; - dataset.select({0}, {2}).read(result); + CHECK(std::memcmp(bool_ptr, enum_ptr, n_bytes) == 0); - CHECK(result.size() == 2); - CHECK(result[0].m1 == 1); - CHECK(result[0].m2 == 1); - CHECK(result[0].m3 == 1); - CHECK(result[1].m1 == 2); - CHECK(result[1].m2 == 3); - CHECK(result[1].m3 == 4); + free(enum_ptr); + free(bool_ptr); } - { // Nested - auto dataset = file.createDataSet(dataset_name2, DataSpace(2), t2); + auto file = File("rw_bool_from_ptr.h5", File::Truncate); - std::vector<CSL2> csl = {{{1, 1, 1}, {2, 3, 4}}}; - dataset.write(csl); + size_t n = 4; + bool* expected = new bool[n]; + bool* actual = new bool[n]; - file.flush(); - std::vector<CSL2> result = {{{1, 1, 1}, {2, 3, 4}}}; - dataset.select({0}, {2}).read(result); - - CHECK(result.size() == 2); - CHECK(result[0].csl1.m1 == 1); - CHECK(result[0].csl1.m2 == 1); - CHECK(result[0].csl1.m3 == 1); - CHECK(result[1].csl1.m1 == 2); - CHECK(result[1].csl1.m2 == 3); - CHECK(result[1].csl1.m3 == 4); + for (size_t i = 0; i < n; ++i) { + expected[i] = i % 2 == 0; } - // Test the constructor from hid - CompoundType t1_from_hid(t1); - CHECK(t1 == t1_from_hid); - - CompoundType t2_from_hid(t2); - CHECK(t2 == t2_from_hid); - - // Back from a DataType - CHECK_NOTHROW(CompoundType(DataType(t1_from_hid))); - CHECK_THROWS(CompoundType(AtomicType<uint32_t>{})); -} - -struct GrandChild { - uint32_t gcm1; - uint32_t gcm2; - uint32_t gcm3; -}; - -struct Child { - GrandChild grandChild; - uint32_t cm1; -}; - -struct Parent { - uint32_t pm1; - Child child; -}; - -CompoundType create_compound_GrandChild() { - auto t2 = AtomicType<uint32_t>(); - CompoundType t1({{"gcm1", AtomicType<uint32_t>{}}, - {"gcm2", AtomicType<uint32_t>{}}, - { - "gcm3", - t2, - }}); - return t1; -} - -CompoundType create_compound_Child() { - auto nestedType = create_compound_GrandChild(); - return CompoundType{{{ - "grandChild", - nestedType, - }, - {"cm1", AtomicType<uint32_t>{}}}}; -} - -CompoundType create_compound_Parent() { - auto nestedType = create_compound_Child(); - return CompoundType{{{"pm1", AtomicType<uint32_t>{}}, - { - "child", - nestedType, - }}}; -} - -HIGHFIVE_REGISTER_TYPE(GrandChild, 
create_compound_GrandChild) -HIGHFIVE_REGISTER_TYPE(Child, create_compound_Child) -HIGHFIVE_REGISTER_TYPE(Parent, create_compound_Parent) - -TEST_CASE("HighFiveCompoundsNested") { - const std::string file_name("nested_compounds_test.h5"); - const std::string dataset_name("/a"); - - { // Write - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - auto type = create_compound_Parent(); + auto dataspace = DataSpace{n}; + auto datatype = create_datatype<bool>(); - auto dataset = file.createDataSet(dataset_name, DataSpace(2), type); - CHECK(dataset.getDataType().getSize() == 20); - - std::vector<Parent> csl = {Parent{1, Child{GrandChild{1, 1, 1}, 1}}, - Parent{2, Child{GrandChild{3, 4, 5}, 6}}}; - dataset.write(csl); - } - - { // Read - File file(file_name, File::ReadOnly); - std::vector<Parent> result; - auto dataset = file.getDataSet(dataset_name); - CHECK(dataset.getDataType().getSize() == 20); - dataset.select({0}, {2}).read(result); + SECTION("WriteReadCycleAttribute") { + auto attr = file.createAttribute("attr", dataspace, datatype); + attr.write_raw(expected); + attr.read(actual); - CHECK(result.size() == 2); - CHECK(result[0].pm1 == 1); - CHECK(result[0].child.grandChild.gcm1 == 1); - CHECK(result[0].child.grandChild.gcm2 == 1); - CHECK(result[0].child.grandChild.gcm3 == 1); - CHECK(result[0].child.cm1 == 1); - CHECK(result[1].pm1 == 2); - CHECK(result[1].child.grandChild.gcm1 == 3); - CHECK(result[1].child.grandChild.gcm2 == 4); - CHECK(result[1].child.grandChild.gcm3 == 5); - CHECK(result[1].child.cm1 == 6); + for (size_t i = 0; i < n; ++i) { + REQUIRE(expected[i] == actual[i]); + } } -} - -template <size_t N> -struct Record { - double d = 3.14; - int i = 42; - char s[N]; -}; - -template <size_t N> -void fill(Record<N>& r) { - constexpr char ref[] = "123456789a123456789b123456789c123456789d123456789e123456789f"; - std::copy(ref, ref + N - 1, r.s); - r.s[N - 1] = '\0'; -} - -template <size_t N> -CompoundType rec_t() { - using RecN = Record<N>; - return {{"d", create_datatype<decltype(RecN::d)>()}, - {"i", create_datatype<decltype(RecN::i)>()}, - {"s", create_datatype<decltype(RecN::s)>()}}; -} - -HIGHFIVE_REGISTER_TYPE(Record<4>, rec_t<4>) -HIGHFIVE_REGISTER_TYPE(Record<8>, rec_t<8>) -HIGHFIVE_REGISTER_TYPE(Record<9>, rec_t<9>) - -template <size_t N> -void save(File& f) { - const size_t numRec = 2; - std::vector<Record<N>> recs(numRec); - fill<N>(recs[0]); - fill<N>(recs[1]); - auto dataset = f.createDataSet<Record<N>>("records" + std::to_string(N), DataSpace::From(recs)); - dataset.write(recs); -} - -template <size_t N> -std::string check(File& f) { - const size_t numRec = 2; - std::vector<Record<N>> recs(numRec); - f.getDataSet("records" + std::to_string(N)).read(recs); - return std::string(recs[0].s); -} -TEST_CASE("HighFiveCompoundsSeveralPadding") { - const std::string file_name("padded_compounds_test.h5"); + SECTION("WriteReadCycleDataSet") { + auto dset = file.createAttribute("dset", dataspace, datatype); + dset.write_raw(expected); + dset.read(actual); - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - { // Write - // 4 have been choose because no padding - // /* offset | size */ type = struct Record<4> { - // /* 0 | 8 */ double d; - // /* 8 | 4 */ int i; - // /* 12 | 4 */ char s[4]; - // total size (bytes): 16 - CHECK_NOTHROW(save<4>(file)); - // 8 have been choose because there is a padding - // /* offset | size */ type = struct Record<8> { - // /* 0 | 8 */ double d; - // /* 8 | 4 */ int i; - // /* 12 | 8 */ char s[8]; - // /* XXX 
4-byte padding */ - // total size (bytes): 24 - CHECK_NOTHROW(save<8>(file)); - // 9 have been choose because there should not be a padding on 9 - // /* offset | size */ type = struct Record<9> { - // /* 0 | 8 */ double d; - // /* 8 | 4 */ int i; - // /* 12 | 9 */ char s[9]; - // /* XXX 3-byte padding */ - // total size (bytes): 24 - CHECK_NOTHROW(save<9>(file)); - } - - { // Read - CHECK(check<4>(file) == std::string("123")); - CHECK(check<8>(file) == std::string("1234567")); - CHECK(check<9>(file) == std::string("12345678")); - } -} - -enum Position { - highfive_first = 1, - highfive_second = 2, - highfive_third = 3, - highfive_last = -1, -}; - -enum class Direction : signed char { - Forward = 1, - Backward = -1, - Left = -2, - Right = 2, -}; - -// This is only for boost test -std::ostream& operator<<(std::ostream& ost, const Direction& dir) { - ost << static_cast<int>(dir); - return ost; -} - -EnumType<Position> create_enum_position() { - return {{"highfive_first", Position::highfive_first}, - {"highfive_second", Position::highfive_second}, - {"highfive_third", Position::highfive_third}, - {"highfive_last", Position::highfive_last}}; -} -HIGHFIVE_REGISTER_TYPE(Position, create_enum_position) - -EnumType<Direction> create_enum_direction() { - return {{"Forward", Direction::Forward}, - {"Backward", Direction::Backward}, - {"Left", Direction::Left}, - {"Right", Direction::Right}}; -} -HIGHFIVE_REGISTER_TYPE(Direction, create_enum_direction) - -TEST_CASE("HighFiveEnum") { - const std::string file_name("enum_test.h5"); - const std::string dataset_name1("/a"); - const std::string dataset_name2("/b"); - - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - - { // Unscoped enum - auto e1 = create_enum_position(); - e1.commit(file, "Position"); - - auto dataset = file.createDataSet(dataset_name1, DataSpace(1), e1); - dataset.write(Position::highfive_first); - - file.flush(); - - Position result; - dataset.select(ElementSet({0})).read(result); - - CHECK(result == Position::highfive_first); + for (size_t i = 0; i < n; ++i) { + REQUIRE(expected[i] == actual[i]); + } } - { // Scoped enum - auto e1 = create_enum_direction(); - e1.commit(file, "Direction"); - - auto dataset = file.createDataSet(dataset_name2, DataSpace(5), e1); - std::vector<Direction> robot_moves({Direction::Backward, - Direction::Forward, - Direction::Forward, - Direction::Left, - Direction::Left}); - dataset.write(robot_moves); - - file.flush(); - - std::vector<Direction> result; - dataset.read(result); - - CHECK(result[0] == Direction::Backward); - CHECK(result[1] == Direction::Forward); - CHECK(result[2] == Direction::Forward); - CHECK(result[3] == Direction::Left); - CHECK(result[4] == Direction::Left); - } + delete[] expected; + delete[] actual; } -TEST_CASE("HighFiveReadType") { - const std::string file_name("readtype_test.h5"); - const std::string datatype_name1("my_type"); - const std::string datatype_name2("position"); - - File file(file_name, File::ReadWrite | File::Create | File::Truncate); - - CompoundType t1 = create_compound_csl1(); - t1.commit(file, datatype_name1); - - CompoundType t2 = file.getDataType(datatype_name1); - - auto t3 = create_enum_position(); - t3.commit(file, datatype_name2); - - DataType t4 = file.getDataType(datatype_name2); - - CHECK(t2 == t1); - CHECK(t4 == t3); -} class ForwardToAttribute { public: @@ -3143,12 +2349,12 @@ TEST_CASE("HighFiveSTDString (attribute, multiple, short)") { } TEST_CASE("HighFiveSTDString (dataset, multiple, long)") { - File 
file("std_string_dataset_multiple_short.h5", File::Truncate); + File file("std_string_dataset_multiple_long.h5", File::Truncate); check_multiple_string(ForwardToDataSet(file), 256); } TEST_CASE("HighFiveSTDString (attribute, multiple, long)") { - File file("std_string_attribute_multiple_short.h5", File::Truncate); + File file("std_string_attribute_multiple_long.h5", File::Truncate); check_multiple_string(ForwardToAttribute(file), 256); } diff --git a/packages/HighFive/tests/unit/tests_high_five_data_type.cpp b/packages/HighFive/tests/unit/tests_high_five_data_type.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b40050f2137c8858ef46e1a91ad324bfd1f346bf --- /dev/null +++ b/packages/HighFive/tests/unit/tests_high_five_data_type.cpp @@ -0,0 +1,410 @@ +/* + * Copyright (c), 2017-2023, Blue Brain Project - EPFL + * + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + * + */ +#include <algorithm> +#include <cstdio> +#include <cstdlib> +#include <cstring> +#include <ctime> +#include <iostream> +#include <map> +#include <memory> +#include <random> +#include <string> +#include <typeinfo> +#include <type_traits> +#include <vector> + + +#include <catch2/catch_test_macros.hpp> +#include <catch2/catch_template_test_macros.hpp> +#include <catch2/matchers/catch_matchers_vector.hpp> + +#include <highfive/highfive.hpp> +#include "tests_high_five.hpp" + +using namespace HighFive; +using Catch::Matchers::Equals; + +TEST_CASE("Simple test for type equality") { + AtomicType<double> d_var; + AtomicType<size_t> size_var; + AtomicType<double> d_var_test; + AtomicType<size_t> size_var_cpy(size_var); + AtomicType<int> int_var; + AtomicType<unsigned> uint_var; + + // check different type matching + CHECK(d_var == d_var_test); + CHECK(d_var != size_var); + + // check type copy matching + CHECK(size_var_cpy == size_var); + + // check sign change not matching + CHECK(int_var != uint_var); +} + + +struct CSL1 { + int m1; + int m2; + int m3; +}; + +struct CSL2 { + CSL1 csl1; +}; + +CompoundType create_compound_csl1() { + auto t2 = AtomicType<int>(); + CompoundType t1({{"m1", AtomicType<int>{}}, {"m2", AtomicType<int>{}}, {"m3", t2}}); + + return t1; +} + +CompoundType create_compound_csl2() { + CompoundType t1 = create_compound_csl1(); + + CompoundType t2({{"csl1", t1}}); + + return t2; +} + +HIGHFIVE_REGISTER_TYPE(CSL1, create_compound_csl1) +HIGHFIVE_REGISTER_TYPE(CSL2, create_compound_csl2) + +TEST_CASE("HighFiveCompounds") { + const std::string file_name("compounds_test.h5"); + const std::string dataset_name1("/a"); + const std::string dataset_name2("/b"); + + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + + auto t3 = AtomicType<int>(); + CompoundType t1 = create_compound_csl1(); + t1.commit(file, "my_type"); + + CompoundType t2 = create_compound_csl2(); + t2.commit(file, "my_type2"); + + { // Not nested + auto dataset = file.createDataSet(dataset_name1, DataSpace(2), t1); + + std::vector<CSL1> csl = {{1, 1, 1}, {2, 3, 4}}; + dataset.write(csl); + + file.flush(); + + std::vector<CSL1> result; + dataset.select({0}, {2}).read(result); + + CHECK(result.size() == 2); + CHECK(result[0].m1 == 1); + CHECK(result[0].m2 == 1); + CHECK(result[0].m3 == 1); + CHECK(result[1].m1 == 2); + CHECK(result[1].m2 == 3); + CHECK(result[1].m3 == 4); + } + + { // Nested + auto dataset = file.createDataSet(dataset_name2, DataSpace(2), t2); + + std::vector<CSL2> csl = {{{1, 1, 1}, {2, 
3, 4}}}; + dataset.write(csl); + + file.flush(); + std::vector<CSL2> result = {{{1, 1, 1}, {2, 3, 4}}}; + dataset.select({0}, {2}).read(result); + + CHECK(result.size() == 2); + CHECK(result[0].csl1.m1 == 1); + CHECK(result[0].csl1.m2 == 1); + CHECK(result[0].csl1.m3 == 1); + CHECK(result[1].csl1.m1 == 2); + CHECK(result[1].csl1.m2 == 3); + CHECK(result[1].csl1.m3 == 4); + } + + // Test the constructor from hid + CompoundType t1_from_hid(t1); + CHECK(t1 == t1_from_hid); + + CompoundType t2_from_hid(t2); + CHECK(t2 == t2_from_hid); + + // Back from a DataType + CHECK_NOTHROW(CompoundType(DataType(t1_from_hid))); + CHECK_THROWS(CompoundType(AtomicType<uint32_t>{})); +} + +struct GrandChild { + uint32_t gcm1; + uint32_t gcm2; + uint32_t gcm3; +}; + +struct Child { + GrandChild grandChild; + uint32_t cm1; +}; + +struct Parent { + uint32_t pm1; + Child child; +}; + +CompoundType create_compound_GrandChild() { + auto t2 = AtomicType<uint32_t>(); + CompoundType t1({{"gcm1", AtomicType<uint32_t>{}}, + {"gcm2", AtomicType<uint32_t>{}}, + { + "gcm3", + t2, + }}); + return t1; +} + +CompoundType create_compound_Child() { + auto nestedType = create_compound_GrandChild(); + return CompoundType{{{ + "grandChild", + nestedType, + }, + {"cm1", AtomicType<uint32_t>{}}}}; +} + +CompoundType create_compound_Parent() { + auto nestedType = create_compound_Child(); + return CompoundType{{{"pm1", AtomicType<uint32_t>{}}, + { + "child", + nestedType, + }}}; +} + +HIGHFIVE_REGISTER_TYPE(GrandChild, create_compound_GrandChild) +HIGHFIVE_REGISTER_TYPE(Child, create_compound_Child) +HIGHFIVE_REGISTER_TYPE(Parent, create_compound_Parent) + +TEST_CASE("HighFiveCompoundsNested") { + const std::string file_name("nested_compounds_test.h5"); + const std::string dataset_name("/a"); + + { // Write + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + auto type = create_compound_Parent(); + + auto dataset = file.createDataSet(dataset_name, DataSpace(2), type); + CHECK(dataset.getDataType().getSize() == 20); + + std::vector<Parent> csl = {Parent{1, Child{GrandChild{1, 1, 1}, 1}}, + Parent{2, Child{GrandChild{3, 4, 5}, 6}}}; + dataset.write(csl); + } + + { // Read + File file(file_name, File::ReadOnly); + std::vector<Parent> result; + auto dataset = file.getDataSet(dataset_name); + CHECK(dataset.getDataType().getSize() == 20); + dataset.select({0}, {2}).read(result); + + CHECK(result.size() == 2); + CHECK(result[0].pm1 == 1); + CHECK(result[0].child.grandChild.gcm1 == 1); + CHECK(result[0].child.grandChild.gcm2 == 1); + CHECK(result[0].child.grandChild.gcm3 == 1); + CHECK(result[0].child.cm1 == 1); + CHECK(result[1].pm1 == 2); + CHECK(result[1].child.grandChild.gcm1 == 3); + CHECK(result[1].child.grandChild.gcm2 == 4); + CHECK(result[1].child.grandChild.gcm3 == 5); + CHECK(result[1].child.cm1 == 6); + } +} + +template <size_t N> +struct Record { + double d = 3.14; + int i = 42; + char s[N]; +}; + +template <size_t N> +void fill(Record<N>& r) { + constexpr char ref[] = "123456789a123456789b123456789c123456789d123456789e123456789f"; + std::copy(ref, ref + N - 1, r.s); + r.s[N - 1] = '\0'; +} + +template <size_t N> +CompoundType rec_t() { + using RecN = Record<N>; + return {{"d", create_datatype<decltype(RecN::d)>()}, + {"i", create_datatype<decltype(RecN::i)>()}, + {"s", create_datatype<decltype(RecN::s)>()}}; +} + +HIGHFIVE_REGISTER_TYPE(Record<4>, rec_t<4>) +HIGHFIVE_REGISTER_TYPE(Record<8>, rec_t<8>) +HIGHFIVE_REGISTER_TYPE(Record<9>, rec_t<9>) + +template <size_t N> +void save(File& f) { + const size_t 
numRec = 2; + std::vector<Record<N>> recs(numRec); + fill<N>(recs[0]); + fill<N>(recs[1]); + auto dataset = f.createDataSet<Record<N>>("records" + std::to_string(N), DataSpace::From(recs)); + dataset.write(recs); +} + +template <size_t N> +std::string check(File& f) { + const size_t numRec = 2; + std::vector<Record<N>> recs(numRec); + f.getDataSet("records" + std::to_string(N)).read(recs); + return std::string(recs[0].s); +} + +TEST_CASE("HighFiveCompoundsSeveralPadding") { + const std::string file_name("padded_compounds_test.h5"); + + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + { // Write + // 4 have been choose because no padding + // /* offset | size */ type = struct Record<4> { + // /* 0 | 8 */ double d; + // /* 8 | 4 */ int i; + // /* 12 | 4 */ char s[4]; + // total size (bytes): 16 + CHECK_NOTHROW(save<4>(file)); + // 8 have been choose because there is a padding + // /* offset | size */ type = struct Record<8> { + // /* 0 | 8 */ double d; + // /* 8 | 4 */ int i; + // /* 12 | 8 */ char s[8]; + // /* XXX 4-byte padding */ + // total size (bytes): 24 + CHECK_NOTHROW(save<8>(file)); + // 9 have been choose because there should not be a padding on 9 + // /* offset | size */ type = struct Record<9> { + // /* 0 | 8 */ double d; + // /* 8 | 4 */ int i; + // /* 12 | 9 */ char s[9]; + // /* XXX 3-byte padding */ + // total size (bytes): 24 + CHECK_NOTHROW(save<9>(file)); + } + + { // Read + CHECK(check<4>(file) == std::string("123")); + CHECK(check<8>(file) == std::string("1234567")); + CHECK(check<9>(file) == std::string("12345678")); + } +} + +enum Position { + highfive_first = 1, + highfive_second = 2, + highfive_third = 3, + highfive_last = -1, +}; + +enum class Direction : signed char { + Forward = 1, + Backward = -1, + Left = -2, + Right = 2, +}; + +EnumType<Position> create_enum_position() { + return {{"highfive_first", Position::highfive_first}, + {"highfive_second", Position::highfive_second}, + {"highfive_third", Position::highfive_third}, + {"highfive_last", Position::highfive_last}}; +} +HIGHFIVE_REGISTER_TYPE(Position, create_enum_position) + +EnumType<Direction> create_enum_direction() { + return {{"Forward", Direction::Forward}, + {"Backward", Direction::Backward}, + {"Left", Direction::Left}, + {"Right", Direction::Right}}; +} +HIGHFIVE_REGISTER_TYPE(Direction, create_enum_direction) + +TEST_CASE("HighFiveEnum") { + const std::string file_name("enum_test.h5"); + const std::string dataset_name1("/a"); + const std::string dataset_name2("/b"); + + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + + { // Unscoped enum + auto e1 = create_enum_position(); + e1.commit(file, "Position"); + + auto dataset = file.createDataSet(dataset_name1, DataSpace(1), e1); + dataset.write(Position::highfive_first); + + file.flush(); + + Position result; + dataset.select(ElementSet({0})).read(result); + + CHECK(result == Position::highfive_first); + } + + { // Scoped enum + auto e1 = create_enum_direction(); + e1.commit(file, "Direction"); + + auto dataset = file.createDataSet(dataset_name2, DataSpace(5), e1); + std::vector<Direction> robot_moves({Direction::Backward, + Direction::Forward, + Direction::Forward, + Direction::Left, + Direction::Left}); + dataset.write(robot_moves); + + file.flush(); + + std::vector<Direction> result; + dataset.read(result); + + CHECK(result[0] == Direction::Backward); + CHECK(result[1] == Direction::Forward); + CHECK(result[2] == Direction::Forward); + CHECK(result[3] == Direction::Left); + CHECK(result[4] == 
Direction::Left); + } +} + +TEST_CASE("HighFiveReadType") { + const std::string file_name("readtype_test.h5"); + const std::string datatype_name1("my_type"); + const std::string datatype_name2("position"); + + File file(file_name, File::ReadWrite | File::Create | File::Truncate); + + CompoundType t1 = create_compound_csl1(); + t1.commit(file, datatype_name1); + + CompoundType t2 = file.getDataType(datatype_name1); + + auto t3 = create_enum_position(); + t3.commit(file, datatype_name2); + + DataType t4 = file.getDataType(datatype_name2); + + CHECK(t2 == t1); + CHECK(t4 == t3); +} diff --git a/packages/HighFive/tests/unit/tests_high_five_multi_dims.cpp b/packages/HighFive/tests/unit/tests_high_five_multi_dims.cpp index 442f1c9cc5d92ceb3a4ef4729d03a724ab900e8c..08fbea9cec350a8b4de42393a1a7d5ef830b5d70 100644 --- a/packages/HighFive/tests/unit/tests_high_five_multi_dims.cpp +++ b/packages/HighFive/tests/unit/tests_high_five_multi_dims.cpp @@ -80,11 +80,11 @@ void readWriteArrayTest() { typename std::array<T, 1> tooSmall; CHECK_THROWS_AS(dataset.read(tooSmall), DataSpaceException); } + TEMPLATE_LIST_TEST_CASE("readWriteArray", "[template]", numerical_test_types) { readWriteArrayTest<TestType>(); } - template <typename T, typename VectorSubT> void readWriteVectorNDTest(std::vector<VectorSubT>& ndvec, const std::vector<size_t>& dims) { fillVec(ndvec, dims, ContentGenerate<T>()); diff --git a/packages/HighFive/tests/unit/tests_high_five_parallel.cpp b/packages/HighFive/tests/unit/tests_high_five_parallel.cpp index 8b096205e8528df17c6ff03cb68c592c81e0c9bd..e2d7f948eeab405cdd6c45a4360136e68e8f8fde 100644 --- a/packages/HighFive/tests/unit/tests_high_five_parallel.cpp +++ b/packages/HighFive/tests/unit/tests_high_five_parallel.cpp @@ -19,6 +19,7 @@ #include <highfive/highfive.hpp> #include "tests_high_five.hpp" +#include "data_generator.hpp" using namespace HighFive; @@ -152,6 +153,82 @@ TEMPLATE_LIST_TEST_CASE("mpiSelectionArraySimpleCollectiveMD", "[template]", num } +TEST_CASE("ReadWriteHalfEmptyDatasets") { + int mpi_rank = -1; + MPI_Comm mpi_comm = MPI_COMM_WORLD; + MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank); + + std::string filename = "rw_collective_some_empty.h5"; + std::string dset_name = "dset"; + + using container_t = std::vector<std::vector<double>>; + using traits = testing::ContainerTraits<container_t>; + + auto dims = std::vector<size_t>{5ul, 7ul}; + auto values = testing::DataGenerator<container_t>::create(dims); + + if (mpi_rank == 0) { + auto file = HighFive::File(filename, HighFive::File::Truncate); + file.createDataSet(dset_name, values); + } + + MPI_Barrier(mpi_comm); + + bool collective_metadata = true; + bool collective_transfer = true; + + HighFive::FileAccessProps fapl; + fapl.add(HighFive::MPIOFileAccess{MPI_COMM_WORLD, MPI_INFO_NULL}); + fapl.add(HighFive::MPIOCollectiveMetadata{collective_metadata}); + + auto file = HighFive::File(filename, HighFive::File::Truncate, fapl); + file.createDataSet(dset_name, values); + auto dset = file.getDataSet(dset_name); + + HighFive::DataTransferProps dxpl; + dxpl.add(HighFive::UseCollectiveIO{collective_transfer}); + + auto hyperslab = HighFive::HyperSlab(); + auto subdims = std::vector<size_t>(2, 0ul); + + if (mpi_rank == 0) { + subdims = std::vector<size_t>{2ul, 4ul}; + hyperslab |= HighFive::RegularHyperSlab({0ul, 0ul}, subdims); + } + + SECTION("read") { + auto subvalues = dset.select(hyperslab, DataSpace(subdims)).template read<container_t>(); + + for (size_t i = 0; i < subdims[0]; ++i) { + for (size_t j = 0; j < subdims[1]; ++j) { + 
REQUIRE(traits::get(subvalues, {i, j}) == traits::get(values, {i, j})); + } + } + } + + SECTION("write") { + auto subvalues = + testing::DataGenerator<container_t>::create(subdims, [](const std::vector<size_t>& d) { + auto default_values = testing::DefaultValues<double>(); + return -1000.0 + default_values(d); + }); + dset.select(hyperslab, DataSpace(subdims)).write(subvalues, dxpl); + + MPI_Barrier(mpi_comm); + + if (mpi_rank == 0) { + auto modified_values = dset.read<container_t>(); + + for (size_t i = 0; i < subdims[0]; ++i) { + for (size_t j = 0; j < subdims[1]; ++j) { + REQUIRE(traits::get(subvalues, {i, j}) == traits::get(modified_values, {i, j})); + } + } + } + } +} + + int main(int argc, char* argv[]) { MpiFixture mpi(argc, argv);
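
The hyperslab test cases in this patch build selections by composing `RegularHyperSlab` objects with `|`, `&`, `^`, `notA` and `notB`. Below is a minimal sketch of that API outside the test harness; the file name `slab_demo.h5`, the dataset name `dset` and its `double` element type are assumptions made only for illustration.
```
#include <vector>
#include <highfive/highfive.hpp>

using namespace HighFive;

int main() {
    // Assumes a 10x8 dataset of doubles named "dset" already exists.
    File file("slab_demo.h5", File::ReadOnly);
    auto dset = file.getDataSet("dset");

    RegularHyperSlab a(/* offset = */ {1ul, 1ul}, /* count = */ {8ul, 3ul});
    RegularHyperSlab b(/* offset = */ {4ul, 3ul}, /* count = */ {2ul, 5ul});

    // Compose slabs; `&`, `^`, `notA` and `notB` work the same way.
    HyperSlab slab = HyperSlab(a) | b;

    // The selected elements come back flattened, ordered by their
    // global (row-major) indices, which is what the tests verify.
    std::vector<double> values;
    dset.select(slab).read(values);
    return 0;
}
```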
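
The added `HighFivePropertyObjectsQuirks` test documents that copies of a `Default()` property list stop sharing an id once one of the copies is modified. The sketch below replays the same sequence outside Catch2; it uses only calls appearing in the patch and simply drops the checks.
```
#include <highfive/highfive.hpp>

using namespace HighFive;

int main() {
    auto pl1 = LinkCreateProps::Default();
    auto pl2 = pl1;  // both still refer to H5P_DEFAULT

    // Modifying pl2 materialises a real property list, so pl1 and pl2
    // no longer share an id.
    pl2.add(CreateIntermediateGroup(true));

    // From here on, ordinary shallow-copy semantics apply again.
    auto pl3 = pl2;
    pl3.add(CreateIntermediateGroup(false));
    return 0;
}
```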
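
The compound-type tests moved into tests_high_five_data_type.cpp all follow the same recipe: describe the struct with `CompoundType`, register it with `HIGHFIVE_REGISTER_TYPE`, then read and write it like any other element type. A minimal sketch of that recipe, using a hypothetical `Point` struct and file name:
```
#include <vector>
#include <highfive/highfive.hpp>

using namespace HighFive;

struct Point {
    double x;
    double y;
};

CompoundType create_compound_point() {
    return {{"x", AtomicType<double>{}}, {"y", AtomicType<double>{}}};
}
HIGHFIVE_REGISTER_TYPE(Point, create_compound_point)

int main() {
    File file("points_demo.h5", File::Truncate);

    // Committing the type is optional; it just stores it under a name.
    auto point_t = create_compound_point();
    point_t.commit(file, "Point");

    std::vector<Point> pts = {{0.0, 1.0}, {2.0, 3.0}};
    auto dset = file.createDataSet("/pts", DataSpace::From(pts), point_t);
    dset.write(pts);

    std::vector<Point> back;
    dset.read(back);
    return 0;
}
```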
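
The new `ReadWriteHalfEmptyDatasets` test exercises collective transfers in which some ranks select nothing. The sketch below shows the same pattern for a collective write; the file name, dataset shape and values are made up for illustration, and error handling is omitted.
```
#include <mpi.h>

#include <vector>

#include <highfive/highfive.hpp>

using namespace HighFive;

int main(int argc, char** argv) {
    MPI_Init(&argc, &argv);
    int rank = 0;
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);

    FileAccessProps fapl;
    fapl.add(MPIOFileAccess{MPI_COMM_WORLD, MPI_INFO_NULL});
    fapl.add(MPIOCollectiveMetadata{true});

    // File and dataset creation are collective: every rank takes part.
    File file("collective_demo.h5", File::Truncate, fapl);
    auto dims = std::vector<size_t>{5ul, 7ul};
    auto dset = file.createDataSet<double>("dset", DataSpace(dims));

    DataTransferProps dxpl;
    dxpl.add(UseCollectiveIO{true});

    // Rank 0 writes a 2x4 block; all other ranks keep an empty
    // selection so the collective call is still matched everywhere.
    auto slab = HyperSlab();
    auto count = std::vector<size_t>{0ul, 0ul};
    std::vector<std::vector<double>> block;
    if (rank == 0) {
        count = {2ul, 4ul};
        slab |= RegularHyperSlab({0ul, 0ul}, count);
        block = {{0.0, 1.0, 2.0, 3.0}, {4.0, 5.0, 6.0, 7.0}};
    }

    dset.select(slab, DataSpace(count)).write(block, dxpl);

    MPI_Finalize();
    return 0;
}
```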