diff --git a/CMakeLists.txt b/CMakeLists.txt
index 52579983f4dc7deaa42079eae59532695cdcf9ae..7c0f69fcf6395a96f88e74e9143e8e85730ba66e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -147,11 +147,36 @@ if(PARMETIS_LIBRARIES)
   set_property(TARGET PkgConfig::ParMETIS PROPERTY
     IMPORTED_LOCATION "${PARMETIS_LIBRARIES}")
+  set(PUGS_HAS_PARMETIS TRUE)
   set(PARMETIS_TARGET PkgConfig::ParMETIS)
+else()
+  unset(PUGS_HAS_PARMETIS)
 endif()
 
+#------------------------------------------------------
+# Search for PTScotch
+
+find_package(PTScotch)
+
+set(PUGS_HAS_PTSCOTCH ${PTScotch_FOUND})
+
+if (PTScotch_FOUND)
+  add_library(PkgConfig::PTScotch STATIC IMPORTED)
+  set_property(TARGET PkgConfig::PTScotch PROPERTY
+    IMPORTED_LOCATION "${PTSCOTCH_LIBRARIES}")
+  set_property(TARGET PkgConfig::PTScotch PROPERTY
+    INTERFACE_INCLUDE_DIRECTORIES "${PTSCOTCH_INCLUDE_DIR}")
+
+  set(PTSCOTCH_TARGET PkgConfig::PTScotch)
+  include_directories(SYSTEM "${PTSCOTCH_INCLUDE_DIR}")
+else()
+  set(PTSCOTCH_LIBRARIES "")
+endif()
+
+#------------------------------------------------------
+
 if(${MPI_FOUND})
-  if (NOT PARMETIS_LIBRARIES)
+  if ((NOT PARMETIS_LIBRARIES) AND (NOT PTSCOTCH_LIBRARIES))
     if(PUGS_ENABLE_MPI MATCHES "^AUTO$")
       message(STATUS "MPI support deactivated: neither ParMETIS nor PTScotch can be found!")
       unset(MPI_FOUND)
diff --git a/cmake/FindPTScotch.cmake b/cmake/FindPTScotch.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..da6286e3e8ea110460a068f8e40839b15080319c
--- /dev/null
+++ b/cmake/FindPTScotch.cmake
@@ -0,0 +1,29 @@
+# Looking for PTScotch
+
+find_package(PkgConfig)
+pkg_check_modules(PC_PTSCOTCH QUIET PTSCOTCH)
+
+find_path(PTSCOTCH_INCLUDE_DIR
+  NAMES ptscotch.h
+  PATH_SUFFIXES "include" "include/scotch"
+  HINTS "$ENV{PTSCOTCH_INCDIR}")
+
+if(EXISTS "${PTSCOTCH_INCLUDE_DIR}/ptscotch.h")
+  message(STATUS "Found ptscotch.h in ${PTSCOTCH_INCLUDE_DIR}")
+  find_library(LIB_PTSCOTCH ptscotch $ENV{PTSCOTCH_LIBDIR})
+  if("${LIB_PTSCOTCH}" STREQUAL "LIB_PTSCOTCH-NOTFOUND")
+
message(WARNING "** Could not find ptscotch library.\n** Is PTSCOTCH_LIBDIR correctly set (Actual: \"$ENV{PTSCOTCH_LIBDIR}\")?") + endif() + set(PTSCOTCH_LIBRARIES ${LIB_PTSCOTCH}) + message(STATUS "Found ptscotch/scotch libraries ${PTSCOTCH_LIBRARIES}") +else() + message(WARNING "** Could not find ptscotch.h.\n** Is PTSCOTCH_INCDIR correctly set (Actual: \"$ENV{PTSCOTCH_INCDIR}\")?") +endif() + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(PTScotch + FOUND_VAR + PTSCOTCH_FOUND + REQUIRED_VARS + PTSCOTCH_LIBRARIES + PTSCOTCH_INCLUDE_DIR) diff --git a/src/language/modules/CoreModule.cpp b/src/language/modules/CoreModule.cpp index 47e0183256413b7579165b26364dff68f0614a74..5b5753eb8b3952d6d9c08c34ccc843f4bece2dc9 100644 --- a/src/language/modules/CoreModule.cpp +++ b/src/language/modules/CoreModule.cpp @@ -32,6 +32,7 @@ #include <language/utils/UnaryOperatorRegisterForRnxn.hpp> #include <language/utils/UnaryOperatorRegisterForZ.hpp> #include <utils/Messenger.hpp> +#include <utils/PartitionerOptions.hpp> #include <utils/PugsUtils.hpp> #include <utils/RandomEngine.hpp> #include <utils/Stop.hpp> @@ -94,6 +95,27 @@ CoreModule::CoreModule() : BuiltinModule(true) )); + this->_addBuiltinFunction("setPartitionerLibrary", std::function( + + [](const std::string& library_name) -> void { + PartitionerOptions::default_options.library() = + getPartitionerEnumFromName<PartitionerLibrary>(library_name); + } + + )); + + this->_addBuiltinFunction("getPartitionerOptions", std::function( + + []() -> std::string { + std::ostringstream os; + os << rang::fgB::yellow << "Partitioner options" + << rang::style::reset << '\n'; + os << PartitionerOptions::default_options; + return os.str(); + } + + )); + this->_addTypeDescriptor(ast_node_data_type_from<std::shared_ptr<const OStream>>); this->_addBuiltinFunction("ofstream", std::function( diff --git a/src/language/modules/MeshModule.cpp b/src/language/modules/MeshModule.cpp index 
c060da50d6da17915937edda6f5403a9e604355e..9b698b9e095ff6c227c58a2e6fe6bae5f7f72ad4 100644 --- a/src/language/modules/MeshModule.cpp +++ b/src/language/modules/MeshModule.cpp @@ -308,6 +308,46 @@ MeshModule::MeshModule() return DualMeshManager::instance().getMedianDualMesh(mesh_v); } + )); + + this->_addBuiltinFunction("cell_owner", std::function( + + [](const std::shared_ptr<const MeshVariant>& mesh_v) + -> std::shared_ptr<const ItemValueVariant> { + return std::visit( + [&](auto&& mesh) { + const auto& connectivity = mesh->connectivity(); + auto cell_owner = connectivity.cellOwner(); + CellValue<long int> cell_owner_long{connectivity}; + parallel_for( + connectivity.numberOfCells(), PUGS_LAMBDA(const CellId cell_id) { + cell_owner_long[cell_id] = cell_owner[cell_id]; + }); + return std::make_shared<const ItemValueVariant>(cell_owner_long); + }, + mesh_v->variant()); + } + + )); + + this->_addBuiltinFunction("node_owner", std::function( + + [](const std::shared_ptr<const MeshVariant>& mesh_v) + -> std::shared_ptr<const ItemValueVariant> { + return std::visit( + [&](auto&& mesh) { + const auto& connectivity = mesh->connectivity(); + auto node_owner = connectivity.nodeOwner(); + NodeValue<long int> node_owner_long{connectivity}; + parallel_for( + connectivity.numberOfNodes(), PUGS_LAMBDA(const NodeId node_id) { + node_owner_long[node_id] = node_owner[node_id]; + }); + return std::make_shared<const ItemValueVariant>(node_owner_long); + }, + mesh_v->variant()); + } + )); } diff --git a/src/mesh/Connectivity.hpp b/src/mesh/Connectivity.hpp index 24d9ef656f0cdd0746db0ee2f27881ce5244c8b0..aa1a471d6e84b554c1715ca7eecc7a2361744b07 100644 --- a/src/mesh/Connectivity.hpp +++ b/src/mesh/Connectivity.hpp @@ -19,7 +19,6 @@ #include <utils/PugsTraits.hpp> #include <utils/PugsUtils.hpp> -#include <algorithm> #include <iostream> #include <set> #include <vector> diff --git a/src/utils/CMakeLists.txt b/src/utils/CMakeLists.txt index 
4d74c5e195fe0361819315cd3732661ba7589e56..bc66a97faf800dc0a10be3165f8d4f2c94059010 100644 --- a/src/utils/CMakeLists.txt +++ b/src/utils/CMakeLists.txt @@ -14,9 +14,12 @@ add_library( FPEManager.cpp GlobalVariableManager.cpp Messenger.cpp + ParMETISPartitioner.cpp Partitioner.cpp + PartitionerOptions.cpp PETScWrapper.cpp PluginsLoader.cpp + PTScotchPartitioner.cpp PugsUtils.cpp RandomEngine.cpp ReproducibleSumManager.cpp diff --git a/src/utils/PTScotchPartitioner.cpp b/src/utils/PTScotchPartitioner.cpp new file mode 100644 index 0000000000000000000000000000000000000000..e86acfa4494c437dfbe8886a83d6ecbf01f514f2 --- /dev/null +++ b/src/utils/PTScotchPartitioner.cpp @@ -0,0 +1,110 @@ +#include <utils/PTScotchPartitioner.hpp> + +#include <utils/pugs_config.hpp> + +#ifdef PUGS_HAS_PTSCOTCH + +#include <utils/Exceptions.hpp> +#include <utils/Messenger.hpp> + +#include <ptscotch.h> + +#include <iostream> +#include <vector> + +Array<int> +PTScotchPartitioner::partition(const CRSGraph& graph) +{ + std::cout << "Partitioning graph into " << rang::style::bold << parallel::size() << rang::style::reset + << " parts using " << rang::fgB::green << "PTScotch" << rang::fg::reset << '\n'; + + MPI_Group world_group; + MPI_Comm_group(parallel::Messenger::getInstance().comm(), &world_group); + + MPI_Group mesh_group; + std::vector<int> group_ranks = [&]() { + Array<int> graph_node_owners = parallel::allGather(static_cast<int>(graph.numberOfNodes())); + std::vector<int> grp_ranks; + grp_ranks.reserve(graph_node_owners.size()); + for (size_t i = 0; i < graph_node_owners.size(); ++i) { + if (graph_node_owners[i] > 0) { + grp_ranks.push_back(i); + } + } + return grp_ranks; + }(); + + MPI_Group_incl(world_group, group_ranks.size(), &(group_ranks[0]), &mesh_group); + + MPI_Comm partitioning_comm; + MPI_Comm_create_group(parallel::Messenger::getInstance().comm(), mesh_group, 1, &partitioning_comm); + + Array<int> partition; + if (graph.numberOfNodes() > 0) { + SCOTCH_Strat scotch_strategy; + 
SCOTCH_Dgraph scotch_grah; + + SCOTCH_stratInit(&scotch_strategy); // use default strategy + SCOTCH_dgraphInit(&scotch_grah, partitioning_comm); + + const Array<const int>& entries = graph.entries(); + const Array<const int>& neighbors = graph.neighbors(); + + static_assert(std::is_same_v<int, SCOTCH_Num>); + + SCOTCH_Num* entries_ptr = const_cast<int*>(&(entries[0])); + SCOTCH_Num* neighbors_ptr = const_cast<int*>(&(neighbors[0])); + + if (SCOTCH_dgraphBuild(&scotch_grah, + 0, // 0 for C-like arrays + graph.numberOfNodes(), // + graph.numberOfNodes(), // + entries_ptr, // + nullptr, // + nullptr, // + nullptr, // optional local node label array + graph.neighbors().size(), // + graph.neighbors().size(), // + neighbors_ptr, // + nullptr, // + nullptr) != 0) { + // LCOV_EXCL_START + throw UnexpectedError("PTScotch invalid graph"); + // LCOV_EXCL_STOP + } + + partition = Array<int>(graph.numberOfNodes()); + + SCOTCH_Num* partition_ptr = static_cast<SCOTCH_Num*>(&(partition[0])); + + if (SCOTCH_dgraphPart(&scotch_grah, // + parallel::size(), // + &scotch_strategy, // + partition_ptr) != 0) { + // LCOV_EXCL_START + throw UnexpectedError("PTScotch Error"); + // LCOV_EXCL_STOP + } + + SCOTCH_dgraphExit(&scotch_grah); + SCOTCH_stratExit(&scotch_strategy); + } + + MPI_Comm_free(&partitioning_comm); + MPI_Group_free(&mesh_group); + MPI_Group_free(&world_group); + + return partition; +} + +#else // PUGS_HAS_PTSCOTCH + +Array<int> +PTScotchPartitioner::partition(const CRSGraph& graph) +{ + Array<int> partition{graph.entries().size() - 1}; + partition.fill(0); + return partition; +} + +#endif // PUGS_HAS_PTSCOTCH diff --git a/src/utils/PTScotchPartitioner.hpp b/src/utils/PTScotchPartitioner.hpp new file mode 100644 index 0000000000000000000000000000000000000000..cefb0a1df2e1c39966d6c8ed0eaffe655a836df0 --- /dev/null +++ b/src/utils/PTScotchPartitioner.hpp @@ -0,0 +1,19 @@ +#ifndef PT_SCOTCH_PARTITIONER_HPP +#define PT_SCOTCH_PARTITIONER_HPP + +#include <utils/Array.hpp> 
+#include <utils/CRSGraph.hpp> + +class PTScotchPartitioner +{ + private: + friend class Partitioner; + + // Forbids direct calls + static Array<int> partition(const CRSGraph& graph); + + public: + PTScotchPartitioner() = delete; +}; + +#endif // PT_SCOTCH_PARTITIONER_HPP diff --git a/src/utils/ParMETISPartitioner.cpp b/src/utils/ParMETISPartitioner.cpp new file mode 100644 index 0000000000000000000000000000000000000000..47864de1fd3337bc6e193ad3453ad8d6c75eb797 --- /dev/null +++ b/src/utils/ParMETISPartitioner.cpp @@ -0,0 +1,92 @@ +#include <utils/ParMETISPartitioner.hpp> + +#include <utils/pugs_config.hpp> + +#ifdef PUGS_HAS_PARMETIS + +#include <utils/Exceptions.hpp> +#include <utils/Messenger.hpp> + +#include <parmetis.h> + +#include <iostream> +#include <vector> + +Array<int> +ParMETISPartitioner::partition(const CRSGraph& graph) +{ + std::cout << "Partitioning graph into " << rang::style::bold << parallel::size() << rang::style::reset + << " parts using " << rang::fgB::green << "ParMETIS" << rang::fg::reset << '\n'; + + MPI_Group world_group; + MPI_Comm_group(parallel::Messenger::getInstance().comm(), &world_group); + + MPI_Group mesh_group; + std::vector<int> group_ranks = [&]() { + Array<int> graph_node_owners = parallel::allGather(static_cast<int>(graph.numberOfNodes())); + std::vector<int> grp_ranks; + grp_ranks.reserve(graph_node_owners.size()); + for (size_t i = 0; i < graph_node_owners.size(); ++i) { + if (graph_node_owners[i] > 0) { + grp_ranks.push_back(i); + } + } + return grp_ranks; + }(); + + MPI_Group_incl(world_group, group_ranks.size(), &(group_ranks[0]), &mesh_group); + + MPI_Comm partitioning_comm; + MPI_Comm_create_group(parallel::Messenger::getInstance().comm(), mesh_group, 1, &partitioning_comm); + + Array<int> partition; + if (graph.numberOfNodes() > 0) { + int wgtflag = 0; + int numflag = 0; + int ncon = 1; + int npart = parallel::size(); + std::vector<float> tpwgts(npart, 1. 
/ npart); + + std::vector<float> ubvec{1.05}; + std::vector<int> options{1, 0, 0}; + int edgecut = 0; + + int local_number_of_nodes = graph.numberOfNodes(); + + partition = Array<int>(local_number_of_nodes); + std::vector<int> vtxdist{0, local_number_of_nodes}; + + const Array<const int>& entries = graph.entries(); + const Array<const int>& neighbors = graph.neighbors(); + + int* entries_ptr = const_cast<int*>(&(entries[0])); + int* neighbors_ptr = const_cast<int*>(&(neighbors[0])); + + int result = + ParMETIS_V3_PartKway(&(vtxdist[0]), entries_ptr, neighbors_ptr, NULL, NULL, &wgtflag, &numflag, &ncon, &npart, + &(tpwgts[0]), &(ubvec[0]), &(options[0]), &edgecut, &(partition[0]), &partitioning_comm); + // LCOV_EXCL_START + if (result == METIS_ERROR) { + throw UnexpectedError("Metis Error"); + } + // LCOV_EXCL_STOP + } + + MPI_Comm_free(&partitioning_comm); + MPI_Group_free(&mesh_group); + MPI_Group_free(&world_group); + + return partition; +} + +#else // PUGS_HAS_PARMETIS + +Array<int> +ParMETISPartitioner::partition(const CRSGraph& graph) +{ + Array<int> partition{graph.entries().size() - 1}; + partition.fill(0); + return partition; +} + +#endif // PUGS_HAS_PARMETIS diff --git a/src/utils/ParMETISPartitioner.hpp b/src/utils/ParMETISPartitioner.hpp new file mode 100644 index 0000000000000000000000000000000000000000..53c1d2dc50e96c85a2c57d0abfb59735b471cc6f --- /dev/null +++ b/src/utils/ParMETISPartitioner.hpp @@ -0,0 +1,19 @@ +#ifndef PARMETIS_PARTITIONER_HPP +#define PARMETIS_PARTITIONER_HPP + +#include <utils/Array.hpp> +#include <utils/CRSGraph.hpp> + +class ParMETISPartitioner +{ + private: + friend class Partitioner; + + // Forbids direct calls + static Array<int> partition(const CRSGraph& graph); + + public: + ParMETISPartitioner() = delete; +}; + +#endif // PARMETIS_PARTITIONER_HPP diff --git a/src/utils/Partitioner.cpp b/src/utils/Partitioner.cpp index 85e13c22d80f449c211f4c37ee20f3107556d6ba..4f53d322f5e52c130122ccc3966270e7220d34d7 100644 --- 
a/src/utils/Partitioner.cpp +++ b/src/utils/Partitioner.cpp @@ -1,94 +1,22 @@ #include <utils/Partitioner.hpp> -#include <utils/Messenger.hpp> -#include <utils/pugs_config.hpp> +#include <utils/PTScotchPartitioner.hpp> +#include <utils/ParMETISPartitioner.hpp> -#ifdef PUGS_HAS_MPI - -#define IDXTYPEWIDTH 64 -#define REALTYPEWIDTH 64 -#include <parmetis.h> - -#include <iostream> -#include <vector> - -#include <utils/Exceptions.hpp> +Partitioner::Partitioner(const PartitionerOptions& options) : m_partitioner_options{options} {} Array<int> Partitioner::partition(const CRSGraph& graph) { - std::cout << "Partitioning graph into " << rang::style::bold << parallel::size() << rang::style::reset << " parts\n"; - - int wgtflag = 0; - int numflag = 0; - int ncon = 1; - int npart = parallel::size(); - std::vector<float> tpwgts(npart, 1. / npart); - - std::vector<float> ubvec{1.05}; - std::vector<int> options{1, 0, 0}; - int edgecut = 0; - Array<int> part(0); - - MPI_Group world_group; - - MPI_Comm_group(parallel::Messenger::getInstance().comm(), &world_group); - - MPI_Group mesh_group; - std::vector<int> group_ranks = [&]() { - Array<int> graph_node_owners = parallel::allGather(static_cast<int>(graph.numberOfNodes())); - std::vector<int> grp_ranks; - grp_ranks.reserve(graph_node_owners.size()); - for (size_t i = 0; i < graph_node_owners.size(); ++i) { - if (graph_node_owners[i] > 0) { - grp_ranks.push_back(i); - } - } - return grp_ranks; - }(); - - MPI_Group_incl(world_group, group_ranks.size(), &(group_ranks[0]), &mesh_group); - - MPI_Comm parmetis_comm; - MPI_Comm_create_group(parallel::Messenger::getInstance().comm(), mesh_group, 1, &parmetis_comm); - - int local_number_of_nodes = graph.numberOfNodes(); - - if (graph.numberOfNodes() > 0) { - part = Array<int>(local_number_of_nodes); - std::vector<int> vtxdist{0, local_number_of_nodes}; - - const Array<const int>& entries = graph.entries(); - const Array<const int>& neighbors = graph.neighbors(); - - int* entries_ptr = 
const_cast<int*>(&(entries[0])); - int* neighbors_ptr = const_cast<int*>(&(neighbors[0])); - - int result = - ParMETIS_V3_PartKway(&(vtxdist[0]), entries_ptr, neighbors_ptr, NULL, NULL, &wgtflag, &numflag, &ncon, &npart, - &(tpwgts[0]), &(ubvec[0]), &(options[0]), &edgecut, &(part[0]), &parmetis_comm); - // LCOV_EXCL_START - if (result == METIS_ERROR) { - throw UnexpectedError("Metis Error"); - } - // LCOV_EXCL_STOP - - MPI_Comm_free(&parmetis_comm); + switch (m_partitioner_options.library()) { + case PartitionerLibrary::parmetis: { + return ParMETISPartitioner::partition(graph); + } + case PartitionerLibrary::ptscotch: { + return PTScotchPartitioner::partition(graph); + } + default: { + throw UnexpectedError("invalid partition library"); + } } - - MPI_Group_free(&mesh_group); - - return part; -} - -#else // PUGS_HAS_MPI - -Array<int> -Partitioner::partition(const CRSGraph& graph) -{ - Array<int> partition{graph.entries().size() - 1}; - partition.fill(0); - return partition; } - -#endif // PUGS_HAS_MPI diff --git a/src/utils/Partitioner.hpp b/src/utils/Partitioner.hpp index 2c720bfd87e7853e12cd0736a46c5eda246f085f..a923d2301635d43f346159aeff6de4331e7dceef 100644 --- a/src/utils/Partitioner.hpp +++ b/src/utils/Partitioner.hpp @@ -2,11 +2,15 @@ #define PARTITIONER_HPP #include <utils/CRSGraph.hpp> +#include <utils/PartitionerOptions.hpp> class Partitioner { + private: + PartitionerOptions m_partitioner_options; + public: - Partitioner() = default; + Partitioner(const PartitionerOptions& options = PartitionerOptions::default_options); Partitioner(const Partitioner&) = default; ~Partitioner() = default; diff --git a/src/utils/PartitionerOptions.cpp b/src/utils/PartitionerOptions.cpp new file mode 100644 index 0000000000000000000000000000000000000000..207d649803be16ac79795084517eb90feb97f283 --- /dev/null +++ b/src/utils/PartitionerOptions.cpp @@ -0,0 +1,10 @@ +#include <utils/PartitionerOptions.hpp> + +#include <rang.hpp> + +std::ostream& +operator<<(std::ostream& os, 
const PartitionerOptions& options)
+{
+  os << "  library: " << rang::style::bold << name(options.library()) << rang::style::reset << '\n';
+  return os;
+}
diff --git a/src/utils/PartitionerOptions.hpp b/src/utils/PartitionerOptions.hpp
new file mode 100644
index 0000000000000000000000000000000000000000..e4157f7659cac94ef8b4c7cd02a54d289156d4a8
--- /dev/null
+++ b/src/utils/PartitionerOptions.hpp
@@ -0,0 +1,86 @@
+#ifndef PARTITIONER_OPTIONS_HPP
+#define PARTITIONER_OPTIONS_HPP
+
+#include <utils/Exceptions.hpp>
+#include <utils/pugs_config.hpp>
+
+#include <iostream>
+
+enum class PartitionerLibrary : int8_t
+{
+  PT__begin = 0,
+  //
+  parmetis = PT__begin,
+  ptscotch,
+  //
+  PT__end
+};
+
+inline std::string
+name(const PartitionerLibrary library)
+{
+  switch (library) {
+  case PartitionerLibrary::parmetis: {
+    return "ParMETIS";
+  }
+  case PartitionerLibrary::ptscotch: {
+    return "PTScotch";
+  }
+  case PartitionerLibrary::PT__end: {
+  }
+  }
+  throw UnexpectedError("Partitioner library name is not defined!");
+}
+
+template <typename PartitionerEnumType>
+inline PartitionerEnumType
+getPartitionerEnumFromName(const std::string& enum_name)
+{
+  using BaseT = std::underlying_type_t<PartitionerEnumType>;
+  for (BaseT enum_value = static_cast<BaseT>(PartitionerEnumType::PT__begin);
+       enum_value < static_cast<BaseT>(PartitionerEnumType::PT__end); ++enum_value) {
+    if (name(PartitionerEnumType{enum_value}) == enum_name) {
+      return PartitionerEnumType{enum_value};
+    }
+  }
+  throw NormalError(std::string{"could not find '"} + enum_name + "' associate type!");
+}
+
+class PartitionerOptions
+{
+ private:
+  PartitionerLibrary m_library = []() {
+#if !defined(PUGS_HAS_PARMETIS) && defined(PUGS_HAS_PTSCOTCH)
+    return PartitionerLibrary::ptscotch;
+#else   // sets parmetis as default if no alternative is available
+    return PartitionerLibrary::parmetis;
+#endif
+  }();
+
+ public:
+  static PartitionerOptions default_options;
+
+  friend std::ostream& operator<<(std::ostream& os,
const PartitionerOptions& options); + + PartitionerLibrary& + library() + { + return m_library; + } + + PartitionerLibrary + library() const + { + return m_library; + } + + PartitionerOptions(const PartitionerOptions&) = default; + PartitionerOptions(PartitionerOptions&&) = default; + + PartitionerOptions() = default; + ~PartitionerOptions() = default; +}; + +inline PartitionerOptions PartitionerOptions::default_options; + +#endif // PARTITIONER_OPTIONS_HPP diff --git a/src/utils/checkpointing/Checkpoint.cpp b/src/utils/checkpointing/Checkpoint.cpp index ed56dcb83b4cc33fc0cdbaac4c6e704423f13336..ebef0dddd218887a7c48187909757eeb719a14a6 100644 --- a/src/utils/checkpointing/Checkpoint.cpp +++ b/src/utils/checkpointing/Checkpoint.cpp @@ -4,8 +4,6 @@ #ifdef PUGS_HAS_HDF5 -#include <algebra/EigenvalueSolverOptions.hpp> -#include <algebra/LinearSolverOptions.hpp> #include <dev/ParallelChecker.hpp> #include <language/ast/ASTExecutionStack.hpp> #include <language/utils/ASTCheckpointsInfo.hpp> @@ -25,6 +23,7 @@ #include <utils/checkpointing/EigenvalueSolverOptionsHFType.hpp> #include <utils/checkpointing/LinearSolverOptionsHFType.hpp> #include <utils/checkpointing/ParallelCheckerHFType.hpp> +#include <utils/checkpointing/PartitionerOptionsHFType.hpp> #include <utils/checkpointing/ResumingManager.hpp> #include <iostream> @@ -131,6 +130,14 @@ checkpoint() eigenvalue_solver_options_default_group.createAttribute("library", default_options.library()); } + { + HighFive::Group partitioner_options_default_group = + checkpoint.createGroup("singleton/partitioner_options_default"); + + const PartitionerOptions& default_options = PartitionerOptions::default_options; + + partitioner_options_default_group.createAttribute("library", default_options.library()); + } { const auto& primal_to_dual_connectivity_info_map = DualConnectivityManager::instance().primalToDualConnectivityInfoMap(); diff --git a/src/utils/checkpointing/PartitionerOptionsHFType.hpp 
b/src/utils/checkpointing/PartitionerOptionsHFType.hpp new file mode 100644 index 0000000000000000000000000000000000000000..e022bb66119943081d22573ac657b46736613037 --- /dev/null +++ b/src/utils/checkpointing/PartitionerOptionsHFType.hpp @@ -0,0 +1,15 @@ +#ifndef PARTITIONER_OPTIONS_HF_TYPE_HPP +#define PARTITIONER_OPTIONS_HF_TYPE_HPP + +#include <utils/HighFivePugsUtils.hpp> +#include <utils/PartitionerOptions.hpp> +#include <utils/PugsMacros.hpp> + +HighFive::EnumType<PartitionerLibrary> PUGS_INLINE +create_enum_PTOptions_library_type() +{ + return {{"ParMETIS", PartitionerLibrary::parmetis}, {"PTScotch", PartitionerLibrary::ptscotch}}; +} +HIGHFIVE_REGISTER_TYPE(PartitionerLibrary, create_enum_PTOptions_library_type) + +#endif // PARTITIONER_OPTIONS_HF_TYPE_HPP diff --git a/src/utils/checkpointing/Resume.cpp b/src/utils/checkpointing/Resume.cpp index effd6b8080c5f5c8c3d64bbfe7d30fafbbbb1e7e..f342a2466caf36567840a8fc21686a28c26b4d8f 100644 --- a/src/utils/checkpointing/Resume.cpp +++ b/src/utils/checkpointing/Resume.cpp @@ -18,6 +18,7 @@ #include <utils/checkpointing/EigenvalueSolverOptionsHFType.hpp> #include <utils/checkpointing/LinearSolverOptionsHFType.hpp> #include <utils/checkpointing/ParallelCheckerHFType.hpp> +#include <utils/checkpointing/PartitionerOptionsHFType.hpp> #include <utils/checkpointing/ResumingData.hpp> #include <utils/checkpointing/ResumingManager.hpp> @@ -91,6 +92,13 @@ resume() default_options.library() = eigenvalue_solver_options_default_group.getAttribute("library").read<ESLibrary>(); } + { + HighFive::Group partitioner_options_default_group = checkpoint.getGroup("singleton/partitioner_options_default"); + + PartitionerOptions& default_options = PartitionerOptions::default_options; + + default_options.library() = partitioner_options_default_group.getAttribute("library").read<PartitionerLibrary>(); + } checkpointing::ResumingData::instance().readData(checkpoint, p_symbol_table); diff --git a/src/utils/pugs_config.hpp.in 
b/src/utils/pugs_config.hpp.in index a07d8c314da95f8db595886e3df5b29cdc45907a..9560f7101d75033749dd89801cd560123cfbfb77 100644 --- a/src/utils/pugs_config.hpp.in +++ b/src/utils/pugs_config.hpp.in @@ -1,12 +1,14 @@ #ifndef PUGS_CONFIG_HPP #define PUGS_CONFIG_HPP +#cmakedefine PUGS_HAS_EIGEN3 #cmakedefine PUGS_HAS_FENV_H +#cmakedefine PUGS_HAS_HDF5 #cmakedefine PUGS_HAS_MPI +#cmakedefine PUGS_HAS_PARMETIS #cmakedefine PUGS_HAS_PETSC +#cmakedefine PUGS_HAS_PTSCOTCH #cmakedefine PUGS_HAS_SLEPC -#cmakedefine PUGS_HAS_EIGEN3 -#cmakedefine PUGS_HAS_HDF5 #cmakedefine PUGS_HAS_SLURM #cmakedefine SYSTEM_IS_LINUX