From ba6c638c7c75a902a40e6e8f4bee42e043468414 Mon Sep 17 00:00:00 2001
From: Stephane Del Pino <stephane.delpino44@gmail.com>
Date: Sat, 1 Feb 2025 18:11:59 +0100
Subject: [PATCH 1/6] Prepare PTScotch integration

- Add simple CMake recipe
- Add PartitionerOptions
- Separate ParMETIS partitioner from Partitioner
---
 CMakeLists.txt                    | 27 ++++++++-
 cmake/FindPTScotch.cmake          | 29 +++++++++
 src/utils/CMakeLists.txt          |  2 +
 src/utils/ParMETISPartitioner.cpp | 93 +++++++++++++++++++++++++++++
 src/utils/ParMETISPartitioner.hpp | 19 ++++++
 src/utils/Partitioner.cpp         | 97 ++++---------------------------
 src/utils/Partitioner.hpp         |  6 +-
 src/utils/PartitionerOptions.cpp  | 10 ++++
 src/utils/PartitionerOptions.hpp  | 72 +++++++++++++++++++++++
 src/utils/pugs_config.hpp.in      |  6 +-
 10 files changed, 272 insertions(+), 89 deletions(-)
 create mode 100644 cmake/FindPTScotch.cmake
 create mode 100644 src/utils/ParMETISPartitioner.cpp
 create mode 100644 src/utils/ParMETISPartitioner.hpp
 create mode 100644 src/utils/PartitionerOptions.cpp
 create mode 100644 src/utils/PartitionerOptions.hpp

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 52579983f..7c0f69fcf 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -147,11 +147,36 @@ if(PARMETIS_LIBRARIES)
   set_property(TARGET PkgConfig::ParMETIS PROPERTY
     IMPORTED_LOCATION "${PARMETIS_LIBRARIES}")
 
+  set(PUGS_HAS_PARMETIS TRUE)
   set(PARMETIS_TARGET PkgConfig::ParMETIS)
+else()
+  unset(PUGS_HAS_PARMETIS)
 endif()
 
+#------------------------------------------------------
+# Search for PTScotch
+
+find_package(PTScotch)
+
+set(PUGS_HAS_PTSCOTCH ${PTScotch_FOUND})
+
+if (PTScotch_FOUND)
+  add_library(PkgConfig::PTScotch STATIC IMPORTED)
+  set_property(TARGET PkgConfig::PTScotch PROPERTY
+    IMPORTED_LOCATION "${PTSCOTCH_LIBRARIES}")
+  set_property(TARGET PkgConfig::PTScotch PROPERTY
+    INTERFACE_INCLUDE_DIRECTORIES "${PTSCOTCH_INCLUDE_DIR}")
+
+  set(PTSCOTCH_TARGET PkgConfig::PTScotch)
+  include_directories(SYSTEM "${PTSCOTCH_INCLUDE_DIR}")
+else()
+  set(PTSCOTCH_LIBRARIES "")
+endif()
+
+#------------------------------------------------------
+
 if(${MPI_FOUND})
-  if (NOT PARMETIS_LIBRARIES)
+  if ((NOT PARMETIS_LIBRARIES) AND (NOT PTSCOTCH_LIBRARIES))
     if(PUGS_ENABLE_MPI MATCHES "^AUTO$")
       message(STATUS "MPI support deactivated: ParMETIS cannot be found!")
       unset(MPI_FOUND)
diff --git a/cmake/FindPTScotch.cmake b/cmake/FindPTScotch.cmake
new file mode 100644
index 000000000..da6286e3e
--- /dev/null
+++ b/cmake/FindPTScotch.cmake
@@ -0,0 +1,29 @@
+# Looking for PTScotch
+
+find_package(PkgConfig)
+pkg_check_modules(PC_PTSCOTCH QUIET PTSCOTCH)
+
+find_path(PTSCOTCH_INCLUDE_DIR
+  NAMES ptscotch.h
+  PATH_SUFFIXES  "include" "include/scotch"
+  HINTS "$ENV{PTSCOTCH_INCDIR}")
+
+if(EXISTS "${PTSCOTCH_INCLUDE_DIR}/ptscotch.h")
+  message(STATUS "Found ptscotch.h in ${PTSCOTCH_INCLUDE_DIR}")
+  find_library(LIB_PTSCOTCH ptscotch $ENV{PTSCOTCH_LIBDIR})
+  if("${LIB_PTSCOTCH}" STREQUAL "LIB_PTSCOTCH-NOTFOUND")
+    message(WARNING "** Could not find ptscotch library.\n** Is PTSCOTCH_LIBDIR correctly set (Actual: \"$ENV{PTSCOTCH_LIBDIR}\")?")
+  endif()
+  set(PTSCOTCH_LIBRARIES ${LIB_PTSCOTCH})
+  message(STATUS "Found ptscotch/scotch libraries ${PTSCOTCH_LIBRARIES}")
+else()
+  message(WARNING "** Could not find ptscotch.h.\n** Is PTSCOTCH_INCDIR correctly set (Actual: \"$ENV{PTSCOTCH_INCDIR}\")?")
+endif()
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(PTScotch
+  FOUND_VAR
+    PTSCOTCH_FOUND
+  REQUIRED_VARS
+    PTSCOTCH_LIBRARIES
+    PTSCOTCH_INCLUDE_DIR)
diff --git a/src/utils/CMakeLists.txt b/src/utils/CMakeLists.txt
index 4d74c5e19..333d37768 100644
--- a/src/utils/CMakeLists.txt
+++ b/src/utils/CMakeLists.txt
@@ -14,7 +14,9 @@ add_library(
   FPEManager.cpp
   GlobalVariableManager.cpp
   Messenger.cpp
+  ParMETISPartitioner.cpp
   Partitioner.cpp
+  PartitionerOptions.cpp
   PETScWrapper.cpp
   PluginsLoader.cpp
   PugsUtils.cpp
diff --git a/src/utils/ParMETISPartitioner.cpp b/src/utils/ParMETISPartitioner.cpp
new file mode 100644
index 000000000..06feb9b25
--- /dev/null
+++ b/src/utils/ParMETISPartitioner.cpp
@@ -0,0 +1,93 @@
+#include <utils/ParMETISPartitioner.hpp>
+
+#include <utils/pugs_config.hpp>
+
+#ifdef PUGS_HAS_PARMETIS
+
+#include <utils/Exceptions.hpp>
+#include <utils/Messenger.hpp>
+
+#include <parmetis.h>
+
+#include <iostream>
+#include <vector>
+
+Array<int>
+ParMETISPartitioner::partition(const CRSGraph& graph)
+{
+  std::cout << "Partitioning graph into " << rang::style::bold << parallel::size() << rang::style::reset
+            << " parts using " << rang::fgB::green << "ParMETIS" << rang::fg::reset << '\n';
+
+  int wgtflag = 0;
+  int numflag = 0;
+  int ncon    = 1;
+  int npart   = parallel::size();
+  std::vector<float> tpwgts(npart, 1. / npart);
+
+  std::vector<float> ubvec{1.05};
+  std::vector<int> options{1, 0, 0};
+  int edgecut = 0;
+  Array<int> part(0);
+
+  MPI_Group world_group;
+
+  MPI_Comm_group(parallel::Messenger::getInstance().comm(), &world_group);
+
+  MPI_Group mesh_group;
+  std::vector<int> group_ranks = [&]() {
+    Array<int> graph_node_owners = parallel::allGather(static_cast<int>(graph.numberOfNodes()));
+    std::vector<int> grp_ranks;
+    grp_ranks.reserve(graph_node_owners.size());
+    for (size_t i = 0; i < graph_node_owners.size(); ++i) {
+      if (graph_node_owners[i] > 0) {
+        grp_ranks.push_back(i);
+      }
+    }
+    return grp_ranks;
+  }();
+
+  MPI_Group_incl(world_group, group_ranks.size(), &(group_ranks[0]), &mesh_group);
+
+  MPI_Comm parmetis_comm;
+  MPI_Comm_create_group(parallel::Messenger::getInstance().comm(), mesh_group, 1, &parmetis_comm);
+
+  int local_number_of_nodes = graph.numberOfNodes();
+
+  if (graph.numberOfNodes() > 0) {
+    part = Array<int>(local_number_of_nodes);
+    std::vector<int> vtxdist{0, local_number_of_nodes};
+
+    const Array<const int>& entries   = graph.entries();
+    const Array<const int>& neighbors = graph.neighbors();
+
+    int* entries_ptr   = const_cast<int*>(&(entries[0]));
+    int* neighbors_ptr = const_cast<int*>(&(neighbors[0]));
+
+    int result =
+      ParMETIS_V3_PartKway(&(vtxdist[0]), entries_ptr, neighbors_ptr, NULL, NULL, &wgtflag, &numflag, &ncon, &npart,
+                           &(tpwgts[0]), &(ubvec[0]), &(options[0]), &edgecut, &(part[0]), &parmetis_comm);
+    // LCOV_EXCL_START
+    if (result == METIS_ERROR) {
+      throw UnexpectedError("Metis Error");
+    }
+    // LCOV_EXCL_STOP
+
+    MPI_Comm_free(&parmetis_comm);
+  }
+
+  MPI_Group_free(&mesh_group);
+
+  return part;
+}
+
+#else   // PUGS_HAS_PARMETIS
+
+Array<int>
+ParMETISPartitioner::partition(const CRSGraph& graph)
+{
+  Array<int> partition{graph.entries().size() - 1};
+  partition.fill(0);
+  return partition;
+}
+
+#endif   // PUGS_HAS_PARMETIS
diff --git a/src/utils/ParMETISPartitioner.hpp b/src/utils/ParMETISPartitioner.hpp
new file mode 100644
index 000000000..53c1d2dc5
--- /dev/null
+++ b/src/utils/ParMETISPartitioner.hpp
@@ -0,0 +1,19 @@
+#ifndef PARMETIS_PARTITIONER_HPP
+#define PARMETIS_PARTITIONER_HPP
+
+#include <utils/Array.hpp>
+#include <utils/CRSGraph.hpp>
+
+class ParMETISPartitioner
+{
+ private:
+  friend class Partitioner;
+
+  // Forbids direct calls
+  static Array<int> partition(const CRSGraph& graph);
+
+ public:
+  ParMETISPartitioner() = delete;
+};
+
+#endif   // PARMETIS_PARTITIONER_HPP
diff --git a/src/utils/Partitioner.cpp b/src/utils/Partitioner.cpp
index 85e13c22d..4c7145cf2 100644
--- a/src/utils/Partitioner.cpp
+++ b/src/utils/Partitioner.cpp
@@ -1,94 +1,21 @@
 #include <utils/Partitioner.hpp>
 
-#include <utils/Messenger.hpp>
-#include <utils/pugs_config.hpp>
+#include <utils/ParMETISPartitioner.hpp>
 
-#ifdef PUGS_HAS_MPI
-
-#define IDXTYPEWIDTH 64
-#define REALTYPEWIDTH 64
-#include <parmetis.h>
-
-#include <iostream>
-#include <vector>
-
-#include <utils/Exceptions.hpp>
+Partitioner::Partitioner(const PartitionerOptions& options) : m_partitioner_options{options} {}
 
 Array<int>
 Partitioner::partition(const CRSGraph& graph)
 {
-  std::cout << "Partitioning graph into " << rang::style::bold << parallel::size() << rang::style::reset << " parts\n";
-
-  int wgtflag = 0;
-  int numflag = 0;
-  int ncon    = 1;
-  int npart   = parallel::size();
-  std::vector<float> tpwgts(npart, 1. / npart);
-
-  std::vector<float> ubvec{1.05};
-  std::vector<int> options{1, 0, 0};
-  int edgecut = 0;
-  Array<int> part(0);
-
-  MPI_Group world_group;
-
-  MPI_Comm_group(parallel::Messenger::getInstance().comm(), &world_group);
-
-  MPI_Group mesh_group;
-  std::vector<int> group_ranks = [&]() {
-    Array<int> graph_node_owners = parallel::allGather(static_cast<int>(graph.numberOfNodes()));
-    std::vector<int> grp_ranks;
-    grp_ranks.reserve(graph_node_owners.size());
-    for (size_t i = 0; i < graph_node_owners.size(); ++i) {
-      if (graph_node_owners[i] > 0) {
-        grp_ranks.push_back(i);
-      }
-    }
-    return grp_ranks;
-  }();
-
-  MPI_Group_incl(world_group, group_ranks.size(), &(group_ranks[0]), &mesh_group);
-
-  MPI_Comm parmetis_comm;
-  MPI_Comm_create_group(parallel::Messenger::getInstance().comm(), mesh_group, 1, &parmetis_comm);
-
-  int local_number_of_nodes = graph.numberOfNodes();
-
-  if (graph.numberOfNodes() > 0) {
-    part = Array<int>(local_number_of_nodes);
-    std::vector<int> vtxdist{0, local_number_of_nodes};
-
-    const Array<const int>& entries   = graph.entries();
-    const Array<const int>& neighbors = graph.neighbors();
-
-    int* entries_ptr   = const_cast<int*>(&(entries[0]));
-    int* neighbors_ptr = const_cast<int*>(&(neighbors[0]));
-
-    int result =
-      ParMETIS_V3_PartKway(&(vtxdist[0]), entries_ptr, neighbors_ptr, NULL, NULL, &wgtflag, &numflag, &ncon, &npart,
-                           &(tpwgts[0]), &(ubvec[0]), &(options[0]), &edgecut, &(part[0]), &parmetis_comm);
-    // LCOV_EXCL_START
-    if (result == METIS_ERROR) {
-      throw UnexpectedError("Metis Error");
-    }
-    // LCOV_EXCL_STOP
-
-    MPI_Comm_free(&parmetis_comm);
+  switch (m_partitioner_options.library()) {
+  case PartitionerLibrary::parmetis: {
+    return ParMETISPartitioner::partition(graph);
+  }
+  case PartitionerLibrary::ptscotch: {
+    throw NotImplementedError("PTScotch");
+  }
+  default: {
+    throw UnexpectedError("invalid partition library");
+  }
   }
-
-  MPI_Group_free(&mesh_group);
-
-  return part;
-}
-
-#else   // PUGS_HAS_MPI
-
-Array<int>
-Partitioner::partition(const CRSGraph& graph)
-{
-  Array<int> partition{graph.entries().size() - 1};
-  partition.fill(0);
-  return partition;
 }
-
-#endif   // PUGS_HAS_MPI
diff --git a/src/utils/Partitioner.hpp b/src/utils/Partitioner.hpp
index 2c720bfd8..a923d2301 100644
--- a/src/utils/Partitioner.hpp
+++ b/src/utils/Partitioner.hpp
@@ -2,11 +2,15 @@
 #define PARTITIONER_HPP
 
 #include <utils/CRSGraph.hpp>
+#include <utils/PartitionerOptions.hpp>
 
 class Partitioner
 {
+ private:
+  PartitionerOptions m_partitioner_options;
+
  public:
-  Partitioner()                   = default;
+  Partitioner(const PartitionerOptions& options = PartitionerOptions::default_options);
   Partitioner(const Partitioner&) = default;
   ~Partitioner()                  = default;
 
diff --git a/src/utils/PartitionerOptions.cpp b/src/utils/PartitionerOptions.cpp
new file mode 100644
index 000000000..207d64980
--- /dev/null
+++ b/src/utils/PartitionerOptions.cpp
@@ -0,0 +1,10 @@
+#include <utils/PartitionerOptions.hpp>
+
+#include <rang.hpp>
+
+std::ostream&
+operator<<(std::ostream& os, const PartitionerOptions& options)
+{
+  os << "  library: " << rang::style::bold << name(options.library()) << rang::style::reset << '\n';
+  return os;
+}
diff --git a/src/utils/PartitionerOptions.hpp b/src/utils/PartitionerOptions.hpp
new file mode 100644
index 000000000..8ce9263b0
--- /dev/null
+++ b/src/utils/PartitionerOptions.hpp
@@ -0,0 +1,72 @@
+#ifndef PARTITIONER_OPTIONS_HPP
+#define PARTITIONER_OPTIONS_HPP
+
+#include <utils/Exceptions.hpp>
+#include <utils/pugs_config.hpp>
+
+#include <iostream>
+
+enum class PartitionerLibrary : int8_t
+{
+  PT__begin = 0,
+  //
+  parmetis = PT__begin,
+  ptscotch,
+  //
+  PT__end
+};
+
+inline std::string
+name(const PartitionerLibrary library)
+{
+  switch (library) {
+  case PartitionerLibrary::parmetis: {
+    return "ParMETIS";
+  }
+  case PartitionerLibrary::ptscotch: {
+    return "PTScotch";
+  }
+  case PartitionerLibrary::PT__end: {
+  }
+  }
+  throw UnexpectedError("Partitioner library name is not defined!");
+}
+
+class PartitionerOptions
+{
+ private:
+  PartitionerLibrary m_library = []() {
+#if !defined(PUGS_HAS_PARMETIS) && defined(PUGS_HAS_PTSCOTCH)
+    return PartitionerLibrary::ptscotch;
+#else   // sets parmetis as default if no alternative is available
+    return PartitionerLibrary::parmetis;
+#endif
+  }();
+
+ public:
+  static PartitionerOptions default_options;
+
+  friend std::ostream& operator<<(std::ostream& os, const PartitionerOptions& options);
+
+  PartitionerLibrary&
+  library()
+  {
+    return m_library;
+  }
+
+  PartitionerLibrary
+  library() const
+  {
+    return m_library;
+  }
+
+  PartitionerOptions(const PartitionerOptions&) = default;
+  PartitionerOptions(PartitionerOptions&&)      = default;
+
+  PartitionerOptions()  = default;
+  ~PartitionerOptions() = default;
+};
+
+inline PartitionerOptions PartitionerOptions::default_options;
+
+#endif   // PARTITIONER_OPTIONS_HPP
diff --git a/src/utils/pugs_config.hpp.in b/src/utils/pugs_config.hpp.in
index a07d8c314..9560f7101 100644
--- a/src/utils/pugs_config.hpp.in
+++ b/src/utils/pugs_config.hpp.in
@@ -1,12 +1,14 @@
 #ifndef PUGS_CONFIG_HPP
 #define PUGS_CONFIG_HPP
 
+#cmakedefine PUGS_HAS_EIGEN3
 #cmakedefine PUGS_HAS_FENV_H
+#cmakedefine PUGS_HAS_HDF5
 #cmakedefine PUGS_HAS_MPI
+#cmakedefine PUGS_HAS_PARMETIS
 #cmakedefine PUGS_HAS_PETSC
+#cmakedefine PUGS_HAS_PTSCOTCH
 #cmakedefine PUGS_HAS_SLEPC
-#cmakedefine PUGS_HAS_EIGEN3
-#cmakedefine PUGS_HAS_HDF5
 #cmakedefine PUGS_HAS_SLURM
 
 #cmakedefine SYSTEM_IS_LINUX
-- 
GitLab


From 3b98e7355dc57fe1306be7f94b51b94f349730a3 Mon Sep 17 00:00:00 2001
From: Stephane Del Pino <stephane.delpino44@gmail.com>
Date: Sun, 2 Feb 2025 18:32:45 +0100
Subject: [PATCH 2/6] Add missing MPI_Comm_free

---
 src/utils/ParMETISPartitioner.cpp | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/utils/ParMETISPartitioner.cpp b/src/utils/ParMETISPartitioner.cpp
index 06feb9b25..93018f44e 100644
--- a/src/utils/ParMETISPartitioner.cpp
+++ b/src/utils/ParMETISPartitioner.cpp
@@ -27,10 +27,8 @@ ParMETISPartitioner::partition(const CRSGraph& graph)
   std::vector<float> ubvec{1.05};
   std::vector<int> options{1, 0, 0};
   int edgecut = 0;
-  Array<int> part(0);
 
   MPI_Group world_group;
-
   MPI_Comm_group(parallel::Messenger::getInstance().comm(), &world_group);
 
   MPI_Group mesh_group;
@@ -53,8 +51,9 @@ ParMETISPartitioner::partition(const CRSGraph& graph)
 
   int local_number_of_nodes = graph.numberOfNodes();
 
+  Array<int> partition;
   if (graph.numberOfNodes() > 0) {
-    part = Array<int>(local_number_of_nodes);
+    partition = Array<int>(local_number_of_nodes);
     std::vector<int> vtxdist{0, local_number_of_nodes};
 
     const Array<const int>& entries   = graph.entries();
@@ -65,7 +64,7 @@ ParMETISPartitioner::partition(const CRSGraph& graph)
 
     int result =
       ParMETIS_V3_PartKway(&(vtxdist[0]), entries_ptr, neighbors_ptr, NULL, NULL, &wgtflag, &numflag, &ncon, &npart,
-                           &(tpwgts[0]), &(ubvec[0]), &(options[0]), &edgecut, &(part[0]), &parmetis_comm);
+                           &(tpwgts[0]), &(ubvec[0]), &(options[0]), &edgecut, &(partition[0]), &parmetis_comm);
     // LCOV_EXCL_START
     if (result == METIS_ERROR) {
       throw UnexpectedError("Metis Error");
@@ -76,8 +75,9 @@ ParMETISPartitioner::partition(const CRSGraph& graph)
   }
 
   MPI_Group_free(&mesh_group);
+  MPI_Group_free(&world_group);
 
-  return part;
+  return partition;
 }
 
 #else   // PUGS_HAS_PARMETIS
-- 
GitLab


From 31152b0f071f3e696534544645bb1ee439d9533f Mon Sep 17 00:00:00 2001
From: Stephane Del Pino <stephane.delpino44@gmail.com>
Date: Sun, 2 Feb 2025 18:34:52 +0100
Subject: [PATCH 3/6] Add access and setting pugs functions for partitioner
 library

---
 src/language/modules/CoreModule.cpp | 22 ++++++++++++++++++++++
 src/utils/PartitionerOptions.hpp    | 14 ++++++++++++++
 2 files changed, 36 insertions(+)

diff --git a/src/language/modules/CoreModule.cpp b/src/language/modules/CoreModule.cpp
index 47e018325..5b5753eb8 100644
--- a/src/language/modules/CoreModule.cpp
+++ b/src/language/modules/CoreModule.cpp
@@ -32,6 +32,7 @@
 #include <language/utils/UnaryOperatorRegisterForRnxn.hpp>
 #include <language/utils/UnaryOperatorRegisterForZ.hpp>
 #include <utils/Messenger.hpp>
+#include <utils/PartitionerOptions.hpp>
 #include <utils/PugsUtils.hpp>
 #include <utils/RandomEngine.hpp>
 #include <utils/Stop.hpp>
@@ -94,6 +95,27 @@ CoreModule::CoreModule() : BuiltinModule(true)
 
                                                  ));
 
+  this->_addBuiltinFunction("setPartitionerLibrary", std::function(
+
+                                                       [](const std::string& library_name) -> void {
+                                                         PartitionerOptions::default_options.library() =
+                                                           getPartitionerEnumFromName<PartitionerLibrary>(library_name);
+                                                       }
+
+                                                       ));
+
+  this->_addBuiltinFunction("getPartitionerOptions", std::function(
+
+                                                       []() -> std::string {
+                                                         std::ostringstream os;
+                                                         os << rang::fgB::yellow << "Partitioner options"
+                                                            << rang::style::reset << '\n';
+                                                         os << PartitionerOptions::default_options;
+                                                         return os.str();
+                                                       }
+
+                                                       ));
+
   this->_addTypeDescriptor(ast_node_data_type_from<std::shared_ptr<const OStream>>);
 
   this->_addBuiltinFunction("ofstream", std::function(
diff --git a/src/utils/PartitionerOptions.hpp b/src/utils/PartitionerOptions.hpp
index 8ce9263b0..e4157f765 100644
--- a/src/utils/PartitionerOptions.hpp
+++ b/src/utils/PartitionerOptions.hpp
@@ -32,6 +32,20 @@ name(const PartitionerLibrary library)
   throw UnexpectedError("Linear system library name is not defined!");
 }
 
+template <typename PartitionerEnumType>
+inline PartitionerEnumType
+getPartitionerEnumFromName(const std::string& enum_name)
+{
+  using BaseT = std::underlying_type_t<PartitionerEnumType>;
+  for (BaseT enum_value = static_cast<BaseT>(PartitionerEnumType::PT__begin);
+       enum_value < static_cast<BaseT>(PartitionerEnumType::PT__end); ++enum_value) {
+    if (name(PartitionerEnumType{enum_value}) == enum_name) {
+      return PartitionerEnumType{enum_value};
+    }
+  }
+  throw NormalError(std::string{"could not find '"} + enum_name + "' associated type!");
+}
+
 class PartitionerOptions
 {
  private:
-- 
GitLab


From 1e46180615177a99152bf1efd6a88b7fd4c292c5 Mon Sep 17 00:00:00 2001
From: Stephane Del Pino <stephane.delpino44@gmail.com>
Date: Sun, 2 Feb 2025 21:50:39 +0100
Subject: [PATCH 4/6] Plug PTScotch partitioner

---
 src/mesh/Connectivity.hpp         |   1 -
 src/utils/CMakeLists.txt          |   1 +
 src/utils/PTScotchPartitioner.cpp | 110 ++++++++++++++++++++++++++++++
 src/utils/PTScotchPartitioner.hpp |  19 ++++++
 src/utils/ParMETISPartitioner.cpp |  33 +++++----
 src/utils/Partitioner.cpp         |   3 +-
 6 files changed, 148 insertions(+), 19 deletions(-)
 create mode 100644 src/utils/PTScotchPartitioner.cpp
 create mode 100644 src/utils/PTScotchPartitioner.hpp

diff --git a/src/mesh/Connectivity.hpp b/src/mesh/Connectivity.hpp
index 24d9ef656..aa1a471d6 100644
--- a/src/mesh/Connectivity.hpp
+++ b/src/mesh/Connectivity.hpp
@@ -19,7 +19,6 @@
 #include <utils/PugsTraits.hpp>
 #include <utils/PugsUtils.hpp>
 
-#include <algorithm>
 #include <iostream>
 #include <set>
 #include <vector>
diff --git a/src/utils/CMakeLists.txt b/src/utils/CMakeLists.txt
index 333d37768..bc66a97fa 100644
--- a/src/utils/CMakeLists.txt
+++ b/src/utils/CMakeLists.txt
@@ -19,6 +19,7 @@ add_library(
   PartitionerOptions.cpp
   PETScWrapper.cpp
   PluginsLoader.cpp
+  PTScotchPartitioner.cpp
   PugsUtils.cpp
   RandomEngine.cpp
   ReproducibleSumManager.cpp
diff --git a/src/utils/PTScotchPartitioner.cpp b/src/utils/PTScotchPartitioner.cpp
new file mode 100644
index 000000000..e86acfa44
--- /dev/null
+++ b/src/utils/PTScotchPartitioner.cpp
@@ -0,0 +1,110 @@
+#include <utils/PTScotchPartitioner.hpp>
+
+#include <utils/pugs_config.hpp>
+
+#ifdef PUGS_HAS_PTSCOTCH
+
+#include <utils/Exceptions.hpp>
+#include <utils/Messenger.hpp>
+
+#include <ptscotch.h>
+
+#include <iostream>
+#include <vector>
+
+Array<int>
+PTScotchPartitioner::partition(const CRSGraph& graph)
+{
+  std::cout << "Partitioning graph into " << rang::style::bold << parallel::size() << rang::style::reset
+            << " parts using " << rang::fgB::green << "PTScotch" << rang::fg::reset << '\n';
+
+  MPI_Group world_group;
+  MPI_Comm_group(parallel::Messenger::getInstance().comm(), &world_group);
+
+  MPI_Group mesh_group;
+  std::vector<int> group_ranks = [&]() {
+    Array<int> graph_node_owners = parallel::allGather(static_cast<int>(graph.numberOfNodes()));
+    std::vector<int> grp_ranks;
+    grp_ranks.reserve(graph_node_owners.size());
+    for (size_t i = 0; i < graph_node_owners.size(); ++i) {
+      if (graph_node_owners[i] > 0) {
+        grp_ranks.push_back(i);
+      }
+    }
+    return grp_ranks;
+  }();
+
+  MPI_Group_incl(world_group, group_ranks.size(), &(group_ranks[0]), &mesh_group);
+
+  MPI_Comm partitioning_comm;
+  MPI_Comm_create_group(parallel::Messenger::getInstance().comm(), mesh_group, 1, &partitioning_comm);
+
+  Array<int> partition;
+  if (graph.numberOfNodes() > 0) {
+    SCOTCH_Strat scotch_strategy;
+    SCOTCH_Dgraph scotch_graph;
+
+    SCOTCH_stratInit(&scotch_strategy);   // use default strategy
+    SCOTCH_dgraphInit(&scotch_graph, partitioning_comm);
+
+    const Array<const int>& entries   = graph.entries();
+    const Array<const int>& neighbors = graph.neighbors();
+
+    static_assert(std::is_same_v<int, SCOTCH_Num>);
+
+    SCOTCH_Num* entries_ptr   = const_cast<int*>(&(entries[0]));
+    SCOTCH_Num* neighbors_ptr = const_cast<int*>(&(neighbors[0]));
+
+    if (SCOTCH_dgraphBuild(&scotch_graph,
+                           0,                          // 0 for C-like arrays
+                           graph.numberOfNodes(),      //
+                           graph.numberOfNodes(),      //
+                           entries_ptr,                //
+                           nullptr,                    //
+                           nullptr,                    //
+                           nullptr,                    // optional local node label array
+                           graph.neighbors().size(),   //
+                           graph.neighbors().size(),   //
+                           neighbors_ptr,              //
+                           nullptr,                    //
+                           nullptr) != 0) {
+      // LCOV_EXCL_START
+      throw UnexpectedError("PTScotch invalid graph");
+      // LCOV_EXCL_STOP
+    }
+
+    partition = Array<int>(graph.numberOfNodes());
+
+    SCOTCH_Num* partition_ptr = static_cast<SCOTCH_Num*>(&(partition[0]));
+
+    if (SCOTCH_dgraphPart(&scotch_graph,      //
+                          parallel::size(),   //
+                          &scotch_strategy,   //
+                          partition_ptr) != 0) {
+      // LCOV_EXCL_START
+      throw UnexpectedError("PTScotch Error");
+      // LCOV_EXCL_STOP
+    }
+
+    SCOTCH_dgraphExit(&scotch_graph);
+    SCOTCH_stratExit(&scotch_strategy);
+  }
+
+  MPI_Comm_free(&partitioning_comm);
+  MPI_Group_free(&mesh_group);
+  MPI_Group_free(&world_group);
+
+  return partition;
+}
+
+#else   // PUGS_HAS_PTSCOTCH
+
+Array<int>
+PTScotchPartitioner::partition(const CRSGraph& graph)
+{
+  Array<int> partition{graph.entries().size() - 1};
+  partition.fill(0);
+  return partition;
+}
+
+#endif   // PUGS_HAS_PTSCOTCH
diff --git a/src/utils/PTScotchPartitioner.hpp b/src/utils/PTScotchPartitioner.hpp
new file mode 100644
index 000000000..cefb0a1df
--- /dev/null
+++ b/src/utils/PTScotchPartitioner.hpp
@@ -0,0 +1,19 @@
+#ifndef PT_SCOTCH_PARTITIONER_HPP
+#define PT_SCOTCH_PARTITIONER_HPP
+
+#include <utils/Array.hpp>
+#include <utils/CRSGraph.hpp>
+
+class PTScotchPartitioner
+{
+ private:
+  friend class Partitioner;
+
+  // Forbids direct calls
+  static Array<int> partition(const CRSGraph& graph);
+
+ public:
+  PTScotchPartitioner() = delete;
+};
+
+#endif   // PT_SCOTCH_PARTITIONER_HPP
diff --git a/src/utils/ParMETISPartitioner.cpp b/src/utils/ParMETISPartitioner.cpp
index 93018f44e..47864de1f 100644
--- a/src/utils/ParMETISPartitioner.cpp
+++ b/src/utils/ParMETISPartitioner.cpp
@@ -18,16 +18,6 @@ ParMETISPartitioner::partition(const CRSGraph& graph)
   std::cout << "Partitioning graph into " << rang::style::bold << parallel::size() << rang::style::reset
             << " parts using " << rang::fgB::green << "ParMETIS" << rang::fg::reset << '\n';
 
-  int wgtflag = 0;
-  int numflag = 0;
-  int ncon    = 1;
-  int npart   = parallel::size();
-  std::vector<float> tpwgts(npart, 1. / npart);
-
-  std::vector<float> ubvec{1.05};
-  std::vector<int> options{1, 0, 0};
-  int edgecut = 0;
-
   MPI_Group world_group;
   MPI_Comm_group(parallel::Messenger::getInstance().comm(), &world_group);
 
@@ -46,13 +36,23 @@ ParMETISPartitioner::partition(const CRSGraph& graph)
 
   MPI_Group_incl(world_group, group_ranks.size(), &(group_ranks[0]), &mesh_group);
 
-  MPI_Comm parmetis_comm;
-  MPI_Comm_create_group(parallel::Messenger::getInstance().comm(), mesh_group, 1, &parmetis_comm);
-
-  int local_number_of_nodes = graph.numberOfNodes();
+  MPI_Comm partitioning_comm;
+  MPI_Comm_create_group(parallel::Messenger::getInstance().comm(), mesh_group, 1, &partitioning_comm);
 
   Array<int> partition;
   if (graph.numberOfNodes() > 0) {
+    int wgtflag = 0;
+    int numflag = 0;
+    int ncon    = 1;
+    int npart   = parallel::size();
+    std::vector<float> tpwgts(npart, 1. / npart);
+
+    std::vector<float> ubvec{1.05};
+    std::vector<int> options{1, 0, 0};
+    int edgecut = 0;
+
+    int local_number_of_nodes = graph.numberOfNodes();
+
     partition = Array<int>(local_number_of_nodes);
     std::vector<int> vtxdist{0, local_number_of_nodes};
 
@@ -64,16 +64,15 @@ ParMETISPartitioner::partition(const CRSGraph& graph)
 
     int result =
       ParMETIS_V3_PartKway(&(vtxdist[0]), entries_ptr, neighbors_ptr, NULL, NULL, &wgtflag, &numflag, &ncon, &npart,
-                           &(tpwgts[0]), &(ubvec[0]), &(options[0]), &edgecut, &(partition[0]), &parmetis_comm);
+                           &(tpwgts[0]), &(ubvec[0]), &(options[0]), &edgecut, &(partition[0]), &partitioning_comm);
     // LCOV_EXCL_START
     if (result == METIS_ERROR) {
       throw UnexpectedError("Metis Error");
     }
     // LCOV_EXCL_STOP
-
-    MPI_Comm_free(&parmetis_comm);
   }
 
+  MPI_Comm_free(&partitioning_comm);
   MPI_Group_free(&mesh_group);
   MPI_Group_free(&world_group);
 
diff --git a/src/utils/Partitioner.cpp b/src/utils/Partitioner.cpp
index 4c7145cf2..4f53d322f 100644
--- a/src/utils/Partitioner.cpp
+++ b/src/utils/Partitioner.cpp
@@ -1,5 +1,6 @@
 #include <utils/Partitioner.hpp>
 
+#include <utils/PTScotchPartitioner.hpp>
 #include <utils/ParMETISPartitioner.hpp>
 
 Partitioner::Partitioner(const PartitionerOptions& options) : m_partitioner_options{options} {}
@@ -12,7 +13,7 @@ Partitioner::partition(const CRSGraph& graph)
     return ParMETISPartitioner::partition(graph);
   }
   case PartitionerLibrary::ptscotch: {
-    throw NotImplementedError("PTScotch");
+    return PTScotchPartitioner::partition(graph);
   }
   default: {
     throw UnexpectedError("invalid partition library");
-- 
GitLab


From e1dee840cd5a7e9b70d69006b6f47a1bab6c249d Mon Sep 17 00:00:00 2001
From: Stephane Del Pino <stephane.delpino44@gmail.com>
Date: Sun, 2 Feb 2025 22:12:57 +0100
Subject: [PATCH 5/6] Add checkpoint/resume mechanism for partitioner library

---
 src/utils/checkpointing/Checkpoint.cpp            | 11 +++++++++--
 .../checkpointing/PartitionerOptionsHFType.hpp    | 15 +++++++++++++++
 src/utils/checkpointing/Resume.cpp                |  8 ++++++++
 3 files changed, 32 insertions(+), 2 deletions(-)
 create mode 100644 src/utils/checkpointing/PartitionerOptionsHFType.hpp

diff --git a/src/utils/checkpointing/Checkpoint.cpp b/src/utils/checkpointing/Checkpoint.cpp
index ed56dcb83..ebef0dddd 100644
--- a/src/utils/checkpointing/Checkpoint.cpp
+++ b/src/utils/checkpointing/Checkpoint.cpp
@@ -4,8 +4,6 @@
 
 #ifdef PUGS_HAS_HDF5
 
-#include <algebra/EigenvalueSolverOptions.hpp>
-#include <algebra/LinearSolverOptions.hpp>
 #include <dev/ParallelChecker.hpp>
 #include <language/ast/ASTExecutionStack.hpp>
 #include <language/utils/ASTCheckpointsInfo.hpp>
@@ -25,6 +23,7 @@
 #include <utils/checkpointing/EigenvalueSolverOptionsHFType.hpp>
 #include <utils/checkpointing/LinearSolverOptionsHFType.hpp>
 #include <utils/checkpointing/ParallelCheckerHFType.hpp>
+#include <utils/checkpointing/PartitionerOptionsHFType.hpp>
 #include <utils/checkpointing/ResumingManager.hpp>
 
 #include <iostream>
@@ -131,6 +130,14 @@ checkpoint()
 
       eigenvalue_solver_options_default_group.createAttribute("library", default_options.library());
     }
+    {
+      HighFive::Group partitioner_options_default_group =
+        checkpoint.createGroup("singleton/partitioner_options_default");
+
+      const PartitionerOptions& default_options = PartitionerOptions::default_options;
+
+      partitioner_options_default_group.createAttribute("library", default_options.library());
+    }
     {
       const auto& primal_to_dual_connectivity_info_map =
         DualConnectivityManager::instance().primalToDualConnectivityInfoMap();
diff --git a/src/utils/checkpointing/PartitionerOptionsHFType.hpp b/src/utils/checkpointing/PartitionerOptionsHFType.hpp
new file mode 100644
index 000000000..e022bb661
--- /dev/null
+++ b/src/utils/checkpointing/PartitionerOptionsHFType.hpp
@@ -0,0 +1,15 @@
+#ifndef PARTITIONER_OPTIONS_HF_TYPE_HPP
+#define PARTITIONER_OPTIONS_HF_TYPE_HPP
+
+#include <utils/HighFivePugsUtils.hpp>
+#include <utils/PartitionerOptions.hpp>
+#include <utils/PugsMacros.hpp>
+
+HighFive::EnumType<PartitionerLibrary> PUGS_INLINE
+create_enum_PTOptions_library_type()
+{
+  return {{"ParMETIS", PartitionerLibrary::parmetis}, {"PTScotch", PartitionerLibrary::ptscotch}};
+}
+HIGHFIVE_REGISTER_TYPE(PartitionerLibrary, create_enum_PTOptions_library_type)
+
+#endif   // PARTITIONER_OPTIONS_HF_TYPE_HPP
diff --git a/src/utils/checkpointing/Resume.cpp b/src/utils/checkpointing/Resume.cpp
index effd6b808..f342a2466 100644
--- a/src/utils/checkpointing/Resume.cpp
+++ b/src/utils/checkpointing/Resume.cpp
@@ -18,6 +18,7 @@
 #include <utils/checkpointing/EigenvalueSolverOptionsHFType.hpp>
 #include <utils/checkpointing/LinearSolverOptionsHFType.hpp>
 #include <utils/checkpointing/ParallelCheckerHFType.hpp>
+#include <utils/checkpointing/PartitionerOptionsHFType.hpp>
 #include <utils/checkpointing/ResumingData.hpp>
 #include <utils/checkpointing/ResumingManager.hpp>
 
@@ -91,6 +92,13 @@ resume()
 
       default_options.library() = eigenvalue_solver_options_default_group.getAttribute("library").read<ESLibrary>();
     }
+    {
+      HighFive::Group partitioner_options_default_group = checkpoint.getGroup("singleton/partitioner_options_default");
+
+      PartitionerOptions& default_options = PartitionerOptions::default_options;
+
+      default_options.library() = partitioner_options_default_group.getAttribute("library").read<PartitionerLibrary>();
+    }
 
     checkpointing::ResumingData::instance().readData(checkpoint, p_symbol_table);
 
-- 
GitLab


From 2b05a8d15183788c419cf8fa95764b02574f5c2c Mon Sep 17 00:00:00 2001
From: Stephane Del Pino <stephane.delpino44@gmail.com>
Date: Sun, 2 Feb 2025 22:38:55 +0100
Subject: [PATCH 6/6] Add cell_owner and node_owner (mesh->item_value)
 functions

This is useful for checking the mesh partitioning and load balance
---
 src/language/modules/MeshModule.cpp | 40 +++++++++++++++++++++++++++++
 1 file changed, 40 insertions(+)

diff --git a/src/language/modules/MeshModule.cpp b/src/language/modules/MeshModule.cpp
index c060da50d..9b698b9e0 100644
--- a/src/language/modules/MeshModule.cpp
+++ b/src/language/modules/MeshModule.cpp
@@ -308,6 +308,46 @@ MeshModule::MeshModule()
                                               return DualMeshManager::instance().getMedianDualMesh(mesh_v);
                                             }
 
+                                            ));
+
+  this->_addBuiltinFunction("cell_owner", std::function(
+
+                                            [](const std::shared_ptr<const MeshVariant>& mesh_v)
+                                              -> std::shared_ptr<const ItemValueVariant> {
+                                              return std::visit(
+                                                [&](auto&& mesh) {
+                                                  const auto& connectivity = mesh->connectivity();
+                                                  auto cell_owner          = connectivity.cellOwner();
+                                                  CellValue<long int> cell_owner_long{connectivity};
+                                                  parallel_for(
+                                                    connectivity.numberOfCells(), PUGS_LAMBDA(const CellId cell_id) {
+                                                      cell_owner_long[cell_id] = cell_owner[cell_id];
+                                                    });
+                                                  return std::make_shared<const ItemValueVariant>(cell_owner_long);
+                                                },
+                                                mesh_v->variant());
+                                            }
+
+                                            ));
+
+  this->_addBuiltinFunction("node_owner", std::function(
+
+                                            [](const std::shared_ptr<const MeshVariant>& mesh_v)
+                                              -> std::shared_ptr<const ItemValueVariant> {
+                                              return std::visit(
+                                                [&](auto&& mesh) {
+                                                  const auto& connectivity = mesh->connectivity();
+                                                  auto node_owner          = connectivity.nodeOwner();
+                                                  NodeValue<long int> node_owner_long{connectivity};
+                                                  parallel_for(
+                                                    connectivity.numberOfNodes(), PUGS_LAMBDA(const NodeId node_id) {
+                                                      node_owner_long[node_id] = node_owner[node_id];
+                                                    });
+                                                  return std::make_shared<const ItemValueVariant>(node_owner_long);
+                                                },
+                                                mesh_v->variant());
+                                            }
+
                                             ));
 }
 
-- 
GitLab