#include <utils/Partitioner.hpp>

#include <utils/Messenger.hpp>
#include <utils/pugs_config.hpp>

#ifdef PUGS_HAS_MPI

// The index and real widths below are expected to match the idx_t/real_t
// configuration of the installed ParMETIS build.
#define IDXTYPEWIDTH 64
#define REALTYPEWIDTH 64
#include <parmetis.h>

#include <iostream>
#include <rang.hpp>
#include <vector>

#include <utils/Exceptions.hpp>

// Compute a balanced k-way partition of the connectivity graph with ParMETIS,
// one part per MPI process.
Array<int>
Partitioner::partition(const CSRGraph& graph)
{
  std::cout << "Partitioning graph into " << rang::style::bold << parallel::size() << rang::style::reset << " parts\n";

  int wgtflag = 0;   // no vertex or edge weights
  int numflag = 0;   // C-style (0-based) numbering
  int ncon    = 1;   // single balance constraint
  int npart   = static_cast<int>(parallel::size());   // one part per MPI process
  std::vector<float> tpwgts(npart, 1. / npart);        // uniform target weight for each part

  std::vector<float> ubvec{1.05};      // 5% imbalance tolerance for the balance constraint
  std::vector<int> options{1, 1, 0};   // use the given options: dbglvl = 1, random seed = 0
  int edgecut = 0;
  Array<int> part(0);

  MPI_Group world_group;
  MPI_Comm_group(MPI_COMM_WORLD, &world_group);

  MPI_Group mesh_group;
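  // Collect the ranks that own at least one graph node: only those ranks take
  // part in the ParMETIS call.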
  std::vector<int> group_ranks = [&]() {
    Array<int> graph_node_owners = parallel::allGather(static_cast<int>(graph.numberOfNodes()));
    std::vector<int> group_ranks;
    group_ranks.reserve(graph_node_owners.size());
    for (size_t i = 0; i < graph_node_owners.size(); ++i) {
      if (graph_node_owners[i] > 0) {
        group_ranks.push_back(static_cast<int>(i));
      }
    }
    return group_ranks;
  }();

  MPI_Group_incl(world_group, static_cast<int>(group_ranks.size()), group_ranks.data(), &mesh_group);

  MPI_Comm parmetis_comm;
  MPI_Comm_create_group(MPI_COMM_WORLD, mesh_group, 1, &parmetis_comm);
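  // Ranks that are not members of mesh_group get MPI_COMM_NULL and skip the
  // ParMETIS call below.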

  int local_number_of_nodes = graph.numberOfNodes();

  if (graph.numberOfNodes() > 0) {
    part = Array<int>(local_number_of_nodes);
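    // vtxdist describes how the graph vertices are distributed over the
    // processes of parmetis_comm; a single owning process is assumed here.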
    std::vector<int> vtxdist{0, local_number_of_nodes};

    const Array<int>& entries   = graph.entries();     // CSR offsets (ParMETIS xadj)
    const Array<int>& neighbors = graph.neighbors();   // CSR adjacency lists (ParMETIS adjncy)

    // Let ParMETIS compute the k-way partition; the part number of each local
    // node is written into `part`.
    int result =
      ParMETIS_V3_PartKway(&(vtxdist[0]), &(entries[0]), &(neighbors[0]), nullptr, nullptr, &wgtflag, &numflag, &ncon,
                           &npart, &(tpwgts[0]), &(ubvec[0]), &(options[0]), &edgecut, &(part[0]), &parmetis_comm);
    if (result == METIS_ERROR) {
      throw UnexpectedError("ParMETIS: error while partitioning the graph");
    }

    MPI_Comm_free(&parmetis_comm);
  }

  MPI_Group_free(&mesh_group);
  MPI_Group_free(&world_group);

  return part;
}

#else   // PUGS_HAS_MPI

Array<int>
Partitioner::partition(const CSRGraph&)
{
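  // Sequential build: the whole mesh belongs to a single process, so an empty
  // partition array is returned.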
  return Array<int>(0);
}

#endif   // PUGS_HAS_MPI