Commit 1475f720 authored by Stéphane Del Pino

Plug partitioner in gmsh reader

- This is a first step toward parallel computations
- Lots of clean-up is required in mesh reading/construction
parent 4f44693a
Merge request: !11 Feature/mpi
CMakeLists.txt
@@ -9,7 +9,7 @@ add_library(
   ConnectivityComputer.cpp
   GmshReader.cpp)
 
-#include_directories(${PASTIS_SOURCE_DIR}/utils)
+include_directories(${PASTIS_SOURCE_DIR}/utils)
 
 # Additional dependencies
 #add_dependencies(PastisMesh)
GmshReader.cpp
@@ -12,6 +12,8 @@
 #include <Mesh.hpp>
 #include <RefFaceList.hpp>
 #include <Messenger.hpp>
+#include <Partitioner.hpp>
+
 
 #include <map>
 #include <regex>
@@ -141,6 +143,7 @@ ErrorHandler(const std::string& filename,
 GmshReader::GmshReader(const std::string& filename)
   : m_filename(filename)
 {
+  if (commRank() == 0) {
   try {
     m_fin.open(m_filename);
     if (not m_fin) {
@@ -301,6 +304,21 @@ GmshReader::GmshReader(const std::string& filename)
     }
   }
+
+  if (commSize() > 1) {
+    pout() << "Sequential mesh read! Need to be dispatched\n";
+
+    CSRGraph mesh_graph;
+    if (commRank() == 0) {
+      mesh_graph = m_mesh->cellToCellGraph();
+    }
+
+    Partitioner P;
+    Array<int> new_cell_owner = P.partition(mesh_graph);
+
+    Messenger::destroy();
+    std::exit(0);
+  }
 }
 
 void GmshReader::__readVertices()
 {
   const int numberOfVerices = this->_getInteger();
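For readers unfamiliar with the graph being passed around here: cellToCellGraph() is assumed to return the mesh connectivity in compressed sparse row (CSR) form, which is the layout ParMETIS consumes. The following is a minimal, hypothetical illustration (not project code; the values are made up) for a 4-cell chain 0-1-2-3:

#include <cassert>
#include <vector>

int main()
{
  // entries plays the role of ParMETIS' xadj offsets; the neighbors of
  // cell i are neighbors[entries[i]] .. neighbors[entries[i+1]-1].
  std::vector<int> entries  {0, 1, 3, 5, 6};
  std::vector<int> neighbors{1, 0, 2, 1, 3, 2};

  // One more offset than cells, hence entries.size()-1 local cells,
  // exactly as Partitioner::partition computes below.
  assert(static_cast<int>(entries.size()) - 1 == 4);
  return 0;
}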
Partitioner.cpp
@@ -2,86 +2,81 @@
 #include <Messenger.hpp>
 #include <pastis_config.hpp>
+#include <PastisOStream.hpp>
 
 #ifdef PASTIS_HAS_MPI
 #define IDXTYPEWIDTH 64
 #define REALTYPEWIDTH 64
 #include <parmetis.h>
 
 #include <vector>
 
-Partitioner::
-Partitioner()
-{
-  std::cout << commRank() << " / " << commSize() << '\n' << std::flush;
-  MPI_Barrier(MPI_COMM_WORLD);
-  if (commSize() != 3) {
-    Messenger::destroy();
-    std::cerr << "Commsize=" << commSize() << ". Need 3 PE to test";
-    std::exit(1);
-  }
-
-  idx_t wgtflag = 0;
-  idx_t numflag = 0;
-  idx_t ncon = 1;
-  idx_t npart = 3;
-  real_t wgts = 1./npart;
-  std::vector<real_t> tpwgts{wgts, wgts, wgts};
-
-  std::vector<real_t> ubvec{1.05};
-  std::vector<idx_t> options{0,0,0};
-  idx_t edgecut = 0;
-  std::vector<idx_t> part(5);
-
-  MPI_Comm mpi_comm_;
-  MPI_Comm_dup(MPI_COMM_WORLD, &mpi_comm_);
-  MPI_Comm* mpi_comm = &mpi_comm_;
-
-  switch (commRank()) {
-    case 0: {
-      std::vector<idx_t> xadj {0,2,5,8,11,13};
-      std::vector<idx_t> adjncy {1,5,0,2,6,1,3,7,2,4,8,3,9};
-      std::vector<idx_t> vtxdist{0,5,10,15};
-      ParMETIS_V3_PartKway(&(vtxdist[0]), &(xadj[0]), &(adjncy[0]),
-                           NULL, NULL, &wgtflag, &numflag,
-                           &ncon, &npart, &(tpwgts[0]), &(ubvec[0]),
-                           &(options[0]), &edgecut, &(part[0]), mpi_comm);
-      break;
-    }
-    case 1: {
-      std::vector<idx_t> xadj {0,3,7,11,15,18};
-      std::vector<idx_t> adjncy {0,6,10,1,5,7,11,2,6,8,12,3,7,9,13,4,8,14};
-      std::vector<idx_t> vtxdist{0,5,10,15};
-      ParMETIS_V3_PartKway(&(vtxdist[0]), &(xadj[0]), &(adjncy[0]),
-                           NULL, NULL, &wgtflag, &numflag,
-                           &ncon, &npart, &(tpwgts[0]), &(ubvec[0]),
-                           &(options[0]), &edgecut, &(part[0]), mpi_comm);
-      break;
-    }
-    case 2: {
-      std::vector<idx_t> xadj {0,2,5,8,11,13};
-      std::vector<idx_t> adjncy {5,11,6,10,12,7,11,13,8,12,14,9,13};
-      std::vector<idx_t> vtxdist{0,5,10,15};
-      ParMETIS_V3_PartKway(&(vtxdist[0]), &(xadj[0]), &(adjncy[0]),
-                           NULL, NULL, &wgtflag, &numflag,
-                           &ncon, &npart, &(tpwgts[0]), &(ubvec[0]),
-                           &(options[0]), &edgecut, &(part[0]), mpi_comm);
-      break;
-    }
-    default: {
-      std::cerr << "unexpected rank " << commRank() << "!\n";
-      std::exit(0);
-    }
-  }
-  std::cerr << commRank() << " FINISHED\n";
-  MPI_Barrier(MPI_COMM_WORLD);
-  Messenger::destroy();
-  std::exit(0);
-}
+Array<int> Partitioner::partition(const CSRGraph& graph)
+{
+  pout() << "Partitioning graph into "
+         << rang::style::bold << commSize() << rang::style::reset
+         << " parts\n";
+
+  int wgtflag = 0;
+  int numflag = 0;
+  int ncon = 1;
+  int npart = commSize();
+  std::vector<float> tpwgts;
+  for (int i_part=0; i_part<npart; ++i_part) {
+    tpwgts.push_back(1./npart);
+  }
+
+  std::vector<float> ubvec{1.05};
+  std::vector<int> options{1,1,0};
+  int edgecut = 0;
+  Array<int> part(0);
+
+  MPI_Group world_group;
+  MPI_Comm_group(MPI_COMM_WORLD, &world_group);
+
+  MPI_Group mesh_group;
+  std::vector<int> group_ranks{0};
+  MPI_Group_incl(world_group, group_ranks.size(), &(group_ranks[0]), &mesh_group);
+
+  MPI_Comm parmetis_comm;
+  MPI_Comm_create_group(MPI_COMM_WORLD, mesh_group, 1, &parmetis_comm);
+
+  int local_number_of_cells = graph.entries().size()-1;
+
+  if (commRank() == 0) {
+    part = Array<int>(local_number_of_cells);
+    std::vector<int> vtxdist{0, local_number_of_cells};
+
+    static_assert(std::is_same<int, int>());
+    const Array<int>& entries = graph.entries();
+    const Array<int>& neighbors = graph.neighbors();
+
+    int result
+      = ParMETIS_V3_PartKway(&(vtxdist[0]), &(entries[0]), &(neighbors[0]),
+                             NULL, NULL, &wgtflag, &numflag,
+                             &ncon, &npart, &(tpwgts[0]), &(ubvec[0]),
+                             &(options[0]), &edgecut, &(part[0]), &parmetis_comm);
+    if (result == METIS_ERROR) {
+      perr() << "Metis Error\n";
+      std::exit(1);
+    }
+  }
+
+  return part;
+}
 
 #else // PASTIS_HAS_MPI
 
-Partitioner::Partitioner() {}
+Array<int> Partitioner::partition(const CSRGraph& graph)
+{
+  return Array<int>(0);
+}
 
 #endif // PASTIS_HAS_MPI
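As a companion to the rewritten partition(), here is a standalone sketch of the full ParMETIS_V3_PartKway calling convention on a hard-coded 6-cell chain, run from a single rank over MPI_COMM_SELF; it mirrors the rank-0-only call above but is not project code. It assumes a ParMETIS installation (compile with mpicxx, link -lparmetis -lmetis); on one rank, ParMETIS should fall back to serial METIS internally.

#include <mpi.h>
#include <parmetis.h>

#include <iostream>
#include <vector>

int main(int argc, char** argv)
{
  MPI_Init(&argc, &argv);

  // CSR adjacency of the chain 0-1-2-3-4-5: cell i touches i-1 and i+1.
  std::vector<idx_t> xadj  {0, 1, 3, 5, 7, 9, 10};
  std::vector<idx_t> adjncy{1, 0, 2, 1, 3, 2, 4, 3, 5, 4};

  std::vector<idx_t> vtxdist{0, 6};  // all 6 vertices live on this rank
  idx_t wgtflag = 0;                 // no vertex or edge weights
  idx_t numflag = 0;                 // C-style (0-based) numbering
  idx_t ncon    = 1;                 // one balance constraint
  idx_t nparts  = 2;
  idx_t edgecut = 0;
  std::vector<real_t> tpwgts(ncon * nparts, 1.0 / nparts);  // equal part weights
  std::vector<real_t> ubvec{1.05};   // 5% imbalance tolerance
  std::vector<idx_t> options{0, 0, 0};
  std::vector<idx_t> part(6);

  MPI_Comm comm = MPI_COMM_SELF;     // single-rank partitioning
  int result = ParMETIS_V3_PartKway(vtxdist.data(), xadj.data(), adjncy.data(),
                                    NULL, NULL, &wgtflag, &numflag,
                                    &ncon, &nparts, tpwgts.data(), ubvec.data(),
                                    options.data(), &edgecut, part.data(), &comm);

  if (result == METIS_OK) {
    for (std::size_t i = 0; i < part.size(); ++i) {
      std::cout << "cell " << i << " -> part " << part[i] << '\n';
    }
  }

  MPI_Finalize();
  return 0;
}

Note how vtxdist collapses to {0, n} when a single rank owns the whole graph, just like the vtxdist{0, local_number_of_cells} built on rank 0 above.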
Partitioner.hpp
 #ifndef PARTITIONER_HPP
 #define PARTITIONER_HPP
 
 #include <CSRGraph.hpp>
 
 class Partitioner
 {
  public:
-  Partitioner();
+  Partitioner() = default;
   Partitioner(const Partitioner&) = default;
   ~Partitioner() = default;
 
+  Array<int> partition(const CSRGraph& graph);
 };
 
 #endif // PARTITIONER_HPP