Commit 1d1e91a2 authored by Stéphane Del Pino

Add new CLI option to allow parallel output

parent f87a0fb1
1 merge request: !188 Add new CLI option to allow parallel output
@@ -9,10 +9,10 @@ namespace parallel
 Messenger* Messenger::m_instance = nullptr;
 
 void
-Messenger::create(int& argc, char* argv[])
+Messenger::create(int& argc, char* argv[], bool parallel_output)
 {
   if (Messenger::m_instance == nullptr) {
-    Messenger::m_instance = new Messenger(argc, argv);
+    Messenger::m_instance = new Messenger(argc, argv, parallel_output);
   } else {
     throw UnexpectedError("Messenger already created");
   }
@@ -28,7 +28,7 @@ Messenger::destroy()
   }
 }
 
-Messenger::Messenger([[maybe_unused]] int& argc, [[maybe_unused]] char* argv[])
+Messenger::Messenger([[maybe_unused]] int& argc, [[maybe_unused]] char* argv[], bool parallel_output)
 {
 #ifdef PUGS_HAS_MPI
   MPI_Init(&argc, &argv);
@@ -66,7 +66,7 @@ Messenger::Messenger([[maybe_unused]] int& argc, [[maybe_unused]] char* argv[])
     return size;
   }();
 
-  if (m_rank != 0) {
+  if ((not parallel_output) and (m_rank != 0)) {
     // LCOV_EXCL_START
     std::cout.setstate(std::ios::badbit);
     std::cerr.setstate(std::ios::badbit);
...
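For context on the hunk above: once std::ios::badbit is set on a stream, every subsequent insertion fails silently, which is how output from non-zero ranks is normally discarded; the new flag simply skips that step. A minimal standalone sketch of the mechanism (plain C++, hypothetical rank value, not code from this commit):

// Sketch only: demonstrates that setting badbit silences later << calls.
#include <iostream>

int main()
{
  const int rank             = 1;      // hypothetical MPI rank for illustration
  const bool parallel_output = false;  // mimics the new option's default

  if ((not parallel_output) and (rank != 0)) {
    std::cout.setstate(std::ios::badbit);   // subsequent insertions are ignored
    std::cerr.setstate(std::ios::badbit);
  }

  // printed only by rank 0 unless parallel output is enabled
  std::cout << "hello from rank " << rank << '\n';
  return 0;
}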
@@ -85,7 +85,7 @@ class Messenger
   };
 
   static Messenger* m_instance;
-  Messenger(int& argc, char* argv[]);
+  Messenger(int& argc, char* argv[], bool parallel_output);
 
 #ifdef PUGS_HAS_MPI
   MPI_Comm m_pugs_comm_world = MPI_COMM_WORLD;
@@ -406,7 +406,7 @@ class Messenger
   }
 
  public:
-  static void create(int& argc, char* argv[]);
+  static void create(int& argc, char* argv[], bool parallel_output = false);
   static void destroy();
 
   PUGS_INLINE
...
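Note on the header change above: create() gains the parallel_output parameter with a default of false, so pre-existing call sites compile unchanged and keep the old rank-0-only behaviour. A simplified, illustrative declaration (not the actual pugs header):

// Illustration only: the defaulted parameter keeps old calls valid.
class Messenger
{
 private:
  Messenger(int& argc, char* argv[], bool parallel_output);

 public:
  static void create(int& argc, char* argv[], bool parallel_output = false);
  static void destroy();
};

// Both call forms are valid:
//   Messenger::create(argc, argv);                   // old call sites -> parallel_output == false
//   Messenger::create(argc, argv, parallel_output);  // the new call in initialize()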
@@ -88,6 +88,7 @@ initialize(int& argc, char* argv[])
   bool enable_fpe     = true;
   bool enable_signals = true;
   int nb_threads      = -1;
+  bool parallel_output = false;
 
   ParallelChecker::Mode pc_mode = ParallelChecker::Mode::automatic;
   std::string pc_filename       = ParallelChecker::instance().filename();
@@ -132,6 +133,8 @@ initialize(int& argc, char* argv[])
     app.add_flag("--reproducible-sums,!--no-reproducible-sums", show_preamble,
                  "Special treatment of array sums to ensure reproducibility [default: true]");
 
+    app.add_flag("--parallel-output", parallel_output, "All MPI processes output to console [default: false]");
+
     std::map<std::string, ParallelChecker::Mode> pc_mode_map{{"auto", ParallelChecker::Mode::automatic},
                                                              {"write", ParallelChecker::Mode::write},
                                                              {"read", ParallelChecker::Mode::read}};
@@ -169,7 +172,8 @@ initialize(int& argc, char* argv[])
     SignalManager::setPauseForDebug(pause_on_error);
     ReproducibleSumManager::setReproducibleSums(reproducible_sums);
   }
-  parallel::Messenger::create(argc, argv);
+
+  parallel::Messenger::create(argc, argv, parallel_output);
 
   PETScWrapper::initialize(argc, argv);
   SLEPcWrapper::initialize(argc, argv);
...
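The option registration in initialize() follows the CLI11 add_flag pattern already used for the other flags: a boolean bound with add_flag() becomes true when the flag is present on the command line, and the value is then handed to the messenger creation call. A self-contained sketch of that flow, assuming the CLI11 library (the main() below is illustrative, not pugs code):

// Sketch only: parse a --parallel-output flag and report its value.
#include <CLI/CLI.hpp>
#include <iostream>

int main(int argc, char* argv[])
{
  CLI::App app{"parallel output flag sketch"};

  bool parallel_output = false;
  app.add_flag("--parallel-output", parallel_output,
               "All MPI processes output to console [default: false]");

  CLI11_PARSE(app, argc, argv);

  // In pugs this value is forwarded to parallel::Messenger::create(argc, argv, parallel_output);
  // here we only show the parsed result.
  std::cout << "parallel_output = " << std::boolalpha << parallel_output << '\n';
  return 0;
}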