diff --git a/src/utils/CMakeLists.txt b/src/utils/CMakeLists.txt
index 2289a91d5f74c566a18af0f9fbcc8bf85ed844bb..1cce13c8daf68e2c61ceeeecb44dab4e3f2bf4e7 100644
--- a/src/utils/CMakeLists.txt
+++ b/src/utils/CMakeLists.txt
@@ -4,6 +4,7 @@ add_library(
   PugsUtils
   BuildInfo.cpp
   BacktraceManager.cpp
+  CommunicatorManager.cpp
   ConsoleManager.cpp
   Demangle.cpp
   Exceptions.cpp
diff --git a/src/utils/CommunicatorManager.cpp b/src/utils/CommunicatorManager.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..511257c4bb10f941ca592d2500e55b32ef45ec7d
--- /dev/null
+++ b/src/utils/CommunicatorManager.cpp
@@ -0,0 +1,27 @@
+#include <utils/CommunicatorManager.hpp>
+
+#include <utils/PugsAssert.hpp>
+
+std::optional<int> CommunicatorManager::s_split_color;
+
+bool
+CommunicatorManager::hasSplitColor()
+{
+  return s_split_color.has_value();
+}
+
+int
+CommunicatorManager::splitColor()
+{
+  Assert(s_split_color.has_value(), "split color has not been defined");
+
+  return s_split_color.value();
+}
+
+void
+CommunicatorManager::setSplitColor(int split_color)
+{
+  Assert(not s_split_color.has_value(), "split color has already been defined");
+
+  s_split_color = split_color;
+}
diff --git a/src/utils/CommunicatorManager.hpp b/src/utils/CommunicatorManager.hpp
new file mode 100644
index 0000000000000000000000000000000000000000..df0ea8dba61b7ab51f9dd5819a9a20c9eca684ec
--- /dev/null
+++ b/src/utils/CommunicatorManager.hpp
@@ -0,0 +1,17 @@
+#ifndef COMMUNICATOR_MANAGER_HPP
+#define COMMUNICATOR_MANAGER_HPP
+
+#include <optional>
+
+class CommunicatorManager
+{
+ private:
+  static std::optional<int> s_split_color;
+
+ public:
+  static bool hasSplitColor();
+  static int splitColor();
+  static void setSplitColor(int split_color);
+};
+
+#endif   // COMMUNICATOR_MANAGER_HPP
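
Note: CommunicatorManager is a set-once static holder: setSplitColor() may be
called at most once (before the Messenger is created) and the color can then be
queried freely. A minimal usage sketch, assuming only the header above:

    #include <utils/CommunicatorManager.hpp>

    #include <iostream>

    int
    main()
    {
      if (not CommunicatorManager::hasSplitColor()) {
        CommunicatorManager::setSplitColor(1);   // a second call would trigger the Assert
      }
      std::cout << CommunicatorManager::splitColor() << '\n';   // prints 1
      return 0;
    }
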
diff --git a/src/utils/Messenger.cpp b/src/utils/Messenger.cpp
index 66f178d3eaead9e9bdabf8aca081045bb39a0d30..05dfa43e5a22f5223fd6c1e8291302c5366f5015 100644
--- a/src/utils/Messenger.cpp
+++ b/src/utils/Messenger.cpp
@@ -1,5 +1,7 @@
 #include <utils/Messenger.hpp>
 
+#include <utils/CommunicatorManager.hpp>
+
 #include <iostream>
 
 namespace parallel
@@ -31,53 +33,38 @@ Messenger::Messenger([[maybe_unused]] int& argc, [[maybe_unused]] char* argv[])
 #ifdef PUGS_HAS_MPI
   MPI_Init(&argc, &argv);
 
-  const char* coupled_color = std::getenv("PUGS_COUPLED_COLOR");
-  if (coupled_color == NULL) {
-    m_comm_world_pugs = MPI_COMM_WORLD;
-  } else {
-    int color = atoi(coupled_color);
-    int global_rank;
-    int global_size;
+  if (CommunicatorManager::hasSplitColor()) {
     int key = 0;
 
-    MPI_Comm_rank(MPI_COMM_WORLD, &global_rank);
-    MPI_Comm_size(MPI_COMM_WORLD, &global_size);
-
-    auto res = MPI_Comm_split(MPI_COMM_WORLD, color, key, &m_comm_world_pugs);
+    auto res = MPI_Comm_split(MPI_COMM_WORLD, CommunicatorManager::splitColor(), key, &m_pugs_comm_world);
     if (res) {
       MPI_Abort(MPI_COMM_WORLD, res);
     }
-
-    int local_rank;
-    int local_size;
-
-    MPI_Comm_rank(m_comm_world_pugs, &local_rank);
-    MPI_Comm_size(m_comm_world_pugs, &local_size);
-
-    std::cout << "----------------- " << rang::fg::green << "pugs coupled info " << rang::fg::reset
-              << " ----------------------" << '\n';
-    std::cout << "Coupling mode activated";
-    std::cout << "\n\t Global size: " << global_size;
-    std::cout << "\n\t Global rank: " << global_rank + 1;
-    std::cout << "\n\t Coupled color: " << color;
-    std::cout << "\n\t local size: " << local_size;
-    std::cout << "\n\t local rank: " << local_rank + 1 << std::endl;
-    std::cout << "---------------------------------------" << '\n';
   }
 
   m_rank = [&]() {
     int rank;
-    MPI_Comm_rank(m_comm_world_pugs, &rank);
+    MPI_Comm_rank(m_pugs_comm_world, &rank);
     return rank;
   }();
 
   m_size = [&]() {
     int size = 0;
-    MPI_Comm_size(m_comm_world_pugs, &size);
+    MPI_Comm_size(m_pugs_comm_world, &size);
     return size;
   }();
 
-  std::cout << "pugs process " << m_rank + 1 << "/" << m_size << std::endl;
+  m_global_rank = [&]() {
+    int rank;
+    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+    return rank;
+  }();
+
+  m_global_size = [&]() {
+    int size = 0;
+    MPI_Comm_size(MPI_COMM_WORLD, &size);
+    return size;
+  }();
 
   if (m_rank != 0) {
     // LCOV_EXCL_START
@@ -100,7 +87,7 @@ void
 Messenger::barrier() const
 {
 #ifdef PUGS_HAS_MPI
-  MPI_Barrier(m_comm_world_pugs);
+  MPI_Barrier(m_pugs_comm_world);
 #endif   // PUGS_HAS_MPI
 }
 
diff --git a/src/utils/Messenger.hpp b/src/utils/Messenger.hpp
index 692ffed84d1645a5cb55a09496f9d7da5a33f2d6..2fb6af56c0b610b7312cec7b3b69d98d14a6545e 100644
--- a/src/utils/Messenger.hpp
+++ b/src/utils/Messenger.hpp
@@ -87,9 +87,15 @@ class Messenger
   static Messenger* m_instance;
   Messenger(int& argc, char* argv[]);
 
+  MPI_Comm m_pugs_comm_world = MPI_COMM_WORLD;
+
   size_t m_rank{0};
   size_t m_size{1};
 
+  // Rank and size of the process in the whole MPI_COMM_WORLD
+  size_t m_global_rank{0};
+  size_t m_global_size{1};
+
   template <typename DataType>
   void
   _gather(const DataType& data, Array<DataType> gather, size_t rank) const
@@ -102,7 +108,7 @@ class Messenger
 
     auto gather_address = (gather.size() > 0) ? &(gather[0]) : nullptr;
 
-    MPI_Gather(&data, 1, mpi_datatype, gather_address, 1, mpi_datatype, rank, m_comm_world_pugs);
+    MPI_Gather(&data, 1, mpi_datatype, gather_address, 1, mpi_datatype, rank, m_pugs_comm_world);
 #else    // PUGS_HAS_MPI
     gather[0] = data;
 #endif   // PUGS_HAS_MPI
@@ -131,7 +137,7 @@ class Messenger
     auto gather_address = (gather_array.size() > 0) ? &(gather_array[0]) : nullptr;
 
     MPI_Gather(data_address, data_array.size(), mpi_datatype, gather_address, data_array.size(), mpi_datatype, rank,
-               m_comm_world_pugs);
+               m_pugs_comm_world);
 #else    // PUGS_HAS_MPI
     copy_to(data_array, gather_array);
 #endif   // PUGS_HAS_MPI
@@ -172,7 +178,7 @@ class Messenger
     auto gather_address    = (gather_array.size() > 0) ? &(gather_array[0]) : nullptr;
 
     MPI_Gatherv(data_address, data_array.size(), mpi_datatype, gather_address, sizes_address, positions_address,
-                mpi_datatype, rank, m_comm_world_pugs);
+                mpi_datatype, rank, m_pugs_comm_world);
 #else    // PUGS_HAS_MPI
     copy_to(data_array, gather_array);
 #endif   // PUGS_HAS_MPI
@@ -188,7 +194,7 @@ class Messenger
 #ifdef PUGS_HAS_MPI
     MPI_Datatype mpi_datatype = Messenger::helper::mpiType<DataType>();
 
-    MPI_Allgather(&data, 1, mpi_datatype, &(gather[0]), 1, mpi_datatype, m_comm_world_pugs);
+    MPI_Allgather(&data, 1, mpi_datatype, &(gather[0]), 1, mpi_datatype, m_pugs_comm_world);
 #else    // PUGS_HAS_MPI
     gather[0] = data;
 #endif   // PUGS_HAS_MPI
@@ -217,7 +223,7 @@ class Messenger
     auto gather_address = (gather_array.size() > 0) ? &(gather_array[0]) : nullptr;
 
     MPI_Allgather(data_address, data_array.size(), mpi_datatype, gather_address, data_array.size(), mpi_datatype,
-                  m_comm_world_pugs);
+                  m_pugs_comm_world);
 #else    // PUGS_HAS_MPI
     copy_to(data_array, gather_array);
 #endif   // PUGS_HAS_MPI
@@ -257,7 +263,7 @@ class Messenger
     auto gather_address    = (gather_array.size() > 0) ? &(gather_array[0]) : nullptr;
 
     MPI_Allgatherv(data_address, data_array.size(), mpi_datatype, gather_address, sizes_address, positions_address,
-                   mpi_datatype, m_comm_world_pugs);
+                   mpi_datatype, m_pugs_comm_world);
 #else    // PUGS_HAS_MPI
     copy_to(data_array, gather_array);
 #endif   // PUGS_HAS_MPI
@@ -273,7 +279,7 @@ class Messenger
 #ifdef PUGS_HAS_MPI
     MPI_Datatype mpi_datatype = Messenger::helper::mpiType<DataType>();
 
-    MPI_Bcast(&data, 1, mpi_datatype, root_rank, m_comm_world_pugs);
+    MPI_Bcast(&data, 1, mpi_datatype, root_rank, m_pugs_comm_world);
 #endif   // PUGS_HAS_MPI
   }
 
@@ -289,7 +295,7 @@ class Messenger
     auto array_address = (array.size() > 0) ? &(array[0]) : nullptr;
 
     MPI_Datatype mpi_datatype = Messenger::helper::mpiType<DataType>();
-    MPI_Bcast(array_address, array.size(), mpi_datatype, root_rank, m_comm_world_pugs);
+    MPI_Bcast(array_address, array.size(), mpi_datatype, root_rank, m_pugs_comm_world);
 #endif   // PUGS_HAS_MPI
   }
 
@@ -318,7 +324,7 @@ class Messenger
     auto sent_address = (sent_array.size() > 0) ? &(sent_array[0]) : nullptr;
     auto recv_address = (recv_array.size() > 0) ? &(recv_array[0]) : nullptr;
 
-    MPI_Alltoall(sent_address, count, mpi_datatype, recv_address, count, mpi_datatype, m_comm_world_pugs);
+    MPI_Alltoall(sent_address, count, mpi_datatype, recv_address, count, mpi_datatype, m_pugs_comm_world);
 #else    // PUGS_HAS_MPI
     copy_to(sent_array, recv_array);
 #endif   // PUGS_HAS_MPI
@@ -348,7 +354,7 @@ class Messenger
       const auto sent_array = sent_array_list[i_send];
       if (sent_array.size() > 0) {
         MPI_Request request;
-        MPI_Isend(&(sent_array[0]), sent_array.size(), mpi_datatype, i_send, 0, m_comm_world_pugs, &request);
+        MPI_Isend(&(sent_array[0]), sent_array.size(), mpi_datatype, i_send, 0, m_pugs_comm_world, &request);
         request_list.push_back(request);
       }
     }
@@ -357,7 +363,7 @@ class Messenger
       auto recv_array = recv_array_list[i_recv];
       if (recv_array.size() > 0) {
         MPI_Request request;
-        MPI_Irecv(&(recv_array[0]), recv_array.size(), mpi_datatype, i_recv, 0, m_comm_world_pugs, &request);
+        MPI_Irecv(&(recv_array[0]), recv_array.size(), mpi_datatype, i_recv, 0, m_pugs_comm_world, &request);
         request_list.push_back(request);
       }
     }
@@ -399,9 +405,6 @@ class Messenger
   }
 
  public:
-#ifdef PUGS_HAS_MPI
-  MPI_Comm m_comm_world_pugs = MPI_COMM_NULL;
-#endif   // PUGS_HAS_MPI
   static void create(int& argc, char* argv[]);
   static void destroy();
 
@@ -420,22 +423,40 @@ class Messenger
     return m_rank;
   }
 
-#ifdef PUGS_HAS_MPI
   PUGS_INLINE
-  const MPI_Comm&
-  comm() const
+  const size_t&
+  size() const
   {
-    return m_comm_world_pugs;
+    return m_size;
   }
-#endif   // PUGS_HAS_MPI
 
+  // The global rank is the rank in the whole MPI_COMM_WORLD; one
+  // generally needs rank() for classical parallelism
   PUGS_INLINE
   const size_t&
-  size() const
+  globalRank() const
   {
-    return m_size;
+    return m_global_rank;
   }
 
+  // The global size is the size of the whole MPI_COMM_WORLD; one
+  // generally needs size() for classical parallelism
+  PUGS_INLINE
+  const size_t&
+  globalSize() const
+  {
+    return m_global_size;
+  }
+
+#ifdef PUGS_HAS_MPI
+  PUGS_INLINE
+  const MPI_Comm&
+  comm() const
+  {
+    return m_pugs_comm_world;
+  }
+#endif   // PUGS_HAS_MPI
+
   void barrier() const;
 
   template <typename DataType>
@@ -450,7 +471,7 @@ class Messenger
     MPI_Datatype mpi_datatype = Messenger::helper::mpiType<DataType>();
 
     DataType min_data = data;
-    MPI_Allreduce(&data, &min_data, 1, mpi_datatype, MPI_MIN, m_comm_world_pugs);
+    MPI_Allreduce(&data, &min_data, 1, mpi_datatype, MPI_MIN, m_pugs_comm_world);
 
     return min_data;
 #else    // PUGS_HAS_MPI
@@ -468,7 +489,7 @@ class Messenger
     MPI_Datatype mpi_datatype = Messenger::helper::mpiType<DataType>();
 
     DataType max_data = data;
-    MPI_Allreduce(&data, &max_data, 1, mpi_datatype, MPI_LAND, m_comm_world_pugs);
+    MPI_Allreduce(&data, &max_data, 1, mpi_datatype, MPI_LAND, m_pugs_comm_world);
 
     return max_data;
 #else    // PUGS_HAS_MPI
@@ -486,7 +507,7 @@ class Messenger
     MPI_Datatype mpi_datatype = Messenger::helper::mpiType<DataType>();
 
     DataType max_data = data;
-    MPI_Allreduce(&data, &max_data, 1, mpi_datatype, MPI_LOR, m_comm_world_pugs);
+    MPI_Allreduce(&data, &max_data, 1, mpi_datatype, MPI_LOR, m_pugs_comm_world);
 
     return max_data;
 #else    // PUGS_HAS_MPI
@@ -506,7 +527,7 @@ class Messenger
     MPI_Datatype mpi_datatype = Messenger::helper::mpiType<DataType>();
 
     DataType max_data = data;
-    MPI_Allreduce(&data, &max_data, 1, mpi_datatype, MPI_MAX, m_comm_world_pugs);
+    MPI_Allreduce(&data, &max_data, 1, mpi_datatype, MPI_MAX, m_pugs_comm_world);
 
     return max_data;
 #else    // PUGS_HAS_MPI
@@ -526,7 +547,7 @@ class Messenger
       MPI_Datatype mpi_datatype = Messenger::helper::mpiType<DataType>();
 
       DataType data_sum = data;
-      MPI_Allreduce(&data, &data_sum, 1, mpi_datatype, MPI_SUM, m_comm_world_pugs);
+      MPI_Allreduce(&data, &data_sum, 1, mpi_datatype, MPI_SUM, m_pugs_comm_world);
 
       return data_sum;
     } else if (is_trivially_castable<DataType>) {
@@ -535,7 +556,7 @@ class Messenger
       MPI_Datatype mpi_datatype = Messenger::helper::mpiType<InnerDataType>();
       const int size            = sizeof(DataType) / sizeof(InnerDataType);
       DataType data_sum         = data;
-      MPI_Allreduce(&data, &data_sum, size, mpi_datatype, MPI_SUM, m_comm_world_pugs);
+      MPI_Allreduce(&data, &data_sum, size, mpi_datatype, MPI_SUM, m_pugs_comm_world);
 
       return data_sum;
     }
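
Note: after this change rank()/size() refer to the (possibly split) pugs
communicator, while globalRank()/globalSize() always refer to MPI_COMM_WORLD;
the two pairs only differ when a split color was set. A sketch of the intended
distinction (printIdentity is a hypothetical helper, not part of this diff):

    #include <utils/Messenger.hpp>

    #include <iostream>

    void
    printIdentity(const parallel::Messenger& messenger)
    {
      std::cout << "local rank " << messenger.rank() + 1 << '/' << messenger.size()
                << ", global rank " << messenger.globalRank() + 1 << '/'
                << messenger.globalSize() << '\n';
    }
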
diff --git a/src/utils/PETScWrapper.cpp b/src/utils/PETScWrapper.cpp
index 18b16aa1435c2f27dc4d850c0727ddb4e259acb0..fb7fc932a2b5c3013d3831fb22bbbaa997d09bb0 100644
--- a/src/utils/PETScWrapper.cpp
+++ b/src/utils/PETScWrapper.cpp
@@ -1,5 +1,6 @@
 #include <utils/PETScWrapper.hpp>
 
+#include <utils/Messenger.hpp>
 #include <utils/pugs_config.hpp>
 
 #ifdef PUGS_HAS_PETSC
@@ -12,6 +13,7 @@ void
 initialize([[maybe_unused]] int& argc, [[maybe_unused]] char* argv[])
 {
 #ifdef PUGS_HAS_PETSC
+  PETSC_COMM_WORLD = parallel::Messenger::getInstance().comm();
   PetscOptionsSetValue(NULL, "-no_signal_handler", "true");
   PetscOptionsSetValue(NULL, "-fp_trap", "false");
   PetscInitialize(&argc, &argv, 0, 0);
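
Note: PETSC_COMM_WORLD is a global that PETSc reads during PetscInitialize(),
so the assignment above only takes effect if it runs after MPI_Init() and
before PetscInitialize(). That is exactly the order PugsUtils.cpp below now
guarantees, sketched from the calls visible in this diff:

    parallel::Messenger::create(argc, argv);   // MPI_Init, then optional MPI_Comm_split
    PETScWrapper::initialize(argc, argv);      // sets PETSC_COMM_WORLD, then PetscInitialize
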
diff --git a/src/utils/PugsUtils.cpp b/src/utils/PugsUtils.cpp
index 6c78e42955dd4bdf84a49ec886e1b60aa20fbd65..2ece76cb2732e28354e7ff9282536d34dbab644b 100644
--- a/src/utils/PugsUtils.cpp
+++ b/src/utils/PugsUtils.cpp
@@ -2,6 +2,7 @@
 
 #include <utils/BacktraceManager.hpp>
 #include <utils/BuildInfo.hpp>
+#include <utils/CommunicatorManager.hpp>
 #include <utils/ConsoleManager.hpp>
 #include <utils/FPEManager.hpp>
 #include <utils/Messenger.hpp>
@@ -11,9 +12,6 @@
 #include <utils/SignalManager.hpp>
 #include <utils/pugs_build_info.hpp>
 
-#ifdef PUGS_HAS_PETSC
-#include <petsc.h>
-#endif   // PUGS_HAS_PETSC
 #include <rang.hpp>
 
 #include <Kokkos_Core.hpp>
@@ -83,7 +81,6 @@ setDefaultOMPEnvironment()
 std::string
 initialize(int& argc, char* argv[])
 {
-  parallel::Messenger::create(argc, argv);
   bool enable_fpe     = true;
   bool enable_signals = true;
 
@@ -120,24 +117,31 @@ initialize(int& argc, char* argv[])
     bool pause_on_error = false;
     app.add_flag("-p,--pause-on-error", pause_on_error, "Pause for debugging on unexpected error [default: false]");
 
+    int mpi_split_color = -1;
+    app.add_option("--mpi-split-color", mpi_split_color, "Sets the MPI split color value (for MPMD applications)")
+      ->check(CLI::Range(0, std::numeric_limits<decltype(mpi_split_color)>::max()));
+
     std::atexit([]() { std::cout << rang::style::reset; });
     try {
       app.parse(argc, argv);
     }
     catch (const CLI::ParseError& e) {
+      // Trick to avoid each rank repeating the same error message in parallel
+      parallel::Messenger::create(argc, argv);
       parallel::Messenger::destroy();
       std::exit(app.exit(e, std::cout, std::cerr));
     }
 
+    if (app.count("--mpi-split-color") > 0) {
+      CommunicatorManager::setSplitColor(mpi_split_color);
+    }
+
     BacktraceManager::setShow(show_backtrace);
     ConsoleManager::setShowPreamble(show_preamble);
     ConsoleManager::init(enable_color);
     SignalManager::setPauseForDebug(pause_on_error);
   }
-
-#ifdef PUGS_HAS_PETSC
-  PETSC_COMM_WORLD = parallel::Messenger::getInstance().comm();
-#endif   // PUGS_HAS_PETSC
+  parallel::Messenger::create(argc, argv);
 
   PETScWrapper::initialize(argc, argv);
   SLEPcWrapper::initialize(argc, argv);
@@ -154,7 +158,14 @@ initialize(int& argc, char* argv[])
 
     std::cout << rang::style::bold;
 #ifdef PUGS_HAS_MPI
-    std::cout << "MPI number of ranks " << parallel::size() << '\n';
+    if (CommunicatorManager::hasSplitColor()) {
+      std::cout << "MPI number of global ranks " << parallel::Messenger::getInstance().globalSize() << '\n';
+      std::cout << "MPI local pugs communication world\n";
+      std::cout << " - number of local ranks  " << parallel::size() << '\n';
+      std::cout << " - local comm world color " << CommunicatorManager::splitColor() << '\n';
+    } else {
+      std::cout << "MPI number of ranks " << parallel::size() << '\n';
+    }
 #else    // PUGS_HAS_MPI
     std::cout << "Sequential build\n";
 #endif   // PUGS_HAS_MPI
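
Note: with the new --mpi-split-color option, coupled codes can share
MPI_COMM_WORLD while each computes in its own sub-communicator. A hypothetical
MPMD launch (file names are invented; the colon syntax is the usual mpirun MPMD
form, but the exact spelling depends on the MPI implementation):

    mpirun -n 2 ./pugs --mpi-split-color 0 first.pgs : -n 4 ./pugs --mpi-split-color 1 second.pgs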