diff --git a/apps/benchmarks/UniformGrid/UniformGrid.cpp b/apps/benchmarks/UniformGrid/UniformGrid.cpp
index 62fbb5b343f46dc2551e754a5dc5ced25a1eb334..0324c7190e3cd1692541a433d33992ab4c16f260 100644
--- a/apps/benchmarks/UniformGrid/UniformGrid.cpp
+++ b/apps/benchmarks/UniformGrid/UniformGrid.cpp
@@ -263,7 +263,7 @@ void createSetupBlockForest( blockforest::SetupBlockForest & sforest, const Conf
 
       WALBERLA_MPI_SECTION()
       {
-         if ( ! MPIManager::instance()->isCartesianCommValid() )
+         if ( MPIManager::instance()->isCartesianCommValid() )
          {
             MPIManager::instance()->createCartesianComm(numberOfXProcesses, numberOfYProcesses, numberOfZProcesses, false, false, false);
 
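Note on this hunk: before the patch, isCartesianCommValid() returned true exactly for the OpenMPI releases on which the 3D Cartesian communicator is broken, so callers wanting a working communicator had to negate it. The patch inverts the predicate to match its name (see the MPIManager.cpp hunks at the end) and un-negates every call site accordingly. A minimal sketch of the resulting call-site pattern; the else branch lies outside the hunk, and its fallback to the world communicator via useWorldComm() is an assumption here:

   if ( MPIManager::instance()->isCartesianCommValid() )
   {
      // this MPI implementation handles 3D Cartesian communicators correctly
      MPIManager::instance()->createCartesianComm( numberOfXProcesses, numberOfYProcesses,
                                                   numberOfZProcesses, false, false, false );
   }
   else
   {
      MPIManager::instance()->useWorldComm(); // assumed: stay on MPI_COMM_WORLD
   }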
diff --git a/src/blockforest/Initialization.cpp b/src/blockforest/Initialization.cpp
index d05b9521ba3e3a3e20f45afeab979808a7b998f5..c13896c87c7966d3adc9422828d1006f7be7aa6c 100644
--- a/src/blockforest/Initialization.cpp
+++ b/src/blockforest/Initialization.cpp
@@ -229,7 +229,7 @@ createBlockForest(      const AABB& domainAABB,
-      //create cartesian communicator only if not yet a cartesian communicator (or other communicator was created)
+      // create the Cartesian communicator only if no communicator (Cartesian or any other) has been created yet
       if ( ! mpiManager->rankValid() )
       {
-         if ( ! mpiManager->isCartesianCommValid() ) {
+         if ( mpiManager->isCartesianCommValid() ) {
             mpiManager->createCartesianComm( numberOfXProcesses, numberOfYProcesses, numberOfZProcesses, xPeriodic, yPeriodic, zPeriodic );
 
             processIdMap = new std::vector< uint_t >( numberOfProcesses );
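Same inversion at this call site. For orientation: the trailing context line allocates processIdMap, which the code below this hunk presumably fills with the Cartesian rank of every (x, y, z) process coordinate. A plain-MPI illustration of such a coordinate-to-rank table (buildProcessIdMap and its parameters are invented for this sketch, not waLBerla's exact code):

   #include <mpi.h>

   #include <cstddef>
   #include <vector>

   std::vector< int > buildProcessIdMap( MPI_Comm cartComm, int px, int py, int pz )
   {
      std::vector< int > map( static_cast< std::size_t >( px ) * py * pz );
      for( int z = 0; z < pz; ++z )
         for( int y = 0; y < py; ++y )
            for( int x = 0; x < px; ++x )
            {
               int coords[3] = { x, y, z };
               int rank = -1;
               MPI_Cart_rank( cartComm, coords, &rank ); // Cartesian coordinates -> rank
               map[ ( static_cast< std::size_t >( z ) * py + y ) * px + x ] = rank;
            }
      return map;
   }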
diff --git a/src/core/mpi/MPIManager.cpp b/src/core/mpi/MPIManager.cpp
index b3705407b478f3a2139c96eace1cebff81b3f940..72861cbd109a91321099f9e95771c88376a8a28c 100644
--- a/src/core/mpi/MPIManager.cpp
+++ b/src/core/mpi/MPIManager.cpp
@@ -158,7 +158,7 @@ void MPIManager::createCartesianComm( int dims[3], int periodicity[3] )
    WALBERLA_ASSERT_GREATER( dims[1], 0 );
    WALBERLA_ASSERT_GREATER( dims[2], 0 );
 
-   if ( isCartesianCommValid() ) {
+   if ( ! isCartesianCommValid() ) {
       WALBERLA_LOG_WARNING_ON_ROOT( "Your version of OpenMPI contains a bug which might lead to a segmentation fault "
                                     "when generating vtk output. Since the bug only occurs with a 3D Cartesian MPI "
                                     "communicator, try to use MPI_COMM_WORLD instead. See waLBerla issue #73 for "
@@ -242,13 +242,13 @@ bool MPIManager::isCartesianCommValid() const
 
       if (ompi_ver == "2.0.0" || ompi_ver == "2.0.1" || ompi_ver == "2.0.2" || ompi_ver == "2.0.3" ||
           ompi_ver == "2.1.0" || ompi_ver == "2.1.1") {
-         return true;
+         return false;
       }
       else {
-         return false;
+         return true;
       }
    #else
-      return false;
+      return true;
    #endif
}
 
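On the createCartesianComm() hunk above: after the fix, the warning fires exactly when the Cartesian communicator must be avoided, instead of the other way around. For reference, the guarded operation is the creation of a 3D Cartesian communicator, sketched here in plain MPI; on the affected OpenMPI releases its later use during vtk output can segfault (waLBerla issue #73):

   MPI_Comm cartComm = MPI_COMM_NULL;
   int dims[3]        = { 2, 2, 2 }; // hypothetical 2x2x2 process grid
   int periodicity[3] = { 0, 0, 0 }; // non-periodic in all three directions
   MPI_Cart_create( MPI_COMM_WORLD, 3, dims, periodicity, /*reorder*/ 1, &cartComm );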
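With the return values corrected, isCartesianCommValid() finally reads naturally: only the affected OpenMPI releases (2.0.0 through 2.0.3, 2.1.0 and 2.1.1) report the Cartesian communicator as invalid; every other version, and any non-OpenMPI implementation (the #else branch), reports it as usable. The ompi_ver string compared above is built before this hunk, presumably from OpenMPI's version macros along these lines (ompiVersion is an invented name for the sketch):

   #include <string>

   std::string ompiVersion()
   {
   #if defined(OMPI_MAJOR_VERSION) && defined(OMPI_MINOR_VERSION) && defined(OMPI_RELEASE_VERSION)
      return std::to_string( OMPI_MAJOR_VERSION ) + "." +
             std::to_string( OMPI_MINOR_VERSION ) + "." +
             std::to_string( OMPI_RELEASE_VERSION );
   #else
      return ""; // not OpenMPI
   #endif
   }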