walberla::mpi::MPIManager Class Reference

Detailed Description

Encapsulates MPI Rank/Communicator information.

Every process has two ranks/communicators:

World: This communicator/rank is valid after calling initializeMPI(), usually at the beginning of the program. This communicator never changes.

Custom: Can be adapted to the block structure. During the block structure setup, either the Cartesian setup must be chosen via createCartesianComm(), or the world communicator must be adopted via useWorldComm().

#include <MPIManager.h>
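
A minimal sketch of this lifecycle, with the caveat that it assumes the singleton accessor MPIManager::instance() used elsewhere in waLBerla to reach this class:

    #include <MPIManager.h>

    int main( int argc, char** argv )
    {
       using walberla::mpi::MPIManager;

       // World rank/communicator become valid here; rank() and comm()
       // stay invalid until a custom communicator is chosen below.
       MPIManager::instance()->initializeMPI( &argc, &argv );

       // Either adopt MPI_COMM_WORLD as the custom communicator ...
       MPIManager::instance()->useWorldComm();
       // ... or create a Cartesian communicator instead, e.g.:
       // MPIManager::instance()->createCartesianComm( 2, 2, 1 );

       int rank = MPIManager::instance()->rank(); // valid from here on
       (void) rank;

       MPIManager::instance()->finalizeMPI();
       return 0;
    }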

Inheritance diagram for walberla::mpi::MPIManager

Public Member Functions

 ~MPIManager ()
 
void initializeMPI (int *argc, char ***argv, bool abortOnException=true)
 Configures the class and initializes numProcesses and worldRank; the rank and comm variables remain invalid until a custom communicator is set up. More...
 
void finalizeMPI ()
 
void resetMPI ()
 
void abort ()
 
Cartesian Communicator
void createCartesianComm (int numberOfProcessors[3], int periodicity[3])
 
void createCartesianComm (const uint_t xProcesses, const uint_t yProcesses, const uint_t zProcesses, const bool xPeriodic=false, const bool yPeriodic=false, const bool zPeriodic=false)
 
void cartesianCoord (int coordOut[3]) const
 Cartesian coordinates of own rank. More...
 
void cartesianCoord (int rank, int coordOut[3]) const
 Cartesian coordinates of given rank. More...
 
int cartesianRank (int coords[3]) const
 Translates Cartesian coordinates to a rank. More...
 
int cartesianRank (const uint_t x, const uint_t y, const uint_t z) const
 Translates Cartesian coordinates to a rank. More...
 
World Communicator
void useWorldComm ()
 

Public Attributes

 WALBERLA_BEFRIEND_SINGLETON
 

Getter Functions

int worldRank () const
 
int numProcesses () const
 
int rank () const
 
MPI_Comm comm () const
 
uint_t bitsNeededToRepresentRank () const
 
bool isMPIInitialized () const
 
bool hasCartesianSetup () const
 
bool rankValid () const
 Rank is valid after calling createCartesianComm() or useWorldComm() More...
 
bool hasWorldCommSetup () const
 
bool isCommMPIIOValid () const
 Indicates whether MPI-IO can be used with the current MPI communicator; certain versions of OpenMPI produce segmentation faults when using MPI-IO with a 3D Cartesian MPI communicator (see waLBerla issue #73) More...
 

Static Public Member Functions

static std::string getMPIErrorString (int errorCode)
 
static std::string getMPICommName (MPI_Comm comm)
 

Private Member Functions

 MPIManager ()
 

Private Attributes

int worldRank_ {0}
 Rank in MPI_COMM_WORLD. More...
 
int rank_ {-1}
 Rank in the custom communicator. More...
 
int numProcesses_ {1}
 Total number of processes. More...
 
MPI_Comm comm_
 Communicator for all MPI calls; in general not equal to MPI_COMM_WORLD, and may change during domain setup, when a custom communicator adapted to the domain is created. More...
 
bool isMPIInitialized_ {false}
 Indicates whether initializeMPI has been called. If true, MPI_Finalize is called upon destruction. More...
 
bool cartesianSetup_ {false}
 Indicates whether a Cartesian communicator has been created. More...
 
bool currentlyAborting_ {false}
 
bool finalizeOnDestruction_ {false}
 

Constructor & Destructor Documentation

◆ ~MPIManager()

walberla::mpi::MPIManager::~MPIManager ( )

◆ MPIManager()

walberla::mpi::MPIManager::MPIManager ( )
inline private

Member Function Documentation

◆ abort()

void walberla::mpi::MPIManager::abort ( )

◆ bitsNeededToRepresentRank()

uint_t walberla::mpi::MPIManager::bitsNeededToRepresentRank ( ) const
inline

◆ cartesianCoord() [1/2]

void walberla::mpi::MPIManager::cartesianCoord ( int  coordOut[3]) const

Cartesian coordinates of own rank.

◆ cartesianCoord() [2/2]

void walberla::mpi::MPIManager::cartesianCoord ( int  rank,
int  coordOut[3] 
) const

Cartesian coordinates of given rank.

◆ cartesianRank() [1/2]

int walberla::mpi::MPIManager::cartesianRank ( const uint_t  x,
const uint_t  y,
const uint_t  z 
) const

Translates Cartesian coordinates to a rank.

◆ cartesianRank() [2/2]

int walberla::mpi::MPIManager::cartesianRank ( int  coords[3]) const

Translates Cartesian coordinates to a rank.
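
As an illustration, a short sketch of the round trip between ranks and Cartesian coordinates; it is only meaningful after createCartesianComm() has been called, and assumes the instance() accessor as elsewhere on this page:

    using walberla::mpi::MPIManager;

    int ownCoord[3];
    MPIManager::instance()->cartesianCoord( ownCoord );  // coordinates of this process

    int origin[3] = { 0, 0, 0 };
    int originRank = MPIManager::instance()->cartesianRank( origin ); // rank at the grid origin

    // Round trip: translating that rank back yields { 0, 0, 0 } again.
    int check[3];
    MPIManager::instance()->cartesianCoord( originRank, check );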

◆ comm()

MPI_Comm walberla::mpi::MPIManager::comm ( ) const
inline

◆ createCartesianComm() [1/2]

void walberla::mpi::MPIManager::createCartesianComm ( const uint_t  xProcesses,
const uint_t  yProcesses,
const uint_t  zProcesses,
const bool  xPeriodic = false,
const bool  yPeriodic = false,
const bool  zPeriodic = false 
)

◆ createCartesianComm() [2/2]

void walberla::mpi::MPIManager::createCartesianComm ( int  numberOfProcessors[3],
int  periodicity[3] 
)
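
Both overloads describe the same topology. A sketch, assuming the program runs with 8 MPI processes (the product of the per-axis process counts must match the total process count):

    using walberla::mpi::MPIManager;

    // uint_t overload: 2 x 2 x 2 process grid, periodic along x only.
    MPIManager::instance()->createCartesianComm( 2, 2, 2, true, false, false );

    // Equivalent int-array overload (1 = periodic, 0 = non-periodic):
    // int procs[3]       = { 2, 2, 2 };
    // int periodicity[3] = { 1, 0, 0 };
    // MPIManager::instance()->createCartesianComm( procs, periodicity );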

◆ finalizeMPI()

void walberla::mpi::MPIManager::finalizeMPI ( )

◆ getMPICommName()

std::string walberla::mpi::MPIManager::getMPICommName ( MPI_Comm  comm)
static

◆ getMPIErrorString()

std::string walberla::mpi::MPIManager::getMPIErrorString ( int  errorCode)
static

◆ hasCartesianSetup()

bool walberla::mpi::MPIManager::hasCartesianSetup ( ) const
inline

◆ hasWorldCommSetup()

bool walberla::mpi::MPIManager::hasWorldCommSetup ( ) const
inline

◆ initializeMPI()

void walberla::mpi::MPIManager::initializeMPI ( int *  argc,
char ***  argv,
bool  abortOnException = true 
)

Configures the class and initializes numProcesses and worldRank; the rank and comm variables remain invalid until a custom communicator is set up.

Parameters
    abortOnException    if true, MPI_Abort is called in case of an uncaught exception
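
For example, passing abortOnException = false leaves exception handling to the caller; a minimal sketch:

    // Uncaught exceptions propagate normally instead of
    // triggering MPI_Abort across all processes.
    walberla::mpi::MPIManager::instance()->initializeMPI( &argc, &argv, false );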

◆ isCommMPIIOValid()

bool walberla::mpi::MPIManager::isCommMPIIOValid ( ) const

Indicates whether MPI-IO can be used with the current MPI communicator; certain versions of OpenMPI produce segmentation faults when using MPI-IO with a 3D Cartesian MPI communicator (see waLBerla issue #73)
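
A typical guard before collective MPI-IO output might look as follows; writeWithMPIIO and writeSerially are hypothetical placeholders for the caller's own output routines:

    auto manager = walberla::mpi::MPIManager::instance();

    if( manager->isCommMPIIOValid() )
       writeWithMPIIO( manager->comm() ); // collective MPI-IO is safe here
    else
       writeSerially();                   // fall back, e.g. one file per rank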

◆ isMPIInitialized()

bool walberla::mpi::MPIManager::isMPIInitialized ( ) const
inline

◆ numProcesses()

int walberla::mpi::MPIManager::numProcesses ( ) const
inline

◆ rank()

int walberla::mpi::MPIManager::rank ( ) const
inline

◆ rankValid()

bool walberla::mpi::MPIManager::rankValid ( ) const
inline

Rank is valid after calling createCartesianComm() or useWorldComm()

◆ resetMPI()

void walberla::mpi::MPIManager::resetMPI ( )

◆ useWorldComm()

void walberla::mpi::MPIManager::useWorldComm ( )
inline

◆ worldRank()

int walberla::mpi::MPIManager::worldRank ( ) const
inline

Member Data Documentation

◆ cartesianSetup_

bool walberla::mpi::MPIManager::cartesianSetup_ {false}
private

Indicates whether a Cartesian communicator has been created.

◆ comm_

MPI_Comm walberla::mpi::MPIManager::comm_
private

Use this communicator for all MPI calls; it is in general not equal to MPI_COMM_WORLD and may change during domain setup, when a custom communicator adapted to the domain is created.
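
Externally, the communicator is reached through the comm() getter; raw MPI calls should use it rather than MPI_COMM_WORLD, e.g. (sketch):

    // Synchronize on the custom communicator, not on MPI_COMM_WORLD:
    MPI_Barrier( walberla::mpi::MPIManager::instance()->comm() );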

◆ currentlyAborting_

bool walberla::mpi::MPIManager::currentlyAborting_ {false}
private

◆ finalizeOnDestruction_

bool walberla::mpi::MPIManager::finalizeOnDestruction_ {false}
private

◆ isMPIInitialized_

bool walberla::mpi::MPIManager::isMPIInitialized_ {false}
private

Indicates whether initializeMPI has been called. If true, MPI_Finalize is called upon destruction.

◆ numProcesses_

int walberla::mpi::MPIManager::numProcesses_ {1}
private

Total number of processes.

◆ rank_

int walberla::mpi::MPIManager::rank_ {-1}
private

Rank in the custom communicator.

◆ WALBERLA_BEFRIEND_SINGLETON

walberla::mpi::MPIManager::WALBERLA_BEFRIEND_SINGLETON

◆ worldRank_

int walberla::mpi::MPIManager::worldRank_ {0}
private

Rank in MPI_COMM_WORLD.

