OP  0.1
OP is an optimization solver plugin package
op::mpi Namespace Reference

template MPI namespace More...

Namespaces

 detail
 MPI-related type traits.
 

Functions

int getRank (MPI_Comm comm=MPI_COMM_WORLD)
 Get rank.
 
int getNRanks (MPI_Comm comm=MPI_COMM_WORLD)
 Get number of ranks.
 
template<typename T >
std::enable_if_t<!(detail::has_data<T>::value && detail::has_size<T>::value), int> Allreduce (T &local, T &global, MPI_Op operation, MPI_Comm comm=MPI_COMM_WORLD)
 All reduce a single element across all ranks in a communicator. More...
 
template<typename T >
std::enable_if_t<(detail::has_data<T>::value && detail::has_size<T>::value), int> Allreduce (T &local, T &global, MPI_Op operation, MPI_Comm comm=MPI_COMM_WORLD)
 All reduce std::collections across all ranks in a communicator. More...
 
template<typename T >
std::enable_if_t<!(detail::has_data<T>::value && detail::has_size<T>::value), int> Broadcast (T &buf, int root=0, MPI_Comm comm=MPI_COMM_WORLD)
 Broadcast a single element to all ranks on the communicator. More...
 
template<typename T >
std::enable_if_t<(detail::has_data<T>::value && detail::has_size<T>::value), int> Broadcast (T &buf, int root=0, MPI_Comm comm=MPI_COMM_WORLD)
 Broadcast a vector to all ranks on the communicator. More...
 
template<typename T >
int Allgatherv (T &buf, T &values_on_rank, std::vector< int > &size_on_rank, std::vector< int > &offsets_on_rank, MPI_Comm comm=MPI_COMM_WORLD)
 Gathers a local collection from all ranks onto all ranks on a communicator. More...
 
template<typename T >
int Gatherv (T &buf, T &values_on_rank, std::vector< int > &size_on_rank, std::vector< int > &offsets_on_rank, int root=0, MPI_Comm comm=MPI_COMM_WORLD)
 Gathers a local collection from all ranks onto the root rank only. More...
 
template<typename T >
int Scatterv (T &sendbuf, std::vector< int > &variables_per_rank, std::vector< int > &offsets, T &recvbuff, int root=0, MPI_Comm comm=MPI_COMM_WORLD)
 MPI_Scatterv on std::collections. Sends only portions of sendbuf to each rank. More...
 
template<typename T >
int Irecv (T &buf, int send_rank, MPI_Request *request, int tag=0, MPI_Comm comm=MPI_COMM_WORLD)
 Receive a buffer from a specified rank and create a handle for the MPI_Request. More...
 
template<typename T >
int Isend (T &buf, int recv_rank, MPI_Request *request, int tag=0, MPI_Comm comm=MPI_COMM_WORLD)
 Send a buffer to a specified rank and create a handle for the MPI_Request. More...
 
int Waitall (std::vector< MPI_Request > &requests, std::vector< MPI_Status > &status)
 A wrapper to MPI_Waitall to wait for all the requests to be fulfilled. More...
 
int CreateAndSetErrorHandler (MPI_Errhandler &newerr, void(*err)(MPI_Comm *comm, int *err,...), MPI_Comm comm=MPI_COMM_WORLD)
 

Detailed Description

template MPI namespace

Function Documentation

template<typename T >
int op::mpi::Allgatherv ( T &  buf,
T &  values_on_rank,
std::vector< int > &  size_on_rank,
std::vector< int > &  offsets_on_rank,
MPI_Comm  comm = MPI_COMM_WORLD 
)

Gathers a local collection from all ranks onto all ranks on a communicator.

Parameters
[in]  buf              rank-local std::collection to gather
[out] values_on_rank   the globally-collected std::collection
[in]  size_on_rank     Number of variables per rank
[in]  offsets_on_rank  Offsets in values_on_rank corresponding to a given rank
[in]  comm             MPI Communicator

Definition at line 137 of file op_mpi.hpp.
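
A minimal usage sketch, assuming (as the parameters suggest) that the caller pre-sizes values_on_rank and supplies size_on_rank together with its exclusive scan in offsets_on_rank; both are built here with plain MPI calls, and the (rank + 1)-element payload is illustrative only:

  #include <mpi.h>
  #include <numeric>
  #include <vector>
  #include "op_mpi.hpp"

  int main(int argc, char** argv)
  {
    MPI_Init(&argc, &argv);
    int rank   = op::mpi::getRank();
    int nranks = op::mpi::getNRanks();

    // Each rank contributes (rank + 1) values.
    std::vector<double> local(rank + 1, static_cast<double>(rank));

    // Gather the per-rank sizes so every rank can build the offsets.
    std::vector<int> size_on_rank(nranks);
    int local_size = static_cast<int>(local.size());
    MPI_Allgather(&local_size, 1, MPI_INT, size_on_rank.data(), 1, MPI_INT, MPI_COMM_WORLD);

    // offsets_on_rank is the exclusive scan of size_on_rank.
    std::vector<int> offsets_on_rank(nranks, 0);
    std::partial_sum(size_on_rank.begin(), size_on_rank.end() - 1, offsets_on_rank.begin() + 1);

    // Pre-size the destination and gather everything onto every rank.
    std::vector<double> values_on_rank(std::accumulate(size_on_rank.begin(), size_on_rank.end(), 0));
    op::mpi::Allgatherv(local, values_on_rank, size_on_rank, offsets_on_rank);

    MPI_Finalize();
    return 0;
  }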

template<typename T >
std::enable_if_t<!(detail::has_data<T>::value && detail::has_size<T>::value), int> op::mpi::Allreduce ( T &  local,
T &  global,
MPI_Op  operation,
MPI_Comm  comm = MPI_COMM_WORLD 
)

All reduce a single element across all ranks in a communicator.

Parameters
[in]  local      element contribution to reduce
[out] global     element to reduce to
[in]  operation  MPI_Op
[in]  comm       MPI communicator

Definition at line 75 of file op_mpi.hpp.
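
A minimal sketch of this single-element overload, assuming the wrapper maps T to the matching MPI datatype internally; the objective-value names are illustrative:

  #include <mpi.h>
  #include "op_mpi.hpp"

  int main(int argc, char** argv)
  {
    MPI_Init(&argc, &argv);
    // Rank-local contribution to a global objective value.
    double local_obj  = 0.5 * op::mpi::getRank();
    double global_obj = 0.0;
    // double has neither data() nor size(), so this single-element overload is selected.
    op::mpi::Allreduce(local_obj, global_obj, MPI_SUM);
    MPI_Finalize();
    return 0;
  }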

template<typename T >
std::enable_if_t<(detail::has_data<T>::value && detail::has_size<T>::value), int> op::mpi::Allreduce ( T &  local,
T &  global,
MPI_Op  operation,
MPI_Comm  comm = MPI_COMM_WORLD 
)

All reduce std::collections across all ranks in a communicator.

Parameters
[in]  local      std::collection contribution to reduce
[out] global     std::collection to reduce to
[in]  operation  MPI_Op
[in]  comm       MPI communicator

Definition at line 91 of file op_mpi.hpp.
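
A minimal sketch of the std::collection overload, assuming the destination must be pre-sized to the same length as the local contribution; the gradient names are illustrative:

  #include <mpi.h>
  #include <vector>
  #include "op_mpi.hpp"

  int main(int argc, char** argv)
  {
    MPI_Init(&argc, &argv);
    // Rank-local gradient contribution; every rank uses the same length.
    std::vector<double> local_grad(10, 1.0);
    std::vector<double> global_grad(local_grad.size(), 0.0);  // pre-sized destination
    // std::vector provides data() and size(), so the std::collection overload is selected.
    op::mpi::Allreduce(local_grad, global_grad, MPI_SUM);
    MPI_Finalize();
    return 0;
  }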

template<typename T >
std::enable_if_t<!(detail::has_data<T>::value && detail::has_size<T>::value), int> op::mpi::Broadcast ( T &  buf,
int  root = 0,
MPI_Comm  comm = MPI_COMM_WORLD 
)

Broadcast a single element to all ranks on the communicator.

Parameters
[in]  buf   element to broadcast
[in]  root  Root rank
[in]  comm  MPI communicator

Definition at line 106 of file op_mpi.hpp.
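
A minimal sketch of the single-element overload; the variable name is illustrative:

  #include <mpi.h>
  #include "op_mpi.hpp"

  int main(int argc, char** argv)
  {
    MPI_Init(&argc, &argv);
    // Only the root knows the value before the call; all ranks hold it afterwards.
    int n_design_variables = (op::mpi::getRank() == 0) ? 42 : 0;
    op::mpi::Broadcast(n_design_variables);  // root defaults to 0
    MPI_Finalize();
    return 0;
  }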

template<typename T >
std::enable_if_t<(detail::has_data<T>::value && detail::has_size<T>::value), int> op::mpi::Broadcast ( T &  buf,
int  root = 0,
MPI_Comm  comm = MPI_COMM_WORLD 
)

Broadcast a vector to all ranks on the communicator.

Parameters
[in]  buf   std::collection to broadcast
[in]  root  Root rank
[in]  comm  MPI communicator

Definition at line 120 of file op_mpi.hpp.
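
A minimal sketch of the std::collection overload, assuming every rank sizes the buffer to the broadcast length before the call:

  #include <mpi.h>
  #include <vector>
  #include "op_mpi.hpp"

  int main(int argc, char** argv)
  {
    MPI_Init(&argc, &argv);
    // All ranks size the buffer to the broadcast length; the root fills it.
    std::vector<double> design(5, 0.0);
    if (op::mpi::getRank() == 0) {
      design = {1.0, 2.0, 3.0, 4.0, 5.0};
    }
    op::mpi::Broadcast(design);  // broadcast from root rank 0
    MPI_Finalize();
    return 0;
  }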

template<typename T >
int op::mpi::Gatherv ( T &  buf,
T &  values_on_rank,
std::vector< int > &  size_on_rank,
std::vector< int > &  offsets_on_rank,
int  root = 0,
MPI_Comm  comm = MPI_COMM_WORLD 
)

Gathers a local collection from all ranks onto the root rank only.

Parameters
[in]  buf              rank-local std::collection to gather
[out] values_on_rank   the globally-collected std::collection
[in]  size_on_rank     Number of variables per rank
[in]  offsets_on_rank  Offsets in values_on_rank corresponding to a given rank
[in]  root             root rank
[in]  comm             MPI Communicator

Definition at line 157 of file op_mpi.hpp.
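
A minimal sketch, assuming that (as with MPI_Gatherv) size_on_rank, offsets_on_rank, and the pre-sized values_on_rank are only significant on the root; the payload is illustrative only:

  #include <mpi.h>
  #include <numeric>
  #include <vector>
  #include "op_mpi.hpp"

  int main(int argc, char** argv)
  {
    MPI_Init(&argc, &argv);
    int rank   = op::mpi::getRank();
    int nranks = op::mpi::getNRanks();

    // Each rank contributes (rank + 1) values.
    std::vector<double> local(rank + 1, static_cast<double>(rank));

    // Sizes, offsets, and the full receive buffer only matter on the root.
    std::vector<int> size_on_rank(nranks), offsets_on_rank(nranks, 0);
    int local_size = static_cast<int>(local.size());
    MPI_Gather(&local_size, 1, MPI_INT, size_on_rank.data(), 1, MPI_INT, 0, MPI_COMM_WORLD);

    std::vector<double> values_on_rank;
    if (rank == 0) {
      std::partial_sum(size_on_rank.begin(), size_on_rank.end() - 1, offsets_on_rank.begin() + 1);
      values_on_rank.resize(std::accumulate(size_on_rank.begin(), size_on_rank.end(), 0));
    }
    op::mpi::Gatherv(local, values_on_rank, size_on_rank, offsets_on_rank);  // root defaults to 0

    MPI_Finalize();
    return 0;
  }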

template<typename T >
int op::mpi::Irecv ( T &  buf,
int  send_rank,
MPI_Request *  request,
int  tag = 0,
MPI_Comm  comm = MPI_COMM_WORLD 
)

Receive a buffer from a specified rank and create a handle for the MPI_Request.

Parameters
[out] buf        std::collection to receive into
[in]  send_rank  The rank sending the information
[out] request    the MPI request handle
[in]  tag        A tag to identify the communication message
[in]  comm       MPI communicator

Definition at line 197 of file op_mpi.hpp.
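
A minimal paired sketch, run with at least two ranks: rank 1 posts the non-blocking receive into a pre-sized buffer while rank 0 posts the matching Isend, and each side completes its request with plain MPI_Wait:

  #include <mpi.h>
  #include <vector>
  #include "op_mpi.hpp"

  int main(int argc, char** argv)
  {
    MPI_Init(&argc, &argv);
    int rank = op::mpi::getRank();
    std::vector<double> buf(3, static_cast<double>(rank));
    MPI_Request request;
    MPI_Status  status;
    if (rank == 0) {
      op::mpi::Isend(buf, /*recv_rank=*/1, &request);  // matching non-blocking send
      MPI_Wait(&request, &status);
    } else if (rank == 1) {
      op::mpi::Irecv(buf, /*send_rank=*/0, &request);  // post the non-blocking receive
      MPI_Wait(&request, &status);                     // buf is safe to read once completed
    }
    MPI_Finalize();
    return 0;
  }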

template<typename T >
int op::mpi::Isend ( T &  buf,
int  recv_rank,
MPI_Request *  request,
int  tag = 0,
MPI_Comm  comm = MPI_COMM_WORLD 
)

Send a buffer to a specified rank and create a handle for the MPI_Request.

Parameters
[in]  buf        std::collection to send
[in]  recv_rank  The rank receiving the data
[out] request    the MPI request handle
[in]  tag        A tag to identify the communication message
[in]  comm       MPI communicator

Definition at line 214 of file op_mpi.hpp.

template<typename T >
int op::mpi::Scatterv ( T &  sendbuf,
std::vector< int > &  variables_per_rank,
std::vector< int > &  offsets,
T &  recvbuff,
int  root = 0,
MPI_Comm  comm = MPI_COMM_WORLD 
)

MPI_Scatterv on std::collections. Sends only portions of sendbuf to each rank.

Parameters
[in]  sendbuf             the buffer to send
[in]  variables_per_rank  the number of variables each rank will receive
[in]  offsets             the exclusive scan of variables_per_rank
[out] recvbuff            the receive buffer with the proper size

Definition at line 175 of file op_mpi.hpp.
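
A minimal sketch, assuming that offsets is the exclusive scan of variables_per_rank (as documented), that sendbuf is only significant on the root, and that each rank pre-sizes its receive buffer; the (r + 1)-variable partition is illustrative only:

  #include <mpi.h>
  #include <numeric>
  #include <vector>
  #include "op_mpi.hpp"

  int main(int argc, char** argv)
  {
    MPI_Init(&argc, &argv);
    int rank   = op::mpi::getRank();
    int nranks = op::mpi::getNRanks();

    // The root hands (r + 1) variables to each rank r.
    std::vector<int> variables_per_rank(nranks), offsets(nranks, 0);
    for (int r = 0; r < nranks; ++r) variables_per_rank[r] = r + 1;
    std::partial_sum(variables_per_rank.begin(), variables_per_rank.end() - 1, offsets.begin() + 1);

    std::vector<double> sendbuf;  // only significant on the root
    if (rank == 0) {
      sendbuf.resize(std::accumulate(variables_per_rank.begin(), variables_per_rank.end(), 0), 1.0);
    }
    std::vector<double> recvbuf(variables_per_rank[rank]);  // pre-sized receive buffer
    op::mpi::Scatterv(sendbuf, variables_per_rank, offsets, recvbuf);  // root defaults to 0

    MPI_Finalize();
    return 0;
  }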

int op::mpi::Waitall ( std::vector< MPI_Request > &  requests,
std::vector< MPI_Status > &  status 
)

A wrapper to MPI_Waitall to wait for all the requests to be fulfilled.

Parameters
[in]  requests  A vector of MPI_Request handles
[in]  status    A vector of MPI_Status entries, one for each of the handles

Definition at line 227 of file op_mpi.hpp.
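
A minimal sketch combining Isend, Irecv, and Waitall, run with at least two ranks: rank 0 posts one non-blocking receive per remote rank, every other rank posts a single send to rank 0, and each side completes its outstanding requests with one Waitall call:

  #include <mpi.h>
  #include <vector>
  #include "op_mpi.hpp"

  int main(int argc, char** argv)
  {
    MPI_Init(&argc, &argv);
    int rank   = op::mpi::getRank();
    int nranks = op::mpi::getNRanks();

    std::vector<MPI_Request> requests;
    std::vector<double> send_buf(4, static_cast<double>(rank));
    std::vector<std::vector<double>> recv_bufs(nranks, std::vector<double>(4));

    if (rank == 0) {
      // Rank 0 posts one non-blocking receive per remote rank.
      requests.resize(nranks - 1);
      for (int r = 1; r < nranks; ++r) {
        op::mpi::Irecv(recv_bufs[r], r, &requests[r - 1]);
      }
    } else {
      // Every other rank posts a single non-blocking send to rank 0.
      requests.resize(1);
      op::mpi::Isend(send_buf, /*recv_rank=*/0, &requests[0]);
    }

    // Block until every outstanding request on this rank has completed.
    std::vector<MPI_Status> statuses(requests.size());
    op::mpi::Waitall(requests, statuses);

    MPI_Finalize();
    return 0;
  }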