oomph::OomphCommunicator Class Reference

#include <communicator.h>

Public Member Functions

 OomphCommunicator ()
 Serial constructor. More...
 
 OomphCommunicator (const OomphCommunicator &communicator)
 Copy constructor. More...
 
 OomphCommunicator (const OomphCommunicator *communicator_pt)
 Pointer (copy) constructor. More...
 
 ~OomphCommunicator ()
 
void operator= (const OomphCommunicator &communicator)
 Assignment operator. More...
 
int nproc () const
 Number of processors. More...
 
int my_rank () const
 My rank (rank of the calling process). More...
 
bool operator== (const OomphCommunicator &other_comm) const
 
bool operator!= (const OomphCommunicator &other_comm) const
 

Detailed Description

An oomph-lib wrapper for the MPI_Comm communicator object. It simply stores an MPI_Comm handle and provides thin wrappers around the corresponding MPI_... functions.
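
A minimal usage sketch (not part of the generated documentation): it assumes the header can be included as shown in the include line above and uses only the members listed on this page.

#include <iostream>
#include <communicator.h>

int main()
{
  // A default-constructed communicator behaves as a serial (single-process)
  // communicator: nproc() returns 1 and my_rank() returns 0.
  oomph::OomphCommunicator comm;
  std::cout << "nproc = " << comm.nproc()
            << ", my_rank = " << comm.my_rank() << std::endl;

  // Copies wrap the same underlying communicator rather than duplicating it.
  oomph::OomphCommunicator copy(comm);
  return 0;
}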

Constructor & Destructor Documentation

◆ OomphCommunicator() [1/3]

oomph::OomphCommunicator::OomphCommunicator ( )
inline

Serial constructor.

OomphCommunicator()
#ifdef OOMPH_HAS_MPI
  : Owns_mpi_comm(false), Serial_communicator(true)
#endif
{
}

◆ OomphCommunicator() [2/3]

oomph::OomphCommunicator::OomphCommunicator ( const OomphCommunicator & communicator )
inline

Copy constructor.

OomphCommunicator(const OomphCommunicator& communicator)
#ifdef OOMPH_HAS_MPI
  : Owns_mpi_comm(false)
{
  if (communicator.serial_communicator())
  {
    Serial_communicator = true;
  }
  else
  {
    Comm = communicator.mpi_comm();
    Serial_communicator = false;
  }
}
#else
{
}
#endif

◆ OomphCommunicator() [3/3]

oomph::OomphCommunicator::OomphCommunicator ( const OomphCommunicator * communicator_pt )
inline

Pointer (copy) constructor.

OomphCommunicator(const OomphCommunicator* communicator_pt)
#ifdef OOMPH_HAS_MPI
  : Owns_mpi_comm(false)
{
  if (communicator_pt->serial_communicator())
  {
    Serial_communicator = true;
  }
  else
  {
    Comm = communicator_pt->mpi_comm();
    Serial_communicator = false;
  }
}
#else
{
}
#endif

◆ ~OomphCommunicator()

oomph::OomphCommunicator::~OomphCommunicator ( )
inline

Destructor. If MPI is enabled and this communicator owns the MPI_Comm object, MPI_Comm_free is called; otherwise nothing happens, because destruction of the underlying MPI_Comm object is the responsibility of another communicator.

~OomphCommunicator()
{
#ifdef OOMPH_HAS_MPI
  if (Owns_mpi_comm)
  {
    MPI_Comm_free(&Comm);
  }
#endif
}
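
Ownership matters when communicators are copied: the copy and pointer constructors never take ownership of the underlying MPI_Comm, so destroying a copy does not free it. A brief sketch of this behaviour (illustrative only; the function name ownership_example is made up):

#include <communicator.h>

void ownership_example(const oomph::OomphCommunicator& original)
{
  {
    // The copy shares the original's MPI_Comm but does not own it.
    oomph::OomphCommunicator copy(original);
  } // copy is destroyed here; copies never own the MPI_Comm, so
    // MPI_Comm_free is not called and 'original' remains usable.
}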

Member Function Documentation

◆ my_rank()

int oomph::OomphCommunicator::my_rank ( ) const
inline

My rank (rank of the calling process).

int my_rank() const
{
#ifdef OOMPH_HAS_MPI
  if (Serial_communicator)
  {
    return 0;
  }
  else
  {
    int My_rank = 0;
    MPI_Comm_rank(Comm, &My_rank);
    return My_rank;
  }
#else
  return 0;
#endif
}
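
A typical use of my_rank() is to restrict output to the root process. A small sketch (hypothetical usage, not taken from the library; log_on_root is a made-up helper):

#include <iostream>
#include <string>
#include <communicator.h>

// Print a message once, from the root process only.
void log_on_root(const oomph::OomphCommunicator& comm, const std::string& msg)
{
  if (comm.my_rank() == 0)
  {
    std::cout << msg << std::endl;
  }
}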

Referenced by oomph::NavierStokesSchurComplementPreconditioner::assemble_inv_press_and_veloc_mass_matrix_diagonal(), oomph::PressureBasedSolidLSCPreconditioner::assemble_mass_matrix_diagonal(), oomph::Multi_domain_functions::aux_setup_multi_domain_interaction(), oomph::LinearAlgebraDistribution::build(), oomph::CRDoubleMatrixHelpers::concatenate(), oomph::DoubleVectorHelpers::concatenate(), oomph::LinearAlgebraDistributionHelpers::concatenate(), oomph::Z2ErrorEstimator::doc_flux(), oomph::DoubleVectorHaloScheme::DoubleVectorHaloScheme(), oomph::Problem::dump(), oomph::MumpsSolver::factorise(), oomph::LinearAlgebraDistribution::first_row(), oomph::Z2ErrorEstimator::get_element_errors(), oomph::LineVisualiser::get_output_data(), oomph::Node::hanging_pt(), oomph::BlockPreconditioner< MATRIX >::internal_dof_number(), main(), oomph::LinearAlgebraDistribution::nrow_local(), parallel_test(), oomph::PitchForkHandler::PitchForkHandler(), oomph::Problem::read(), oomph::DoubleMultiVector::redistribute(), oomph::DoubleVector::redistribute(), oomph::CRDoubleMatrix::redistribute(), oomph::BlockPitchForkLinearSolver::resolve(), oomph::MemoryUsage::run_continous_top(), oomph::LineVisualiser::setup(), oomph::BlockPitchForkLinearSolver::solve(), oomph::MumpsSolver::solve(), oomph::PitchForkHandler::solve_full_system(), oomph::Problem::sparse_assemble_row_or_column_compressed_with_lists(), oomph::Problem::sparse_assemble_row_or_column_compressed_with_maps(), oomph::Problem::sparse_assemble_row_or_column_compressed_with_two_arrays(), oomph::Problem::sparse_assemble_row_or_column_compressed_with_two_vectors(), oomph::Problem::sparse_assemble_row_or_column_compressed_with_vectors_of_pairs(), oomph::DoubleVectorHelpers::split(), oomph::MemoryUsage::stop_continous_top(), and oomph::LineVisualiser::update_plot_points_coordinates().

◆ nproc()

int oomph::OomphCommunicator::nproc ( ) const
inline

Number of processors (1 for a serial communicator).

int nproc() const
{
#ifdef OOMPH_HAS_MPI
  if (Serial_communicator)
  {
    return 1;
  }
  else
  {
    int n_proc = 1;
    MPI_Comm_size(Comm, &n_proc);
    return n_proc;
  }
#else
  return 1;
#endif
}
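
nproc() and my_rank() are typically combined to partition work across processors. The sketch below shows a generic block distribution of n_row items, with the first n_row % n_proc ranks receiving one extra item each; this is an illustration only, not the convention used by oomph-lib's LinearAlgebraDistribution, and my_block is a made-up helper.

#include <algorithm>
#include <communicator.h>

// Compute this processor's share of n_row items (hypothetical helper).
void my_block(const oomph::OomphCommunicator& comm, const unsigned& n_row,
              unsigned& first_row, unsigned& nrow_local)
{
  const unsigned n_proc = static_cast<unsigned>(comm.nproc());
  const unsigned rank = static_cast<unsigned>(comm.my_rank());
  nrow_local = n_row / n_proc + (rank < n_row % n_proc ? 1 : 0);
  first_row = rank * (n_row / n_proc) + std::min(rank, n_row % n_proc);
}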

Referenced by oomph::NavierStokesSchurComplementPreconditioner::assemble_inv_press_and_veloc_mass_matrix_diagonal(), oomph::PressureBasedSolidLSCPreconditioner::assemble_mass_matrix_diagonal(), oomph::Problem::assign_eqn_numbers(), oomph::Multi_domain_functions::aux_setup_multi_domain_interaction(), oomph::LinearAlgebraDistribution::build(), oomph::Problem::calculate_continuation_derivatives_fd_helper(), oomph::Problem::calculate_continuation_derivatives_helper(), oomph::CRDoubleMatrixHelpers::concatenate(), oomph::DoubleVectorHelpers::concatenate(), oomph::LinearAlgebraDistributionHelpers::concatenate(), oomph::DoubleVectorHelpers::concatenate_without_communication(), oomph::CRDoubleMatrixHelpers::concatenate_without_communication(), oomph::HypreHelpers::create_HYPRE_Matrix(), oomph::DoubleVectorHaloScheme::DoubleVectorHaloScheme(), oomph::SuperLUSolver::factorise(), oomph::MumpsSolver::factorise(), oomph::LinearAlgebraDistribution::first_row(), oomph::CRDoubleMatrixHelpers::gershgorin_eigenvalue_estimate(), oomph::Multi_domain_functions::get_dim_helper(), oomph::Problem::get_eigenproblem_matrices(), oomph::Z2ErrorEstimator::get_element_errors(), oomph::Problem::get_jacobian(), oomph::LineVisualiser::get_output_data(), oomph::Problem::get_residuals(), oomph::CRDoubleMatrix::global_matrix(), oomph::HypreInterface::hypre_matrix_setup(), oomph::CRDoubleMatrixHelpers::inf_norm(), oomph::BlockPreconditioner< MATRIX >::internal_get_block(), oomph::MGSolver< DIM >::interpolation_matrix_set(), oomph::HelmholtzMGPreconditioner< DIM >::interpolation_matrix_set(), main(), oomph::Problem::newton_solve_continuation(), oomph::LinearAlgebraDistribution::nrow_local(), oomph::LinearAlgebraDistribution::operator==(), oomph::DoubleMultiVector::output(), oomph::DoubleVector::output(), parallel_test(), oomph::PitchForkHandler::PitchForkHandler(), oomph::DoubleMultiVector::redistribute(), oomph::DoubleVector::redistribute(), oomph::CRDoubleMatrix::redistribute(), oomph::MGSolver< DIM >::self_test(), oomph::LineVisualiser::setup(), oomph::MGSolver< DIM >::setup_mg_structures(), oomph::HelmholtzMGPreconditioner< DIM >::setup_mg_structures(), oomph::MGSolver< DIM >::setup_smoothers(), oomph::HelmholtzMGPreconditioner< DIM >::setup_smoothers(), oomph::DenseLU::solve(), oomph::HSL_MA42::solve(), oomph::SuperLUSolver::solve(), oomph::MumpsSolver::solve(), oomph::SuperLUSolver::solve_transpose(), oomph::DoubleVectorHelpers::split(), oomph::DoubleVectorHelpers::split_without_communication(), and oomph::LineVisualiser::update_plot_points_coordinates().

◆ operator!=()

bool oomph::OomphCommunicator::operator!= ( const OomphCommunicator & other_comm ) const
inline

The != operator returns the negation of operator== (see operator== for details).

bool operator!=(const OomphCommunicator& other_comm) const
{
  return !(*this == other_comm);
}

◆ operator=()

void oomph::OomphCommunicator::operator= ( const OomphCommunicator & communicator )
inline

Assignment operator.

void operator=(const OomphCommunicator& communicator)
{
#ifdef OOMPH_HAS_MPI
  if (Owns_mpi_comm)
  {
    MPI_Comm_free(&Comm);
  }
  Owns_mpi_comm = false;
  if (communicator.serial_communicator())
  {
    Serial_communicator = true;
  }
  else
  {
    Serial_communicator = false;
    Comm = communicator.mpi_comm();
  }
#endif
}

◆ operator==()

bool oomph::OomphCommunicator::operator== ( const OomphCommunicator & other_comm ) const
inline

The == operator returns true only if the communicators are MPI_IDENT, i.e. if both the group and the context are identical.

bool operator==(const OomphCommunicator& other_comm) const
{
#ifdef OOMPH_HAS_MPI
  if (Serial_communicator != other_comm.serial_communicator())
  {
    return false;
  }
  else if (Serial_communicator)
  {
    return true;
  }
  else
  {
    int flag;
    MPI_Comm_compare(Comm, other_comm.mpi_comm(), &flag);
    if (flag == MPI_IDENT)
    {
      return true;
    }
    return false;
  }
#else
  return true;
#endif
}
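
Because copies wrap the same underlying MPI_Comm handle, a copy compares equal to the original (MPI_Comm_compare reports MPI_IDENT for identical handles); a communicator with the same group but a different context, e.g. one produced by MPI_Comm_dup, would compare unequal. A small sketch (illustrative; compare_example is a made-up helper):

#include <cassert>
#include <communicator.h>

void compare_example(const oomph::OomphCommunicator& comm)
{
  oomph::OomphCommunicator copy(comm);
  assert(copy == comm);    // same underlying communicator (or both serial)
  assert(!(copy != comm)); // operator!= is simply the negation of operator==
}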

The documentation for this class was generated from the following file:
communicator.h