EpetraExt_MultiMpiComm.cpp
//@HEADER
// ***********************************************************************
//
// EpetraExt: Epetra Extended - Linear Algebra Services Package
// Copyright (2011) Sandia Corporation
//
// Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
// the U.S. Government retains certain rights in this software.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the Corporation nor the names of the
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Questions? Contact Michael A. Heroux (maherou@sandia.gov)
//
// ***********************************************************************
//@HEADER

#include "EpetraExt_MultiMpiComm.h"
#include "Teuchos_Assert.hpp"
#include "Teuchos_VerbosityLevel.hpp"
namespace EpetraExt {

MultiMpiComm::MultiMpiComm(MPI_Comm globalMpiComm, int subDomainProcs, int numTimeSteps_,
                           const Teuchos::EVerbosityLevel verbLevel) :
  Epetra_MpiComm(globalMpiComm),
  Teuchos::VerboseObject<MultiMpiComm>(verbLevel),
  myComm(Teuchos::rcp(new Epetra_MpiComm(globalMpiComm))),
  subComm(0)
{
  Teuchos::RCP<Teuchos::FancyOStream> out = this->getOStream();

  // The default output stream only outputs to proc 0, which is not what
  // we generally want. Manually override this if necessary so we get output
  // to all processors.
  int outputRootRank = out->getOutputToRootOnly();
  if (outputRootRank >= 0) {
    out->setOutputToRootOnly(-1);
  }

  // Need to construct a subComm for each sub-domain, compute subDomainRank,
  // and check that all the integer arithmetic works out correctly.

  int size, rank;
  (void) MPI_Comm_size(globalMpiComm, &size);
  (void) MPI_Comm_rank(globalMpiComm, &rank);

  TEUCHOS_TEST_FOR_EXCEPTION(
    subDomainProcs <= 0,
    std::logic_error,
    "ERROR: num subDomainProcs " << subDomainProcs <<
    " must be strictly positive." << std::endl);

  TEUCHOS_TEST_FOR_EXCEPTION(
    size % subDomainProcs != 0,
    std::logic_error,
    "ERROR: num subDomainProcs " << subDomainProcs <<
    " does not divide into num total procs " << size << std::endl);

  numSubDomains = size / subDomainProcs;
  numTimeDomains = subDomainProcs;
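
  // Illustration (added for clarity): with size = 6 and subDomainProcs = 3,
  // numSubDomains = 2 and the ranks computed below map to
  // (subDomainRank, timeDomainRank) pairs as
  //   rank 0 -> (0,0), rank 1 -> (0,1), rank 2 -> (0,2),
  //   rank 3 -> (1,0), rank 4 -> (1,1), rank 5 -> (1,2).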

  // Create split communicators
  MPI_Comm split_MPI_Comm;
  MPI_Comm time_split_MPI_Comm;
  subDomainRank = rank / subDomainProcs;
  timeDomainRank = rank % subDomainProcs;
  (void) MPI_Comm_split(globalMpiComm, subDomainRank, rank,
                        &split_MPI_Comm);
  (void) MPI_Comm_split(globalMpiComm, timeDomainRank, rank,
                        &time_split_MPI_Comm);

  // Wrap the split MPI communicators in Epetra communicators
  subComm = new Epetra_MpiComm(split_MPI_Comm);
  timeComm = new Epetra_MpiComm(time_split_MPI_Comm);
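
  // Note: subComm and timeComm are owned raw pointers; both are
  // deleted in ~MultiMpiComm() below.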

  // Compute number of time steps on this sub domain
  ResetNumTimeSteps(numTimeSteps_);

  if (verbLevel != Teuchos::VERB_NONE) {
    if (numTimeSteps_ > 0)
      *out << "Processor " << rank << " is on subdomain " << subDomainRank
           << " and owns " << numTimeStepsOnDomain
           << " time steps, starting with "
           << firstTimeStepOnDomain << std::endl;
    else
      *out << "Processor " << rank << " is on subdomain " << subDomainRank
           << std::endl;
  }

  // Reset output flag if we changed it
  if (outputRootRank >= 0) {
    out->setOutputToRootOnly(outputRootRank);
  }
}

// This constructor is for just one subdomain, so it only adds the info
// for multiple time steps on the domain. No two-level parallelism.
MultiMpiComm::MultiMpiComm(const Epetra_MpiComm& EpetraMpiComm_, int numTimeSteps_,
                           const Teuchos::EVerbosityLevel verbLevel) :
  Epetra_MpiComm(EpetraMpiComm_),
  Teuchos::VerboseObject<MultiMpiComm>(verbLevel),
  myComm(Teuchos::rcp(new Epetra_MpiComm(EpetraMpiComm_))),
  subComm(0)
{
  numSubDomains = 1;
  subDomainRank = 0;
  numTimeSteps = numTimeSteps_;
  numTimeStepsOnDomain = numTimeSteps_;
  firstTimeStepOnDomain = 0;

  subComm = new Epetra_MpiComm(EpetraMpiComm_);

  // Create split communicator for the time domain: each process gets its
  // own color, and hence its own single-process time communicator
  MPI_Comm time_split_MPI_Comm;
  int rank = EpetraMpiComm_.MyPID();
  (void) MPI_Comm_split(EpetraMpiComm_.Comm(), rank, rank,
                        &time_split_MPI_Comm);
  timeComm = new Epetra_MpiComm(time_split_MPI_Comm);
  numTimeDomains = EpetraMpiComm_.NumProc();
  timeDomainRank = rank;
}

// Copy constructor
MultiMpiComm::MultiMpiComm(const MultiMpiComm &MMC) :
  Epetra_MpiComm(MMC),
  myComm(Teuchos::rcp(new Epetra_MpiComm(dynamic_cast<const Epetra_MpiComm&>(MMC)))),
  subComm(new Epetra_MpiComm(*MMC.subComm)),
  timeComm(new Epetra_MpiComm(*MMC.timeComm))
{
  numSubDomains = MMC.numSubDomains;
  numTimeDomains = MMC.numTimeDomains;
  subDomainRank = MMC.subDomainRank;
  timeDomainRank = MMC.timeDomainRank;

  numTimeSteps = MMC.numTimeSteps;
  numTimeStepsOnDomain = MMC.numTimeStepsOnDomain;
  firstTimeStepOnDomain = MMC.firstTimeStepOnDomain;
}

MultiMpiComm::~MultiMpiComm()
{
  delete subComm;
  delete timeComm;
}

void MultiMpiComm::ResetNumTimeSteps(int numTimeSteps_)
{
  numTimeSteps = numTimeSteps_;

  // Compute number of time steps on this sub domain
  if (numTimeSteps > 0) {
    // Compute part for number of domains dividing evenly into number of steps
    numTimeStepsOnDomain = numTimeSteps / numSubDomains;
    firstTimeStepOnDomain = numTimeStepsOnDomain * subDomainRank;
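
    // Illustration (added for clarity): numTimeSteps = 10 over
    // numSubDomains = 4 gives quotient 2 and remainder 2, so domains
    // 0..3 end up owning 3, 3, 2, 2 steps starting at steps 0, 3, 6, 8.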

    // Dole out remainder
    int remainder = numTimeSteps % numSubDomains;
    if (subDomainRank < remainder) {
      numTimeStepsOnDomain++;
      firstTimeStepOnDomain += subDomainRank;
    }
    else firstTimeStepOnDomain += remainder;
  }
  else {
    numTimeStepsOnDomain = -1;
    firstTimeStepOnDomain = -1;
  }
}

} //namespace EpetraExt
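
A minimal driver sketch (not part of the source file above) showing how the two-level communicator might be used. It assumes the accessors declared for this class in EpetraExt_MultiMpiComm.h, such as SubDomainRank(), NumTimeStepsOnDomain(), and FirstTimeStepOnDomain(); the sub-domain size of 2 and the 8 time steps are arbitrary example values.

#include <iostream>
#include <mpi.h>
#include "EpetraExt_MultiMpiComm.h"

int main(int argc, char *argv[])
{
  MPI_Init(&argc, &argv);
  {
    // Two processes per spatial sub-domain; 8 time steps are distributed
    // across the resulting sub-domains by ResetNumTimeSteps().
    EpetraExt::MultiMpiComm multiComm(MPI_COMM_WORLD, 2, 8);

    std::cout << "Rank " << multiComm.MyPID()
              << ": sub-domain " << multiComm.SubDomainRank()
              << ", owns " << multiComm.NumTimeStepsOnDomain()
              << " time steps starting at "
              << multiComm.FirstTimeStepOnDomain() << std::endl;
  } // multiComm is destroyed before MPI_Finalize
  MPI_Finalize();
  return 0;
}

Run with four MPI processes, this would yield two sub-domains of two processes each, owning time steps 0-3 and 4-7 respectively.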