/[escript]/branches/subworld2/escriptcore/src/MPIScalarReducer.cpp
ViewVC logotype

Contents of /branches/subworld2/escriptcore/src/MPIScalarReducer.cpp

Parent Directory Parent Directory | Revision Log Revision Log


Revision 5504 - (show annotations)
Wed Mar 4 22:58:13 2015 UTC (4 years, 1 month ago) by jfenwick
File size: 5830 byte(s)
Again with a more up to date copy


1 /*****************************************************************************
2 *
3 * Copyright (c) 2014-2015 by University of Queensland
4 * http://www.uq.edu.au
5 *
6 * Primary Business: Queensland, Australia
7 * Licensed under the Open Software License version 3.0
8 * http://www.opensource.org/licenses/osl-3.0.php
9 *
10 * Development until 2012 by Earth Systems Science Computational Center (ESSCC)
11 * Development 2012-2013 by School of Earth Sciences
12 * Development from 2014 by Centre for Geoscience Computing (GeoComp)
13 *
14 *****************************************************************************/
15
16 #define ESNEEDPYTHON
17 #include "esysUtils/first.h"
18
19
20 #include <sstream>
21 #include <limits>
22 #include <boost/python/extract.hpp>
23 #include <boost/scoped_array.hpp>
24
25 #include "MPIScalarReducer.h"
26 #include "SplitWorldException.h"
27
28 using namespace boost::python;
29 using namespace escript;
30
31
32 namespace escript
33 {
34
35 Reducer_ptr makeScalarReducer(std::string type)
36 {
37 MPI_Op op;
38 if (type=="SUM")
39 {
40 op=MPI_SUM;
41 }
42 else if (type=="MAX")
43 {
44 op=MPI_MAX;
45 }
46 else if (type=="MIN")
47 {
48 op=MPI_MIN;
49 }
50 else
51 {
52 throw SplitWorldException("Unsupported operation for makeScalarReducer.");
53 }
54 MPIScalarReducer* m=new MPIScalarReducer(op);
55 return Reducer_ptr(m);
56 }
57
58
59 }
60
61 namespace
62 {
63
64 void combineDouble(double& d1, const double d2, MPI_Op op)
65 {
66 if (op==MPI_SUM)
67 {
68 d1+=d2;
69 }
70 else if (op==MPI_MAX)
71 {
72 d1=(d2>d1)?d2:d1;
73 }
74 else if (op==MPI_MIN)
75 {
76 d1=(d2<d1)?d2:d1;
77 }
78 }
79 }
80
81
82 MPIScalarReducer::MPIScalarReducer(MPI_Op op)
83 : reduceop(op)
84 {
85 valueadded=false;
86 if (op==MPI_SUM) // why not switch? because we don't know MPI_Op is scalar
87 {
88 identity=0;
89 }
90 else if (op==MPI_MAX)
91 {
92 identity=std::numeric_limits<double>::min();
93 }
94 else if (op==MPI_MIN)
95 {
96 identity=std::numeric_limits<double>::max();
97 }
98 else
99 {
100 throw SplitWorldException("Unsupported MPI_Op");
101 }
102 }
103
// Required by the reducer interface; a plain scalar is not associated
// with any domain, so there is nothing to store here.
void MPIScalarReducer::setDomain(escript::Domain_ptr d)
{
    // deliberately left blank
}
108
109 std::string MPIScalarReducer::description()
110 {
111 std::string op;
112 if (reduceop==MPI_SUM)
113 {
114 op="SUM";
115 }
116 else if (reduceop==MPI_MAX)
117 {
118 op="MAX";
119 }
120 else if (reduceop==MPI_MIN)
121 {
122 op="MIN";
123 }
124 else
125 {
126 throw SplitWorldException("Unsupported MPI reduction operation");
127 }
128 return "Reducer("+op+") for double scalars";
129 }
130
131 bool MPIScalarReducer::valueCompatible(boost::python::object v)
132 {
133 extract<double> ex(v);
134 if (!ex.check())
135 {
136 return false;
137 }
138 return true;
139 }
140
141
142 bool MPIScalarReducer::reduceLocalValue(boost::python::object v, std::string& errstring)
143 {
144 extract<double> ex(v);
145 if (!ex.check())
146 {
147 errstring="reduceLocalValue: expected double value. Got something else.";
148 return false;
149 }
150 if (!valueadded) // first value so answer becomes this one
151 {
152 value=ex();
153 valueadded=true;
154 }
155 else
156 {
157 combineDouble(value, ex(), reduceop);
158 }
159 return true;
160 }
161
162 void MPIScalarReducer::reset()
163 {
164 valueadded=false;
165 value=0;
166 }
167
// Double scalars are always mutually compatible across processes,
// so no communication or checking is needed here.
bool MPIScalarReducer::checkRemoteCompatibility(esysUtils::JMPI& mpi_info, std::string& errstring)
{
    return true;
}
172
173 // By the time this function is called, we know that all the values
174 // are compatible
175 bool MPIScalarReducer::reduceRemoteValues(esysUtils::JMPI& mpi_info, bool active)
176 {
177 #ifdef ESYS_MPI
178 if (!active)
179 {
180 value=identity;
181 }
182 std::cout << "Value in " << value << std::endl;
183 if (MPI_Allreduce(&value, &value, 1, MPI_DOUBLE, reduceop, mpi_info->comm)!=MPI_SUCCESS)
184 {
185 return false;
186 }
187 std::cout << "Value out " << value << std::endl;
188 return true;
189 #else
190 return true;
191 #endif
192 }
193
// populate a vector of ints with enough information to ensure two values are compatible
// or to construct a container for incoming data
// Format for this:
// [0] Type of Data: {0 : error, 1: DataEmpty, 10: constant, 11:tagged, 12:expanded}
// [1] Functionspace type code
// [2] Only used for tagged --- gives the number of tags (which exist in the data object)
// [3..6] Components of the shape
// NOTE(review): the format described above appears to belong to the Data-object
// reducer; a plain double scalar needs no such information, so only a single
// (unset) entry is provided here.
void MPIScalarReducer::getCompatibilityInfo(std::vector<unsigned>& params)
{
    params.resize(1);	// in case someone tries to do something with it
}
205
206
207 // Get a value for this variable from another process
208 // This is not a reduction and will replace any existing value
209 bool MPIScalarReducer::recvFrom(Esys_MPI_rank localid, Esys_MPI_rank source, esysUtils::JMPI& mpiinfo)
210 {
211 #ifdef ESYS_MPI
212 MPI_Status stat;
213 if (MPI_Recv(&value, 1, MPI_DOUBLE, source, PARAMTAG, mpiinfo->comm, &stat)!=MPI_SUCCESS)
214 {
215 return false;
216 }
217 #endif
218 return true;
219 }
220
221 // Send a value to this variable to another process
222 // This is not a reduction and will replace any existing value
223 bool MPIScalarReducer::sendTo(Esys_MPI_rank localid, Esys_MPI_rank target, esysUtils::JMPI& mpiinfo)
224 {
225 #ifdef ESYS_MPI
226 if (MPI_Send(&value, 1, MPI_DOUBLE, target, PARAMTAG, mpiinfo->comm)!=MPI_SUCCESS)
227 {
228 return false;
229 }
230 #endif
231 return true;
232 }
233
// Return the locally held value (no communication or reduction happens here).
double MPIScalarReducer::getDouble()
{
    return value;
}
238
239
240 boost::python::object MPIScalarReducer::getPyObj()
241 {
242 boost::python::object o(value);
243 return o;
244 }
245
246 #ifdef ESYS_MPI
247
248 // send from proc 0 in the communicator to all others
249 bool MPIScalarReducer::groupSend(MPI_Comm& com)
250 {
251 if (MPI_Bcast(&value, 1, MPI_DOUBLE, 0, com)==MPI_SUCCESS)
252 {
253 valueadded=true;
254 return true;
255 }
256 return false;
257 }
258
259 bool MPIScalarReducer::groupReduce(MPI_Comm& com, char mystate)
260 {
261 double answer=0;
262 if (MPI_Allreduce((mystate==reducerstatus::NEW)?&value:&identity, &answer, 1, MPI_DOUBLE, reduceop, com)==MPI_SUCCESS)
263 {
264 value=answer;
265 valueadded=true;
266 return true;
267 }
268 return false;
269 }
270
271 #endif

  ViewVC Help
Powered by ViewVC 1.1.26