/[escript]/branches/split/escriptcore/src/SplitWorld.cpp
Revision 4808 - Thu Mar 27 05:34:14 2014 UTC by jfenwick
File size: 10219 byte(s)
More work towards export
/*****************************************************************************
*
* Copyright (c) 2014 by University of Queensland
* http://www.uq.edu.au
*
* Primary Business: Queensland, Australia
* Licensed under the Open Software License version 3.0
* http://www.opensource.org/licenses/osl-3.0.php
*
* Development until 2012 by Earth Systems Science Computational Center (ESSCC)
* Development 2012-2013 by School of Earth Sciences
* Development from 2014 by Centre for Geoscience Computing (GeoComp)
*
*****************************************************************************/

#include "esysUtils/Esys_MPI.h"
#include "SplitWorld.h"
#include "AbstractDomain.h"
#include "SplitWorldException.h"

#include <iostream>
#include <sstream>
#include <vector>

using namespace boost::python;
using namespace escript;

SplitWorld::SplitWorld(unsigned int numgroups, MPI_Comm global)
    :localworld((SubWorld*)0), swcount(numgroups>0?numgroups:1), jobcounter(1), manualimport(false)
{
    globalcom=esysUtils::makeInfo(global);

    int grank=0;
    int wsize=1;        // each world has this many processes
#ifdef ESYS_MPI
    int gsize=globalcom->size;
    grank=globalcom->rank;
    if (gsize%swcount!=0)
    {
        throw SplitWorldException("SplitWorld error: requested number of groups is not a factor of global communicator size.");
    }
    wsize=gsize/swcount;        // each world has this many processes
    MPI_Comm sub;
    // split the supplied global communicator into swcount equal-sized groups
    int res=MPI_Comm_split(global, grank/wsize, grank%wsize, &sub);
    if (res!=MPI_SUCCESS)
    {
        throw SplitWorldException("SplitWorld error: Unable to form communicator.");
    }
    subcom=esysUtils::makeInfo(sub,true);
#else
    subcom=esysUtils::makeInfo(0);
#endif
    localworld=SubWorld_ptr(new SubWorld(subcom));
    localid=grank/wsize;
}

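// Illustrative sketch (not part of the build) of how the split above plays out.
// Assuming ESYS_MPI is defined and the program runs on 8 ranks with numgroups=2:
//   gsize=8, swcount=2  ->  wsize=4
//   ranks 0-3 get colour grank/wsize == 0, ranks 4-7 get colour 1,
//   so MPI_Comm_split produces two subcommunicators of 4 processes each,
//   and localid is 0 for the first group and 1 for the second.
// If numgroups does not divide the communicator size (e.g. 8 ranks, 3 groups),
// the constructor throws SplitWorldException rather than splitting unevenly.
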
SplitWorld::~SplitWorld()
{
    // communicator cleanup handled by the MPI_Info
}


// The boost wrapper will ensure that there is at least one entry in the tuple
object SplitWorld::buildDomains(tuple t, dict kwargs)
{
    int tsize=len(t);
    // get the callable that we will invoke in a sec
    object tocall=t[0];
    // make a new tuple without the first element
    tuple ntup=tuple(t.slice(1,tsize));
    // now add the subworld to the kwargs
    kwargs["escriptworld"]=localworld;

    // pass the whole package to the python call
    object dobj=tocall(*ntup, **kwargs);
    extract<Domain_ptr> ex1(dobj);
    if (!ex1.check())
    {
        throw SplitWorldException("The domain builder did not return a domain.");
    }
    Domain_ptr dptr=ex1();

    // now do a sanity check to see if the domain has respected the communicator info we passed it.
    if (dptr->getMPIComm()!=localworld->getMPI()->comm)
    {
        throw SplitWorldException("The newly constructed domain is not using the correct communicator.");
    }
    localworld->setDomain(dptr);
    return object();    // return None
}

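// Hypothetical Python-side usage of the raw_buildDomains wrapper defined at the
// bottom of this file (the domain factory name and its arguments are
// illustrative only):
//
//   sw = SplitWorld(2)
//   buildDomains(sw, Rectangle, 10, 10)
//
// The first tuple element must be the SplitWorld, the second the domain
// factory; remaining positional and keyword arguments are forwarded to the
// factory, with the subworld injected as the "escriptworld" keyword argument.
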

namespace
{

// Throw all values in and get the maximum
// This is not an AllReduce because we need to use Tagged Communication so as not to interfere with
// other messages / collective ops which the SubWorld's Jobs might be using
// This is implemented by sending to rank 0
bool checkResultInt(int res, int& mres, esysUtils::JMPI& info)
{
#ifdef ESYS_MPI
    const int leader=0;
    const int BIGTAG=esysUtils::getSubWorldTag();
    if (info->size==1)
    {
        mres=res;
        return true;
    }
    else
    {
        if (info->rank!=leader)
        {
            if (MPI_Send(&res, 1, MPI_INT, leader, BIGTAG, info->comm)!=MPI_SUCCESS)
            {
                return false;
            }
            if (MPI_Recv(&mres, 1, MPI_INT, leader, BIGTAG, info->comm, MPI_STATUS_IGNORE)!=MPI_SUCCESS)
            {
                return false;
            }
        }
        else
        {
            MPI_Request* reqs=new MPI_Request[info->size-1];
            int* eres=new int[info->size-1];
            for (int i=0;i<info->size-1;++i)
            {
                MPI_Irecv(eres+i, 1, MPI_INT, i+1, BIGTAG, info->comm, reqs+i);
            }
            if (MPI_Waitall(info->size-1, reqs, MPI_STATUSES_IGNORE)!=MPI_SUCCESS)
            {
                delete[] reqs;
                delete[] eres;
                return false;
            }
            // now we have them all, find the max
            mres=res;
            for (int i=0;i<info->size-1;++i)
            {
                if (mres<eres[i])
                {
                    mres=eres[i];
                }
            }
            // now we know what the result should be
            // send it to the others
            for (int i=0;i<info->size-1;++i)
            {
                MPI_Isend(&mres, 1, MPI_INT, i+1, BIGTAG, info->comm, reqs+i);
            }
            int sendres=MPI_Waitall(info->size-1, reqs, MPI_STATUSES_IGNORE);
            delete[] reqs;
            delete[] eres;
            if (sendres!=MPI_SUCCESS)
            {
                return false;
            }
        }
    }
    return true;
#else
    mres=res;
    return true;
#endif
}

}

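// Illustrative sketch of what checkResultInt computes (assuming three ranks in
// the communicator): if the local results are 1 on rank 0, 0 on rank 1 and 3
// on rank 2, ranks 1 and 2 send their values to rank 0, rank 0 takes the
// maximum (3) and sends it back, so every rank ends up with mres==3.
// runJobs below relies on this "max wins" rule: any world reporting an error
// code (>1) overrides worlds that reported normal completion (0 or 1).
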
// Executes all pending jobs on all subworlds
void SplitWorld::runJobs()
{
    distributeJobs();
    int mres=0;
    std::string err;
    std::vector<char> impexpdetail;
    do
    {
        // now we actually need to run the jobs
        // everybody will be executing their localworld's jobs
        int res=localworld->runJobs(err);

        // take this opportunity to clean up
        localworld->clearImportExports();
        // now we find out about the other worlds
        if (!checkResultInt(res, mres, globalcom))
        {
            throw SplitWorldException("MPI appears to have failed.");
        }
        if (mres>1)     // 1 and 0 are normal returns, >1 is some sort of error
        {
            break;
        }
        if (!localworld->localTransport(impexpdetail, err))
        {
            mres=4;
            break;
        }
    } while (mres==1);
    if (mres==0)
    {
        return;
    }
    else if (mres==2)
    {
        throw SplitWorldException("At least one Job's work() function did not return True/False.");
    }
    else if (mres==3)
    {
        char* resultstr=0;
        // now we ship around the error message - This should be safe since
        // everyone must have finished their Jobs to get here
        if (!esysUtils::shipString(err.c_str(), &resultstr, globalcom->comm))
        {
            throw SplitWorldException("MPI appears to have failed.");
        }
        //throw SplitWorldException("At least one Job's work() function raised an exception.");
        std::string s("At least one Job's work() function raised the following exception:\n");
        s+=resultstr;
        throw SplitWorldException(s);
    }
    else if (mres==4)
    {
        throw SplitWorldException("While processing exports: "+err);
    }
    else
    {
        throw SplitWorldException("Unexpected return value from runJobs.");
    }
}

/**
  stores the constructor/factory to make Jobs and the parameters.
*/
void SplitWorld::addJob(boost::python::object creator, boost::python::tuple tup, boost::python::dict kw)
{
    create.push_back(creator);
    tupargs.push_back(tup);
    kwargs.push_back(kw);
}

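// Hypothetical Python-side usage via the raw_addJob wrapper below (the Job
// subclass name and its arguments are illustrative only):
//
//   addJob(sw, MyJob, some_param, tolerance=1e-6)
//
// Nothing runs at this point; the factory and its arguments are queued on
// every rank, and the actual Job objects are only created in distributeJobs()
// when runJobs() is called.
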
// At some point, we may need there to be more isolation here
// and trap any python exceptions etc, but for now I'll just call the constructor
void SplitWorld::addVariable(std::string name, boost::python::object creator, boost::python::tuple ntup, boost::python::dict kwargs)
{
    object red=creator(*ntup, **kwargs);
    extract<Reducer_ptr> ex(red);
    if (!ex.check())
    {
        throw SplitWorldException("Creator function did not produce a reducer.");
    }
    Reducer_ptr rp=ex();
    localworld->addVariable(name, rp, manualimport);
}


void SplitWorld::removeVariable(std::string name)
{
    localworld->removeVariable(name);
}

void SplitWorld::clearPendingJobs()
{
    create.clear();
    tupargs.clear();
    kwargs.clear();
}

void SplitWorld::clearActiveJobs()
{
    localworld->clearJobs();
}

// All the job params are known on all the ranks.
void SplitWorld::distributeJobs()
{
    unsigned int numjobs=create.size()/swcount;
    unsigned int start=create.size()/swcount*localid;
    if (localid<create.size()%swcount)
    {
        numjobs++;
        start+=localid;
    }
    else
    {
        start+=create.size()%swcount;
    }
    int errstat=0;
    try
    {
        // No other subworld will be looking at this portion of the array
        // so jobs will only be created on one subworld
        for (unsigned int i=start;i<start+numjobs;++i)
        {
            // we need to add some things to the kw map
            kwargs[i]["domain"]=localworld->getDomain();
            kwargs[i]["jobid"]=object(jobcounter+i);
            object job=create[i](*(tupargs[i]), **(kwargs[i]));
            localworld->addJob(job);
        }
    }
    catch (boost::python::error_already_set&)
    {
        errstat=1;
    }
    jobcounter+=create.size();
    clearPendingJobs();

    // MPI check to ensure that it worked for everybody
    int mstat=0;
    if (!esysUtils::checkResult(errstat, mstat, globalcom->comm))
    {
        throw SplitWorldException("MPI appears to have failed.");
    }

    if (mstat==1)       // at least one world failed to create its jobs
    {
        clearActiveJobs();
        throw SplitWorldException("distributeJobs: Job creation failed.");
    }
}
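
// Illustrative sketch of the partitioning above (values are examples only):
// with create.size()==7 pending jobs and swcount==3 subworlds,
//   base share = 7/3 = 2 jobs per world, remainder = 7%3 = 1,
//   world 0: numjobs=3, start=0   (jobs 0,1,2)
//   world 1: numjobs=2, start=3   (jobs 3,4)
//   world 2: numjobs=2, start=5   (jobs 5,6)
// Every rank performs the same arithmetic, so no communication is needed to
// agree on which subworld builds which jobs.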


namespace escript
{

boost::python::object raw_buildDomains(boost::python::tuple t, boost::python::dict kwargs)
{
    int l=len(t);
    if (l<2)
    {
        throw SplitWorldException("Insufficient parameters to buildDomains.");
    }
    extract<SplitWorld&> exw(t[0]);
    if (!exw.check())
    {
        throw SplitWorldException("First parameter to buildDomains must be a SplitWorld.");
    }
    SplitWorld& ws=exw();
    tuple ntup=tuple(t.slice(1,l));     // strip off the splitworld param
    return ws.buildDomains(ntup, kwargs);
}

boost::python::object raw_addJob(boost::python::tuple t, boost::python::dict kwargs)
{
    int l=len(t);
    if (l<2)
    {
        throw SplitWorldException("Insufficient parameters to addJob.");
    }
    extract<SplitWorld&> exw(t[0]);
    if (!exw.check())
    {
        throw SplitWorldException("First parameter to addJob must be a SplitWorld.");
    }
    SplitWorld& ws=exw();
    object creator=t[1];
    tuple ntup=tuple(t.slice(2,l));     // strip off the splitworld and creator params
    ws.addJob(creator, ntup, kwargs);
    return object();
}

// expects splitworld, name of var, constructor function for the reducer, any constructor params
boost::python::object raw_addVariable(boost::python::tuple t, boost::python::dict kwargs)
{
    int l=len(t);
    if (l<3)
    {
        throw SplitWorldException("Insufficient parameters to addVariable.");
    }
    extract<SplitWorld&> exw(t[0]);
    if (!exw.check())
    {
        throw SplitWorldException("First parameter to addVariable must be a SplitWorld.");
    }
    SplitWorld& ws=exw();
    object pname=t[1];
    extract<std::string> ex2(pname);
    if (!ex2.check())
    {
        throw SplitWorldException("Second parameter to addVariable must be a string.");
    }
    std::string name=ex2();
    object creator=t[2];
    tuple ntup=tuple(t.slice(3,l));     // strip off the splitworld, name and creator params
    ws.addVariable(name, creator, ntup, kwargs);
    return object();
}
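
// Hypothetical Python-side usage (the reducer factory name is illustrative
// only; whichever reducer constructors the build actually exposes would be
// used here):
//
//   addVariable(sw, "total_flux", makeScalarReducer, "SUM")
//
// The creator is called immediately on every subworld and must return a
// Reducer_ptr; the resulting variable can then be exported and imported by Jobs.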


}
