Contents of /trunk/weipa/src/EscriptDataset.cpp
Revision 3091 - Fri Aug 13 07:47:57 2010 UTC by caltinay
File size: 31041 bytes
- fixed cleanup on error when invoking escriptconvert in parallel
- fixed saving VTK files on one rank if data was loaded in chunks
- cleaned up EscriptDataset and split a couple of methods

/*******************************************************
*
* Copyright (c) 2003-2010 by University of Queensland
* Earth Systems Science Computational Center (ESSCC)
* http://www.uq.edu.au/esscc
*
* Primary Business: Queensland, Australia
* Licensed under the Open Software License version 3.0
* http://www.opensource.org/licenses/osl-3.0.php
*
*******************************************************/

#include <weipa/EscriptDataset.h>
#include <weipa/DataVar.h>
#include <weipa/ElementData.h>
#include <weipa/FinleyMesh.h>
#include <weipa/NodeData.h>

#ifndef VISIT_PLUGIN
#include <escript/Data.h>
#endif

#include <cstdio>  // for sprintf, snprintf and remove
#include <fstream>
#include <iostream>
#include <numeric> // for std::accumulate
#include <sstream>

#if USE_SILO
#include <silo.h>

#if HAVE_MPI
#include <pmpio.h>
#endif

#endif // USE_SILO

using namespace std;

namespace weipa {

const char* MESH_VARS = "mesh_vars/";
const int NUM_SILO_FILES = 1;

//
// Default constructor
//
EscriptDataset::EscriptDataset() :
    cycle(0),
    time(0.),
    externalMesh(false),
    mpiRank(0),
    mpiSize(1)
{
}

//
// Constructor with communicator
//
#if HAVE_MPI
EscriptDataset::EscriptDataset(MPI_Comm comm) :
    cycle(0),
    time(0.),
    externalMesh(false),
    mpiComm(comm)
{
    MPI_Comm_rank(mpiComm, &mpiRank);
    MPI_Comm_size(mpiComm, &mpiSize);
}
#endif

//
// Destructor
//
EscriptDataset::~EscriptDataset()
{
}

//
// Initializes the dataset from an escript domain and a list of variables
//
bool EscriptDataset::initFromEscript(
                     const escript::AbstractDomain* escriptDomain,
                     DataVec& escriptVars,
                     const StringVec& varNames)
{
    // Set the domain
    if (!setDomain(escriptDomain)) {
        return false;
    }

    // initialize variables
    DataVec::iterator varIt = escriptVars.begin();
    StringVec::const_iterator nameIt = varNames.begin();
    for (; varIt != escriptVars.end(); varIt++, nameIt++) {
        addData(*varIt, *nameIt);
    }

    return true;
}

//
// Loads domain and variables from NetCDF dump files
//
bool EscriptDataset::loadNetCDF(const string meshFilePattern,
                                const StringVec& varFiles,
                                const StringVec& varNames, int nBlocks)
{
    // Load the domain files
    if (!setDomain(meshFilePattern, nBlocks)) {
        return false;
    }

    // Load the variables
    StringVec::const_iterator fileIt = varFiles.begin();
    StringVec::const_iterator nameIt = varNames.begin();
    for (; fileIt != varFiles.end(); fileIt++, nameIt++) {
        addData(*fileIt, *nameIt);
    }

    return true;
}

//
// Load only variables using provided domain
//
bool EscriptDataset::loadNetCDF(const MeshBlocks& mesh,
                                const StringVec& varFiles,
                                const StringVec& varNames)
{
    // Set the domain
    if (!setDomain(mesh)) {
        return false;
    }

    // Load the variables
    StringVec::const_iterator fileIt = varFiles.begin();
    StringVec::const_iterator nameIt = varNames.begin();
    for (; fileIt != varFiles.end(); fileIt++, nameIt++) {
        addData(*fileIt, *nameIt);
    }

    return true;
}

//
// Writes the dataset to a Silo file
//
bool EscriptDataset::saveSilo(string fileName, bool useMultiMesh)
{
#if USE_SILO
    if (meshBlocks.size() == 0)
        return false;

    const char* blockDirFmt = "/block%04d";
    string siloPath;
    DBfile* dbfile = NULL;
#if HAVE_MPI
    PMPIO_baton_t* baton = NULL;
#endif

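    // In parallel, PMPIO serializes access to the output: the ranks are split
    // into NUM_SILO_FILES groups (a single group here) and each rank waits
    // for the baton, writes its block into its own /blockNNNN directory, then
    // hands the baton on to the next rank in the group.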
    if (mpiSize > 1) {
#if HAVE_MPI
        baton = PMPIO_Init(NUM_SILO_FILES, PMPIO_WRITE,
                    mpiComm, 0x1337, PMPIO_DefaultCreate, PMPIO_DefaultOpen,
                    PMPIO_DefaultClose, NULL);
        if (baton) {
            char str[64];
            snprintf(str, 64, blockDirFmt, PMPIO_RankInGroup(baton, mpiRank));
            siloPath = str;
            dbfile = (DBfile*) PMPIO_WaitForBaton(
                    baton, fileName.c_str(), siloPath.c_str());
        }
#endif
    } else {
        dbfile = DBCreate(fileName.c_str(), DB_CLOBBER, DB_LOCAL,
                "escriptData", DB_PDB);
    }

    if (!dbfile) {
        cerr << "Could not create Silo file." << endl;
        return false;
    }

    MeshBlocks::iterator meshIt;
    VarVector::iterator viIt;
    int idx = 0;
    for (meshIt = meshBlocks.begin(); meshIt != meshBlocks.end(); meshIt++, idx++) {
        if (mpiSize == 1) {
            char str[64];
            snprintf(str, 64, blockDirFmt, idx);
            siloPath = str;
            DBMkdir(dbfile, siloPath.c_str());
        }
        // write block of the mesh if we don't use an external mesh
        if (!externalMesh) {
            if (! (*meshIt)->writeToSilo(dbfile, siloPath)) {
                cerr << "Error writing block " << idx << " of mesh to Silo file!\n";
                break;
            }
        }

        // write variables for current mesh block
        for (viIt = variables.begin(); viIt != variables.end(); viIt++) {
            // do not attempt to write this variable if previous steps failed
            if (!(*viIt).valid) continue;
            DataVar_ptr var = (*viIt).dataBlocks[idx];
            if (!var->writeToSilo(dbfile, siloPath)) {
                cerr << "Error writing block " << idx << " of '"
                    << var->getName() << "' to Silo file!" << endl;
                (*viIt).valid = false;
            }
        }
    }

    // rank 0 writes additional data that describe how the parts fit together
    if (mpiRank == 0) {
        if (useMultiMesh) {
            const StringVec& meshNames = meshBlocks[0]->getMeshNames();
            StringVec::const_iterator it;
            for (it = meshNames.begin(); it != meshNames.end(); it++)
                putSiloMultiMesh(dbfile, *it);

            DBMkdir(dbfile, MESH_VARS);
            for (viIt = meshVariables.begin(); viIt != meshVariables.end(); viIt++)
                putSiloMultiVar(dbfile, *viIt, true);

            for (viIt = variables.begin(); viIt != variables.end(); viIt++) {
                if (!(*viIt).valid) continue;
                DataVar_ptr var = (*viIt).dataBlocks[0];
                if (var->getRank() < 2)
                    putSiloMultiVar(dbfile, *viIt);
                else
                    putSiloMultiTensor(dbfile, *viIt);
            }
        }

        vector<char*> tensorNames;
        vector<string> tensorDefStrings;
        vector<char*> tensorDefs;
        // reserve up front so the c_str() pointers collected below stay valid
        tensorDefStrings.reserve(variables.size());

        // collect tensors for their Silo definitions
        for (viIt = variables.begin(); viIt != variables.end(); viIt++) {
            if (!(*viIt).valid) continue;
            DataVar_ptr var = (*viIt).dataBlocks[0];
            if (var->getRank() == 2) {
                tensorDefStrings.push_back(var->getTensorDef());
                tensorDefs.push_back((char*)tensorDefStrings.back().c_str());
                tensorNames.push_back((char*)var->getName().c_str());
            }
        }

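        // Register the collected tensor definitions as Silo "defvars" so that
        // readers such as VisIt can present the rank-2 variables as tensors.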
        if (tensorDefs.size()) {
            DBSetDir(dbfile, "/");
            DBoptlist* optList = DBMakeOptlist(2);
            DBAddOption(optList, DBOPT_CYCLE, &cycle);
            DBAddOption(optList, DBOPT_DTIME, &time);
            vector<DBoptlist*> defOpts(tensorDefs.size(), optList);
            vector<int> defTypes(tensorDefs.size(), DB_VARTYPE_TENSOR);
            DBPutDefvars(dbfile, "tensors", tensorDefs.size(), &tensorNames[0],
                    &defTypes[0], &tensorDefs[0], &defOpts[0]);
            DBFreeOptlist(optList);
        }
    }

    if (mpiSize > 1) {
#if HAVE_MPI
        PMPIO_HandOffBaton(baton, dbfile);
        PMPIO_Finish(baton);
#endif
    } else {
        DBClose(dbfile);
    }

    return true;

#else // !USE_SILO
    return false;
#endif
}

//
// Writes the dataset to a VTK XML (unstructured grid) file
//
bool EscriptDataset::saveVTK(string fileName)
{
    if (meshBlocks.size() == 0)
        return false;

    string meshName;

    // determine mesh type and variables to write
    VarVector nodalVars, cellVars;
    VarVector::iterator viIt;
    for (viIt = variables.begin(); viIt != variables.end(); viIt++) {
        const DataBlocks& varBlocks = viIt->dataBlocks;
        // skip empty variable
        int numSamples = accumulate(viIt->sampleDistribution.begin(),
                viIt->sampleDistribution.end(), 0);
        if (numSamples == 0 || !viIt->valid) {
            continue;
        }
        string tmpName = varBlocks[0]->getMeshName();
        if (meshName != "") {
            if (meshName != tmpName) {
                cerr << "VTK supports only one mesh! Skipping variable "
                    << varBlocks[0]->getName() << " on " << tmpName << endl;
                continue;
            }
        } else {
            meshName = tmpName;
        }

        if (varBlocks[0]->isNodeCentered()) {
            nodalVars.push_back(*viIt);
        } else {
            cellVars.push_back(*viIt);
        }
    }

    // no valid variables so use default mesh
    if (meshName == "")
        meshName = "Elements";

    // add mesh variables
    for (viIt = meshVariables.begin(); viIt != meshVariables.end(); viIt++) {
        DataVar_ptr var = viIt->dataBlocks[0];
        if (meshName == var->getMeshName()) {
            VarInfo vi = *viIt;
            vi.varName = string(MESH_VARS)+vi.varName;
            if (var->isNodeCentered()) {
                nodalVars.push_back(vi);
            } else {
                cellVars.push_back(vi);
            }
        }
    }

    MeshBlocks::iterator meshIt;
    int gNumPoints;
    int gNumCells = 0;
    int gCellSizeAndType[2] = { 0, 0 };

#if HAVE_MPI
    // remove file first if it exists
    int error = 0;
    int mpiErr;
    if (mpiRank == 0) {
        ifstream f(fileName.c_str());
        if (f.is_open()) {
            f.close();
            if (remove(fileName.c_str())) {
                error=1;
            }
        }
    }
    MPI_Allreduce(&error, &mpiErr, 1, MPI_INT, MPI_MAX, mpiComm);
    if (mpiErr != 0) {
        cerr << "Error removing " << fileName << "!" << endl;
        return false;
    }

    MPI_File mpiFileHandle;
    MPI_Info mpiInfo = MPI_INFO_NULL;
    int amode = MPI_MODE_CREATE|MPI_MODE_WRONLY|MPI_MODE_UNIQUE_OPEN;
    mpiErr = MPI_File_open(mpiComm, const_cast<char*>(fileName.c_str()),
            amode, mpiInfo, &mpiFileHandle);
    if (mpiErr == MPI_SUCCESS) {
        mpiErr = MPI_File_set_view(mpiFileHandle, 0, MPI_CHAR, MPI_CHAR,
                const_cast<char*>("native"), mpiInfo);
    }
    if (mpiErr != MPI_SUCCESS) {
        cerr << "Error opening " << fileName << " for parallel writing!" << endl;
        return false;
    }

    // these definitions make the code more readable and life easier -
    // WRITE_ORDERED causes all ranks to write the contents of the stream while
    // WRITE_SHARED should only be called for rank 0.
#define WRITE_ORDERED(_stream) \
    do {\
        MPI_Status mpiStatus;\
        string contents = _stream.str();\
        mpiErr = MPI_File_write_ordered(\
            mpiFileHandle, const_cast<char*>(contents.c_str()),\
            contents.length(), MPI_CHAR, &mpiStatus);\
        _stream.str("");\
    } while(0)

#define WRITE_SHARED(_stream) \
    do {\
        MPI_Status mpiStatus;\
        string contents = _stream.str();\
        mpiErr = MPI_File_write_shared(\
            mpiFileHandle, const_cast<char*>(contents.c_str()),\
            contents.length(), MPI_CHAR, &mpiStatus);\
        _stream.str("");\
    } while(0)

#else /*********** non-MPI version follows ************/

    ofstream ofs(fileName.c_str());

    // the non-MPI versions of WRITE_ORDERED and WRITE_SHARED are
    // identical.
#define WRITE_ORDERED(_stream) \
    do {\
        ofs << _stream.str();\
        _stream.str("");\
    } while(0)

#define WRITE_SHARED(_stream) \
    do {\
        ofs << _stream.str();\
        _stream.str("");\
    } while(0)

#endif // HAVE_MPI

    if (mpiSize > 1) {
#if HAVE_MPI
        meshBlocks[0]->removeGhostZones(mpiRank);
        ElementData_ptr elements = meshBlocks[0]->getElementsByName(meshName);
        int myNumCells = elements->getNumElements();
        MPI_Reduce(&myNumCells, &gNumCells, 1, MPI_INT, MPI_SUM, 0, mpiComm);

        int myCellSizeAndType[2];
        myCellSizeAndType[0] = elements->getNodesPerElement();
        myCellSizeAndType[1] = elements->getType();

        // rank 0 needs to know element type and size but it's possible that
        // this information is only available on other ranks (value=0) so
        // retrieve it
        MPI_Reduce(&myCellSizeAndType, &gCellSizeAndType, 2, MPI_INT, MPI_MAX,
                0, mpiComm);
#endif
    } else {
        int idx = 0;
        for (meshIt = meshBlocks.begin(); meshIt != meshBlocks.end(); meshIt++, idx++) {
            if (meshBlocks.size() > 1)
                (*meshIt)->removeGhostZones(idx);
            ElementData_ptr elements = (*meshIt)->getElementsByName(meshName);
            gNumCells += elements->getNumElements();
            if (gCellSizeAndType[0] == 0)
                gCellSizeAndType[0] = elements->getNodesPerElement();
            if (gCellSizeAndType[1] == 0)
                gCellSizeAndType[1] = elements->getType();
        }
    }

    gNumPoints = meshBlocks[0]->getNodes()->getGlobalNumNodes();

    ostringstream oss;
    oss.setf(ios_base::scientific, ios_base::floatfield);

    if (mpiRank == 0) {
#ifdef _DEBUG
        cout << meshName << ": pts=" << gNumPoints << ", cells=" << gNumCells
            << ", cellsize=" << gCellSizeAndType[0] << ", type="
            << gCellSizeAndType[1] << ", numNodalVars=" << nodalVars.size()
            << ", numCellVars=" << cellVars.size()
            << endl;
#endif

        oss << "<?xml version=\"1.0\"?>" << endl;
        oss << "<VTKFile type=\"UnstructuredGrid\" version=\"0.1\">" << endl;
        oss << "<UnstructuredGrid>" << endl;

        // write time and cycle values
        oss << "<FieldData>" << endl;
        oss << "<DataArray Name=\"TIME\" type=\"Float64\" format=\"ascii\" NumberOfTuples=\"1\">" << endl;
        oss << time << endl;
        oss << "</DataArray>" << endl << "</FieldData>" << endl;
        oss << "<FieldData>" << endl;
        oss << "<DataArray Name=\"CYCLE\" type=\"Int32\" format=\"ascii\" NumberOfTuples=\"1\">" << endl;
        oss << cycle << endl;
        oss << "</DataArray>" << endl << "</FieldData>" << endl;

        // write mesh header
        oss << "<Piece NumberOfPoints=\"" << gNumPoints
            << "\" NumberOfCells=\"" << gNumCells << "\">" << endl;
        oss << "<Points>" << endl;
        oss << "<DataArray NumberOfComponents=\"3\" type=\"Float64\" format=\"ascii\">" << endl;
    }

    //
    // coordinates (note that we are using the original nodes)
    //
    int blockNum = (mpiSize>1 ? mpiRank : 0);
    for (meshIt = meshBlocks.begin(); meshIt != meshBlocks.end(); meshIt++, blockNum++) {
        (*meshIt)->getNodes()->writeCoordinatesVTK(oss, blockNum);
    }

    WRITE_ORDERED(oss);

    if (mpiRank == 0) {
        oss << "</DataArray>" << endl << "</Points>" << endl;
        oss << "<Cells>" << endl;
        oss << "<DataArray Name=\"connectivity\" type=\"Int32\" format=\"ascii\">" << endl;
    }

    //
    // connectivity
    //
    for (meshIt = meshBlocks.begin(); meshIt != meshBlocks.end(); meshIt++) {
        ElementData_ptr el = (*meshIt)->getElementsByName(meshName);
        el->writeConnectivityVTK(oss);
    }

    WRITE_ORDERED(oss);

    //
    // offsets & types
    //
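    // Offsets are the cumulative connectivity lengths; since all cells of a
    // mesh have the same number of nodes, the offset of cell i is simply
    // i*nodesPerElement.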
    if (mpiRank == 0) {
        oss << "</DataArray>" << endl;
        oss << "<DataArray Name=\"offsets\" type=\"Int32\" format=\"ascii\">" << endl;
        for (int i=1; i < gNumCells+1; i++) {
            oss << i*gCellSizeAndType[0] << endl;
        }
        oss << "</DataArray>" << endl;
        oss << "<DataArray Name=\"types\" type=\"UInt8\" format=\"ascii\">" << endl;
        for (int i=1; i < gNumCells+1; i++) {
            oss << gCellSizeAndType[1] << endl;
        }
        oss << "</DataArray>" << endl << "</Cells>" << endl;
        WRITE_SHARED(oss);
    }

    // now write all variables - first the nodal data, then cell data

    // write nodal data if any
    if (!nodalVars.empty()) {
        if (mpiRank == 0)
            oss << "<PointData>" << endl;
        for (viIt = nodalVars.begin(); viIt != nodalVars.end(); viIt++) {
            writeVarToVTK(*viIt, oss);
            WRITE_ORDERED(oss);
            if (mpiRank == 0)
                oss << "</DataArray>" << endl;
        }
        if (mpiRank == 0)
            oss << "</PointData>" << endl;
    }

    // write cell data if any
    if (!cellVars.empty()) {
        if (mpiRank == 0)
            oss << "<CellData>" << endl;
        for (viIt = cellVars.begin(); viIt != cellVars.end(); viIt++) {
            writeVarToVTK(*viIt, oss);
            WRITE_ORDERED(oss);
            if (mpiRank == 0)
                oss << "</DataArray>" << endl;
        }
        if (mpiRank == 0)
            oss << "</CellData>" << endl;
    }

    if (mpiRank == 0) {
        oss << "</Piece>" << endl << "</UnstructuredGrid>" << endl
            << "</VTKFile>" << endl;
        WRITE_SHARED(oss);
    }

#if HAVE_MPI
    mpiErr = MPI_File_close(&mpiFileHandle);
#else
    ofs.close();
#endif

    return true;
}

//
// Writes a single variable to the VTK stream
//
void EscriptDataset::writeVarToVTK(const VarInfo& varInfo, ostream& os)
{
    const DataBlocks& varBlocks = varInfo.dataBlocks;
    int rank = varBlocks[0]->getRank();
    int numComps = 1;
    if (rank > 0)
        numComps *= 3;
    if (rank > 1)
        numComps *= 3;

    if (mpiRank == 0) {
        os << "<DataArray Name=\"" << varInfo.varName
            << "\" type=\"Float64\" NumberOfComponents=\"" << numComps
            << "\" format=\"ascii\">" << endl;
    }

    // this is required in case we read a dataset with more than one chunk on
    // one rank
    int blockNum = (mpiSize>1 ? mpiRank : 0);
    DataBlocks::const_iterator blockIt;
    for (blockIt = varBlocks.begin(); blockIt != varBlocks.end(); blockIt++, blockNum++) {
        (*blockIt)->writeToVTK(os, blockNum);
    }
}

//
// Sets the domain using an escript::AbstractDomain instance.
//
bool EscriptDataset::setDomain(const escript::AbstractDomain* domain)
{
#ifndef VISIT_PLUGIN
    int myError = 0, gError;

    // fail if the domain has already been set
    if (meshBlocks.size() > 0) {
        cerr << "Domain has already been set!" << endl;
        myError = 1;

    } else {
        FinleyMesh_ptr mesh(new FinleyMesh());
        if (mesh->initFromEscript(domain)) {
            if (mpiSize > 1)
                mesh->reorderGhostZones(mpiRank);
            meshBlocks.push_back(mesh);
        } else {
            cerr << "Error initializing domain!" << endl;
            myError = 1;
        }
    }

    if (mpiSize > 1) {
#if HAVE_MPI
        MPI_Allreduce(&myError, &gError, 1, MPI_INT, MPI_MAX, mpiComm);
#endif
    } else {
        gError = myError;
    }

    if (gError) {
        meshBlocks.clear();
    } else {
        // Convert mesh data to variables
        convertMeshVariables();
    }
    return !gError;

#else // VISIT_PLUGIN
    return false;
#endif
}

//
// Sets the domain from dump files.
//
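// The file pattern is expected to contain a printf-style %d conversion which
// is expanded to the block index (or to the MPI rank when running in
// parallel).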
bool EscriptDataset::setDomain(const string filePattern, int nBlocks)
{
    int myError = 0, gError;

    if (mpiSize > 1 && nBlocks != mpiSize) {
        cerr << "Cannot load " << nBlocks << " chunks on " << mpiSize
            << " MPI ranks!" << endl;
        myError = 1;

    } else if (meshBlocks.size() > 0) {
        cerr << "Domain has already been set!" << endl;
        myError = 1;

    } else {
        char* str = new char[filePattern.length()+10];
        if (mpiSize > 1) {
            FinleyMesh_ptr meshPart(new FinleyMesh());
            sprintf(str, filePattern.c_str(), mpiRank);
            string meshfile = str;
            if (meshPart->initFromNetCDF(meshfile)) {
                meshPart->reorderGhostZones(mpiRank);
                meshBlocks.push_back(meshPart);
            } else {
                cerr << "Error initializing domain!" << endl;
                myError = 1;
            }
        } else {
            for (int idx=0; idx < nBlocks; idx++) {
                FinleyMesh_ptr meshPart(new FinleyMesh());
                sprintf(str, filePattern.c_str(), idx);
                string meshfile = str;
                if (meshPart->initFromNetCDF(meshfile)) {
                    if (nBlocks > 1)
                        meshPart->reorderGhostZones(idx);
                    meshBlocks.push_back(meshPart);
                } else {
                    cerr << "Error initializing domain block " << idx << endl;
                    myError = 1;
                    break;
                }
            }
        }
        delete[] str;
    }

    if (mpiSize > 1) {
#if HAVE_MPI
        MPI_Allreduce(&myError, &gError, 1, MPI_INT, MPI_MAX, mpiComm);
#endif
    } else {
        gError = myError;
    }

    if (gError) {
        meshBlocks.clear();
    } else {
        // Convert mesh data to variables
        convertMeshVariables();
    }

    return !gError;
}

//
// Sets an already converted domain.
//
bool EscriptDataset::setDomain(const MeshBlocks& domain)
{
    int myError = 0, gError;

    if (mpiSize > 1 && domain.size() > 1) {
        cerr << "Can only add one domain block per rank when using MPI!"
            << endl;
        myError = 1;

    } else if (meshBlocks.size() > 0) {
        cerr << "Domain has already been set!" << endl;
        myError = 1;

    }

    if (mpiSize > 1) {
#if HAVE_MPI
        MPI_Allreduce(&myError, &gError, 1, MPI_INT, MPI_MAX, mpiComm);
#endif
    } else {
        gError = myError;
    }

    if (!gError) {
        externalMesh = true;
        meshBlocks = domain;
    }

    return !gError;
}

//
// Adds an escript variable to the dataset
//
bool EscriptDataset::addData(escript::Data& data, const string name)
{
#ifndef VISIT_PLUGIN
    bool success = true;

    // fail if no domain has been set
    if (meshBlocks.size() == 0) {
        success = false;

    } else {
        // initialize variable
        VarInfo vi;
        vi.varName = name;

        DataVar_ptr var(new DataVar(vi.varName));
        if (var->initFromEscript(data, meshBlocks[0])) {
            vi.dataBlocks.push_back(var);
            updateSampleDistribution(vi);
            vi.valid = true;
        } else {
            var.reset();
            vi.valid = false;
        }
        variables.push_back(vi);
    }
    return success;

#else // VISIT_PLUGIN
    return false;
#endif
}

//
// Adds a variable read from NetCDF dump files to the dataset
//
bool EscriptDataset::addData(const string filePattern, const string name)
{
    int myError = 0, gError;

    // fail if no domain has been set
    if (meshBlocks.size() == 0) {
        gError = 1;

    } else {
        // initialize variable
        VarInfo vi;
        vi.varName = name;
        vi.valid = true;
        char* str = new char[filePattern.length()+10];

        // read all parts of the variable
        MeshBlocks::iterator mIt;
        int idx = (mpiSize > 1) ? mpiRank : 0;
        for (mIt = meshBlocks.begin(); mIt != meshBlocks.end(); mIt++, idx++) {
            sprintf(str, filePattern.c_str(), idx);
            string dfile = str;
            DataVar_ptr var(new DataVar(name));
            if (var->initFromNetCDF(dfile, *mIt))
                vi.dataBlocks.push_back(var);
            else {
                cerr << "Error reading " << dfile << endl;
                myError = 1;
                break;
            }
        }
        delete[] str;

        if (mpiSize > 1) {
#if HAVE_MPI
            MPI_Allreduce(&myError, &gError, 1, MPI_INT, MPI_MAX, mpiComm);
#endif
        } else {
            gError = myError;
        }

        if (!gError) {
            // only add variable if all chunks have been read without error
            updateSampleDistribution(vi);
            variables.push_back(vi);
        }
    }

    return !gError;
}

//
// Converts the domain's own mesh data arrays into dataset variables
//
void EscriptDataset::convertMeshVariables()
{
    const StringVec& varNames = meshBlocks[0]->getVarNames();
    StringVec::const_iterator it;
    for (it = varNames.begin(); it != varNames.end(); it++) {
        VarInfo vi;
        vi.varName = *it;
        vi.valid = true;
        // get all parts of current variable
        MeshBlocks::iterator mIt;
        for (mIt = meshBlocks.begin(); mIt != meshBlocks.end(); mIt++) {
            DataVar_ptr var(new DataVar(*it));
            if (var->initFromMesh(*mIt)) {
                vi.dataBlocks.push_back(var);
            } else {
                cerr << "Error converting mesh variable " << *it << endl;
                vi.valid = false;
                break;
            }
        }
        updateSampleDistribution(vi);
        meshVariables.push_back(vi);
    }
}

// Retrieves the number of samples in each block - used to determine which
// blocks, if any, contribute to a given variable.
void EscriptDataset::updateSampleDistribution(VarInfo& vi)
{
    IntVec sampleDist;
    const DataBlocks& varBlocks = vi.dataBlocks;

    if (mpiSize > 1) {
#if HAVE_MPI
        int myNumSamples = varBlocks[0]->getNumberOfSamples();
        sampleDist.insert(sampleDist.end(), mpiSize, 0);
        MPI_Allgather(
            &myNumSamples, 1, MPI_INT, &sampleDist[0], 1, MPI_INT, mpiComm);
#endif
    } else {
        DataBlocks::const_iterator it;
        for (it = varBlocks.begin(); it != varBlocks.end(); it++) {
            sampleDist.push_back((*it)->getNumberOfSamples());
        }
    }
    vi.sampleDistribution = sampleDist;
}

//
// Writes a Silo multimesh object that ties the mesh blocks together
//
void EscriptDataset::putSiloMultiMesh(DBfile* dbfile, const string& meshName)
{
#if USE_SILO
    vector<int> meshtypes;
    vector<string> tempstrings;
    vector<char*> meshnames;
    string pathPrefix;

    string::size_type ppIndex = meshBlocks[0]->getSiloPath().find(':');
    if (ppIndex != string::npos) {
        pathPrefix = meshBlocks[0]->getSiloPath().substr(0, ppIndex+1);
    }

    // find a variable belonging to this mesh to get the sample
    // distribution (which tells us which ranks contribute to this mesh).
    // Try mesh variables first, then regular ones.
    VarVector::const_iterator viIt;
    for (viIt = meshVariables.begin(); viIt != meshVariables.end(); viIt++) {
        if (meshName == viIt->dataBlocks[0]->getMeshName())
            break;
    }

    if (viIt == meshVariables.end()) {
        for (viIt = variables.begin(); viIt != variables.end(); viIt++) {
            if (meshName == viIt->dataBlocks[0]->getMeshName())
                break;
        }
    }
    // this probably means that the mesh is empty
    if (viIt == variables.end()) {
        return;
    }

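    // Build the per-block mesh paths (e.g. /block0001/<meshName>) for every
    // block that actually contains samples and publish them as a multimesh
    // object so readers can assemble the global mesh.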
    // reserve to keep the c_str() pointers collected below valid
    tempstrings.reserve(viIt->sampleDistribution.size());
    for (size_t idx = 0; idx < viIt->sampleDistribution.size(); idx++) {
        if (viIt->sampleDistribution[idx] > 0) {
            stringstream siloPath;
            siloPath << pathPrefix << "/block";
            int prevWidth = siloPath.width(4);
            char prevFill = siloPath.fill('0');
            siloPath << right << idx;
            siloPath.width(prevWidth);
            siloPath.fill(prevFill);
            siloPath << "/";
            siloPath << meshName;
            tempstrings.push_back(siloPath.str());
            meshnames.push_back((char*)tempstrings.back().c_str());
            meshtypes.push_back(DB_UCDMESH);
        }
    }

    // ignore empty mesh
    if (!meshnames.empty()) {
        DBSetDir(dbfile, "/");
        DBoptlist* optList = DBMakeOptlist(2);
        DBAddOption(optList, DBOPT_CYCLE, &cycle);
        DBAddOption(optList, DBOPT_DTIME, &time);
        DBPutMultimesh(dbfile, meshName.c_str(), meshnames.size(),
                &meshnames[0], &meshtypes[0], optList);
        DBFreeOptlist(optList);
    }
#endif
}

//
// Writes a Silo multivar object that ties the blocks of a variable together
//
void EscriptDataset::putSiloMultiVar(DBfile* dbfile, const VarInfo& vi,
                                     bool useMeshFile)
{
#if USE_SILO
    vector<int> vartypes;
    vector<string> tempstrings;
    vector<char*> varnames;
    string pathPrefix;
    if (useMeshFile) {
        string::size_type ppIndex = meshBlocks[0]->getSiloPath().find(':');
        if (ppIndex != string::npos) {
            pathPrefix = meshBlocks[0]->getSiloPath().substr(0, ppIndex+1);
        }
    }

    // reserve to keep the c_str() pointers collected below valid
    tempstrings.reserve(vi.sampleDistribution.size());
    for (size_t idx = 0; idx < vi.sampleDistribution.size(); idx++) {
        if (vi.sampleDistribution[idx] > 0) {
            stringstream siloPath;
            siloPath << pathPrefix << "/block";
            int prevWidth = siloPath.width(4);
            char prevFill = siloPath.fill('0');
            siloPath << right << idx;
            siloPath.width(prevWidth);
            siloPath.fill(prevFill);
            siloPath << "/";
            siloPath << vi.varName;
            tempstrings.push_back(siloPath.str());
            varnames.push_back((char*)tempstrings.back().c_str());
            vartypes.push_back(DB_UCDVAR);
        }
    }

    // ignore empty variables
    if (!varnames.empty()) {
        DBSetDir(dbfile, "/");
        DBoptlist* optList = DBMakeOptlist(2);
        DBAddOption(optList, DBOPT_CYCLE, &cycle);
        DBAddOption(optList, DBOPT_DTIME, &time);
        if (useMeshFile) {
            string vpath = string(MESH_VARS)+vi.varName;
            DBPutMultivar(dbfile, vpath.c_str(), varnames.size(),
                    &varnames[0], &vartypes[0], optList);
        } else {
            DBPutMultivar(dbfile, vi.varName.c_str(), varnames.size(),
                    &varnames[0], &vartypes[0], optList);
        }
        DBFreeOptlist(optList);
    }
#endif
}

//
// Writes the components of a rank-2 (tensor) variable as separate multivars
//
void EscriptDataset::putSiloMultiTensor(DBfile* dbfile, const VarInfo& vi)
{
#if USE_SILO
    string tensorDir = vi.varName+string("_comps/");
    DBSetDir(dbfile, "/");
    DBMkdir(dbfile, tensorDir.c_str());
    int one = 1;
    DBoptlist* optList = DBMakeOptlist(3);
    DBAddOption(optList, DBOPT_CYCLE, &cycle);
    DBAddOption(optList, DBOPT_DTIME, &time);
    DBAddOption(optList, DBOPT_HIDE_FROM_GUI, &one);
    const IntVec& shape = vi.dataBlocks[0]->getShape();

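    // Each component a_<i><j> is written as its own multivar inside the
    // <var>_comps/ directory and hidden from the GUI; the tensor expression
    // registered via DBPutDefvars() in saveSilo() is expected to reference
    // these component variables.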
    for (int i=0; i<shape[1]; i++) {
        for (int j=0; j<shape[0]; j++) {
            vector<string> tempstrings;
            vector<char*> varnames;
            vector<int> vartypes;
            // reserve to keep the c_str() pointers collected below valid
            tempstrings.reserve(vi.sampleDistribution.size());
            stringstream comp;
            comp << vi.varName << "_comps/a_";
            comp << i;
            comp << j;
            for (size_t idx = 0; idx < vi.sampleDistribution.size(); idx++) {
                if (vi.sampleDistribution[idx] > 0) {
                    stringstream siloPath;
                    siloPath << "/block";
                    int prevWidth = siloPath.width(4);
                    char prevFill = siloPath.fill('0');
                    siloPath << right << idx;
                    siloPath.width(prevWidth);
                    siloPath.fill(prevFill);
                    siloPath << "/" << comp.str();
                    tempstrings.push_back(siloPath.str());
                    varnames.push_back((char*)tempstrings.back().c_str());
                    vartypes.push_back(DB_UCDVAR);
                }
            }
            if (!varnames.empty()) {
                DBPutMultivar(dbfile, comp.str().c_str(), varnames.size(),
                        &varnames[0], &vartypes[0], optList);
            }
        }
    }
    DBFreeOptlist(optList);
#endif
}

} // namespace weipa
