########################################################
#
# Copyright (c) 2003-2012 by University of Queensland
# Earth Systems Science Computational Center (ESSCC)
# http://www.uq.edu.au/esscc
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
########################################################

__copyright__="""Copyright (c) 2003-2012 by University of Queensland
Earth Systems Science Computational Center (ESSCC)
http://www.uq.edu.au/esscc
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Open Software License version 3.0
http://www.opensource.org/licenses/osl-3.0.php"""
__url__="https://launchpad.net/escript-finley"

__all__ = ['DataSource','UBCDataSource','ERSDataSource','SyntheticDataSource','SmoothAnomaly']

import logging
import numpy as np
import struct
from esys.escript import *
from esys.escript.linearPDEs import *
import esys.escript.unitsSI as U
try:
    from scipy.io.netcdf import netcdf_file
    __all__ += ['NetCDFDataSource']
except ImportError:
    pass

def LatLonToUTM(lon, lat, wkt_string=None):
    """
    Converts one or more longitude,latitude pairs to the corresponding x,y
    coordinates in the Universal Transverse Mercator projection.
    If wkt_string is not given or invalid, or the gdal module is not
    available to parse it, the input values are assumed to refer to the
    Clarke 1866 ellipsoid.
    """

    # not really optimal: if pyproj is not installed we return the input
    # values without modification.
    try:
        import pyproj
    except:
        print("Warning, pyproj not available. Domain extents will be wrong")
        return lon,lat

    # determine UTM zone from the input data
    zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
    try:
        import osgeo.osr
        srs = osgeo.osr.SpatialReference()
        srs.ImportFromWkt(wkt_string)
        p_src = pyproj.Proj(srs.ExportToProj4())
    except:
        p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
    p_dest = pyproj.Proj(proj='utm', zone=zone) # ellps?
    x,y=pyproj.transform(p_src, p_dest, lon, lat)
    return x,y

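# A minimal usage sketch for LatLonToUTM (assumes pyproj is installed; the
# coordinate values are made up for illustration):
#
#   x, y = LatLonToUTM([138.5, 138.6], [-34.9, -34.8])
#
# If a WKT string read from a dataset is passed (and osgeo/gdal is present),
# the source coordinate system is taken from that string instead of the
# Clarke 1866 default.
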
class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    """
    # this is currently specific to gravity inversion and should be generalised

    def __init__(self):
        """
        """
        self._constrainBottom=False
        self._domain=None
        self._pad_l=0.1
        self._pad_h=0.1
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)

    def _addPadding(self, pad_l, pad_h, NE, l, origin):
        """
        Helper method that computes new number of elements, length and origin
        after adding padding to the input values.
        """
        DIM=len(NE)
        frac=[0.]*DIM
        # padding is applied to each side so multiply by 2 to get the total
        # amount of padding per dimension
        if pad_l>0 and pad_l<1:
            for i in xrange(DIM-1):
                frac[i]=2*pad_l
        elif pad_l>=1:
            for i in xrange(DIM-1):
                frac[i]=2*pad_l/float(NE[i])
        if pad_h>0 and pad_h<1:
            frac[DIM-1]=2*pad_h
        elif pad_h>=1:
            frac[DIM-1]=2*pad_h/(float(NE[DIM-1]))
        # calculate new number of elements
        NE_new=[int(NE[i]*(1+frac[i])) for i in xrange(DIM)]
        NEdiff=[NE_new[i]-NE[i] for i in xrange(DIM)]
        spacing=[l[i]/NE[i] for i in xrange(DIM)]
        l_new=[NE_new[i]*spacing[i] for i in xrange(DIM)]
        origin_new=[origin[i]-NEdiff[i]/2.*spacing[i] for i in xrange(DIM)]
        return NE_new, l_new, origin_new

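    # A worked example of the padding arithmetic above (values are
    # illustrative only): with NE=[100,100,20], l=[10000.,10000.,2000.],
    # origin=[0.,0.,-2000.] and pad_l=pad_h=0.1, each horizontal dimension
    # grows by 2*0.1=20% to 120 elements and the vertical one to 24, the
    # 100 m spacing is kept, so l_new=[12000.,12000.,2400.] and the origin
    # shifts by half the added length to [-1000.,-1000.,-2200.].
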
    def _interpolateOnDomain(self, data, shape, origin, spacing, length):
        """
        Helper method that interpolates data arrays onto the domain.
        """
        dim=len(shape)
        arrays=np.zeros(((len(data[0])-dim),)+tuple(shape))
        for entry in data:
            index=()
            for i in xrange(dim):
                # grid index of this data point (must be integral for the
                # array assignment below)
                index=(int((entry[i]-origin[i])/spacing[i]),)+index
            for i in xrange(arrays.shape[0]):
                arrays[i][index]=entry[dim+i]
        dom=self.getDomain()
        x=dom.getX()
        delta=[length[i]/(shape[dim-i-1]-1) for i in xrange(dim)]
        realorigin=[inf(x[i]) for i in xrange(dim)]
        res=[]
        for i in xrange(arrays.shape[0]):
            res.append(interpolateTable(arrays[i], x[:dim], realorigin, delta, 1e9))
        return res

    def setPadding(self, pad_l=0.1, pad_h=0.1):
        """
        Sets the amount of padding around the dataset. If pad_l/pad_h is >=1
        it is treated as the number of elements to be added to the domain.
        If 0 < pad_l, pad_h < 1, the padding amount is relative.
        """
        self._pad_l=pad_l
        self._pad_h=pad_h

    def setConstrainBottom(self, constrain):
        """
        If `constrain` is True, then the density mask will be set to 1 in the
        padding area at the bottom of the domain. By default this area is
        unconstrained.
        """
        self._constrainBottom=constrain

    def getDomain(self):
        """
        Returns a domain that spans the data area plus padding.
        """
        if self._domain is None:
            self._domain=self._createDomain(self._pad_l, self._pad_h)
        return self._domain

    def getDensityMask(self):
        """
        Returns the density mask data object, which has value 1 in the
        padding area and 0 elsewhere.
        """
        raise NotImplementedError

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as a
        tuple.
        """
        raise NotImplementedError

    def _createDomain(self, padding_l, padding_h):
        """
        Creates and returns an escript domain that spans the entire area of
        available data plus a buffer zone.
        """
        raise NotImplementedError

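# The concrete data sources below are all used the same way; a minimal
# sketch (where `source` stands for any DataSource subclass instance):
#
#   source.setPadding(pad_l=0.2, pad_h=0.2)
#   dom = source.getDomain()
#   rho_mask = source.getDensityMask()
#   g, sigma = source.getGravityAndStdDev()
#
# getDomain() builds the padded domain on first use; the other calls return
# escript Data objects defined on that domain.
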
##############################################################################
class UBCDataSource(DataSource):
    def __init__(self, domainclass, meshfile, gravfile, topofile=None):
        super(UBCDataSource,self).__init__()
        self._meshfile=meshfile
        self._gravfile=gravfile
        self._topofile=topofile
        self._domainclass=domainclass
        self._readMesh()

    def getDensityMask(self):
        #topodata=self._readTopography()
        #shape=[self.NE[1]+1, self.NE[0]+1]
        #mask=self._interpolateOnDomain(topodata, shape, self._origin, self._spacing, self._meshlen)
        #mask=wherePositive(self.getDomain().getX()[2]-mask[0])
        return self._mask

    def getGravityAndStdDev(self):
        gravlist=self._readGravity() # x,y,z,g,s
        shape=[self.NE[2]+1, self.NE[1]+1, self.NE[0]+1]
        g_and_sigma=self._interpolateOnDomain(gravlist, shape, self._origin, self._spacing, self._meshlen)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def _readMesh(self):
        meshdata=open(self._meshfile).readlines()
        numDataPoints=meshdata[0].split()
        origin=meshdata[1].split()
        self._numDataPoints=[int(x) for x in numDataPoints]
        self._origin=[float(x) for x in origin]
        self._spacing=[float(X.split('*')[1]) for X in meshdata[2:]]
        # vertical data is upside down
        self._origin[2]-=(self._numDataPoints[2]-1)*self._spacing[2]

    def _createDomain(self, padding_l, padding_h):
        NE=[self._numDataPoints[i]-1 for i in xrange(3)]
        l=[NE[i]*self._spacing[i] for i in xrange(3)]
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, self._origin)

        self._meshlen=l_new
        self.NE=NE_new
        self._origin=origin_new
        lo=[(self._origin[i], self._origin[i]+l_new[i]) for i in xrange(3)]
        NEdiff=[NE_new[i]-NE[i] for i in xrange(3)]
        try:
            dom=self._domainclass(*self.NE, l0=lo[0], l1=lo[1], l2=lo[2])
            x=dom.getX()-[self._origin[i]+NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(dom.getX()[2])

        except TypeError:
            dom=self._domainclass(*self.NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
            x=dom.getX()-[NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(x[2]+self._origin[2])

        if self._constrainBottom:
            M=3 # constrain bottom
        else:
            M=2 # do not constrain bottom
        for i in xrange(M):
            mask=mask + whereNegative(x[i]) + \
                    wherePositive(x[i]-l_new[i]+NEdiff[i]*self._spacing[i])
        self._mask=wherePositive(mask)

        self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
        self.logger.debug("     length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
        self.logger.debug("     origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        return dom

    def _readTopography(self):
        f=open(self._topofile)
        n=int(f.readline())
        topodata=np.zeros((n,3))
        for i in xrange(n):
            x=f.readline().split()
            x[0]=float(x[0])
            x[1]=float(x[1])
            x[2]=float(x[2])
            topodata[i]=x
        f.close()
        return topodata

    def _readGravity(self):
        f=open(self._gravfile)
        n=int(f.readline())
        gravdata=np.zeros((n,5))
        for i in xrange(n):
            x=f.readline().split()
            x[0]=float(x[0]) # x
            x[1]=float(x[1]) # y
            x[2]=float(x[2]) # z
            x[3]=float(x[3]) # gravity anomaly in mGal
            x[4]=float(x[4]) # stddev
            # convert gravity anomaly units to m/s^2 and rescale error
            x[3]*=-1e-5
            x[4]*=1e-5
            gravdata[i]=x
        f.close()
        return gravdata

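# A minimal usage sketch for UBCDataSource (file names are hypothetical;
# ripley's Brick is one possible domain class, as suggested by the ripley
# comment in NetCDFDataSource below):
#
#   from esys.ripley import Brick
#   src = UBCDataSource(Brick, 'mesh.msh', 'grav.grv')
#   dom = src.getDomain()
#   g, sigma = src.getGravityAndStdDev()
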
##############################################################################
class NetCDFDataSource(DataSource):
    def __init__(self, domainclass, gravfile, topofile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
        """
        vertical_extents - (alt_min, alt_max, num_elements)
        alt_of_data - altitude of measurements
        """
        super(NetCDFDataSource,self).__init__()
        self._topofile=topofile
        self._gravfile=gravfile
        self._domainclass=domainclass
        self._determineExtents(vertical_extents)
        self._altOfData=alt_of_data

    def getDensityMask(self):
        #topodata=self._readTopography()
        #shape=self._numDataPoints[1::-1]
        #mask=self._interpolateOnDomain(topodata, shape, self._origin, self._spacing, self._meshlen)
        #mask=wherePositive(self.getDomain().getX()[2]-mask[0])
        #rho=fill*(1.-mask) + RHO_AIR*mask
        return self._mask

    def getGravityAndStdDev(self):
        gravlist=self._readGravity() # x,y,z,g,s
        shape=[self.NE[2]+1, self.NE[1]+1, self.NE[0]+1]
        g_and_sigma=self._interpolateOnDomain(gravlist, shape, self._origin, self._spacing, self._meshlen)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def _determineExtents(self, ve):
        f=netcdf_file(self._gravfile, 'r')
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out gravity variable name
        grav_name=None
        if len(f.variables)==1:
            grav_name=f.variables.keys()[0]
        else:
            for n in f.variables.keys():
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    grav_name=n
                    break
        if grav_name is None:
            raise RuntimeError("Could not determine gravity variable")

        # see if there is a wkt string to convert coordinates
        try:
            wkt_string=f.variables[grav_name].esri_pe_string
        except:
            wkt_string=None

        # we don't trust actual_range & geospatial_lon_min/max since they do
        # not seem to be updated for subset data.
        # Getting min/max from the arrays is obviously not very efficient...
        #lon_range=longitude.actual_range
        #lat_range=latitude.actual_range
        #lon_range=[f.geospatial_lon_min,f.geospatial_lon_max]
        #lat_range=[f.geospatial_lat_min,f.geospatial_lat_max]
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        origin=[lon_range[0],lat_range[0],ve[0]]
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0],ve[1]-ve[0]]

        f.close()

        self._numDataPoints=[NX, NY, ve[2]]
        self._origin=origin
        # we are rounding to avoid interpolateOnDomain issues
        self._spacing=[np.round(lengths[i]/(self._numDataPoints[i]-1)) for i in xrange(3)]
        self._meshlen=[(self._numDataPoints[i]-1)*self._spacing[i] for i in xrange(3)]
        self._wkt_string=wkt_string
        self._lon=lon_name
        self._lat=lat_name
        self._grv=grav_name

    def _createDomain(self, padding_l, padding_h):
        NE=[self._numDataPoints[i]-1 for i in xrange(3)]
        l=self._meshlen
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, self._origin)

        self._meshlen=l_new
        self.NE=NE_new
        self._dataorigin=self._origin
        self._origin=origin_new
        lo=[(self._origin[i], self._origin[i]+l_new[i]) for i in xrange(3)]
        NEdiff=[NE_new[i]-NE[i] for i in xrange(3)]
        try:
            dom=self._domainclass(*self.NE, l0=lo[0], l1=lo[1], l2=lo[2])
            # ripley may adjust NE and length, so recompute (keeping integer
            # element counts)
            self._meshlen=[sup(dom.getX()[i])-inf(dom.getX()[i]) for i in xrange(3)]
            self.NE=[int(round(self._meshlen[i]/self._spacing[i])) for i in xrange(3)]
            x=dom.getX()-[self._origin[i]+NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(dom.getX()[2])

        except TypeError:
            dom=self._domainclass(*self.NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
            x=dom.getX()-[NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(x[2]+self._origin[2])

        if self._constrainBottom:
            M=3 # constrain bottom
        else:
            M=2 # do not constrain bottom
        for i in xrange(M):
            mask=mask + whereNegative(x[i]) + \
                    wherePositive(x[i]-l_new[i]+NEdiff[i]*self._spacing[i])
        self._mask=wherePositive(mask)

        self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
        self.logger.debug("     length: %s x %s x %s"%(self._meshlen[0],self._meshlen[1],self._meshlen[2]))
        self.logger.debug("     origin: %s x %s x %s"%(self._origin[0],self._origin[1],self._origin[2]))

        return dom

    def _readTopography(self):
        f=netcdf_file(self._topofile, 'r')
        lon=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon=f.variables[n][:]
                break
        if lon is None:
            raise RuntimeError("Could not determine longitude variable")
        lat=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat=f.variables[n][:]
                break
        if lat is None:
            raise RuntimeError("Could not determine latitude variable")
        alt=None
        for n in ['altitude','alt']:
            if n in f.variables:
                alt=f.variables[n][:]
                break
        if alt is None:
            raise RuntimeError("Could not determine altitude variable")

        topodata=np.column_stack((lon,lat,alt))
        f.close()
        return topodata

    def _readGravity(self):
        f=netcdf_file(self._gravfile, 'r')
        #lon=f.variables[self._lon][:]
        #lat=f.variables[self._lat][:]
        NE=[self._numDataPoints[i]-1 for i in xrange(2)]
        lon=np.linspace(self._dataorigin[0], self._dataorigin[0]+NE[0]*self._spacing[0], NE[0]+1)
        lat=np.linspace(self._dataorigin[1], self._dataorigin[1]+NE[1]*self._spacing[1], NE[1]+1)
        lon,lat=np.meshgrid(lon,lat)
        grav=f.variables[self._grv][:]
        f.close()
        lon=lon.flatten()
        lat=lat.flatten()
        grav=grav.flatten()
        alt=self._altOfData*np.ones(grav.shape)
        # error value is an assumption
        try:
            missing=f.variables[self._grv].missing_value
            err=np.where(grav==missing, 0.0, 20.0)
        except:
            err=20.0*np.ones(lon.shape)
        # convert units
        err=1e-6*err
        grav=1e-6*grav
        gravdata=np.column_stack((lon,lat,alt,grav,err))
        return gravdata

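# A minimal usage sketch for NetCDFDataSource (the file name is hypothetical;
# the gravity grid must expose lon/lat dimensions as handled in
# _determineExtents above):
#
#   from esys.ripley import Brick
#   src = NetCDFDataSource(Brick, 'gravity.nc',
#                          vertical_extents=(-40000, 10000, 26))
#   dom = src.getDomain()
#   g, sigma = src.getGravityAndStdDev()
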
##############################################################################
class ERSDataSource(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, domainclass, headerfile, datafile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
        """
        headerfile - usually ends in .ers
        datafile - usually has the same name as the headerfile without '.ers'
        """
        super(ERSDataSource,self).__init__()
        self._headerfile=headerfile
        if datafile is None:
            self._datafile=headerfile[:-4]
        else:
            self._datafile=datafile
        self._domainclass=domainclass
        self._readHeader(vertical_extents)
        self._altOfData=alt_of_data

    def _readHeader(self, ve):
        metadata=open(self._headerfile, 'r').readlines()
        # parse metadata
        start=-1
        for i in xrange(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        md_dict={}
        section=[]
        for i in xrange(start, len(metadata)):
            line=metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except:
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except:
            raise RuntimeError("Could not determine NullCellValue")

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except:
            raise RuntimeError("Could not determine cell dimensions")

        try:
            originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
            originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
        except:
            self.logger.warn("Could not determine coordinate origin")
            originX,originY = 0.0, 0.0

        # test data sets have origin in top-left corner so y runs top-down
        self._dataorigin=[originX, originY]
        originY-=(NY-1)*spacingY
        self._maskval=maskval
        self._spacing=[spacingX, spacingY, (ve[1]-ve[0])/(ve[2]-1)]
        self._numDataPoints = [NX, NY, ve[2]]
        self._origin = [originX, originY, ve[0]]
        self._meshlen=[self._numDataPoints[i]*self._spacing[i] for i in xrange(3)]

    def getDensityMask(self):
        return self._mask

    def getGravityAndStdDev(self):
        gravlist=self._readGravity() # x,y,z,g,s
        shape=[self.NE[2]+1, self.NE[1]+1, self.NE[0]+1]
        g_and_sigma=self._interpolateOnDomain(gravlist, shape, self._origin, self._spacing, self._meshlen)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def _createDomain(self, padding_l, padding_h):
        NE=[self._numDataPoints[i]-1 for i in xrange(3)]
        l=[NE[i]*self._spacing[i] for i in xrange(3)]
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, self._origin)

        self._meshlen=l_new
        self.NE=NE_new
        self._origin=origin_new
        lo=[(self._origin[i], self._origin[i]+l_new[i]) for i in xrange(3)]
        NEdiff=[NE_new[i]-NE[i] for i in xrange(3)]
        try:
            dom=self._domainclass(*self.NE, l0=lo[0], l1=lo[1], l2=lo[2])
            x=dom.getX()-[self._origin[i]+NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(dom.getX()[2])

        except TypeError:
            dom=self._domainclass(*self.NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
            x=dom.getX()-[NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(x[2]+self._origin[2])

        if self._constrainBottom:
            M=3 # constrain bottom
        else:
            M=2 # do not constrain bottom
        for i in xrange(M):
            mask=mask + whereNegative(x[i]) + \
                    wherePositive(x[i]-l_new[i]+NEdiff[i]*self._spacing[i])
        self._mask=wherePositive(mask)

        self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
        self.logger.debug("     length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
        self.logger.debug("     origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        return dom

609
610 def _readGravity(self):
611 f=open(self._datafile, 'r')
612 n=self._numDataPoints[0]*self._numDataPoints[1]
613 grav=[]
614 err=[]
615 for i in xrange(n):
616 v=struct.unpack('f',f.read(4))[0]
617 grav.append(v)
618 if abs((self._maskval-v)/v) < 1e-6:
619 err.append(0.)
620 else:
621 err.append(1.)
622 f.close()
623
624 NE=[self._numDataPoints[i] for i in xrange(2)]
625 x=self._dataorigin[0]+np.arange(start=0, stop=NE[0]*self._spacing[0], step=self._spacing[0])
626 # test data sets have origin in top-left corner so y runs top-down
627 y=self._dataorigin[1]-np.arange(start=0, stop=NE[1]*self._spacing[1], step=self._spacing[1])
628 x,y=np.meshgrid(x,y)
629 x,y=x.flatten(),y.flatten()
630 alt=self._altOfData*np.ones((n,))
631
632 # convert units
633 err=2e-5*np.array(err)
634 grav=1e-5*np.array(grav)
635 gravdata=np.column_stack((x,y,alt,grav,err))
636 return gravdata
637
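# A minimal usage sketch for ERSDataSource (file names are hypothetical; the
# raster must satisfy the restrictions noted in the class docstring, and the
# data file defaults to the header name without '.ers'):
#
#   from esys.ripley import Brick
#   src = ERSDataSource(Brick, 'anomaly.ers')
#   dom = src.getDomain()
#   g, sigma = src.getGravityAndStdDev()
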
##############################################################################
class SourceFeature(object):
    """
    A feature adds a density distribution to (parts of) a domain of a
    synthetic data source, for example a layer of a specific rock type or a
    simulated ore body.
    """
    def getDensity(self):
        """
        Returns the density for the area covered by mask. It can be constant
        or a data object with spatial dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature. That is,
        mask is non-zero where the density returned by getDensity() should be
        applied, zero elsewhere.
        """
        raise NotImplementedError

class SmoothAnomaly(SourceFeature):
    def __init__(self, lx, ly, lz, x, y, depth, rho_inner, rho_outer):
        self.x=x
        self.y=y
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.depth=depth
        self.rho_inner=rho_inner
        self.rho_outer=rho_outer
        self.rho=None

    def getDensity(self):
        return self.rho

    def getMask(self, x):
        DIM=x.getDomain().getDim()
        m=whereNonNegative(x[DIM-1]-(sup(x[DIM-1])-self.depth-self.lz/2)) * whereNonPositive(x[DIM-1]-(sup(x[DIM-1])-self.depth+self.lz/2)) \
            *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM>2:
            m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        if self.rho is None:
            alpha=-log(abs(self.rho_outer/self.rho_inner))*4
            rho=exp(-alpha*((x[0]-self.x)/self.lx)**2)
            rho=rho*exp(-alpha*((x[DIM-1]-(sup(x[DIM-1])-self.depth))/self.lz)**2)
            self.rho=maximum(abs(self.rho_outer), abs(self.rho_inner*rho))
            if self.rho_inner<0: self.rho=-self.rho
        return m

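# A minimal sketch of a feature definition (all values are illustrative): a
# 2 km x 2 km x 500 m anomaly centred at x=3000, y=3000, buried 1 km deep,
# with a peak density contrast of 200 decaying towards 1 at the edges:
#
#   anomaly = SmoothAnomaly(lx=2000., ly=2000., lz=500., x=3000., y=3000.,
#                           depth=1000., rho_inner=200., rho_outer=1.)
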
##############################################################################
class SyntheticDataSource(DataSource):
    def __init__(self, DIM, NE, l, h, features):
        super(SyntheticDataSource,self).__init__()
        self._features = features
        self.DIM=DIM
        self.NE=NE
        self.l=l
        self.h=h

    def _createDomain(self, padding_l, padding_h):
        NE_H=self.NE
        NE_L=int((self.l/self.h)*NE_H+0.5)
        l=[self.l]*(self.DIM-1)+[self.h]
        NE=[NE_L]*(self.DIM-1)+[NE_H]
        origin=[0.]*self.DIM
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, origin)

        self.NE=NE_new
        self.l=l_new[0]
        self.h=l_new[self.DIM-1]

        if self.DIM==2:
            from esys.finley import Rectangle
            dom = Rectangle(n0=NE_new[0], n1=NE_new[1], l0=l_new[0], l1=l_new[1])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d elements"%(NE_new[0], NE_new[1]))
            self.logger.debug("     length: %g x %g"%(l_new[0],l_new[1]))
            self.logger.debug("     origin: %g x %g"%(origin_new[0],origin_new[1]))
        else:
            from esys.finley import Brick
            dom = Brick(n0=NE_new[0], n1=NE_new[1], n2=NE_new[2], l0=l_new[0], l1=l_new[1], l2=l_new[2])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
            self.logger.debug("     length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
            self.logger.debug("     origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        dz=l_new[self.DIM-1]/NE_new[self.DIM-1]
        self._g_mask=wherePositive(dom.getX()[0]-origin_new[0]) \
                * whereNegative(dom.getX()[0]-(l_new[0]-origin_new[0])) \
                * whereNonNegative(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+origin_new[self.DIM-1])) \
                * whereNonPositive(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+(origin_new[self.DIM-1]+dz)))
        self._mask=whereNegative(self._x[self.DIM-1]) + \
                wherePositive(self._x[self.DIM-1]-l[self.DIM-1])
        for i in xrange(self.DIM-1):
            self._mask=self._mask + whereNegative(self._x[i]) + \
                    wherePositive(self._x[i]-l[i])
        self._mask=wherePositive(self._mask)

        rho_ref=0.
        for f in self._features:
            m=f.getMask(self._x)
            rho_ref = rho_ref * (1-m) + f.getDensity() * m
        self._rho=rho_ref

        return dom

745
746 def getDensityMask(self):
747 return self._mask
748
749 def getReferenceDensity(self):
750 return self._rho
751
752 def getGravityAndStdDev(self):
753 pde=LinearSinglePDE(self.getDomain())
754 G=6.6742e-11*U.m**3/(U.kg*U.sec**2)
755 m_psi_ref=0.
756 for i in range(self.DIM):
757 m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
758 + whereZero(self._x[i]-sup(self._x[i]))
759
760 pde.setValue(A=kronecker(self.getDomain()), Y=-4*np.pi*G*self._rho, q=m_psi_ref)
761 pde.setSymmetryOn()
762 psi_ref=pde.getSolution()
763 del pde
764 g=-grad(psi_ref)
765 sigma=self._g_mask
766 return g,sigma
767
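# A minimal usage sketch for SyntheticDataSource (values are illustrative;
# NE is the number of elements in the vertical direction, l and h the
# horizontal and vertical extents):
#
#   features = [SmoothAnomaly(lx=2000., ly=2000., lz=500., x=3000., y=3000.,
#                             depth=1000., rho_inner=200., rho_outer=1.)]
#   src = SyntheticDataSource(DIM=3, NE=20, l=10000., h=5000., features=features)
#   dom = src.getDomain()
#   g, sigma = src.getGravityAndStdDev()
#   rho_ref = src.getReferenceDensity()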
