
Contents of /trunk/downunder/py_src/datasources.py



Revision 3959 by caltinay, Fri Sep 7 04:48:07 2012 UTC
File MIME type: text/x-python
File size: 31053 byte(s)
support for geodetic coordinates in ER Mapper files. Correction to UTM
conversion (now assumes southern hemisphere).

1
2 ########################################################
3 #
4 # Copyright (c) 2003-2012 by University of Queensland
5 # Earth Systems Science Computational Center (ESSCC)
6 # http://www.uq.edu.au/esscc
7 #
8 # Primary Business: Queensland, Australia
9 # Licensed under the Open Software License version 3.0
10 # http://www.opensource.org/licenses/osl-3.0.php
11 #
12 ########################################################
13
14 __copyright__="""Copyright (c) 2003-2012 by University of Queensland
15 Earth Systems Science Computational Center (ESSCC)
16 http://www.uq.edu.au/esscc
17 Primary Business: Queensland, Australia"""
18 __license__="""Licensed under the Open Software License version 3.0
19 http://www.opensource.org/licenses/osl-3.0.php"""
20 __url__="https://launchpad.net/escript-finley"
21
22 __all__ = ['DataSource','UBCDataSource','ERSDataSource','SyntheticDataSource','SmoothAnomaly']
23
24 import logging
25 import numpy as np
26 import struct
27 from esys.escript import *
28 from esys.escript.linearPDEs import *
29 import esys.escript.unitsSI as U
30 try:
31 from scipy.io.netcdf import netcdf_file
32 __all__ += ['NetCDFDataSource']
33 except ImportError:
34 pass
35
36 def LatLonToUTM(lon, lat, wkt_string=None):
37 """
38 Converts one or more longitude,latitude pairs to the corresponding x,y
39 coordinates in the Universal Transverse Mercator projection.
40 If wkt_string is not given or invalid, or the gdal module is not available
41 to parse it, the input values are assumed to be geographic coordinates
42 based on the Clarke 1866 ellipsoid.
43 """
44
45 # not really optimal: if pyproj is not installed we return the input
46 # values without modification.
47 try:
48 import pyproj
50 except ImportError:
51 print("Warning: pyproj not available. Domain extents will be wrong.")
51 return lon,lat
52
53 # determine UTM zone from the input data
54 zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
55 try:
56 import osgeo.osr
57 srs = osgeo.osr.SpatialReference()
58 srs.ImportFromWkt(wkt_string)
59 p_src = pyproj.Proj(srs.ExportToProj4())
60 except:
61 p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
62 # we assume southern hemisphere here
63 p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
64 x,y=pyproj.transform(p_src, p_dest, lon, lat)
65 return x,y
66
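# Illustrative usage sketch for LatLonToUTM (placeholder coordinates in the
# southern hemisphere, which the conversion presumes; requires pyproj):
#
#   x, y = LatLonToUTM([138.5, 138.6], [-34.9, -34.8])
#   # x and y now hold UTM eastings/northings in metres for the zone derived
#   # from the median longitude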
67 class DataSource(object):
68 """
69 A class that provides survey data for the inversion process.
70 """
71 # this is currently specific to gravity inversion and should be generalised
72
73 def __init__(self):
74 """
75 """
76 self._constrainBottom=False
77 self._domain=None
78 self._pad_l=0.1
79 self._pad_h=0.1
80 self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)
81
82 def _addPadding(self, pad_l, pad_h, NE, l, origin):
83 """
84 Helper method that computes new number of elements, length and origin
85 after adding padding to the input values.
86 """
87 DIM=len(NE)
88 frac=[0.]*DIM
89 # padding is applied to each side so multiply by 2 to get the total
90 # amount of padding per dimension
91 if pad_l>0 and pad_l<1:
92 for i in xrange(DIM-1):
93 frac[i]=2*pad_l
94 elif pad_l>=1:
95 for i in xrange(DIM-1):
96 frac[i]=2*pad_l/float(NE[i])
97 if pad_h>0 and pad_h<1:
98 frac[DIM-1]=2*pad_h
99 elif pad_h>=1:
100 frac[DIM-1]=2*pad_h/(float(NE[DIM-1]))
101 # calculate new number of elements
102 NE_new=[int(NE[i]*(1+frac[i])) for i in xrange(DIM)]
103 NEdiff=[NE_new[i]-NE[i] for i in xrange(DIM)]
104 spacing=[l[i]/NE[i] for i in xrange(DIM)]
105 l_new=[NE_new[i]*spacing[i] for i in xrange(DIM)]
106 origin_new=[origin[i]-NEdiff[i]/2.*spacing[i] for i in xrange(DIM)]
107 return NE_new, l_new, origin_new
108
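# Worked example of the padding arithmetic above (illustrative numbers):
# with NE=[100,100,50], l=[10000.,10000.,5000.], origin=[0.,0.,-5000.] and
# pad_l=pad_h=0.1 the fractions are [0.2,0.2,0.2], so
#   NE_new     = [120, 120, 60]
#   l_new      = [12000., 12000., 6000.]      (spacing stays at 100.)
#   origin_new = [-1000., -1000., -5500.]
# i.e. 10% padding is added on every side of the data volume.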
109 def _interpolateOnDomain(self, data, shape, origin, spacing, length):
110 """
111 Helper method that interpolates data arrays onto the domain.
112 """
113 dim=len(shape)
114 arrays=np.zeros(((len(data[0])-dim),)+tuple(shape))
115 for entry in data:
116 index=()
117 for i in xrange(dim):
118 index=(int((entry[i]-origin[i])/spacing[i]),)+index
119 for i in xrange(arrays.shape[0]):
120 arrays[i][index]=entry[dim+i]
121 dom=self.getDomain()
122 x=dom.getX()
123 delta=[length[i]/(shape[dim-i-1]-1) for i in xrange(dim)]
124 realorigin=[inf(x[i]) for i in xrange(dim)]
125 res=[]
126 for i in xrange(arrays.shape[0]):
127 res.append(interpolateTable(arrays[i], x[:dim], realorigin, delta, 1e9))
128 return res
129
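# Worked example of the index mapping above (illustrative numbers): a record
# (x,y,z,g,sigma)=(2500., 1200., 0., -3e-4, 2e-5) with origin=(0.,0.,-40000.)
# and spacing=(100.,100.,2000.) is placed at index (iz,iy,ix)=(20,12,25), so
# arrays[0][20,12,25]=-3e-4 (gravity) and arrays[1][20,12,25]=2e-5 (stddev).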
130 def setPadding(self, pad_l=0.1, pad_h=0.1):
131 """
132 Sets the amount of padding around the dataset. If pad_l or pad_h is >=1
133 it is treated as the number of elements to be added to the domain.
134 If 0 < pad_l, pad_h < 1, the padding amount is relative to the dataset size.
135 """
136 self._pad_l=pad_l
137 self._pad_h=pad_h
138
139 def setConstrainBottom(self, constrain):
140 """
141 If `constrain` is True, then the density mask will be set to 1 in the
142 padding area at the bottom of the domain. By default this area is
143 unconstrained.
144 """
145 self._constrainBottom=constrain
146
147 def getDomain(self):
148 """
149 Returns a domain that spans the data area plus padding.
150 """
151 if self._domain is None:
152 self._domain=self._createDomain(self._pad_l, self._pad_h)
153 return self._domain
154
155 def getDensityMask(self):
156 """
157 Returns the density mask data object, where mask has value 1 on the
158 padding area, 0 elsewhere.
159 """
160 raise NotImplementedError
161
162 def getGravityAndStdDev(self):
163 """
164 Returns the gravity anomaly and standard deviation data objects as a
165 tuple.
166 """
167 raise NotImplementedError
168
169 def _createDomain(self, padding_l, padding_h):
170 """
171 Creates and returns an escript domain that spans the entire area of
172 available data plus a buffer zone.
173 """
174 raise NotImplementedError
175
176
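# Typical use of a concrete DataSource subclass (sketch): call getDomain()
# first, then getDensityMask() and getGravityAndStdDev(); the mask and data
# objects all live on that padded domain.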
177 ##############################################################################
178 class UBCDataSource(DataSource):
179 def __init__(self, domainclass, meshfile, gravfile, topofile=None):
180 super(UBCDataSource,self).__init__()
181 self._meshfile=meshfile
182 self._gravfile=gravfile
183 self._topofile=topofile
184 self._domainclass=domainclass
185 self._readMesh()
186
187 def getDensityMask(self):
188 #topodata=self._readTopography()
189 #shape=[self.NE[1]+1, self.NE[0]+1]
190 #mask=self._interpolateOnDomain(topodata, shape, self._origin, self._spacing, self._meshlen)
191 #mask=wherePositive(self.getDomain().getX()[2]-mask[0])
192 return self._mask
193
194 def getGravityAndStdDev(self):
195 gravlist=self._readGravity() # x,y,z,g,s
196 shape=[self.NE[2]+1, self.NE[1]+1, self.NE[0]+1]
197 g_and_sigma=self._interpolateOnDomain(gravlist, shape, self._origin, self._spacing, self._meshlen)
198 return g_and_sigma[0]*[0,0,1], g_and_sigma[1]
199
200 def _readMesh(self):
201 meshdata=open(self._meshfile).readlines()
202 numDataPoints=meshdata[0].split()
203 origin=meshdata[1].split()
204 self._numDataPoints=[int(x) for x in numDataPoints]
205 self._origin=[float(x) for x in origin]
206 self._spacing=[float(X.split('*')[1]) for X in meshdata[2:]]
207 # vertical data is upside down
208 self._origin[2]-=(self._numDataPoints[2]-1)*self._spacing[2]
209
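# The parser above expects a mesh file along these lines (indicative only,
# values are placeholders; only the part after '*' is used for the spacing):
#
#   101 101 51        number of data points in x, y, z
#   0.0 0.0 0.0       origin (x, y, z); z is later shifted down by data height
#   100*50.0          cell size in x
#   100*50.0          cell size in y
#   50*50.0           cell size in z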
210 def _createDomain(self, padding_l, padding_h):
211 NE=[self._numDataPoints[i]-1 for i in xrange(3)]
212 l=[NE[i]*self._spacing[i] for i in xrange(3)]
213 NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
214 NE, l, self._origin)
215
216 self._meshlen=l_new
217 self.NE=NE_new
218 self._origin=origin_new
219 lo=[(self._origin[i], self._origin[i]+l_new[i]) for i in xrange(3)]
220 NEdiff=[NE_new[i]-NE[i] for i in xrange(3)]
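# the first attempt passes l0/l1/l2 as (min,max) tuples, which (presumably)
# only ripley-style domain constructors accept; classes that expect plain
# lengths (e.g. finley) raise a TypeError and are handled below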
221 try:
222 dom=self._domainclass(*self.NE, l0=lo[0], l1=lo[1], l2=lo[2])
223 x=dom.getX()-[self._origin[i]+NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
224 mask=wherePositive(dom.getX()[2])
225
226 except TypeError:
227 dom=self._domainclass(*self.NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
228 x=dom.getX()-[NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
229 mask=wherePositive(x[2]+self._origin[2])
230
231 if self._constrainBottom:
232 M=3 # constrain bottom
233 else:
234 M=2 # do not constrain bottom
235 for i in xrange(M):
236 mask=mask + whereNegative(x[i]) + \
237 wherePositive(x[i]-l_new[i]+NEdiff[i]*self._spacing[i])
238 self._mask=wherePositive(mask)
239
240 self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
241 self.logger.debug(" length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
242 self.logger.debug(" origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))
243
244 return dom
245
246 def _readTopography(self):
247 f=open(self._topofile)
248 n=int(f.readline())
249 topodata=np.zeros((n,3))
250 for i in xrange(n):
251 x=f.readline().split()
252 x[0]=float(x[0])
253 x[1]=float(x[1])
254 x[2]=float(x[2])
255 topodata[i]=x
256 f.close()
257 return topodata
258
259 def _readGravity(self):
260 f=open(self._gravfile)
261 n=int(f.readline())
262 gravdata=np.zeros((n,5))
263 for i in xrange(n):
264 x=f.readline().split()
265 x[0]=float(x[0]) # x
266 x[1]=float(x[1]) # y
267 x[2]=float(x[2]) # z
268 x[3]=float(x[3]) # gravity anomaly in mGal
269 x[4]=float(x[4]) # stddev
270 # convert gravity anomaly units to m/s^2 and rescale error
271 x[3]*=-1e-5
272 x[4]*=1e-5
273 gravdata[i]=x
274 f.close()
275 return gravdata
276
277 ##############################################################################
278 class NetCDFDataSource(DataSource):
279 def __init__(self, domainclass, gravfile, topofile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
280 """
281 vertical_extents - (alt_min, alt_max, num_elements)
282 alt_of_data - altitude of measurements
283 """
284 super(NetCDFDataSource,self).__init__()
285 self._topofile=topofile
286 self._gravfile=gravfile
287 self._domainclass=domainclass
288 self._determineExtents(vertical_extents)
289 self._altOfData=alt_of_data
290
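# Illustrative construction sketch (file name and domain class are
# placeholders; any domain class with a Brick-like constructor should do):
#
#   from esys.ripley import Brick
#   source = NetCDFDataSource(domainclass=Brick,
#                             gravfile='gravity_anomaly.nc',
#                             vertical_extents=(-40000, 10000, 26),
#                             alt_of_data=0.)
#   dom = source.getDomain()
#   g, sigma = source.getGravityAndStdDev()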
291 def getDensityMask(self):
292 #topodata=self._readTopography()
293 #shape=self._numDataPoints[1::-1]
294 #mask=self._interpolateOnDomain(topodata, shape, self._origin, self._spacing, self._meshlen)
295 #mask=wherePositive(self.getDomain().getX()[2]-mask[0])
296 #rho=fill*(1.-mask) + RHO_AIR*mask
297 return self._mask
298
299 def getGravityAndStdDev(self):
300 gravlist=self._readGravity() # x,y,z,g,s
301 shape=[self.NE[2]+1, self.NE[1]+1, self.NE[0]+1]
302 g_and_sigma=self._interpolateOnDomain(gravlist, shape, self._origin, self._spacing, self._meshlen)
303 return g_and_sigma[0]*[0,0,1], g_and_sigma[1]
304
305 def _determineExtents(self, ve):
306 f=netcdf_file(self._gravfile, 'r')
307 NX=0
308 for n in ['lon','longitude','x']:
309 if n in f.dimensions:
310 NX=f.dimensions[n]
311 break
312 if NX==0:
313 raise RuntimeError("Could not determine extents of data")
314 NY=0
315 for n in ['lat','latitude','y']:
316 if n in f.dimensions:
317 NY=f.dimensions[n]
318 break
319 if NY==0:
320 raise RuntimeError("Could not determine extents of data")
321
322 # find longitude and latitude variables
323 lon_name=None
324 for n in ['lon','longitude']:
325 if n in f.variables:
326 lon_name=n
327 longitude=f.variables.pop(n)
328 break
329 if lon_name is None:
330 raise RuntimeError("Could not determine longitude variable")
331 lat_name=None
332 for n in ['lat','latitude']:
333 if n in f.variables:
334 lat_name=n
335 latitude=f.variables.pop(n)
336 break
337 if lat_name is None:
338 raise RuntimeError("Could not determine latitude variable")
339
340 # try to figure out gravity variable name
341 grav_name=None
342 if len(f.variables)==1:
343 grav_name=f.variables.keys()[0]
344 else:
345 for n in f.variables.keys():
346 dims=f.variables[n].dimensions
347 if (lat_name in dims) and (lon_name in dims):
348 grav_name=n
349 break
350 if grav_name is None:
351 raise RuntimeError("Could not determine gravity variable")
352
353 # see if there is a wkt string to convert coordinates
354 try:
355 wkt_string=f.variables[grav_name].esri_pe_string
356 except:
357 wkt_string=None
358
359 # we don't trust actual_range & geospatial_lon_min/max since these
360 # fields do not appear to be updated for subset data.
361 # Getting min/max from the arrays is obviously not very efficient...
362 #lon_range=longitude.actual_range
363 #lat_range=latitude.actual_range
364 #lon_range=[f.geospatial_lon_min,f.geospatial_lon_max]
365 #lat_range=[f.geospatial_lat_min,f.geospatial_lat_max]
366 lon_range=longitude.data.min(),longitude.data.max()
367 lat_range=latitude.data.min(),latitude.data.max()
368 lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
369 origin=[lon_range[0],lat_range[0],ve[0]]
370 lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0],ve[1]-ve[0]]
371
372 f.close()
373
374 self._numDataPoints=[NX, NY, ve[2]]
375 self._origin=origin
376 # we are rounding to avoid interpolateOnDomain issues
377 self._spacing=[np.round(lengths[i]/(self._numDataPoints[i]-1)) for i in xrange(3)]
378 self._meshlen=[(self._numDataPoints[i]-1)*self._spacing[i] for i in xrange(3)]
379 self._wkt_string=wkt_string
380 self._lon=lon_name
381 self._lat=lat_name
382 self._grv=grav_name
383
384 def _createDomain(self, padding_l, padding_h):
385 NE=[self._numDataPoints[i]-1 for i in xrange(3)]
386 l=self._meshlen
387 NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
388 NE, l, self._origin)
389
390 self._meshlen=l_new
391 self.NE=NE_new
392 self._dataorigin=self._origin
393 self._origin=origin_new
394 lo=[(self._origin[i], self._origin[i]+l_new[i]) for i in xrange(3)]
395 NEdiff=[NE_new[i]-NE[i] for i in xrange(3)]
396 try:
397 dom=self._domainclass(*self.NE, l0=lo[0], l1=lo[1], l2=lo[2])
398 # ripley may adjust NE and length
399 self._meshlen=[sup(dom.getX()[i])-inf(dom.getX()[i]) for i in xrange(3)]
400 self.NE=[int(round(self._meshlen[i]/self._spacing[i])) for i in xrange(3)]
401 x=dom.getX()-[self._origin[i]+NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
402 mask=wherePositive(dom.getX()[2])
403
404 except TypeError:
405 dom=self._domainclass(*self.NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
406 x=dom.getX()-[NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
407 mask=wherePositive(x[2]+self._origin[2])
408
409 if self._constrainBottom:
410 M=3 # constrain bottom
411 else:
412 M=2 # do not constrain bottom
413 for i in xrange(M):
414 mask=mask + whereNegative(x[i]) + \
415 wherePositive(x[i]-l_new[i]+NEdiff[i]*self._spacing[i])
416 self._mask=wherePositive(mask)
417
418 self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
419 self.logger.debug(" length: %s x %s x %s"%(self._meshlen[0],self._meshlen[1],self._meshlen[2]))
420 self.logger.debug(" origin: %s x %s x %s"%(self._origin[0],self._origin[1],self._origin[2]))
421
422 return dom
423
424 def _readTopography(self):
425 f=netcdf_file(self._topofile, 'r')
426 lon=None
427 for n in ['lon','longitude']:
428 if n in f.variables:
429 lon=f.variables[n][:]
430 break
431 if lon is None:
432 raise RuntimeError("Could not determine longitude variable")
433 lat=None
434 for n in ['lat','latitude']:
435 if n in f.variables:
436 lat=f.variables[n][:]
437 break
438 if lat is None:
439 raise RuntimeError("Could not determine latitude variable")
440 alt=None
441 for n in ['altitude','alt']:
442 if n in f.variables:
443 alt=f.variables[n][:]
444 break
445 if alt is None:
446 raise RuntimeError("Could not determine altitude variable")
447
448 topodata=np.column_stack((lon,lat,alt))
449 f.close()
450 return topodata
451
452 def _readGravity(self):
453 f=netcdf_file(self._gravfile, 'r')
454 #lon=f.variables[self._lon][:]
455 #lat=f.variables[self._lat][:]
456 NE=[self._numDataPoints[i]-1 for i in xrange(2)]
457 lon=np.linspace(self._dataorigin[0], self._dataorigin[0]+NE[0]*self._spacing[0], NE[0]+1)
458 lat=np.linspace(self._dataorigin[1], self._dataorigin[1]+NE[1]*self._spacing[1], NE[1]+1)
459 lon,lat=np.meshgrid(lon,lat)
460 grav=f.variables[self._grv][:]
461 lon=lon.flatten()
462 lat=lat.flatten()
463 grav=grav.flatten()
464 alt=self._altOfData*np.ones(grav.shape)
465 # error value is an assumption
466 try:
467 missing=f.variables[self._grv].missing_value
468 err=np.where(grav==missing, 0.0, 20.0)
469 except AttributeError:
470 err=20.0*np.ones(lon.shape)
471 f.close()
472 # convert units
473 err=1e-6*err
474 grav=1e-6*grav
475 gravdata=np.column_stack((lon,lat,alt,grav,err))
476 return gravdata
477
478 ##############################################################################
479 class ERSDataSource(DataSource):
480 """
481 Data Source for ER Mapper raster data.
482 Note that this class only accepts a very specific type of ER Mapper data
483 input and will raise an exception if other data is found.
484 """
485 def __init__(self, domainclass, headerfile, datafile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
486 """
487 headerfile - usually ends in .ers
488 datafile - usually has the same name as the headerfile without '.ers'
489 """
490 super(ERSDataSource,self).__init__()
491 self._headerfile=headerfile
492 if datafile is None:
493 self._datafile=headerfile[:-4]
494 else:
495 self._datafile=datafile
496 self._domainclass=domainclass
497 self._readHeader(vertical_extents)
498 self._altOfData=alt_of_data
499
500 def _readHeader(self, ve):
501 metadata=open(self._headerfile, 'r').readlines()
502 # parse metadata
503 start=-1
504 for i in xrange(len(metadata)):
505 if metadata[i].strip() == 'DatasetHeader Begin':
506 start=i+1
507 if start==-1:
508 raise RuntimeError('Invalid ERS file (DatasetHeader not found)')
509
510 md_dict={}
511 section=[]
512 for i in xrange(start, len(metadata)):
513 line=metadata[i]
514 if line[-6:].strip() == 'Begin':
515 section.append(line[:-6].strip())
516 elif line[-4:].strip() == 'End':
517 if len(section)>0:
518 section.pop()
519 else:
520 vals=line.split('=')
521 if len(vals)==2:
522 key = vals[0].strip()
523 value = vals[1].strip()
524 fullkey='.'.join(section+[key])
525 md_dict[fullkey]=value
526
527 try:
528 if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
529 raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
530 except KeyError:
531 self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")
532
533 try:
534 NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
535 NY = int(md_dict['RasterInfo.NrOfLines'])
536 except:
537 raise RuntimeError("Could not determine extents of data")
538
539 try:
540 maskval = float(md_dict['RasterInfo.NullCellValue'])
541 except:
542 raise RuntimeError("Could not determine NullCellValue")
543
544 try:
545 spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
546 spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
547 except:
548 raise RuntimeError("Could not determine cell dimensions")
549
550 try:
551 if md_dict['CoordinateSpace.CoordinateType']=='EN':
552 originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
553 originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
554 elif md_dict['CoordinateSpace.CoordinateType']=='LL':
555 originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
556 originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
557 else:
558 raise RuntimeError("Unknown CoordinateType")
559 except:
560 self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
561 originX,originY = 0.0, 0.0
562
563 if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
564 # it appears we have lat/lon coordinates so we need to convert
565 # the origin and spacing. Try using gdal to get the wkt if available:
566 try:
567 from osgeo import gdal
568 ds=gdal.Open(self._headerfile)
569 wkt=ds.GetProjection()
570 except:
571 wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
572 self.logger.warn('GDAL not available, assuming GDA94 data')
573 originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
574 op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
575 # we are rounding to avoid interpolateOnDomain issues
576 spacingX=np.round(op1X-originX_UTM)
577 spacingY=np.round(op1Y-originY_UTM)
578 originX=np.round(1000*originX_UTM)/1000.
579 originY=np.round(1000*originY_UTM)/1000.
580
581 # data sets have origin in top-left corner so y runs top-down
582 self._dataorigin=[originX, originY]
583 originY-=(NY-1)*spacingY
584 self._maskval=maskval
585 self._spacing=[spacingX, spacingY, (ve[1]-ve[0])/(ve[2]-1)]
586 self._numDataPoints = [NX, NY, ve[2]]
587 self._origin = [originX, originY, ve[0]]
588 self._meshlen=[self._numDataPoints[i]*self._spacing[i] for i in xrange(3)]
589
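# A minimal sketch of the header fragment the parser above looks for
# (indicative only; real ER Mapper headers carry many more fields and the
# values here are placeholders):
#
#   DatasetHeader Begin
#       CoordinateSpace Begin
#           CoordinateType  = EN
#           Projection      = "NUTM53"
#       CoordinateSpace End
#       RasterInfo Begin
#           CellType        = IEEE4ByteReal
#           NullCellValue   = -99999
#           CellInfo Begin
#               Xdimension  = 100
#               Ydimension  = 100
#           CellInfo End
#           NrOfLines        = 121
#           NrOfCellsPerLine = 171
#           RegistrationCoord Begin
#               Eastings    = 300000
#               Northings   = 6200000
#           RegistrationCoord End
#       RasterInfo End
#   DatasetHeader End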
590 def getDensityMask(self):
591 return self._mask
592
593 def getGravityAndStdDev(self):
594 gravlist=self._readGravity() # x,y,z,g,s
595 shape=[self.NE[2]+1, self.NE[1]+1, self.NE[0]+1]
596 g_and_sigma=self._interpolateOnDomain(gravlist, shape, self._origin, self._spacing, self._meshlen)
597 return g_and_sigma[0]*[0,0,1], g_and_sigma[1]
598
599 def _createDomain(self, padding_l, padding_h):
600 NE=[self._numDataPoints[i]-1 for i in xrange(3)]
601 l=[NE[i]*self._spacing[i] for i in xrange(3)]
602 NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
603 NE, l, self._origin)
604
605 self._meshlen=l_new
606 self.NE=NE_new
607 self._origin=origin_new
608 lo=[(self._origin[i], self._origin[i]+l_new[i]) for i in xrange(3)]
609 NEdiff=[NE_new[i]-NE[i] for i in xrange(3)]
610 try:
611 dom=self._domainclass(*self.NE, l0=lo[0], l1=lo[1], l2=lo[2])
612 x=dom.getX()-[self._origin[i]+NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
613 mask=wherePositive(dom.getX()[2])
614
615 except TypeError:
616 dom=self._domainclass(*self.NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
617 x=dom.getX()-[NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
618 mask=wherePositive(x[2]+self._origin[2])
619
620 if self._constrainBottom:
621 M=3 # constrain bottom
622 else:
623 M=2 # do not constrain bottom
624 for i in xrange(M):
625 mask=mask + whereNegative(x[i]) + \
626 wherePositive(x[i]-l_new[i]+NEdiff[i]*self._spacing[i])
627 self._mask=wherePositive(mask)
628
629 self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
630 self.logger.debug(" length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
631 self.logger.debug(" origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))
632
633 return dom
634
635 def _readGravity(self):
636 f=open(self._datafile, 'r')
637 n=self._numDataPoints[0]*self._numDataPoints[1]
638 grav=[]
639 err=[]
640 for i in xrange(n):
641 v=struct.unpack('f',f.read(4))[0]
642 grav.append(v)
643 if abs((self._maskval-v)/v) < 1e-6:
644 err.append(0.)
645 else:
646 err.append(1.)
647 f.close()
648
649 NE=[self._numDataPoints[i] for i in xrange(2)]
650 x=self._dataorigin[0]+np.arange(start=0, stop=NE[0]*self._spacing[0], step=self._spacing[0])
651 # data sets have origin in top-left corner so y runs top-down
652 y=self._dataorigin[1]-np.arange(start=0, stop=NE[1]*self._spacing[1], step=self._spacing[1])
653 x,y=np.meshgrid(x,y)
654 x,y=x.flatten(),y.flatten()
655 alt=self._altOfData*np.ones((n,))
656
657 # convert units
658 err=2e-5*np.array(err)
659 grav=1e-5*np.array(grav)
660 gravdata=np.column_stack((x,y,alt,grav,err))
661 return gravdata
662
663 ##############################################################################
664 class SourceFeature(object):
665 """
666 A feature adds a density distribution to (parts of) a domain of a synthetic
667 data source, for example a layer of a specific rock type or a simulated
668 ore body.
669 """
670 def getDensity(self):
671 """
672 Returns the density for the area covered by mask. It can be constant
673 or a data object with spatial dependency.
674 """
675 raise NotImplementedError
676 def getMask(self, x):
677 """
678 Returns the mask of the area of interest for this feature. That is,
679 mask is non-zero where the density returned by getDensity() should be
680 applied, zero elsewhere.
681 """
682 raise NotImplementedError
683
684 class SmoothAnomaly(SourceFeature):
685 def __init__(self, lx, ly, lz, x, y, depth, rho_inner, rho_outer):
686 self.x=x
687 self.y=y
688 self.lx=lx
689 self.ly=ly
690 self.lz=lz
691 self.depth=depth
692 self.rho_inner=rho_inner
693 self.rho_outer=rho_outer
694 self.rho=None
695
696 def getDensity(self):
697 return self.rho
698
699 def getMask(self, x):
700 DIM=x.getDomain().getDim()
701 m=whereNonNegative(x[DIM-1]-(sup(x[DIM-1])-self.depth-self.lz/2)) * whereNonPositive(x[DIM-1]-(sup(x[DIM-1])-self.depth+self.lz/2)) \
702 *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
703 if DIM>2:
704 m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
705 if self.rho is None:
706 alpha=-log(abs(self.rho_outer/self.rho_inner))*4
707 rho=exp(-alpha*((x[0]-self.x)/self.lx)**2)
708 rho=rho*exp(-alpha*((x[DIM-1]-(sup(x[DIM-1])-self.depth))/self.lz)**2)
709 self.rho=maximum(abs(self.rho_outer), abs(self.rho_inner*rho))
710 if self.rho_inner<0: self.rho=-self.rho
711 return m
712
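# Illustrative sketch of how a feature is combined with the SyntheticDataSource
# class below (all numbers are placeholders):
#
#   features = [SmoothAnomaly(lx=30e3, ly=20e3, lz=18e3, x=35e3, y=3e3,
#                             depth=30e3, rho_inner=200., rho_outer=1e-6)]
#   source = SyntheticDataSource(DIM=3, NE=20, l=100e3, h=60e3,
#                                features=features)
#   dom = source.getDomain()
#   g, sigma = source.getGravityAndStdDev()
#   rho_ref = source.getReferenceDensity()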
713 ##############################################################################
714 class SyntheticDataSource(DataSource):
715 def __init__(self, DIM, NE, l, h, features):
716 super(SyntheticDataSource,self).__init__()
717 self._features = features
718 self.DIM=DIM
719 self.NE=NE
720 self.l=l
721 self.h=h
722
723 def _createDomain(self, padding_l, padding_h):
724 NE_H=self.NE
725 NE_L=int((self.l/self.h)*NE_H+0.5)
726 l=[self.l]*(self.DIM-1)+[self.h]
727 NE=[NE_L]*(self.DIM-1)+[NE_H]
728 origin=[0.]*self.DIM
729 NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
730 NE, l, origin)
731
732 self.NE=NE_new
733 self.l=l_new[0]
734 self.h=l_new[self.DIM-1]
735
736 if self.DIM==2:
737 from esys.finley import Rectangle
738 dom = Rectangle(n0=NE_new[0], n1=NE_new[1], l0=l_new[0], l1=l_new[1])
739 self._x = dom.getX() + origin_new
740 self.logger.debug("Domain size: %d x %d elements"%(NE_new[0], NE_new[1]))
741 self.logger.debug(" length: %g x %g"%(l_new[0],l_new[1]))
742 self.logger.debug(" origin: %g x %g"%(origin_new[0],origin_new[1]))
743 else:
744 from esys.finley import Brick
745 dom = Brick(n0=NE_new[0], n1=NE_new[1], n2=NE_new[2], l0=l_new[0], l1=l_new[1], l2=l_new[2])
746 self._x = dom.getX() + origin_new
747 self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
748 self.logger.debug(" length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
749 self.logger.debug(" origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))
750
751 dz=l_new[self.DIM-1]/NE_new[self.DIM-1]
752 self._g_mask=wherePositive(dom.getX()[0]-origin_new[0]) \
753 * whereNegative(dom.getX()[0]-(l_new[0]-origin_new[0])) \
754 * whereNonNegative(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+origin_new[self.DIM-1])) \
755 * whereNonPositive(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+(origin_new[self.DIM-1]+dz)))
756 self._mask=whereNegative(self._x[self.DIM-1]) + \
757 wherePositive(self._x[self.DIM-1]-l[self.DIM-1])
758 for i in xrange(self.DIM-1):
759 self._mask=self._mask + whereNegative(self._x[i]) + \
760 wherePositive(self._x[i]-l[i])
761 self._mask=wherePositive(self._mask)
762
763 rho_ref=0.
764 for f in self._features:
765 m=f.getMask(self._x)
766 rho_ref = rho_ref * (1-m) + f.getDensity() * m
767 self._rho=rho_ref
768
769 return dom
770
771 def getDensityMask(self):
772 return self._mask
773
774 def getReferenceDensity(self):
775 return self._rho
776
777 def getGravityAndStdDev(self):
778 pde=LinearSinglePDE(self.getDomain())
779 G=6.6742e-11*U.m**3/(U.kg*U.sec**2)
780 m_psi_ref=0.
781 for i in range(self.DIM):
782 m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
783 + whereZero(self._x[i]-sup(self._x[i]))
784
785 pde.setValue(A=kronecker(self.getDomain()), Y=-4*np.pi*G*self._rho, q=m_psi_ref)
786 pde.setSymmetryOn()
787 psi_ref=pde.getSolution()
788 del pde
789 g=-grad(psi_ref)
790 sigma=self._g_mask
791 return g,sigma
792
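# Note on getGravityAndStdDev() above: the reference potential psi_ref solves
# the Poisson equation div(grad(psi_ref)) = 4*pi*G*rho_ref with psi_ref fixed
# to zero on all domain boundaries (mask m_psi_ref), and the synthetic gravity
# is g = -grad(psi_ref); sigma returned here is the measurement-surface mask
# self._g_mask.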
