/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Contents of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 3988 - (show annotations)
Mon Sep 24 01:08:19 2012 UTC (6 years, 11 months ago) by caltinay
File MIME type: text/x-python
File size: 30832 byte(s)
Scale coordinates if pyproj is not available

1
2 ##############################################################################
3 #
4 # Copyright (c) 2003-2012 by University of Queensland
5 # http://www.uq.edu.au
6 #
7 # Primary Business: Queensland, Australia
8 # Licensed under the Open Software License version 3.0
9 # http://www.opensource.org/licenses/osl-3.0.php
10 #
11 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12 # Development since 2012 by School of Earth Sciences
13 #
14 ##############################################################################
15
16 __copyright__="""Copyright (c) 2003-2012 by University of Queensland
17 http://www.uq.edu.au
18 Primary Business: Queensland, Australia"""
19 __license__="""Licensed under the Open Software License version 3.0
20 http://www.opensource.org/licenses/osl-3.0.php"""
21 __url__="https://launchpad.net/escript-finley"
22
23 __all__ = ['DataSource','UBCDataSource','ERSDataSource','SyntheticDataSource','SmoothAnomaly']
24
25 import logging
26 import numpy as np
27 import struct
28 from esys.escript import *
29 from esys.escript.linearPDEs import *
30 import esys.escript.unitsSI as U
31 from esys.ripley import *
32
33 try:
34 from scipy.io.netcdf import netcdf_file
35 __all__ += ['NetCDFDataSource']
36 except:
37 pass
38
39 def LatLonToUTM(lon, lat, wkt_string=None):
40 """
41 Converts one or more longitude,latitude pairs to the corresponding x,y
42 coordinates in the Universal Transverse Mercator projection.
43 If wkt_string is not given or invalid or the gdal module is not available
44 to convert the string, then the input values are assumed to be given in the
45 Clarke 1866 projection.
46 """
47
48 # not really optimal: if pyproj is not installed we return the input
49 # values without modification.
50 try:
51 import pyproj
52 except:
53 print("Warning, pyproj not available. Domain extents will be wrong")
54 return lon*1000., lat*1000.
55
56 # determine UTM zone from the input data
57 zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
58 try:
59 import osgeo.osr
60 srs = osgeo.osr.SpatialReference()
61 srs.ImportFromWkt(wkt_string)
62 p_src = pyproj.Proj(srs.ExportToProj4())
63 except:
64 p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
65 # we assume southern hemisphere here
66 p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
67 x,y=pyproj.transform(p_src, p_dest, lon, lat)
68 return x,y
69
class DataSource(object):
    """
    A class that provides survey data for the inversion process.

    Subclasses implement ``getDataExtents``, ``getVerticalExtents``,
    ``getDomainClass`` and ``getGravityAndStdDev`` for a specific file
    format or data generator; this base class builds the (padded)
    computational domain and the density mask from those extents.
    """
    # this is currently specific to gravity inversion and should be generalised

    def __init__(self):
        """
        Initialises default padding (10% on each axis), side/bottom
        constraint flags and a logger named after the concrete subclass.
        """
        self._constrainBottom=False
        self._constrainSides=True
        self._domain=None
        self._pad_l=0.1
        self._pad_h=0.1
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)

    def _addPadding(self, pad_l, pad_h, NE, l, origin):
        """
        Helper method that computes new number of elements, length and origin
        after adding padding to the input values.

        :param pad_l: horizontal padding; relative fraction if 0 < pad_l < 1,
                      otherwise an absolute number of elements per side
        :param pad_h: vertical padding, interpreted like pad_l
        :param NE: number of elements per dimension (without padding)
        :param l: domain length per dimension
        :param origin: domain origin per dimension
        :return: tuple (NE_new, l_new, origin_new)
        """
        DIM=len(NE)
        frac=[0.]*DIM
        # padding is applied to each side so multiply by 2 to get the total
        # amount of padding per dimension
        if pad_l>0 and pad_l<1:
            for i in xrange(DIM-1):
                frac[i]=2.*pad_l
        elif pad_l>=1:
            for i in xrange(DIM-1):
                frac[i]=2.*pad_l/float(NE[i])
        if pad_h>0 and pad_h<1:
            frac[DIM-1]=2.*pad_h
        elif pad_h>=1:
            frac[DIM-1]=2.*pad_h/(float(NE[DIM-1]))

        # calculate new number of elements
        NE_new=[int(NE[i]*(1+frac[i])) for i in xrange(DIM)]
        NEdiff=[NE_new[i]-NE[i] for i in xrange(DIM)]
        spacing=[l[i]/NE[i] for i in xrange(DIM)]
        l_new=[NE_new[i]*spacing[i] for i in xrange(DIM)]
        # shift origin by half the added elements so padding is split
        # evenly between both sides of each dimension
        origin_new=[origin[i]-NEdiff[i]/2.*spacing[i] for i in xrange(DIM)]
        return NE_new, l_new, origin_new

    def _interpolateOnDomain(self, data):
        """
        Helper method that interpolates data arrays onto the domain.
        Currently this works like a nearest neighbour mapping, i.e. values
        are directly inserted into data objects at closest location.

        :param data: iterable of records; each record holds dim coordinate
                     values followed by one value per output array
        :return: list of escript Scalar objects, one per data column
        """
        dom=self.getDomain()
        dim=dom.getDim()
        # determine number of values required per element
        DPP=Scalar(0., Function(dom)).getNumberOfDataPoints()
        for i in xrange(dim):
            DPP=DPP/self._dom_NE[i]
        DPP=int(DPP)

        # idx_mult.dot([x,y,z]) = flat index into data object
        idx_mult=np.array([DPP]+self._dom_NE[:dim-1]).cumprod()

        # separate data arrays and coordinates
        num_arrays=len(data[0])-dim
        arrays=[]
        for i in xrange(num_arrays):
            d=Scalar(0., Function(dom))
            d.expand()
            arrays.append(d)

        for entry in data:
            # element index of the record's coordinates in each dimension
            index=[int((entry[i]-self._dom_origin[i])/self._spacing[i]) for i in xrange(dim)]
            index=int(idx_mult.dot(index))
            for i in xrange(num_arrays):
                # set all data points of the containing element to the value
                for p in xrange(DPP):
                    arrays[i].setValueOfDataPoint(index+p, entry[dim+i])

        return arrays

    def _interpolateOnDomain_old(self, data):
        """
        Old interpolation method. Works only on ContinuousFunction and thus
        produces wrong values once interpolated on Function.
        """
        dom=self.getDomain()
        dim=dom.getDim()
        # shape = number of data points/nodes in each dimension
        shape=()
        for i in xrange(dim):
            shape=(self._dom_NE[i]+1,)+shape
        # separate data arrays and coordinates
        arrays=np.zeros(((len(data[0])-dim),)+shape)
        num_arrays=arrays.shape[0]
        for entry in data:
            index=()
            for i in xrange(dim):
                index=(int((entry[i]-self._dom_origin[i])/self._spacing[i]),)+index
            for i in xrange(num_arrays):
                arrays[i][index]=entry[dim+i]
        x=dom.getX()
        delta=[self._dom_len[i]/(shape[dim-i-1]-1) for i in xrange(dim)]
        realorigin=[inf(x[i]) for i in xrange(dim)]
        res=[]
        for i in xrange(num_arrays):
            # 1e9 is the undefined-value cutoff passed to interpolateTable
            res.append(interpolateTable(arrays[i], x[:dim], realorigin, delta, 1e9))
        return res

    def setPadding(self, pad_l=0.1, pad_h=0.1):
        """
        Sets the amount of padding around the dataset. If pad_l/pad_h is >=1
        they are treated as number of elements to be added to the domain.
        If 0 < pad_l;pad_h < 1, the padding amount is relative.

        :param pad_l: horizontal padding (both sides)
        :param pad_h: vertical padding (top and bottom)
        """
        self._pad_l=pad_l
        self._pad_h=pad_h

    def setConstraints(self, bottom=False, sides=True):
        """
        If `bottom` is True, then the density mask will be set to 1 in the
        padding area at the bottom of the domain. By default this area is
        unconstrained. Similarly, if `sides` is True (default) then the
        horizontal padding area is constrained, otherwise not.
        """
        self._constrainBottom=bottom
        self._constrainSides=sides

    def getDomain(self):
        """
        Returns a domain that spans the data area plus padding.
        The domain is created lazily on first access and cached.
        """
        if self._domain is None:
            self._domain=self._createDomain(self._pad_l, self._pad_h)
        return self._domain

    def getDensityMask(self):
        """
        Returns the density mask data object, where mask has value 1 on the
        padding area, 0 elsewhere.
        """
        return self._mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as a
        tuple. Must be implemented by subclasses.
        """
        raise NotImplementedError

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) ), where
        x0, y0 = coordinates of data origin
        nx, ny = number of data points in x and y
        dx, dy = spacing of data points in x and y
        Must be implemented by subclasses.
        """
        raise NotImplementedError

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz), where
        z0 = minimum z coordinate (origin)
        nz = number of nodes in z direction
        dz = spacing of nodes (= cell size in z)
        Must be implemented by subclasses.
        """
        raise NotImplementedError

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        Must be implemented by subclasses.
        """
        raise NotImplementedError

    def _createDomain(self, padding_l, padding_h):
        """
        creates and returns an escript domain that spans the entire area of
        available data plus a buffer zone. Also computes and stores the
        density mask and various domain properties used by interpolation.
        """
        X0, NX, DX = self.getDataExtents()
        z0, nz, dz = self.getVerticalExtents()

        # number of elements (without padding)
        NE = [NX[0], NX[1], nz]

        # origin of domain (without padding)
        origin = [X0[0], X0[1], z0]
        origin = [np.round(oi) for oi in origin]

        # cell size / point spacing
        # rounded to integral values to avoid _interpolateOnDomain issues
        self._spacing = DX+[dz]
        self._spacing = [float(np.round(si)) for si in self._spacing]

        # length of domain (without padding)
        l = [NE[i]*self._spacing[i] for i in xrange(len(NE))]

        # now add padding to the values
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, origin)

        # number of padding elements per side
        NE_pad=[(NE_new[i]-NE[i])/2 for i in xrange(3)]

        self._dom_NE_pad = NE_pad
        self._dom_len = l_new
        self._dom_NE = NE_new
        self._dom_origin = origin_new
        lo=[(origin_new[i], origin_new[i]+l_new[i]) for i in xrange(3)]
        # NOTE(review): first try passing (min,max) tuples as l0/l1/l2;
        # domain classes that only accept plain lengths raise TypeError and
        # are handled in the except branch -- confirm against ripley API
        try:
            dom=self.getDomainClass()(*self._dom_NE, l0=lo[0], l1=lo[1], l2=lo[2])
            # ripley may internally adjust NE and length, so recompute
            self._dom_len=[sup(dom.getX()[i])-inf(dom.getX()[i]) for i in xrange(3)]
            self._dom_NE=[int(self._dom_len[i]/self._spacing[i]) for i in xrange(3)]
            # x = coordinates relative to the (unpadded) data origin
            x=dom.getX()-[self._dom_origin[i]+NE_pad[i]*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(dom.getX()[2])

        except TypeError:
            dom=self.getDomainClass()(*self._dom_NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
            x=dom.getX()-[NE_pad[i]*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(x[2]+self._dom_origin[2])

        # prepare density mask (=1 at padding area, 0 else)
        if self._constrainSides:
            for i in xrange(2):
                mask=mask + whereNegative(x[i]) + \
                        wherePositive(x[i]-l_new[i]+2*NE_pad[i]*self._spacing[i])

        if self._constrainBottom:
            mask = mask + whereNegative(x[2])
        self._mask=wherePositive(mask)

        self.logger.debug("Domain size: %d x %d x %d elements"%(self._dom_NE[0],self._dom_NE[1],self._dom_NE[2]))
        self.logger.debug(" length: %g x %g x %g"%(self._dom_len[0],self._dom_len[1],self._dom_len[2]))
        self.logger.debug(" origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        return dom
303
304
305 ##############################################################################
class UBCDataSource(DataSource):
    """
    Data source for gravity data in UBC-GIF format: a mesh file describing
    the grid, a gravity observation file and, optionally, a topography file.
    """
    def __init__(self, domainclass, meshfile, gravfile, topofile=None):
        """
        :param domainclass: domain generator class (e.g. esys.ripley.Brick)
        :param meshfile: filename of the UBC mesh definition
        :param gravfile: filename of the gravity observations
                         (x, y, z, anomaly in mGal, stddev per line)
        :param topofile: optional filename of topography data
        """
        super(UBCDataSource,self).__init__()
        self.__meshfile=meshfile
        self.__gravfile=gravfile
        self.__topofile=topofile
        self.__domainclass=domainclass
        self.__readMesh()

    def __readMesh(self):
        """
        Parses the mesh file to determine grid size, origin and spacing.
        """
        # context manager ensures the file is closed even if parsing fails
        with open(self.__meshfile) as f:
            meshdata=f.readlines()
        numDataPoints=meshdata[0].split()
        origin=meshdata[1].split()
        # list() keeps the results indexable (identical in Python 2,
        # required under Python 3 where map() returns an iterator)
        self.__nPts=list(map(int, numDataPoints))
        self.__origin=list(map(float, origin))
        self.__delta=[float(X.split('*')[1]) for X in meshdata[2:]]
        # vertical data is upside down
        self.__origin[2]-=(self.__nPts[2]-1)*self.__delta[2]
        self.logger.debug("Data Source: %s (mesh file: %s)"%(self.__gravfile, self.__meshfile))

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    #def getDensityMask(self):
    #    topodata=self.__readTopography()
    #    mask=self._interpolateOnDomain(topodata)
    #    mask=wherePositive(self.getDomain().getX()[2]-mask[0])
    #    return mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly (as a vertical vector field) and the
        standard deviation interpolated onto the domain.
        """
        gravlist=self.__readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def __readTopography(self):
        """
        Reads the topography file into an (n, 3) numpy array of x, y, z.
        """
        with open(self.__topofile) as f:
            n=int(f.readline())
            topodata=np.zeros((n,3))
            for i in xrange(n):
                x=f.readline().split()
                x=list(map(float, x))
                topodata[i]=x
        return topodata

    def __readGravity(self):
        """
        Reads the gravity file into an (n, 5) numpy array of x, y, z,
        anomaly (converted to m/s^2) and rescaled standard deviation.
        """
        with open(self.__gravfile) as f:
            n=int(f.readline())
            gravdata=np.zeros((n,5))
            for i in xrange(n):
                x=f.readline().split()
                x=list(map(float, x)) # x, y, z, anomaly in mGal, stddev
                # convert gravity anomaly units to m/s^2 and rescale error
                x[3]*=-1e-5
                x[4]*=1e-5
                gravdata[i]=x
        return gravdata
379
380 ##############################################################################
class NetCDFDataSource(DataSource):
    """
    Data source for gravity data read from a netCDF file (via
    scipy.io.netcdf). Longitude/latitude extents are converted to UTM
    coordinates using `LatLonToUTM`.
    """
    def __init__(self, domainclass, gravfile, topofile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
        """
        :param domainclass: domain generator class (e.g. esys.ripley.Brick)
        :param gravfile: netCDF file holding the gravity anomaly grid
        :param topofile: optional netCDF file holding topography data
        :param vertical_extents: (alt_min, alt_max, num_points)
        :param alt_of_data: altitude of measurements
        """
        super(NetCDFDataSource,self).__init__()
        self.__topofile=topofile
        self.__gravfile=gravfile
        self.__domainclass=domainclass
        self.__determineExtents(vertical_extents)
        self.__altOfData=alt_of_data

    def __determineExtents(self, ve):
        """
        Opens the gravity file to determine grid size, origin, spacing and
        the names of the longitude/latitude/gravity variables.

        :param ve: vertical extents tuple (alt_min, alt_max, num_points)
        """
        self.logger.debug("Data Source: %s"%self.__gravfile)
        f=netcdf_file(self.__gravfile, 'r')
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables
        # (pop() removes them so only data variables remain below)
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out gravity variable name
        grav_name=None
        if len(f.variables)==1:
            # list() so indexing works under Python 3 dict views as well
            grav_name=list(f.variables.keys())[0]
        else:
            for n in list(f.variables.keys()):
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    grav_name=n
                    break
        if grav_name is None:
            raise RuntimeError("Could not determine gravity variable")

        # see if there is a wkt string to convert coordinates
        try:
            wkt_string=f.variables[grav_name].esri_pe_string
        except AttributeError:
            wkt_string=None

        # we don't trust actual_range & geospatial_lon_min/max since subset
        # data does not seem to have these fields updated it seems.
        # Getting min/max from the arrays is obviously not very efficient...
        #lon_range=longitude.actual_range
        #lat_range=latitude.actual_range
        #lon_range=[f.geospatial_lon_min,f.geospatial_lon_max]
        #lat_range=[f.geospatial_lat_min,f.geospatial_lat_max]
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        origin=[lon_range[0],lat_range[0],ve[0]]
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0],ve[1]-ve[0]]

        f.close()

        self.__nPts=[NX, NY, ve[2]]
        self.__origin=origin
        # we are rounding to avoid interpolateOnDomain issues
        self.__delta=[np.round(lengths[i]/(self.__nPts[i]-1)) for i in xrange(3)]
        self.__wkt_string=wkt_string
        self.__lon=lon_name
        self.__lat=lat_name
        self.__grv=grav_name

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly (as a vertical vector field) and the
        standard deviation interpolated onto the domain.
        """
        gravlist=self._readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def _readTopography(self):
        """
        Reads the topography file and returns an (n, 3) array of
        lon, lat, altitude.
        """
        f=netcdf_file(self.__topofile, 'r')
        lon=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon=f.variables[n][:]
                break
        if lon is None:
            raise RuntimeError("Could not determine longitude variable")
        lat=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat=f.variables[n][:]
                break
        if lat is None:
            raise RuntimeError("Could not determine latitude variable")
        alt=None
        for n in ['altitude','alt']:
            if n in f.variables:
                alt=f.variables[n][:]
                break
        if alt is None:
            raise RuntimeError("Could not determine altitude variable")

        topodata=np.column_stack((lon,lat,alt))
        f.close()
        return topodata

    def _readGravity(self):
        """
        Reads the gravity variable, reconstructs the lon/lat grid from the
        previously determined extents and returns an (n, 5) array of
        lon, lat, alt, gravity (converted to m/s^2) and standard error.
        """
        f=netcdf_file(self.__gravfile, 'r')
        #lon=f.variables[self.__lon][:]
        #lat=f.variables[self.__lat][:]
        NE=[self.__nPts[i]-1 for i in xrange(2)]
        lon=np.linspace(self.__origin[0], self.__origin[0]+NE[0]*self.__delta[0], NE[0]+1)
        lat=np.linspace(self.__origin[1], self.__origin[1]+NE[1]*self.__delta[1], NE[1]+1)
        lon,lat=np.meshgrid(lon,lat)
        grav=f.variables[self.__grv][:]
        # read the (optional) missing_value attribute while the file is
        # still open -- previously this was attempted after close()
        try:
            missing=f.variables[self.__grv].missing_value
        except AttributeError:
            missing=None
        f.close()
        lon=lon.flatten()
        lat=lat.flatten()
        grav=grav.flatten()
        alt=self.__altOfData*np.ones(grav.shape)
        # error value is an assumption: 20 everywhere, 0 at missing data
        if missing is not None:
            err=np.where(grav==missing, 0.0, 20.0)
        else:
            err=20.0*np.ones(lon.shape)
        # convert units
        err=1e-6*err
        grav=1e-6*grav
        gravdata=np.column_stack((lon,lat,alt,grav,err))
        return gravdata
549
550 ##############################################################################
class ERSDataSource(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, headerfile, datafile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
        """
        :param headerfile: usually ends in .ers
        :param datafile: usually has the same name as the headerfile
                         without '.ers'; derived from headerfile if omitted
        :param vertical_extents: (alt_min, alt_max, num_points)
        :param alt_of_data: altitude of measurements
        """
        super(ERSDataSource,self).__init__()
        self.__headerfile=headerfile
        if datafile is None:
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__readHeader(vertical_extents)
        self.__altOfData=alt_of_data

    def __readHeader(self, ve):
        """
        Parses the ER Mapper header file to determine grid size, origin,
        spacing and the null-cell value. Geodetic (lat/lon) coordinates are
        converted to UTM via LatLonToUTM.

        :param ve: vertical extents tuple (alt_min, alt_max, num_points)
        """
        self.logger.debug("Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        # context manager ensures the header file is closed after reading
        with open(self.__headerfile, 'r') as f:
            metadata=f.readlines()
        # parse metadata
        start=-1
        for i in xrange(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # flatten the nested Begin/End sections into dotted keys
        md_dict={}
        section=[]
        for i in xrange(start, len(metadata)):
            line=metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except (KeyError, ValueError):
            maskval = 99999

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine cell dimensions")

        # RuntimeError included so an unknown CoordinateType is downgraded
        # to a warning like the other failure modes
        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except (KeyError, ValueError, RuntimeError):
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        # .get() so a missing Projection entry is treated as non-geodetic
        # instead of raising an uncaught KeyError
        if 'GEODETIC' in md_dict.get('CoordinateSpace.Projection', ''):
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except Exception:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
            op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolateOnDomain issues
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin=[originX, originY]
        originY-=(NY-1)*spacingY
        self.__maskval=maskval
        spacingZ=np.round(float(ve[1]-ve[0])/(ve[2]-1))
        self.__delta = [spacingX, spacingY, spacingZ]
        self.__nPts = [NX, NY, ve[2]]
        self.__origin = [originX, originY, ve[0]]

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return Brick

    def getGravityAndStdDev(self):
        """
        Reads the raster data directly into a ReducedFunction data object
        and returns the gravity anomaly (as a vertical vector field) plus a
        standard deviation that is zero at masked (null) cells.
        """
        nValues=self.__nPts[:2]+[1]
        first=self._dom_NE_pad[:2]+[self._dom_NE_pad[2]+int((self.__altOfData-self.__origin[2])/self.__delta[2])]
        # NOTE(review): ripleycpp presumably comes in via the
        # 'from esys.ripley import *' at module level -- confirm
        g=ripleycpp._readBinaryGrid(self.__datafile,
                ReducedFunction(self.getDomain()),
                first, nValues, (), self.__maskval)
        # sigma is 0 where the grid holds the null value, 1 elsewhere
        sigma=whereNonZero(g-self.__maskval)
        g=g*1e-6
        sigma=sigma*2e-6
        return g*[0,0,1], sigma
690
691
692 ##############################################################################
class SourceFeature(object):
    """
    Abstract base for features that add a density distribution to (parts
    of) the domain of a synthetic data source -- for example a layer of a
    specific rock type or a simulated ore body. Concrete features must
    implement both getDensity() and getMask().
    """

    def getDensity(self):
        """
        Returns the density for the area covered by the mask; this may be
        a constant or a data object with spatial dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature: non-zero
        wherever the density returned by getDensity() should be applied,
        zero elsewhere.
        """
        raise NotImplementedError
712
class SmoothAnomaly(SourceFeature):
    """
    A box-shaped density anomaly centred horizontally at (x, y) and
    vertically at the given depth below the top of the domain, whose
    density profile falls off smoothly from rho_inner at the centre
    towards rho_outer.
    """

    def __init__(self, lx, ly, lz, x, y, depth, rho_inner, rho_outer):
        """
        :param lx: extent of the box in x
        :param ly: extent of the box in y
        :param lz: vertical extent of the box
        :param x: horizontal x position of the box centre
        :param y: horizontal y position of the box centre
        :param depth: depth of the box centre below the domain top
        :param rho_inner: density at the centre of the anomaly
        :param rho_outer: density towards the edge of the anomaly
        """
        self.lx, self.ly, self.lz = lx, ly, lz
        self.x, self.y = x, y
        self.depth = depth
        self.rho_inner = rho_inner
        self.rho_outer = rho_outer
        # density data object; built lazily on the first getMask() call
        self.rho = None

    def getDensity(self):
        """
        Returns the density distribution (None until getMask() has been
        called at least once).
        """
        return self.rho

    def getMask(self, x):
        """
        Returns a mask that is non-zero inside the anomaly box. As a side
        effect the smooth density profile is computed on the first call.
        """
        dim = x.getDomain().getDim()
        vert = x[dim-1]
        # vertical centre of the box, measured down from the domain top
        centre_z = sup(vert) - self.depth
        msk = whereNonNegative(vert - (centre_z - self.lz/2)) \
                * whereNonPositive(vert - (centre_z + self.lz/2)) \
                * whereNonNegative(x[0] - (self.x - self.lx/2)) \
                * whereNonPositive(x[0] - (self.x + self.lx/2))
        if dim > 2:
            msk = msk * (whereNonNegative(x[1] - (self.y - self.ly/2))
                         * whereNonPositive(x[1] - (self.y + self.ly/2)))
        if self.rho is None:
            # Gaussian falloff scaled so the profile reaches roughly
            # rho_outer at the box boundary
            decay = -log(abs(self.rho_outer/self.rho_inner))*4
            profile = exp(-decay*((x[0] - self.x)/self.lx)**2)
            profile = profile * exp(-decay*((vert - centre_z)/self.lz)**2)
            self.rho = maximum(abs(self.rho_outer), abs(self.rho_inner*profile))
            if self.rho_inner < 0:
                self.rho = -self.rho
        return msk
741
742 ##############################################################################
class SyntheticDataSource(DataSource):
    """
    A data source that produces synthetic gravity measurements from a list
    of density features (see `SourceFeature`) by solving a gravity
    potential PDE on a finley domain.
    """
    def __init__(self, DIM, NE, l, h, features):
        """
        :param DIM: spatial dimensionality (2 or 3)
        :param NE: number of elements in the vertical direction
        :param l: horizontal length of the domain
        :param h: height of the domain
        :param features: list of `SourceFeature` objects defining the
                         reference density
        """
        super(SyntheticDataSource,self).__init__()
        self._features = features
        self.DIM=DIM
        self.NE=NE
        self.l=l
        self.h=h

    def _createDomain(self, padding_l, padding_h):
        """
        Creates a finley Rectangle (2D) or Brick (3D) spanning the data
        area plus padding and evaluates the reference density and the
        masks on it.
        """
        NE_H=self.NE
        # horizontal element count chosen so elements are roughly square
        NE_L=int((self.l/self.h)*NE_H+0.5)
        l=[self.l]*(self.DIM-1)+[self.h]
        NE=[NE_L]*(self.DIM-1)+[NE_H]
        origin=[0.]*self.DIM
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, origin)

        self.NE=NE_new
        self.l=l_new[0]
        self.h=l_new[self.DIM-1]

        self.logger.debug("Data Source: synthetic with %d features"%len(self._features))
        if self.DIM==2:
            from esys.finley import Rectangle
            dom = Rectangle(n0=NE_new[0], n1=NE_new[1], l0=l_new[0], l1=l_new[1])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d elements"%(NE_new[0], NE_new[1]))
            self.logger.debug(" length: %g x %g"%(l_new[0],l_new[1]))
            self.logger.debug(" origin: %g x %g"%(origin_new[0],origin_new[1]))
        else:
            from esys.finley import Brick
            dom = Brick(n0=NE_new[0], n1=NE_new[1], n2=NE_new[2], l0=l_new[0], l1=l_new[1], l2=l_new[2])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
            self.logger.debug(" length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
            self.logger.debug(" origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        dz=l_new[self.DIM-1]/NE_new[self.DIM-1]
        # NOTE(review): _g_mask appears to select a dz-thick horizontal
        # slab near the top of the unpadded area where the synthetic
        # measurements are taken -- confirm against usage
        self._g_mask=wherePositive(dom.getX()[0]-origin_new[0]) \
                * whereNegative(dom.getX()[0]-(l_new[0]-origin_new[0])) \
                * whereNonNegative(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+origin_new[self.DIM-1])) \
                * whereNonPositive(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+(origin_new[self.DIM-1]+dz)))
        # density mask: 1 in the padding area, 0 inside the data area
        self._mask=whereNegative(self._x[self.DIM-1]) + \
                wherePositive(self._x[self.DIM-1]-l[self.DIM-1])
        for i in xrange(self.DIM-1):
            self._mask=self._mask + whereNegative(self._x[i]) + \
                    wherePositive(self._x[i]-l[i])
        self._mask=wherePositive(self._mask)

        # overlay the features (later features override earlier ones where
        # their masks overlap) to build the reference density
        rho_ref=0.
        for f in self._features:
            m=f.getMask(self._x)
            rho_ref = rho_ref * (1-m) + f.getDensity() * m
        self._rho=rho_ref

        return dom

    def getDensityMask(self):
        """
        Returns the density mask (1 in the padding area, 0 elsewhere).
        """
        return self._mask

    def getReferenceDensity(self):
        """
        Returns the reference density used to generate the measurements.
        """
        return self._rho

    def getGravityAndStdDev(self):
        """
        Solves a potential PDE for the reference density and returns the
        resulting gravity field together with the measurement location
        mask in place of a standard deviation.
        """
        pde=LinearSinglePDE(self.getDomain())
        # gravitational constant
        G=6.6742e-11*U.m**3/(U.kg*U.sec**2)
        # fix the potential on all domain boundaries
        m_psi_ref=0.
        for i in xrange(self.DIM):
            m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
                    + whereZero(self._x[i]-sup(self._x[i]))

        pde.setValue(A=kronecker(self.getDomain()), Y=-4*np.pi*G*self._rho, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        g=-grad(psi_ref)
        sigma=self._g_mask
        return g,sigma
822

  ViewVC Help
Powered by ViewVC 1.1.26