/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Contents of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 4005 - (show annotations)
Fri Sep 28 06:09:03 2012 UTC (6 years, 11 months ago) by caltinay
File MIME type: text/x-python
File size: 32210 byte(s)
test fixes, doco updates, annoyance removals.

1
2 ##############################################################################
3 #
4 # Copyright (c) 2003-2012 by University of Queensland
5 # http://www.uq.edu.au
6 #
7 # Primary Business: Queensland, Australia
8 # Licensed under the Open Software License version 3.0
9 # http://www.opensource.org/licenses/osl-3.0.php
10 #
11 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12 # Development since 2012 by School of Earth Sciences
13 #
14 ##############################################################################
15
16 __copyright__="""Copyright (c) 2003-2012 by University of Queensland
17 http://www.uq.edu.au
18 Primary Business: Queensland, Australia"""
19 __license__="""Licensed under the Open Software License version 3.0
20 http://www.opensource.org/licenses/osl-3.0.php"""
21 __url__="https://launchpad.net/escript-finley"
22
23 __all__ = ['DataSource','UBCDataSource','ERSDataSource','SyntheticDataSource','SmoothAnomaly']
24
25 import logging
26 import numpy as np
27 from esys.escript import *
28 from esys.escript.linearPDEs import LinearSinglePDE
29 import esys.escript.unitsSI as U
30 from esys.ripley import Brick, Rectangle, ripleycpp
31
32 try:
33 from scipy.io.netcdf import netcdf_file
34 __all__ += ['NetCDFDataSource']
35 except:
36 pass
37
38 def LatLonToUTM(lon, lat, wkt_string=None):
39 """
40 Converts one or more longitude,latitude pairs to the corresponding x,y
41 coordinates in the Universal Transverse Mercator projection.
42 If `wkt_string` is not given or invalid or the ``gdal`` module is not
43 available to convert the string, then the input values are assumed to be
44 given in the Clarke 1866 projection.
45 """
46
47 # not really optimal: if pyproj is not installed we return the input
48 # values scaled by a constant.
49 try:
50 import pyproj
51 except:
52 print("Warning, pyproj not available. Domain extents will be wrong")
53 return lon*1000., lat*1000.
54
55 # determine UTM zone from the input data
56 zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
57 try:
58 import osgeo.osr
59 srs = osgeo.osr.SpatialReference()
60 srs.ImportFromWkt(wkt_string)
61 p_src = pyproj.Proj(srs.ExportToProj4())
62 except:
63 p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
64 # we assume southern hemisphere here
65 p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
66 x,y=pyproj.transform(p_src, p_dest, lon, lat)
67 return x,y
68
class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    This is an abstract base class that implements common functionality.
    Methods to be overwritten by subclasses are marked as such.
    """
    # this is currently specific to gravity inversion and should be generalised

    def __init__(self):
        """
        Constructor. Sets some defaults and initializes logger.
        """
        self._constrainBottom=False
        self._constrainSides=True
        self._domain=None
        self.setPadding()
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)

    def setPadding(self, pad_x=10, pad_y=10):
        """
        Sets the amount of padding around the dataset. If `pad_x`/`pad_y`
        is >=1 the value is treated as number of elements to be added to the
        domain (per side).
        If ``0 < pad_x,pad_y < 1``, the padding amount is relative to the
        dataset size. For example, calling ``setPadding(3, 0.1)`` to a data
        source with size 10x20 will result in the padded data set size
        16x24 (10+2*3, 20*(1+2*0.1))

        :param pad_x: Padding per side in x direction (default: 10 elements)
        :type pad_x: ``int`` or ``float``
        :param pad_y: Padding per side in y direction (default: 10 elements).
                      This value is only used for 3-dimensional datasets
        :type pad_y: ``int`` or ``float``
        """
        self._pad_x=pad_x
        self._pad_y=pad_y

    def setConstraints(self, bottom=False, sides=True):
        """
        If `bottom` is True, then the density mask will be set to 1 in the
        padding area at the bottom of the domain. By default this area is
        unconstrained. Similarly, if `sides` is True (default) then the
        horizontal padding area is constrained, otherwise not.

        :param bottom: Whether to constrain the density at the bottom of the
                       domain
        :type bottom: ``bool``
        :param sides: Whether to constrain the density in the padding area
                      surrounding the data
        :type sides: ``bool``
        """
        self._constrainBottom=bottom
        self._constrainSides=sides

    def getDomain(self):
        """
        Returns a domain that spans the data area plus padding.
        The domain is created the first time this method is called, subsequent
        calls return the same domain so anything that affects the domain
        (such as padding) needs to be set beforehand.

        :return: The escript domain for this data source.
        :rtype: `esys.escript.Domain`
        """
        if self._domain is None:
            self._domain=self._createDomain(self._pad_x, self._pad_y)
        return self._domain

    def getDensityMask(self):
        """
        Returns the density mask data object, where mask has value 1 in the
        padding area, 0 elsewhere.

        :return: The mask for the density.
        :rtype: `esys.escript.Data`
        """
        return self._mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as a
        tuple. This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getVerticalExtents(self):
        """
        returns a tuple ``(z0, nz, dz)``, where

        - ``z0`` = minimum z coordinate (origin)
        - ``nz`` = number of nodes in z direction
        - ``dz`` = spacing of nodes (= cell size in z)

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick).
        Must be implemented in subclasses.
        """
        raise NotImplementedError

    def _addPadding(self, pad_x, pad_y, NE, l, origin):
        """
        Helper method that computes new number of elements, length and origin
        after adding padding to the input values.

        :param pad_x: Number of elements or fraction of padding in x direction
        :type pad_x: ``int`` or ``float``
        :param pad_y: Number of elements or fraction of padding in y direction
        :type pad_y: ``int`` or ``float``
        :param NE: Initial number of elements
        :type NE: ``tuple`` or ``list``
        :param l: Initial side lengths
        :type l: ``tuple`` or ``list``
        :param origin: Initial origin
        :type origin: ``tuple`` or ``list``
        :return: tuple with three elements ``(NE_padded, l_padded, origin_padded)``,
                 which are lists of the updated input parameters
        """
        DIM=len(NE)
        frac=[0.]*(DIM-1)+[0]
        # padding is applied to each side so multiply by 2 to get the total
        # amount of padding per dimension.
        # Bug fix: the fractional test must check pad_x (the original
        # compared pad_y here which broke fractional x padding).
        if 0 < pad_x < 1:
            frac[0]=2.*pad_x
        elif pad_x>=1:
            frac[0]=2.*pad_x/float(NE[0])
        if DIM>2:
            if 0 < pad_y < 1:
                frac[1]=2.*pad_y
            elif pad_y>=1:
                frac[1]=2.*pad_y/(float(NE[1]))

        # calculate new number of elements
        NE_new=[int(NE[i]*(1+frac[i])) for i in range(DIM)]
        NEdiff=[NE_new[i]-NE[i] for i in range(DIM)]
        spacing=[l[i]/NE[i] for i in range(DIM)]
        l_new=[NE_new[i]*spacing[i] for i in range(DIM)]
        # shift origin so the original data stays centered in the new domain
        origin_new=[origin[i]-NEdiff[i]/2.*spacing[i] for i in range(DIM)]
        return NE_new, l_new, origin_new

    def _interpolateOnDomain(self, data):
        """
        Helper method that interpolates data arrays onto the domain.
        Currently this works like a nearest neighbour mapping, i.e. values
        are directly inserted into data objects at closest location.
        """
        dom=self.getDomain()
        dim=dom.getDim()
        # determine number of values required per element
        # (// keeps integer semantics on both Python 2 and 3)
        DPP=Scalar(0., Function(dom)).getNumberOfDataPoints()
        for i in range(dim):
            DPP=DPP//self._dom_NE[i]
        DPP=int(DPP)

        # idx_mult.dot([x,y,z]) = flat index into data object
        idx_mult=np.array([DPP]+self._dom_NE[:dim-1]).cumprod()

        # separate data arrays and coordinates
        num_arrays=len(data[0])-dim
        arrays=[]
        for i in range(num_arrays):
            d=Scalar(0., Function(dom))
            d.expand()
            arrays.append(d)

        for entry in data:
            index=[int((entry[i]-self._dom_origin[i])/self._spacing[i]) for i in range(dim)]
            index=int(idx_mult.dot(index))
            for i in range(num_arrays):
                for p in range(DPP):
                    arrays[i].setValueOfDataPoint(index+p, entry[dim+i])

        return arrays

    def _createDomain(self, padding_l, padding_h):
        """
        Creates and returns an escript domain that spans the entire area of
        available data plus a buffer zone. This method is called only once
        the first time `getDomain()` is invoked and may be overwritten if
        required.

        :return: The escript domain for this data source.
        :rtype: `esys.escript.Domain`
        """
        X0, NX, DX = self.getDataExtents()
        z0, nz, dz = self.getVerticalExtents()

        # number of elements (without padding)
        NE = [NX[0], NX[1], nz]

        # origin of domain (without padding)
        origin = [X0[0], X0[1], z0]
        origin = [np.round(oi) for oi in origin]

        # cell size / point spacing
        self._spacing = DX+[dz]
        self._spacing = [float(np.round(si)) for si in self._spacing]

        # length of domain (without padding)
        l = [NE[i]*self._spacing[i] for i in range(len(NE))]

        # now add padding to the values
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, origin)

        # number of padding elements per side (integer division)
        NE_pad=[(NE_new[i]-NE[i])//2 for i in range(3)]

        self._dom_NE_pad = NE_pad
        self._dom_len = l_new
        self._dom_NE = NE_new
        self._dom_origin = origin_new
        lo=[(origin_new[i], origin_new[i]+l_new[i]) for i in range(3)]
        try:
            dom=self.getDomainClass()(*self._dom_NE, l0=lo[0], l1=lo[1], l2=lo[2])
            # ripley may internally adjust NE and length, so recompute
            self._dom_len=[sup(dom.getX()[i])-inf(dom.getX()[i]) for i in range(3)]
            self._dom_NE=[int(self._dom_len[i]/self._spacing[i]) for i in range(3)]
            x=dom.getX()-[self._dom_origin[i]+NE_pad[i]*self._spacing[i] for i in range(3)]
            mask=wherePositive(dom.getX()[2])

        except TypeError:
            # domain class does not accept (min,max) tuples for l0..l2,
            # fall back to plain lengths
            dom=self.getDomainClass()(*self._dom_NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
            x=dom.getX()-[NE_pad[i]*self._spacing[i] for i in range(3)]
            mask=wherePositive(x[2]+self._dom_origin[2])

        # prepare density mask (=1 at padding area, 0 else)
        if self._constrainSides:
            for i in range(2):
                mask=mask + whereNegative(x[i]) + \
                        wherePositive(x[i]-l_new[i]+2*NE_pad[i]*self._spacing[i])

        if self._constrainBottom:
            mask = mask + whereNegative(x[2])
        self._mask=wherePositive(mask)

        self.logger.debug("Domain size: %d x %d x %d elements"%(self._dom_NE[0],self._dom_NE[1],self._dom_NE[2]))
        self.logger.debug("     length: %g x %g x %g"%(self._dom_len[0],self._dom_len[1],self._dom_len[2]))
        self.logger.debug("     origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        return dom
326
327
328 ##############################################################################
class UBCDataSource(DataSource):
    """
    Data source that reads a UBC-style mesh file together with a gravity
    data file (and optionally a topography file).
    """
    def __init__(self, domainclass, meshfile, gravfile, topofile=None):
        """
        :param domainclass: the domain generator class (e.g. esys.ripley.Brick)
        :param meshfile: filename of the UBC mesh file
        :param gravfile: filename of the gravity data file
        :param topofile: optional filename of the topography file
        """
        super(UBCDataSource,self).__init__()
        self.__meshfile=meshfile
        self.__gravfile=gravfile
        self.__topofile=topofile
        self.__domainclass=domainclass
        self.__readMesh()

    def __readMesh(self):
        """
        Parses the mesh file. Expected layout: line 0 = number of points
        per dimension, line 1 = origin, remaining lines = spacing in the
        form ``<count>*<delta>`` (one line per dimension).
        """
        # use a context manager so the file handle is always closed
        with open(self.__meshfile) as f:
            meshdata=f.readlines()
        numDataPoints=meshdata[0].split()
        origin=meshdata[1].split()
        # list(...) keeps indexing working on Python 3 where map() is lazy
        self.__nPts=list(map(int, numDataPoints))
        self.__origin=list(map(float, origin))
        self.__delta=[float(X.split('*')[1]) for X in meshdata[2:]]
        # vertical data is upside down so shift the z origin accordingly
        self.__origin[2]-=(self.__nPts[2]-1)*self.__delta[2]
        self.logger.debug("Data Source: %s (mesh file: %s)"%(self.__gravfile, self.__meshfile))

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation as escript data
        objects. The anomaly acts in the vertical (z) direction only.
        """
        gravlist=self.__readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def __readTopography(self):
        """
        Reads the topography file: first line = number of records,
        then one "x y z" triple per line.
        """
        with open(self.__topofile) as f:
            n=int(f.readline())
            topodata=np.zeros((n,3))
            for i in range(n):
                topodata[i]=list(map(float, f.readline().split()))
        return topodata

    def __readGravity(self):
        """
        Reads the gravity file: first line = number of records, then one
        "x y z anomaly stddev" record per line (anomaly in mGal).
        """
        with open(self.__gravfile) as f:
            n=int(f.readline())
            gravdata=np.zeros((n,5))
            for i in range(n):
                x=list(map(float, f.readline().split())) # x, y, z, anomaly in mGal, stddev
                # convert gravity anomaly units to m/s^2 and rescale error
                x[3]*=-1e-5
                x[4]*=1e-5
                gravdata[i]=x
        return gravdata
402
403 ##############################################################################
class NetCDFDataSource(DataSource):
    """
    Data source that reads gravity (and optionally topography) data from
    netCDF files using scipy's netcdf reader.
    """
    def __init__(self, domainclass, gravfile, topofile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
        """
        :param domainclass: the domain generator class (e.g. esys.ripley.Brick)
        :param gravfile: filename of the netCDF gravity data file
        :param topofile: optional filename of the netCDF topography file
        :param vertical_extents: tuple ``(alt_min, alt_max, num_points)``
        :param alt_of_data: altitude of measurements
        """
        super(NetCDFDataSource,self).__init__()
        self.__topofile=topofile
        self.__gravfile=gravfile
        self.__domainclass=domainclass
        self.__determineExtents(vertical_extents)
        self.__altOfData=alt_of_data

    def __determineExtents(self, ve):
        """
        Opens the gravity file and determines grid dimensions, origin,
        spacing and the names of the coordinate and gravity variables.

        :param ve: vertical extents tuple ``(alt_min, alt_max, num_points)``
        """
        self.logger.debug("Data Source: %s"%self.__gravfile)
        f=netcdf_file(self.__gravfile, 'r')
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables. pop() removes them so the
        # gravity variable search below only sees the remaining variables.
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out gravity variable name
        grav_name=None
        if len(f.variables)==1:
            # list(...) so this also works on Python 3 where keys() is a view
            grav_name=list(f.variables.keys())[0]
        else:
            for n in list(f.variables.keys()):
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    grav_name=n
                    break
        if grav_name is None:
            raise RuntimeError("Could not determine gravity variable")

        # see if there is a wkt string to convert coordinates
        try:
            wkt_string=f.variables[grav_name].esri_pe_string
        except AttributeError:
            wkt_string=None

        # we don't trust actual_range & geospatial_lon_min/max since subset
        # data does not seem to have these fields updated it seems.
        # Getting min/max from the arrays is obviously not very efficient...
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        origin=[lon_range[0],lat_range[0],ve[0]]
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0],ve[1]-ve[0]]

        f.close()

        self.__nPts=[NX, NY, ve[2]]
        self.__origin=origin
        # we are rounding to avoid interpolateOnDomain issues
        self.__delta=[np.round(lengths[i]/(self.__nPts[i]-1)) for i in range(3)]
        self.__wkt_string=wkt_string
        self.__lon=lon_name
        self.__lat=lat_name
        self.__grv=grav_name

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation as escript data
        objects. The anomaly acts in the vertical (z) direction only.
        """
        gravlist=self._readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def _readTopography(self):
        """
        Reads longitude, latitude and altitude arrays from the topography
        file and returns them as an (n,3) array.
        """
        f=netcdf_file(self.__topofile, 'r')
        lon=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon=f.variables[n][:]
                break
        if lon is None:
            raise RuntimeError("Could not determine longitude variable")
        lat=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat=f.variables[n][:]
                break
        if lat is None:
            raise RuntimeError("Could not determine latitude variable")
        alt=None
        for n in ['altitude','alt']:
            if n in f.variables:
                alt=f.variables[n][:]
                break
        if alt is None:
            raise RuntimeError("Could not determine altitude variable")

        topodata=np.column_stack((lon,lat,alt))
        f.close()
        return topodata

    def _readGravity(self):
        """
        Reads the gravity values and returns an (n,5) array with columns
        x, y, altitude, gravity and error estimate (all converted to SI).
        """
        f=netcdf_file(self.__gravfile, 'r')
        # reconstruct coordinates from the (rounded) origin/spacing so they
        # line up with the domain grid
        NE=[self.__nPts[i]-1 for i in range(2)]
        lon=np.linspace(self.__origin[0], self.__origin[0]+NE[0]*self.__delta[0], NE[0]+1)
        lat=np.linspace(self.__origin[1], self.__origin[1]+NE[1]*self.__delta[1], NE[1]+1)
        lon,lat=np.meshgrid(lon,lat)
        grav=f.variables[self.__grv][:]
        # bug fix: missing_value must be read *before* closing the file
        # (the original accessed the variable after f.close())
        try:
            missing=f.variables[self.__grv].missing_value
        except AttributeError:
            missing=None
        f.close()
        lon=lon.flatten()
        lat=lat.flatten()
        grav=grav.flatten()
        alt=self.__altOfData*np.ones(grav.shape)
        # error value is an assumption; missing data points get zero error
        if missing is None:
            err=20.0*np.ones(lon.shape)
        else:
            err=np.where(grav==missing, 0.0, 20.0)
        # convert units
        err=1e-6*err
        grav=1e-6*grav
        gravdata=np.column_stack((lon,lat,alt,grav,err))
        return gravdata
572
573 ##############################################################################
class ERSDataSource(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, headerfile, datafile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
        """
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :param datafile: usually has the same name as the headerfile
                         without '.ers'; derived automatically if None
        :param vertical_extents: tuple ``(alt_min, alt_max, num_points)``
        :param alt_of_data: altitude of measurements
        """
        super(ERSDataSource,self).__init__()
        self.__headerfile=headerfile
        if datafile is None:
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__readHeader(vertical_extents)
        self.__altOfData=alt_of_data

    def __readHeader(self, ve):
        """
        Parses the ER Mapper header file and populates origin, grid size
        and spacing. Lat/lon coordinates are converted to UTM.

        :param ve: vertical extents tuple ``(alt_min, alt_max, num_points)``
        """
        self.logger.debug("Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        with open(self.__headerfile, 'r') as f:
            metadata=f.readlines()
        # parse metadata
        start=-1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # build a flat dictionary of "Section.Key" -> value entries
        md_dict={}
        section=[]
        for i in range(start, len(metadata)):
            line=metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except (KeyError, ValueError):
            maskval = 99999

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except (KeyError, ValueError, RuntimeError):
            # the RuntimeError raised above is deliberately caught here too
            # so an unknown coordinate type degrades to a warning
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except Exception:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
            op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolateOnDomain issues
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin=[originX, originY]
        originY-=(NY-1)*spacingY
        self.__maskval=maskval
        spacingZ=np.round(float(ve[1]-ve[0])/(ve[2]-1))
        self.__delta = [spacingX, spacingY, spacingZ]
        self.__nPts = [NX, NY, ve[2]]
        self.__origin = [originX, originY, ve[0]]

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return Brick

    def getGravityAndStdDev(self):
        """
        Reads the binary raster directly onto the domain grid and returns
        the gravity anomaly and standard deviation data objects. Masked
        (null) cells get zero standard deviation.
        """
        nValues=self.__nPts[:2]+[1]
        first=self._dom_NE_pad[:2]+[self._dom_NE_pad[2]+int((self.__altOfData-self.__origin[2])/self.__delta[2])]
        g=ripleycpp._readBinaryGrid(self.__datafile,
                ReducedFunction(self.getDomain()),
                first, nValues, (), self.__maskval)
        sigma=whereNonZero(g-self.__maskval)
        # convert from mGal-scale units to m/s^2; error is an assumption
        g=g*1e-6
        sigma=sigma*2e-6
        return g*[0,0,1], sigma
713
714
715 ##############################################################################
class SourceFeature(object):
    """
    A feature adds a density distribution to (parts of) a domain of a
    synthetic data source, for example a layer of a specific rock type or
    a simulated ore body.

    This is an abstract interface; subclasses must implement both methods.
    """

    def getDensity(self):
        """
        Returns the density for the area covered by the mask. The result
        may be a constant or a data object with spatial dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature, i.e.
        non-zero wherever the density from `getDensity()` should be
        applied and zero everywhere else.
        """
        raise NotImplementedError
735
class SmoothAnomaly(SourceFeature):
    """
    A source feature describing a box-shaped anomaly whose density decays
    smoothly (exponentially) from `rho_inner` at the centre towards
    `rho_outer` at the edges of the box.
    """
    def __init__(self, lx, ly, lz, x, y, depth, rho_inner, rho_outer):
        """
        :param lx: side length of the anomaly box in x direction
        :param ly: side length in y direction (only used for 3D domains)
        :param lz: side length in z (vertical) direction
        :param x: x coordinate of the box centre
        :param y: y coordinate of the box centre (only used for 3D domains)
        :param depth: depth of the box centre below the top of the domain
        :param rho_inner: density at the centre of the anomaly
        :param rho_outer: density towards the edges of the anomaly
        """
        self.x=x
        self.y=y
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.depth=depth
        self.rho_inner=rho_inner
        self.rho_outer=rho_outer
        # density distribution; computed lazily on the first getMask() call
        self.rho=None

    def getDensity(self):
        """
        Returns the density distribution computed by the last `getMask()`
        call, or ``None`` if `getMask()` has not been called yet.
        """
        return self.rho

    def getMask(self, x):
        """
        Returns a mask that is non-zero inside the anomaly box centred at
        (`x`, `y`) at the given depth. As a side effect, the smooth density
        distribution is computed (once) from the same coordinates.

        :param x: coordinate data object of the domain
        """
        DIM=x.getDomain().getDim()
        # box test in the vertical and x directions; depth is measured from
        # the top of the domain (sup of the vertical coordinate)
        m=whereNonNegative(x[DIM-1]-(sup(x[DIM-1])-self.depth-self.lz/2)) * whereNonPositive(x[DIM-1]-(sup(x[DIM-1])-self.depth+self.lz/2)) \
            *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM>2:
            # restrict the box in the y direction as well
            m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        if self.rho is None:
            # decay rate chosen so the density roughly reaches rho_outer at
            # the box boundary; NOTE(review): assumes rho_inner != 0 and
            # rho_outer/rho_inner > 0 under abs() — confirm against callers
            alpha=-log(abs(self.rho_outer/self.rho_inner))*4
            rho=exp(-alpha*((x[0]-self.x)/self.lx)**2)
            rho=rho*exp(-alpha*((x[DIM-1]-(sup(x[DIM-1])-self.depth))/self.lz)**2)
            self.rho=maximum(abs(self.rho_outer), abs(self.rho_inner*rho))
            # preserve the sign of the inner density
            if self.rho_inner<0: self.rho=-self.rho
        return m
764
765 ##############################################################################
class SyntheticDataSource(DataSource):
    """
    Data source that generates synthetic gravity data from a list of
    `SourceFeature` objects by solving a potential PDE on a finley domain.
    """
    def __init__(self, DIM, NE, l, h, features):
        """
        :param DIM: dimensionality of the domain (2 or 3)
        :param NE: number of elements in the vertical direction
        :param l: horizontal side length of the domain
        :param h: height (vertical extent) of the domain
        :param features: list of `SourceFeature` objects to apply
        """
        super(SyntheticDataSource,self).__init__()
        self._features = features
        self.DIM=DIM
        self.NE=NE
        self.l=l
        self.h=h

    def _createDomain(self, padding_l, padding_h):
        """
        Builds the padded finley Rectangle/Brick domain, the data mask,
        the density mask and the reference density from the features.

        :return: The escript domain for this data source.
        :rtype: `esys.escript.Domain`
        """
        NE_H=self.NE
        # horizontal element count scaled so elements are roughly square
        NE_L=int((self.l/self.h)*NE_H+0.5)
        l=[self.l]*(self.DIM-1)+[self.h]
        NE=[NE_L]*(self.DIM-1)+[NE_H]
        origin=[0.]*self.DIM
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, origin)

        self.NE=NE_new
        self.l=l_new[0]
        self.h=l_new[self.DIM-1]

        self.logger.debug("Data Source: synthetic with %d features"%len(self._features))
        if self.DIM==2:
            from esys.finley import Rectangle
            dom = Rectangle(n0=NE_new[0], n1=NE_new[1], l0=l_new[0], l1=l_new[1])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d elements"%(NE_new[0], NE_new[1]))
            self.logger.debug("     length: %g x %g"%(l_new[0],l_new[1]))
            self.logger.debug("     origin: %g x %g"%(origin_new[0],origin_new[1]))
        else:
            from esys.finley import Brick
            dom = Brick(n0=NE_new[0], n1=NE_new[1], n2=NE_new[2], l0=l_new[0], l1=l_new[1], l2=l_new[2])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
            self.logger.debug("     length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
            self.logger.debug("     origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        # mask for the band of elements at the top where synthetic
        # measurements are taken
        dz=l_new[self.DIM-1]/NE_new[self.DIM-1]
        self._g_mask=wherePositive(dom.getX()[0]-origin_new[0]) \
                * whereNegative(dom.getX()[0]-(l_new[0]-origin_new[0])) \
                * whereNonNegative(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+origin_new[self.DIM-1])) \
                * whereNonPositive(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+(origin_new[self.DIM-1]+dz)))
        # density mask: 1 within the padding area, 0 inside the data area
        self._mask=whereNegative(self._x[self.DIM-1]) + \
                wherePositive(self._x[self.DIM-1]-l[self.DIM-1])
        for i in range(self.DIM-1):
            self._mask=self._mask + whereNegative(self._x[i]) + \
                    wherePositive(self._x[i]-l[i])
        self._mask=wherePositive(self._mask)

        # overlay the features to build the reference density; later
        # features overwrite earlier ones where their masks overlap
        rho_ref=0.
        for f in self._features:
            m=f.getMask(self._x)
            rho_ref = rho_ref * (1-m) + f.getDensity() * m
        self._rho=rho_ref

        return dom

    def getDensityMask(self):
        """
        Returns the density mask (1 in the padding area, 0 elsewhere).
        """
        return self._mask

    def getReferenceDensity(self):
        """
        Returns the reference density composed from the source features.
        """
        return self._rho

    def getGravityAndStdDev(self):
        """
        Solves a potential PDE for the reference density and returns the
        resulting gravity field together with the measurement mask as the
        standard deviation placeholder.
        """
        pde=LinearSinglePDE(self.getDomain())
        G=6.6742e-11*U.m**3/(U.kg*U.sec**2)
        # fix the potential on all outer boundaries
        m_psi_ref=0.
        for i in range(self.DIM):
            m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
                    + whereZero(self._x[i]-sup(self._x[i]))

        pde.setValue(A=kronecker(self.getDomain()), Y=-4*np.pi*G*self._rho, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        g=-grad(psi_ref)
        sigma=self._g_mask
        return g,sigma
845

  ViewVC Help
Powered by ViewVC 1.1.26