/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Contents of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 4039 - (show annotations)
Fri Oct 26 07:34:06 2012 UTC (6 years, 11 months ago) by gross
File MIME type: text/x-python
File size: 35690 byte(s)
magnetic inversion added
1
2 ##############################################################################
3 #
4 # Copyright (c) 2003-2012 by University of Queensland
5 # http://www.uq.edu.au
6 #
7 # Primary Business: Queensland, Australia
8 # Licensed under the Open Software License version 3.0
9 # http://www.opensource.org/licenses/osl-3.0.php
10 #
11 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12 # Development since 2012 by School of Earth Sciences
13 #
14 ##############################################################################
15
16 """Data readers/providers for inversions"""
17
18 __copyright__="""Copyright (c) 2003-2012 by University of Queensland
19 http://www.uq.edu.au
20 Primary Business: Queensland, Australia"""
21 __license__="""Licensed under the Open Software License version 3.0
22 http://www.opensource.org/licenses/osl-3.0.php"""
23 __url__="https://launchpad.net/escript-finley"
24
25 __all__ = ['DataSource','UBCDataSource','ERSDataSource','SyntheticDataSource','SmoothAnomaly']
26
27 import logging
28 import numpy as np
29 from esys.escript import ReducedFunction, Scalar
30 from esys.escript.linearPDEs import LinearSinglePDE
31 from esys.escript.util import *
32 import esys.escript.unitsSI as U
33 from esys.ripley import Brick, Rectangle, ripleycpp
34 import sys
35
36 if sys.version_info[0]>2:
37 xrange=range
38
39 try:
40 from scipy.io.netcdf import netcdf_file
41 __all__ += ['NetCDFDataSource']
42 except:
43 pass
44
45 def LatLonToUTM(lon, lat, wkt_string=None):
46 """
47 Converts one or more longitude,latitude pairs to the corresponding x,y
48 coordinates in the Universal Transverse Mercator projection.
49
50 :note: If the ``pyproj`` module is not installed a warning is printed and
51 the input values are scaled by a constant and returned.
52 :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
53 not available to convert the string, then the input values are
54 assumed to be given in the Clarke 1866 projection.
55
56 :param lon: longitude value(s)
57 :type lon: `float`, `list`, `tuple`, or ``numpy.array``
58 :param lat: latitude value(s)
59 :type lat: `float`, `list`, `tuple`, or ``numpy.array``
60 :rtype: ``numpy.array``
61 """
62
63 # not really optimal: if pyproj is not installed we return the input
64 # values scaled by a constant.
65 try:
66 import pyproj
67 except:
68 print("Warning, pyproj not available. Domain extents will be wrong")
69 return np.array(lon)*1000., np.array(lat)*1000.
70
71 # determine UTM zone from the input data
72 zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
73 try:
74 import osgeo.osr
75 srs = osgeo.osr.SpatialReference()
76 srs.ImportFromWkt(wkt_string)
77 p_src = pyproj.Proj(srs.ExportToProj4())
78 except:
79 p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
80 # we assume southern hemisphere here
81 p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
82 x,y=pyproj.transform(p_src, p_dest, lon, lat)
83 return x,y
84
class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    This is an abstract base class that implements common functionality.
    Methods to be overwritten by subclasses are marked as such.
    """
    # this is currently specific to gravity inversion and should be generalised

    def __init__(self):
        """
        Constructor. Sets some defaults and initializes logger.
        """
        self._constrainBottom=False
        self._constrainSides=True
        self._domain=None
        self.__set_density_mask=None
        self.__set_susceptibility_mask=None
        self.setPadding()
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)

    def setPadding(self, pad_x=10, pad_y=10):
        """
        Sets the amount of padding around the dataset. If `pad_x`/`pad_y`
        is >=1 the value is treated as number of elements to be added to the
        domain (per side).
        If ``0 < pad_x,pad_y < 1``, the padding amount is relative to the
        dataset size. For example, calling ``setPadding(3, 0.1)`` to a data
        source with size 10x20 will result in the padded data set size
        16x24 (10+2*3, 20*(1+2*0.1))

        :param pad_x: Padding per side in x direction (default: 10 elements)
        :type pad_x: ``int`` or ``float``
        :param pad_y: Padding per side in y direction (default: 10 elements).
                      This value is only used for 3-dimensional datasets
        :type pad_y: ``int`` or ``float``
        """
        self._pad_x=pad_x
        self._pad_y=pad_y

    def setConstraints(self, bottom=False, sides=True):
        """
        If `bottom` is True, then the density mask will be set to 1 in the
        padding area at the bottom of the domain. By default this area is
        unconstrained. Similarly, if `sides` is True (default) then the
        horizontal padding area is constrained, otherwise not.

        :param bottom: Whether to constrain the density at the bottom of the
                       domain
        :type bottom: ``bool``
        :param sides: Whether to constrain the density in the padding area
                      surrounding the data
        :type sides: ``bool``
        """
        self._constrainBottom=bottom
        self._constrainSides=sides

    def getDomain(self):
        """
        Returns a domain that spans the data area plus padding.
        The domain is created the first time this method is called, subsequent
        calls return the same domain so anything that affects the domain
        (such as padding) needs to be set beforehand.

        :return: The escript domain for this data source.
        :rtype: `esys.escript.Domain`
        """
        if self._domain is None:
            self._domain=self._createDomain(self._pad_x, self._pad_y)
        return self._domain

    def getSetDensityMask(self):
        """
        Returns the density mask data object, where mask has value 1 in the
        padding area, 0 elsewhere.

        :return: The mask for the density.
        :rtype: `esys.escript.Data`
        """
        return self.__set_density_mask

    def setSetDensityMask(self, mask):
        """
        Stores the density mask (see `getSetDensityMask`).

        :param mask: the mask to store
        """
        self.__set_density_mask=mask

    def getSetSusceptibilityMask(self):
        """
        Returns the susceptibility mask data object, where mask has value 1
        in the padding area, 0 elsewhere.

        :return: The mask for the susceptibility.
        :rtype: `esys.escript.Data`
        """
        return self.__set_susceptibility_mask

    def setSetSusceptibilityMask(self, mask):
        """
        Stores the susceptibility mask (see `getSetSusceptibilityMask`).

        :param mask: the mask to store
        """
        self.__set_susceptibility_mask=mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as a
        tuple. This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getMagneticFlieldAndStdDev(self):
        """
        Returns the magnetic field and standard deviation data objects as a
        tuple. This method must be implemented in subclasses.

        :note: the misspelt name ("Flield") is kept because subclasses and
               callers already use it.
        """
        raise NotImplementedError

    def getBackgroundMagneticField(self):
        """
        Returns the background magnetic field. This method must be
        implemented in subclasses.
        """
        # previously 'return NotImplementedError' which handed the exception
        # class back to the caller instead of signalling the missing override
        raise NotImplementedError

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getVerticalExtents(self):
        """
        returns a tuple ``(z0, nz, dz)``, where

        - ``z0`` = minimum z coordinate (origin)
        - ``nz`` = number of nodes in z direction
        - ``dz`` = spacing of nodes (= cell size in z)

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick).
        Must be implemented in subclasses.
        """
        raise NotImplementedError

    def _addPadding(self, pad_x, pad_y, NE, l, origin):
        """
        Helper method that computes new number of elements, length and origin
        after adding padding to the input values.

        :param pad_x: Number of elements or fraction of padding in x direction
        :type pad_x: ``int`` or ``float``
        :param pad_y: Number of elements or fraction of padding in y direction
        :type pad_y: ``int`` or ``float``
        :param NE: Initial number of elements
        :type NE: ``tuple`` or ``list``
        :param l: Initial side lengths
        :type l: ``tuple`` or ``list``
        :param origin: Initial origin
        :type origin: ``tuple`` or ``list``
        :return: tuple with three elements ``(NE_padded, l_padded, origin_padded)``,
                 which are lists of the updated input parameters
        """
        DIM=len(NE)
        # fraction of total padding per dimension; no padding vertically
        frac=[0.]*(DIM-1)+[0]
        # padding is applied to each side so multiply by 2 to get the total
        # amount of padding per dimension
        if pad_x>0 and pad_x<1:
            # (the original tested pad_y here, so a fractional pad_x was
            # silently ignored whenever pad_y was given in elements)
            frac[0]=2.*pad_x
        elif pad_x>=1:
            frac[0]=2.*pad_x/float(NE[0])
        if DIM>2:
            if pad_y>0 and pad_y<1:
                frac[1]=2.*pad_y
            elif pad_y>=1:
                frac[1]=2.*pad_y/(float(NE[1]))

        # calculate new number of elements
        NE_new=[int(NE[i]*(1+frac[i])) for i in range(DIM)]
        NEdiff=[NE_new[i]-NE[i] for i in range(DIM)]
        spacing=[l[i]/NE[i] for i in range(DIM)]
        l_new=[NE_new[i]*spacing[i] for i in range(DIM)]
        origin_new=[origin[i]-NEdiff[i]/2.*spacing[i] for i in range(DIM)]
        return NE_new, l_new, origin_new

    def _interpolateOnDomain(self, data):
        """
        Helper method that interpolates data arrays onto the domain.
        Currently this works like a nearest neighbour mapping, i.e. values
        are directly inserted into data objects at closest location.

        :param data: iterable of rows; each row holds the point coordinates
                     followed by the values to interpolate
        :return: list of expanded Scalar Data objects, one per value column
        """
        dom=self.getDomain()
        dim=dom.getDim()
        # determine number of values required per element
        DPP=Scalar(0., ReducedFunction(dom)).getNumberOfDataPoints()
        for i in range(dim):
            DPP=DPP/self._dom_NE[i]
        DPP=int(DPP)

        # idx_mult.dot([x,y,z]) = flat index into data object
        idx_mult=np.array([DPP]+self._dom_NE[:dim-1]).cumprod()

        # separate data arrays and coordinates
        num_arrays=len(data[0])-dim
        arrays=[]
        for i in range(num_arrays):
            d=Scalar(0., ReducedFunction(dom))
            d.expand()
            arrays.append(d)

        for entry in data:
            # closest grid cell for this data point
            index=[int((entry[i]-self._dom_origin[i])/self._spacing[i]) for i in range(dim)]
            index=int(idx_mult.dot(index))
            for i in range(num_arrays):
                for p in range(DPP):
                    arrays[i].setValueOfDataPoint(index+p, entry[dim+i])

        return arrays

    def _createDomain(self, padding_x, padding_y):
        """
        Creates and returns an escript domain that spans the entire area of
        available data plus a buffer zone. This method is called only once
        the first time `getDomain()` is invoked and may be overwritten if
        required.

        :return: The escript domain for this data source.
        :rtype: `esys.escript.Domain`
        """
        X0, NX, DX = self.getDataExtents()
        z0, nz, dz = self.getVerticalExtents()

        # number of elements (without padding)
        NE = [NX[0], NX[1], nz]

        # origin of domain (without padding)
        origin = [X0[0], X0[1], z0]
        origin = [np.round(oi) for oi in origin]

        # cell size / point spacing (rounded to avoid interpolation issues)
        self._spacing = DX+[dz]
        self._spacing = [float(np.round(si)) for si in self._spacing]

        # length of domain (without padding)
        l = [NE[i]*self._spacing[i] for i in range(len(NE))]

        # now add padding to the values
        NE_new, l_new, origin_new = self._addPadding(padding_x, padding_y, \
                NE, l, origin)

        # number of padding elements per side; integer division keeps these
        # usable as grid indices on Python 3 (matches Python 2 behaviour)
        NE_pad=[(NE_new[i]-NE[i])//2 for i in range(3)]

        self._dom_NE_pad = NE_pad
        self._dom_len = l_new
        self._dom_NE = NE_new
        self._dom_origin = origin_new
        lo=[(origin_new[i], origin_new[i]+l_new[i]) for i in range(3)]
        try:
            # preferred: domain class accepting coordinate intervals
            dom=self.getDomainClass()(*self._dom_NE, l0=lo[0], l1=lo[1], l2=lo[2])
            # ripley may internally adjust NE and length, so recompute
            self._dom_len=[sup(dom.getX()[i])-inf(dom.getX()[i]) for i in range(3)]
            self._dom_NE=[int(self._dom_len[i]/self._spacing[i]) for i in range(3)]
            x=dom.getX()-[self._dom_origin[i]+NE_pad[i]*self._spacing[i] for i in range(3)]
            mask=wherePositive(dom.getX()[2])

        except TypeError:
            # fallback: domain class accepting plain side lengths
            dom=self.getDomainClass()(*self._dom_NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
            x=dom.getX()-[NE_pad[i]*self._spacing[i] for i in range(3)]
            mask=wherePositive(x[2]+self._dom_origin[2])

        # prepare density mask (=1 at padding area, 0 else)
        if self._constrainSides:
            for i in range(2):
                mask=mask + whereNegative(x[i]) + \
                        wherePositive(x[i]-l_new[i]+2*NE_pad[i]*self._spacing[i])

        if self._constrainBottom:
            mask = mask + whereNonPositive(x[2])
        # store the mask; the original called the getter by mistake, which
        # would raise a TypeError at runtime
        self.setSetDensityMask(wherePositive(mask))

        self.logger.debug("Domain size: %d x %d x %d elements"%(self._dom_NE[0],self._dom_NE[1],self._dom_NE[2]))
        self.logger.debug(" length: %g x %g x %g"%(self._dom_len[0],self._dom_len[1],self._dom_len[2]))
        self.logger.debug(" origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        return dom
377
378
379 ##############################################################################
class UBCDataSource(DataSource):
    """
    Data source for UBC-GIF formatted mesh and gravity data files.
    """
    def __init__(self, domainclass, meshfile, gravfile, topofile=None):
        """
        :param domainclass: domain generator class (e.g. esys.ripley.Brick)
        :param meshfile: name of the UBC mesh file
        :param gravfile: name of the gravity data file
        :param topofile: optional name of the topography file
        """
        super(UBCDataSource,self).__init__()
        self.__meshfile=meshfile
        self.__gravfile=gravfile
        self.__topofile=topofile
        self.__domainclass=domainclass
        self.__readMesh()

    def __readMesh(self):
        """
        Reads the mesh file and sets number of points, origin and spacing.
        """
        with open(self.__meshfile) as f:
            meshdata=f.readlines()
        # list() is required because map() returns an iterator on Python 3
        # and these values are indexed/mutated below
        self.__nPts=list(map(int, meshdata[0].split()))
        self.__origin=list(map(float, meshdata[1].split()))
        self.__delta=[float(X.split('*')[1]) for X in meshdata[2:]]
        # vertical data is upside down
        self.__origin[2]-=(self.__nPts[2]-1)*self.__delta[2]
        self.logger.debug("Data Source: %s (mesh file: %s)"%(self.__gravfile, self.__meshfile))

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly (as a vertical vector) and the standard
        deviation interpolated onto the domain.
        """
        gravlist=self.__readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def __readTopography(self):
        """
        Reads the topography file and returns an array of (x, y, altitude)
        rows.
        """
        with open(self.__topofile) as f:
            n=int(f.readline())
            topodata=np.zeros((n,3))
            for i in range(n):
                x=f.readline().split()
                x=list(map(float, x))
                topodata[i]=x
        return topodata

    def __readGravity(self):
        """
        Reads the gravity file and returns an array of
        (x, y, z, anomaly, stddev) rows with values converted to m/s^2.
        """
        with open(self.__gravfile) as f:
            n=int(f.readline())
            gravdata=np.zeros((n,5))
            for i in range(n):
                x=f.readline().split()
                x=list(map(float, x)) # x, y, z, anomaly in mGal, stddev
                # convert gravity anomaly units to m/s^2 and rescale error
                x[3]*=-1e-5
                x[4]*=1e-5
                gravdata[i]=x
        return gravdata
453
454 ##############################################################################
class NetCDFDataSource(DataSource):
    """
    Data source for gravity data stored in a netCDF file.
    """
    def __init__(self, gravfile=None, magfile=None, topofile=None, vertical_extents=(-40000,10000,25), alt_of_data=0.):
        """
        :param gravfile: netCDF file with the gravity data
        :param magfile: netCDF file with magnetic data (stored but not used
                        by the methods visible here)
        :param topofile: netCDF file with topography data
        :param vertical_extents: (alt_min, alt_max, num_points)
        :param alt_of_data: altitude of measurements
        """
        super(NetCDFDataSource,self).__init__()
        self.__topofile=topofile
        self.__gravfile=gravfile
        self.__magfile=magfile
        self.__determineExtents(vertical_extents)
        self.__altOfData=alt_of_data

    def __determineExtents(self, ve):
        """
        Opens the gravity file and determines origin, number of points,
        spacing, variable names and the mask value for unused data.

        :param ve: vertical extents tuple ``(alt_min, alt_max, num_points)``
        """
        self.logger.debug("Data Source: %s"%self.__gravfile)
        f=netcdf_file(self.__gravfile, 'r')
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables; pop() removes them from
        # f.variables so the remaining variable can be identified as the
        # data variable below
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out gravity variable name
        grav_name=None
        if len(f.variables)==1:
            # list() is required on Python 3 where keys() is a view
            grav_name=list(f.variables.keys())[0]
        else:
            for n in list(f.variables.keys()):
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    grav_name=n
                    break
        if grav_name is None:
            raise RuntimeError("Could not determine gravity variable")

        # try to determine value for unused data
        if hasattr(f.variables[grav_name], 'missing_value'):
            maskval = float(f.variables[grav_name].missing_value)
        elif hasattr(f.variables[grav_name], '_FillValue'):
            maskval = float(f.variables[grav_name]._FillValue)
        else:
            self.logger.debug("missing_value attribute not found, using default.")
            maskval = 99999

        # see if there is a wkt string to convert coordinates
        wkt_string=getattr(f.variables[grav_name], 'esri_pe_string', None)

        # we don't trust actual_range & geospatial_lon_min/max since subset
        # data does not seem to have these fields updated.
        # Getting min/max from the arrays is obviously not very efficient but..
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        origin=[lon_range[0],lat_range[0],ve[0]]
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0],ve[1]-ve[0]]

        f.close()

        self.__nPts=[NX, NY, ve[2]]
        self.__origin=origin
        # we are rounding to avoid interpolation issues
        self.__delta=[np.round(lengths[i]/self.__nPts[i]) for i in range(3)]
        self.__wkt_string=wkt_string
        self.__lon=lon_name
        self.__lat=lat_name
        self.__grv=grav_name
        self.__maskval=maskval

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return Brick

    def getGravityAndStdDev(self):
        """
        Reads the gravity grid into a Data object on the domain and returns
        it (as a vertical vector) together with an error estimate.
        """
        nValues=self.__nPts[:2]+[1]
        # z offset selects the grid layer matching the measurement altitude
        first=self._dom_NE_pad[:2]+[self._dom_NE_pad[2]+int((self.__altOfData-self.__origin[2])/self.__delta[2])]
        g=ripleycpp._readNcGrid(self.__gravfile, self.__grv,
                ReducedFunction(self.getDomain()),
                first, nValues, (), self.__maskval)
        # non-masked locations get a non-zero error estimate
        sigma=whereNonZero(g-self.__maskval)
        # scale data and error to m/s^2 -- assumes input in um/s^2; confirm
        g=g*1e-6
        sigma=sigma*2e-6
        return g*[0,0,1], sigma

    def _readTopography(self):
        """
        Reads the topography file and returns an array of
        (lon, lat, altitude) rows.
        """
        f=netcdf_file(self.__topofile, 'r')
        lon=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon=f.variables[n][:]
                break
        if lon is None:
            raise RuntimeError("Could not determine longitude variable")
        lat=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat=f.variables[n][:]
                break
        if lat is None:
            raise RuntimeError("Could not determine latitude variable")
        alt=None
        for n in ['altitude','alt']:
            if n in f.variables:
                alt=f.variables[n][:]
                break
        if alt is None:
            raise RuntimeError("Could not determine altitude variable")

        topodata=np.column_stack((lon,lat,alt))
        f.close()
        return topodata
613
614 ##############################################################################
class ERSDataSource(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, headerfile, datafile=None, vertical_extents=(-40000,10000,25), alt_of_data=0.):
        """
        :param headerfile: usually ends in .ers
        :param datafile: usually has the same name as the headerfile without
                         '.ers'; derived from `headerfile` if not given
        :param vertical_extents: (alt_min, alt_max, num_points)
        :param alt_of_data: altitude of measurements
        """
        super(ERSDataSource,self).__init__()
        self.__headerfile=headerfile
        if datafile is None:
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__readHeader(vertical_extents)
        self.__altOfData=alt_of_data

    def __readHeader(self, ve):
        """
        Parses the ERS header file and sets origin, number of points,
        spacing and the mask value.

        :param ve: vertical extents tuple ``(alt_min, alt_max, num_points)``
        """
        self.logger.debug("Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        with open(self.__headerfile, 'r') as f:
            metadata=f.readlines()
        # parse metadata: locate the (last) DatasetHeader section
        start=-1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # flatten the nested "X Begin"/"X End" sections into a dict with
        # dotted keys, e.g. 'RasterInfo.CellType'
        md_dict={}
        section=[]
        for i in range(start, len(metadata)):
            line=metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except (KeyError, ValueError):
            maskval = 99999

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except Exception:
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        # .get() avoids an uncaught KeyError when the projection is missing
        if 'GEODETIC' in md_dict.get('CoordinateSpace.Projection', ''):
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except Exception:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
            op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolation issues
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin=[originX, originY]
        originY-=(NY-1)*spacingY
        self.__maskval=maskval
        spacingZ=np.round(float(ve[1]-ve[0])/ve[2])
        self.__delta = [spacingX, spacingY, spacingZ]
        self.__nPts = [NX, NY, ve[2]]
        self.__origin = [originX, originY, ve[0]]

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return Brick

    def getGravityAndStdDev(self):
        """
        Reads the binary gravity grid into a Data object on the domain and
        returns it (as a vertical vector) together with an error estimate.
        """
        nValues=self.__nPts[:2]+[1]
        # z offset selects the grid layer matching the measurement altitude
        first=self._dom_NE_pad[:2]+[self._dom_NE_pad[2]+int((self.__altOfData-self.__origin[2])/self.__delta[2])]
        g=ripleycpp._readBinaryGrid(self.__datafile,
                ReducedFunction(self.getDomain()),
                first, nValues, (), self.__maskval)
        # non-masked locations get a non-zero error estimate
        sigma=whereNonZero(g-self.__maskval)
        # scale data and error to m/s^2 -- assumes input in um/s^2; confirm
        g=g*1e-6
        sigma=sigma*2e-6
        return g*[0,0,1], sigma
754
755
756 ##############################################################################
class SourceFeature(object):
    """
    A feature adds a density distribution to (parts of) a domain of a synthetic
    data source, for example a layer of a specific rock type or a simulated
    ore body.
    """
    def getDensity(self, x):
        """
        Returns the density for the area covered by mask. It can be constant
        or a data object with spatial dependency.

        :param x: locations at which the density is evaluated
        """
        # 'x' added to match the subclass implementations and the callers,
        # which invoke getDensity(x)
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature. That is,
        mask is non-zero where the density returned by getDensity() should be
        applied, zero elsewhere.

        :param x: locations at which the mask is evaluated
        """
        raise NotImplementedError
776
class SmoothAnomaly(SourceFeature):
    """
    A box-shaped anomaly whose density/susceptibility decays smoothly away
    from the centre of the feature. Values are computed lazily and cached.
    """
    def __init__(self, lx, ly, lz, x, y, depth, rho_inner=None, rho_outer=None, k_inner=None, k_outer=None):
        """
        :param lx,ly,lz: extents of the anomaly
        :param x,y: horizontal centre of the anomaly
        :param depth: depth of the anomaly below the surface
        :param rho_inner,rho_outer: inner/outer density (optional)
        :param k_inner,k_outer: inner/outer susceptibility (optional)
        """
        self.x=x
        self.y=y
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.depth=depth
        self.rho_inner=rho_inner
        self.rho_outer=rho_outer
        self.k_inner=k_inner
        self.k_outer=k_outer
        # cached results of the lazy evaluations below
        self.rho=None
        self.k=None
        self.mask=None

    def _smoothProfile(self, x, inner, outer):
        """
        Shared smooth fall-off profile used for both density and
        susceptibility; sign follows the inner value.
        """
        vert = x.getDomain().getDim() - 1
        alpha = -log(abs(outer/inner))*4
        bump = exp(-alpha*((x[0]-self.x)/self.lx)**2)
        bump = bump*exp(-alpha*((x[vert]-(sup(x[vert])-self.depth))/self.lz)**2)
        value = maximum(abs(outer), abs(inner*bump))
        if inner < 0:
            value = -value
        return value

    def getDensity(self,x):
        """Returns the (cached) density distribution of this feature."""
        if self.rho is None:
            if self.rho_outer is None or self.rho_inner is None:
                self.rho = 0
            else:
                self.rho = self._smoothProfile(x, self.rho_inner, self.rho_outer)
        return self.rho

    def getSusceptibility(self,x):
        """Returns the (cached) susceptibility distribution of this feature."""
        if self.k is None:
            if self.k_outer is None or self.k_inner is None:
                self.k = 0
            else:
                self.k = self._smoothProfile(x, self.k_inner, self.k_outer)
        return self.k

    def getMask(self, x):
        """Returns 1 inside the bounding box of the feature, 0 outside."""
        vert = x.getDomain().getDim() - 1
        top = sup(x[vert])
        inside = whereNonNegative(x[vert]-(top-self.depth-self.lz/2)) \
                * whereNonPositive(x[vert]-(top-self.depth+self.lz/2)) \
                * whereNonNegative(x[0]-(self.x-self.lx/2)) \
                * whereNonPositive(x[0]-(self.x+self.lx/2))
        if vert >= 2:
            inside = inside * (whereNonNegative(x[1]-(self.y-self.ly/2)) \
                    * whereNonPositive(x[1]-(self.y+self.ly/2)))
        self.mask = inside
        return inside
829
830 ##############################################################################
class SyntheticDataSource(DataSource):
    """
    Data source that generates synthetic survey data from a list of
    `SourceFeature` objects by solving forward modelling PDEs on a finley
    domain.
    """
    def __init__(self, DIM, NE, l, h, features, latitude=-32.):
        """
        :param DIM: spatial dimensionality of the domain (2 or 3)
        :param NE: number of elements in the vertical direction
        :param l: horizontal side length of the domain
        :param h: height (vertical extent) of the domain
        :param features: list of `SourceFeature` objects defining the
                         synthetic density/susceptibility distribution
        :param latitude: latitude used to compute the background magnetic
                         field (default: -32.)
        """
        super(SyntheticDataSource,self).__init__()
        self._features = features
        self.DIM=DIM
        self.NE=NE
        self.l=l
        self.h=h
        self.latitude=latitude

    def _createDomain(self, padding_x, padding_y):
        """
        Creates the finley domain, the measurement masks and the reference
        density/susceptibility fields from the feature list.
        """
        NE_H=self.NE
        # horizontal element count chosen to keep the cell aspect ratio
        NE_L=int((self.l/self.h)*NE_H+0.5)
        l=[self.l]*(self.DIM-1)+[self.h]
        NE=[NE_L]*(self.DIM-1)+[NE_H]
        origin=[0.]*self.DIM
        NE_new, l_new, origin_new = self._addPadding(padding_x, padding_y, \
                NE, l, origin)

        # keep the padded values
        self.NE=NE_new
        self.l=l_new[0]
        self.h=l_new[self.DIM-1]

        self.logger.debug("Data Source: synthetic with %d features"%len(self._features))
        if self.DIM==2:
            from esys.finley import Rectangle
            dom = Rectangle(n0=NE_new[0], n1=NE_new[1], l0=l_new[0], l1=l_new[1])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d elements"%(NE_new[0], NE_new[1]))
            self.logger.debug(" length: %g x %g"%(l_new[0],l_new[1]))
            self.logger.debug(" origin: %g x %g"%(origin_new[0],origin_new[1]))
        else:
            from esys.finley import Brick
            dom = Brick(n0=NE_new[0], n1=NE_new[1], n2=NE_new[2], l0=l_new[0], l1=l_new[1], l2=l_new[2])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
            self.logger.debug(" length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
            self.logger.debug(" origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        # gravity measurement mask: appears to select a one-cell-thick
        # horizontal band of the domain -- verify against the domain layout
        dz=l_new[self.DIM-1]/NE_new[self.DIM-1]
        self._g_mask=wherePositive(dom.getX()[0]-origin_new[0]) \
                * whereNegative(dom.getX()[0]-(l_new[0]-origin_new[0])) \
                * whereNonNegative(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+origin_new[self.DIM-1])) \
                * whereNonPositive(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+(origin_new[self.DIM-1]+dz)))

        # magnetic measurements use the same region as gravity
        self._B_mask=self._g_mask

        # mask of the padding region (non-zero outside the unpadded box)
        mask=whereNegative(self._x[self.DIM-1]) + \
                wherePositive(self._x[self.DIM-1]-l[self.DIM-1])
        for i in xrange(self.DIM-1):
            mask+= whereNegative(self._x[i]) + wherePositive(self._x[i]-l[i])
        self.setSetDensityMask(wherePositive(mask))
        self.setSetSusceptibilityMask(wherePositive(mask))

        # blend the features into reference fields; where masks overlap,
        # later features overwrite earlier ones
        rho_ref=0.
        k_ref=0
        for f in self._features:
            m=f.getMask(self._x)
            rho_ref = rho_ref * (1-m) + f.getDensity(self._x) * m
            k_ref = k_ref * (1-m) + f.getSusceptibility(self._x) * m
        self._rho=rho_ref
        self._k=k_ref

        return dom

    def getReferenceDensity(self):
        """Returns the reference density the data was generated from."""
        return self._rho

    def getReferenceSusceptibility(self):
        """Returns the reference susceptibility the data was generated from."""
        return self._k

    def getGravityAndStdDev(self):
        """
        Computes the gravity field of the reference density by solving a
        potential PDE and returns it together with the measurement mask.
        """
        pde=LinearSinglePDE(self.getDomain())
        G=U.Gravitational_Constant
        # constrain the potential on all faces of the domain
        m_psi_ref=0.
        for i in xrange(self.DIM):
            m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
                    + whereZero(self._x[i]-sup(self._x[i]))

        pde.setValue(A=kronecker(self.getDomain()), Y=-4*np.pi*G*self._rho, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        # gravity is the negative gradient of the potential
        g=-grad(psi_ref)
        sigma=self._g_mask
        return g,sigma

    def getMagneticFlieldAndStdDev(self):
        """
        Computes the magnetic field of the reference susceptibility by
        solving a potential PDE and returns it together with the measurement
        mask. (Misspelt name kept: it is the interface callers use.)
        """
        pde=LinearSinglePDE(self.getDomain())
        B_b=self.getBackgroundMagneticField()
        DIM=self.getDomain().getDim()   # NOTE(review): unused; loop below uses self.DIM
        # constrain the potential on all faces of the domain
        m_psi_ref=0.
        for i in xrange(self.DIM):
            m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
                    + whereZero(self._x[i]-sup(self._x[i]))
        pde.setValue(A=kronecker(self.getDomain()), X=(1+self._k)*B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        B= (1+self._k) * B_b -grad(psi_ref)
        sigma=self._B_mask
        return B,sigma

    def getBackgroundMagneticField(self):
        """
        Returns the background magnetic field vector for the configured
        latitude, based on a dipole model of the Earth's field.
        """
        # theta is the colatitude in radians
        theta = (90-self.latitude)/180.*np.pi
        B_0=U.Mu_0 * U.Magnetic_Dipole_Moment_Earth / (4 * np.pi * U.R_Earth**3)
        # sin/cos come from the esys.escript.util star import; presumably
        # they accept plain floats here -- confirm
        B_theta= B_0 * sin(theta)
        B_r= 2 * B_0 * cos(theta)
        DIM=self.getDomain().getDim()
        if DIM<3:
            return np.array([0., -B_r])
        else:
            return np.array([-B_theta, 0., -B_r])

  ViewVC Help
Powered by ViewVC 1.1.26