/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Contents of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 4049 - (show annotations)
Wed Oct 31 00:50:50 2012 UTC (6 years, 10 months ago) by caltinay
File MIME type: text/x-python
File size: 36433 byte(s)
Fixed typos.

1
2 ##############################################################################
3 #
4 # Copyright (c) 2003-2012 by University of Queensland
5 # http://www.uq.edu.au
6 #
7 # Primary Business: Queensland, Australia
8 # Licensed under the Open Software License version 3.0
9 # http://www.opensource.org/licenses/osl-3.0.php
10 #
11 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12 # Development since 2012 by School of Earth Sciences
13 #
14 ##############################################################################
15
16 """Data readers/providers for inversions"""
17
18 __copyright__="""Copyright (c) 2003-2012 by University of Queensland
19 http://www.uq.edu.au
20 Primary Business: Queensland, Australia"""
21 __license__="""Licensed under the Open Software License version 3.0
22 http://www.opensource.org/licenses/osl-3.0.php"""
23 __url__="https://launchpad.net/escript-finley"
24
25 __all__ = ['DataSource','UBCDataSource','ERSDataSource','SyntheticDataSource','SmoothAnomaly']
26
27 import logging
28 import numpy as np
29 from esys.escript import ReducedFunction, Scalar
30 from esys.escript.linearPDEs import LinearSinglePDE
31 from esys.escript.util import *
32 import esys.escript.unitsSI as U
33 from esys.ripley import Brick, Rectangle, ripleycpp
34
# netCDF support is optional: it is only enabled if scipy's netcdf reader
# is importable. Only ImportError is swallowed here -- a bare except would
# also hide genuine errors raised while extending __all__.
try:
    from scipy.io.netcdf import netcdf_file
    __all__ += ['NetCDFDataSource']
except ImportError:
    pass
40
41 def LatLonToUTM(lon, lat, wkt_string=None):
42 """
43 Converts one or more longitude,latitude pairs to the corresponding x,y
44 coordinates in the Universal Transverse Mercator projection.
45
46 :note: If the ``pyproj`` module is not installed a warning is printed and
47 the input values are scaled by a constant and returned.
48 :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
49 not available to convert the string, then the input values are
50 assumed to be given in the Clarke 1866 projection.
51
52 :param lon: longitude value(s)
53 :type lon: `float`, `list`, `tuple`, or ``numpy.array``
54 :param lat: latitude value(s)
55 :type lat: `float`, `list`, `tuple`, or ``numpy.array``
56 :rtype: ``numpy.array``
57 """
58
59 # not really optimal: if pyproj is not installed we return the input
60 # values scaled by a constant.
61 try:
62 import pyproj
63 except:
64 print("Warning, pyproj not available. Domain extents will be wrong")
65 return np.array(lon)*1000., np.array(lat)*1000.
66
67 # determine UTM zone from the input data
68 zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
69 try:
70 import osgeo.osr
71 srs = osgeo.osr.SpatialReference()
72 srs.ImportFromWkt(wkt_string)
73 p_src = pyproj.Proj(srs.ExportToProj4())
74 except:
75 p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
76 # we assume southern hemisphere here
77 p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
78 x,y=pyproj.transform(p_src, p_dest, lon, lat)
79 return x,y
80
class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    This is an abstract base class that implements common functionality.
    Methods to be overwritten by subclasses are marked as such.
    This class assumes 2D data which is mapped to a slice of a 3D domain.
    For other setups override the `_createDomain` method.
    """

    def __init__(self):
        """
        Constructor. Sets some defaults and initializes logger.
        """
        self._constrainBottom=False
        self._constrainSides=True
        self._domain=None
        self.__set_density_mask=None
        self.__set_susceptibility_mask=None
        self.setPadding()
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)

    def setPadding(self, pad_x=10, pad_y=10):
        """
        Sets the amount of padding around the dataset. If `pad_x`/`pad_y`
        is >=1 the value is treated as number of elements to be added to the
        domain (per side).
        If ``0 < pad_x,pad_y < 1``, the padding amount is relative to the
        dataset size. For example, calling ``setPadding(3, 0.1)`` to a data
        source with size 10x20 will result in the padded data set size
        16x24 (10+2*3, 20*(1+2*0.1))

        :param pad_x: Padding per side in x direction (default: 10 elements)
        :type pad_x: ``int`` or ``float``
        :param pad_y: Padding per side in y direction (default: 10 elements).
                      This value is only used for 3-dimensional datasets
        :type pad_y: ``int`` or ``float``
        """
        self._pad_x=pad_x
        self._pad_y=pad_y

    def setConstraints(self, bottom=False, sides=True):
        """
        If `bottom` is True, then the density mask will be set to 1 in the
        padding area at the bottom of the domain. By default this area is
        unconstrained. Similarly, if `sides` is True (default) then the
        horizontal padding area is constrained, otherwise not.

        :param bottom: Whether to constrain the density at the bottom of the
                       domain
        :type bottom: ``bool``
        :param sides: Whether to constrain the density in the padding area
                      surrounding the data
        :type sides: ``bool``
        """
        self._constrainBottom=bottom
        self._constrainSides=sides

    def getDomain(self):
        """
        Returns a domain that spans the data area plus padding.
        The domain is created the first time this method is called, subsequent
        calls return the same domain so anything that affects the domain
        (such as padding) needs to be set beforehand.

        :return: The escript domain for this data source.
        :rtype: `esys.escript.Domain`
        """
        if self._domain is None:
            self._domain=self._createDomain(self._pad_x, self._pad_y)
        return self._domain

    def getSetDensityMask(self):
        """
        Returns the density mask data object, where mask has value 1 in the
        padding area, 0 elsewhere.

        :return: The mask for the density.
        :rtype: `esys.escript.Data`
        """
        return self.__set_density_mask

    def setSetDensityMask(self, mask):
        """
        Sets the density mask to use.
        """
        self.__set_density_mask=mask

    def getSetSusceptibilityMask(self):
        """
        Returns the susceptibility mask data object, where mask has value 1
        in the padding area, 0 elsewhere.

        :return: The mask for the susceptibility.
        :rtype: `esys.escript.Data`
        """
        return self.__set_susceptibility_mask

    def setSetSusceptibilityMask(self, mask):
        """
        Sets the susceptibility mask to use.
        """
        self.__set_susceptibility_mask=mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as a
        tuple. This method must be implemented in subclasses that supply
        gravity data.
        """
        raise NotImplementedError

    def getMagneticFieldAndStdDev(self):
        """
        Returns the magnetic field and standard deviation data objects as a
        tuple. This method must be implemented in subclasses that supply
        magnetic data.
        """
        raise NotImplementedError

    def getBackgroundMagneticField(self):
        """
        Returns the background magnetic field. This method must be
        implemented in subclasses that supply magnetic data.
        """
        # Fixed: this used to *return* the exception class instead of raising
        # it, which silently handed callers a type object.
        raise NotImplementedError

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getVerticalExtents(self):
        """
        returns a tuple ``(z0, nz, dz)``, where

        - ``z0`` = minimum z coordinate (origin)
        - ``nz`` = number of elements in z direction
        - ``dz`` = spacing of elements (= cell size in z)

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def _addPadding(self, pad_x, pad_y, NE, l, origin):
        """
        Helper method that computes new number of elements, length and origin
        after adding padding to the input values.

        :param pad_x: Number of elements or fraction of padding in x direction
        :type pad_x: ``int`` or ``float``
        :param pad_y: Number of elements or fraction of padding in y direction
        :type pad_y: ``int`` or ``float``
        :param NE: Initial number of elements
        :type NE: ``tuple`` or ``list``
        :param l: Initial side lengths
        :type l: ``tuple`` or ``list``
        :param origin: Initial origin
        :type origin: ``tuple`` or ``list``
        :return: tuple with three elements ``(NE_padded, l_padded, origin_padded)``,
                 which are lists of the updated input parameters
        """
        DIM=len(NE)
        frac=[0.]*(DIM-1)+[0]
        # padding is applied to each side so multiply by 2 to get the total
        # amount of padding per dimension
        # Fixed: the fractional-padding test previously compared pad_y
        # instead of pad_x, so 0<pad_x<1 was ignored whenever pad_y>=1.
        if pad_x>0 and pad_x<1:
            frac[0]=2.*pad_x
        elif pad_x>=1:
            frac[0]=2.*pad_x/float(NE[0])
        if DIM>2:
            if pad_y>0 and pad_y<1:
                frac[1]=2.*pad_y
            elif pad_y>=1:
                frac[1]=2.*pad_y/(float(NE[1]))

        # calculate new number of elements
        NE_new=[int(NE[i]*(1+frac[i])) for i in range(DIM)]
        NEdiff=[NE_new[i]-NE[i] for i in range(DIM)]
        spacing=[l[i]/NE[i] for i in range(DIM)]
        l_new=[NE_new[i]*spacing[i] for i in range(DIM)]
        origin_new=[origin[i]-NEdiff[i]/2.*spacing[i] for i in range(DIM)]
        return NE_new, l_new, origin_new

    def _interpolateOnDomain(self, data):
        """
        Helper method that interpolates data arrays onto the domain.
        Currently this works like a nearest neighbour mapping, i.e. values
        are directly inserted into data objects at closest location.
        """
        dom=self.getDomain()
        dim=dom.getDim()
        # determine number of values required per element
        DPP=Scalar(0., ReducedFunction(dom)).getNumberOfDataPoints()
        for i in range(dim):
            DPP=DPP/self._dom_NE[i]
        DPP=int(DPP)

        # idx_mult.dot([x,y,z]) = flat index into data object
        idx_mult=np.array([DPP]+self._dom_NE[:dim-1]).cumprod()

        # separate data arrays and coordinates
        num_arrays=len(data[0])-dim
        arrays=[]
        for i in range(num_arrays):
            d=Scalar(0., ReducedFunction(dom))
            d.expand()
            arrays.append(d)

        for entry in data:
            index=[int((entry[i]-self._dom_origin[i])/self._spacing[i]) for i in range(dim)]
            index=int(idx_mult.dot(index))
            for i in range(num_arrays):
                for p in range(DPP):
                    arrays[i].setValueOfDataPoint(index+p, entry[dim+i])

        return arrays

    def _createDomain(self, padding_x, padding_y):
        """
        Creates and returns an escript domain that spans the entire area of
        available data plus a buffer zone. This method is called only once
        the first time `getDomain()` is invoked and may be overwritten if
        required.

        :return: The escript domain for this data source.
        :rtype: `esys.escript.Domain`
        """
        X0, NX, DX = self.getDataExtents()
        z0, nz, dz = self.getVerticalExtents()

        # number of elements (without padding)
        NE = [NX[0], NX[1], nz]

        # origin of domain (without padding)
        origin = [X0[0], X0[1], z0]
        origin = [np.round(oi) for oi in origin]

        # cell size / point spacing
        self._spacing = DX+[dz]
        self._spacing = [float(np.round(si)) for si in self._spacing]

        # length of domain (without padding)
        l = [NE[i]*self._spacing[i] for i in range(len(NE))]

        # now add padding to the values
        NE_new, l_new, origin_new = self._addPadding(padding_x, padding_y, \
                NE, l, origin)

        # number of padding elements per side
        NE_pad=[(NE_new[i]-NE[i])//2 for i in range(3)]

        self._dom_NE_pad = NE_pad
        self._dom_len = l_new
        self._dom_NE = NE_new
        self._dom_origin = origin_new
        lo=[(origin_new[i], origin_new[i]+l_new[i]) for i in range(3)]
        dom=Brick(*self._dom_NE, l0=lo[0], l1=lo[1], l2=lo[2])
        # ripley may internally adjust NE and length, so recompute
        self._dom_len=[sup(dom.getX()[i])-inf(dom.getX()[i]) for i in range(3)]
        self._dom_NE=[int(self._dom_len[i]/self._spacing[i]) for i in range(3)]
        x=dom.getX()-[self._dom_origin[i]+NE_pad[i]*self._spacing[i] for i in range(3)]
        mask=wherePositive(dom.getX()[2])

        # prepare density mask (=1 at padding area, 0 else)
        if self._constrainSides:
            for i in range(2):
                mask=mask + whereNegative(x[i]) + \
                        wherePositive(x[i]-l_new[i]+2*NE_pad[i]*self._spacing[i])

        if self._constrainBottom:
            mask = mask + whereNonPositive(x[2])
        self.setSetDensityMask(wherePositive(mask))

        self.logger.debug("Domain size: %d x %d x %d elements"%(self._dom_NE[0],self._dom_NE[1],self._dom_NE[2]))
        self.logger.debug("     length: %g x %g x %g"%(self._dom_len[0],self._dom_len[1],self._dom_len[2]))
        self.logger.debug("     origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        return dom
366
367
368 ##############################################################################
class UBCDataSource(DataSource):
    """
    Data source for gravity data and mesh geometry in UBC file format.
    """
    def __init__(self, meshfile, gravfile, topofile=None):
        """
        :param meshfile: UBC mesh file name
        :type meshfile: ``str``
        :param gravfile: file with gravity point data (x, y, z, anomaly, stddev)
        :type gravfile: ``str``
        :param topofile: optional file with topography points
        :type topofile: ``str``
        """
        super(UBCDataSource,self).__init__()
        self.__meshfile=meshfile
        self.__gravfile=gravfile
        self.__topofile=topofile
        self.__readMesh()

    def __readMesh(self):
        """
        Parses the UBC mesh file to determine extents, origin and spacing.
        """
        meshdata=open(self.__meshfile).readlines()
        numDataPoints=meshdata[0].split()
        origin=meshdata[1].split()
        # list(...) so the results are indexable and mutable under Python 3,
        # where map() returns an iterator
        self.__nPts=list(map(int, numDataPoints))
        self.__origin=list(map(float, origin))
        self.__delta=[float(X.split('*')[1]) for X in meshdata[2:]]
        # vertical data is upside down
        self.__origin[2]-=(self.__nPts[2]-1)*self.__delta[2]
        self.logger.debug("Data Source: %s (mesh file: %s)"%(self.__gravfile, self.__meshfile))

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    #def getSetDensityMask(self):
    #    topodata=self.__readTopography()
    #    mask=self._interpolateOnDomain(topodata)
    #    mask=wherePositive(self.getDomain().getX()[2]-mask[0])
    #    return mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly (z-component only) and standard deviation
        interpolated onto the domain.
        """
        gravlist=self.__readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def __readTopography(self):
        """
        Reads the topography file and returns an (n,3) array of x,y,altitude.
        """
        f=open(self.__topofile)
        n=int(f.readline())
        topodata=np.zeros((n,3))
        for i in range(n):
            x=f.readline().split()
            # list(...) so the row can be assigned into the numpy array on Py3
            x=list(map(float, x))
            topodata[i]=x
        f.close()
        return topodata

    def __readGravity(self):
        """
        Reads the gravity data file and returns an (n,5) array with units
        converted from mGal to m/s^2.
        """
        f=open(self.__gravfile)
        n=int(f.readline())
        gravdata=np.zeros((n,5))
        for i in range(n):
            x=f.readline().split()
            x=list(map(float, x)) # x, y, z, anomaly in mGal, stddev
            # convert gravity anomaly units to m/s^2 and rescale error
            x[3]*=-1e-5
            x[4]*=1e-5
            gravdata[i]=x
        f.close()
        return gravdata
435
436 ##############################################################################
class NetCDFDataSource(DataSource):
    """
    Data Source for gridded netCDF data that use CF/COARDS conventions.
    """
    def __init__(self, gravfile=None, magfile=None, topofile=None,
                       depth=40000.,
                       air_layer=10000.,
                       vertical_cells=25,
                       alt_of_data=0.):
        """
        :param gravfile: file with gravity data in netcdf format
        :type gravfile: ``str``
        :param magfile: file with magnetic datafile in netcdf format
        :type magfile: ``str``
        :param topofile: file with topography data in netcdf format
        :type topofile: ``str``
        :param depth: depth below surface filled with rock
        :type depth: ``float``
        :param air_layer: height above surface filled with air. It is
                          assumed that the density correction is zero
                          in this region.
        :type air_layer: ``float``
        :param vertical_cells: number of vertical cells
        :type vertical_cells: ``int``
        :param alt_of_data: altitude of measurements above ground in meters
        :type alt_of_data: ``float``
        """
        super(NetCDFDataSource,self).__init__()
        self.__topofile=topofile
        self.__gravfile=gravfile
        self.__magfile=magfile
        self.__determineExtents((-depth,air_layer,vertical_cells))
        self.__altOfData=alt_of_data

    def __determineExtents(self, ve):
        """
        Opens the gravity file and determines grid dimensions, coordinate
        variables, the gravity variable and the mask value.

        :param ve: vertical extents tuple (z_min, z_max, num_cells)
        """
        self.logger.debug("Data Source: %s"%self.__gravfile)
        f=netcdf_file(self.__gravfile, 'r')
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out gravity variable name
        grav_name=None
        if len(f.variables)==1:
            # list(...) because dict views are not subscriptable on Python 3
            grav_name=list(f.variables.keys())[0]
        else:
            for n in f.variables.keys():
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    grav_name=n
                    break
        if grav_name is None:
            raise RuntimeError("Could not determine gravity variable")

        # try to determine value for unused data
        if hasattr(f.variables[grav_name], 'missing_value'):
            maskval = float(f.variables[grav_name].missing_value)
        elif hasattr(f.variables[grav_name], '_FillValue'):
            maskval = float(f.variables[grav_name]._FillValue)
        else:
            self.logger.debug("missing_value attribute not found, using default.")
            maskval = 99999

        # see if there is a wkt string to convert coordinates
        # (getattr with default replaces the old bare try/except)
        wkt_string=getattr(f.variables[grav_name], 'esri_pe_string', None)

        # we don't trust actual_range & geospatial_lon_min/max since subset
        # data does not seem to have these fields updated.
        # Getting min/max from the arrays is obviously not very efficient but..
        #lon_range=longitude.actual_range
        #lat_range=latitude.actual_range
        #lon_range=[f.geospatial_lon_min,f.geospatial_lon_max]
        #lat_range=[f.geospatial_lat_min,f.geospatial_lat_max]
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        origin=[lon_range[0],lat_range[0],ve[0]]
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0],ve[1]-ve[0]]

        f.close()

        self.__nPts=[NX, NY, ve[2]]
        self.__origin=origin
        # we are rounding to avoid interpolation issues
        self.__delta=[np.round(lengths[i]/self.__nPts[i]) for i in range(3)]
        self.__wkt_string=wkt_string
        self.__lon=lon_name
        self.__lat=lat_name
        self.__grv=grav_name
        self.__maskval=maskval

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getGravityAndStdDev(self):
        """
        Reads the gravity grid via ripley and returns the vertical gravity
        component and its standard deviation (2 micro-m/s^2 where data exists).
        """
        nValues=self.__nPts[:2]+[1]
        first=self._dom_NE_pad[:2]+[self._dom_NE_pad[2]+int((self.__altOfData-self.__origin[2])/self.__delta[2])]
        g=ripleycpp._readNcGrid(self.__gravfile, self.__grv,
                ReducedFunction(self.getDomain()),
                first, nValues, (), self.__maskval)
        sigma=whereNonZero(g-self.__maskval)
        g=g*1e-6
        sigma=sigma*2e-6
        return g*[0,0,1], sigma

    def _readTopography(self):
        """
        Reads the topography netCDF file and returns an (n,3) array of
        lon, lat, altitude.
        """
        f=netcdf_file(self.__topofile, 'r')
        lon=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon=f.variables[n][:]
                break
        if lon is None:
            raise RuntimeError("Could not determine longitude variable")
        lat=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat=f.variables[n][:]
                break
        if lat is None:
            raise RuntimeError("Could not determine latitude variable")
        alt=None
        for n in ['altitude','alt']:
            if n in f.variables:
                alt=f.variables[n][:]
                break
        if alt is None:
            raise RuntimeError("Could not determine altitude variable")

        topodata=np.column_stack((lon,lat,alt))
        f.close()
        return topodata
610
611 ##############################################################################
class ERSDataSource(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, headerfile, datafile=None, depth=40000.,
                       air_layer=10000., vertical_cells=25, alt_of_data=0.):
        """
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :type headerfile: ``str``
        :param datafile: ER Mapper binary data file name. If not supplied the
                         name of the header file without '.ers' is assumed
        :type datafile: ``str``
        :param depth: depth below surface filled with rock
        :type depth: ``float``
        :param air_layer: height above surface filled with air. It is
                          assumed that the density correction is zero
                          in this region.
        :type air_layer: ``float``
        :param vertical_cells: number of vertical cells
        :type vertical_cells: ``int``
        :param alt_of_data: altitude of measurements above ground in meters
        :type alt_of_data: ``float``
        """
        super(ERSDataSource,self).__init__()
        self.__headerfile=headerfile
        if datafile is None:
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__readHeader((-depth, air_layer, vertical_cells))
        self.__altOfData=alt_of_data

    def __readHeader(self, ve):
        """
        Parses the ER Mapper header to determine grid size, cell dimensions,
        origin and null value.

        :param ve: vertical extents tuple (z_min, z_max, num_cells)
        """
        self.logger.debug("Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        metadata=open(self.__headerfile, 'r').readlines()
        # parse metadata
        start=-1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # flatten the Begin/End sections into dotted keys, e.g.
        # "RasterInfo.NrOfLines"
        md_dict={}
        section=[]
        for i in range(start, len(metadata)):
            line=metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        # narrowed from bare except: only a missing key or an unparsable
        # number should trigger the fallbacks below
        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except (KeyError, ValueError):
            maskval = 99999

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except (KeyError, ValueError, RuntimeError):
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except Exception:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
            op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolation issues
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin=[originX, originY]
        originY-=(NY-1)*spacingY
        self.__maskval=maskval
        spacingZ=np.round(float(ve[1]-ve[0])/ve[2])
        self.__delta = [spacingX, spacingY, spacingZ]
        self.__nPts = [NX, NY, ve[2]]
        self.__origin = [originX, originY, ve[0]]

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getGravityAndStdDev(self):
        """
        Reads the binary gravity grid via ripley and returns the vertical
        gravity component and its standard deviation.
        """
        nValues=self.__nPts[:2]+[1]
        first=self._dom_NE_pad[:2]+[self._dom_NE_pad[2]+int((self.__altOfData-self.__origin[2])/self.__delta[2])]
        g=ripleycpp._readBinaryGrid(self.__datafile,
                ReducedFunction(self.getDomain()),
                first, nValues, (), self.__maskval)
        sigma=whereNonZero(g-self.__maskval)
        g=g*1e-6
        sigma=sigma*2e-6
        return g*[0,0,1], sigma
759
760
761 ##############################################################################
class SourceFeature(object):
    """
    Abstract interface for a feature of a synthetic data source.

    A feature adds a density distribution to (parts of) a domain, for
    example a layer of a specific rock type or a simulated ore body.
    Concrete features implement both methods below.
    """

    def getDensity(self):
        """
        Returns the density applied in the area covered by this feature's
        mask. May be a constant or a data object with spatial dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask for this feature's area of interest: non-zero
        wherever the value from getDensity() should be applied, zero
        everywhere else.
        """
        raise NotImplementedError
781
class SmoothAnomaly(SourceFeature):
    """
    A source feature in the shape of a box at position (x, y) and given
    depth whose density/susceptibility falls off smoothly from an inner
    value to an outer value.
    """
    def __init__(self, lx, ly, lz, x, y, depth, rho_inner=None, rho_outer=None, k_inner=None, k_outer=None):
        """
        :param lx,ly,lz: extents of the feature in each direction
        :param x,y: horizontal position of the feature centre
        :param depth: depth of the feature below the surface
        :param rho_inner,rho_outer: inner/outer density values (or None)
        :param k_inner,k_outer: inner/outer susceptibility values (or None)
        """
        self.x=x
        self.y=y
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.depth=depth
        self.rho_inner=rho_inner
        self.rho_outer=rho_outer
        self.k_inner=k_inner
        self.k_outer=k_outer
        # lazily computed distributions, cached after the first evaluation
        self.rho=None
        self.k=None
        self.mask=None

    def _smoothProfile(self, x, inner, outer):
        """
        Computes the smooth inner-to-outer profile shared by density and
        susceptibility (the two originals were duplicated code).
        Returns 0 if either value is missing.
        """
        if outer is None or inner is None:
            return 0
        DIM=x.getDomain().getDim()
        # decay rate so the profile falls from |inner| towards |outer|
        # across the feature's extent
        alpha=-log(abs(outer/inner))*4
        prof=exp(-alpha*((x[0]-self.x)/self.lx)**2)
        prof=prof*exp(-alpha*((x[DIM-1]-(sup(x[DIM-1])-self.depth))/self.lz)**2)
        value=maximum(abs(outer), abs(inner*prof))
        if inner<0: value=-value
        return value

    def getDensity(self,x):
        """
        Returns the density distribution of this feature (cached).
        """
        if self.rho is None:
            self.rho=self._smoothProfile(x, self.rho_inner, self.rho_outer)
        return self.rho

    def getSusceptibility(self,x):
        """
        Returns the susceptibility distribution of this feature (cached).
        """
        if self.k is None:
            self.k=self._smoothProfile(x, self.k_inner, self.k_outer)
        return self.k

    def getMask(self, x):
        """
        Returns a mask that is non-zero inside the bounding box of the
        feature (centred at (x,y) at the given depth), zero outside.
        """
        DIM=x.getDomain().getDim()
        m=whereNonNegative(x[DIM-1]-(sup(x[DIM-1])-self.depth-self.lz/2)) * whereNonPositive(x[DIM-1]-(sup(x[DIM-1])-self.depth+self.lz/2)) \
            *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM>2:
            m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        self.mask = m
        return m
834
835 ##############################################################################
class SyntheticDataSource(DataSource):
    """
    Data source that generates synthetic gravity and magnetic survey data
    from a list of `SourceFeature` objects by building a domain and solving
    the forward-model PDEs on it.
    """
    def __init__(self, DIM, NE, l, h, features, latitude=-32.):
        """
        :param DIM: dimensionality of the domain (2 or 3)
        :param NE: number of elements in the vertical direction
        :param l: horizontal side length of the domain
        :param h: height of the domain
        :param features: list of `SourceFeature` objects to apply
        :param latitude: latitude used for the background magnetic field
        """
        super(SyntheticDataSource,self).__init__()
        self._features = features
        self.DIM=DIM
        self.NE=NE
        self.l=l
        self.h=h
        self.latitude=latitude

    def _createDomain(self, padding_x, padding_y):
        """
        Creates a Rectangle (2D) or Brick (3D) domain with padding, sets up
        the data masks and evaluates the reference density/susceptibility
        from the features.
        """
        NE_H=self.NE
        # horizontal element count keeps the aspect ratio of l/h
        NE_L=int((self.l/self.h)*NE_H+0.5)
        l=[self.l]*(self.DIM-1)+[self.h]
        NE=[NE_L]*(self.DIM-1)+[NE_H]
        origin=[0.]*self.DIM
        NE_new, l_new, origin_new = self._addPadding(padding_x, padding_y, \
                NE, l, origin)

        self.NE=NE_new
        self.l=l_new[0]
        self.h=l_new[self.DIM-1]

        self.logger.debug("Data Source: synthetic with %d features"%len(self._features))
        if self.DIM==2:
            dom = Rectangle(n0=NE_new[0], n1=NE_new[1], l0=l_new[0], l1=l_new[1])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d elements"%(NE_new[0], NE_new[1]))
            self.logger.debug("     length: %g x %g"%(l_new[0],l_new[1]))
            self.logger.debug("     origin: %g x %g"%(origin_new[0],origin_new[1]))
        else:
            dom = Brick(n0=NE_new[0], n1=NE_new[1], n2=NE_new[2], l0=l_new[0], l1=l_new[1], l2=l_new[2])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
            self.logger.debug("     length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
            self.logger.debug("     origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        dz=l_new[self.DIM-1]/NE_new[self.DIM-1]
        # gravity/magnetic data are "measured" in a one-element-high band
        # at the top of the domain
        self._g_mask=wherePositive(dom.getX()[0]-origin_new[0]) \
                * whereNegative(dom.getX()[0]-(l_new[0]-origin_new[0])) \
                * whereNonNegative(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+origin_new[self.DIM-1])) \
                * whereNonPositive(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+(origin_new[self.DIM-1]+dz)))

        self._B_mask=self._g_mask

        # constrain the padding area on all sides
        mask=whereNegative(self._x[self.DIM-1]) + \
                wherePositive(self._x[self.DIM-1]-l[self.DIM-1])
        for i in range(self.DIM-1):
            mask+= whereNegative(self._x[i]) + wherePositive(self._x[i]-l[i])
        self.setSetDensityMask(wherePositive(mask))
        self.setSetSusceptibilityMask(wherePositive(mask))

        # blend the features' density/susceptibility into reference fields
        rho_ref=0.
        k_ref=0.
        for f in self._features:
            m=f.getMask(self._x)
            rho_ref = rho_ref * (1-m) + f.getDensity(self._x) * m
            k_ref = k_ref * (1-m) + f.getSusceptibility(self._x) * m
        self._rho=rho_ref
        self._k=k_ref

        return dom

    def getReferenceDensity(self):
        """Returns the reference density used to generate the data."""
        return self._rho

    def getReferenceSusceptibility(self):
        """Returns the reference susceptibility used to generate the data."""
        return self._k

    def getGravityAndStdDev(self):
        """
        Solves the gravity forward problem for the reference density and
        returns the gravity field and its standard deviation mask.
        """
        pde=LinearSinglePDE(self.getDomain())
        G=U.Gravitational_Constant
        m_psi_ref=0.
        for i in range(self.DIM):
            m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
                    + whereZero(self._x[i]-sup(self._x[i]))

        pde.setValue(A=kronecker(self.getDomain()), Y=-4*np.pi*G*self._rho, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        g=-grad(psi_ref)
        sigma=self._g_mask
        return g,sigma

    def getMagneticFieldAndStdDev(self):
        """
        Solves the magnetic forward problem for the reference susceptibility
        and returns the magnetic field and its standard deviation mask.
        """
        pde=LinearSinglePDE(self.getDomain())
        B_b=self.getBackgroundMagneticField()
        # (removed unused local DIM; the loop below uses self.DIM like the
        # gravity counterpart)
        m_psi_ref=0.
        for i in range(self.DIM):
            m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
                    + whereZero(self._x[i]-sup(self._x[i]))
        pde.setValue(A=kronecker(self.getDomain()), X=(1+self._k)*B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        B= (1+self._k) * B_b -grad(psi_ref)
        sigma=self._B_mask
        return B,sigma

    def getBackgroundMagneticField(self):
        """
        Returns a simple dipole approximation of the background magnetic
        field at this source's latitude.
        """
        theta = (90-self.latitude)/180.*np.pi
        B_0=U.Mu_0 * U.Magnetic_Dipole_Moment_Earth / (4 * np.pi * U.R_Earth**3)
        B_theta= B_0 * sin(theta)
        B_r= 2 * B_0 * cos(theta)
        DIM=self.getDomain().getDim()
        if DIM<3:
            return np.array([0., -B_r])
        else:
            return np.array([-B_theta, 0., -B_r])
947

  ViewVC Help
Powered by ViewVC 1.1.26