/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Contents of /trunk/downunder/py_src/datasources.py

Parent Directory | Revision Log


Revision 4047 - (show annotations)
Tue Oct 30 09:10:07 2012 UTC (6 years, 11 months ago) by gross
File MIME type: text/x-python
File size: 35666 byte(s)
More on magnetic inversion
1
2 ##############################################################################
3 #
4 # Copyright (c) 2003-2012 by University of Queensland
5 # http://www.uq.edu.au
6 #
7 # Primary Business: Queensland, Australia
8 # Licensed under the Open Software License version 3.0
9 # http://www.opensource.org/licenses/osl-3.0.php
10 #
11 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12 # Development since 2012 by School of Earth Sciences
13 #
14 ##############################################################################
15
16 """Data readers/providers for inversions"""
17
18 __copyright__="""Copyright (c) 2003-2012 by University of Queensland
19 http://www.uq.edu.au
20 Primary Business: Queensland, Australia"""
21 __license__="""Licensed under the Open Software License version 3.0
22 http://www.opensource.org/licenses/osl-3.0.php"""
23 __url__="https://launchpad.net/escript-finley"
24
25 __all__ = ['DataSource','UBCDataSource','ERSDataSource','SyntheticDataSource','SmoothAnomaly']
26
27 import logging
28 import numpy as np
29 from esys.escript import ReducedFunction, Scalar
30 from esys.escript.linearPDEs import LinearSinglePDE
31 from esys.escript.util import *
32 import esys.escript.unitsSI as U
33 from esys.ripley import Brick, Rectangle, ripleycpp
34
35 try:
36 from scipy.io.netcdf import netcdf_file
37 __all__ += ['NetCDFDataSource']
38 except:
39 pass
40
41 def LatLonToUTM(lon, lat, wkt_string=None):
42 """
43 Converts one or more longitude,latitude pairs to the corresponding x,y
44 coordinates in the Universal Transverse Mercator projection.
45
46 :note: If the ``pyproj`` module is not installed a warning is printed and
47 the input values are scaled by a constant and returned.
48 :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
49 not available to convert the string, then the input values are
50 assumed to be given in the Clarke 1866 projection.
51
52 :param lon: longitude value(s)
53 :type lon: `float`, `list`, `tuple`, or ``numpy.array``
54 :param lat: latitude value(s)
55 :type lat: `float`, `list`, `tuple`, or ``numpy.array``
56 :rtype: ``numpy.array``
57 """
58
59 # not really optimal: if pyproj is not installed we return the input
60 # values scaled by a constant.
61 try:
62 import pyproj
63 except:
64 print("Warning, pyproj not available. Domain extents will be wrong")
65 return np.array(lon)*1000., np.array(lat)*1000.
66
67 # determine UTM zone from the input data
68 zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
69 try:
70 import osgeo.osr
71 srs = osgeo.osr.SpatialReference()
72 srs.ImportFromWkt(wkt_string)
73 p_src = pyproj.Proj(srs.ExportToProj4())
74 except:
75 p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
76 # we assume southern hemisphere here
77 p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
78 x,y=pyproj.transform(p_src, p_dest, lon, lat)
79 return x,y
80
class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    This is an abstract base class that implements common functionality.
    Methods to be overwritten by subclasses are marked as such.
    This class assumes 2D data which is mapped to a slice of a 3D domain.
    For other setups override the `_createDomain` method.
    """

    def __init__(self):
        """
        Constructor. Sets some defaults and initializes logger.
        """
        self._constrainBottom=False
        self._constrainSides=True
        self._domain=None
        self.__set_density_mask=None
        self.__set_susceptibility_mask=None
        self.setPadding()
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)

    def setPadding(self, pad_x=10, pad_y=10):
        """
        Sets the amount of padding around the dataset. If `pad_x`/`pad_y`
        is >=1 the value is treated as number of elements to be added to the
        domain (per side).
        If ``0 < pad_x,pad_y < 1``, the padding amount is relative to the
        dataset size. For example, calling ``setPadding(3, 0.1)`` to a data
        source with size 10x20 will result in the padded data set size
        16x24 (10+2*3, 20*(1+2*0.1))

        :param pad_x: Padding per side in x direction (default: 10 elements)
        :type pad_x: ``int`` or ``float``
        :param pad_y: Padding per side in y direction (default: 10 elements).
                      This value is only used for 3-dimensional datasets
        :type pad_y: ``int`` or ``float``
        """
        self._pad_x=pad_x
        self._pad_y=pad_y

    def setConstraints(self, bottom=False, sides=True):
        """
        If `bottom` is True, then the density mask will be set to 1 in the
        padding area at the bottom of the domain. By default this area is
        unconstrained. Similarly, if `sides` is True (default) then the
        horizontal padding area is constrained, otherwise not.

        :param bottom: Whether to constrain the density at the bottom of the
                       domain
        :type bottom: ``bool``
        :param sides: Whether to constrain the density in the padding area
                      surrounding the data
        :type sides: ``bool``
        """
        self._constrainBottom=bottom
        self._constrainSides=sides

    def getDomain(self):
        """
        Returns a domain that spans the data area plus padding.
        The domain is created the first time this method is called, subsequent
        calls return the same domain so anything that affects the domain
        (such as padding) needs to be set beforehand.

        :return: The escript domain for this data source.
        :rtype: `esys.escript.Domain`
        """
        if self._domain is None:
            self._domain=self._createDomain(self._pad_x, self._pad_y)
        return self._domain

    def getSetDensityMask(self):
        """
        Returns the density mask data object, where mask has value 1 in the
        padding area, 0 elsewhere.

        :return: The mask for the density.
        :rtype: `esys.escript.Data`
        """
        return self.__set_density_mask

    def setSetDensityMask(self, mask):
        """
        Sets the density mask to use.
        """
        self.__set_density_mask=mask

    def getSetSusceptibilityMask(self):
        """
        Returns the susceptibility mask data object, where mask has value 1
        in the padding area, 0 elsewhere.

        :return: The mask for the susceptibility.
        :rtype: `esys.escript.Data`
        """
        return self.__set_susceptibility_mask

    def setSetSusceptibilityMask(self, mask):
        """
        Sets the susceptibility mask to use.
        """
        self.__set_susceptibility_mask=mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as a
        tuple. This method must be implemented in subclasses that supply
        gravity data.
        """
        raise NotImplementedError

    def getMagneticFieldAndStdDev(self):
        """
        Returns the magnetic field and standard deviation data objects as a
        tuple. This method must be implemented in subclasses that supply
        magnetic data.
        """
        raise NotImplementedError

    def getBackgroundMagneticField(self):
        """
        Returns the background magnetic field. This method must be
        implemented in subclasses that supply magnetic data.
        """
        # BUG FIX: this used to *return* NotImplementedError (the class
        # object) instead of raising it, silently handing callers a type.
        raise NotImplementedError

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getVerticalExtents(self):
        """
        returns a tuple ``(z0, nz, dz)``, where

        - ``z0`` = minimum z coordinate (origin)
        - ``nz`` = number of elements in z direction
        - ``dz`` = spacing of elements (= cell size in z)

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def _addPadding(self, pad_x, pad_y, NE, l, origin):
        """
        Helper method that computes new number of elements, length and origin
        after adding padding to the input values.

        :param pad_x: Number of elements or fraction of padding in x direction
        :type pad_x: ``int`` or ``float``
        :param pad_y: Number of elements or fraction of padding in y direction
        :type pad_y: ``int`` or ``float``
        :param NE: Initial number of elements
        :type NE: ``tuple`` or ``list``
        :param l: Initial side lengths
        :type l: ``tuple`` or ``list``
        :param origin: Initial origin
        :type origin: ``tuple`` or ``list``
        :return: tuple with three elements ``(NE_padded, l_padded, origin_padded)``,
                 which are lists of the updated input parameters
        """
        DIM=len(NE)
        frac=[0.]*DIM
        # padding is applied to each side so multiply by 2 to get the total
        # amount of padding per dimension.
        # BUG FIX: the fractional-padding test used to check ``pad_y<1``
        # instead of ``pad_x<1``, so fractional pad_x was ignored (and an
        # element-count pad_x was misread as a fraction when pad_y<1).
        if 0 < pad_x < 1:
            frac[0]=2.*pad_x
        elif pad_x>=1:
            frac[0]=2.*pad_x/float(NE[0])
        if DIM>2:
            if 0 < pad_y < 1:
                frac[1]=2.*pad_y
            elif pad_y>=1:
                frac[1]=2.*pad_y/float(NE[1])

        # calculate new number of elements
        NE_new=[int(NE[i]*(1+frac[i])) for i in range(DIM)]
        NEdiff=[NE_new[i]-NE[i] for i in range(DIM)]
        spacing=[l[i]/NE[i] for i in range(DIM)]
        l_new=[NE_new[i]*spacing[i] for i in range(DIM)]
        # shift the origin so the original data area stays centered
        origin_new=[origin[i]-NEdiff[i]/2.*spacing[i] for i in range(DIM)]
        return NE_new, l_new, origin_new

    def _interpolateOnDomain(self, data):
        """
        Helper method that interpolates data arrays onto the domain.
        Currently this works like a nearest neighbour mapping, i.e. values
        are directly inserted into data objects at closest location.
        """
        dom=self.getDomain()
        dim=dom.getDim()
        # determine number of values required per element
        DPP=Scalar(0., ReducedFunction(dom)).getNumberOfDataPoints()
        for i in range(dim):
            DPP=DPP/self._dom_NE[i]
        DPP=int(DPP)

        # idx_mult.dot([x,y,z]) = flat index into data object
        idx_mult=np.array([DPP]+self._dom_NE[:dim-1]).cumprod()

        # separate data arrays and coordinates
        num_arrays=len(data[0])-dim
        arrays=[]
        for i in range(num_arrays):
            d=Scalar(0., ReducedFunction(dom))
            d.expand()
            arrays.append(d)

        for entry in data:
            # nearest grid cell for this data point
            index=[int((entry[i]-self._dom_origin[i])/self._spacing[i]) for i in range(dim)]
            index=int(idx_mult.dot(index))
            for i in range(num_arrays):
                for p in range(DPP):
                    arrays[i].setValueOfDataPoint(index+p, entry[dim+i])

        return arrays

    def _createDomain(self, padding_x, padding_y):
        """
        Creates and returns an escript domain that spans the entire area of
        available data plus a buffer zone. This method is called only once
        the first time `getDomain()` is invoked and may be overwritten if
        required.

        :return: The escript domain for this data source.
        :rtype: `esys.escript.Domain`
        """
        X0, NX, DX = self.getDataExtents()
        z0, nz, dz = self.getVerticalExtents()

        # number of elements (without padding)
        NE = [NX[0], NX[1], nz]

        # origin of domain (without padding)
        origin = [X0[0], X0[1], z0]
        origin = [np.round(oi) for oi in origin]

        # cell size / point spacing
        self._spacing = DX+[dz]
        self._spacing = [float(np.round(si)) for si in self._spacing]

        # length of domain (without padding)
        l = [NE[i]*self._spacing[i] for i in range(len(NE))]

        # now add padding to the values
        NE_new, l_new, origin_new = self._addPadding(padding_x, padding_y, \
                NE, l, origin)

        # number of padding elements per side
        NE_pad=[(NE_new[i]-NE[i])//2 for i in range(3)]

        self._dom_NE_pad = NE_pad
        self._dom_len = l_new
        self._dom_NE = NE_new
        self._dom_origin = origin_new
        lo=[(origin_new[i], origin_new[i]+l_new[i]) for i in range(3)]
        dom=Brick(*self._dom_NE, l0=lo[0], l1=lo[1], l2=lo[2])
        # ripley may internally adjust NE and length, so recompute
        self._dom_len=[sup(dom.getX()[i])-inf(dom.getX()[i]) for i in range(3)]
        self._dom_NE=[int(self._dom_len[i]/self._spacing[i]) for i in range(3)]
        # x is shifted so 0 marks the start of the (unpadded) data area
        x=dom.getX()-[self._dom_origin[i]+NE_pad[i]*self._spacing[i] for i in range(3)]
        mask=wherePositive(dom.getX()[2])

        # prepare density mask (=1 at padding area, 0 else)
        if self._constrainSides:
            for i in range(2):
                mask=mask + whereNegative(x[i]) + \
                        wherePositive(x[i]-l_new[i]+2*NE_pad[i]*self._spacing[i])

        if self._constrainBottom:
            mask = mask + whereNonPositive(x[2])
        self.setSetDensityMask(wherePositive(mask))

        self.logger.debug("Domain size: %d x %d x %d elements"%(self._dom_NE[0],self._dom_NE[1],self._dom_NE[2]))
        self.logger.debug("     length: %g x %g x %g"%(self._dom_len[0],self._dom_len[1],self._dom_len[2]))
        self.logger.debug("     origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        return dom
366
367
368 ##############################################################################
class UBCDataSource(DataSource):
    """
    Data source that reads a UBC-GIF style mesh file plus a gravity
    observation file (x, y, z, anomaly in mGal, stddev per line).
    """
    def __init__(self, meshfile, gravfile, topofile=None):
        super(UBCDataSource,self).__init__()
        self.__meshfile=meshfile
        self.__gravfile=gravfile
        self.__topofile=topofile
        self.__readMesh()

    def __readMesh(self):
        """
        Parses the mesh file: line 0 = number of points per axis, line 1 =
        origin, remaining lines = cell sizes in "count*size" notation.
        """
        with open(self.__meshfile) as f:
            meshdata=f.readlines()
        numDataPoints=meshdata[0].split()
        origin=meshdata[1].split()
        # wrap map() in list() so the results stay indexable/mutable under
        # Python 3 (map returns an iterator there)
        self.__nPts=list(map(int, numDataPoints))
        self.__origin=list(map(float, origin))
        self.__delta=[float(X.split('*')[1]) for X in meshdata[2:]]
        # vertical data is upside down
        self.__origin[2]-=(self.__nPts[2]-1)*self.__delta[2]
        self.logger.debug("Data Source: %s (mesh file: %s)"%(self.__gravfile, self.__meshfile))

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly (as a vector with only a z component)
        and the standard deviation interpolated onto the domain.
        """
        gravlist=self.__readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def __readTopography(self):
        """
        Reads the topography file: first line is the number of records,
        followed by x, y, altitude triplets.
        """
        with open(self.__topofile) as f:
            n=int(f.readline())
            topodata=np.zeros((n,3))
            for i in range(n):
                x=f.readline().split()
                x=list(map(float, x))
                topodata[i]=x
        return topodata

    def __readGravity(self):
        """
        Reads the gravity observation file: first line is the number of
        records, followed by x, y, z, anomaly (mGal), stddev per line.
        """
        with open(self.__gravfile) as f:
            n=int(f.readline())
            gravdata=np.zeros((n,5))
            for i in range(n):
                x=f.readline().split()
                x=list(map(float, x)) # x, y, z, anomaly in mGal, stddev
                # convert gravity anomaly units to m/s^2 and rescale error
                x[3]*=-1e-5
                x[4]*=1e-5
                gravdata[i]=x
        return gravdata
435
436 ##############################################################################
class NetCDFDataSource(DataSource):
    """
    Data Source for gridded netCDF data that use CF/COARDS conventions.
    """
    def __init__(self, gravfile=None, magfile=None, topofile=None,
                 depth=40000.,
                 air_layer=10000.,
                 vertical_cells=25,
                 alt_of_data=0.):
        """
        :param gravfile: file with gravity data in netcdf format
        :type gravfile: ``str``
        :param magfile: file with magnetic datafile in netcdf format
        :type magfile: ``str``
        :param topofile: file with topography data in netcdf format
        :type topofile: ``str``
        :param depth: depth below surface filled with rock
        :type depth: ``float``
        :param air_layer: height above surface filled with air. It is assumed
                          that the density correction is zero in this region.
        :type air_layer: ``float``
        :param vertical_cells: number of vertical cells
        :type vertical_cells: ``int``
        :param alt_of_data: altitude of measurements above ground in meter
        :type alt_of_data: ``float``
        """
        super(NetCDFDataSource,self).__init__()
        self.__topofile=topofile
        self.__gravfile=gravfile
        self.__magfile=magfile
        # BUG FIX: this call used the undefined name ``n`` as the third
        # tuple element; the number of vertical cells must be passed.
        self.__determineExtents((-depth, air_layer, vertical_cells))
        self.__altOfData=alt_of_data

    def __determineExtents(self, ve):
        """
        Opens the gravity netCDF file and determines grid dimensions,
        origin, spacing, variable names and the missing-data value.

        :param ve: vertical extents tuple ``(z0, z1, nz)``
        """
        self.logger.debug("Data Source: %s"%self.__gravfile)
        f=netcdf_file(self.__gravfile, 'r')
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out gravity variable name
        grav_name=None
        if len(f.variables)==1:
            # list() keeps this working on Python 3 where keys() is a view
            grav_name=list(f.variables.keys())[0]
        else:
            for n in f.variables.keys():
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    grav_name=n
                    break
        if grav_name is None:
            raise RuntimeError("Could not determine gravity variable")

        # try to determine value for unused data
        if hasattr(f.variables[grav_name], 'missing_value'):
            maskval = float(f.variables[grav_name].missing_value)
        elif hasattr(f.variables[grav_name], '_FillValue'):
            maskval = float(f.variables[grav_name]._FillValue)
        else:
            self.logger.debug("missing_value attribute not found, using default.")
            maskval = 99999

        # see if there is a wkt string to convert coordinates
        wkt_string=getattr(f.variables[grav_name], 'esri_pe_string', None)

        # we don't trust actual_range & geospatial_lon_min/max since subset
        # data does not seem to have these fields updated.
        # Getting min/max from the arrays is obviously not very efficient but..
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        origin=[lon_range[0],lat_range[0],ve[0]]
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0],ve[1]-ve[0]]

        f.close()

        self.__nPts=[NX, NY, ve[2]]
        self.__origin=origin
        # we are rounding to avoid interpolation issues
        self.__delta=[np.round(lengths[i]/self.__nPts[i]) for i in range(3)]
        self.__wkt_string=wkt_string
        self.__lon=lon_name
        self.__lat=lat_name
        self.__grv=grav_name
        self.__maskval=maskval

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getGravityAndStdDev(self):
        """
        Reads the gravity grid via ripley and returns the anomaly (vector
        with z component only, converted to m/s^2) plus a standard deviation
        that is zero at masked points.
        """
        nValues=self.__nPts[:2]+[1]
        first=self._dom_NE_pad[:2]+[self._dom_NE_pad[2]+int((self.__altOfData-self.__origin[2])/self.__delta[2])]
        g=ripleycpp._readNcGrid(self.__gravfile, self.__grv,
                ReducedFunction(self.getDomain()),
                first, nValues, (), self.__maskval)
        sigma=whereNonZero(g-self.__maskval)
        g=g*1e-6
        sigma=sigma*2e-6
        return g*[0,0,1], sigma

    def _readTopography(self):
        """
        Reads longitude, latitude and altitude variables from the topography
        netCDF file and returns them as an (n,3) array.
        """
        f=netcdf_file(self.__topofile, 'r')
        lon=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon=f.variables[n][:]
                break
        if lon is None:
            raise RuntimeError("Could not determine longitude variable")
        lat=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat=f.variables[n][:]
                break
        if lat is None:
            raise RuntimeError("Could not determine latitude variable")
        alt=None
        for n in ['altitude','alt']:
            if n in f.variables:
                alt=f.variables[n][:]
                break
        if alt is None:
            raise RuntimeError("Could not determine altitude variable")

        topodata=np.column_stack((lon,lat,alt))
        f.close()
        return topodata
609
610 ##############################################################################
class ERSDataSource(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, headerfile, datafile=None, vertical_extents=(-40000,10000,25), alt_of_data=0.):
        """
        :param headerfile: usually ends in .ers
        :param datafile: usually has the same name as the headerfile without
                         '.ers'; derived from `headerfile` if not given
        :param vertical_extents: ``(z0, z1, nz)`` vertical extents tuple
        :param alt_of_data: altitude of measurements above ground in meter
        """
        super(ERSDataSource,self).__init__()
        self.__headerfile=headerfile
        if datafile is None:
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__readHeader(vertical_extents)
        self.__altOfData=alt_of_data

    def __readHeader(self, ve):
        """
        Parses the ER Mapper header into a flat key dictionary and derives
        grid dimensions, spacing, origin and the null value from it.
        """
        self.logger.debug("Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        with open(self.__headerfile, 'r') as f:
            metadata=f.readlines()
        # parse metadata
        start=-1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # flatten the nested "<Section> Begin/End" structure into
        # dotted keys, e.g. RasterInfo.CellInfo.Xdimension
        md_dict={}
        section=[]
        for i in range(start, len(metadata)):
            line=metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except (KeyError, ValueError):
            maskval = 99999

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except (KeyError, ValueError, RuntimeError):
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except Exception:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
            op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolation issues
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin=[originX, originY]
        originY-=(NY-1)*spacingY
        self.__maskval=maskval
        spacingZ=np.round(float(ve[1]-ve[0])/ve[2])
        self.__delta = [spacingX, spacingY, spacingZ]
        self.__nPts = [NX, NY, ve[2]]
        self.__origin = [originX, originY, ve[0]]

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getGravityAndStdDev(self):
        """
        Reads the binary raster via ripley and returns the anomaly (vector
        with z component only, converted to m/s^2) plus a standard deviation
        that is zero at masked points.
        """
        nValues=self.__nPts[:2]+[1]
        first=self._dom_NE_pad[:2]+[self._dom_NE_pad[2]+int((self.__altOfData-self.__origin[2])/self.__delta[2])]
        g=ripleycpp._readBinaryGrid(self.__datafile,
                ReducedFunction(self.getDomain()),
                first, nValues, (), self.__maskval)
        sigma=whereNonZero(g-self.__maskval)
        g=g*1e-6
        sigma=sigma*2e-6
        return g*[0,0,1], sigma
744
745
746 ##############################################################################
class SourceFeature(object):
    """
    Abstract interface for a feature of a synthetic data source.

    A feature contributes a density distribution to (parts of) the domain,
    e.g. a layer of a specific rock type or a simulated ore body. Concrete
    features implement `getDensity` and `getMask`.
    """

    def getDensity(self):
        """
        Returns the density for the area covered by the mask. May be a
        constant or a data object with spatial dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature, i.e.
        non-zero wherever the density from `getDensity` applies and zero
        everywhere else.
        """
        raise NotImplementedError
766
class SmoothAnomaly(SourceFeature):
    """
    A box-shaped feature of size lx x ly x lz centered at (x, y) at the
    given depth, with a smooth (gaussian-like) density and/or susceptibility
    profile between an inner and an outer value.
    """
    def __init__(self, lx, ly, lz, x, y, depth, rho_inner=None, rho_outer=None, k_inner=None, k_outer=None):
        """
        :param lx,ly,lz: extents of the anomaly box
        :param x,y: horizontal center of the anomaly
        :param depth: depth of the anomaly center below the domain top
        :param rho_inner: density at the center (None = no density set)
        :param rho_outer: density at the box boundary
        :param k_inner: susceptibility at the center (None = none set)
        :param k_outer: susceptibility at the box boundary
        """
        self.x=x
        self.y=y
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.depth=depth
        self.rho_inner=rho_inner
        self.rho_outer=rho_outer
        self.k_inner=k_inner
        self.k_outer=k_outer
        # computed lazily on first request and cached
        self.rho=None
        self.k=None
        self.mask=None

    def getDensity(self, x):
        """
        Returns the (cached) density profile evaluated at coordinates `x`.
        """
        if self.rho is None:
            self.rho=self.__makeProfile(x, self.rho_inner, self.rho_outer)
        return self.rho

    def getSusceptibility(self, x):
        """
        Returns the (cached) susceptibility profile evaluated at `x`.
        """
        if self.k is None:
            self.k=self.__makeProfile(x, self.k_inner, self.k_outer)
        return self.k

    def __makeProfile(self, x, v_inner, v_outer):
        """
        Shared gaussian-profile computation for density and susceptibility
        (the two original methods were identical up to the variable pair).
        Returns 0 if either value is None.
        """
        if v_outer is None or v_inner is None:
            return 0
        DIM=x.getDomain().getDim()
        # decay rate chosen so the profile drops to |v_outer| at the edges
        alpha=-log(abs(v_outer/v_inner))*4
        # note: only x[0] and the vertical coordinate enter the profile;
        # the y direction is handled by the mask alone (as in the original)
        f=exp(-alpha*((x[0]-self.x)/self.lx)**2)
        f=f*exp(-alpha*((x[DIM-1]-(sup(x[DIM-1])-self.depth))/self.lz)**2)
        v=maximum(abs(v_outer), abs(v_inner*f))
        if v_inner<0: v=-v
        return v

    def getMask(self, x):
        """
        Returns a mask that is 1 inside the anomaly box and 0 outside.
        """
        DIM=x.getDomain().getDim()
        m=whereNonNegative(x[DIM-1]-(sup(x[DIM-1])-self.depth-self.lz/2)) * whereNonPositive(x[DIM-1]-(sup(x[DIM-1])-self.depth+self.lz/2)) \
            *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM>2:
            m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        self.mask = m
        return m
819
820 ##############################################################################
class SyntheticDataSource(DataSource):
    """
    Data source that generates synthetic gravity and magnetic survey data
    from a list of `SourceFeature` objects by solving PDEs for the
    reference density/susceptibility distribution.
    """
    def __init__(self, DIM, NE, l, h, features, latitude=-32.):
        """
        :param DIM: spatial dimension of the domain (2 or 3)
        :param NE: number of elements in the vertical direction
        :param l: horizontal length of the domain
        :param h: height (vertical extent) of the domain
        :param features: list of `SourceFeature` objects to apply
        :param latitude: latitude used for the background magnetic field
        """
        super(SyntheticDataSource,self).__init__()
        self._features = features
        self.DIM=DIM
        self.NE=NE
        self.l=l
        self.h=h
        self.latitude=latitude

    def _createDomain(self, padding_x, padding_y):
        """
        Builds a padded Rectangle (2D) or Brick (3D) domain, sets the
        density/susceptibility masks and evaluates the reference density
        and susceptibility from the features.
        """
        # NE is the vertical element count; derive the horizontal count
        # from the aspect ratio l/h
        NE_H=self.NE
        NE_L=int((self.l/self.h)*NE_H+0.5)
        l=[self.l]*(self.DIM-1)+[self.h]
        NE=[NE_L]*(self.DIM-1)+[NE_H]
        origin=[0.]*self.DIM
        NE_new, l_new, origin_new = self._addPadding(padding_x, padding_y, \
                NE, l, origin)

        # note: NE, l and h are overwritten with the padded values here
        self.NE=NE_new
        self.l=l_new[0]
        self.h=l_new[self.DIM-1]

        self.logger.debug("Data Source: synthetic with %d features"%len(self._features))
        if self.DIM==2:
            dom = Rectangle(n0=NE_new[0], n1=NE_new[1], l0=l_new[0], l1=l_new[1])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d elements"%(NE_new[0], NE_new[1]))
            self.logger.debug("     length: %g x %g"%(l_new[0],l_new[1]))
            self.logger.debug("     origin: %g x %g"%(origin_new[0],origin_new[1]))
        else:
            dom = Brick(n0=NE_new[0], n1=NE_new[1], n2=NE_new[2], l0=l_new[0], l1=l_new[1], l2=l_new[2])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
            self.logger.debug("     length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
            self.logger.debug("     origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        # measurement mask: a band of thickness dz; NOTE(review): the band is
        # placed at l_new+origin_new in the vertical — confirm this selects
        # the intended survey height (origin_new is negative after padding)
        dz=l_new[self.DIM-1]/NE_new[self.DIM-1]
        self._g_mask=wherePositive(dom.getX()[0]-origin_new[0]) \
                * whereNegative(dom.getX()[0]-(l_new[0]-origin_new[0])) \
                * whereNonNegative(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+origin_new[self.DIM-1])) \
                * whereNonPositive(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+(origin_new[self.DIM-1]+dz)))

        # magnetic measurements use the same mask as gravity
        self._B_mask=self._g_mask

        # constrain everything outside the original (unpadded) data area
        mask=whereNegative(self._x[self.DIM-1]) + \
                wherePositive(self._x[self.DIM-1]-l[self.DIM-1])
        for i in range(self.DIM-1):
            mask+= whereNegative(self._x[i]) + wherePositive(self._x[i]-l[i])
        self.setSetDensityMask(wherePositive(mask))
        self.setSetSusceptibilityMask(wherePositive(mask))

        # blend the features' density/susceptibility into reference fields;
        # later features overwrite earlier ones where masks overlap
        rho_ref=0.
        k_ref=0
        for f in self._features:
            m=f.getMask(self._x)
            rho_ref = rho_ref * (1-m) + f.getDensity(self._x) * m
            k_ref = k_ref * (1-m) + f.getSusceptibility(self._x) * m
        self._rho=rho_ref
        self._k=k_ref

        return dom


    def getReferenceDensity(self):
        # reference density set up by _createDomain
        return self._rho
    def getReferenceSusceptibility(self):
        # reference susceptibility set up by _createDomain
        return self._k

    def getGravityAndStdDev(self):
        """
        Solves a Poisson equation for the gravity potential of the
        reference density and returns (gravity vector, stddev mask).
        """
        pde=LinearSinglePDE(self.getDomain())
        G=U.Gravitational_Constant
        # fix the potential on the entire domain boundary
        m_psi_ref=0.
        for i in range(self.DIM):
            m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
                    + whereZero(self._x[i]-sup(self._x[i]))

        pde.setValue(A=kronecker(self.getDomain()), Y=-4*np.pi*G*self._rho, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        g=-grad(psi_ref)
        sigma=self._g_mask
        return g,sigma

    def getMagneticFieldAndStdDev(self):
        """
        Solves a potential equation for the magnetic field of the reference
        susceptibility and returns (magnetic field, stddev mask).
        """
        pde=LinearSinglePDE(self.getDomain())
        B_b=self.getBackgroundMagneticField()
        # NOTE(review): local DIM is assigned but the loop below uses
        # self.DIM — harmless, but the local is unused
        DIM=self.getDomain().getDim()
        # fix the potential on the entire domain boundary
        m_psi_ref=0.
        for i in range(self.DIM):
            m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
                    + whereZero(self._x[i]-sup(self._x[i]))
        pde.setValue(A=kronecker(self.getDomain()), X=(1+self._k)*B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        B= (1+self._k) * B_b -grad(psi_ref)
        sigma=self._B_mask
        return B,sigma

    def getBackgroundMagneticField(self):
        """
        Returns the background magnetic field vector for the configured
        latitude using a simple dipole model of the Earth's field.
        """
        # co-latitude in radians
        theta = (90-self.latitude)/180.*np.pi
        B_0=U.Mu_0  * U.Magnetic_Dipole_Moment_Earth / (4 * np.pi *  U.R_Earth**3)
        B_theta= B_0 * sin(theta)
        B_r= 2 * B_0 * cos(theta)
        DIM=self.getDomain().getDim()
        if DIM<3:
            return np.array([0.,  -B_r])
        else:
            return np.array([-B_theta, 0.,  -B_r])
932

  ViewVC Help
Powered by ViewVC 1.1.26