/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Contents of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 4267 - (show annotations)
Fri Mar 1 04:50:54 2013 UTC (6 years, 7 months ago) by caltinay
File MIME type: text/x-python
File size: 40599 byte(s)
Updated inversion example scripts, added ER Mapper data, checked results
and corrected some issues. Cookbook does not reflect the changes yet.

1
2 ##############################################################################
3 #
4 # Copyright (c) 2003-2013 by University of Queensland
5 # http://www.uq.edu.au
6 #
7 # Primary Business: Queensland, Australia
8 # Licensed under the Open Software License version 3.0
9 # http://www.opensource.org/licenses/osl-3.0.php
10 #
11 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12 # Development since 2012 by School of Earth Sciences
13 #
14 ##############################################################################
15
16 """Data readers/providers for inversions"""
17
18 __copyright__="""Copyright (c) 2003-2013 by University of Queensland
19 http://www.uq.edu.au
20 Primary Business: Queensland, Australia"""
21 __license__="""Licensed under the Open Software License version 3.0
22 http://www.opensource.org/licenses/osl-3.0.php"""
23 __url__="https://launchpad.net/escript-finley"
24
25 __all__ = ['simpleGeoMagneticFluxDensity', 'DataSource', 'ErMapperData', \
26 'SyntheticDataBase', 'SyntheticFeatureData', 'SyntheticData',
27 'SmoothAnomaly']
28
29 import logging
30 import numpy as np
31 from esys.escript import ReducedFunction
32 from esys.escript import unitsSI as U
33 from esys.escript.linearPDEs import LinearSinglePDE
34 from esys.escript.util import *
35 from esys.ripley import ripleycpp
36
37 try:
38 from scipy.io.netcdf import netcdf_file
39 __all__ += ['NetCdfData']
40 except:
41 pass
42
def LatLonToUTM(lon, lat, wkt_string=None):
    """
    Converts one or more longitude,latitude pairs to the corresponding x,y
    coordinates in the Universal Transverse Mercator projection.

    :note: The ``pyproj`` module is required for this method. If it is not
           found an exception is raised.
    :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
           not available to convert the string, then the input values are
           assumed to be using the Clarke 1866 ellipsoid.

    :param lon: longitude value(s)
    :type lon: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param lat: latitude value(s)
    :type lat: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param wkt_string: Well-known text (WKT) string describing the coordinate
                       system used. The ``gdal`` module is used to convert
                       the string to the corresponding Proj4 string.
    :type wkt_string: ``str``
    :return: x coordinate(s), y coordinate(s) and the UTM zone used for the
             projection
    :rtype: ``tuple``
    """
    try:
        import pyproj
    except ImportError:
        # narrowed from a bare 'except:' - only a missing module should
        # produce this message
        raise ImportError("In order to perform coordinate transformations on "
            "the data you are using the 'pyproj' Python module is required but "
            "was not found. Please install the module and try again.")

    # determine UTM zone from the input data (median of the longitudes)
    zone = int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))

    # try to build the source projection from the WKT string via gdal;
    # fall back to Clarke 1866 if no string was given or conversion fails
    p_src = None
    if wkt_string is not None:
        try:
            import osgeo.osr
            srs = osgeo.osr.SpatialReference()
            srs.ImportFromWkt(wkt_string)
            p_src = pyproj.Proj(srs.ExportToProj4())
        except Exception:
            p_src = None
    if p_src is None:
        p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')

    # check for hemisphere
    if np.median(np.array(lat)) < 0.:
        south = '+south '
    else:
        south = ''
    p_dest = pyproj.Proj('+proj=utm +zone=%d %s+units=m'%(zone, south))
    x, y = pyproj.transform(p_src, p_dest, lon, lat)
    return x, y, zone
90
def simpleGeoMagneticFluxDensity(latitude, longitude=0.):
    """
    Returns an approximation of the geomagnetic flux density B at the given
    `latitude`. The parameter `longitude` is currently ignored.

    :param latitude: geographic latitude in degrees
    :param longitude: geographic longitude (currently unused)
    :rtype: ``tuple``
    """
    # colatitude in radians
    theta = (90 - latitude) / 180. * np.pi
    # dipole field strength at the equator
    B_0 = U.Mu_0 * U.Magnetic_Dipole_Moment_Earth / (4 * np.pi * U.R_Earth**3)
    # radial component, tangential component; the longitudinal component of
    # a pure dipole field is zero
    return 2 * B_0 * cos(theta), B_0 * sin(theta), 0.
103
class DataSource(object):
    """
    Provides survey data to the inversion process.

    This is an abstract base class implementing the functionality shared by
    all data sources. Methods that subclasses have to override are marked as
    such. The class assumes 2D data which is mapped to a slice of a 3D
    domain; for other setups override the methods as required.
    """

    # identifiers for the supported types of survey data
    GRAVITY, MAGNETIC = list(range(2))

    def __init__(self):
        """
        Constructor. Sets some defaults and initializes logger.
        """
        self.logger = logging.getLogger('inv.%s' % type(self).__name__)
        self.__subsampling_factor = 1

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDataType(self):
        """
        Returns the type of survey data managed by this source, which must
        be one of `GRAVITY` or `MAGNETIC`.

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        This method is called by the `DomainBuilder` to retrieve the survey
        data as `Data` objects on the given domain.

        Subclasses should return one or more `Data` objects with survey data
        interpolated on the given `escript` domain. The exact return type
        depends on the type of data.

        :param domain: the escript domain to use
        :type domain: `esys.escript.Domain`
        :param origin: the origin coordinates of the domain
        :type origin: ``tuple`` or ``list``
        :param NE: the number of domain elements in each dimension
        :type NE: ``tuple`` or ``list``
        :param spacing: the cell sizes (node spacing) in the domain
        :type spacing: ``tuple`` or ``list``
        """
        raise NotImplementedError

    def getUtmZone(self):
        """
        All data source coordinates are converted to UTM (Universal
        Transverse Mercator) in order to have useful domain extents.
        Subclasses should implement this method and return the UTM zone
        number of the projected coordinates.
        """
        raise NotImplementedError

    def setSubsamplingFactor(self, f):
        """
        Sets the data subsampling factor (default=1).

        The factor is applied in all dimensions: a 2D dataset of
        300 x 150 points becomes 150 x 75 with a factor of 2. This becomes
        important when adding data of varying resolution to a
        `DomainBuilder`.
        """
        self.__subsampling_factor = f

    def getSubsamplingFactor(self):
        """
        Returns the subsampling factor set via `setSubsamplingFactor`.
        """
        return self.__subsampling_factor
188
189
##############################################################################
class ErMapperData(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, data_type, headerfile, datafile=None, altitude=0.,
                 error=None, scale_factor=None, null_value=None):
        """
        :param data_type: type of data, must be `GRAVITY` or `MAGNETIC`
        :type data_type: ``int``
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :type headerfile: ``str``
        :param datafile: ER Mapper binary data file name. If not supplied the
                         name of the header file without '.ers' is assumed
        :type datafile: ``str``
        :param altitude: altitude of measurements above ground in meters
        :type altitude: ``float``
        :param error: constant value to use for the data uncertainties.
                      If a value is supplied, it is scaled by the same factor
                      as the measurements. If not provided the error is
                      assumed to be 2 units for all measurements (i.e. 0.2
                      mGal and 2 nT for gravity and magnetic, respectively)
        :type error: ``float``
        :param scale_factor: the measurements and error values are scaled by
                             this factor. By default, gravity data is assumed
                             to be given in 1e-6 m/s^2 (0.1 mGal), while
                             magnetic data is assumed to be in 1e-9 T (1 nT).
        :type scale_factor: ``float``
        :param null_value: value that is used in the file to mark undefined
                           areas. This information is usually included in the
                           file.
        :type null_value: ``float``
        """
        super(ErMapperData, self).__init__()
        self.__headerfile = headerfile
        if datafile is None:
            # by convention the binary file name is the header file name
            # minus the '.ers' extension
            self.__datafile = headerfile[:-4]
        else:
            self.__datafile = datafile
        self.__altitude = altitude
        self.__data_type = data_type
        self.__utm_zone = None
        self.__scale_factor = scale_factor
        self.__null_value = null_value
        self.__error_value = error
        self.__readHeader()

    def __readHeader(self):
        """
        Parses the ER Mapper header file and populates data extents, null
        value, scale factor and error value for this data source.

        :raises RuntimeError: if the header is invalid or describes data in
                              an unsupported format
        """
        self.logger.debug("Checking Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        # context manager ensures the header file handle is closed (the
        # original left it to the garbage collector)
        with open(self.__headerfile, 'r') as f:
            metadata = f.readlines()

        # locate the (last) 'DatasetHeader Begin' marker
        start = -1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start = i+1
        if start == -1:
            raise RuntimeError('Invalid ER Mapper header file ("DatasetHeader" not found)')

        # parse header file filling dictionary of found values; keys of
        # nested sections are joined with '.' (e.g. 'RasterInfo.NrOfLines')
        md_dict = {}
        section = []
        for i in range(start, len(metadata)):
            line = metadata[i].strip()
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section) > 0:
                    section.pop()
            else:
                vals = line.split('=')
                if len(vals) == 2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey = '.'.join(section+[key])
                    md_dict[fullkey] = value

        # check that the data format/type is supported
        # (logger.warning replaces the deprecated logger.warn alias)
        try:
            if md_dict['ByteOrder'] != 'LSBFirst':
                raise RuntimeError('Unsupported byte order '+md_dict['ByteOrder'])
        except KeyError:
            self.logger.warning("Byte order not specified. Assuming LSB first.")

        try:
            if md_dict['DataType'] != 'Raster':
                raise RuntimeError('Unsupported data type '+md_dict['DataType'])
        except KeyError:
            self.logger.warning("Data type not specified. Assuming raster data.")

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warning("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            fileOffset = int(md_dict['HeaderOffset'])
        except (KeyError, ValueError):
            fileOffset = 0
        if fileOffset > 0:
            raise RuntimeError("ER Mapper data with header offset >0 not supported.")

        # now extract required information
        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine extents of data")

        ### mask/null value
        # note that NaN is always filtered out in ripley
        if self.__null_value is None:
            try:
                self.__null_value = float(md_dict['RasterInfo.NullCellValue'])
            except (KeyError, ValueError):
                self.logger.debug("Could not determine null value, using default.")
                self.__null_value = 99999
        elif not isinstance(self.__null_value, float) and not isinstance(self.__null_value, int):
            raise TypeError("Invalid type of null_value parameter")

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType'] == 'EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType'] == 'LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except Exception:
            # note: as in the original code an unknown CoordinateType is
            # also downgraded to a warning with a (0,0) origin
            self.logger.warning("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX, originY = 0.0, 0.0

        # data sets have origin in top-left corner so y runs top-down and
        # we need to flip accordingly
        originY -= NY * spacingY

        # use .get() so a missing Projection entry does not raise a KeyError
        # (the original indexed the dictionary directly)
        if 'GEODETIC' in md_dict.get('CoordinateSpace.Projection', ''):
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds = gdal.Open(self.__headerfile)
                wkt = ds.GetProjection()
            except Exception:
                wkt = 'GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warning('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM, originY_UTM, zone = LatLonToUTM(originX, originY, wkt)
            op1X, op1Y, _ = LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            self.__utm_zone = zone
            # we are rounding to avoid interpolation issues
            spacingX = np.round(op1X-originX_UTM)
            spacingY = np.round(op1Y-originY_UTM)
            originX = np.round(originX_UTM)
            originY = np.round(originY_UTM)

        self.__dataorigin = [originX, originY]
        self.__delta = [spacingX, spacingY]
        self.__nPts = [NX, NY]
        self.__origin = [originX, originY]

        ### scale factor
        if self.__scale_factor is None:
            if self.__data_type == self.GRAVITY:
                self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
                self.__scale_factor = 1e-6
            else:
                self.logger.info("Assuming magnetic data units are 'nT'.")
                self.__scale_factor = 1e-9

        ### error value
        if self.__error_value is None:
            self.__error_value = 2.
        elif not isinstance(self.__error_value, float) and not isinstance(self.__error_value, int):
            raise TypeError("Invalid type of error parameter")

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns the data type (`GRAVITY` or `MAGNETIC`) of this source.
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Returns the survey data and uncertainties read from the binary data
        file, interpolated onto the given domain and scaled.

        :param domain: the escript domain to use
        :param origin: the origin coordinates of the domain
        :param NE: the number of domain elements in each dimension
        :param spacing: the cell sizes (node spacing) in the domain
        :return: data and uncertainty values
        :rtype: pair of `Data`
        """
        nValues = self.__nPts
        # determine base location of this dataset within the domain
        first = [int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim() == 3:
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues = nValues + [1]

        data = ripleycpp._readBinaryGrid(self.__datafile,
                                         ReducedFunction(domain),
                                         first, nValues, (), self.__null_value)
        # constant uncertainty wherever a valid (non-null) measurement exists
        sigma = self.__error_value * whereNonZero(data-self.__null_value)

        data = data * self.__scale_factor
        sigma = sigma * self.__scale_factor
        return data, sigma

    def getUtmZone(self):
        """
        Returns the UTM zone (only set if the input was geodetic), else None.
        """
        return self.__utm_zone
401
402
##############################################################################
class NetCdfData(DataSource):
    """
    Data Source for gridded netCDF data that use CF/COARDS conventions.
    """
    def __init__(self, data_type, filename, altitude=0., data_variable=None,
                 error=None, scale_factor=None, null_value=None):
        """
        :param filename: file name for survey data in netCDF format
        :type filename: ``str``
        :param data_type: type of data, must be `GRAVITY` or `MAGNETIC`
        :type data_type: ``int``
        :param altitude: altitude of measurements in meters
        :type altitude: ``float``
        :param data_variable: name of the netCDF variable that holds the data.
                              If not provided an attempt is made to determine
                              the variable and an exception thrown on failure.
        :type data_variable: ``str``
        :param error: either the name of the netCDF variable that holds the
                      uncertainties of the measurements or a constant value
                      to use for the uncertainties. If a constant value is
                      supplied, it is scaled by the same factor as the
                      measurements. If not provided the error is assumed to
                      be 2 units for all measurements (i.e. 0.2 mGal and 2 nT
                      for gravity and magnetic, respectively)
        :type error: ``str`` or ``float``
        :param scale_factor: the measurements and error values are scaled by
                             this factor. By default, gravity data is assumed
                             to be given in 1e-6 m/s^2 (0.1 mGal), while
                             magnetic data is assumed to be in 1e-9 T (1 nT).
        :type scale_factor: ``float``
        :param null_value: value that is used in the file to mark undefined
                           areas. This information is usually included in the
                           file.
        :type null_value: ``float``
        """
        super(NetCdfData, self).__init__()
        self.__filename = filename
        if not data_type in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for data_type parameter")
        self.__data_type = data_type
        self.__altitude = altitude
        self.__data_name = data_variable
        self.__scale_factor = scale_factor
        self.__null_value = null_value
        self.__readMetadata(error)

    def __readMetadata(self, error):
        """
        Extracts grid extents, data/error variable names, null value and
        scale factor from the netCDF file.

        :param error: error variable name or constant value (see constructor)
        :raises RuntimeError: if extents or variables cannot be determined
        :raises ValueError: if a supplied variable name is invalid
        :raises TypeError: if error or null_value have an invalid type
        """
        self.logger.debug("Checking Data Source: %s"%self.__filename)
        f = netcdf_file(self.__filename, 'r')
        # close the file even when one of the checks below raises (the
        # original only closed it on the success path). All values derived
        # from the file (scalars only) are extracted before closing.
        try:
            ### longitude- / X-dimension and variable
            NX = 0
            for n in ['lon','longitude','x']:
                if n in f.dimensions:
                    NX = f.dimensions[n]
                    lon_name = n
                    break
            if NX == 0:
                raise RuntimeError("Could not determine extents of data")

            # CF/COARDS states that coordinate variables have the same name
            # as the dimensions
            if not lon_name in f.variables:
                raise RuntimeError("Could not determine longitude variable")
            # pop() so the coordinate variable is not considered a data
            # variable candidate below
            longitude = f.variables.pop(lon_name)

            ### latitude- / Y-dimension and variable
            NY = 0
            for n in ['lat','latitude','y']:
                if n in f.dimensions:
                    NY = f.dimensions[n]
                    lat_name = n
                    break
            if NY == 0:
                raise RuntimeError("Could not determine extents of data")
            if not lat_name in f.variables:
                raise RuntimeError("Could not determine latitude variable")
            latitude = f.variables.pop(lat_name)

            ### data variable
            if self.__data_name is not None:
                # validate the user-supplied variable name
                try:
                    dims = f.variables[self.__data_name].dimensions
                except KeyError:
                    raise ValueError("Invalid data variable name supplied")
                if not ((lat_name in dims) and (lon_name in dims)):
                    raise ValueError("Invalid data variable name supplied")
            else:
                # pick the first variable that spans both coordinate dims
                for n in f.variables.keys():
                    dims = f.variables[n].dimensions
                    if (lat_name in dims) and (lon_name in dims):
                        self.__data_name = n
                        break
                if self.__data_name is None:
                    raise RuntimeError("Could not determine data variable")

            datavar = f.variables[self.__data_name]

            ### error value/variable
            self.__error_name = None
            if isinstance(error, str):
                try:
                    dims = f.variables[error].dimensions
                except KeyError:
                    raise ValueError("Invalid error variable name supplied")
                if not ((lat_name in dims) and (lon_name in dims)):
                    raise ValueError("Invalid error variable name supplied")
                self.__error_name = error
            elif isinstance(error, float) or isinstance(error, int):
                self.__error_value = float(error)
            elif error is None:
                # default uncertainty of 2 units (0.2 mGal / 2 nT once scaled)
                self.__error_value = 2.
            else:
                raise TypeError("Invalid type of error parameter")

            ### mask/null value
            # note that NaN is always filtered out in ripley
            if self.__null_value is None:
                if hasattr(datavar, 'missing_value'):
                    self.__null_value = float(datavar.missing_value)
                elif hasattr(datavar, '_FillValue'):
                    self.__null_value = float(datavar._FillValue)
                else:
                    self.logger.debug("Could not determine null value, using default.")
                    self.__null_value = 99999
            elif not isinstance(self.__null_value, float) and not isinstance(self.__null_value, int):
                raise TypeError("Invalid type of null_value parameter")

            # try to determine units of data - this is disabled until we
            # obtain a file with valid information
            #if hasattr(f.variables[data_name], 'units'):
            #   units=f.variables[data_name].units

            ### scale factor
            if self.__scale_factor is None:
                if self.__data_type == self.GRAVITY:
                    self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
                    self.__scale_factor = 1e-6
                else:
                    self.logger.info("Assuming magnetic data units are 'nT'.")
                    self.__scale_factor = 1e-9

            # see if there is a WKT string to convert coordinates
            # (getattr with default replaces the original bare try/except)
            wkt_string = getattr(datavar, 'esri_pe_string', None)

            # actual_range & geospatial_lon_min/max do not always contain
            # correct values so we have to obtain the min/max in a less
            # efficient way:
            lon_range = longitude.data.min(), longitude.data.max()
            lat_range = latitude.data.min(), latitude.data.max()
        finally:
            f.close()

        lon_range, lat_range, zone = LatLonToUTM(lon_range, lat_range, wkt_string)
        self.__utm_zone = zone
        lengths = [lon_range[1]-lon_range[0], lat_range[1]-lat_range[0]]

        self.__nPts = [NX, NY]
        self.__origin = [lon_range[0], lat_range[0]]
        # we are rounding to avoid interpolation issues
        self.__delta = [np.round(lengths[i]/self.__nPts[i]) for i in range(2)]

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns the data type (`GRAVITY` or `MAGNETIC`) of this source.
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Returns the survey data and uncertainties read from the netCDF file,
        interpolated onto the given domain and scaled.

        :param domain: the escript domain to use
        :param origin: the origin coordinates of the domain
        :param NE: the number of domain elements in each dimension
        :param spacing: the cell sizes (node spacing) in the domain
        :return: data and uncertainty values
        :rtype: pair of `Data`
        """
        FS = ReducedFunction(domain)
        nValues = self.__nPts
        # determine base location of this dataset within the domain
        first = [int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim() == 3:
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues = nValues + [1]

        data = ripleycpp._readNcGrid(self.__filename, self.__data_name, FS,
                                     first, nValues, (), self.__null_value)
        if self.__error_name is not None:
            # uncertainties are stored in their own variable
            sigma = ripleycpp._readNcGrid(self.__filename, self.__error_name,
                                          FS, first, nValues, (), 0.)
        else:
            # constant uncertainty wherever a valid measurement exists
            sigma = self.__error_value * whereNonZero(data-self.__null_value)

        data = data * self.__scale_factor
        sigma = sigma * self.__scale_factor
        return data, sigma

    def getUtmZone(self):
        """
        Returns the UTM zone of the projected coordinates.
        """
        return self.__utm_zone
598
599
##############################################################################
class SourceFeature(object):
    """
    A feature adds a density/susceptibility distribution to (parts of) a
    domain of a synthetic data source - for example a layer of a specific
    rock type or a simulated ore body.
    """

    def getValue(self):
        """
        Returns the value for the area covered by mask. This may be a
        constant or a `Data` object with spatial dependency.

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature, i.e. a
        mask that is non-zero wherever the value returned by `getValue()`
        should be applied and zero elsewhere.

        This method must be implemented in subclasses.
        """
        raise NotImplementedError
621
class SmoothAnomaly(SourceFeature):
    """
    A source feature in the form of a blob (roughly gaussian).
    """
    def __init__(self, lx, ly, lz, x, y, depth, v_inner=None, v_outer=None):
        """
        Initializes the smooth anomaly data.

        :param lx: size of blob in x-dimension
        :param ly: size of blob in y-dimension
        :param lz: size of blob in z-dimension
        :param x: location of blob in x-dimension
        :param y: location of blob in y-dimension
        :param depth: depth of blob
        :param v_inner: value in the centre of the blob
        :param v_outer: value in the periphery of the blob
        """
        self.x=x
        self.y=y
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.depth=depth
        self.v_inner=v_inner
        self.v_outer=v_outer
        # value and mask are computed lazily and cached by
        # getValue()/getMask()
        self.value=None
        self.mask=None

    # NOTE(review): base class declares getValue(self) without an argument;
    # this override takes the coordinates x - confirm intended interface.
    def getValue(self,x):
        # compute once and cache; subsequent calls return the cached value
        # even if called with different coordinates
        if self.value is None:
            if self.v_outer is None or self.v_inner is None:
                # no values supplied: the feature contributes nothing
                self.value=0
            else:
                DIM=x.getDomain().getDim()
                # decay rate derived from the ratio of outer to inner value
                alpha=-log(abs(self.v_outer/self.v_inner))*4
                # gaussian in x...
                value=exp(-alpha*((x[0]-self.x)/self.lx)**2)
                # ...and in the vertical direction, centred at z=-depth
                value=value*exp(-alpha*((x[DIM-1]+self.depth)/self.lz)**2)
                # clamp from below at |v_outer| and restore the sign of
                # v_inner
                self.value=maximum(abs(self.v_outer), abs(self.v_inner*value))
                if self.v_inner<0: self.value=-self.value

        return self.value

    def getMask(self, x):
        DIM=x.getDomain().getDim()
        # axis-aligned box of extent lx (by ly in 3D) by lz centred at
        # (self.x[, self.y], -self.depth); the result is cached in self.mask
        m=whereNonNegative(x[DIM-1]+self.depth+self.lz/2) * whereNonPositive(x[DIM-1]+self.depth-self.lz/2) \
            *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM>2:
            m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        self.mask = m
        return m
672
##############################################################################
class SyntheticDataBase(DataSource):
    """
    Base class to define reference data based on a given property distribution
    (density or susceptibility). Data are collected from a square region of
    vertical extent `length` on a grid with `number_of_elements` cells in each
    direction.

    The synthetic data are constructed by solving the appropriate forward
    problem. Data can be sampled with an offset from the surface at z=0 or
    using the entire subsurface region.
    """
    def __init__(self, data_type,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param DIM: number of spatial dimensions
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density
                    [B_r, B_latitude, B_longitude]. Only used for magnetic
                    data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the
                            surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` spherical coordinates are used. This is
                          not supported yet and should be set to ``False``.
        :type spherical: ``Bool``
        """
        super(SyntheticDataBase, self).__init__()
        if not data_type in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for data_type parameter")
        self.DIM=DIM
        self.number_of_elements=number_of_elements
        self.length=length
        self.__data_type = data_type

        if spherical:
            raise ValueError("Spherical coordinates are not supported yet")
        self.__spherical = spherical
        self.__full_knowledge= full_knowledge
        self.__data_offset=data_offset
        self.__B_b =None
        # this is for Cartesian (FIXME ?)
        if data_type == self.MAGNETIC:
            # reorder and negate the background field components to match
            # the model coordinate axes.
            # NOTE(review): the 2D branch returns a numpy array while the 3D
            # branch returns a plain list - presumably both work with the
            # arithmetic in getSurveyData(); confirm this is intended.
            if self.DIM < 3:
                self.__B_b = np.array([-B_b[2], -B_b[0]])
            else:
                self.__B_b = ([-B_b[1], -B_b[2], -B_b[0]])
        # lateral data grid: origin at 0, uniform spacing
        self.__origin = [0]*(DIM-1)
        self.__delta = [float(length)/number_of_elements]*(DIM-1)
        self.__nPts = [number_of_elements]*(DIM-1)
        # reference property distribution; created lazily by subclasses in
        # getReferenceProperty()
        self._reference_data=None

    def getUtmZone(self):
        """
        returns a dummy UTM zone since this class does not use real coordinate
        values.
        """
        return 0

    def getDataExtents(self):
        """
        returns the lateral data extent of the data set as
        ``( (x0, y0), (nx, ny), (dx, dy) )``
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        returns the data type (`GRAVITY` or `MAGNETIC`)
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, number_of_elements, spacing):
        """
        returns the survey data placed on a given domain.

        :param domain: domain on which the data are to be placed
        :type domain: ``Domain``
        :param origin: origin of the domain
        :type origin: ``list`` of ``float``
        :param number_of_elements: number of elements (or cells) in each
                                   spatial direction used to span the domain
        :type number_of_elements: ``list`` of ``int``
        :param spacing: cell size in each spatial direction
        :type spacing: ``list`` of ``float``
        :return: observed gravity field or magnetic flux density for each cell
                 in the domain and for each cell an indicator 1/0 if the data
                 are valid or not.
        :rtype: pair of ``Scalar``
        """
        pde=LinearSinglePDE(domain)
        DIM=domain.getDim()
        x=domain.getX()
        # set the reference data (also cached in self._reference_data which
        # is used below; the return value k itself is not used again)
        k=self.getReferenceProperty(domain)
        # calculate the corresponding potential; the potential is pinned to
        # zero at the top of the domain
        z=x[DIM-1]
        m_psi_ref=whereZero(z-sup(z))
        if self.getDataType()==DataSource.GRAVITY:
            # gravity: Poisson equation for the gravitational potential
            pde.setValue(A=kronecker(domain), Y=-4*np.pi*U.Gravitational_Constant*self._reference_data, q=m_psi_ref)
        else:
            # magnetics: source term from the susceptibility times the
            # background flux density
            pde.setValue(A=kronecker(domain), X=self._reference_data*self.__B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        #pde.getSolverOptions().setTolerance(1e-13)
        psi_ref=pde.getSolution()
        del pde
        # field = (background contribution) - gradient of the potential,
        # evaluated on cell centres
        if self.getDataType()==DataSource.GRAVITY:
            data = -grad(psi_ref, ReducedFunction(domain))
        else:
            data = self._reference_data*self.__B_b-grad(psi_ref, ReducedFunction(domain))

        x=ReducedFunction(domain).getX()
        if self.__full_knowledge:
            # all subsurface cells carry valid data
            sigma = whereNegative(x[DIM-1])
        else:
            sigma=1.
            # limit mask to non-padding in horizontal area
            # NOTE(review): with the data region starting at 0 and symmetric
            # padding (inf(x_i)<0), sup(x_i)+inf(x_i) is the upper bound of
            # the non-padding region - confirm this assumption about the
            # domain layout.
            for i in range(DIM-1):
                x_i=x[i]
                sigma=sigma * wherePositive(x_i) * whereNegative(x_i-(sup(x_i)+inf(x_i)))
            # limit mask to one cell thickness at z=0 (the data offset is
            # snapped to the nearest cell boundary)
            z=x[DIM-1]
            oo=int(self.__data_offset/spacing[DIM-1]+0.5)*spacing[DIM-1]
            sigma = sigma * whereNonNegative(z-oo) * whereNonPositive(z-oo-spacing[DIM-1])
        return data,sigma

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.

        :return: the density or susceptibility anomaly used to create the
                 survey data
        :note: it can be assumed that in the first call the ``domain``
               argument is present so the actual anomaly data can be created.
               In subsequent calls this may not be true.
        :note: method needs to be overwritten
        """
        raise NotImplementedError()
825
class SyntheticFeatureData(SyntheticDataBase):
    """
    Uses a list of `SourceFeature` objects to define synthetic anomaly data.
    """
    def __init__(self, data_type,
                 features,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param features: list of features. It is recommended that the features
                         are located entirely below the surface.
        :type features: ``list`` of `SourceFeature`
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density
                    [B_r, B_latitude, B_longitude]. Only used for magnetic
                    data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the
                            surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` spherical coordinates are used.
        :type spherical: ``Bool``
        """
        super(SyntheticFeatureData, self).__init__(
                data_type=data_type, DIM=DIM,
                number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge,
                spherical=spherical)
        self._features = features

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        # use 'is None' instead of '== None': equality on escript Data
        # objects is not guaranteed to give a plain bool, and this matches
        # the identity test used in SyntheticData.getReferenceProperty()
        if self._reference_data is None:
            x = domain.getX()
            k = 0.
            # overlay the features in order; later features overwrite
            # earlier ones where their masks overlap
            for f in self._features:
                m = f.getMask(x)
                k = k * (1-m) + f.getValue(x) * m
            self._reference_data = k
        return self._reference_data
885
class SyntheticData(SyntheticDataBase):
    """
    Defines synthetic gravity/magnetic data based on harmonic property anomaly

        rho = amplitude * sin(n_depth * pi /depth * (z+depth_offset)) * sin(n_length * pi /length * (x - shift) )

    for all x and z<=0. For z>0 rho = 0.
    """
    def __init__(self, data_type,
                 n_length=1,
                 n_depth=1,
                 depth_offset=0.,
                 depth=None,
                 amplitude=None,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param n_length: number of oscillations in the anomaly data within the
                         observation region
        :type n_length: ``int``
        :param n_depth: number of oscillations in the anomaly data below
                        surface
        :type n_depth: ``int``
        :param depth_offset: vertical offset of the data
        :type depth_offset: ``float``
        :param depth: vertical extent in the anomaly data. If not present the
                      depth of the domain is used.
        :type depth: ``float``
        :param amplitude: data amplitude. Default value is 200 U.kg/U.m**3 for
                          gravity and 0.1 for magnetic data.
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density
                    [B_r, B_latitude, B_longitude]. Only used for magnetic
                    data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the
                            surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` spherical coordinates are used
        :type spherical: ``Bool``
        """
        super(SyntheticData, self).__init__(
                data_type=data_type, DIM=DIM,
                number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge,
                spherical=spherical)
        self.__n_length = n_length
        self.__n_depth = n_depth
        self.depth = depth
        self.depth_offset = depth_offset
        # use 'is None' instead of '== None' for the identity test
        # (consistent with getReferenceProperty below)
        if amplitude is None:
            if data_type == DataSource.GRAVITY:
                amplitude = 200 * U.kg/U.m**3
            else:
                amplitude = 0.1
        self.__amplitude = amplitude

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        if self._reference_data is None:
            DIM = domain.getDim()
            x = domain.getX()
            # vertical profile: scaled sine between the surface offset and
            # the chosen depth (or the domain bottom if none was given),
            # zero outside that band
            z = x[DIM-1]
            dd = self.depth
            if dd is None: dd = inf(z)
            z2 = (z+self.depth_offset)/(self.depth_offset-dd)
            k = sin(self.__n_depth * np.pi * z2) * whereNonNegative(z2) * whereNonPositive(z2-1.) * self.__amplitude
            # modulate laterally with n_length oscillations across the
            # extent of the domain in each horizontal direction
            for i in range(DIM-1):
                x_i = x[i]
                min_x = inf(x_i)
                max_x = sup(x_i)
                k *= sin(self.__n_length*np.pi*(x_i-min_x)/(max_x-min_x))
            self._reference_data = k
        return self._reference_data
979

  ViewVC Help
Powered by ViewVC 1.1.26