/trunk/downunder/py_src/datasources.py
Revision 4152 - Tue Jan 22 00:29:03 2013 UTC by caltinay

##############################################################################
#
# Copyright (c) 2003-2013 by University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development since 2012 by School of Earth Sciences
#
##############################################################################

"""Data readers/providers for inversions"""

__copyright__="""Copyright (c) 2003-2013 by University of Queensland
http://www.uq.edu.au
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Open Software License version 3.0
http://www.opensource.org/licenses/osl-3.0.php"""
__url__="https://launchpad.net/escript-finley"

__all__ = ['simpleGeoMagneticFluxDensity', 'DataSource', 'ErMapperData',
           'SyntheticDataBase', 'SyntheticFeatureData', 'SyntheticData',
           'SmoothAnomaly']

import logging
import numpy as np
from esys.escript import ReducedFunction
from esys.escript import unitsSI as U
from esys.escript.linearPDEs import LinearSinglePDE
from esys.escript.util import *
from esys.ripley import Brick, Rectangle, ripleycpp

try:
    from scipy.io.netcdf import netcdf_file
    __all__ += ['NetCdfData']
except ImportError:
    pass

def LatLonToUTM(lon, lat, wkt_string=None):
    """
    Converts one or more longitude,latitude pairs to the corresponding x,y
    coordinates in the Universal Transverse Mercator projection.

    :note: If the ``pyproj`` module is not installed a warning is printed and
           the input values are scaled by a constant and returned.
    :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
           not available to convert the string, then the input values are
           assumed to be given in the Clarke 1866 projection.

    :param lon: longitude value(s)
    :type lon: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param lat: latitude value(s)
    :type lat: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :rtype: ``numpy.array``
    """

    # not really optimal: if pyproj is not installed we return the input
    # values scaled by a constant.
    try:
        import pyproj
    except ImportError:
        print("Warning, pyproj not available. Domain extents will be wrong")
        return np.array(lon)*1000., np.array(lat)*1000.

    # determine UTM zone from the input data
    zone = int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
    try:
        import osgeo.osr
        srs = osgeo.osr.SpatialReference()
        srs.ImportFromWkt(wkt_string)
        p_src = pyproj.Proj(srs.ExportToProj4())
    except Exception:
        p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
    # we assume southern hemisphere here
    p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
    x, y = pyproj.transform(p_src, p_dest, lon, lat)
    return x, y
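
# Example usage (illustrative sketch only; the coordinate values below are
# made up for demonstration):
#
#     x, y = LatLonToUTM([138.6, 138.7], [-34.9, -34.8])
#     # x, y are UTM eastings/northings in metres; without a wkt_string the
#     # input is assumed to be in the Clarke 1866 projection (see above).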

def simpleGeoMagneticFluxDensity(latitude, longitude=0.):
    """
    Returns a dipole approximation of the geomagnetic flux density B at the
    given `latitude` as a tuple ``(B_r, B_theta, B_phi)``, where ``B_phi`` is
    always zero. The parameter `longitude` is currently ignored.

    :rtype: ``tuple``
    """
    theta = (90-latitude)/180.*np.pi
    B_0 = U.Mu_0 * U.Magnetic_Dipole_Moment_Earth / (4 * np.pi * U.R_Earth**3)
    B_theta = B_0 * sin(theta)
    B_r = 2 * B_0 * cos(theta)
    return B_r, B_theta, 0.
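
# Example usage (illustrative): the components at 30 degrees south,
#
#     B_r, B_theta, B_phi = simpleGeoMagneticFluxDensity(-30.)
#
# come out on the order of a few tens of microtesla, consistent with the
# magnitude of the real geomagnetic field.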

class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    This is an abstract base class that implements common functionality.
    Methods to be overridden by subclasses are marked as such.
    This class assumes 2D data which is mapped to a slice of a 3D domain.
    For other setups override the methods as required.
    """

    GRAVITY, MAGNETIC = list(range(2))

    def __init__(self):
        """
        Constructor. Sets some defaults and initializes logger.
        """
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)
        self.__subsampling_factor = 1

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDataType(self):
        """
        Returns the type of survey data managed by this source.
        Subclasses must return `GRAVITY` or `MAGNETIC` as appropriate.
        """
        raise NotImplementedError

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        This method is called by the `DomainBuilder` to retrieve the survey
        data as `Data` objects on the given domain.

        Subclasses should return one or more `Data` objects with survey data
        interpolated on the given ripley domain. The exact return type
        depends on the type of data.

        :param domain: the escript domain to use
        :type domain: `esys.escript.Domain`
        :param origin: the origin coordinates of the domain
        :type origin: ``tuple`` or ``list``
        :param NE: the number of domain elements in each dimension
        :type NE: ``tuple`` or ``list``
        :param spacing: the cell sizes (node spacing) in the domain
        :type spacing: ``tuple`` or ``list``
        """
        raise NotImplementedError

    def setSubsamplingFactor(self, f):
        """
        Sets the data subsampling factor (default=1).

        The factor is applied in all dimensions. For example a 2D dataset
        with 300 x 150 data points will be reduced to 150 x 75 when a
        subsampling factor of 2 is used.
        This becomes important when adding data of varying resolution to
        a `DomainBuilder`.
        """
        self.__subsampling_factor = f

    def getSubsamplingFactor(self):
        """
        Returns the subsampling factor that was set via `setSubsamplingFactor`
        (see there).
        """
        return self.__subsampling_factor

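# A minimal sketch of the subclass contract (hypothetical source, not part
# of the module): getDataExtents() describes the data grid, getDataType()
# names the quantity, and getSurveyData() returns (data, sigma) on the
# domain supplied by the DomainBuilder.
#
#     class ConstantGravityData(DataSource):
#         def getDataExtents(self):
#             return ([0., 0.], [100, 100], [100., 100.])
#         def getDataType(self):
#             return DataSource.GRAVITY
#         def getSurveyData(self, domain, origin, NE, spacing):
#             data = Scalar(1e-6, ReducedFunction(domain))
#             sigma = Scalar(1., ReducedFunction(domain))
#             return data, sigma
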
##############################################################################
class ErMapperData(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, datatype, headerfile, datafile=None, altitude=0.):
        """
        :param datatype: type of data, must be `GRAVITY` or `MAGNETIC`
        :type datatype: ``int``
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :type headerfile: ``str``
        :param datafile: ER Mapper binary data file name. If not supplied the
                         name of the header file without '.ers' is assumed
        :type datafile: ``str``
        :param altitude: altitude of measurements above ground in meters
        :type altitude: ``float``
        """
        super(ErMapperData, self).__init__()
        self.__headerfile = headerfile
        if datafile is None:
            self.__datafile = headerfile[:-4]
        else:
            self.__datafile = datafile
        self.__altitude = altitude
        self.__datatype = datatype
        self.__readHeader()

    def __readHeader(self):
        self.logger.debug("Checking Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        with open(self.__headerfile, 'r') as f:
            metadata = f.readlines()
        # parse metadata
        start = -1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start = i+1
        if start == -1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # flatten the nested Begin/End sections into dot-separated keys
        md_dict = {}
        section = []
        for i in range(start, len(metadata)):
            line = metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section) > 0:
                    section.pop()
            else:
                vals = line.split('=')
                if len(vals) == 2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey = '.'.join(section+[key])
                    md_dict[fullkey] = value

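        # To illustrate the flattening above, a (hypothetical, abbreviated)
        # ERS header fragment such as
        #
        #     DatasetHeader Begin
        #         RasterInfo Begin
        #             CellType = IEEE4ByteReal
        #             NrOfLines = 150
        #         RasterInfo End
        #     DatasetHeader End
        #
        # yields md_dict entries 'RasterInfo.CellType' and
        # 'RasterInfo.NrOfLines', which are the keys queried below.
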
        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except Exception:
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except Exception:
            maskval = 99999

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except Exception:
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType'] == 'EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType'] == 'LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except Exception:
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX, originY = 0.0, 0.0

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds = gdal.Open(self.__headerfile)
                wkt = ds.GetProjection()
            except Exception:
                wkt = 'GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM, originY_UTM = LatLonToUTM(originX, originY, wkt)
            op1X, op1Y = LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolation issues
            spacingX = np.round(op1X-originX_UTM)
            spacingY = np.round(op1Y-originY_UTM)
            originX = np.round(originX_UTM)
            originY = np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin = [originX, originY]
        originY -= (NY-1)*spacingY
        self.__delta = [spacingX, spacingY]
        self.__maskval = maskval
        self.__nPts = [NX, NY]
        self.__origin = [originX, originY]
        if self.__datatype == self.GRAVITY:
            self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
            self.__scalefactor = 1e-6
        else:
            self.logger.info("Assuming magnetic data units are 'nT'.")
            self.__scalefactor = 1e-9

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        return self.__datatype

    def getSurveyData(self, domain, origin, NE, spacing):
        nValues = self.__nPts
        # determine base location of this dataset within the domain
        first = [int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim() == 3:
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues = nValues+[1]

        data = ripleycpp._readBinaryGrid(self.__datafile,
                                         ReducedFunction(domain),
                                         first, nValues, (), self.__maskval)
        sigma = whereNonZero(data-self.__maskval)
        data = data*self.__scalefactor
        sigma = sigma * 2. * self.__scalefactor
        return data, sigma

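# Example usage (illustrative; the file name is hypothetical):
#
#     source = ErMapperData(DataSource.GRAVITY, 'survey.ers')
#     extents = source.getDataExtents()   # ((x0,y0), (nx,ny), (dx,dy))
#
# The (data, sigma) pair is normally requested by a DomainBuilder rather
# than by calling getSurveyData() directly.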

##############################################################################
class NetCdfData(DataSource):
    """
    Data Source for gridded netCDF data that use CF/COARDS conventions.
    """
    def __init__(self, datatype, filename, altitude=0.):
        """
        :param filename: file name for survey data in netCDF format
        :type filename: ``str``
        :param datatype: type of data, must be `GRAVITY` or `MAGNETIC`
        :type datatype: ``int``
        :param altitude: altitude of measurements in meters
        :type altitude: ``float``
        """
        super(NetCdfData, self).__init__()
        self.__filename = filename
        if datatype not in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for datatype parameter")
        self.__datatype = datatype
        self.__altitude = altitude
        self.__readMetadata()

    def __readMetadata(self):
        self.logger.debug("Checking Data Source: %s"%self.__filename)
        f = netcdf_file(self.__filename, 'r')
        # determine the grid extents from commonly used dimension names
        NX = 0
        for n in ['lon', 'longitude', 'x']:
            if n in f.dimensions:
                NX = f.dimensions[n]
                break
        if NX == 0:
            raise RuntimeError("Could not determine extents of data")
        NY = 0
        for n in ['lat', 'latitude', 'y']:
            if n in f.dimensions:
                NY = f.dimensions[n]
                break
        if NY == 0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables
        lon_name = None
        for n in ['lon', 'longitude']:
            if n in f.variables:
                lon_name = n
                longitude = f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name = None
        for n in ['lat', 'latitude']:
            if n in f.variables:
                lat_name = n
                latitude = f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out data variable name
        data_name = None
        if len(f.variables) == 1:
            data_name = list(f.variables.keys())[0]
        else:
            for n in f.variables.keys():
                dims = f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    data_name = n
                    break
        if data_name is None:
            raise RuntimeError("Could not determine data variable")

        # try to determine value for unused data
        if hasattr(f.variables[data_name], 'missing_value'):
            maskval = float(f.variables[data_name].missing_value)
        elif hasattr(f.variables[data_name], '_FillValue'):
            maskval = float(f.variables[data_name]._FillValue)
        else:
            self.logger.debug("missing_value attribute not found, using default.")
            maskval = 99999

        # try to determine units of data - this is disabled for now
        #if hasattr(f.variables[data_name], 'units'):
        #    units = f.variables[data_name].units
        if self.__datatype == self.GRAVITY:
            self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
            self.__scalefactor = 1e-6
        else:
            self.logger.info("Assuming magnetic data units are 'nT'.")
            self.__scalefactor = 1e-9

        # see if there is a wkt string to convert coordinates
        try:
            wkt_string = f.variables[data_name].esri_pe_string
        except AttributeError:
            wkt_string = None

        # we don't trust actual_range & geospatial_lon_min/max since subset
        # data does not seem to have these fields updated.
        # Getting min/max from the arrays is obviously not very efficient but..
        #lon_range = longitude.actual_range
        #lat_range = latitude.actual_range
        #lon_range = [f.geospatial_lon_min, f.geospatial_lon_max]
        #lat_range = [f.geospatial_lat_min, f.geospatial_lat_max]
        lon_range = longitude.data.min(), longitude.data.max()
        lat_range = latitude.data.min(), latitude.data.max()
        if lon_range[1] < 180:
            # values look like degrees so convert to UTM coordinates
            lon_range, lat_range = LatLonToUTM(lon_range, lat_range, wkt_string)
        lengths = [lon_range[1]-lon_range[0], lat_range[1]-lat_range[0]]
        f.close()

        self.__nPts = [NX, NY]
        self.__origin = [lon_range[0], lat_range[0]]
        # we are rounding to avoid interpolation issues
        self.__delta = [np.round(lengths[i]/self.__nPts[i]) for i in range(2)]
        #self.__wkt_string = wkt_string
        #self.__lon_name = lon_name
        #self.__lat_name = lat_name
        self.__data_name = data_name
        self.__maskval = maskval

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        return self.__datatype

    def getSurveyData(self, domain, origin, NE, spacing):
        nValues = self.__nPts
        # determine base location of this dataset within the domain
        first = [int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim() == 3:
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues = nValues+[1]

        data = ripleycpp._readNcGrid(self.__filename, self.__data_name,
                                     ReducedFunction(domain), first, nValues,
                                     (), self.__maskval)
        sigma = whereNonZero(data-self.__maskval)
        data = data*self.__scalefactor
        sigma = sigma * 2. * self.__scalefactor
        return data, sigma

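# Example usage (illustrative; the file name is hypothetical):
#
#     source = NetCdfData(DataSource.MAGNETIC, 'anomaly.nc', altitude=300.)
#
# The reader only needs CF/COARDS style 'lon'/'lat' (or 'longitude'/
# 'latitude') dimensions plus one gridded data variable; other contents of
# the file are ignored.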

##############################################################################
class SourceFeature(object):
    """
    A feature adds a density distribution to (parts of) a domain of a synthetic
    data source, for example a layer of a specific rock type or a simulated
    ore body.
    """
    def getValue(self, x):
        """
        Returns the value for the area covered by mask. It can be constant
        or a data object with spatial dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature. That is,
        mask is non-zero where the value returned by `getValue()` should be
        applied, zero elsewhere.
        """
        raise NotImplementedError

class SmoothAnomaly(SourceFeature):
    """
    A box-shaped feature of extents `lx` x `ly` x `lz`, centered laterally at
    (`x`, `y`) and vertically at `depth` below the surface. The feature value
    decays smoothly from `v_inner` at the center towards `v_outer` at the
    edges of the box.
    """
    def __init__(self, lx, ly, lz, x, y, depth, v_inner=None, v_outer=None):
        self.x = x
        self.y = y
        self.lx = lx
        self.ly = ly
        self.lz = lz
        self.depth = depth
        self.v_inner = v_inner
        self.v_outer = v_outer
        self.value = None
        self.mask = None

    def getValue(self, x):
        if self.value is None:
            if self.v_outer is None or self.v_inner is None:
                self.value = 0
            else:
                DIM = x.getDomain().getDim()
                alpha = -log(abs(self.v_outer/self.v_inner))*4
                value = exp(-alpha*((x[0]-self.x)/self.lx)**2)
                value = value*exp(-alpha*((x[DIM-1]+self.depth)/self.lz)**2)
                self.value = maximum(abs(self.v_outer), abs(self.v_inner*value))
                if self.v_inner < 0: self.value = -self.value

        return self.value

    def getMask(self, x):
        DIM = x.getDomain().getDim()
        m = whereNonNegative(x[DIM-1]+self.depth+self.lz/2) * whereNonPositive(x[DIM-1]+self.depth-self.lz/2) \
            * whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM > 2:
            m *= whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        self.mask = m
        return m

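# Example (illustrative values): a 500m x 500m x 200m density anomaly of
# 500 kg/m^3 centered 300m below the surface, tapering to 1 kg/m^3 at its
# edges:
#
#     anomaly = SmoothAnomaly(lx=500., ly=500., lz=200.,
#                             x=1000., y=1000., depth=300.,
#                             v_inner=500., v_outer=1.)
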
##############################################################################
class SyntheticDataBase(DataSource):
    """
    Base class to define reference data based on a given property distribution
    (density or susceptibility). Data are collected from a square region of
    lateral extent `length` on a grid with `number_of_elements` cells in each
    direction.

    The synthetic data are constructed by solving the appropriate forward
    problem. Data can be sampled with an offset from the surface at z=0 or
    using the entire subsurface region.
    """
    def __init__(self, datatype,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param DIM: number of spatial dimensions
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latitude, B_longitude].
                    Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``bool``
        :param spherical: if ``True`` spherical coordinates are used (ignored for now)
        :type spherical: ``bool``
        """
        super(SyntheticDataBase, self).__init__()
        if datatype not in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for datatype parameter")
        self.DIM = DIM
        self.number_of_elements = number_of_elements
        self.length = length
        self.__datatype = datatype

        self.__spherical = spherical
        self.__full_knowledge = full_knowledge
        self.__data_offset = data_offset
        self.__B_b = None
        # this is for Cartesian (FIXME ?)
        if datatype == self.MAGNETIC:
            if self.DIM < 3:
                self.__B_b = np.array([-B_b[2], -B_b[0]])
            else:
                self.__B_b = np.array([-B_b[1], -B_b[2], -B_b[0]])
        self.__origin = [0]*(DIM-1)
        self.__delta = [float(length)/number_of_elements]*(DIM-1)
        self.__nPts = [number_of_elements]*(DIM-1)
        self._reference_data = None

    def getDataExtents(self):
        """
        returns the lateral extent of the data set
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        returns the data type
        """
        return self.__datatype

    def getSurveyData(self, domain, origin, number_of_elements, spacing):
        """
        returns the survey data placed on a given domain.

        :param domain: domain on which the data are to be placed
        :type domain: ``Domain``
        :param origin: origin of the domain
        :type origin: ``list`` of ``float``
        :param number_of_elements: number of elements (or cells) in each
                                   spatial direction used to span the domain
        :type number_of_elements: ``list`` of ``int``
        :param spacing: cell size in each spatial direction
        :type spacing: ``list`` of ``float``
        :return: observed gravity field or magnetic flux density for each cell
                 in the domain and for each cell an indicator 1/0 if the data
                 are valid or not.
        :rtype: pair of ``Scalar``
        """
        pde = LinearSinglePDE(domain)
        DIM = domain.getDim()
        x = domain.getX()

        # set the reference data
        k = self.getReferenceProperty(domain)
        # calculate the corresponding potential
        z = x[DIM-1]
        m_psi_ref = whereZero(z-sup(z))
        if self.getDataType() == DataSource.GRAVITY:
            pde.setValue(A=kronecker(domain), Y=-4*np.pi*U.Gravitational_Constant*self._reference_data, q=m_psi_ref)
        else:
            pde.setValue(A=kronecker(domain), X=self._reference_data*self.__B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        #pde.getSolverOptions().setTolerance(1e-13)
        psi_ref = pde.getSolution()
        del pde
        if self.getDataType() == DataSource.GRAVITY:
            data = -grad(psi_ref, ReducedFunction(domain))
        else:
            data = self._reference_data*self.__B_b - grad(psi_ref, ReducedFunction(domain))

        x = ReducedFunction(domain).getX()
        if self.__full_knowledge:
            sigma = whereNegative(x[DIM-1])
        else:
            sigma = 1.
            # limit mask to non-padding in horizontal area
            for i in range(DIM-1):
                x_i = x[i]
                sigma = sigma * wherePositive(x_i) * whereNegative(x_i-(sup(x_i)+inf(x_i)))
            # limit mask to one cell thickness at z=0
            z = x[DIM-1]
            oo = int(self.__data_offset/spacing[DIM-1]+0.5)*spacing[DIM-1]
            sigma = sigma * whereNonNegative(z-oo) * whereNonPositive(z-oo-spacing[DIM-1])
        return data, sigma

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity/susceptibility anomaly data.

        :return: the density or susceptibility anomaly used to create the
                 survey data
        :note: it can be assumed that in the first call the ``domain``
               argument is present so the actual anomaly data can be created.
               In subsequent calls this may not be true.
        :note: this method must be overridden by subclasses
        """
        raise NotImplementedError()

class SyntheticFeatureData(SyntheticDataBase):
    """
    uses a list of `SourceFeature` objects to define synthetic anomaly data
    """
    def __init__(self, datatype,
                 features,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param features: list of features. It is recommended that the
                         features lie entirely below the surface.
        :type features: ``list`` of ``SourceFeature``
        :param DIM: spatial dimension
        :type DIM: 2 or 3
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latitude, B_longitude].
                    Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``bool``
        :param spherical: if ``True`` spherical coordinates are used (ignored)
        :type spherical: ``bool``
        """
        super(SyntheticFeatureData, self).__init__(
                datatype=datatype, DIM=DIM, number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge,
                spherical=spherical)
        self._features = features

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        if self._reference_data is None:
            DIM = domain.getDim()
            x = domain.getX()
            # overlay the features: each feature overrides earlier values
            # within its own mask
            k = 0.
            for f in self._features:
                m = f.getMask(x)
                k = k * (1-m) + f.getValue(x) * m
            self._reference_data = k
        return self._reference_data

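# Example usage (illustrative values): synthetic gravity data from a single
# buried anomaly.
#
#     feature = SmoothAnomaly(lx=500., ly=500., lz=200.,
#                             x=500., y=500., depth=300.,
#                             v_inner=500., v_outer=1.)
#     source = SyntheticFeatureData(DataSource.GRAVITY, features=[feature],
#                                   DIM=2, number_of_elements=20, length=1*U.km)
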
class SyntheticData(SyntheticDataBase):
    """
    defines synthetic gravity/magnetic data based on a harmonic property
    anomaly

        rho = amplitude * sin(n_depth * pi / depth * (z + depth_offset))
                        * sin(n_length * pi / length * x)

    for all x and z <= 0. For z > 0, rho = 0.
    """
    def __init__(self, datatype,
                 n_length=1,
                 n_depth=1,
                 depth_offset=0.,
                 depth=None,
                 amplitude=None,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param n_length: number of oscillations in the anomaly data within the
                         observation region
        :type n_length: ``int``
        :param n_depth: number of oscillations in the anomaly data below surface
        :type n_depth: ``int``
        :param depth_offset: vertical offset of the anomaly data
        :type depth_offset: ``float``
        :param depth: vertical extent of the anomaly data. If not present the
                      depth of the domain is used.
        :type depth: ``float``
        :param amplitude: data amplitude. Default value is 200 U.kg/U.m**3 for
                          gravity and 0.1 for magnetic data.
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latitude, B_longitude].
                    Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``bool``
        :param spherical: if ``True`` spherical coordinates are used (ignored for now)
        :type spherical: ``bool``
        """
        super(SyntheticData, self).__init__(
                datatype=datatype, DIM=DIM,
                number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge,
                spherical=spherical)
        self.__n_length = n_length
        self.__n_depth = n_depth
        self.depth = depth
        self.depth_offset = depth_offset
        if amplitude is None:
            if datatype == DataSource.GRAVITY:
                amplitude = 200 * U.kg/U.m**3
            else:
                amplitude = 0.1
        self.__amplitude = amplitude

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        if self._reference_data is None:
            DIM = domain.getDim()
            x = domain.getX()
            # set the reference data
            z = x[DIM-1]
            dd = self.depth
            if dd is None: dd = inf(z)
            z2 = (z+self.depth_offset)/(self.depth_offset-dd)
            k = sin(self.__n_depth * np.pi * z2) * whereNonNegative(z2) * whereNonPositive(z2-1.) * self.__amplitude
            for i in range(DIM-1):
                x_i = x[i]
                min_x = inf(x_i)
                max_x = sup(x_i)
                k *= sin(self.__n_length*np.pi*(x_i-min_x)/(max_x-min_x))
            self._reference_data = k
        return self._reference_data

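# Example usage (illustrative values): a sinusoidal density anomaly with two
# oscillations laterally and one vertically.
#
#     source = SyntheticData(DataSource.GRAVITY, n_length=2, n_depth=1,
#                            DIM=2, number_of_elements=20, length=1*U.km)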
