/[escript]/trunk/downunder/py_src/datasources.py
Revision 4125 - Wed Jan 2 06:15:00 2013 UTC by gross
File MIME type: text/x-python
File size: 31943 byte(s)
some fixes in the inversion set-up

##############################################################################
#
# Copyright (c) 2003-2012 by University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development since 2012 by School of Earth Sciences
#
##############################################################################

"""Data readers/providers for inversions"""

__copyright__="""Copyright (c) 2003-2012 by University of Queensland
http://www.uq.edu.au
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Open Software License version 3.0
http://www.opensource.org/licenses/osl-3.0.php"""
__url__="https://launchpad.net/escript-finley"

__all__ = ['simpleGeoMagneticFluxDensity', 'DataSource', 'ErMapperData', 'SyntheticDataBase', 'SyntheticFeatureData', 'SyntheticData', 'SmoothAnomaly']

import logging
import numpy as np
from esys.escript import ReducedFunction
from esys.escript.linearPDEs import LinearSinglePDE
from esys.escript.util import *
import esys.escript.unitsSI as U
from esys.ripley import Brick, Rectangle, ripleycpp

try:
    from scipy.io.netcdf import netcdf_file
    __all__ += ['NetCdfData']
except ImportError:
    pass

def LatLonToUTM(lon, lat, wkt_string=None):
    """
    Converts one or more longitude,latitude pairs to the corresponding x,y
    coordinates in the Universal Transverse Mercator projection.

    :note: If the ``pyproj`` module is not installed a warning is printed and
           the input values are scaled by a constant and returned.
    :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
           not available to convert the string, then the input values are
           assumed to be given in the Clarke 1866 projection.

    :param lon: longitude value(s)
    :type lon: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param lat: latitude value(s)
    :type lat: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :rtype: ``numpy.array``
    """

    # not really optimal: if pyproj is not installed we return the input
    # values scaled by a constant.
    try:
        import pyproj
    except ImportError:
        print("Warning, pyproj not available. Domain extents will be wrong")
        return np.array(lon)*1000., np.array(lat)*1000.

    # determine UTM zone from the input data
    zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
    try:
        import osgeo.osr
        srs = osgeo.osr.SpatialReference()
        srs.ImportFromWkt(wkt_string)
        p_src = pyproj.Proj(srs.ExportToProj4())
    except Exception:
        p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
    # we assume southern hemisphere here
    p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
    x,y=pyproj.transform(p_src, p_dest, lon, lat)
    return x,y

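# A minimal usage sketch for the helper above, assuming pyproj is installed;
# the coordinates are made-up values along a hypothetical survey line.
def _example_LatLonToUTM():
    lon = [138.50, 138.55, 138.60]  # longitudes in degrees east
    lat = [-30.10, -30.10, -30.10]  # latitudes in degrees north
    # no wkt_string given, so Clarke 1866 lon/lat is assumed as source CRS
    x, y = LatLonToUTM(lon, lat)
    return x, y
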
def simpleGeoMagneticFluxDensity(latitude, longitude=0.):
    """
    Returns the components (B_r, B_theta, B_phi) of the magnetic flux
    density for a simple dipole model of the Earth's field at the given
    latitude. The ``longitude`` argument is accepted but not used.
    """
    theta = (90-latitude)/180.*np.pi
    B_0=U.Mu_0 * U.Magnetic_Dipole_Moment_Earth / (4 * np.pi * U.R_Earth**3)
    B_theta= B_0 * sin(theta)
    B_r= 2 * B_0 * cos(theta)
    return B_r, B_theta, 0.

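# A small sketch evaluating the dipole model above; the latitude is an
# arbitrary example value (roughly that of Brisbane).
def _example_simpleGeoMagneticFluxDensity():
    B_r, B_theta, B_phi = simpleGeoMagneticFluxDensity(latitude=-27.5)
    return B_r, B_theta, B_phi
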
class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    This is an abstract base class that implements common functionality.
    Methods to be overwritten by subclasses are marked as such.
    This class assumes 2D data which is mapped to a slice of a 3D domain.
    For other setups override the methods as required.
    """

    GRAVITY, MAGNETIC = list(range(2))

    def __init__(self):
        """
        Constructor. Sets some defaults and initializes logger.
        """
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)
        self.__subsampling_factor=1
        self.__background_magnetic_field=None

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDataType(self):
        """
        Returns the type of survey data managed by this source.
        Subclasses must return `GRAVITY` or `MAGNETIC` as appropriate.
        """
        raise NotImplementedError

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        This method is called by the `DomainBuilder` to retrieve the survey
        data as `Data` objects on the given domain.
        Subclasses should return one or more `Data` objects with survey data
        interpolated on the given ripley domain. The exact return type
        depends on the type of data.

        :param domain: the escript domain to use
        :type domain: `esys.escript.Domain`
        :param origin: the origin coordinates of the domain
        :type origin: ``tuple`` or ``list``
        :param NE: the number of domain elements in each dimension
        :type NE: ``tuple`` or ``list``
        :param spacing: the cell sizes (node spacing) in the domain
        :type spacing: ``tuple`` or ``list``
        """
        raise NotImplementedError

    def setSubsamplingFactor(self, f):
        """
        Sets the data subsampling factor (default=1).
        The factor is applied in all dimensions. For example a 2D dataset
        with 300 x 150 data points will be reduced to 150 x 75 when a
        subsampling factor of 2 is used.
        This becomes important when adding data of varying resolution to
        a `DomainBuilder`.
        """
        self.__subsampling_factor=f

    def getSubsamplingFactor(self):
        """
        Returns the subsampling factor that was set via `setSubsamplingFactor`
        (see there).
        """
        return self.__subsampling_factor

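# A schematic sketch of a concrete DataSource subclass, showing the three
# methods that must be overridden; extents and values are arbitrary examples.
class _ExampleConstantSource(DataSource):
    def getDataExtents(self):
        # origin (m), number of points, spacing (m)
        return ([0., 0.], [10, 10], [100., 100.])

    def getDataType(self):
        return self.GRAVITY

    def getSurveyData(self, domain, origin, NE, spacing):
        from esys.escript import Scalar
        # a constant gravity anomaly of 1e-6 m/s^2, marked valid everywhere
        data = Scalar(1e-6, ReducedFunction(domain))
        sigma = Scalar(1., ReducedFunction(domain))
        return data, sigma
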
##############################################################################
class ErMapperData(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, datatype, headerfile, datafile=None, altitude=0.):
        """
        :param datatype: type of data, must be `GRAVITY` or `MAGNETIC`
        :type datatype: ``int``
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :type headerfile: ``str``
        :param datafile: ER Mapper binary data file name. If not supplied the
                         name of the header file without '.ers' is assumed
        :type datafile: ``str``
        :param altitude: altitude of measurements above ground in meters
        :type altitude: ``float``
        """
        super(ErMapperData,self).__init__()
        self.__headerfile=headerfile
        if datafile is None:
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__altitude=altitude
        self.__datatype=datatype
        self.__readHeader()

    def __readHeader(self):
        self.logger.debug("Checking Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        with open(self.__headerfile, 'r') as f:
            metadata=f.readlines()
        # parse metadata
        start=-1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        md_dict={}
        section=[]
        for i in range(start, len(metadata)):
            line=metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except (KeyError, ValueError):
            maskval = 99999

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except Exception:
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except Exception:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
            op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolation issues
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin=[originX, originY]
        originY-=(NY-1)*spacingY
        self.__delta = [spacingX, spacingY]
        self.__maskval = maskval
        self.__nPts = [NX, NY]
        self.__origin = [originX, originY]
        if self.__datatype == self.GRAVITY:
            self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
            self.__scalefactor = 1e-6
        else:
            self.logger.info("Assuming magnetic data units are 'nT'.")
            self.__scalefactor = 1e-9

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        return self.__datatype

    def getSurveyData(self, domain, origin, NE, spacing):
        nValues=self.__nPts
        # determine base location of this dataset within the domain
        first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim()==3:
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues=nValues+[1]

        data=ripleycpp._readBinaryGrid(self.__datafile,
                ReducedFunction(domain),
                first, nValues, (), self.__maskval)
        sigma = whereNonZero(data-self.__maskval)
        data = data*self.__scalefactor
        sigma = sigma * 2. * self.__scalefactor
        return data, sigma

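# A usage sketch, assuming a gravity survey stored in the hypothetical files
# 'survey.ers' (header) and 'survey' (binary data next to it):
def _example_ErMapperData():
    src = ErMapperData(DataSource.GRAVITY, headerfile='survey.ers')
    (x0, y0), (nx, ny), (dx, dy) = src.getDataExtents()
    return (x0, y0), (nx, ny), (dx, dy)
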
##############################################################################
class NetCdfData(DataSource):
    """
    Data Source for gridded netCDF data that use CF/COARDS conventions.
    """
    def __init__(self, datatype, filename, altitude=0.):
        """
        :param filename: file name for survey data in netCDF format
        :type filename: ``str``
        :param datatype: type of data, must be `GRAVITY` or `MAGNETIC`
        :type datatype: ``int``
        :param altitude: altitude of measurements in meters
        :type altitude: ``float``
        """
        super(NetCdfData,self).__init__()
        self.__filename=filename
        if not datatype in [self.GRAVITY,self.MAGNETIC]:
            raise ValueError("Invalid value for datatype parameter")
        self.__datatype=datatype
        self.__altitude=altitude
        self.__readMetadata()

    def __readMetadata(self):
        self.logger.debug("Checking Data Source: %s"%self.__filename)
        f=netcdf_file(self.__filename, 'r')
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out data variable name
        data_name=None
        if len(f.variables)==1:
            data_name=list(f.variables.keys())[0]
        else:
            for n in f.variables.keys():
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    data_name=n
                    break
        if data_name is None:
            raise RuntimeError("Could not determine data variable")

        # try to determine value for unused data
        if hasattr(f.variables[data_name], 'missing_value'):
            maskval = float(f.variables[data_name].missing_value)
        elif hasattr(f.variables[data_name], '_FillValue'):
            maskval = float(f.variables[data_name]._FillValue)
        else:
            self.logger.debug("missing_value attribute not found, using default.")
            maskval = 99999

        # try to determine units of data - this is disabled for now
        #if hasattr(f.variables[data_name], 'units'):
        #    units=f.variables[data_name].units
        if self.__datatype == self.GRAVITY:
            self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
            self.__scalefactor = 1e-6
        else:
            self.logger.info("Assuming magnetic data units are 'nT'.")
            self.__scalefactor = 1e-9

        # see if there is a wkt string to convert coordinates
        try:
            wkt_string=f.variables[data_name].esri_pe_string
        except AttributeError:
            wkt_string=None

        # we don't trust actual_range & geospatial_lon_min/max since subset
        # data does not seem to have these fields updated.
        # Getting min/max from the arrays is obviously not very efficient but..
        #lon_range=longitude.actual_range
        #lat_range=latitude.actual_range
        #lon_range=[f.geospatial_lon_min,f.geospatial_lon_max]
        #lat_range=[f.geospatial_lat_min,f.geospatial_lat_max]
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        if lon_range[1]<180:
            # values look like degrees so convert to UTM meters
            lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0]]
        f.close()

        self.__nPts=[NX, NY]
        self.__origin=[lon_range[0],lat_range[0]]
        # we are rounding to avoid interpolation issues
        self.__delta=[np.round(lengths[i]/self.__nPts[i]) for i in range(2)]
        #self.__wkt_string=wkt_string
        #self.__lon_name=lon_name
        #self.__lat_name=lat_name
        self.__data_name=data_name
        self.__maskval=maskval

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        return self.__datatype

    def getSurveyData(self, domain, origin, NE, spacing):
        nValues=self.__nPts
        # determine base location of this dataset within the domain
        first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim()==3:
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues=nValues+[1]

        data=ripleycpp._readNcGrid(self.__filename, self.__data_name,
                ReducedFunction(domain), first, nValues, (), self.__maskval)
        sigma=whereNonZero(data-self.__maskval)
        data=data*self.__scalefactor
        sigma=sigma * 2. * self.__scalefactor
        return data, sigma

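# A usage sketch, assuming a CF/COARDS-style magnetic grid in the
# hypothetical file 'anomaly.nc', flown at 300 m:
def _example_NetCdfData():
    src = NetCdfData(DataSource.MAGNETIC, 'anomaly.nc', altitude=300.)
    origin, npts, delta = src.getDataExtents()
    return origin, npts, delta
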
##############################################################################
class SourceFeature(object):
    """
    A feature adds a density distribution to (parts of) a domain of a synthetic
    data source, for example a layer of a specific rock type or a simulated
    ore body.
    """
    def getValue(self, x):
        """
        Returns the value for the area covered by mask. It can be a constant
        or a data object with spatial dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature. That is,
        the mask is non-zero where the value returned by `getValue()` should
        be applied, and zero elsewhere.
        """
        raise NotImplementedError

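# A schematic SourceFeature sketch: a horizontal layer of constant value
# between two depths; the depths and the value are arbitrary examples.
class _ExampleLayerFeature(SourceFeature):
    def __init__(self, top=-500., bottom=-1000., value=250.):
        self.__top, self.__bottom, self.__value = top, bottom, value

    def getValue(self, x):
        return self.__value

    def getMask(self, x):
        z = x[x.getDomain().getDim()-1]
        # non-zero between bottom and top
        return whereNonPositive(z-self.__top) * whereNonNegative(z-self.__bottom)
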
class SmoothAnomaly(SourceFeature):
    """
    A box-shaped anomaly whose value varies smoothly from ``v_inner`` at the
    centre towards ``v_outer`` at the edges.
    """
    def __init__(self, lx, ly, lz, x, y, depth, v_inner=None, v_outer=None):
        self.x=x
        self.y=y
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.depth=depth
        self.v_inner=v_inner
        self.v_outer=v_outer
        self.value=None
        self.mask=None

    def getValue(self,x):
        if self.value is None:
            if self.v_outer is None or self.v_inner is None:
                self.value=0
            else:
                DIM=x.getDomain().getDim()
                alpha=-log(abs(self.v_outer/self.v_inner))*4
                value=exp(-alpha*((x[0]-self.x)/self.lx)**2)
                value=value*exp(-alpha*((x[DIM-1]+self.depth)/self.lz)**2)
                self.value=maximum(abs(self.v_outer), abs(self.v_inner*value))
                if self.v_inner<0: self.value=-self.value

        return self.value

    def getMask(self, x):
        DIM=x.getDomain().getDim()
        m=whereNonNegative(x[DIM-1]+self.depth+self.lz/2) * whereNonPositive(x[DIM-1]+self.depth-self.lz/2) \
          *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM>2:
            m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        self.mask = m
        return m

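# A construction sketch: a 2 km x 2 km x 1 km anomaly of 200 kg/m^3 centred
# at x=5 km, y=5 km and buried 2 km deep; all numbers are arbitrary examples.
def _example_SmoothAnomaly():
    return SmoothAnomaly(lx=2*U.km, ly=2*U.km, lz=1*U.km,
                         x=5*U.km, y=5*U.km, depth=2*U.km,
                         v_inner=200., v_outer=1e-6)
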
##############################################################################
class SyntheticDataBase(DataSource):
    """
    Base class to define reference data based on a given property
    distribution (density or susceptibility). Data are collected from a
    square region of lateral extent ``length`` on a grid with
    ``number_of_elements`` cells in each direction.

    The synthetic data are constructed by solving the appropriate forward
    problem. Data can be sampled with an offset from the surface at z=0 or
    using the entire subsurface region.
    """
    def __init__(self, datatype,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param DIM: spatial dimension
        :type DIM: 2 or 3
        :param number_of_elements: lateral number of elements in the region where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latitude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` spherical coordinates are used (currently ignored)
        :type spherical: ``Bool``
        """
        super(SyntheticDataBase,self).__init__()
        if not datatype in [self.GRAVITY,self.MAGNETIC]:
            raise ValueError("Invalid value for datatype parameter")
        self.DIM=DIM
        self.number_of_elements=number_of_elements
        self.length=length
        self.__datatype = datatype

        self.__spherical = spherical
        self.__full_knowledge= full_knowledge
        self.__data_offset=data_offset
        self.__B_b =None
        # this is for Cartesian (FIXME ?)
        if datatype == self.MAGNETIC:
            if self.DIM<3:
                self.__B_b = np.array([-B_b[2], -B_b[0]])
            else:
                self.__B_b = np.array([-B_b[1], -B_b[2], -B_b[0]])
        self.__origin = [0]*(DIM-1)
        self.__delta = [float(length)/number_of_elements]*(DIM-1)
        self.__nPts = [number_of_elements]*(DIM-1)
        self._reference_data=None

    def getDataExtents(self):
        """
        returns the lateral data extent of the data set
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        returns the data type
        """
        return self.__datatype

    def getSurveyData(self, domain, origin, number_of_elements, spacing):
        """
        returns the survey data placed on a given domain.

        :param domain: domain on which the data are to be placed
        :type domain: ``Domain``
        :param origin: origin of the domain
        :type origin: ``list`` of ``float``
        :param number_of_elements: number of elements (or cells) in each spatial direction used to span the domain
        :type number_of_elements: ``list`` of ``int``
        :param spacing: cell size in each spatial direction
        :type spacing: ``list`` of ``float``
        :return: observed gravity field or magnetic flux density for each cell in the domain and, for each cell, an indicator 1/0 whether the data are valid or not
        :rtype: pair of ``Scalar``
        """
        pde=LinearSinglePDE(domain)
        DIM=domain.getDim()
        x=domain.getX()

        # set the reference data
        k=self.getReferenceProperty(domain)
        # calculate the corresponding potential
        z=x[DIM-1]
        m_psi_ref=whereZero(z-sup(z))
        if self.getDataType()==DataSource.GRAVITY:
            pde.setValue(A=kronecker(domain), Y=-4*np.pi*U.Gravitational_Constant*self._reference_data, q=m_psi_ref)
        else:
            pde.setValue(A=kronecker(domain), X=self._reference_data*self.__B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        #pde.getSolverOptions().setTolerance(1e-13)
        psi_ref=pde.getSolution()
        del pde
        if self.getDataType()==DataSource.GRAVITY:
            data = -grad(psi_ref, ReducedFunction(domain))
        else:
            data = self._reference_data*self.__B_b-grad(psi_ref, ReducedFunction(domain))

        x=ReducedFunction(domain).getX()
        if self.__full_knowledge:
            sigma = whereNegative(x[DIM-1])
        else:
            sigma=1.
            # limit mask to non-padding in horizontal area
            for i in range(DIM-1):
                x_i=x[i]
                sigma=sigma * wherePositive(x_i) * whereNegative(x_i-(sup(x_i)+inf(x_i)))
            # limit mask to one cell thickness at z=0
            z=x[DIM-1]
            oo=int(self.__data_offset/spacing[DIM-1]+0.5)*spacing[DIM-1]
            sigma = sigma * whereNonNegative(z-oo) * whereNonPositive(z-oo-spacing[DIM-1])
        return data,sigma

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity/susceptibility anomaly data.

        :return: the density or susceptibility anomaly used to create the survey data
        :note: it can be assumed that in the first call the ``domain`` argument is present so the actual anomaly data can be created. In subsequent calls this may not be true.
        :note: this method must be overwritten by subclasses
        """
        raise NotImplementedError()

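# A schematic subclass sketch: the reference property is a constant density
# anomaly everywhere below z=0; the value is an arbitrary example.
class _ExampleSyntheticSource(SyntheticDataBase):
    def getReferenceProperty(self, domain=None):
        if self._reference_data is None:
            x = domain.getX()
            DIM = domain.getDim()
            # 100 kg/m^3 below the surface, 0 above
            self._reference_data = 100. * whereNegative(x[DIM-1])
        return self._reference_data
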
class SyntheticFeatureData(SyntheticDataBase):
    """
    uses a list of ``SourceFeature`` objects to define synthetic anomaly data
    """
    def __init__(self, datatype,
                 features,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param features: list of features. It is recommended that the features lie entirely below the surface.
        :type features: ``list`` of ``SourceFeature``
        :param DIM: spatial dimension
        :type DIM: 2 or 3
        :param number_of_elements: lateral number of elements in the region where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latitude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` spherical coordinates are used (currently ignored)
        :type spherical: ``Bool``
        """
        super(SyntheticFeatureData,self).__init__(
                datatype=datatype, DIM=DIM, number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge,
                spherical=spherical)
        self._features = features

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        if self._reference_data is None:
            DIM=domain.getDim()
            x=domain.getX()
            k=0.
            for f in self._features:
                m=f.getMask(x)
                k = k * (1-m) + f.getValue(x) * m
            self._reference_data= k
        return self._reference_data

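# A usage sketch combining the pieces above: synthetic gravity data from a
# single smooth density anomaly; all values are arbitrary examples.
def _example_SyntheticFeatureData():
    anomaly = SmoothAnomaly(lx=2*U.km, ly=2*U.km, lz=1*U.km,
                            x=5*U.km, y=5*U.km, depth=2*U.km,
                            v_inner=200., v_outer=1e-6)
    return SyntheticFeatureData(DataSource.GRAVITY, features=[anomaly],
                                DIM=2, number_of_elements=20, length=10*U.km)
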
class SyntheticData(SyntheticDataBase):
    """
    defines synthetic gravity/magnetic data based on a harmonic property
    anomaly

        rho = amplitude * sin(n_depth * pi * (z+depth_offset)/(depth_offset-depth))
                        * sin(n_length * pi * (x-x_min)/(x_max-x_min))

    within the depth range covered by the vertical sine term; elsewhere
    rho = 0.
    """
    def __init__(self, datatype,
                 n_length=1,
                 n_depth=1,
                 depth_offset=0.,
                 depth=None,
                 amplitude=None,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param n_length: number of oscillations in the anomaly data within the observation region
        :type n_length: ``int``
        :param n_depth: number of oscillations in the anomaly data below the surface
        :type n_depth: ``int``
        :param depth: vertical extent of the anomaly data. If not present the depth of the domain is used.
        :type depth: ``float``
        :param amplitude: data amplitude. Default value is 200*U.kg/U.m**3 for gravity and 0.1 for magnetic data.
        :param DIM: spatial dimension
        :type DIM: 2 or 3
        :param number_of_elements: lateral number of elements in the region where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latitude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` spherical coordinates are used (currently ignored)
        :type spherical: ``Bool``
        """
        super(SyntheticData,self).__init__(
                datatype=datatype, DIM=DIM, number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge,
                spherical=spherical)
        self.__n_length = n_length
        self.__n_depth = n_depth
        self.depth = depth
        self.depth_offset=depth_offset
        if amplitude is None:
            if datatype == DataSource.GRAVITY:
                amplitude = 200*U.kg/U.m**3
            else:
                amplitude = 0.1
        self.__amplitude = amplitude

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity anomaly data.
        """
        if self._reference_data is None:
            DIM=domain.getDim()
            x=domain.getX()
            # set the reference data
            z=x[DIM-1]
            dd=self.depth
            # inf(z) is negative below the surface but depth is a positive extent
            if dd is None: dd=-inf(z)
            z2=(z+self.depth_offset)/(self.depth_offset-dd)
            k=sin(self.__n_depth * np.pi * z2) * whereNonNegative(z2) * whereNonPositive(z2-1.) * self.__amplitude
            for i in range(DIM-1):
                x_i=x[i]
                min_x=inf(x_i)
                max_x=sup(x_i)
                k*= sin(self.__n_length*np.pi*(x_i-min_x)/(max_x-min_x))
            self._reference_data= k
        return self._reference_data

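# A usage sketch: one sine oscillation laterally and two with depth, using
# the default gravity amplitude; all values are arbitrary examples.
def _example_SyntheticData():
    return SyntheticData(DataSource.GRAVITY, n_length=1, n_depth=2,
                         DIM=2, number_of_elements=20, length=10*U.km)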
