/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Contents of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 4132 - (show annotations)
Fri Jan 11 05:46:49 2013 UTC (6 years, 8 months ago) by caltinay
File MIME type: text/x-python
File size: 33066 byte(s)
Range of epydoc fixes.

1
2 ##############################################################################
3 #
4 # Copyright (c) 2003-2012 by University of Queensland
5 # http://www.uq.edu.au
6 #
7 # Primary Business: Queensland, Australia
8 # Licensed under the Open Software License version 3.0
9 # http://www.opensource.org/licenses/osl-3.0.php
10 #
11 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12 # Development since 2012 by School of Earth Sciences
13 #
14 ##############################################################################
15
16 """Data readers/providers for inversions"""
17
18 __copyright__="""Copyright (c) 2003-2012 by University of Queensland
19 http://www.uq.edu.au
20 Primary Business: Queensland, Australia"""
21 __license__="""Licensed under the Open Software License version 3.0
22 http://www.opensource.org/licenses/osl-3.0.php"""
23 __url__="https://launchpad.net/escript-finley"
24
25 __all__ = ['simpleGeoMagneticFluxDensity', 'DataSource','ErMapperData', 'SyntheticDataBase' , 'SyntheticFeatureData', 'SyntheticData','SmoothAnomaly']
26
27 import logging
28 import numpy as np
29 from esys.escript import ReducedFunction
30 from esys.escript.linearPDEs import LinearSinglePDE
31 from esys.escript.util import *
32 import esys.escript.unitsSI as U
33 from esys.ripley import Brick, Rectangle, ripleycpp
34
# netCDF support is optional: only advertise NetCdfData when scipy's
# netCDF reader is importable. Catch ImportError specifically so genuine
# errors (e.g. a NameError) are not silently swallowed.
try:
    from scipy.io.netcdf import netcdf_file
    __all__ += ['NetCdfData']
except ImportError:
    pass
40
41 def LatLonToUTM(lon, lat, wkt_string=None):
42 """
43 Converts one or more longitude,latitude pairs to the corresponding x,y
44 coordinates in the Universal Transverse Mercator projection.
45
46 :note: If the ``pyproj`` module is not installed a warning is printed and
47 the input values are scaled by a constant and returned.
48 :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
49 not available to convert the string, then the input values are
50 assumed to be given in the Clarke 1866 projection.
51
52 :param lon: longitude value(s)
53 :type lon: `float`, `list`, `tuple`, or ``numpy.array``
54 :param lat: latitude value(s)
55 :type lat: `float`, `list`, `tuple`, or ``numpy.array``
56 :rtype: ``numpy.array``
57 """
58
59 # not really optimal: if pyproj is not installed we return the input
60 # values scaled by a constant.
61 try:
62 import pyproj
63 except:
64 print("Warning, pyproj not available. Domain extents will be wrong")
65 return np.array(lon)*1000., np.array(lat)*1000.
66
67 # determine UTM zone from the input data
68 zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
69 try:
70 import osgeo.osr
71 srs = osgeo.osr.SpatialReference()
72 srs.ImportFromWkt(wkt_string)
73 p_src = pyproj.Proj(srs.ExportToProj4())
74 except:
75 p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
76 # we assume southern hemisphere here
77 p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
78 x,y=pyproj.transform(p_src, p_dest, lon, lat)
79 return x,y
80
def simpleGeoMagneticFluxDensity(latitude, longitude=0.):
    """
    Returns an approximation of the geomagnetic flux density at the given
    latitude using a simple dipole model of the Earth's field.

    :param latitude: geographic latitude in degrees
    :type latitude: ``float``
    :param longitude: geographic longitude in degrees (unused in the dipole
                      approximation)
    :type longitude: ``float``
    :return: tuple (B_r, B_theta, B_phi) of field components; B_phi is 0
             for a dipole
    """
    # colatitude in radians
    theta = (90-latitude)/180.*np.pi
    # dipole field strength at the equator on the surface
    B_0 = U.Mu_0 * U.Magnetic_Dipole_Moment_Earth / (4 * np.pi * U.R_Earth**3)
    # use numpy's trig functions explicitly; theta is a plain float so the
    # star-imported escript sin/cos are not required here
    B_theta = B_0 * np.sin(theta)
    B_r = 2 * B_0 * np.cos(theta)
    return B_r, B_theta, 0.
87
class DataSource(object):
    """
    Provides survey data to the inversion process.

    This is an abstract base class implementing the functionality shared by
    all data sources. It assumes 2D data mapped onto a slice of a 3D domain;
    subclasses with a different setup should override the relevant methods.
    """

    # identifiers for the kind of survey data a source provides
    GRAVITY, MAGNETIC = list(range(2))

    def __init__(self):
        """
        Initializes the logger and sets default values.
        """
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)
        self.__subsampling_factor = 1
        self.__background_magnetic_field = None

    def getDataExtents(self):
        """
        Returns ``( (x0, y0), (nx, ny), (dx, dy) )`` where

        - ``x0``, ``y0`` = coordinates of the data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        Subclasses must implement this method.
        """
        raise NotImplementedError

    def getDataType(self):
        """
        Returns `GRAVITY` or `MAGNETIC` depending on the kind of survey data
        managed by this source. Subclasses must implement this method.
        """
        raise NotImplementedError

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Called by the `DomainBuilder` to retrieve the survey data as `Data`
        objects on the given domain. Subclasses should return one or more
        `Data` objects with the survey data interpolated on the given ripley
        domain; the exact return type depends on the kind of data.

        :param domain: the escript domain to use
        :type domain: `esys.escript.Domain`
        :param origin: the origin coordinates of the domain
        :type origin: ``tuple`` or ``list``
        :param NE: the number of domain elements in each dimension
        :type NE: ``tuple`` or ``list``
        :param spacing: the cell sizes (node spacing) in the domain
        :type spacing: ``tuple`` or ``list``
        """
        raise NotImplementedError

    def setSubsamplingFactor(self, f):
        """
        Sets the data subsampling factor (default=1). The factor applies in
        every dimension, so a 2D dataset of 300 x 150 points is reduced to
        150 x 75 with a factor of 2. This becomes important when combining
        data of varying resolution in a `DomainBuilder`.
        """
        self.__subsampling_factor = f

    def getSubsamplingFactor(self):
        """
        Returns the subsampling factor previously set via
        `setSubsamplingFactor`.
        """
        return self.__subsampling_factor
162
163
164 ##############################################################################
class ErMapperData(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, datatype, headerfile, datafile=None, altitude=0.):
        """
        :param datatype: type of data, must be `GRAVITY` or `MAGNETIC`
        :type datatype: ``int``
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :type headerfile: ``str``
        :param datafile: ER Mapper binary data file name. If not supplied the
                         name of the header file without '.ers' is assumed
        :type datafile: ``str``
        :param altitude: altitude of measurements above ground in meters
        :type altitude: ``float``
        """
        super(ErMapperData, self).__init__()
        self.__headerfile = headerfile
        if datafile is None:
            # by convention the data file name is the header file name
            # without the '.ers' extension
            self.__datafile = headerfile[:-4]
        else:
            self.__datafile = datafile
        self.__altitude = altitude
        self.__datatype = datatype
        self.__readHeader()

    def __readHeader(self):
        """
        Parses the ER Mapper header file and populates grid metadata:
        origin, spacing, number of points, null value and scale factor.
        """
        self.logger.debug("Checking Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        # read and release the header file promptly
        with open(self.__headerfile, 'r') as f:
            metadata = f.readlines()
        # locate the start of the dataset header section
        start = -1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start = i+1
        if start == -1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # flatten the nested 'X Begin'/'X End' sections into a dictionary
        # with dotted keys, e.g. 'RasterInfo.NrOfLines'
        md_dict = {}
        section = []
        for i in range(start, len(metadata)):
            line = metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section) > 0:
                    section.pop()
            else:
                vals = line.split('=')
                if len(vals) == 2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey = '.'.join(section+[key])
                    md_dict[fullkey] = value

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warning("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except (KeyError, ValueError):
            # fall back to the conventional null value
            maskval = 99999

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType'] == 'EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType'] == 'LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        # RuntimeError included so an unknown CoordinateType degrades to a
        # warning and a (0,0) origin, matching the historical behaviour
        except (KeyError, ValueError, RuntimeError):
            self.logger.warning("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX, originY = 0.0, 0.0

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds = gdal.Open(self.__headerfile)
                wkt = ds.GetProjection()
            except Exception:
                wkt = 'GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warning('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM, originY_UTM = LatLonToUTM(originX, originY, wkt)
            op1X, op1Y = LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolation issues
            spacingX = np.round(op1X-originX_UTM)
            spacingY = np.round(op1Y-originY_UTM)
            originX = np.round(originX_UTM)
            originY = np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin = [originX, originY]
        originY -= (NY-1)*spacingY
        self.__delta = [spacingX, spacingY]
        self.__maskval = maskval
        self.__nPts = [NX, NY]
        self.__origin = [originX, originY]
        if self.__datatype == self.GRAVITY:
            self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
            self.__scalefactor = 1e-6
        else:
            self.logger.info("Assuming magnetic data units are 'nT'.")
            self.__scalefactor = 1e-9

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns the type of survey data (`GRAVITY` or `MAGNETIC`).
        """
        return self.__datatype

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Returns the survey data and an error indicator interpolated on the
        given ripley domain (see `DataSource.getSurveyData`).
        """
        nValues = self.__nPts
        # determine base location of this dataset within the domain
        first = [int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim() == 3:
            # place the 2D slice at the measurement altitude
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues = nValues+[1]

        data = ripleycpp._readBinaryGrid(self.__datafile,
                                         ReducedFunction(domain),
                                         first, nValues, (), self.__maskval)
        # cells holding the null value get sigma==0 (invalid)
        sigma = whereNonZero(data-self.__maskval)
        data = data*self.__scalefactor
        # NOTE(review): error margin is taken as 2 data units - confirm
        sigma = sigma * 2. * self.__scalefactor
        return data, sigma
313
314
315 ##############################################################################
class NetCdfData(DataSource):
    """
    Data Source for gridded netCDF data that use CF/COARDS conventions.
    """
    def __init__(self, datatype, filename, altitude=0.):
        """
        :param filename: file name for survey data in netCDF format
        :type filename: ``str``
        :param datatype: type of data, must be `GRAVITY` or `MAGNETIC`
        :type datatype: ``int``
        :param altitude: altitude of measurements in meters
        :type altitude: ``float``
        """
        super(NetCdfData, self).__init__()
        self.__filename = filename
        if not datatype in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for datatype parameter")
        self.__datatype = datatype
        self.__altitude = altitude
        self.__readMetadata()

    def __readMetadata(self):
        """
        Determines grid extents, origin, spacing, data variable name and
        null value from the netCDF file.
        """
        self.logger.debug("Checking Data Source: %s"%self.__filename)
        f = netcdf_file(self.__filename, 'r')
        # ensure the file is closed even if metadata turns out to be invalid
        try:
            NX = 0
            for n in ['lon', 'longitude', 'x']:
                if n in f.dimensions:
                    NX = f.dimensions[n]
                    break
            if NX == 0:
                raise RuntimeError("Could not determine extents of data")
            NY = 0
            for n in ['lat', 'latitude', 'y']:
                if n in f.dimensions:
                    NY = f.dimensions[n]
                    break
            if NY == 0:
                raise RuntimeError("Could not determine extents of data")

            # find longitude and latitude variables (popped so the data
            # variable search below only sees the remaining variables)
            lon_name = None
            for n in ['lon', 'longitude']:
                if n in f.variables:
                    lon_name = n
                    longitude = f.variables.pop(n)
                    break
            if lon_name is None:
                raise RuntimeError("Could not determine longitude variable")
            lat_name = None
            for n in ['lat', 'latitude']:
                if n in f.variables:
                    lat_name = n
                    latitude = f.variables.pop(n)
                    break
            if lat_name is None:
                raise RuntimeError("Could not determine latitude variable")

            # try to figure out data variable name
            data_name = None
            if len(f.variables) == 1:
                # list() so this also works on Python 3 where keys() is a
                # non-indexable view
                data_name = list(f.variables.keys())[0]
            else:
                for n in list(f.variables.keys()):
                    dims = f.variables[n].dimensions
                    if (lat_name in dims) and (lon_name in dims):
                        data_name = n
                        break
            if data_name is None:
                raise RuntimeError("Could not determine data variable")

            # try to determine value for unused data
            if hasattr(f.variables[data_name], 'missing_value'):
                maskval = float(f.variables[data_name].missing_value)
            elif hasattr(f.variables[data_name], '_FillValue'):
                maskval = float(f.variables[data_name]._FillValue)
            else:
                self.logger.debug("missing_value attribute not found, using default.")
                maskval = 99999

            # determining the units of the data is disabled for now
            if self.__datatype == self.GRAVITY:
                self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
                self.__scalefactor = 1e-6
            else:
                self.logger.info("Assuming magnetic data units are 'nT'.")
                self.__scalefactor = 1e-9

            # see if there is a wkt string to convert coordinates
            try:
                wkt_string = f.variables[data_name].esri_pe_string
            except AttributeError:
                wkt_string = None

            # we don't trust actual_range & geospatial_lon_min/max since
            # subset data does not seem to have these fields updated, so the
            # min/max are taken from the coordinate arrays themselves.
            lon_range = longitude.data.min(), longitude.data.max()
            lat_range = latitude.data.min(), latitude.data.max()
            if lon_range[1] < 180:
                # values look like degrees so convert to UTM meters
                lon_range, lat_range = LatLonToUTM(lon_range, lat_range, wkt_string)
            lengths = [lon_range[1]-lon_range[0], lat_range[1]-lat_range[0]]
        finally:
            f.close()

        self.__nPts = [NX, NY]
        self.__origin = [lon_range[0], lat_range[0]]
        # we are rounding to avoid interpolation issues
        self.__delta = [np.round(lengths[i]/self.__nPts[i]) for i in range(2)]
        self.__data_name = data_name
        self.__maskval = maskval

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns the type of survey data (`GRAVITY` or `MAGNETIC`).
        """
        return self.__datatype

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Returns the survey data and an error indicator interpolated on the
        given ripley domain (see `DataSource.getSurveyData`).
        """
        nValues = self.__nPts
        # determine base location of this dataset within the domain
        first = [int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim() == 3:
            # place the 2D slice at the measurement altitude
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues = nValues+[1]

        data = ripleycpp._readNcGrid(self.__filename, self.__data_name,
                ReducedFunction(domain), first, nValues, (), self.__maskval)
        # cells holding the null value get sigma==0 (invalid)
        sigma = whereNonZero(data-self.__maskval)
        data = data*self.__scalefactor
        # NOTE(review): error margin is taken as 2 data units - confirm
        sigma = sigma * 2. * self.__scalefactor
        return data, sigma
458
459
460 ##############################################################################
class SourceFeature(object):
    """
    A feature adds a density distribution to (parts of) a domain of a
    synthetic data source, for example a layer of a specific rock type or a
    simulated ore body.

    Subclasses must implement `getValue` and `getMask`.
    """

    def getValue(self):
        """
        Returns the value for the area covered by the mask. This may be a
        constant or a data object with spatial dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature, i.e.
        non-zero wherever the feature value applies and zero everywhere
        else.
        """
        raise NotImplementedError
480
class SmoothAnomaly(SourceFeature):
    """
    A source feature in the shape of a box of extents lx x ly x lz centred
    horizontally at (x, y) and vertically at the given depth below the
    surface z=0. The value varies smoothly from ``v_inner`` at the centre
    towards ``v_outer`` at the box boundary.
    """
    def __init__(self, lx, ly, lz, x, y, depth, v_inner=None, v_outer=None):
        # horizontal centre position of the anomaly
        self.x=x
        self.y=y
        # extent of the box in each spatial direction
        self.lx=lx
        self.ly=ly
        self.lz=lz
        # vertical centre of the box below the surface z=0
        self.depth=depth
        # value at the centre / towards the boundary; if either is None the
        # feature value is 0 (see getValue)
        self.v_inner=v_inner
        self.v_outer=v_outer
        # caches filled lazily by getValue()/getMask()
        self.value=None
        self.mask=None

    def getValue(self,x):
        """
        Returns the (smoothly varying) anomaly value at the coordinates x.
        The result is cached, so subsequent calls return the first result
        and ignore ``x``.
        """
        if self.value is None:
            if self.v_outer is None or self.v_inner is None:
                self.value=0
            else:
                DIM=x.getDomain().getDim()
                # decay rate chosen so that at half-width the Gaussian factor
                # has dropped to |v_outer/v_inner| (i.e. value ~ |v_outer|)
                alpha=-log(abs(self.v_outer/self.v_inner))*4
                value=exp(-alpha*((x[0]-self.x)/self.lx)**2)
                value=value*exp(-alpha*((x[DIM-1]+self.depth)/self.lz)**2)
                # clamp from below by |v_outer| so the background level is kept
                self.value=maximum(abs(self.v_outer), abs(self.v_inner*value))
                if self.v_inner<0: self.value=-self.value

        return self.value

    def getMask(self, x):
        """
        Returns a mask that is 1 inside the box spanned by this feature and
        0 outside. The mask is also cached in ``self.mask``.
        """
        DIM=x.getDomain().getDim()
        # vertical extent [-depth-lz/2, -depth+lz/2] and x extent around self.x
        m=whereNonNegative(x[DIM-1]+self.depth+self.lz/2) * whereNonPositive(x[DIM-1]+self.depth-self.lz/2) \
            *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM>2:
            # in 3D also restrict the y direction
            m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        self.mask = m
        return m
516
517 ##############################################################################
class SyntheticDataBase(DataSource):
    """
    Base class to define reference data based on a given property distribution
    (density or susceptibility). Data are collected from a square region of
    vertical extent `length` on a grid with `number_of_elements` cells in each
    direction.

    The synthetic data are constructed by solving the appropriate forward
    problem. Data can be sampled with an offset from the surface at z=0 or
    using the entire subsurface region.
    """
    def __init__(self, datatype,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param DIM: number of spatial dimensions
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` sperical coordinates are used (ignored for now)
        :type spherical: ``Bool``
        """
        super(SyntheticDataBase,self).__init__()
        if not datatype in [self.GRAVITY,self.MAGNETIC]:
            raise ValueError("Invalid value for datatype parameter")
        self.DIM=DIM
        self.number_of_elements=number_of_elements
        self.length=length
        self.__datatype = datatype

        self.__spherical = spherical
        self.__full_knowledge= full_knowledge
        self.__data_offset=data_offset
        self.__B_b =None
        # this is for Cartesian (FIXME ?)
        if datatype == self.MAGNETIC:
            if self.DIM < 3:
                # NOTE(review): the sign flips and component reordering map
                # [B_r, B_lat, B_lon] onto Cartesian axes - confirm ordering
                self.__B_b = np.array([-B_b[2], -B_b[0]])
            else:
                self.__B_b = ([-B_b[1], -B_b[2], -B_b[0]])
        # lateral grid description of the data collection region
        self.__origin = [0]*(DIM-1)
        self.__delta = [float(length)/number_of_elements]*(DIM-1)
        self.__nPts = [number_of_elements]*(DIM-1)
        # reference property distribution; created by getReferenceProperty()
        self._reference_data=None

    def getDataExtents(self):
        """
        returns the lateral data extent of the data set as
        ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        returns the data type (`GRAVITY` or `MAGNETIC`)
        """
        return self.__datatype

    def getSurveyData(self, domain, origin, number_of_elements, spacing):
        """
        returns the survey data placed on a given domain.

        :param domain: domain on which the data are to be placed
        :type domain: ``Domain``
        :param origin: origin of the domain
        :type origin: ``list`` of ``float``
        :param number_of_elements: number of elements (or cells) in each
                                   spatial direction used to span the domain
        :type number_of_elements: ``list`` of ``int``
        :param spacing: cell size in each spatial direction
        :type spacing: ``list`` of ``float``
        :return: observed gravity field or magnetic flux density for each cell
                 in the domain and for each cell an indicator 1/0 if the data
                 are valid or not.
        :rtype: pair of ``Scalar``
        """
        pde=LinearSinglePDE(domain)
        DIM=domain.getDim()
        x=domain.getX()
        # set the reference data

        k=self.getReferenceProperty(domain)
        # calculate the corresponding potential; the potential is fixed to
        # zero on the top boundary of the domain
        z=x[DIM-1]
        m_psi_ref=whereZero(z-sup(z))
        if self.getDataType()==DataSource.GRAVITY:
            pde.setValue(A=kronecker(domain), Y=-4*np.pi*U.Gravitational_Constant*self._reference_data, q=m_psi_ref)
        else:
            pde.setValue(A=kronecker(domain), X=self._reference_data*self.__B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        #pde.getSolverOptions().setTolerance(1e-13)
        psi_ref=pde.getSolution()
        del pde
        # derive the observable field from the potential
        if self.getDataType()==DataSource.GRAVITY:
            data = -grad(psi_ref, ReducedFunction(domain))
        else:
            data = self._reference_data*self.__B_b-grad(psi_ref, ReducedFunction(domain))

        x=ReducedFunction(domain).getX()
        if self.__full_knowledge:
            # data are valid everywhere below the surface
            sigma = whereNegative(x[DIM-1])
        else:
            sigma=1.
            # limit mask to non-padding in horizontal area
            for i in range(DIM-1):
                x_i=x[i]
                # NOTE(review): sup(x_i)+inf(x_i) looks odd as an upper
                # bound (sup-inf would be the extent) - confirm intent
                sigma=sigma * wherePositive(x_i) * whereNegative(x_i-(sup(x_i)+inf(x_i)))
            # limit mask to one cell thickness at z=0, snapping the data
            # offset to the nearest cell boundary
            z=x[DIM-1]
            oo=int(self.__data_offset/spacing[DIM-1]+0.5)*spacing[DIM-1]
            sigma = sigma * whereNonNegative(z-oo) * whereNonPositive(z-oo-spacing[DIM-1])
        return data,sigma

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity/susceptibility anomaly data.

        :return: the density or susceptibility anomaly used to create the
                 survey data
        :note: it can be assumed that in the first call the ``domain``
               argument is present so the actual anomaly data can be created.
               In subsequent calls this may not be true.
        :note: method needs to be overwritten
        """
        raise NotImplementedError()
660
class SyntheticFeatureData(SyntheticDataBase):
    """
    Uses a list of ``SourceFeature`` objects to define synthetic anomaly data.
    """
    def __init__(self, datatype,
                 features,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param features: list of features. It is recommended that the features do entirely lay below surface.
        :type features: ``list`` of ``SourceFeature``
        :param DIM: spatial dimension
        :type DIM: 2 or 3
        :param number_of_elements: lateral number of elements in the region where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extend of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` sperical coordinates are used (ignored)
        :type spherical: ``Bool``
        """
        super(SyntheticFeatureData,self).__init__(
                datatype=datatype, DIM=DIM, number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge,
                spherical=spherical)
        self._features = features

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        # 'is None' rather than '== None': the cached value may be an escript
        # Data object whose __eq__ does not return a plain bool
        if self._reference_data is None:
            DIM = domain.getDim()
            x = domain.getX()
            k = 0.
            # compose features in order; a later feature overwrites earlier
            # ones wherever their masks overlap
            for f in self._features:
                m = f.getMask(x)
                k = k * (1-m) + f.getValue(x) * m
            self._reference_data = k
        return self._reference_data
717
class SyntheticData(SyntheticDataBase):
    """
    defines synthetic gravity/magnetic data based on harmonic property anomaly

    rho = mean + amplitude * sin(n_depth * pi /depth * (z+depth_offset)) * sin(n_length * pi /length * x - shift )

    for all x and z<=0. for z>0 rho = 0.
    """
    def __init__(self, datatype,
                 n_length=1,
                 n_depth=1,
                 depth_offset=0.,
                 depth=None,
                 amplitude=None,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param n_length: number of oscillations in the anomaly data within the
                         observation region
        :type n_length: ``int``
        :param n_depth: number of oscillations in the anomaly data below surface
        :type n_depth: ``int``
        :param depth_offset: vertical offset of the anomaly
        :type depth_offset: ``float``
        :param depth: vertical extent of the anomaly data. If not present the
                      depth of the domain is used.
        :type depth: ``float``
        :param amplitude: data amplitude. Default value is 200 U.kg/U.m**3 for
                          gravity and 0.1 for magnetic data.
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude].
                    Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` sperical coordinates are used (ignored for now)
        :type spherical: ``Bool``
        """
        super(SyntheticData,self).__init__(
                datatype=datatype, DIM=DIM,
                number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge,
                spherical=spherical)
        self.__n_length = n_length
        self.__n_depth = n_depth
        self.depth = depth
        self.depth_offset = depth_offset
        # identity check, not equality: amplitude could be any object
        if amplitude is None:
            if datatype == DataSource.GRAVITY:
                amplitude = 200 *U.kg/U.m**3
            else:
                amplitude = 0.1
        self.__amplitude = amplitude

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity anomaly data.
        """
        if self._reference_data is None:
            DIM = domain.getDim()
            x = domain.getX()
            # set the reference data
            z = x[DIM-1]
            dd = self.depth
            if dd is None:
                # default to the full depth of the domain
                dd = inf(z)
            # normalized depth coordinate: 0 at the offset, 1 at depth dd
            z2 = (z+self.depth_offset)/(self.depth_offset-dd)
            k = sin(self.__n_depth * np.pi * z2) * whereNonNegative(z2) * whereNonPositive(z2-1.) * self.__amplitude
            # 'range' (not Python-2-only 'xrange') for consistency with the
            # rest of this module and Python 3 compatibility
            for i in range(DIM-1):
                x_i = x[i]
                min_x = inf(x_i)
                max_x = sup(x_i)
                k *= sin(self.__n_length*np.pi*(x_i-min_x)/(max_x-min_x))
            self._reference_data = k
        return self._reference_data
808

  ViewVC Help
Powered by ViewVC 1.1.26