/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Annotation of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 4145 - (hide annotations)
Fri Jan 18 00:51:49 2013 UTC (6 years, 8 months ago) by caltinay
File MIME type: text/x-python
File size: 33026 byte(s)
Checkpoint.

1 caltinay 3946
2 jfenwick 3981 ##############################################################################
3 caltinay 3946 #
4 caltinay 4145 # Copyright (c) 2003-2013 by University of Queensland
5 jfenwick 3981 # http://www.uq.edu.au
6 caltinay 3946 #
7     # Primary Business: Queensland, Australia
8     # Licensed under the Open Software License version 3.0
9     # http://www.opensource.org/licenses/osl-3.0.php
10     #
11 jfenwick 3981 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12     # Development since 2012 by School of Earth Sciences
13     #
14     ##############################################################################
15 caltinay 3946
16 caltinay 4007 """Data readers/providers for inversions"""
17    
18 caltinay 4145 __copyright__="""Copyright (c) 2003-2013 by University of Queensland
19 jfenwick 3981 http://www.uq.edu.au
20 caltinay 3946 Primary Business: Queensland, Australia"""
21     __license__="""Licensed under the Open Software License version 3.0
22     http://www.opensource.org/licenses/osl-3.0.php"""
23     __url__="https://launchpad.net/escript-finley"
24    
25 caltinay 4145 __all__ = ['simpleGeoMagneticFluxDensity', 'DataSource', 'ErMapperData', \
26     'SyntheticDataBase', 'SyntheticFeatureData', 'SyntheticData',
27     'SmoothAnomaly']
28 caltinay 3947
29 caltinay 3946 import logging
30     import numpy as np
31 caltinay 4060 from esys.escript import ReducedFunction
32 caltinay 4145 from esys.escript import unitsSI as U
33 caltinay 4005 from esys.escript.linearPDEs import LinearSinglePDE
34 caltinay 4007 from esys.escript.util import *
35 caltinay 4005 from esys.ripley import Brick, Rectangle, ripleycpp
36 caltinay 3971
37 caltinay 3947 try:
38     from scipy.io.netcdf import netcdf_file
39 caltinay 4060 __all__ += ['NetCdfData']
40 caltinay 3947 except:
41     pass
42 caltinay 3946
43 caltinay 3957 def LatLonToUTM(lon, lat, wkt_string=None):
44     """
45     Converts one or more longitude,latitude pairs to the corresponding x,y
46     coordinates in the Universal Transverse Mercator projection.
47 caltinay 4007
48 caltinay 4020 :note: If the ``pyproj`` module is not installed a warning is printed and
49 caltinay 4007 the input values are scaled by a constant and returned.
50     :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
51     not available to convert the string, then the input values are
52     assumed to be given in the Clarke 1866 projection.
53 caltinay 4009
54     :param lon: longitude value(s)
55     :type lon: `float`, `list`, `tuple`, or ``numpy.array``
56     :param lat: latitude value(s)
57     :type lat: `float`, `list`, `tuple`, or ``numpy.array``
58     :rtype: ``numpy.array``
59 caltinay 3957 """
60    
61     # not really optimal: if pyproj is not installed we return the input
62 caltinay 4005 # values scaled by a constant.
63 caltinay 3957 try:
64     import pyproj
65     except:
66     print("Warning, pyproj not available. Domain extents will be wrong")
67 caltinay 4009 return np.array(lon)*1000., np.array(lat)*1000.
68 caltinay 3957
69     # determine UTM zone from the input data
70 caltinay 3956 zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
71     try:
72     import osgeo.osr
73     srs = osgeo.osr.SpatialReference()
74     srs.ImportFromWkt(wkt_string)
75     p_src = pyproj.Proj(srs.ExportToProj4())
76     except:
77     p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
78 caltinay 3959 # we assume southern hemisphere here
79     p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
80 caltinay 3956 x,y=pyproj.transform(p_src, p_dest, lon, lat)
81     return x,y
82 caltinay 3947
def simpleGeoMagneticFluxDensity(latitude, longitude=0.):
    """
    Returns a dipole approximation of the geomagnetic flux density at the
    given latitude as the triple ``(B_r, B_theta, B_phi)``.

    :param latitude: geographic latitude in degrees
    :type latitude: ``float``
    :param longitude: geographic longitude in degrees (unused by the dipole
                      approximation)
    :type longitude: ``float``
    """
    # co-latitude in radians
    colat = (90-latitude)/180.*np.pi
    # equatorial dipole field strength
    B_0 = U.Mu_0 * U.Magnetic_Dipole_Moment_Earth / (4 * np.pi * U.R_Earth**3)
    # radial and tangential components; azimuthal component vanishes
    return 2 * B_0 * cos(colat), B_0 * sin(colat), 0.
89 caltinay 4108
class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    This is an abstract base class that implements common functionality.
    Methods to be overwritten by subclasses are marked as such.
    This class assumes 2D data which is mapped to a slice of a 3D domain.
    For other setups override the methods as required.
    """

    # enumeration of the supported survey data types
    GRAVITY, MAGNETIC = list(range(2))

    def __init__(self):
        """
        Constructor. Sets some defaults and initializes logger.
        """
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)
        self.__subsampling_factor = 1

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDataType(self):
        """
        Returns the type of survey data managed by this source.
        Subclasses must return `GRAVITY` or `MAGNETIC` as appropriate.
        """
        raise NotImplementedError

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        This method is called by the `DomainBuilder` to retrieve the survey
        data as `Data` objects on the given domain.

        Subclasses should return one or more `Data` objects with survey data
        interpolated on the given ripley domain. The exact return type
        depends on the type of data.

        :param domain: the escript domain to use
        :type domain: `esys.escript.Domain`
        :param origin: the origin coordinates of the domain
        :type origin: ``tuple`` or ``list``
        :param NE: the number of domain elements in each dimension
        :type NE: ``tuple`` or ``list``
        :param spacing: the cell sizes (node spacing) in the domain
        :type spacing: ``tuple`` or ``list``
        """
        raise NotImplementedError

    def setSubsamplingFactor(self, f):
        """
        Sets the data subsampling factor (default=1).

        The factor is applied in all dimensions. For example a 2D dataset
        with 300 x 150 data points will be reduced to 150 x 75 when a
        subsampling factor of 2 is used.
        This becomes important when adding data of varying resolution to
        a `DomainBuilder`.
        """
        self.__subsampling_factor = f

    def getSubsamplingFactor(self):
        """
        Returns the subsampling factor that was set via `setSubsamplingFactor`
        (see there).
        """
        return self.__subsampling_factor
165 caltinay 3946
166    
167 caltinay 3964 ##############################################################################
class ErMapperData(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, datatype, headerfile, datafile=None, altitude=0.):
        """
        :param datatype: type of data, must be `GRAVITY` or `MAGNETIC`
        :type datatype: ``int``
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :type headerfile: ``str``
        :param datafile: ER Mapper binary data file name. If not supplied the
                         name of the header file without '.ers' is assumed
        :type datafile: ``str``
        :param altitude: altitude of measurements above ground in meters
        :type altitude: ``float``
        """
        super(ErMapperData,self).__init__()
        self.__headerfile=headerfile
        if datafile is None:
            # assumes the header file name ends in a 4-character extension
            # such as '.ers' -- TODO confirm for other suffixes
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__altitude=altitude
        self.__datatype=datatype
        # parse the header now so geometry queries work immediately
        self.__readHeader()

    def __readHeader(self):
        """
        Parses the ASCII ER Mapper header and populates the geometry
        (origin, spacing, number of points), mask value and scale factor
        attributes used by `getDataExtents` and `getSurveyData`.

        :raises RuntimeError: if the header is invalid or mandatory fields
                              are missing
        """
        self.logger.debug("Checking Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        metadata=open(self.__headerfile, 'r').readlines()
        # parse metadata
        # locate the line following 'DatasetHeader Begin'; parsing of
        # key/value pairs starts there
        start=-1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # flatten the nested 'X Begin'/'X End' sections into a dict with
        # dotted keys, e.g. 'RasterInfo.CellInfo.Xdimension'
        md_dict={}
        section=[]
        for i in range(start, len(metadata)):
            line=metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        # only 4-byte IEEE floats are supported by the reader below
        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        # grid dimensions are mandatory
        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except:
            raise RuntimeError("Could not determine extents of data")

        # value marking missing/invalid cells; 99999 if not specified
        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except:
            maskval = 99999

        # cell sizes are mandatory
        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except:
            raise RuntimeError("Could not determine cell dimensions")

        # origin is either given as eastings/northings ('EN') or as
        # longitude/latitude ('LL')
        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except:
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
            # project a point one cell away to derive the UTM cell size
            op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolation issues
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin=[originX, originY]
        # shift origin to the bottom-left corner used internally
        originY-=(NY-1)*spacingY
        self.__delta = [spacingX, spacingY]
        self.__maskval = maskval
        self.__nPts = [NX, NY]
        self.__origin = [originX, originY]
        if self.__datatype == self.GRAVITY:
            self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
            self.__scalefactor = 1e-6
        else:
            self.logger.info("Assuming magnetic data units are 'nT'.")
            self.__scalefactor = 1e-9

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns the type of survey data (`GRAVITY` or `MAGNETIC`).
        """
        return self.__datatype

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Returns the survey data and an error indicator interpolated on the
        given ripley domain (see `DataSource.getSurveyData`).
        """
        nValues=self.__nPts
        # determine base location of this dataset within the domain
        first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim()==3:
            # place the data plane at measurement altitude within the domain
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues=nValues+[1]

        data=ripleycpp._readBinaryGrid(self.__datafile,
                ReducedFunction(domain),
                first, nValues, (), self.__maskval)
        # sigma is non-zero where valid (non-masked) data was read
        sigma = whereNonZero(data-self.__maskval)
        data = data*self.__scalefactor
        sigma = sigma * 2. * self.__scalefactor
        return data, sigma
316    
317    
318     ##############################################################################
class NetCdfData(DataSource):
    """
    Data Source for gridded netCDF data that use CF/COARDS conventions.
    """
    def __init__(self, datatype, filename, altitude=0.):
        """
        :param filename: file name for survey data in netCDF format
        :type filename: ``str``
        :param datatype: type of data, must be `GRAVITY` or `MAGNETIC`
        :type datatype: ``int``
        :param altitude: altitude of measurements in meters
        :type altitude: ``float``
        """
        super(NetCdfData,self).__init__()
        self.__filename=filename
        if not datatype in [self.GRAVITY,self.MAGNETIC]:
            raise ValueError("Invalid value for datatype parameter")
        self.__datatype=datatype
        self.__altitude=altitude
        # read geometry and variable names now so queries work immediately
        self.__readMetadata()

    def __readMetadata(self):
        """
        Reads grid dimensions, coordinate/data variable names, mask value and
        spatial extents from the netCDF file and stores them in attributes
        used by `getDataExtents` and `getSurveyData`.

        :raises RuntimeError: if extents, coordinate variables or the data
                              variable cannot be determined
        """
        self.logger.debug("Checking Data Source: %s"%self.__filename)
        f=netcdf_file(self.__filename, 'r')
        # determine the number of data points from commonly used
        # dimension names
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables
        # (they are popped from the variables dict so only candidate data
        # variables remain for the search further down)
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out data variable name
        data_name=None
        if len(f.variables)==1:
            # note: dict.keys() returns a non-indexable view in Python 3 so
            # it must be converted to a list before subscripting
            data_name=list(f.variables.keys())[0]
        else:
            # otherwise pick the first variable defined over both the
            # latitude and longitude dimensions
            for n in f.variables.keys():
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    data_name=n
                    break
        if data_name is None:
            raise RuntimeError("Could not determine data variable")

        # try to determine value for unused data
        if hasattr(f.variables[data_name], 'missing_value'):
            maskval = float(f.variables[data_name].missing_value)
        elif hasattr(f.variables[data_name], '_FillValue'):
            maskval = float(f.variables[data_name]._FillValue)
        else:
            self.logger.debug("missing_value attribute not found, using default.")
            maskval = 99999

        # try to determine units of data - this is disabled for now
        #if hasattr(f.variables[data_name], 'units'):
        #   units=f.variables[data_name].units
        if self.__datatype == self.GRAVITY:
            self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
            self.__scalefactor = 1e-6
        else:
            self.logger.info("Assuming magnetic data units are 'nT'.")
            self.__scalefactor = 1e-9

        # see if there is a wkt string to convert coordinates
        try:
            wkt_string=f.variables[data_name].esri_pe_string
        except AttributeError:
            wkt_string=None

        # we don't trust actual_range & geospatial_lon_min/max since subset
        # data does not seem to have these fields updated.
        # Getting min/max from the arrays is obviously not very efficient but..
        #lon_range=longitude.actual_range
        #lat_range=latitude.actual_range
        #lon_range=[f.geospatial_lon_min,f.geospatial_lon_max]
        #lat_range=[f.geospatial_lat_min,f.geospatial_lat_max]
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        # values below 180 suggest geographic degrees which need projecting
        if lon_range[1]<180:
            lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0]]
        f.close()

        self.__nPts=[NX, NY]
        self.__origin=[lon_range[0],lat_range[0]]
        # we are rounding to avoid interpolation issues
        self.__delta=[np.round(lengths[i]/self.__nPts[i]) for i in range(2)]
        #self.__wkt_string=wkt_string
        #self.__lon_name=lon_name
        #self.__lat_name=lat_name
        self.__data_name=data_name
        self.__maskval=maskval

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns the type of survey data (`GRAVITY` or `MAGNETIC`).
        """
        return self.__datatype

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Returns the survey data and an error indicator interpolated on the
        given ripley domain (see `DataSource.getSurveyData`).
        """
        nValues=self.__nPts
        # determine base location of this dataset within the domain
        first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim()==3:
            # place the data plane at measurement altitude within the domain
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues=nValues+[1]

        data=ripleycpp._readNcGrid(self.__filename, self.__data_name,
                ReducedFunction(domain), first, nValues, (), self.__maskval)
        # sigma is non-zero where valid (non-masked) data was read
        sigma=whereNonZero(data-self.__maskval)
        data=data*self.__scalefactor
        sigma=sigma * 2. * self.__scalefactor
        return data, sigma
461    
462    
463 caltinay 3958 ##############################################################################
class SourceFeature(object):
    """
    A feature adds a density distribution to (parts of) a domain of a synthetic
    data source, for example a layer of a specific rock type or a simulated
    ore body.
    """
    def getValue(self, x):
        """
        Returns the value for the area covered by mask. It can be constant
        or a data object with spatial dependency.

        Note: the ``x`` parameter was added to match the signature used by
        subclasses (e.g. `SmoothAnomaly`) and by callers which invoke
        ``getValue(x)``.

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature. That is,
        mask is non-zero where the value returned by `getValue` should be
        applied, zero elsewhere.

        This method must be implemented in subclasses.
        """
        raise NotImplementedError
483    
class SmoothAnomaly(SourceFeature):
    """
    A box-shaped feature whose value drops off smoothly from ``v_inner`` at
    the centre towards ``v_outer`` at the edges of the box.
    """
    def __init__(self, lx, ly, lz, x, y, depth, v_inner=None, v_outer=None):
        # box extents
        self.lx = lx
        self.ly = ly
        self.lz = lz
        # box centre position (horizontally) and burial depth
        self.x = x
        self.y = y
        self.depth = depth
        # inner/outer property values; value stays 0 if either is missing
        self.v_inner = v_inner
        self.v_outer = v_outer
        # lazily computed value distribution and mask
        self.value = None
        self.mask = None

    def getValue(self, x):
        # compute the value distribution once and cache it
        if self.value is None:
            if self.v_inner is None or self.v_outer is None:
                self.value = 0
            else:
                DIM = x.getDomain().getDim()
                # decay rate chosen from the inner/outer value ratio
                alpha = -log(abs(self.v_outer/self.v_inner))*4
                # Gaussian-like profile in horizontal and vertical direction
                shape = exp(-alpha*((x[0]-self.x)/self.lx)**2) \
                        * exp(-alpha*((x[DIM-1]+self.depth)/self.lz)**2)
                self.value = maximum(abs(self.v_outer), abs(self.v_inner*shape))
                if self.v_inner < 0:
                    self.value = -self.value

        return self.value

    def getMask(self, x):
        DIM = x.getDomain().getDim()
        z = x[DIM-1]
        # 1 inside the box spanning [depth-lz/2, depth+lz/2] vertically and
        # [x-lx/2, x+lx/2] horizontally, 0 outside
        m = whereNonNegative(z+self.depth+self.lz/2) * whereNonPositive(z+self.depth-self.lz/2) \
            * whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM > 2:
            # restrict in y direction as well for 3D domains
            m *= whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        self.mask = m
        return m
519 caltinay 3946
520 caltinay 3958 ##############################################################################
class SyntheticDataBase(DataSource):
    """
    Base class to define reference data based on a given property distribution
    (density or susceptibility). Data are collected from a square region of
    vertical extent `length` on a grid with `number_of_elements` cells in each
    direction.

    The synthetic data are constructed by solving the appropriate forward
    problem. Data can be sampled with an offset from the surface at z=0 or
    using the entire subsurface region.
    """
    def __init__(self, datatype,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param DIM: number of spatial dimension
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extend of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` sperical coordinates are used (ignored for now)
        :type spherical: ``Bool``
        """
        super(SyntheticDataBase,self).__init__()
        if not datatype in [self.GRAVITY,self.MAGNETIC]:
            raise ValueError("Invalid value for datatype parameter")
        self.DIM=DIM
        self.number_of_elements=number_of_elements
        self.length=length
        self.__datatype = datatype

        self.__spherical = spherical
        self.__full_knowledge= full_knowledge
        self.__data_offset=data_offset
        self.__B_b =None
        # this is for Cartesian (FIXME ?)
        # reorder the (B_r, B_lat, B_lon) components into the Cartesian
        # axis order used by the PDE below; signs flip as r points up
        if datatype == self.MAGNETIC:
            if self.DIM < 3:
                self.__B_b = np.array([-B_b[2], -B_b[0]])
            else:
                # NOTE(review): plain list here vs np.array in the 2D branch
                # -- presumably equivalent for the multiplications below
                self.__B_b = ([-B_b[1], -B_b[2], -B_b[0]])
        self.__origin = [0]*(DIM-1)
        self.__delta = [float(length)/number_of_elements]*(DIM-1)
        self.__nPts = [number_of_elements]*(DIM-1)
        # reference property distribution; created lazily by subclasses in
        # getReferenceProperty()
        self._reference_data=None

    def getDataExtents(self):
        """
        returns the lateral data extend of the data set
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        returns the data type
        """
        return self.__datatype

    def getSurveyData(self, domain, origin, number_of_elements, spacing):
        """
        returns the survey data placed on a given domain.

        :param domain: domain on which the data are to be placed
        :type domain: ``Domain``
        :param origin: origin of the domain
        :type origin: ``list`` of ``float``
        :param number_of_elements: number of elements (or cells) in each
                                   spatial direction used to span the domain
        :type number_of_elements: ``list`` of ``int``
        :param spacing: cell size in each spatial direction
        :type spacing: ``list`` of ``float``
        :return: observed gravity field or magnetic flux density for each cell
                 in the domain and for each cell an indicator 1/0 if the data
                 are valid or not.
        :rtype: pair of ``Scalar``
        """
        pde=LinearSinglePDE(domain)
        DIM=domain.getDim()
        x=domain.getX()
        # set the reference data

        k=self.getReferenceProperty(domain)
        # calculate the corresponding potential
        z=x[DIM-1]
        # fix the potential to zero at the top of the domain
        m_psi_ref=whereZero(z-sup(z))
        if self.getDataType()==DataSource.GRAVITY:
            pde.setValue(A=kronecker(domain), Y=-4*np.pi*U.Gravitational_Constant*self._reference_data, q=m_psi_ref)
        else:
            pde.setValue(A=kronecker(domain), X=self._reference_data*self.__B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        #pde.getSolverOptions().setTolerance(1e-13)
        psi_ref=pde.getSolution()
        del pde
        # the observed field is the (negative) gradient of the potential,
        # plus the anomaly contribution itself in the magnetic case
        if self.getDataType()==DataSource.GRAVITY:
            data = -grad(psi_ref, ReducedFunction(domain))
        else:
            data = self._reference_data*self.__B_b-grad(psi_ref, ReducedFunction(domain))

        x=ReducedFunction(domain).getX()
        if self.__full_knowledge:
            # data valid everywhere below the surface
            sigma = whereNegative(x[DIM-1])
        else:
            sigma=1.
            # limit mask to non-padding in horizontal area
            # NOTE(review): the second factor looks like it should bound the
            # interior region -- confirm intent of sup(x_i)+inf(x_i) here
            for i in range(DIM-1):
                x_i=x[i]
                sigma=sigma * wherePositive(x_i) * whereNegative(x_i-(sup(x_i)+inf(x_i)))
            # limit mask to one cell thickness at z=0
            z=x[DIM-1]
            # snap the data offset to the nearest cell boundary
            oo=int(self.__data_offset/spacing[DIM-1]+0.5)*spacing[DIM-1]
            sigma = sigma * whereNonNegative(z-oo) * whereNonPositive(z-oo-spacing[DIM-1])
        return data,sigma

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity/susceptibility anomaly data.

        :return: the density or susceptibility anomaly used to create the
                 survey data
        :note: it can be assumed that in the first call the ``domain``
               argument is present so the actual anomaly data can be created.
               In subsequent calls this may not be true.
        :note: method needs to be overwritten
        """
        raise NotImplementedError()
663 gross 4115
class SyntheticFeatureData(SyntheticDataBase):
    """
    uses a list of ``SourceFeature`` to define synthetic anomaly data
    """
    def __init__(self, datatype,
                 features,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param features: list of features. It is recommended that the features do entirely lay below surface.
        :type features: ``list`` of ``SourceFeature``
        :param DIM: spatial dimension
        :type DIM: 2 or 3
        :param number_of_elements: lateral number of elements in the region where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extend of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` sperical coordinates are used (ignored)
        :type spherical: ``Bool``
        """
        super(SyntheticFeatureData,self).__init__(
                datatype=datatype, DIM=DIM, number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge,
                spherical=spherical)
        self._features = features


    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        # use 'is None' rather than '== None': once created the attribute
        # holds an escript Data object for which '==' is elementwise, not an
        # identity test
        if self._reference_data is None:
            DIM=domain.getDim()
            x=domain.getX()
            k=0.
            # overlay the features in order; later features overwrite
            # earlier ones within their mask
            for f in self._features:
                m=f.getMask(x)
                k = k * (1-m) + f.getValue(x) * m
            self._reference_data= k
        return self._reference_data
720    
class SyntheticData(SyntheticDataBase):
    """
    defines synthetic gravity/magnetic data based on harmonic property anomaly

        rho = mean + amplitude * sin(n_depth * pi /depth * (z+depth_offset)) * sin(n_length * pi /length * x - shift )

    for all x and z<=0. for z>0 rho = 0.
    """
    def __init__(self, datatype,
                 n_length=1,
                 n_depth=1,
                 depth_offset=0.,
                 depth=None,
                 amplitude=None,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param n_length: number of oscillations in the anomaly data within the
                         observation region
        :type n_length: ``int``
        :param n_depth: number of oscillations in the anomaly data below surface
        :type n_depth: ``int``
        :param depth_offset: vertical offset of the anomaly
        :type depth_offset: ``float``
        :param depth: vertical extent in the anomaly data. If not present the
                      depth of the domain is used.
        :type depth: ``float``
        :param amplitude: data amplitude. Default value is 200 U.kg/U.m**3 for
                          gravity and 0.1 for magnetic data.
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude].
                    Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` sperical coordinates are used (ignored for now)
        :type spherical: ``Bool``
        """
        super(SyntheticData,self).__init__(
                datatype=datatype, DIM=DIM,
                number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge,
                spherical=spherical)
        self.__n_length = n_length
        self.__n_depth = n_depth
        self.depth = depth
        self.depth_offset = depth_offset
        # 'is None' instead of '== None' for a proper identity test
        if amplitude is None:
            # pick a sensible default amplitude for the data type
            if datatype == DataSource.GRAVITY:
                amplitude = 200 *U.kg/U.m**3
            else:
                amplitude = 0.1
        self.__amplitude = amplitude

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity anomaly data.
        """
        if self._reference_data is None:
            DIM=domain.getDim()
            x=domain.getX()
            # set the reference data
            z=x[DIM-1]
            dd=self.depth
            if dd is None: dd=inf(z)
            # normalized depth coordinate: 0 at the offset, 1 at depth dd
            z2=(z+self.depth_offset)/(self.depth_offset-dd)
            k=sin(self.__n_depth * np.pi * z2) * whereNonNegative(z2) * whereNonPositive(z2-1.) * self.__amplitude
            # 'range' instead of 'xrange' which no longer exists in Python 3
            for i in range(DIM-1):
                x_i=x[i]
                min_x=inf(x_i)
                max_x=sup(x_i)
                k*= sin(self.__n_length*np.pi*(x_i-min_x)/(max_x-min_x))
            self._reference_data= k
        return self._reference_data
811 gross 4106

  ViewVC Help
Powered by ViewVC 1.1.26