/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Annotation of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 4152 - (hide annotations)
Tue Jan 22 00:29:03 2013 UTC (6 years, 7 months ago) by caltinay
File MIME type: text/x-python
File size: 33217 byte(s)
Moved figures to subdirectory.

1 caltinay 3946
2 jfenwick 3981 ##############################################################################
3 caltinay 3946 #
4 caltinay 4145 # Copyright (c) 2003-2013 by University of Queensland
5 jfenwick 3981 # http://www.uq.edu.au
6 caltinay 3946 #
7     # Primary Business: Queensland, Australia
8     # Licensed under the Open Software License version 3.0
9     # http://www.opensource.org/licenses/osl-3.0.php
10     #
11 jfenwick 3981 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12     # Development since 2012 by School of Earth Sciences
13     #
14     ##############################################################################
15 caltinay 3946
16 caltinay 4007 """Data readers/providers for inversions"""
17    
18 caltinay 4145 __copyright__="""Copyright (c) 2003-2013 by University of Queensland
19 jfenwick 3981 http://www.uq.edu.au
20 caltinay 3946 Primary Business: Queensland, Australia"""
21     __license__="""Licensed under the Open Software License version 3.0
22     http://www.opensource.org/licenses/osl-3.0.php"""
23     __url__="https://launchpad.net/escript-finley"
24    
25 caltinay 4145 __all__ = ['simpleGeoMagneticFluxDensity', 'DataSource', 'ErMapperData', \
26     'SyntheticDataBase', 'SyntheticFeatureData', 'SyntheticData',
27     'SmoothAnomaly']
28 caltinay 3947
29 caltinay 3946 import logging
30     import numpy as np
31 caltinay 4060 from esys.escript import ReducedFunction
32 caltinay 4145 from esys.escript import unitsSI as U
33 caltinay 4005 from esys.escript.linearPDEs import LinearSinglePDE
34 caltinay 4007 from esys.escript.util import *
35 caltinay 4005 from esys.ripley import Brick, Rectangle, ripleycpp
36 caltinay 3971
try:
    from scipy.io.netcdf import netcdf_file
    # only export NetCdfData when scipy's netCDF support is available
    __all__ += ['NetCdfData']
except ImportError:
    # narrow from a bare 'except:' -- only an unavailable scipy/netcdf
    # module should silently disable the NetCdfData source
    pass
42 caltinay 3946
43 caltinay 3957 def LatLonToUTM(lon, lat, wkt_string=None):
44     """
45     Converts one or more longitude,latitude pairs to the corresponding x,y
46     coordinates in the Universal Transverse Mercator projection.
47 caltinay 4007
48 caltinay 4020 :note: If the ``pyproj`` module is not installed a warning is printed and
49 caltinay 4007 the input values are scaled by a constant and returned.
50     :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
51     not available to convert the string, then the input values are
52     assumed to be given in the Clarke 1866 projection.
53 caltinay 4009
54     :param lon: longitude value(s)
55 caltinay 4152 :type lon: ``float``, ``list``, ``tuple``, or ``numpy.array``
56 caltinay 4009 :param lat: latitude value(s)
57 caltinay 4152 :type lat: ``float``, ``list``, ``tuple``, or ``numpy.array``
58 caltinay 4009 :rtype: ``numpy.array``
59 caltinay 3957 """
60    
61     # not really optimal: if pyproj is not installed we return the input
62 caltinay 4005 # values scaled by a constant.
63 caltinay 3957 try:
64     import pyproj
65     except:
66     print("Warning, pyproj not available. Domain extents will be wrong")
67 caltinay 4009 return np.array(lon)*1000., np.array(lat)*1000.
68 caltinay 3957
69     # determine UTM zone from the input data
70 caltinay 3956 zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
71     try:
72     import osgeo.osr
73     srs = osgeo.osr.SpatialReference()
74     srs.ImportFromWkt(wkt_string)
75     p_src = pyproj.Proj(srs.ExportToProj4())
76     except:
77     p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
78 caltinay 3959 # we assume southern hemisphere here
79     p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
80 caltinay 3956 x,y=pyproj.transform(p_src, p_dest, lon, lat)
81     return x,y
82 caltinay 3947
def simpleGeoMagneticFluxDensity(latitude, longitude=0.):
    """
    Returns an approximation of the geomagnetic flux density B at the given
    `latitude` based on a simple dipole model. The parameter `longitude` is
    currently ignored.

    :rtype: ``tuple``
    """
    # colatitude in radians
    colat = (90-latitude)/180.*np.pi
    # surface field strength of the dipole at the equator
    equatorial_field = U.Mu_0 * U.Magnetic_Dipole_Moment_Earth / (4 * np.pi * U.R_Earth**3)
    # (radial, tangential, azimuthal) components of the dipole field
    return 2 * equatorial_field * cos(colat), equatorial_field * sin(colat), 0.
95 caltinay 4108
class DataSource(object):
    """
    Abstract base class for providers of survey data used by the inversion.

    Common functionality lives here; methods that must be overwritten by
    subclasses raise ``NotImplementedError``. This class assumes 2D data
    which is mapped to a slice of a 3D domain. For other setups override
    the methods as required.
    """

    # identifiers for the supported survey data types
    GRAVITY, MAGNETIC = list(range(2))

    def __init__(self):
        """
        Initializes the logger and sets default values.
        """
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)
        self.__subsampling_factor = 1

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDataType(self):
        """
        Returns the type of survey data managed by this source.
        Subclasses must return `GRAVITY` or `MAGNETIC` as appropriate.
        """
        raise NotImplementedError

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Called by the `DomainBuilder` to retrieve the survey data as `Data`
        objects on the given domain.

        Subclasses should return one or more `Data` objects with survey data
        interpolated on the given ripley domain. The exact return type
        depends on the type of data.

        :param domain: the escript domain to use
        :type domain: `esys.escript.Domain`
        :param origin: the origin coordinates of the domain
        :type origin: ``tuple`` or ``list``
        :param NE: the number of domain elements in each dimension
        :type NE: ``tuple`` or ``list``
        :param spacing: the cell sizes (node spacing) in the domain
        :type spacing: ``tuple`` or ``list``
        """
        raise NotImplementedError

    def setSubsamplingFactor(self, f):
        """
        Sets the data subsampling factor (default=1).

        The factor is applied in all dimensions. For example a 2D dataset
        with 300 x 150 data points will be reduced to 150 x 75 when a
        subsampling factor of 2 is used.
        This becomes important when adding data of varying resolution to
        a `DomainBuilder`.
        """
        self.__subsampling_factor = f

    def getSubsamplingFactor(self):
        """
        Returns the subsampling factor that was set via
        `setSubsamplingFactor` (see there).
        """
        return self.__subsampling_factor
172    
173 caltinay 3964 ##############################################################################
class ErMapperData(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, datatype, headerfile, datafile=None, altitude=0.):
        """
        :param datatype: type of data, must be `GRAVITY` or `MAGNETIC`
        :type datatype: ``int``
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :type headerfile: ``str``
        :param datafile: ER Mapper binary data file name. If not supplied the
                         name of the header file without '.ers' is assumed
        :type datafile: ``str``
        :param altitude: altitude of measurements above ground in meters
        :type altitude: ``float``
        """
        super(ErMapperData,self).__init__()
        # validate datatype up front, consistent with NetCdfData
        if not datatype in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for datatype parameter")
        self.__headerfile=headerfile
        if datafile is None:
            # strip the '.ers' suffix to get the binary data file name
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__altitude=altitude
        self.__datatype=datatype
        self.__readHeader()

    def __readHeader(self):
        """
        Parses the ER Mapper header file and populates the grid metadata
        (origin, spacing, number of points, mask value, scale factor).
        """
        self.logger.debug("Checking Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        # use a context manager so the header file handle is not leaked
        with open(self.__headerfile, 'r') as f:
            metadata = f.readlines()
        # locate the start of the DatasetHeader section
        start=-1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # flatten the nested 'X Begin'/'X End' structure into a dictionary
        # with dotted keys, e.g. 'RasterInfo.CellInfo.Xdimension'
        md_dict={}
        section=[]
        for i in range(start, len(metadata)):
            line=metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except (KeyError, ValueError):
            maskval = 99999

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except (KeyError, ValueError, RuntimeError):
            # RuntimeError is included so an unknown CoordinateType falls
            # back to (0,0) as before
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        # .get() avoids an uncaught KeyError when the Projection entry is
        # missing from the header
        if 'GEODETIC' in md_dict.get('CoordinateSpace.Projection', ''):
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except Exception:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
            op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolation issues
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin=[originX, originY]
        originY-=(NY-1)*spacingY
        self.__delta = [spacingX, spacingY]
        self.__maskval = maskval
        self.__nPts = [NX, NY]
        self.__origin = [originX, originY]
        if self.__datatype == self.GRAVITY:
            self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
            self.__scalefactor = 1e-6
        else:
            self.logger.info("Assuming magnetic data units are 'nT'.")
            self.__scalefactor = 1e-9

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns `GRAVITY` or `MAGNETIC` as passed to the constructor.
        """
        return self.__datatype

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Reads the binary grid into a `Data` object on the given ripley
        domain and returns it together with a validity indicator.
        """
        nValues=self.__nPts
        # determine base location of this dataset within the domain
        first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim()==3:
            # place the data plane at the measurement altitude
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues=nValues+[1]

        data=ripleycpp._readBinaryGrid(self.__datafile,
                ReducedFunction(domain),
                first, nValues, (), self.__maskval)
        # sigma is 1 where data are valid (different from the mask value)
        sigma = whereNonZero(data-self.__maskval)
        data = data*self.__scalefactor
        sigma = sigma * 2. * self.__scalefactor
        return data, sigma
322    
323    
324     ##############################################################################
class NetCdfData(DataSource):
    """
    Data Source for gridded netCDF data that use CF/COARDS conventions.
    """
    def __init__(self, datatype, filename, altitude=0.):
        """
        :param filename: file name for survey data in netCDF format
        :type filename: ``str``
        :param datatype: type of data, must be `GRAVITY` or `MAGNETIC`
        :type datatype: ``int``
        :param altitude: altitude of measurements in meters
        :type altitude: ``float``
        """
        super(NetCdfData,self).__init__()
        self.__filename=filename
        if not datatype in [self.GRAVITY,self.MAGNETIC]:
            raise ValueError("Invalid value for datatype parameter")
        self.__datatype=datatype
        self.__altitude=altitude
        self.__readMetadata()

    def __readMetadata(self):
        """
        Extracts grid extents, origin, spacing, data variable name and mask
        value from the netCDF file.
        """
        self.logger.debug("Checking Data Source: %s"%self.__filename)
        f=netcdf_file(self.__filename, 'r')
        # determine the number of data points in x direction
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        # ...and in y direction
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables; pop() removes them so the
        # data variable search below does not pick them up
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out data variable name
        data_name=None
        if len(f.variables)==1:
            # list() is required: in Python 3 dict views are not indexable,
            # so the original keys()[0] raised a TypeError
            data_name=list(f.variables.keys())[0]
        else:
            for n in f.variables.keys():
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    data_name=n
                    break
        if data_name is None:
            raise RuntimeError("Could not determine data variable")

        # try to determine value for unused data
        if hasattr(f.variables[data_name], 'missing_value'):
            maskval = float(f.variables[data_name].missing_value)
        elif hasattr(f.variables[data_name], '_FillValue'):
            maskval = float(f.variables[data_name]._FillValue)
        else:
            self.logger.debug("missing_value attribute not found, using default.")
            maskval = 99999

        # try to determine units of data - this is disabled for now
        #if hasattr(f.variables[data_name], 'units'):
        #   units=f.variables[data_name].units
        if self.__datatype == self.GRAVITY:
            self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
            self.__scalefactor = 1e-6
        else:
            self.logger.info("Assuming magnetic data units are 'nT'.")
            self.__scalefactor = 1e-9

        # see if there is a wkt string to convert coordinates
        try:
            wkt_string=f.variables[data_name].esri_pe_string
        except AttributeError:
            # narrowed from a bare 'except:' -- only a missing attribute
            # should result in no wkt string
            wkt_string=None

        # we don't trust actual_range & geospatial_lon_min/max since subset
        # data does not seem to have these fields updated.
        # Getting min/max from the arrays is obviously not very efficient but..
        #lon_range=longitude.actual_range
        #lat_range=latitude.actual_range
        #lon_range=[f.geospatial_lon_min,f.geospatial_lon_max]
        #lat_range=[f.geospatial_lat_min,f.geospatial_lat_max]
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        # values below 180 suggest geographic coordinates which need
        # projecting to UTM
        if lon_range[1]<180:
            lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0]]
        f.close()

        self.__nPts=[NX, NY]
        self.__origin=[lon_range[0],lat_range[0]]
        # we are rounding to avoid interpolation issues
        self.__delta=[np.round(lengths[i]/self.__nPts[i]) for i in range(2)]
        self.__data_name=data_name
        self.__maskval=maskval

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns `GRAVITY` or `MAGNETIC` as passed to the constructor.
        """
        return self.__datatype

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Reads the netCDF grid into a `Data` object on the given ripley
        domain and returns it together with a validity indicator.
        """
        nValues=self.__nPts
        # determine base location of this dataset within the domain
        first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim()==3:
            # place the data plane at the measurement altitude
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues=nValues+[1]

        data=ripleycpp._readNcGrid(self.__filename, self.__data_name,
                ReducedFunction(domain), first, nValues, (), self.__maskval)
        # sigma is 1 where data are valid (different from the mask value)
        sigma=whereNonZero(data-self.__maskval)
        data=data*self.__scalefactor
        sigma=sigma * 2. * self.__scalefactor
        return data, sigma
467    
468    
469 caltinay 3958 ##############################################################################
class SourceFeature(object):
    """
    A feature adds a density distribution to (parts of) a domain of a synthetic
    data source, for example a layer of a specific rock type or a simulated
    ore body.
    """
    def getValue(self, x):
        """
        Returns the value for the area covered by mask. It can be constant
        or a data object with spatial dependency.

        :param x: locations at which the value is evaluated (subclasses and
                  callers such as `SyntheticFeatureData` pass this argument,
                  so the base signature now declares it)
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature. That is,
        mask is non-zero where the value returned by `getValue` should be
        applied, zero elsewhere.
        """
        raise NotImplementedError
489    
class SmoothAnomaly(SourceFeature):
    """
    A box-shaped anomaly of size (lx, ly, lz) centred horizontally at (x, y)
    and buried at `depth`, whose value fades smoothly from `v_inner` in the
    centre towards `v_outer` at the fringes.
    """
    def __init__(self, lx, ly, lz, x, y, depth, v_inner=None, v_outer=None):
        """
        Stores geometry and value parameters; the value and mask are
        computed lazily on first use.
        """
        self.x = x
        self.y = y
        self.lx = lx
        self.ly = ly
        self.lz = lz
        self.depth = depth
        self.v_inner = v_inner
        self.v_outer = v_outer
        self.value = None
        self.mask = None

    def getValue(self, x):
        """
        Returns the anomaly value at locations `x` (cached after the first
        call).
        """
        if self.value is not None:
            return self.value
        if self.v_outer is None or self.v_inner is None:
            self.value = 0
            return self.value
        dim = x.getDomain().getDim()
        # decay rate chosen so the profile reaches v_outer at the box edge
        decay = -4 * log(abs(self.v_outer / self.v_inner))
        profile = exp(-decay * ((x[0] - self.x) / self.lx)**2)
        profile = profile * exp(-decay * ((x[dim-1] + self.depth) / self.lz)**2)
        self.value = maximum(abs(self.v_outer), abs(self.v_inner * profile))
        if self.v_inner < 0:
            self.value = -self.value
        return self.value

    def getMask(self, x):
        """
        Returns a mask that is 1 inside the anomaly's bounding box and 0
        elsewhere (cached in ``self.mask``).
        """
        dim = x.getDomain().getDim()
        z = x[dim-1]
        inside = whereNonNegative(z + self.depth + self.lz/2) \
               * whereNonPositive(z + self.depth - self.lz/2) \
               * whereNonNegative(x[0] - (self.x - self.lx/2)) \
               * whereNonPositive(x[0] - (self.x + self.lx/2))
        if dim > 2:
            inside = inside * whereNonNegative(x[1] - (self.y - self.ly/2)) \
                            * whereNonPositive(x[1] - (self.y + self.ly/2))
        self.mask = inside
        return inside
525 caltinay 3946
526 caltinay 3958 ##############################################################################
class SyntheticDataBase(DataSource):
    """
    Base class to define reference data based on a given property distribution
    (density or susceptibility). Data are collected from a square region of
    vertical extent `length` on a grid with `number_of_elements` cells in each
    direction.

    The synthetic data are constructed by solving the appropriate forward
    problem. Data can be sampled with an offset from the surface at z=0 or
    using the entire subsurface region.
    """
    def __init__(self, datatype,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param DIM: number of spatial dimensions
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latitude, B_longitude].
                    Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` spherical coordinates are used (ignored for now)
        :type spherical: ``Bool``
        """
        super(SyntheticDataBase,self).__init__()
        if not datatype in [self.GRAVITY,self.MAGNETIC]:
            raise ValueError("Invalid value for datatype parameter")
        self.DIM=DIM
        self.number_of_elements=number_of_elements
        self.length=length
        self.__datatype = datatype

        self.__spherical = spherical
        self.__full_knowledge= full_knowledge
        self.__data_offset=data_offset
        self.__B_b =None
        # this is for Cartesian (FIXME ?)
        # NOTE(review): B_b is indexed below without a None check, so magnetic
        # data apparently requires a valid B_b argument -- confirm with callers
        if datatype == self.MAGNETIC:
            if self.DIM < 3:
                # 2D case: components reordered/negated for the PDE below
                self.__B_b = np.array([-B_b[2], -B_b[0]])
            else:
                self.__B_b = ([-B_b[1], -B_b[2], -B_b[0]])
        self.__origin = [0]*(DIM-1)
        self.__delta = [float(length)/number_of_elements]*(DIM-1)
        self.__nPts = [number_of_elements]*(DIM-1)
        # reference property distribution, created lazily by subclasses'
        # getReferenceProperty() implementations
        self._reference_data=None

    def getDataExtents(self):
        """
        returns the lateral data extent of the data set as
        (origin, number_of_points, spacing)
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        returns the data type (`GRAVITY` or `MAGNETIC`)
        """
        return self.__datatype

    def getSurveyData(self, domain, origin, number_of_elements, spacing):
        """
        returns the survey data placed on a given domain.

        :param domain: domain on which the data are to be placed
        :type domain: ``Domain``
        :param origin: origin of the domain
        :type origin: ``list`` of ``float``
        :param number_of_elements: number of elements (or cells) in each
                                   spatial direction used to span the domain
        :type number_of_elements: ``list`` of ``int``
        :param spacing: cell size in each spatial direction
        :type spacing: ``list`` of ``float``
        :return: observed gravity field or magnetic flux density for each cell
                 in the domain and for each cell an indicator 1/0 if the data
                 are valid or not.
        :rtype: pair of ``Scalar``
        """
        pde=LinearSinglePDE(domain)
        DIM=domain.getDim()
        x=domain.getX()
        # set the reference data; the result is also cached in
        # self._reference_data which is used below (local k is otherwise
        # unused)
        k=self.getReferenceProperty(domain)
        # calculate the corresponding potential; fix psi=0 on the top surface
        z=x[DIM-1]
        m_psi_ref=whereZero(z-sup(z))
        if self.getDataType()==DataSource.GRAVITY:
            # Poisson equation for the gravitational potential
            pde.setValue(A=kronecker(domain), Y=-4*np.pi*U.Gravitational_Constant*self._reference_data, q=m_psi_ref)
        else:
            # magnetic potential driven by the susceptibility distribution
            pde.setValue(A=kronecker(domain), X=self._reference_data*self.__B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        #pde.getSolverOptions().setTolerance(1e-13)
        psi_ref=pde.getSolution()
        del pde
        # the observable is derived from the gradient of the potential
        if self.getDataType()==DataSource.GRAVITY:
            data = -grad(psi_ref, ReducedFunction(domain))
        else:
            data = self._reference_data*self.__B_b-grad(psi_ref, ReducedFunction(domain))

        x=ReducedFunction(domain).getX()
        if self.__full_knowledge:
            # all subsurface cells carry valid data
            sigma = whereNegative(x[DIM-1])
        else:
            sigma=1.
            # limit mask to non-padding in horizontal area
            for i in range(DIM-1):
                x_i=x[i]
                sigma=sigma * wherePositive(x_i) * whereNegative(x_i-(sup(x_i)+inf(x_i)))
            # limit mask to one cell thickness at z=0 (or at the requested
            # data offset above the surface)
            z=x[DIM-1]
            oo=int(self.__data_offset/spacing[DIM-1]+0.5)*spacing[DIM-1]
            sigma = sigma * whereNonNegative(z-oo) * whereNonPositive(z-oo-spacing[DIM-1])
        return data,sigma

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity/susceptibility anomaly data.

        :return: the density or susceptibility anomaly used to create the
                 survey data
        :note: it can be assumed that in the first call the ``domain``
               argument is present so the actual anomaly data can be created.
               In subsequent calls this may not be true.
        :note: method needs to be overwritten
        """
        raise NotImplementedError()
669 gross 4115
class SyntheticFeatureData(SyntheticDataBase):
    """
    Uses a list of ``SourceFeature`` objects to define synthetic anomaly data.
    """
    def __init__(self, datatype,
                 features,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param features: list of features. It is recommended that the features do entirely lay below surface.
        :type features: ``list`` of ``SourceFeature``
        :param DIM: spatial dimension
        :type DIM: 2 or 3
        :param number_of_elements: lateral number of elements in the region where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extend of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` sperical coordinates are used (ignored)
        :type spherical: ``Bool``
        """
        super(SyntheticFeatureData,self).__init__(
                datatype=datatype, DIM=DIM,
                number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge,
                spherical=spherical)
        self._features = features

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        # use 'is None' -- '== None' is unreliable on escript Data objects
        # which overload the equality operator
        if self._reference_data is None:
            DIM=domain.getDim()
            x=domain.getX()
            k=0.
            # overlay the features in order; later features override earlier
            # ones wherever their masks overlap
            for f in self._features:
                m=f.getMask(x)
                k = k * (1-m) + f.getValue(x) * m
            self._reference_data= k
        return self._reference_data
726    
class SyntheticData(SyntheticDataBase):
    """
    Defines synthetic gravity/magnetic data based on a harmonic property
    anomaly

        rho = mean + amplitude * sin(n_depth * pi /depth * (z+depth_offset)) * sin(n_length * pi /length * x - shift )

    for all x and z<=0. for z>0 rho = 0.
    """
    def __init__(self, datatype,
                 n_length=1,
                 n_depth=1,
                 depth_offset=0.,
                 depth=None,
                 amplitude=None,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param datatype: data type indicator
        :type datatype: ``DataSource.GRAVITY``, ``DataSource.MAGNETIC``
        :param n_length: number of oscillations in the anomaly data within the
                         observation region
        :type n_length: ``int``
        :param n_depth: number of oscillations in the anomaly data below surface
        :param depth_offset: vertical offset of the anomaly
        :type depth_offset: ``float``
        :param depth: vertical extent in the anomaly data. If not present the
                      depth of the domain is used.
        :type depth: ``float``
        :param amplitude: data amplitude. Default value is 200 U.kg/U.m**3 for
                          gravity and 0.1 for magnetic data.
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude].
                    Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` sperical coordinates are used (ignored for now)
        :type spherical: ``Bool``
        """
        super(SyntheticData,self).__init__(
                datatype=datatype, DIM=DIM,
                number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge,
                spherical=spherical)
        self.__n_length = n_length
        self.__n_depth = n_depth
        self.depth = depth
        self.depth_offset=depth_offset
        # 'is None' instead of '== None' (identity check is the correct
        # idiom and safe for any amplitude type)
        if amplitude is None:
            if datatype == DataSource.GRAVITY:
                amplitude = 200 *U.kg/U.m**3
            else:
                amplitude = 0.1
        self.__amplitude = amplitude

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference density Data object that was used to generate
        the gravity anomaly data.
        """
        if self._reference_data is None:
            DIM=domain.getDim()
            x=domain.getX()
            # set the reference data
            z=x[DIM-1]
            dd=self.depth
            # default to the full domain depth
            if dd is None: dd=inf(z)
            # normalized depth coordinate: 0 at the offset surface, 1 at depth
            z2=(z+self.depth_offset)/(self.depth_offset-dd)
            k=sin(self.__n_depth * np.pi * z2) * whereNonNegative(z2) * whereNonPositive(z2-1.) * self.__amplitude
            # range() replaces the Python-2-only xrange()
            for i in range(DIM-1):
                x_i=x[i]
                min_x=inf(x_i)
                max_x=sup(x_i)
                k*= sin(self.__n_length*np.pi*(x_i-min_x)/(max_x-min_x))
            self._reference_data= k
        return self._reference_data
817 gross 4106

  ViewVC Help
Powered by ViewVC 1.1.26