/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Annotation of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 5288 - (hide annotations)
Tue Dec 2 23:18:40 2014 UTC (4 years, 10 months ago) by sshaw
File MIME type: text/x-python
File size: 58061 byte(s)
fixing tests for cases where required domains not built
1 caltinay 3946
2 jfenwick 3981 ##############################################################################
3 caltinay 3946 #
4 jfenwick 4657 # Copyright (c) 2003-2014 by University of Queensland
5 jfenwick 3981 # http://www.uq.edu.au
6 caltinay 3946 #
7     # Primary Business: Queensland, Australia
8     # Licensed under the Open Software License version 3.0
9     # http://www.opensource.org/licenses/osl-3.0.php
10     #
11 jfenwick 3981 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12 jfenwick 4657 # Development 2012-2013 by School of Earth Sciences
13     # Development from 2014 by Centre for Geoscience Computing (GeoComp)
14 jfenwick 3981 #
15     ##############################################################################
16 caltinay 3946
17 caltinay 4007 """Data readers/providers for inversions"""
18    
19 jfenwick 4657 __copyright__="""Copyright (c) 2003-2014 by University of Queensland
20 jfenwick 3981 http://www.uq.edu.au
21 caltinay 3946 Primary Business: Queensland, Australia"""
22     __license__="""Licensed under the Open Software License version 3.0
23     http://www.opensource.org/licenses/osl-3.0.php"""
24     __url__="https://launchpad.net/escript-finley"
25    
26 caltinay 4390 __all__ = ['DataSource', 'ErMapperData', 'NumpyData', \
27 caltinay 4145 'SyntheticDataBase', 'SyntheticFeatureData', 'SyntheticData',
28 gross 4700 'SmoothAnomaly', 'SeismicSource']
29 caltinay 3947
30 caltinay 3946 import logging
31     import numpy as np
32 caltinay 4362 import tempfile
33 gross 4729 from esys.escript import ReducedFunction, FunctionOnBoundary, Scalar
34 caltinay 4145 from esys.escript import unitsSI as U
35 caltinay 4005 from esys.escript.linearPDEs import LinearSinglePDE
36 caltinay 4007 from esys.escript.util import *
37 gross 4373 from .coordinates import ReferenceSystem, CartesianReferenceSystem
38 caltinay 3971
39 sshaw 5288 HAS_RIPLEY = True
40 caltinay 3947 try:
41 sshaw 5288 from esys.ripley import *
42     except ImportError as e:
43     HAS_RIPLEY = False
44    
45     try:
46 caltinay 3947 from scipy.io.netcdf import netcdf_file
47 caltinay 4060 __all__ += ['NetCdfData']
48 caltinay 3947 except:
49     pass
50 caltinay 3946
51 caltinay 4516 try:
52 caltinay 4616 import pyproj
53     HAVE_PYPROJ=True
54     except:
55     HAVE_PYPROJ=False
56    
57     try:
58 caltinay 4516 import osgeo.osr
59     HAVE_GDAL=True
60     except ImportError:
61     HAVE_GDAL=False
62    
def getUTMZone(lon, lat, wkt_string=None):
    """
    Determines the UTM zone for the given coordinates.

    If the coordinates appear to be already projected (any longitude value
    beyond +/-360 or latitude beyond +/-180) the zone is extracted from
    `wkt_string` using ``gdal`` if available, otherwise 0 is returned.
    For geographic (lon/lat) input the zone is computed from the median
    longitude of the data.

    :param lon: longitude value(s)
    :type lon: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param lat: latitude value(s)
    :type lat: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param wkt_string: Well-known text (WKT) string describing the coordinate
                       system used, if any
    :type wkt_string: ``str``
    :rtype: ``int``
    """

    logger = logging.getLogger('inv.datasources.getUTMZone')
    zone = 0

    nplon=np.array(lon)
    nplat=np.array(lat)
    if np.abs(nplon).max()>360.0 or np.abs(nplat).max()>180.0:
        # values are outside the lon/lat range so input appears to be
        # projected already - try to extract the zone from the WKT string
        if HAVE_GDAL and (wkt_string is not None):
            srs = osgeo.osr.SpatialReference()
            result=srs.ImportFromWkt(wkt_string)
            if result==0:
                zone = srs.GetUTMZone()
    else:
        # determine UTM zone from the input data using the standard
        # 6-degree zones numbered from 1 starting at 180W; the median
        # longitude decides when data spans more than one zone
        zone = int(np.median((np.floor((nplon + 180)/6) + 1) % 60))

    logger.debug("Determined UTM zone %d."%zone)
    return zone
def LatLonToUTM(lon, lat, wkt_string=None):
    """
    Converts one or more longitude,latitude pairs to the corresponding x,y
    coordinates in the Universal Transverse Mercator projection.

    :note: The ``pyproj`` module is required unless the input coordinates are
           determined to be already projected. If it is not found an exception
           is raised.
    :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
           not available to convert the string, then the input values are
           assumed to be using the Clarke 1866 ellipsoid.

    :param lon: longitude value(s)
    :type lon: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param lat: latitude value(s)
    :type lat: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param wkt_string: Well-known text (WKT) string describing the coordinate
                       system used. The ``gdal`` module is used to convert
                       the string to the corresponding Proj4 string.
    :type wkt_string: ``str``
    :rtype: ``tuple`` of (x, y, UTM zone)
    """

    logger = logging.getLogger('inv.datasources.LatLonToUTM')

    nplon=np.array(lon)
    nplat=np.array(lat)
    zone = getUTMZone(nplon, nplat, wkt_string)

    # values outside the valid lon/lat range indicate the input is already
    # projected, so pass it through unchanged
    if np.abs(nplon).max()>360.0 or np.abs(nplat).max()>180.0:
        logger.debug('Coordinates appear to be projected. Passing through.')
        return lon,lat,zone

    logger.debug('Need to project coordinates.')
    if not HAVE_PYPROJ:
        logger.error("Cannot import pyproj! Exiting.")
        raise ImportError("In order to perform coordinate transformations on "
            "the data you are using the 'pyproj' Python module is required but "
            "was not found. Please install the module and try again.")

    # determine the source projection: prefer the WKT string (converted to
    # a Proj4 string via gdal), fall back to Clarke 1866 lon/lat otherwise
    p_src=None
    if HAVE_GDAL and (wkt_string is not None) and len(wkt_string)>0:
        srs = osgeo.osr.SpatialReference()
        result=srs.ImportFromWkt(wkt_string)
        try:
            p_src = pyproj.Proj(srs.ExportToProj4())
        except RuntimeError as e:
            logger.warning('pyproj returned exception: %s [wkt=%s]'%(e,wkt_string))

    if p_src is None:
        if HAVE_GDAL:
            reason="no wkt string provided."
        else:
            reason="the gdal python module not available."
        logger.warning("Assuming lon/lat coordinates on Clarke 1866 ellipsoid since "+reason)
        p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')

    # check for hemisphere (southern hemisphere UTM needs the +south flag)
    if np.median(nplat) < 0.:
        south='+south '
    else:
        south=''
    p_dest = pyproj.Proj('+proj=utm +zone=%d %s+units=m +ellps=WGS84'%(zone,south))
    x,y=pyproj.transform(p_src, p_dest, lon, lat)
    return x,y,zone
150 caltinay 3947
class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    This is an abstract base class that implements common functionality.
    Methods to be overwritten by subclasses are marked as such.
    This class assumes 2D data which is mapped to a slice of a 3D domain.
    For other setups override the methods as required.
    """

    # enumeration of supported survey data types
    GRAVITY, MAGNETIC, ACOUSTIC, MT = list(range(4))

    def __init__(self, reference_system=None, tags=None):
        """
        Constructor. Sets some defaults and initializes logger.

        :param reference_system: the reference coordinate system
        :type reference_system: ``None`` or `ReferenceSystem`
        :param tags: a list of tags associated with the data set.
        :type tags: ``list`` of almost any type (typically `str`)
        """
        # use a None sentinel instead of a mutable default argument so the
        # tag list is never shared between instances
        if tags is None:
            tags = []
        elif not isinstance(tags, list):
            raise ValueError("tags argument must be a list.")
        self.__tags=tags
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)
        self.__subsampling_factor=1
        if not reference_system:
            self.__reference_system = CartesianReferenceSystem()
        else:
            self.__reference_system = reference_system

        # vertical scale: meters for Cartesian coordinates, otherwise the
        # reciprocal of the reference system's height unit
        if self.__reference_system.isCartesian():
            self.__v_scale=1.
        else:
            self.__v_scale=1./self.getReferenceSystem().getHeightUnit()

    def getTags(self):
        """
        returns the list of tags

        :rtype: ``list``
        """
        return self.__tags

    def hasTag(self, tag):
        """
        returns true if the data set has tag ``tag``

        :rtype: ``bool``
        """
        return tag in self.__tags

    def getReferenceSystem(self):
        """
        returns the reference coordinate system

        :rtype: `ReferenceSystem`
        """
        return self.__reference_system

    def getHeightScale(self):
        """
        returns the height scale factor to convert from meters to the
        appropriate units of the reference system used.

        :rtype: ``float``
        """
        return self.__v_scale

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDataType(self):
        """
        Returns the type of survey data managed by this source.
        Subclasses must return `GRAVITY` or `MAGNETIC` or `ACOUSTIC` as appropriate.
        """
        raise NotImplementedError

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        This method is called by the `DomainBuilder` to retrieve the survey
        data as `Data` objects on the given domain.

        Subclasses should return one or more `Data` objects with survey data
        interpolated on the given `escript` domain. The exact return type
        depends on the type of data.

        :param domain: the escript domain to use
        :type domain: `esys.escript.Domain`
        :param origin: the origin coordinates of the domain
        :type origin: ``tuple`` or ``list``
        :param NE: the number of domain elements in each dimension
        :type NE: ``tuple`` or ``list``
        :param spacing: the cell sizes (node spacing) in the domain
        :type spacing: ``tuple`` or ``list``
        """
        raise NotImplementedError

    def getUtmZone(self):
        """
        All data source coordinates are converted to UTM (Universal Transverse
        Mercator) in order to have useful domain extents. Subclasses should
        implement this method and return the UTM zone number of the projected
        coordinates.
        """
        raise NotImplementedError

    def setSubsamplingFactor(self, f):
        """
        Sets the data subsampling factor (default=1).

        The factor is applied in all dimensions. For example a 2D dataset
        with 300 x 150 data points will be reduced to 150 x 75 when a
        subsampling factor of 2 is used.
        This becomes important when adding data of varying resolution to
        a `DomainBuilder`.
        """
        self.__subsampling_factor=f

    def getSubsamplingFactor(self):
        """
        Returns the subsampling factor that was set via `setSubsamplingFactor`
        (see there).
        """
        return self.__subsampling_factor
286 caltinay 3946
287    
288 caltinay 3964 ##############################################################################
class ErMapperData(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, data_type, headerfile, datafile=None, altitude=0.,
                 error=None, scale_factor=None, null_value=None,
                 reference_system=None):
        """
        :param data_type: type of data, must be `GRAVITY` or `MAGNETIC`
        :type data_type: ``int``
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :type headerfile: ``str``
        :param datafile: ER Mapper binary data file name. If not supplied the
                         name of the header file without '.ers' is assumed
        :type datafile: ``str``
        :param altitude: altitude of measurements above ground in meters
        :type altitude: ``float``
        :param error: constant value to use for the data uncertainties.
                      If a value is supplied, it is scaled by the same factor
                      as the measurements. If not provided the error is
                      assumed to be 2 units for all measurements (i.e. 0.2
                      mGal and 2 nT for gravity and magnetic, respectively)
        :type error: ``float``
        :param scale_factor: the measurements and error values are scaled by
                             this factor. By default, gravity data is assumed
                             to be given in 1e-6 m/s^2 (0.1 mGal), while
                             magnetic data is assumed to be in 1e-9 T (1 nT).
        :type scale_factor: ``float``
        :param null_value: value that is used in the file to mark undefined
                           areas. This information is usually included in the
                           file.
        :type null_value: ``float``
        :param reference_system: reference coordinate system to be used.
                                 For a Cartesian reference (default) the
                                 appropriate UTM transformation is applied.
        :type reference_system: `ReferenceSystem`
        :note: consistence in the reference coordinate system and the reference
               coordinate system used in the data source is not checked.
        """
        super(ErMapperData, self).__init__(reference_system, [ headerfile ] )
        self.__headerfile=headerfile
        if datafile is None:
            # default binary file name: header name without the '.ers' suffix
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__altitude=altitude
        self.__data_type=data_type
        self.__utm_zone = None
        self.__scale_factor = scale_factor
        self.__null_value = null_value
        self.__error_value = error
        self.__readHeader()

    def __readHeader(self):
        """
        Parses the ER Mapper header file and populates grid origin, spacing,
        number of points, cell type, null value, scale factor and error value.
        Raises RuntimeError if the header is invalid or uses unsupported
        features.
        """
        self.logger.debug("Checking Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        # use a context manager so the header file handle is always closed
        with open(self.__headerfile, 'r') as f:
            metadata=f.readlines()
        start=-1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ER Mapper header file ("DatasetHeader" not found)')

        # parse header file filling dictionary of found values
        # (keys are dotted paths of the nested Begin/End sections)
        md_dict={}
        section=[]
        for i in range(start, len(metadata)):
            line=metadata[i].strip()
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        # check that the data format/type is supported
        try:
            if md_dict['ByteOrder'] != 'LSBFirst':
                raise RuntimeError('Unsupported byte order '+md_dict['ByteOrder'])
        except KeyError:
            self.logger.warning("Byte order not specified. Assuming LSB first.")

        try:
            if md_dict['DataType'] != 'Raster':
                raise RuntimeError('Unsupported data type '+md_dict['DataType'])
        except KeyError:
            self.logger.warning("Data type not specified. Assuming raster data.")

        try:
            if md_dict['RasterInfo.CellType'] == 'IEEE4ByteReal':
                self.__celltype = DATATYPE_FLOAT32
            elif md_dict['RasterInfo.CellType'] == 'IEEE8ByteReal':
                self.__celltype = DATATYPE_FLOAT64
            elif md_dict['RasterInfo.CellType'] == 'Signed32BitInteger':
                self.__celltype = DATATYPE_INT32
            else:
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warning("Cell type not specified. Assuming IEEE4ByteReal.")
            self.__celltype = DATATYPE_FLOAT32

        try:
            fileOffset = int(md_dict['HeaderOffset'])
        except:
            fileOffset = 0
        if fileOffset > 0:
            raise RuntimeError("ER Mapper data with header offset >0 not supported.")

        # now extract required information
        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except:
            raise RuntimeError("Could not determine extents of data")

        ### mask/null value
        # note that NaN is always filtered out in ripley
        if self.__null_value is None:
            try:
                self.__null_value = float(md_dict['RasterInfo.NullCellValue'])
            except:
                self.logger.debug("Could not determine null value, using default.")
                self.__null_value = 99999
        elif not isinstance(self.__null_value,float) and not isinstance(self.__null_value,int):
            raise TypeError("Invalid type of null_value parameter")

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except:
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except:
            self.logger.warning("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        # data sets have origin in top-left corner so y runs top-down and
        # we need to flip accordingly
        originY-=NY*spacingY

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=str(ds.GetProjection())
            except:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warning('GDAL not available or file read error, assuming GDA94 data')
            if self.getReferenceSystem().isCartesian():
                originX_UTM,originY_UTM,zone = LatLonToUTM(originX, originY, wkt)
                op1X,op1Y,_ = LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
                # we are rounding to avoid interpolation issues
                spacingX = np.round(op1X-originX_UTM)
                spacingY = np.round(op1Y-originY_UTM)
                originX = np.round(originX_UTM)
                originY = np.round(originY_UTM)
                self.__utm_zone = zone
            else:
                op1X, op1Y = originX+spacingX, originY+spacingY
                spacingX = np.round(op1X-originX,5)
                spacingY = np.round(op1Y-originY,5)
                originX = np.round(originX,5)
                originY = np.round(originY,5)

        self.__dataorigin=[originX, originY]
        self.__delta = [spacingX, spacingY]
        self.__nPts = [NX, NY]
        self.__origin = [originX, originY]
        ### scale factor
        if self.__scale_factor is None:
            if self.__data_type == self.GRAVITY:
                self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
                self.__scale_factor = 1e-6
            else:
                self.logger.info("Assuming magnetic data units are 'nT'.")
                self.__scale_factor = 1e-9

        ### error value
        if self.__error_value is None:
            self.__error_value = 2.
        elif not isinstance(self.__error_value,float) and not isinstance(self.__error_value,int):
            raise TypeError("Invalid type of error parameter")


    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        returns the survey data type (`GRAVITY` or `MAGNETIC`) as passed to
        the constructor
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Reads the binary grid file and returns the measurements and
        uncertainties as `Data` objects on the given ripley domain
        (see `DataSource.getSurveyData`).
        """
        # guard against a missing ripley module, consistent with
        # NetCdfData.getSurveyData (readBinaryGrid comes from ripley)
        if not HAS_RIPLEY:
            raise RuntimeError("Ripley module not available for reading")
        FS = ReducedFunction(domain)
        nValues=self.__nPts
        # determine base location of this dataset within the domain
        first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        # determine the resolution difference between domain and data.
        # If domain has twice the resolution we can double up the data etc.
        multiplier=[int(round(self.__delta[i]/spacing[i])) for i in range(len(self.__nPts))]
        if domain.getDim()==3:
            first.append(int((self.getHeightScale()*self.__altitude-origin[2])/spacing[2]))
            multiplier=multiplier+[1]
            nValues=nValues+[1]

        reverse = [0]*domain.getDim()
        byteorder=BYTEORDER_NATIVE
        self.logger.debug("calling readBinaryGrid with first=%s, nValues=%s, multiplier=%s, reverse=%s"%(str(first),str(nValues),str(multiplier),str(reverse)))
        data = readBinaryGrid(self.__datafile, FS, shape=(),
                fill=self.__null_value, byteOrder=byteorder,
                dataType=self.__celltype, first=first, numValues=nValues,
                multiplier=multiplier, reverse=reverse)
        # zero uncertainty at null-value (undefined) locations
        sigma = self.__error_value * whereNonZero(data-self.__null_value)

        data = data * self.__scale_factor
        sigma = sigma * self.__scale_factor
        return data, sigma

    def getUtmZone(self):
        """
        returns the UTM zone determined during the coordinate conversion,
        or ``None`` if no conversion took place
        """
        return self.__utm_zone
529 caltinay 4060
530 caltinay 4201
531 caltinay 4060 ##############################################################################
532     class NetCdfData(DataSource):
533     """
534     Data Source for gridded netCDF data that use CF/COARDS conventions.
535     """
536 caltinay 4197 def __init__(self, data_type, filename, altitude=0., data_variable=None,
537 gross 4373 error=None, scale_factor=None, null_value=None, reference_system=None):
538 caltinay 3964 """
539 caltinay 4060 :param filename: file name for survey data in netCDF format
540     :type filename: ``str``
541 caltinay 4197 :param data_type: type of data, must be `GRAVITY` or `MAGNETIC`
542     :type data_type: ``int``
543 caltinay 4060 :param altitude: altitude of measurements in meters
544 jfenwick 4080 :type altitude: ``float``
545 caltinay 4197 :param data_variable: name of the netCDF variable that holds the data.
546     If not provided an attempt is made to determine
547     the variable and an exception thrown on failure.
548     :type data_variable: ``str``
549     :param error: either the name of the netCDF variable that holds the
550     uncertainties of the measurements or a constant value
551     to use for the uncertainties. If a constant value is
552     supplied, it is scaled by the same factor as the
553     measurements. If not provided the error is assumed to
554     be 2 units for all measurements (i.e. 0.2 mGal and 2 nT
555     for gravity and magnetic, respectively)
556     :type error: ``str`` or ``float``
557     :param scale_factor: the measurements and error values are scaled by
558     this factor. By default, gravity data is assumed
559     to be given in 1e-6 m/s^2 (0.1 mGal), while
560     magnetic data is assumed to be in 1e-9 T (1 nT).
561     :type scale_factor: ``float``
562     :param null_value: value that is used in the file to mark undefined
563     areas. This information is usually included in the
564     file.
565     :type null_value: ``float``
566 caltinay 4618 :param reference_system: reference coordinate system to be used.
567     For a Cartesian reference (default) the
568     appropriate UTM transformation is applied.
569 gross 4373 :type reference_system: `ReferenceSystem`
570 caltinay 4618 :note: it is the responsibility of the caller to ensure all data
571     sources and the domain builder use the same reference system.
572 caltinay 3964 """
573 gross 4697 super(NetCdfData,self).__init__(reference_system, [filename])
574 caltinay 4060 self.__filename=filename
575 caltinay 4197 if not data_type in [self.GRAVITY,self.MAGNETIC]:
576 caltinay 4201 raise ValueError("Invalid value for data_type parameter")
577 caltinay 4197 self.__data_type = data_type
578     self.__altitude = altitude
579     self.__data_name = data_variable
580     self.__scale_factor = scale_factor
581     self.__null_value = null_value
582 caltinay 4992 self.__utm_zone = None
583 caltinay 4197 self.__readMetadata(error)
584 caltinay 3964
585 caltinay 4197 def __readMetadata(self, error):
586 caltinay 4060 self.logger.debug("Checking Data Source: %s"%self.__filename)
587     f=netcdf_file(self.__filename, 'r')
588 caltinay 4197
589     ### longitude- / X-dimension and variable
590 caltinay 4060 NX=0
591     for n in ['lon','longitude','x']:
592     if n in f.dimensions:
593     NX=f.dimensions[n]
594 caltinay 4197 lon_name=n
595 caltinay 4060 break
596     if NX==0:
597     raise RuntimeError("Could not determine extents of data")
598 caltinay 4197
599     # CF/COARDS states that coordinate variables have the same name as
600     # the dimensions
601     if not lon_name in f.variables:
602     raise RuntimeError("Could not determine longitude variable")
603     longitude=f.variables.pop(lon_name)
604    
605     ### latitude- / Y-dimension and variable
606 caltinay 4060 NY=0
607     for n in ['lat','latitude','y']:
608     if n in f.dimensions:
609     NY=f.dimensions[n]
610 caltinay 4197 lat_name=n
611 caltinay 4060 break
612     if NY==0:
613     raise RuntimeError("Could not determine extents of data")
614 caltinay 4197 if not lat_name in f.variables:
615 caltinay 4060 raise RuntimeError("Could not determine latitude variable")
616 caltinay 4197 latitude=f.variables.pop(lat_name)
617 caltinay 3958
618 caltinay 4197 ### data variable
619     if self.__data_name is not None:
620     try:
621     dims = f.variables[self.__data_name].dimensions
622     if not ((lat_name in dims) and (lon_name in dims)):
623     raise ValueError("Invalid data variable name supplied")
624     except KeyError:
625     raise ValueError("Invalid data variable name supplied")
626 caltinay 4060 else:
627     for n in f.variables.keys():
628     dims=f.variables[n].dimensions
629     if (lat_name in dims) and (lon_name in dims):
630 caltinay 4197 self.__data_name=n
631 caltinay 4060 break
632 caltinay 4197 if self.__data_name is None:
633 caltinay 4060 raise RuntimeError("Could not determine data variable")
634    
635 caltinay 4197 datavar = f.variables[self.__data_name]
636    
637     ### error value/variable
638     self.__error_name = None
639     if isinstance(error,str):
640     try:
641     dims = f.variables[error].dimensions
642     if not ((lat_name in dims) and (lon_name in dims)):
643     raise ValueError("Invalid error variable name supplied")
644     except KeyError:
645     raise ValueError("Invalid error variable name supplied")
646     self.__error_name = error
647     elif isinstance(error,float) or isinstance(error,int):
648     self.__error_value = float(error)
649     elif error is None:
650     self.__error_value = 2.
651 caltinay 4060 else:
652 caltinay 4197 raise TypeError("Invalid type of error parameter")
653 caltinay 4060
654 caltinay 4197 ### mask/null value
655     # note that NaN is always filtered out in ripley
656     if self.__null_value is None:
657     if hasattr(datavar, 'missing_value'):
658     self.__null_value = float(datavar.missing_value)
659     elif hasattr(datavar, '_FillValue'):
660     self.__null_value = float(datavar._FillValue)
661     else:
662     self.logger.debug("Could not determine null value, using default.")
663     self.__null_value = 99999
664 caltinay 4227 elif not isinstance(self.__null_value,float) and not isinstance(self.__null_value,int):
665     raise TypeError("Invalid type of null_value parameter")
666 caltinay 4197
667     # try to determine units of data - this is disabled until we obtain a
668     # file with valid information
669 caltinay 4060 #if hasattr(f.variables[data_name], 'units'):
670     # units=f.variables[data_name].units
671    
672 caltinay 4197 ### scale factor
673     if self.__scale_factor is None:
674     if self.__data_type == self.GRAVITY:
675     self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
676     self.__scale_factor = 1e-6
677     else:
678     self.logger.info("Assuming magnetic data units are 'nT'.")
679     self.__scale_factor = 1e-9
680    
681 caltinay 4201 # see if there is a WKT string to convert coordinates
682 caltinay 4060 try:
683 caltinay 4658 wkt_string=str(datavar.esri_pe_string)
684 caltinay 4516 self.logger.debug("wkt_string is: %s"%wkt_string)
685 caltinay 4060 except:
686     wkt_string=None
687    
688 caltinay 4516 # CF GDAL output: see if there is a grid_mapping attribute which
689     # contains the name of a dummy variable that holds information about
690     # mapping. GDAL puts the WKT string into spatial_ref:
691     if wkt_string is None:
692     try:
693     mapvar=f.variables[datavar.grid_mapping]
694 caltinay 4658 wkt_string=str(mapvar.spatial_ref)
695 caltinay 4516 self.logger.debug("wkt_string is: %s"%wkt_string)
696     except:
697     self.logger.debug("no wkt_string found!")
698    
699 caltinay 4197 # actual_range & geospatial_lon_min/max do not always contain correct
700     # values so we have to obtain the min/max in a less efficient way:
701 caltinay 4060 lon_range=longitude.data.min(),longitude.data.max()
702     lat_range=latitude.data.min(),latitude.data.max()
703 gross 4373 if self.getReferenceSystem().isCartesian():
704 caltinay 4992 lon_range,lat_range,zone=LatLonToUTM(lon_range, lat_range, wkt_string)
705     self.__utm_zone = zone
706 caltinay 4390
707 caltinay 4060 lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0]]
708    
709 caltinay 4618 # see if lat or lon is stored in reverse order to domain conventions
710     self.__reverse=[False,False]
711     d=longitude.data
712     if d[0]>d[-1]:
713     self.__reverse[0]=True
714     d=latitude.data
715     if d[0]>d[-1]:
716     self.__reverse[1]=True
717 caltinay 4060 self.__nPts=[NX, NY]
718     self.__origin=[lon_range[0],lat_range[0]]
719     # we are rounding to avoid interpolation issues
720 gross 4373 if self.getReferenceSystem().isCartesian():
721 caltinay 4394 # rounding will give us about meter-accuracy with UTM coordinates
722     r=0
723 caltinay 4390 else:
724 caltinay 4394 # this should give us about meter-accuracy with lat/lon coords
725     r=5
726 gross 4373 self.__delta=[np.round(lengths[i]/self.__nPts[i],r) for i in range(2)]
727 caltinay 4939 f.close()
728 caltinay 4060
729     def getDataExtents(self):
730     """
731     returns ( (x0, y0), (nx, ny), (dx, dy) )
732     """
733     return (list(self.__origin), list(self.__nPts), list(self.__delta))
734    
    def getDataType(self):
        """
        Returns the type of survey data held by this source (one of the
        `DataSource` data type constants, set in the constructor).
        """
        return self.__data_type
737 caltinay 4060
738     def getSurveyData(self, domain, origin, NE, spacing):
739 sshaw 5288 if not HAS_RIPLEY:
740     raise RuntimeError("Ripley module not available for reading")
741 caltinay 4197 FS=ReducedFunction(domain)
742 caltinay 4060 nValues=self.__nPts
743     # determine base location of this dataset within the domain
744     first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
745 caltinay 4277 # determine the resolution difference between domain and data.
746     # If domain has twice the resolution we can double up the data etc.
747     multiplier=[int(round(self.__delta[i]/spacing[i])) for i in range(len(self.__nPts))]
748 caltinay 4618 reverse = [int(self.__reverse[i]) for i in range(len(self.__reverse))]
749    
750     if domain.getDim() == 3:
751 caltinay 4613 first.append(int((self.getHeightScale()*self.__altitude-origin[2])/spacing[2]))
752 caltinay 4618 multiplier = multiplier + [1]
753     nValues = nValues + [1]
754     reverse = reverse + [0]
755 caltinay 4060
756 caltinay 4616 self.logger.debug("calling readNcGrid with dataname=%s, first=%s, nValues=%s, multiplier=%s, reverse=%s"%(
757     self.__data_name, str(first),str(nValues),str(multiplier),str(reverse)))
758 caltinay 4197 data = ripleycpp._readNcGrid(self.__filename, self.__data_name, FS,
759 caltinay 4616 shape=(), fill=self.__null_value, first=first,
760     numValues=nValues, multiplier=multiplier, reverse=reverse)
761    
762 caltinay 4197 if self.__error_name is not None:
763 caltinay 4992 self.logger.debug("calling readNcGrid with dataname=%s, first=%s, nValues=%s, multiplier=%s, reverse=%s"%(
764     self.__data_name, str(first),str(nValues),str(multiplier),str(reverse)))
765 caltinay 4197 sigma = ripleycpp._readNcGrid(self.__filename, self.__error_name,
766 caltinay 4616 FS, shape=(), fill=0., first=first, numValues=nValues,
767     multiplier=multiplier, reverse=reverse)
768 caltinay 4197 else:
769     sigma = self.__error_value * whereNonZero(data-self.__null_value)
770    
771     data = data * self.__scale_factor
772     sigma = sigma * self.__scale_factor
773 caltinay 4060 return data, sigma
774    
    def getUtmZone(self):
        """
        Returns the UTM zone that was determined when the geographic
        coordinates were projected in the constructor.
        """
        return self.__utm_zone
777 caltinay 4060
778 caltinay 4201
779 caltinay 3958 ##############################################################################
class SourceFeature(object):
    """
    A feature adds a density/susceptibility distribution to (parts of) a
    domain of a synthetic data source, for example a layer of a specific
    rock type or a simulated ore body.
    """
    def getValue(self, x):
        """
        Returns the value for the area covered by mask. It can be constant
        or a `Data` object with spatial dependency.

        :param x: coordinates at which the value is evaluated. Added to the
                  abstract signature for consistency with subclasses (e.g.
                  `SmoothAnomaly.getValue(x)`) and with how callers invoke
                  this method.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature. That is,
        mask is non-zero where the value returned by `getValue()` should be
        applied, zero elsewhere.
        """
        raise NotImplementedError
800    
class SmoothAnomaly(SourceFeature):
    """
    A source feature in the form of a blob (roughly gaussian).
    """
    def __init__(self, lx, ly, lz, x, y, depth, v_inner=None, v_outer=None):
        """
        Initializes the smooth anomaly data.

        :param lx: size of blob in x-dimension
        :param ly: size of blob in y-dimension
        :param lz: size of blob in z-dimension
        :param x: location of blob in x-dimension
        :param y: location of blob in y-dimension
        :param depth: depth of blob
        :param v_inner: value in the centre of the blob
        :param v_outer: value in the periphery of the blob
        """
        # blob centre (lateral position) and depth
        self.x = x
        self.y = y
        self.depth = depth
        # blob extents in each dimension
        self.lx = lx
        self.ly = ly
        self.lz = lz
        # centre/peripheral values; either being None yields a zero feature
        self.v_inner = v_inner
        self.v_outer = v_outer
        # value and mask are computed lazily and cached
        self.value = None
        self.mask = None

    def getValue(self, x):
        # compute the smooth value distribution once and cache it
        if self.value is None:
            if self.v_outer is None or self.v_inner is None:
                self.value = 0
            else:
                DIM = x.getDomain().getDim()
                # decay rate chosen so the gaussian drops from |v_inner|
                # to |v_outer| over the blob's half-width
                alpha = -log(abs(self.v_outer/self.v_inner))*4
                bump = exp(-alpha*((x[0]-self.x)/self.lx)**2)
                bump = bump*exp(-alpha*((x[DIM-1]+self.depth)/self.lz)**2)
                self.value = maximum(abs(self.v_outer), abs(self.v_inner*bump))
                if self.v_inner < 0:
                    self.value = -self.value
        return self.value

    def getMask(self, x):
        DIM = x.getDomain().getDim()
        z = x[DIM-1]
        # box mask: within lz/2 of the blob depth and lx/2 of the x centre
        m = whereNonNegative(z+self.depth+self.lz/2) * whereNonPositive(z+self.depth-self.lz/2) \
                * whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM > 2:
            # additionally restrict the y extent in 3D
            m *= whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        self.mask = m
        return m
851 caltinay 3946
852 caltinay 3958 ##############################################################################
class SyntheticDataBase(DataSource):
    """
    Base class to define reference data based on a given property distribution
    (density or susceptibility). Data are collected from a square region of
    vertical extent ``length`` on a grid with ``number_of_elements`` cells in
    each direction.

    The synthetic data are constructed by solving the appropriate forward
    problem. Data can be sampled with an offset from the surface at z=0 or
    using the entire subsurface region.
    """
    def __init__(self, data_type,
                       DIM=2,
                       number_of_elements=10,
                       length=1*U.km,
                       B_b=None,
                       data_offset=0,
                       full_knowledge=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param DIM: number of spatial dimensions
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        """
        super(SyntheticDataBase, self).__init__()
        if not data_type in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for data_type parameter")
        self.DIM=DIM
        self.number_of_elements=number_of_elements
        self.length=length
        self.__data_type = data_type
        self.__full_knowledge= full_knowledge
        self.__data_offset=data_offset
        self.__B_b =None
        # this is for Cartesian (FIXME ?)
        # NOTE(review): in 2D only the radial and longitudinal components of
        # B_b are kept; in 3D the list is passed through unconverted (no
        # np.array) -- presumably intentional, confirm against callers
        if data_type == self.MAGNETIC:
            if self.DIM < 3:
                self.__B_b = np.array([B_b[0], B_b[2]])
            else:
                self.__B_b = ([B_b[0], B_b[1],B_b[2]])
        # lateral grid description (origin, cell size, cell count)
        self.__origin = [0]*(DIM-1)
        self.__delta = [float(length)/number_of_elements]*(DIM-1)
        self.__nPts = [number_of_elements]*(DIM-1)
        # filled by subclasses via getReferenceProperty()
        self._reference_data=None

    def getUtmZone(self):
        """
        returns a dummy UTM zone since this class does not use real coordinate
        values.
        """
        return 0

    def getDataExtents(self):
        """
        returns the lateral data extend of the data set
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        returns the data type
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, number_of_elements, spacing):
        """
        returns the survey data placed on a given domain.

        :param domain: domain on which the data are to be placed
        :type domain: ``Domain``
        :param origin: origin of the domain
        :type origin: ``list`` of ``float``
        :param number_of_elements: number of elements (or cells) in each
                                   spatial direction used to span the domain
        :type number_of_elements: ``list`` of ``int``
        :param spacing: cell size in each spatial direction
        :type spacing: ``list`` of ``float``
        :return: observed gravity field or magnetic flux density for each cell
                 in the domain and for each cell an indicator 1/0 if the data
                 are valid or not.
        :rtype: pair of ``Scalar``
        """
        pde=LinearSinglePDE(domain)
        DIM=domain.getDim()
        x=domain.getX()
        # set the reference data
        k=self.getReferenceProperty(domain)
        # calculate the corresponding potential; the potential is pinned to
        # zero at the top surface z=sup(z)
        z=x[DIM-1]
        m_psi_ref=whereZero(z-sup(z))
        if self.getDataType()==DataSource.GRAVITY:
            # Poisson equation for the gravitational potential
            pde.setValue(A=kronecker(domain), Y=-4*np.pi*U.Gravitational_Constant*self._reference_data, q=m_psi_ref)
        else:
            # magnetic potential driven by the susceptibility anomaly
            pde.setValue(A=kronecker(domain), X=self._reference_data*self.__B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        #pde.getSolverOptions().setTolerance(1e-13)
        psi_ref=pde.getSolution()
        del pde
        if self.getDataType()==DataSource.GRAVITY:
            data = -grad(psi_ref, ReducedFunction(domain))
        else:
            data = self._reference_data*self.__B_b-grad(psi_ref, ReducedFunction(domain))

        x=ReducedFunction(domain).getX()
        if self.__full_knowledge:
            # data valid everywhere below the surface
            sigma = whereNegative(x[DIM-1])
        else:
            sigma=1.
            # limit mask to non-padding in horizontal area
            for i in range(DIM-1):
                x_i=x[i]
                sigma=sigma * wherePositive(x_i) * whereNegative(x_i-(sup(x_i)+inf(x_i)))
            # limit mask to one cell thickness at z=0
            z=x[DIM-1]
            # snap the data offset to the nearest cell boundary
            oo=int(self.__data_offset/spacing[DIM-1]+0.5)*spacing[DIM-1]
            sigma = sigma * whereNonNegative(z-oo) * whereNonPositive(z-oo-spacing[DIM-1])
        return data,sigma

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.

        :return: the density or susceptibility anomaly used to create the
                 survey data
        :note: it can be assumed that in the first call the ``domain``
               argument is present so the actual anomaly data can be created.
               In subsequent calls this may not be true.
        :note: method needs to be overwritten
        """
        raise NotImplementedError()
996    
class SyntheticFeatureData(SyntheticDataBase):
    """
    Uses a list of `SourceFeature` objects to define synthetic anomaly data.
    """
    def __init__(self, data_type,
                       features,
                       DIM=2,
                       number_of_elements=10,
                       length=1*U.km,
                       B_b=None,
                       data_offset=0,
                       full_knowledge=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param features: list of features. It is recommended that the features
                         are located entirely below the surface.
        :type features: ``list`` of `SourceFeature`
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        """
        super(SyntheticFeatureData,self).__init__(
                                 data_type=data_type, DIM=DIM,
                                 number_of_elements=number_of_elements,
                                 length=length, B_b=B_b,
                                 data_offset=data_offset,
                                 full_knowledge=full_knowledge)
        self._features = features

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        # BUGFIX: use an identity check instead of '== None' -- once set,
        # _reference_data is an escript Data object for which '==' performs
        # element-wise comparison rather than a None test
        if self._reference_data is None:
            DIM=domain.getDim()
            x=domain.getX()
            k=0.
            # overlay the features in order; where masks overlap, later
            # features overwrite earlier ones
            for f in self._features:
                m=f.getMask(x)
                k = k * (1-m) + f.getValue(x) * m
            self._reference_data= k
        return self._reference_data
1052 caltinay 4178
class SyntheticData(SyntheticDataBase):
    """
    Defines synthetic gravity/magnetic data based on harmonic property anomaly

        rho = amplitude * sin(n_depth * pi /depth * (z+depth_offset)) * sin(n_length * pi /length * (x - shift) )

    for all x and z<=0. For z>0 rho = 0.
    """
    def __init__(self, data_type,
                       n_length=1,
                       n_depth=1,
                       depth_offset=0.,
                       depth=None,
                       amplitude=None,
                       DIM=2,
                       number_of_elements=10,
                       length=1*U.km,
                       B_b=None,
                       data_offset=0,
                       full_knowledge=False,
                       s=0.):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param n_length: number of oscillations in the anomaly data within the
                         observation region
        :type n_length: ``int``
        :param n_depth: number of oscillations in the anomaly data below surface
        :type n_depth: ``int``
        :param depth_offset: vertical offset of the data
        :type depth_offset: ``float``
        :param depth: vertical extent in the anomaly data. If not present the
                      depth of the domain is used.
        :type depth: ``float``
        :param amplitude: data amplitude. Default value is 200 U.kg/U.m**3 for
                          gravity and 0.1 for magnetic data.
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude].
                    Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param s: lateral phase shift of the oscillation
        :type s: ``float``
        """
        super(SyntheticData,self).__init__(
                                 data_type=data_type, DIM=DIM,
                                 number_of_elements=number_of_elements,
                                 length=length, B_b=B_b,
                                 data_offset=data_offset,
                                 full_knowledge=full_knowledge)
        self.__n_length = n_length
        self.__n_depth = n_depth
        self.depth = depth
        self.depth_offset=depth_offset
        # BUGFIX: identity check 'is None' instead of '== None'
        if amplitude is None:
            # pick a sensible default amplitude for the data type
            if data_type == DataSource.GRAVITY:
                amplitude = 200 *U.kg/U.m**3
            else:
                amplitude = 0.1
        self.__amplitude = amplitude
        self.__s=s

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        if self._reference_data is None:
            DIM=domain.getDim()
            x=domain.getX()
            # set the reference data
            z=x[DIM-1]
            dd=self.depth
            # default to the full domain depth (inf(z) is the lowest z value)
            if dd is None: dd=inf(z)
            # normalized depth coordinate: z2 in [0,1] inside the anomaly
            z2=(z+self.depth_offset)/(self.depth_offset-dd)
            k=sin(self.__n_depth * np.pi * z2) * whereNonNegative(z2) * whereNonPositive(z2-1.) * self.__amplitude
            for i in range(DIM-1):
                x_i=x[i]
                min_x=inf(x_i)
                max_x=sup(x_i)
                k*= sin(self.__n_length*np.pi*(x_i-min_x-self.__s)/(max_x-min_x))
            self._reference_data= k
        return self._reference_data
1144 gross 4106
1145 caltinay 4362 ##############################################################################
class NumpyData(DataSource):
    """
    A data source that uses survey data from a ``numpy`` object or list
    instead of a file.
    """

    def __init__(self, data_type, data, error=1., length=1.*U.km, null_value=-1., tags=None, origin=None):
        """
        A data source that uses survey data from a ``numpy`` object or list
        instead of a file.
        The dimensionality is inferred from the shape of ``data`` (1- and
        2-dimensional data is supported). The data origin is assumed to be
        at the coordinate origin.

        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param data: the survey data array. Note that for a cartesian coordinate
                     system the shape of the data is considered to be
                     (nz,ny,nx).
        :type data: ``numpy.array`` or ``list``
        :param error: constant value to use for the data uncertainties or a
                      numpy object with uncertainties for every data point.
        :type error: ``float`` or ``list`` or ``ndarray``
        :param length: side length(s) of the data slice/volume. This can be
                       a scalar to indicate constant length in all dimensions
                       or an array/list of values in each coordinate dimension.
        :type length: ``float`` or ``list`` or ``ndarray``
        :param null_value: value that is used in the undefined regions of the
                           survey data object.
        :type null_value: ``float``
        :param tags: a list of tags associated with the data set.
        :type tags: ``list`` of almost any type (typically `str`)
        :param origin: offset of origin of offset
        :type origin: ``list`` of ``float``s
        """
        # BUGFIX: avoid a shared mutable default argument for tags
        if tags is None:
            tags = []
        super(NumpyData, self).__init__(tags=tags)
        if not data_type in [self.GRAVITY, self.MAGNETIC, self.ACOUSTIC, self.MT ]:
            raise ValueError("Invalid value for data_type parameter")
        self.__data_type = data_type
        # keep float64/complex128 arrays as-is, coerce everything else
        if not isinstance(data, np.ndarray) or data.dtype not in [ np.float64, np.complex128]:
            self.__data = np.asarray(data, dtype=np.float64)
        else:
            self.__data = data
        DIM = len(self.__data.shape)
        if DIM not in (1,2):
            raise ValueError("NumpyData requires 1- or 2-dimensional data")
        self.__error = np.asarray(error, dtype=np.float64)
        if len(self.__error.shape) > 0 and \
                self.__error.shape != self.__data.shape:
            raise ValueError("error argument must be scalar or match the shape of the data")
        if isinstance(length,float) or isinstance(length,int):
            length = [float(length)] * DIM
        else:
            length=np.asarray(length, dtype=float)
            if len(length.shape) != 1 or length.shape[0] != DIM:
                raise ValueError("length must be scalar or an array with %d values"%DIM)
            length=length.tolist()

        self.__length=length
        self.__null_value = null_value
        # numpy shape is (nz,ny,nx) but grid info is stored (nx,ny,...)
        self.__nPts = list(reversed(self.__data.shape))
        self.__delta = [length[i]/self.__nPts[i] for i in range(DIM)]
        if origin is None:
            self.__origin = [0.] * DIM
        else:
            # BUGFIX: take one origin value per data dimension; the original
            # sliced origin[:DIM-1] which dropped the last coordinate and
            # broke indexing self.__origin[i] for i in range(DIM)
            self.__origin = origin[:DIM]

    def getDataExtents(self):
        """
        Returns (origin, number of points, spacing) of the dataset.
        """
        return (self.__origin, self.__nPts, self.__delta)

    def getDataType(self):
        """
        Returns the data type indicator passed to the constructor.
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Returns the survey data and uncertainties placed on the given domain.
        Acoustic and MT data are placed on `FunctionOnBoundary`, gravity and
        magnetic data on `ReducedFunction` via a temporary binary grid file.
        """
        DIM=domain.getDim()
        if self.getDataType() == self.ACOUSTIC:
            x=FunctionOnBoundary(domain).getX()
            BBX=boundingBox(domain)
            z=x[DIM-1]
            mask= whereZero( z - inf(z)) # we don't use BBX[DIM-1][1] due to mountains'
            for i in range(DIM-1):
                x_i=x[i]
                mask+=whereNonPositive( x_i - self.__origin[i] ) + whereNonNegative( x_i - ( self.__origin[i]+self.__length[i] ) )
            mask=1-wherePositive(mask)

            data=Data(0.,(2,), FunctionOnBoundary(domain))
            step= [ self.__length[i]/self.__data.shape[i] for i in range(DIM-1) ]
            if DIM == 2:
                data[0] = interpolateTable(self.__data.real, x[0],self.__origin[0], step[0])
                data[1] = interpolateTable(self.__data.imag, x[0],self.__origin[0], step[0])
                if len(self.__error.shape) > 0:
                    sigma = interpolateTable(self.__error, x[0], self.__origin[0], step[0])
                else:
                    sigma = Scalar(self.__error.item(), FunctionOnBoundary(domain))
            else:
                raise ValueError("3D domains are not supported yet.")
            data*=mask
            sigma*=mask
        elif self.getDataType() == self.MT:
            if DIM == 2:
                # BUGFIX: x was never defined in this branch which caused a
                # NameError when a non-scalar error array was supplied
                x=FunctionOnBoundary(domain).getX()
                step= [ self.__length[i]/self.__data.shape[i] for i in range(DIM-1) ]
                if len(self.__error.shape) > 0:
                    sigma = interpolateTable(self.__error, x[0], self.__origin[0], step[0])
                else:
                    sigma = Scalar(self.__error.item(), FunctionOnBoundary(domain))
                return self.__data, sigma
            else:
                raise ValueError("3D domains are not supported yet.")

        else:
            FS = ReducedFunction(domain)
            nValues = self.__nPts
            dataDIM = len(nValues)
            # determine base location of this dataset within the domain
            first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(dataDIM)]
            # determine the resolution difference between domain and data.
            # If domain has twice the resolution we can double up the data etc.
            multiplier=[int(round(self.__delta[i]/spacing[i])) for i in range(dataDIM)]
            if domain.getDim() > dataDIM:
                first.append(int(-origin[-1]/spacing[-1]))
                multiplier=multiplier+[1]
                nValues=nValues+[1]

            # write the data to a temporary file so ripley can read it back
            _handle, numpyfile = tempfile.mkstemp()
            os.close(_handle)
            self.__data.tofile(numpyfile)

            reverse=[0]*DIM
            byteorder=BYTEORDER_NATIVE
            datatype=DATATYPE_FLOAT64
            self.logger.debug("calling readBinaryGrid with first=%s, nValues=%s, multiplier=%s"%(str(first),str(nValues),str(multiplier)))
            data = readBinaryGrid(numpyfile, FS, shape=(),
                    fill=self.__null_value, byteOrder=byteorder, dataType=datatype,
                    first=first, numValues=nValues, multiplier=multiplier,
                    reverse=reverse)
            if len(self.__error.shape) > 0:
                self.__error.tofile(numpyfile)
                self.logger.debug("calling readBinaryGrid with first=%s, nValues=%s, multiplier=%s"%(str(first),str(nValues),str(multiplier)))
                sigma = readBinaryGrid(numpyfile, FS, shape=(),
                        fill=0., byteOrder=byteorder, dataType=datatype,
                        first=first, numValues=nValues, multiplier=multiplier,
                        reverse=reverse)
            else:
                sigma = self.__error.item() * whereNonZero(data-self.__null_value)
            os.unlink(numpyfile)

        return data, sigma

    def getUtmZone(self):
        """
        returns a dummy UTM zone since this class does not use real coordinate
        values.
        """
        return 0
1298    
class MT2DTe(object):
    """
    Stores the frequency and measurement locations associated with MT data.
    Instances of this class are used to tag MT data sets.
    """

    def __init__(self, x, omega=0):
        """
        Initializes the MT2DTe tag object.

        :param omega: frequency of readings
        :type omega: ``float``
        :param x: coordinates of measurements
        :type x: ``list`` of ``tuple`` with ``float``
        """
        self.__x = x
        self.__omega = omega

    def getFrequency(self):
        """
        Returns the frequency of the measurement.
        :rtype: ``float``
        """
        return self.__omega

    def getX(self):
        """
        Returns the coordinates of the measurement.
        :rtype: ``float``
        """
        return self.__x
1329 jduplessis 4974
class SeismicSource(object):
    """
    Describes a seismic source by location (x,y), frequency omega, power
    (if known) and orientation (if known). This class is used to tag
    seismic data sources.
    """
    def __init__(self, x, y=0., omega=0., elevation=0., power = None, orientation=None):
        """
        Initializes the source.

        :param x: lateral x location
        :param y: lateral y location
        :param omega: frequency of source
        :param elevation: elevation of source above reference level
        :param power: power of source at frequency
        :param orientation: orientation of source in 3D or 2D (or None)
        :type x: ``float``
        :type y: ``float``
        :type omega: ``float``
        :type power: ``complex`` or ``None``
        :type orientation: vector of appropriate length or ``None``
        """
        self.__loc = (x, y)
        self.__omega = omega
        self.__elevation = elevation
        self.__power = power
        self.__orientation = orientation

    def __eq__(self, other):
        # only other SeismicSource instances can compare equal
        if not isinstance(other, SeismicSource):
            return False
        return (self.__loc == other.getLocation()
                and self.__omega == other.getFrequency()
                and self.__elevation == other.getElevation()
                and self.__power == other.getPower()
                and self.__orientation == other.getOrientation())

    def __ne__(self, other):
        return not self.__eq__(other)

    def getLocation(self):
        """
        Returns the location of the source.
        :rtype: ``tuple`` of ``float``
        """
        return self.__loc

    def getFrequency(self):
        """
        Returns the frequency of the source.
        :rtype: ``float``
        """
        return self.__omega

    def getElevation(self):
        """
        Returns the elevation of the source.
        :rtype: ``float``
        """
        return self.__elevation

    def getPower(self):
        """
        Returns the power of the source at its frequency.
        :rtype: ``complex`` or ``None``
        """
        return self.__power

    def getOrientation(self):
        """
        Returns the orientation of the source at its frequency.
        :rtype: vector type object or ``None``
        """
        return self.__orientation
1402    
1403    

  ViewVC Help
Powered by ViewVC 1.1.26