/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Annotation of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 4287 - (hide annotations)
Thu Mar 7 05:26:44 2013 UTC (6 years, 2 months ago) by caltinay
File MIME type: text/x-python
File size: 40784 byte(s)
Removed simpleGeoMagnetic field function from doco and example source.

1 caltinay 3946
2 jfenwick 3981 ##############################################################################
3 caltinay 3946 #
4 caltinay 4145 # Copyright (c) 2003-2013 by University of Queensland
5 jfenwick 3981 # http://www.uq.edu.au
6 caltinay 3946 #
7     # Primary Business: Queensland, Australia
8     # Licensed under the Open Software License version 3.0
9     # http://www.opensource.org/licenses/osl-3.0.php
10     #
11 jfenwick 3981 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12     # Development since 2012 by School of Earth Sciences
13     #
14     ##############################################################################
15 caltinay 3946
16 caltinay 4007 """Data readers/providers for inversions"""
17    
18 caltinay 4145 __copyright__="""Copyright (c) 2003-2013 by University of Queensland
19 jfenwick 3981 http://www.uq.edu.au
20 caltinay 3946 Primary Business: Queensland, Australia"""
21     __license__="""Licensed under the Open Software License version 3.0
22     http://www.opensource.org/licenses/osl-3.0.php"""
23     __url__="https://launchpad.net/escript-finley"
24    
25 caltinay 4287 __all__ = ['DataSource', 'ErMapperData', \
26 caltinay 4145 'SyntheticDataBase', 'SyntheticFeatureData', 'SyntheticData',
27     'SmoothAnomaly']
28 caltinay 3947
29 caltinay 3946 import logging
30     import numpy as np
31 caltinay 4060 from esys.escript import ReducedFunction
32 caltinay 4145 from esys.escript import unitsSI as U
33 caltinay 4005 from esys.escript.linearPDEs import LinearSinglePDE
34 caltinay 4007 from esys.escript.util import *
35 caltinay 4197 from esys.ripley import ripleycpp
36 caltinay 3971
37 caltinay 3947 try:
38     from scipy.io.netcdf import netcdf_file
39 caltinay 4060 __all__ += ['NetCdfData']
40 caltinay 3947 except:
41     pass
42 caltinay 3946
def LatLonToUTM(lon, lat, wkt_string=None):
    """
    Converts one or more longitude,latitude pairs to the corresponding x,y
    coordinates in the Universal Transverse Mercator projection.

    :note: The ``pyproj`` module is required for this method. If it is not
           found an exception is raised.
    :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
           not available to convert the string, then the input values are
           assumed to be using the Clarke 1866 ellipsoid.

    :param lon: longitude value(s)
    :type lon: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param lat: latitude value(s)
    :type lat: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param wkt_string: Well-known text (WKT) string describing the coordinate
                       system used. The ``gdal`` module is used to convert
                       the string to the corresponding Proj4 string.
    :type wkt_string: ``str``
    :rtype: ``tuple``
    """

    try:
        import pyproj
    except ImportError:
        # was a bare 'except:' - only a failed import should trigger this
        raise ImportError("In order to perform coordinate transformations on "
            "the data you are using the 'pyproj' Python module is required but "
            "was not found. Please install the module and try again.")

    # determine UTM zone from the input data (median so outliers don't
    # flip the zone for point clouds straddling a boundary)
    zone = int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
    try:
        import osgeo.osr
        srs = osgeo.osr.SpatialReference()
        srs.ImportFromWkt(wkt_string)
        p_src = pyproj.Proj(srs.ExportToProj4())
    except Exception:
        # gdal missing, wkt_string is None/invalid, or the Proj4 conversion
        # failed - fall back to the Clarke 1866 ellipsoid as documented.
        # (narrowed from a bare 'except:' which also caught KeyboardInterrupt)
        p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')

    # check for hemisphere: southern-hemisphere UTM needs the '+south' flag
    if np.median(np.array(lat)) < 0.:
        south = '+south '
    else:
        south = ''
    p_dest = pyproj.Proj('+proj=utm +zone=%d %s+units=m'%(zone,south))
    x, y = pyproj.transform(p_src, p_dest, lon, lat)
    return x, y, zone
90 caltinay 3947
class DataSource(object):
    """
    Abstract base for objects supplying survey data to the inversion process.

    Subclasses provide the actual measurements; this base class implements
    only the functionality every source shares (logging and subsampling).
    The default assumption is 2D data mapped onto a slice of a 3D domain;
    override the relevant methods for any other setup.
    """

    # identifiers for the kind of survey data a source provides
    GRAVITY, MAGNETIC = 0, 1

    def __init__(self):
        """
        Initializes the logger and sets the subsampling factor to 1.
        """
        self.logger = logging.getLogger('inv.%s' % type(self).__name__)
        self.__subsampling_factor = 1

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDataType(self):
        """
        Returns the type of survey data managed by this source.
        Subclasses must return `GRAVITY` or `MAGNETIC` as appropriate.
        """
        raise NotImplementedError

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Called by the `DomainBuilder` to retrieve the survey data as `Data`
        objects on the given domain.

        Subclasses should return one or more `Data` objects with survey data
        interpolated on the given `escript` domain; the exact return type
        depends on the type of data.

        :param domain: the escript domain to use
        :type domain: `esys.escript.Domain`
        :param origin: the origin coordinates of the domain
        :type origin: ``tuple`` or ``list``
        :param NE: the number of domain elements in each dimension
        :type NE: ``tuple`` or ``list``
        :param spacing: the cell sizes (node spacing) in the domain
        :type spacing: ``tuple`` or ``list``
        """
        raise NotImplementedError

    def getUtmZone(self):
        """
        All data source coordinates are converted to UTM (Universal Transverse
        Mercator) in order to have useful domain extents. Subclasses should
        implement this method and return the UTM zone number of the projected
        coordinates.
        """
        raise NotImplementedError

    def setSubsamplingFactor(self, f):
        """
        Sets the data subsampling factor (default=1).

        The factor applies in every dimension, so a 2D dataset of
        300 x 150 points shrinks to 150 x 75 with a factor of 2.
        This matters when combining data of varying resolution in a
        `DomainBuilder`.
        """
        self.__subsampling_factor = f

    def getSubsamplingFactor(self):
        """
        Returns the subsampling factor previously set via
        `setSubsamplingFactor` (see there).
        """
        return self.__subsampling_factor
175 caltinay 3946
176    
177 caltinay 3964 ##############################################################################
class ErMapperData(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, data_type, headerfile, datafile=None, altitude=0.,
                 error=None, scale_factor=None, null_value=None):
        """
        :param data_type: type of data, must be `GRAVITY` or `MAGNETIC`
        :type data_type: ``int``
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :type headerfile: ``str``
        :param datafile: ER Mapper binary data file name. If not supplied the
                         name of the header file without '.ers' is assumed
        :type datafile: ``str``
        :param altitude: altitude of measurements above ground in meters
        :type altitude: ``float``
        :param error: constant value to use for the data uncertainties.
                      If a value is supplied, it is scaled by the same factor
                      as the measurements. If not provided the error is
                      assumed to be 2 units for all measurements (i.e. 0.2
                      mGal and 2 nT for gravity and magnetic, respectively)
        :type error: ``float``
        :param scale_factor: the measurements and error values are scaled by
                             this factor. By default, gravity data is assumed
                             to be given in 1e-6 m/s^2 (0.1 mGal), while
                             magnetic data is assumed to be in 1e-9 T (1 nT).
        :type scale_factor: ``float``
        :param null_value: value that is used in the file to mark undefined
                           areas. This information is usually included in the
                           file.
        :type null_value: ``float``
        """
        super(ErMapperData, self).__init__()
        self.__headerfile=headerfile
        if datafile is None:
            # by ER Mapper convention the data file is the header file name
            # minus the '.ers' extension
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__altitude=altitude
        self.__data_type=data_type
        # filled in by __readHeader() for geodetic data, else stays None
        self.__utm_zone = None
        self.__scale_factor = scale_factor
        self.__null_value = null_value
        self.__error_value = error
        # parse the header now so invalid input fails early
        self.__readHeader()

    def __readHeader(self):
        """
        Parses the ER Mapper header file and populates grid extents, origin,
        spacing, null value, scale factor and (for geodetic data) UTM zone.
        Raises RuntimeError/TypeError on unsupported or invalid input.
        """
        self.logger.debug("Checking Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        metadata=open(self.__headerfile, 'r').readlines()
        # locate the line after 'DatasetHeader Begin'; everything before it
        # is ignored
        start=-1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ER Mapper header file ("DatasetHeader" not found)')

        # parse header file filling dictionary of found values
        # keys are dotted paths built from the nested Begin/End sections,
        # e.g. 'RasterInfo.CellInfo.Xdimension'
        md_dict={}
        section=[]
        for i in range(start, len(metadata)):
            line=metadata[i].strip()
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        # check that the data format/type is supported; missing entries are
        # only warned about and a sensible default is assumed
        try:
            if md_dict['ByteOrder'] != 'LSBFirst':
                raise RuntimeError('Unsupported byte order '+md_dict['ByteOrder'])
        except KeyError:
            self.logger.warn("Byte order not specified. Assuming LSB first.")

        try:
            if md_dict['DataType'] != 'Raster':
                raise RuntimeError('Unsupported data type '+md_dict['DataType'])
        except KeyError:
            self.logger.warn("Data type not specified. Assuming raster data.")

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            fileOffset = int(md_dict['HeaderOffset'])
        except:
            fileOffset = 0
        if fileOffset > 0:
            raise RuntimeError("ER Mapper data with header offset >0 not supported.")

        # now extract required information
        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except:
            raise RuntimeError("Could not determine extents of data")

        ### mask/null value
        # note that NaN is always filtered out in ripley
        if self.__null_value is None:
            try:
                self.__null_value = float(md_dict['RasterInfo.NullCellValue'])
            except:
                self.logger.debug("Could not determine null value, using default.")
                self.__null_value = 99999
        elif not isinstance(self.__null_value,float) and not isinstance(self.__null_value,int):
            raise TypeError("Invalid type of null_value parameter")

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except:
            raise RuntimeError("Could not determine cell dimensions")

        # registration coordinates are either eastings/northings ('EN') or
        # longitude/latitude ('LL'); anything else is unsupported
        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except:
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        # data sets have origin in top-left corner so y runs top-down and
        # we need to flip accordingly
        originY-=NY*spacingY

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available or file read error, assuming GDA94 data')
            # project the origin and a point one cell away to derive the
            # metric spacing in UTM
            originX_UTM,originY_UTM,zone=LatLonToUTM(originX, originY, wkt)
            op1X,op1Y,_=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            self.__utm_zone = zone
            # we are rounding to avoid interpolation issues
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        self.__dataorigin=[originX, originY]
        self.__delta = [spacingX, spacingY]
        self.__nPts = [NX, NY]
        self.__origin = [originX, originY]
        ### scale factor
        if self.__scale_factor is None:
            if self.__data_type == self.GRAVITY:
                self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
                self.__scale_factor = 1e-6
            else:
                self.logger.info("Assuming magnetic data units are 'nT'.")
                self.__scale_factor = 1e-9

        ### error value
        if self.__error_value is None:
            # default uncertainty: 2 raw units (0.2 mGal / 2 nT after scaling)
            self.__error_value = 2.
        elif not isinstance(self.__error_value,float) and not isinstance(self.__error_value,int):
            raise TypeError("Invalid type of error parameter")


    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        # `GRAVITY` or `MAGNETIC` as passed to the constructor
        return self.__data_type

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Reads the binary grid into a `Data` object on the given domain and
        returns (data, sigma), both scaled by the scale factor.
        """
        FS = ReducedFunction(domain)
        nValues=self.__nPts
        # determine base location of this dataset within the domain
        first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        # determine the resolution difference between domain and data.
        # If domain has twice the resolution we can double up the data etc.
        multiplier=[int(round(self.__delta[i]/spacing[i])) for i in range(len(self.__nPts))]
        if domain.getDim()==3:
            # place the 2D slice at the measurement altitude in the 3D domain
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            multiplier=multiplier+[1]
            nValues=nValues+[1]

        data = ripleycpp._readBinaryGrid(self.__datafile, FS, first, nValues,
                                         multiplier, (), self.__null_value)
        # constant error wherever we have valid data, zero at null cells
        sigma = self.__error_value * whereNonZero(data-self.__null_value)

        data = data * self.__scale_factor
        sigma = sigma * self.__scale_factor
        return data, sigma

    def getUtmZone(self):
        # None unless the header contained geodetic coordinates
        return self.__utm_zone
392 caltinay 4060
393 caltinay 4201
394 caltinay 4060 ##############################################################################
class NetCdfData(DataSource):
    """
    Data Source for gridded netCDF data that use CF/COARDS conventions.
    """
    def __init__(self, data_type, filename, altitude=0., data_variable=None,
                 error=None, scale_factor=None, null_value=None):
        """
        :param filename: file name for survey data in netCDF format
        :type filename: ``str``
        :param data_type: type of data, must be `GRAVITY` or `MAGNETIC`
        :type data_type: ``int``
        :param altitude: altitude of measurements in meters
        :type altitude: ``float``
        :param data_variable: name of the netCDF variable that holds the data.
                              If not provided an attempt is made to determine
                              the variable and an exception thrown on failure.
        :type data_variable: ``str``
        :param error: either the name of the netCDF variable that holds the
                      uncertainties of the measurements or a constant value
                      to use for the uncertainties. If a constant value is
                      supplied, it is scaled by the same factor as the
                      measurements. If not provided the error is assumed to
                      be 2 units for all measurements (i.e. 0.2 mGal and 2 nT
                      for gravity and magnetic, respectively)
        :type error: ``str`` or ``float``
        :param scale_factor: the measurements and error values are scaled by
                             this factor. By default, gravity data is assumed
                             to be given in 1e-6 m/s^2 (0.1 mGal), while
                             magnetic data is assumed to be in 1e-9 T (1 nT).
        :type scale_factor: ``float``
        :param null_value: value that is used in the file to mark undefined
                           areas. This information is usually included in the
                           file.
        :type null_value: ``float``
        """
        super(NetCdfData,self).__init__()
        self.__filename=filename
        if not data_type in [self.GRAVITY,self.MAGNETIC]:
            raise ValueError("Invalid value for data_type parameter")
        self.__data_type = data_type
        self.__altitude = altitude
        self.__data_name = data_variable
        self.__scale_factor = scale_factor
        self.__null_value = null_value
        # read the file metadata now so invalid input fails early
        self.__readMetadata(error)

    def __readMetadata(self, error):
        """
        Opens the netCDF file, determines grid extents, the data and error
        variables, null value, scale factor and UTM origin/spacing.
        """
        self.logger.debug("Checking Data Source: %s"%self.__filename)
        f=netcdf_file(self.__filename, 'r')

        ### longitude- / X-dimension and variable
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                lon_name=n
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")

        # CF/COARDS states that coordinate variables have the same name as
        # the dimensions
        if not lon_name in f.variables:
            raise RuntimeError("Could not determine longitude variable")
        # pop() removes the coordinate variable so the data-variable search
        # below cannot accidentally pick it up
        longitude=f.variables.pop(lon_name)

        ### latitude- / Y-dimension and variable
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                lat_name=n
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")
        if not lat_name in f.variables:
            raise RuntimeError("Could not determine latitude variable")
        latitude=f.variables.pop(lat_name)

        ### data variable
        if self.__data_name is not None:
            # user supplied a name - verify it exists and is on the grid
            try:
                dims = f.variables[self.__data_name].dimensions
                if not ((lat_name in dims) and (lon_name in dims)):
                    raise ValueError("Invalid data variable name supplied")
            except KeyError:
                raise ValueError("Invalid data variable name supplied")
        else:
            # otherwise use the first variable defined over both dimensions
            for n in f.variables.keys():
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    self.__data_name=n
                    break
            if self.__data_name is None:
                raise RuntimeError("Could not determine data variable")

        datavar = f.variables[self.__data_name]

        ### error value/variable
        self.__error_name = None
        if isinstance(error,str):
            # error given as the name of a netCDF variable on the same grid
            try:
                dims = f.variables[error].dimensions
                if not ((lat_name in dims) and (lon_name in dims)):
                    raise ValueError("Invalid error variable name supplied")
            except KeyError:
                raise ValueError("Invalid error variable name supplied")
            self.__error_name = error
        elif isinstance(error,float) or isinstance(error,int):
            self.__error_value = float(error)
        elif error is None:
            # default uncertainty: 2 raw units (0.2 mGal / 2 nT after scaling)
            self.__error_value = 2.
        else:
            raise TypeError("Invalid type of error parameter")

        ### mask/null value
        # note that NaN is always filtered out in ripley
        if self.__null_value is None:
            if hasattr(datavar, 'missing_value'):
                self.__null_value = float(datavar.missing_value)
            elif hasattr(datavar, '_FillValue'):
                self.__null_value = float(datavar._FillValue)
            else:
                self.logger.debug("Could not determine null value, using default.")
                self.__null_value = 99999
        elif not isinstance(self.__null_value,float) and not isinstance(self.__null_value,int):
            raise TypeError("Invalid type of null_value parameter")

        # try to determine units of data - this is disabled until we obtain a
        # file with valid information
        #if hasattr(f.variables[data_name], 'units'):
        #   units=f.variables[data_name].units

        ### scale factor
        if self.__scale_factor is None:
            if self.__data_type == self.GRAVITY:
                self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
                self.__scale_factor = 1e-6
            else:
                self.logger.info("Assuming magnetic data units are 'nT'.")
                self.__scale_factor = 1e-9

        # see if there is a WKT string to convert coordinates
        try:
            wkt_string=datavar.esri_pe_string
        except:
            wkt_string=None

        # actual_range & geospatial_lon_min/max do not always contain correct
        # values so we have to obtain the min/max in a less efficient way:
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        lon_range,lat_range,zone=LatLonToUTM(lon_range, lat_range, wkt_string)
        self.__utm_zone = zone
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0]]
        f.close()

        self.__nPts=[NX, NY]
        self.__origin=[lon_range[0],lat_range[0]]
        # we are rounding to avoid interpolation issues
        self.__delta=[np.round(lengths[i]/self.__nPts[i]) for i in range(2)]

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        # `GRAVITY` or `MAGNETIC` as passed to the constructor
        return self.__data_type

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Reads the netCDF grid into a `Data` object on the given domain and
        returns (data, sigma), both scaled by the scale factor.
        """
        FS=ReducedFunction(domain)
        nValues=self.__nPts
        # determine base location of this dataset within the domain
        first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        # determine the resolution difference between domain and data.
        # If domain has twice the resolution we can double up the data etc.
        multiplier=[int(round(self.__delta[i]/spacing[i])) for i in range(len(self.__nPts))]
        if domain.getDim()==3:
            # place the 2D slice at the measurement altitude in the 3D domain
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            multiplier=multiplier+[1]
            nValues=nValues+[1]

        data = ripleycpp._readNcGrid(self.__filename, self.__data_name, FS,
                                     first, nValues, multiplier, (),
                                     self.__null_value)
        if self.__error_name is not None:
            # uncertainties come from their own variable in the file
            sigma = ripleycpp._readNcGrid(self.__filename, self.__error_name,
                                          FS, first, nValues, multiplier, (),0.)
        else:
            # constant error wherever we have valid data, zero at null cells
            sigma = self.__error_value * whereNonZero(data-self.__null_value)

        data = data * self.__scale_factor
        sigma = sigma * self.__scale_factor
        return data, sigma

    def getUtmZone(self):
        # zone determined from the coordinate range in __readMetadata()
        return self.__utm_zone
594 caltinay 4060
595 caltinay 4201
596 caltinay 3958 ##############################################################################
class SourceFeature(object):
    """
    Interface for features that contribute a density/susceptibility
    distribution to (parts of) the domain of a synthetic data source --
    for instance a layer of a particular rock type or a simulated ore body.
    """
    def getValue(self):
        """
        Returns the value applied in the region selected by the mask. This
        may be a constant or a `Data` object with spatial dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns a mask for the area of interest of this feature: non-zero
        wherever the value from `getValue()` should be applied, zero
        everywhere else.
        """
        raise NotImplementedError
617    
class SmoothAnomaly(SourceFeature):
    """
    A source feature in the form of a blob (roughly gaussian).
    """
    def __init__(self, lx, ly, lz, x, y, depth, v_inner=None, v_outer=None):
        """
        Intializes the smooth anomaly data.

        :param lx: size of blob in x-dimension
        :param ly: size of blob in y-dimension
        :param lz: size of blob in z-dimension
        :param x: location of blob in x-dimension
        :param y: location of blob in y-dimension
        :param depth: depth of blob
        :param v_inner: value in the centre of the blob
        :param v_outer: value in the periphery of the blob
        """
        # blob centre and extents
        self.x=x
        self.y=y
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.depth=depth
        # if either value is None the feature contributes 0 (see getValue)
        self.v_inner=v_inner
        self.v_outer=v_outer
        # computed lazily and cached by getValue()/getMask()
        self.value=None
        self.mask=None

    def getValue(self,x):
        # NOTE(review): the result is cached after the first call; later
        # calls return the cached value and ignore the x argument.
        if self.value is None:
            if self.v_outer is None or self.v_inner is None:
                self.value=0
            else:
                DIM=x.getDomain().getDim()
                # decay rate chosen so the gaussian falls from |v_inner| to
                # roughly |v_outer| at the blob boundary
                alpha=-log(abs(self.v_outer/self.v_inner))*4
                value=exp(-alpha*((x[0]-self.x)/self.lx)**2)
                value=value*exp(-alpha*((x[DIM-1]+self.depth)/self.lz)**2)
                # clamp to the outer value so the field never drops below it
                self.value=maximum(abs(self.v_outer), abs(self.v_inner*value))
                # restore the sign for negative anomalies
                if self.v_inner<0: self.value=-self.value

        return self.value

    def getMask(self, x):
        # box mask: 1 inside the blob's bounding box (centre +/- half the
        # extent in each dimension), 0 outside
        DIM=x.getDomain().getDim()
        m=whereNonNegative(x[DIM-1]+self.depth+self.lz/2) * whereNonPositive(x[DIM-1]+self.depth-self.lz/2) \
            *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM>2:
            # restrict in y as well for 3D domains
            m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        self.mask = m
        return m
668 caltinay 3946
669 caltinay 3958 ##############################################################################
670 gross 4115 class SyntheticDataBase(DataSource):
671 caltinay 4132 """
672     Base class to define reference data based on a given property distribution
673     (density or susceptibility). Data are collected from a square region of
674 caltinay 4283 vertical extent ``length`` on a grid with ``number_of_elements`` cells in
675     each direction.
676 caltinay 4178
677 caltinay 4132 The synthetic data are constructed by solving the appropriate forward
678     problem. Data can be sampled with an offset from the surface at z=0 or
679     using the entire subsurface region.
680     """
    def __init__(self, data_type,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param DIM: number of spatial dimensions
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` spherical coordinates are used. This is
                          not supported yet and should be set to ``False``.
        :type spherical: ``Bool``
        """
        super(SyntheticDataBase, self).__init__()
        if not data_type in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for data_type parameter")
        self.DIM=DIM
        self.number_of_elements=number_of_elements
        self.length=length
        self.__data_type = data_type

        if spherical:
            raise ValueError("Spherical coordinates are not supported yet")
        self.__spherical = spherical
        self.__full_knowledge= full_knowledge
        self.__data_offset=data_offset
        self.__B_b =None
        # this is for Cartesian (FIXME ?)
        # NOTE(review): the spherical components [B_r, B_lat, B_lon] are
        # reordered/negated into Cartesian axes; assumes B_b was supplied
        # when data_type is MAGNETIC (raises TypeError otherwise)
        if data_type == self.MAGNETIC:
            if self.DIM < 3:
                self.__B_b = np.array([-B_b[2], -B_b[0]])
            else:
                self.__B_b = ([-B_b[1], -B_b[2], -B_b[0]])
        # lateral grid: origin at 0, uniform spacing over `length`
        self.__origin = [0]*(DIM-1)
        self.__delta = [float(length)/number_of_elements]*(DIM-1)
        self.__nPts = [number_of_elements]*(DIM-1)
        # reference property distribution, computed lazily by subclasses
        self._reference_data=None
734 caltinay 4178
735 caltinay 4201 def getUtmZone(self):
736     """
737     returns a dummy UTM zone since this class does not use real coordinate
738     values.
739     """
740     return 0
741    
742 caltinay 4132 def getDataExtents(self):
743     """
744     returns the lateral data extend of the data set
745     """
746     return (list(self.__origin), list(self.__nPts), list(self.__delta))
747 caltinay 3946
748 caltinay 4132 def getDataType(self):
749     """
750     returns the data type
751     """
752 caltinay 4201 return self.__data_type
753 caltinay 4178
754 caltinay 4132 def getSurveyData(self, domain, origin, number_of_elements, spacing):
755     """
756     returns the survey data placed on a given domain.
757 caltinay 4178
758 caltinay 4132 :param domain: domain on which the data are to be placed
759     :type domain: ``Domain``
760     :param origin: origin of the domain
761     :type origin: ``list`` of ``float``
762     :param number_of_elements: number of elements (or cells) in each
763     spatial direction used to span the domain
764     :type number_of_elements: ``list`` of ``int``
765     :param spacing: cell size in each spatial direction
766     :type spacing: ``list`` of ``float``
767     :return: observed gravity field or magnetic flux density for each cell
768     in the domain and for each cell an indicator 1/0 if the data
769     are valid or not.
770     :rtype: pair of ``Scalar``
771     """
772     pde=LinearSinglePDE(domain)
773     DIM=domain.getDim()
774     x=domain.getX()
775     # set the reference data
776     k=self.getReferenceProperty(domain)
777     # calculate the corresponding potential
778     z=x[DIM-1]
779     m_psi_ref=whereZero(z-sup(z))
780     if self.getDataType()==DataSource.GRAVITY:
781     pde.setValue(A=kronecker(domain), Y=-4*np.pi*U.Gravitational_Constant*self._reference_data, q=m_psi_ref)
782     else:
783     pde.setValue(A=kronecker(domain), X=self._reference_data*self.__B_b, q=m_psi_ref)
784     pde.setSymmetryOn()
785     #pde.getSolverOptions().setTolerance(1e-13)
786     psi_ref=pde.getSolution()
787     del pde
788     if self.getDataType()==DataSource.GRAVITY:
789     data = -grad(psi_ref, ReducedFunction(domain))
790     else:
791     data = self._reference_data*self.__B_b-grad(psi_ref, ReducedFunction(domain))
792 gross 4121
793 caltinay 4178 x=ReducedFunction(domain).getX()
794 caltinay 4132 if self.__full_knowledge:
795     sigma = whereNegative(x[DIM-1])
796     else:
797     sigma=1.
798 caltinay 4178 # limit mask to non-padding in horizontal area
799 caltinay 4132 for i in range(DIM-1):
800     x_i=x[i]
801     sigma=sigma * wherePositive(x_i) * whereNegative(x_i-(sup(x_i)+inf(x_i)))
802     # limit mask to one cell thickness at z=0
803     z=x[DIM-1]
804     oo=int(self.__data_offset/spacing[DIM-1]+0.5)*spacing[DIM-1]
805     sigma = sigma * whereNonNegative(z-oo) * whereNonPositive(z-oo-spacing[DIM-1])
806     return data,sigma
807 caltinay 4178
808 caltinay 4132 def getReferenceProperty(self, domain=None):
809     """
810 caltinay 4178 Returns the reference `Data` object that was used to generate
811 caltinay 4132 the gravity/susceptibility anomaly data.
812 caltinay 4178
813 caltinay 4132 :return: the density or susceptibility anomaly used to create the
814     survey data
815     :note: it can be assumed that in the first call the ``domain``
816     argument is present so the actual anomaly data can be created.
817     In subsequent calls this may not be true.
818     :note: method needs to be overwritten
819     """
820 caltinay 4178 raise NotImplementedError()
821    
class SyntheticFeatureData(SyntheticDataBase):
    """
    Uses a list of `SourceFeature` objects to define synthetic anomaly data.
    """
    def __init__(self, data_type,
                       features,
                       DIM=2,
                       number_of_elements=10,
                       length=1*U.km,
                       B_b=None,
                       data_offset=0,
                       full_knowledge=False,
                       spherical=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param features: list of features. It is recommended that the features
                         are located entirely below the surface.
        :type features: ``list`` of `SourceFeature`
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latitude,
                    B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the
                            surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` spherical coordinates are used.
        :type spherical: ``Bool``
        """
        super(SyntheticFeatureData,self).__init__(
                                 data_type=data_type, DIM=DIM,
                                 number_of_elements=number_of_elements,
                                 length=length, B_b=B_b,
                                 data_offset=data_offset,
                                 full_knowledge=full_knowledge,
                                 spherical=spherical)
        self._features = features

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        # build lazily on the first call (when a domain is available);
        # 'is None' avoids an elementwise comparison once the escript Data
        # object has been assigned
        if self._reference_data is None:
            x = domain.getX()
            k = 0.
            # overlay features in order: where masks overlap, later features
            # overwrite earlier ones
            for f in self._features:
                m = f.getMask(x)
                k = k*(1-m) + f.getValue(x)*m
            self._reference_data = k
        return self._reference_data
881 caltinay 4178
class SyntheticData(SyntheticDataBase):
    """
    Defines synthetic gravity/magnetic data based on harmonic property anomaly

        rho = amplitude * sin(n_depth * pi /depth * (z+depth_offset)) * sin(n_length * pi /length * (x - shift) )

    for all x and z<=0. For z>0 rho = 0.
    """
    def __init__(self, data_type,
                       n_length=1,
                       n_depth=1,
                       depth_offset=0.,
                       depth=None,
                       amplitude=None,
                       DIM=2,
                       number_of_elements=10,
                       length=1*U.km,
                       B_b=None,
                       data_offset=0,
                       full_knowledge=False,
                       spherical=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param n_length: number of oscillations in the anomaly data within the
                         observation region
        :type n_length: ``int``
        :param n_depth: number of oscillations in the anomaly data below
                        surface
        :type n_depth: ``int``
        :param depth_offset: vertical offset of the data
        :type depth_offset: ``float``
        :param depth: vertical extent in the anomaly data. If not present the
                      depth of the domain is used.
        :type depth: ``float``
        :param amplitude: data amplitude. Default value is 200 U.kg/U.m**3 for
                          gravity and 0.1 for magnetic data.
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latitude,
                    B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the
                            surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` spherical coordinates are used
        :type spherical: ``Bool``
        """
        super(SyntheticData,self).__init__(
                                 data_type=data_type, DIM=DIM,
                                 number_of_elements=number_of_elements,
                                 length=length, B_b=B_b,
                                 data_offset=data_offset,
                                 full_knowledge=full_knowledge,
                                 spherical=spherical)
        self.__n_length = n_length
        self.__n_depth = n_depth
        self.depth = depth
        self.depth_offset = depth_offset
        # pick a data-type dependent default amplitude; 'is None' for the
        # singleton comparison (PEP 8)
        if amplitude is None:
            if data_type == DataSource.GRAVITY:
                amplitude = 200 * U.kg/U.m**3
            else:
                amplitude = 0.1
        self.__amplitude = amplitude

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        if self._reference_data is None:
            DIM = domain.getDim()
            x = domain.getX()
            # vertical profile: harmonic in the scaled depth coordinate z2,
            # clipped to zero outside 0 <= z2 <= 1
            z = x[DIM-1]
            dd = self.depth
            if dd is None: dd = inf(z)
            z2 = (z+self.depth_offset)/(self.depth_offset-dd)
            k = sin(self.__n_depth * np.pi * z2) * whereNonNegative(z2) * whereNonPositive(z2-1.) * self.__amplitude
            # modulate laterally with n_length oscillations across the extent
            # of each horizontal direction
            for i in range(DIM-1):
                x_i = x[i]
                min_x = inf(x_i)
                max_x = sup(x_i)
                k *= sin(self.__n_length*np.pi*(x_i-min_x)/(max_x-min_x))
            self._reference_data = k
        return self._reference_data
975 gross 4106

  ViewVC Help
Powered by ViewVC 1.1.26