/[escript]/trunk/downunder/py_src/datasources.py
Revision 6661 - Mon Apr 9 04:08:21 2018 UTC, by jfenwick
File MIME type: text/x-python
File size: 59834 byte(s)
Log message: Removing some "from X import *"
##############################################################################
#
# Copyright (c) 2003-2018 by The University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Apache License, version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development 2012-2013 by School of Earth Sciences
# Development from 2014 by Centre for Geoscience Computing (GeoComp)
#
##############################################################################

"""Data readers/providers for inversions"""

from __future__ import print_function, division

__copyright__="""Copyright (c) 2003-2018 by The University of Queensland
http://www.uq.edu.au
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Apache License, version 2.0
http://www.apache.org/licenses/LICENSE-2.0"""
__url__="https://launchpad.net/escript-finley"

__all__ = ['DataSource', 'ErMapperData', 'NumpyData', \
        'SyntheticDataBase', 'SyntheticFeatureData', 'SyntheticData',
        'SmoothAnomaly', 'SeismicSource']

import logging
import numpy as np
import tempfile
import esys.escript as es
from esys.escript import ReducedFunction, FunctionOnBoundary, Scalar
from esys.escript import unitsSI as U
from esys.escript.linearPDEs import LinearSinglePDE
import esys.escript.util as esu
from .coordinates import ReferenceSystem, CartesianReferenceSystem

HAS_RIPLEY = True
try:
    from esys.ripley import *
except ImportError as e:
    HAS_RIPLEY = False

try:
    from scipy.io.netcdf import netcdf_file
    __all__ += ['NetCdfData']
except ImportError:
    pass

try:
    import pyproj
    HAVE_PYPROJ=True
except ImportError:
    HAVE_PYPROJ=False

try:
    import osgeo.osr
    HAVE_GDAL=True
except ImportError:
    HAVE_GDAL=False

def getUTMZone(lon, lat, wkt_string=None):
    """
    Determines the UTM zone for the given longitude/latitude values.
    If the coordinates appear to be projected already, the zone is taken
    from the supplied WKT string (if any, and if the ``gdal`` module is
    available), otherwise it is computed from the median longitude.

    :param lon: longitude value(s)
    :param lat: latitude value(s)
    :param wkt_string: Well-known text (WKT) string describing the
                       coordinate system used, if any
    :rtype: ``int``
    """

    logger = logging.getLogger('inv.datasources.getUTMZone')
    zone = 0

    nplon=np.array(lon)
    nplat=np.array(lat)
    if np.abs(nplon).max()>360.0 or np.abs(nplat).max()>180.0:
        if HAVE_GDAL and (wkt_string is not None):
            srs = osgeo.osr.SpatialReference()
            result=srs.ImportFromWkt(wkt_string)
            if result==0:
                zone = srs.GetUTMZone()
    else:
        # determine UTM zone from the input data
        zone = int(np.median((np.floor((nplon + 180)/6) + 1) % 60))

    logger.debug("Determined UTM zone %d."%zone)
    return zone
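# A quick worked example of the zone formula above (illustrative only):
# for a longitude of 138.6 degrees, floor((138.6+180)/6)+1 = floor(53.1)+1
# = 54, so getUTMZone(138.6, -34.9) returns zone 54.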

def LatLonToUTM(lon, lat, wkt_string=None):
    """
    Converts one or more longitude,latitude pairs to the corresponding x,y
    coordinates in the Universal Transverse Mercator projection.

    :note: The ``pyproj`` module is required unless the input coordinates are
           determined to be already projected. If it is not found an exception
           is raised.
    :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
           not available to convert the string, then the input values are
           assumed to be using the Clarke 1866 ellipsoid.

    :param lon: longitude value(s)
    :type lon: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param lat: latitude value(s)
    :type lat: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param wkt_string: Well-known text (WKT) string describing the coordinate
                       system used. The ``gdal`` module is used to convert
                       the string to the corresponding Proj4 string.
    :type wkt_string: ``str``
    :rtype: ``tuple``
    """

    logger = logging.getLogger('inv.datasources.LatLonToUTM')

    nplon=np.array(lon)
    nplat=np.array(lat)
    zone = getUTMZone(nplon, nplat, wkt_string)

    if np.abs(nplon).max()>360.0 or np.abs(nplat).max()>180.0:
        logger.debug('Coordinates appear to be projected. Passing through.')
        return lon,lat,zone

    logger.debug('Need to project coordinates.')
    if not HAVE_PYPROJ:
        logger.error("Cannot import pyproj! Exiting.")
        raise ImportError("In order to perform coordinate transformations on "
            "the data you are using the 'pyproj' Python module is required but "
            "was not found. Please install the module and try again.")

    p_src=None
    if HAVE_GDAL and (wkt_string is not None) and len(wkt_string)>0:
        srs = osgeo.osr.SpatialReference()
        result=srs.ImportFromWkt(wkt_string)
        try:
            p_src = pyproj.Proj(srs.ExportToProj4())
        except RuntimeError as e:
            logger.warning('pyproj returned exception: %s [wkt=%s]'%(e,wkt_string))

    if p_src is None:
        if HAVE_GDAL:
            reason="no wkt string provided."
        else:
            reason="the gdal python module is not available."
        logger.warning("Assuming lon/lat coordinates on Clarke 1866 ellipsoid since "+reason)
        p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')

    # check for hemisphere
    if np.median(nplat) < 0.:
        south='+south '
    else:
        south=''
    p_dest = pyproj.Proj('+proj=utm +zone=%d %s+units=m +ellps=WGS84'%(zone,south))
    x,y=pyproj.transform(p_src, p_dest, lon, lat)
    return x,y,zone
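# Usage sketch (illustrative; assumes pyproj is installed, and the sample
# coordinates are hypothetical):
#
#   x, y, zone = LatLonToUTM([138.5, 138.6], [-34.9, -35.0])
#   print("zone %d, easting %s, northing %s" % (zone, x, y))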

class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    This is an abstract base class that implements common functionality.
    Methods to be overwritten by subclasses are marked as such.
    This class assumes 2D data which is mapped to a slice of a 3D domain.
    For other setups override the methods as required.
    """

    GRAVITY, MAGNETIC, ACOUSTIC, MT = list(range(4))

    def __init__(self, reference_system=None, tags=[]):
        """
        Constructor. Sets some defaults and initializes logger.

        :param tags: a list of tags associated with the data set.
        :type tags: ``list`` of almost any type (typically `str`)
        :param reference_system: the reference coordinate system
        :type reference_system: ``None`` or `ReferenceSystem`
        """
        if not isinstance(tags, list):
            raise ValueError("tags argument must be a list.")
        self.__tags=tags
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)
        self.__subsampling_factor=1
        if not reference_system:
            self.__reference_system = CartesianReferenceSystem()
        else:
            self.__reference_system = reference_system

        if self.__reference_system.isCartesian():
            self.__v_scale=1.
        else:
            self.__v_scale=1./self.getReferenceSystem().getHeightUnit()

    def getTags(self):
        """
        returns the list of tags

        :rtype: ``list``
        """
        return self.__tags

    def hasTag(self, tag):
        """
        returns true if the data set has tag ``tag``

        :rtype: ``bool``
        """
        return tag in self.__tags

    def getReferenceSystem(self):
        """
        returns the reference coordinate system

        :rtype: `ReferenceSystem`
        """
        return self.__reference_system

    def getHeightScale(self):
        """
        returns the height scale factor to convert from meters to the
        appropriate units of the reference system used.

        :rtype: ``float``
        """
        return self.__v_scale

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDataType(self):
        """
        Returns the type of survey data managed by this source.
        Subclasses must return `GRAVITY` or `MAGNETIC` or `ACOUSTIC` as appropriate.
        """
        raise NotImplementedError

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        This method is called by the `DomainBuilder` to retrieve the survey
        data as `Data` objects on the given domain.

        Subclasses should return one or more `Data` objects with survey data
        interpolated on the given `escript` domain. The exact return type
        depends on the type of data.

        :param domain: the escript domain to use
        :type domain: `esys.escript.Domain`
        :param origin: the origin coordinates of the domain
        :type origin: ``tuple`` or ``list``
        :param NE: the number of domain elements in each dimension
        :type NE: ``tuple`` or ``list``
        :param spacing: the cell sizes (node spacing) in the domain
        :type spacing: ``tuple`` or ``list``
        """
        raise NotImplementedError

    def getUtmZone(self):
        """
        All data source coordinates are converted to UTM (Universal Transverse
        Mercator) in order to have useful domain extents. Subclasses should
        implement this method and return the UTM zone number of the projected
        coordinates.
        """
        raise NotImplementedError

    def setSubsamplingFactor(self, f):
        """
        Sets the data subsampling factor (default=1).

        The factor is applied in all dimensions. For example a 2D dataset
        with 300 x 150 data points will be reduced to 150 x 75 when a
        subsampling factor of 2 is used.
        This becomes important when adding data of varying resolution to
        a `DomainBuilder`.
        """
        self.__subsampling_factor=f

    def getSubsamplingFactor(self):
        """
        Returns the subsampling factor that was set via `setSubsamplingFactor`
        (see there).
        """
        return self.__subsampling_factor
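# Minimal sketch of a custom data source (illustrative only; the class name
# and constant values below are made up). A subclass only needs to supply
# extents, data type, UTM zone and the actual survey data:
#
#   class MyConstantGravity(DataSource):
#       def getDataExtents(self):
#           return ([0., 0.], [10, 10], [100., 100.])
#       def getDataType(self):
#           return DataSource.GRAVITY
#       def getUtmZone(self):
#           return 0
#       def getSurveyData(self, domain, origin, NE, spacing):
#           data = Scalar(1e-6, ReducedFunction(domain))
#           sigma = Scalar(1., ReducedFunction(domain))
#           return data, sigma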


##############################################################################
class ErMapperData(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, data_type, headerfile, datafile=None, altitude=0.,
                 error=None, scale_factor=None, null_value=None,
                 reference_system=None):
        """
        :param data_type: type of data, must be `GRAVITY` or `MAGNETIC`
        :type data_type: ``int``
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :type headerfile: ``str``
        :param datafile: ER Mapper binary data file name. If not supplied the
                         name of the header file without '.ers' is assumed
        :type datafile: ``str``
        :param altitude: altitude of measurements above ground in meters
        :type altitude: ``float``
        :param error: constant value to use for the data uncertainties.
                      If a value is supplied, it is scaled by the same factor
                      as the measurements. If not provided the error is
                      assumed to be 2 units for all measurements (i.e. 0.2
                      mGal and 2 nT for gravity and magnetic, respectively)
        :type error: ``float``
        :param scale_factor: the measurements and error values are scaled by
                             this factor. By default, gravity data is assumed
                             to be given in 1e-6 m/s^2 (0.1 mGal), while
                             magnetic data is assumed to be in 1e-9 T (1 nT).
        :type scale_factor: ``float``
        :param null_value: value that is used in the file to mark undefined
                           areas. This information is usually included in the
                           file.
        :type null_value: ``float``
        :param reference_system: reference coordinate system to be used.
                                 For a Cartesian reference (default) the
                                 appropriate UTM transformation is applied.
        :type reference_system: `ReferenceSystem`
        :note: consistency between the reference coordinate system and the
               coordinate system used in the data source is not checked.
        """
        super(ErMapperData, self).__init__(reference_system, [ headerfile ])
        self.__headerfile=headerfile
        if datafile is None:
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__altitude=altitude
        self.__data_type=data_type
        self.__utm_zone = None
        self.__scale_factor = scale_factor
        self.__null_value = null_value
        self.__error_value = error
        self.__readHeader()

    def __readHeader(self):
        self.logger.debug("Checking Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        with open(self.__headerfile, 'r') as f:
            metadata = f.readlines()
        start=-1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ER Mapper header file ("DatasetHeader" not found)')

        # parse header file filling dictionary of found values
        md_dict={}
        section=[]
        for i in range(start, len(metadata)):
            line=metadata[i].strip()
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value
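        # For reference, a tiny illustrative header fragment (field values
        # made up) that the loop above would parse into
        # {'RasterInfo.NrOfLines': '150', 'RasterInfo.NrOfCellsPerLine': '300'}:
        #
        #   DatasetHeader Begin
        #           RasterInfo Begin
        #                   NrOfLines = 150
        #                   NrOfCellsPerLine = 300
        #           RasterInfo End
        #   DatasetHeader End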

        # check that the data format/type is supported
        try:
            if md_dict['ByteOrder'] != 'LSBFirst':
                raise RuntimeError('Unsupported byte order '+md_dict['ByteOrder'])
        except KeyError:
            self.logger.warning("Byte order not specified. Assuming LSB first.")

        try:
            if md_dict['DataType'] != 'Raster':
                raise RuntimeError('Unsupported data type '+md_dict['DataType'])
        except KeyError:
            self.logger.warning("Data type not specified. Assuming raster data.")

        try:
            if md_dict['RasterInfo.CellType'] == 'IEEE4ByteReal':
                self.__celltype = DATATYPE_FLOAT32
            elif md_dict['RasterInfo.CellType'] == 'IEEE8ByteReal':
                self.__celltype = DATATYPE_FLOAT64
            elif md_dict['RasterInfo.CellType'] == 'Signed32BitInteger':
                self.__celltype = DATATYPE_INT32
            else:
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warning("Cell type not specified. Assuming IEEE4ByteReal.")
            self.__celltype = DATATYPE_FLOAT32

        try:
            fileOffset = int(md_dict['HeaderOffset'])
        except (KeyError, ValueError):
            fileOffset = 0
        if fileOffset > 0:
            raise RuntimeError("ER Mapper data with header offset >0 not supported.")

        # now extract required information
        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine extents of data")

        ### mask/null value
        # note that NaN is always filtered out in ripley
        if self.__null_value is None:
            try:
                self.__null_value = float(md_dict['RasterInfo.NullCellValue'])
            except (KeyError, ValueError):
                self.logger.debug("Could not determine null value, using default.")
                self.__null_value = 99999
        elif not isinstance(self.__null_value, float) and not isinstance(self.__null_value, int):
            raise TypeError("Invalid type of null_value parameter")

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except Exception:
            self.logger.warning("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        # data sets have origin in top-left corner so y runs top-down and
        # we need to flip accordingly
        originY-=NY*spacingY

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=str(ds.GetProjection())
            except Exception:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warning('GDAL not available or file read error, assuming GDA94 data')
            if self.getReferenceSystem().isCartesian():
                originX_UTM,originY_UTM,zone = LatLonToUTM(originX, originY, wkt)
                op1X,op1Y,_ = LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
                # we are rounding to avoid interpolation issues
                spacingX = np.round(op1X-originX_UTM)
                spacingY = np.round(op1Y-originY_UTM)
                originX = np.round(originX_UTM)
                originY = np.round(originY_UTM)
                self.__utm_zone = zone
            else:
                op1X, op1Y = originX+spacingX, originY+spacingY
                spacingX = np.round(op1X-originX,5)
                spacingY = np.round(op1Y-originY,5)
                originX = np.round(originX,5)
                originY = np.round(originY,5)

        self.__dataorigin=[originX, originY]
        self.__delta = [spacingX, spacingY]
        self.__nPts = [NX, NY]
        self.__origin = [originX, originY]

        ### scale factor
        if self.__scale_factor is None:
            if self.__data_type == self.GRAVITY:
                self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
                self.__scale_factor = 1e-6
            else:
                self.logger.info("Assuming magnetic data units are 'nT'.")
                self.__scale_factor = 1e-9

        ### error value
        if self.__error_value is None:
            self.__error_value = 2.
        elif not isinstance(self.__error_value, float) and not isinstance(self.__error_value, int):
            raise TypeError("Invalid type of error parameter")

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        return self.__data_type

    def getSurveyData(self, domain, origin, NE, spacing):
        # readBinaryGrid and the DATATYPE/BYTEORDER constants come from
        # esys.ripley, so check for its availability first
        if not HAS_RIPLEY:
            raise RuntimeError("Ripley module not available for reading")
        FS = ReducedFunction(domain)
        nValues=self.__nPts
        # determine base location of this dataset within the domain
        first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        # determine the resolution difference between domain and data.
        # If domain has twice the resolution we can double up the data etc.
        multiplier=[int(round(self.__delta[i]/spacing[i])) for i in range(len(self.__nPts))]
        if domain.getDim()==3:
            first.append(int((self.getHeightScale()*self.__altitude-origin[2])/spacing[2]))
            multiplier=multiplier+[1]
            nValues=nValues+[1]

        reverse = [0]*domain.getDim()
        byteorder=BYTEORDER_NATIVE
        self.logger.debug("calling readBinaryGrid with first=%s, nValues=%s, multiplier=%s, reverse=%s"%(str(first),str(nValues),str(multiplier),str(reverse)))
        data = readBinaryGrid(self.__datafile, FS, shape=(),
                fill=self.__null_value, byteOrder=byteorder,
                dataType=self.__celltype, first=first, numValues=nValues,
                multiplier=multiplier, reverse=reverse)
        sigma = self.__error_value * esu.whereNonZero(data-self.__null_value)

        data = data * self.__scale_factor
        sigma = sigma * self.__scale_factor
        return data, sigma

    def getUtmZone(self):
        return self.__utm_zone
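# Usage sketch (illustrative; the file name is hypothetical):
#
#   source = ErMapperData(DataSource.GRAVITY, headerfile='survey.ers')
#   (x0, y0), (nx, ny), (dx, dy) = source.getDataExtents()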


##############################################################################
class NetCdfData(DataSource):
    """
    Data Source for gridded netCDF data that use CF/COARDS conventions.
    """
    def __init__(self, data_type, filename, altitude=0., data_variable=None,
                 error=None, scale_factor=None, null_value=None,
                 reference_system=None):
        """
        :param filename: file name for survey data in netCDF format
        :type filename: ``str``
        :param data_type: type of data, must be `GRAVITY` or `MAGNETIC`
        :type data_type: ``int``
        :param altitude: altitude of measurements in meters
        :type altitude: ``float``
        :param data_variable: name of the netCDF variable that holds the data.
                              If not provided an attempt is made to determine
                              the variable and an exception thrown on failure.
        :type data_variable: ``str``
        :param error: either the name of the netCDF variable that holds the
                      uncertainties of the measurements or a constant value
                      to use for the uncertainties. If a constant value is
                      supplied, it is scaled by the same factor as the
                      measurements. If not provided the error is assumed to
                      be 2 units for all measurements (i.e. 0.2 mGal and 2 nT
                      for gravity and magnetic, respectively)
        :type error: ``str`` or ``float``
        :param scale_factor: the measurements and error values are scaled by
                             this factor. By default, gravity data is assumed
                             to be given in 1e-6 m/s^2 (0.1 mGal), while
                             magnetic data is assumed to be in 1e-9 T (1 nT).
        :type scale_factor: ``float``
        :param null_value: value that is used in the file to mark undefined
                           areas. This information is usually included in the
                           file.
        :type null_value: ``float``
        :param reference_system: reference coordinate system to be used.
                                 For a Cartesian reference (default) the
                                 appropriate UTM transformation is applied.
        :type reference_system: `ReferenceSystem`
        :note: it is the responsibility of the caller to ensure all data
               sources and the domain builder use the same reference system.
        """
        super(NetCdfData,self).__init__(reference_system, [filename])
        self.__filename=filename
        if data_type not in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for data_type parameter")
        self.__data_type = data_type
        self.__altitude = altitude
        self.__data_name = data_variable
        self.__scale_factor = scale_factor
        self.__null_value = null_value
        self.__utm_zone = None
        self.__readMetadata(error)

    def __readMetadata(self, error):
        self.logger.debug("Checking Data Source: %s"%self.__filename)

        # first check what sort of file we are dealing with
        nct=es.NcFType(self.__filename)
        if nct=='u':
            raise RuntimeError('Unsupported filetype for '+str(self.__filename))
        elif nct=='C' or nct=='c':
            nct=3
            f=netcdf_file(self.__filename, 'r')
        elif nct=='4':
            nct=4
            import netCDF4 as nc4
            f=nc4.Dataset(self.__filename, 'r')
        else:
            raise RuntimeError('Unable to determine type of file '+str(self.__filename)+" "+nct)

        ### longitude- / X-dimension and variable
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                lon_name=n
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")

        # CF/COARDS states that coordinate variables have the same name as
        # the dimensions
        if lon_name not in f.variables:
            raise RuntimeError("Could not determine longitude variable")
        longitude=f.variables.pop(lon_name)

        ### latitude- / Y-dimension and variable
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                lat_name=n
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")
        if lat_name not in f.variables:
            raise RuntimeError("Could not determine latitude variable")
        latitude=f.variables.pop(lat_name)

        # correct for netCDF4 having a different type
        if nct==4:
            NX=NX.size
            NY=NY.size

        ### data variable
        if self.__data_name is not None:
            try:
                dims = f.variables[self.__data_name].dimensions
                if not ((lat_name in dims) and (lon_name in dims)):
                    raise ValueError("Invalid data variable name supplied")
            except KeyError:
                raise ValueError("Invalid data variable name supplied")
        else:
            for n in sorted(f.variables.keys()):
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    self.__data_name=n
                    break
        if self.__data_name is None:
            raise RuntimeError("Could not determine data variable")
        self.__data_name=str(self.__data_name) # try to deal with unicode->str issue in py2

        datavar = f.variables[self.__data_name]

        ### error value/variable
        self.__error_name = None
        if isinstance(error, str):
            try:
                dims = f.variables[error].dimensions
                if not ((lat_name in dims) and (lon_name in dims)):
                    raise ValueError("Invalid error variable name supplied")
            except KeyError:
                raise ValueError("Invalid error variable name supplied")
            self.__error_name = error
        elif isinstance(error, float) or isinstance(error, int):
            self.__error_value = float(error)
        elif error is None:
            self.__error_value = 2.
        else:
            raise TypeError("Invalid type of error parameter")

        ### mask/null value
        # note that NaN is always filtered out in ripley
        if self.__null_value is None:
            if hasattr(datavar, 'missing_value'):
                self.__null_value = float(datavar.missing_value)
            elif hasattr(datavar, '_FillValue'):
                self.__null_value = float(datavar._FillValue)
            else:
                self.logger.debug("Could not determine null value, using default.")
                self.__null_value = 99999
        elif not isinstance(self.__null_value, float) and not isinstance(self.__null_value, int):
            raise TypeError("Invalid type of null_value parameter")

        # try to determine units of data - this is disabled until we obtain a
        # file with valid information
        #if hasattr(f.variables[data_name], 'units'):
        #   units=f.variables[data_name].units

        ### scale factor
        if self.__scale_factor is None:
            if self.__data_type == self.GRAVITY:
                self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
                self.__scale_factor = 1e-6
            else:
                self.logger.info("Assuming magnetic data units are 'nT'.")
                self.__scale_factor = 1e-9

        # see if there is a WKT string to convert coordinates
        try:
            wkt_string=str(datavar.esri_pe_string)
            self.logger.debug("wkt_string is: %s"%wkt_string)
        except AttributeError:
            wkt_string=None

        # CF GDAL output: see if there is a grid_mapping attribute which
        # contains the name of a dummy variable that holds information about
        # mapping. GDAL puts the WKT string into spatial_ref:
        if wkt_string is None:
            try:
                mapvar=f.variables[datavar.grid_mapping]
                wkt_string=str(mapvar.spatial_ref)
                self.logger.debug("wkt_string is: %s"%wkt_string)
            except (AttributeError, KeyError):
                self.logger.debug("no wkt_string found!")

        # actual_range & geospatial_lon_min/max do not always contain correct
        # values so we have to obtain the min/max in a less efficient way:
        if nct==4:
            lon_range=min(longitude),max(longitude)
            lat_range=min(latitude),max(latitude)
        else:
            lon_range=longitude.data.min(),longitude.data.max()
            lat_range=latitude.data.min(),latitude.data.max()
        if self.getReferenceSystem().isCartesian():
            lon_range,lat_range,zone=LatLonToUTM(lon_range, lat_range, wkt_string)
            self.__utm_zone = zone

        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0]]

        # see if lat or lon is stored in reverse order to domain conventions
        self.__reverse=[False,False]
        d=longitude
        if nct==3:
            d=d.data
        if d[0]>d[-1]:
            self.__reverse[0]=True
        d=latitude
        if nct==3:
            d=d.data
        if d[0]>d[-1]:
            self.__reverse[1]=True
        self.__nPts=[NX, NY]
        self.__origin=[lon_range[0],lat_range[0]]
        # we are rounding to avoid interpolation issues
        if self.getReferenceSystem().isCartesian():
            # rounding will give us about meter-accuracy with UTM coordinates
            r=0
        else:
            # this should give us about meter-accuracy with lat/lon coords
            r=5
        self.__delta=[np.round(lengths[i]/self.__nPts[i],r) for i in range(2)]
        del longitude, latitude, d, datavar
        f.close()

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        return self.__data_type

    def getScaleFactor(self):
        """
        returns the data scale factor for adjusting measurements to SI
        """
        return self.__scale_factor

    def getSurveyData(self, domain, origin, NE, spacing):
        if not HAS_RIPLEY:
            raise RuntimeError("Ripley module not available for reading")
        FS=ReducedFunction(domain)
        nValues=self.__nPts
        # determine base location of this dataset within the domain
        first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        # determine the resolution difference between domain and data.
        # If domain has twice the resolution we can double up the data etc.
        multiplier=[int(round(self.__delta[i]/spacing[i])) for i in range(len(self.__nPts))]
        reverse = [int(self.__reverse[i]) for i in range(len(self.__reverse))]

        if domain.getDim() == 3:
            first.append(int((self.getHeightScale()*self.__altitude-origin[2])/spacing[2]))
            multiplier = multiplier + [1]
            nValues = nValues + [1]
            reverse = reverse + [0]

        self.logger.debug("calling readNcGrid with dataname=%s, first=%s, nValues=%s, multiplier=%s, reverse=%s"%(
                self.__data_name, str(first), str(nValues), str(multiplier), str(reverse)))
        data = ripleycpp._readNcGrid(self.__filename, self.__data_name, FS,
                shape=(), fill=self.__null_value, first=first,
                numValues=nValues, multiplier=multiplier, reverse=reverse)

        if self.__error_name is not None:
            self.logger.debug("calling readNcGrid with dataname=%s, first=%s, nValues=%s, multiplier=%s, reverse=%s"%(
                    self.__error_name, str(first), str(nValues), str(multiplier), str(reverse)))
            sigma = ripleycpp._readNcGrid(self.__filename, self.__error_name,
                    FS, shape=(), fill=0., first=first, numValues=nValues,
                    multiplier=multiplier, reverse=reverse)
        else:
            # arithmetic with NaN produces undesired results so we replace
            # NaNs by a large positive number which (hopefully) is not present
            # in the real dataset
            if np.isnan(self.__null_value):
                data.replaceNaN(1.e300)
                self.__null_value = 1.e300
            sigma = self.__error_value * esu.whereNonZero(data-self.__null_value)

        data = data * self.__scale_factor
        sigma = sigma * self.__scale_factor
        return data, sigma

    def getUtmZone(self):
        return self.__utm_zone

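# Usage sketch (illustrative; the file name is hypothetical and the file must
# follow CF/COARDS conventions):
#
#   source = NetCdfData(DataSource.MAGNETIC, 'survey.nc', altitude=300.)
#   print(source.getDataExtents(), source.getUtmZone())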
##############################################################################
class SourceFeature(object):
    """
    A feature adds a density/susceptibility distribution to (parts of) a
    domain of a synthetic data source, for example a layer of a specific
    rock type or a simulated ore body.
    """
    def getValue(self, x):
        """
        Returns the value for the area covered by mask. It can be constant
        or a `Data` object with spatial dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature. That is,
        the mask is non-zero where the value returned by `getValue()` should
        be applied, and zero elsewhere.
        """
        raise NotImplementedError

class SmoothAnomaly(SourceFeature):
    """
    A source feature in the form of a blob (roughly gaussian).
    """
    def __init__(self, lx, ly, lz, x, y, depth, v_inner=None, v_outer=None):
        """
        Initializes the smooth anomaly data.

        :param lx: size of blob in x-dimension
        :param ly: size of blob in y-dimension
        :param lz: size of blob in z-dimension
        :param x: location of blob in x-dimension
        :param y: location of blob in y-dimension
        :param depth: depth of blob
        :param v_inner: value in the centre of the blob
        :param v_outer: value in the periphery of the blob
        """
        self.x=x
        self.y=y
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.depth=depth
        self.v_inner=v_inner
        self.v_outer=v_outer
        self.value=None
        self.mask=None

    def getValue(self, x):
        if self.value is None:
            if self.v_outer is None or self.v_inner is None:
                self.value=0
            else:
                DIM=x.getDomain().getDim()
                alpha=-esu.log(abs(self.v_outer/self.v_inner))*4
                value=esu.exp(-alpha*((x[0]-self.x)/self.lx)**2)
                value=value*esu.exp(-alpha*((x[DIM-1]+self.depth)/self.lz)**2)
                self.value=esu.maximum(abs(self.v_outer), abs(self.v_inner*value))
                if self.v_inner<0: self.value=-self.value

        return self.value

    def getMask(self, x):
        DIM=x.getDomain().getDim()
        m=esu.whereNonNegative(x[DIM-1]+self.depth+self.lz/2) * esu.whereNonPositive(x[DIM-1]+self.depth-self.lz/2) \
            *esu.whereNonNegative(x[0]-(self.x-self.lx/2)) * esu.whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM>2:
            m*=esu.whereNonNegative(x[1]-(self.y-self.ly/2)) * esu.whereNonPositive(x[1]-(self.y+self.ly/2))
        self.mask = m
        return m

##############################################################################
class SyntheticDataBase(DataSource):
    """
    Base class to define reference data based on a given property distribution
    (density or susceptibility). Data are collected from a square region of
    vertical extent ``length`` on a grid with ``number_of_elements`` cells in
    each direction.

    The synthetic data are constructed by solving the appropriate forward
    problem. Data can be sampled with an offset from the surface at z=0 or
    using the entire subsurface region.
    """
    def __init__(self, data_type,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param DIM: number of spatial dimensions
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latitude, B_longitude].
                    Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``bool``
        """
        super(SyntheticDataBase, self).__init__()
        if data_type not in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for data_type parameter")
        self.DIM=DIM
        self.number_of_elements=number_of_elements
        self.length=length
        self.__data_type = data_type
        self.__full_knowledge = full_knowledge
        self.__data_offset = data_offset
        self.__B_b = None
        # this is for Cartesian (FIXME ?)
        if data_type == self.MAGNETIC:
            if self.DIM < 3:
                self.__B_b = np.array([B_b[0], B_b[2]])
            else:
                self.__B_b = np.array([B_b[0], B_b[1], B_b[2]])
        self.__origin = [0]*(DIM-1)
        self.__delta = [float(length)/number_of_elements]*(DIM-1)
        self.__nPts = [number_of_elements]*(DIM-1)
        self._reference_data=None

    def getUtmZone(self):
        """
        returns a dummy UTM zone since this class does not use real coordinate
        values.
        """
        return 0

    def getDataExtents(self):
        """
        returns the lateral extent of the data set
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        returns the data type
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, number_of_elements, spacing):
        """
        returns the survey data placed on a given domain.

        :param domain: domain on which the data are to be placed
        :type domain: ``Domain``
        :param origin: origin of the domain
        :type origin: ``list`` of ``float``
        :param number_of_elements: number of elements (or cells) in each
                                   spatial direction used to span the domain
        :type number_of_elements: ``list`` of ``int``
        :param spacing: cell size in each spatial direction
        :type spacing: ``list`` of ``float``
        :return: observed gravity field or magnetic flux density for each cell
                 in the domain and for each cell an indicator 1/0 if the data
                 are valid or not.
        :rtype: pair of ``Scalar``
        """
        pde=LinearSinglePDE(domain)
        DIM=domain.getDim()
        x=domain.getX()
        # set the reference data
        k=self.getReferenceProperty(domain)
        # calculate the corresponding potential
        z=x[DIM-1]
        m_psi_ref=esu.whereZero(z-esu.sup(z))
        if self.getDataType()==DataSource.GRAVITY:
            pde.setValue(A=esu.kronecker(domain), Y=-4*np.pi*U.Gravitational_Constant*self._reference_data, q=m_psi_ref)
        else:
            pde.setValue(A=esu.kronecker(domain), X=self._reference_data*self.__B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        #pde.getSolverOptions().setTolerance(1e-13)
        psi_ref=pde.getSolution()
        del pde
        if self.getDataType()==DataSource.GRAVITY:
            data = -esu.grad(psi_ref, ReducedFunction(domain))
        else:
            data = self._reference_data*self.__B_b-esu.grad(psi_ref, ReducedFunction(domain))

        x=ReducedFunction(domain).getX()
        if self.__full_knowledge:
            sigma = esu.whereNegative(x[DIM-1])
        else:
            sigma=1.
            # limit mask to non-padding in horizontal area
            for i in range(DIM-1):
                x_i=x[i]
                sigma=sigma * esu.wherePositive(x_i) * esu.whereNegative(x_i-(esu.sup(x_i)+esu.inf(x_i)))
            # limit mask to one cell thickness at z=0
            z=x[DIM-1]
            oo=int(self.__data_offset/spacing[DIM-1]+0.5)*spacing[DIM-1]
            sigma = sigma * esu.whereNonNegative(z-oo) * esu.whereNonPositive(z-oo-spacing[DIM-1])
        return data,sigma

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.

        :return: the density or susceptibility anomaly used to create the
                 survey data
        :note: it can be assumed that in the first call the ``domain``
               argument is present so the actual anomaly data can be created.
               In subsequent calls this may not be true.
        :note: method needs to be overwritten
        """
        raise NotImplementedError()

class SyntheticFeatureData(SyntheticDataBase):
    """
    Uses a list of `SourceFeature` objects to define synthetic anomaly data.
    """
    def __init__(self, data_type,
                 features,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param features: list of features. It is recommended that the features
                         are located entirely below the surface.
        :type features: ``list`` of `SourceFeature`
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latitude, B_longitude].
                    Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``bool``
        """
        super(SyntheticFeatureData,self).__init__(
                data_type=data_type, DIM=DIM,
                number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge)
        self._features = features

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        if self._reference_data is None:
            DIM=domain.getDim()
            x=domain.getX()
            k=0.
            for f in self._features:
                m=f.getMask(x)
                k = k * (1-m) + f.getValue(x) * m
            self._reference_data = k
        return self._reference_data

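# Usage sketch (illustrative; all sizes and values are made up): a single
# smooth density blob buried below the surface, turned into synthetic
# gravity data:
#
#   blob = SmoothAnomaly(lx=200., ly=200., lz=100., x=500., y=500.,
#                        depth=250., v_inner=200., v_outer=1e-6)
#   source = SyntheticFeatureData(DataSource.GRAVITY, features=[blob])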
class SyntheticData(SyntheticDataBase):
    """
    Defines synthetic gravity/magnetic data based on the harmonic property
    anomaly

        rho = amplitude * sin(n_depth * pi / depth * (z + depth_offset))
                        * sin(n_length * pi / length * (x - shift))

    for all x and z<=0. For z>0 rho = 0.
    """
    def __init__(self, data_type,
                 n_length=1,
                 n_depth=1,
                 depth_offset=0.,
                 depth=None,
                 amplitude=None,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 s=0.):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param n_length: number of oscillations in the anomaly data within the
                         observation region
        :type n_length: ``int``
        :param n_depth: number of oscillations in the anomaly data below surface
        :type n_depth: ``int``
        :param depth_offset: vertical offset of the data
        :type depth_offset: ``float``
        :param depth: vertical extent of the anomaly data. If not present the
                      depth of the domain is used.
        :type depth: ``float``
        :param amplitude: data amplitude. Default value is 200 U.kg/U.m**3 for
                          gravity and 0.1 for magnetic data.
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latitude, B_longitude].
                    Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``bool``
        :param s: lateral shift of the anomaly pattern
        :type s: ``float``
        """
        super(SyntheticData,self).__init__(
                data_type=data_type, DIM=DIM,
                number_of_elements=number_of_elements,
                length=length, B_b=B_b,
                data_offset=data_offset,
                full_knowledge=full_knowledge)
        self.__n_length = n_length
        self.__n_depth = n_depth
        self.depth = depth
        self.depth_offset = depth_offset
        if amplitude is None:
            if data_type == DataSource.GRAVITY:
                amplitude = 200 * U.kg/U.m**3
            else:
                amplitude = 0.1
        self.__amplitude = amplitude
        self.__s = s

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        if self._reference_data is None:
            DIM=domain.getDim()
            x=domain.getX()
            # set the reference data
            z=x[DIM-1]
            dd=self.depth
            if dd is None: dd=esu.inf(z)
            z2=(z+self.depth_offset)/(self.depth_offset-dd)
            k=esu.sin(self.__n_depth * np.pi * z2) * esu.whereNonNegative(z2) * esu.whereNonPositive(z2-1.) * self.__amplitude
            for i in range(DIM-1):
                x_i=x[i]
                min_x=esu.inf(x_i)
                max_x=esu.sup(x_i)
                k*= esu.sin(self.__n_length*np.pi*(x_i-min_x-self.__s)/(max_x-min_x))
            self._reference_data = k
        return self._reference_data

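# Usage sketch (illustrative): one lateral oscillation and two oscillations
# with depth, sampled on a 20-element lateral grid:
#
#   source = SyntheticData(DataSource.GRAVITY, n_length=1, n_depth=2,
#                          number_of_elements=20, length=1*U.km)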
##############################################################################
class NumpyData(DataSource):
    """
    A data source that uses survey data from a ``numpy`` object or list
    instead of a file.
    """

    def __init__(self, data_type, data, error=1., length=1.*U.km, null_value=-1., tags=[], origin=None):
        """
        The dimensionality is inferred from the shape of ``data`` (1- and
        2-dimensional data is supported). The data origin is assumed to be
        at the coordinate origin.

        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param data: the survey data array. Note that for a cartesian coordinate
                     system the shape of the data is considered to be
                     (nz,ny,nx).
        :type data: ``numpy.array`` or ``list``
        :param error: constant value to use for the data uncertainties or a
                      numpy object with uncertainties for every data point.
        :type error: ``float`` or ``list`` or ``ndarray``
        :param length: side length(s) of the data slice/volume. This can be
                       a scalar to indicate constant length in all dimensions
                       or an array/list of values in each coordinate dimension.
        :type length: ``float`` or ``list`` or ``ndarray``
        :param null_value: value that is used in the undefined regions of the
                           survey data object.
        :type null_value: ``float``
        :param tags: a list of tags associated with the data set.
        :type tags: ``list`` of almost any type (typically `str`)
        :param origin: offset of the data origin from the coordinate origin
        :type origin: ``list`` of ``float``
        """
        super(NumpyData, self).__init__(tags=tags)
        if data_type not in [self.GRAVITY, self.MAGNETIC, self.ACOUSTIC, self.MT]:
            raise ValueError("Invalid value for data_type parameter")
        self.__data_type = data_type
        if not isinstance(data, np.ndarray) or data.dtype not in [np.float64, np.complex128]:
            self.__data = np.asarray(data, dtype=np.float64)
        else:
            self.__data = data
        DIM = len(self.__data.shape)
        if DIM not in (1,2):
            raise ValueError("NumpyData requires 1- or 2-dimensional data")
        self.__error = np.asarray(error, dtype=np.float64)
        if len(self.__error.shape) > 0 and \
                self.__error.shape != self.__data.shape:
            raise ValueError("error argument must be scalar or match the shape of the data")
        if isinstance(length, float) or isinstance(length, int):
            length = [float(length)] * DIM
        else:
            length = np.asarray(length, dtype=float)
            if len(length.shape) != 1 or length.shape[0] != DIM:
                raise ValueError("length must be scalar or an array with %d values"%DIM)
            length = length.tolist()

        self.__length = length
        self.__null_value = null_value
        self.__nPts = list(reversed(self.__data.shape))
        self.__delta = [length[i]/self.__nPts[i] for i in range(DIM)]
        if origin is None:
            self.__origin = [0.] * DIM
        else:
            self.__origin = origin[:DIM-1]

    def getDataExtents(self):
        return (self.__origin, self.__nPts, self.__delta)

    def getDataType(self):
        return self.__data_type

    def getSurveyData(self, domain, origin, NE, spacing):
        import os
        DIM=domain.getDim()
        if self.getDataType() == self.ACOUSTIC:
            x=esu.FunctionOnBoundary(domain).getX()
            BBX=esu.boundingBox(domain)
            z=x[DIM-1]
            mask= esu.whereZero( z - esu.inf(z)) # we don't use BBX[DIM-1][1] due to mountains
            for i in range(DIM-1):
                x_i=x[i]
                mask+=esu.whereNonPositive( x_i - self.__origin[i] ) + esu.whereNonNegative( x_i - ( self.__origin[i]+self.__length[i] ) )
            mask=1-esu.wherePositive(mask)

            data=esu.Data(0.,(2,), esu.FunctionOnBoundary(domain))
            step= [ self.__length[i]/self.__data.shape[i] for i in range(DIM-1) ]
            if DIM == 2:
                data[0] = esu.interpolateTable(self.__data.real, x[0], self.__origin[0], step[0])
                data[1] = esu.interpolateTable(self.__data.imag, x[0], self.__origin[0], step[0])
                if len(self.__error.shape) > 0:
                    sigma = esu.interpolateTable(self.__error, x[0], self.__origin[0], step[0])
                else:
                    sigma = esu.Scalar(self.__error.item(), esu.FunctionOnBoundary(domain))
            else:
                raise ValueError("3D domains are not supported yet.")
            data*=mask
            sigma*=mask
        elif self.getDataType() == self.MT:
            if DIM == 2:
                # boundary coordinates are needed for the error interpolation
                x=esu.FunctionOnBoundary(domain).getX()
                step= [ self.__length[i]/self.__data.shape[i] for i in range(DIM-1) ]
                if len(self.__error.shape) > 0:
                    sigma = esu.interpolateTable(self.__error, x[0], self.__origin[0], step[0])
                else:
                    sigma = esu.Scalar(self.__error.item(), esu.FunctionOnBoundary(domain))
                return self.__data, sigma
            else:
                raise ValueError("3D domains are not supported yet.")
        else:
            FS = ReducedFunction(domain)
            nValues = self.__nPts
            dataDIM = len(nValues)
            # determine base location of this dataset within the domain
            first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(dataDIM)]
            # determine the resolution difference between domain and data.
            # If domain has twice the resolution we can double up the data etc.
            multiplier=[int(round(self.__delta[i]/spacing[i])) for i in range(dataDIM)]
            if domain.getDim() > dataDIM:
                first.append(int(-origin[-1]/spacing[-1]))
                multiplier=multiplier+[1]
                nValues=nValues+[1]

            _handle, numpyfile = tempfile.mkstemp()
            os.close(_handle)
            self.__data.tofile(numpyfile)

            reverse=[0]*DIM
            byteorder=BYTEORDER_NATIVE
            datatype=DATATYPE_FLOAT64
            self.logger.debug("calling readBinaryGrid with first=%s, nValues=%s, multiplier=%s"%(str(first),str(nValues),str(multiplier)))
            data = readBinaryGrid(numpyfile, FS, shape=(),
                    fill=self.__null_value, byteOrder=byteorder, dataType=datatype,
                    first=first, numValues=nValues, multiplier=multiplier,
                    reverse=reverse)
            if len(self.__error.shape) > 0:
                self.__error.tofile(numpyfile)
                self.logger.debug("calling readBinaryGrid with first=%s, nValues=%s, multiplier=%s"%(str(first),str(nValues),str(multiplier)))
                sigma = readBinaryGrid(numpyfile, FS, shape=(),
                        fill=0., byteOrder=byteorder, dataType=datatype,
                        first=first, numValues=nValues, multiplier=multiplier,
                        reverse=reverse)
            else:
                sigma = self.__error.item() * esu.whereNonZero(data-self.__null_value)
            os.unlink(numpyfile)

        return data, sigma

    def getUtmZone(self):
        """
        returns a dummy UTM zone since this class does not use real coordinate
        values.
        """
        return 0

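# Usage sketch (illustrative): a 2D gravity grid defined directly in numpy:
#
#   grid = np.zeros((10, 20))    # (ny, nx) samples
#   source = NumpyData(DataSource.GRAVITY, grid, error=1.,
#                      length=[2.*U.km, 1.*U.km])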
class MT2DTe(object):
    """
    Class used to store frequency information associated with MT data.
    """

    def __init__(self, x, omega=0):
        """
        Initializes the MT2DTe tag object.

        :param omega: frequency of readings
        :type omega: ``float``
        :param x: coordinates of measurements
        :type x: ``list`` of ``tuple`` with ``float``
        """
        self.__omega=omega
        self.__x=x

    def getFrequency(self):
        """
        return frequency of measurement

        :rtype: ``float``
        """
        return self.__omega

    def getX(self):
        """
        return coordinates of measurement

        :rtype: ``list`` of ``tuple`` with ``float``
        """
        return self.__x

class SeismicSource(object):
    """
    Describes a seismic source by location (x,y), frequency omega, power
    (if known) and orientation (if known).
    This class is used to tag seismic data sources.
    """
    def __init__(self, x, y=0., omega=0., elevation=0., power=None, orientation=None):
        """
        Initializes the source.

        :param x: lateral x location
        :param y: lateral y location
        :param omega: frequency of source
        :param elevation: elevation of source above reference level
        :param power: power of source at frequency
        :param orientation: orientation of source in 3D or 2D (or None)
        :type x: ``float``
        :type y: ``float``
        :type omega: ``float``
        :type elevation: ``float``
        :type power: ``complex`` or ``None``
        :type orientation: vector of appropriate length or ``None``
        """
        self.__loc=(x,y)
        self.__omega=omega
        self.__power=power
        self.__elevation=elevation
        self.__orientation=orientation

    def __eq__(self, other):
        if isinstance(other, SeismicSource):
            return self.__loc == other.getLocation() \
                and self.__omega == other.getFrequency() \
                and self.__elevation == other.getElevation() \
                and self.__power == other.getPower() \
                and self.__orientation == other.getOrientation()
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def getLocation(self):
        """
        return location of source

        :rtype: ``tuple`` of ``float``
        """
        return self.__loc

    def getFrequency(self):
        """
        return frequency of source

        :rtype: ``float``
        """
        return self.__omega

    def getElevation(self):
        """
        return elevation of source

        :rtype: ``float``
        """
        return self.__elevation

    def getPower(self):
        """
        return power of source at frequency

        :rtype: ``complex`` or ``None``
        """
        return self.__power

    def getOrientation(self):
        """
        return orientation of source (or ``None`` if not set)

        :rtype: vector type object or ``None``
        """
        return self.__orientation
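# Usage sketch (illustrative): sources compare equal by value, which makes
# them usable as tags for seismic data sets:
#
#   s1 = SeismicSource(x=100., y=0., omega=5.)
#   s2 = SeismicSource(x=100., y=0., omega=5.)
#   assert s1 == s2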
