/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Contents of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 5288 - (show annotations)
Tue Dec 2 23:18:40 2014 UTC (4 years, 9 months ago) by sshaw
File MIME type: text/x-python
File size: 58061 byte(s)
fixing tests for cases where required domains not built
1
2 ##############################################################################
3 #
4 # Copyright (c) 2003-2014 by University of Queensland
5 # http://www.uq.edu.au
6 #
7 # Primary Business: Queensland, Australia
8 # Licensed under the Open Software License version 3.0
9 # http://www.opensource.org/licenses/osl-3.0.php
10 #
11 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12 # Development 2012-2013 by School of Earth Sciences
13 # Development from 2014 by Centre for Geoscience Computing (GeoComp)
14 #
15 ##############################################################################
16
17 """Data readers/providers for inversions"""
18
19 __copyright__="""Copyright (c) 2003-2014 by University of Queensland
20 http://www.uq.edu.au
21 Primary Business: Queensland, Australia"""
22 __license__="""Licensed under the Open Software License version 3.0
23 http://www.opensource.org/licenses/osl-3.0.php"""
24 __url__="https://launchpad.net/escript-finley"
25
26 __all__ = ['DataSource', 'ErMapperData', 'NumpyData', \
27 'SyntheticDataBase', 'SyntheticFeatureData', 'SyntheticData',
28 'SmoothAnomaly', 'SeismicSource']
29
30 import logging
31 import numpy as np
32 import tempfile
33 from esys.escript import ReducedFunction, FunctionOnBoundary, Scalar
34 from esys.escript import unitsSI as U
35 from esys.escript.linearPDEs import LinearSinglePDE
36 from esys.escript.util import *
37 from .coordinates import ReferenceSystem, CartesianReferenceSystem
38
39 HAS_RIPLEY = True
40 try:
41 from esys.ripley import *
42 except ImportError as e:
43 HAS_RIPLEY = False
44
45 try:
46 from scipy.io.netcdf import netcdf_file
47 __all__ += ['NetCdfData']
48 except:
49 pass
50
51 try:
52 import pyproj
53 HAVE_PYPROJ=True
54 except:
55 HAVE_PYPROJ=False
56
57 try:
58 import osgeo.osr
59 HAVE_GDAL=True
60 except ImportError:
61 HAVE_GDAL=False
62
def getUTMZone(lon, lat, wkt_string=None):
    """
    Determines the UTM zone for the given coordinate(s).

    If the coordinates appear to be projected already (values outside the
    valid longitude/latitude range) the zone is extracted from the supplied
    WKT description when the ``gdal`` module is available; otherwise the
    zone is computed from the longitude values. If neither applies, 0 is
    returned.

    :param lon: longitude value(s)
    :type lon: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param lat: latitude value(s)
    :type lat: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param wkt_string: Well-known text (WKT) string describing the
                       coordinate system used (optional)
    :type wkt_string: ``str``
    :rtype: ``int``
    """
    logger = logging.getLogger('inv.datasources.getUTMZone')
    zone = 0

    lon_arr = np.array(lon)
    lat_arr = np.array(lat)
    looks_projected = np.abs(lon_arr).max() > 360.0 or np.abs(lat_arr).max() > 180.0
    if looks_projected:
        # coordinates are already projected - try to get the zone from the
        # WKT description (requires gdal)
        if HAVE_GDAL and (wkt_string is not None):
            srs = osgeo.osr.SpatialReference()
            if srs.ImportFromWkt(wkt_string) == 0:
                zone = srs.GetUTMZone()
    else:
        # determine UTM zone from the input data
        zone = int(np.median((np.floor((lon_arr + 180)/6) + 1) % 60))

    logger.debug("Determined UTM zone %d."%zone)
    return zone
84
def LatLonToUTM(lon, lat, wkt_string=None):
    """
    Converts one or more longitude,latitude pairs to the corresponding x,y
    coordinates in the Universal Transverse Mercator projection.

    :note: The ``pyproj`` module is required unless the input coordinates are
           determined to be already projected. If it is not found an exception
           is raised.
    :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
           not available to convert the string, then the input values are
           assumed to be using the Clarke 1866 ellipsoid.

    :param lon: longitude value(s)
    :type lon: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param lat: latitude value(s)
    :type lat: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param wkt_string: Well-known text (WKT) string describing the coordinate
                       system used. The ``gdal`` module is used to convert
                       the string to the corresponding Proj4 string.
    :type wkt_string: ``str``
    :rtype: ``tuple``
    """
    logger = logging.getLogger('inv.datasources.LatLonToUTM')

    lon_arr = np.array(lon)
    lat_arr = np.array(lat)
    zone = getUTMZone(lon_arr, lat_arr, wkt_string)

    if np.abs(lon_arr).max() > 360.0 or np.abs(lat_arr).max() > 180.0:
        logger.debug('Coordinates appear to be projected. Passing through.')
        return lon, lat, zone

    logger.debug('Need to project coordinates.')
    if not HAVE_PYPROJ:
        logger.error("Cannot import pyproj! Exiting.")
        raise ImportError("In order to perform coordinate transformations on "
            "the data you are using the 'pyproj' Python module is required but "
            "was not found. Please install the module and try again.")

    # try to build the source projection from the WKT string (needs gdal)
    p_src = None
    if HAVE_GDAL and (wkt_string is not None) and len(wkt_string) > 0:
        srs = osgeo.osr.SpatialReference()
        srs.ImportFromWkt(wkt_string)
        try:
            p_src = pyproj.Proj(srs.ExportToProj4())
        except RuntimeError as e:
            logger.warning('pyproj returned exception: %s [wkt=%s]'%(e,wkt_string))

    if p_src is None:
        if HAVE_GDAL:
            reason = "no wkt string provided."
        else:
            reason = "the gdal python module not available."
        logger.warning("Assuming lon/lat coordinates on Clarke 1866 ellipsoid since "+reason)
        p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')

    # pick the hemisphere based on the median latitude
    south = '+south ' if np.median(lat_arr) < 0. else ''
    p_dest = pyproj.Proj('+proj=utm +zone=%d %s+units=m +ellps=WGS84'%(zone,south))
    x, y = pyproj.transform(p_src, p_dest, lon, lat)
    return x, y, zone
150
class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    This is an abstract base class that implements common functionality.
    Methods to be overwritten by subclasses are marked as such.
    This class assumes 2D data which is mapped to a slice of a 3D domain.
    For other setups override the methods as required.
    """

    # enumeration of supported survey data types
    GRAVITY, MAGNETIC, ACOUSTIC, MT = list(range(4))

    def __init__(self, reference_system=None, tags=None):
        """
        Constructor. Sets some defaults and initializes logger.

        :param tags: a list of tags associated with the data set.
        :type tags: ``list`` of almost any type (typically `str`)
        :param reference_system: the reference coordinate system
        :type reference_system: ``None`` or `ReferenceSystem`
        """
        # NOTE: the default used to be the mutable literal ``[]`` which is
        # shared between all instances created without an explicit tags
        # argument - using None avoids that pitfall.
        if tags is None:
            tags = []
        if not isinstance(tags, list):
            raise ValueError("tags argument must be a list.")
        self.__tags = tags
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)
        self.__subsampling_factor = 1
        if not reference_system:
            self.__reference_system = CartesianReferenceSystem()
        else:
            self.__reference_system = reference_system

        # precompute the vertical scale factor (unit conversion for
        # non-Cartesian reference systems)
        if self.__reference_system.isCartesian():
            self.__v_scale = 1.
        else:
            self.__v_scale = 1./self.getReferenceSystem().getHeightUnit()

    def getTags(self):
        """
        returns the list of tags

        :rtype: ``list``
        """
        return self.__tags

    def hasTag(self, tag):
        """
        returns true if the data set has tag ``tag``

        :rtype: ``bool``
        """
        return tag in self.__tags

    def getReferenceSystem(self):
        """
        returns the reference coordinate system

        :rtype: `ReferenceSystem`
        """
        return self.__reference_system

    def getHeightScale(self):
        """
        returns the height scale factor to convert from meters to the
        appropriate units of the reference system used.

        :rtype: ``float``
        """
        return self.__v_scale

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDataType(self):
        """
        Returns the type of survey data managed by this source.
        Subclasses must return `GRAVITY` or `MAGNETIC` or `ACOUSTIC` as
        appropriate.
        """
        raise NotImplementedError

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        This method is called by the `DomainBuilder` to retrieve the survey
        data as `Data` objects on the given domain.

        Subclasses should return one or more `Data` objects with survey data
        interpolated on the given `escript` domain. The exact return type
        depends on the type of data.

        :param domain: the escript domain to use
        :type domain: `esys.escript.Domain`
        :param origin: the origin coordinates of the domain
        :type origin: ``tuple`` or ``list``
        :param NE: the number of domain elements in each dimension
        :type NE: ``tuple`` or ``list``
        :param spacing: the cell sizes (node spacing) in the domain
        :type spacing: ``tuple`` or ``list``
        """
        raise NotImplementedError

    def getUtmZone(self):
        """
        All data source coordinates are converted to UTM (Universal Transverse
        Mercator) in order to have useful domain extents. Subclasses should
        implement this method and return the UTM zone number of the projected
        coordinates.
        """
        raise NotImplementedError

    def setSubsamplingFactor(self, f):
        """
        Sets the data subsampling factor (default=1).

        The factor is applied in all dimensions. For example a 2D dataset
        with 300 x 150 data points will be reduced to 150 x 75 when a
        subsampling factor of 2 is used.
        This becomes important when adding data of varying resolution to
        a `DomainBuilder`.
        """
        self.__subsampling_factor = f

    def getSubsamplingFactor(self):
        """
        Returns the subsampling factor that was set via `setSubsamplingFactor`
        (see there).
        """
        return self.__subsampling_factor
286
287
288 ##############################################################################
class ErMapperData(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, data_type, headerfile, datafile=None, altitude=0.,
                 error=None, scale_factor=None, null_value=None,
                 reference_system=None):
        """
        :param data_type: type of data, must be `GRAVITY` or `MAGNETIC`
        :type data_type: ``int``
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :type headerfile: ``str``
        :param datafile: ER Mapper binary data file name. If not supplied the
                         name of the header file without '.ers' is assumed
        :type datafile: ``str``
        :param altitude: altitude of measurements above ground in meters
        :type altitude: ``float``
        :param error: constant value to use for the data uncertainties.
                      If a value is supplied, it is scaled by the same factor
                      as the measurements. If not provided the error is
                      assumed to be 2 units for all measurements (i.e. 0.2
                      mGal and 2 nT for gravity and magnetic, respectively)
        :type error: ``float``
        :param scale_factor: the measurements and error values are scaled by
                             this factor. By default, gravity data is assumed
                             to be given in 1e-6 m/s^2 (0.1 mGal), while
                             magnetic data is assumed to be in 1e-9 T (1 nT).
        :type scale_factor: ``float``
        :param null_value: value that is used in the file to mark undefined
                           areas. This information is usually included in the
                           file.
        :type null_value: ``float``
        :param reference_system: reference coordinate system to be used.
                                 For a Cartesian reference (default) the
                                 appropriate UTM transformation is applied.
        :type reference_system: `ReferenceSystem`
        :note: consistence in the reference coordinate system and the reference
               coordinate system used in the data source is not checked.
        """
        super(ErMapperData, self).__init__(reference_system, [headerfile])
        self.__headerfile = headerfile
        if datafile is None:
            # by ER Mapper convention the binary file is the header file
            # name without the '.ers' extension
            self.__datafile = headerfile[:-4]
        else:
            self.__datafile = datafile
        self.__altitude = altitude
        self.__data_type = data_type
        self.__utm_zone = None
        self.__scale_factor = scale_factor
        self.__null_value = null_value
        self.__error_value = error
        self.__readHeader()

    def __readHeader(self):
        """
        Parses the ER Mapper header file and populates grid metadata
        (origin, spacing, extents, cell type, null value, scale factor).
        Raises ``RuntimeError`` on unsupported or invalid headers.
        """
        self.logger.debug("Checking Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        # use a context manager so the file handle is closed even when a
        # parse error is raised (the previous code leaked the handle)
        with open(self.__headerfile, 'r') as f:
            metadata = f.readlines()

        # find the start of the dataset header section
        start = -1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start = i+1
        if start == -1:
            raise RuntimeError('Invalid ER Mapper header file ("DatasetHeader" not found)')

        # parse header file filling dictionary of found values; nested
        # "X Begin"/"X End" sections become dotted key prefixes
        md_dict = {}
        section = []
        for i in range(start, len(metadata)):
            line = metadata[i].strip()
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section) > 0:
                    section.pop()
            else:
                vals = line.split('=')
                if len(vals) == 2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey = '.'.join(section+[key])
                    md_dict[fullkey] = value

        # check that the data format/type is supported
        try:
            if md_dict['ByteOrder'] != 'LSBFirst':
                raise RuntimeError('Unsupported byte order '+md_dict['ByteOrder'])
        except KeyError:
            self.logger.warning("Byte order not specified. Assuming LSB first.")

        try:
            if md_dict['DataType'] != 'Raster':
                raise RuntimeError('Unsupported data type '+md_dict['DataType'])
        except KeyError:
            self.logger.warning("Data type not specified. Assuming raster data.")

        try:
            if md_dict['RasterInfo.CellType'] == 'IEEE4ByteReal':
                self.__celltype = DATATYPE_FLOAT32
            elif md_dict['RasterInfo.CellType'] == 'IEEE8ByteReal':
                self.__celltype = DATATYPE_FLOAT64
            elif md_dict['RasterInfo.CellType'] == 'Signed32BitInteger':
                self.__celltype = DATATYPE_INT32
            else:
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warning("Cell type not specified. Assuming IEEE4ByteReal.")
            self.__celltype = DATATYPE_FLOAT32

        try:
            fileOffset = int(md_dict['HeaderOffset'])
        except (KeyError, ValueError):
            fileOffset = 0
        if fileOffset > 0:
            raise RuntimeError("ER Mapper data with header offset >0 not supported.")

        # now extract required information
        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine extents of data")

        ### mask/null value
        # note that NaN is always filtered out in ripley
        if self.__null_value is None:
            try:
                self.__null_value = float(md_dict['RasterInfo.NullCellValue'])
            except (KeyError, ValueError):
                self.logger.debug("Could not determine null value, using default.")
                self.__null_value = 99999
        elif not isinstance(self.__null_value, float) and not isinstance(self.__null_value, int):
            raise TypeError("Invalid type of null_value parameter")

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType'] == 'EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType'] == 'LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        # NOTE: RuntimeError is intentionally caught here so an unknown
        # CoordinateType falls back to a (0,0) origin like missing keys do
        except (KeyError, ValueError, RuntimeError):
            self.logger.warning("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX, originY = 0.0, 0.0

        # data sets have origin in top-left corner so y runs top-down and
        # we need to flip accordingly
        originY -= NY*spacingY

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds = gdal.Open(self.__headerfile)
                wkt = str(ds.GetProjection())
            except Exception:
                wkt = 'GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warning('GDAL not available or file read error, assuming GDA94 data')
            if self.getReferenceSystem().isCartesian():
                originX_UTM, originY_UTM, zone = LatLonToUTM(originX, originY, wkt)
                op1X, op1Y, _ = LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
                # we are rounding to avoid interpolation issues
                spacingX = np.round(op1X-originX_UTM)
                spacingY = np.round(op1Y-originY_UTM)
                originX = np.round(originX_UTM)
                originY = np.round(originY_UTM)
                self.__utm_zone = zone
            else:
                op1X, op1Y = originX+spacingX, originY+spacingY
                spacingX = np.round(op1X-originX, 5)
                spacingY = np.round(op1Y-originY, 5)
                originX = np.round(originX, 5)
                originY = np.round(originY, 5)

        self.__dataorigin = [originX, originY]
        self.__delta = [spacingX, spacingY]
        self.__nPts = [NX, NY]
        self.__origin = [originX, originY]

        ### scale factor
        if self.__scale_factor is None:
            if self.__data_type == self.GRAVITY:
                self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
                self.__scale_factor = 1e-6
            else:
                self.logger.info("Assuming magnetic data units are 'nT'.")
                self.__scale_factor = 1e-9

        ### error value
        if self.__error_value is None:
            self.__error_value = 2.
        elif not isinstance(self.__error_value, float) and not isinstance(self.__error_value, int):
            raise TypeError("Invalid type of error parameter")

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns the type of survey data (`GRAVITY` or `MAGNETIC`).
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Reads the binary grid and returns the measurement data and the
        uncertainty as a ``(data, sigma)`` tuple on the given domain.
        """
        FS = ReducedFunction(domain)
        nValues = self.__nPts
        # determine base location of this dataset within the domain
        first = [int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        # determine the resolution difference between domain and data.
        # If domain has twice the resolution we can double up the data etc.
        multiplier = [int(round(self.__delta[i]/spacing[i])) for i in range(len(self.__nPts))]
        if domain.getDim() == 3:
            first.append(int((self.getHeightScale()*self.__altitude-origin[2])/spacing[2]))
            multiplier = multiplier+[1]
            nValues = nValues+[1]

        reverse = [0]*domain.getDim()
        byteorder = BYTEORDER_NATIVE
        self.logger.debug("calling readBinaryGrid with first=%s, nValues=%s, multiplier=%s, reverse=%s"%(str(first),str(nValues),str(multiplier),str(reverse)))
        data = readBinaryGrid(self.__datafile, FS, shape=(),
                fill=self.__null_value, byteOrder=byteorder,
                dataType=self.__celltype, first=first, numValues=nValues,
                multiplier=multiplier, reverse=reverse)
        # uncertainty is constant wherever data is defined, 0 elsewhere
        sigma = self.__error_value * whereNonZero(data-self.__null_value)

        data = data * self.__scale_factor
        sigma = sigma * self.__scale_factor
        return data, sigma

    def getUtmZone(self):
        """
        Returns the UTM zone determined while reading the header (or
        ``None`` if no conversion took place).
        """
        return self.__utm_zone
529
530
531 ##############################################################################
class NetCdfData(DataSource):
    """
    Data Source for gridded netCDF data that use CF/COARDS conventions.
    """
    def __init__(self, data_type, filename, altitude=0., data_variable=None,
                 error=None, scale_factor=None, null_value=None,
                 reference_system=None):
        """
        :param filename: file name for survey data in netCDF format
        :type filename: ``str``
        :param data_type: type of data, must be `GRAVITY` or `MAGNETIC`
        :type data_type: ``int``
        :param altitude: altitude of measurements in meters
        :type altitude: ``float``
        :param data_variable: name of the netCDF variable that holds the data.
                              If not provided an attempt is made to determine
                              the variable and an exception thrown on failure.
        :type data_variable: ``str``
        :param error: either the name of the netCDF variable that holds the
                      uncertainties of the measurements or a constant value
                      to use for the uncertainties. If a constant value is
                      supplied, it is scaled by the same factor as the
                      measurements. If not provided the error is assumed to
                      be 2 units for all measurements (i.e. 0.2 mGal and 2 nT
                      for gravity and magnetic, respectively)
        :type error: ``str`` or ``float``
        :param scale_factor: the measurements and error values are scaled by
                             this factor. By default, gravity data is assumed
                             to be given in 1e-6 m/s^2 (0.1 mGal), while
                             magnetic data is assumed to be in 1e-9 T (1 nT).
        :type scale_factor: ``float``
        :param null_value: value that is used in the file to mark undefined
                           areas. This information is usually included in the
                           file.
        :type null_value: ``float``
        :param reference_system: reference coordinate system to be used.
                                 For a Cartesian reference (default) the
                                 appropriate UTM transformation is applied.
        :type reference_system: `ReferenceSystem`
        :note: it is the responsibility of the caller to ensure all data
               sources and the domain builder use the same reference system.
        """
        super(NetCdfData, self).__init__(reference_system, [filename])
        self.__filename = filename
        if not data_type in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for data_type parameter")
        self.__data_type = data_type
        self.__altitude = altitude
        self.__data_name = data_variable
        self.__scale_factor = scale_factor
        self.__null_value = null_value
        self.__utm_zone = None
        self.__readMetadata(error)

    def __readMetadata(self, error):
        """
        Opens the netCDF file and determines grid extents, data/error
        variables, null value, scale factor and coordinate conversion.

        :param error: error variable name or constant (see constructor)
        """
        self.logger.debug("Checking Data Source: %s"%self.__filename)
        f = netcdf_file(self.__filename, 'r')
        # ensure the file handle is closed even if a parse error is raised
        # (the previous code leaked the handle on every error path)
        try:
            ### longitude- / X-dimension and variable
            NX = 0
            for n in ['lon','longitude','x']:
                if n in f.dimensions:
                    NX = f.dimensions[n]
                    lon_name = n
                    break
            if NX == 0:
                raise RuntimeError("Could not determine extents of data")

            # CF/COARDS states that coordinate variables have the same name
            # as the dimensions
            if not lon_name in f.variables:
                raise RuntimeError("Could not determine longitude variable")
            # pop so the coordinate variable is not considered a candidate
            # for the data variable below
            longitude = f.variables.pop(lon_name)

            ### latitude- / Y-dimension and variable
            NY = 0
            for n in ['lat','latitude','y']:
                if n in f.dimensions:
                    NY = f.dimensions[n]
                    lat_name = n
                    break
            if NY == 0:
                raise RuntimeError("Could not determine extents of data")
            if not lat_name in f.variables:
                raise RuntimeError("Could not determine latitude variable")
            latitude = f.variables.pop(lat_name)

            ### data variable
            if self.__data_name is not None:
                try:
                    dims = f.variables[self.__data_name].dimensions
                    if not ((lat_name in dims) and (lon_name in dims)):
                        raise ValueError("Invalid data variable name supplied")
                except KeyError:
                    raise ValueError("Invalid data variable name supplied")
            else:
                # auto-detect: first variable defined over both dimensions
                for n in f.variables.keys():
                    dims = f.variables[n].dimensions
                    if (lat_name in dims) and (lon_name in dims):
                        self.__data_name = n
                        break
            if self.__data_name is None:
                raise RuntimeError("Could not determine data variable")

            datavar = f.variables[self.__data_name]

            ### error value/variable
            self.__error_name = None
            if isinstance(error, str):
                try:
                    dims = f.variables[error].dimensions
                    if not ((lat_name in dims) and (lon_name in dims)):
                        raise ValueError("Invalid error variable name supplied")
                except KeyError:
                    raise ValueError("Invalid error variable name supplied")
                self.__error_name = error
            elif isinstance(error, float) or isinstance(error, int):
                self.__error_value = float(error)
            elif error is None:
                self.__error_value = 2.
            else:
                raise TypeError("Invalid type of error parameter")

            ### mask/null value
            # note that NaN is always filtered out in ripley
            if self.__null_value is None:
                if hasattr(datavar, 'missing_value'):
                    self.__null_value = float(datavar.missing_value)
                elif hasattr(datavar, '_FillValue'):
                    self.__null_value = float(datavar._FillValue)
                else:
                    self.logger.debug("Could not determine null value, using default.")
                    self.__null_value = 99999
            elif not isinstance(self.__null_value, float) and not isinstance(self.__null_value, int):
                raise TypeError("Invalid type of null_value parameter")

            ### scale factor
            if self.__scale_factor is None:
                if self.__data_type == self.GRAVITY:
                    self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
                    self.__scale_factor = 1e-6
                else:
                    self.logger.info("Assuming magnetic data units are 'nT'.")
                    self.__scale_factor = 1e-9

            # see if there is a WKT string to convert coordinates
            try:
                wkt_string = str(datavar.esri_pe_string)
                self.logger.debug("wkt_string is: %s"%wkt_string)
            except AttributeError:
                wkt_string = None

            # CF GDAL output: see if there is a grid_mapping attribute which
            # contains the name of a dummy variable that holds information
            # about mapping. GDAL puts the WKT string into spatial_ref:
            if wkt_string is None:
                try:
                    mapvar = f.variables[datavar.grid_mapping]
                    wkt_string = str(mapvar.spatial_ref)
                    self.logger.debug("wkt_string is: %s"%wkt_string)
                except (AttributeError, KeyError):
                    self.logger.debug("no wkt_string found!")

            # actual_range & geospatial_lon_min/max do not always contain
            # correct values so we have to obtain the min/max in a less
            # efficient way:
            lon_range = longitude.data.min(), longitude.data.max()
            lat_range = latitude.data.min(), latitude.data.max()
            if self.getReferenceSystem().isCartesian():
                lon_range, lat_range, zone = LatLonToUTM(lon_range, lat_range, wkt_string)
                self.__utm_zone = zone

            lengths = [lon_range[1]-lon_range[0], lat_range[1]-lat_range[0]]

            # see if lat or lon is stored in reverse order to domain
            # conventions
            self.__reverse = [False, False]
            d = longitude.data
            if d[0] > d[-1]:
                self.__reverse[0] = True
            d = latitude.data
            if d[0] > d[-1]:
                self.__reverse[1] = True
            self.__nPts = [NX, NY]
            self.__origin = [lon_range[0], lat_range[0]]
            # we are rounding to avoid interpolation issues
            if self.getReferenceSystem().isCartesian():
                # rounding will give us about meter-accuracy with UTM coords
                r = 0
            else:
                # this should give us about meter-accuracy with lat/lon
                r = 5
            self.__delta = [np.round(lengths[i]/self.__nPts[i], r) for i in range(2)]
        finally:
            f.close()

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns the type of survey data (`GRAVITY` or `MAGNETIC`).
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Reads the netCDF grid and returns the measurement data and the
        uncertainty as a ``(data, sigma)`` tuple on the given domain.
        """
        if not HAS_RIPLEY:
            raise RuntimeError("Ripley module not available for reading")
        FS = ReducedFunction(domain)
        nValues = self.__nPts
        # determine base location of this dataset within the domain
        first = [int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        # determine the resolution difference between domain and data.
        # If domain has twice the resolution we can double up the data etc.
        multiplier = [int(round(self.__delta[i]/spacing[i])) for i in range(len(self.__nPts))]
        reverse = [int(self.__reverse[i]) for i in range(len(self.__reverse))]

        if domain.getDim() == 3:
            first.append(int((self.getHeightScale()*self.__altitude-origin[2])/spacing[2]))
            multiplier = multiplier + [1]
            nValues = nValues + [1]
            reverse = reverse + [0]

        self.logger.debug("calling readNcGrid with dataname=%s, first=%s, nValues=%s, multiplier=%s, reverse=%s"%(
                self.__data_name, str(first), str(nValues), str(multiplier), str(reverse)))
        data = ripleycpp._readNcGrid(self.__filename, self.__data_name, FS,
                shape=(), fill=self.__null_value, first=first,
                numValues=nValues, multiplier=multiplier, reverse=reverse)

        if self.__error_name is not None:
            # fixed: this log line previously reported __data_name although
            # the error variable is being read here
            self.logger.debug("calling readNcGrid with dataname=%s, first=%s, nValues=%s, multiplier=%s, reverse=%s"%(
                    self.__error_name, str(first), str(nValues), str(multiplier), str(reverse)))
            sigma = ripleycpp._readNcGrid(self.__filename, self.__error_name,
                    FS, shape=(), fill=0., first=first, numValues=nValues,
                    multiplier=multiplier, reverse=reverse)
        else:
            # constant uncertainty wherever data is defined, 0 elsewhere
            sigma = self.__error_value * whereNonZero(data-self.__null_value)

        data = data * self.__scale_factor
        sigma = sigma * self.__scale_factor
        return data, sigma

    def getUtmZone(self):
        """
        Returns the UTM zone determined while reading the metadata (or
        ``None`` if no conversion took place).
        """
        return self.__utm_zone
777
778
779 ##############################################################################
class SourceFeature(object):
    """
    A feature adds a density/susceptibility distribution to (parts of) a
    domain of a synthetic data source, for example a layer of a specific
    rock type or a simulated ore body.

    Subclasses must implement both `getValue` and `getMask`.
    """

    def getValue(self):
        """
        Returns the value for the area covered by mask. It can be constant
        or a `Data` object with spatial dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature. That is,
        mask is non-zero where the value returned by `getValue()` should be
        applied, zero elsewhere.
        """
        raise NotImplementedError
800
801 class SmoothAnomaly(SourceFeature):
802 """
803 A source feature in the form of a blob (roughly gaussian).
804 """
805 def __init__(self, lx, ly, lz, x, y, depth, v_inner=None, v_outer=None):
806 """
807 Intializes the smooth anomaly data.
808
809 :param lx: size of blob in x-dimension
810 :param ly: size of blob in y-dimension
811 :param lz: size of blob in z-dimension
812 :param x: location of blob in x-dimension
813 :param y: location of blob in y-dimension
814 :param depth: depth of blob
815 :param v_inner: value in the centre of the blob
816 :param v_outer: value in the periphery of the blob
817 """
818 self.x=x
819 self.y=y
820 self.lx=lx
821 self.ly=ly
822 self.lz=lz
823 self.depth=depth
824 self.v_inner=v_inner
825 self.v_outer=v_outer
826 self.value=None
827 self.mask=None
828
829 def getValue(self,x):
830 if self.value is None:
831 if self.v_outer is None or self.v_inner is None:
832 self.value=0
833 else:
834 DIM=x.getDomain().getDim()
835 alpha=-log(abs(self.v_outer/self.v_inner))*4
836 value=exp(-alpha*((x[0]-self.x)/self.lx)**2)
837 value=value*exp(-alpha*((x[DIM-1]+self.depth)/self.lz)**2)
838 self.value=maximum(abs(self.v_outer), abs(self.v_inner*value))
839 if self.v_inner<0: self.value=-self.value
840
841 return self.value
842
843 def getMask(self, x):
844 DIM=x.getDomain().getDim()
845 m=whereNonNegative(x[DIM-1]+self.depth+self.lz/2) * whereNonPositive(x[DIM-1]+self.depth-self.lz/2) \
846 *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
847 if DIM>2:
848 m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
849 self.mask = m
850 return m
851
852 ##############################################################################
class SyntheticDataBase(DataSource):
    """
    Base class to define reference data based on a given property distribution
    (density or susceptibility). Data are collected from a square region of
    vertical extent ``length`` on a grid with ``number_of_elements`` cells in
    each direction.

    The synthetic data are constructed by solving the appropriate forward
    problem. Data can be sampled with an offset from the surface at z=0 or
    using the entire subsurface region.
    """
    def __init__(self, data_type,
                       DIM=2,
                       number_of_elements=10,
                       length=1*U.km,
                       B_b=None,
                       data_offset=0,
                       full_knowledge=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param DIM: number of spatial dimensions
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        """
        super(SyntheticDataBase, self).__init__()
        if not data_type in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for data_type parameter")
        self.DIM=DIM
        self.number_of_elements=number_of_elements
        self.length=length
        self.__data_type = data_type
        self.__full_knowledge= full_knowledge
        self.__data_offset=data_offset
        self.__B_b =None
        # this is for Cartesian (FIXME ?)
        if data_type == self.MAGNETIC:
            if self.DIM < 3:
                # 2D case: only the radial and longitudinal components are kept
                self.__B_b = np.array([B_b[0], B_b[2]])
            else:
                # NOTE(review): this branch stores a plain list while the 2D
                # branch stores an ndarray -- presumably both were meant to be
                # ndarrays; confirm before changing.
                self.__B_b = ([B_b[0], B_b[1],B_b[2]])
        # lateral (DIM-1 dimensional) grid description of the survey region
        self.__origin = [0]*(DIM-1)
        self.__delta = [float(length)/number_of_elements]*(DIM-1)
        self.__nPts = [number_of_elements]*(DIM-1)
        # anomaly distribution; created lazily by getReferenceProperty()
        self._reference_data=None

    def getUtmZone(self):
        """
        returns a dummy UTM zone since this class does not use real coordinate
        values.
        """
        return 0

    def getDataExtents(self):
        """
        returns the lateral data extend of the data set
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        returns the data type
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, number_of_elements, spacing):
        """
        returns the survey data placed on a given domain.

        :param domain: domain on which the data are to be placed
        :type domain: ``Domain``
        :param origin: origin of the domain
        :type origin: ``list`` of ``float``
        :param number_of_elements: number of elements (or cells) in each
                                   spatial direction used to span the domain
        :type number_of_elements: ``list`` of ``int``
        :param spacing: cell size in each spatial direction
        :type spacing: ``list`` of ``float``
        :return: observed gravity field or magnetic flux density for each cell
                 in the domain and for each cell an indicator 1/0 if the data
                 are valid or not.
        :rtype: pair of ``Scalar``
        """
        pde=LinearSinglePDE(domain)
        DIM=domain.getDim()
        x=domain.getX()
        # set the reference data
        k=self.getReferenceProperty(domain)
        # calculate the corresponding potential
        z=x[DIM-1]
        # pin the potential to zero on the top face of the domain
        m_psi_ref=whereZero(z-sup(z))
        if self.getDataType()==DataSource.GRAVITY:
            # Poisson equation for the gravity potential with source -4*pi*G*rho
            pde.setValue(A=kronecker(domain), Y=-4*np.pi*U.Gravitational_Constant*self._reference_data, q=m_psi_ref)
        else:
            # scalar potential formulation for the magnetic problem,
            # driven by the susceptibility times the background field
            pde.setValue(A=kronecker(domain), X=self._reference_data*self.__B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        #pde.getSolverOptions().setTolerance(1e-13)
        psi_ref=pde.getSolution()
        del pde
        if self.getDataType()==DataSource.GRAVITY:
            # gravity field is minus the potential gradient, sampled cell-wise
            data = -grad(psi_ref, ReducedFunction(domain))
        else:
            # total flux density: background contribution minus potential gradient
            data = self._reference_data*self.__B_b-grad(psi_ref, ReducedFunction(domain))

        x=ReducedFunction(domain).getX()
        if self.__full_knowledge:
            # use every cell below the surface as valid data
            sigma = whereNegative(x[DIM-1])
        else:
            sigma=1.
            # limit mask to non-padding in horizontal area
            for i in range(DIM-1):
                x_i=x[i]
                sigma=sigma * wherePositive(x_i) * whereNegative(x_i-(sup(x_i)+inf(x_i)))
            # limit mask to one cell thickness at z=0
            z=x[DIM-1]
            # snap the requested offset to the nearest cell boundary
            oo=int(self.__data_offset/spacing[DIM-1]+0.5)*spacing[DIM-1]
            sigma = sigma * whereNonNegative(z-oo) * whereNonPositive(z-oo-spacing[DIM-1])
        return data,sigma

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.

        :return: the density or susceptibility anomaly used to create the
                 survey data
        :note: it can be assumed that in the first call the ``domain``
               argument is present so the actual anomaly data can be created.
               In subsequent calls this may not be true.
        :note: method needs to be overwritten
        """
        raise NotImplementedError()
996
class SyntheticFeatureData(SyntheticDataBase):
    """
    Uses a list of `SourceFeature` objects to define synthetic anomaly data.
    """
    def __init__(self, data_type,
                       features,
                       DIM=2,
                       number_of_elements=10,
                       length=1*U.km,
                       B_b=None,
                       data_offset=0,
                       full_knowledge=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param features: list of features. It is recommended that the features
                         are located entirely below the surface.
        :type features: ``list`` of `SourceFeature`
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        """
        super(SyntheticFeatureData,self).__init__(
                                 data_type=data_type, DIM=DIM,
                                 number_of_elements=number_of_elements,
                                 length=length, B_b=B_b,
                                 data_offset=data_offset,
                                 full_knowledge=full_knowledge)
        self._features = features


    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        # use 'is None' -- self._reference_data may be an escript Data object
        # for which '== None' is not a plain boolean test (and was inconsistent
        # with SyntheticData.getReferenceProperty which already uses 'is None')
        if self._reference_data is None:
            x=domain.getX()
            k=0.
            # superimpose the features: inside each feature's mask the feature
            # value overrides anything accumulated so far
            for f in self._features:
                m=f.getMask(x)
                k = k * (1-m) + f.getValue(x) * m
            self._reference_data= k
        return self._reference_data
1052
class SyntheticData(SyntheticDataBase):
    """
    Defines synthetic gravity/magnetic data based on harmonic property anomaly

        rho = amplitude * sin(n_depth * pi /depth * (z+depth_offset)) * sin(n_length * pi /length * (x - shift) )

    for all x and z<=0. For z>0 rho = 0.
    """
    def __init__(self, data_type,
                       n_length=1,
                       n_depth=1,
                       depth_offset=0.,
                       depth=None,
                       amplitude=None,
                       DIM=2,
                       number_of_elements=10,
                       length=1*U.km,
                       B_b=None,
                       data_offset=0,
                       full_knowledge=False,
                       s=0.):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param n_length: number of oscillations in the anomaly data within the
                         observation region
        :type n_length: ``int``
        :param n_depth: number of oscillations in the anomaly data below surface
        :type n_depth: ``int``
        :param depth_offset: vertical offset of the data
        :type depth_offset: ``float``
        :param depth: vertical extent in the anomaly data. If not present the
                      depth of the domain is used.
        :type depth: ``float``
        :param amplitude: data amplitude. Default value is 200 U.kg/U.m**3 for
                          gravity and 0.1 for magnetic data.
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude].
                    Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param s: lateral shift applied to the harmonic anomaly (the ``shift``
                  in the formula above)
        :type s: ``float``
        """
        super(SyntheticData,self).__init__(
                                 data_type=data_type, DIM=DIM,
                                 number_of_elements=number_of_elements,
                                 length=length, B_b=B_b,
                                 data_offset=data_offset,
                                 full_knowledge=full_knowledge)
        self.__n_length = n_length
        self.__n_depth = n_depth
        self.depth = depth
        self.depth_offset=depth_offset
        # use 'is None' for the None test (PEP 8; '== None' invokes __eq__)
        if amplitude is None:
            if data_type == DataSource.GRAVITY:
                amplitude = 200 *U.kg/U.m**3
            else:
                amplitude = 0.1
        self.__amplitude = amplitude
        self.__s=s

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        if self._reference_data is None:
            DIM=domain.getDim()
            x=domain.getX()
            # set the reference data
            z=x[DIM-1]
            dd=self.depth
            if dd is None: dd=inf(z)
            # normalized depth coordinate: 0 at depth_offset, 1 at depth dd
            z2=(z+self.depth_offset)/(self.depth_offset-dd)
            # vertical harmonic, clipped to the band 0 <= z2 <= 1
            k=sin(self.__n_depth * np.pi * z2) * whereNonNegative(z2) * whereNonPositive(z2-1.) * self.__amplitude
            # lateral harmonics (shifted by s) in each horizontal direction
            for i in range(DIM-1):
                x_i=x[i]
                min_x=inf(x_i)
                max_x=sup(x_i)
                k*= sin(self.__n_length*np.pi*(x_i-min_x-self.__s)/(max_x-min_x))
            self._reference_data= k
        return self._reference_data
1144
1145 ##############################################################################
class NumpyData(DataSource):
    """
    A data source that takes its survey data from an in-memory ``numpy``
    array (or list) rather than from a file.
    """

    def __init__(self, data_type, data, error=1., length=1.*U.km, null_value=-1., tags=None, origin=None):
        """
        A data source that uses survey data from a ``numpy`` object or list
        instead of a file.
        The dimensionality is inferred from the shape of ``data`` (1- and
        2-dimensional data is supported). The data origin is assumed to be
        at the coordinate origin.

        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param data: the survey data array. Note that for a cartesian coordinate
                     system the shape of the data is considered to be
                     (nz,ny,nx).
        :type data: ``numpy.array`` or ``list``
        :param error: constant value to use for the data uncertainties or a
                      numpy object with uncertainties for every data point.
        :type error: ``float`` or ``list`` or ``ndarray``
        :param length: side length(s) of the data slice/volume. This can be
                       a scalar to indicate constant length in all dimensions
                       or an array/list of values in each coordinate dimension.
        :type length: ``float`` or ``list`` or ``ndarray``
        :param null_value: value that is used in the undefined regions of the
                           survey data object.
        :type null_value: ``float``
        :param tags: a list of tags associated with the data set.
        :type tags: ``list`` of almost any type (typically `str`)
        :param origin: offset of origin of offset
        :type origin: ``list`` of ``float``s
        """
        # default for tags is None instead of a list literal: a mutable
        # default argument would be shared between all instances
        super(NumpyData, self).__init__(tags=[] if tags is None else tags)
        if not data_type in [self.GRAVITY, self.MAGNETIC, self.ACOUSTIC, self.MT ]:
            raise ValueError("Invalid value for data_type parameter")
        self.__data_type = data_type
        # coerce to a float64 ndarray unless already float64/complex128
        if not isinstance(data, np.ndarray) or data.dtype not in [ np.float64, np.complex128]:
            self.__data = np.asarray(data, dtype=np.float64)
        else:
            self.__data = data
        DIM = len(self.__data.shape)
        if DIM not in (1,2):
            raise ValueError("NumpyData requires 1- or 2-dimensional data")
        self.__error = np.asarray(error, dtype=np.float64)
        # a non-scalar error array must match the data point for point
        if len(self.__error.shape) > 0 and \
                self.__error.shape != self.__data.shape:
            raise ValueError("error argument must be scalar or match the shape of the data")
        if isinstance(length,float) or isinstance(length,int):
            length = [float(length)] * DIM
        else:
            length=np.asarray(length, dtype=float)
            if len(length.shape) != 1 or length.shape[0] != DIM:
                raise ValueError("length must be scalar or an array with %d values"%DIM)
            length=length.tolist()

        self.__length=length
        self.__null_value = null_value
        # numpy shape is (nz,ny,nx) ordered -> reverse for (nx,ny,nz) extents
        self.__nPts = list(reversed(self.__data.shape))
        self.__delta = [length[i]/self.__nPts[i] for i in range(DIM)]
        if origin is None:
            self.__origin = [0.] * DIM
        else:
            # take the first DIM components (was origin[:DIM-1], which dropped
            # the last component and left __origin one entry short compared to
            # the default branch and to the indexing in getSurveyData())
            self.__origin = origin[:DIM]

    def getDataExtents(self):
        """
        returns the lateral extents as (origin, number of points, spacing).
        """
        return (self.__origin, self.__nPts, self.__delta)

    def getDataType(self):
        """
        returns the data type
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        returns the survey data and a validity indicator placed on ``domain``.

        :param domain: domain on which the data are to be placed
        :param origin: origin of the domain
        :param NE: number of elements in each spatial direction
        :param spacing: cell size in each spatial direction
        :return: pair (data, sigma) of escript objects
        """
        DIM=domain.getDim()
        if self.getDataType() == self.ACOUSTIC:
            x=FunctionOnBoundary(domain).getX()
            BBX=boundingBox(domain)
            z=x[DIM-1]
            mask= whereZero( z - inf(z)) # we don't use BBX[DIM-1][1] due to mountains'
            for i in range(DIM-1):
                x_i=x[i]
                mask+=whereNonPositive( x_i - self.__origin[i] ) + whereNonNegative( x_i - ( self.__origin[i]+self.__length[i] ) )
            mask=1-wherePositive(mask)

            data=Data(0.,(2,), FunctionOnBoundary(domain))
            step= [ self.__length[i]/self.__data.shape[i] for i in range(DIM-1) ]
            if DIM == 2:
                # complex data split into real/imaginary components
                data[0] = interpolateTable(self.__data.real, x[0],self.__origin[0], step[0])
                data[1] = interpolateTable(self.__data.imag, x[0],self.__origin[0], step[0])
                if len(self.__error.shape) > 0:
                    sigma = interpolateTable(self.__error, x[0], self.__origin[0], step[0])
                else:
                    sigma = Scalar(self.__error.item(), FunctionOnBoundary(domain))
            else:
                raise ValueError("3D domains are not supported yet.")
            data*=mask
            sigma*=mask
        elif self.getDataType() == self.MT:
            if DIM == 2:
                # x was previously undefined in this branch (NameError when a
                # non-scalar error was given); sample on the boundary like the
                # acoustic case
                x=FunctionOnBoundary(domain).getX()
                step= [ self.__length[i]/self.__data.shape[i] for i in range(DIM-1) ]
                if len(self.__error.shape) > 0:
                    sigma = interpolateTable(self.__error, x[0], self.__origin[0], step[0])
                else:
                    sigma = Scalar(self.__error.item(), FunctionOnBoundary(domain))
                return self.__data, sigma
            else:
                raise ValueError("3D domains are not supported yet.")

        else:
            FS = ReducedFunction(domain)
            nValues = self.__nPts
            dataDIM = len(nValues)
            # determine base location of this dataset within the domain
            first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(dataDIM)]
            # determine the resolution difference between domain and data.
            # If domain has twice the resolution we can double up the data etc.
            multiplier=[int(round(self.__delta[i]/spacing[i])) for i in range(dataDIM)]
            if domain.getDim() > dataDIM:
                # pad a trivial vertical direction for 1D data on a 2D domain
                first.append(int(-origin[-1]/spacing[-1]))
                multiplier=multiplier+[1]
                nValues=nValues+[1]

            # stream the numpy data through a temporary file so that ripley's
            # parallel grid reader can ingest it
            _handle, numpyfile = tempfile.mkstemp()
            os.close(_handle)
            self.__data.tofile(numpyfile)

            reverse=[0]*DIM
            byteorder=BYTEORDER_NATIVE
            datatype=DATATYPE_FLOAT64
            self.logger.debug("calling readBinaryGrid with first=%s, nValues=%s, multiplier=%s"%(str(first),str(nValues),str(multiplier)))
            data = readBinaryGrid(numpyfile, FS, shape=(),
                    fill=self.__null_value, byteOrder=byteorder, dataType=datatype,
                    first=first, numValues=nValues, multiplier=multiplier,
                    reverse=reverse)
            if len(self.__error.shape) > 0:
                self.__error.tofile(numpyfile)
                self.logger.debug("calling readBinaryGrid with first=%s, nValues=%s, multiplier=%s"%(str(first),str(nValues),str(multiplier)))
                sigma = readBinaryGrid(numpyfile, FS, shape=(),
                        fill=0., byteOrder=byteorder, dataType=datatype,
                        first=first, numValues=nValues, multiplier=multiplier,
                        reverse=reverse)
            else:
                # constant uncertainty wherever the data is defined
                sigma = self.__error.item() * whereNonZero(data-self.__null_value)
            os.unlink(numpyfile)

        return data, sigma

    def getUtmZone(self):
        """
        returns a dummy UTM zone since this class does not use real coordinate
        values.
        """
        return 0
1298
class MT2DTe(object):
    """
    Tag object storing the frequency information associated with MT
    (magnetotelluric) data.
    """

    def __init__(self, x, omega=0):
        """
        Initializes the MT2DTe tag object.

        :param x: coordinates of measurements
        :type x: ``list`` of ``tuple`` with ``float``
        :param omega: frequency of readings
        :type omega: ``float``
        """
        self.__x = x
        self.__omega = omega

    def getFrequency(self):
        """
        Returns the frequency of the measurement.
        :rtype: ``float``
        """
        return self.__omega

    def getX(self):
        """
        Returns the coordinates of the measurement.
        :rtype: ``float``
        """
        return self.__x
1329
class SeismicSource(object):
    """
    Describes a seismic source by location (x,y), frequency omega, power
    (if known) and orientation (if known). This class is used to tag
    seismic data sources.
    """
    def __init__(self, x, y=0., omega=0., elevation=0., power = None, orientation=None):
        """
        Initializes the source description.

        :param x: lateral x location
        :param y: lateral y location
        :param omega: frequency of source
        :param elevation: elevation of source above reference level
        :param power: power of source at frequency
        :param orientation: orientation of source in 3D or 2D (or None)
        :type x: ``float``
        :type y: ``float``
        :type omega: ``float``
        :type power: ``complex`` or ``None``
        :type orientation: vector of appropriate length or ``None``
        """
        self.__loc = (x, y)
        self.__elevation = elevation
        self.__omega = omega
        self.__power = power
        self.__orientation = orientation

    def __eq__(self, other):
        # two sources are equal when every recorded attribute matches;
        # anything that is not a SeismicSource compares unequal
        if not isinstance(other, SeismicSource):
            return False
        mine = (self.__loc, self.__omega, self.__elevation,
                self.__power, self.__orientation)
        theirs = (other.getLocation(), other.getFrequency(),
                  other.getElevation(), other.getPower(),
                  other.getOrientation())
        return mine == theirs

    def __ne__(self, other):
        return not self.__eq__(other)

    def getLocation(self):
        """
        Returns the location of the source.
        :rtype: ``tuple`` of ``float``
        """
        return self.__loc

    def getFrequency(self):
        """
        Returns the frequency of the source.
        :rtype: ``float``
        """
        return self.__omega

    def getElevation(self):
        """
        Returns the elevation of the source.
        :rtype: ``float``
        """
        return self.__elevation

    def getPower(self):
        """
        Returns the power of the source at its frequency.
        :rtype: ``complex`` or ``None``
        """
        return self.__power

    def getOrientation(self):
        """
        Returns the orientation of the source at its frequency.
        :rtype: vector type object or ``None``
        """
        return self.__orientation
1402
1403

  ViewVC Help
Powered by ViewVC 1.1.26