/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Contents of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 4212 - (show annotations)
Tue Feb 19 00:11:28 2013 UTC (6 years, 9 months ago) by caltinay
File MIME type: text/x-python
File size: 37580 byte(s)
Even friendlier message when pyproj is not installed but required.

1
2 ##############################################################################
3 #
4 # Copyright (c) 2003-2013 by University of Queensland
5 # http://www.uq.edu.au
6 #
7 # Primary Business: Queensland, Australia
8 # Licensed under the Open Software License version 3.0
9 # http://www.opensource.org/licenses/osl-3.0.php
10 #
11 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12 # Development since 2012 by School of Earth Sciences
13 #
14 ##############################################################################
15
16 """Data readers/providers for inversions"""
17
18 __copyright__="""Copyright (c) 2003-2013 by University of Queensland
19 http://www.uq.edu.au
20 Primary Business: Queensland, Australia"""
21 __license__="""Licensed under the Open Software License version 3.0
22 http://www.opensource.org/licenses/osl-3.0.php"""
23 __url__="https://launchpad.net/escript-finley"
24
__all__ = ['simpleGeoMagneticFluxDensity', 'DataSource', 'ErMapperData',
           'SyntheticDataBase', 'SyntheticFeatureData', 'SyntheticData',
           'SmoothAnomaly']

import logging
import numpy as np
from esys.escript import ReducedFunction
from esys.escript import unitsSI as U
from esys.escript.linearPDEs import LinearSinglePDE
from esys.escript.util import *
from esys.ripley import ripleycpp

# netCDF support is optional: only export NetCdfData when scipy's reader
# is available. Catch ImportError specifically so genuine bugs (e.g. a
# NameError) are not silently swallowed by a bare except.
try:
    from scipy.io.netcdf import netcdf_file
    __all__ += ['NetCdfData']
except ImportError:
    pass
42
def LatLonToUTM(lon, lat, wkt_string=None):
    """
    Converts one or more longitude,latitude pairs to the corresponding x,y
    coordinates in the Universal Transverse Mercator projection.

    :note: The ``pyproj`` module is required for this method. If it is not
           found an exception is raised.
    :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
           not available to convert the string, then the input values are
           assumed to be using the Clarke 1866 ellipsoid.

    :param lon: longitude value(s)
    :type lon: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param lat: latitude value(s)
    :type lat: ``float``, ``list``, ``tuple``, or ``numpy.array``
    :param wkt_string: Well-known text (WKT) string describing the coordinate
                       system used. The ``gdal`` module is used to convert
                       the string to the corresponding Proj4 string.
    :type wkt_string: ``str``
    :return: tuple (x, y, zone) of projected coordinates and the UTM zone
    :rtype: ``tuple``
    """
    try:
        import pyproj
    except ImportError:
        raise ImportError("In order to perform coordinate transformations on "
            "the data you are using the 'pyproj' Python module is required but "
            "was not found. Please install the module and try again.")

    # determine UTM zone from the input data (median longitude decides)
    zone = int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
    try:
        # use gdal to translate the WKT description into a Proj4 string
        import osgeo.osr
        srs = osgeo.osr.SpatialReference()
        srs.ImportFromWkt(wkt_string)
        p_src = pyproj.Proj(srs.ExportToProj4())
    except Exception:
        # gdal not available or WKT missing/invalid -> fall back to the
        # Clarke 1866 ellipsoid as documented above
        p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')

    # check for hemisphere (median latitude decides north/south)
    if np.median(np.array(lat)) < 0.:
        south = '+south '
    else:
        south = ''
    p_dest = pyproj.Proj('+proj=utm +zone=%d %s+units=m' % (zone, south))
    x, y = pyproj.transform(p_src, p_dest, lon, lat)
    return x, y, zone
90
def simpleGeoMagneticFluxDensity(latitude, longitude=0.):
    """
    Returns an approximation of the geomagnetic flux density B at the given
    `latitude` based on a simple dipole model. The parameter `longitude` is
    currently ignored.

    :param latitude: geographic latitude in degrees
    :type latitude: ``float``
    :param longitude: geographic longitude in degrees (currently unused)
    :type longitude: ``float``
    :return: tuple (B_r, B_theta, B_phi) of flux density components; the
             azimuthal component B_phi is always zero in this model
    :rtype: ``tuple``
    """
    # co-latitude in radians
    theta = (90-latitude)/180.*np.pi
    # dipole field strength at the magnetic equator
    B_0 = U.Mu_0 * U.Magnetic_Dipole_Moment_Earth / (4 * np.pi * U.R_Earth**3)
    # use numpy's trig functions explicitly: the argument is a plain float,
    # so there is no need to rely on the star-imported escript versions
    B_theta = B_0 * np.sin(theta)
    B_r = 2 * B_0 * np.cos(theta)
    return B_r, B_theta, 0.
103
class DataSource(object):
    """
    Provides survey data to the inversion process.

    This is an abstract base class that supplies the common plumbing
    (logging and subsampling). Subclasses provide the actual data by
    overriding the methods marked as such below. 2D data mapped onto a
    slice of a 3D domain is assumed; override the relevant methods for
    other setups.
    """

    # identifiers for the two supported kinds of survey data
    GRAVITY, MAGNETIC = 0, 1

    def __init__(self):
        """
        Sets defaults and creates a logger named after the concrete class.
        """
        self.logger = logging.getLogger('inv.%s' % type(self).__name__)
        self.__subsampling_factor = 1

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDataType(self):
        """
        Returns the type of survey data managed by this source.
        Subclasses must return `GRAVITY` or `MAGNETIC` as appropriate.
        """
        raise NotImplementedError

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        This method is called by the `DomainBuilder` to retrieve the survey
        data as `Data` objects on the given domain.

        Subclasses should return one or more `Data` objects with survey data
        interpolated on the given `escript` domain. The exact return type
        depends on the type of data.

        :param domain: the escript domain to use
        :type domain: `esys.escript.Domain`
        :param origin: the origin coordinates of the domain
        :type origin: ``tuple`` or ``list``
        :param NE: the number of domain elements in each dimension
        :type NE: ``tuple`` or ``list``
        :param spacing: the cell sizes (node spacing) in the domain
        :type spacing: ``tuple`` or ``list``
        """
        raise NotImplementedError

    def getUtmZone(self):
        """
        All data source coordinates are converted to UTM (Universal Transverse
        Mercator) in order to have useful domain extents. Subclasses should
        implement this method and return the UTM zone number of the projected
        coordinates.
        """
        raise NotImplementedError

    def setSubsamplingFactor(self, f):
        """
        Sets the data subsampling factor (default=1).

        The factor is applied in all dimensions. For example a 2D dataset
        with 300 x 150 data points will be reduced to 150 x 75 when a
        subsampling factor of 2 is used.
        This becomes important when adding data of varying resolution to
        a `DomainBuilder`.
        """
        self.__subsampling_factor = f

    def getSubsamplingFactor(self):
        """
        Returns the subsampling factor that was set via `setSubsamplingFactor`
        (see there).
        """
        return self.__subsampling_factor
188
189
190 ##############################################################################
class ErMapperData(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, data_type, headerfile, datafile=None, altitude=0.):
        """
        :param data_type: type of data, must be `GRAVITY` or `MAGNETIC`
        :type data_type: ``int``
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :type headerfile: ``str``
        :param datafile: ER Mapper binary data file name. If not supplied the
                         name of the header file without '.ers' is assumed
        :type datafile: ``str``
        :param altitude: altitude of measurements above ground in meters
        :type altitude: ``float``
        """
        super(ErMapperData, self).__init__()
        self.__headerfile = headerfile
        if datafile is None:
            # by convention the data file name is the header file name
            # with the '.ers' suffix stripped
            self.__datafile = headerfile[:-4]
        else:
            self.__datafile = datafile
        self.__altitude = altitude
        self.__data_type = data_type
        # dummy UTM zone so getUtmZone() is always usable; it is replaced
        # by the real zone when the header contains geodetic coordinates
        self.__utm_zone = 0
        self.__readHeader()

    def __readHeader(self):
        """
        Parses the ER Mapper header file and populates extents, cell
        spacing, null value and scale factor for `getSurveyData`.
        """
        self.logger.debug("Checking Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        # read all header lines, making sure the file handle is closed
        with open(self.__headerfile, 'r') as f:
            metadata = f.readlines()
        # locate the start of the actual dataset header
        start = -1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start = i+1
        if start == -1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # flatten the nested 'X Begin'/'X End' sections into a flat dict
        # with dotted keys, e.g. 'RasterInfo.CellInfo.Xdimension'
        md_dict = {}
        section = []
        for i in range(start, len(metadata)):
            line = metadata[i].strip()
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section) > 0:
                    section.pop()
            else:
                vals = line.split('=')
                if len(vals) == 2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey = '.'.join(section+[key])
                    md_dict[fullkey] = value

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warning("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except (KeyError, ValueError):
            # fall back to the conventional "no data" marker
            maskval = 99999

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType'] == 'EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType'] == 'LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except (KeyError, ValueError, RuntimeError):
            self.logger.warning("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX, originY = 0.0, 0.0

        # use .get() so a header without a Projection entry does not raise
        if 'GEODETIC' in md_dict.get('CoordinateSpace.Projection', ''):
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds = gdal.Open(self.__headerfile)
                wkt = ds.GetProjection()
            except Exception:
                wkt = 'GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warning('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM, originY_UTM, zone = LatLonToUTM(originX, originY, wkt)
            # project a point one cell away to derive the UTM cell spacing
            op1X, op1Y, _ = LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            self.__utm_zone = zone
            # we are rounding to avoid interpolation issues
            spacingX = np.round(op1X-originX_UTM)
            spacingY = np.round(op1Y-originY_UTM)
            originX = np.round(originX_UTM)
            originY = np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down;
        # shift the origin to the bottom-left for the domain
        self.__dataorigin = [originX, originY]
        originY -= (NY-1)*spacingY
        self.__delta = [spacingX, spacingY]
        self.__maskval = maskval
        self.__nPts = [NX, NY]
        self.__origin = [originX, originY]
        if self.__data_type == self.GRAVITY:
            self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
            self.__scalefactor = 1e-6
        else:
            self.logger.info("Assuming magnetic data units are 'nT'.")
            self.__scalefactor = 1e-9

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns `GRAVITY` or `MAGNETIC` as passed to the constructor.
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Reads the binary grid and returns the measurement data together
        with an uncertainty estimate interpolated on the given domain.
        """
        nValues = self.__nPts
        # determine base location of this dataset within the domain
        first = [int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim() == 3:
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues = nValues+[1]

        data = ripleycpp._readBinaryGrid(self.__datafile,
                                         ReducedFunction(domain),
                                         first, nValues, (), self.__maskval)
        # 1 wherever a valid (non-null) measurement exists, 0 elsewhere
        sigma = whereNonZero(data-self.__maskval)
        data = data*self.__scalefactor
        # assume an uncertainty of 2 raw units at valid data points
        sigma = sigma * 2. * self.__scalefactor
        return data, sigma

    def getUtmZone(self):
        """
        Returns the UTM zone of the data (0 if no conversion took place).
        """
        return self.__utm_zone
343
344
345 ##############################################################################
class NetCdfData(DataSource):
    """
    Data Source for gridded netCDF data that use CF/COARDS conventions.
    """
    def __init__(self, data_type, filename, altitude=0., data_variable=None,
                 error=None, scale_factor=None, null_value=None):
        """
        :param filename: file name for survey data in netCDF format
        :type filename: ``str``
        :param data_type: type of data, must be `GRAVITY` or `MAGNETIC`
        :type data_type: ``int``
        :param altitude: altitude of measurements in meters
        :type altitude: ``float``
        :param data_variable: name of the netCDF variable that holds the data.
                              If not provided an attempt is made to determine
                              the variable and an exception thrown on failure.
        :type data_variable: ``str``
        :param error: either the name of the netCDF variable that holds the
                      uncertainties of the measurements or a constant value
                      to use for the uncertainties. If a constant value is
                      supplied, it is scaled by the same factor as the
                      measurements. If not provided the error is assumed to
                      be 2 units for all measurements (i.e. 0.2 mGal and 2 nT
                      for gravity and magnetic, respectively)
        :type error: ``str`` or ``float``
        :param scale_factor: the measurements and error values are scaled by
                             this factor. By default, gravity data is assumed
                             to be given in 1e-6 m/s^2 (0.1 mGal), while
                             magnetic data is assumed to be in 1e-9 T (1 nT).
        :type scale_factor: ``float``
        :param null_value: value that is used in the file to mark undefined
                           areas. This information is usually included in the
                           file.
        :type null_value: ``float``
        """
        super(NetCdfData, self).__init__()
        self.__filename = filename
        if not data_type in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for data_type parameter")
        self.__data_type = data_type
        self.__altitude = altitude
        self.__data_name = data_variable
        self.__scale_factor = scale_factor
        self.__null_value = null_value
        self.__readMetadata(error)

    def __readMetadata(self, error):
        """
        Extracts grid extents, data/error variable names, null value and
        scale factor from the netCDF file.
        """
        self.logger.debug("Checking Data Source: %s"%self.__filename)
        f = netcdf_file(self.__filename, 'r')
        # ensure the file handle is released even when one of the sanity
        # checks below raises (the original leaked it on error paths)
        try:
            ### longitude- / X-dimension and variable
            NX = 0
            for n in ['lon','longitude','x']:
                if n in f.dimensions:
                    NX = f.dimensions[n]
                    lon_name = n
                    break
            if NX == 0:
                raise RuntimeError("Could not determine extents of data")

            # CF/COARDS states that coordinate variables have the same name
            # as the dimensions
            if not lon_name in f.variables:
                raise RuntimeError("Could not determine longitude variable")
            longitude = f.variables.pop(lon_name)

            ### latitude- / Y-dimension and variable
            NY = 0
            for n in ['lat','latitude','y']:
                if n in f.dimensions:
                    NY = f.dimensions[n]
                    lat_name = n
                    break
            if NY == 0:
                raise RuntimeError("Could not determine extents of data")
            if not lat_name in f.variables:
                raise RuntimeError("Could not determine latitude variable")
            latitude = f.variables.pop(lat_name)

            ### data variable
            if self.__data_name is not None:
                try:
                    dims = f.variables[self.__data_name].dimensions
                    if not ((lat_name in dims) and (lon_name in dims)):
                        raise ValueError("Invalid data variable name supplied")
                except KeyError:
                    raise ValueError("Invalid data variable name supplied")
            else:
                # pick the first variable that spans both coordinate
                # dimensions (the coordinate variables were popped above)
                for n in f.variables.keys():
                    dims = f.variables[n].dimensions
                    if (lat_name in dims) and (lon_name in dims):
                        self.__data_name = n
                        break
            if self.__data_name is None:
                raise RuntimeError("Could not determine data variable")

            datavar = f.variables[self.__data_name]

            ### error value/variable
            self.__error_name = None
            if isinstance(error, str):
                try:
                    dims = f.variables[error].dimensions
                    if not ((lat_name in dims) and (lon_name in dims)):
                        raise ValueError("Invalid error variable name supplied")
                except KeyError:
                    raise ValueError("Invalid error variable name supplied")
                self.__error_name = error
            elif isinstance(error, float) or isinstance(error, int):
                self.__error_value = float(error)
            elif error is None:
                # default uncertainty of 2 raw units (see constructor docs)
                self.__error_value = 2.
            else:
                raise TypeError("Invalid type of error parameter")

            ### mask/null value
            # note that NaN is always filtered out in ripley
            if self.__null_value is None:
                if hasattr(datavar, 'missing_value'):
                    self.__null_value = float(datavar.missing_value)
                elif hasattr(datavar, '_FillValue'):
                    self.__null_value = float(datavar._FillValue)
                else:
                    self.logger.debug("Could not determine null value, using default.")
                    self.__null_value = 99999

            # try to determine units of data - this is disabled until we
            # obtain a file with valid information
            #if hasattr(f.variables[data_name], 'units'):
            #   units=f.variables[data_name].units

            ### scale factor
            if self.__scale_factor is None:
                if self.__data_type == self.GRAVITY:
                    self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
                    self.__scale_factor = 1e-6
                else:
                    self.logger.info("Assuming magnetic data units are 'nT'.")
                    self.__scale_factor = 1e-9

            # see if there is a WKT string to convert coordinates; getattr
            # with a default replaces the previous bare except
            wkt_string = getattr(datavar, 'esri_pe_string', None)

            # actual_range & geospatial_lon_min/max do not always contain
            # correct values so we have to obtain the min/max in a less
            # efficient way:
            lon_range = longitude.data.min(), longitude.data.max()
            lat_range = latitude.data.min(), latitude.data.max()
            lon_range, lat_range, zone = LatLonToUTM(lon_range, lat_range, wkt_string)
            self.__utm_zone = zone
            lengths = [lon_range[1]-lon_range[0], lat_range[1]-lat_range[0]]
        finally:
            f.close()

        self.__nPts = [NX, NY]
        self.__origin = [lon_range[0], lat_range[0]]
        # we are rounding to avoid interpolation issues
        self.__delta = [np.round(lengths[i]/self.__nPts[i]) for i in range(2)]

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns `GRAVITY` or `MAGNETIC` as passed to the constructor.
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Reads the netCDF grid and returns the measurement data together
        with an uncertainty estimate interpolated on the given domain.
        """
        FS = ReducedFunction(domain)
        nValues = self.__nPts
        # determine base location of this dataset within the domain
        first = [int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim() == 3:
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues = nValues+[1]

        data = ripleycpp._readNcGrid(self.__filename, self.__data_name, FS,
                                     first, nValues, (), self.__null_value)
        if self.__error_name is not None:
            sigma = ripleycpp._readNcGrid(self.__filename, self.__error_name,
                                          FS, first, nValues, (), 0.)
        else:
            # constant uncertainty at all valid (non-null) data points
            sigma = self.__error_value * whereNonZero(data-self.__null_value)

        data = data * self.__scale_factor
        sigma = sigma * self.__scale_factor
        return data, sigma

    def getUtmZone(self):
        """
        Returns the UTM zone determined during the coordinate conversion.
        """
        return self.__utm_zone
538
539
540 ##############################################################################
class SourceFeature(object):
    """
    A feature adds a density/susceptibility distribution to (parts of) a
    domain of a synthetic data source, for example a layer of a specific
    rock type or a simulated ore body.
    """
    def getValue(self, x=None):
        """
        Returns the value for the area covered by mask. It can be constant
        or a `Data` object with spatial dependency.

        :param x: coordinates at which the value may be evaluated. The
                  parameter is optional for backwards compatibility, but
                  subclasses (and callers) pass the coordinates here.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature. That is,
        mask is non-zero where the value returned by `getValue()` should be
        applied, zero elsewhere.

        :param x: coordinates at which the mask is evaluated
        """
        raise NotImplementedError
561
class SmoothAnomaly(SourceFeature):
    """
    A source feature in the form of a blob (roughly gaussian).
    """
    def __init__(self, lx, ly, lz, x, y, depth, v_inner=None, v_outer=None):
        """
        Initializes the smooth anomaly data.

        :param lx: size of blob in x-dimension
        :param ly: size of blob in y-dimension
        :param lz: size of blob in z-dimension
        :param x: location of blob in x-dimension
        :param y: location of blob in y-dimension
        :param depth: depth of blob
        :param v_inner: value in the centre of the blob
        :param v_outer: value in the periphery of the blob
        """
        self.x = x
        self.y = y
        self.lx = lx
        self.ly = ly
        self.lz = lz
        self.depth = depth
        self.v_inner = v_inner
        self.v_outer = v_outer
        # value and mask are computed lazily and cached
        self.value = None
        self.mask = None

    def getValue(self, x):
        """
        Returns the (cached) anomaly value: a gaussian bump centred on the
        blob, clipped from below by |v_outer| and signed like v_inner.
        """
        if self.value is None:
            if self.v_outer is None or self.v_inner is None:
                self.value = 0
            else:
                dim = x.getDomain().getDim()
                # decay rate derived from the inner/outer value ratio
                alpha = -log(abs(self.v_outer/self.v_inner))*4
                bump = exp(-alpha*((x[0]-self.x)/self.lx)**2)
                bump = bump*exp(-alpha*((x[dim-1]+self.depth)/self.lz)**2)
                self.value = maximum(abs(self.v_outer), abs(self.v_inner*bump))
                if self.v_inner < 0:
                    self.value = -self.value
        return self.value

    def getMask(self, x):
        """
        Returns a box-shaped mask of size lx*ly*lz centred on the blob
        location (the y-extent only applies in 3D).
        """
        dim = x.getDomain().getDim()
        z = x[dim-1]
        # vertical slab of thickness lz around -depth, intersected with
        # a band of width lx around the x location
        m = whereNonNegative(z+self.depth+self.lz/2) \
                * whereNonPositive(z+self.depth-self.lz/2) \
                * whereNonNegative(x[0]-(self.x-self.lx/2)) \
                * whereNonPositive(x[0]-(self.x+self.lx/2))
        if dim > 2:
            # restrict along the second horizontal direction as well
            m *= whereNonNegative(x[1]-(self.y-self.ly/2)) \
                    * whereNonPositive(x[1]-(self.y+self.ly/2))
        self.mask = m
        return m
612
613 ##############################################################################
class SyntheticDataBase(DataSource):
    """
    Base class to define reference data based on a given property distribution
    (density or susceptibility). Data are collected from a square region of
    vertical extent `length` on a grid with `number_of_elements` cells in each
    direction.

    The synthetic data are constructed by solving the appropriate forward
    problem. Data can be sampled with an offset from the surface at z=0 or
    using the entire subsurface region.
    """
    def __init__(self, data_type,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param DIM: number of spatial dimensions
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` spherical coordinates are used. This is
                          not supported yet and should be set to ``False``.
        :type spherical: ``Bool``
        """
        super(SyntheticDataBase, self).__init__()
        if not data_type in [self.GRAVITY, self.MAGNETIC]:
            raise ValueError("Invalid value for data_type parameter")
        self.DIM=DIM
        self.number_of_elements=number_of_elements
        self.length=length
        self.__data_type = data_type

        if spherical:
            raise ValueError("Spherical coordinates are not supported yet")
        self.__spherical = spherical
        self.__full_knowledge= full_knowledge
        self.__data_offset=data_offset
        self.__B_b =None
        # this is for Cartesian (FIXME ?)
        # map the (B_r, B_lat, B_lon) components onto Cartesian axes.
        # NOTE(review): the 2D branch builds a numpy array while the 3D
        # branch builds a plain list - confirm this asymmetry is intended.
        if data_type == self.MAGNETIC:
            if self.DIM < 3:
                self.__B_b = np.array([-B_b[2], -B_b[0]])
            else:
                self.__B_b = ([-B_b[1], -B_b[2], -B_b[0]])
        # lateral grid description of the data collection region
        self.__origin = [0]*(DIM-1)
        self.__delta = [float(length)/number_of_elements]*(DIM-1)
        self.__nPts = [number_of_elements]*(DIM-1)
        # computed lazily by getReferenceProperty() in subclasses
        self._reference_data=None

    def getUtmZone(self):
        """
        returns a dummy UTM zone since this class does not use real coordinate
        values.
        """
        return 0

    def getDataExtents(self):
        """
        returns the lateral data extend of the data set
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        returns the data type
        """
        return self.__data_type

    def getSurveyData(self, domain, origin, number_of_elements, spacing):
        """
        returns the survey data placed on a given domain.

        :param domain: domain on which the data are to be placed
        :type domain: ``Domain``
        :param origin: origin of the domain
        :type origin: ``list`` of ``float``
        :param number_of_elements: number of elements (or cells) in each
                                   spatial direction used to span the domain
        :type number_of_elements: ``list`` of ``int``
        :param spacing: cell size in each spatial direction
        :type spacing: ``list`` of ``float``
        :return: observed gravity field or magnetic flux density for each cell
                 in the domain and for each cell an indicator 1/0 if the data
                 are valid or not.
        :rtype: pair of ``Scalar``
        """
        pde=LinearSinglePDE(domain)
        DIM=domain.getDim()
        x=domain.getX()
        # set the reference data
        k=self.getReferenceProperty(domain)
        # calculate the corresponding potential
        z=x[DIM-1]
        # pin the potential to zero on the top boundary of the domain
        m_psi_ref=whereZero(z-sup(z))
        if self.getDataType()==DataSource.GRAVITY:
            pde.setValue(A=kronecker(domain), Y=-4*np.pi*U.Gravitational_Constant*self._reference_data, q=m_psi_ref)
        else:
            pde.setValue(A=kronecker(domain), X=self._reference_data*self.__B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        #pde.getSolverOptions().setTolerance(1e-13)
        psi_ref=pde.getSolution()
        del pde
        # the observed field is the (negative) gradient of the potential,
        # plus the background contribution in the magnetic case
        if self.getDataType()==DataSource.GRAVITY:
            data = -grad(psi_ref, ReducedFunction(domain))
        else:
            data = self._reference_data*self.__B_b-grad(psi_ref, ReducedFunction(domain))

        x=ReducedFunction(domain).getX()
        if self.__full_knowledge:
            # every subsurface cell counts as a valid measurement
            sigma = whereNegative(x[DIM-1])
        else:
            sigma=1.
            # limit mask to non-padding in horizontal area
            # NOTE(review): the upper bound sup(x_i)+inf(x_i) only strips
            # padding when the padding is symmetric - confirm intended
            for i in range(DIM-1):
                x_i=x[i]
                sigma=sigma * wherePositive(x_i) * whereNegative(x_i-(sup(x_i)+inf(x_i)))
            # limit mask to one cell thickness at z=0
            z=x[DIM-1]
            # snap the data offset to the nearest cell boundary
            oo=int(self.__data_offset/spacing[DIM-1]+0.5)*spacing[DIM-1]
            sigma = sigma * whereNonNegative(z-oo) * whereNonPositive(z-oo-spacing[DIM-1])
        return data,sigma

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.

        :return: the density or susceptibility anomaly used to create the
                 survey data
        :note: it can be assumed that in the first call the ``domain``
               argument is present so the actual anomaly data can be created.
               In subsequent calls this may not be true.
        :note: method needs to be overwritten
        """
        raise NotImplementedError()
765
class SyntheticFeatureData(SyntheticDataBase):
    """
    Uses a list of `SourceFeature` objects to define synthetic anomaly data.
    """
    def __init__(self, data_type,
                 features,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param features: list of features. It is recommended that the features
                         are located entirely below the surface.
        :type features: ``list`` of `SourceFeature`
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude]. Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` spherical coordinates are used.
        :type spherical: ``Bool``
        """
        super(SyntheticFeatureData, self).__init__(
                                 data_type=data_type, DIM=DIM,
                                 number_of_elements=number_of_elements,
                                 length=length, B_b=B_b,
                                 data_offset=data_offset,
                                 full_knowledge=full_knowledge,
                                 spherical=spherical)
        self._features = features

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        # identity comparison instead of '== None': the cached reference
        # data is an escript Data object for which '==' is not a plain
        # boolean test
        if self._reference_data is None:
            DIM = domain.getDim()
            x = domain.getX()
            k = 0.
            # overlay features in order; a later feature overrides earlier
            # ones inside its mask
            for f in self._features:
                m = f.getMask(x)
                k = k * (1-m) + f.getValue(x) * m
            self._reference_data = k
        return self._reference_data
825
class SyntheticData(SyntheticDataBase):
    """
    Defines synthetic gravity/magnetic data based on harmonic property anomaly

        rho = amplitude * sin(n_depth * pi /depth * (z+depth_offset)) * sin(n_length * pi /length * (x - shift) )

    for all x and z<=0. For z>0 rho = 0.
    """
    def __init__(self, data_type,
                 n_length=1,
                 n_depth=1,
                 depth_offset=0.,
                 depth=None,
                 amplitude=None,
                 DIM=2,
                 number_of_elements=10,
                 length=1*U.km,
                 B_b=None,
                 data_offset=0,
                 full_knowledge=False,
                 spherical=False):
        """
        :param data_type: data type indicator
        :type data_type: `DataSource.GRAVITY`, `DataSource.MAGNETIC`
        :param n_length: number of oscillations in the anomaly data within the
                         observation region
        :type n_length: ``int``
        :param n_depth: number of oscillations in the anomaly data below surface
        :type n_depth: ``int``
        :param depth_offset: vertical offset of the data
        :type depth_offset: ``float``
        :param depth: vertical extent in the anomaly data. If not present the
                      depth of the domain is used.
        :type depth: ``float``
        :param amplitude: data amplitude. Default value is 200 U.kg/U.m**3 for
                          gravity and 0.1 for magnetic data.
        :param DIM: spatial dimensionality
        :type DIM: ``int`` (2 or 3)
        :param number_of_elements: lateral number of elements in the region
                                   where data are collected
        :type number_of_elements: ``int``
        :param length: lateral extent of the region where data are collected
        :type length: ``float``
        :param B_b: background magnetic flux density [B_r, B_latiude, B_longitude].
                    Only used for magnetic data.
        :type B_b: ``list`` of ``Scalar``
        :param data_offset: offset of the data collection region from the surface
        :type data_offset: ``float``
        :param full_knowledge: if ``True`` data are collected from the entire
                               subsurface region. This is mainly for testing.
        :type full_knowledge: ``Bool``
        :param spherical: if ``True`` spherical coordinates are used
        :type spherical: ``Bool``
        """
        super(SyntheticData, self).__init__(
                                 data_type=data_type, DIM=DIM,
                                 number_of_elements=number_of_elements,
                                 length=length, B_b=B_b,
                                 data_offset=data_offset,
                                 full_knowledge=full_knowledge,
                                 spherical=spherical)
        self.__n_length = n_length
        self.__n_depth = n_depth
        self.depth = depth
        self.depth_offset = depth_offset
        # 'is None' instead of '== None' (identity check, consistent with
        # getReferenceProperty below)
        if amplitude is None:
            if data_type == DataSource.GRAVITY:
                amplitude = 200 * U.kg/U.m**3
            else:
                amplitude = 0.1
        self.__amplitude = amplitude

    def getReferenceProperty(self, domain=None):
        """
        Returns the reference `Data` object that was used to generate
        the gravity/susceptibility anomaly data.
        """
        if self._reference_data is None:
            DIM = domain.getDim()
            x = domain.getX()
            # set the reference data: harmonic in depth, windowed to the
            # band depth_offset..depth_offset-depth
            z = x[DIM-1]
            dd = self.depth
            if dd is None: dd = inf(z)
            z2 = (z+self.depth_offset)/(self.depth_offset-dd)
            k = sin(self.__n_depth * np.pi * z2) * whereNonNegative(z2) * whereNonPositive(z2-1.) * self.__amplitude
            # modulate laterally in every horizontal direction;
            # 'range' replaces the Python-2-only 'xrange' which raises
            # NameError on Python 3
            for i in range(DIM-1):
                x_i = x[i]
                min_x = inf(x_i)
                max_x = sup(x_i)
                k *= sin(self.__n_length*np.pi*(x_i-min_x)/(max_x-min_x))
            self._reference_data = k
        return self._reference_data
919

  ViewVC Help
Powered by ViewVC 1.1.26