/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Annotation of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 4108 - (hide annotations)
Thu Dec 13 06:38:11 2012 UTC (6 years, 9 months ago) by caltinay
File MIME type: text/x-python
File size: 23235 byte(s)
Changed methods to add padding areas.

1 caltinay 3946
2 jfenwick 3981 ##############################################################################
3 caltinay 3946 #
4     # Copyright (c) 2003-2012 by University of Queensland
5 jfenwick 3981 # http://www.uq.edu.au
6 caltinay 3946 #
7     # Primary Business: Queensland, Australia
8     # Licensed under the Open Software License version 3.0
9     # http://www.opensource.org/licenses/osl-3.0.php
10     #
11 jfenwick 3981 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12     # Development since 2012 by School of Earth Sciences
13     #
14     ##############################################################################
15 caltinay 3946
16 caltinay 4007 """Data readers/providers for inversions"""
17    
18 caltinay 3946 __copyright__="""Copyright (c) 2003-2012 by University of Queensland
19 jfenwick 3981 http://www.uq.edu.au
20 caltinay 3946 Primary Business: Queensland, Australia"""
21     __license__="""Licensed under the Open Software License version 3.0
22     http://www.opensource.org/licenses/osl-3.0.php"""
23     __url__="https://launchpad.net/escript-finley"
24    
25 gross 4106 __all__ = ['simpleBackgroundMagneticField', 'DataSource','ErMapperData','SyntheticFeatureData','SmoothAnomaly']
26 caltinay 3947
27 caltinay 3946 import logging
28     import numpy as np
29 caltinay 4060 from esys.escript import ReducedFunction
30 caltinay 4005 from esys.escript.linearPDEs import LinearSinglePDE
31 caltinay 4007 from esys.escript.util import *
32 caltinay 3946 import esys.escript.unitsSI as U
33 caltinay 4005 from esys.ripley import Brick, Rectangle, ripleycpp
34 caltinay 3971
35 caltinay 3947 try:
36     from scipy.io.netcdf import netcdf_file
37 caltinay 4060 __all__ += ['NetCdfData']
38 caltinay 3947 except:
39     pass
40 caltinay 3946
def LatLonToUTM(lon, lat, wkt_string=None):
    """
    Converts one or more longitude,latitude pairs to the corresponding x,y
    coordinates in the Universal Transverse Mercator projection.

    :note: If the ``pyproj`` module is not installed a warning is printed and
           the input values are scaled by a constant and returned.
    :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
           not available to convert the string, then the input values are
           assumed to be given in the Clarke 1866 projection.

    :param lon: longitude value(s)
    :type lon: `float`, `list`, `tuple`, or ``numpy.array``
    :param lat: latitude value(s)
    :type lat: `float`, `list`, `tuple`, or ``numpy.array``
    :param wkt_string: well-known-text description of the source projection
    :type wkt_string: ``str`` or ``None``
    :rtype: ``numpy.array``
    """

    # not really optimal: if pyproj is not installed we return the input
    # values scaled by a constant.
    try:
        import pyproj
    except ImportError:
        print("Warning, pyproj not available. Domain extents will be wrong")
        return np.array(lon)*1000., np.array(lat)*1000.

    # determine UTM zone from the input data (median longitude decides)
    zone = int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
    try:
        import osgeo.osr
        srs = osgeo.osr.SpatialReference()
        srs.ImportFromWkt(wkt_string)
        p_src = pyproj.Proj(srs.ExportToProj4())
    except Exception:
        # gdal missing or wkt unusable: fall back to the Clarke 1866 ellipsoid
        p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
    # we assume southern hemisphere here
    p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
    x, y = pyproj.transform(p_src, p_dest, lon, lat)
    return x, y
80 caltinay 3947
def simpleBackgroundMagneticField(latitude, longitude=0.):
    """
    Returns the components of a simple dipole model of the Earth's background
    magnetic field at the given latitude.

    :param latitude: geographic latitude in degrees
    :param longitude: geographic longitude in degrees (unused in this
                      axisymmetric dipole model, kept for interface symmetry)
    :return: tuple ``(B_r, B_theta, B_phi)`` of radial, colatitudinal and
             azimuthal components; ``B_phi`` is always 0
    """
    # colatitude in radians
    theta = (90-latitude)/180.*np.pi
    # dipole field magnitude at the equator on the Earth's surface
    B_0 = U.Mu_0 * U.Magnetic_Dipole_Moment_Earth / (4 * np.pi * U.R_Earth**3)
    # use numpy's trig functions explicitly instead of relying on the names
    # pulled in by the star import of esys.escript.util (theta is a plain float)
    B_theta = B_0 * np.sin(theta)
    B_r = 2 * B_0 * np.cos(theta)
    return B_r, B_theta, 0.
87 caltinay 4108
class DataSource(object):
    """
    Abstract base class for survey data providers used by the inversion
    process.

    It implements the functionality shared by all sources and documents the
    methods subclasses are expected to override. Two-dimensional data mapped
    onto a slice of a 3D domain is assumed; override the relevant methods
    for any other setup.
    """

    # survey data type constants
    GRAVITY = 0
    MAGNETIC = 1

    def __init__(self):
        """
        Initializes the logger and default settings.
        """
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)
        self.__subsampling_factor = 1
        self.__background_magnetic_field = None

    def setSubsamplingFactor(self, f):
        """
        Sets the data subsampling factor (default=1).

        The factor applies to every dimension, e.g. a 2D dataset of
        300 x 150 points becomes 150 x 75 points with a factor of 2.
        This matters when combining data of varying resolution in a
        `DomainBuilder`.
        """
        self.__subsampling_factor = f

    def getSubsamplingFactor(self):
        """
        Returns the subsampling factor previously set via
        `setSubsamplingFactor` (see there).
        """
        return self.__subsampling_factor

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDataType(self):
        """
        Returns the type of survey data managed by this source.
        Subclasses must return `GRAVITY` or `MAGNETIC` as appropriate.
        """
        raise NotImplementedError

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        This method is called by the `DomainBuilder` to retrieve the survey
        data as `Data` objects on the given domain.

        Subclasses should return one or more `Data` objects with survey data
        interpolated on the given ripley domain. The exact return type
        depends on the type of data.

        :param domain: the escript domain to use
        :type domain: `esys.escript.Domain`
        :param origin: the origin coordinates of the domain
        :type origin: ``tuple`` or ``list``
        :param NE: the number of domain elements in each dimension
        :type NE: ``tuple`` or ``list``
        :param spacing: the cell sizes (node spacing) in the domain
        :type spacing: ``tuple`` or ``list``
        """
        raise NotImplementedError
162 caltinay 3946
163    
164 caltinay 3964 ##############################################################################
class ErMapperData(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, datatype, headerfile, datafile=None, altitude=0.):
        """
        :param datatype: type of data, must be `GRAVITY` or `MAGNETIC`
        :type datatype: ``int``
        :param headerfile: ER Mapper header file (usually ends in .ers)
        :type headerfile: ``str``
        :param datafile: ER Mapper binary data file name. If not supplied the
                         name of the header file without '.ers' is assumed
        :type datafile: ``str``
        :param altitude: altitude of measurements above ground in meters
        :type altitude: ``float``
        """
        super(ErMapperData,self).__init__()
        self.__headerfile=headerfile
        if datafile is None:
            # default: strip the '.ers' extension to obtain the data file name
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__altitude=altitude
        self.__datatype=datatype
        self.__readHeader()

    def __readHeader(self):
        """
        Parses the ER Mapper header file and populates grid size, origin,
        cell spacing, null value and scale factor attributes.
        Raises ``RuntimeError`` when mandatory header entries are missing or
        the cell type is unsupported.
        """
        self.logger.debug("Checking Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        metadata=open(self.__headerfile, 'r').readlines()
        # parse metadata
        start=-1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # flatten the nested 'X Begin'/'X End' sections into a flat dict with
        # dotted keys, e.g. 'RasterInfo.CellInfo.Xdimension'
        md_dict={}
        section=[]
        for i in range(start, len(metadata)):
            line=metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        # only single precision IEEE floats are supported by the reader
        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except:
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except:
            # no explicit null value in the header; fall back to a default
            maskval = 99999

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except:
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                # eastings/northings, i.e. an already projected coordinate system
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                # longitude/latitude registration
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except:
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
            op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolation issues
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin=[originX, originY]
        originY-=(NY-1)*spacingY
        self.__delta = [spacingX, spacingY]
        self.__maskval = maskval
        self.__nPts = [NX, NY]
        self.__origin = [originX, originY]
        if self.__datatype == self.GRAVITY:
            self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
            self.__scalefactor = 1e-6
        else:
            self.logger.info("Assuming magnetic data units are 'nT'.")
            self.__scalefactor = 1e-9

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns the data type (`GRAVITY` or `MAGNETIC`) given to the
        constructor.
        """
        return self.__datatype

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Reads the binary grid into the given ripley domain and returns the
        scaled data together with an error estimate (see
        `DataSource.getSurveyData`).
        """
        nValues=self.__nPts
        # determine base location of this dataset within the domain
        first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim()==3:
            # place the data slice at the measurement altitude
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues=nValues+[1]

        data=ripleycpp._readBinaryGrid(self.__datafile,
                ReducedFunction(domain),
                first, nValues, (), self.__maskval)
        # sigma is non-zero wherever a valid (non-null) data value was read
        sigma = whereNonZero(data-self.__maskval)
        data = data*self.__scalefactor
        # NOTE(review): error estimate is taken as 2 data units — confirm
        sigma = sigma * 2. * self.__scalefactor
        return data, sigma
313    
314    
315     ##############################################################################
class NetCdfData(DataSource):
    """
    Data Source for gridded netCDF data that use CF/COARDS conventions.
    """
    def __init__(self, datatype, filename, altitude=0.):
        """
        :param filename: file name for survey data in netCDF format
        :type filename: ``str``
        :param datatype: type of data, must be `GRAVITY` or `MAGNETIC`
        :type datatype: ``int``
        :param altitude: altitude of measurements in meters
        :type altitude: ``float``
        """
        super(NetCdfData,self).__init__()
        self.__filename=filename
        if not datatype in [self.GRAVITY,self.MAGNETIC]:
            raise ValueError("Invalid value for datatype parameter")
        self.__datatype=datatype
        self.__altitude=altitude
        self.__readMetadata()

    def __readMetadata(self):
        """
        Opens the netCDF file and determines grid extents, origin, spacing,
        data variable name, null value and scale factor.
        Raises ``RuntimeError`` if the dimensions or coordinate/data
        variables cannot be identified.
        """
        self.logger.debug("Checking Data Source: %s"%self.__filename)
        f=netcdf_file(self.__filename, 'r')
        # number of points in x: look for commonly used dimension names
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        # number of points in y
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables (popped so only candidate
        # data variables remain in f.variables afterwards)
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out data variable name
        data_name=None
        if len(f.variables)==1:
            # NOTE: keys() returns a view in Python 3 which does not support
            # indexing, so materialize it first
            data_name=list(f.variables.keys())[0]
        else:
            # pick the first variable defined over both coordinate dimensions
            for n in list(f.variables.keys()):
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    data_name=n
                    break
        if data_name is None:
            raise RuntimeError("Could not determine data variable")

        # try to determine value for unused data
        if hasattr(f.variables[data_name], 'missing_value'):
            maskval = float(f.variables[data_name].missing_value)
        elif hasattr(f.variables[data_name], '_FillValue'):
            maskval = float(f.variables[data_name]._FillValue)
        else:
            self.logger.debug("missing_value attribute not found, using default.")
            maskval = 99999

        # try to determine units of data - this is disabled for now
        #if hasattr(f.variables[data_name], 'units'):
        #   units=f.variables[data_name].units
        if self.__datatype == self.GRAVITY:
            self.logger.info("Assuming gravity data scale is 1e-6 m/s^2.")
            self.__scalefactor = 1e-6
        else:
            self.logger.info("Assuming magnetic data units are 'nT'.")
            self.__scalefactor = 1e-9

        # see if there is a wkt string to convert coordinates
        try:
            wkt_string=f.variables[data_name].esri_pe_string
        except Exception:
            wkt_string=None

        # we don't trust actual_range & geospatial_lon_min/max since subset
        # data does not seem to have these fields updated.
        # Getting min/max from the arrays is obviously not very efficient but..
        #lon_range=longitude.actual_range
        #lat_range=latitude.actual_range
        #lon_range=[f.geospatial_lon_min,f.geospatial_lon_max]
        #lat_range=[f.geospatial_lat_min,f.geospatial_lat_max]
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        if lon_range[1]<180:
            # values look like geographic coordinates, convert to UTM
            lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0]]
        f.close()

        self.__nPts=[NX, NY]
        self.__origin=[lon_range[0],lat_range[0]]
        # we are rounding to avoid interpolation issues
        self.__delta=[np.round(lengths[i]/self.__nPts[i]) for i in range(2)]
        #self.__wkt_string=wkt_string
        #self.__lon_name=lon_name
        #self.__lat_name=lat_name
        self.__data_name=data_name
        self.__maskval=maskval

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns the data type (`GRAVITY` or `MAGNETIC`) given to the
        constructor.
        """
        return self.__datatype

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Reads the netCDF grid into the given ripley domain and returns the
        scaled data together with an error estimate (see
        `DataSource.getSurveyData`).
        """
        nValues=self.__nPts
        # determine base location of this dataset within the domain
        first=[int((self.__origin[i]-origin[i])/spacing[i]) for i in range(len(self.__nPts))]
        if domain.getDim()==3:
            # place the data slice at the measurement altitude
            first.append(int((self.__altitude-origin[2])/spacing[2]))
            nValues=nValues+[1]

        data=ripleycpp._readNcGrid(self.__filename, self.__data_name,
                  ReducedFunction(domain), first, nValues, (), self.__maskval)
        # sigma is non-zero wherever a valid (non-null) data value was read
        sigma=whereNonZero(data-self.__maskval)
        data=data*self.__scalefactor
        # NOTE(review): error estimate is taken as 2 data units — confirm
        sigma=sigma * 2. * self.__scalefactor
        return data, sigma
458    
459    
460 caltinay 3958 ##############################################################################
class SourceFeature(object):
    """
    A feature adds a density distribution to (parts of) a domain of a synthetic
    data source, for example a layer of a specific rock type or a simulated
    ore body.
    """
    def getValue(self, x):
        """
        Returns the value for the area covered by mask. It can be constant
        or a data object with spatial dependency.

        Note: the signature now matches how subclasses (`SmoothAnomaly`) and
        callers (`SyntheticFeatureData.getSurveyData`) use this method, i.e.
        with the locations ``x`` as argument.

        :param x: locations at which to evaluate the value
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature. That is,
        mask is non-zero where the value returned by `getValue` should be
        applied, zero elsewhere.

        :param x: locations at which to evaluate the mask
        """
        raise NotImplementedError
480    
class SmoothAnomaly(SourceFeature):
    """
    A source feature representing a box-shaped anomaly region with a smooth,
    bell-shaped value distribution inside it.
    """
    def __init__(self, lx, ly, lz, x, y, depth, v_inner=None, v_outer=None):
        """
        :param lx: extent of the anomaly in x direction
        :param ly: extent of the anomaly in y direction (3D only)
        :param lz: extent of the anomaly in z direction
        :param x: x coordinate of the anomaly center
        :param y: y coordinate of the anomaly center (3D only)
        :param depth: depth of the anomaly center below the surface
        :param v_inner: value at the center of the anomaly
        :param v_outer: value at the outer boundary of the anomaly
        """
        self.x=x
        self.y=y
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.depth=depth
        self.v_inner=v_inner
        self.v_outer=v_outer
        # computed lazily by getValue()/getMask() and cached
        self.value=None
        self.mask=None

    def getValue(self,x):
        """
        Returns the (cached) anomaly value at locations ``x``. If either
        ``v_inner`` or ``v_outer`` is unset the value is 0, otherwise a
        bell-shaped profile decaying from |v_inner| at the center towards
        |v_outer|, carrying the sign of ``v_inner``.
        """
        if self.value is None:
            if self.v_outer is None or self.v_inner is None:
                self.value=0
            else:
                DIM=x.getDomain().getDim()
                # decay rate chosen so the profile reaches v_outer at the
                # edges of the box
                alpha=-log(abs(self.v_outer/self.v_inner))*4
                value=exp(-alpha*((x[0]-self.x)/self.lx)**2)
                value=value*exp(-alpha*((x[DIM-1]+self.depth)/self.lz)**2)
                # clamp from below by |v_outer| so values never fall under it
                self.value=maximum(abs(self.v_outer), abs(self.v_inner*value))
                if self.v_inner<0: self.value=-self.value

        return self.value

    def getMask(self, x):
        """
        Returns a mask that is non-zero inside the anomaly's bounding box
        (x, y and depth extents) and zero elsewhere.
        """
        DIM=x.getDomain().getDim()
        # box test in the vertical and the first horizontal direction
        m=whereNonNegative(x[DIM-1]+self.depth+self.lz/2) * whereNonPositive(x[DIM-1]+self.depth-self.lz/2) \
            *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM>2:
            # restrict in the second horizontal direction for 3D domains
            m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        self.mask = m
        return m
516 caltinay 3946
517 caltinay 3958 ##############################################################################
class SyntheticFeatureData(DataSource):
    """
    Data source that generates synthetic gravity or magnetic survey data from
    a list of `SourceFeature` objects by solving a potential PDE on the
    domain supplied to `getSurveyData`.
    """
    def __init__(self, datatype, DIM, NE, l, features, B_b=None):
        """
        :param datatype: type of data, must be `GRAVITY` or `MAGNETIC`
        :param DIM: dimensionality of the domain (2 or 3)
        :param NE: number of data points in each horizontal direction
        :param l: side length of the data area
        :param features: list of `SourceFeature` objects defining anomalies
        :param B_b: background magnetic field components (required when
                    ``datatype`` is `MAGNETIC`)
        """
        super(SyntheticFeatureData,self).__init__()
        if not datatype in [self.GRAVITY,self.MAGNETIC]:
            raise ValueError("Invalid value for datatype parameter")
        self.__datatype = datatype
        self._features = features
        self.__origin = [0]*(DIM-1)
        self.__nPts = [NE]*(DIM-1)
        self.__delta = [float(l)/NE]*(DIM-1)
        self.__B_b =None
        self.DIM=DIM
        self.NE=NE
        self.l=l
        # this is for Cartesian (FIXME ?)
        # reorder and negate the background field components to match the
        # domain's coordinate ordering
        if datatype == self.MAGNETIC:
            if self.DIM<3:
                self.__B_b = np.array([-B_b[2], -B_b[0]])
            else:
                self.__B_b = ([-B_b[1], -B_b[2], -B_b[0]])

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (list(self.__origin), list(self.__nPts), list(self.__delta))

    def getDataType(self):
        """
        Returns the data type (`GRAVITY` or `MAGNETIC`) given to the
        constructor.
        """
        return self.__datatype

    def getReferenceDensity(self):
        """
        Returns the reference density Data object that was used to generate
        the gravity anomaly data.
        """
        return self._rho

    def getReferenceSusceptibility(self):
        """
        Returns the reference magnetic susceptibility Data objects that was
        used to generate the magnetic field data.
        """
        return self._k

    def getSurveyData(self, domain, origin, NE, spacing):
        """
        Superimposes the features' values on the domain, solves a potential
        PDE and returns the gradient-based synthetic survey data together
        with a mask ``sigma`` marking the measurement area (see
        `DataSource.getSurveyData`).
        """
        pde=LinearSinglePDE(domain)
        G=U.Gravitational_Constant
        m_psi_ref=0.
        x=domain.getX()
        DIM=domain.getDim()
        # pin the potential at the top of the domain
        m_psi_ref=whereZero(x[DIM-1]-sup(x[DIM-1])) # + whereZero(x[DIM-1]-inf(x[DIM-1]))
        if self.getDataType()==DataSource.GRAVITY:
            # build the reference density by overlaying the features
            rho_ref=0.
            for f in self._features:
                m=f.getMask(x)
                rho_ref = rho_ref * (1-m) + f.getValue(x) * m
            self._rho=rho_ref
            # Poisson-type equation for the gravity potential
            pde.setValue(A=kronecker(domain), Y=-4*np.pi*G*rho_ref, q=m_psi_ref)
        else:
            # build the reference susceptibility by overlaying the features
            k_ref=0.
            for f in self._features:
                m=f.getMask(x)
                k_ref = k_ref * (1-m) + f.getValue(x) * m
            self._k=k_ref
            pde.setValue(A=kronecker(domain), X=k_ref*self.__B_b, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        if self.getDataType()==DataSource.GRAVITY:
            data = -grad(psi_ref, ReducedFunction(domain))
        else:
            data = self._k*self.__B_b-grad(psi_ref, ReducedFunction(domain))

        sigma=1.
        # limit mask to non-padding in horizontal area
        x=ReducedFunction(domain).getX()
        for i in range(self.DIM-1):
            # NOTE(review): relies on symmetric padding so that
            # sup(x[i])+inf(x[i]) equals the upper end of the unpadded data
            # area -- confirm for asymmetric padding
            sigma=sigma * wherePositive(x[i]) \
                    * whereNegative(x[i]-(sup(x[i])+inf(x[i])))
        # limit mask to one cell thickness at z=0
        sigma = sigma * whereNonNegative(x[self.DIM-1]) \
                * whereNonPositive(x[self.DIM-1]-spacing[self.DIM-1])
        return data,sigma
598 caltinay 4044
599    
600 gross 4106

  ViewVC Help
Powered by ViewVC 1.1.26