/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Contents of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 4019 - (show annotations)
Thu Oct 11 08:12:55 2012 UTC (6 years, 11 months ago) by jfenwick
File MIME type: text/x-python
File size: 32454 byte(s)
More tabbing errors,
range/xrange
...
1
2 ##############################################################################
3 #
4 # Copyright (c) 2003-2012 by University of Queensland
5 # http://www.uq.edu.au
6 #
7 # Primary Business: Queensland, Australia
8 # Licensed under the Open Software License version 3.0
9 # http://www.opensource.org/licenses/osl-3.0.php
10 #
11 # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12 # Development since 2012 by School of Earth Sciences
13 #
14 ##############################################################################
15
16 """Data readers/providers for inversions"""
17
18 __copyright__="""Copyright (c) 2003-2012 by University of Queensland
19 http://www.uq.edu.au
20 Primary Business: Queensland, Australia"""
21 __license__="""Licensed under the Open Software License version 3.0
22 http://www.opensource.org/licenses/osl-3.0.php"""
23 __url__="https://launchpad.net/escript-finley"
24
25 __all__ = ['DataSource','UBCDataSource','ERSDataSource','SyntheticDataSource','SmoothAnomaly']
26
27 import logging
28 import numpy as np
29 from esys.escript import ReducedFunction, Scalar
30 from esys.escript.linearPDEs import LinearSinglePDE
31 from esys.escript.util import *
32 import esys.escript.unitsSI as U
33 from esys.ripley import Brick, Rectangle, ripleycpp
34 import sys
35
36 if sys.version_info.major>2:
37 xrange=range
38
39 try:
40 from scipy.io.netcdf import netcdf_file
41 __all__ += ['NetCDFDataSource']
42 except:
43 pass
44
def LatLonToUTM(lon, lat, wkt_string=None):
    """
    Converts one or more longitude,latitude pairs to the corresponding x,y
    coordinates in the Universal Transverse Mercator projection.

    :note: If the ``pyproj`` module is not installed a warning is printed and
           the input values are scaled by a constant and returned.
    :note: If `wkt_string` is not given or invalid or the ``gdal`` module is
           not available to convert the string, then the input values are
           assumed to be given in the Clarke 1866 projection.

    :param lon: longitude value(s)
    :type lon: `float`, `list`, `tuple`, or ``numpy.array``
    :param lat: latitude value(s)
    :type lat: `float`, `list`, `tuple`, or ``numpy.array``
    :param wkt_string: optional well-known-text projection string
    :rtype: ``numpy.array``
    """

    # not really optimal: if pyproj is not installed we return the input
    # values scaled by a constant.
    # (fix: was a bare ``except:`` which would also swallow SystemExit and
    # KeyboardInterrupt)
    try:
        import pyproj
    except ImportError:
        print("Warning, pyproj not available. Domain extents will be wrong")
        return np.array(lon)*1000., np.array(lat)*1000.

    # determine UTM zone from the input data (median longitude)
    zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
    try:
        import osgeo.osr
        srs = osgeo.osr.SpatialReference()
        srs.ImportFromWkt(wkt_string)
        p_src = pyproj.Proj(srs.ExportToProj4())
    except Exception:
        # gdal missing or wkt_string invalid -> fall back to Clarke 1866
        p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
    # we assume southern hemisphere here
    p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
    x,y=pyproj.transform(p_src, p_dest, lon, lat)
    return x,y
84
class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    This is an abstract base class that implements common functionality.
    Methods to be overwritten by subclasses are marked as such.
    """
    # this is currently specific to gravity inversion and should be generalised

    def __init__(self):
        """
        Constructor. Sets some defaults and initializes logger.
        """
        self._constrainBottom=False
        self._constrainSides=True
        self._domain=None
        self.setPadding()
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)

    def setPadding(self, pad_x=10, pad_y=10):
        """
        Sets the amount of padding around the dataset. If `pad_x`/`pad_y`
        is >=1 the value is treated as number of elements to be added to the
        domain (per side).
        If ``0 < pad_x,pad_y < 1``, the padding amount is relative to the
        dataset size. For example, calling ``setPadding(3, 0.1)`` to a data
        source with size 10x20 will result in the padded data set size
        16x24 (10+2*3, 20*(1+2*0.1))

        :param pad_x: Padding per side in x direction (default: 10 elements)
        :type pad_x: ``int`` or ``float``
        :param pad_y: Padding per side in y direction (default: 10 elements).
                      This value is only used for 3-dimensional datasets
        :type pad_y: ``int`` or ``float``
        """
        self._pad_x=pad_x
        self._pad_y=pad_y

    def setConstraints(self, bottom=False, sides=True):
        """
        If `bottom` is True, then the density mask will be set to 1 in the
        padding area at the bottom of the domain. By default this area is
        unconstrained. Similarly, if `sides` is True (default) then the
        horizontal padding area is constrained, otherwise not.

        :param bottom: Whether to constrain the density at the bottom of the
                       domain
        :type bottom: ``bool``
        :param sides: Whether to constrain the density in the padding area
                      surrounding the data
        :type sides: ``bool``
        """
        self._constrainBottom=bottom
        self._constrainSides=sides

    def getDomain(self):
        """
        Returns a domain that spans the data area plus padding.
        The domain is created the first time this method is called, subsequent
        calls return the same domain so anything that affects the domain
        (such as padding) needs to be set beforehand.

        :return: The escript domain for this data source.
        :rtype: `esys.escript.Domain`
        """
        if self._domain is None:
            self._domain=self._createDomain(self._pad_x, self._pad_y)
        return self._domain

    def getDensityMask(self):
        """
        Returns the density mask data object, where mask has value 1 in the
        padding area, 0 elsewhere.

        :return: The mask for the density.
        :rtype: `esys.escript.Data`
        """
        return self._mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as a
        tuple. This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDataExtents(self):
        """
        returns a tuple of tuples ``( (x0, y0), (nx, ny), (dx, dy) )``, where

        - ``x0``, ``y0`` = coordinates of data origin
        - ``nx``, ``ny`` = number of data points in x and y
        - ``dx``, ``dy`` = spacing of data points in x and y

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getVerticalExtents(self):
        """
        returns a tuple ``(z0, nz, dz)``, where

        - ``z0`` = minimum z coordinate (origin)
        - ``nz`` = number of nodes in z direction
        - ``dz`` = spacing of nodes (= cell size in z)

        This method must be implemented in subclasses.
        """
        raise NotImplementedError

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick).
        Must be implemented in subclasses.
        """
        raise NotImplementedError

    def _addPadding(self, pad_x, pad_y, NE, l, origin):
        """
        Helper method that computes new number of elements, length and origin
        after adding padding to the input values.

        :param pad_x: Number of elements or fraction of padding in x direction
        :type pad_x: ``int`` or ``float``
        :param pad_y: Number of elements or fraction of padding in y direction
        :type pad_y: ``int`` or ``float``
        :param NE: Initial number of elements
        :type NE: ``tuple`` or ``list``
        :param l: Initial side lengths
        :type l: ``tuple`` or ``list``
        :param origin: Initial origin
        :type origin: ``tuple`` or ``list``
        :return: tuple with three elements ``(NE_padded, l_padded, origin_padded)``,
                 which are lists of the updated input parameters
        """
        DIM=len(NE)
        # no padding is applied to the last (vertical) dimension
        frac=[0.]*(DIM-1)+[0]
        # padding is applied to each side so multiply by 2 to get the total
        # amount of padding per dimension.
        # (bug fix: the first test used to read ``pad_x>0 and pad_y<1`` so an
        # element count pad_x>=1 was misinterpreted as a fraction whenever
        # pad_y happened to be <1, contradicting the setPadding() docs)
        if pad_x>0 and pad_x<1:
            frac[0]=2.*pad_x
        elif pad_x>=1:
            frac[0]=2.*pad_x/float(NE[0])
        if DIM>2:
            if pad_y>0 and pad_y<1:
                frac[1]=2.*pad_y
            elif pad_y>=1:
                frac[1]=2.*pad_y/(float(NE[1]))

        # calculate new number of elements
        NE_new=[int(NE[i]*(1+frac[i])) for i in range(DIM)]
        NEdiff=[NE_new[i]-NE[i] for i in range(DIM)]
        spacing=[l[i]/NE[i] for i in range(DIM)]
        l_new=[NE_new[i]*spacing[i] for i in range(DIM)]
        # padding is distributed evenly, half on either side of the data
        origin_new=[origin[i]-NEdiff[i]/2.*spacing[i] for i in range(DIM)]
        return NE_new, l_new, origin_new

    def _interpolateOnDomain(self, data):
        """
        Helper method that interpolates data arrays onto the domain.
        Currently this works like a nearest neighbour mapping, i.e. values
        are directly inserted into data objects at closest location.
        """
        dom=self.getDomain()
        dim=dom.getDim()
        # determine number of values required per element
        DPP=Scalar(0., ReducedFunction(dom)).getNumberOfDataPoints()
        for i in range(dim):
            DPP=DPP/self._dom_NE[i]
        DPP=int(DPP)

        # idx_mult.dot([x,y,z]) = flat index into data object
        idx_mult=np.array([DPP]+self._dom_NE[:dim-1]).cumprod()

        # separate data arrays and coordinates
        num_arrays=len(data[0])-dim
        arrays=[]
        for i in range(num_arrays):
            d=Scalar(0., ReducedFunction(dom))
            d.expand()
            arrays.append(d)

        for entry in data:
            # nearest-neighbour cell index for this data point
            index=[int((entry[i]-self._dom_origin[i])/self._spacing[i]) for i in range(dim)]
            index=int(idx_mult.dot(index))
            for i in range(num_arrays):
                for p in range(DPP):
                    arrays[i].setValueOfDataPoint(index+p, entry[dim+i])

        return arrays

    def _createDomain(self, padding_x, padding_y):
        """
        Creates and returns an escript domain that spans the entire area of
        available data plus a buffer zone. This method is called only once
        the first time `getDomain()` is invoked and may be overwritten if
        required.

        :return: The escript domain for this data source.
        :rtype: `esys.escript.Domain`
        """
        X0, NX, DX = self.getDataExtents()
        z0, nz, dz = self.getVerticalExtents()

        # number of elements (without padding)
        NE = [NX[0], NX[1], nz]

        # origin of domain (without padding)
        origin = [X0[0], X0[1], z0]
        origin = [np.round(oi) for oi in origin]

        # cell size / point spacing
        self._spacing = DX+[dz]
        self._spacing = [float(np.round(si)) for si in self._spacing]

        # length of domain (without padding)
        l = [NE[i]*self._spacing[i] for i in range(len(NE))]

        # now add padding to the values
        NE_new, l_new, origin_new = self._addPadding(padding_x, padding_y, \
                NE, l, origin)

        # number of padding elements per side.
        # (fix: integer division so these remain usable as grid indices
        # under Python 3, where ``/`` yields a float)
        NE_pad=[(NE_new[i]-NE[i])//2 for i in range(3)]

        self._dom_NE_pad = NE_pad
        self._dom_len = l_new
        self._dom_NE = NE_new
        self._dom_origin = origin_new
        lo=[(origin_new[i], origin_new[i]+l_new[i]) for i in range(3)]
        try:
            dom=self.getDomainClass()(*self._dom_NE, l0=lo[0], l1=lo[1], l2=lo[2])
            # ripley may internally adjust NE and length, so recompute
            self._dom_len=[sup(dom.getX()[i])-inf(dom.getX()[i]) for i in range(3)]
            self._dom_NE=[int(self._dom_len[i]/self._spacing[i]) for i in range(3)]
            x=dom.getX()-[self._dom_origin[i]+NE_pad[i]*self._spacing[i] for i in range(3)]
            mask=wherePositive(dom.getX()[2])

        except TypeError:
            # fall back for domain classes whose l0/l1/l2 arguments are
            # lengths rather than coordinate ranges
            dom=self.getDomainClass()(*self._dom_NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
            x=dom.getX()-[NE_pad[i]*self._spacing[i] for i in range(3)]
            mask=wherePositive(x[2]+self._dom_origin[2])

        # prepare density mask (=1 at padding area, 0 else)
        if self._constrainSides:
            for i in range(2):
                mask=mask + whereNegative(x[i]) + \
                        wherePositive(x[i]-l_new[i]+2*NE_pad[i]*self._spacing[i])

        if self._constrainBottom:
            mask = mask + whereNonPositive(x[2])
        self._mask=wherePositive(mask)

        self.logger.debug("Domain size: %d x %d x %d elements"%(self._dom_NE[0],self._dom_NE[1],self._dom_NE[2]))
        self.logger.debug(" length: %g x %g x %g"%(self._dom_len[0],self._dom_len[1],self._dom_len[2]))
        self.logger.debug(" origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        return dom
342
343
344 ##############################################################################
class UBCDataSource(DataSource):
    """
    Data source for gravity data given in UBC-GIF style format: a mesh
    definition file plus a gravity observation file and an optional
    topography file.
    """
    def __init__(self, domainclass, meshfile, gravfile, topofile=None):
        """
        :param domainclass: domain generator class (e.g. esys.ripley.Brick)
        :param meshfile: UBC mesh definition file
        :param gravfile: gravity observations (x, y, z, anomaly, stddev)
        :param topofile: optional topography file
        """
        super(UBCDataSource,self).__init__()
        self.__meshfile=meshfile
        self.__gravfile=gravfile
        self.__topofile=topofile
        self.__domainclass=domainclass
        self.__readMesh()

    def __readMesh(self):
        """
        Reads number of points, origin and spacing from the mesh file.
        """
        meshdata=open(self.__meshfile).readlines()
        numDataPoints=meshdata[0].split()
        origin=meshdata[1].split()
        # (fix: wrap map() in list() -- under Python 3 map() returns an
        # iterator which cannot be indexed below)
        self.__nPts=list(map(int, numDataPoints))
        self.__origin=list(map(float, origin))
        self.__delta=[float(X.split('*')[1]) for X in meshdata[2:]]
        # vertical data is upside down
        self.__origin[2]-=(self.__nPts[2]-1)*self.__delta[2]
        self.logger.debug("Data Source: %s (mesh file: %s)"%(self.__gravfile, self.__meshfile))

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    #def getDensityMask(self):
    #    topodata=self.__readTopography()
    #    mask=self._interpolateOnDomain(topodata)
    #    mask=wherePositive(self.getDomain().getX()[2]-mask[0])
    #    return mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as a
        tuple.
        """
        gravlist=self.__readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        # gravity applies to the vertical component only
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def __readTopography(self):
        """
        Reads the topography file and returns an (n,3) array of
        x, y, altitude values.
        """
        f=open(self.__topofile)
        n=int(f.readline())
        topodata=np.zeros((n,3))
        for i in range(n):
            x=f.readline().split()
            # (fix: list() so the row can be assigned under Python 3)
            x=list(map(float, x))
            topodata[i]=x
        f.close()
        return topodata

    def __readGravity(self):
        """
        Reads the gravity file and returns an (n,5) array of
        x, y, z, anomaly (converted to m/s^2), stddev.
        """
        f=open(self.__gravfile)
        n=int(f.readline())
        gravdata=np.zeros((n,5))
        for i in range(n):
            x=f.readline().split()
            # (fix: list() so items can be modified under Python 3)
            x=list(map(float, x)) # x, y, z, anomaly in mGal, stddev
            # convert gravity anomaly units to m/s^2 and rescale error
            x[3]*=-1e-5
            x[4]*=1e-5
            gravdata[i]=x
        f.close()
        return gravdata
418
419 ##############################################################################
class NetCDFDataSource(DataSource):
    """
    Data source that reads gravity measurements and data extents from a
    netCDF file (via ``scipy.io.netcdf``).
    """
    def __init__(self, gravfile, topofile=None, vertical_extents=(-40000,10000,25), alt_of_data=0.):
        """
        :param gravfile: netCDF file with the gravity measurements
        :param topofile: optional netCDF file with topography data
        :param vertical_extents: (alt_min, alt_max, num_points)
        :param alt_of_data: altitude of measurements
        """
        super(NetCDFDataSource,self).__init__()
        self.__topofile=topofile
        self.__gravfile=gravfile
        self.__determineExtents(vertical_extents)
        self.__altOfData=alt_of_data

    def __determineExtents(self, ve):
        """
        Opens the gravity file and determines the origin, spacing and number
        of data points as well as the names of the relevant netCDF variables.

        :param ve: vertical extents tuple (alt_min, alt_max, num_points)
        """
        self.logger.debug("Data Source: %s"%self.__gravfile)
        f=netcdf_file(self.__gravfile, 'r')
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                # pop() so only data variables remain for the search below
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out gravity variable name
        grav_name=None
        if len(f.variables)==1:
            # (fix: list() -- under Python 3 dict keys() views are not
            # indexable)
            grav_name=list(f.variables.keys())[0]
        else:
            for n in f.variables.keys():
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    grav_name=n
                    break
        if grav_name is None:
            raise RuntimeError("Could not determine gravity variable")

        # try to determine value for unused data
        if hasattr(f.variables[grav_name], 'missing_value'):
            maskval = float(f.variables[grav_name].missing_value)
        elif hasattr(f.variables[grav_name], '_FillValue'):
            maskval = float(f.variables[grav_name]._FillValue)
        else:
            self.logger.debug("missing_value attribute not found, using default.")
            maskval = 99999

        # see if there is a wkt string to convert coordinates
        # (fix: was a bare ``except:``; only a missing attribute is expected)
        try:
            wkt_string=f.variables[grav_name].esri_pe_string
        except AttributeError:
            wkt_string=None

        # we don't trust actual_range & geospatial_lon_min/max since subset
        # data does not seem to have these fields updated.
        # Getting min/max from the arrays is obviously not very efficient but..
        #lon_range=longitude.actual_range
        #lat_range=latitude.actual_range
        #lon_range=[f.geospatial_lon_min,f.geospatial_lon_max]
        #lat_range=[f.geospatial_lat_min,f.geospatial_lat_max]
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        origin=[lon_range[0],lat_range[0],ve[0]]
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0],ve[1]-ve[0]]

        f.close()

        self.__nPts=[NX, NY, ve[2]]
        self.__origin=origin
        # we are rounding to avoid interpolation issues
        self.__delta=[np.round(lengths[i]/self.__nPts[i]) for i in range(3)]
        self.__wkt_string=wkt_string
        self.__lon=lon_name
        self.__lat=lat_name
        self.__grv=grav_name
        self.__maskval=maskval

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return Brick

    def getGravityAndStdDev(self):
        """
        Reads the gravity grid into an escript data object and returns it
        together with the standard deviation object.
        """
        nValues=self.__nPts[:2]+[1]
        # grid index of the slice at the altitude of the measurements
        first=self._dom_NE_pad[:2]+[self._dom_NE_pad[2]+int((self.__altOfData-self.__origin[2])/self.__delta[2])]
        g=ripleycpp._readNcGrid(self.__gravfile, self.__grv,
                ReducedFunction(self.getDomain()),
                first, nValues, (), self.__maskval)
        # sigma is zero wherever the grid holds the 'missing value' marker
        sigma=whereNonZero(g-self.__maskval)
        g=g*1e-6
        sigma=sigma*2e-6
        return g*[0,0,1], sigma

    def _readTopography(self):
        """
        Reads longitude, latitude and altitude arrays from the topography
        file and returns them as the columns of a single array.
        """
        f=netcdf_file(self.__topofile, 'r')
        lon=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon=f.variables[n][:]
                break
        if lon is None:
            raise RuntimeError("Could not determine longitude variable")
        lat=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat=f.variables[n][:]
                break
        if lat is None:
            raise RuntimeError("Could not determine latitude variable")
        alt=None
        for n in ['altitude','alt']:
            if n in f.variables:
                alt=f.variables[n][:]
                break
        if alt is None:
            raise RuntimeError("Could not determine altitude variable")

        topodata=np.column_stack((lon,lat,alt))
        f.close()
        return topodata
577
578 ##############################################################################
class ERSDataSource(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, headerfile, datafile=None, vertical_extents=(-40000,10000,25), alt_of_data=0.):
        """
        headerfile - usually ends in .ers
        datafile - usually has the same name as the headerfile without '.ers'
        vertical_extents - (alt_min, alt_max, num_points)
        alt_of_data - altitude of measurements
        """
        super(ERSDataSource,self).__init__()
        self.__headerfile=headerfile
        if datafile is None:
            # by convention the raster file is the header name minus '.ers'
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__readHeader(vertical_extents)
        self.__altOfData=alt_of_data

    def __readHeader(self, ve):
        """
        Parses the ER Mapper ASCII header and populates origin, spacing,
        number of points and the mask value for this data source.

        ve - vertical extents tuple (alt_min, alt_max, num_points)
        """
        self.logger.debug("Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        metadata=open(self.__headerfile, 'r').readlines()
        # parse metadata: locate the line after 'DatasetHeader Begin'
        start=-1
        for i in xrange(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # flatten the nested Begin/End sections into dotted keys, e.g.
        # 'RasterInfo.CellInfo.Xdimension'
        md_dict={}
        section=[]
        for i in xrange(start, len(metadata)):
            line=metadata[i]
            # slices keep the trailing newline out of the comparison
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        # only 32-bit IEEE float rasters are supported
        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except:
            raise RuntimeError("Could not determine extents of data")

        # value that marks cells without data
        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except:
            maskval = 99999

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except:
            raise RuntimeError("Could not determine cell dimensions")

        # registration coordinate: either eastings/northings ('EN') or
        # longitude/latitude ('LL')
        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except:
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
            op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolation issues
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down;
        # shift the stored origin to the bottom-left corner
        self.__dataorigin=[originX, originY]
        originY-=(NY-1)*spacingY
        self.__maskval=maskval
        spacingZ=np.round(float(ve[1]-ve[0])/ve[2])
        self.__delta = [spacingX, spacingY, spacingZ]
        self.__nPts = [NX, NY, ve[2]]
        self.__origin = [originX, originY, ve[0]]

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return Brick

    def getGravityAndStdDev(self):
        """
        Reads the binary raster into an escript data object and returns it
        together with the standard deviation object.
        """
        nValues=self.__nPts[:2]+[1]
        # grid index of the slice at the altitude of the measurements
        first=self._dom_NE_pad[:2]+[self._dom_NE_pad[2]+int((self.__altOfData-self.__origin[2])/self.__delta[2])]
        g=ripleycpp._readBinaryGrid(self.__datafile,
                ReducedFunction(self.getDomain()),
                first, nValues, (), self.__maskval)
        # sigma is zero wherever the grid holds the null cell value
        sigma=whereNonZero(g-self.__maskval)
        g=g*1e-6
        sigma=sigma*2e-6
        return g*[0,0,1], sigma
718
719
720 ##############################################################################
class SourceFeature(object):
    """
    Abstract description of a density feature inside the domain of a
    synthetic data source -- for example a layer of a specific rock type or
    a simulated ore body. Subclasses provide the density value and the
    region of the domain it applies to.
    """

    def getDensity(self):
        """
        Return the density for the area covered by this feature's mask.
        The result may be a constant or a data object with spatial
        dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Return the mask of this feature's area of interest: non-zero
        wherever the density from `getDensity()` should be applied, zero
        everywhere else.
        """
        raise NotImplementedError
740
class SmoothAnomaly(SourceFeature):
    """
    A box-shaped density anomaly centred at (x, y) at the given depth below
    the domain surface, with a smooth (Gaussian) density profile that decays
    from `rho_inner` at the centre towards `rho_outer` at the box edges.
    """
    def __init__(self, lx, ly, lz, x, y, depth, rho_inner, rho_outer):
        """
        lx, ly, lz - side lengths of the box
        x, y       - horizontal centre coordinates
        depth      - depth of the box centre below the domain surface
        rho_inner  - density at the centre of the anomaly
        rho_outer  - density towards the edges of the box
        """
        self.x=x
        self.y=y
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.depth=depth
        self.rho_inner=rho_inner
        self.rho_outer=rho_outer
        # density object is computed lazily on the first getMask() call
        self.rho=None

    def getDensity(self):
        # NOTE(review): returns None until getMask() has been called once --
        # callers appear to rely on calling getMask() first; verify.
        return self.rho

    def getMask(self, x):
        """
        Returns the characteristic function of the box. The vertical position
        is measured relative to the top of the domain via sup(x[DIM-1]).
        As a side effect the density profile is computed and cached on the
        first call.
        """
        DIM=x.getDomain().getDim()
        # 1 inside [x-lx/2, x+lx/2] horizontally and within lz/2 of the
        # depth below the surface, 0 outside
        m=whereNonNegative(x[DIM-1]-(sup(x[DIM-1])-self.depth-self.lz/2)) * whereNonPositive(x[DIM-1]-(sup(x[DIM-1])-self.depth+self.lz/2)) \
            *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM>2:
            # restrict in the second horizontal direction for 3D domains
            m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        if self.rho is None:
            # Gaussian profile; alpha is chosen so the density decays from
            # |rho_inner| towards |rho_outer| across the box half-width
            alpha=-log(abs(self.rho_outer/self.rho_inner))*4
            rho=exp(-alpha*((x[0]-self.x)/self.lx)**2)
            rho=rho*exp(-alpha*((x[DIM-1]-(sup(x[DIM-1])-self.depth))/self.lz)**2)
            # clip from below at |rho_outer| and restore the sign of rho_inner
            self.rho=maximum(abs(self.rho_outer), abs(self.rho_inner*rho))
            if self.rho_inner<0: self.rho=-self.rho
        return m
769
770 ##############################################################################
class SyntheticDataSource(DataSource):
    """
    Synthetic data source: builds a finley Rectangle/Brick domain, assembles
    a reference density from the supplied `SourceFeature` objects and
    computes the corresponding gravity field by solving a potential PDE.
    """
    def __init__(self, DIM, NE, l, h, features):
        """
        DIM      - spatial dimensionality (2 or 3)
        NE       - number of elements in the vertical direction
        l        - horizontal length of the domain
        h        - height (vertical extent) of the domain
        features - list of `SourceFeature` objects
        """
        super(SyntheticDataSource,self).__init__()
        self._features = features
        self.DIM=DIM
        self.NE=NE
        self.l=l
        self.h=h

    def _createDomain(self, padding_x, padding_y):
        """
        Creates the padded finley domain and prepares the measurement mask,
        density mask and reference density (overrides the base class method).
        """
        # derive the horizontal element count from the aspect ratio so that
        # elements stay (roughly) uniform in size
        NE_H=self.NE
        NE_L=int((self.l/self.h)*NE_H+0.5)
        l=[self.l]*(self.DIM-1)+[self.h]
        NE=[NE_L]*(self.DIM-1)+[NE_H]
        origin=[0.]*self.DIM
        NE_new, l_new, origin_new = self._addPadding(padding_x, padding_y, \
                NE, l, origin)

        # store the padded values back on the instance
        self.NE=NE_new
        self.l=l_new[0]
        self.h=l_new[self.DIM-1]

        self.logger.debug("Data Source: synthetic with %d features"%len(self._features))
        if self.DIM==2:
            from esys.finley import Rectangle
            dom = Rectangle(n0=NE_new[0], n1=NE_new[1], l0=l_new[0], l1=l_new[1])
            # shift coordinates so the (unpadded) data region starts at 0
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d elements"%(NE_new[0], NE_new[1]))
            self.logger.debug(" length: %g x %g"%(l_new[0],l_new[1]))
            self.logger.debug(" origin: %g x %g"%(origin_new[0],origin_new[1]))
        else:
            from esys.finley import Brick
            dom = Brick(n0=NE_new[0], n1=NE_new[1], n2=NE_new[2], l0=l_new[0], l1=l_new[1], l2=l_new[2])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
            self.logger.debug(" length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
            self.logger.debug(" origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        # vertical element size of the padded domain
        dz=l_new[self.DIM-1]/NE_new[self.DIM-1]
        # mask for the synthetic 'measurement' locations: strictly inside
        # horizontally, within the top element layer vertically.
        # NOTE(review): the mix of l_new and origin_new here appears to
        # assume a symmetric padding offset -- verify against the domain
        # construction above.
        self._g_mask=wherePositive(dom.getX()[0]-origin_new[0]) \
            * whereNegative(dom.getX()[0]-(l_new[0]-origin_new[0])) \
            * whereNonNegative(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+origin_new[self.DIM-1])) \
            * whereNonPositive(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+(origin_new[self.DIM-1]+dz)))
        # density mask: 1 in the padding region (outside the original,
        # unpadded extents), 0 in the data region
        self._mask=whereNegative(self._x[self.DIM-1]) + \
            wherePositive(self._x[self.DIM-1]-l[self.DIM-1])
        for i in xrange(self.DIM-1):
            self._mask=self._mask + whereNegative(self._x[i]) + \
                wherePositive(self._x[i]-l[i])
        self._mask=wherePositive(self._mask)

        # assemble the reference density; later features overwrite earlier
        # ones wherever their masks overlap
        rho_ref=0.
        for f in self._features:
            m=f.getMask(self._x)
            rho_ref = rho_ref * (1-m) + f.getDensity() * m
        self._rho=rho_ref

        return dom

    def getDensityMask(self):
        """
        Returns the density mask (1 in the padding area, 0 elsewhere).
        """
        return self._mask

    def getReferenceDensity(self):
        """
        Returns the reference density used to generate the synthetic data.
        """
        return self._rho

    def getGravityAndStdDev(self):
        """
        Solves the potential PDE for the reference density and returns the
        resulting gravity field together with the measurement mask in place
        of the standard deviation.
        """
        pde=LinearSinglePDE(self.getDomain())
        # gravitational constant in SI units
        G=6.6742e-11*U.m**3/(U.kg*U.sec**2)
        # fix the potential on all domain boundaries
        m_psi_ref=0.
        for i in xrange(self.DIM):
            m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
                + whereZero(self._x[i]-sup(self._x[i]))

        # Poisson equation for the gravity potential: -div(grad psi) = -4 pi G rho
        pde.setValue(A=kronecker(self.getDomain()), Y=-4*np.pi*G*self._rho, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        # gravity is the (negative) gradient of the potential
        g=-grad(psi_ref)
        sigma=self._g_mask
        return g,sigma
850

  ViewVC Help
Powered by ViewVC 1.1.26