/[escript]/trunk/downunder/py_src/datasources.py



Revision 3965
Fri Sep 14 01:23:17 2012 UTC by caltinay
File MIME type: text/x-python
File size: 31292 byte(s)
Data sources have to end up in the Function fs or we get into trouble when
interpolating. Now using setValueOfDataPoint() which doesn't seem to slow
things down too much.

########################################################
#
# Copyright (c) 2003-2012 by University of Queensland
# Earth Systems Science Computational Center (ESSCC)
# http://www.uq.edu.au/esscc
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
########################################################

__copyright__="""Copyright (c) 2003-2012 by University of Queensland
Earth Systems Science Computational Center (ESSCC)
http://www.uq.edu.au/esscc
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Open Software License version 3.0
http://www.opensource.org/licenses/osl-3.0.php"""
__url__="https://launchpad.net/escript-finley"

__all__ = ['DataSource','UBCDataSource','ERSDataSource','SyntheticDataSource','SmoothAnomaly']

import logging
import numpy as np
import struct
from esys.escript import *
from esys.escript.linearPDEs import *
import esys.escript.unitsSI as U
try:
    from scipy.io.netcdf import netcdf_file
    __all__ += ['NetCDFDataSource']
except:
    pass

def LatLonToUTM(lon, lat, wkt_string=None):
    """
    Converts one or more longitude,latitude pairs to the corresponding x,y
    coordinates in the Universal Transverse Mercator projection.
    If wkt_string is not given or invalid, or the gdal module is not available
    to convert the string, then the input values are assumed to be geographic
    coordinates on the Clarke 1866 ellipsoid.
    """

    # not really optimal: if pyproj is not installed we return the input
    # values without modification.
    try:
        import pyproj
    except:
        print("Warning, pyproj not available. Domain extents will be wrong")
        return lon,lat

    # determine UTM zone from the input data
    zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
    try:
        import osgeo.osr
        srs = osgeo.osr.SpatialReference()
        srs.ImportFromWkt(wkt_string)
        p_src = pyproj.Proj(srs.ExportToProj4())
    except:
        p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
    # we assume southern hemisphere here
    p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
    x,y=pyproj.transform(p_src, p_dest, lon, lat)
    return x,y

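# Example (a sketch only; the longitude/latitude values below are made up for
# illustration):
#
#   x, y = LatLonToUTM([138.5, 138.6], [-34.9, -34.8])
#
# If pyproj is available this returns UTM eastings and northings in metres for
# the zone determined from the input; otherwise the input values are returned
# unchanged (with a warning printed).
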
class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    """
    # this is currently specific to gravity inversion and should be generalised

    def __init__(self):
        """
        """
        self._constrainBottom=False
        self._domain=None
        self._pad_l=0.1
        self._pad_h=0.1
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)

    def _addPadding(self, pad_l, pad_h, NE, l, origin):
        """
        Helper method that computes new number of elements, length and origin
        after adding padding to the input values.
        """
        DIM=len(NE)
        frac=[0.]*DIM
        # padding is applied to each side so multiply by 2 to get the total
        # amount of padding per dimension
        if pad_l>0 and pad_l<1:
            for i in xrange(DIM-1):
                frac[i]=2.*pad_l
        elif pad_l>=1:
            for i in xrange(DIM-1):
                frac[i]=2.*pad_l/float(NE[i])
        if pad_h>0 and pad_h<1:
            frac[DIM-1]=2.*pad_h
        elif pad_h>=1:
            frac[DIM-1]=2.*pad_h/(float(NE[DIM-1]))

        # calculate new number of elements
        NE_new=[int(NE[i]*(1+frac[i])) for i in xrange(DIM)]
        NEdiff=[NE_new[i]-NE[i] for i in xrange(DIM)]
        spacing=[l[i]/NE[i] for i in xrange(DIM)]
        l_new=[NE_new[i]*spacing[i] for i in xrange(DIM)]
        origin_new=[origin[i]-NEdiff[i]/2.*spacing[i] for i in xrange(DIM)]
        return NE_new, l_new, origin_new

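    # A worked example of the padding arithmetic above (numbers are
    # hypothetical): with NE=[100, 80, 30] and pad_l=pad_h=0.1 each dimension
    # grows by 20% in total (10% per side), giving NE_new=[120, 96, 36], and
    # the origin shifts outwards by NEdiff/2 elements times the spacing in
    # each direction. Passing pad_l=10 instead (>=1) adds 10 elements per
    # horizontal side, which for NE[0]=100 again gives NE_new[0]=120.
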
    def _interpolateOnDomain(self, data):
        """
        Helper method that interpolates data arrays onto the domain.
        Currently this works like a nearest neighbour mapping, i.e. values
        are directly inserted into data objects at closest location.
        """
        dom=self.getDomain()
        dim=dom.getDim()
        # determine number of values required per element
        DPP=Scalar(0., Function(dom)).getNumberOfDataPoints()
        for i in xrange(dim):
            DPP=DPP/self._dom_NE[i]
        DPP=int(DPP)

        # idx_mult.dot([x,y,z]) = flat index into data object
        idx_mult=np.array([DPP]+self._dom_NE[:dim-1]).cumprod()

        # separate data arrays and coordinates
        num_arrays=len(data[0])-dim
        arrays=[]
        for i in xrange(num_arrays):
            d=Scalar(0., Function(dom))
            d.expand()
            arrays.append(d)

        for entry in data:
            index=[int((entry[i]-self._dom_origin[i])/self._spacing[i]) for i in xrange(dim)]
            index=int(idx_mult.dot(index))
            for i in xrange(num_arrays):
                for p in xrange(DPP):
                    arrays[i].setValueOfDataPoint(index+p, entry[dim+i])

        return arrays

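    # Illustration of the flat-index mapping used above (a sketch with made-up
    # numbers): for a domain with self._dom_NE=[100, 80, 30] and DPP=8 data
    # points per element, idx_mult=[8, 800, 64000], so a sample falling in
    # cell (ix, iy, iz) is written to the DPP consecutive data points starting
    # at 8*ix + 800*iy + 64000*iz.
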
    def _interpolateOnDomain_old(self, data):
        """
        Old interpolation method. Works only on ContinuousFunction and thus
        produces wrong values once interpolated on Function.
        """
        dom=self.getDomain()
        dim=dom.getDim()
        # shape = number of data points/nodes in each dimension
        shape=()
        for i in xrange(dim):
            shape=(self._dom_NE[i]+1,)+shape
        # separate data arrays and coordinates
        arrays=np.zeros(((len(data[0])-dim),)+shape)
        num_arrays=arrays.shape[0]
        for entry in data:
            index=()
            for i in xrange(dim):
                index=(int((entry[i]-self._dom_origin[i])/self._spacing[i]),)+index
            for i in xrange(num_arrays):
                arrays[i][index]=entry[dim+i]
        x=dom.getX()
        delta=[self._dom_len[i]/(shape[dim-i-1]-1) for i in xrange(dim)]
        realorigin=[inf(x[i]) for i in xrange(dim)]
        res=[]
        for i in xrange(num_arrays):
            res.append(interpolateTable(arrays[i], x[:dim], realorigin, delta, 1e9))
        return res

    def setPadding(self, pad_l=0.1, pad_h=0.1):
        """
        Sets the amount of padding around the dataset. If pad_l/pad_h is >=1
        they are treated as number of elements to be added to the domain.
        If 0 < pad_l/pad_h < 1, the padding amount is relative.
        """
        self._pad_l=pad_l
        self._pad_h=pad_h

    def setConstrainBottom(self, constrain):
        """
        If `constrain` is True, then the density mask will be set to 1 in the
        padding area at the bottom of the domain. By default this area is
        unconstrained.
        """
        self._constrainBottom=constrain

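    # For instance (illustrative values only, 'src' being any DataSource
    # instance): src.setPadding(0.2, 0.1) requests relative padding of 20%
    # horizontally and 10% vertically, while src.setPadding(pad_l=10, pad_h=5)
    # adds 10 and 5 extra elements per side, respectively.
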
    def getDomain(self):
        """
        Returns a domain that spans the data area plus padding.
        """
        if self._domain is None:
            self._domain=self._createDomain(self._pad_l, self._pad_h)
        return self._domain

    def getDensityMask(self):
        """
        Returns the density mask data object, where mask has value 1 on the
        padding area, 0 elsewhere.
        """
        return self._mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as a
        tuple.
        """
        raise NotImplementedError

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) ), where
        x0, y0 = coordinates of data origin
        nx, ny = number of data points in x and y
        dx, dy = spacing of data points in x and y
        """
        raise NotImplementedError

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz), where
        z0 = minimum z coordinate (origin)
        nz = number of nodes in z direction
        dz = spacing of nodes (= cell size in z)
        """
        raise NotImplementedError

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        raise NotImplementedError

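    # A minimal sketch of what a concrete subclass needs to provide (class
    # name and values below are hypothetical, not part of this module):
    #
    #   class MyGridSource(DataSource):
    #       def getDataExtents(self):
    #           return ((0., 0.), (100, 100), (1000., 1000.))
    #       def getVerticalExtents(self):
    #           return (-40000., 26, 2000.)
    #       def getDomainClass(self):
    #           from esys.ripley import Brick
    #           return Brick
    #       def getGravityAndStdDev(self):
    #           # return (gravity anomaly, standard deviation) data objects
    #           raise NotImplementedError
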
    def _createDomain(self, padding_l, padding_h):
        """
        creates and returns an escript domain that spans the entire area of
        available data plus a buffer zone.
        """
        X0, NX, DX = self.getDataExtents()
        z0, nz, dz = self.getVerticalExtents()

        # number of elements (without padding)
        NE = [NX[0], NX[1], nz]

        # origin of domain (without padding)
        origin = [X0[0], X0[1], z0]
        origin = [np.round(oi) for oi in origin]

        # cell size / point spacing
        self._spacing = DX+[dz]
        self._spacing = [np.round(si) for si in self._spacing]

        # length of domain (without padding)
        l = [NE[i]*self._spacing[i] for i in xrange(len(NE))]

        # now add padding to the values
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, origin)

        # number of padding elements per side
        NE_pad=[(NE_new[i]-NE[i])/2 for i in xrange(3)]

        self._dom_len = l_new
        self._dom_NE = NE_new
        self._dom_origin = origin_new
        lo=[(origin_new[i], origin_new[i]+l_new[i]) for i in xrange(3)]
        try:
            dom=self.getDomainClass()(*self._dom_NE, l0=lo[0], l1=lo[1], l2=lo[2])
            # ripley may internally adjust NE and length, so recompute
            self._dom_len=[sup(dom.getX()[i])-inf(dom.getX()[i]) for i in xrange(3)]
            self._dom_NE=[self._dom_len[i]/self._spacing[i] for i in xrange(3)]
            x=dom.getX()-[self._dom_origin[i]+NE_pad[i]*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(dom.getX()[2])

        except TypeError:
            dom=self.getDomainClass()(*self._dom_NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
            x=dom.getX()-[NE_pad[i]*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(x[2]+self._dom_origin[2])

        # prepare density mask (=1 at padding area, 0 else)
        if self._constrainBottom:
            M=3 # constrain bottom
        else:
            M=2 # do not constrain bottom
        for i in xrange(M):
            mask=mask + whereNegative(x[i]) + \
                    wherePositive(x[i]-l_new[i]+2*NE_pad[i]*self._spacing[i])
        self._mask=wherePositive(mask)

        self.logger.debug("Domain size: %d x %d x %d elements"%(self._dom_NE[0],self._dom_NE[1],self._dom_NE[2]))
        self.logger.debug("     length: %g x %g x %g"%(self._dom_len[0],self._dom_len[1],self._dom_len[2]))
        self.logger.debug("     origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        return dom


##############################################################################
class UBCDataSource(DataSource):
    def __init__(self, domainclass, meshfile, gravfile, topofile=None):
        super(UBCDataSource,self).__init__()
        self.__meshfile=meshfile
        self.__gravfile=gravfile
        self.__topofile=topofile
        self.__domainclass=domainclass
        self.__readMesh()

    def __readMesh(self):
        meshdata=open(self.__meshfile).readlines()
        numDataPoints=meshdata[0].split()
        origin=meshdata[1].split()
        self.__nPts=map(int, numDataPoints)
        self.__origin=map(float, origin)
        self.__delta=[float(X.split('*')[1]) for X in meshdata[2:]]
        # vertical data is upside down
        self.__origin[2]-=(self.__nPts[2]-1)*self.__delta[2]
        self.logger.debug("Data Source: %s (mesh file: %s)"%(self.__gravfile, self.__meshfile))

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    #def getDensityMask(self):
    #    topodata=self.__readTopography()
    #    mask=self._interpolateOnDomain(topodata)
    #    mask=wherePositive(self.getDomain().getX()[2]-mask[0])
    #    return mask

    def getGravityAndStdDev(self):
        gravlist=self.__readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def __readTopography(self):
        f=open(self.__topofile)
        n=int(f.readline())
        topodata=np.zeros((n,3))
        for i in xrange(n):
            x=f.readline().split()
            x=map(float, x)
            topodata[i]=x
        f.close()
        return topodata

    def __readGravity(self):
        f=open(self.__gravfile)
        n=int(f.readline())
        gravdata=np.zeros((n,5))
        for i in xrange(n):
            x=f.readline().split()
            x=map(float, x) # x, y, z, anomaly in mGal, stddev
            # convert gravity anomaly units to m/s^2 and rescale error
            x[3]*=-1e-5
            x[4]*=1e-5
            gravdata[i]=x
        f.close()
        return gravdata

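# A usage sketch (file names are illustrative; any ripley-style domain class
# with the expected constructor works):
#
#   from esys.ripley import Brick
#   src = UBCDataSource(Brick, 'mesh.msh', 'gravity.grv')
#   dom = src.getDomain()
#   g, sigma = src.getGravityAndStdDev()
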
##############################################################################
class NetCDFDataSource(DataSource):
    def __init__(self, domainclass, gravfile, topofile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
        """
        vertical_extents - (alt_min, alt_max, num_points)
        alt_of_data - altitude of measurements
        """
        super(NetCDFDataSource,self).__init__()
        self.__topofile=topofile
        self.__gravfile=gravfile
        self.__domainclass=domainclass
        self.__determineExtents(vertical_extents)
        self.__altOfData=alt_of_data

    def __determineExtents(self, ve):
        self.logger.debug("Data Source: %s"%self.__gravfile)
        f=netcdf_file(self.__gravfile, 'r')
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out gravity variable name
        grav_name=None
        if len(f.variables)==1:
            grav_name=f.variables.keys()[0]
        else:
            for n in f.variables.keys():
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    grav_name=n
                    break
        if grav_name is None:
            raise RuntimeError("Could not determine gravity variable")

        # see if there is a wkt string to convert coordinates
        try:
            wkt_string=f.variables[grav_name].esri_pe_string
        except:
            wkt_string=None

        # we don't trust actual_range & geospatial_lon_min/max since these
        # fields do not appear to be updated for subset data.
        # Getting min/max from the arrays is obviously not very efficient...
        #lon_range=longitude.actual_range
        #lat_range=latitude.actual_range
        #lon_range=[f.geospatial_lon_min,f.geospatial_lon_max]
        #lat_range=[f.geospatial_lat_min,f.geospatial_lat_max]
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        origin=[lon_range[0],lat_range[0],ve[0]]
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0],ve[1]-ve[0]]

        f.close()

        self.__nPts=[NX, NY, ve[2]]
        self.__origin=origin
        # we are rounding to avoid interpolateOnDomain issues
        self.__delta=[np.round(lengths[i]/(self.__nPts[i]-1)) for i in xrange(3)]
        self.__wkt_string=wkt_string
        self.__lon=lon_name
        self.__lat=lat_name
        self.__grv=grav_name

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    def getGravityAndStdDev(self):
        gravlist=self._readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def _readTopography(self):
        f=netcdf_file(self.__topofile, 'r')
        lon=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon=f.variables[n][:]
                break
        if lon is None:
            raise RuntimeError("Could not determine longitude variable")
        lat=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat=f.variables[n][:]
                break
        if lat is None:
            raise RuntimeError("Could not determine latitude variable")
        alt=None
        for n in ['altitude','alt']:
            if n in f.variables:
                alt=f.variables[n][:]
                break
        if alt is None:
            raise RuntimeError("Could not determine altitude variable")

        topodata=np.column_stack((lon,lat,alt))
        f.close()
        return topodata

    def _readGravity(self):
        f=netcdf_file(self.__gravfile, 'r')
        #lon=f.variables[self.__lon][:]
        #lat=f.variables[self.__lat][:]
        NE=[self.__nPts[i]-1 for i in xrange(2)]
        lon=np.linspace(self.__origin[0], self.__origin[0]+NE[0]*self.__delta[0], NE[0]+1)
        lat=np.linspace(self.__origin[1], self.__origin[1]+NE[1]*self.__delta[1], NE[1]+1)
        lon,lat=np.meshgrid(lon,lat)
        grav=f.variables[self.__grv][:]
        f.close()
        lon=lon.flatten()
        lat=lat.flatten()
        grav=grav.flatten()
        alt=self.__altOfData*np.ones(grav.shape)
        # error value is an assumption
        try:
            missing=f.variables[self.__grv].missing_value
            err=np.where(grav==missing, 0.0, 20.0)
        except:
            err=20.0*np.ones(lon.shape)
        # convert units
        err=1e-6*err
        grav=1e-6*grav
        gravdata=np.column_stack((lon,lat,alt,grav,err))
        return gravdata

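# A usage sketch (the file name is illustrative; NetCDFDataSource is only
# exported if scipy's netcdf reader could be imported above):
#
#   from esys.ripley import Brick
#   src = NetCDFDataSource(Brick, 'gravity_anomaly.nc', vertical_extents=(-40000, 10000, 26))
#   dom = src.getDomain()
#   g, sigma = src.getGravityAndStdDev()
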
##############################################################################
class ERSDataSource(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, domainclass, headerfile, datafile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
        """
        headerfile - usually ends in .ers
        datafile - usually has the same name as the headerfile without '.ers'
        """
        super(ERSDataSource,self).__init__()
        self.__headerfile=headerfile
        if datafile is None:
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__domainclass=domainclass
        self.__readHeader(vertical_extents)
        self.__altOfData=alt_of_data

    def __readHeader(self, ve):
        self.logger.debug("Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        metadata=open(self.__headerfile, 'r').readlines()
        # parse metadata
        start=-1
        for i in xrange(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        md_dict={}
        section=[]
        for i in xrange(start, len(metadata)):
            line=metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except:
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except:
            raise RuntimeError("Could not determine NullCellValue")

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except:
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except:
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available, assuming GDA94 data')
            originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
            op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolateOnDomain issues
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin=[originX, originY]
        originY-=(NY-1)*spacingY
        self.__maskval=maskval
        spacingZ=np.round(float(ve[1]-ve[0])/(ve[2]-1))
        self.__delta = [spacingX, spacingY, spacingZ]
        self.__nPts = [NX, NY, ve[2]]
        self.__origin = [originX, originY, ve[0]]

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    def getGravityAndStdDev(self):
        gravlist=self._readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def _readGravity(self):
        f=open(self.__datafile, 'r')
        n=self.__nPts[0]*self.__nPts[1]
        grav=[]
        err=[]
        for i in xrange(n):
            v=struct.unpack('f',f.read(4))[0]
            grav.append(v)
            if abs((self.__maskval-v)/v) < 1e-6:
                err.append(0.)
            else:
                err.append(1.)
        f.close()

        NE=[self.__nPts[i] for i in xrange(2)]
        x=self.__dataorigin[0]+np.arange(start=0, stop=NE[0]*self.__delta[0], step=self.__delta[0])
        # data sets have origin in top-left corner so y runs top-down
        y=self.__dataorigin[1]-np.arange(start=0, stop=NE[1]*self.__delta[1], step=self.__delta[1])
        x,y=np.meshgrid(x,y)
        x,y=x.flatten(),y.flatten()
        alt=self.__altOfData*np.ones((n,))

        # convert units
        err=2e-5*np.array(err)
        grav=1e-5*np.array(grav)
        gravdata=np.column_stack((x,y,alt,grav,err))
        return gravdata

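# A usage sketch (file names are illustrative; by default the binary data file
# is expected to have the header's name without the '.ers' suffix):
#
#   from esys.ripley import Brick
#   src = ERSDataSource(Brick, 'bouguer_anomaly.ers')
#   dom = src.getDomain()
#   g, sigma = src.getGravityAndStdDev()
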
##############################################################################
class SourceFeature(object):
    """
    A feature adds a density distribution to (parts of) a domain of a synthetic
    data source, for example a layer of a specific rock type or a simulated
    ore body.
    """
    def getDensity(self):
        """
        Returns the density for the area covered by mask. It can be constant
        or a data object with spatial dependency.
        """
        raise NotImplementedError
    def getMask(self, x):
        """
        Returns the mask of the area of interest for this feature. That is,
        mask is non-zero where the density returned by getDensity() should be
        applied, zero elsewhere.
        """
        raise NotImplementedError

class SmoothAnomaly(SourceFeature):
    def __init__(self, lx, ly, lz, x, y, depth, rho_inner, rho_outer):
        self.x=x
        self.y=y
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.depth=depth
        self.rho_inner=rho_inner
        self.rho_outer=rho_outer
        self.rho=None

    def getDensity(self):
        return self.rho

    def getMask(self, x):
        DIM=x.getDomain().getDim()
        m=whereNonNegative(x[DIM-1]-(sup(x[DIM-1])-self.depth-self.lz/2)) * whereNonPositive(x[DIM-1]-(sup(x[DIM-1])-self.depth+self.lz/2)) \
            *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM>2:
            m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        if self.rho is None:
            alpha=-log(abs(self.rho_outer/self.rho_inner))*4
            rho=exp(-alpha*((x[0]-self.x)/self.lx)**2)
            rho=rho*exp(-alpha*((x[DIM-1]-(sup(x[DIM-1])-self.depth))/self.lz)**2)
            self.rho=maximum(abs(self.rho_outer), abs(self.rho_inner*rho))
            if self.rho_inner<0: self.rho=-self.rho
        return m

##############################################################################
class SyntheticDataSource(DataSource):
    def __init__(self, DIM, NE, l, h, features):
        super(SyntheticDataSource,self).__init__()
        self._features = features
        self.DIM=DIM
        self.NE=NE
        self.l=l
        self.h=h

    def _createDomain(self, padding_l, padding_h):
        NE_H=self.NE
        NE_L=int((self.l/self.h)*NE_H+0.5)
        l=[self.l]*(self.DIM-1)+[self.h]
        NE=[NE_L]*(self.DIM-1)+[NE_H]
        origin=[0.]*self.DIM
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, origin)

        self.NE=NE_new
        self.l=l_new[0]
        self.h=l_new[self.DIM-1]

        self.logger.debug("Data Source: synthetic with %d features"%len(self._features))
        if self.DIM==2:
            from esys.finley import Rectangle
            dom = Rectangle(n0=NE_new[0], n1=NE_new[1], l0=l_new[0], l1=l_new[1])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d elements"%(NE_new[0], NE_new[1]))
            self.logger.debug("     length: %g x %g"%(l_new[0],l_new[1]))
            self.logger.debug("     origin: %g x %g"%(origin_new[0],origin_new[1]))
        else:
            from esys.finley import Brick
            dom = Brick(n0=NE_new[0], n1=NE_new[1], n2=NE_new[2], l0=l_new[0], l1=l_new[1], l2=l_new[2])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
            self.logger.debug("     length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
            self.logger.debug("     origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        dz=l_new[self.DIM-1]/NE_new[self.DIM-1]
        self._g_mask=wherePositive(dom.getX()[0]-origin_new[0]) \
                * whereNegative(dom.getX()[0]-(l_new[0]-origin_new[0])) \
                * whereNonNegative(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+origin_new[self.DIM-1])) \
                * whereNonPositive(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+(origin_new[self.DIM-1]+dz)))
        self._mask=whereNegative(self._x[self.DIM-1]) + \
                wherePositive(self._x[self.DIM-1]-l[self.DIM-1])
        for i in xrange(self.DIM-1):
            self._mask=self._mask + whereNegative(self._x[i]) + \
                    wherePositive(self._x[i]-l[i])
        self._mask=wherePositive(self._mask)

        rho_ref=0.
        for f in self._features:
            m=f.getMask(self._x)
            rho_ref = rho_ref * (1-m) + f.getDensity() * m
        self._rho=rho_ref

        return dom

    def getDensityMask(self):
        return self._mask

    def getReferenceDensity(self):
        return self._rho

    def getGravityAndStdDev(self):
        pde=LinearSinglePDE(self.getDomain())
        G=6.6742e-11*U.m**3/(U.kg*U.sec**2)
        m_psi_ref=0.
        for i in xrange(self.DIM):
            m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
                    + whereZero(self._x[i]-sup(self._x[i]))

        pde.setValue(A=kronecker(self.getDomain()), Y=-4*np.pi*G*self._rho, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        g=-grad(psi_ref)
        sigma=self._g_mask
        return g,sigma

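# A usage sketch for synthetic data (all numbers are made up for
# illustration):
#
#   anomaly = SmoothAnomaly(lx=500., ly=500., lz=200., x=2500., y=2500.,
#                           depth=500., rho_inner=200., rho_outer=1e-6)
#   src = SyntheticDataSource(DIM=3, NE=20, l=5000., h=2000., features=[anomaly])
#   dom = src.getDomain()
#   g, sigma = src.getGravityAndStdDev()
#   rho_ref = src.getReferenceDensity()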
