/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Contents of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 3966 - (show annotations)
Mon Sep 17 00:45:39 2012 UTC (7 years ago) by caltinay
File MIME type: text/x-python
File size: 31295 byte(s)
Changed unit scale for ER Mapper files. Fixed an issue with UBC source.

1
2 ########################################################
3 #
4 # Copyright (c) 2003-2012 by University of Queensland
5 # Earth Systems Science Computational Center (ESSCC)
6 # http://www.uq.edu.au/esscc
7 #
8 # Primary Business: Queensland, Australia
9 # Licensed under the Open Software License version 3.0
10 # http://www.opensource.org/licenses/osl-3.0.php
11 #
12 ########################################################
13
14 __copyright__="""Copyright (c) 2003-2012 by University of Queensland
15 Earth Systems Science Computational Center (ESSCC)
16 http://www.uq.edu.au/esscc
17 Primary Business: Queensland, Australia"""
18 __license__="""Licensed under the Open Software License version 3.0
19 http://www.opensource.org/licenses/osl-3.0.php"""
20 __url__="https://launchpad.net/escript-finley"
21
22 __all__ = ['DataSource','UBCDataSource','ERSDataSource','SyntheticDataSource','SmoothAnomaly']
23
24 import logging
25 import numpy as np
26 import struct
27 from esys.escript import *
28 from esys.escript.linearPDEs import *
29 import esys.escript.unitsSI as U
30 try:
31 from scipy.io.netcdf import netcdf_file
32 __all__ += ['NetCDFDataSource']
33 except:
34 pass
35
36 def LatLonToUTM(lon, lat, wkt_string=None):
37 """
38 Converts one or more longitude,latitude pairs to the corresponding x,y
39 coordinates in the Universal Transverse Mercator projection.
40 If wkt_string is not given or invalid or the gdal module is not available
41 to convert the string, then the input values are assumed to be given in the
42 Clarke 1866 projection.
43 """
44
45 # not really optimal: if pyproj is not installed we return the input
46 # values without modification.
47 try:
48 import pyproj
49 except:
50 print("Warning, pyproj not available. Domain extents will be wrong")
51 return lon,lat
52
53 # determine UTM zone from the input data
54 zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
55 try:
56 import osgeo.osr
57 srs = osgeo.osr.SpatialReference()
58 srs.ImportFromWkt(wkt_string)
59 p_src = pyproj.Proj(srs.ExportToProj4())
60 except:
61 p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
62 # we assume southern hemisphere here
63 p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
64 x,y=pyproj.transform(p_src, p_dest, lon, lat)
65 return x,y
66
class DataSource(object):
    """
    A class that provides survey data for the inversion process.

    Subclasses implement ``getDataExtents``, ``getVerticalExtents``,
    ``getDomainClass`` and ``getGravityAndStdDev`` for a specific file
    format or synthetic setup; this base class builds the padded domain
    and the density mask from those extents.
    """
    # this is currently specific to gravity inversion and should be generalised

    def __init__(self):
        """
        Initialises defaults: 10% padding on each side, no bottom
        constraint, no domain created yet, and a class-specific logger.
        """
        self._constrainBottom=False
        self._domain=None
        self._pad_l=0.1
        self._pad_h=0.1
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)

    def _addPadding(self, pad_l, pad_h, NE, l, origin):
        """
        Helper method that computes new number of elements, length and origin
        after adding padding to the input values.

        :param pad_l: lateral padding; a fraction (0..1) or, if >=1, a
                      number of elements per side
        :param pad_h: padding for the last (vertical) dimension, same rules
        :param NE: number of elements per dimension (without padding)
        :param l: domain length per dimension
        :param origin: origin coordinates per dimension
        :return: tuple (NE_new, l_new, origin_new)
        """
        DIM=len(NE)
        frac=[0.]*DIM
        # padding is applied to each side so multiply by 2 to get the total
        # amount of padding per dimension
        if pad_l>0 and pad_l<1:
            for i in xrange(DIM-1):
                frac[i]=2.*pad_l
        elif pad_l>=1:
            for i in xrange(DIM-1):
                frac[i]=2.*pad_l/float(NE[i])
        if pad_h>0 and pad_h<1:
            frac[DIM-1]=2.*pad_h
        elif pad_h>=1:
            frac[DIM-1]=2.*pad_h/(float(NE[DIM-1]))

        # calculate new number of elements
        NE_new=[int(NE[i]*(1+frac[i])) for i in xrange(DIM)]
        NEdiff=[NE_new[i]-NE[i] for i in xrange(DIM)]
        spacing=[l[i]/NE[i] for i in xrange(DIM)]
        l_new=[NE_new[i]*spacing[i] for i in xrange(DIM)]
        # shift the origin by half the added elements so data stays centred
        origin_new=[origin[i]-NEdiff[i]/2.*spacing[i] for i in xrange(DIM)]
        return NE_new, l_new, origin_new

    def _interpolateOnDomain(self, data):
        """
        Helper method that interpolates data arrays onto the domain.
        Currently this works like a nearest neighbour mapping, i.e. values
        are directly inserted into data objects at closest location.

        :param data: sequence of records [x, y(, z), value0, value1, ...]
        :return: list of expanded Scalar objects, one per value column
        """
        dom=self.getDomain()
        dim=dom.getDim()
        # determine number of values required per element
        DPP=Scalar(0., Function(dom)).getNumberOfDataPoints()
        for i in xrange(dim):
            DPP=DPP/self._dom_NE[i]
        DPP=int(DPP)

        # idx_mult.dot([x,y,z]) = flat index into data object
        idx_mult=np.array([DPP]+self._dom_NE[:dim-1]).cumprod()

        # separate data arrays and coordinates
        num_arrays=len(data[0])-dim
        arrays=[]
        for i in xrange(num_arrays):
            d=Scalar(0., Function(dom))
            d.expand()
            arrays.append(d)

        for entry in data:
            # cell index of this record in each dimension
            index=[int((entry[i]-self._dom_origin[i])/self._spacing[i]) for i in xrange(dim)]
            index=int(idx_mult.dot(index))
            for i in xrange(num_arrays):
                # assign the same value to every data point of the element
                for p in xrange(DPP):
                    arrays[i].setValueOfDataPoint(index+p, entry[dim+i])

        return arrays

    def _interpolateOnDomain_old(self, data):
        """
        Old interpolation method. Works only on ContinuousFunction and thus
        produces wrong values once interpolated on Function.

        Kept for reference; prefer ``_interpolateOnDomain``.
        """
        dom=self.getDomain()
        dim=dom.getDim()
        # shape = number of data points/nodes in each dimension
        # (built in reverse order: last dimension varies fastest)
        shape=()
        for i in xrange(dim):
            shape=(self._dom_NE[i]+1,)+shape
        # separate data arrays and coordinates
        arrays=np.zeros(((len(data[0])-dim),)+shape)
        num_arrays=arrays.shape[0]
        for entry in data:
            index=()
            for i in xrange(dim):
                index=(int((entry[i]-self._dom_origin[i])/self._spacing[i]),)+index
            for i in xrange(num_arrays):
                arrays[i][index]=entry[dim+i]
        x=dom.getX()
        delta=[self._dom_len[i]/(shape[dim-i-1]-1) for i in xrange(dim)]
        realorigin=[inf(x[i]) for i in xrange(dim)]
        res=[]
        for i in xrange(num_arrays):
            res.append(interpolateTable(arrays[i], x[:dim], realorigin, delta, 1e9))
        return res

    def setPadding(self, pad_l=0.1, pad_h=0.1):
        """
        Sets the amount of padding around the dataset. If pad_l/pad_h is >=1
        they are treated as number of elements to be added to the domain.
        If 0 < pad_l;pad_h < 1, the padding amount is relative.
        """
        self._pad_l=pad_l
        self._pad_h=pad_h

    def setConstrainBottom(self, constrain):
        """
        If `constrain` is True, then the density mask will be set to 1 in the
        padding area at the bottom of the domain. By default this area is
        unconstrained.
        """
        self._constrainBottom=constrain

    def getDomain(self):
        """
        Returns a domain that spans the data area plus padding.

        The domain is created lazily on first call and cached.
        """
        if self._domain is None:
            self._domain=self._createDomain(self._pad_l, self._pad_h)
        return self._domain

    def getDensityMask(self):
        """
        Returns the density mask data object, where mask has value 1 on the
        padding area, 0 elsewhere.
        """
        return self._mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as a
        tuple.

        Must be implemented by subclasses.
        """
        raise NotImplementedError

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) ), where
        x0, y0 = coordinates of data origin
        nx, ny = number of data points in x and y
        dx, dy = spacing of data points in x and y

        Must be implemented by subclasses.
        """
        raise NotImplementedError

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz), where
        z0 = minimum z coordinate (origin)
        nz = number of nodes in z direction
        dz = spacing of nodes (= cell size in z)

        Must be implemented by subclasses.
        """
        raise NotImplementedError

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)

        Must be implemented by subclasses.
        """
        raise NotImplementedError

    def _createDomain(self, padding_l, padding_h):
        """
        creates and returns an escript domain that spans the entire area of
        available data plus a buffer zone.

        Also computes ``self._mask`` (1 on padding, 0 elsewhere) and caches
        the padded lengths, element counts and origin on ``self``.
        """
        X0, NX, DX = self.getDataExtents()
        z0, nz, dz = self.getVerticalExtents()

        # number of elements (without padding)
        NE = [NX[0], NX[1], nz]

        # origin of domain (without padding)
        origin = [X0[0], X0[1], z0]
        origin = [np.round(oi) for oi in origin]

        # cell size / point spacing
        # (rounded to avoid index mismatches in _interpolateOnDomain)
        self._spacing = DX+[dz]
        self._spacing = [np.round(si) for si in self._spacing]

        # length of domain (without padding)
        l = [NE[i]*self._spacing[i] for i in xrange(len(NE))]

        # now add padding to the values
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, origin)

        # number of padding elements per side
        NE_pad=[(NE_new[i]-NE[i])/2 for i in xrange(3)]

        self._dom_len = l_new
        self._dom_NE = NE_new
        self._dom_origin = origin_new
        lo=[(origin_new[i], origin_new[i]+l_new[i]) for i in xrange(3)]
        try:
            # first try a constructor that accepts coordinate ranges
            # (l0/l1/l2 as tuples), as ripley does
            dom=self.getDomainClass()(*self._dom_NE, l0=lo[0], l1=lo[1], l2=lo[2])
            # ripley may internally adjust NE and length, so recompute
            self._dom_len=[sup(dom.getX()[i])-inf(dom.getX()[i]) for i in xrange(3)]
            self._dom_NE=[self._dom_len[i]/self._spacing[i] for i in xrange(3)]
            x=dom.getX()-[self._dom_origin[i]+NE_pad[i]*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(dom.getX()[2])

        except TypeError:
            # fall back to constructors that only take plain lengths
            dom=self.getDomainClass()(*self._dom_NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
            x=dom.getX()-[NE_pad[i]*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(x[2]+self._dom_origin[2])

        # prepare density mask (=1 at padding area, 0 else)
        if self._constrainBottom:
            M=3 # constrain bottom
        else:
            M=2 # do not constrain bottom
        for i in xrange(M):
            mask=mask + whereNegative(x[i]) + \
                    wherePositive(x[i]-l_new[i]+2*NE_pad[i]*self._spacing[i])
        self._mask=wherePositive(mask)

        self.logger.debug("Domain size: %d x %d x %d elements"%(self._dom_NE[0],self._dom_NE[1],self._dom_NE[2]))
        self.logger.debug("     length: %g x %g x %g"%(self._dom_len[0],self._dom_len[1],self._dom_len[2]))
        self.logger.debug("     origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        return dom
296
297
298 ##############################################################################
class UBCDataSource(DataSource):
    """
    Data source that reads gravity survey data in UBC-GIF style format:
    a mesh file describing the grid plus a gravity observation file.
    """
    def __init__(self, domainclass, meshfile, gravfile, topofile=None):
        """
        :param domainclass: domain generator class (e.g. esys.ripley.Brick)
        :param meshfile: name of the UBC mesh file
        :param gravfile: name of the gravity observation file
        :param topofile: optional name of a topography file
        """
        super(UBCDataSource,self).__init__()
        self.__meshfile=meshfile
        self.__gravfile=gravfile
        self.__topofile=topofile
        self.__domainclass=domainclass
        self.__readMesh()

    def __readMesh(self):
        """
        Parses the mesh file: the first line holds the number of points per
        dimension, the second the origin; the remaining lines contain the
        cell sizes after a '*' (presumably 'count*size' — TODO confirm).
        """
        meshdata=open(self.__meshfile).readlines()
        numDataPoints=meshdata[0].split()
        origin=meshdata[1].split()
        # NOTE(review): relies on Python 2 map() returning a list; under
        # Python 3 the index assignments below would fail
        self.__nPts=map(int, numDataPoints)
        self.__origin=map(float, origin)
        self.__delta=[float(X.split('*')[1]) for X in meshdata[2:]]
        # vertical data is upside down
        self.__origin[2]-=(self.__nPts[2]-1)*self.__delta[2]
        self.logger.debug("Data Source: %s (mesh file: %s)"%(self.__gravfile, self.__meshfile))

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    #def getDensityMask(self):
    #    topodata=self.__readTopography()
    #    mask=self._interpolateOnDomain(topodata)
    #    mask=wherePositive(self.getDomain().getX()[2]-mask[0])
    #    return mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as
        a tuple; only the vertical gravity component is non-zero.
        """
        gravlist=self.__readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def __readTopography(self):
        """
        Reads the topography file: a point count followed by one x,y,z
        record per line. Returns an (n,3) numpy array.
        """
        f=open(self.__topofile)
        n=int(f.readline())
        topodata=np.zeros((n,3))
        for i in xrange(n):
            x=f.readline().split()
            x=map(float, x)
            topodata[i]=x
        f.close()
        return topodata

    def __readGravity(self):
        """
        Reads the gravity file: a record count followed by one
        x,y,z,anomaly,stddev record per line. Units are converted from
        mGal to m/s^2. Returns an (n,5) numpy array.
        """
        f=open(self.__gravfile)
        n=int(f.readline())
        gravdata=np.zeros((n,5))
        for i in xrange(n):
            x=f.readline().split()
            x=map(float, x) # x, y, z, anomaly in mGal, stddev
            # convert gravity anomaly units to m/s^2 and rescale error
            # (note the sign flip of the anomaly value)
            x[3]*=-1e-5
            x[4]*=1e-5
            gravdata[i]=x
        f.close()
        return gravdata
372
373 ##############################################################################
class NetCDFDataSource(DataSource):
    """
    Data source that reads gravity (and optionally topography) data from
    netCDF files via scipy's ``netcdf_file``.
    """
    def __init__(self, domainclass, gravfile, topofile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
        """
        vertical_extents - (alt_min, alt_max, num_points)
        alt_of_data - altitude of measurements

        :param domainclass: domain generator class (e.g. esys.ripley.Brick)
        :param gravfile: name of the netCDF gravity file
        :param topofile: optional name of a netCDF topography file
        """
        super(NetCDFDataSource,self).__init__()
        self.__topofile=topofile
        self.__gravfile=gravfile
        self.__domainclass=domainclass
        self.__determineExtents(vertical_extents)
        self.__altOfData=alt_of_data

    def __determineExtents(self, ve):
        """
        Opens the gravity file and determines grid dimensions, the names of
        the coordinate and gravity variables, and the (UTM-projected)
        origin and spacing of the data.
        """
        self.logger.debug("Data Source: %s"%self.__gravfile)
        f=netcdf_file(self.__gravfile, 'r')
        # number of points in x (longitude) direction
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        # number of points in y (latitude) direction
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables
        # (pop() removes them from f.variables so the gravity variable can
        # be identified by elimination below)
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out gravity variable name
        grav_name=None
        if len(f.variables)==1:
            grav_name=f.variables.keys()[0]
        else:
            # otherwise take the first variable indexed by both coordinates
            for n in f.variables.keys():
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    grav_name=n
                    break
        if grav_name is None:
            raise RuntimeError("Could not determine gravity variable")

        # see if there is a wkt string to convert coordinates
        try:
            wkt_string=f.variables[grav_name].esri_pe_string
        except:
            wkt_string=None

        # we don't trust actual_range & geospatial_lon_min/max since subset
        # data does not seem to have these fields updated.
        # Getting min/max from the arrays is obviously not very efficient...
        #lon_range=longitude.actual_range
        #lat_range=latitude.actual_range
        #lon_range=[f.geospatial_lon_min,f.geospatial_lon_max]
        #lat_range=[f.geospatial_lat_min,f.geospatial_lat_max]
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        origin=[lon_range[0],lat_range[0],ve[0]]
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0],ve[1]-ve[0]]

        f.close()

        self.__nPts=[NX, NY, ve[2]]
        self.__origin=origin
        # we are rounding to avoid interpolateOnDomain issues
        self.__delta=[np.round(lengths[i]/(self.__nPts[i]-1)) for i in xrange(3)]
        self.__wkt_string=wkt_string
        self.__lon=lon_name
        self.__lat=lat_name
        self.__grv=grav_name

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as
        a tuple; only the vertical gravity component is non-zero.
        """
        gravlist=self._readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def _readTopography(self):
        """
        Reads longitude, latitude and altitude variables from the
        topography file and returns them as an (n,3) column stack.
        """
        f=netcdf_file(self.__topofile, 'r')
        lon=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon=f.variables[n][:]
                break
        if lon is None:
            raise RuntimeError("Could not determine longitude variable")
        lat=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat=f.variables[n][:]
                break
        if lat is None:
            raise RuntimeError("Could not determine latitude variable")
        alt=None
        for n in ['altitude','alt']:
            if n in f.variables:
                alt=f.variables[n][:]
                break
        if alt is None:
            raise RuntimeError("Could not determine altitude variable")

        topodata=np.column_stack((lon,lat,alt))
        f.close()
        return topodata

    def _readGravity(self):
        """
        Reads the gravity variable and returns records of x, y, altitude,
        gravity and error, with units converted to m/s^2.
        """
        f=netcdf_file(self.__gravfile, 'r')
        #lon=f.variables[self.__lon][:]
        #lat=f.variables[self.__lat][:]
        # reconstruct the (rounded) coordinates rather than reading them so
        # they line up with the spacing computed in __determineExtents
        NE=[self.__nPts[i]-1 for i in xrange(2)]
        lon=np.linspace(self.__origin[0], self.__origin[0]+NE[0]*self.__delta[0], NE[0]+1)
        lat=np.linspace(self.__origin[1], self.__origin[1]+NE[1]*self.__delta[1], NE[1]+1)
        lon,lat=np.meshgrid(lon,lat)
        grav=f.variables[self.__grv][:]
        f.close()
        lon=lon.flatten()
        lat=lat.flatten()
        grav=grav.flatten()
        alt=self.__altOfData*np.ones(grav.shape)
        # error value is an assumption
        try:
            # NOTE(review): the file was closed above; if this attribute
            # access fails the bare except silently falls back — verify
            missing=f.variables[self.__grv].missing_value
            err=np.where(grav==missing, 0.0, 20.0)
        except:
            err=20.0*np.ones(lon.shape)
        # convert units
        err=1e-6*err
        grav=1e-6*grav
        gravdata=np.column_stack((lon,lat,alt,grav,err))
        return gravdata
542
543 ##############################################################################
class ERSDataSource(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, domainclass, headerfile, datafile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
        """
        headerfile - usually ends in .ers
        datafile - usually has the same name as the headerfile without '.ers'

        :param domainclass: domain generator class (e.g. esys.ripley.Brick)
        :param vertical_extents: (alt_min, alt_max, num_points)
        :param alt_of_data: altitude of measurements
        """
        super(ERSDataSource,self).__init__()
        self.__headerfile=headerfile
        if datafile is None:
            # default: header name with the '.ers' suffix stripped
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__domainclass=domainclass
        self.__readHeader(vertical_extents)
        self.__altOfData=alt_of_data

    def __readHeader(self, ve):
        """
        Parses the ERS header into a flat dictionary of
        'Section.Subsection.Key' entries and derives grid extents, cell
        spacing, origin and the null-cell value from it.
        """
        self.logger.debug("Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        metadata=open(self.__headerfile, 'r').readlines()
        # parse metadata
        # (no break: if several 'DatasetHeader Begin' lines exist, the last
        # occurrence wins)
        start=-1
        for i in xrange(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # build 'Section.Key' -> value map; 'X Begin'/'End' lines open and
        # close nested sections
        md_dict={}
        section=[]
        for i in xrange(start, len(metadata)):
            line=metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        # only 4-byte IEEE floats are supported by __readGravity below
        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except:
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except:
            raise RuntimeError("Could not determine NullCellValue")

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except:
            raise RuntimeError("Could not determine cell dimensions")

        # origin is either in eastings/northings or longitude/latitude
        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        except:
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available, assuming GDA94 data')
            originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
            op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolateOnDomain issues
            # (spacing is the projected distance of one cell step)
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin=[originX, originY]
        originY-=(NY-1)*spacingY
        self.__maskval=maskval
        spacingZ=np.round(float(ve[1]-ve[0])/(ve[2]-1))
        self.__delta = [spacingX, spacingY, spacingZ]
        self.__nPts = [NX, NY, ve[2]]
        self.__origin = [originX, originY, ve[0]]

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as
        a tuple; only the vertical gravity component is non-zero.
        """
        gravlist=self.__readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def __readGravity(self):
        """
        Reads NX*NY 4-byte floats from the raw data file and returns
        records of x, y, altitude, gravity and error. Cells equal to the
        header's NullCellValue get error 0, all others error 1 (scaled to
        m/s^2 below).
        """
        # NOTE(review): file opened in text mode but read with
        # struct.unpack — relies on Python 2 / platforms where 'r' == 'rb'
        f=open(self.__datafile, 'r')
        n=self.__nPts[0]*self.__nPts[1]
        grav=[]
        err=[]
        for i in xrange(n):
            v=struct.unpack('f',f.read(4))[0]
            grav.append(v)
            # relative comparison against the null value marks masked cells
            if abs((self.__maskval-v)/v) < 1e-6:
                err.append(0.)
            else:
                err.append(1.)
        f.close()

        NE=[self.__nPts[i] for i in xrange(2)]
        x=self.__dataorigin[0]+np.arange(start=0, stop=NE[0]*self.__delta[0], step=self.__delta[0])
        # data sets have origin in top-left corner so y runs top-down
        y=self.__dataorigin[1]-np.arange(start=0, stop=NE[1]*self.__delta[1], step=self.__delta[1])
        x,y=np.meshgrid(x,y)
        x,y=x.flatten(),y.flatten()
        alt=self.__altOfData*np.ones((n,))

        # convert units
        err=2e-6*np.array(err)
        grav=1e-6*np.array(grav)
        gravdata=np.column_stack((x,y,alt,grav,err))
        return gravdata
706
707 ##############################################################################
class SourceFeature(object):
    """
    Base class for features that contribute a density distribution to
    (parts of) the domain of a synthetic data source — for example a layer
    of a specific rock type or a simulated ore body.

    Concrete features implement getDensity() and getMask().
    """

    def getDensity(self):
        """
        Returns the density for the area covered by this feature's mask.
        The result may be a constant or a data object with spatial
        dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask of this feature's area of interest: non-zero
        wherever the value of getDensity() should be applied, zero
        everywhere else.

        :param x: coordinates at which the mask is evaluated
        """
        raise NotImplementedError
727
class SmoothAnomaly(SourceFeature):
    """
    A box-shaped source feature whose density falls off exponentially from
    the box centre: the magnitude is rho_inner at the centre and is clipped
    below at |rho_outer|.
    """
    def __init__(self, lx, ly, lz, x, y, depth, rho_inner, rho_outer):
        """
        :param lx,ly,lz: extents of the box in each direction
        :param x,y: horizontal centre of the box
        :param depth: depth of the box centre below the surface
        :param rho_inner: density at the centre of the box
        :param rho_outer: density magnitude at the box boundary
        """
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.x=x
        self.y=y
        self.depth=depth
        self.rho_inner=rho_inner
        self.rho_outer=rho_outer
        # the density data object is computed lazily by getMask()
        self.rho=None

    def getDensity(self):
        """
        Returns the density data object (None until getMask() was called).
        """
        return self.rho

    def getMask(self, x):
        """
        Returns the mask of the box area; on the first call this also
        computes and caches the smoothed density distribution.

        :param x: coordinates at which mask (and density) are evaluated
        """
        dim=x.getDomain().getDim()
        vert=x[dim-1]
        # vertical position of the box centre (measured from the surface)
        centre_z=sup(vert)-self.depth
        # box membership test in the vertical and first horizontal direction
        m=whereNonNegative(vert-(centre_z-self.lz/2)) * whereNonPositive(vert-(centre_z+self.lz/2)) \
           *whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if dim>2:
            # additionally restrict along the second horizontal direction
            m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        if self.rho is None:
            # Gaussian-style falloff; alpha is chosen from the ratio of
            # outer to inner density
            alpha=-log(abs(self.rho_outer/self.rho_inner))*4
            profile=exp(-alpha*((x[0]-self.x)/self.lx)**2)
            profile=profile*exp(-alpha*((vert-centre_z)/self.lz)**2)
            self.rho=maximum(abs(self.rho_outer), abs(self.rho_inner*profile))
            if self.rho_inner<0: self.rho=-self.rho
        return m
756
757 ##############################################################################
class SyntheticDataSource(DataSource):
    """
    Synthetic data source: builds a finley Rectangle/Brick domain, places
    the supplied source features on it and computes the resulting gravity
    field by solving a potential PDE.
    """
    def __init__(self, DIM, NE, l, h, features):
        """
        :param DIM: spatial dimensionality (2 or 3)
        :param NE: number of elements in the vertical direction
        :param l: lateral extent of the domain
        :param h: vertical extent of the domain
        :param features: list of SourceFeature objects to place
        """
        super(SyntheticDataSource,self).__init__()
        self._features = features
        self.DIM=DIM
        self.NE=NE
        self.l=l
        self.h=h

    def _createDomain(self, padding_l, padding_h):
        """
        Creates the padded finley domain, the measurement mask
        (``_g_mask``), the padding mask (``_mask``) and the reference
        density (``_rho``) from the features.
        """
        NE_H=self.NE
        # lateral element count scaled to keep roughly square elements
        NE_L=int((self.l/self.h)*NE_H+0.5)
        l=[self.l]*(self.DIM-1)+[self.h]
        NE=[NE_L]*(self.DIM-1)+[NE_H]
        origin=[0.]*self.DIM
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, origin)

        self.NE=NE_new
        self.l=l_new[0]
        self.h=l_new[self.DIM-1]

        self.logger.debug("Data Source: synthetic with %d features"%len(self._features))
        if self.DIM==2:
            from esys.finley import Rectangle
            dom = Rectangle(n0=NE_new[0], n1=NE_new[1], l0=l_new[0], l1=l_new[1])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d elements"%(NE_new[0], NE_new[1]))
            self.logger.debug("     length: %g x %g"%(l_new[0],l_new[1]))
            self.logger.debug("     origin: %g x %g"%(origin_new[0],origin_new[1]))
        else:
            from esys.finley import Brick
            dom = Brick(n0=NE_new[0], n1=NE_new[1], n2=NE_new[2], l0=l_new[0], l1=l_new[1], l2=l_new[2])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
            self.logger.debug("     length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
            self.logger.debug("     origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        # measurement mask: one element-thick layer at the top of the
        # (unpadded) domain, excluding the lateral padding
        dz=l_new[self.DIM-1]/NE_new[self.DIM-1]
        self._g_mask=wherePositive(dom.getX()[0]-origin_new[0]) \
                * whereNegative(dom.getX()[0]-(l_new[0]-origin_new[0])) \
                * whereNonNegative(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+origin_new[self.DIM-1])) \
                * whereNonPositive(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+(origin_new[self.DIM-1]+dz)))
        # padding mask: 1 outside the original (unpadded) data area
        self._mask=whereNegative(self._x[self.DIM-1]) + \
                wherePositive(self._x[self.DIM-1]-l[self.DIM-1])
        for i in xrange(self.DIM-1):
            self._mask=self._mask + whereNegative(self._x[i]) + \
                    wherePositive(self._x[i]-l[i])
        self._mask=wherePositive(self._mask)

        # overlay the features: later features overwrite earlier ones
        # within their masks
        rho_ref=0.
        for f in self._features:
            m=f.getMask(self._x)
            rho_ref = rho_ref * (1-m) + f.getDensity() * m
        self._rho=rho_ref

        return dom

    def getDensityMask(self):
        """
        Returns the mask that is 1 on the padding area, 0 elsewhere.
        """
        return self._mask

    def getReferenceDensity(self):
        """
        Returns the reference density composed from the features.
        """
        return self._rho

    def getGravityAndStdDev(self):
        """
        Solves a potential PDE for the reference density and returns the
        gravity field together with the measurement mask as 'std dev'.
        """
        pde=LinearSinglePDE(self.getDomain())
        G=6.6742e-11*U.m**3/(U.kg*U.sec**2)
        # fix the potential on all outer boundaries
        m_psi_ref=0.
        for i in xrange(self.DIM):
            m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
                    + whereZero(self._x[i]-sup(self._x[i]))

        pde.setValue(A=kronecker(self.getDomain()), Y=-4*np.pi*G*self._rho, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        # gravity is the negative gradient of the potential
        g=-grad(psi_ref)
        sigma=self._g_mask
        return g,sigma
837

  ViewVC Help
Powered by ViewVC 1.1.26