/[escript]/trunk/downunder/py_src/datasources.py
ViewVC logotype

Contents of /trunk/downunder/py_src/datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 3980 - (show annotations)
Fri Sep 21 01:55:39 2012 UTC (6 years, 8 months ago) by caltinay
File MIME type: text/x-python
File size: 30722 byte(s)
Now using NullCellValue to initialise data object for ERS reader.

1
2 ########################################################
3 #
4 # Copyright (c) 2003-2012 by University of Queensland
5 # Earth Systems Science Computational Center (ESSCC)
6 # http://www.uq.edu.au/esscc
7 #
8 # Primary Business: Queensland, Australia
9 # Licensed under the Open Software License version 3.0
10 # http://www.opensource.org/licenses/osl-3.0.php
11 #
12 ########################################################
13
14 __copyright__="""Copyright (c) 2003-2012 by University of Queensland
15 Earth Systems Science Computational Center (ESSCC)
16 http://www.uq.edu.au/esscc
17 Primary Business: Queensland, Australia"""
18 __license__="""Licensed under the Open Software License version 3.0
19 http://www.opensource.org/licenses/osl-3.0.php"""
20 __url__="https://launchpad.net/escript-finley"
21
22 __all__ = ['DataSource','UBCDataSource','ERSDataSource','SyntheticDataSource','SmoothAnomaly']
23
24 import logging
25 import numpy as np
26 import struct
27 from esys.escript import *
28 from esys.escript.linearPDEs import *
29 import esys.escript.unitsSI as U
30 from esys.ripley import *
31
32 try:
33 from scipy.io.netcdf import netcdf_file
34 __all__ += ['NetCDFDataSource']
35 except:
36 pass
37
def LatLonToUTM(lon, lat, wkt_string=None):
    """
    Converts one or more longitude,latitude pairs to the corresponding x,y
    coordinates in the Universal Transverse Mercator projection.

    If wkt_string is not given or invalid or the gdal module is not available
    to convert the string, then the input values are assumed to be given in
    the Clarke 1866 projection.

    :param lon: longitude value(s)
    :param lat: latitude value(s)
    :param wkt_string: Well-Known-Text description of the source spatial
                       reference system (optional)
    :return: tuple (x, y) of projected coordinates, or the unmodified
             (lon, lat) input if pyproj is not available
    """
    # not really optimal: if pyproj is not installed we return the input
    # values without modification.
    try:
        import pyproj
    except ImportError:
        print("Warning, pyproj not available. Domain extents will be wrong")
        return lon,lat

    # determine UTM zone from the input data; the median guards against a
    # few outlying points dragging the zone selection off-centre
    zone=int(np.median((np.floor((np.array(lon) + 180)/6) + 1) % 60))
    try:
        import osgeo.osr
        srs = osgeo.osr.SpatialReference()
        srs.ImportFromWkt(wkt_string)
        p_src = pyproj.Proj(srs.ExportToProj4())
    except Exception:
        # gdal is unavailable or wkt_string was None/invalid -> fall back
        # to the Clarke 1866 ellipsoid
        p_src = pyproj.Proj('+proj=longlat +ellps=clrk66 +no_defs')
    # we assume southern hemisphere here
    p_dest = pyproj.Proj('+proj=utm +zone=%d +south +units=m'%zone)
    x,y=pyproj.transform(p_src, p_dest, lon, lat)
    return x,y
68
class DataSource(object):
    """
    A class that provides survey data for the inversion process.

    Subclasses implement the abstract accessors (getDataExtents,
    getVerticalExtents, getDomainClass, getGravityAndStdDev); this base
    class builds the padded computational domain from those extents.
    """
    # this is currently specific to gravity inversion and should be generalised

    def __init__(self):
        """
        Initialises defaults: 10% padding laterally and vertically, sides
        constrained, bottom unconstrained, domain created lazily.
        """
        self._constrainBottom=False
        self._constrainSides=True
        self._domain=None
        self._pad_l=0.1
        self._pad_h=0.1
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)

    def _addPadding(self, pad_l, pad_h, NE, l, origin):
        """
        Helper method that computes new number of elements, length and origin
        after adding padding to the input values.

        :param pad_l: lateral padding; relative fraction if in (0,1),
                      number of elements per side if >=1
        :param pad_h: vertical padding, same convention as pad_l
        :param NE: number of elements per dimension (without padding)
        :param l: domain length per dimension (without padding)
        :param origin: domain origin (without padding)
        :return: tuple (NE_new, l_new, origin_new)
        """
        DIM=len(NE)
        frac=[0.]*DIM
        # padding is applied to each side so multiply by 2 to get the total
        # amount of padding per dimension
        if pad_l>0 and pad_l<1:
            for i in xrange(DIM-1):
                frac[i]=2.*pad_l
        elif pad_l>=1:
            for i in xrange(DIM-1):
                frac[i]=2.*pad_l/float(NE[i])
        if pad_h>0 and pad_h<1:
            frac[DIM-1]=2.*pad_h
        elif pad_h>=1:
            frac[DIM-1]=2.*pad_h/(float(NE[DIM-1]))

        # calculate new number of elements
        NE_new=[int(NE[i]*(1+frac[i])) for i in xrange(DIM)]
        NEdiff=[NE_new[i]-NE[i] for i in xrange(DIM)]
        spacing=[l[i]/NE[i] for i in xrange(DIM)]
        l_new=[NE_new[i]*spacing[i] for i in xrange(DIM)]
        # shift the origin by half the added elements so the original data
        # area stays centred inside the padded domain
        origin_new=[origin[i]-NEdiff[i]/2.*spacing[i] for i in xrange(DIM)]
        return NE_new, l_new, origin_new

    def _interpolateOnDomain(self, data):
        """
        Helper method that interpolates data arrays onto the domain.
        Currently this works like a nearest neighbour mapping, i.e. values
        are directly inserted into data objects at closest location.

        :param data: sequence of rows, each row holding dim coordinate
                     values followed by one value per output array
        :return: list of escript Scalar objects, one per data column
        """
        dom=self.getDomain()
        dim=dom.getDim()
        # determine number of values required per element
        DPP=Scalar(0., Function(dom)).getNumberOfDataPoints()
        for i in xrange(dim):
            DPP=DPP/self._dom_NE[i]
        DPP=int(DPP)

        # idx_mult.dot([x,y,z]) = flat index into data object
        idx_mult=np.array([DPP]+self._dom_NE[:dim-1]).cumprod()

        # separate data arrays and coordinates
        num_arrays=len(data[0])-dim
        arrays=[]
        for i in xrange(num_arrays):
            d=Scalar(0., Function(dom))
            d.expand()
            arrays.append(d)

        for entry in data:
            # element index of the cell containing this data point
            index=[int((entry[i]-self._dom_origin[i])/self._spacing[i]) for i in xrange(dim)]
            index=int(idx_mult.dot(index))
            # write the same value into every quadrature point of the cell
            for i in xrange(num_arrays):
                for p in xrange(DPP):
                    arrays[i].setValueOfDataPoint(index+p, entry[dim+i])

        return arrays

    def _interpolateOnDomain_old(self, data):
        """
        Old interpolation method. Works only on ContinuousFunction and thus
        produces wrong values once interpolated on Function.
        Kept for reference only.
        """
        dom=self.getDomain()
        dim=dom.getDim()
        # shape = number of data points/nodes in each dimension
        shape=()
        for i in xrange(dim):
            shape=(self._dom_NE[i]+1,)+shape
        # separate data arrays and coordinates
        arrays=np.zeros(((len(data[0])-dim),)+shape)
        num_arrays=arrays.shape[0]
        for entry in data:
            index=()
            for i in xrange(dim):
                index=(int((entry[i]-self._dom_origin[i])/self._spacing[i]),)+index
            for i in xrange(num_arrays):
                arrays[i][index]=entry[dim+i]
        x=dom.getX()
        delta=[self._dom_len[i]/(shape[dim-i-1]-1) for i in xrange(dim)]
        realorigin=[inf(x[i]) for i in xrange(dim)]
        res=[]
        for i in xrange(num_arrays):
            res.append(interpolateTable(arrays[i], x[:dim], realorigin, delta, 1e9))
        return res

    def setPadding(self, pad_l=0.1, pad_h=0.1):
        """
        Sets the amount of padding around the dataset. If pad_l/pad_h is >=1
        they are treated as number of elements to be added to the domain.
        If 0 < pad_l;pad_h < 1, the padding amount is relative.
        """
        self._pad_l=pad_l
        self._pad_h=pad_h

    def setConstraints(self, bottom=False, sides=True):
        """
        If `bottom` is True, then the density mask will be set to 1 in the
        padding area at the bottom of the domain. By default this area is
        unconstrained. Similarly, if `sides` is True (default) then the
        horizontal padding area is constrained, otherwise not.
        """
        self._constrainBottom=bottom
        self._constrainSides=sides

    def getDomain(self):
        """
        Returns a domain that spans the data area plus padding.
        The domain is created on first call and cached afterwards.
        """
        if self._domain is None:
            self._domain=self._createDomain(self._pad_l, self._pad_h)
        return self._domain

    def getDensityMask(self):
        """
        Returns the density mask data object, where mask has value 1 on the
        padding area, 0 elsewhere.
        """
        return self._mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as a
        tuple. Must be implemented by subclasses.
        """
        raise NotImplementedError

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) ), where
        x0, y0 = coordinates of data origin
        nx, ny = number of data points in x and y
        dx, dy = spacing of data points in x and y
        Must be implemented by subclasses.
        """
        raise NotImplementedError

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz), where
        z0 = minimum z coordinate (origin)
        nz = number of nodes in z direction
        dz = spacing of nodes (= cell size in z)
        Must be implemented by subclasses.
        """
        raise NotImplementedError

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        Must be implemented by subclasses.
        """
        raise NotImplementedError

    def _createDomain(self, padding_l, padding_h):
        """
        creates and returns an escript domain that spans the entire area of
        available data plus a buffer zone.

        Also populates self._spacing, self._dom_NE, self._dom_len,
        self._dom_origin, self._dom_NE_pad and the density mask self._mask.
        """
        X0, NX, DX = self.getDataExtents()
        z0, nz, dz = self.getVerticalExtents()

        # number of elements (without padding)
        NE = [NX[0], NX[1], nz]

        # origin of domain (without padding)
        origin = [X0[0], X0[1], z0]
        origin = [np.round(oi) for oi in origin]

        # cell size / point spacing
        self._spacing = DX+[dz]
        self._spacing = [np.round(si) for si in self._spacing]

        # length of domain (without padding)
        l = [NE[i]*self._spacing[i] for i in xrange(len(NE))]

        # now add padding to the values
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, origin)

        # number of padding elements per side
        NE_pad=[(NE_new[i]-NE[i])/2 for i in xrange(3)]

        self._dom_NE_pad = NE_pad
        self._dom_len = l_new
        self._dom_NE = NE_new
        self._dom_origin = origin_new
        lo=[(origin_new[i], origin_new[i]+l_new[i]) for i in xrange(3)]
        try:
            # newer ripley accepts (min,max) tuples for l0/l1/l2
            dom=self.getDomainClass()(*self._dom_NE, l0=lo[0], l1=lo[1], l2=lo[2])
            # ripley may internally adjust NE and length, so recompute
            self._dom_len=[sup(dom.getX()[i])-inf(dom.getX()[i]) for i in xrange(3)]
            self._dom_NE=[self._dom_len[i]/self._spacing[i] for i in xrange(3)]
            x=dom.getX()-[self._dom_origin[i]+NE_pad[i]*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(dom.getX()[2])

        except TypeError:
            # fall back to plain-length constructor signature
            dom=self.getDomainClass()(*self._dom_NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
            x=dom.getX()-[NE_pad[i]*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(x[2]+self._dom_origin[2])

        # prepare density mask (=1 at padding area, 0 else)
        if self._constrainSides:
            for i in xrange(2):
                mask=mask + whereNegative(x[i]) + \
                        wherePositive(x[i]-l_new[i]+2*NE_pad[i]*self._spacing[i])

        if self._constrainBottom:
            mask = mask + whereNegative(x[2])
        self._mask=wherePositive(mask)

        self.logger.debug("Domain size: %d x %d x %d elements"%(self._dom_NE[0],self._dom_NE[1],self._dom_NE[2]))
        self.logger.debug(" length: %g x %g x %g"%(self._dom_len[0],self._dom_len[1],self._dom_len[2]))
        self.logger.debug(" origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        return dom
302
303
304 ##############################################################################
class UBCDataSource(DataSource):
    """
    Data source that reads mesh and gravity data in UBC-GIF format.
    """
    def __init__(self, domainclass, meshfile, gravfile, topofile=None):
        """
        :param domainclass: the domain generator class (e.g. esys.ripley.Brick)
        :param meshfile: UBC mesh file name
        :param gravfile: gravity data file name
        :param topofile: optional topography file name
        """
        super(UBCDataSource,self).__init__()
        self.__meshfile=meshfile
        self.__gravfile=gravfile
        self.__topofile=topofile
        self.__domainclass=domainclass
        self.__readMesh()

    def __readMesh(self):
        """
        Reads number of points, origin and spacing from the mesh file.
        """
        # context manager so the file handle does not leak
        with open(self.__meshfile) as f:
            meshdata=f.readlines()
        numDataPoints=meshdata[0].split()
        origin=meshdata[1].split()
        # materialise as lists since the values are indexed below
        # (also keeps this correct under Python 3's lazy map())
        self.__nPts=[int(v) for v in numDataPoints]
        self.__origin=[float(v) for v in origin]
        self.__delta=[float(X.split('*')[1]) for X in meshdata[2:]]
        # vertical data is upside down
        self.__origin[2]-=(self.__nPts[2]-1)*self.__delta[2]
        self.logger.debug("Data Source: %s (mesh file: %s)"%(self.__gravfile, self.__meshfile))

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    #def getDensityMask(self):
    #    topodata=self.__readTopography()
    #    mask=self._interpolateOnDomain(topodata)
    #    mask=wherePositive(self.getDomain().getX()[2]-mask[0])
    #    return mask

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly (as a vertical vector component) and
        standard deviation interpolated onto the domain.
        """
        gravlist=self.__readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def __readTopography(self):
        """
        Reads and returns topography as an (n,3) array of x,y,altitude.
        """
        # 'with' guarantees the file is closed even if a parse error occurs
        with open(self.__topofile) as f:
            n=int(f.readline())
            topodata=np.zeros((n,3))
            for i in range(n):
                topodata[i]=[float(v) for v in f.readline().split()]
        return topodata

    def __readGravity(self):
        """
        Reads and returns gravity records as an (n,5) array of
        x, y, z, anomaly (in m/s^2) and standard deviation.
        """
        with open(self.__gravfile) as f:
            n=int(f.readline())
            gravdata=np.zeros((n,5))
            for i in range(n):
                x=[float(v) for v in f.readline().split()] # x, y, z, anomaly in mGal, stddev
                # convert gravity anomaly units to m/s^2 and rescale error
                x[3]*=-1e-5
                x[4]*=1e-5
                gravdata[i]=x
        return gravdata
378
379 ##############################################################################
class NetCDFDataSource(DataSource):
    """
    Data source that reads gravity (and optionally topography) data from
    netCDF files using scipy's netcdf reader.
    """
    def __init__(self, domainclass, gravfile, topofile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
        """
        :param domainclass: the domain generator class (e.g. esys.ripley.Brick)
        :param gravfile: netCDF file with the gravity data
        :param topofile: optional netCDF file with topography data
        :param vertical_extents: (alt_min, alt_max, num_points)
        :param alt_of_data: altitude of measurements
        """
        super(NetCDFDataSource,self).__init__()
        self.__topofile=topofile
        self.__gravfile=gravfile
        self.__domainclass=domainclass
        self.__determineExtents(vertical_extents)
        self.__altOfData=alt_of_data

    def __determineExtents(self, ve):
        """
        Reads grid dimensions, coordinate variables and the gravity variable
        name from the netCDF file, then computes origin and spacing.

        :param ve: vertical extents tuple (alt_min, alt_max, num_points)
        """
        self.logger.debug("Data Source: %s"%self.__gravfile)
        f=netcdf_file(self.__gravfile, 'r')
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables
        lon_name=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon_name=n
                # pop the coordinate variables so the gravity variable
                # search below cannot accidentally pick them up
                longitude=f.variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat_name=n
                latitude=f.variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out gravity variable name
        grav_name=None
        if len(f.variables)==1:
            # only one variable left after removing lon/lat -> must be it.
            # list() so this also works with Python 3 dictionary views.
            grav_name=list(f.variables.keys())[0]
        else:
            for n in list(f.variables.keys()):
                dims=f.variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    grav_name=n
                    break
        if grav_name is None:
            raise RuntimeError("Could not determine gravity variable")

        # see if there is a wkt string to convert coordinates
        try:
            wkt_string=f.variables[grav_name].esri_pe_string
        except AttributeError:
            wkt_string=None

        # we don't trust actual_range & geospatial_lon_min/max since subset
        # data does not seem to have these fields updated.
        # Getting min/max from the arrays is obviously not very efficient...
        #lon_range=longitude.actual_range
        #lat_range=latitude.actual_range
        #lon_range=[f.geospatial_lon_min,f.geospatial_lon_max]
        #lat_range=[f.geospatial_lat_min,f.geospatial_lat_max]
        lon_range=longitude.data.min(),longitude.data.max()
        lat_range=latitude.data.min(),latitude.data.max()
        lon_range,lat_range=LatLonToUTM(lon_range, lat_range, wkt_string)
        origin=[lon_range[0],lat_range[0],ve[0]]
        lengths=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0],ve[1]-ve[0]]

        f.close()

        self.__nPts=[NX, NY, ve[2]]
        self.__origin=origin
        # we are rounding to avoid interpolateOnDomain issues
        self.__delta=[np.round(lengths[i]/(self.__nPts[i]-1)) for i in range(3)]
        self.__wkt_string=wkt_string
        self.__lon=lon_name
        self.__lat=lat_name
        self.__grv=grav_name

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return self.__domainclass

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly (as a vertical vector component) and
        standard deviation interpolated onto the domain.
        """
        gravlist=self._readGravity() # x,y,z,g,s
        g_and_sigma=self._interpolateOnDomain(gravlist)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def _readTopography(self):
        """
        Reads and returns topography as an (n,3) array of lon,lat,altitude.
        """
        f=netcdf_file(self.__topofile, 'r')
        lon=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon=f.variables[n][:]
                break
        if lon is None:
            raise RuntimeError("Could not determine longitude variable")
        lat=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat=f.variables[n][:]
                break
        if lat is None:
            raise RuntimeError("Could not determine latitude variable")
        alt=None
        for n in ['altitude','alt']:
            if n in f.variables:
                alt=f.variables[n][:]
                break
        if alt is None:
            raise RuntimeError("Could not determine altitude variable")

        topodata=np.column_stack((lon,lat,alt))
        f.close()
        return topodata

    def _readGravity(self):
        """
        Reads and returns gravity records as an (n,5) array of
        lon, lat, altitude, anomaly and standard deviation (converted
        to escript units).
        """
        f=netcdf_file(self.__gravfile, 'r')
        #lon=f.variables[self.__lon][:]
        #lat=f.variables[self.__lat][:]
        NE=[self.__nPts[i]-1 for i in range(2)]
        lon=np.linspace(self.__origin[0], self.__origin[0]+NE[0]*self.__delta[0], NE[0]+1)
        lat=np.linspace(self.__origin[1], self.__origin[1]+NE[1]*self.__delta[1], NE[1]+1)
        lon,lat=np.meshgrid(lon,lat)
        grav=f.variables[self.__grv][:]
        # read the missing-data marker while the file is still open --
        # the original code accessed f.variables after f.close(), which
        # fails with scipy's (mmap-based) netcdf reader and silently
        # disabled the missing-value handling
        try:
            missing=f.variables[self.__grv].missing_value
        except AttributeError:
            missing=None
        f.close()
        lon=lon.flatten()
        lat=lat.flatten()
        grav=grav.flatten()
        alt=self.__altOfData*np.ones(grav.shape)
        # error value is an assumption
        if missing is not None:
            # zero error weight where the value is the missing marker
            err=np.where(grav==missing, 0.0, 20.0)
        else:
            err=20.0*np.ones(lon.shape)
        # convert units
        err=1e-6*err
        grav=1e-6*grav
        gravdata=np.column_stack((lon,lat,alt,grav,err))
        return gravdata
548
549 ##############################################################################
class ERSDataSource(DataSource):
    """
    Data Source for ER Mapper raster data.
    Note that this class only accepts a very specific type of ER Mapper data
    input and will raise an exception if other data is found.
    """
    def __init__(self, headerfile, datafile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
        """
        :param headerfile: usually ends in .ers
        :param datafile: usually has the same name as the headerfile
                         without '.ers'
        :param vertical_extents: (alt_min, alt_max, num_points)
        :param alt_of_data: altitude of measurements
        """
        super(ERSDataSource,self).__init__()
        self.__headerfile=headerfile
        if datafile is None:
            # by convention the data file name is the header name minus '.ers'
            self.__datafile=headerfile[:-4]
        else:
            self.__datafile=datafile
        self.__readHeader(vertical_extents)
        self.__altOfData=alt_of_data

    def __readHeader(self, ve):
        """
        Parses the ERS header file, validates the cell type and populates
        origin, spacing, extents and the null-cell (mask) value.

        :param ve: vertical extents tuple (alt_min, alt_max, num_points)
        """
        self.logger.debug("Data Source: %s (header: %s)"%(self.__datafile, self.__headerfile))
        # context manager so the header file handle does not leak
        with open(self.__headerfile, 'r') as f:
            metadata=f.readlines()
        # parse metadata
        start=-1
        for i in range(len(metadata)):
            if metadata[i].strip() == 'DatasetHeader Begin':
                start=i+1
        if start==-1:
            raise RuntimeError('Invalid ERS file (DatasetHeader not found)')

        # build a flat dictionary of 'Section.Subsection.Key' -> value
        md_dict={}
        section=[]
        for i in range(start, len(metadata)):
            line=metadata[i]
            if line[-6:].strip() == 'Begin':
                section.append(line[:-6].strip())
            elif line[-4:].strip() == 'End':
                if len(section)>0:
                    section.pop()
            else:
                vals=line.split('=')
                if len(vals)==2:
                    key = vals[0].strip()
                    value = vals[1].strip()
                    fullkey='.'.join(section+[key])
                    md_dict[fullkey]=value

        try:
            if md_dict['RasterInfo.CellType'] != 'IEEE4ByteReal':
                raise RuntimeError('Unsupported data type '+md_dict['RasterInfo.CellType'])
        except KeyError:
            self.logger.warn("Cell type not specified. Assuming IEEE4ByteReal.")

        try:
            NX = int(md_dict['RasterInfo.NrOfCellsPerLine'])
            NY = int(md_dict['RasterInfo.NrOfLines'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine extents of data")

        try:
            maskval = float(md_dict['RasterInfo.NullCellValue'])
        except (KeyError, ValueError):
            maskval = 99999

        try:
            spacingX = float(md_dict['RasterInfo.CellInfo.Xdimension'])
            spacingY = float(md_dict['RasterInfo.CellInfo.Ydimension'])
        except (KeyError, ValueError):
            raise RuntimeError("Could not determine cell dimensions")

        try:
            if md_dict['CoordinateSpace.CoordinateType']=='EN':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Eastings'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Northings'])
            elif md_dict['CoordinateSpace.CoordinateType']=='LL':
                originX = float(md_dict['RasterInfo.RegistrationCoord.Longitude'])
                originY = float(md_dict['RasterInfo.RegistrationCoord.Latitude'])
            else:
                raise RuntimeError("Unknown CoordinateType")
        # RuntimeError is included on purpose: an unknown CoordinateType
        # falls back to a (0,0) origin, as in the original behaviour
        except (KeyError, ValueError, RuntimeError):
            self.logger.warn("Could not determine coordinate origin. Setting to (0.0, 0.0)")
            originX,originY = 0.0, 0.0

        if 'GEODETIC' in md_dict['CoordinateSpace.Projection']:
            # it appears we have lat/lon coordinates so need to convert
            # origin and spacing. Try using gdal to get the wkt if available:
            try:
                from osgeo import gdal
                ds=gdal.Open(self.__headerfile)
                wkt=ds.GetProjection()
            except Exception:
                wkt='GEOGCS["GEOCENTRIC DATUM of AUSTRALIA",DATUM["GDA94",SPHEROID["GRS80",6378137,298.257222101]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]]'
                self.logger.warn('GDAL not available or file read error, assuming GDA94 data')
            originX_UTM,originY_UTM=LatLonToUTM(originX, originY, wkt)
            op1X,op1Y=LatLonToUTM(originX+spacingX, originY+spacingY, wkt)
            # we are rounding to avoid interpolateOnDomain issues
            spacingX=np.round(op1X-originX_UTM)
            spacingY=np.round(op1Y-originY_UTM)
            originX=np.round(originX_UTM)
            originY=np.round(originY_UTM)

        # data sets have origin in top-left corner so y runs top-down
        self.__dataorigin=[originX, originY]
        originY-=(NY-1)*spacingY
        self.__maskval=maskval
        spacingZ=np.round(float(ve[1]-ve[0])/(ve[2]-1))
        self.__delta = [spacingX, spacingY, spacingZ]
        self.__nPts = [NX, NY, ve[2]]
        self.__origin = [originX, originY, ve[0]]

    def getDataExtents(self):
        """
        returns ( (x0, y0), (nx, ny), (dx, dy) )
        """
        return (self.__origin[:2], self.__nPts[:2], self.__delta[:2])

    def getVerticalExtents(self):
        """
        returns (z0, nz, dz)
        """
        return (self.__origin[2], self.__nPts[2], self.__delta[2])

    def getDomainClass(self):
        """
        returns the domain generator class (e.g. esys.ripley.Brick)
        """
        return Brick

    def getGravityAndStdDev(self):
        """
        Reads the binary grid directly into a data object on the domain and
        returns the gravity anomaly (vertical vector component) and a
        standard deviation estimate (zero at null cells).
        """
        nValues=self.__nPts[:2]+[1]
        first=self._dom_NE_pad[:2]+[self._dom_NE_pad[2]+int((self.__altOfData-self.__origin[2])/self.__delta[2])]
        g=ripleycpp._readBinaryGrid(self.__datafile, Function(self.getDomain()),
                first, nValues, (), self.__maskval)
        # sigma is zero exactly where the grid holds the null cell value
        sigma=whereNonZero(g-self.__maskval)
        g=g*1e-6
        sigma=sigma*2e-6
        return g*[0,0,1], sigma
688
689
690 ##############################################################################
class SourceFeature(object):
    """
    Abstract base for features of a synthetic data source.

    A feature contributes a density distribution to (parts of) the domain,
    e.g. a layer of a specific rock type or a simulated ore body. Concrete
    features implement both accessors below.
    """

    def getDensity(self):
        """
        Returns the density to apply inside this feature's mask; may be a
        constant or a data object with spatial dependency.
        """
        raise NotImplementedError

    def getMask(self, x):
        """
        Returns the mask for this feature at coordinates ``x``: non-zero
        wherever the density from getDensity() applies, zero elsewhere.
        """
        raise NotImplementedError
710
class SmoothAnomaly(SourceFeature):
    """
    A box-shaped anomaly feature whose density decays smoothly from
    rho_inner towards rho_outer within the box extents.
    """
    def __init__(self, lx, ly, lz, x, y, depth, rho_inner, rho_outer):
        """
        :param lx,ly,lz: extents of the box in each direction
        :param x,y: horizontal centre of the box
        :param depth: depth of the box centre below the domain top
        :param rho_inner: density at the centre of the anomaly
        :param rho_outer: density towards the box boundary
        """
        self.x = x
        self.y = y
        self.lx = lx
        self.ly = ly
        self.lz = lz
        self.depth = depth
        self.rho_inner = rho_inner
        self.rho_outer = rho_outer
        # the density field is computed lazily on the first getMask() call
        self.rho = None

    def getDensity(self):
        # None until getMask() has been called at least once
        return self.rho

    def getMask(self, x):
        """
        Returns the box mask at coordinates ``x`` and, on first call, also
        computes and caches the smooth density field for getDensity().
        """
        DIM = x.getDomain().getDim()
        # vertical centre of the box, measured down from the domain top
        z_centre = sup(x[DIM-1]) - self.depth
        mask = whereNonNegative(x[DIM-1] - (z_centre - self.lz/2)) \
             * whereNonPositive(x[DIM-1] - (z_centre + self.lz/2)) \
             * whereNonNegative(x[0] - (self.x - self.lx/2)) \
             * whereNonPositive(x[0] - (self.x + self.lx/2))
        if DIM > 2:
            mask = mask * whereNonNegative(x[1] - (self.y - self.ly/2)) \
                        * whereNonPositive(x[1] - (self.y + self.ly/2))
        if self.rho is None:
            # Gaussian profile chosen so the density reaches roughly
            # rho_outer at the box boundary
            alpha = -log(abs(self.rho_outer / self.rho_inner)) * 4
            profile = exp(-alpha * ((x[0] - self.x) / self.lx)**2)
            profile = profile * exp(-alpha * ((x[DIM-1] - z_centre) / self.lz)**2)
            self.rho = maximum(abs(self.rho_outer), abs(self.rho_inner * profile))
            if self.rho_inner < 0:
                self.rho = -self.rho
        return mask
739
740 ##############################################################################
class SyntheticDataSource(DataSource):
    """
    A data source that computes synthetic gravity data from a list of
    SourceFeature objects instead of reading measurements from file.
    """
    def __init__(self, DIM, NE, l, h, features):
        """
        :param DIM: spatial dimensionality (2 or 3)
        :param NE: number of elements in the vertical direction
        :param l: horizontal length of the data area
        :param h: vertical extent of the data area
        :param features: list of SourceFeature objects that make up the
                         reference density
        """
        super(SyntheticDataSource,self).__init__()
        self._features = features
        self.DIM=DIM
        self.NE=NE
        self.l=l
        self.h=h

    def _createDomain(self, padding_l, padding_h):
        """
        Creates a finley Rectangle (2D) or Brick (3D) spanning the data
        area plus padding, and precomputes the reference density, the
        density mask and the gravity data mask.
        """
        NE_H=self.NE
        # horizontal element count chosen so cells are (roughly) square
        NE_L=int((self.l/self.h)*NE_H+0.5)
        l=[self.l]*(self.DIM-1)+[self.h]
        NE=[NE_L]*(self.DIM-1)+[NE_H]
        origin=[0.]*self.DIM
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, origin)

        self.NE=NE_new
        self.l=l_new[0]
        self.h=l_new[self.DIM-1]

        self.logger.debug("Data Source: synthetic with %d features"%len(self._features))
        if self.DIM==2:
            from esys.finley import Rectangle
            dom = Rectangle(n0=NE_new[0], n1=NE_new[1], l0=l_new[0], l1=l_new[1])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d elements"%(NE_new[0], NE_new[1]))
            self.logger.debug(" length: %g x %g"%(l_new[0],l_new[1]))
            self.logger.debug(" origin: %g x %g"%(origin_new[0],origin_new[1]))
        else:
            from esys.finley import Brick
            dom = Brick(n0=NE_new[0], n1=NE_new[1], n2=NE_new[2], l0=l_new[0], l1=l_new[1], l2=l_new[2])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
            self.logger.debug(" length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
            self.logger.debug(" origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        # mask selecting (roughly) the top element row inside the data area
        # where synthetic gravity readings are taken
        dz=l_new[self.DIM-1]/NE_new[self.DIM-1]
        self._g_mask=wherePositive(dom.getX()[0]-origin_new[0]) \
                * whereNegative(dom.getX()[0]-(l_new[0]-origin_new[0])) \
                * whereNonNegative(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+origin_new[self.DIM-1])) \
                * whereNonPositive(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+(origin_new[self.DIM-1]+dz)))
        # density mask: 1 in the padding area, 0 inside the data area
        self._mask=whereNegative(self._x[self.DIM-1]) + \
                wherePositive(self._x[self.DIM-1]-l[self.DIM-1])
        for i in xrange(self.DIM-1):
            self._mask=self._mask + whereNegative(self._x[i]) + \
                    wherePositive(self._x[i]-l[i])
        self._mask=wherePositive(self._mask)

        # compose the reference density: later features overwrite earlier
        # ones wherever their masks overlap
        rho_ref=0.
        for f in self._features:
            m=f.getMask(self._x)
            rho_ref = rho_ref * (1-m) + f.getDensity() * m
        self._rho=rho_ref

        return dom

    def getDensityMask(self):
        """
        Returns the density mask (1 in the padding area, 0 elsewhere).
        """
        return self._mask

    def getReferenceDensity(self):
        """
        Returns the reference density composed from the source features.
        """
        return self._rho

    def getGravityAndStdDev(self):
        """
        Computes the synthetic gravity field by solving a potential PDE for
        the reference density and returns (gravity, sigma), where sigma is
        the gravity data mask.
        """
        pde=LinearSinglePDE(self.getDomain())
        G=6.6742e-11*U.m**3/(U.kg*U.sec**2)
        # fix the potential on the entire domain boundary
        m_psi_ref=0.
        for i in xrange(self.DIM):
            m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
                    + whereZero(self._x[i]-sup(self._x[i]))

        pde.setValue(A=kronecker(self.getDomain()), Y=-4*np.pi*G*self._rho, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        # gravity is the negative gradient of the potential
        g=-grad(psi_ref)
        sigma=self._g_mask
        return g,sigma
820

  ViewVC Help
Powered by ViewVC 1.1.26