########################################################
#
# Copyright (c) 2003-2012 by University of Queensland
# Earth Systems Science Computational Center (ESSCC)
# http://www.uq.edu.au/esscc
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
########################################################

__copyright__="""Copyright (c) 2003-2012 by University of Queensland
Earth Systems Science Computational Center (ESSCC)
http://www.uq.edu.au/esscc
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Open Software License version 3.0
http://www.opensource.org/licenses/osl-3.0.php"""
__url__="https://launchpad.net/escript-finley"

__all__ = ['DataSource','UBCDataSource','SyntheticDataSource','SmoothAnomaly']

import logging
import numpy as np
from esys.escript import *
from esys.escript.linearPDEs import *
import esys.escript.unitsSI as U
try:
    from scipy.io.netcdf import netcdf_file
    __all__ += ['NetCDFDataSource']
except ImportError:
    pass


class DataSource(object):
    """
    A class that provides survey data for the inversion process.
    """
    # this is currently specific to gravity inversion and should be generalised

    def __init__(self):
        """
        Initializes the data source with default padding and a logger.
        """
        self._domain=None
        self._pad_l=0.1
        self._pad_h=0.1
        self.logger = logging.getLogger('inv.%s'%self.__class__.__name__)

    def _addPadding(self, pad_l, pad_h, NE, l, origin):
        """
        Helper method that computes new number of elements, length and origin
        after adding padding to the input values.
        """
        DIM=len(NE)
        frac=[0.]*DIM
        # padding is applied to each side so multiply by 2 to get the total
        # amount of padding per dimension
        if pad_l>0 and pad_l<1:
            for i in xrange(DIM-1):
                frac[i]=2*pad_l
        elif pad_l>=1:
            for i in xrange(DIM-1):
                frac[i]=2*pad_l/float(NE[i])
        if pad_h>0 and pad_h<1:
            frac[DIM-1]=2*pad_h
        elif pad_h>=1:
            frac[DIM-1]=2*pad_h/float(NE[DIM-1])
        # calculate new number of elements
        NE_new=[int(NE[i]*(1+frac[i])) for i in xrange(DIM)]
        NEdiff=[NE_new[i]-NE[i] for i in xrange(DIM)]
        spacing=[l[i]/NE[i] for i in xrange(DIM)]
        l_new=[NE_new[i]*spacing[i] for i in xrange(DIM)]
        origin_new=[origin[i]-NEdiff[i]/2.*spacing[i] for i in xrange(DIM)]
        return NE_new, l_new, origin_new
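    # Worked example (sketch, illustrative values only): for NE=[40,40,20],
    # l=[4000.,4000.,2000.], origin=[0.,0.,-2000.], pad_l=0.1 and pad_h=0.25
    # the horizontal fractions become 0.2 and the vertical fraction 0.5, so
    # NE_new=[48,48,30], l_new=[4800.,4800.,3000.] and, with the extra
    # elements split evenly between both sides, origin_new=[-400.,-400.,-2500.].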

    def _interpolateOnDomain(self, data, shape, origin, spacing, length):
        """
        Helper method that interpolates data arrays onto the domain.
        """
        dim=len(shape)
        arrays=np.zeros(((len(data[0])-dim),)+tuple(shape))
        for entry in data:
            index=()
            for i in range(dim):
                index=(int((entry[i]-origin[i])/spacing[i]),)+index
            for i in range(arrays.shape[0]):
                arrays[i][index]=entry[dim+i]
        dom=self.getDomain()
        x=dom.getX()
        delta=[length[i]/(shape[dim-i-1]-1) for i in xrange(dim)]
        realorigin=[inf(x[i]) for i in xrange(dim)]
        res=[]
        for i in range(arrays.shape[0]):
            res.append(interpolateTable(arrays[i], x[:dim], realorigin, delta, 1e9))
        return res
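    # Each row of 'data' is assumed to hold the point coordinates followed by
    # one or more values, e.g. [x, y, z, gravity, stddev]; the values are
    # binned onto a regular grid of the given shape and then mapped onto the
    # domain with interpolateTable (the trailing 1e9 appears to act as the
    # fill value for points outside the table).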

    def setPadding(self, pad_l=0.1, pad_h=0.1):
        """
        Sets the amount of padding around the dataset. If pad_l/pad_h is >=1
        they are treated as the number of elements to be added to the domain.
        If 0 < pad_l, pad_h < 1, the padding amount is relative.
        """
        self._pad_l=pad_l
        self._pad_h=pad_h
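    # Example (sketch; 'src' stands for any concrete DataSource subclass):
    #   src.setPadding(pad_l=0.2, pad_h=0.1)  # 20%/10% relative padding
    #   src.setPadding(pad_l=8, pad_h=4)      # ~8/4 extra elements on each side
    # Call this before the first getDomain() call, since the domain is cached.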

    def getDomain(self):
        """
        Returns a domain that spans the data area plus padding.
        """
        if self._domain is None:
            self._domain=self._createDomain(self._pad_l, self._pad_h)
        return self._domain

    def getDensityMask(self):
        """
        Returns the density mask data object, which has value 1 in the padding
        area and 0 elsewhere.
        """
        raise NotImplementedError

    def getGravityAndStdDev(self):
        """
        Returns the gravity anomaly and standard deviation data objects as a
        tuple.
        """
        raise NotImplementedError

    def _createDomain(self, padding_l, padding_h):
        """
        Creates and returns an escript domain that spans the entire area of
        available data plus a buffer zone.
        """
        raise NotImplementedError
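
# A minimal usage sketch (illustrative only; 'source' stands for any concrete
# DataSource subclass defined below):
#
#   source.setPadding(pad_l=0.2, pad_h=0.2)
#   dom = source.getDomain()
#   rho_mask = source.getDensityMask()
#   g, sigma = source.getGravityAndStdDev()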

##############################################################################
class UBCDataSource(DataSource):
    def __init__(self, domainclass, meshfile, gravfile, topofile=None):
        super(UBCDataSource,self).__init__()
        self._meshfile=meshfile
        self._gravfile=gravfile
        self._topofile=topofile
        self._domainclass=domainclass
        self._readMesh()

    def getDensityMask(self):
        #topodata=self._readTopography()
        #shape=[self.NE[1]+1, self.NE[0]+1]
        #mask=self._interpolateOnDomain(topodata, shape, self._origin, self._spacing, self._meshlen)
        #mask=wherePositive(self.getDomain().getX()[2]-mask[0])
        return self._mask

    def getGravityAndStdDev(self):
        gravlist=self._readGravity() # x,y,z,g,s
        shape=[self.NE[2]+1, self.NE[1]+1, self.NE[0]+1]
        g_and_sigma=self._interpolateOnDomain(gravlist, shape, self._origin, self._spacing, self._meshlen)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def _readMesh(self):
        meshdata=open(self._meshfile).readlines()
        numDataPoints=meshdata[0].split()
        origin=meshdata[1].split()
        self._numDataPoints=[int(x) for x in numDataPoints]
        self._origin=[float(x) for x in origin]
        self._spacing=[float(X.split('*')[1]) for X in meshdata[2:]]
        # vertical data is upside down
        self._origin[2]-=(self._numDataPoints[2]-1)*self._spacing[2]
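    # _readMesh assumes a UBC-style mesh file laid out as (sketch only):
    #   NX NY NZ        <- number of data points per dimension
    #   X0 Y0 Z0        <- mesh origin (top of the model, hence the shift above)
    #   N*DX            <- one "count*spacing" entry per remaining line
    #   N*DY
    #   N*DZ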

    def _createDomain(self, padding_l, padding_h):
        NE=[self._numDataPoints[i]-1 for i in xrange(3)]
        l=[NE[i]*self._spacing[i] for i in xrange(3)]
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, self._origin)

        self._meshlen=l_new
        self.NE=NE_new
        self._origin=origin_new
        lo=[(self._origin[i], self._origin[i]+l_new[i]) for i in xrange(3)]
        NEdiff=[NE_new[i]-NE[i] for i in xrange(3)]
        try:
            dom=self._domainclass(*self.NE, l0=lo[0], l1=lo[1], l2=lo[2])
            x=dom.getX()-[self._origin[i]+NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(dom.getX()[2])

        except TypeError:
            dom=self._domainclass(*self.NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
            x=dom.getX()-[NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(x[2]+self._origin[2])

        M=2 # do not constrain bottom
        #M=3 # constrain bottom
        for i in xrange(M):
            mask=mask + whereNegative(x[i]) + \
                    wherePositive(x[i]-l_new[i]+NEdiff[i]*self._spacing[i])
        self._mask=wherePositive(mask)

        self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
        self.logger.debug("       length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
        self.logger.debug("       origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        return dom

    def _readTopography(self):
        f=open(self._topofile)
        n=int(f.readline())
        topodata=np.zeros((n,3))
        for i in xrange(n):
            x=f.readline().split()
            x[0]=float(x[0])
            x[1]=float(x[1])
            x[2]=float(x[2])
            topodata[i]=x
        f.close()
        return topodata

    def _readGravity(self):
        f=open(self._gravfile)
        n=int(f.readline())
        gravdata=np.zeros((n,5))
        for i in xrange(n):
            x=f.readline().split()
            x[0]=float(x[0]) # x
            x[1]=float(x[1]) # y
            x[2]=float(x[2]) # z
            x[3]=float(x[3]) # gravity anomaly in mGal
            x[4]=float(x[4]) # stddev
            # convert gravity anomaly units to m/s^2 and rescale error
            x[3]*=-1e-5
            x[4]*=1e-5
            gravdata[i]=x
        f.close()
        return gravdata
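    # The gravity file is expected to start with the number of records on its
    # first line, followed by one "x y z anomaly stddev" record per line, with
    # the anomaly and stddev given in mGal (converted to m/s^2 above). Sketch:
    #   2
    #   100.0 200.0 0.0 1.5 0.1
    #   150.0 200.0 0.0 1.2 0.1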

##############################################################################
class NetCDFDataSource(DataSource):
    def __init__(self, domainclass, gravfile, topofile=None, vertical_extents=(-40000,10000,26), alt_of_data=0.):
        """
        vertical_extents - (alt_min, alt_max, num_elements)
        alt_of_data - altitude of measurements
        """
        super(NetCDFDataSource,self).__init__()
        self._topofile=topofile
        self._gravfile=gravfile
        self._domainclass=domainclass
        self._determineExtents(vertical_extents)
        self._altOfData=alt_of_data

    def getDensityMask(self):
        #topodata=self._readTopography()
        #shape=self._numDataPoints[1::-1]
        #mask=self._interpolateOnDomain(topodata, shape, self._origin, self._spacing, self._meshlen)
        #mask=wherePositive(self.getDomain().getX()[2]-mask[0])
        #rho=fill*(1.-mask) + RHO_AIR*mask
        return self._mask

    def getGravityAndStdDev(self):
        gravlist=self._readGravity() # x,y,z,g,s
        shape=[self.NE[2]+1, self.NE[1]+1, self.NE[0]+1]
        g_and_sigma=self._interpolateOnDomain(gravlist, shape, self._origin, self._spacing, self._meshlen)
        return g_and_sigma[0]*[0,0,1], g_and_sigma[1]

    def _determineExtents(self, ve):
        f=netcdf_file(self._gravfile, 'r')
        NX=0
        for n in ['lon','longitude','x']:
            if n in f.dimensions:
                NX=f.dimensions[n]
                break
        if NX==0:
            raise RuntimeError("Could not determine extents of data")
        NY=0
        for n in ['lat','latitude','y']:
            if n in f.dimensions:
                NY=f.dimensions[n]
                break
        if NY==0:
            raise RuntimeError("Could not determine extents of data")

        # find longitude and latitude variables
        variables=f.variables
        lon_name=None
        for n in ['lon','longitude']:
            if n in variables:
                lon_name=n
                variables.pop(n)
                break
        if lon_name is None:
            raise RuntimeError("Could not determine longitude variable")
        lat_name=None
        for n in ['lat','latitude']:
            if n in variables:
                lat_name=n
                variables.pop(n)
                break
        if lat_name is None:
            raise RuntimeError("Could not determine latitude variable")

        # try to figure out gravity variable name
        grav_name=None
        if len(variables)==1:
            grav_name=variables.keys()[0]
        else:
            for n in variables.keys():
                dims=variables[n].dimensions
                if (lat_name in dims) and (lon_name in dims):
                    grav_name=n
                    break
        if grav_name is None:
            raise RuntimeError("Could not determine gravity variable")

        origin=[0.,0.,ve[0]]
        lengths=[1.,1.,ve[1]-ve[0]]
        try:
            lon_range=f.variables[lon_name].actual_range
            lat_range=f.variables[lat_name].actual_range
            #origin[:2]=[lon_range[0],lat_range[0]]
            lengths[:2]=[lon_range[1]-lon_range[0], lat_range[1]-lat_range[0]]
        except:
            try:
                #origin[:2]=[f.geospatial_lon_min, f.geospatial_lat_min]
                lengths[:2]=[f.geospatial_lon_max-origin[0], f.geospatial_lat_max-origin[1]]
            except:
                pass

        # hack
        # grid spacing ~800m
        SPACING_X=SPACING_Y=800.
        lengths[0]=(NX-1)*SPACING_X
        lengths[1]=(NY-1)*SPACING_Y
        f.close()
        self._numDataPoints=[NX, NY, ve[2]]
        self._origin=origin
        self._meshlen=lengths
        self._spacing=[lengths[i]/(self._numDataPoints[i]-1) for i in xrange(3)]
        self._lon=lon_name
        self._lat=lat_name
        self._grv=grav_name
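        # Note: whatever lon/lat extents were read above are overridden by the
        # ~800 m spacing hack, so the horizontal lengths end up as (N-1)*800 m
        # and the horizontal origin stays at (0, 0).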

    def _createDomain(self, padding_l, padding_h):
        NE=[self._numDataPoints[i]-1 for i in xrange(3)]
        l=self._meshlen
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, self._origin)

        self._meshlen=l_new
        self.NE=NE_new
        self._origin=origin_new
        lo=[(self._origin[i], self._origin[i]+l_new[i]) for i in xrange(3)]
        NEdiff=[NE_new[i]-NE[i] for i in xrange(3)]
        try:
            dom=self._domainclass(*self.NE, l0=lo[0], l1=lo[1], l2=lo[2])
            # ripley may adjust NE and length
            self._meshlen=[sup(dom.getX()[i])-inf(dom.getX()[i]) for i in xrange(3)]
            self.NE=[self._meshlen[i]/self._spacing[i] for i in xrange(3)]
            x=dom.getX()-[self._origin[i]+NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(dom.getX()[2])

        except TypeError:
            dom=self._domainclass(*self.NE, l0=l_new[0], l1=l_new[1], l2=l_new[2])
            x=dom.getX()-[NEdiff[i]/2.*self._spacing[i] for i in xrange(3)]
            mask=wherePositive(x[2]+self._origin[2])

        M=2 # do not constrain bottom
        #M=3 # constrain bottom
        for i in xrange(M):
            mask=mask + whereNegative(x[i]) + \
                    wherePositive(x[i]-l_new[i]+NEdiff[i]*self._spacing[i])
        self._mask=wherePositive(mask)

        self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
        self.logger.debug("       length: %s x %s x %s"%(self._meshlen[0],self._meshlen[1],self._meshlen[2]))
        self.logger.debug("       origin: %s x %s x %s"%(self._origin[0],self._origin[1],self._origin[2]))

        return dom

    def _readTopography(self):
        f=netcdf_file(self._topofile, 'r')
        lon=None
        for n in ['lon','longitude']:
            if n in f.variables:
                lon=f.variables[n][:]
                break
        if lon is None:
            raise RuntimeError("Could not determine longitude variable")
        lat=None
        for n in ['lat','latitude']:
            if n in f.variables:
                lat=f.variables[n][:]
                break
        if lat is None:
            raise RuntimeError("Could not determine latitude variable")
        alt=None
        for n in ['altitude','alt']:
            if n in f.variables:
                alt=f.variables[n][:]
                break
        if alt is None:
            raise RuntimeError("Could not determine altitude variable")

        topodata=np.column_stack((lon,lat,alt))
        f.close()
        return topodata

    def _readGravity(self):
        f=netcdf_file(self._gravfile, 'r')
        #lon=f.variables[self._lon][:]
        #lat=f.variables[self._lat][:]
        grav=f.variables[self._grv][:]
        f.close()
        lon=np.linspace(0, (grav.shape[1]-1)*self._spacing[0], num=grav.shape[1])
        lat=np.linspace(0, (grav.shape[0]-1)*self._spacing[1], num=grav.shape[0])
        lon,lat=np.meshgrid(lon,lat)
        lon=lon.flatten()
        lat=lat.flatten()
        grav=grav.flatten()
        alt=self._altOfData*np.ones(grav.shape)
        try:
            missing=grav.missing_value
            err=np.where(grav==missing, 1.0, 0.0)
        except:
            err=np.ones(lon.shape)
        err=1e-6*err
        grav=1e-6*grav
        gravdata=np.column_stack((lon,lat,alt,grav,err))
        return gravdata
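
# Constructor sketch for NetCDFDataSource (illustrative only; 'Brick' from
# esys.ripley and the file name are assumptions, not part of this module):
#
#   from esys.ripley import Brick
#   source = NetCDFDataSource(Brick, 'gravity.nc',
#                             vertical_extents=(-40000, 10000, 26),
#                             alt_of_data=0.)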

class SmoothAnomaly(object):
    def __init__(self, lx, ly, lz, x, y, depth, rho_inner, rho_outer):
        self.x=x
        self.y=y
        self.lx=lx
        self.ly=ly
        self.lz=lz
        self.depth=depth
        self.rho_inner=rho_inner
        self.rho_outer=rho_outer
        self.rho=None

    def getMask(self, x):
        DIM=x.getDomain().getDim()
        m=whereNonNegative(x[DIM-1]-(sup(x[DIM-1])-self.depth-self.lz/2)) * whereNonPositive(x[DIM-1]-(sup(x[DIM-1])-self.depth+self.lz/2)) \
            * whereNonNegative(x[0]-(self.x-self.lx/2)) * whereNonPositive(x[0]-(self.x+self.lx/2))
        if DIM>2:
            m*=whereNonNegative(x[1]-(self.y-self.ly/2)) * whereNonPositive(x[1]-(self.y+self.ly/2))
        if self.rho is None:
            alpha=-log(abs(self.rho_outer/self.rho_inner))*4
            rho=exp(-alpha*((x[0]-self.x)/self.lx)**2)
            rho=rho*exp(-alpha*((x[DIM-1]-(sup(x[DIM-1])-self.depth))/self.lz)**2)
            self.rho=maximum(abs(self.rho_outer), abs(self.rho_inner*rho))
            if self.rho_inner<0: self.rho=-self.rho
        return m
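
# Example anomaly (illustrative values only): a 500 m thick body of
# 2000 m x 2000 m horizontal extent centred at (x, y)=(3000, 3000), buried
# 1000 m below the surface, with an inner density contrast of 200 kg/m^3
# decaying towards 1 kg/m^3 at the edges:
#
#   anomaly = SmoothAnomaly(lx=2000., ly=2000., lz=500., x=3000., y=3000.,
#                           depth=1000., rho_inner=200., rho_outer=1.)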

class SyntheticDataSource(DataSource):
    def __init__(self, DIM, NE, l, h, features):
        super(SyntheticDataSource,self).__init__()
        self._features = features
        self.DIM=DIM
        self.NE=NE
        self.l=l
        self.h=h

    def _createDomain(self, padding_l, padding_h):
        NE_H=self.NE
        NE_L=int((self.l/self.h)*NE_H+0.5)
        l=[self.l]*(self.DIM-1)+[self.h]
        NE=[NE_L]*(self.DIM-1)+[NE_H]
        origin=[0.]*self.DIM
        NE_new, l_new, origin_new = self._addPadding(padding_l, padding_h, \
                NE, l, origin)

        self.NE=NE_new
        self.l=l_new[0]
        self.h=l_new[self.DIM-1]

        if self.DIM==2:
            from esys.finley import Rectangle
            dom = Rectangle(n0=NE_new[0], n1=NE_new[1], l0=l_new[0], l1=l_new[1])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d elements"%(NE_new[0], NE_new[1]))
            self.logger.debug("       length: %g x %g"%(l_new[0],l_new[1]))
            self.logger.debug("       origin: %g x %g"%(origin_new[0],origin_new[1]))
        else:
            from esys.finley import Brick
            dom = Brick(n0=NE_new[0], n1=NE_new[1], n2=NE_new[2], l0=l_new[0], l1=l_new[1], l2=l_new[2])
            self._x = dom.getX() + origin_new
            self.logger.debug("Domain size: %d x %d x %d elements"%(self.NE[0],self.NE[1],self.NE[2]))
            self.logger.debug("       length: %g x %g x %g"%(l_new[0],l_new[1],l_new[2]))
            self.logger.debug("       origin: %g x %g x %g"%(origin_new[0],origin_new[1],origin_new[2]))

        dz=l_new[self.DIM-1]/NE_new[self.DIM-1]
        self._g_mask=wherePositive(dom.getX()[0]-origin_new[0]) \
                * whereNegative(dom.getX()[0]-(l_new[0]-origin_new[0])) \
                * whereNonNegative(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+origin_new[self.DIM-1])) \
                * whereNonPositive(dom.getX()[self.DIM-1]-(l_new[self.DIM-1]+(origin_new[self.DIM-1]+dz)))
        self._mask=whereNegative(self._x[self.DIM-1]) + \
                wherePositive(self._x[self.DIM-1]-l[self.DIM-1])
        for i in xrange(self.DIM-1):
            self._mask=self._mask + whereNegative(self._x[i]) + \
                    wherePositive(self._x[i]-l[i])
        self._mask=wherePositive(self._mask)

        rho_ref=0.
        for f in self._features:
            m=f.getMask(self._x)
            rho_ref = rho_ref * (1-m) + f.rho * m
        self._rho=rho_ref

        return dom

    def getDensityMask(self):
        return self._mask

    def getReferenceDensity(self):
        return self._rho

    def getGravityAndStdDev(self):
        pde=LinearSinglePDE(self.getDomain())
        G=6.6742e-11*U.m**3/(U.kg*U.sec**2)
        m_psi_ref=0.
        for i in range(self.DIM):
            m_psi_ref=m_psi_ref + whereZero(self._x[i]-inf(self._x[i])) \
                    + whereZero(self._x[i]-sup(self._x[i]))

        pde.setValue(A=kronecker(self.getDomain()), Y=-4*np.pi*G*self._rho, q=m_psi_ref)
        pde.setSymmetryOn()
        psi_ref=pde.getSolution()
        del pde
        g=-grad(psi_ref)
        sigma=self._g_mask
        return g,sigma
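
# End-to-end sketch for the synthetic case (illustrative values only; 'anomaly'
# refers to the SmoothAnomaly example above):
#
#   source = SyntheticDataSource(DIM=3, NE=20, l=10000., h=6000.,
#                                features=[anomaly])
#   dom = source.getDomain()
#   rho_ref = source.getReferenceDensity()
#   g, sigma = source.getGravityAndStdDev()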