
Contents of /trunk/downunder/test/python/run_datasources.py



Revision 4019 - Thu Oct 11 08:12:55 2012 UTC by jfenwick
File MIME type: text/x-python
File size: 6460 bytes
More tabbing errors,
range/xrange
...

##############################################################################
#
# Copyright (c) 2003-2012 by University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development since 2012 by School of Earth Sciences
#
##############################################################################

__copyright__="""Copyright (c) 2003-2012 by University of Queensland
http://www.uq.edu.au
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Open Software License version 3.0
http://www.opensource.org/licenses/osl-3.0.php"""
__url__="https://launchpad.net/escript-finley"

import logging
import numpy as np
import os
import sys
import unittest
from esys.escript import inf,sup,saveDataCSV,getMPISizeWorld
from esys.downunder.datasources import *

# this is mainly to avoid warning messages
logger=logging.getLogger('inv')
logger.setLevel(logging.INFO)
handler=logging.StreamHandler()
handler.setLevel(logging.INFO)
logger.addHandler(handler)

try:
    TEST_DATA_ROOT=os.environ['DOWNUNDER_TEST_DATA_ROOT']
except KeyError:
    TEST_DATA_ROOT='ref_data'

try:
    WORKDIR=os.environ['DOWNUNDER_WORKDIR']
except KeyError:
    WORKDIR='.'


ERS_DATA = os.path.join(TEST_DATA_ROOT, 'ermapper_test.ers')
ERS_REF = os.path.join(TEST_DATA_ROOT, 'ermapper_test.csv')
ERS_NULL = -99999 * 1e-6
ERS_SIZE = [20,15]
ERS_ORIGIN = [309097.0, 6319002.0]
NC_DATA = os.path.join(TEST_DATA_ROOT, 'netcdf_test.nc')
NC_REF = os.path.join(TEST_DATA_ROOT, 'netcdf_test.csv')
NC_NULL = 0.
NC_SIZE = [20,15]
NC_ORIGIN = [403320.91466610413, 6414860.942530109]
VMIN=-10000.
VMAX=10000
NE_V=15
ALT=0.
PAD_X=3
PAD_Y=2

class TestERSDataSource(unittest.TestCase):
    def test_ers_with_padding(self):
        source = ERSDataSource(headerfile=ERS_DATA, vertical_extents=(VMIN,VMAX,NE_V), alt_of_data=ALT)
        source.setPadding(PAD_X,PAD_Y)
        dom=source.getDomain()
        g,s=source.getGravityAndStdDev()

        outfn=os.path.join(WORKDIR, '_ersdata.csv')
        saveDataCSV(outfn, g=g[2], s=s)

        X0,NP,DX=source.getDataExtents()
        V0,NV,DV=source.getVerticalExtents()

        # check metadata
        self.assertEqual(NP, ERS_SIZE, msg="Wrong number of data points")
        # this test only works if gdal is available
        try:
            import osgeo.osr
            for i in range(len(ERS_ORIGIN)):
                self.assertAlmostEqual(X0[i], ERS_ORIGIN[i], msg="Data origin wrong")
        except ImportError:
            print("Skipping test of data origin since gdal is not installed.")

        # check data
        nx=NP[0]+2*PAD_X
        ny=NP[1]+2*PAD_Y
        nz=NE_V
        z_data=int(np.round((ALT-V0)/DV)-1)

        ref=np.genfromtxt(ERS_REF, delimiter=',', dtype=float)
        g_ref=ref[:,0].reshape((NP[1],NP[0]))
        s_ref=ref[:,1].reshape((NP[1],NP[0]))

        out=np.genfromtxt(outfn, delimiter=',', skip_header=1, dtype=float)
        # recompute nz since ripley might have adjusted number of elements
        nz=len(out)//(nx*ny)  # integer division so nz can be used in reshape
        g_out=out[:,0].reshape(nz,ny,nx)
        s_out=out[:,1].reshape(nz,ny,nx)
        self.assertAlmostEqual(np.abs(
            g_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]-g_ref).max(),
            0., msg="Difference in gravity data area")

        self.assertAlmostEqual(np.abs(
            s_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]-s_ref).max(),
            0., msg="Difference in error data area")

        # overwrite data -> should only be padding value left
        g_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]=ERS_NULL
        self.assertAlmostEqual(np.abs(g_out-ERS_NULL).max(), 0.,
            msg="Wrong values in padding area")

class TestNetCDFDataSource(unittest.TestCase):
    def test_cdf_with_padding(self):
        source = NetCDFDataSource(gravfile=NC_DATA, vertical_extents=(VMIN,VMAX,NE_V), alt_of_data=ALT)
        source.setPadding(PAD_X,PAD_Y)
        dom=source.getDomain()
        g,s=source.getGravityAndStdDev()

        outfn=os.path.join(WORKDIR, '_ncdata.csv')
        saveDataCSV(outfn, g=g[2], s=s)

        X0,NP,DX=source.getDataExtents()
        V0,NV,DV=source.getVerticalExtents()

        # check metadata
        self.assertEqual(NP, NC_SIZE, msg="Wrong number of data points")
        # this only works if gdal is available
        try:
            import osgeo.osr
            for i in range(len(NC_ORIGIN)):
                self.assertAlmostEqual(X0[i], NC_ORIGIN[i], msg="Data origin wrong")
        except ImportError:
            print("Skipping test of data origin since gdal is not installed.")

        # check data
        nx=NP[0]+2*PAD_X
        ny=NP[1]+2*PAD_Y
        nz=NE_V
        z_data=int(np.round((ALT-V0)/DV)-1)

        ref=np.genfromtxt(NC_REF, delimiter=',', dtype=float)
        g_ref=ref[:,0].reshape((NP[1],NP[0]))
        s_ref=ref[:,1].reshape((NP[1],NP[0]))

        out=np.genfromtxt(outfn, delimiter=',', skip_header=1, dtype=float)
        # recompute nz since ripley might have adjusted number of elements
        nz=len(out)//(nx*ny)  # integer division so nz can be used in reshape
        g_out=out[:,0].reshape(nz,ny,nx)
        s_out=out[:,1].reshape(nz,ny,nx)

        self.assertAlmostEqual(np.abs(
            g_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]-g_ref).max(),
            0., msg="Difference in gravity data area")

        self.assertAlmostEqual(np.abs(
            s_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]-s_ref).max(),
            0., msg="Difference in error data area")

        # overwrite data -> should only be padding value left
        g_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]=NC_NULL
        self.assertAlmostEqual(np.abs(g_out-NC_NULL).max(), 0.,
            msg="Wrong values in padding area")

if __name__ == "__main__":
    suite = unittest.TestSuite()
    if getMPISizeWorld()==1:
        suite.addTest(unittest.makeSuite(TestERSDataSource))
        if 'NetCDFDataSource' in dir():
            suite.addTest(unittest.makeSuite(TestNetCDFDataSource))
        else:
            print("Skipping netCDF data source test since netCDF is not installed")
    else:
        print("Skipping data source tests since MPI size > 1")
    s=unittest.TextTestRunner(verbosity=2).run(suite)
    if not s.wasSuccessful(): sys.exit(1)
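
Usage note (not part of the revision above, only a sketch): because the module reads DOWNUNDER_TEST_DATA_ROOT and DOWNUNDER_WORKDIR at import time, a driver script must set them before importing it. The paths below are placeholders, and the module name assumes this file is importable as run_datasources from the Python path.

import os
import unittest

# placeholders -- point these at the reference data and a scratch directory
os.environ['DOWNUNDER_TEST_DATA_ROOT'] = '/path/to/ref_data'
os.environ['DOWNUNDER_WORKDIR'] = '/tmp'

import run_datasources  # assumes the test file is on the Python path

# note: invoking the TestCase classes directly bypasses the MPI-size and
# netCDF guards in the module's __main__ block
unittest.main(module=run_datasources, verbosity=2, exit=False)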
