/[escript]/trunk/downunder/test/python/run_datasources.py
ViewVC logotype

Annotation of /trunk/downunder/test/python/run_datasources.py

Parent Directory Parent Directory | Revision Log Revision Log


Revision 4016 - (hide annotations)
Tue Oct 9 03:50:27 2012 UTC (6 years, 11 months ago) by caltinay
File MIME type: text/x-python
File size: 6462 byte(s)
Skip data source tests under MPI for now as there is no straightforward way
of comparing the data (saveVTK appears to be the best option).

1 caltinay 3985
2     ##############################################################################
3     #
4     # Copyright (c) 2003-2012 by University of Queensland
5     # http://www.uq.edu.au
6     #
7     # Primary Business: Queensland, Australia
8     # Licensed under the Open Software License version 3.0
9     # http://www.opensource.org/licenses/osl-3.0.php
10     #
11     # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
12     # Development since 2012 by School of Earth Sciences
13     #
14     ##############################################################################
15    
16     __copyright__="""Copyright (c) 2003-2012 by University of Queensland
17     http://www.uq.edu.au
18     Primary Business: Queensland, Australia"""
19     __license__="""Licensed under the Open Software License version 3.0
20     http://www.opensource.org/licenses/osl-3.0.php"""
21     __url__="https://launchpad.net/escript-finley"
22    
23     import logging
24     import numpy as np
25     import os
26     import sys
27     import unittest
28 caltinay 4016 from esys.escript import inf,sup,saveDataCSV,getMPISizeWorld
29 caltinay 3985 from esys.downunder.datasources import *
30    
# Configure the inversion logger up front — this is mainly to avoid
# warning messages about missing handlers during the test runs.
logger = logging.getLogger('inv')
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
logger.addHandler(handler)

# Location of the reference data files, overridable via the environment.
TEST_DATA_ROOT = os.environ.get('DOWNUNDER_TEST_DATA_ROOT', 'ref_data')

# Directory where the tests write their output, overridable via the environment.
WORKDIR = os.environ.get('DOWNUNDER_WORKDIR', '.')


# ER Mapper test dataset: input file, reference CSV and expected metadata.
ERS_DATA = os.path.join(TEST_DATA_ROOT, 'ermapper_test.ers')
ERS_REF = os.path.join(TEST_DATA_ROOT, 'ermapper_test.csv')
ERS_NULL = -99999 * 1e-6  # null/padding value after unit scaling
ERS_SIZE = [20, 15]
ERS_ORIGIN = [309097.0, 6319002.0]
# netCDF test dataset: input file, reference CSV and expected metadata.
NC_DATA = os.path.join(TEST_DATA_ROOT, 'netcdf_test.nc')
NC_REF = os.path.join(TEST_DATA_ROOT, 'netcdf_test.csv')
NC_NULL = 0.
NC_SIZE = [20, 15]
NC_ORIGIN = [403320.91466610413, 6414860.942530109]
# Parameters shared by both data source tests: vertical extent, number of
# vertical elements, altitude of the data, and padding in x/y direction.
VMIN = -10000.
VMAX = 10000
NE_V = 15
ALT = 0.
PAD_X = 3
PAD_Y = 2
class TestERSDataSource(unittest.TestCase):
    def test_ers_with_padding(self):
        """Read the ER Mapper test dataset with padding applied and check
        metadata, gridded data values and the padding area against the
        reference CSV file.
        """
        source = ERSDataSource(headerfile=ERS_DATA, vertical_extents=(VMIN,VMAX,NE_V), alt_of_data=ALT)
        source.setPadding(PAD_X,PAD_Y)
        dom=source.getDomain()
        g,s=source.getGravityAndStdDev()

        # write vertical gravity component and standard deviation to CSV
        # so they can be compared with the reference data below
        outfn=os.path.join(WORKDIR, '_ersdata.csv')
        saveDataCSV(outfn, g=g[2], s=s)

        X0,NP,DX=source.getDataExtents()
        V0,NV,DV=source.getVerticalExtents()

        # check metadata
        self.assertEqual(NP, ERS_SIZE, msg="Wrong number of data points")
        # this test only works if gdal is available
        try:
            import osgeo.osr
            # range() instead of xrange() so this also runs under Python 3
            for i in range(len(ERS_ORIGIN)):
                self.assertAlmostEqual(X0[i], ERS_ORIGIN[i], msg="Data origin wrong")
        except ImportError:
            print("Skipping test of data origin since gdal is not installed.")

        # check data; grid dimensions include the padding on both sides
        nx=NP[0]+2*PAD_X
        ny=NP[1]+2*PAD_Y
        nz=NE_V
        # index of the horizontal slice that holds the actual data
        z_data=int(np.round((ALT-V0)/DV)-1)

        ref=np.genfromtxt(ERS_REF, delimiter=',', dtype=float)
        g_ref=ref[:,0].reshape((NP[1],NP[0]))
        s_ref=ref[:,1].reshape((NP[1],NP[0]))

        out=np.genfromtxt(outfn, delimiter=',', skip_header=1, dtype=float)
        # recompute nz since ripley might have adjusted number of elements;
        # use floor division so reshape() receives an integer (true division
        # would produce a float and fail under Python 3)
        nz=len(out)//(nx*ny)
        g_out=out[:,0].reshape(nz,ny,nx)
        s_out=out[:,1].reshape(nz,ny,nx)
        self.assertAlmostEqual(np.abs(
            g_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]-g_ref).max(),
            0., msg="Difference in gravity data area")

        self.assertAlmostEqual(np.abs(
            s_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]-s_ref).max(),
            0., msg="Difference in error data area")

        # overwrite data -> should only be padding value left
        g_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]=ERS_NULL
        self.assertAlmostEqual(np.abs(g_out-ERS_NULL).max(), 0.,
            msg="Wrong values in padding area")
116    
class TestNetCDFDataSource(unittest.TestCase):
    def test_cdf_with_padding(self):
        """Read the netCDF test dataset with padding applied and check
        metadata, gridded data values and the padding area against the
        reference CSV file.
        """
        source = NetCDFDataSource(gravfile=NC_DATA, vertical_extents=(VMIN,VMAX,NE_V), alt_of_data=ALT)
        source.setPadding(PAD_X,PAD_Y)
        dom=source.getDomain()
        g,s=source.getGravityAndStdDev()

        # write vertical gravity component and standard deviation to CSV
        # so they can be compared with the reference data below
        outfn=os.path.join(WORKDIR, '_ncdata.csv')
        saveDataCSV(outfn, g=g[2], s=s)

        X0,NP,DX=source.getDataExtents()
        V0,NV,DV=source.getVerticalExtents()

        # check metadata
        self.assertEqual(NP, NC_SIZE, msg="Wrong number of data points")
        # this only works if gdal is available
        try:
            import osgeo.osr
            # range() instead of xrange() so this also runs under Python 3
            for i in range(len(NC_ORIGIN)):
                self.assertAlmostEqual(X0[i], NC_ORIGIN[i], msg="Data origin wrong")
        except ImportError:
            print("Skipping test of data origin since gdal is not installed.")

        # check data; grid dimensions include the padding on both sides
        nx=NP[0]+2*PAD_X
        ny=NP[1]+2*PAD_Y
        nz=NE_V
        # index of the horizontal slice that holds the actual data
        z_data=int(np.round((ALT-V0)/DV)-1)

        ref=np.genfromtxt(NC_REF, delimiter=',', dtype=float)
        g_ref=ref[:,0].reshape((NP[1],NP[0]))
        s_ref=ref[:,1].reshape((NP[1],NP[0]))

        out=np.genfromtxt(outfn, delimiter=',', skip_header=1, dtype=float)
        # recompute nz since ripley might have adjusted number of elements;
        # use floor division so reshape() receives an integer (true division
        # would produce a float and fail under Python 3)
        nz=len(out)//(nx*ny)
        g_out=out[:,0].reshape(nz,ny,nx)
        s_out=out[:,1].reshape(nz,ny,nx)

        self.assertAlmostEqual(np.abs(
            g_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]-g_ref).max(),
            0., msg="Difference in gravity data area")

        self.assertAlmostEqual(np.abs(
            s_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]-s_ref).max(),
            0., msg="Difference in error data area")

        # overwrite data -> should only be padding value left
        g_out[z_data, PAD_Y:PAD_Y+NP[1], PAD_X:PAD_X+NP[0]]=NC_NULL
        self.assertAlmostEqual(np.abs(g_out-NC_NULL).max(), 0.,
            msg="Wrong values in padding area")
168    
if __name__ == "__main__":
    # Build the suite explicitly. Data source tests are skipped under MPI
    # because there is no straightforward way of comparing the distributed
    # data yet.
    suite = unittest.TestSuite()
    if getMPISizeWorld() != 1:
        print("Skipping data source tests since MPI size > 1")
    else:
        suite.addTest(unittest.makeSuite(TestERSDataSource))
        # NetCDFDataSource is only exported by the star-import above when
        # netCDF support is available in this build.
        if 'NetCDFDataSource' in dir():
            suite.addTest(unittest.makeSuite(TestNetCDFDataSource))
        else:
            print("Skipping netCDF data source test since netCDF is not installed")
    result = unittest.TextTestRunner(verbosity=2).run(suite)
    if not result.wasSuccessful():
        sys.exit(1)
181    

  ViewVC Help
Powered by ViewVC 1.1.26