########################################################
#
# Copyright (c) 2003-2010 by University of Queensland
# Earth Systems Science Computational Center (ESSCC)
# http://www.uq.edu.au/esscc
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
########################################################

EnsureSConsVersion(0,98,1)
EnsurePythonVersion(2,5)

import sys, os, platform, re
from distutils import sysconfig
from site_init import *

# Version number to check for in options file. Increment when new features are
# added or existing options changed.
REQUIRED_OPTS_VERSION=200

# MS Windows support, many thanks to PH
IS_WINDOWS = (os.name == 'nt')

########################## Determine options file ############################
# 1. command line
# 2. scons/<hostname>_options.py
# 3. name as part of a cluster
options_file=ARGUMENTS.get('options_file', None)
if not options_file:
    ext_dir = os.path.join(os.getcwd(), 'scons')
    hostname = platform.node().split('.')[0]
    for name in hostname, effectiveName(hostname):
        mangledhostname = re.sub('[^0-9a-zA-Z]', '_', name)
        options_file = os.path.join(ext_dir, mangledhostname+'_options.py')
        if os.path.isfile(options_file): break

if not os.path.isfile(options_file):
    print("\nWARNING:\nOptions file %s" % options_file)
    print("not found! Default options will be used, which are most likely suboptimal.")
    print("It is recommended that you copy one of the TEMPLATE files in the scons/")
    print("subdirectory and customize it to your needs.\n")
    options_file = None
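
# For illustration only: a minimal (hypothetical) scons/<hostname>_options.py
# could contain little more than the entries below; the TEMPLATE files in
# scons/ document the full set of recognised options.
#
#   escript_opts_version = 200
#   cc_optim = '-O3'
#   boost_prefix = '/usr/local'
#   boost_libs = ['boost_python']
#   openmp = True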

############################### Build options ################################

default_prefix='/usr'
mpi_flavours=('none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
lapack_flavours=('none', 'clapack', 'mkl')

vars = Variables(options_file, ARGUMENTS)
vars.AddVariables(
  PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile),
  PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate),
  BoolVariable('verbose', 'Output full compile/link lines', False),
  # Compiler/Linker options
  ('cc', 'Path to C compiler', 'default'),
  ('cxx', 'Path to C++ compiler', 'default'),
  ('cc_flags', 'Base C/C++ compiler flags', 'default'),
  ('cc_optim', 'Additional C/C++ flags for a non-debug build', 'default'),
  ('cc_debug', 'Additional C/C++ flags for a debug build', 'default'),
  ('cc_extra', 'Extra C compiler flags', ''),
  ('cxx_extra', 'Extra C++ compiler flags', ''),
  ('ld_extra', 'Extra linker flags', ''),
  BoolVariable('werror', 'Treat compiler warnings as errors', True),
  BoolVariable('debug', 'Compile with debug flags', False),
  BoolVariable('openmp', 'Compile parallel version using OpenMP', False),
  ('omp_flags', 'OpenMP compiler flags', 'default'),
  ('omp_ldflags', 'OpenMP linker flags', 'default'),
  # Mandatory libraries
  ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
  ('boost_libs', 'Boost libraries to link with', ['boost_python']),
  # Optional libraries and options
  EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
  ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
  ('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
  BoolVariable('netcdf', 'Enable netCDF file support', False),
  ('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
  ('netcdf_libs', 'netCDF libraries to link with', ['netcdf_c++', 'netcdf']),
  BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False),
  ('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix),
  ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
  BoolVariable('papi', 'Enable PAPI', False),
  ('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix),
  ('papi_libs', 'PAPI libraries to link with', ['papi']),
  BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False),
  BoolVariable('mkl', 'Enable the Math Kernel Library', False),
  ('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix),
  ('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']),
  BoolVariable('umfpack', 'Enable UMFPACK', False),
  ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
  ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
  EnumVariable('lapack', 'Set LAPACK flavour', 'none', allowed_values=lapack_flavours),
  ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
  ('lapack_libs', 'LAPACK libraries to link with', []),
  BoolVariable('silo', 'Enable the Silo file format in weipa', False),
  ('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix),
  ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
  BoolVariable('visit', 'Enable the VisIt simulation interface', False),
  ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
  ('visit_libs', 'VisIt libraries to link with', ['simV2']),
  BoolVariable('pyvisi', 'Enable pyvisi (deprecated, requires VTK module)', False),
  # Advanced settings
  # dudley_assemble_flags = -funroll-loops to actually do something
  ('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''),
  # To enable passing function pointers through python
  BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False),
  # An option for specifying the compiler tools (see windows branch)
  ('tools_names', 'Compiler tools to use', ['default']),
  ('env_export', 'Environment variables to be passed to tools', []),
  EnumVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
  EnumVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
  # finer control over library building, intel aggressive global optimisation
  # works with dynamic libraries on windows.
  ('share_esysutils', 'Build a dynamic esysUtils library', False),
  ('share_paso', 'Build a dynamic paso library', False),
  ('sys_libs', 'Extra libraries to link with', []),
  ('escript_opts_version', 'Version of options file (do not specify on command line)'),
)
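
# Any of the options above may also be overridden on the scons command line,
# e.g. (values illustrative only):
#   scons openmp=1 mpi=OPENMPI boost_prefix=/opt/boost
# 'scons -h' prints the generated help for the full set of options.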

##################### Create environment and help text #######################

# Intel's compiler uses regular expressions improperly and emits a warning
# about failing to find the compilers. This warning can be safely ignored.

env = Environment(tools = ['default'], options = vars)
if env['tools_names'] != 'default':
    env = Environment(tools = ['default'] + env['tools_names'], options = vars)

if options_file:
    opts_valid=False
    if 'escript_opts_version' in env.Dictionary() and \
       int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION:
        opts_valid=True
    if opts_valid:
        print("Using options in %s." % options_file)
    else:
        print("\nOptions file %s" % options_file)
        print("is outdated! Please update the file by examining one of the TEMPLATE")
        print("files in the scons/ subdirectory and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION)
        Exit(1)

# Generate help text (scons -h)
Help(vars.GenerateHelpText(env))

# Check for superfluous options
for k in vars.UnknownVariables():
    print("WARNING: Ignoring unknown option '%s'" % k)

#################### Make sure install directories exist #####################

prefix=Dir(env['prefix']).abspath
env['incinstall'] = os.path.join(prefix, 'include')
env['bininstall'] = os.path.join(prefix, 'bin')
env['libinstall'] = os.path.join(prefix, 'lib')
env['pyinstall'] = os.path.join(prefix, 'esys')
if not os.path.isdir(env['bininstall']):
    os.makedirs(env['bininstall'])
if not os.path.isdir(env['libinstall']):
    os.makedirs(env['libinstall'])
if not os.path.isdir(env['pyinstall']):
    os.makedirs(env['pyinstall'])

env.Append(CPPPATH = [env['incinstall']])
env.Append(LIBPATH = [env['libinstall']])

################# Fill in compiler options if not set above ##################

if env['cc'] != 'default': env['CC']=env['cc']
if env['cxx'] != 'default': env['CXX']=env['cxx']

# version >=9 of intel C++ compiler requires use of icpc to link in C++
# runtimes (icc does not)
if not IS_WINDOWS and os.uname()[4]=='ia64' and env['CXX']=='icpc':
    env['LINK'] = env['CXX']

# default compiler/linker options
cc_flags = ''
cc_optim = ''
cc_debug = ''
omp_flags = ''
omp_ldflags = ''
fatalwarning = '' # switch to turn warnings into errors
sysheaderopt = '' # how to indicate that a header is a system header

# env['CC'] might be a full path
cc_name=os.path.basename(env['CC'])

if cc_name == 'icc':
    # Intel compiler
    cc_flags = "-std=c99 -fPIC -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1"
    cc_optim = "-O3 -ftz -IPF_fltacc- -IPF_fma -fno-alias -ip"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_flags = "-openmp -openmp_report0"
    omp_ldflags = "-openmp -openmp_report0 -lguide -lpthread"
    fatalwarning = "-Werror"
elif cc_name[:3] == 'gcc':
    # GNU C on any system
    cc_flags = "-pedantic -Wall -fPIC -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
    cc_optim = "-O3"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_flags = "-fopenmp"
    omp_ldflags = "-fopenmp"
    fatalwarning = "-Werror"
    sysheaderopt = "-isystem"
elif cc_name == 'cl':
    # Microsoft Visual C on Windows
    cc_flags = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF"
    cc_optim = "/O2 /Op /W3"
    cc_debug = "/Od /RTCcsu /ZI /DBOUNDS_CHECK"
    fatalwarning = "/WX"
elif cc_name == 'icl':
    # Intel C on Windows
    cc_flags = '/EHsc /GR /MD'
    cc_optim = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll'
    cc_debug = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv'
    omp_flags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
    omp_ldflags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'

# set defaults if not otherwise specified
if env['cc_flags'] == 'default': env['cc_flags'] = cc_flags
if env['cc_optim'] == 'default': env['cc_optim'] = cc_optim
if env['cc_debug'] == 'default': env['cc_debug'] = cc_debug
if env['omp_flags'] == 'default': env['omp_flags'] = omp_flags
if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
if env['cc_extra'] != '': env.Append(CFLAGS = env['cc_extra'])
if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra'])
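
# Example (values illustrative only): extra flags can be added on top of the
# defaults above, either in the options file or on the command line, e.g.
#   scons cc_extra='-march=native' ld_extra='-L/opt/local/lib'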

# set up the autolazy values
if env['forcelazy'] == 'on':
    env.Append(CPPDEFINES=['FAUTOLAZYON'])
elif env['forcelazy'] == 'off':
    env.Append(CPPDEFINES=['FAUTOLAZYOFF'])

# set up the collective resolve values
if env['forcecollres'] == 'on':
    env.Append(CPPDEFINES=['FRESCOLLECTON'])
elif env['forcecollres'] == 'off':
    env.Append(CPPDEFINES=['FRESCOLLECTOFF'])

# allow non-standard C if requested
if env['iknowwhatimdoing']:
    env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])

# Disable OpenMP if no flags provided
if env['openmp'] and env['omp_flags'] == '':
    print("OpenMP requested but no flags provided - disabling OpenMP!")
    env['openmp'] = False

if env['openmp']:
    env.Append(CCFLAGS = env['omp_flags'])
    if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags'])
else:
    env['omp_flags']=''
    env['omp_ldflags']=''

# add debug/non-debug compiler flags
if env['debug']:
    env.Append(CCFLAGS = env['cc_debug'])
else:
    env.Append(CCFLAGS = env['cc_optim'])

# always add cc_flags
env.Append(CCFLAGS = env['cc_flags'])

# add system libraries
env.AppendUnique(LIBS = env['sys_libs'])

# Get the global Subversion revision number for the getVersion() method
try:
    global_revision = os.popen('svnversion -n .').read()
    global_revision = re.sub(':.*', '', global_revision)
    global_revision = re.sub('[^0-9]', '', global_revision)
    if global_revision == '': global_revision='-2'
except:
    global_revision = '-1'
env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])

if IS_WINDOWS:
    if not env['share_esysutils']:
        env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
    if not env['share_paso']:
        env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])

###################### Copy required environment vars ########################

# Windows doesn't use LD_LIBRARY_PATH but PATH instead
if IS_WINDOWS:
    LD_LIBRARY_PATH_KEY='PATH'
    env['ENV']['LD_LIBRARY_PATH']=''
else:
    LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'

# the following env variables are exported for the unit tests; PATH is needed
# so the compiler/linker is found if they are not in default locations.

for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        env['ENV'][key] = 1

env_export=env['env_export']
env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME'])

for key in set(env_export):
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        pass

try:
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY])
except KeyError:
    pass

# these shouldn't be needed
#for key in 'C_INCLUDE_PATH','CPLUS_INCLUDE_PATH','LIBRARY_PATH':
#    try:
#        env['ENV'][key] = os.environ[key]
#    except KeyError:
#        pass

try:
    env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
except KeyError:
    pass

######################## Add some custom builders ############################

py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
env.Append(BUILDERS = {'PyCompile' : py_builder});

runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});

runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});

epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True)
env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
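
# These builders are used from the SConscript files; a typical (purely
# illustrative) call might look like
#   env.PyCompile('py_src/somemodule.py')
#   env.RunPyUnitTest('test/python/run_sometest.py')
# where the actions build_py, runUnitTest, runPyUnitTest and eps2pdf are
# presumably provided by site_init.py (imported above via 'from site_init import *').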

############################ Dependency checks ###############################

# Create a Configure() environment to check for compilers and python
conf = Configure(env.Clone())

######## Test that the compilers work

if 'CheckCC' in dir(conf): # exists since scons 1.1.0
    if not conf.CheckCC():
        print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
        Exit(1)
    if not conf.CheckCXX():
        print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
        Exit(1)
else:
    if not conf.CheckFunc('printf', language='c'):
        print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
        Exit(1)
    if not conf.CheckFunc('printf', language='c++'):
        print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
        Exit(1)

if conf.CheckFunc('gethostname'):
    conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])

######## Python headers & library (required)

python_inc_path=sysconfig.get_python_inc()
if IS_WINDOWS:
    python_lib_path=os.path.join(sysconfig.get_config_var('prefix'), 'libs')
else:
    python_lib_path=sysconfig.get_config_var('LIBDIR')
#python_libs=[sysconfig.get_config_var('LDLIBRARY')] # only on linux
if IS_WINDOWS:
    python_libs=['python%s%s'%(sys.version_info[0], sys.version_info[1])]
else:
    python_libs=['python'+sysconfig.get_python_version()]

if sysheaderopt == '':
    conf.env.AppendUnique(CPPPATH = [python_inc_path])
else:
    conf.env.Append(CCFLAGS = [sysheaderopt, python_inc_path])

conf.env.AppendUnique(LIBPATH = [python_lib_path])
conf.env.AppendUnique(LIBS = python_libs)
# The wrapper script needs to find the libs
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, python_lib_path)

if not conf.CheckCHeader('Python.h'):
    print("Cannot find python include files (tried 'Python.h' in directory %s)" % (python_inc_path))
    Exit(1)
if not conf.CheckFunc('Py_Exit'):
    print("Cannot find python library method Py_Exit (tried %s in directory %s)" % (python_libs, python_lib_path))
    Exit(1)

# Commit changes to environment
env = conf.Finish()

######## boost (required)

boost_inc_path,boost_lib_path=findLibWithHeader(env, env['boost_libs'], 'boost/python.hpp', env['boost_prefix'], lang='c++')
if sysheaderopt == '':
    env.AppendUnique(CPPPATH = [boost_inc_path])
else:
    # This is required because we can't -isystem /usr/include since it breaks
    # std includes
    if os.path.normpath(boost_inc_path) == '/usr/include':
        env.Append(CCFLAGS=[sysheaderopt, os.path.join(boost_inc_path,'boost')])
    else:
        env.Append(CCFLAGS=[sysheaderopt, boost_inc_path])

env.AppendUnique(LIBPATH = [boost_lib_path])
env.AppendUnique(LIBS = env['boost_libs'])
env.PrependENVPath(LD_LIBRARY_PATH_KEY, boost_lib_path)

######## numpy (required)

try:
    from numpy import identity
except ImportError:
    print("Cannot import numpy, you need to set your PYTHONPATH and probably %s"%LD_LIBRARY_PATH_KEY)
    Exit(1)

######## VTK (optional)

if env['pyvisi']:
    try:
        import vtk
        env['pyvisi'] = True
    except ImportError:
        print("Cannot import vtk, disabling pyvisi.")
        env['pyvisi'] = False

######## netCDF (optional)

netcdf_inc_path=''
netcdf_lib_path=''
if env['netcdf']:
    netcdf_inc_path,netcdf_lib_path=findLibWithHeader(env, env['netcdf_libs'], 'netcdf.h', env['netcdf_prefix'], lang='c++')
    env.AppendUnique(CPPPATH = [netcdf_inc_path])
    env.AppendUnique(LIBPATH = [netcdf_lib_path])
    env.AppendUnique(LIBS = env['netcdf_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, netcdf_lib_path)
    env.Append(CPPDEFINES = ['USE_NETCDF'])

######## PAPI (optional)

papi_inc_path=''
papi_lib_path=''
if env['papi']:
    papi_inc_path,papi_lib_path=findLibWithHeader(env, env['papi_libs'], 'papi.h', env['papi_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [papi_inc_path])
    env.AppendUnique(LIBPATH = [papi_lib_path])
    env.AppendUnique(LIBS = env['papi_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, papi_lib_path)
    env.Append(CPPDEFINES = ['BLOCKPAPI'])

######## MKL (optional)

mkl_inc_path=''
mkl_lib_path=''
if env['mkl']:
    mkl_inc_path,mkl_lib_path=findLibWithHeader(env, env['mkl_libs'], 'mkl_solver.h', env['mkl_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [mkl_inc_path])
    env.AppendUnique(LIBPATH = [mkl_lib_path])
    env.AppendUnique(LIBS = env['mkl_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, mkl_lib_path)
    env.Append(CPPDEFINES = ['MKL'])

######## UMFPACK (optional)

umfpack_inc_path=''
umfpack_lib_path=''
if env['umfpack']:
    umfpack_inc_path,umfpack_lib_path=findLibWithHeader(env, env['umfpack_libs'], 'umfpack.h', env['umfpack_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [umfpack_inc_path])
    env.AppendUnique(LIBPATH = [umfpack_lib_path])
    env.AppendUnique(LIBS = env['umfpack_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, umfpack_lib_path)
    env.Append(CPPDEFINES = ['UMFPACK'])

######## LAPACK (optional)

if env['lapack']=='mkl' and not env['mkl']:
    print("mkl_lapack requires MKL!")
    Exit(1)

env['uselapack'] = env['lapack']!='none'
lapack_inc_path=''
lapack_lib_path=''
if env['uselapack']:
    header='clapack.h'
    if env['lapack']=='mkl':
        env.AppendUnique(CPPDEFINES = ['MKL_LAPACK'])
        header='mkl_lapack.h'
    lapack_inc_path,lapack_lib_path=findLibWithHeader(env, env['lapack_libs'], header, env['lapack_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [lapack_inc_path])
    env.AppendUnique(LIBPATH = [lapack_lib_path])
    env.AppendUnique(LIBS = env['lapack_libs'])
    env.Append(CPPDEFINES = ['USE_LAPACK'])

######## Silo (optional)

silo_inc_path=''
silo_lib_path=''
if env['silo']:
    silo_inc_path,silo_lib_path=findLibWithHeader(env, env['silo_libs'], 'silo.h', env['silo_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [silo_inc_path])
    env.AppendUnique(LIBPATH = [silo_lib_path])
    # Note that we do not add the libs since they are only needed for the
    # weipa library and tools.
    #env.AppendUnique(LIBS = [env['silo_libs']])

######## VisIt (optional)

visit_inc_path=''
visit_lib_path=''
if env['visit']:
    visit_inc_path,visit_lib_path=findLibWithHeader(env, env['visit_libs'], 'VisItControlInterface_V2.h', env['visit_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [visit_inc_path])
    env.AppendUnique(LIBPATH = [visit_lib_path])

######## MPI (optional)

env['usempi'] = env['mpi']!='none'
mpi_inc_path=''
mpi_lib_path=''
if env['usempi']:
    mpi_inc_path,mpi_lib_path=findLibWithHeader(env, env['mpi_libs'], 'mpi.h', env['mpi_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [mpi_inc_path])
    env.AppendUnique(LIBPATH = [mpi_lib_path])
    env.AppendUnique(LIBS = env['mpi_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, mpi_lib_path)
    env.Append(CPPDEFINES = ['ESYS_MPI', 'MPI_NO_CPPBIND', 'MPICH_IGNORE_CXX_SEEK'])
    # NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined!
    # On the other hand MPT and OpenMPI don't define the latter so we have to
    # do that here
    if env['netcdf'] and env['mpi'] in ['MPT','OPENMPI']:
        env.Append(CPPDEFINES = ['MPI_INCLUDED'])

######## ParMETIS (optional)

if not env['usempi']: env['parmetis'] = False

parmetis_inc_path=''
parmetis_lib_path=''
if env['parmetis']:
    parmetis_inc_path,parmetis_lib_path=findLibWithHeader(env, env['parmetis_libs'], 'parmetis.h', env['parmetis_prefix'], lang='c')
    env.AppendUnique(CPPPATH = [parmetis_inc_path])
    env.AppendUnique(LIBPATH = [parmetis_lib_path])
    env.AppendUnique(LIBS = env['parmetis_libs'])
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, parmetis_lib_path)
    env.Append(CPPDEFINES = ['USE_PARMETIS'])

######################## Summarize our environment ###########################

# keep some of our install paths first in the list for the unit tests
env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
env.PrependENVPath('PYTHONPATH', prefix)
env['ENV']['ESCRIPT_ROOT'] = prefix

if not env['verbose']:
    env['CCCOMSTR'] = "Compiling $TARGET"
    env['CXXCOMSTR'] = "Compiling $TARGET"
    env['SHCCCOMSTR'] = "Compiling $TARGET"
    env['SHCXXCOMSTR'] = "Compiling $TARGET"
    env['ARCOMSTR'] = "Linking $TARGET"
    env['LINKCOMSTR'] = "Linking $TARGET"
    env['SHLINKCOMSTR'] = "Linking $TARGET"
    #Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)

print("")
print("*** Config Summary (see config.log and lib/buildvars for details) ***")
print("Escript/Finley revision %s"%global_revision)
print("  Install prefix: %s"%env['prefix'])
print("          Python: %s"%sysconfig.PREFIX)
print("           boost: %s"%env['boost_prefix'])
print("           numpy: YES")
if env['usempi']:
    print("             MPI: YES (flavour: %s)"%env['mpi'])
else:
    print("             MPI: DISABLED")
if env['uselapack']:
    print("          LAPACK: YES (flavour: %s)"%env['lapack'])
else:
    print("          LAPACK: DISABLED")
d_list=[]
e_list=[]
for i in 'debug','openmp','netcdf','parmetis','papi','mkl','umfpack','silo','visit','pyvisi':
    if env[i]: e_list.append(i)
    else: d_list.append(i)
for i in e_list:
    print("%16s: YES"%i)
for i in d_list:
    print("%16s: DISABLED"%i)
if ((fatalwarning != '') and (env['werror'])):
    print("  Treating warnings as errors")
else:
    print("  NOT treating warnings as errors")
print("")

####################### Configure the subdirectories #########################

from grouptest import *

TestGroups=[]

# keep an environment without warnings-as-errors
dodgy_env=env.Clone()

# now add warnings-as-errors flags. This needs to be done after configuration
# because the scons test files have warnings in them
if ((fatalwarning != '') and (env['werror'])):
    env.Append(CCFLAGS = fatalwarning)

Export(
  ['env',
   'dodgy_env',
   'IS_WINDOWS',
   'TestGroups'
  ]
)

env.SConscript(dirs = ['tools/CppUnitTest/src'], variant_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0)
env.SConscript(dirs = ['tools/escriptconvert'], variant_dir='build/$PLATFORM/tools/escriptconvert', duplicate=0)
env.SConscript(dirs = ['paso/src'], variant_dir='build/$PLATFORM/paso', duplicate=0)
env.SConscript(dirs = ['weipa/src'], variant_dir='build/$PLATFORM/weipa', duplicate=0)
env.SConscript(dirs = ['escript/src'], variant_dir='build/$PLATFORM/escript', duplicate=0)
env.SConscript(dirs = ['esysUtils/src'], variant_dir='build/$PLATFORM/esysUtils', duplicate=0)
env.SConscript(dirs = ['dudley/src'], variant_dir='build/$PLATFORM/dudley', duplicate=0)
env.SConscript(dirs = ['finley/src'], variant_dir='build/$PLATFORM/finley', duplicate=0)
env.SConscript(dirs = ['modellib/py_src'], variant_dir='build/$PLATFORM/modellib', duplicate=0)
env.SConscript(dirs = ['doc'], variant_dir='build/$PLATFORM/doc', duplicate=0)
env.SConscript(dirs = ['pyvisi/py_src'], variant_dir='build/$PLATFORM/pyvisi', duplicate=0)
env.SConscript(dirs = ['pycad/py_src'], variant_dir='build/$PLATFORM/pycad', duplicate=0)
env.SConscript(dirs = ['pythonMPI/src'], variant_dir='build/$PLATFORM/pythonMPI', duplicate=0)
env.SConscript(dirs = ['paso/profiling'], variant_dir='build/$PLATFORM/paso/profiling', duplicate=0)

######################## Populate the buildvars file #########################

# remove obsolete file
if not env['usempi']:
    Execute(Delete(os.path.join(env['libinstall'], 'pythonMPI')))
    Execute(Delete(os.path.join(env['libinstall'], 'pythonMPIredirect')))

# Try to extract the boost version from version.hpp
boosthpp=open(os.path.join(boost_inc_path, 'boost', 'version.hpp'))
boostversion='unknown'
try:
    for line in boosthpp:
        ver=re.match(r'#define BOOST_VERSION (\d+)',line)
        if ver:
            boostversion=ver.group(1)
except StopIteration:
    pass
boosthpp.close()

buildvars=open(os.path.join(env['libinstall'], 'buildvars'), 'w')
buildvars.write("svn_revision="+str(global_revision)+"\n")
buildvars.write("prefix="+prefix+"\n")
buildvars.write("cc="+env['CC']+"\n")
buildvars.write("cxx="+env['CXX']+"\n")
buildvars.write("python="+sys.executable+"\n")
buildvars.write("python_version="+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+"\n")
buildvars.write("boost_inc_path="+boost_inc_path+"\n")
buildvars.write("boost_lib_path="+boost_lib_path+"\n")
buildvars.write("boost_version="+boostversion+"\n")
buildvars.write("debug=%d\n"%int(env['debug']))
buildvars.write("openmp=%d\n"%int(env['openmp']))
buildvars.write("mpi=%s\n"%env['mpi'])
buildvars.write("mpi_inc_path=%s\n"%mpi_inc_path)
buildvars.write("mpi_lib_path=%s\n"%mpi_lib_path)
buildvars.write("lapack=%s\n"%env['lapack'])
buildvars.write("pyvisi=%d\n"%env['pyvisi'])
for i in 'netcdf','parmetis','papi','mkl','umfpack','silo','visit':
    buildvars.write("%s=%d\n"%(i, int(env[i])))
    if env[i]:
        buildvars.write("%s_inc_path=%s\n"%(i, eval(i+'_inc_path')))
        buildvars.write("%s_lib_path=%s\n"%(i, eval(i+'_lib_path')))
buildvars.close()
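
# The resulting lib/buildvars file is a plain list of key=value lines, e.g.
# (values purely illustrative):
#   svn_revision=3000
#   openmp=1
#   mpi=OPENMPI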

################### Targets to build and install libraries ###################

target_init = env.Command(env['pyinstall']+'/__init__.py', None, Touch('$TARGET'))
env.Alias('target_init', [target_init])

# The headers have to be installed prior to build in order to satisfy
# #include <paso/Common.h>
env.Alias('build_esysUtils', ['install_esysUtils_headers', 'build_esysUtils_lib'])
env.Alias('install_esysUtils', ['build_esysUtils', 'install_esysUtils_lib'])

env.Alias('build_paso', ['install_paso_headers', 'build_paso_lib'])
env.Alias('install_paso', ['build_paso', 'install_paso_lib'])

env.Alias('build_escript', ['install_escript_headers', 'build_escript_lib', 'build_escriptcpp_lib'])
env.Alias('install_escript', ['build_escript', 'install_escript_lib', 'install_escriptcpp_lib', 'install_escript_py'])

env.Alias('build_dudley', ['install_dudley_headers', 'build_dudley_lib', 'build_dudleycpp_lib'])
env.Alias('install_dudley', ['build_dudley', 'install_dudley_lib', 'install_dudleycpp_lib', 'install_dudley_py'])

env.Alias('build_finley', ['install_finley_headers', 'build_finley_lib', 'build_finleycpp_lib'])
env.Alias('install_finley', ['build_finley', 'install_finley_lib', 'install_finleycpp_lib', 'install_finley_py'])

env.Alias('build_weipa', ['install_weipa_headers', 'build_weipa_lib', 'build_weipacpp_lib'])
env.Alias('install_weipa', ['build_weipa', 'install_weipa_lib', 'install_weipacpp_lib', 'install_weipa_py'])

env.Alias('build_escriptreader', ['install_weipa_headers', 'build_escriptreader_lib'])
env.Alias('install_escriptreader', ['build_escriptreader', 'install_escriptreader_lib'])

# Now gather all the above into some easy targets: build_all and install_all
build_all_list = []
build_all_list += ['build_esysUtils']
build_all_list += ['build_paso']
build_all_list += ['build_escript']
build_all_list += ['build_dudley']
build_all_list += ['build_finley']
build_all_list += ['build_weipa']
if not IS_WINDOWS: build_all_list += ['build_escriptreader']
if env['usempi']: build_all_list += ['build_pythonMPI']
build_all_list += ['build_escriptconvert']
env.Alias('build_all', build_all_list)

install_all_list = []
install_all_list += ['target_init']
install_all_list += ['install_esysUtils']
install_all_list += ['install_paso']
install_all_list += ['install_escript']
install_all_list += ['install_dudley']
install_all_list += ['install_finley']
install_all_list += ['install_weipa']
if not IS_WINDOWS: install_all_list += ['install_escriptreader']
install_all_list += ['install_pyvisi_py']
install_all_list += ['install_modellib_py']
install_all_list += ['install_pycad_py']
if env['usempi']: install_all_list += ['install_pythonMPI']
install_all_list += ['install_escriptconvert']
env.Alias('install_all', install_all_list)

# Default target is install
env.Default('install_all')
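
# Typical invocations (illustrative only):
#   scons -j4            # build and install everything (install_all is the default)
#   scons build_finley   # build a single component and its prerequisites
#   scons all_tests      # install everything and run the test suite targets below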

################## Targets to build and run the test suite ###################

env.Alias('build_cppunittest', ['install_cppunittest_headers', 'build_cppunittest_lib'])
env.Alias('install_cppunittest', ['build_cppunittest', 'install_cppunittest_lib'])
env.Alias('run_tests', ['install_all', 'install_cppunittest_lib'])
env.Alias('all_tests', ['install_all', 'install_cppunittest_lib', 'run_tests', 'py_tests'])
env.Alias('build_full',['install_all','build_tests','build_py_tests'])
env.Alias('build_PasoTests','build/$PLATFORM/paso/profiling/PasoTests')

##################### Targets to build the documentation #####################

env.Alias('api_epydoc','install_all')
env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'guide_pdf', 'guide_html','install_pdf', 'cookbook_pdf'])
env.Alias('release_prep', ['docs', 'install_all'])

if not IS_WINDOWS:
    try:
        utest=open('utest.sh','w')
        utest.write(GroupTest.makeHeader(env['PLATFORM']))
        for tests in TestGroups:
            utest.write(tests.makeString())
        utest.close()
        Execute(Chmod('utest.sh', 0755))
        print("Generated utest.sh.")
    except IOError:
        print("Error attempting to write unittests file.")
        Exit(1)

# Make sure that the escript wrapper is in place
if not os.path.isfile(os.path.join(env['bininstall'], 'run-escript')):
    print("Copying escript wrapper.")
    Execute(Copy(os.path.join(env['bininstall'], 'run-escript'), 'bin/run-escript'))
|