##############################################################################
#
# Copyright (c) 2003-2016 by The University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Apache License, version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development 2012-2013 by School of Earth Sciences
# Development from 2014 by Centre for Geoscience Computing (GeoComp)
#
##############################################################################

EnsureSConsVersion(0,98,1)
EnsurePythonVersion(2,5)

import atexit, sys, os, platform, re
from distutils import sysconfig
from dependencies import *
from site_init import *

# Version number to check for in options file. Increment when new features are
# added or existing options changed.
REQUIRED_OPTS_VERSION=203

# MS Windows support, many thanks to PH
IS_WINDOWS = (os.name == 'nt')

# use platform.system() here because os.uname() is not available on Windows
IS_OSX = (platform.system() == 'Darwin')

########################## Determine options file ############################
# 1. command line
# 2. scons/<hostname>_options.py
# 3. name as part of a cluster
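# (For example, a file can be selected explicitly on the command line:
#      scons options_file=scons/myhost_options.py
#  where "myhost_options.py" is just a placeholder name following the
#  <hostname>_options.py convention; otherwise the hostname lookup below is used.)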
options_file=ARGUMENTS.get('options_file', None)
if not options_file:
    ext_dir = os.path.join(os.getcwd(), 'scons')
    hostname = platform.node().split('.')[0]
    for name in hostname, effectiveName(hostname):
        mangledhostname = re.sub('[^0-9a-zA-Z]', '_', name)
        options_file = os.path.join(ext_dir, mangledhostname+'_options.py')
        if os.path.isfile(options_file): break

if not os.path.isfile(options_file):
    print("\nWARNING:\nOptions file %s" % options_file)
    print("not found! Default options will be used which is most likely suboptimal.")
    print("We recommend that you copy the most relevant options file from the scons/templates/")
    print("subdirectory and customize it to your needs.\n")
    options_file = None

############################### Build options ################################

default_prefix='/usr'
mpi_flavours=('no', 'none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
all_domains = ['dudley','finley','ripley','speckley']

# Note that scons construction vars are used for the following purposes:
# CPPFLAGS -> to the preprocessor
# CCFLAGS  -> flags for _both_ C and C++
# CXXFLAGS -> flags for C++ _only_
# CFLAGS   -> flags for C only

vars = Variables(options_file, ARGUMENTS)
vars.AddVariables(
  PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile),
  PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate),
  PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate),
  BoolVariable('verbose', 'Output full compile/link lines', False),
  # Compiler/Linker options
  ('cxx', 'Path to C++ compiler', 'default'),
  ('cc_flags', 'Base (C and C++) compiler flags', 'default'),
  ('cc_optim', 'Additional (C and C++) flags for a non-debug build', 'default'),
  ('cc_debug', 'Additional (C and C++) flags for a debug build', 'default'),
  ('cxx_extra', 'Extra C++ compiler flags', ''),
  ('ld_extra', 'Extra linker flags', ''),
  ('nvcc', 'Path to CUDA compiler', 'default'),
  ('nvccflags', 'Base CUDA compiler flags', 'default'),
  BoolVariable('werror', 'Treat compiler warnings as errors', True),
  BoolVariable('debug', 'Compile with debug flags', False),
  BoolVariable('openmp', 'Compile parallel version using OpenMP', False),
  ('omp_flags', 'OpenMP compiler flags', 'default'),
  ('omp_ldflags', 'OpenMP linker flags', 'default'),
  # Mandatory libraries
  ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
  ('boost_libs', 'Boost libraries to link with', ['boost_python-mt']),
  # Mandatory for tests
  ('cppunit_prefix', 'Prefix/Paths of CppUnit installation', default_prefix),
  ('cppunit_libs', 'CppUnit libraries to link with', ['cppunit']),
  # Optional libraries and options
  EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
  ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
  ('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
  BoolVariable('cuda', 'Enable GPU code with CUDA (requires thrust)', False),
  ('cuda_prefix', 'Prefix/Paths to NVidia CUDA installation', default_prefix),
  BoolVariable('netcdf', 'Enable netCDF file support', False),
  ('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
  ('netcdf_libs', 'netCDF libraries to link with', ['netcdf_c++', 'netcdf']),
  BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False),
  ('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix),
  ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
  BoolVariable('mkl', 'Enable the Math Kernel Library', False),
  ('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix),
  ('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']),
  BoolVariable('umfpack', 'Enable UMFPACK', False),
  ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
  ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
  BoolVariable('boomeramg', 'Enable BoomerAMG', False),
  ('boomeramg_prefix', 'Prefix/Paths to BoomerAMG installation', default_prefix),
  ('boomeramg_libs', 'BoomerAMG libraries to link with', ['boomeramg']),
  TristateVariable('lapack', 'Enable LAPACK', 'auto'),
  ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
  ('lapack_libs', 'LAPACK libraries to link with', []),
  BoolVariable('silo', 'Enable the Silo file format in weipa', False),
  ('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix),
  ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
  BoolVariable('trilinos', 'Enable the Trilinos solvers', False),
  ('trilinos_prefix', 'Prefix/Paths to Trilinos installation', default_prefix),
  ('trilinos_libs', 'Trilinos libraries to link with', []),
  BoolVariable('visit', 'Enable the VisIt simulation interface', False),
  ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
  ('visit_libs', 'VisIt libraries to link with', ['simV2']),
  ListVariable('domains', 'Which domains to build', 'all', all_domains),
  BoolVariable('paso', 'Build Paso solver library', True),
  BoolVariable('weipa', 'Build Weipa data export library', True),
  # Advanced settings
  ('launcher', 'Launcher command (e.g. mpirun)', 'default'),
  ('prelaunch', 'Command to execute before launcher (e.g. mpdboot)', 'default'),
  ('postlaunch', 'Command to execute after launcher (e.g. mpdexit)', 'default'),
  # dudley_assemble_flags = -funroll-loops to actually do something
  ('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''),
  # To enable passing function pointers through python
  BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False),
  # An option for specifying the compiler tools
  ('tools_names', 'Compiler tools to use', ['default']),
  ('env_export', 'Environment variables to be passed to tools', []),
  TristateVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'auto'),
  TristateVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'auto'),
  BoolVariable('build_shared', '(deprecated option, ignored)', True),
  ('sys_libs', 'Extra libraries to link with', []),
  ('escript_opts_version', 'Version of options file (do not specify on command line)'),
  ('SVN_VERSION', 'Do not use from options file', -2),
  ('pythoncmd', 'which python to compile with', sys.executable),
  ('pythonlibname', 'Name of the python library to link. (This is found automatically for python2.X.)', ''),
  ('pythonlibpath', 'Path to the python library. (You should not need to set this unless your python has moved)', ''),
  ('pythonincpath', 'Path to python include files. (You should not need to set this unless your python has moved)', ''),
  BoolVariable('longindices', 'use long indices (for very large matrices)', False),
  BoolVariable('compressed_files', 'Enables reading from compressed binary files', True),
  ('compression_libs', 'Compression libraries to link with', ['boost_iostreams']),
  BoolVariable('papi', 'Enable PAPI', False),
  ('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix),
  ('papi_libs', 'PAPI libraries to link with', ['papi']),
  BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False),
  BoolVariable('osx_dependency_fix', 'Fix dependencies for libraries to have absolute paths (OSX)', False)
)
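# (Any of the variables above can also be set directly on the scons command line
#  and will override the options file, e.g.:  scons prefix=/opt/escript openmp=1)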

##################### Create environment and help text #######################

# Intel's compiler uses regular expressions improperly and emits a warning
# about failing to find the compilers. This warning can be safely ignored.

# PATH is needed so the compiler, linker and tools are found if they are not
# in default locations.
env = Environment(tools = ['default'], options = vars,
                  ENV = {'PATH': os.environ['PATH']})

# set the vars for clang
def mkclang(env):
    env['CXX']='clang++'

if env['tools_names'] != ['default']:
    zz=env['tools_names']
    if 'clang' in zz:
        zz.remove('clang')
        zz.insert(0, mkclang)
    env = Environment(tools = ['default'] + env['tools_names'], options = vars,
                      ENV = {'PATH' : os.environ['PATH']})
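# (mkclang is inserted at the front of the tool list so that SCons applies it
#  like a regular tool when the Environment is re-created; all it does is
#  switch the C++ compiler to clang++.)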

if options_file:
    opts_valid=False
    if 'escript_opts_version' in env.Dictionary() and \
       int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION:
        opts_valid=True
    if opts_valid:
        print("Using options in %s." % options_file)
    else:
        print("\nOptions file %s" % options_file)
        print("is outdated! Please update the file after reading scons/templates/README_FIRST")
        print("and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION)
        Exit(1)

# Generate help text (scons -h)
Help(vars.GenerateHelpText(env))

# Check for superfluous options
if len(vars.UnknownVariables())>0:
    for k in vars.UnknownVariables():
        print("Unknown option '%s'" % k)
    Exit(1)

if env['cuda']:
    if env['nvcc'] != 'default':
        env['NVCC'] = env['nvcc']
    env.Tool('nvcc')

if 'dudley' in env['domains']:
    env['domains'].append('finley')

env['domains'] = sorted(set(env['domains']))
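# (Note: 'finley' is appended above because the dudley domain reuses finley
#  code, so selecting dudley implicitly requires finley to be built as well.)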

# create dictionary which will be populated with info for buildvars file
env['buildvars'] = {}
# create list which will be populated with warnings if there are any
env['warnings'] = []

#################### Make sure install directories exist #####################

env['BUILD_DIR'] = Dir(env['build_dir']).abspath
prefix = Dir(env['prefix']).abspath
env['buildvars']['prefix'] = prefix
env['incinstall'] = os.path.join(prefix, 'include')
env['bininstall'] = os.path.join(prefix, 'bin')
env['libinstall'] = os.path.join(prefix, 'lib')
env['pyinstall'] = os.path.join(prefix, 'esys')
if not os.path.isdir(env['bininstall']):
    os.makedirs(env['bininstall'])
if not os.path.isdir(env['libinstall']):
    os.makedirs(env['libinstall'])
if not os.path.isdir(env['pyinstall']):
    os.makedirs(env['pyinstall'])

env.Append(CPPPATH = [env['incinstall']])
env.Append(LIBPATH = [env['libinstall']])

################# Fill in compiler options if not set above ##################

if env['cxx'] != 'default':
    env['CXX'] = env['cxx']

# default compiler/linker options
cc_flags = '-std=c++11'
cc_optim = ''
cc_debug = ''
omp_flags = ''
omp_ldflags = ''
fatalwarning = '' # switch to turn warnings into errors
sysheaderopt = '' # how to indicate that a header is a system header

# env['CXX'] might be a full path
cc_name=os.path.basename(env['CXX'])

if cc_name == 'icpc':
    # Intel compiler
    # #1478: class "std::auto_ptr<...>" was declared deprecated
    # #1875: offsetof applied to non-POD types is nonstandard (in boost)
    # removed -std=c99 because icpc doesn't like it and we aren't using c anymore
    cc_flags = "-std=c++11 -fPIC -w2 -wd1875 -wd1478 -Wno-unknown-pragmas"
    cc_optim = "-O3 -ftz -fno-alias -inline-level=2 -ipo -xHost"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK -DSLOWSHARECHECK"
    omp_flags = "-openmp"
    omp_ldflags = "-openmp -openmp_report=1"
    fatalwarning = "-Werror"
elif cc_name[:3] == 'g++':
    # GNU C++ on any system
    # note that -ffast-math is not used because it breaks isnan(),
    # see mantis #691
    cc_flags = "-std=c++11 -pedantic -Wall -fPIC -Wno-unknown-pragmas -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
    cc_optim = "-O3"
    # max-vartrack-size: avoid vartrack limit being exceeded with escriptcpp.cpp
    cc_debug = "-g3 -O0 -D_GLIBCXX_DEBUG -DDOASSERT -DDOPROF -DBOUNDS_CHECK -DSLOWSHARECHECK --param=max-vartrack-size=100000000"
    omp_flags = "-fopenmp"
    omp_ldflags = "-fopenmp"
    fatalwarning = "-Werror"
    sysheaderopt = "-isystem"
elif cc_name == 'cl':
    # Microsoft Visual C on Windows
    cc_flags = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF"
    cc_optim = "/O2 /Op /W3"
    cc_debug = "/Od /RTCcsu /ZI /DBOUNDS_CHECK"
    fatalwarning = "/WX"
elif cc_name == 'icl':
    # Intel C on Windows
    cc_flags = '/EHsc /GR /MD'
    cc_optim = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll'
    cc_debug = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv'
    omp_flags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
    omp_ldflags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
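# (Any compiler not matched above, e.g. clang++, falls through this chain and
#  keeps only the generic defaults set earlier: cc_flags='-std=c++11' and empty
#  optimisation/debug/OpenMP flags. Use cc_flags & friends in the options file
#  to supply suitable values in that case.)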

env['sysheaderopt']=sysheaderopt

# set defaults if not otherwise specified
if env['cc_flags'] == 'default': env['cc_flags'] = cc_flags
if env['cc_optim'] == 'default': env['cc_optim'] = cc_optim
if env['cc_debug'] == 'default': env['cc_debug'] = cc_debug
if env['omp_flags'] == 'default': env['omp_flags'] = omp_flags
if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra'])

if env['nvccflags'] != 'default':
    env['NVCCFLAGS'] = env['nvccflags']
    env['SHNVCCFLAGS'] = env['nvccflags'] + ' -shared'

if env['longindices']:
    env.Append(CPPDEFINES = ['ESYS_INDEXTYPE_LONG'])

# set up the autolazy values
if env['forcelazy'] == 1:
    env.Append(CPPDEFINES=['FAUTOLAZYON'])
elif env['forcelazy'] == 0:
    env.Append(CPPDEFINES=['FAUTOLAZYOFF'])

# set up the collective resolve values
if env['forcecollres'] == 1:
    env.Append(CPPDEFINES=['FRESCOLLECTON'])
elif env['forcecollres'] == 0:
    env.Append(CPPDEFINES=['FRESCOLLECTOFF'])

# allow non-standard C if requested
if env['iknowwhatimdoing']:
    env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])

# Disable OpenMP if no flags provided
if env['openmp'] and env['omp_flags'] == '':
    env['warnings'].append("OpenMP requested but no flags provided - disabling OpenMP!")
    env['openmp'] = False

if env['openmp']:
    env.Append(CCFLAGS = env['omp_flags'])
    if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags'])
else:
    env['omp_flags']=''
    env['omp_ldflags']=''

env['buildvars']['openmp']=int(env['openmp'])

# add debug/non-debug compiler flags
env['buildvars']['debug']=int(env['debug'])
if env['debug']:
    env.Append(CCFLAGS = env['cc_debug'])
else:
    env.Append(CCFLAGS = env['cc_optim'])

# always add cc_flags
env.Append(CCFLAGS = env['cc_flags'])

# add system libraries
env.AppendUnique(LIBS = env['sys_libs'])
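# (At this point CCFLAGS has been assembled as: omp_flags (if OpenMP is enabled),
#  then cc_debug or cc_optim, then cc_flags - in that append order.)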

# determine svn revision
global_revision=ARGUMENTS.get('SVN_VERSION', None)
if global_revision:
    global_revision = re.sub(':.*', '', global_revision)
    global_revision = re.sub('[^0-9]', '', global_revision)
    if global_revision == '': global_revision='-2'
else:
    # Get the global Subversion revision number for the getVersion() method
    try:
        global_revision = os.popen('svnversion -n .').read()
        global_revision = re.sub(':.*', '', global_revision)
        global_revision = re.sub('[^0-9]', '', global_revision)
        if global_revision == '': global_revision='-2'
    except:
        global_revision = '-1'
env['svn_revision']=global_revision
env['buildvars']['svn_revision']=global_revision
env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])

env['IS_WINDOWS']=IS_WINDOWS
env['IS_OSX']=IS_OSX

###################### Copy required environment vars ########################

# Windows doesn't use LD_LIBRARY_PATH but PATH instead
if IS_WINDOWS:
    LD_LIBRARY_PATH_KEY='PATH'
    env['ENV']['LD_LIBRARY_PATH']=''
else:
    LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'

env['LD_LIBRARY_PATH_KEY']=LD_LIBRARY_PATH_KEY

# the following env variables are exported for the unit tests

for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        env['ENV'][key] = '1'

env_export=env['env_export']
env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','KMP_MONITOR_STACKSIZE','TMPDIR','TEMP','TMP','LD_PRELOAD'])

for key in set(env_export):
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        pass

for key in os.environ.keys():
    if key.startswith("SLURM_"):
        env['ENV'][key] = os.environ[key]

try:
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY])
except KeyError:
    pass

if IS_OSX:
    try:
        env.PrependENVPath('DYLD_LIBRARY_PATH', os.environ['DYLD_LIBRARY_PATH'])
    except KeyError:
        pass

try:
    env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
except KeyError:
    pass

######################## Add some custom builders ############################

# Takes care of prefix and suffix for Python modules:
def build_python_module(env, target, source):
    return env.SharedLibrary(target, source, SHLIBPREFIX='', SHLIBSUFFIX='.so')
env.AddMethod(build_python_module, "PythonModule")

if env['pythoncmd']=='python':
    py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
else:
    py_builder = Builder(action = env['pythoncmd']+" scripts/py_comp.py $SOURCE $TARGET", suffix = '.pyc', src_suffix = '.py', single_source=True)
env.Append(BUILDERS = {'PyCompile' : py_builder});

runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});

runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});

runPyExample_builder = Builder(action = runPyExample, suffix = '.passed', src_suffix='.py', single_source=True)
env.Append(BUILDERS = {'RunPyExample' : runPyExample_builder});

epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True)
env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
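# (Summary of the builders above: PyCompile byte-compiles .py sources with the
#  configured python; RunUnitTest/RunPyUnitTest/RunPyExample run a test and
#  create a '<name>.passed' stamp file on success; EpsToPDF converts .eps
#  figures for the documentation. The action callables build_py, runUnitTest,
#  runPyUnitTest, runPyExample and eps2pdf are presumably provided by the
#  site_init/dependencies helpers imported at the top of this file.)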

############################ Dependency checks ###############################

######## Compiler
env=checkCompiler(env)

######## Python headers & library (required)
env=checkPython(env)

######## boost & boost-python (required)
env=checkBoost(env)

######## numpy (required) and numpy headers (optional)
env=checkNumpy(env)

######## CppUnit (required for tests)
env=checkCppUnit(env)

######## NVCC version (optional)
if env['cuda'] and 'ripley' in env['domains']:
    env=checkCudaVersion(env)
    env=checkCUDA(env)

######## optional python modules (sympy, pyproj)
env=checkOptionalModules(env)

######## optional dependencies (netCDF, PAPI, MKL, UMFPACK, Lapack, Silo, ...)
env=checkOptionalLibraries(env)

######## PDFLaTeX (for documentation)
env=checkPDFLatex(env)

# set defaults for launchers if not otherwise specified
if env['prelaunch'] == 'default':
    if env['mpi'] == 'INTELMPI' and env['openmp']:
        env['prelaunch'] = "export I_MPI_PIN_DOMAIN=omp"
    elif env['mpi'] == 'OPENMPI':
        # transform comma-separated list to '-x a -x b -x c ...'
        env['prelaunch'] = "EE=$(echo -x %e|sed -e 's/,/ -x /g')"
    elif env['mpi'] == 'MPT':
        env['prelaunch'] = "export MPI_NUM_MEMORY_REGIONS=0"
    elif env['mpi'] == 'MPICH2':
        env['prelaunch'] = "mpdboot -n %n -r ssh -f %f"
    else:
        env['prelaunch'] = ""

if env['launcher'] == 'default':
    if env['mpi'] == 'INTELMPI':
        env['launcher'] = "mpirun -hostfile %f -n %N -ppn %p %b"
    elif env['mpi'] == 'OPENMPI':
        # default to OpenMPI version 1.10 or higher
        env['launcher'] = "mpirun ${AGENTOVERRIDE} --gmca mpi_warn_on_fork 0 ${EE} --host %h --map-by node:pe=%t -bind-to core -np %N %b"
        if 'orte_version' in env:
            major,minor,point = [int(i) for i in env['orte_version'].split('.')]
            if major == 1 and minor < 10:
                env['launcher'] = "mpirun ${AGENTOVERRIDE} --gmca mpi_warn_on_fork 0 ${EE} --host %h --cpus-per-rank %t -np %N %b"
    elif env['mpi'] == 'MPT':
        env['launcher'] = "mpirun %h -np %p %b"
    elif env['mpi'] == 'MPICH':
        env['launcher'] = "mpirun -machinefile %f -np %N %b"
    elif env['mpi'] == 'MPICH2':
        env['launcher'] = "mpiexec -genvlist %e -np %N %b"
    else:
        env['launcher'] = "%b"

if env['postlaunch'] == 'default':
    if env['mpi'] == 'MPICH2':
        env['postlaunch'] = "mpdallexit"
    else:
        env['postlaunch'] = ""

# dependency sanity checks

if len(env['domains']) == 0:
    env['warnings'].append("No domains have been built, escript will not be very useful!")

# keep some of our install paths first in the list for the unit tests
env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
env.PrependENVPath('PYTHONPATH', prefix)
env['ENV']['ESCRIPT_ROOT'] = prefix

if not env['verbose']:
    env['CXXCOMSTR'] = "Compiling $TARGET"
    env['SHCXXCOMSTR'] = "Compiling $TARGET"
    env['ARCOMSTR'] = "Linking $TARGET"
    env['LINKCOMSTR'] = "Linking $TARGET"
    env['SHLINKCOMSTR'] = "Linking $TARGET"
    env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
    env['BIBTEXCOMSTR'] = "Generating bibliography $TARGET"
    env['MAKEINDEXCOMSTR'] = "Generating index $TARGET"
    #Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)

########################### Configure the targets ############################

from grouptest import GroupTest
TestGroups=[]

# keep an environment without warnings-as-errors
dodgy_env=env.Clone()

# now add warnings-as-errors flags. This needs to be done after configuration
# because the scons test files have warnings in them
if ((fatalwarning != '') and (env['werror'])):
    env.Append(CCFLAGS = fatalwarning)

Export(
  ['env',
   'dodgy_env',
   'IS_WINDOWS',
   'TestGroups'
  ]
)
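# (The names exported above are picked up by the SConscript files below via
#  SCons' Import(), e.g. Import('env').)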

target_init = env.Command(os.path.join(env['pyinstall'],'__init__.py'), None, Touch('$TARGET'))
env.Alias('target_init', [target_init])

# escript can't be turned off
build_all_list = ['build_escript']
install_all_list = ['target_init', 'install_escript']

if env['usempi']:
    build_all_list += ['build_pythonMPI', 'build_overlord']
    install_all_list += ['install_pythonMPI', 'install_overlord']

env['buildvars']['paso'] = int(env['paso'])
if env['paso']:
    env.Append(CPPDEFINES = ['ESYS_HAVE_PASO'])
    build_all_list += ['build_paso']
    install_all_list += ['install_paso']

env['buildvars']['trilinos'] = int(env['trilinos'])
if env['trilinos']:
    build_all_list += ['build_trilinoswrap']
    install_all_list += ['install_trilinoswrap']

env['buildvars']['domains'] = ','.join(env['domains'])
for domain in env['domains']:
    env.Append(CPPDEFINES = ['ESYS_HAVE_'+domain.upper()])
    build_all_list += ['build_%s'%domain]
    install_all_list += ['install_%s'%domain]

env['buildvars']['weipa'] = int(env['weipa'])
if env['weipa']:
    env.Append(CPPDEFINES = ['ESYS_HAVE_WEIPA'])
    build_all_list += ['build_weipa']
    install_all_list += ['install_weipa']
    if 'finley' in env['domains'] or 'dudley' in env['domains']:
        build_all_list += ['build_escriptreader']
        install_all_list += ['install_escriptreader']

variant='$BUILD_DIR/$PLATFORM/'
env.SConscript('escriptcore/SConscript', variant_dir=variant+'escriptcore', duplicate=0)
env.SConscript('escript/py_src/SConscript', variant_dir=variant+'escript', duplicate=0)
env.SConscript('pythonMPI/src/SConscript', variant_dir=variant+'pythonMPI', duplicate=0)
env.SConscript('tools/overlord/SConscript', variant_dir=variant+'tools/overlord', duplicate=0)
env.SConscript('paso/SConscript', variant_dir=variant+'paso', duplicate=0)
env.SConscript('trilinoswrap/SConscript', variant_dir=variant+'trilinoswrap', duplicate=0)
env.SConscript('cusplibrary/SConscript')
env.SConscript('dudley/SConscript', variant_dir=variant+'dudley', duplicate=0)
env.SConscript('finley/SConscript', variant_dir=variant+'finley', duplicate=0)
env.SConscript('ripley/SConscript', variant_dir=variant+'ripley', duplicate=0)
env.SConscript('speckley/SConscript', variant_dir=variant+'speckley', duplicate=0)
env.SConscript('weipa/SConscript', variant_dir=variant+'weipa', duplicate=0)
env.SConscript(dirs = ['downunder/py_src'], variant_dir=variant+'downunder', duplicate=0)
env.SConscript(dirs = ['modellib/py_src'], variant_dir=variant+'modellib', duplicate=0)
env.SConscript(dirs = ['pycad/py_src'], variant_dir=variant+'pycad', duplicate=0)
env.SConscript('tools/escriptconvert/SConscript', variant_dir=variant+'tools/escriptconvert', duplicate=0)
env.SConscript('doc/SConscript', variant_dir=variant+'doc', duplicate=0)

env.Alias('build', build_all_list)

install_all_list += ['install_downunder_py']
install_all_list += ['install_modellib_py']
install_all_list += ['install_pycad_py']
install_all_list += [env.Install(Dir('scripts',env['build_dir']), os.path.join('scripts', 'release_sanity.py'))]

if env['osx_dependency_fix']:
    print("Require dependency fix")
    install_all=env.Command('install', install_all_list, 'scripts/moveall.sh')
else:
    install_all=env.Alias('install', install_all_list)

sanity=env.Alias('sanity', env.Command('dummy','',os.path.join(env['prefix'], 'bin', 'run-escript')+' '+os.path.join(env['build_dir'],'scripts', 'release_sanity.py')))
env.Depends('dummy', install_all)
if env['usempi']:
    env.Depends('dummy', ['install_pythonMPI'])

# if all domains are built:
if env['domains'] == all_domains:
    env.AlwaysBuild('sanity')
    env.Default('sanity')
else:
    env.Default('install')
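# (In other words, a plain "scons" builds and installs everything; when every
#  domain is enabled it additionally runs the quick release_sanity.py check
#  through the installed run-escript launcher.)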

################## Targets to build and run the test suite ###################

if not env['cppunit']:
    test_msg = env.Command('.dummy.', None, '@echo "Cannot run C++ unit tests, CppUnit not found!";exit 1')
    env.Alias('run_tests', test_msg)
    env.Alias('build_tests', '')
env.Alias('run_tests', ['install'])
env.Alias('all_tests', ['install', 'run_tests', 'py_tests'])
env.Alias('build_full',['install','build_tests','build_py_tests'])
Requires('py_tests', 'install')
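# (Typical invocations: "scons run_tests" for the C++ unit tests, "scons py_tests"
#  for the Python tests, "scons all_tests" for both; "scons build_full" builds
#  everything including the test binaries without running them.)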

##################### Targets to build the documentation #####################

env.Alias('pdfdocs',['user_pdf', 'install_pdf', 'cookbook_pdf', 'inversion_pdf'])
env.Alias('basedocs', ['pdfdocs','examples_tarfile', 'examples_zipfile', 'api_doxygen'])
env.Alias('docs', ['basedocs', 'sphinxdoc'])
env.Alias('release_prep', ['docs', 'install'])
env.Alias('release_prep_old', ['basedocs', 'api_epydoc', 'install'])
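# (e.g. "scons docs" builds the PDF guides, the example archives, the doxygen
#  API documentation and the sphinx documentation; "scons release_prep" builds
#  the docs and installs everything as well.)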

# The test scripts are always generated, this target allows us to
# generate the testscripts without doing a full build
env.Alias('testscripts',[])

if not IS_WINDOWS:
    generateTestScripts(env, TestGroups)

######################## Populate the buildvars file #########################

write_buildvars(env)
# delete buildvars upon cleanup - target_init is default so use it
env.Clean('target_init', File('buildvars', env['libinstall']))

write_launcher(env)

# remove obsolete files
if not env['usempi']:
    Execute(Delete(File(['pythonMPI','pythonMPIredirect'], env['libinstall'])))
    Execute(Delete(File('escript-overlord', env['bininstall'])))

######################## Summarize our environment ###########################
def print_summary():
    d_list=[]
    print("")
    print("*** Config Summary (see config.log and <prefix>/lib/buildvars for details) ***")
    print("Escript revision %s"%global_revision)
    print(" Install prefix: %s"%env['prefix'])
    print(" Python: %s (Version %s)"%(env['pythoncmd'],env['python_version']))
    print(" boost: %s (Version %s)"%(env['boost_prefix'],env['boost_version']))
    if env['numpy_h']:
        print(" numpy: YES (with headers)")
    else:
        print(" numpy: YES (without headers)")
    if env['usempi']:
        if 'orte_version' in env:
            print(" MPI: %s (Version %s)"%(env['mpi'], env['orte_version']))
        else:
            print(" MPI: YES (flavour: %s)"%env['mpi'])
    else:
        d_list.append('mpi')
    if env['parmetis']:
        print(" ParMETIS: %s (Version %s)"%(env['parmetis_prefix'],env['parmetis_version']))
    else:
        d_list.append('parmetis')
    if env['uselapack']:
        print(" LAPACK: YES (flavour: %s)"%env['lapack'])
    else:
        d_list.append('lapack')
    if env['cuda']:
        print(" CUDA: YES (nvcc: %s)"%env['nvcc_version'])
    else:
        d_list.append('cuda')
    if env['gmshpy']:
        gmshpy=" + python module"
    else:
        gmshpy=""
    if env['gmsh']=='m':
        print(" gmsh: YES, MPI-ENABLED"+gmshpy)
    elif env['gmsh']=='s':
        print(" gmsh: YES"+gmshpy)
    else:
        if env['gmshpy']:
            print(" gmsh: python module only")
        else:
            d_list.append('gmsh')
    if env['compressed_files']:
        print(" gzip: YES")
    else:
        d_list.append('gzip')

    solvers = []
    direct = []
    if env['paso']:
        solvers.append('paso')
        if env['mkl']:
            direct.append('mkl')
        if env['umfpack']:
            direct.append('umfpack')
    else:
        d_list.append('paso')
    if env['trilinos']:
        solvers.append('trilinos')
        direct.append('trilinos')
    else:
        d_list.append('trilinos')

    print(" Solver library: %s"%(", ".join(solvers)))
    if len(direct) > 0:
        print(" Direct solver: YES (%s)"%(", ".join(direct)))
    else:
        print(" Direct solver: NONE")
    print(" domains: %s"%(", ".join(env['domains'])))

    e_list=[]
    for i in 'weipa','debug','openmp','boomeramg','cppunit','gdal','mkl',\
             'netcdf','papi','pyproj','scipy','silo','sympy','umfpack','visit':
        if env[i]: e_list.append(i)
        else: d_list.append(i)

    d_list += set(all_domains).difference(env['domains'])
    for i in e_list:
        print("%16s: YES"%i)
    print("\n DISABLED features: %s"%(" ".join(sorted(d_list))))

    if ((fatalwarning != '') and (env['werror'])):
        print(" Treating warnings as errors")
    else:
        print(" NOT treating warnings as errors")
    print("")
    for w in env['warnings']:
        print("WARNING: %s"%w)
    if len(GetBuildFailures()):
        print("\nERROR: build stopped due to errors\n")
    else:
        print("\nSUCCESS: build complete\n")

atexit.register(print_summary)