##############################################################################
#
# Copyright (c) 2003-2017 by The University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Apache License, version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development 2012-2013 by School of Earth Sciences
# Development from 2014 by Centre for Geoscience Computing (GeoComp)
#
##############################################################################

EnsureSConsVersion(0,98,1)
EnsurePythonVersion(2,5)

import atexit, sys, os, platform, re
from distutils import sysconfig
from dependencies import *
from site_init import *

# Version number to check for in options file. Increment when new features are
# added or existing options changed.
REQUIRED_OPTS_VERSION=203

# MS Windows support, many thanks to PH
IS_WINDOWS = (os.name == 'nt')

# Use platform.system() instead of os.uname(): os.uname() does not exist on
# Windows, so evaluating it unconditionally would raise AttributeError there.
IS_OSX = (platform.system() == 'Darwin')
########################## Determine options file ############################
# 1. command line
# 2. scons/<hostname>_options.py
# 3. name as part of a cluster
options_file=ARGUMENTS.get('options_file', None)
if not options_file:
    ext_dir = os.path.join(os.getcwd(), 'scons')
    hostname = platform.node().split('.')[0]
    for name in hostname, effectiveName(hostname):
        # BUG FIX: mangle the current candidate 'name', not the raw
        # 'hostname' - otherwise the effectiveName() (cluster) candidate
        # produced the same filename and was never actually tried.
        mangledhostname = re.sub('[^0-9a-zA-Z]', '_', name)
        options_file = os.path.join(ext_dir, mangledhostname+'_options.py')
        if os.path.isfile(options_file): break

    if not os.path.isfile(options_file):
        print("\nWARNING:\nOptions file %s" % options_file)
        print("not found! Default options will be used which is most likely suboptimal.")
        print("We recommend that you copy the most relevant options file in the scons/template/")
        print("subdirectory and customize it to your needs.\n")
        options_file = None
############################### Build options ################################

default_prefix='/usr'
mpi_flavours=('no', 'none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
netcdf_flavours = ('no', 'off', 'none', 'False', # Must be last of the false alternatives
                   'yes', 'on', 'True', '3', # Must be last of the version 3 alternatives
                   '4')
all_domains = ['dudley','finley','ripley','speckley']

# Note that scons construction vars have the following purposes:
# CPPFLAGS -> to the preprocessor
# CCFLAGS  -> flags for _both_ C and C++
# CXXFLAGS -> flags for c++ _only_
# CFLAGS   -> flags for c only

vars = Variables(options_file, ARGUMENTS)
vars.AddVariables(
  PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile),
  PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate),
  PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate),
  BoolVariable('verbose', 'Output full compile/link lines', False),
# Compiler/Linker options
  ('cxx', 'Path to C++ compiler', 'default'),
  ('cc_flags', 'Base (C and C++) compiler flags', 'default'),
  ('cc_optim', 'Additional (C and C++) flags for a non-debug build', 'default'),
  ('cc_debug', 'Additional (C and C++) flags for a debug build', 'default'),
  ('cxx_extra', 'Extra C++ compiler flags', ''),
  ('ld_extra', 'Extra linker flags', ''),
  ('nvcc', 'Path to CUDA compiler', 'default'),
  ('nvccflags', 'Base CUDA compiler flags', 'default'),
  BoolVariable('werror','Treat compiler warnings as errors', True),
  BoolVariable('debug', 'Compile with debug flags', False),
  BoolVariable('openmp', 'Compile parallel version using OpenMP', False),
  ('omp_flags', 'OpenMP compiler flags', 'default'),
  ('omp_ldflags', 'OpenMP linker flags', 'default'),
# Mandatory libraries
  ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
  ('boost_libs', 'Boost libraries to link with', ['boost_python-mt']),
# Mandatory for tests
  ('cppunit_prefix', 'Prefix/Paths of CppUnit installation', default_prefix),
  ('cppunit_libs', 'CppUnit libraries to link with', ['cppunit']),
# Optional libraries and options
  EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
  ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
  ('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
  BoolVariable('cuda', 'Enable GPU code with CUDA (requires thrust)', False),
  ('cuda_prefix', 'Prefix/Paths to NVidia CUDA installation', default_prefix),
  # NOTE(review): the default 'False' (bool) relies on matching the string
  # 'False' in netcdf_flavours - confirm before changing to a plain 'no'.
  EnumVariable('netcdf', 'Enable netCDF file support', False, allowed_values=netcdf_flavours),
  ('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
  ('netcdf_libs', 'netCDF libraries to link with', 'DEFAULT'),
  BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False),
  ('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix),
  ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
  BoolVariable('mkl', 'Enable the Math Kernel Library', False),
  ('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix),
  ('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']),
  BoolVariable('umfpack', 'Enable UMFPACK', False),
  ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
  ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
  BoolVariable('boomeramg', 'Enable BoomerAMG', False),
  ('boomeramg_prefix', 'Prefix/Paths to BoomerAMG installation', default_prefix),
  ('boomeramg_libs', 'BoomerAMG libraries to link with', ['boomeramg']),
  TristateVariable('lapack', 'Enable LAPACK', 'auto'),
  ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
  ('lapack_libs', 'LAPACK libraries to link with', []),
  BoolVariable('silo', 'Enable the Silo file format in weipa', False),
  ('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix),
  ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
  BoolVariable('trilinos', 'Enable the Trilinos solvers', False),
  ('trilinos_prefix', 'Prefix/Paths to Trilinos installation', default_prefix),
  ('trilinos_libs', 'Trilinos libraries to link with', []),
  BoolVariable('visit', 'Enable the VisIt simulation interface', False),
  ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
  ('visit_libs', 'VisIt libraries to link with', ['simV2']),
  ListVariable('domains', 'Which domains to build', 'all', all_domains),
  BoolVariable('paso', 'Build Paso solver library', True),
  BoolVariable('weipa', 'Build Weipa data export library', True),
# Advanced settings
  ('launcher', 'Launcher command (e.g. mpirun)', 'default'),
  ('prelaunch', 'Command to execute before launcher (e.g. mpdboot)', 'default'),
  ('postlaunch', 'Command to execute after launcher (e.g. mpdexit)', 'default'),
  #dudley_assemble_flags = -funroll-loops to actually do something
  ('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''),
  # To enable passing function pointers through python
  BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False),
  # An option for specifying the compiler tools
  ('tools_names', 'Compiler tools to use', ['default']),
  ('env_export', 'Environment variables to be passed to tools',[]),
  TristateVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'auto'),
  TristateVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'auto'),
  BoolVariable('build_shared', '(deprecated option, ignored)', True),
  ('sys_libs', 'Extra libraries to link with', []),
  ('escript_opts_version', 'Version of options file (do not specify on command line)'),
  ('SVN_VERSION', 'Do not use from options file', -2),
  ('pythoncmd', 'which python to compile with', sys.executable),
  ('pythonlibname', 'Name of the python library to link. (This is found automatically for python2.X.)', ''),
  ('pythonlibpath', 'Path to the python library. (You should not need to set this unless your python has moved)',''),
  ('pythonincpath','Path to python include files. (You should not need to set this unless your python has moved',''),
  BoolVariable('longindices', 'use long indices (for very large matrices)', False),
  BoolVariable('compressed_files','Enables reading from compressed binary files', True),
  ('compression_libs', 'Compression libraries to link with', ['boost_iostreams']),
  BoolVariable('papi', 'Enable PAPI', False),
  ('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix),
  ('papi_libs', 'PAPI libraries to link with', ['papi']),
  BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False),
  BoolVariable('osx_dependency_fix', 'Fix dependencies for libraries to have absolute paths (OSX)', False),
  BoolVariable('stdlocationisprefix', 'Set the prefix as escript root in the launcher', False),
  BoolVariable('mpi_no_host', 'Do not specify --host in run-escript launcher (only OPENMPI)', False)
)
##################### Create environment and help text #######################

# Intel's compiler uses regular expressions improperly and emits a warning
# about failing to find the compilers. This warning can be safely ignored.

# PATH is needed so the compiler, linker and tools are found if they are not
# in default locations.
env = Environment(tools = ['default'], options = vars,
                  ENV = {'PATH': os.environ['PATH']})

# set the vars for clang
def mkclang(env):
    env['CXX']='clang++'

if env['tools_names'] != ['default']:
    zz=env['tools_names']
    if 'clang' in zz:
        zz.remove('clang')
        zz.insert(0, mkclang)
    # rebuild the environment with the user-requested tool chain
    env = Environment(tools = ['default'] + env['tools_names'], options = vars,
                      ENV = {'PATH' : os.environ['PATH']})

# Convert env['netcdf'] into one of 0, 3, 4 (0 meaning disabled).
# Also choose default values for libraries.
pos1=netcdf_flavours.index('False')
pos2=netcdf_flavours.index('3')
mypos=netcdf_flavours.index(env['netcdf'])
if 0 <= mypos <= pos1:
    env['netcdf']=0
elif pos1 < mypos <= pos2:
    env['netcdf']=3
    if env['netcdf_libs']=='DEFAULT':
        env['netcdf_libs']=['netcdf_c++', 'netcdf']
else: # netcdf4
    env['netcdf']=4
    if env['netcdf_libs']=='DEFAULT':
        env['netcdf_libs']=['netcdf_c++4']
# Verify that the options file (if any) is recent enough to be trusted.
if options_file:
    opts_valid=False
    if 'escript_opts_version' in env.Dictionary() and \
        int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION:
            opts_valid=True
    if opts_valid:
        print("Using options in %s." % options_file)
    else:
        print("\nOptions file %s" % options_file)
        print("is outdated! Please update the file after reading scons/templates/README_FIRST")
        print("and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION)
        Exit(1)

# Generate help text (scons -h)
Help(vars.GenerateHelpText(env))

# Check for superfluous options
if len(vars.UnknownVariables())>0:
    for k in vars.UnknownVariables():
        print("Unknown option '%s'" % k)
    Exit(1)
if env['cuda']:
    if env['nvcc'] != 'default':
        env['NVCC'] = env['nvcc']
    env.Tool('nvcc')

# dudley depends on finley, so pull finley in automatically
if 'dudley' in env['domains']:
    env['domains'].append('finley')

# normalise: unique, stable order
env['domains'] = sorted(set(env['domains']))

# create dictionary which will be populated with info for buildvars file
env['buildvars'] = {}
# create list which will be populated with warnings if there are any
env['warnings'] = []
#################### Make sure install directories exist #####################

env['BUILD_DIR'] = Dir(env['build_dir']).abspath
prefix = Dir(env['prefix']).abspath
env['buildvars']['prefix'] = prefix
env['incinstall'] = os.path.join(prefix, 'include')
env['bininstall'] = os.path.join(prefix, 'bin')
env['libinstall'] = os.path.join(prefix, 'lib')
env['pyinstall'] = os.path.join(prefix, 'esys')
# create the install tree up front so later Install() targets cannot fail
if not os.path.isdir(env['bininstall']):
    os.makedirs(env['bininstall'])
if not os.path.isdir(env['libinstall']):
    os.makedirs(env['libinstall'])
if not os.path.isdir(env['pyinstall']):
    os.makedirs(env['pyinstall'])

env.Append(CPPPATH = [env['incinstall']])
env.Append(LIBPATH = [env['libinstall']])
################# Fill in compiler options if not set above ##################

if env['cxx'] != 'default':
    env['CXX'] = env['cxx']

# default compiler/linker options
cc_flags = '-std=c++11'
cc_optim = ''
cc_debug = ''
omp_flags = ''
omp_ldflags = ''
fatalwarning = '' # switch to turn warnings into errors
sysheaderopt = '' # how to indicate that a header is a system header

# env['CXX'] might be a full path
cc_name=os.path.basename(env['CXX'])

if cc_name == 'icpc':
    # Intel compiler
    # #1478: class "std::auto_ptr<...>" was declared deprecated
    # #1875: offsetof applied to non-POD types is nonstandard (in boost)
    # removed -std=c99 because icpc doesn't like it and we aren't using c anymore
    cc_flags = "-std=c++11 -fPIC -w2 -wd1875 -wd1478 -Wno-unknown-pragmas"
    cc_optim = "-O3 -ftz -fno-alias -inline-level=2 -ipo -xHost"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK -DSLOWSHARECHECK"
    omp_flags = "-qopenmp"
    omp_ldflags = "-qopenmp" # removing -openmp-report (which is deprecated) because the replacement outputs to a file
    fatalwarning = "-Werror"
elif cc_name[:3] == 'g++':
    # GNU C++ on any system
    # note that -ffast-math is not used because it breaks isnan(),
    # see mantis #691
    cc_flags = "-std=c++11 -pedantic -Wall -fPIC -Wno-unknown-pragmas -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
    cc_optim = "-O3"
    #max-vartrack-size: avoid vartrack limit being exceeded with escriptcpp.cpp
    cc_debug = "-g3 -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK -DSLOWSHARECHECK --param=max-vartrack-size=100000000"
    #Removed because new netcdf doesn't seem to like it
    #cc_debug += ' -D_GLIBCXX_DEBUG '
    omp_flags = "-fopenmp"
    omp_ldflags = "-fopenmp"
    fatalwarning = "-Werror"
    sysheaderopt = "-isystem"
elif cc_name == 'cl':
    # Microsoft Visual C on Windows
    cc_flags = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF"
    cc_optim = "/O2 /Op /W3"
    cc_debug = "/Od /RTCcsu /ZI /DBOUNDS_CHECK"
    fatalwarning = "/WX"
elif cc_name == 'icl':
    # Intel C on Windows
    cc_flags = '/EHsc /GR /MD'
    cc_optim = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll'
    cc_debug = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv'
    omp_flags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
    omp_ldflags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'

env['sysheaderopt']=sysheaderopt

# set defaults if not otherwise specified
if env['cc_flags'] == 'default': env['cc_flags'] = cc_flags
if env['cc_optim'] == 'default': env['cc_optim'] = cc_optim
if env['cc_debug'] == 'default': env['cc_debug'] = cc_debug
if env['omp_flags'] == 'default': env['omp_flags'] = omp_flags
if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra'])

if env['nvccflags'] != 'default':
    env['NVCCFLAGS'] = env['nvccflags']
    env['SHNVCCFLAGS'] = env['nvccflags'] + ' -shared'
if env['longindices']:
    env.Append(CPPDEFINES = ['ESYS_INDEXTYPE_LONG'])

# set up the autolazy values
if env['forcelazy'] == 1:
    env.Append(CPPDEFINES=['FAUTOLAZYON'])
elif env['forcelazy'] == 0:
    env.Append(CPPDEFINES=['FAUTOLAZYOFF'])

# set up the collective resolve values
if env['forcecollres'] == 1:
    env.Append(CPPDEFINES=['FRESCOLLECTON'])
elif env['forcecollres'] == 0:
    env.Append(CPPDEFINES=['FRESCOLLECTOFF'])

# allow non-standard C if requested
if env['iknowwhatimdoing']:
    env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])

# Disable OpenMP if no flags provided
if env['openmp'] and env['omp_flags'] == '':
    env['warnings'].append("OpenMP requested but no flags provided - disabling OpenMP!")
    env['openmp'] = False

if env['openmp']:
    env.Append(CCFLAGS = env['omp_flags'])
    if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags'])
else:
    env['omp_flags']=''
    env['omp_ldflags']=''

env['buildvars']['openmp']=int(env['openmp'])

# add debug/non-debug compiler flags
env['buildvars']['debug']=int(env['debug'])
if env['debug']:
    env.Append(CCFLAGS = env['cc_debug'])
else:
    env.Append(CCFLAGS = env['cc_optim'])

# always add cc_flags
env.Append(CCFLAGS = env['cc_flags'])

# add system libraries
env.AppendUnique(LIBS = env['sys_libs'])
# determine svn revision
global_revision=ARGUMENTS.get('SVN_VERSION', None)
if global_revision:
    global_revision = re.sub(':.*', '', global_revision)
    global_revision = re.sub('[^0-9]', '', global_revision)
    if global_revision == '': global_revision='-2'
else:
    # Get the global Subversion revision number for the getVersion() method
    try:
        global_revision = os.popen('svnversion -n .').read()
        global_revision = re.sub(':.*', '', global_revision)
        global_revision = re.sub('[^0-9]', '', global_revision)
        if global_revision == '': global_revision='-2'
    # deliberately best-effort: no svn checkout means unknown revision.
    # Narrowed from a bare 'except:' so Ctrl-C is not swallowed.
    except Exception:
        global_revision = '-1'
env['svn_revision']=global_revision
env['buildvars']['svn_revision']=global_revision
env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])

env['IS_WINDOWS']=IS_WINDOWS
env['IS_OSX']=IS_OSX
###################### Copy required environment vars ########################

# Windows doesn't use LD_LIBRARY_PATH but PATH instead
if IS_WINDOWS:
    LD_LIBRARY_PATH_KEY='PATH'
    env['ENV']['LD_LIBRARY_PATH']=''
else:
    LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'

env['LD_LIBRARY_PATH_KEY']=LD_LIBRARY_PATH_KEY

# the following env variables are exported for the unit tests

for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        env['ENV'][key] = '1'

env_export=env['env_export']
env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','KMP_MONITOR_STACKSIZE','TMPDIR','TEMP','TMP','LD_PRELOAD'])

for key in set(env_export):
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        pass

# pass through all SLURM job-control variables
for key in os.environ.keys():
    if key.startswith("SLURM_"):
        env['ENV'][key] = os.environ[key]

try:
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY])
except KeyError:
    pass

if IS_OSX:
    try:
        env.PrependENVPath('DYLD_LIBRARY_PATH', os.environ['DYLD_LIBRARY_PATH'])
    except KeyError:
        pass

try:
    env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
except KeyError:
    pass
######################## Add some custom builders ############################

# Takes care of prefix and suffix for Python modules:
def build_python_module(env, target, source):
    return env.SharedLibrary(target, source, SHLIBPREFIX='', SHLIBSUFFIX='.so')
env.AddMethod(build_python_module, "PythonModule")

if env['pythoncmd']=='python':
    py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
else:
    py_builder = Builder(action = env['pythoncmd']+" scripts/py_comp.py $SOURCE $TARGET", suffix = '.pyc', src_suffix = '.py', single_source=True)
env.Append(BUILDERS = {'PyCompile' : py_builder})

runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder})

# BUG FIX: 'src_suffic' was a typo in the two builders below; SCons silently
# accepted the unknown keyword so the '.py' source suffix was never applied.
runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder})

runPyExample_builder = Builder(action = runPyExample, suffix = '.passed', src_suffix='.py', single_source=True)
env.Append(BUILDERS = {'RunPyExample' : runPyExample_builder})

epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True)
env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder})
############################ Dependency checks ###############################

######## Compiler
env=checkCompiler(env)

######## Python headers & library (required)
env=checkPython(env)

######## boost & boost-python (required)
env=checkBoost(env)

######## numpy (required) and numpy headers (optional)
env=checkNumpy(env)

######## CppUnit (required for tests)
env=checkCppUnit(env)

######## NVCC version (optional)
if env['cuda'] and 'ripley' in env['domains']:
    env=checkCudaVersion(env)
    env=checkCUDA(env)

######## optional python modules (sympy, pyproj)
env=checkOptionalModules(env)

######## optional dependencies (netCDF, PAPI, MKL, UMFPACK, Lapack, Silo, ...)
env=checkOptionalLibraries(env)

######## PDFLaTeX (for documentation)
env=checkPDFLatex(env)
# set defaults for launchers if not otherwise specified
if env['prelaunch'] == 'default':
    if env['mpi'] == 'INTELMPI' and env['openmp']:
        env['prelaunch'] = "export I_MPI_PIN_DOMAIN=omp"
    elif env['mpi'] == 'OPENMPI':
        # transform comma-separated list to '-x a -x b -x c ...'
        env['prelaunch'] = "EE=$(echo -x %e|sed -e 's/,/ -x /g')"
    elif env['mpi'] == 'MPT':
        env['prelaunch'] = "export MPI_NUM_MEMORY_REGIONS=0"
    elif env['mpi'] == 'MPICH2':
        env['prelaunch'] = "mpdboot -n %n -r ssh -f %f"
    else:
        env['prelaunch'] = ""

if env['launcher'] == 'default':
    if env['mpi'] == 'INTELMPI':
        env['launcher'] = "mpirun -hostfile %f -n %N -ppn %p %b"
    elif env['mpi'] == 'OPENMPI':
        if env['mpi_no_host']:
            hostoptionstr=''
        else:
            hostoptionstr='--host %h'
        # default to OpenMPI version 1.10 or higher
        env['launcher'] = "mpirun ${AGENTOVERRIDE} --gmca mpi_warn_on_fork 0 ${EE} "+hostoptionstr+" --map-by node:pe=%t -bind-to core -np %N %b"
        if 'orte_version' in env:
            major,minor,point = [int(i) for i in env['orte_version'].split('.')]
            if major == 1 and minor < 10:
                # older OpenMPI uses the pre-1.10 binding syntax
                env['launcher'] = "mpirun ${AGENTOVERRIDE} --gmca mpi_warn_on_fork 0 ${EE} "+hostoptionstr+" --cpus-per-rank %t -np %N %b"
    elif env['mpi'] == 'MPT':
        env['launcher'] = "mpirun %h -np %p %b"
    elif env['mpi'] == 'MPICH':
        env['launcher'] = "mpirun -machinefile %f -np %N %b"
    elif env['mpi'] == 'MPICH2':
        env['launcher'] = "mpiexec -genvlist %e -np %N %b"
    else:
        env['launcher'] = "%b"

if env['postlaunch'] == 'default':
    if env['mpi'] == 'MPICH2':
        env['postlaunch'] = "mpdallexit"
    else:
        env['postlaunch'] = ""
# dependency sanity checks

if len(env['domains']) == 0:
    env['warnings'].append("No domains have been built, escript will not be very useful!")

# keep some of our install paths first in the list for the unit tests
env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
env.PrependENVPath('PYTHONPATH', prefix)
env['ENV']['ESCRIPT_ROOT'] = prefix

if not env['verbose']:
    env['CXXCOMSTR'] = "Compiling $TARGET"
    env['SHCXXCOMSTR'] = "Compiling $TARGET"
    env['ARCOMSTR'] = "Linking $TARGET"
    env['LINKCOMSTR'] = "Linking $TARGET"
    env['SHLINKCOMSTR'] = "Linking $TARGET"
    env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
    env['BIBTEXCOMSTR'] = "Generating bibliography $TARGET"
    env['MAKEINDEXCOMSTR'] = "Generating index $TARGET"
    # (a second, identical PDFLATEXCOMSTR assignment was removed here)
    #Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)
########################### Configure the targets ############################

from grouptest import GroupTest
TestGroups=[]

# keep an environment without warnings-as-errors
dodgy_env=env.Clone()

# now add warnings-as-errors flags. This needs to be done after configuration
# because the scons test files have warnings in them
if ((fatalwarning != '') and (env['werror'])):
    env.Append(CCFLAGS = fatalwarning)

Export(
  ['env',
   'dodgy_env',
   'IS_WINDOWS',
   'TestGroups'
  ]
)
target_init = env.Command(os.path.join(env['pyinstall'],'__init__.py'), None, Touch('$TARGET'))
env.Alias('target_init', [target_init])

# escript can't be turned off
build_all_list = ['build_escript']
install_all_list = ['target_init', 'install_escript']

if env['usempi']:
    build_all_list += ['build_pythonMPI', 'build_overlord']
    install_all_list += ['install_pythonMPI', 'install_overlord']

env['buildvars']['paso'] = int(env['paso'])
if env['paso']:
    env.Append(CPPDEFINES = ['ESYS_HAVE_PASO'])
    build_all_list += ['build_paso']
    install_all_list += ['install_paso']

env['buildvars']['trilinos'] = int(env['trilinos'])
if env['trilinos']:
    build_all_list += ['build_trilinoswrap']
    install_all_list += ['install_trilinoswrap']

env['buildvars']['domains'] = ','.join(env['domains'])
for domain in env['domains']:
    env.Append(CPPDEFINES = ['ESYS_HAVE_'+domain.upper()])
    build_all_list += ['build_%s'%domain]
    install_all_list += ['install_%s'%domain]

env['buildvars']['weipa'] = int(env['weipa'])
if env['weipa']:
    env.Append(CPPDEFINES = ['ESYS_HAVE_WEIPA'])
    build_all_list += ['build_weipa']
    install_all_list += ['install_weipa']
    if 'finley' in env['domains'] or 'dudley' in env['domains']:
        build_all_list += ['build_escriptreader']
        install_all_list += ['install_escriptreader']
# hand control to the per-component build scripts, each building into its
# own subdirectory of the variant (build) tree
variant='$BUILD_DIR/$PLATFORM/'
env.SConscript('escriptcore/SConscript', variant_dir=variant+'escriptcore', duplicate=0)
env.SConscript('escript/py_src/SConscript', variant_dir=variant+'escript', duplicate=0)
env.SConscript('pythonMPI/src/SConscript', variant_dir=variant+'pythonMPI', duplicate=0)
env.SConscript('tools/overlord/SConscript', variant_dir=variant+'tools/overlord', duplicate=0)
env.SConscript('paso/SConscript', variant_dir=variant+'paso', duplicate=0)
env.SConscript('trilinoswrap/SConscript', variant_dir=variant+'trilinoswrap', duplicate=0)
env.SConscript('cusplibrary/SConscript')
env.SConscript('dudley/SConscript', variant_dir=variant+'dudley', duplicate=0)
env.SConscript('finley/SConscript', variant_dir=variant+'finley', duplicate=0)
env.SConscript('ripley/SConscript', variant_dir=variant+'ripley', duplicate=0)
env.SConscript('speckley/SConscript', variant_dir=variant+'speckley', duplicate=0)
env.SConscript('weipa/SConscript', variant_dir=variant+'weipa', duplicate=0)
env.SConscript(dirs = ['downunder/py_src'], variant_dir=variant+'downunder', duplicate=0)
env.SConscript(dirs = ['modellib/py_src'], variant_dir=variant+'modellib', duplicate=0)
env.SConscript(dirs = ['pycad/py_src'], variant_dir=variant+'pycad', duplicate=0)
env.SConscript('tools/escriptconvert/SConscript', variant_dir=variant+'tools/escriptconvert', duplicate=0)
env.SConscript('doc/SConscript', variant_dir=variant+'doc', duplicate=0)

env.Alias('build', build_all_list)
install_all_list += ['install_downunder_py']
install_all_list += ['install_modellib_py']
install_all_list += ['install_pycad_py']
install_all_list += [env.Install(Dir('scripts',env['build_dir']), os.path.join('scripts', 'release_sanity.py'))]

if env['osx_dependency_fix']:
    print("Require dependency fix")
    install_all=env.Command('install', install_all_list, 'scripts/moveall.sh')
else:
    install_all=env.Alias('install', install_all_list)

# 'sanity' runs a trivial escript script through the installed launcher
sanity=env.Alias('sanity', env.Command('dummy','',os.path.join(env['prefix'], 'bin', 'run-escript')+' '+os.path.join(env['build_dir'],'scripts', 'release_sanity.py')))
env.Depends('dummy', install_all)
if env['usempi']:
    env.Depends('dummy', ['install_pythonMPI'])

# if all domains are built:
if env['domains'] == all_domains:
    env.AlwaysBuild('sanity')
    env.Default('sanity')
else:
    env.Default('install')
################## Targets to build and run the test suite ###################

if not env['cppunit']:
    test_msg = env.Command('.dummy.', None, '@echo "Cannot run C++ unit tests, CppUnit not found!";exit 1')
    env.Alias('run_tests', test_msg)
env.Alias('build_tests', '')
env.Alias('run_tests', ['install'])
env.Alias('all_tests', ['install', 'run_tests', 'py_tests'])
env.Alias('build_full',['install','build_tests','build_py_tests'])
Requires('py_tests', 'install')

##################### Targets to build the documentation #####################

env.Alias('pdfdocs',['user_pdf', 'install_pdf', 'cookbook_pdf', 'inversion_pdf'])
env.Alias('basedocs', ['pdfdocs','examples_tarfile', 'examples_zipfile', 'api_doxygen'])
env.Alias('docs', ['basedocs', 'sphinxdoc'])
env.Alias('release_prep', ['docs', 'install'])
env.Alias('release_prep_old', ['basedocs', 'api_epydoc', 'install'])

# The test scripts are always generated, this target allows us to
# generate the testscripts without doing a full build
env.Alias('testscripts',[])

if not IS_WINDOWS:
    generateTestScripts(env, TestGroups)
######################## Populate the buildvars file #########################

write_buildvars(env)
# delete buildvars upon cleanup - target_init is default so use it
env.Clean('target_init', File('buildvars', env['libinstall']))

write_launcher(env)

# remove obsolete files left over from an earlier MPI-enabled build
if not env['usempi']:
    Execute(Delete(File(['pythonMPI','pythonMPIredirect'], env['libinstall'])))
    Execute(Delete(File('escript-overlord', env['bininstall'])))
706 |
######################## Summarize our environment ###########################
def print_summary():
    """Print a one-screen configuration summary and the build outcome.

    Registered via atexit so it runs after SCons has finished. Reads the
    module-level names 'env', 'global_revision', 'fatalwarning' and
    'all_domains'; takes no arguments and returns nothing.
    """
    # names of features that turned out disabled; reported at the end
    d_list=[]
    print("")
    print("*** Config Summary (see config.log and <prefix>/lib/buildvars for details) ***")
    print("Escript revision %s"%global_revision)
    print(" Install prefix: %s"%env['prefix'])
    print(" Python: %s (Version %s)"%(env['pythoncmd'],env['python_version']))
    print(" boost: %s (Version %s)"%(env['boost_prefix'],env['boost_version']))
    if env['numpy_h']:
        print(" numpy: YES (with headers)")
    else:
        print(" numpy: YES (without headers)")
    if env['usempi']:
        # 'orte_version' is only present in env for some MPI flavours
        if 'orte_version' in env:
            print(" MPI: %s (Version %s)"%(env['mpi'], env['orte_version']))
        else:
            print(" MPI: YES (flavour: %s)"%env['mpi'])
    else:
        d_list.append('mpi')
    if env['parmetis']:
        print(" ParMETIS: %s (Version %s)"%(env['parmetis_prefix'],env['parmetis_version']))
    else:
        d_list.append('parmetis')
    if env['uselapack']:
        print(" LAPACK: YES (flavour: %s)"%env['lapack'])
    else:
        d_list.append('lapack')
    if env['cuda']:
        print(" CUDA: YES (nvcc: %s)"%env['nvcc_version'])
    else:
        d_list.append('cuda')
    # gmsh can be available as a binary ('s' serial / 'm' MPI-enabled),
    # as a python module, both, or not at all
    if env['gmshpy']:
        gmshpy=" + python module"
    else:
        gmshpy=""
    if env['gmsh']=='m':
        print(" gmsh: YES, MPI-ENABLED"+gmshpy)
    elif env['gmsh']=='s':
        print(" gmsh: YES"+gmshpy)
    else:
        if env['gmshpy']:
            print(" gmsh: python module only")
        else:
            d_list.append('gmsh')
    if env['compressed_files']:
        print(" gzip: YES")
    else:
        d_list.append('gzip')

    # enabled solver libraries and which of them provide a direct solver
    solvers = []
    direct = []
    if env['paso']:
        solvers.append('paso')
        if env['mkl']:
            direct.append('mkl')
        if env['umfpack']:
            direct.append('umfpack')
    else:
        d_list.append('paso')
    if env['trilinos']:
        solvers.append('trilinos')
        direct.append('trilinos')
    else:
        d_list.append('trilinos')

    print(" Solver library: %s"%(", ".join(solvers)))
    if len(direct) > 0:
        print(" Direct solver: YES (%s)"%(", ".join(direct)))
    else:
        print(" Direct solver: NONE")
    print(" domains: %s"%(", ".join(env['domains'])))
    if env['netcdf']==4:
        print(" netcdf: YES (4 + 3)")
    elif env['netcdf']==3:
        print(" netcdf: YES (3)")
    else:
        print(" netcdf: NO")
    # plain boolean features: enabled ones collect in e_list, the rest
    # join the disabled list
    e_list=[]
    for i in ('weipa','debug','openmp','boomeramg','cppunit','gdal','mkl',
              'papi','pyproj','scipy','silo','sympy','umfpack','visit'):
        if env[i]: e_list.append(i)
        else: d_list.append(i)

    # domains that were not built count as disabled features as well
    d_list += set(all_domains).difference(env['domains'])
    for i in e_list:
        print("%16s: YES"%i)
    print("\n DISABLED features: %s"%(" ".join(sorted(d_list))))

    if ((fatalwarning != '') and (env['werror'])):
        print(" Treating warnings as errors")
    else:
        print(" NOT treating warnings as errors")
    print("")
    for w in env['warnings']:
        print("WARNING: %s"%w)
    # GetBuildFailures() is non-empty iff at least one target failed
    if len(GetBuildFailures()):
        print("\nERROR: build stopped due to errors\n")
    else:
        print("\nSUCCESS: build complete\n")
806 |
|
807 |
# Defer the summary until interpreter exit so it appears after all of
# SCons' own build output (including any failure messages).
atexit.register(print_summary)
808 |
|