##############################################################################
#
# Copyright (c) 2003-2014 by University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development 2012-2013 by School of Earth Sciences
# Development from 2014 by Centre for Geoscience Computing (GeoComp)
#
##############################################################################

EnsureSConsVersion(0,98,1)
EnsurePythonVersion(2,5)

import atexit, sys, os, platform, re
from distutils import sysconfig
from dependencies import *
from site_init import *

# Version number to check for in options file. Increment when new features are
# added or existing options changed.
REQUIRED_OPTS_VERSION=201
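# Purely illustrative sketch of how this number is meant to be used (the real
# check is part of the option processing): an options file declares
# escript_opts_version and anything older than REQUIRED_OPTS_VERSION should be
# rejected, e.g.
#   if int(env.get('escript_opts_version', -1)) < REQUIRED_OPTS_VERSION:
#       print("Options file too old, please update to version %d" % REQUIRED_OPTS_VERSION)
#       Exit(1)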

# MS Windows support, many thanks to PH
IS_WINDOWS = (os.name == 'nt')

IS_OSX = (os.uname()[0] == 'Darwin')

########################## Determine options file ############################
# 1. command line
# 2. scons/<hostname>_options.py

if not os.path.isfile(options_file):
    print("\nWARNING:\nOptions file %s" % options_file)
    print("not found! Default options will be used which is most likely suboptimal.")
    print("We recommend that you copy one of the TEMPLATE files in the scons/")
    print("subdirectory and customize it to your needs.\n")
    options_file = None
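# A minimal options file might look like the following (illustrative only --
# the TEMPLATE files in scons/ are the authoritative starting point; every
# variable named here is declared in the build options section below):
#
#   escript_opts_version = 201
#   boost_prefix = '/usr/local'
#   boost_libs = ['boost_python-mt']
#   mpi = 'OPENMPI'
#   umfpack = True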
############################### Build options ################################

default_prefix='/usr'
mpi_flavours=('no', 'none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
lapack_flavours=('none', 'clapack', 'mkl')

vars = Variables(options_file, ARGUMENTS)
vars.AddVariables(
  PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate),
  BoolVariable('verbose', 'Output full compile/link lines', False),
  # Compiler/Linker options
  ('cxx', 'Path to C++ compiler', 'default'),
  ('cc_flags', 'Base C++ compiler flags', 'default'),
  ('cc_optim', 'Additional C++ flags for a non-debug build', 'default'),
  ('cc_debug', 'Additional C++ flags for a debug build', 'default'),
  ('cxx_extra', 'Extra C++ compiler flags', ''),
  ('ld_extra', 'Extra linker flags', ''),
  BoolVariable('werror','Treat compiler warnings as errors', True),
  # Mandatory libraries
  ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
  ('boost_libs', 'Boost libraries to link with', ['boost_python-mt']),
  # Mandatory for tests
  ('cppunit_prefix', 'Prefix/Paths of CppUnit installation', default_prefix),
  ('cppunit_libs', 'CppUnit libraries to link with', ['cppunit']),
  # Optional libraries and options
  EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
  ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
  BoolVariable('umfpack', 'Enable UMFPACK', False),
  ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
  ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
  BoolVariable('boomeramg', 'Enable BoomerAMG', False),
  ('boomeramg_prefix', 'Prefix/Paths to BoomerAMG installation', default_prefix),
  ('boomeramg_libs', 'BoomerAMG libraries to link with', ['boomeramg']),
  EnumVariable('lapack', 'Set LAPACK flavour', 'none', allowed_values=lapack_flavours),
  ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
  ('lapack_libs', 'LAPACK libraries to link with', []),
  BoolVariable('visit', 'Enable the VisIt simulation interface', False),
  ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
  ('visit_libs', 'VisIt libraries to link with', ['simV2']),
  BoolVariable('vsl_random', 'Use VSL from intel for random data', False),
  # Advanced settings
  #dudley_assemble_flags = -funroll-loops to actually do something
  EnumVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
  # finer control over library building, intel aggressive global optimisation
  # works with dynamic libraries on windows.
  ('build_shared', 'Build dynamic libraries only', False),
  ('sys_libs', 'Extra libraries to link with', []),
  ('escript_opts_version', 'Version of options file (do not specify on command line)'),
  ('SVN_VERSION', 'Do not use from options file', -2),
  ('pythoncmd', 'which python to compile with', 'python'),
  ('usepython3', 'Is this a python3 build? (experimental)', False),
  ('pythonlibname', 'Name of the python library to link. (This is found automatically for python2.X.)', ''),
  ('pythonlibpath', 'Path to the python library. (You should not need to set this unless your python has moved)', ''),
  ('pythonincpath', 'Path to python include files. (You should not need to set this unless your python has moved)', ''),
  BoolVariable('BADPYTHONMACROS', 'Extra \#include to get around a python bug.', True),
)

##################### Create environment and help text #######################
# in default locations.
env = Environment(tools = ['default'], options = vars,
                  ENV = {'PATH': os.environ['PATH']})

# set the vars for clang
def mkclang(env):
    env['CXX']='clang++'

if env['tools_names'] != 'default':
    zz=env['tools_names']
    if 'clang' in zz:
        zz.remove('clang')
        zz.insert(0, mkclang)
    env = Environment(tools = ['default'] + env['tools_names'], options = vars,
                      ENV = {'PATH' : os.environ['PATH']})
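# Note: the tools_names option lets an options file request specific SCons
# tools; 'clang' is special-cased above by prepending the mkclang hook, which
# simply switches the C++ compiler to clang++ before the remaining tools run.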
print("Unknown option '%s'" % k)
Exit(1)

# create dictionary which will be populated with info for buildvars file
env['buildvars']={}
# create list which will be populated with warnings if there are any
env['warnings']=[]

#################### Make sure install directories exist #####################

env['BUILD_DIR']=Dir(env['build_dir']).abspath
prefix=Dir(env['prefix']).abspath
env['buildvars']['prefix']=prefix
env['incinstall'] = os.path.join(prefix, 'include')
env['bininstall'] = os.path.join(prefix, 'bin')
env['libinstall'] = os.path.join(prefix, 'lib')

################# Fill in compiler options if not set above ##################

if env['cxx'] != 'default': env['CXX']=env['cxx']

# version >=9 of intel C++ compiler requires use of icpc to link in C++
sysheaderopt = '' # how to indicate that a header is a system header

# env['CXX'] might be a full path
cc_name=os.path.basename(env['CXX'])

if cc_name == 'icpc':
    # Intel compiler
    # #1875: offsetof applied to non-POD types is nonstandard (in boost)
    # removed -std=c99 because icpc doesn't like it and we aren't using c anymore
    cc_flags = "-fPIC -w2 -wd1875 -Wno-unknown-pragmas -DBLOCKTIMER -DCORE_ID1"
    cc_optim = "-O3 -ftz -fno-alias -ipo -xHost"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_flags = "-openmp"
    omp_ldflags = "-openmp -openmp_report=1"
    fatalwarning = "-Werror"
elif cc_name[:3] == 'g++':
    # GNU C on any system
    # note that -ffast-math is not used because it breaks isnan(),
    # see mantis #691
    cc_flags = "-pedantic -Wall -fPIC -Wno-unknown-pragmas -DBLOCKTIMER -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
    cc_optim = "-O3"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_flags = "-fopenmp"

omp_flags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
omp_ldflags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'

env['sysheaderopt']=sysheaderopt

# set defaults if not otherwise specified
if env['cc_flags'] == 'default': env['cc_flags'] = cc_flags
if env['cc_optim'] == 'default': env['cc_optim'] = cc_optim
if env['cc_debug'] == 'default': env['cc_debug'] = cc_debug
if env['omp_flags'] == 'default': env['omp_flags'] = omp_flags
if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra'])

if env['BADPYTHONMACROS']: env.Append(CXXFLAGS = ' -DBADPYTHONMACROS')

if env['usepython3']:
    env.Append(CPPDEFINES=['ESPYTHON3'])

# set up the autolazy values
if env['forcelazy'] == 'on':
    env.Append(CPPDEFINES=['FAUTOLAZYON'])

# Disable OpenMP if no flags provided
if env['openmp'] and env['omp_flags'] == '':
    env['warnings'].append("OpenMP requested but no flags provided - disabling OpenMP!")
    env['openmp'] = False

if env['openmp']:
    env.Append(CCFLAGS = env['omp_flags'])
    env.Append(LINKFLAGS = env['omp_ldflags'])
else:
    env['omp_flags']=''
    env['omp_ldflags']=''

env['buildvars']['openmp']=int(env['openmp'])

# add debug/non-debug compiler flags
env['buildvars']['debug']=int(env['debug'])
if env['debug']:
    env.Append(CCFLAGS = env['cc_debug'])
else:
    env.Append(CCFLAGS = env['cc_optim'])

# add system libraries
env.AppendUnique(LIBS = env['sys_libs'])

# determine svn revision
global_revision=ARGUMENTS.get('SVN_VERSION', None)
if global_revision:
    global_revision = re.sub(':.*', '', global_revision)
    global_revision = re.sub('[^0-9]', '', global_revision)
    if global_revision == '': global_revision='-2'
else:
    # Get the global Subversion revision number for the getVersion() method
    try:
        global_revision = os.popen('svnversion -n .').read()
        global_revision = re.sub(':.*', '', global_revision)
        global_revision = re.sub('[^0-9]', '', global_revision)
        if global_revision == '': global_revision='-2'
    except:
        global_revision = '-1'
env['svn_revision']=global_revision
env['buildvars']['svn_revision']=global_revision
env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])
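# For reference: 'svnversion -n .' prints something like "4321:4370M" for a
# mixed/modified working copy; the substitutions above keep only the leading
# digits ("4321"), fall back to -2 when nothing numeric remains (an exported
# tree prints "exported"), and the except branch records -1 if the lookup
# throws.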
if IS_WINDOWS:
    if not env['build_shared']:
        env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
        env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])

# VSL random numbers
env['buildvars']['vsl_random']=int(env['vsl_random'])
if env['vsl_random']:
    env.Append(CPPDEFINES = ['MKLRANDOM'])

env['IS_WINDOWS']=IS_WINDOWS

###################### Copy required environment vars ########################

# Windows doesn't use LD_LIBRARY_PATH but PATH instead
if IS_WINDOWS:
    LD_LIBRARY_PATH_KEY='PATH'
else:
    LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'

env['LD_LIBRARY_PATH_KEY']=LD_LIBRARY_PATH_KEY

# the following env variables are exported for the unit tests

for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        env['ENV'][key] = 1

env_export=env['env_export']
env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','KMP_MONITOR_STACKSIZE','TMPDIR','TEMP','TMP'])

for key in set(env_export):
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        pass

if IS_OSX:
    try:
        env.PrependENVPath('DYLD_LIBRARY_PATH', os.environ['DYLD_LIBRARY_PATH'])
    except KeyError:
        pass

# these shouldn't be needed
#for key in 'C_INCLUDE_PATH','CPLUS_INCLUDE_PATH','LIBRARY_PATH':
#    try:

######################## Add some custom builders ############################

if env['pythoncmd']=='python':
    py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
else:
    py_builder = Builder(action = env['pythoncmd']+" scripts/py_comp.py $SOURCE $TARGET", suffix = '.pyc', src_suffix = '.py', single_source=True)
env.Append(BUILDERS = {'PyCompile' : py_builder});
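# Illustrative use of the PyCompile builder registered above (the actual calls
# live in the SConscript files): env.PyCompile('py_src/foo.py') would emit
# py_src/foo.pyc, either via the build_py helper imported above or via the
# configured pythoncmd running scripts/py_comp.py.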
runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)

############################ Dependency checks ###############################

######## Compiler
env=checkCompiler(env)

######## Python headers & library (required)
env=checkPython(env)

######## boost & boost-python (required)
env=checkBoost(env)

######## numpy (required) and numpy headers (optional)
env=checkNumpy(env)

######## CppUnit (required for tests)
env=checkCppUnit(env)

######## optional python modules (sympy, pyproj)
env=checkOptionalModules(env)

######## optional dependencies (netCDF, PAPI, MKL, UMFPACK, Lapack, Silo, ...)
env=checkOptionalLibraries(env)

######## PDFLaTeX (for documentation)
env=checkPDFLatex(env)
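# The check* helpers above come from the dependencies module imported at the
# top. Broadly -- see that module for the authoritative logic -- they locate
# each required header/library (findLibWithHeader), append the include/library
# paths and libraries to env, extend the wrapper's (DY)LD_LIBRARY_PATH, and
# set feature macros such as USE_NETCDF, MKL, UMFPACK, USE_LAPACK or ESYS_MPI
# for the optional packages they enable.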
|

# keep some of our install paths first in the list for the unit tests
env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
env['ENV']['ESCRIPT_ROOT'] = prefix

if not env['verbose']:
    env['CXXCOMSTR'] = "Compiling $TARGET"
    env['SHCXXCOMSTR'] = "Compiling $TARGET"
    env['ARCOMSTR'] = "Linking $TARGET"
    env['LINKCOMSTR'] = "Linking $TARGET"
    env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
#Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)

####################### Configure the subdirectories #########################

# remove obsolete files
if not env['usempi']:
    Execute(Delete(os.path.join(env['libinstall'], 'pythonMPI')))
    Execute(Delete(os.path.join(env['libinstall'], 'pythonMPIredirect')))

from grouptest import *
TestGroups=[]

# keep an environment without warnings-as-errors
]
)

env.SConscript(dirs = ['tools/escriptconvert'], variant_dir='$BUILD_DIR/$PLATFORM/tools/escriptconvert', duplicate=0)
env.SConscript(dirs = ['paso/src'], variant_dir='$BUILD_DIR/$PLATFORM/paso', duplicate=0)
env.SConscript(dirs = ['weipa/src'], variant_dir='$BUILD_DIR/$PLATFORM/weipa', duplicate=0)
env.SConscript(dirs = ['escript/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/escript', duplicate=0)

#This will pull in the escriptcore/py_src and escriptcore/test
env.SConscript(dirs = ['escriptcore/src'], variant_dir='$BUILD_DIR/$PLATFORM/escriptcore', duplicate=0)
#env.SConscript(dirs = ['escript/test'], variant_dir='$BUILD_DIR/$PLATFORM/escript/test', duplicate=0)
env.SConscript(dirs = ['esysUtils/src'], variant_dir='$BUILD_DIR/$PLATFORM/esysUtils', duplicate=0)
env.SConscript(dirs = ['pasowrap/src'], variant_dir='$BUILD_DIR/$PLATFORM/pasowrap', duplicate=0)
env.SConscript(dirs = ['dudley/src'], variant_dir='$BUILD_DIR/$PLATFORM/dudley', duplicate=0)
env.SConscript(dirs = ['finley/src'], variant_dir='$BUILD_DIR/$PLATFORM/finley', duplicate=0)
env.SConscript(dirs = ['ripley/src'], variant_dir='$BUILD_DIR/$PLATFORM/ripley', duplicate=0)
env.SConscript(dirs = ['downunder/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/downunder', duplicate=0)
env.SConscript(dirs = ['modellib/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/modellib', duplicate=0)
env.SConscript(dirs = ['pycad/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pycad', duplicate=0)
env.SConscript(dirs = ['pythonMPI/src'], variant_dir='$BUILD_DIR/$PLATFORM/pythonMPI', duplicate=0)
env.SConscript(dirs = ['doc'], variant_dir='$BUILD_DIR/$PLATFORM/doc', duplicate=0)
env.SConscript(dirs = ['paso/profiling'], variant_dir='$BUILD_DIR/$PLATFORM/paso/profiling', duplicate=0)

######################## Populate the buildvars file #########################

write_buildvars(env)
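# write_buildvars() (one of the helpers imported above) dumps env['buildvars']
# -- the svn revision, prefix, compiler, python and per-library information
# recorded earlier -- to <libinstall>/buildvars so an installation can be
# identified later.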
|
buildvars.write("svn_revision="+str(global_revision)+"\n") |
|
|
buildvars.write("prefix="+prefix+"\n") |
|
|
buildvars.write("cc="+env['CC']+"\n") |
|
|
buildvars.write("cxx="+env['CXX']+"\n") |
|
|
buildvars.write("python="+sys.executable+"\n") |
|
|
buildvars.write("python_version="+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+"\n") |
|
|
buildvars.write("boost_inc_path="+boost_inc_path+"\n") |
|
|
buildvars.write("boost_lib_path="+boost_lib_path+"\n") |
|
|
buildvars.write("boost_version="+boostversion+"\n") |
|
|
buildvars.write("debug=%d\n"%int(env['debug'])) |
|
|
buildvars.write("openmp=%d\n"%int(env['openmp'])) |
|
|
buildvars.write("mpi=%s\n"%env['mpi']) |
|
|
buildvars.write("mpi_inc_path=%s\n"%mpi_inc_path) |
|
|
buildvars.write("mpi_lib_path=%s\n"%mpi_lib_path) |
|
|
buildvars.write("lapack=%s\n"%env['lapack']) |
|
|
buildvars.write("pyvisi=%d\n"%env['pyvisi']) |
|
|
buildvars.write("vsl_random=%d"%int(env['vsl_random'])) |
|
|
for i in 'netcdf','parmetis','papi','mkl','umfpack','silo','visit': |
|
|
buildvars.write("%s=%d\n"%(i, int(env[i]))) |
|
|
if env[i]: |
|
|
buildvars.write("%s_inc_path=%s\n"%(i, eval(i+'_inc_path'))) |
|
|
buildvars.write("%s_lib_path=%s\n"%(i, eval(i+'_lib_path'))) |
|
|
buildvars.close() |
|

################### Targets to build and install libraries ###################

target_init = env.Command(os.path.join(env['pyinstall'],'__init__.py'), None, Touch('$TARGET'))
env.Alias('target_init', [target_init])
# delete buildvars upon cleanup
env.Clean('target_init', os.path.join(env['libinstall'], 'buildvars'))

# The headers have to be installed prior to build in order to satisfy
# #include <paso/Common.h>
env.Alias('install_paso', ['build_paso', 'install_paso_lib'])

env.Alias('build_escript', ['install_escript_headers', 'build_escript_lib', 'build_escriptcpp_lib'])
env.Alias('install_escript', ['build_escript', 'install_escript_lib', 'install_escriptcpp_lib', 'install_escriptcore_py', 'install_escript_py'])

env.Alias('build_pasowrap', ['install_pasowrap_headers', 'build_pasowrap_lib', 'build_pasowrapcpp_lib'])
env.Alias('install_pasowrap', ['build_pasowrap', 'install_pasowrap_lib', 'install_pasowrapcpp_lib', 'install_pasowrap_py'])

env.Alias('build_dudley', ['install_dudley_headers', 'build_dudley_lib', 'build_dudleycpp_lib'])
env.Alias('install_dudley', ['build_dudley', 'install_dudley_lib', 'install_dudleycpp_lib', 'install_dudley_py'])

env.Alias('build_finley', ['install_finley_headers', 'build_finley_lib', 'build_finleycpp_lib'])
env.Alias('install_finley', ['build_finley', 'install_finley_lib', 'install_finleycpp_lib', 'install_finley_py'])

env.Alias('build_ripley', ['install_ripley_headers', 'build_ripley_lib', 'build_ripleycpp_lib'])
env.Alias('install_ripley', ['build_ripley', 'install_ripley_lib', 'install_ripleycpp_lib', 'install_ripley_py'])

env.Alias('build_weipa', ['install_weipa_headers', 'build_weipa_lib', 'build_weipacpp_lib'])
env.Alias('install_weipa', ['build_weipa', 'install_weipa_lib', 'install_weipacpp_lib', 'install_weipa_py'])

build_all_list += ['build_esysUtils']
build_all_list += ['build_paso']
build_all_list += ['build_escript']
build_all_list += ['build_pasowrap']
build_all_list += ['build_dudley']
build_all_list += ['build_finley']
build_all_list += ['build_ripley']
build_all_list += ['build_weipa']
if not IS_WINDOWS: build_all_list += ['build_escriptreader']
if env['usempi']: build_all_list += ['build_pythonMPI']

install_all_list += ['install_esysUtils']
install_all_list += ['install_paso']
install_all_list += ['install_escript']
install_all_list += ['install_pasowrap']
install_all_list += ['install_dudley']
install_all_list += ['install_finley']
install_all_list += ['install_ripley']
install_all_list += ['install_weipa']
if not IS_WINDOWS: install_all_list += ['install_escriptreader']
install_all_list += ['install_downunder_py']
install_all_list += ['install_modellib_py']
install_all_list += ['install_pycad_py']
if env['usempi']: install_all_list += ['install_pythonMPI']

################## Targets to build and run the test suite ###################

if not env['cppunit']:
    test_msg = env.Command('.dummy.', None, '@echo "Cannot run C++ unit tests, CppUnit not found!";exit 1')
    env.Alias('run_tests', test_msg)
    env.Alias('build_tests', '')
env.Alias('run_tests', ['install_all'])
env.Alias('all_tests', ['install_all', 'run_tests', 'py_tests'])
env.Alias('build_full',['install_all','build_tests','build_py_tests'])
env.Alias('build_PasoTests','$BUILD_DIR/$PLATFORM/paso/profiling/PasoTests')
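# Typical invocations (illustrative): 'scons install_all' builds and installs
# every enabled module, 'scons run_tests' drives the C++ unit tests,
# 'scons py_tests' the python test suite, and 'scons docs' the documentation.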
##################### Targets to build the documentation #####################

env.Alias('pdfdocs',['user_pdf', 'install_pdf', 'cookbook_pdf', 'inversion_pdf'])
env.Alias('basedocs', ['pdfdocs','examples_tarfile', 'examples_zipfile', 'api_doxygen'])
env.Alias('docs', ['basedocs', 'sphinxdoc'])
env.Alias('release_prep', ['docs', 'install_all'])
env.Alias('release_prep_old', ['basedocs', 'api_epydoc', 'install_all'])

# The test scripts are always generated, this target allows us to
# generate the testscripts without doing a full build
env.Alias('testscripts',[])

if not IS_WINDOWS:
    generateTestScripts(env, TestGroups)
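    # generateTestScripts() (presumably provided alongside GroupTest in
    # grouptest.py) is expected to write the utest.sh driver for the
    # registered TestGroups and mark it executable.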
######################## Summarize our environment ###########################
def print_summary():
    print("")
    print("*** Config Summary (see config.log and <prefix>/lib/buildvars for details) ***")
    print("Escript/Finley revision %s"%global_revision)
    print(" Install prefix: %s"%env['prefix'])
    print(" Python: %s"%sysconfig.PREFIX)
    print(" boost: %s"%env['boost_prefix'])
    if env['numpy_h']:
        print(" numpy: YES (with headers)")
    else:
        print(" numpy: YES (without headers)")
    if env['usempi']:
        print(" MPI: YES (flavour: %s)"%env['mpi'])
    else:
        print(" MPI: DISABLED")
    if env['uselapack']:
        print(" LAPACK: YES (flavour: %s)"%env['lapack'])
    else:
        print(" LAPACK: DISABLED")
    d_list=[]
    e_list=[]
    for i in 'debug','openmp','boomeramg','gdal','mkl','netcdf','papi','parmetis','pyproj','scipy','silo','sympy','umfpack','visit','vsl_random':
        if env[i]: e_list.append(i)
        else: d_list.append(i)
    for i in e_list:
        print("%16s: YES"%i)
    for i in d_list:
        print("%16s: DISABLED"%i)
    if env['cppunit']:
        print(" CppUnit: FOUND")
    else:
        print(" CppUnit: NOT FOUND")
    if env['gmsh']=='m':
        print(" gmsh: FOUND, MPI-ENABLED")
    elif env['gmsh']=='s':
        print(" gmsh: FOUND")
    else:
        print(" gmsh: NOT FOUND")

    if ((fatalwarning != '') and (env['werror'])):
        print(" Treating warnings as errors")
    else:
        print(" NOT treating warnings as errors")
    print("")
    for w in env['warnings']:
        print("WARNING: %s"%w)

atexit.register(print_summary)
