1 |
# Copyright 2006 by ACcESS MNRF |
######################################################## |
2 |
# |
# |
3 |
# http://www.access.edu.au |
# Copyright (c) 2003-2010 by University of Queensland |
4 |
# Primary Business: Queensland, Australia |
# Earth Systems Science Computational Center (ESSCC) |
5 |
# Licensed under the Open Software License version 3.0 |
# http://www.uq.edu.au/esscc |
6 |
# http://www.opensource.org/licenses/osl-3.0.php |
# |
7 |
|
# Primary Business: Queensland, Australia |
8 |
# top-level Scons configuration file for all esys13 modules |
# Licensed under the Open Software License version 3.0 |
9 |
# Begin initialisation Section |
# http://www.opensource.org/licenses/osl-3.0.php |
10 |
# all of this section just intialises default environments and helper |
# |
11 |
# scripts. You shouldn't need to modify this section. |
######################################################## |
12 |
EnsureSConsVersion(0,96,91) |
|
13 |
EnsurePythonVersion(2,3) |
EnsureSConsVersion(0,98,1) |
14 |
|
EnsurePythonVersion(2,5) |
15 |
|
|
16 |
#=============================================================== |
import sys, os, platform, re |
17 |
# import tools: |
from distutils import sysconfig |
18 |
import glob |
from site_init import * |
19 |
import sys, os, re |
import subprocess |
20 |
# Add our extensions |
from subprocess import PIPE, Popen |
21 |
if sys.path.count('scons')==0: sys.path.append('scons') |
|
22 |
import scons_extensions |
# Version number to check for in options file. Increment when new features are |
23 |
|
# added or existing options changed. |
24 |
# We may also need to know where python's site-packages subdirectory lives |
REQUIRED_OPTS_VERSION=201 |
25 |
python_version = 'python%s.%s'%(sys.version_info[0],sys.version_info[1]) |
|
26 |
|
# MS Windows support, many thanks to PH |
27 |
#=============================================================== |
IS_WINDOWS = (os.name == 'nt') |
28 |
|
|
29 |
tools_prefix="/usr" |
########################## Determine options file ############################ |
30 |
|
# 1. command line |
31 |
#============================================================================================== |
# 2. scons/<hostname>_options.py |
32 |
# |
# 3. name as part of a cluster |
33 |
# get the installation prefix |
options_file=ARGUMENTS.get('options_file', None) |
34 |
# |
if not options_file: |
35 |
prefix = ARGUMENTS.get('prefix', sys.prefix ) |
ext_dir = os.path.join(os.getcwd(), 'scons') |
36 |
|
hostname = platform.node().split('.')[0] |
37 |
# We may also need to know where python's site-packages subdirectory lives |
for name in hostname, effectiveName(hostname): |
38 |
python_version = 'python%s.%s'%(sys.version_info[0],sys.version_info[1]) |
mangledhostname = re.sub('[^0-9a-zA-Z]', '_', hostname) |
39 |
# Install as a standard python package in /usr/lib64 if available, else in /usr/lib |
options_file = os.path.join(ext_dir, mangledhostname+'_options.py') |
40 |
if os.path.isdir( prefix+"/lib64/"+python_version+"/site-packages"): |
if os.path.isfile(options_file): break |
41 |
sys_dir_packages = prefix+"/lib64/"+python_version+"/site-packages/esys" |
|
42 |
sys_dir_libraries = prefix+"/lib64" |
if not os.path.isfile(options_file): |
43 |
else: |
print("\nWARNING:\nOptions file %s" % options_file) |
44 |
sys_dir_packages = prefix+"/lib/"+python_version+"/site-packages/esys" |
print("not found! Default options will be used which is most likely suboptimal.") |
45 |
sys_dir_libraries = prefix+"/lib" |
print("It is recommended that you copy one of the TEMPLATE files in the scons/") |
46 |
|
print("subdirectory and customize it to your needs.\n") |
47 |
sys_dir_examples = prefix+"/share/doc/esys" |
options_file = None |
48 |
|
|
49 |
source_root = Dir('#.').abspath |
############################### Build options ################################ |
50 |
|
|
51 |
dir_packages = os.path.join(source_root,"esys") |
default_prefix='/usr' |
52 |
dir_examples = os.path.join(source_root,"examples") |
mpi_flavours=('no', 'none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI') |
53 |
dir_libraries = os.path.join(source_root,"lib") |
lapack_flavours=('none', 'clapack', 'mkl') |
54 |
|
|
55 |
print "Source root is : ",source_root |
vars = Variables(options_file, ARGUMENTS) |
56 |
print " Default packages local installation: ", dir_packages |
vars.AddVariables( |
57 |
print " Default library local installation ", dir_libraries |
PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile), |
58 |
print " Default example local installation: ", dir_examples |
PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate), |
59 |
print "Install prefix is: ", prefix |
PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate), |
60 |
print " Default packages system installation: ", sys_dir_packages |
BoolVariable('verbose', 'Output full compile/link lines', False), |
61 |
print " Default library system installation ", sys_dir_libraries |
# Compiler/Linker options |
62 |
print " Default example system installation: ", sys_dir_examples |
('cc', 'Path to C compiler', 'default'), |
63 |
|
('cxx', 'Path to C++ compiler', 'default'), |
64 |
#============================================================================================== |
('cc_flags', 'Base C/C++ compiler flags', 'default'), |
65 |
|
('cc_optim', 'Additional C/C++ flags for a non-debug build', 'default'), |
66 |
# Default options and options help text |
('cc_debug', 'Additional C/C++ flags for a debug build', 'default'), |
67 |
# These are defaults and can be overridden using command line arguments or an options file. |
('cc_extra', 'Extra C compiler flags', ''), |
68 |
# if the options_file or ARGUMENTS do not exist then the ones listed as default here are used |
('cxx_extra', 'Extra C++ compiler flags', ''), |
69 |
# DO NOT CHANGE THEM HERE |
('ld_extra', 'Extra linker flags', ''), |
70 |
# Where to install? |
BoolVariable('werror','Treat compiler warnings as errors', True), |
71 |
#============================================================================================== |
BoolVariable('debug', 'Compile with debug flags', False), |
72 |
# |
BoolVariable('openmp', 'Compile parallel version using OpenMP', False), |
73 |
# get the options file if present: |
('omp_flags', 'OpenMP compiler flags', 'default'), |
74 |
# |
('omp_ldflags', 'OpenMP linker flags', 'default'), |
75 |
options_file = ARGUMENTS.get('options_file','') |
# Mandatory libraries |
76 |
|
('boost_prefix', 'Prefix/Paths of boost installation', default_prefix), |
77 |
if not os.path.isfile(options_file) : |
('boost_libs', 'Boost libraries to link with', ['boost_python-mt']), |
78 |
options_file = False |
# Mandatory for tests |
79 |
|
('cppunit_prefix', 'Prefix/Paths of CppUnit installation', default_prefix), |
80 |
if not options_file : |
('cppunit_libs', 'CppUnit libraries to link with', ['cppunit']), |
81 |
import socket |
# Optional libraries and options |
82 |
hostname = re.sub("[^0-9a-zA-Z]", "_", socket.gethostname().split('.')[0]) |
EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours), |
83 |
tmp = os.path.join("scons",hostname+"_options.py") |
('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix), |
84 |
|
('mpi_libs', 'MPI shared libraries to link with', ['mpi']), |
85 |
if os.path.isfile(tmp) : |
BoolVariable('netcdf', 'Enable netCDF file support', False), |
86 |
options_file = tmp |
('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix), |
87 |
|
('netcdf_libs', 'netCDF libraries to link with', ['netcdf_c++', 'netcdf']), |
88 |
IS_WINDOWS_PLATFORM = (os.name== "nt") |
BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False), |
89 |
|
('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix), |
90 |
# If you're not going to tell me then...... |
('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']), |
91 |
# FIXME: add one for the altix too. |
BoolVariable('papi', 'Enable PAPI', False), |
92 |
if not options_file : |
('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix), |
93 |
if IS_WINDOWS_PLATFORM : |
('papi_libs', 'PAPI libraries to link with', ['papi']), |
94 |
options_file = "scons/windows_mscv71_options.py" |
BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False), |
95 |
else: |
BoolVariable('mkl', 'Enable the Math Kernel Library', False), |
96 |
options_file = "scons/linux_gcc_eg_options.py" |
('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix), |
97 |
|
('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']), |
98 |
# and load it |
BoolVariable('umfpack', 'Enable UMFPACK', False), |
99 |
opts = Options(options_file, ARGUMENTS) |
('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix), |
100 |
#================================================================ |
('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']), |
101 |
# |
BoolVariable('boomeramg', 'Enable BoomerAMG', False), |
102 |
# check if UMFPACK is installed on the system: |
('boomeramg_prefix', 'Prefix/Paths to BoomerAMG installation', default_prefix), |
103 |
# |
('boomeramg_libs', 'BoomerAMG libraries to link with', ['boomeramg']), |
104 |
uf_root=None |
EnumVariable('lapack', 'Set LAPACK flavour', 'none', allowed_values=lapack_flavours), |
105 |
for i in [ 'UMFPACK', 'umfpack', 'ufsparse', 'UFSPARSE']: |
('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix), |
106 |
if os.path.isdir(os.path.join(tools_prefix,'include',i)): |
('lapack_libs', 'LAPACK libraries to link with', []), |
107 |
uf_root=i |
BoolVariable('silo', 'Enable the Silo file format in weipa', False), |
108 |
print i," is used form ",tools_prefix |
('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix), |
109 |
break |
('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']), |
110 |
if not uf_root==None: |
BoolVariable('visit', 'Enable the VisIt simulation interface', False), |
111 |
umf_path_default=os.path.join(tools_prefix,'include',uf_root) |
('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix), |
112 |
umf_lib_path_default=os.path.join(tools_prefix,'lib') |
('visit_libs', 'VisIt libraries to link with', ['simV2']), |
113 |
umf_libs_default=['umfpack'] |
BoolVariable('pyvisi', 'Enable pyvisi (deprecated, requires VTK module)', False), |
114 |
amd_path_default=os.path.join(tools_prefix,'include',uf_root) |
BoolVariable('vsl_random', 'Use VSL from intel for random data', False), |
115 |
amd_lib_path_default=os.path.join(tools_prefix,'lib') |
# Advanced settings |
116 |
amd_libs_default=['amd'] |
#dudley_assemble_flags = -funroll-loops to actually do something |
117 |
ufc_path_default=os.path.join(tools_prefix,'include',uf_root) |
('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''), |
118 |
else: |
# To enable passing function pointers through python |
119 |
umf_path_default=None |
BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False), |
120 |
umf_lib_path_default=None |
# An option for specifying the compiler tools (see windows branch) |
121 |
umf_libs_default=None |
('tools_names', 'Compiler tools to use', ['default']), |
122 |
amd_path_default=None |
('env_export', 'Environment variables to be passed to tools',[]), |
123 |
amd_lib_path_default=None |
EnumVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')), |
124 |
amd_libs_default=None |
EnumVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')), |
125 |
ufc_path_default=None |
# finer control over library building, intel aggressive global optimisation |
126 |
# |
# works with dynamic libraries on windows. |
127 |
#========================================================================== |
('build_shared', 'Build dynamic libraries only', False), |
128 |
# |
('sys_libs', 'Extra libraries to link with', []), |
129 |
# python installation: |
('escript_opts_version', 'Version of options file (do not specify on command line)'), |
130 |
# |
('SVN_VERSION', 'Do not use from options file', -2), |
131 |
python_path_default=os.path.join(tools_prefix,'include','python%s.%s'%(sys.version_info[0],sys.version_info[1])) |
('pythoncmd', 'which python to compile with','python'), |
132 |
python_lib_path_default=os.path.join(tools_prefix,'lib') |
('usepython3', 'Is this a python3 build? (experimental)', False), |
133 |
python_lib_default="python%s.%s"%(sys.version_info[0],sys.version_info[1]) |
('pythonlibname', 'Name of the python library to link. (This is found automatically for python2.X.)', ''), |
134 |
|
) |
|
#========================================================================== |
|
|
# |
|
|
# boost installation: |
|
|
# |
|
|
boost_path_default=os.path.join(tools_prefix,'include') |
|
|
boost_lib_path_default=os.path.join(tools_prefix,'lib') |
|
|
boost_lib_default=['boost_python'] |
|
135 |
|
|
136 |
#========================================================================== |
##################### Create environment and help text ####################### |
|
# |
|
|
# check if netCDF is installed on the system: |
|
|
# |
|
|
netCDF_path_default=os.path.join(tools_prefix,'include','netcdf-3') |
|
|
netCDF_lib_path_default=os.path.join(tools_prefix,'lib') |
|
137 |
|
|
138 |
if os.path.isdir(netCDF_path_default) and os.path.isdir(netCDF_lib_path_default): |
# Intel's compiler uses regular expressions improperly and emits a warning |
139 |
useNetCDF_default='yes' |
# about failing to find the compilers. This warning can be safely ignored. |
|
netCDF_libs_default=[ 'netcdf_c++', 'netcdf' ] |
|
|
else: |
|
|
useNetCDF_default='no' |
|
|
netCDF_path_default=None |
|
|
netCDF_lib_path_default=None |
|
|
netCDF_libs_default=None |
|
140 |
|
|
141 |
#========================================================================== |
# PATH is needed so the compiler, linker and tools are found if they are not |
142 |
# |
# in default locations. |
143 |
# MPI: |
env = Environment(tools = ['default'], options = vars, |
144 |
# |
ENV = {'PATH': os.environ['PATH']}) |
145 |
if IS_WINDOWS_PLATFORM: |
if env['tools_names'] != 'default': |
146 |
useMPI_default='no' |
env = Environment(tools = ['default'] + env['tools_names'], options = vars, |
147 |
mpi_path_default=None |
ENV = {'PATH' : os.environ['PATH']}) |
148 |
mpi_lib_path_default=None |
|
149 |
mpi_libs_default=[] |
if options_file: |
150 |
mpi_run_default=None |
opts_valid=False |
151 |
else: |
if 'escript_opts_version' in env.Dictionary() and \ |
152 |
useMPI_default='no' |
int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION: |
153 |
mpi_root='/usr/local' |
opts_valid=True |
154 |
mpi_path_default=os.path.join(mpi_root,'include') |
if opts_valid: |
155 |
mpi_lib_path_default=os.path.join(mpi_root,'lib') |
print("Using options in %s." % options_file) |
156 |
mpi_libs_default=[ 'mpich' , 'pthread', 'rt' ] |
else: |
157 |
mpi_run_default='mpiexec -np 1' |
print("\nOptions file %s" % options_file) |
158 |
# |
print("is outdated! Please update the file by examining one of the TEMPLATE") |
159 |
#========================================================================== |
print("files in the scons/ subdirectory and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION) |
160 |
# |
Exit(1) |
161 |
# compile: |
|
162 |
# |
# Generate help text (scons -h) |
163 |
cc_flags_default='-O3 -std=c99 -ffast-math -fpic -Wno-unknown-pragmas -ansi' |
Help(vars.GenerateHelpText(env)) |
164 |
cc_flags_debug_default='-g -O0 -ffast-math -std=c99 -fpic -Wno-unknown-pragmas -ansi' |
|
165 |
cxx_flags_default='--no-warn -ansi' |
# Check for superfluous options |
166 |
cxx_flags_debug_default='--no-warn -ansi -DDOASSERT' |
if len(vars.UnknownVariables())>0: |
167 |
|
for k in vars.UnknownVariables(): |
168 |
#============================================================================================== |
print("Unknown option '%s'" % k) |
169 |
# Default options and options help text |
Exit(1) |
170 |
# These are defaults and can be overridden using command line arguments or an options file. |
|
171 |
# if the options_file or ARGUMENTS do not exist then the ones listed as default here are used |
#################### Make sure install directories exist ##################### |
172 |
# DO NOT CHANGE THEM HERE |
|
173 |
opts.AddOptions( |
env['BUILD_DIR']=env['build_dir'] |
174 |
# Where to install esys stuff |
prefix=Dir(env['prefix']).abspath |
175 |
('incinstall', 'where the esys headers will be installed', Dir('#.').abspath+'/include'), |
env['incinstall'] = os.path.join(prefix, 'include') |
176 |
('libinstall', 'where the esys libraries will be installed', dir_libraries), |
env['bininstall'] = os.path.join(prefix, 'bin') |
177 |
('pyinstall', 'where the esys python modules will be installed', dir_packages), |
env['libinstall'] = os.path.join(prefix, 'lib') |
178 |
('exinstall', 'where the esys examples will be installed', dir_examples), |
env['pyinstall'] = os.path.join(prefix, 'esys') |
179 |
('sys_libinstall', 'where the system esys libraries will be installed', sys_dir_libraries), |
if not os.path.isdir(env['bininstall']): |
180 |
('sys_pyinstall', 'where the system esys python modules will be installed', sys_dir_packages), |
os.makedirs(env['bininstall']) |
181 |
('sys_exinstall', 'where the system esys examples will be installed', sys_dir_examples), |
if not os.path.isdir(env['libinstall']): |
182 |
('src_zipfile', 'the source zip file will be installed.', Dir('#.').abspath+"/release/escript_src.zip"), |
os.makedirs(env['libinstall']) |
183 |
('test_zipfile', 'the test zip file will be installed.', Dir('#.').abspath+"/release/escript_tests.zip"), |
if not os.path.isdir(env['pyinstall']): |
184 |
('src_tarfile', 'the source tar file will be installed.', Dir('#.').abspath+"/release/escript_src.tar.gz"), |
os.makedirs(env['pyinstall']) |
185 |
('test_tarfile', 'the test tar file will be installed.', Dir('#.').abspath+"/release/escript_tests.tar.gz"), |
|
186 |
('examples_tarfile', 'the examples tar file will be installed.', Dir('#.').abspath+"/release/doc/escript_examples.tar.gz"), |
env.Append(CPPPATH = [env['incinstall']]) |
187 |
('examples_zipfile', 'the examples zip file will be installed.', Dir('#.').abspath+"/release/doc/escript_examples.zip"), |
env.Append(LIBPATH = [env['libinstall']]) |
188 |
('guide_pdf', 'name of the user guide in pdf format', Dir('#.').abspath+"/release/doc/user/guide.pdf"), |
|
189 |
('api_epydoc', 'name of the epydoc api docs directory', Dir('#.').abspath+"/release/doc/epydoc"), |
################# Fill in compiler options if not set above ################## |
190 |
('guide_html', 'name of the directory for user guide in html format', Dir('#.').abspath+"/release/doc/user/html"), |
|
191 |
('api_doxygen', 'name of the doxygen api docs directory',prefix+"/release/doc/doxygen"), |
if env['cc'] != 'default': env['CC']=env['cc'] |
192 |
# Compilation options |
if env['cxx'] != 'default': env['CXX']=env['cxx'] |
193 |
BoolOption('dodebug', 'Do you want a debug build?', 'no'), |
|
194 |
BoolOption('bounds_check', 'Do you want extra array bounds checking?', 'no'), |
# version >=9 of intel C++ compiler requires use of icpc to link in C++ |
195 |
('options_file', "Optional file containing preferred options. Ignored if it doesn't exist (default: scons/<hostname>_options.py)", options_file), |
# runtimes (icc does not) |
196 |
('cc_defines','C/C++ defines to use', None), |
if not IS_WINDOWS and os.uname()[4]=='ia64' and env['CXX']=='icpc': |
197 |
('cc_flags','C compiler flags to use (Release build)', cc_flags_default), |
env['LINK'] = env['CXX'] |
198 |
('cc_flags_debug', 'C compiler flags to use (Debug build)', cc_flags_debug_default), |
|
199 |
('cxx_flags', 'C++ compiler flags to use (Release build)', cxx_flags_default), |
# default compiler/linker options |
200 |
('cxx_flags_debug', 'C++ compiler flags to use (Debug build)', cxx_flags_debug_default), |
cc_flags = '' |
201 |
('omp_flags', 'OpenMP compiler flags to use (Release build)', ''), |
cc_optim = '' |
202 |
('omp_flags_debug', 'OpenMP compiler flags to use (Debug build)', ''), |
cc_debug = '' |
203 |
('ar_flags', 'Static library archiver flags to use', None), |
omp_flags = '' |
204 |
('sys_libs', 'System libraries to link with', None), |
omp_ldflags = '' |
205 |
('tar_flags','flags for zip files','-c -z'), |
fatalwarning = '' # switch to turn warnings into errors |
206 |
# MKL |
sysheaderopt = '' # how to indicate that a header is a system header |
207 |
PathOption('mkl_path', 'Path to MKL includes', None), |
|
208 |
PathOption('mkl_lib_path', 'Path to MKL libs', None), |
# env['CC'] might be a full path |
209 |
('mkl_libs', 'MKL libraries to link with', None), |
cc_name=os.path.basename(env['CC']) |
210 |
# SCSL |
|
211 |
PathOption('scsl_path', 'Path to SCSL includes', None), |
if cc_name == 'icc': |
212 |
PathOption('scsl_lib_path', 'Path to SCSL libs', None), |
# Intel compiler |
213 |
('scsl_libs', 'SCSL libraries to link with', None), |
cc_flags = "-std=c99 -fPIC -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1" |
214 |
('scsl_libs_MPI', 'SCSL libraries to link with for MPI build', None), |
cc_optim = "-O3 -ftz -IPF_ftlacc- -IPF_fma -fno-alias -ip" |
215 |
# UMFPACK |
cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK" |
216 |
PathOption('ufc_path', 'Path to UFconfig includes', ufc_path_default), |
omp_flags = "-openmp -openmp_report0" |
217 |
PathOption('umf_path', 'Path to UMFPACK includes', umf_path_default), |
omp_ldflags = "-openmp -openmp_report0 -lpthread" |
218 |
PathOption('umf_lib_path', 'Path to UMFPACK libs', umf_lib_path_default), |
fatalwarning = "-Werror" |
219 |
('umf_libs', 'UMFPACK libraries to link with', umf_libs_default), |
elif cc_name[:3] == 'gcc': |
220 |
# AMD (used by UMFPACK) |
# GNU C on any system |
221 |
PathOption('amd_path', 'Path to AMD includes', amd_path_default), |
cc_flags = "-pedantic -Wall -fPIC -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions" |
222 |
PathOption('amd_lib_path', 'Path to AMD libs', amd_lib_path_default), |
cc_optim = "-O3" |
223 |
('amd_libs', 'AMD libraries to link with', amd_libs_default), |
cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK" |
224 |
# TRILINOS |
omp_flags = "-fopenmp" |
225 |
PathOption('trilinos_path', 'Path to TRILINOS includes', None), |
omp_ldflags = "-fopenmp" |
226 |
PathOption('trilinos_lib_path', 'Path to TRILINOS libs', None), |
fatalwarning = "-Werror" |
227 |
('trilinos_libs', 'TRILINOS libraries to link with', None), |
sysheaderopt = "-isystem" |
228 |
# BLAS |
elif cc_name == 'cl': |
229 |
PathOption('blas_path', 'Path to BLAS includes', None), |
# Microsoft Visual C on Windows |
230 |
PathOption('blas_lib_path', 'Path to BLAS libs', None), |
cc_flags = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF" |
231 |
('blas_libs', 'BLAS libraries to link with', None), |
cc_optim = "/O2 /Op /W3" |
232 |
# netCDF |
cc_debug = "/Od /RTCcsu /ZI /DBOUNDS_CHECK" |
233 |
('useNetCDF', 'switch on/off the usage of netCDF', useNetCDF_default), |
fatalwarning = "/WX" |
234 |
PathOption('netCDF_path', 'Path to netCDF includes', netCDF_path_default), |
elif cc_name == 'icl': |
235 |
PathOption('netCDF_lib_path', 'Path to netCDF libs', netCDF_lib_path_default), |
# Intel C on Windows |
236 |
('netCDF_libs', 'netCDF C++ libraries to link with', netCDF_libs_default), |
cc_flags = '/EHsc /GR /MD' |
237 |
# Python |
cc_optim = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll' |
238 |
# locations of include files for python |
cc_debug = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv' |
239 |
# FIXME: python_path should be python_inc_path and the same for boost etc. |
omp_flags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel' |
240 |
PathOption('python_path', 'Path to Python includes', python_path_default), |
omp_ldflags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel' |
241 |
PathOption('python_lib_path', 'Path to Python libs', python_lib_path_default), |
|
242 |
('python_lib', 'Python libraries to link with', python_lib_default), |
# set defaults if not otherwise specified |
243 |
('python_cmd', 'Python command', 'python'), |
if env['cc_flags'] == 'default': env['cc_flags'] = cc_flags |
244 |
# Boost |
if env['cc_optim'] == 'default': env['cc_optim'] = cc_optim |
245 |
PathOption('boost_path', 'Path to Boost includes', boost_path_default), |
if env['cc_debug'] == 'default': env['cc_debug'] = cc_debug |
246 |
PathOption('boost_lib_path', 'Path to Boost libs', boost_lib_path_default), |
if env['omp_flags'] == 'default': env['omp_flags'] = omp_flags |
247 |
('boost_lib', 'Boost libraries to link with', boost_lib_default), |
if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags |
248 |
# Doc building |
if env['cc_extra'] != '': env.Append(CFLAGS = env['cc_extra']) |
249 |
# PathOption('doxygen_path', 'Path to Doxygen executable', None), |
if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra']) |
250 |
# PathOption('epydoc_path', 'Path to Epydoc executable', None), |
if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra']) |
251 |
# PAPI |
|
252 |
PathOption('papi_path', 'Path to PAPI includes', None), |
if env['usepython3']: |
253 |
PathOption('papi_lib_path', 'Path to PAPI libs', None), |
env.Append(CPPDEFINES=['ESPYTHON3']) |
254 |
('papi_libs', 'PAPI libraries to link with', None), |
|
255 |
('papi_instrument_solver', 'use PAPI in Solver.c to instrument each iteration of the solver', None), |
# set up the autolazy values |
256 |
# MPI |
if env['forcelazy'] == 'on': |
257 |
BoolOption('useMPI', 'Compile parallel version using MPI', useMPI_default), |
env.Append(CPPDEFINES=['FAUTOLAZYON']) |
258 |
('MPICH_IGNORE_CXX_SEEK', 'name of macro to ignore MPI settings of C++ SEEK macro (for MPICH)' , 'MPICH_IGNORE_CXX_SEEK'), |
elif env['forcelazy'] == 'off': |
259 |
PathOption('mpi_path', 'Path to MPI includes', mpi_path_default), |
env.Append(CPPDEFINES=['FAUTOLAZYOFF']) |
260 |
('mpi_run', 'mpirun name' , mpi_run_default), |
|
261 |
PathOption('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)',mpi_lib_path_default), |
# set up the collective resolve values |
262 |
('mpi_libs', 'MPI libraries to link with (needs to be shared!)', mpi_libs_default) |
if env['forcecollres'] == 'on': |
263 |
) |
env.Append(CPPDEFINES=['FRESCOLLECTON']) |
264 |
#================================================================================================= |
elif env['forcecollres'] == 'off': |
265 |
# |
env.Append(CPPDEFINES=['FRESCOLLECTOFF']) |
266 |
# Note: On the Altix the intel compilers are not automatically |
|
267 |
# detected by scons intelc.py script. The Altix has a different directory |
# allow non-standard C if requested |
268 |
# path and in some locations the "modules" facility is used to support |
if env['iknowwhatimdoing']: |
269 |
# multiple compiler versions. This forces the need to import the users PATH |
env.Append(CPPDEFINES=['IKNOWWHATIMDOING']) |
270 |
# environment which isn't the "scons way" |
|
271 |
# This doesn't impact linux and windows which will use the default compiler (g++ or msvc, or the intel compiler if it is installed on both platforms) |
# Disable OpenMP if no flags provided |
272 |
# FIXME: Perhaps a modification to intelc.py will allow better support for ia64 on altix |
if env['openmp'] and env['omp_flags'] == '': |
273 |
# |
print("OpenMP requested but no flags provided - disabling OpenMP!") |
274 |
|
env['openmp'] = False |
275 |
if IS_WINDOWS_PLATFORM: |
|
276 |
env = Environment(tools = ['default', 'msvc'], options = opts) |
if env['openmp']: |
277 |
|
env.Append(CCFLAGS = env['omp_flags']) |
278 |
|
if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags']) |
279 |
|
else: |
280 |
|
env['omp_flags']='' |
281 |
|
env['omp_ldflags']='' |
282 |
|
|
283 |
|
# add debug/non-debug compiler flags |
284 |
|
if env['debug']: |
285 |
|
env.Append(CCFLAGS = env['cc_debug']) |
286 |
|
else: |
287 |
|
env.Append(CCFLAGS = env['cc_optim']) |
288 |
|
|
289 |
|
# always add cc_flags |
290 |
|
env.Append(CCFLAGS = env['cc_flags']) |
291 |
|
|
292 |
|
# add system libraries |
293 |
|
env.AppendUnique(LIBS = env['sys_libs']) |
294 |
|
|
295 |
|
|
296 |
|
global_revision=ARGUMENTS.get('SVN_VERSION', None) |
297 |
|
if global_revision: |
298 |
|
global_revision = re.sub(':.*', '', global_revision) |
299 |
|
global_revision = re.sub('[^0-9]', '', global_revision) |
300 |
|
if global_revision == '': global_revision='-2' |
301 |
|
else: |
302 |
|
# Get the global Subversion revision number for the getVersion() method |
303 |
|
try: |
304 |
|
global_revision = os.popen('svnversion -n .').read() |
305 |
|
global_revision = re.sub(':.*', '', global_revision) |
306 |
|
global_revision = re.sub('[^0-9]', '', global_revision) |
307 |
|
if global_revision == '': global_revision='-2' |
308 |
|
except: |
309 |
|
global_revision = '-1' |
310 |
|
env['svn_revision']=global_revision |
311 |
|
env.Append(CPPDEFINES=['SVN_VERSION='+global_revision]) |
312 |
|
|
313 |
|
if IS_WINDOWS: |
314 |
|
if not env['build_shared']: |
315 |
|
env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB']) |
316 |
|
env.Append(CPPDEFINES = ['PASO_STATIC_LIB']) |
317 |
|
|
318 |
|
###################### Copy required environment vars ######################## |
319 |
|
|
320 |
|
# Windows doesn't use LD_LIBRARY_PATH but PATH instead |
321 |
|
if IS_WINDOWS: |
322 |
|
LD_LIBRARY_PATH_KEY='PATH' |
323 |
|
env['ENV']['LD_LIBRARY_PATH']='' |
324 |
else: |
else: |
325 |
if os.uname()[4]=='ia64': |
LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH' |
|
env = Environment(tools = ['default', 'intelc'], options = opts) |
|
|
if env['CXX'] == 'icpc': |
|
|
env['LINK'] = env['CXX'] # version >=9 of intel c++ compiler requires use of icpc to link in C++ runtimes (icc does not). FIXME: this behaviour could be directly incorporated into scons intelc.py |
|
|
else: |
|
|
env = Environment(tools = ['default'], options = opts) |
|
|
Help(opts.GenerateHelpText(env)) |
|
|
|
|
|
if env['bounds_check']: |
|
|
env.Append(CPPDEFINES = [ 'BOUNDS_CHECK' ]) |
|
|
env.Append(CXXDEFINES = [ 'BOUNDS_CHECK' ]) |
|
|
bounds_check = env['bounds_check'] |
|
|
else: |
|
|
bounds_check = 0 |
|
326 |
|
|
327 |
#================================================================================================= |
# the following env variables are exported for the unit tests |
|
# |
|
|
# Initialise Scons Build Environment |
|
|
# check for user environment variables we are interested in |
|
|
try: |
|
|
tmp = os.environ['PYTHONPATH'] |
|
|
env['ENV']['PYTHONPATH'] = tmp |
|
|
except KeyError: |
|
|
pass |
|
328 |
|
|
329 |
env.PrependENVPath('PYTHONPATH', source_root) |
for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES': |
330 |
|
try: |
331 |
|
env['ENV'][key] = os.environ[key] |
332 |
|
except KeyError: |
333 |
|
env['ENV'][key] = 1 |
334 |
|
|
335 |
try: |
env_export=env['env_export'] |
336 |
omp_num_threads = os.environ['OMP_NUM_THREADS'] |
env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','TMPDIR','TEMP','TMP']) |
337 |
except KeyError: |
|
338 |
omp_num_threads = 1 |
for key in set(env_export): |
339 |
env['ENV']['OMP_NUM_THREADS'] = omp_num_threads |
try: |
340 |
|
env['ENV'][key] = os.environ[key] |
341 |
|
except KeyError: |
342 |
|
pass |
343 |
|
|
344 |
try: |
try: |
345 |
path = os.environ['PATH'] |
env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY]) |
|
env['ENV']['PATH'] = path |
|
346 |
except KeyError: |
except KeyError: |
347 |
omp_num_threads = 1 |
pass |
|
|
|
|
env['ENV']['OMP_NUM_THREADS'] = omp_num_threads |
|
348 |
|
|
349 |
|
# these shouldn't be needed |
350 |
|
#for key in 'C_INCLUDE_PATH','CPLUS_INCLUDE_PATH','LIBRARY_PATH': |
351 |
|
# try: |
352 |
|
# env['ENV'][key] = os.environ[key] |
353 |
|
# except KeyError: |
354 |
|
# pass |
355 |
|
|
|
# Copy some variables from the system environment to the build environment |
|
356 |
try: |
try: |
357 |
env['ENV']['DISPLAY'] = os.environ['DISPLAY'] |
env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH'] |
|
env['ENV']['XAUTHORITY'] = os.environ['XAUTHORITY'] |
|
|
home_temp = os.environ['HOME'] # MPICH2's mpd needs $HOME to find $HOME/.mpd.conf |
|
|
env['ENV']['HOME'] = home_temp |
|
358 |
except KeyError: |
except KeyError: |
359 |
pass |
pass |
360 |
|
|
361 |
try: |
######################## Add some custom builders ############################ |
|
tmp = os.environ['PATH'] |
|
|
env['ENV']['PATH'] = tmp |
|
|
except KeyError: |
|
|
pass |
|
362 |
|
|
363 |
try: |
if env['pythoncmd']=='python': |
364 |
tmp = os.environ['LD_LIBRARY_PATH'] |
py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True) |
365 |
print tmp |
else: |
366 |
env['ENV']['LD_LIBRARY_PATH'] = tmp |
py_builder = Builder(action = env['pythoncmd']+" scripts/py_comp.py $SOURCE $TARGET", suffix = '.pyc', src_suffix = '.py', single_source=True) |
|
except KeyError: |
|
|
pass |
|
|
#========================================================================== |
|
|
# |
|
|
# Add some customer builders |
|
|
# |
|
|
py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True) |
|
367 |
env.Append(BUILDERS = {'PyCompile' : py_builder}); |
env.Append(BUILDERS = {'PyCompile' : py_builder}); |
368 |
|
|
369 |
runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed', |
runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True) |
|
src_suffix=env['PROGSUFFIX'], single_source=True) |
|
|
|
|
370 |
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder}); |
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder}); |
371 |
|
|
372 |
runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffic='.py', single_source=True) |
runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffic='.py', single_source=True) |
373 |
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder}); |
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder}); |
374 |
|
|
375 |
# Convert the options which are held in environment variable into python variables for ease of handling and configure compilation options |
epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True) |
376 |
try: |
env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder}); |
|
incinstall = env['incinstall'] |
|
|
env.Append(CPPPATH = [incinstall,]) |
|
|
except KeyError: |
|
|
incinstall = None |
|
|
try: |
|
|
libinstall = env['libinstall'] |
|
|
env.Append(LIBPATH = [libinstall,]) # Adds -L for building of libescript.so libfinley.so escriptcpp.so finleycpp.so |
|
|
env.PrependENVPath('LD_LIBRARY_PATH', libinstall) |
|
|
if IS_WINDOWS_PLATFORM : |
|
|
env.PrependENVPath('PATH', libinstall) |
|
|
env.PrependENVPath('PATH', env['boost_lib_path']) |
|
|
except KeyError: |
|
|
libinstall = None |
|
|
try: |
|
|
pyinstall = env['pyinstall'] # all targets will install into pyinstall/esys but PYTHONPATH points at straight pyinstall so you go import esys.escript etc |
|
|
except KeyError: |
|
|
pyinstall = None |
|
|
|
|
|
try: |
|
|
cc_defines = env['cc_defines'] |
|
|
env.Append(CPPDEFINES = cc_defines) |
|
|
except KeyError: |
|
|
pass |
|
|
try: |
|
|
flags = env['ar_flags'] |
|
|
env.Append(ARFLAGS = flags) |
|
|
except KeyError: |
|
|
ar_flags = None |
|
|
try: |
|
|
sys_libs = env['sys_libs'] |
|
|
except KeyError: |
|
|
sys_libs = [] |
|
|
|
|
|
try: |
|
|
tar_flags = env['tar_flags'] |
|
|
env.Replace(TARFLAGS = tar_flags) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
exinstall = env['exinstall'] |
|
|
except KeyError: |
|
|
exinstall = None |
|
|
try: |
|
|
sys_libinstall = env['sys_libinstall'] |
|
|
except KeyError: |
|
|
sys_libinstall = None |
|
|
try: |
|
|
sys_pyinstall = env['sys_pyinstall'] |
|
|
except KeyError: |
|
|
sys_pyinstall = None |
|
|
try: |
|
|
sys_exinstall = env['sys_exinstall'] |
|
|
except KeyError: |
|
|
sys_exinstall = None |
|
377 |
|
|
378 |
# ====================== debugging =================================== |
############################ Dependency checks ############################### |
|
try: |
|
|
dodebug = env['dodebug'] |
|
|
except KeyError: |
|
|
dodebug = None |
|
379 |
|
|
380 |
# === switch on omp =================================================== |
# Create a Configure() environment to check for compilers and python |
381 |
try: |
conf = Configure(env.Clone()) |
|
omp_flags = env['omp_flags'] |
|
|
except KeyError: |
|
|
omp_flags = '' |
|
382 |
|
|
383 |
try: |
######## Test that the compilers work |
384 |
omp_flags_debug = env['omp_flags_debug'] |
|
385 |
except KeyError: |
if 'CheckCC' in dir(conf): # exists since scons 1.1.0 |
386 |
omp_flags_debug = '' |
if not conf.CheckCC(): |
387 |
|
print("Cannot run C compiler '%s' (check config.log)" % (env['CC'])) |
388 |
|
Exit(1) |
389 |
|
if not conf.CheckCXX(): |
390 |
|
print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX'])) |
391 |
|
Exit(1) |
392 |
|
else: |
393 |
|
if not conf.CheckFunc('printf', language='c'): |
394 |
|
print("Cannot run C compiler '%s' (check config.log)" % (env['CC'])) |
395 |
|
Exit(1) |
396 |
|
if not conf.CheckFunc('printf', language='c++'): |
397 |
|
print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX'])) |
398 |
|
Exit(1) |
399 |
|
|
400 |
|
if conf.CheckFunc('gethostname'): |
401 |
|
conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME']) |
402 |
|
|
403 |
|
######## Python headers & library (required) |
404 |
|
|
405 |
|
# Use the python scons is running |
406 |
|
if env['pythoncmd']=='python': |
407 |
|
python_inc_path=sysconfig.get_python_inc() |
408 |
|
if IS_WINDOWS: |
409 |
|
python_lib_path=os.path.join(sysconfig.get_config_var('prefix'), 'libs') |
410 |
|
elif env['PLATFORM']=='darwin': |
411 |
|
python_lib_path=sysconfig.get_config_var('LIBPL') |
412 |
|
else: |
413 |
|
python_lib_path=sysconfig.get_config_var('LIBDIR') |
414 |
|
|
415 |
|
#python_libs=[sysconfig.get_config_var('LDLIBRARY')] # only on linux |
416 |
|
if IS_WINDOWS: |
417 |
|
python_libs=['python%s%s'%(sys.version_info[0], sys.version_info[1])] |
418 |
|
else: |
419 |
|
python_libs=['python'+sysconfig.get_python_version()] |
420 |
|
|
421 |
|
#if we want to use a python other than the one scons is running |
422 |
|
else: |
423 |
|
initstring='from __future__ import print_function;from distutils import sysconfig;' |
424 |
|
if env['pythonlibname']!='': |
425 |
|
python_libs=env['pythonlibname'] |
426 |
|
else: # work it out by calling python |
427 |
|
if IS_WINDOWS: |
428 |
|
cmd='print("python%s%s"%(sys.version_info[0], sys.version_info[1]))' |
429 |
|
else: |
430 |
|
cmd='print("python"+sysconfig.get_python_version())' |
431 |
|
p=Popen([env['pythoncmd'], '-c', initstring+cmd], stdout=PIPE) |
432 |
|
python_libs=p.stdout.readline() |
433 |
|
if env['usepython3']: # This is to convert unicode str into py2 string |
434 |
|
python_libs=python_libs.encode() # If scons runs on py3 then this must be rethought |
435 |
|
p.wait() |
436 |
|
python_libs=python_libs.strip() |
437 |
|
|
438 |
# ========= use mpi? ===================================================== |
|
439 |
try: |
# Now we know whether we are using python3 or not |
440 |
useMPI = env['useMPI'] |
p=Popen([env['pythoncmd'], '-c', initstring+'print(sysconfig.get_python_inc())'], stdout=PIPE) |
441 |
except KeyError: |
python_inc_path=p.stdout.readline() |
442 |
useMPI = None |
if env['usepython3']: |
443 |
# ========= set compiler flags =========================================== |
python_inc_path=python_inc_path.encode() |
444 |
|
p.wait() |
445 |
|
python_inc_path=python_inc_path.strip() |
446 |
|
if IS_WINDOWS: |
447 |
|
cmd="os.path.join(sysconfig.get_config_var('prefix'), 'libs')" |
448 |
|
elif env['PLATFORM']=='darwin': |
449 |
|
cmd="sysconfig.get_config_var(\"LIBPL\")" |
450 |
|
else: |
451 |
|
cmd="sysconfig.get_config_var(\"LIBDIR\")" |
452 |
|
|
453 |
|
p=Popen([env['pythoncmd'], '-c', initstring+'print('+cmd+')'], stdout=PIPE) |
454 |
|
python_lib_path=p.stdout.readline() |
455 |
|
if env['usepython3']: |
456 |
|
python_lib_path=python_lib_path.decode() |
457 |
|
p.wait() |
458 |
|
python_lib_path=python_lib_path.strip() |
459 |
|
|
460 |
|
if sysheaderopt == '': |
461 |
|
conf.env.AppendUnique(CPPPATH = [python_inc_path]) |
462 |
|
else: |
463 |
|
conf.env.Append(CCFLAGS = [sysheaderopt, python_inc_path]) |
464 |
|
|
465 |
|
conf.env.AppendUnique(LIBPATH = [python_lib_path]) |
466 |
|
conf.env.AppendUnique(LIBS = python_libs) |
467 |
|
# The wrapper script needs to find the libs |
468 |
|
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, python_lib_path) |
469 |
|
|
470 |
|
if not conf.CheckCHeader('Python.h'): |
471 |
|
print("Cannot find python include files (tried 'Python.h' in directory %s)" % (python_inc_path)) |
472 |
|
Exit(1) |
473 |
|
if not conf.CheckFunc('Py_Exit'): |
474 |
|
print("Cannot find python library method Py_Main (tried %s in directory %s)" % (python_libs, python_lib_path)) |
475 |
|
Exit(1) |
476 |
|
|
477 |
|
## reuse conf to check for numpy header (optional) |
478 |
|
if env['usepython3']: |
479 |
|
# FIXME: This is until we can work out how to make the checks in python 3 |
480 |
|
conf.env['numpy_h']=False |
481 |
|
else: |
482 |
|
if conf.CheckCXXHeader(['Python.h','numpy/ndarrayobject.h']): |
483 |
|
conf.env.Append(CPPDEFINES = ['HAVE_NUMPY_H']) |
484 |
|
conf.env['numpy_h']=True |
485 |
|
else: |
486 |
|
conf.env['numpy_h']=False |
487 |
|
|
488 |
|
# Commit changes to environment |
489 |
|
env = conf.Finish() |
490 |
|
|
491 |
|
######## boost (required) |
492 |
|
|
493 |
|
boost_inc_path,boost_lib_path=findLibWithHeader(env, env['boost_libs'], 'boost/python.hpp', env['boost_prefix'], lang='c++') |
494 |
|
if sysheaderopt == '': |
495 |
|
env.AppendUnique(CPPPATH = [boost_inc_path]) |
496 |
|
else: |
497 |
|
# This is required because we can't -isystem /usr/include since it breaks |
498 |
|
# std includes |
499 |
|
if os.path.normpath(boost_inc_path) == '/usr/include': |
500 |
|
conf.env.Append(CCFLAGS=[sysheaderopt, os.path.join(boost_inc_path,'boost')]) |
501 |
|
else: |
502 |
|
env.Append(CCFLAGS=[sysheaderopt, boost_inc_path]) |
503 |
|
|
504 |
|
env.AppendUnique(LIBPATH = [boost_lib_path]) |
505 |
|
env.AppendUnique(LIBS = env['boost_libs']) |
506 |
|
env.PrependENVPath(LD_LIBRARY_PATH_KEY, boost_lib_path) |
507 |
|
|
508 |
# Can't use MPI and OpenMP simultaneously at this time |
######## numpy (required) |
|
if useMPI: |
|
|
omp_flags='' |
|
|
omp_flags_debug='' |
|
509 |
|
|
510 |
if dodebug: |
if env['pythoncmd']=='python': |
511 |
try: |
try: |
512 |
flags = env['cc_flags_debug'] + ' ' + omp_flags_debug |
from numpy import identity |
513 |
env.Append(CCFLAGS = flags) |
except ImportError: |
514 |
except KeyError: |
print("Cannot import numpy, you need to set your PYTHONPATH and probably %s"%LD_LIBRARY_PATH_KEY) |
515 |
pass |
Exit(1) |
516 |
else: |
else: |
517 |
try: |
p=subprocess.call([env['pythoncmd'],'-c','import numpy']) |
518 |
flags = env['cc_flags'] + ' ' + omp_flags |
if p!=0: |
519 |
env.Append(CCFLAGS = flags) |
print("Cannot import numpy, you need to set your PYTHONPATH and probably %s"%LD_LIBRARY_PATH_KEY) |
520 |
except KeyError: |
Exit(1) |
521 |
pass |
|
522 |
if dodebug: |
######## CppUnit (required for tests) |
523 |
try: |
|
524 |
flags = env['cxx_flags_debug'] |
try: |
525 |
env.Append(CXXFLAGS = flags) |
cppunit_inc_path,cppunit_lib_path=findLibWithHeader(env, env['cppunit_libs'], 'cppunit/TestFixture.h', env['cppunit_prefix'], lang='c++') |
526 |
except KeyError: |
env.AppendUnique(CPPPATH = [cppunit_inc_path]) |
527 |
pass |
env.AppendUnique(LIBPATH = [cppunit_lib_path]) |
528 |
else: |
env.PrependENVPath(LD_LIBRARY_PATH_KEY, cppunit_lib_path) |
529 |
try: |
env['cppunit']=True |
|
flags = env['cxx_flags'] |
|
|
env.Append(CXXFLAGS = flags) |
|
|
except KeyError: |
|
|
pass |
|
|
try: |
|
|
if env['CC'] == 'gcc': env.Append(CCFLAGS = "-pedantic-errors -Wno-long-long") |
|
530 |
except: |
except: |
531 |
pass |
env['cppunit']=False |
|
|
|
|
# ============= Remember what options were used in the compile ===================================== |
|
|
if not IS_WINDOWS_PLATFORM: |
|
|
env.Execute("/bin/rm -f " + libinstall + "/Compiled.with.*") |
|
|
if dodebug: env.Execute("touch " + libinstall + "/Compiled.with.debug") |
|
|
if useMPI: env.Execute("touch " + libinstall + "/Compiled.with.mpi") |
|
|
if omp_flags != '': env.Execute("touch " + libinstall + "/Compiled.with.OpenMP") |
|
|
if bounds_check: env.Execute("touch " + libinstall + "/Compiled.with.bounds_check") |
|
|
|
|
|
# ============= set mkl (but only of no MPI) ===================================== |
|
|
if not useMPI: |
|
|
try: |
|
|
includes = env['mkl_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
lib_path = env['mkl_lib_path'] |
|
|
env.Append(LIBPATH = [lib_path,]) |
|
|
env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
mkl_libs = env['mkl_libs'] |
|
|
except KeyError: |
|
|
mkl_libs = [] |
|
|
else: |
|
|
mkl_libs = [] |
|
|
|
|
|
# ============= set scsl (but only of no MPI) ===================================== |
|
|
if not useMPI: |
|
|
try: |
|
|
includes = env['scsl_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
lib_path = env['scsl_lib_path'] |
|
|
env.Append(LIBPATH = [lib_path,]) |
|
|
env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
scsl_libs = env['scsl_libs'] |
|
|
except KeyError: |
|
|
scsl_libs = [ ] |
|
|
|
|
|
else: |
|
|
scsl_libs = [] |
|
|
|
|
|
# ============= set TRILINOS (but only with MPI) ===================================== |
|
|
if useMPI: |
|
|
try: |
|
|
includes = env['trilinos_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
lib_path = env['trilinos_lib_path'] |
|
|
env.Append(LIBPATH = [lib_path,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
trilinos_libs = env['trilinos_libs'] |
|
|
except KeyError: |
|
|
trilinos_libs = [] |
|
|
else: |
|
|
trilinos_libs = [] |
|
|
|
|
|
|
|
|
# ============= set umfpack (but only without MPI) ===================================== |
|
|
umf_libs=[ ] |
|
|
if not useMPI: |
|
|
try: |
|
|
includes = env['umf_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
lib_path = env['umf_lib_path'] |
|
|
env.Append(LIBPATH = [lib_path,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
umf_libs = env['umf_libs'] |
|
|
umf_libs+=umf_libs |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
includes = env['ufc_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
includes = env['amd_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
lib_path = env['amd_lib_path'] |
|
|
env.Append(LIBPATH = [lib_path,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
amd_libs = env['amd_libs'] |
|
|
umf_libs+=amd_libs |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
# ============= set TRILINOS (but only with MPI) ===================================== |
|
|
if useMPI: |
|
|
try: |
|
|
includes = env['trilinos_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
lib_path = env['trilinos_lib_path'] |
|
|
env.Append(LIBPATH = [lib_path,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
trilinos_libs = env['trilinos_libs'] |
|
|
except KeyError: |
|
|
trilinos_libs = [] |
|
|
else: |
|
|
trilinos_libs = [] |
|
|
|
|
|
# ============= set blas ===================================== |
|
|
try: |
|
|
includes = env['blas_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
lib_path = env['blas_lib_path'] |
|
|
env.Append(LIBPATH = [lib_path,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
blas_libs = env['blas_libs'] |
|
|
except KeyError: |
|
|
blas_libs = [ ] |
|
|
|
|
|
# ========== netcdf ==================================== |
|
|
try: |
|
|
useNetCDF = env['useNetCDF'] |
|
|
except KeyError: |
|
|
useNetCDF = 'yes' |
|
|
pass |
|
|
|
|
|
if useNetCDF == 'yes': |
|
|
try: |
|
|
netCDF_libs = env['netCDF_libs'] |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
env.Append(LIBS = netCDF_libs) |
|
|
env.Append(CPPDEFINES = [ 'USE_NETCDF' ]) |
|
|
try: |
|
|
includes = env['netCDF_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
|
|
|
try: |
|
|
lib_path = env['netCDF_lib_path'] |
|
|
env.Append(LIBPATH = [ lib_path, ]) |
|
|
env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path |
|
|
if IS_WINDOWS_PLATFORM : |
|
|
env.PrependENVPath('PATH', lib_path) |
|
|
except KeyError: |
|
|
pass |
|
|
else: |
|
|
print "Warning: Installation is not configured with netCDF. Some I/O function may not be available." |
|
|
netCDF_libs=[ ] |
|
|
|
|
|
# ====================== boost ====================================== |
|
|
try: |
|
|
includes = env['boost_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
try: |
|
|
lib_path = env['boost_lib_path'] |
|
|
env.Append(LIBPATH = [lib_path,]) |
|
|
env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path |
|
|
if IS_WINDOWS_PLATFORM : |
|
|
env.PrependENVPath('PATH', lib_path) |
|
|
except KeyError: |
|
|
pass |
|
|
try: |
|
|
boost_lib = env['boost_lib'] |
|
|
except KeyError: |
|
|
boost_lib = None |
|
|
# ====================== python ====================================== |
|
|
try: |
|
|
includes = env['python_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
try: |
|
|
lib_path = env['python_lib_path'] |
|
|
env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path |
|
|
env.Append(LIBPATH = [lib_path,]) |
|
|
except KeyError: |
|
|
pass |
|
|
try: |
|
|
python_lib = env['python_lib'] |
|
|
except KeyError: |
|
|
python_lib = None |
|
|
# =============== documentation ======================================= |
|
|
try: |
|
|
doxygen_path = env['doxygen_path'] |
|
|
except KeyError: |
|
|
doxygen_path = None |
|
|
try: |
|
|
epydoc_path = env['epydoc_path'] |
|
|
except KeyError: |
|
|
epydoc_path = None |
|
|
# =============== PAPI ======================================= |
|
|
try: |
|
|
includes = env['papi_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
try: |
|
|
lib_path = env['papi_lib_path'] |
|
|
env.Append(LIBPATH = [lib_path,]) |
|
|
except KeyError: |
|
|
pass |
|
|
try: |
|
|
papi_libs = env['papi_libs'] |
|
|
except KeyError: |
|
|
papi_libs = None |
|
|
# ============= set mpi ===================================== |
|
|
if useMPI: |
|
|
env.Append(CPPDEFINES=['PASO_MPI',]) |
|
|
try: |
|
|
includes = env['mpi_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
try: |
|
|
lib_path = env['mpi_lib_path'] |
|
|
env.Append(LIBPATH = [lib_path,]) |
|
|
env['ENV']['LD_LIBRARY_PATH']+=":"+lib_path |
|
|
except KeyError: |
|
|
pass |
|
|
try: |
|
|
mpi_libs = env['mpi_libs'] |
|
|
except KeyError: |
|
|
mpi_libs = [] |
|
|
|
|
|
try: |
|
|
mpi_run = env['mpi_run'] |
|
|
except KeyError: |
|
|
mpi_run = '' |
|
|
|
|
|
try: |
|
|
mpich_ignore_cxx_seek=env['MPICH_IGNORE_CXX_SEEK'] |
|
|
env.Append(CPPDEFINES = [ mpich_ignore_cxx_seek ] ) |
|
|
except KeyError: |
|
|
pass |
|
|
else: |
|
|
mpi_libs=[] |
|
|
mpi_run = mpi_run_default |
|
|
# =========== zip files =========================================== |
|
|
try: |
|
|
includes = env['papi_path'] |
|
|
env.Append(CPPPATH = [includes,]) |
|
|
except KeyError: |
|
|
pass |
|
|
try: |
|
|
lib_path = env['papi_lib_path'] |
|
|
env.Append(LIBPATH = [lib_path,]) |
|
|
except KeyError: |
|
|
pass |
|
|
try: |
|
|
papi_libs = env['papi_libs'] |
|
|
except KeyError: |
|
|
papi_libs = None |
|
|
try: |
|
|
papi_instrument_solver = env['papi_instrument_solver'] |
|
|
except KeyError: |
|
|
papi_instrument_solver = None |
|
|
|
|
|
|
|
|
# ============= and some helpers ===================================== |
|
|
try: |
|
|
doxygen_path = env['doxygen_path'] |
|
|
except KeyError: |
|
|
doxygen_path = None |
|
|
try: |
|
|
epydoc_path = env['epydoc_path'] |
|
|
except KeyError: |
|
|
epydoc_path = None |
|
|
try: |
|
|
src_zipfile = env.File(env['src_zipfile']) |
|
|
except KeyError: |
|
|
src_zipfile = None |
|
|
try: |
|
|
test_zipfile = env.File(env['test_zipfile']) |
|
|
except KeyError: |
|
|
test_zipfile = None |
|
|
try: |
|
|
examples_zipfile = env.File(env['examples_zipfile']) |
|
|
except KeyError: |
|
|
examples_zipfile = None |
|
|
|
|
|
try: |
|
|
src_tarfile = env.File(env['src_tarfile']) |
|
|
except KeyError: |
|
|
src_tarfile = None |
|
|
try: |
|
|
test_tarfile = env.File(env['test_tarfile']) |
|
|
except KeyError: |
|
|
test_tarfile = None |
|
|
try: |
|
|
examples_tarfile = env.File(env['examples_tarfile']) |
|
|
except KeyError: |
|
|
examples_tarfile = None |
|
532 |
|
|
533 |
try: |
######## VTK (optional) |
|
guide_pdf = env.File(env['guide_pdf']) |
|
|
except KeyError: |
|
|
guide_pdf = None |
|
534 |
|
|
535 |
try: |
if env['pyvisi']: |
536 |
guide_html_index = env.File('index.htm',env['guide_html']) |
try: |
537 |
except KeyError: |
import vtk |
538 |
guide_html_index = None |
env['pyvisi'] = True |
539 |
|
except ImportError: |
540 |
try: |
print("Cannot import vtk, disabling pyvisi.") |
541 |
api_epydoc = env.Dir(env['api_epydoc']) |
env['pyvisi'] = False |
542 |
except KeyError: |
|
543 |
api_epydoc = None |
######## netCDF (optional) |
544 |
|
|
545 |
try: |
netcdf_inc_path='' |
546 |
api_doxygen = env.Dir(env['api_doxygen']) |
netcdf_lib_path='' |
547 |
except KeyError: |
if env['netcdf']: |
548 |
api_doxygen = None |
netcdf_inc_path,netcdf_lib_path=findLibWithHeader(env, env['netcdf_libs'], 'netcdf.h', env['netcdf_prefix'], lang='c++') |
549 |
|
env.AppendUnique(CPPPATH = [netcdf_inc_path]) |
550 |
try: |
env.AppendUnique(LIBPATH = [netcdf_lib_path]) |
551 |
svn_pipe = os.popen("svnversion -n .") |
env.AppendUnique(LIBS = env['netcdf_libs']) |
552 |
global_revision = svn_pipe.readlines() |
env.PrependENVPath(LD_LIBRARY_PATH_KEY, netcdf_lib_path) |
553 |
svn_pipe.close() |
env.Append(CPPDEFINES = ['USE_NETCDF']) |
554 |
global_revision = re.sub(":.*", "", global_revision[0]) |
|
555 |
global_revision = re.sub("[^0-9]", "", global_revision) |
######## PAPI (optional) |
556 |
except: |
|
557 |
global_revision="-1" |
papi_inc_path='' |
558 |
print "Warning: unable to recover global revsion number." |
papi_lib_path='' |
559 |
print "Revision number is %s."%global_revision |
if env['papi']: |
560 |
env.Append(CPPDEFINES = "SVN_VERSION="+global_revision) |
papi_inc_path,papi_lib_path=findLibWithHeader(env, env['papi_libs'], 'papi.h', env['papi_prefix'], lang='c') |
561 |
|
env.AppendUnique(CPPPATH = [papi_inc_path]) |
562 |
# Python install - esys __init__.py |
env.AppendUnique(LIBPATH = [papi_lib_path]) |
563 |
init_target = env.Command(pyinstall+'/__init__.py', None, Touch('$TARGET')) |
env.AppendUnique(LIBS = env['papi_libs']) |
564 |
|
env.PrependENVPath(LD_LIBRARY_PATH_KEY, papi_lib_path) |
565 |
# FIXME: exinstall and friends related to examples are not working. |
env.Append(CPPDEFINES = ['BLOCKPAPI']) |
566 |
build_target = env.Alias('build',[libinstall,incinstall,pyinstall,init_target]) |
|
567 |
|
######## MKL (optional) |
568 |
env.Default(build_target) |
|
569 |
|
mkl_inc_path='' |
570 |
# Zipgets |
mkl_lib_path='' |
571 |
env.Alias('release_src',[ src_zipfile, src_tarfile ]) |
if env['mkl']: |
572 |
env.Alias('release_tests',[ test_zipfile, test_tarfile]) |
mkl_inc_path,mkl_lib_path=findLibWithHeader(env, env['mkl_libs'], 'mkl_solver.h', env['mkl_prefix'], lang='c') |
573 |
env.Alias('release_examples',[ examples_zipfile, examples_tarfile]) |
env.AppendUnique(CPPPATH = [mkl_inc_path]) |
574 |
env.Alias('examples_zipfile',examples_zipfile) |
env.AppendUnique(LIBPATH = [mkl_lib_path]) |
575 |
env.Alias('examples_tarfile',examples_tarfile) |
env.AppendUnique(LIBS = env['mkl_libs']) |
576 |
env.Alias('api_epydoc',api_epydoc) |
env.PrependENVPath(LD_LIBRARY_PATH_KEY, mkl_lib_path) |
577 |
env.Alias('api_doxygen',api_doxygen) |
env.Append(CPPDEFINES = ['MKL']) |
578 |
env.Alias('guide_html_index',guide_html_index) |
|
579 |
env.Alias('guide_pdf', guide_pdf) |
######## UMFPACK (optional) |
580 |
env.Alias('docs',[ 'release_examples', 'guide_pdf', api_epydoc, api_doxygen, guide_html_index]) |
|
581 |
env.Alias('release', ['release_src', 'release_tests', 'docs']) |
umfpack_inc_path='' |
582 |
|
umfpack_lib_path='' |
583 |
env.Alias('build_tests',build_target) # target to build all C++ tests |
if env['umfpack']: |
584 |
env.Alias('build_py_tests',build_target) # target to build all python tests |
umfpack_inc_path,umfpack_lib_path=findLibWithHeader(env, env['umfpack_libs'], 'umfpack.h', env['umfpack_prefix'], lang='c') |
585 |
env.Alias('build_all_tests', [ 'build_tests', 'build_py_tests' ] ) # target to build all python tests |
env.AppendUnique(CPPPATH = [umfpack_inc_path]) |
586 |
env.Alias('run_tests', 'build_tests') # target to run all C++ test |
env.AppendUnique(LIBPATH = [umfpack_lib_path]) |
587 |
env.Alias('py_tests', 'build_py_tests') # taget to run all released python tests |
env.AppendUnique(LIBS = env['umfpack_libs']) |
588 |
env.Alias('all_tests', ['run_tests', 'py_tests']) # target to run all C++ and released python tests |
env.PrependENVPath(LD_LIBRARY_PATH_KEY, umfpack_lib_path) |
589 |
|
env.Append(CPPDEFINES = ['UMFPACK']) |
590 |
|
|
591 |
# Allow sconscripts to see the env |
######## LAPACK (optional) |
592 |
Export(["IS_WINDOWS_PLATFORM", "env", "incinstall", "libinstall", "pyinstall", "dodebug", "mkl_libs", "scsl_libs", "umf_libs", "blas_libs", "netCDF_libs", "useNetCDF", "mpi_run", |
|
593 |
"boost_lib", "python_lib", "doxygen_path", "epydoc_path", "papi_libs", |
if env['lapack']=='mkl' and not env['mkl']: |
594 |
"sys_libs", "test_zipfile", "src_zipfile", "test_tarfile", "src_tarfile", "examples_tarfile", "examples_zipfile", "trilinos_libs", "mpi_libs", "papi_instrument_solver", |
print("mkl_lapack requires MKL!") |
595 |
"guide_pdf", "guide_html_index", "api_epydoc", "api_doxygen", "useMPI" ]) |
Exit(1) |
596 |
|
|
597 |
# End initialisation section |
env['uselapack'] = env['lapack']!='none' |
598 |
# Begin configuration section |
lapack_inc_path='' |
599 |
# adds this file and the scons option directore to the source tar |
lapack_lib_path='' |
600 |
release_srcfiles=[env.File('SConstruct'),env.Dir('lib'),env.Dir('include'),]+[ env.File(x) for x in glob.glob('scons/*.py') ] |
if env['uselapack']: |
601 |
release_testfiles=[env.File('README_TESTS'),] |
header='clapack.h' |
602 |
env.Zip(src_zipfile, release_srcfiles) |
if env['lapack']=='mkl': |
603 |
env.Zip(test_zipfile, release_testfiles) |
env.AppendUnique(CPPDEFINES = ['MKL_LAPACK']) |
604 |
try: |
header='mkl_lapack.h' |
605 |
env.Tar(src_tarfile, release_srcfiles) |
lapack_inc_path,lapack_lib_path=findLibWithHeader(env, env['lapack_libs'], header, env['lapack_prefix'], lang='c') |
606 |
env.Tar(test_tarfile, release_testfiles) |
env.AppendUnique(CPPPATH = [lapack_inc_path]) |
607 |
except AttributeError: |
env.AppendUnique(LIBPATH = [lapack_lib_path]) |
608 |
pass |
env.AppendUnique(LIBS = env['lapack_libs']) |
609 |
# Insert new components to be build here |
env.Append(CPPDEFINES = ['USE_LAPACK']) |
610 |
# FIXME: might be nice to replace this verbosity with a list of targets and some |
|
611 |
# FIXME: nifty python to create the lengthy but very similar env.Sconscript lines |
######## Silo (optional) |
612 |
# Third Party libraries |
|
613 |
env.SConscript(dirs = ['tools/CppUnitTest/src'], build_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0) |
silo_inc_path='' |
614 |
# C/C++ Libraries |
silo_lib_path='' |
615 |
env.SConscript(dirs = ['paso/src'], build_dir='build/$PLATFORM/paso', duplicate=0) |
if env['silo']: |
616 |
# bruce is removed for now as it doesn't really do anything |
silo_inc_path,silo_lib_path=findLibWithHeader(env, env['silo_libs'], 'silo.h', env['silo_prefix'], lang='c') |
617 |
# env.SConscript(dirs = ['bruce/src'], build_dir='build/$PLATFORM/bruce', duplicate=0) |
env.AppendUnique(CPPPATH = [silo_inc_path]) |
618 |
env.SConscript(dirs = ['escript/src'], build_dir='build/$PLATFORM/escript', duplicate=0) |
env.AppendUnique(LIBPATH = [silo_lib_path]) |
619 |
env.SConscript(dirs = ['esysUtils/src'], build_dir='build/$PLATFORM/esysUtils', duplicate=0) |
# Note that we do not add the libs since they are only needed for the |
620 |
env.SConscript(dirs = ['finley/src'], build_dir='build/$PLATFORM/finley', duplicate=0) |
# weipa library and tools. |
621 |
env.SConscript(dirs = ['modellib/py_src'], build_dir='build/$PLATFORM/modellib', duplicate=0) |
#env.AppendUnique(LIBS = [env['silo_libs']]) |
622 |
env.SConscript(dirs = ['doc'], build_dir='build/$PLATFORM/doc', duplicate=0) |
|
623 |
env.SConscript(dirs = ['pyvisi/py_src'], build_dir='build/$PLATFORM/pyvisi', duplicate=0) |
######## VSL random numbers (optional) |
624 |
env.SConscript(dirs = ['pycad/py_src'], build_dir='build/$PLATFORM/pycad', duplicate=0) |
if env['vsl_random']: |
625 |
env.SConscript(dirs = ['pythonMPI/src'], build_dir='build/$PLATFORM/pythonMPI', duplicate=0) |
env.Append(CPPDEFINES = ['MKLRANDOM']) |
626 |
#env.SConscript(dirs = ['../test'], build_dir='../test/build', duplicate=0) |
|
627 |
|
######## VisIt (optional) |
628 |
|
|
629 |
|
visit_inc_path='' |
630 |
|
visit_lib_path='' |
631 |
|
if env['visit']: |
632 |
|
visit_inc_path,visit_lib_path=findLibWithHeader(env, env['visit_libs'], 'VisItControlInterface_V2.h', env['visit_prefix'], lang='c') |
633 |
|
env.AppendUnique(CPPPATH = [visit_inc_path]) |
634 |
|
env.AppendUnique(LIBPATH = [visit_lib_path]) |
635 |
|
|
636 |
|
######## MPI (optional) |
637 |
|
|
638 |
|
if env['mpi']=='no': |
639 |
|
env['mpi']='none' |
640 |
|
|
641 |
|
env['usempi'] = env['mpi']!='none' |
642 |
|
mpi_inc_path='' |
643 |
|
mpi_lib_path='' |
644 |
|
if env['usempi']: |
645 |
|
mpi_inc_path,mpi_lib_path=findLibWithHeader(env, env['mpi_libs'], 'mpi.h', env['mpi_prefix'], lang='c') |
646 |
|
env.AppendUnique(CPPPATH = [mpi_inc_path]) |
647 |
|
env.AppendUnique(LIBPATH = [mpi_lib_path]) |
648 |
|
env.AppendUnique(LIBS = env['mpi_libs']) |
649 |
|
env.PrependENVPath(LD_LIBRARY_PATH_KEY, mpi_lib_path) |
650 |
|
env.Append(CPPDEFINES = ['ESYS_MPI', 'MPI_NO_CPPBIND', 'MPICH_IGNORE_CXX_SEEK']) |
651 |
|
# NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined! |
652 |
|
# On the other hand MPT and OpenMPI don't define the latter so we have to |
653 |
|
# do that here |
654 |
|
if env['netcdf'] and env['mpi'] in ['MPT','OPENMPI']: |
655 |
|
env.Append(CPPDEFINES = ['MPI_INCLUDED']) |
656 |
|
|
657 |
|
######## BOOMERAMG (optional) |
658 |
|
|
659 |
|
if env['mpi'] == 'none': env['boomeramg'] = False |
660 |
|
|
661 |
|
boomeramg_inc_path='' |
662 |
|
boomeramg_lib_path='' |
663 |
|
if env['boomeramg']: |
664 |
|
boomeramg_inc_path,boomeramg_lib_path=findLibWithHeader(env, env['boomeramg_libs'], 'HYPRE.h', env['boomeramg_prefix'], lang='c') |
665 |
|
env.AppendUnique(CPPPATH = [boomeramg_inc_path]) |
666 |
|
env.AppendUnique(LIBPATH = [boomeramg_lib_path]) |
667 |
|
env.AppendUnique(LIBS = env['boomeramg_libs']) |
668 |
|
env.PrependENVPath(LD_LIBRARY_PATH_KEY, boomeramg_lib_path) |
669 |
|
env.Append(CPPDEFINES = ['BOOMERAMG']) |
670 |
|
|
671 |
|
######## ParMETIS (optional) |
672 |
|
|
673 |
|
if not env['usempi']: env['parmetis'] = False |
674 |
|
|
675 |
|
parmetis_inc_path='' |
676 |
|
parmetis_lib_path='' |
677 |
|
if env['parmetis']: |
678 |
|
parmetis_inc_path,parmetis_lib_path=findLibWithHeader(env, env['parmetis_libs'], 'parmetis.h', env['parmetis_prefix'], lang='c') |
679 |
|
env.AppendUnique(CPPPATH = [parmetis_inc_path]) |
680 |
|
env.AppendUnique(LIBPATH = [parmetis_lib_path]) |
681 |
|
env.AppendUnique(LIBS = env['parmetis_libs']) |
682 |
|
env.PrependENVPath(LD_LIBRARY_PATH_KEY, parmetis_lib_path) |
683 |
|
env.Append(CPPDEFINES = ['USE_PARMETIS']) |
684 |
|
|
685 |
|
######## gmsh (optional, for tests)

# Probe for a gmsh binary by running "gmsh -info" and scanning its stderr
# output for the word "MPI". env['gmsh'] ends up as:
#   'm'   - gmsh found, MPI-enabled
#   's'   - gmsh found, serial
#   False - gmsh not found (Popen raised OSError)
try:
    import subprocess
    p=subprocess.Popen(['gmsh', '-info'], stderr=subprocess.PIPE)
    _,e=p.communicate()
    # NOTE(review): under Python 3, e is bytes, so e.split().count("MPI") is
    # always 0 and MPI-enabled gmsh would be misdetected as serial - confirm
    # the intended Python version before changing this.
    if e.split().count("MPI"):
        env['gmsh']='m'
    else:
        env['gmsh']='s'
except OSError:
    env['gmsh']=False

######## PDFLaTeX (for documentation)
# The PDF builder only exists if SCons found pdflatex; check that it also
# accepts .tex sources before enabling the documentation targets.
if 'PDF' in dir(env) and '.tex' in env.PDF.builder.src_suffixes(env):
    env['pdflatex']=True
else:
    env['pdflatex']=False
704 |
|
######################## Summarize our environment ###########################

# keep some of our install paths first in the list for the unit tests
env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
env.PrependENVPath('PYTHONPATH', prefix)
env['ENV']['ESCRIPT_ROOT'] = prefix

if not env['verbose']:
    # replace the (very long) compiler/linker command lines with short
    # progress messages in the build output
    env['CCCOMSTR'] = "Compiling $TARGET"
    env['CXXCOMSTR'] = "Compiling $TARGET"
    env['SHCCCOMSTR'] = "Compiling $TARGET"
    env['SHCXXCOMSTR'] = "Compiling $TARGET"
    env['ARCOMSTR'] = "Linking $TARGET"
    env['LINKCOMSTR'] = "Linking $TARGET"
    env['SHLINKCOMSTR'] = "Linking $TARGET"
    # (a second, identical PDFLATEXCOMSTR assignment was removed here)
    env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
    env['BIBTEXCOMSTR'] = "Generating bibliography $TARGET"
    env['MAKEINDEXCOMSTR'] = "Generating index $TARGET"
    #Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)

print("")
print("*** Config Summary (see config.log and lib/buildvars for details) ***")
print("Escript/Finley revision %s"%global_revision)
print(" Install prefix: %s"%env['prefix'])
print(" Python: %s"%sysconfig.PREFIX)
print(" boost: %s"%env['boost_prefix'])
print(" numpy: YES")
if env['usempi']:
    print(" MPI: YES (flavour: %s)"%env['mpi'])
else:
    print(" MPI: DISABLED")
if env['uselapack']:
    print(" LAPACK: YES (flavour: %s)"%env['lapack'])
else:
    print(" LAPACK: DISABLED")
# sort the optional features into enabled/disabled lists for a tidy printout
d_list=[]
e_list=[]
for i in 'debug','openmp','netcdf','parmetis','papi','mkl','umfpack','boomeramg','silo','visit','vsl_random':
    if env[i]: e_list.append(i)
    else: d_list.append(i)
for i in e_list:
    print("%16s: YES"%i)
for i in d_list:
    print("%16s: DISABLED"%i)
if env['cppunit']:
    print(" CppUnit: FOUND")
else:
    print(" CppUnit: NOT FOUND")
if env['gmsh']=='m':
    print(" gmsh: FOUND, MPI-ENABLED")
elif env['gmsh']=='s':
    print(" gmsh: FOUND")
else:
    print(" gmsh: NOT FOUND")
if env['numpy_h']:
    print(" numpy headers: FOUND")
else:
    print(" numpy headers: NOT FOUND")
print(" vsl_random: %s"%env['vsl_random'])

if ((fatalwarning != '') and (env['werror'])):
    print(" Treating warnings as errors")
else:
    print(" NOT treating warnings as errors")
print("")
771 |
|
####################### Configure the subdirectories #########################

from grouptest import *

# accumulated by the SConscripts below; used later to generate utest.sh
TestGroups=[]

# keep an environment without warnings-as-errors
dodgy_env=env.Clone()

# now add warnings-as-errors flags. This needs to be done after configuration
# because the scons test files have warnings in them
if ((fatalwarning != '') and (env['werror'])):
    env.Append(CCFLAGS = fatalwarning)

# make these names visible to every SConscript
Export(
  ['env',
   'dodgy_env',
   'IS_WINDOWS',
   'TestGroups'
  ]
)

# each subproject builds out-of-tree under $BUILD_DIR/$PLATFORM
env.SConscript(dirs = ['tools/escriptconvert'], variant_dir='$BUILD_DIR/$PLATFORM/tools/escriptconvert', duplicate=0)
env.SConscript(dirs = ['paso/src'], variant_dir='$BUILD_DIR/$PLATFORM/paso', duplicate=0)
env.SConscript(dirs = ['weipa/src'], variant_dir='$BUILD_DIR/$PLATFORM/weipa', duplicate=0)
env.SConscript(dirs = ['escript/src'], variant_dir='$BUILD_DIR/$PLATFORM/escript', duplicate=0)
env.SConscript(dirs = ['esysUtils/src'], variant_dir='$BUILD_DIR/$PLATFORM/esysUtils', duplicate=0)
env.SConscript(dirs = ['pasowrap/src'], variant_dir='$BUILD_DIR/$PLATFORM/pasowrap', duplicate=0)
env.SConscript(dirs = ['dudley/src'], variant_dir='$BUILD_DIR/$PLATFORM/dudley', duplicate=0)
env.SConscript(dirs = ['finley/src'], variant_dir='$BUILD_DIR/$PLATFORM/finley', duplicate=0)
env.SConscript(dirs = ['ripley/src'], variant_dir='$BUILD_DIR/$PLATFORM/ripley', duplicate=0)
env.SConscript(dirs = ['modellib/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/modellib', duplicate=0)
env.SConscript(dirs = ['doc'], variant_dir='$BUILD_DIR/$PLATFORM/doc', duplicate=0)
env.SConscript(dirs = ['pyvisi/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pyvisi', duplicate=0)
env.SConscript(dirs = ['pycad/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pycad', duplicate=0)
env.SConscript(dirs = ['pythonMPI/src'], variant_dir='$BUILD_DIR/$PLATFORM/pythonMPI', duplicate=0)
env.SConscript(dirs = ['paso/profiling'], variant_dir='$BUILD_DIR/$PLATFORM/paso/profiling', duplicate=0)
809 |
|
|
810 |
|
######################## Populate the buildvars file #########################

# remove obsolete file
if not env['usempi']:
    Execute(Delete(os.path.join(env['libinstall'], 'pythonMPI')))
    Execute(Delete(os.path.join(env['libinstall'], 'pythonMPIredirect')))

# Try to extract the boost version from version.hpp
boostversion='unknown'
boosthpp=open(os.path.join(boost_inc_path, 'boost', 'version.hpp'))
try:
    # Fixed: the old "except StopIteration: pass" was dead code (a for loop
    # consumes StopIteration itself); use finally so the handle is closed
    # even if reading raises.
    for line in boosthpp:
        ver=re.match(r'#define BOOST_VERSION (\d+)',line)
        if ver:
            boostversion=ver.group(1)
finally:
    boosthpp.close()

# record the full build configuration for run-escript and bug reports
buildvars=open(os.path.join(env['libinstall'], 'buildvars'), 'w')
buildvars.write("svn_revision="+str(global_revision)+"\n")
buildvars.write("prefix="+prefix+"\n")
buildvars.write("cc="+env['CC']+"\n")
buildvars.write("cxx="+env['CXX']+"\n")
if env['pythoncmd']=='python':
    # building with the same interpreter that runs scons
    buildvars.write("python="+sys.executable+"\n")
    buildvars.write("python_version="+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+"\n")
else:
    # ask the configured interpreter for its version
    buildvars.write("python="+env['pythoncmd']+"\n")
    p=Popen([env['pythoncmd'], '-c', 'from __future__ import print_function;import sys;print(str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2]))'], stdout=PIPE)
    verstring=p.stdout.readline().strip()
    p.wait()
    buildvars.write("python_version="+verstring+"\n")
buildvars.write("boost_inc_path="+boost_inc_path+"\n")
buildvars.write("boost_lib_path="+boost_lib_path+"\n")
buildvars.write("boost_version="+boostversion+"\n")
buildvars.write("debug=%d\n"%int(env['debug']))
buildvars.write("openmp=%d\n"%int(env['openmp']))
buildvars.write("mpi=%s\n"%env['mpi'])
buildvars.write("mpi_inc_path=%s\n"%mpi_inc_path)
buildvars.write("mpi_lib_path=%s\n"%mpi_lib_path)
buildvars.write("lapack=%s\n"%env['lapack'])
buildvars.write("pyvisi=%d\n"%env['pyvisi'])
buildvars.write("vsl_random=%d\n"%int(env['vsl_random']))
for i in 'netcdf','parmetis','papi','mkl','umfpack','boomeramg','silo','visit':
    buildvars.write("%s=%d\n"%(i, int(env[i])))
    if env[i]:
        # eval() picks up the matching <name>_inc_path/<name>_lib_path
        # variables set in the detection sections above; the names are
        # built from a fixed whitelist, so this is not an injection risk.
        buildvars.write("%s_inc_path=%s\n"%(i, eval(i+'_inc_path')))
        buildvars.write("%s_lib_path=%s\n"%(i, eval(i+'_lib_path')))
buildvars.close()
861 |
|
|
862 |
|
################### Targets to build and install libraries ###################

# a (possibly empty) package __init__.py so the install dir is importable
target_init = env.Command(os.path.join(env['pyinstall'],'__init__.py'), None, Touch('$TARGET'))
env.Alias('target_init', [target_init])
# delete buildvars upon cleanup
env.Clean('target_init', os.path.join(env['libinstall'], 'buildvars'))

# The headers have to be installed prior to build in order to satisfy
# #include <paso/Common.h>
env.Alias('build_esysUtils', ['install_esysUtils_headers', 'build_esysUtils_lib'])
env.Alias('install_esysUtils', ['build_esysUtils', 'install_esysUtils_lib'])

env.Alias('build_paso', ['install_paso_headers', 'build_paso_lib'])
env.Alias('install_paso', ['build_paso', 'install_paso_lib'])

env.Alias('build_escript', ['install_escript_headers', 'build_escript_lib', 'build_escriptcpp_lib'])
env.Alias('install_escript', ['build_escript', 'install_escript_lib', 'install_escriptcpp_lib', 'install_escript_py'])

env.Alias('build_pasowrap', ['install_pasowrap_headers', 'build_pasowrap_lib', 'build_pasowrapcpp_lib'])
env.Alias('install_pasowrap', ['build_pasowrap', 'install_pasowrap_lib', 'install_pasowrapcpp_lib', 'install_pasowrap_py'])

env.Alias('build_dudley', ['install_dudley_headers', 'build_dudley_lib', 'build_dudleycpp_lib'])
env.Alias('install_dudley', ['build_dudley', 'install_dudley_lib', 'install_dudleycpp_lib', 'install_dudley_py'])

env.Alias('build_finley', ['install_finley_headers', 'build_finley_lib', 'build_finleycpp_lib'])
env.Alias('install_finley', ['build_finley', 'install_finley_lib', 'install_finleycpp_lib', 'install_finley_py'])

env.Alias('build_ripley', ['install_ripley_headers', 'build_ripley_lib', 'build_ripleycpp_lib'])
env.Alias('install_ripley', ['build_ripley', 'install_ripley_lib', 'install_ripleycpp_lib', 'install_ripley_py'])

env.Alias('build_weipa', ['install_weipa_headers', 'build_weipa_lib', 'build_weipacpp_lib'])
env.Alias('install_weipa', ['build_weipa', 'install_weipa_lib', 'install_weipacpp_lib', 'install_weipa_py'])

env.Alias('build_escriptreader', ['install_weipa_headers', 'build_escriptreader_lib'])
env.Alias('install_escriptreader', ['build_escriptreader', 'install_escriptreader_lib'])

# Now gather all the above into some easy targets: build_all and install_all
build_all_list = []
build_all_list += ['build_esysUtils']
build_all_list += ['build_paso']
build_all_list += ['build_escript']
build_all_list += ['build_pasowrap']
build_all_list += ['build_dudley']
build_all_list += ['build_finley']
build_all_list += ['build_ripley']
build_all_list += ['build_weipa']
if not IS_WINDOWS: build_all_list += ['build_escriptreader']
if env['usempi']: build_all_list += ['build_pythonMPI']
build_all_list += ['build_escriptconvert']
env.Alias('build_all', build_all_list)

install_all_list = []
install_all_list += ['target_init']
install_all_list += ['install_esysUtils']
install_all_list += ['install_paso']
install_all_list += ['install_escript']
install_all_list += ['install_pasowrap']
install_all_list += ['install_dudley']
install_all_list += ['install_finley']
install_all_list += ['install_ripley']
install_all_list += ['install_weipa']
if not IS_WINDOWS: install_all_list += ['install_escriptreader']
#install_all_list += ['install_pyvisi_py']
install_all_list += ['install_modellib_py']
install_all_list += ['install_pycad_py']
if env['usempi']: install_all_list += ['install_pythonMPI']
install_all_list += ['install_escriptconvert']
env.Alias('install_all', install_all_list)

# Default target is install
env.Default('install_all')

################## Targets to build and run the test suite ###################

# without CppUnit, run_tests degrades to an error message instead of tests
test_msg = env.Command('.dummy.', None, '@echo "Cannot run C/C++ unit tests, CppUnit not found!";exit 1')
if not env['cppunit']:
    env.Alias('run_tests', test_msg)
env.Alias('run_tests', ['install_all'])
env.Alias('all_tests', ['install_all', 'run_tests', 'py_tests'])
env.Alias('build_full',['install_all','build_tests','build_py_tests'])
env.Alias('build_PasoTests','$BUILD_DIR/$PLATFORM/paso/profiling/PasoTests')

##################### Targets to build the documentation #####################

env.Alias('api_epydoc','install_all')
env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'user_pdf', 'install_pdf', 'cookbook_pdf'])
env.Alias('release_prep', ['docs', 'install_all'])
949 |
|
|
950 |
syslib_install_target = env.installDirectory(sys_libinstall,libinstall) |
if not IS_WINDOWS: |
951 |
syspy_install_target = env.installDirectory(sys_pyinstall,pyinstall,recursive=True) |
try: |
952 |
|
utest=open('utest.sh','w') |
953 |
|
utest.write(GroupTest.makeHeader(env['PLATFORM'], prefix)) |
954 |
|
for tests in TestGroups: |
955 |
|
utest.write(tests.makeString()) |
956 |
|
utest.close() |
957 |
|
Execute(Chmod('utest.sh', 0o755)) |
958 |
|
print("Generated utest.sh.") |
959 |
|
except IOError: |
960 |
|
print("Error attempting to write unittests file.") |
961 |
|
Exit(1) |
962 |
|
|
963 |
|
# delete utest.sh upon cleanup |
964 |
|
env.Clean('target_init', 'utest.sh') |
965 |
|
|
966 |
|
# Make sure that the escript wrapper is in place |
967 |
|
if not os.path.isfile(os.path.join(env['bininstall'], 'run-escript')): |
968 |
|
print("Copying escript wrapper.") |
969 |
|
Execute(Copy(os.path.join(env['bininstall'],'run-escript'), 'bin/run-escript')) |
970 |
|
|
|
install_target = env.Alias("install", env.Flatten([syslib_install_target, syspy_install_target]) ) |
|