|
|
|
1 |
########################################################
#
# Copyright (c) 2003-2010 by University of Queensland
#
########################################################
|
|
|
|
|
13 |
# Require minimum build-tool versions before doing anything else.
EnsureSConsVersion(0,96,91)
EnsurePythonVersion(2,5)

import sys, os, re, socket, platform, stat
# For copy()
from distutils import sysconfig
import shutil
|
|
|
|
|
# Add our extensions |
|
|
if os.path.isdir('scons'): sys.path.append('scons') |
|
|
import scons_extensions |
|
|
|
|
|
# Use /usr/lib64 if available, else /usr/lib |
|
|
usr_lib = '/usr/lib' |
|
|
if os.path.isfile('/usr/lib64/libc.so'): usr_lib = '/usr/lib64' |
|
# The string python2.4 or python2.5 (used to build default include/lib paths)
python_version = 'python%s.%s' % (sys.version_info[0], sys.version_info[1])

# Version number to check for in options file. Increment when new features are
# added or existing options changed.
REQUIRED_OPTS_VERSION=200

# MS Windows support, many thanks to PH
IS_WINDOWS = (os.name == 'nt')
# Backwards-compatible alias; some sections still test the old name.
IS_WINDOWS_PLATFORM = IS_WINDOWS

# Installation prefix; may be overridden on the command line.
prefix = ARGUMENTS.get('prefix', Dir('#.').abspath)
########################## Determine options file ############################
# Search order:
#   1. command line (options_file=...)
#   2. scons/<hostname>_options.py
#   3. effective name as part of a cluster
options_file = ARGUMENTS.get('options_file', None)
if not options_file:
    ext_dir = os.path.join(os.getcwd(), 'scons')
    hostname = platform.node().split('.')[0]
    # Try the exact host name first, then the cluster-wide substitute name.
    for name in hostname, scons_extensions.effectiveName(hostname):
        # Mangle the candidate name (not 'hostname'!) so it forms a valid
        # python module name; using 'hostname' here would make the second
        # loop iteration a no-op.
        mangledhostname = re.sub('[^0-9a-zA-Z]', '_', name)
        options_file = os.path.join(ext_dir, mangledhostname+'_options.py')
        if os.path.isfile(options_file): break
|
# Warn (but continue with defaults) if no matching options file was found.
if not os.path.isfile(options_file):
    print("\nWARNING:\nOptions file %s" % options_file)
    print("not found! Default options will be used which is most likely suboptimal.")
    print("It is recommended that you copy one of the TEMPLATE files in the scons/")
    print("subdirectory and customize it to your needs.\n")
    options_file = None
45 |
|
|
46 |
|
######################## SCons compatibility stuff ###########################

# Call additional SConscript
# scons < 0.98: build_dir, BuildDir()
# scons >= 0.98: variant_dir, VariantDir()
import SCons
scons_ver=SCons.__version__.split('.')
cantusevariantdir=float(scons_ver[0]+'.'+scons_ver[1])<0.98

def CallSConscript(obj, **kw):
    """Invoke obj.SConscript(), translating the 'variant_dir' keyword to
    the old 'build_dir' name when running under scons < 0.98."""
    if cantusevariantdir:
        if 'variant_dir' in kw:
            kw['build_dir']=kw['variant_dir']
            del kw['variant_dir']
    obj.SConscript(**kw)

# Make a copy of an environment
# scons <= 0.98: env.Copy()
# scons > 0.98: env.Clone()
def clone_env(env):
    """Return a copy of env using whichever copy API this scons provides."""
    if 'Clone' in dir(env):
        return env.Clone()   # scons >= 0.98
    else:
        return env.Copy()    # scons-0.96
|
# Prepare user-settable variables
# scons <= 0.98.0 provides the Options API, scons >= 0.98.1 Variables.
unknown_vars=None
try:
    vars = Variables(options_file, ARGUMENTS)
    adder = vars.AddVariables
    unknown_vars = vars.UnknownVariables
except:
    # Old scons: fall back to the deprecated Options API and map the
    # new-style *Variable names onto the old *Option equivalents.
    vars = Options(options_file, ARGUMENTS)
    adder = vars.AddOptions
    if 'UnknownOptions' in dir(vars):
        unknown_vars = vars.UnknownOptions
    BoolVariable = BoolOption
    EnumVariable = EnumOption
    PackageVariable = PackageOption
    PathVariable = PathOption
# Backwards-compatible alias; older sections refer to 'opts'.
opts = vars
############################### build options ################################

default_prefix='/usr'
mpi_flavours=('none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
lapack_flavours=('none', 'clapack', 'mkl')

adder(
  PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile),
  PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate),
  BoolVariable('verbose', 'Output full compile/link lines', False),
# Compiler/Linker options
  ('cc', 'Path to C compiler', 'default'),
  ('cxx', 'Path to C++ compiler', 'default'),
  ('cc_flags', 'Base C/C++ compiler flags', 'default'),
  ('cc_optim', 'Additional C/C++ flags for a non-debug build', 'default'),
  ('cc_debug', 'Additional C/C++ flags for a debug build', 'default'),
  ('cc_extra', 'Extra C compiler flags', ''),
  ('cxx_extra', 'Extra C++ compiler flags', ''),
  ('ld_extra', 'Extra linker flags', ''),
  BoolVariable('werror','Treat compiler warnings as errors', True),
  BoolVariable('debug', 'Compile with debug flags', False),
  BoolVariable('openmp', 'Compile parallel version using OpenMP', False),
  ('omp_flags', 'OpenMP compiler flags', 'default'),
  ('omp_ldflags', 'OpenMP linker flags', 'default'),
# Mandatory libraries
  ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
  ('boost_libs', 'Boost libraries to link with', ['boost_python']),
# Optional libraries
  EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
  ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
  ('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
  BoolVariable('netcdf', 'Enable netCDF file support', True),
  ('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
  ('netcdf_libs', 'netCDF libraries to link with', ['netcdf_c++', 'netcdf']),
  BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False),
  ('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix),
  ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
  BoolVariable('papi', 'Enable PAPI', False),
  ('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix),
  ('papi_libs', 'PAPI libraries to link with', ['papi']),
  BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False),
  BoolVariable('mkl', 'Enable the Math Kernel Library', False),
  ('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix),
  ('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']),
  BoolVariable('umfpack', 'Enable UMFPACK', False),
  ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
  ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
  EnumVariable('lapack', 'Set LAPACK flavour', 'none', allowed_values=lapack_flavours),
  ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
  ('lapack_libs', 'LAPACK libraries to link with', []),
  BoolVariable('silo', 'Enable the Silo file format in weipa', False),
  ('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix),
  ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
  BoolVariable('visit', 'Enable the VisIt simulation interface', False),
  ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
  ('visit_libs', 'VisIt libraries to link with', ['simV2']),
  BoolVariable('pyvisi', 'Enable pyvisi (deprecated, requires VTK module)', False),
# Advanced settings
  # To enable passing function pointers through python
  BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False),
  # An option for specifying the compiler tools (see windows branch)
  ('tools_names', 'Compiler tools to use', ['default']),
  ('env_export', 'Environment variables to be passed to tools',[]),
  EnumVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
  EnumVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
  # finer control over library building, intel aggressive global optimisation
  # works with dynamic libraries on windows.
  ('share_esysutils', 'Build a dynamic esysUtils library', False),
  ('share_paso', 'Build a dynamic paso library', False),
  ('sys_libs', 'Extra libraries to link with', []),
  ('escript_opts_version', 'Version of options file (do not specify on command line)')
)
##################### Create environment and help text #######################

# Intel's compiler uses regular expressions improperly and emits a warning
# about failing to find the compilers. This warning can be safely ignored.

requested_tools = ARGUMENTS.get('tools_names', 'default')
if requested_tools == 'default':
    env = Environment(tools = ['default'], options = vars)
else:
    # tools_names arrives from the command line as a comma-separated string.
    # NOTE(review): the previous code read env['tools_names'] here, before
    # 'env' existed - use the command-line value instead.
    env = Environment(tools = ['default'] + requested_tools.split(','), options = vars)
|
# Reject options files that predate the current option scheme.
if options_file:
    opts_valid=False
    if 'escript_opts_version' in env.Dictionary() and \
        int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION:
            opts_valid=True
    if opts_valid:
        print("Using options in %s." % options_file)
    else:
        print("\nOptions file %s" % options_file)
        print("is outdated! Please update the file by examining one of the TEMPLATE")
        print("files in the scons/ subdirectory and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION)
        Exit(1)
# Generate help text (scons -h)
Help(vars.GenerateHelpText(env))

# Check for superfluous options if this scons supports it
if unknown_vars:
    for k in unknown_vars():
        print("WARNING: Ignoring unknown option '%s'" % k)
#################### Make sure install directories exist #####################

prefix=Dir(env['prefix']).abspath
env['incinstall'] = os.path.join(prefix, 'include')
env['bininstall'] = os.path.join(prefix, 'bin')
env['libinstall'] = os.path.join(prefix, 'lib')
env['pyinstall'] = os.path.join(prefix, 'esys')
if not os.path.isdir(env['bininstall']):
    os.makedirs(env['bininstall'])
# NOTE: the libinstall makedirs call was dropped in the corrupted source;
# restored here so the library directory is created like the others.
if not os.path.isdir(env['libinstall']):
    os.makedirs(env['libinstall'])
if not os.path.isdir(env['pyinstall']):
    os.makedirs(env['pyinstall'])

env.Append(CPPPATH = [env['incinstall']])
env.Append(LIBPATH = [env['libinstall']])
|
################# Fill in compiler options if not set above ##################

# Honour explicit compiler choices from the options file / command line.
if env['cc'] != 'default': env['CC']=env['cc']
if env['cxx'] != 'default': env['CXX']=env['cxx']

# version >=9 of intel C++ compiler requires use of icpc to link in C++
# runtimes (icc does not)
if not IS_WINDOWS and os.uname()[4]=='ia64' and env['CXX']=='icpc':
    env['LINK'] = env['CXX']

# default compiler/linker options
cc_flags = ''
cc_optim = ''
cc_debug = ''
omp_flags = ''
omp_ldflags = ''
fatalwarning = '' # switch to turn warnings into errors
sysheaderopt = '' # how to indicate that a header is a system header

# env['CC'] might be a full path
cc_name=os.path.basename(env['CC'])

if cc_name == 'icc':
    # Intel compiler
    cc_flags = "-std=c99 -fPIC -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1"
    cc_optim = "-O3 -ftz -IPF_ftlacc- -IPF_fma -fno-alias -ip"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_flags = "-openmp -openmp_report0"
    omp_ldflags = "-openmp -openmp_report0 -lguide -lpthread"
elif cc_name[:3] == 'gcc':
    # GNU C on any system
    cc_flags = "-pedantic -Wall -fPIC -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
    cc_optim = "-O3"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_flags = "-fopenmp"
    omp_ldflags = "-fopenmp"
    fatalwarning = "-Werror"
    sysheaderopt = "-isystem"
elif cc_name == 'cl':
    # Microsoft Visual C on Windows
    cc_flags = "/EHsc /GR /MD /wd4068 -D_USE_MATH_DEFINES -DDLL_NETCDF"
    cc_optim = "/O2 /Op /MT /W3"
    cc_debug = "/Od /RTCcsu /MTd /ZI -DBOUNDS_CHECK"
    fatalwarning = "/WX"
elif cc_name == 'icl':
    # Intel C on Windows
    cc_flags = '/EHsc /GR /MD'
    cc_optim = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll'
    cc_debug = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv'
    omp_flags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
    omp_ldflags = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'

# set defaults if not otherwise specified
if env['cc_flags'] == 'default': env['cc_flags'] = cc_flags
if env['cc_optim'] == 'default': env['cc_optim'] = cc_optim
if env['cc_debug'] == 'default': env['cc_debug'] = cc_debug
if env['omp_flags'] == 'default': env['omp_flags'] = omp_flags
if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
if env['cc_extra'] != '': env.Append(CFLAGS = env['cc_extra'])
if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra'])

# set up the autolazy values
if env['forcelazy'] == 'on':
    env.Append(CPPDEFINES=['FAUTOLAZYON'])
elif env['forcelazy'] == 'off':
    env.Append(CPPDEFINES=['FAUTOLAZYOFF'])

# set up the collective resolve values
if env['forcecollres'] == 'on':
    env.Append(CPPDEFINES=['FRESCOLLECTON'])
elif env['forcecollres'] == 'off':
    env.Append(CPPDEFINES=['FRESCOLLECTOFF'])

# allow non-standard C if requested
if env['iknowwhatimdoing']:
    env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])

# Disable OpenMP if no flags are known for this compiler
if env['openmp'] and env['omp_flags'] == '':
    print("OpenMP requested but no flags provided - disabling OpenMP!")
    env['openmp'] = False

if env['openmp']:
    env.Append(CCFLAGS = env['omp_flags'])
    if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags'])
else:
    env['omp_flags']=''
    env['omp_ldflags']=''

# add debug/non-debug compiler flags
if env['debug']:
    env.Append(CCFLAGS = env['cc_debug'])
else:
    env.Append(CCFLAGS = env['cc_optim'])

# always add cc_flags
env.Append(CCFLAGS = env['cc_flags'])
# Get the global Subversion revision number for the getVersion() method
try:
    global_revision = os.popen('svnversion -n .').read()
    # strip a mixed-revision suffix (e.g. "1234:1236") and any modifiers
    global_revision = re.sub(':.*', '', global_revision)
    global_revision = re.sub('[^0-9]', '', global_revision)
    # empty string: source tree is not under version control
    if global_revision == '': global_revision='-2'
except:
    # svnversion could not be run at all
    global_revision = '-1'
env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])
# On Windows the esys libraries are linked statically unless requested
# otherwise via the share_* options.
if IS_WINDOWS:
    if not env['share_esysutils']:
        env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
    if not env['share_paso']:
        env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])

###################### Copy required environment vars ########################

# Windows doesn't use LD_LIBRARY_PATH but PATH instead
if IS_WINDOWS:
    LD_LIBRARY_PATH_KEY='PATH'
    env['ENV']['LD_LIBRARY_PATH']=''
else:
    LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'

# the following env variables are exported for the unit tests
for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        env['ENV'][key] = 1

env_export=env['env_export']
env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY'])

# Copy user-requested variables when set; missing ones are simply skipped.
for key in set(env_export):
    try:
        env['ENV'][key] = os.environ[key]
    except KeyError:
        pass

try:
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY])
except KeyError:
    pass

try:
    env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
except KeyError:
    pass

######################## Add some custom builders ############################
|
if IS_WINDOWS_PLATFORM: |
|
|
env.AppendENVPath('PATH', [env['boost_lib_path']]) |
|
|
env.AppendENVPath('PATH', [env['libinstall']]) |
|
|
if not env['share_esysUtils'] : |
|
|
env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB']) |
|
|
if not env['share_paso'] : |
|
|
env.Append(CPPDEFINES = ['PASO_STATIC_LIB']) |
|
368 |
|
|
369 |
if env['usenetcdf']: |
py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True) |
370 |
env.AppendENVPath('PATH', [env['netCDF_lib_path']]) |
env.Append(BUILDERS = {'PyCompile' : py_builder}); |
371 |
|
|
372 |
env.Append(ARFLAGS = env['ar_flags']) |
runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True) |
373 |
|
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder}); |
374 |
|
|
375 |
# Get the global Subversion revision number for getVersion() method |
runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffic='.py', single_source=True) |
376 |
try: |
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder}); |
|
global_revision = os.popen("svnversion -n .").read() |
|
|
global_revision = re.sub(":.*", "", global_revision) |
|
|
global_revision = re.sub("[^0-9]", "", global_revision) |
|
|
except: |
|
|
global_revision="-1" |
|
|
if global_revision == "": global_revision="-2" |
|
|
env.Append(CPPDEFINES = ["SVN_VERSION="+global_revision]) |
|
377 |
|
|
378 |
############ numpy (required) ############################### |
epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True) |
379 |
|
env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder}); |
380 |
|
|
381 |
try: |
############################ Dependency checks ############################### |
|
from numpy import identity |
|
|
except ImportError: |
|
|
print "Cannot import numpy, you need to set your PYTHONPATH" |
|
|
sys.exit(1) |
|
382 |
|
|
383 |
############ C compiler (required) ############################# |
# Create a Configure() environment to check for compilers and python |
384 |
|
conf = Configure(env) |
385 |
|
|
386 |
# Create a Configure() environment for checking existence of required libraries and headers |
######## Test that the compilers work |
|
conf = Configure(clone_env(env)) |
|
387 |
|
|
388 |
# Test that the compiler is working |
if 'CheckCC' in dir(conf): # exists since scons 1.1.0 |
389 |
if not conf.CheckFunc('printf'): |
if not conf.CheckCC(): |
390 |
print "Cannot run C compiler '%s' (or libc is missing)" % (env['CC']) |
print("Cannot run C compiler '%s' (check config.log)" % (env['CC'])) |
391 |
sys.exit(1) |
Exit(1) |
392 |
|
if not conf.CheckCXX(): |
393 |
|
print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX'])) |
394 |
|
Exit(1) |
395 |
|
else: |
396 |
|
if not conf.CheckFunc('printf', language='c'): |
397 |
|
print("Cannot run C compiler '%s' (check config.log)" % (env['CC'])) |
398 |
|
Exit(1) |
399 |
|
if not conf.CheckFunc('printf', language='c++'): |
400 |
|
print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX'])) |
401 |
|
Exit(1) |
402 |
|
|
403 |
if conf.CheckFunc('gethostname'): |
if conf.CheckFunc('gethostname'): |
404 |
conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME']) |
conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME']) |
405 |
|
|
406 |
############ python libraries (required) ####################### |
######## Python headers & library (required) |
407 |
|
|
408 |
|
python_inc_path=sysconfig.get_python_inc() |
409 |
|
python_lib_path=sysconfig.get_config_var('LIBDIR') |
410 |
|
python_libs=[sysconfig.get_config_var('LDLIBRARY')] |
411 |
|
|
412 |
if not sysheaderopt =="": |
if sysheaderopt == '': |
413 |
conf.env.Append(CCFLAGS=sysheaderopt+env['python_path']) |
conf.env.AppendUnique(CPPPATH = [python_inc_path]) |
414 |
else: |
else: |
415 |
conf.env.AppendUnique(CPPPATH = [env['python_path']]) |
conf.env.Append(CCFLAGS = [sysheaderopt, python_inc_path]) |
|
|
|
|
conf.env.AppendUnique(LIBPATH = [env['python_lib_path']]) |
|
|
conf.env.AppendUnique(LIBS = [env['python_libs']]) |
|
416 |
|
|
417 |
conf.env.PrependENVPath('PYTHONPATH', prefix) |
conf.env.AppendUnique(LIBPATH = [python_lib_path]) |
418 |
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['python_lib_path']) # The wrapper script needs to find these libs |
conf.env.AppendUnique(LIBS = python_libs) |
419 |
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall']) |
# The wrapper script needs to find the libs |
420 |
|
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, python_lib_path) |
421 |
|
|
422 |
if not conf.CheckCHeader('Python.h'): |
if not conf.CheckCHeader('Python.h'): |
423 |
print "Cannot find python include files (tried 'Python.h' in directory %s)" % (env['python_path']) |
print("Cannot find python include files (tried 'Python.h' in directory %s)" % (python_inc_path)) |
424 |
sys.exit(1) |
Exit(1) |
425 |
if not conf.CheckFunc('Py_Exit'): |
if not conf.CheckFunc('Py_Exit'): |
426 |
print "Cannot find python library method Py_Main (tried lib %s in directory %s)" % (env['python_libs'], env['python_lib_path']) |
print("Cannot find python library method Py_Main (tried %s in directory %s)" % (python_libs, python_lib_path)) |
427 |
sys.exit(1) |
Exit(1) |
|
|
|
|
############ boost (required) ################################## |
|
|
|
|
|
if not sysheaderopt =="": |
|
|
# This is required because we can't -isystem /usr/system because it breaks std includes |
|
|
if os.path.normpath(env['boost_path']) =="/usr/include": |
|
|
conf.env.Append(CCFLAGS=sysheaderopt+os.path.join(env['boost_path'],'boost')) |
|
|
else: |
|
|
conf.env.Append(CCFLAGS=sysheaderopt+env['boost_path']) |
|
|
else: |
|
|
conf.env.AppendUnique(CPPPATH = [env['boost_path']]) |
|
|
|
|
|
conf.env.AppendUnique(LIBPATH = [env['boost_lib_path']]) |
|
|
conf.env.AppendUnique(LIBS = [env['boost_libs']]) |
|
|
|
|
|
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['boost_lib_path']) # The wrapper script needs to find these libs |
|
|
#ensure that our path entries remain at the front |
|
|
conf.env.PrependENVPath('PYTHONPATH', prefix) |
|
|
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall']) |
|
|
|
|
|
|
|
|
#Yep we still cant figure this one out. - working on it. |
|
|
if not IS_WINDOWS_PLATFORM: |
|
|
if not conf.CheckCXXHeader('boost/python.hpp'): |
|
|
print "Cannot find boost include files (tried boost/python.hpp in directory %s)" % (env['boost_path']) |
|
|
sys.exit(1) |
|
|
|
|
|
if not conf.CheckFunc('PyObject_SetAttr'): |
|
|
print "Cannot find boost library method PyObject_SetAttr (tried method PyObject_SetAttr in library %s in directory %s)" % (env['boost_libs'], env['boost_lib_path']) |
|
|
sys.exit(1) |
|
|
|
|
428 |
|
|
429 |
# Commit changes to environment |
# Commit changes to environment |
430 |
env = conf.Finish() |
env = conf.Finish() |
431 |
|
|
432 |
############ VTK (optional) #################################### |
######## boost (required) |
|
|
|
|
if env['usevtk']: |
|
|
try: |
|
|
import vtk |
|
|
env['usevtk'] = 1 |
|
|
except ImportError: |
|
|
env['usevtk'] = 0 |
|
|
|
|
|
# Add VTK to environment env if it was found |
|
|
if env['usevtk']: |
|
|
env.Append(CPPDEFINES = ['USE_VTK']) |
|
|
|
|
|
############ NetCDF (optional) ################################# |
|
|
|
|
|
conf = Configure(clone_env(env)) |
|
|
|
|
|
if env['usenetcdf']: |
|
|
conf.env.AppendUnique(CPPPATH = [env['netCDF_path']]) |
|
|
conf.env.AppendUnique(LIBPATH = [env['netCDF_lib_path']]) |
|
|
conf.env.AppendUnique(LIBS = [env['netCDF_libs']]) |
|
|
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['netCDF_lib_path']) # The wrapper script needs to find these libs |
|
|
#ensure that our path entries remain at the front |
|
|
conf.env.PrependENVPath('PYTHONPATH', prefix) |
|
|
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall']) |
|
|
|
|
|
if env['usenetcdf'] and not conf.CheckCHeader('netcdf.h'): env['usenetcdf'] = 0 |
|
|
if env['usenetcdf'] and not conf.CheckFunc('nc_open'): env['usenetcdf'] = 0 |
|
|
|
|
|
# Add NetCDF to environment env if it was found |
|
|
if env['usenetcdf']: |
|
|
env = conf.Finish() |
|
|
env.Append(CPPDEFINES = ['USE_NETCDF']) |
|
|
else: |
|
|
conf.Finish() |
|
|
|
|
|
############ PAPI (optional) ################################### |
|
|
|
|
|
# Start a new configure environment that reflects what we've already found |
|
|
conf = Configure(clone_env(env)) |
|
|
|
|
|
if env['usepapi']: |
|
|
conf.env.AppendUnique(CPPPATH = [env['papi_path']]) |
|
|
conf.env.AppendUnique(LIBPATH = [env['papi_lib_path']]) |
|
|
conf.env.AppendUnique(LIBS = [env['papi_libs']]) |
|
|
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['papi_lib_path']) # The wrapper script needs to find these libs |
|
|
#ensure that our path entries remain at the front |
|
|
conf.env.PrependENVPath('PYTHONPATH', prefix) |
|
|
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall']) |
|
|
|
|
|
if env['usepapi'] and not conf.CheckCHeader('papi.h'): env['usepapi'] = 0 |
|
|
if env['usepapi'] and not conf.CheckFunc('PAPI_start_counters'): env['usepapi'] = 0 |
|
|
|
|
|
# Add PAPI to environment env if it was found |
|
|
if env['usepapi']: |
|
|
env = conf.Finish() |
|
|
env.Append(CPPDEFINES = ['BLOCKPAPI']) |
|
|
else: |
|
|
conf.Finish() |
|
|
|
|
|
############ MKL (optional) #################################### |
|
|
|
|
|
# Start a new configure environment that reflects what we've already found |
|
|
conf = Configure(clone_env(env)) |
|
|
|
|
|
if env['usemkl']: |
|
|
conf.env.AppendUnique(CPPPATH = [env['mkl_path']]) |
|
|
conf.env.AppendUnique(LIBPATH = [env['mkl_lib_path']]) |
|
|
conf.env.AppendUnique(LIBS = [env['mkl_libs']]) |
|
|
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['mkl_lib_path']) # The wrapper script needs to find these libs |
|
|
#ensure that our path entries remain at the front |
|
|
conf.env.PrependENVPath('PYTHONPATH', prefix) |
|
|
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall']) |
|
|
|
|
|
if env['usemkl'] and not conf.CheckCHeader('mkl_solver.h'): env['usemkl'] = 0 |
|
|
if env['usemkl'] and not conf.CheckFunc('pardiso'): env['usemkl'] = 0 |
|
|
|
|
|
|
|
|
# Add MKL to environment env if it was found |
|
|
if env['usemkl']: |
|
|
env = conf.Finish() |
|
|
env.Append(CPPDEFINES = ['MKL']) |
|
|
else: |
|
|
conf.Finish() |
|
|
|
|
|
############ UMFPACK (optional) ################################ |
|
|
|
|
|
# Start a new configure environment that reflects what we've already found |
|
|
conf = Configure(clone_env(env)) |
|
433 |
|
|
434 |
if env['useumfpack']: |
boost_inc_path,boost_lib_path=findLibWithHeader(env, env['boost_libs'], 'boost/python.hpp', env['boost_prefix'], lang='c++') |
435 |
conf.env.AppendUnique(CPPPATH = [env['ufc_path']]) |
if sysheaderopt == '': |
436 |
conf.env.AppendUnique(CPPPATH = [env['umf_path']]) |
env.AppendUnique(CPPPATH = [boost_inc_path]) |
437 |
conf.env.AppendUnique(LIBPATH = [env['umf_lib_path']]) |
else: |
438 |
conf.env.AppendUnique(LIBS = [env['umf_libs']]) |
# This is required because we can't -isystem /usr/include since it breaks |
439 |
conf.env.AppendUnique(CPPPATH = [env['amd_path']]) |
# std includes |
440 |
conf.env.AppendUnique(LIBPATH = [env['amd_lib_path']]) |
if os.path.normpath(boost_inc_path) == '/usr/include': |
441 |
conf.env.AppendUnique(LIBS = [env['amd_libs']]) |
conf.env.Append(CCFLAGS=[sysheaderopt, os.path.join(boost_inc_path,'boost')]) |
442 |
conf.env.AppendUnique(CPPPATH = [env['blas_path']]) |
else: |
443 |
conf.env.AppendUnique(LIBPATH = [env['blas_lib_path']]) |
env.Append(CCFLAGS=[sysheaderopt, boost_inc_path]) |
444 |
conf.env.AppendUnique(LIBS = [env['blas_libs']]) |
|
445 |
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['umf_lib_path']) # The wrapper script needs to find these libs |
env.AppendUnique(LIBPATH = [boost_lib_path]) |
446 |
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['amd_lib_path']) # The wrapper script needs to find these libs |
env.AppendUnique(LIBS = env['boost_libs']) |
447 |
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['blas_lib_path']) # The wrapper script needs to find these libs |
env.PrependENVPath(LD_LIBRARY_PATH_KEY, boost_lib_path) |
|
#ensure that our path entries remain at the front |
|
|
conf.env.PrependENVPath('PYTHONPATH', prefix) |
|
|
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall']) |
|
|
|
|
|
if env['useumfpack'] and not conf.CheckCHeader('umfpack.h'): env['useumfpack'] = 0 |
|
|
if env['useumfpack'] and not conf.CheckFunc('umfpack_di_symbolic'): env['useumfpack'] = 0 |
|
|
# if env['useumfpack'] and not conf.CheckFunc('daxpy'): env['useumfpack'] = 0 # this does not work on shake73? |
|
|
|
|
|
# Add UMFPACK to environment env if it was found |
|
|
if env['useumfpack']: |
|
|
env = conf.Finish() |
|
|
env.Append(CPPDEFINES = ['UMFPACK']) |
|
|
else: |
|
|
conf.Finish() |
|
448 |
|
|
449 |
############ Silo (optional) ################################### |
######## numpy (required) |
450 |
|
|
451 |
if env['usesilo']: |
try: |
452 |
conf = Configure(clone_env(env)) |
from numpy import identity |
453 |
conf.env.AppendUnique(CPPPATH = [env['silo_path']]) |
except ImportError: |
454 |
conf.env.AppendUnique(LIBPATH = [env['silo_lib_path']]) |
print("Cannot import numpy, you need to set your PYTHONPATH and probably %s"%LD_LIBRARY_PATH_KEY) |
455 |
conf.env.AppendUnique(LIBS = [env['silo_libs']]) |
Exit(1) |
|
if not conf.CheckCHeader('silo.h'): env['usesilo'] = 0 |
|
|
if not conf.CheckFunc('DBMkDir'): env['usesilo'] = 0 |
|
|
conf.Finish() |
|
|
|
|
|
# Add the path to Silo to environment env if it was found. |
|
|
# Note that we do not add the libs since they are only needed for the |
|
|
# weipa library and tools. |
|
|
if env['usesilo']: |
|
|
env.AppendUnique(CPPPATH = [env['silo_path']]) |
|
|
env.AppendUnique(LIBPATH = [env['silo_lib_path']]) |
|
|
|
|
|
############ VisIt (optional) ################################### |
|
|
|
|
|
if env['usevisit']: |
|
|
env.AppendUnique(CPPPATH = [env['visit_path']]) |
|
|
env.AppendUnique(LIBPATH = [env['visit_lib_path']]) |
|
456 |
|
|
457 |
########### Lapack (optional) ################################## |
######## VTK (optional) |
458 |
|
|
459 |
|
if env['pyvisi']: |
460 |
|
try: |
461 |
|
import vtk |
462 |
|
env['pyvisi'] = True |
463 |
|
except ImportError: |
464 |
|
env['pyvisi'] = False |
465 |
|
|
466 |
|
######## netCDF (optional) |
467 |
|
|
468 |
|
netcdf_inc_path='' |
469 |
|
netcdf_lib_path='' |
470 |
|
if env['netcdf']: |
471 |
|
netcdf_inc_path,netcdf_lib_path=findLibWithHeader(env, env['netcdf_libs'], 'netcdf.h', env['netcdf_prefix'], lang='c++') |
472 |
|
env.AppendUnique(CPPPATH = [netcdf_inc_path]) |
473 |
|
env.AppendUnique(LIBPATH = [netcdf_lib_path]) |
474 |
|
env.AppendUnique(LIBS = env['netcdf_libs']) |
475 |
|
env.PrependENVPath(LD_LIBRARY_PATH_KEY, netcdf_lib_path) |
476 |
|
env.Append(CPPDEFINES = ['USE_NETCDF']) |
477 |
|
|
478 |
|
######## PAPI (optional) |
479 |
|
|
480 |
|
papi_inc_path='' |
481 |
|
papi_lib_path='' |
482 |
|
if env['papi']: |
483 |
|
papi_inc_path,papi_lib_path=findLibWithHeader(env, env['papi_libs'], 'papi.h', env['papi_prefix'], lang='c') |
484 |
|
env.AppendUnique(CPPPATH = [papi_inc_path]) |
485 |
|
env.AppendUnique(LIBPATH = [papi_lib_path]) |
486 |
|
env.AppendUnique(LIBS = env['papi_libs']) |
487 |
|
env.PrependENVPath(LD_LIBRARY_PATH_KEY, papi_lib_path) |
488 |
|
env.Append(CPPDEFINES = ['BLOCKPAPI']) |
489 |
|
|
490 |
|
######## MKL (optional) |
491 |
|
|
492 |
|
mkl_inc_path='' |
493 |
|
mkl_lib_path='' |
494 |
|
if env['mkl']: |
495 |
|
mkl_inc_path,mkl_lib_path=findLibWithHeader(env, env['mkl_libs'], 'mkl_solver.h', env['mkl_prefix'], lang='c') |
496 |
|
env.AppendUnique(CPPPATH = [mkl_inc_path]) |
497 |
|
env.AppendUnique(LIBPATH = [mkl_lib_path]) |
498 |
|
env.AppendUnique(LIBS = env['mkl_libs']) |
499 |
|
env.PrependENVPath(LD_LIBRARY_PATH_KEY, mkl_lib_path) |
500 |
|
env.Append(CPPDEFINES = ['MKL']) |
501 |
|
|
502 |
|
######## UMFPACK (optional) |
503 |
|
|
504 |
|
umfpack_inc_path='' |
505 |
|
umfpack_lib_path='' |
506 |
|
if env['umfpack']: |
507 |
|
umfpack_inc_path,umfpack_lib_path=findLibWithHeader(env, env['umfpack_libs'], 'umfpack.h', env['umfpack_prefix'], lang='c') |
508 |
|
env.AppendUnique(CPPPATH = [umfpack_inc_path]) |
509 |
|
env.AppendUnique(LIBPATH = [umfpack_lib_path]) |
510 |
|
env.AppendUnique(LIBS = env['umfpack_libs']) |
511 |
|
env.PrependENVPath(LD_LIBRARY_PATH_KEY, umfpack_lib_path) |
512 |
|
env.Append(CPPDEFINES = ['UMFPACK']) |
513 |
|
|
514 |
|
######## LAPACK (optional) |
515 |
|
|
516 |
|
if env['lapack']=='mkl' and not env['mkl']: |
517 |
|
print("mkl_lapack requires MKL!") |
518 |
|
Exit(1) |
519 |
|
|
520 |
|
env['uselapack'] = env['lapack']!='none' |
521 |
|
lapack_inc_path='' |
522 |
|
lapack_lib_path='' |
523 |
if env['uselapack']: |
if env['uselapack']: |
524 |
env.AppendUnique(CPPDEFINES='USE_LAPACK') |
header='clapack.h' |
525 |
env.AppendUnique(CPPPATH = [env['lapack_path']]) |
if env['lapack']=='mkl': |
526 |
env.AppendUnique(LIBPATH =[env['lapack_lib_path']]) |
env.AppendUnique(CPPDEFINES = ['MKL_LAPACK']) |
527 |
|
header='mkl_lapack.h' |
528 |
env.Append(LIBPATH = '/usr/lib/atlas') |
lapack_inc_path,lapack_lib_path=findLibWithHeader(env, env['lapack_libs'], header, env['lapack_prefix'], lang='c') |
529 |
env.Append(LIBS = [env['lapack_libs']]) |
env.AppendUnique(CPPPATH = [lapack_inc_path]) |
530 |
if env['lapack_type']=='mkl': |
env.AppendUnique(LIBPATH = [lapack_lib_path]) |
531 |
if not env['usemkl']: |
env.AppendUnique(LIBS = env['lapack_libs']) |
532 |
env['uselapack']=0 |
env.Append(CPPDEFINES = ['USE_LAPACK']) |
533 |
print "mkl_lapack requires mkl" |
|
534 |
else: |
######## Silo (optional) |
535 |
env.AppendUnique(CPPDEFINES='MKL_LAPACK') |
|
536 |
|
silo_inc_path='' |
537 |
|
silo_lib_path='' |
538 |
############ Add the compiler flags ############################ |
if env['silo']: |
539 |
|
silo_inc_path,silo_lib_path=findLibWithHeader(env, env['silo_libs'], 'silo.h', env['silo_prefix'], lang='c') |
540 |
# Enable debug by choosing either cc_debug or cc_optim |
env.AppendUnique(CPPPATH = [silo_inc_path]) |
541 |
if env['usedebug']: |
env.AppendUnique(LIBPATH = [silo_lib_path]) |
542 |
env.Append(CCFLAGS = env['cc_debug']) |
# Note that we do not add the libs since they are only needed for the |
543 |
env.Append(CCFLAGS = env['omp_debug']) |
# weipa library and tools. |
544 |
else: |
#env.AppendUnique(LIBS = [env['silo_libs']]) |
545 |
env.Append(CCFLAGS = env['cc_optim']) |
|
546 |
env.Append(CCFLAGS = env['omp_optim']) |
######## VisIt (optional) |
547 |
|
|
548 |
# Always use cc_flags |
visit_inc_path='' |
549 |
env.Append(CCFLAGS = env['cc_flags']) |
visit_lib_path='' |
550 |
env.Append(LIBS = [env['omp_libs']]) |
if env['visit']: |
551 |
|
visit_inc_path,visit_lib_path=findLibWithHeader(env, env['visit_libs'], 'VisItControlInterface_V2.h', env['visit_prefix'], lang='c') |
552 |
############ Add some custom builders ########################## |
env.AppendUnique(CPPPATH = [visit_inc_path]) |
553 |
|
env.AppendUnique(LIBPATH = [visit_lib_path]) |
|
py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True) |
|
|
env.Append(BUILDERS = {'PyCompile' : py_builder}); |
|
554 |
|
|
555 |
runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True) |
######## MPI (optional) |
|
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder}); |
|
|
|
|
|
runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffic='.py', single_source=True) |
|
|
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder}); |
|
556 |
|
|
557 |
epstopdfbuilder = Builder(action = scons_extensions.eps2pdf, suffix=".pdf", src_suffix=".eps", single_source=True) |
env['usempi'] = env['mpi']!='none' |
|
env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder}); |
|
|
|
|
|
############ MPI (optional) #################################### |
|
|
if not env['usempi']: env['mpi_flavour']='none' |
|
558 |
|
|
559 |
# Create a modified environment for MPI programs (identical to env if usempi=no) |
# Create a modified environment for MPI programs (identical to env if mpi=none) |
560 |
env_mpi = clone_env(env) |
env_mpi = clone_env(env) |
561 |
|
|
562 |
# Start a new configure environment that reflects what we've already found |
mpi_inc_path='' |
563 |
conf = Configure(clone_env(env_mpi)) |
mpi_lib_path='' |
|
|
|
564 |
if env_mpi['usempi']: |
if env_mpi['usempi']: |
565 |
VALID_MPIs=[ "MPT", "MPICH", "MPICH2", "OPENMPI", "INTELMPI" ] |
mpi_inc_path,mpi_lib_path=findLibWithHeader(env_mpi, env['mpi_libs'], 'mpi.h', env['mpi_prefix'], lang='c') |
566 |
if not env_mpi['mpi_flavour'] in VALID_MPIs: |
env_mpi.AppendUnique(CPPPATH = [mpi_inc_path]) |
567 |
raise ValueError,"MPI is enabled but mpi_flavour = %s is not a valid key from %s."%( env_mpi['mpi_flavour'],VALID_MPIs) |
env_mpi.AppendUnique(LIBPATH = [mpi_lib_path]) |
568 |
conf.env.AppendUnique(CPPPATH = [env_mpi['mpi_path']]) |
env_mpi.AppendUnique(LIBS = env['mpi_libs']) |
569 |
conf.env.AppendUnique(LIBPATH = [env_mpi['mpi_lib_path']]) |
env_mpi.PrependENVPath(LD_LIBRARY_PATH_KEY, mpi_lib_path) |
570 |
conf.env.AppendUnique(LIBS = [env_mpi['mpi_libs']]) |
env_mpi.Append(CPPDEFINES = ['PASO_MPI', 'MPI_NO_CPPBIND', 'MPICH_IGNORE_CXX_SEEK']) |
571 |
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['mpi_lib_path']) # The wrapper script needs to find these libs |
# NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined! |
572 |
#ensure that our path entries remain at the front |
# On the other hand MPT and OpenMPI don't define the latter so we have to |
573 |
conf.env.PrependENVPath('PYTHONPATH', prefix) |
# do that here |
574 |
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall']) |
if env['netcdf'] and env_mpi['mpi'] in ['MPT','OPENMPI']: |
575 |
|
env_mpi.Append(CPPDEFINES = ['MPI_INCLUDED']) |
576 |
if env_mpi['usempi'] and not conf.CheckCHeader('mpi.h'): env_mpi['usempi'] = 0 |
|
577 |
# if env_mpi['usempi'] and not conf.CheckFunc('MPI_Init'): env_mpi['usempi'] = 0 |
|
578 |
|
######## ParMETIS (optional) |
579 |
# Add MPI to environment env_mpi if it was found |
|
580 |
if env_mpi['usempi']: |
if not env_mpi['usempi']: env_mpi['parmetis'] = False |
581 |
env_mpi = conf.Finish() |
|
582 |
env_mpi.Append(CPPDEFINES = ['PASO_MPI', 'MPI_NO_CPPBIND', env_mpi['MPICH_IGNORE_CXX_SEEK']]) |
parmetis_inc_path='' |
583 |
# NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined! |
parmetis_lib_path='' |
584 |
# On the other hand MPT and OpenMPI don't define the latter so we have to |
if env_mpi['parmetis']: |
585 |
# do that here |
parmetis_inc_path,parmetis_lib_path=findLibWithHeader(env_mpi, env['parmetis_libs'], 'parmetis.h', env['parmetis_prefix'], lang='c') |
586 |
if env['usenetcdf'] and env_mpi['mpi_flavour'] in ["MPT","OPENMPI"]: |
env_mpi.AppendUnique(CPPPATH = [parmetis_inc_path]) |
587 |
env_mpi.Append(CPPDEFINES = ['MPI_INCLUDED']) |
env_mpi.AppendUnique(LIBPATH = [parmetis_lib_path]) |
588 |
else: |
env_mpi.AppendUnique(LIBS = env_mpi['parmetis_libs']) |
589 |
conf.Finish() |
env_mpi.PrependENVPath(LD_LIBRARY_PATH_KEY, parmetis_lib_path) |
590 |
|
env_mpi.Append(CPPDEFINES = ['USE_PARMETIS']) |
|
env['usempi'] = env_mpi['usempi'] |
|
591 |
|
|
592 |
############ ParMETIS (optional) ############################### |
env['parmetis'] = env_mpi['parmetis'] |
593 |
|
|
594 |
# Start a new configure environment that reflects what we've already found |
######################## Summarize our environment ########################### |
|
conf = Configure(clone_env(env_mpi)) |
|
595 |
|
|
596 |
if not env_mpi['usempi']: env_mpi['useparmetis'] = 0 |
# keep some of our install paths first in the list for the unit tests |
597 |
|
env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall']) |
598 |
if env_mpi['useparmetis']: |
env.PrependENVPath('PYTHONPATH', prefix) |
599 |
conf.env.AppendUnique(CPPPATH = [env_mpi['parmetis_path']]) |
env['ENV']['ESCRIPT_ROOT'] = prefix |
|
conf.env.AppendUnique(LIBPATH = [env_mpi['parmetis_lib_path']]) |
|
|
conf.env.AppendUnique(LIBS = [env_mpi['parmetis_libs']]) |
|
|
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['parmetis_lib_path']) # The wrapper script needs to find these libs |
|
|
#ensure that our path entries remain at the front |
|
|
conf.env.PrependENVPath('PYTHONPATH', prefix) |
|
|
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall']) |
|
|
|
|
|
if env_mpi['useparmetis'] and not conf.CheckCHeader('parmetis.h'): env_mpi['useparmetis'] = 0 |
|
|
if env_mpi['useparmetis'] and not conf.CheckFunc('ParMETIS_V3_PartGeomKway'): env_mpi['useparmetis'] = 0 |
|
600 |
|
|
601 |
# Add ParMETIS to environment env_mpi if it was found |
if not env['verbose']: |
602 |
if env_mpi['useparmetis']: |
for e in env, env_mpi: |
603 |
env_mpi = conf.Finish() |
e['CCCOMSTR'] = "Compiling $TARGET" |
604 |
env_mpi.Append(CPPDEFINES = ['USE_PARMETIS']) |
e['CXXCOMSTR'] = "Compiling $TARGET" |
605 |
|
e['SHCCCOMSTR'] = "Compiling $TARGET" |
606 |
|
e['SHCXXCOMSTR'] = "Compiling $TARGET" |
607 |
|
e['ARCOMSTR'] = "Linking $TARGET" |
608 |
|
e['LINKCOMSTR'] = "Linking $TARGET" |
609 |
|
e['SHLINKCOMSTR'] = "Linking $TARGET" |
610 |
|
#Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17) |
611 |
|
|
612 |
|
print("") |
613 |
|
print("*** Config Summary (see config.log and lib/buildvars for details) ***") |
614 |
|
print("Escript/Finley revision %s"%global_revision) |
615 |
|
print(" Install prefix: %s"%env['prefix']) |
616 |
|
print(" Python: %s"%sysconfig.PREFIX) |
617 |
|
print(" boost: %s"%env['boost_prefix']) |
618 |
|
print(" numpy: YES") |
619 |
|
if env['usempi']: |
620 |
|
print(" MPI: YES (flavour: %s)"%env['mpi']) |
621 |
else: |
else: |
622 |
conf.Finish() |
print(" MPI: DISABLED") |
623 |
|
if env['uselapack']: |
624 |
env['useparmetis'] = env_mpi['useparmetis'] |
print(" LAPACK: YES (flavour: %s)"%env['lapack']) |
625 |
|
else: |
626 |
############ Summarize our environment ######################### |
print(" LAPACK: DISABLED") |
627 |
|
d_list=[] |
628 |
print "" |
e_list=[] |
629 |
print "Summary of configuration (see ./config.log for information)" |
for i in 'debug','openmp','netcdf','parmetis','papi','mkl','umfpack','silo','visit','pyvisi': |
630 |
print " Using python libraries" |
if env[i]: e_list.append(i) |
631 |
print " Using numpy" |
else: d_list.append(i) |
632 |
print " Using boost" |
for i in e_list: |
633 |
if env['usenetcdf']: print " Using NetCDF" |
print("%16s: YES"%i) |
634 |
else: print " Not using NetCDF" |
for i in d_list: |
635 |
if env['usevtk']: print " Using VTK" |
print("%16s: DISABLED"%i) |
636 |
else: print " Not using VTK" |
if ((fatalwarning != '') and (env['werror'])): |
637 |
if env['usevisit']: print " Using VisIt" |
print(" Treating warnings as errors") |
638 |
else: print " Not using VisIt" |
else: |
639 |
if env['usemkl']: print " Using MKL" |
print(" NOT treating warnings as errors") |
640 |
else: print " Not using MKL" |
print("") |
|
if env['useumfpack']: print " Using UMFPACK" |
|
|
else: print " Not using UMFPACK" |
|
|
if env['usesilo']: print " Using Silo" |
|
|
else: print " Not using Silo" |
|
|
if env['useopenmp']: print " Using OpenMP" |
|
|
else: print " Not using OpenMP" |
|
|
if env['usempi']: print " Using MPI (flavour = %s)"%env['mpi_flavour'] |
|
|
else: print " Not using MPI" |
|
|
if env['useparmetis']: print " Using ParMETIS" |
|
|
else: print " Not using ParMETIS (requires MPI)" |
|
|
if env['usepapi']: print " Using PAPI" |
|
|
else: print " Not using PAPI" |
|
|
if env['uselapack']: print " Using Lapack" |
|
|
else: print " Not using Lapack" |
|
|
if env['usedebug']: print " Compiling for debug" |
|
|
else: print " Not compiling for debug" |
|
|
print " Installing in", prefix |
|
|
if ((fatalwarning != "") and (env['usewarnings'])): print " Treating warnings as errors" |
|
|
else: print " Not treating warnings as errors" |
|
|
print "" |
|
|
|
|
|
############ Delete option-dependent files ##################### |
|
|
|
|
|
Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.debug"))) |
|
|
Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.mpi"))) |
|
|
Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.openmp"))) |
|
|
Execute(Delete(os.path.join(env['libinstall'],"buildvars"))) |
|
|
if not env['usempi']: Execute(Delete(os.path.join(env['libinstall'],"pythonMPI"))) |
|
|
|
|
|
|
|
|
############ Build the subdirectories ########################## |
|
|
|
|
|
if env['usepedantic']: env_mpi.Append(CCFLAGS = pedantic) |
|
641 |
|
|
642 |
|
####################### Configure the subdirectories ######################### |
643 |
|
|
644 |
from grouptest import * |
from grouptest import * |
645 |
|
|
646 |
TestGroups=[] |
TestGroups=[] |
647 |
|
|
648 |
dodgy_env=clone_env(env_mpi) # Environment without pedantic options |
# keep an environment without warnings-as-errors |
649 |
|
dodgy_env=clone_env(env_mpi) |
|
############ Now we switch on Warnings as errors ############### |
|
|
|
|
|
#this needs to be done after configuration because the scons test files have warnings in them |
|
|
|
|
|
if ((fatalwarning != "") and (env['usewarnings'])): |
|
|
env.Append(CCFLAGS = fatalwarning) |
|
|
env_mpi.Append(CCFLAGS = fatalwarning) |
|
650 |
|
|
651 |
|
# now add warnings-as-errors flags. This needs to be done after configuration |
652 |
|
# because the scons test files have warnings in them |
653 |
|
if ((fatalwarning != '') and (env['werror'])): |
654 |
|
env.Append(CCFLAGS = fatalwarning) |
655 |
|
env_mpi.Append(CCFLAGS = fatalwarning) |
656 |
|
|
657 |
Export( |
Export( |
658 |
["env", |
['env', |
659 |
"env_mpi", |
'env_mpi', |
660 |
"clone_env", |
'clone_env', |
661 |
"dodgy_env", |
'dodgy_env', |
662 |
"IS_WINDOWS_PLATFORM", |
'IS_WINDOWS', |
663 |
"TestGroups", |
'TestGroups', |
664 |
"CallSConscript", |
'CallSConscript', |
665 |
"cantusevariantdir" |
'cantusevariantdir' |
666 |
] |
] |
667 |
) |
) |
668 |
|
|
669 |
CallSConscript(env, dirs = ['tools/CppUnitTest/src'], variant_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0) |
CallSConscript(env, dirs = ['tools/CppUnitTest/src'], variant_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0) |
670 |
CallSConscript(env, dirs = ['tools/escriptconvert'], variant_dir='build/$PLATFORM/tools/escriptconvert', duplicate=0) |
CallSConscript(env, dirs = ['tools/escriptconvert'], variant_dir='build/$PLATFORM/tools/escriptconvert', duplicate=0) |
681 |
CallSConscript(env, dirs = ['scripts'], variant_dir='build/$PLATFORM/scripts', duplicate=0) |
CallSConscript(env, dirs = ['scripts'], variant_dir='build/$PLATFORM/scripts', duplicate=0) |
682 |
CallSConscript(env, dirs = ['paso/profiling'], variant_dir='build/$PLATFORM/paso/profiling', duplicate=0) |
CallSConscript(env, dirs = ['paso/profiling'], variant_dir='build/$PLATFORM/paso/profiling', duplicate=0) |
683 |
|
|
684 |
|
######################## Populate the buildvars file ######################### |
685 |
|
|
686 |
############ Remember what optimizations we used ############### |
# remove obsolete file |
687 |
|
if not env['usempi']: |
688 |
remember_list = [] |
Execute(Delete(os.path.join(env['libinstall'], 'pythonMPI'))) |
689 |
|
Execute(Delete(os.path.join(env['libinstall'], 'pythonMPIredirect'))) |
690 |
|
|
691 |
if env['usedebug']: |
# Try to extract the boost version from version.hpp |
692 |
remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.debug"), None, Touch('$TARGET')) |
boosthpp=open(os.path.join(boost_inc_path, 'boost', 'version.hpp')) |
|
|
|
|
if env['usempi']: |
|
|
remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.mpi"), None, Touch('$TARGET')) |
|
|
|
|
|
if env['useopenmp']: |
|
|
remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.openmp"), None, Touch('$TARGET')) |
|
|
|
|
|
env.Alias('remember_options', remember_list) |
|
|
|
|
|
|
|
|
############### Record python interpreter version ############## |
|
|
|
|
|
if not IS_WINDOWS_PLATFORM: |
|
|
|
|
|
versionstring="Python "+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2]) |
|
|
# if sys.version_info[4] >0 : versionstring+="rc%s"%sys.version_info[4] |
|
|
|
|
|
############## Populate the buildvars file ##################### |
|
|
|
|
|
buildvars=open(os.path.join(env['libinstall'],'buildvars'),'w') |
|
|
buildvars.write('python='+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+'\n') |
|
|
|
|
|
# Find the boost version by extracting it from version.hpp |
|
|
boosthpp=open(os.path.join(env['boost_path'],'boost','version.hpp')) |
|
693 |
boostversion='unknown' |
boostversion='unknown' |
694 |
try: |
try: |
695 |
for line in boosthpp: |
for line in boosthpp: |
696 |
ver=re.match(r'#define BOOST_VERSION (\d+)',line) |
ver=re.match(r'#define BOOST_VERSION (\d+)',line) |
697 |
if ver: |
if ver: |
698 |
boostversion=ver.group(1) |
boostversion=ver.group(1) |
699 |
except StopIteration: |
except StopIteration: |
700 |
pass |
pass |
701 |
buildvars.write("boost="+boostversion+"\n") |
boosthpp.close() |
702 |
|
|
703 |
|
buildvars=open(os.path.join(env['libinstall'], 'buildvars'), 'w') |
704 |
buildvars.write("svn_revision="+str(global_revision)+"\n") |
buildvars.write("svn_revision="+str(global_revision)+"\n") |
705 |
out="usedebug=" |
buildvars.write("prefix="+prefix+"\n") |
706 |
if env['usedebug']: |
buildvars.write("cc="+env['CC']+"\n") |
707 |
out+="y" |
buildvars.write("cxx="+env['CXX']+"\n") |
708 |
else: |
buildvars.write("python="+sys.executable+"\n") |
709 |
out+="n" |
buildvars.write("python_version="+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+"\n") |
710 |
out+="\nusempi=" |
buildvars.write("boost_inc_path="+boost_inc_path+"\n") |
711 |
if env['usempi']: |
buildvars.write("boost_lib_path="+boost_lib_path+"\n") |
712 |
out+="y" |
buildvars.write("boost_version="+boostversion+"\n") |
713 |
else: |
buildvars.write("debug=%d\n"%int(env['debug'])) |
714 |
out+="n" |
buildvars.write("openmp=%d\n"%int(env['openmp'])) |
715 |
out+="\nuseopenmp=" |
buildvars.write("mpi=%s\n"%env['mpi']) |
716 |
if env['useopenmp']: |
buildvars.write("mpi_inc_path=%s\n"%mpi_inc_path) |
717 |
out+="y" |
buildvars.write("mpi_lib_path=%s\n"%mpi_lib_path) |
718 |
else: |
buildvars.write("lapack=%s\n"%env['lapack']) |
719 |
out+="n" |
buildvars.write("pyvisi=%d\n"%env['pyvisi']) |
720 |
buildvars.write(out+"\n") |
for i in 'netcdf','parmetis','papi','mkl','umfpack','silo','visit': |
721 |
buildvars.write("mpi_flavour="+env['mpi_flavour']+'\n') |
buildvars.write("%s=%d\n"%(i, int(env[i]))) |
722 |
out="lapack=" |
if env[i]: |
723 |
if env['uselapack']: |
buildvars.write("%s_inc_path=%s\n"%(i, eval(i+'_inc_path'))) |
724 |
out+="y" |
buildvars.write("%s_lib_path=%s\n"%(i, eval(i+'_lib_path'))) |
|
else: |
|
|
out+="n" |
|
|
out+="\nsilo=" |
|
|
if env['usesilo']: |
|
|
out+="y" |
|
|
else: |
|
|
out+="n" |
|
|
out+="\nusevisit=" |
|
|
if env['usevisit']: |
|
|
out+="y" |
|
|
else: |
|
|
out+="n" |
|
|
buildvars.write(out+"\n") |
|
725 |
buildvars.close() |
buildvars.close() |
726 |
|
|
727 |
|
################### Targets to build and install libraries ################### |
|
############ Targets to build and install libraries ############ |
|
728 |
|
|
729 |
target_init = env.Command(env['pyinstall']+'/__init__.py', None, Touch('$TARGET')) |
target_init = env.Command(env['pyinstall']+'/__init__.py', None, Touch('$TARGET')) |
730 |
env.Alias('target_init', [target_init]) |
env.Alias('target_init', [target_init]) |
731 |
|
|
732 |
# The headers have to be installed prior to build in order to satisfy #include <paso/Common.h> |
# The headers have to be installed prior to build in order to satisfy |
733 |
|
# #include <paso/Common.h> |
734 |
env.Alias('build_esysUtils', ['target_install_esysUtils_headers', 'target_esysUtils_a']) |
env.Alias('build_esysUtils', ['target_install_esysUtils_headers', 'target_esysUtils_a']) |
735 |
env.Alias('install_esysUtils', ['build_esysUtils', 'target_install_esysUtils_a']) |
env.Alias('install_esysUtils', ['build_esysUtils', 'target_install_esysUtils_a']) |
736 |
|
|
737 |
env.Alias('build_paso', ['target_install_paso_headers', 'target_paso_a']) |
env.Alias('build_paso', ['target_install_paso_headers', 'target_paso_a']) |
738 |
env.Alias('install_paso', ['build_paso', 'target_install_paso_a']) |
env.Alias('install_paso', ['build_paso', 'target_install_paso_a']) |
739 |
|
|
|
env.Alias('build_weipa', ['target_install_weipa_headers', 'target_weipa_so', 'target_weipacpp_so']) |
|
|
env.Alias('install_weipa', ['build_weipa', 'target_install_weipa_so', 'target_install_weipacpp_so', 'target_install_weipa_py']) |
|
|
|
|
|
|
|
|
env.Alias('build_escriptreader', ['target_install_weipa_headers', 'target_escriptreader_a']) |
|
|
env.Alias('install_escriptreader', ['build_escriptreader', 'target_install_escriptreader_a']) |
|
|
|
|
740 |
env.Alias('build_escript', ['target_install_escript_headers', 'target_escript_so', 'target_escriptcpp_so']) |
env.Alias('build_escript', ['target_install_escript_headers', 'target_escript_so', 'target_escriptcpp_so']) |
741 |
env.Alias('install_escript', ['build_escript', 'target_install_escript_so', 'target_install_escriptcpp_so', 'target_install_escript_py']) |
env.Alias('install_escript', ['build_escript', 'target_install_escript_so', 'target_install_escriptcpp_so', 'target_install_escript_py']) |
742 |
|
|
743 |
env.Alias('build_finley', ['target_install_finley_headers', 'target_finley_so', 'target_finleycpp_so']) |
env.Alias('build_finley', ['target_install_finley_headers', 'target_finley_so', 'target_finleycpp_so']) |
744 |
env.Alias('install_finley', ['build_finley', 'target_install_finley_so', 'target_install_finleycpp_so', 'target_install_finley_py']) |
env.Alias('install_finley', ['build_finley', 'target_install_finley_so', 'target_install_finleycpp_so', 'target_install_finley_py']) |
745 |
|
|
746 |
# Now gather all the above into a couple easy targets: build_all and install_all |
env.Alias('build_weipa', ['target_install_weipa_headers', 'target_weipa_so', 'target_weipacpp_so']) |
747 |
|
env.Alias('install_weipa', ['build_weipa', 'target_install_weipa_so', 'target_install_weipacpp_so', 'target_install_weipa_py']) |
748 |
|
|
749 |
|
env.Alias('build_escriptreader', ['target_install_weipa_headers', 'target_escriptreader_a']) |
750 |
|
env.Alias('install_escriptreader', ['build_escriptreader', 'target_install_escriptreader_a']) |
751 |
|
|
752 |
|
# Now gather all the above into some easy targets: build_all and install_all |
753 |
build_all_list = [] |
build_all_list = [] |
754 |
build_all_list += ['build_esysUtils'] |
build_all_list += ['build_esysUtils'] |
755 |
build_all_list += ['build_paso'] |
build_all_list += ['build_paso'] |
|
build_all_list += ['build_weipa'] |
|
756 |
build_all_list += ['build_escript'] |
build_all_list += ['build_escript'] |
757 |
build_all_list += ['build_finley'] |
build_all_list += ['build_finley'] |
758 |
if env['usempi']: build_all_list += ['target_pythonMPI_exe'] |
build_all_list += ['build_weipa'] |
759 |
#if not IS_WINDOWS_PLATFORM: build_all_list += ['target_escript_wrapper'] |
build_all_list += ['build_escriptreader'] |
760 |
|
if env['usempi']: build_all_list += ['target_pythonMPI_exe'] |
761 |
build_all_list += ['target_escriptconvert'] |
build_all_list += ['target_escriptconvert'] |
762 |
env.Alias('build_all', build_all_list) |
env.Alias('build_all', build_all_list) |
763 |
|
|
765 |
install_all_list += ['target_init'] |
install_all_list += ['target_init'] |
766 |
install_all_list += ['install_esysUtils'] |
install_all_list += ['install_esysUtils'] |
767 |
install_all_list += ['install_paso'] |
install_all_list += ['install_paso'] |
|
install_all_list += ['install_weipa'] |
|
768 |
install_all_list += ['install_escript'] |
install_all_list += ['install_escript'] |
769 |
install_all_list += ['install_finley'] |
install_all_list += ['install_finley'] |
770 |
|
install_all_list += ['install_weipa'] |
771 |
|
install_all_list += ['install_escriptreader'] |
772 |
install_all_list += ['target_install_pyvisi_py'] |
install_all_list += ['target_install_pyvisi_py'] |
773 |
install_all_list += ['target_install_modellib_py'] |
install_all_list += ['target_install_modellib_py'] |
774 |
install_all_list += ['target_install_pycad_py'] |
install_all_list += ['target_install_pycad_py'] |
775 |
if env['usempi']: install_all_list += ['target_install_pythonMPI_exe'] |
if env['usempi']: install_all_list += ['target_install_pythonMPI_exe'] |
776 |
#if not IS_WINDOWS_PLATFORM: install_all_list += ['target_install_escript_wrapper'] |
install_all_list += ['target_install_escriptconvert'] |
|
if env['usesilo']: install_all_list += ['target_install_escriptconvert'] |
|
|
install_all_list += ['remember_options'] |
|
777 |
env.Alias('install_all', install_all_list) |
env.Alias('install_all', install_all_list) |
778 |
|
|
779 |
# Default target is install |
# Default target is install |
780 |
env.Default('install_all') |
env.Default('install_all') |
781 |
|
|
782 |
############ Targets to build and run the test suite ########### |
################## Targets to build and run the test suite ################### |
783 |
|
|
784 |
env.Alias('build_cppunittest', ['target_install_cppunittest_headers', 'target_cppunittest_a']) |
env.Alias('build_cppunittest', ['target_install_cppunittest_headers', 'target_cppunittest_a']) |
785 |
env.Alias('install_cppunittest', ['build_cppunittest', 'target_install_cppunittest_a']) |
env.Alias('install_cppunittest', ['build_cppunittest', 'target_install_cppunittest_a']) |
786 |
env.Alias('run_tests', ['install_all', 'target_install_cppunittest_a']) |
env.Alias('run_tests', ['install_all', 'target_install_cppunittest_a']) |
787 |
env.Alias('all_tests', ['install_all', 'target_install_cppunittest_a', 'run_tests', 'py_tests']) |
env.Alias('all_tests', ['install_all', 'target_install_cppunittest_a', 'run_tests', 'py_tests']) |
788 |
env.Alias('build_full',['install_all','build_tests','build_py_tests']) |
env.Alias('build_full',['install_all','build_tests','build_py_tests']) |
789 |
|
env.Alias('build_PasoTests','build/$PLATFORM/paso/profiling/PasoTests') |
790 |
|
|
791 |
|
##################### Targets to build the documentation ##################### |
|
############ Targets to build the documentation ################ |
|
792 |
|
|
793 |
env.Alias('api_epydoc','install_all') |
env.Alias('api_epydoc','install_all') |
|
|
|
794 |
env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'guide_pdf', 'guide_html','install_pdf', 'cookbook_pdf']) |
env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'guide_pdf', 'guide_html','install_pdf', 'cookbook_pdf']) |
795 |
|
env.Alias('release_prep', ['docs', 'install_all']) |
796 |
|
|
797 |
build_platform=os.name |
if not IS_WINDOWS: |
798 |
|
try: |
799 |
if not IS_WINDOWS_PLATFORM: |
utest=open('utest.sh','w') |
800 |
try: |
utest.write(GroupTest.makeHeader(env['PLATFORM'])) |
801 |
utest=open("utest.sh","w") |
for tests in TestGroups: |
802 |
#Sometimes Mac python says it is posix |
utest.write(tests.makeString()) |
803 |
if (build_platform=='posix') and platform.system()=="Darwin": |
utest.close() |
804 |
build_platform='darwin' |
Chmod('utest.sh', 0755) |
805 |
utest.write(GroupTest.makeHeader(build_platform)) |
print("Generated utest.sh.") |
806 |
for tests in TestGroups: |
except IOError: |
807 |
utest.write(tests.makeString()) |
print("Error attempting to write unittests file.") |
808 |
utest.close() |
Exit(1) |
809 |
os.chmod("utest.sh",stat.S_IRWXU|stat.S_IRGRP|stat.S_IXGRP|stat.S_IROTH|stat.S_IXOTH) |
|
810 |
print "utest.sh written" |
# Make sure that the escript wrapper is in place |
811 |
except IOError: |
if not os.path.isfile(os.path.join(env['bininstall'], 'escript')): |
812 |
print "Error attempting to write unittests file." |
print("Copying escript wrapper.") |
813 |
sys.exit(1) |
Execute(Copy(os.path.join(env['bininstall'],'escript'), 'bin/escript')) |
|
|
|
|
#Make sure that the escript wrapper is in place |
|
|
if not os.path.isfile(os.path.join(env['bininstall'],'escript')): |
|
|
print "Copying escript wrapper" |
|
|
shutil.copy("bin/escript",os.path.join(env['bininstall'],'escript')) |
|
|
|
|
|
############ Targets to build PasoTests suite ################ |
|
|
|
|
|
env.Alias('build_PasoTests','build/'+build_platform+'/paso/profiling/PasoTests') |
|
814 |
|
|
|
env.Alias('release_prep', ['docs', 'install_all']) |
|