# Copyright 2006 by ACcESS MNRF
#
# http://www.access.edu.au
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php

# top-level SCons configuration file for all esys13 modules
# Begin initialisation section
# All of this section just initialises default environments and helper
# scripts. You shouldn't need to modify this section.
EnsureSConsVersion(0,96,91)
EnsurePythonVersion(2,3)

#===============================================================
# import tools:
import glob
import sys, os, re
# Add our extensions
if sys.path.count('scons')==0: sys.path.append('scons')
import scons_extensions

#===============================================================

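# tools_prefix is the default root under which third-party dependencies
# (python, boost, UMFPACK/UFsparse, netCDF, ...) are searched for below.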
tools_prefix="/usr"

#==============================================================================================
#
# get the installation prefix
#
prefix = ARGUMENTS.get('prefix', '/usr')

# We may also need to know where python's site-packages subdirectory lives
python_version = 'python%s.%s'%(sys.version_info[0],sys.version_info[1])

# Install as a standard python package under prefix/lib64 if available, else under prefix/lib
if os.path.isdir( prefix+"/lib64/"+python_version+"/site-packages"):
    sys_dir_packages = prefix+"/lib64/"+python_version+"/site-packages/esys"
    sys_dir_libraries = prefix+"/lib64"
else:
    sys_dir_packages = prefix+"/lib/"+python_version+"/site-packages/esys"
    sys_dir_libraries = prefix+"/lib"

sys_dir_examples = prefix+"/share/doc/esys"

source_root = Dir('#.').abspath

dir_packages = os.path.join(source_root,"esys")
dir_examples = os.path.join(source_root,"examples")
dir_libraries = os.path.join(source_root,"lib")

print "Source root is : ",source_root
print " Default packages local installation:  ", dir_packages
print " Default library local installation:   ", dir_libraries
print " Default example local installation:   ", dir_examples
print "Install prefix is: ", prefix
print " Default packages system installation: ", sys_dir_packages
print " Default library system installation:  ", sys_dir_libraries
print " Default example system installation:  ", sys_dir_examples

#==============================================================================================

# Default options and options help text
# These are defaults and can be overridden using command line arguments or an options file.
# if the options_file or ARGUMENTS do not exist then the ones listed as default here are used
# DO NOT CHANGE THEM HERE
# Where to install?
#==============================================================================================
#
# get the options file if present:
#
options_file = ARGUMENTS.get('options_file','')

if not os.path.isfile(options_file):
    options_file = False

if not options_file:
    import socket
    hostname = re.sub("[^0-9a-zA-Z]", "_", socket.gethostname().split('.')[0])
    tmp = os.path.join("scons",hostname+"_options.py")

    if os.path.isfile(tmp):
        options_file = tmp

IS_WINDOWS_PLATFORM = (os.name == "nt")

# If you're not going to tell me then......
# FIXME: add one for the altix too.
if not options_file:
    if IS_WINDOWS_PLATFORM:
        options_file = "scons/windows_mscv71_options.py"
    else:
        options_file = "scons/linux_gcc_eg_options.py"

# and load it
opts = Options(options_file, ARGUMENTS)
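# Example invocations (illustrative):
#   scons                                     # local build, using scons/<hostname>_options.py if it exists
#   scons options_file=scons/my_options.py    # use an explicit options file
#   scons prefix=/opt/esys dodebug=yes        # debug build, system install paths under /opt/esys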
#================================================================
#
# check if UMFPACK is installed on the system:
#
uf_root=None
for i in [ 'UMFPACK', 'umfpack', 'ufsparse', 'UFSPARSE']:
    if os.path.isdir(os.path.join(tools_prefix,'include',i)):
        uf_root=i
        print i," is used from ",tools_prefix
        break
if uf_root is not None:
    umf_path_default=os.path.join(tools_prefix,'include',uf_root)
    umf_lib_path_default=os.path.join(tools_prefix,'lib')
    umf_libs_default=['umfpack']
    amd_path_default=os.path.join(tools_prefix,'include',uf_root)
    amd_lib_path_default=os.path.join(tools_prefix,'lib')
    amd_libs_default=['amd']
    ufc_path_default=os.path.join(tools_prefix,'include',uf_root)
else:
    umf_path_default=None
    umf_lib_path_default=None
    umf_libs_default=None
    amd_path_default=None
    amd_lib_path_default=None
    amd_libs_default=None
    ufc_path_default=None
#
#==========================================================================
#
# python installation:
#
python_path_default=os.path.join(tools_prefix,'include','python%s.%s'%(sys.version_info[0],sys.version_info[1]))
python_lib_path_default=os.path.join(tools_prefix,'lib')
python_lib_default="python%s.%s"%(sys.version_info[0],sys.version_info[1])

#==========================================================================
#
# boost installation:
#
boost_path_default=os.path.join(tools_prefix,'include')
boost_lib_path_default=os.path.join(tools_prefix,'lib')
boost_lib_default=['boost_python']

#==========================================================================
#
# check if netCDF is installed on the system:
#
netCDF_path_default=os.path.join(tools_prefix,'include','netcdf-3')
netCDF_lib_path_default=os.path.join(tools_prefix,'lib')

if os.path.isdir(netCDF_path_default) and os.path.isdir(netCDF_lib_path_default):
    useNetCDF_default='yes'
    netCDF_libs_default=[ 'netcdf_c++', 'netcdf' ]
else:
    useNetCDF_default='no'
    netCDF_path_default=None
    netCDF_lib_path_default=None
    netCDF_libs_default=None

#==========================================================================
#
# MPI:
#
if IS_WINDOWS_PLATFORM:
    useMPI_default='no'
    mpi_path_default=None
    mpi_lib_path_default=None
    mpi_libs_default=[]
    mpi_run_default=None
else:
    useMPI_default='no'
    mpi_root='/usr/local'
    mpi_path_default=os.path.join(mpi_root,'include')
    mpi_lib_path_default=os.path.join(mpi_root,'lib')
    mpi_libs_default=[ 'mpich' , 'pthread', 'rt' ]
    mpi_run_default='mpiexec -np 1'
#
#==========================================================================
#
# compile:
#
cc_flags_default='-O3 -std=c99 -ffast-math -fpic -Wno-unknown-pragmas -ansi'
cc_flags_debug_default='-g -O0 -ffast-math -std=c99 -fpic -Wno-unknown-pragmas -ansi'
cxx_flags_default='--no-warn -ansi'
cxx_flags_debug_default='--no-warn -ansi -DDOASSERT'

#==============================================================================================
# Default options and options help text
# These are defaults and can be overridden using command line arguments or an options file.
# if the options_file or ARGUMENTS do not exist then the ones listed as default here are used
# DO NOT CHANGE THEM HERE
opts.AddOptions(
    # Where to install esys stuff
    ('incinstall', 'where the esys headers will be installed', Dir('#.').abspath+'/include'),
    ('libinstall', 'where the esys libraries will be installed', dir_libraries),
    ('pyinstall', 'where the esys python modules will be installed', dir_packages),
    ('exinstall', 'where the esys examples will be installed', dir_examples),
    ('sys_libinstall', 'where the system esys libraries will be installed', sys_dir_libraries),
    ('sys_pyinstall', 'where the system esys python modules will be installed', sys_dir_packages),
    ('sys_exinstall', 'where the system esys examples will be installed', sys_dir_examples),
    ('src_zipfile', 'where the source zip file will be written', Dir('#.').abspath+"/release/escript_src.zip"),
    ('test_zipfile', 'where the test zip file will be written', Dir('#.').abspath+"/release/escript_tests.zip"),
    ('src_tarfile', 'where the source tar file will be written', Dir('#.').abspath+"/release/escript_src.tar.gz"),
    ('test_tarfile', 'where the test tar file will be written', Dir('#.').abspath+"/release/escript_tests.tar.gz"),
    ('examples_tarfile', 'where the examples tar file will be written', Dir('#.').abspath+"/release/doc/escript_examples.tar.gz"),
    ('examples_zipfile', 'where the examples zip file will be written', Dir('#.').abspath+"/release/doc/escript_examples.zip"),
    ('guide_pdf', 'name of the user guide in pdf format', Dir('#.').abspath+"/release/doc/user/guide.pdf"),
    ('api_epydoc', 'name of the epydoc api docs directory', Dir('#.').abspath+"/release/doc/epydoc"),
    ('guide_html', 'name of the directory for user guide in html format', Dir('#.').abspath+"/release/doc/user/html"),
    ('api_doxygen', 'name of the doxygen api docs directory', prefix+"/release/doc/doxygen"),
    # Compilation options
    BoolOption('dodebug', 'Do you want a debug build?', 'no'),
    BoolOption('bounds_check', 'Do you want extra array bounds checking?', 'no'),
    ('options_file', "Optional file containing preferred options. Ignored if it doesn't exist (default: scons/<hostname>_options.py)", options_file),
    ('cc_defines', 'C/C++ defines to use', None),
    ('cc_flags', 'C compiler flags to use (Release build)', cc_flags_default),
    ('cc_flags_debug', 'C compiler flags to use (Debug build)', cc_flags_debug_default),
    ('cxx_flags', 'C++ compiler flags to use (Release build)', cxx_flags_default),
    ('cxx_flags_debug', 'C++ compiler flags to use (Debug build)', cxx_flags_debug_default),
    ('omp_flags', 'OpenMP compiler flags to use (Release build)', ''),
    ('omp_flags_debug', 'OpenMP compiler flags to use (Debug build)', ''),
    ('ar_flags', 'Static library archiver flags to use', None),
    ('sys_libs', 'System libraries to link with', None),
    ('tar_flags', 'flags for tar files', '-c -z'),
    # MKL
    PathOption('mkl_path', 'Path to MKL includes', None),
    PathOption('mkl_lib_path', 'Path to MKL libs', None),
    ('mkl_libs', 'MKL libraries to link with', None),
    # SCSL
    PathOption('scsl_path', 'Path to SCSL includes', None),
    PathOption('scsl_lib_path', 'Path to SCSL libs', None),
    ('scsl_libs', 'SCSL libraries to link with', None),
    ('scsl_libs_MPI', 'SCSL libraries to link with for MPI build', None),
    # UMFPACK
    PathOption('ufc_path', 'Path to UFconfig includes', ufc_path_default),
    PathOption('umf_path', 'Path to UMFPACK includes', umf_path_default),
    PathOption('umf_lib_path', 'Path to UMFPACK libs', umf_lib_path_default),
    ('umf_libs', 'UMFPACK libraries to link with', umf_libs_default),
    # AMD (used by UMFPACK)
    PathOption('amd_path', 'Path to AMD includes', amd_path_default),
    PathOption('amd_lib_path', 'Path to AMD libs', amd_lib_path_default),
    ('amd_libs', 'AMD libraries to link with', amd_libs_default),
    # TRILINOS
    PathOption('trilinos_path', 'Path to TRILINOS includes', None),
    PathOption('trilinos_lib_path', 'Path to TRILINOS libs', None),
    ('trilinos_libs', 'TRILINOS libraries to link with', None),
    # BLAS
    PathOption('blas_path', 'Path to BLAS includes', None),
    PathOption('blas_lib_path', 'Path to BLAS libs', None),
    ('blas_libs', 'BLAS libraries to link with', None),
    # netCDF
    ('useNetCDF', 'switch on/off the usage of netCDF', useNetCDF_default),
    PathOption('netCDF_path', 'Path to netCDF includes', netCDF_path_default),
    PathOption('netCDF_lib_path', 'Path to netCDF libs', netCDF_lib_path_default),
    ('netCDF_libs', 'netCDF C++ libraries to link with', netCDF_libs_default),
    # Python
    # locations of include files for python
    # FIXME: python_path should be python_inc_path and the same for boost etc.
    PathOption('python_path', 'Path to Python includes', python_path_default),
    PathOption('python_lib_path', 'Path to Python libs', python_lib_path_default),
    ('python_lib', 'Python libraries to link with', python_lib_default),
    ('python_cmd', 'Python command', 'python'),
    # Boost
    PathOption('boost_path', 'Path to Boost includes', boost_path_default),
    PathOption('boost_lib_path', 'Path to Boost libs', boost_lib_path_default),
    ('boost_lib', 'Boost libraries to link with', boost_lib_default),
    # Doc building
    # PathOption('doxygen_path', 'Path to Doxygen executable', None),
    # PathOption('epydoc_path', 'Path to Epydoc executable', None),
    # PAPI
    PathOption('papi_path', 'Path to PAPI includes', None),
    PathOption('papi_lib_path', 'Path to PAPI libs', None),
    ('papi_libs', 'PAPI libraries to link with', None),
    ('papi_instrument_solver', 'use PAPI in Solver.c to instrument each iteration of the solver', None),
    # MPI
    BoolOption('useMPI', 'Compile parallel version using MPI', useMPI_default),
    ('MPICH_IGNORE_CXX_SEEK', 'name of macro to ignore MPI settings of C++ SEEK macro (for MPICH)', 'MPICH_IGNORE_CXX_SEEK'),
    PathOption('mpi_path', 'Path to MPI includes', mpi_path_default),
    ('mpi_run', 'mpirun name', mpi_run_default),
    PathOption('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)', mpi_lib_path_default),
    ('mpi_libs', 'MPI libraries to link with (needs to be shared!)', mpi_libs_default)
)
#=================================================================================================
#
# Note: On the Altix the intel compilers are not automatically
# detected by the scons intelc.py script. The Altix has a different directory
# path and in some locations the "modules" facility is used to support
# multiple compiler versions. This forces the need to import the user's PATH
# environment, which isn't the "scons way".
# This doesn't impact linux and windows which will use the default compiler (g++ or msvc, or the intel compiler if it is installed on both platforms)
# FIXME: Perhaps a modification to intelc.py will allow better support for ia64 on altix
#

if IS_WINDOWS_PLATFORM:
    env = Environment(tools = ['default', 'msvc'], options = opts)
else:
    if os.uname()[4]=='ia64':
        env = Environment(tools = ['default', 'intelc'], options = opts)
        if env['CXX'] == 'icpc':
            env['LINK'] = env['CXX'] # version >=9 of the intel C++ compiler requires icpc to link in the C++ runtimes (icc does not). FIXME: this behaviour could be directly incorporated into scons intelc.py
    else:
        env = Environment(tools = ['default'], options = opts)
Help(opts.GenerateHelpText(env))

if env['bounds_check']:
    env.Append(CPPDEFINES = [ 'BOUNDS_CHECK' ])
    env.Append(CXXDEFINES = [ 'BOUNDS_CHECK' ])
    bounds_check = env['bounds_check']
else:
    bounds_check = 0

#=================================================================================================
#
# Initialise SCons Build Environment
# check for user environment variables we are interested in
try:
    tmp = os.environ['PYTHONPATH']
    env['ENV']['PYTHONPATH'] = tmp
except KeyError:
    pass

env.PrependENVPath('PYTHONPATH', source_root)

try:
    omp_num_threads = os.environ['OMP_NUM_THREADS']
except KeyError:
    omp_num_threads = 1
env['ENV']['OMP_NUM_THREADS'] = omp_num_threads

try:
    path = os.environ['PATH']
    env['ENV']['PATH'] = path
except KeyError:
    pass


# Copy some variables from the system environment to the build environment
try:
    env['ENV']['DISPLAY'] = os.environ['DISPLAY']
    env['ENV']['XAUTHORITY'] = os.environ['XAUTHORITY']
    home_temp = os.environ['HOME'] # MPICH2's mpd needs $HOME to find $HOME/.mpd.conf
    env['ENV']['HOME'] = home_temp
except KeyError:
    pass

try:
    tmp = os.environ['PATH']
    env['ENV']['PATH'] = tmp
except KeyError:
    pass

try:
    tmp = os.environ['LD_LIBRARY_PATH']
    env['ENV']['LD_LIBRARY_PATH'] = tmp
except KeyError:
    pass
#==========================================================================
#
# Add some custom builders
#
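# The builders below wrap helpers from scons_extensions:
#   PyCompile     - byte-compiles a .py source into a .pyc file
#   RunUnitTest   - runs a compiled (C++) unit test program and produces a .passed stamp file
#   RunPyUnitTest - runs a python unit test script and produces a .passed stamp file
# (see scons_extensions for the details of each action)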
py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
env.Append(BUILDERS = {'PyCompile' : py_builder});

runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed',
                              src_suffix=env['PROGSUFFIX'], single_source=True)

env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});

runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});

# Convert the options held in the environment into python variables for ease of handling, and configure compilation options
try:
    incinstall = env['incinstall']
    env.Append(CPPPATH = [incinstall,])
except KeyError:
    incinstall = None
try:
    libinstall = env['libinstall']
    env.Append(LIBPATH = [libinstall,]) # Adds -L for building of libescript.so libfinley.so escriptcpp.so finleycpp.so
    env.PrependENVPath('LD_LIBRARY_PATH', libinstall)
    if IS_WINDOWS_PLATFORM:
        env.PrependENVPath('PATH', libinstall)
        env.PrependENVPath('PATH', env['boost_lib_path'])
except KeyError:
    libinstall = None
try:
    pyinstall = env['pyinstall'] # all targets will install into pyinstall/esys but PYTHONPATH points at straight pyinstall so you can import esys.escript etc.
except KeyError:
    pyinstall = None

try:
    cc_defines = env['cc_defines']
    env.Append(CPPDEFINES = cc_defines)
except KeyError:
    pass
try:
    flags = env['ar_flags']
    env.Append(ARFLAGS = flags)
except KeyError:
    ar_flags = None
try:
    sys_libs = env['sys_libs']
except KeyError:
    sys_libs = []

try:
    tar_flags = env['tar_flags']
    env.Replace(TARFLAGS = tar_flags)
except KeyError:
    pass

try:
    exinstall = env['exinstall']
except KeyError:
    exinstall = None
try:
    sys_libinstall = env['sys_libinstall']
except KeyError:
    sys_libinstall = None
try:
    sys_pyinstall = env['sys_pyinstall']
except KeyError:
    sys_pyinstall = None
try:
    sys_exinstall = env['sys_exinstall']
except KeyError:
    sys_exinstall = None

# ====================== debugging ===================================
try:
    dodebug = env['dodebug']
except KeyError:
    dodebug = None

# === switch on omp ===================================================
try:
    omp_flags = env['omp_flags']
except KeyError:
    omp_flags = ''

try:
    omp_flags_debug = env['omp_flags_debug']
except KeyError:
    omp_flags_debug = ''

# ========= use mpi? =====================================================
try:
    useMPI = env['useMPI']
except KeyError:
    useMPI = None
# ========= set compiler flags ===========================================

# Can't use MPI and OpenMP simultaneously at this time
if useMPI:
    omp_flags=''
    omp_flags_debug=''

if dodebug:
    try:
        flags = env['cc_flags_debug'] + ' ' + omp_flags_debug
        env.Append(CCFLAGS = flags)
    except KeyError:
        pass
else:
    try:
        flags = env['cc_flags'] + ' ' + omp_flags
        env.Append(CCFLAGS = flags)
    except KeyError:
        pass
if dodebug:
    try:
        flags = env['cxx_flags_debug']
        env.Append(CXXFLAGS = flags)
    except KeyError:
        pass
else:
    try:
        flags = env['cxx_flags']
        env.Append(CXXFLAGS = flags)
    except KeyError:
        pass
try:
    if env['CC'] == 'gcc': env.Append(CCFLAGS = "-pedantic-errors -Wno-long-long")
except:
    pass

# ============= Remember what options were used in the compile =====================================
env.Execute("/bin/rm -f " + libinstall + "/Compiled.with.*")

if not useMPI: env.Execute("/bin/rm -f " + libinstall + "/pythonMPI")

if dodebug: env.Execute("touch " + libinstall + "/Compiled.with.debug")
if useMPI: env.Execute("touch " + libinstall + "/Compiled.with.mpi")
if omp_flags != '': env.Execute("touch " + libinstall + "/Compiled.with.OpenMP")
if bounds_check: env.Execute("touch " + libinstall + "/Compiled.with.bounds_check")
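# The Compiled.with.* stamp files touched above record, per build, whether debug,
# MPI, OpenMP and bounds checking were enabled in the libraries left in libinstall.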

# ============= set mkl (but only if no MPI) =====================================
if not useMPI:
    try:
        includes = env['mkl_path']
        env.Append(CPPPATH = [includes,])
    except KeyError:
        pass

    try:
        lib_path = env['mkl_lib_path']
        env.Append(LIBPATH = [lib_path,])
    except KeyError:
        pass

    try:
        mkl_libs = env['mkl_libs']
    except KeyError:
        mkl_libs = []
else:
    mkl_libs = []

# ============= set scsl (but only if no MPI) =====================================
if not useMPI:
    try:
        includes = env['scsl_path']
        env.Append(CPPPATH = [includes,])
    except KeyError:
        pass

    try:
        lib_path = env['scsl_lib_path']
        env.Append(LIBPATH = [lib_path,])
    except KeyError:
        pass

    try:
        scsl_libs = env['scsl_libs']
    except KeyError:
        scsl_libs = [ ]

else:
    scsl_libs = []

# ============= set TRILINOS (but only with MPI) =====================================
if useMPI:
    try:
        includes = env['trilinos_path']
        env.Append(CPPPATH = [includes,])
    except KeyError:
        pass

    try:
        lib_path = env['trilinos_lib_path']
        env.Append(LIBPATH = [lib_path,])
    except KeyError:
        pass

    try:
        trilinos_libs = env['trilinos_libs']
    except KeyError:
        trilinos_libs = []
else:
    trilinos_libs = []


# ============= set umfpack (but only without MPI) =====================================
umf_libs=[ ]
if not useMPI:
    try:
        includes = env['umf_path']
        env.Append(CPPPATH = [includes,])
    except KeyError:
        pass

    try:
        lib_path = env['umf_lib_path']
        env.Append(LIBPATH = [lib_path,])
    except KeyError:
        pass

    try:
        umf_libs += env['umf_libs']
    except KeyError:
        pass

    try:
        includes = env['ufc_path']
        env.Append(CPPPATH = [includes,])
    except KeyError:
        pass

    try:
        includes = env['amd_path']
        env.Append(CPPPATH = [includes,])
    except KeyError:
        pass

    try:
        lib_path = env['amd_lib_path']
        env.Append(LIBPATH = [lib_path,])
    except KeyError:
        pass

    try:
        amd_libs = env['amd_libs']
        umf_libs += amd_libs
    except KeyError:
        pass


# ============= set blas =====================================
try:
    includes = env['blas_path']
    env.Append(CPPPATH = [includes,])
except KeyError:
    pass

try:
    lib_path = env['blas_lib_path']
    env.Append(LIBPATH = [lib_path,])
except KeyError:
    pass

try:
    blas_libs = env['blas_libs']
except KeyError:
    blas_libs = [ ]

# ========== netcdf ====================================
try:
    useNetCDF = env['useNetCDF']
except KeyError:
    useNetCDF = 'yes'

if useNetCDF == 'yes':
    try:
        netCDF_libs = env['netCDF_libs']
    except KeyError:
        pass

    env.Append(LIBS = netCDF_libs)
    env.Append(CPPDEFINES = [ 'USE_NETCDF' ])
    try:
        includes = env['netCDF_path']
        env.Append(CPPPATH = [includes,])
    except KeyError:
        pass

    try:
        lib_path = env['netCDF_lib_path']
        env.Append(LIBPATH = [ lib_path, ])
        if IS_WINDOWS_PLATFORM:
            env.PrependENVPath('PATH', lib_path)
    except KeyError:
        pass
else:
    print "Warning: Installation is not configured with netCDF. Some I/O functions may not be available."
    netCDF_libs=[ ]

# ====================== boost ======================================
try:
    includes = env['boost_path']
    env.Append(CPPPATH = [includes,])
except KeyError:
    pass
try:
    lib_path = env['boost_lib_path']
    env.Append(LIBPATH = [lib_path,])
    if IS_WINDOWS_PLATFORM:
        env.PrependENVPath('PATH', lib_path)
except KeyError:
    pass
try:
    boost_lib = env['boost_lib']
except KeyError:
    boost_lib = None
# ====================== python ======================================
try:
    includes = env['python_path']
    env.Append(CPPPATH = [includes,])
except KeyError:
    pass
try:
    lib_path = env['python_lib_path']
    env.Append(LIBPATH = [lib_path,])
except KeyError:
    pass
try:
    python_lib = env['python_lib']
except KeyError:
    python_lib = None
# =============== documentation =======================================
try:
    doxygen_path = env['doxygen_path']
except KeyError:
    doxygen_path = None
try:
    epydoc_path = env['epydoc_path']
except KeyError:
    epydoc_path = None
# =============== PAPI =======================================
try:
    includes = env['papi_path']
    env.Append(CPPPATH = [includes,])
except KeyError:
    pass
try:
    lib_path = env['papi_lib_path']
    env.Append(LIBPATH = [lib_path,])
except KeyError:
    pass
try:
    papi_libs = env['papi_libs']
except KeyError:
    papi_libs = None
# ============= set mpi =====================================
if useMPI:
    env.Append(CPPDEFINES=['PASO_MPI',])
    try:
        includes = env['mpi_path']
        env.Append(CPPPATH = [includes,])
    except KeyError:
        pass
    try:
        lib_path = env['mpi_lib_path']
        env.Append(LIBPATH = [lib_path,])
        env['ENV']['LD_LIBRARY_PATH'] += ":" + lib_path
    except KeyError:
        pass
    try:
        mpi_libs = env['mpi_libs']
    except KeyError:
        mpi_libs = []

    try:
        mpi_run = env['mpi_run']
    except KeyError:
        mpi_run = ''

    try:
        mpich_ignore_cxx_seek = env['MPICH_IGNORE_CXX_SEEK']
        env.Append(CPPDEFINES = [ mpich_ignore_cxx_seek ])
    except KeyError:
        pass
else:
    mpi_libs=[]
    mpi_run = mpi_run_default
# =============== PAPI solver instrumentation =======================================
try:
    papi_instrument_solver = env['papi_instrument_solver']
except KeyError:
    papi_instrument_solver = None


# ============= and some helpers =====================================
try:
    src_zipfile = env.File(env['src_zipfile'])
except KeyError:
    src_zipfile = None
try:
    test_zipfile = env.File(env['test_zipfile'])
except KeyError:
    test_zipfile = None
try:
    examples_zipfile = env.File(env['examples_zipfile'])
except KeyError:
    examples_zipfile = None

try:
    src_tarfile = env.File(env['src_tarfile'])
except KeyError:
    src_tarfile = None
try:
    test_tarfile = env.File(env['test_tarfile'])
except KeyError:
    test_tarfile = None
try:
    examples_tarfile = env.File(env['examples_tarfile'])
except KeyError:
    examples_tarfile = None

try:
    guide_pdf = env.File(env['guide_pdf'])
except KeyError:
    guide_pdf = None

try:
    guide_html_index = env.File('index.htm',env['guide_html'])
except KeyError:
    guide_html_index = None

try:
    api_epydoc = env.Dir(env['api_epydoc'])
except KeyError:
    api_epydoc = None

try:
    api_doxygen = env.Dir(env['api_doxygen'])
except KeyError:
    api_doxygen = None

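# Record the subversion revision (via 'svnversion -n .') as a preprocessor define;
# it falls back to 0 when the revision cannot be determined.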
try:
    svn_pipe = os.popen("svnversion -n .")
    global_revision = svn_pipe.readlines()
    svn_pipe.close()
    global_revision = re.sub(":.*", "", global_revision[0])
    global_revision = re.sub("[^0-9]", "", global_revision)
except:
    global_revision = "0"
env.Append(CPPDEFINES = "SVN_VERSION="+global_revision)

# Python install - esys __init__.py
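# (an empty esys/__init__.py is touched in pyinstall so that 'import esys.<module>' works)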
init_target = env.Command(pyinstall+'/__init__.py', None, Touch('$TARGET'))

# FIXME: exinstall and friends related to examples are not working.
build_target = env.Alias('build',[libinstall,incinstall,pyinstall,init_target])

env.Default(build_target)

# Zip targets
env.Alias('release_src',[ src_zipfile, src_tarfile ])
env.Alias('release_tests',[ test_zipfile, test_tarfile])
env.Alias('release_examples',[ examples_zipfile, examples_tarfile])
env.Alias('examples_zipfile',examples_zipfile)
env.Alias('examples_tarfile',examples_tarfile)
env.Alias('api_epydoc',api_epydoc)
env.Alias('api_doxygen',api_doxygen)
env.Alias('guide_html_index',guide_html_index)
env.Alias('guide_pdf', guide_pdf)
env.Alias('docs',[ 'release_examples', 'guide_pdf', api_epydoc, api_doxygen, guide_html_index])
env.Alias('release', ['release_src', 'release_tests', 'docs'])

env.Alias('build_tests',build_target)    # target to build all C++ tests
env.Alias('build_py_tests',build_target) # target to build all python tests
env.Alias('build_all_tests', [ 'build_tests', 'build_py_tests' ] ) # target to build all C++ and python tests
env.Alias('run_tests', 'build_tests')    # target to run all C++ tests
env.Alias('py_tests', 'build_py_tests')  # target to run all released python tests
env.Alias('all_tests', ['run_tests', 'py_tests']) # target to run all C++ and released python tests
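# For example, 'scons docs' builds the documentation bundles, 'scons release' builds
# the full release archives, and 'scons all_tests' builds and runs every C++ and python test.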


# Allow sconscripts to see the env
Export(["IS_WINDOWS_PLATFORM", "env", "incinstall", "libinstall", "pyinstall", "dodebug", "mkl_libs", "scsl_libs", "umf_libs", "blas_libs", "netCDF_libs", "useNetCDF", "mpi_run",
        "boost_lib", "python_lib", "doxygen_path", "epydoc_path", "papi_libs",
        "sys_libs", "test_zipfile", "src_zipfile", "test_tarfile", "src_tarfile", "examples_tarfile", "examples_zipfile", "trilinos_libs", "mpi_libs", "papi_instrument_solver",
        "guide_pdf", "guide_html_index", "api_epydoc", "api_doxygen", "useMPI" ])

# End initialisation section
# Begin configuration section
# adds this file and the scons options directory to the source tar
release_srcfiles=[env.File('SConstruct'),env.Dir('lib'),env.Dir('include'),]+[ env.File(x) for x in glob.glob('scons/*.py') ]
release_testfiles=[env.File('README_TESTS'),]
env.Zip(src_zipfile, release_srcfiles)
env.Zip(test_zipfile, release_testfiles)
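# env.Tar is only available when SCons has found a usable tar tool (e.g. it usually
# is not on a plain Windows install), hence the AttributeError guard below.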
try:
    env.Tar(src_tarfile, release_srcfiles)
    env.Tar(test_tarfile, release_testfiles)
except AttributeError:
    pass
# Insert new components to be built here
# FIXME: might be nice to replace this verbosity with a list of targets and some
# FIXME: nifty python to create the lengthy but very similar env.SConscript lines
# Third Party libraries
env.SConscript(dirs = ['tools/CppUnitTest/src'], build_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0)
# C/C++ Libraries
env.SConscript(dirs = ['paso/src'], build_dir='build/$PLATFORM/paso', duplicate=0)
# bruce is removed for now as it doesn't really do anything
# env.SConscript(dirs = ['bruce/src'], build_dir='build/$PLATFORM/bruce', duplicate=0)
env.SConscript(dirs = ['escript/src'], build_dir='build/$PLATFORM/escript', duplicate=0)
env.SConscript(dirs = ['esysUtils/src'], build_dir='build/$PLATFORM/esysUtils', duplicate=0)
env.SConscript(dirs = ['finley/src'], build_dir='build/$PLATFORM/finley', duplicate=0)
env.SConscript(dirs = ['modellib/py_src'], build_dir='build/$PLATFORM/modellib', duplicate=0)
env.SConscript(dirs = ['doc'], build_dir='build/$PLATFORM/doc', duplicate=0)
env.SConscript(dirs = ['pyvisi/py_src'], build_dir='build/$PLATFORM/pyvisi', duplicate=0)
env.SConscript(dirs = ['pycad/py_src'], build_dir='build/$PLATFORM/pycad', duplicate=0)
env.SConscript(dirs = ['pythonMPI/src'], build_dir='build/$PLATFORM/pythonMPI', duplicate=0)
#env.SConscript(dirs = ['../test'], build_dir='../test/build', duplicate=0)


syslib_install_target = env.installDirectory(sys_libinstall,libinstall)
syspy_install_target = env.installDirectory(sys_pyinstall,pyinstall,recursive=True)

install_target = env.Alias("install", env.Flatten([syslib_install_target, syspy_install_target]) )