1 |
|
2 |
######################################################## |
3 |
# |
4 |
# Copyright (c) 2003-2008 by University of Queensland |
5 |
# Earth Systems Science Computational Center (ESSCC) |
6 |
# http://www.uq.edu.au/esscc |
7 |
# |
8 |
# Primary Business: Queensland, Australia |
9 |
# Licensed under the Open Software License version 3.0 |
10 |
# http://www.opensource.org/licenses/osl-3.0.php |
11 |
# |
12 |
######################################################## |
13 |
|
14 |
|
# Build requires at least these tool versions.
EnsureSConsVersion(0,96,91)
EnsurePythonVersion(2,3)

import sys, os, re, socket, platform, stat

# Make our local scons extension modules importable.
if os.path.isdir('scons'):
    sys.path.append('scons')
import scons_extensions

# Prefer /usr/lib64 where it exists (64-bit systems), else /usr/lib.
usr_lib = '/usr/lib'
if os.path.isfile('/usr/lib64/libc.so'):
    usr_lib = '/usr/lib64'

# The string python2.4 or python2.5 -- used for default include/lib names.
python_version = 'python%s.%s' % (sys.version_info[0], sys.version_info[1])

# MS Windows support, many thanks to PH
IS_WINDOWS_PLATFORM = (os.name == "nt")

# Installation root; defaults to the top of the source tree.
prefix = ARGUMENTS.get('prefix', Dir('#.').abspath)
35 |
|
36 |
#Determine where to read options from use: |
37 |
#1. command line |
38 |
#2. scons/<hostname>_options.py |
39 |
#3. name as part of a cluster |
40 |
options_file=ARGUMENTS.get('options_file', None) |
41 |
if not options_file: |
42 |
hostname = re.sub("[^0-9a-zA-Z]", "_", socket.gethostname().split('.')[0]) |
43 |
options_file = os.path.join("scons",hostname+"_options.py") |
44 |
#If there is no options file with that name see if there is a substitute |
45 |
if not os.path.isfile(options_file): |
46 |
tmp = scons_extensions.effectiveName(hostname) |
47 |
options_file = os.path.join("scons",tmp+"_options.py") |
48 |
|
49 |
if not os.path.isfile(options_file): |
50 |
print "Options file not found (expected '%s')" % options_file |
51 |
options_file = False |
52 |
else: |
53 |
print "Options file is", options_file |
54 |
|
55 |
#Does our scons support the newer Variables class or do we need to use Options? |
56 |
|
57 |
try: |
58 |
dummyvar=Variables |
59 |
opts = Variables(options_file, ARGUMENTS) |
60 |
adder = opts.AddVariables |
61 |
except: |
62 |
opts = Options(options_file, ARGUMENTS) |
63 |
adder = opts.AddOptions |
64 |
BoolVariable = BoolOption |
############ Load build options ################################

# Declare every build option: (name, help, default) tuples for strings/lists,
# BoolVariable(...) for yes/no switches. Defaults may be overridden by the
# options file or the command line.
adder(
    # Where to install esys stuff
    ('prefix', 'where everything will be installed', Dir('#.').abspath),
    ('incinstall', 'where the esys headers will be installed', os.path.join(Dir('#.').abspath,'include')),
    ('bininstall', 'where the esys binaries will be installed', os.path.join(prefix,'bin')),
    ('libinstall', 'where the esys libraries will be installed', os.path.join(prefix,'lib')),
    ('pyinstall', 'where the esys python modules will be installed', os.path.join(prefix,'esys')),
    # Compilation options
    BoolVariable('dodebug', 'For backwards compatibility', 'no'),
    BoolVariable('usedebug', 'Do you want a debug build?', 'no'),
    BoolVariable('usevtk', 'Do you want to use VTK?', 'yes'),
    ('options_file', 'File of paths/options. Default: scons/<hostname>_options.py', options_file),
    ('win_cc_name', 'windows C compiler name if needed', 'msvc'),
    # The strings -DDEFAULT_ get replaced by scons/<hostname>_options.py or by defaults below
    ('cc_flags', 'C compiler flags to use', '-DEFAULT_1'),
    ('cc_optim', 'C compiler optimization flags to use', '-DEFAULT_2'),
    ('cc_debug', 'C compiler debug flags to use', '-DEFAULT_3'),
    ('omp_optim', 'OpenMP compiler flags to use (Release build)', '-DEFAULT_4'),
    ('omp_debug', 'OpenMP compiler flags to use (Debug build)', '-DEFAULT_5'),
    ('omp_libs', 'OpenMP compiler libraries to link with', '-DEFAULT_6'),
    ('cc_extra', 'Extra C/C++ flags', ''),
    ('ld_extra', 'Extra linker flags', ''),
    ('sys_libs', 'System libraries to link with', []),
    ('ar_flags', 'Static library archiver flags to use', ''),
    BoolVariable('useopenmp', 'Compile parallel version using OpenMP', 'no'),
    BoolVariable('usepedantic', 'Compile with -pedantic if using gcc', 'no'),
    BoolVariable('usewarnings', 'Compile with warnings as errors if using gcc', 'yes'),
    ('forcelazy', 'for testing use only - set the default value for autolazy', 'leave_alone'),
    # Python
    ('python_path', 'Path to Python includes', '/usr/include/'+python_version),
    ('python_lib_path', 'Path to Python libs', usr_lib),
    ('python_libs', 'Python libraries to link with', [python_version]),
    ('python_cmd', 'Python command', 'python'),
    # Boost
    ('boost_path', 'Path to Boost includes', '/usr/include'),
    ('boost_lib_path', 'Path to Boost libs', usr_lib),
    ('boost_libs', 'Boost libraries to link with', ['boost_python']),
    # NetCDF
    BoolVariable('usenetcdf', 'switch on/off the usage of netCDF', 'yes'),
    ('netCDF_path', 'Path to netCDF includes', '/usr/include'),
    ('netCDF_lib_path', 'Path to netCDF libs', usr_lib),
    ('netCDF_libs', 'netCDF C++ libraries to link with', ['netcdf_c++', 'netcdf']),
    # MPI
    BoolVariable('useMPI', 'For backwards compatibility', 'no'),
    BoolVariable('usempi', 'Compile parallel version using MPI', 'no'),
    ('MPICH_IGNORE_CXX_SEEK', 'name of macro to ignore MPI settings of C++ SEEK macro (for MPICH)' , 'MPICH_IGNORE_CXX_SEEK'),
    ('mpi_path', 'Path to MPI includes', '/usr/include'),
    ('mpi_run', 'mpirun name' , 'mpiexec -np 1'),
    ('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)', usr_lib),
    ('mpi_libs', 'MPI libraries to link with (needs to be shared!)', ['mpich' , 'pthread', 'rt']),
    ('mpi_flavour', 'Type of MPI execution environment', 'none'),
    # ParMETIS
    BoolVariable('useparmetis', 'Compile parallel version using ParMETIS', 'yes'),
    ('parmetis_path', 'Path to ParMETIS includes', '/usr/include'),
    ('parmetis_lib_path', 'Path to ParMETIS library', usr_lib),
    ('parmetis_libs', 'ParMETIS library to link with', ['parmetis', 'metis']),
    # PAPI
    BoolVariable('usepapi', 'switch on/off the usage of PAPI', 'no'),
    ('papi_path', 'Path to PAPI includes', '/usr/include'),
    ('papi_lib_path', 'Path to PAPI libs', usr_lib),
    ('papi_libs', 'PAPI libraries to link with', ['papi']),
    BoolVariable('papi_instrument_solver', 'use PAPI in Solver.c to instrument each iteration of the solver', False),
    # MKL
    BoolVariable('usemkl', 'switch on/off the usage of MKL', 'no'),
    ('mkl_path', 'Path to MKL includes', '/sw/sdev/cmkl/10.0.2.18/include'),
    ('mkl_lib_path', 'Path to MKL libs', '/sw/sdev/cmkl/10.0.2.18/lib/em64t'),
    ('mkl_libs', 'MKL libraries to link with', ['mkl_solver', 'mkl_em64t', 'guide', 'pthread']),
    # UMFPACK
    BoolVariable('useumfpack', 'switch on/off the usage of UMFPACK', 'no'),
    ('ufc_path', 'Path to UFconfig includes', '/usr/include/suitesparse'),
    ('umf_path', 'Path to UMFPACK includes', '/usr/include/suitesparse'),
    ('umf_lib_path', 'Path to UMFPACK libs', usr_lib),
    ('umf_libs', 'UMFPACK libraries to link with', ['umfpack']),
    # Silo
    BoolVariable('usesilo', 'switch on/off the usage of Silo', 'yes'),
    ('silo_path', 'Path to Silo includes', '/usr/include'),
    ('silo_lib_path', 'Path to Silo libs', usr_lib),
    ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
    # AMD (used by UMFPACK)
    ('amd_path', 'Path to AMD includes', '/usr/include/suitesparse'),
    ('amd_lib_path', 'Path to AMD libs', usr_lib),
    ('amd_libs', 'AMD libraries to link with', ['amd']),
    # BLAS (used by UMFPACK)
    ('blas_path', 'Path to BLAS includes', '/usr/include/suitesparse'),
    ('blas_lib_path', 'Path to BLAS libs', usr_lib),
    ('blas_libs', 'BLAS libraries to link with', ['blas']),
    # An option for specifying the compiler tools set (see windows branch).
    ('tools_names', 'allow control over the tools in the env setup', ['intelc']),
    # finer control over library building, intel aggressive global optimisation
    # works with dynamic libraries on windows.
    ('share_esysUtils', 'control static or dynamic esysUtils lib', False),
    ('share_paso', 'control static or dynamic paso lib', False)
)
############ Specify which compilers to use ####################

# intelc uses regular expressions improperly and emits a warning about
# failing to find the compilers. This warning can be safely ignored.

if IS_WINDOWS_PLATFORM:
    # Build a throwaway environment first so the 'tools_names' option can be
    # read, then rebuild with the requested tool set.
    env = Environment(options = opts)
    env = Environment(tools = ['default'] + env['tools_names'],
                      options = opts)
else:
    shorthost = socket.gethostname().split('.')[0]
    if shorthost == 'service0':
        env = Environment(tools = ['default', 'intelc'], options = opts)
    elif os.uname()[4] == 'ia64':
        env = Environment(tools = ['default', 'intelc'], options = opts)
        if env['CXX'] == 'icpc':
            # version >=9 of intel c++ compiler requires use of icpc to
            # link in C++ runtimes (icc does not)
            env['LINK'] = env['CXX']
    else:
        env = Environment(tools = ['default'], options = opts)
Help(opts.GenerateHelpText(env))
############ Fill in compiler options if not set above #########

# Backwards compatibility: allow dodebug=yes and useMPI=yes
if env['dodebug']:
    env['usedebug'] = 1
if env['useMPI']:
    env['usempi'] = 1

# Default compiler options (override allowed in hostname_options.py,
# but should not be necessary).
# For both C and C++ you get: cc_flags and either the optim flags or debug flags

# How do we indicate that a header is a system header. Use "" for no action.
sysheaderopt = ""
# Safe fallbacks so that an unrecognised compiler -- or the 'icl' branch,
# which relies entirely on the options file -- cannot leave these names
# undefined and crash with a NameError in the "-DEFAULT_*" substitution
# below or when pedantic/fatalwarning are used later.
cc_flags = ""
cc_optim = ""
cc_debug = ""
omp_optim = ""
omp_debug = ""
omp_libs = []
pedantic = ""
fatalwarning = ""       # Switch to turn warnings into errors
sysheaderopt = ""       # how do we indicate that a header is a system header

if env["CC"] == "icc":
    # Intel compilers
    cc_flags = "-fPIC -ansi -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1"
    cc_optim = "-O3 -ftz -IPF_ftlacc- -IPF_fma -fno-alias"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_optim = "-openmp -openmp_report0"
    omp_debug = "-openmp -openmp_report0"
    omp_libs = ['guide', 'pthread']
elif env["CC"] == "gcc":
    # GNU C on any system
    cc_flags = "-pedantic -Wall -fPIC -ansi -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing"
    # the long long warning occurs on the Mac
    cc_optim = "-O3"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_optim = "-fopenmp"
    omp_debug = "-fopenmp"
    omp_libs = ['gomp']
    pedantic = "-pedantic-errors -Wno-long-long"
    fatalwarning = "-Werror"
    sysheaderopt = "-isystem "
elif env["CC"] == "cl":
    # Microsoft Visual C on Windows
    cc_flags = "/FD /EHsc /GR /wd4068 -D_USE_MATH_DEFINES -DDLL_NETCDF"
    cc_optim = "/O2 /Op /MT /W3"
    cc_debug = "/Od /RTC1 /MTd /ZI -DBOUNDS_CHECK"
elif env["CC"] == "icl":
    # intel C on Windows, see windows_intelc_options.py for a start;
    # the fallbacks above apply here.
    pass
# If the options file did not override them, substitute the per-compiler
# defaults for the "-DEFAULT_n" sentinels declared above.
if env["cc_flags"] == "-DEFAULT_1":
    env['cc_flags'] = cc_flags
if env["cc_optim"] == "-DEFAULT_2":
    env['cc_optim'] = cc_optim
if env["cc_debug"] == "-DEFAULT_3":
    env['cc_debug'] = cc_debug
if env["omp_optim"] == "-DEFAULT_4":
    env['omp_optim'] = omp_optim
if env["omp_debug"] == "-DEFAULT_5":
    env['omp_debug'] = omp_debug
if env["omp_libs"] == "-DEFAULT_6":
    env['omp_libs'] = omp_libs

# set up the autolazy values ('leave_alone' means: define neither macro)
if env['forcelazy'] == 'on':
    env.Append(CPPDEFINES='FAUTOLAZYON')
elif env['forcelazy'] == 'off':
    env.Append(CPPDEFINES='FAUTOLAZYOFF')

# OpenMP is disabled if useopenmp=no or both variables omp_optim and omp_debug are empty
if not env["useopenmp"]:
    env['omp_optim'] = ""
    env['omp_debug'] = ""
    env['omp_libs'] = []

if env['omp_optim'] == "" and env['omp_debug'] == "":
    env["useopenmp"] = 0
|
# Windows doesn't use LD_LIBRARY_PATH but PATH instead
LD_LIBRARY_PATH_KEY = 'LD_LIBRARY_PATH'
if IS_WINDOWS_PLATFORM:
    LD_LIBRARY_PATH_KEY = 'PATH'
    env['ENV']['LD_LIBRARY_PATH'] = ''
############ Copy environment variables into scons env #########

# Variables that get a fallback value when absent from the caller's shell.
for _var, _default in (('OMP_NUM_THREADS', 1),
                       ('ESCRIPT_NUM_PROCS', 1),
                       ('ESCRIPT_NUM_NODES', 1)):
    try:
        env['ENV'][_var] = os.environ[_var]
    except KeyError:
        env['ENV'][_var] = _default

# Variables that are simply passed through when present.
for _var in ('ESCRIPT_NUM_THREADS', 'ESCRIPT_HOSTFILE', 'PATH', 'PYTHONPATH',
             'C_INCLUDE_PATH', 'CPLUS_INCLUDE_PATH', 'LIBRARY_PATH',
             'DISPLAY', 'XAUTHORITY', 'HOME'):
    try:
        env['ENV'][_var] = os.environ[_var]
    except KeyError:
        pass

# The dynamic-loader path is merged rather than overwritten.
try:
    env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ['LD_LIBRARY_PATH'])
except KeyError:
    pass

# Configure for test suite

env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
env.PrependENVPath('PYTHONPATH', prefix)
env['ENV']['ESCRIPT_ROOT'] = prefix
|
############ Set up paths for Configure() ######################

def clone_env(env):
    """Return a copy of *env*.

    scons-0.98 provides Environment.Clone(); older releases (0.96)
    only offer the equivalent Copy().
    """
    cloner = getattr(env, 'Clone', None)
    if cloner is not None:
        return cloner()
    return env.Copy()
|
# Add cc option -I<Escript>/trunk/include
env.Append(CPPPATH = [Dir('include')])

# Add cc option -L<Escript>/trunk/lib
env.Append(LIBPATH = [Dir(env['libinstall'])])

if env['cc_extra'] != '':
    env.Append(CCFLAGS = env['cc_extra'])
if env['ld_extra'] != '':
    env.Append(LINKFLAGS = env['ld_extra'])

if env['usepedantic']:
    env.Append(CCFLAGS = pedantic)

# MS Windows
if IS_WINDOWS_PLATFORM:
    # DLLs are found via PATH on Windows.
    env.AppendENVPath('PATH', [env['boost_lib_path']])
    env.AppendENVPath('PATH', [env['libinstall']])
    if not env['share_esysUtils']:
        env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
    if not env['share_paso']:
        env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])

    if env['usenetcdf']:
        env.AppendENVPath('PATH', [env['netCDF_lib_path']])

env.Append(ARFLAGS = env['ar_flags'])
|
# Get the global Subversion revision number for the getVersion() method.
# "svnversion -n ." prints e.g. "1234:1240M"; keep only the leading digits.
try:
    global_revision = os.popen("svnversion -n .").read()
    global_revision = re.sub(":.*", "", global_revision)
    global_revision = re.sub("[^0-9]", "", global_revision)
except Exception:
    # Not an svn checkout or svnversion unavailable. (Was a bare except,
    # which would also have swallowed SystemExit/KeyboardInterrupt.)
    global_revision = "-1"
if global_revision == "":
    global_revision = "-2"
env.Append(CPPDEFINES = ["SVN_VERSION=" + global_revision])
|
359 |
############ numpy (required) ############################### |
360 |
|
361 |
try: |
362 |
from numpy import identity |
363 |
except ImportError: |
364 |
print "Cannot import numpy, you need to set your PYTHONPATH" |
365 |
sys.exit(1) |
366 |
|
367 |
############ C compiler (required) ############################# |
368 |
|
369 |
# Create a Configure() environment for checking existence of required libraries and headers |
370 |
conf = Configure(clone_env(env)) |
371 |
|
372 |
# Test that the compiler is working |
373 |
if not conf.CheckFunc('printf'): |
374 |
print "Cannot run C compiler '%s' (or libc is missing)" % (env['CC']) |
375 |
sys.exit(1) |
376 |
|
377 |
if conf.CheckFunc('gethostname'): |
378 |
conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME']) |
379 |
|
380 |
############ python libraries (required) ####################### |
381 |
|
382 |
|
383 |
if not sysheaderopt =="": |
384 |
conf.env.Append(CCFLAGS=sysheaderopt+env['python_path']) |
385 |
else: |
386 |
conf.env.AppendUnique(CPPPATH = [env['python_path']]) |
387 |
|
388 |
conf.env.AppendUnique(LIBPATH = [env['python_lib_path']]) |
389 |
conf.env.AppendUnique(LIBS = [env['python_libs']]) |
390 |
|
391 |
conf.env.PrependENVPath('PYTHONPATH', prefix) |
392 |
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['python_lib_path']) # The wrapper script needs to find these libs |
393 |
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall']) |
394 |
|
395 |
if not conf.CheckCHeader('Python.h'): |
396 |
print "Cannot find python include files (tried 'Python.h' in directory %s)" % (env['python_path']) |
397 |
sys.exit(1) |
398 |
if not conf.CheckFunc('Py_Exit'): |
399 |
print "Cannot find python library method Py_Main (tried lib %s in directory %s)" % (env['python_libs'], env['python_lib_path']) |
400 |
sys.exit(1) |
401 |
|
402 |
############ boost (required) ################################## |
403 |
|
404 |
if not sysheaderopt =="": |
405 |
# This is required because we can't -isystem /usr/system because it breaks std includes |
406 |
if os.path.normpath(env['boost_path']) =="/usr/include": |
407 |
conf.env.Append(CCFLAGS=sysheaderopt+os.path.join(env['boost_path'],'boost')) |
408 |
else: |
409 |
conf.env.Append(CCFLAGS=sysheaderopt+env['boost_path']) |
410 |
else: |
411 |
conf.env.AppendUnique(CPPPATH = [env['boost_path']]) |
412 |
|
413 |
conf.env.AppendUnique(LIBPATH = [env['boost_lib_path']]) |
414 |
conf.env.AppendUnique(LIBS = [env['boost_libs']]) |
415 |
|
416 |
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['boost_lib_path']) # The wrapper script needs to find these libs |
417 |
#ensure that our path entries remain at the front |
418 |
conf.env.PrependENVPath('PYTHONPATH', prefix) |
419 |
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall']) |
420 |
|
421 |
if not conf.CheckCXXHeader('boost/python.hpp'): |
422 |
print "Cannot find boost include files (tried boost/python.hpp in directory %s)" % (env['boost_path']) |
423 |
sys.exit(1) |
424 |
|
425 |
if not conf.CheckFunc('PyObject_SetAttr'): |
426 |
print "Cannot find boost library method PyObject_SetAttr (tried method PyObject_SetAttr in library %s in directory %s)" % (env['boost_libs'], env['boost_lib_path']) |
427 |
sys.exit(1) |
428 |
|
429 |
# Commit changes to environment |
430 |
env = conf.Finish() |
431 |
|
############ VTK (optional) ####################################

# VTK is only used from Python, so a plain import test decides availability.
if env['usevtk']:
    try:
        import vtk
        env['usevtk'] = 1
    except ImportError:
        env['usevtk'] = 0

# Add VTK to environment env if it was found
if env['usevtk']:
    env.Append(CPPDEFINES = ['USE_VTK'])
|
############ NetCDF / PAPI / MKL (optional) ####################

# These three optional packages are probed identically: add the configured
# paths, check for a header and one representative function, and define a
# macro when the package is usable.
#   (use flag, include-path key, lib-path key, libs key, header, function, define)
for _use, _inc, _libdir, _libs, _header, _func, _define in (
        ('usenetcdf', 'netCDF_path', 'netCDF_lib_path', 'netCDF_libs',
         'netcdf.h', 'nc_open', 'USE_NETCDF'),
        ('usepapi', 'papi_path', 'papi_lib_path', 'papi_libs',
         'papi.h', 'PAPI_start_counters', 'BLOCKPAPI'),
        ('usemkl', 'mkl_path', 'mkl_lib_path', 'mkl_libs',
         'mkl_solver.h', 'pardiso', 'MKL')):
    # Start a new configure environment that reflects what we've already found
    conf = Configure(clone_env(env))

    if env[_use]:
        conf.env.AppendUnique(CPPPATH = [env[_inc]])
        conf.env.AppendUnique(LIBPATH = [env[_libdir]])
        conf.env.AppendUnique(LIBS = [env[_libs]])
        # The wrapper script needs to find these libs;
        # ensure that our path entries remain at the front.
        conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env[_libdir])
        conf.env.PrependENVPath('PYTHONPATH', prefix)
        conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

        if not conf.CheckCHeader(_header):
            env[_use] = 0
        if env[_use] and not conf.CheckFunc(_func):
            env[_use] = 0

    # Commit the configured environment only when the package was found.
    if env[_use]:
        env = conf.Finish()
        env.Append(CPPDEFINES = [_define])
    else:
        conf.Finish()
|
############ UMFPACK (optional) ################################

# Start a new configure environment that reflects what we've already found
conf = Configure(clone_env(env))

if env['useumfpack']:
    # UMFPACK needs its own headers/libs plus those of AMD and BLAS.
    for _k in ('ufc_path', 'umf_path', 'amd_path', 'blas_path'):
        conf.env.AppendUnique(CPPPATH = [env[_k]])
    for _k in ('umf_lib_path', 'amd_lib_path', 'blas_lib_path'):
        conf.env.AppendUnique(LIBPATH = [env[_k]])
    for _k in ('umf_libs', 'amd_libs', 'blas_libs'):
        conf.env.AppendUnique(LIBS = [env[_k]])
    # The wrapper script needs to find these libs
    for _k in ('umf_lib_path', 'amd_lib_path', 'blas_lib_path'):
        conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env[_k])
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env['useumfpack'] and not conf.CheckCHeader('umfpack.h'):
    env['useumfpack'] = 0
if env['useumfpack'] and not conf.CheckFunc('umfpack_di_symbolic'):
    env['useumfpack'] = 0
# if env['useumfpack'] and not conf.CheckFunc('daxpy'): env['useumfpack'] = 0 # this does not work on shake73?

# Add UMFPACK to environment env if it was found
if env['useumfpack']:
    env = conf.Finish()
    env.Append(CPPDEFINES = ['UMFPACK'])
else:
    conf.Finish()
|
############ Silo (optional) ###################################

if env['usesilo']:
    conf = Configure(clone_env(env))
    conf.env.AppendUnique(CPPPATH = [env['silo_path']])
    conf.env.AppendUnique(LIBPATH = [env['silo_lib_path']])
    conf.env.AppendUnique(LIBS = [env['silo_libs']])
    if not conf.CheckCHeader('silo.h'):
        env['usesilo'] = 0
    if not conf.CheckFunc('DBMkDir'):
        env['usesilo'] = 0
    conf.Finish()

# Add the path to Silo to environment env if it was found.
# Note that we do not add the libs since they are only needed for the
# escriptreader library and tools.
if env['usesilo']:
    env.AppendUnique(CPPPATH = [env['silo_path']])
    env.AppendUnique(LIBPATH = [env['silo_lib_path']])
    env.Append(CPPDEFINES = ['HAVE_SILO'])
|
############ Add the compiler flags ############################

# Enable debug by choosing either cc_debug or cc_optim (plus the matching
# OpenMP flags).
if env['usedebug']:
    chosen_flags = (env['cc_debug'], env['omp_debug'])
else:
    chosen_flags = (env['cc_optim'], env['omp_optim'])
for _f in chosen_flags:
    env.Append(CCFLAGS = _f)

# Always use cc_flags
env.Append(CCFLAGS = env['cc_flags'])
env.Append(LIBS = [env['omp_libs']])
|
############ Add some custom builders ##########################

# Byte-compile .py -> .pyc
py_builder = Builder(action = scons_extensions.build_py,
                     suffix = '.pyc', src_suffix = '.py', single_source = True)
env.Append(BUILDERS = {'PyCompile' : py_builder})

# Run a compiled unit test; a .passed stamp records success.
runUnitTest_builder = Builder(action = scons_extensions.runUnitTest,
                              suffix = '.passed',
                              src_suffix = env['PROGSUFFIX'],
                              single_source = True)
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder})

# Run a python unit test.
# BUG FIX: the keyword was misspelled 'src_suffic', so SCons silently
# ignored it and the builder had no source suffix.
runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest,
                                suffix = '.passed', src_suffix = '.py',
                                single_source = True)
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder})

# Convert .eps figures to .pdf
epstopdfbuilder = Builder(action = scons_extensions.eps2pdf,
                          suffix = '.pdf', src_suffix = '.eps',
                          single_source = True)
env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder})
|
############ MPI (optional) ####################################
if not env['usempi']:
    env['mpi_flavour'] = 'none'

# Create a modified environment for MPI programs (identical to env if usempi=no)
env_mpi = clone_env(env)

# Start a new configure environment that reflects what we've already found
conf = Configure(clone_env(env_mpi))

if env_mpi['usempi']:
    VALID_MPIs = ["MPT", "MPICH", "MPICH2", "OPENMPI", "INTELMPI"]
    if not env_mpi['mpi_flavour'] in VALID_MPIs:
        # Call form of raise (equivalent on Python 2, and forward-compatible,
        # unlike the old 'raise E, msg' statement form).
        raise ValueError("MPI is enabled but mpi_flavour = %s is not a valid key from %s." % (env_mpi['mpi_flavour'], VALID_MPIs))
    conf.env.AppendUnique(CPPPATH = [env_mpi['mpi_path']])
    conf.env.AppendUnique(LIBPATH = [env_mpi['mpi_lib_path']])
    conf.env.AppendUnique(LIBS = [env_mpi['mpi_libs']])
    # The wrapper script needs to find these libs
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['mpi_lib_path'])
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env_mpi['usempi'] and not conf.CheckCHeader('mpi.h'):
    env_mpi['usempi'] = 0
# if env_mpi['usempi'] and not conf.CheckFunc('MPI_Init'): env_mpi['usempi'] = 0

# Add MPI to environment env_mpi if it was found
if env_mpi['usempi']:
    env_mpi = conf.Finish()
    env_mpi.Append(CPPDEFINES = ['PASO_MPI', 'MPI_NO_CPPBIND', env_mpi['MPICH_IGNORE_CXX_SEEK']])
else:
    conf.Finish()

env['usempi'] = env_mpi['usempi']
|
630 |
|
############ ParMETIS (optional) ###############################

# Start a new configure environment that reflects what we've already found
conf = Configure(clone_env(env_mpi))

# ParMETIS is only usable with MPI.
if not env_mpi['usempi']:
    env_mpi['useparmetis'] = 0

if env_mpi['useparmetis']:
    conf.env.AppendUnique(CPPPATH = [env_mpi['parmetis_path']])
    conf.env.AppendUnique(LIBPATH = [env_mpi['parmetis_lib_path']])
    conf.env.AppendUnique(LIBS = [env_mpi['parmetis_libs']])
    # The wrapper script needs to find these libs
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['parmetis_lib_path'])
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

    if not conf.CheckCHeader('parmetis.h'):
        env_mpi['useparmetis'] = 0
    if env_mpi['useparmetis'] and not conf.CheckFunc('ParMETIS_V3_PartGeomKway'):
        env_mpi['useparmetis'] = 0

# Add ParMETIS to environment env_mpi if it was found
if env_mpi['useparmetis']:
    env_mpi = conf.Finish()
    env_mpi.Append(CPPDEFINES = ['USE_PARMETIS'])
else:
    conf.Finish()

env['useparmetis'] = env_mpi['useparmetis']
|
############ Now we switch on Warnings as errors ###############

# this needs to be done after configuration because the scons test files
# have warnings in them
if fatalwarning != "" and env['usewarnings']:
    env.Append(CCFLAGS = fatalwarning)
    env_mpi.Append(CCFLAGS = fatalwarning)
|
667 |
############ Summarize our environment ######################### |
668 |
|
669 |
print "" |
670 |
print "Summary of configuration (see ./config.log for information)" |
671 |
print " Using python libraries" |
672 |
print " Using numpy" |
673 |
print " Using boost" |
674 |
if env['usenetcdf']: print " Using NetCDF" |
675 |
else: print " Not using NetCDF" |
676 |
if env['usevtk']: print " Using VTK" |
677 |
else: print " Not using VTK" |
678 |
if env['usemkl']: print " Using MKL" |
679 |
else: print " Not using MKL" |
680 |
if env['useumfpack']: print " Using UMFPACK" |
681 |
else: print " Not using UMFPACK" |
682 |
if env['usesilo']: print " Using Silo" |
683 |
else: print " Not using Silo" |
684 |
if env['useopenmp']: print " Using OpenMP" |
685 |
else: print " Not using OpenMP" |
686 |
if env['usempi']: print " Using MPI (flavour = %s)"%env['mpi_flavour'] |
687 |
else: print " Not using MPI" |
688 |
if env['useparmetis']: print " Using ParMETIS" |
689 |
else: print " Not using ParMETIS (requires MPI)" |
690 |
if env['usepapi']: print " Using PAPI" |
691 |
else: print " Not using PAPI" |
692 |
if env['usedebug']: print " Compiling for debug" |
693 |
else: print " Not compiling for debug" |
694 |
print " Installing in", prefix |
695 |
if ((fatalwarning != "") and (env['usewarnings'])): print " Treating warnings as errors" |
696 |
else: print " Not treating warnings as errors" |
697 |
print "" |
698 |
|
699 |
############ Delete option-dependent files #####################

# These markers are regenerated below; stale copies from a previous build
# with different options must not survive.
for stale in ("Compiled.with.debug",
              "Compiled.with.mpi",
              "Compiled.with.openmp",
              "pyversion",
              "buildvars"):
  Execute(Delete(os.path.join(env['libinstall'], stale)))
if not env['usempi']:
  Execute(Delete(os.path.join(env['libinstall'], "pythonMPI")))
709 |
############ Build the subdirectories ##########################

from grouptest import *

TestGroups=[]

Export(
  ["env",
   "env_mpi",
   "clone_env",
   "IS_WINDOWS_PLATFORM",
   "TestGroups"
  ]
)

# (source dir, build subdir under build/$PLATFORM) for every component.
subdir_table = [
  ('tools/CppUnitTest/src',      'tools/CppUnitTest'),
  ('tools/libescriptreader/src', 'tools/libescriptreader'),
  ('paso/src',                   'paso'),
  ('escript/src',                'escript'),
  ('esysUtils/src',              'esysUtils'),
  ('finley/src',                 'finley'),
  ('modellib/py_src',            'modellib'),
  ('doc',                        'doc'),
  ('pyvisi/py_src',              'pyvisi'),
  ('pycad/py_src',               'pycad'),
  ('pythonMPI/src',              'pythonMPI'),
  ('scripts',                    'scripts'),
  ('paso/profiling',             'paso/profiling'),
]
for src_dir, build_sub in subdir_table:
  env.SConscript(dirs = [src_dir], build_dir = 'build/$PLATFORM/' + build_sub, duplicate = 0)
739 |
############ Remember what optimizations we used ###############

# Touch one marker file per enabled build option so later runs can tell
# what the installed libraries were compiled with.
remember_list = []
for option, marker in (('usedebug',  "Compiled.with.debug"),
                       ('usempi',    "Compiled.with.mpi"),
                       ('useopenmp', "Compiled.with.openmp")):
  if env[option]:
    remember_list += env.Command(os.path.join(env['libinstall'], marker), None, Touch('$TARGET'))

env.Alias('remember_options', remember_list)
755 |
############### Record python interpreter version ##############

# Record the interpreter version in <libinstall>/pyversion so the runtime
# wrapper can check it matches. Previously this shelled out with
# os.system("echo ... > file"), which depends on a POSIX shell and is
# vulnerable to quoting problems; writing the file directly is equivalent
# (same single line, trailing newline included) and portable.
if not IS_WINDOWS_PLATFORM:
  versionstring="Python "+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])
  pyversion_file = open(os.path.join(env['libinstall'],"pyversion"), 'w')
  try:
    pyversion_file.write(versionstring + "\n")
  finally:
    pyversion_file.close()
761 |
############## Populate the buildvars file #####################

# Write <libinstall>/buildvars: one key=value line per build property, read
# back later by escript's run-time environment checks.
buildvars=open(os.path.join(env['libinstall'],'buildvars'),'w')
buildvars.write('python='+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+'\n')

# Find the boost version by extracting it from version.hpp.
# BUG FIX: the original never closed boosthpp (handle leak) and wrapped the
# loop in "except StopIteration: pass" — dead code, since a for loop absorbs
# StopIteration itself. A try/finally guarantees the close instead.
boostversion='unknown'
boosthpp=open(os.path.join(env['boost_path'],'boost','version.hpp'))
try:
  for line in boosthpp:
    ver=re.match(r'#define BOOST_VERSION (\d+)',line)
    if ver:
      boostversion=ver.group(1)
finally:
  boosthpp.close()
buildvars.write("boost="+boostversion+"\n")
buildvars.write("svn_revision="+str(global_revision)+"\n")

# y/n flags for the three compile-time options remembered above.
for option in ('usedebug', 'usempi', 'useopenmp'):
  if env[option]:
    buildvars.write(option+"=y\n")
  else:
    buildvars.write(option+"=n\n")
buildvars.write("mpi_flavour="+env['mpi_flavour']+'\n')

buildvars.close()
799 |
############ Targets to build and install libraries ############

target_init = env.Command(env['pyinstall']+'/__init__.py', None, Touch('$TARGET'))
env.Alias('target_init', [target_init])

# The headers have to be installed prior to build in order to satisfy #include <paso/Common.h>
env.Alias('build_esysUtils', ['target_install_esysUtils_headers', 'target_esysUtils_a'])
env.Alias('install_esysUtils', ['build_esysUtils', 'target_install_esysUtils_a'])

env.Alias('build_paso', ['target_install_paso_headers', 'target_paso_a'])
env.Alias('install_paso', ['build_paso', 'target_install_paso_a'])

env.Alias('build_escript', ['target_install_escript_headers', 'target_escript_so', 'target_escriptcpp_so'])
env.Alias('install_escript', ['build_escript', 'target_install_escript_so', 'target_install_escriptcpp_so', 'target_install_escript_py'])

env.Alias('build_finley', ['target_install_finley_headers', 'target_finley_so', 'target_finleycpp_so'])
env.Alias('install_finley', ['build_finley', 'target_install_finley_so', 'target_install_finleycpp_so', 'target_install_finley_py'])

# Now gather all the above into a couple easy targets: build_all and install_all
build_all_list = ['build_esysUtils',
                  'build_paso',
                  'build_escript',
                  'build_finley']
if env['usempi']:
  build_all_list.append('target_pythonMPI_exe')
#if not IS_WINDOWS_PLATFORM: build_all_list += ['target_escript_wrapper']
if env['usesilo']:
  build_all_list.append('target_escript2silo')
env.Alias('build_all', build_all_list)

install_all_list = ['target_init',
                    'install_esysUtils',
                    'install_paso',
                    'install_escript',
                    'install_finley',
                    'target_install_pyvisi_py',
                    'target_install_modellib_py',
                    'target_install_pycad_py']
if env['usempi']:
  install_all_list.append('target_install_pythonMPI_exe')
#if not IS_WINDOWS_PLATFORM: install_all_list += ['target_install_escript_wrapper']
if env['usesilo']:
  install_all_list.append('target_install_escript2silo')
install_all_list.append('remember_options')
env.Alias('install_all', install_all_list)

# Default target is install
env.Default('install_all')
846 |
############ Targets to build and run the test suite ###########

# Alias name -> its prerequisite targets.
test_aliases = [
  ('build_cppunittest',   ['target_install_cppunittest_headers', 'target_cppunittest_a']),
  ('install_cppunittest', ['build_cppunittest', 'target_install_cppunittest_a']),
  ('run_tests',           ['install_all', 'target_install_cppunittest_a']),
  ('all_tests',           ['install_all', 'target_install_cppunittest_a', 'run_tests', 'py_tests']),
  ('build_full',          ['install_all', 'build_tests', 'build_py_tests']),
]
for alias_name, prerequisites in test_aliases:
  env.Alias(alias_name, prerequisites)
854 |
############ Targets to build the documentation ################

# Everything the 'docs' umbrella target produces.
doc_targets = ['examples_tarfile', 'examples_zipfile',
               'api_epydoc', 'api_doxygen',
               'guide_pdf', 'guide_html', 'install_pdf']
env.Alias('docs', doc_targets)
858 |
if not IS_WINDOWS_PLATFORM: |
859 |
try: |
860 |
utest=open("utest.sh","w") |
861 |
build_platform=os.name #Sometimes Mac python says it is posix |
862 |
if (build_platform=='posix') and platform.system()=="Darwin": |
863 |
build_platform='darwin' |
864 |
utest.write(GroupTest.makeHeader(build_platform)) |
865 |
for tests in TestGroups: |
866 |
utest.write(tests.makeString()) |
867 |
utest.close() |
868 |
os.chmod("utest.sh",stat.S_IRWXU|stat.S_IRGRP|stat.S_IXGRP|stat.S_IROTH|stat.S_IXOTH) |
869 |
print "utest.sh written" |
870 |
except IOError: |
871 |
print "Error attempting to write unittests file." |
872 |
sys.exit(1) |
873 |
|