EnsureSConsVersion(0,96,91)
EnsurePythonVersion(2,3)

import sys, os, re, socket, platform, stat

# Add our extensions
if os.path.isdir('scons'): sys.path.append('scons')
# ...

prefix = ARGUMENTS.get('prefix', Dir('#.').abspath)

# Determine where to read options from. Use:
#  1. the command line
#  2. scons/<hostname>_options.py
#  3. the name as part of a cluster
options_file = ARGUMENTS.get('options_file', None)
if not options_file:
    hostname = re.sub("[^0-9a-zA-Z]", "_", socket.gethostname().split('.')[0])
    options_file = os.path.join("scons", hostname+"_options.py")
    # If there is no options file with that name see if there is a substitute
    if not os.path.isfile(options_file):
        tmp = scons_extensions.effectiveName(hostname)
        options_file = os.path.join("scons", tmp+"_options.py")

if not os.path.isfile(options_file):
    print "Options file not found (expected '%s')" % options_file
    options_file = False
else:
    print "Options file is", options_file

# ...

  ('ld_extra', 'Extra linker flags', ''),
  ('sys_libs', 'System libraries to link with', []),
  ('ar_flags', 'Static library archiver flags to use', ''),
  BoolOption('useopenmp', 'Compile parallel version using OpenMP', 'no'),
  BoolOption('usepedantic', 'Compile with -pedantic if using gcc', 'no'),
  BoolOption('usewarnings','Compile with warnings as errors if using gcc','yes'),
  ('forcelazy','for testing use only - set the default value for autolazy','leave_alone'),
# Python
  ('python_path', 'Path to Python includes', '/usr/include/'+python_version),
  ('python_lib_path', 'Path to Python libs', usr_lib),
# ...
  ('mpi_run', 'mpirun name' , 'mpiexec -np 1'),
  ('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)', usr_lib),
  ('mpi_libs', 'MPI libraries to link with (needs to be shared!)', ['mpich' , 'pthread', 'rt']),
  ('mpi_flavour','Type of MPI execution environment','none'),
# ParMETIS
  BoolOption('useparmetis', 'Compile parallel version using ParMETIS', 'yes'),
  ('parmetis_path', 'Path to ParMETIS includes', '/usr/include'),
# ...
  ('umf_path', 'Path to UMFPACK includes', '/usr/include/suitesparse'),
  ('umf_lib_path', 'Path to UMFPACK libs', usr_lib),
  ('umf_libs', 'UMFPACK libraries to link with', ['umfpack']),
# Silo
  BoolOption('usesilo', 'switch on/off the usage of Silo', 'yes'),
  ('silo_path', 'Path to Silo includes', '/usr/include'),
  ('silo_lib_path', 'Path to Silo libs', usr_lib),
  ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
# AMD (used by UMFPACK)
  ('amd_path', 'Path to AMD includes', '/usr/include/suitesparse'),
  ('amd_lib_path', 'Path to AMD libs', usr_lib),
# ...

# Default compiler options (override allowed in hostname_options.py, but should not be necessary)
# For both C and C++ you get: cc_flags and either the optim flags or debug flags

sysheaderopt = ""   # how do we indicate that a header is a system header. Use "" for no action.

if env["CC"] == "icc":
    # Intel compilers
    cc_flags = "-fPIC -ansi -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1"
    # ...
    omp_libs = ['guide', 'pthread']
    pedantic = ""
    fatalwarning = ""   # Switch to turn warnings into errors
    sysheaderopt = ""
elif env["CC"] == "gcc":
    # GNU C on any system
    cc_flags = "-pedantic -Wall -fPIC -ansi -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing"
    #the strict aliasing warning is triggered by some type punning in the boost headers for version 1.34
    #the long long warning occurs on the Mac
    #isystem does not seem to prevent this
    cc_optim = "-O3"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_optim = "-fopenmp"
    omp_debug = "-fopenmp"
    omp_libs = ['gomp']
    pedantic = "-pedantic-errors -Wno-long-long"
    fatalwarning = "-Werror"
    sysheaderopt = "-isystem "
elif env["CC"] == "cl":
    # Microsoft Visual C on Windows
    cc_flags = "/FD /EHsc /GR /wd4068 -D_USE_MATH_DEFINES -DDLL_NETCDF"
    # ...
    omp_libs = []
    pedantic = ""
    fatalwarning = ""
    sysheaderopt = ""
elif env["CC"] == "icl":
    # intel C on Windows, see windows_intelc_options.py for a start
    pedantic = ""
    fatalwarning = ""
    sysheaderopt = ""


# If not specified in hostname_options.py then set them here
if env["cc_flags"] == "-DEFAULT_1": env['cc_flags'] = cc_flags
# ...
if env["omp_debug"] == "-DEFAULT_5": env['omp_debug'] = omp_debug
if env["omp_libs"] == "-DEFAULT_6": env['omp_libs'] = omp_libs

#set up the autolazy values
if env['forcelazy'] != "leave_alone":
    if env['forcelazy'] == 'on':
        env.Append(CPPDEFINES='FAUTOLAZYON')
    else:
        if env['forcelazy'] == 'off':
            env.Append(CPPDEFINES='FAUTOLAZYOFF')

# OpenMP is disabled if useopenmp=no or both variables omp_optim and omp_debug are empty
if not env["useopenmp"]:
    env['omp_optim'] = ""
    # ...

if env['omp_optim'] == "" and env['omp_debug'] == "": env["useopenmp"] = 0

# Windows doesn't use LD_LIBRARY_PATH but PATH instead
if IS_WINDOWS_PLATFORM:
    LD_LIBRARY_PATH_KEY='PATH'
    env['ENV']['LD_LIBRARY_PATH']=''
else:
    LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'
############ Copy environment variables into scons env #########

try: env['ENV']['OMP_NUM_THREADS'] = os.environ['OMP_NUM_THREADS']
except KeyError: env['ENV']['OMP_NUM_THREADS'] = 1

try: env['ENV']['ESCRIPT_NUM_THREADS'] = os.environ['ESCRIPT_NUM_THREADS']
except KeyError: pass

try: env['ENV']['ESCRIPT_NUM_PROCS'] = os.environ['ESCRIPT_NUM_PROCS']
except KeyError: env['ENV']['ESCRIPT_NUM_PROCS']=1

try: env['ENV']['ESCRIPT_NUM_NODES'] = os.environ['ESCRIPT_NUM_NODES']
except KeyError: env['ENV']['ESCRIPT_NUM_NODES']=1

try: env['ENV']['ESCRIPT_HOSTFILE'] = os.environ['ESCRIPT_HOSTFILE']
except KeyError: pass

try: env['ENV']['PATH'] = os.environ['PATH']
except KeyError: pass

# ...

try: env['ENV']['CPLUS_INCLUDE_PATH'] = os.environ['CPLUS_INCLUDE_PATH']
except KeyError: pass

try: env.PrependENVPath(LD_LIBRARY_PATH_KEY,os.environ['LD_LIBRARY_PATH'])
except KeyError: pass

try: env['ENV']['LIBRARY_PATH'] = os.environ['LIBRARY_PATH']
# ...
except KeyError: pass

# Configure for test suite

env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
env.PrependENVPath('PYTHONPATH', prefix)
env['ENV']['ESCRIPT_ROOT'] = prefix

############ Set up paths for Configure() ######################
# ...

# MS Windows
if IS_WINDOWS_PLATFORM:
    env.AppendENVPath('PATH', [env['boost_lib_path']])
    env.AppendENVPath('PATH', [env['libinstall']])
    if not env['share_esysUtils'] :
        env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
    if not env['share_paso'] :
        env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])

    if env['usenetcdf']:
        env.AppendENVPath('PATH', [env['netCDF_lib_path']])

env.Append(ARFLAGS = env['ar_flags'])
# ...

# Test that the compiler is working
if not conf.CheckFunc('printf'):
    print "Cannot run C compiler '%s' (or libc is missing)" % (env['CC'])
    sys.exit(1)

if conf.CheckFunc('gethostname'):
    conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])

############ python libraries (required) #######################

if not sysheaderopt =="":
    conf.env.Append(CCFLAGS=sysheaderopt+env['python_path'])
else:
    conf.env.AppendUnique(CPPPATH = [env['python_path']])

conf.env.AppendUnique(LIBPATH = [env['python_lib_path']])
conf.env.AppendUnique(LIBS = [env['python_libs']])

conf.env.PrependENVPath('PYTHONPATH', prefix)
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['python_lib_path'])   # The wrapper script needs to find these libs
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if not conf.CheckCHeader('Python.h'):
    print "Cannot find python include files (tried 'Python.h' in directory %s)" % (env['python_path'])
    sys.exit(1)
if not conf.CheckFunc('Py_Exit'):
    print "Cannot find python library method Py_Exit (tried lib %s in directory %s)" % (env['python_libs'], env['python_lib_path'])
    sys.exit(1)

############ boost (required) ##################################

if not sysheaderopt =="":
    # This is required because we can't use -isystem on /usr/include since it breaks std includes
    if os.path.normpath(env['boost_path']) =="/usr/include":
        conf.env.Append(CCFLAGS=sysheaderopt+os.path.join(env['boost_path'],'boost'))
    else:
        conf.env.Append(CCFLAGS=sysheaderopt+env['boost_path'])
else:
    conf.env.AppendUnique(CPPPATH = [env['boost_path']])

conf.env.AppendUnique(LIBPATH = [env['boost_lib_path']])
conf.env.AppendUnique(LIBS = [env['boost_libs']])

conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['boost_lib_path'])   # The wrapper script needs to find these libs
#ensure that our path entries remain at the front
conf.env.PrependENVPath('PYTHONPATH', prefix)
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if not conf.CheckCXXHeader('boost/python.hpp'):
    print "Cannot find boost include files (tried boost/python.hpp in directory %s)" % (env['boost_path'])
    sys.exit(1)

if not conf.CheckFunc('PyObject_SetAttr'):
    print "Cannot find boost library method PyObject_SetAttr (tried method PyObject_SetAttr in library %s in directory %s)" % (env['boost_libs'], env['boost_lib_path'])
    sys.exit(1)
# ...

conf.env.AppendUnique(CPPPATH = [env['netCDF_path']])
conf.env.AppendUnique(LIBPATH = [env['netCDF_lib_path']])
conf.env.AppendUnique(LIBS = [env['netCDF_libs']])
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['netCDF_lib_path'])   # The wrapper script needs to find these libs
#ensure that our path entries remain at the front
conf.env.PrependENVPath('PYTHONPATH', prefix)
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env['usenetcdf'] and not conf.CheckCHeader('netcdf.h'): env['usenetcdf'] = 0
if env['usenetcdf'] and not conf.CheckFunc('nc_open'): env['usenetcdf'] = 0

# ...

conf.env.AppendUnique(CPPPATH = [env['papi_path']])
conf.env.AppendUnique(LIBPATH = [env['papi_lib_path']])
conf.env.AppendUnique(LIBS = [env['papi_libs']])
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['papi_lib_path'])   # The wrapper script needs to find these libs
#ensure that our path entries remain at the front
conf.env.PrependENVPath('PYTHONPATH', prefix)
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env['usepapi'] and not conf.CheckCHeader('papi.h'): env['usepapi'] = 0
if env['usepapi'] and not conf.CheckFunc('PAPI_start_counters'): env['usepapi'] = 0
# ...

conf.env.AppendUnique(CPPPATH = [env['mkl_path']])
conf.env.AppendUnique(LIBPATH = [env['mkl_lib_path']])
conf.env.AppendUnique(LIBS = [env['mkl_libs']])
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['mkl_lib_path'])   # The wrapper script needs to find these libs
#ensure that our path entries remain at the front
conf.env.PrependENVPath('PYTHONPATH', prefix)
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env['usemkl'] and not conf.CheckCHeader('mkl_solver.h'): env['usemkl'] = 0
if env['usemkl'] and not conf.CheckFunc('pardiso'): env['usemkl'] = 0

# Add MKL to environment env if it was found
if env['usemkl']:
# ...

conf.env.AppendUnique(CPPPATH = [env['blas_path']])
conf.env.AppendUnique(LIBPATH = [env['blas_lib_path']])
conf.env.AppendUnique(LIBS = [env['blas_libs']])
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['umf_lib_path'])   # The wrapper script needs to find these libs
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['amd_lib_path'])   # The wrapper script needs to find these libs
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['blas_lib_path'])   # The wrapper script needs to find these libs
#ensure that our path entries remain at the front
conf.env.PrependENVPath('PYTHONPATH', prefix)
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env['useumfpack'] and not conf.CheckCHeader('umfpack.h'): env['useumfpack'] = 0
if env['useumfpack'] and not conf.CheckFunc('umfpack_di_symbolic'): env['useumfpack'] = 0
# if env['useumfpack'] and not conf.CheckFunc('daxpy'): env['useumfpack'] = 0 # this does not work on shake73?

# Add UMFPACK to environment env if it was found
if env['useumfpack']:
    # ...
else:
    conf.Finish()

############ Silo (optional) ###################################

if env['usesilo']:
    conf = Configure(clone_env(env))
    conf.env.AppendUnique(CPPPATH = [env['silo_path']])
    conf.env.AppendUnique(LIBPATH = [env['silo_lib_path']])
    conf.env.AppendUnique(LIBS = [env['silo_libs']])
    if not conf.CheckCHeader('silo.h'): env['usesilo'] = 0
    if not conf.CheckFunc('DBMkDir'): env['usesilo'] = 0
    conf.Finish()

# Add the path to Silo to environment env if it was found.
# Note that we do not add the libs since they are only needed for the
# escriptreader library and tools.
if env['usesilo']:
    env.AppendUnique(CPPPATH = [env['silo_path']])
    env.AppendUnique(LIBPATH = [env['silo_lib_path']])
    env.Append(CPPDEFINES = ['HAVE_SILO'])

############ Add the compiler flags ############################

# Enable debug by choosing either cc_debug or cc_optim
# ...
env.Append(CCFLAGS = env['cc_flags'])
env.Append(LIBS = [env['omp_libs']])

############ Add some custom builders ##########################

py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
env.Append(BUILDERS = {'PyCompile' : py_builder});

runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});

runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});

epstopdfbuilder = Builder(action = scons_extensions.eps2pdf, suffix=".pdf", src_suffix=".eps", single_source=True)
env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
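# The builders registered above can be invoked like any other SCons builder
# from the SConscript files; a sketch (file names are purely illustrative):
#
#   env.PyCompile('somemodule.py')            # -> somemodule.pyc
#   env.RunPyUnitTest('run_some_test.py')     # -> run_some_test.passed
#
# single_source=True means each call handles one source file at a time.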

############ MPI (optional) ####################################

if not env['usempi']: env['mpi_flavour']='none'

# Create a modified environment for MPI programs (identical to env if usempi=no)
env_mpi = clone_env(env)
# ...
conf = Configure(clone_env(env_mpi))

if env_mpi['usempi']:
    VALID_MPIs=[ "MPT", "MPICH", "MPICH2", "OPENMPI", "INTELMPI" ]
    if not env_mpi['mpi_flavour'] in VALID_MPIs:
        raise ValueError,"MPI is enabled but mpi_flavour = %s is not a valid key from %s."%( env_mpi['mpi_flavour'],VALID_MPIs)
    conf.env.AppendUnique(CPPPATH = [env_mpi['mpi_path']])
    conf.env.AppendUnique(LIBPATH = [env_mpi['mpi_lib_path']])
    conf.env.AppendUnique(LIBS = [env_mpi['mpi_libs']])
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['mpi_lib_path'])   # The wrapper script needs to find these libs
    #ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env_mpi['usempi'] and not conf.CheckCHeader('mpi.h'): env_mpi['usempi'] = 0
# if env_mpi['usempi'] and not conf.CheckFunc('MPI_Init'): env_mpi['usempi'] = 0

# Add MPI to environment env_mpi if it was found
if env_mpi['usempi']:
    # ...

env['usempi'] = env_mpi['usempi']
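# With the options and checks above, an MPI-enabled build would typically be
# requested on the scons command line, along the lines of (illustrative only):
#
#   scons usempi=yes mpi_flavour=OPENMPI useopenmp=no
#
# where mpi_flavour must be one of the VALID_MPIs listed above.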


############ ParMETIS (optional) ###############################

# Start a new configure environment that reflects what we've already found
# ...
conf.env.AppendUnique(CPPPATH = [env_mpi['parmetis_path']])
conf.env.AppendUnique(LIBPATH = [env_mpi['parmetis_lib_path']])
conf.env.AppendUnique(LIBS = [env_mpi['parmetis_libs']])
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['parmetis_lib_path'])   # The wrapper script needs to find these libs
#ensure that our path entries remain at the front
conf.env.PrependENVPath('PYTHONPATH', prefix)
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env_mpi['useparmetis'] and not conf.CheckCHeader('parmetis.h'): env_mpi['useparmetis'] = 0
if env_mpi['useparmetis'] and not conf.CheckFunc('ParMETIS_V3_PartGeomKway'): env_mpi['useparmetis'] = 0
# ...

else: print "  Not using MKL"
if env['useumfpack']: print "  Using UMFPACK"
else: print "  Not using UMFPACK"
if env['usesilo']: print "  Using Silo"
else: print "  Not using Silo"
if env['useopenmp']: print "  Using OpenMP"
else: print "  Not using OpenMP"
if env['usempi']: print "  Using MPI (flavour = %s)"%env['mpi_flavour']
else: print "  Not using MPI"
if env['useparmetis']: print "  Using ParMETIS"
else: print "  Not using ParMETIS (requires MPI)"
# ...

############ Delete option-dependent files #####################

Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.debug")))
Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.mpi")))
Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.openmp")))
Execute(Delete(os.path.join(env['libinstall'],"pyversion")))
Execute(Delete(os.path.join(env['libinstall'],"buildvars")))
if not env['usempi']: Execute(Delete(os.path.join(env['libinstall'],"pythonMPI")))


############ Build the subdirectories ##########################

from grouptest import *

TestGroups=[]

Export(
  ["env",
   "env_mpi",
   "clone_env",
   "IS_WINDOWS_PLATFORM",
   "TestGroups"
  ]
)
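# The SConscript files listed below pick up these exported names with SCons'
# Import(); a typical first line of such a file would be, for example:
#
#   Import('env', 'env_mpi', 'TestGroups')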

env.SConscript(dirs = ['tools/CppUnitTest/src'], build_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0)
env.SConscript(dirs = ['tools/libescriptreader/src'], build_dir='build/$PLATFORM/tools/libescriptreader', duplicate=0)
env.SConscript(dirs = ['paso/src'], build_dir='build/$PLATFORM/paso', duplicate=0)
env.SConscript(dirs = ['escript/src'], build_dir='build/$PLATFORM/escript', duplicate=0)
env.SConscript(dirs = ['esysUtils/src'], build_dir='build/$PLATFORM/esysUtils', duplicate=0)
# ...
env.SConscript(dirs = ['pycad/py_src'], build_dir='build/$PLATFORM/pycad', duplicate=0)
env.SConscript(dirs = ['pythonMPI/src'], build_dir='build/$PLATFORM/pythonMPI', duplicate=0)
env.SConscript(dirs = ['scripts'], build_dir='build/$PLATFORM/scripts', duplicate=0)
env.SConscript(dirs = ['paso/profiling'], build_dir='build/$PLATFORM/paso/profiling', duplicate=0)


############ Remember what optimizations we used ###############

remember_list = []

if env['usedebug']:
    remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.debug"), None, Touch('$TARGET'))

if env['usempi']:
    remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.mpi"), None, Touch('$TARGET'))

if env['useopenmp']:
    remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.openmp"), None, Touch('$TARGET'))

env.Alias('remember_options', remember_list)


############### Record python interpreter version ##############

if not IS_WINDOWS_PLATFORM:
    versionstring="Python "+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])
    os.system("echo "+versionstring+" > "+os.path.join(env['libinstall'],"pyversion"))

############## Populate the buildvars file #####################

buildvars=open(os.path.join(env['libinstall'],'buildvars'),'w')
buildvars.write('python='+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+'\n')

# Find the boost version by extracting it from version.hpp
boosthpp=open(os.path.join(env['boost_path'],'boost','version.hpp'))
boostversion='unknown'
try:
    for line in boosthpp:
        ver=re.match(r'#define BOOST_VERSION (\d+)',line)
        if ver:
            boostversion=ver.group(1)
except StopIteration:
    pass
buildvars.write("boost="+boostversion+"\n")
buildvars.write("svn_revision="+str(global_revision)+"\n")
out="usedebug="
if env['usedebug']:
    out+="y"
else:
    out+="n"
out+="\nusempi="
if env['usempi']:
    out+="y"
else:
    out+="n"
out+="\nuseopenmp="
if env['useopenmp']:
    out+="y"
else:
    out+="n"
buildvars.write(out+"\n")
buildvars.write("mpi_flavour="+env['mpi_flavour']+'\n')

buildvars.close()
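# The buildvars file written above is a plain key=value list; with the writes
# above it would look something like this (values depend entirely on the build):
#
#   python=2.5.2
#   boost=103401
#   svn_revision=2367
#   usedebug=n
#   usempi=y
#   useopenmp=y
#   mpi_flavour=OPENMPI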


############ Targets to build and install libraries ############

target_init = env.Command(env['pyinstall']+'/__init__.py', None, Touch('$TARGET'))
# ...
build_all_list += ['build_escript']
build_all_list += ['build_finley']
if env['usempi']: build_all_list += ['target_pythonMPI_exe']
#if not IS_WINDOWS_PLATFORM: build_all_list += ['target_escript_wrapper']
if env['usesilo']: build_all_list += ['target_escript2silo']
env.Alias('build_all', build_all_list)

install_all_list = []
# ...
install_all_list += ['target_install_modellib_py']
install_all_list += ['target_install_pycad_py']
if env['usempi']: install_all_list += ['target_install_pythonMPI_exe']
#if not IS_WINDOWS_PLATFORM: install_all_list += ['target_install_escript_wrapper']
if env['usesilo']: install_all_list += ['target_install_escript2silo']
install_all_list += ['remember_options']
env.Alias('install_all', install_all_list)

# ...

env.Alias('install_cppunittest', ['build_cppunittest', 'target_install_cppunittest_a'])
env.Alias('run_tests', ['install_all', 'target_install_cppunittest_a'])
env.Alias('all_tests', ['install_all', 'target_install_cppunittest_a', 'run_tests', 'py_tests'])
env.Alias('build_full',['install_all','build_tests','build_py_tests'])

############ Targets to build the documentation ################

env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'guide_pdf', 'guide_html','install_pdf'])

if not IS_WINDOWS_PLATFORM:
    try:
        utest=open("utest.sh","w")
        build_platform=os.name   #Sometimes Mac python says it is posix
        if (build_platform=='posix') and platform.system()=="Darwin":
            build_platform='darwin'
        utest.write(GroupTest.makeHeader(build_platform))
        for tests in TestGroups:
            utest.write(tests.makeString())
        utest.close()
        os.chmod("utest.sh",stat.S_IRWXU|stat.S_IRGRP|stat.S_IXGRP|stat.S_IROTH|stat.S_IXOTH)
        print "utest.sh written"
    except IOError:
        print "Error attempting to write unittests file."
        sys.exit(1)