/[escript]/branches/domexper/SConstruct
Revision 3079 - Tue Aug 3 04:04:51 2010 UTC by jfenwick - 40493 bytes
Some experiments on finley


########################################################
#
# Copyright (c) 2003-2010 by University of Queensland
# Earth Systems Science Computational Center (ESSCC)
# http://www.uq.edu.au/esscc
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
########################################################


EnsureSConsVersion(0,96,91)
EnsurePythonVersion(2,3)

import sys, os, re, socket, platform, stat
# For copy()
import shutil

# Add our extensions
if os.path.isdir('scons'): sys.path.append('scons')
import scons_extensions

# Use /usr/lib64 if available, else /usr/lib
usr_lib = '/usr/lib'
if os.path.isfile('/usr/lib64/libc.so'): usr_lib = '/usr/lib64'

# The string python2.4, python2.5, etc. for the running interpreter
python_version = 'python%s.%s' % (sys.version_info[0], sys.version_info[1])

# MS Windows support, many thanks to PH
IS_WINDOWS_PLATFORM = (os.name == "nt")

prefix = ARGUMENTS.get('prefix', Dir('#.').abspath)

# Holds names of variables from the calling environment which need to be passed
# to tools
env_export = []

# Determine where to read options from, in order of priority:
# 1. command line
# 2. scons/<hostname>_options.py
# 3. name as part of a cluster
options_file = ARGUMENTS.get('options_file', None)
effective_hostname = socket.gethostname().split('.')[0]
if not options_file:
    mangledhostname = re.sub("[^0-9a-zA-Z]", "_", effective_hostname)
    options_file = os.path.join("scons", mangledhostname+"_options.py")
    # If there is no options file with that name see if there is a substitute
    if not os.path.isfile(options_file):
        effective_hostname = scons_extensions.effectiveName(effective_hostname)
        mangledhostname = re.sub("[^0-9a-zA-Z]", "_", effective_hostname)
        options_file = os.path.join("scons", mangledhostname+"_options.py")

if not os.path.isfile(options_file):
    print "Options file not found (expected '%s')" % options_file
    options_file = False
else:
    print "Options file is", options_file

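# Note: an options file can also be given explicitly on the command line,
# e.g. "scons options_file=scons/myhost_options.py" (the host name here is
# purely illustrative).
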
# Does our scons support the newer Variables class or do we need to use Options?

try:
    dummyvar = Variables
    opts = Variables(options_file, ARGUMENTS)
    adder = opts.AddVariables
except:
    opts = Options(options_file, ARGUMENTS)
    adder = opts.AddOptions
    BoolVariable = BoolOption

############ Load build options ################################

adder(
#opts.AddOptions(
# Where to install esys stuff
  ('prefix', 'where everything will be installed', Dir('#.').abspath),
  ('incinstall', 'where the esys headers will be installed', os.path.join(Dir('#.').abspath,'include')),
  ('bininstall', 'where the esys binaries will be installed', os.path.join(prefix,'bin')),
  ('libinstall', 'where the esys libraries will be installed', os.path.join(prefix,'lib')),
  ('pyinstall', 'where the esys python modules will be installed', os.path.join(prefix,'esys')),
# Compilation options
  BoolVariable('dodebug', 'For backwards compatibility', 'no'),
  BoolVariable('usedebug', 'Do you want a debug build?', 'no'),
  BoolVariable('usevtk', 'Do you want to use VTK?', 'yes'),
  ('options_file', 'File of paths/options. Default: scons/<hostname>_options.py', options_file),
  ('cc', 'path to C compiler', 'DEFAULT'),
  ('cxx', 'path to C++ compiler', 'DEFAULT'),
  ('win_cc_name', 'windows C compiler name if needed', 'msvc'),
# The strings -DEFAULT_ get replaced by scons/<hostname>_options.py or by defaults below
  ('cc_flags', 'C/C++ compiler flags to use', '-DEFAULT_1'),
  ('cc_optim', 'C/C++ optimization flags to use', '-DEFAULT_2'),
  ('cc_debug', 'C/C++ debug flags to use', '-DEFAULT_3'),
  ('omp_optim', 'OpenMP compiler flags to use (Release build)', '-DEFAULT_4'),
  ('omp_debug', 'OpenMP compiler flags to use (Debug build)', '-DEFAULT_5'),
  ('omp_libs', 'OpenMP compiler libraries to link with', '-DEFAULT_6'),
  ('cc_extra', 'Extra C compiler flags', ''),
  ('cxx_extra', 'Extra C++ compiler flags', ''),
  ('ld_extra', 'Extra linker flags', ''),
  ('sys_libs', 'System libraries to link with', []),
  ('ar_flags', 'Static library archiver flags to use', ''),
  BoolVariable('useopenmp', 'Compile parallel version using OpenMP', 'no'),
  BoolVariable('usepedantic', 'Compile with -pedantic if using gcc', 'no'),
  BoolVariable('usewarnings', 'Compile with warnings as errors if using gcc', 'yes'),
  ('forcelazy', 'for testing use only - set the default value for autolazy', 'leave_alone'),
  ('forcecollres', 'for testing use only - set the default value for force resolving collective ops', 'leave_alone'),
# Python
  ('python_path', 'Path to Python includes', '/usr/include/'+python_version),
  ('python_lib_path', 'Path to Python libs', usr_lib),
  ('python_libs', 'Python libraries to link with', [python_version]),
  ('python_cmd', 'Python command', 'python'),
# Boost
  ('boost_path', 'Path to Boost includes', '/usr/include'),
  ('boost_lib_path', 'Path to Boost libs', usr_lib),
  ('boost_libs', 'Boost libraries to link with', ['boost_python']),
# NetCDF
  BoolVariable('usenetcdf', 'switch on/off the usage of netCDF', 'yes'),
  ('netCDF_path', 'Path to netCDF includes', '/usr/include'),
  ('netCDF_lib_path', 'Path to netCDF libs', usr_lib),
  ('netCDF_libs', 'netCDF C++ libraries to link with', ['netcdf_c++', 'netcdf']),
# MPI
  BoolVariable('useMPI', 'For backwards compatibility', 'no'),
  BoolVariable('usempi', 'Compile parallel version using MPI', 'no'),
  ('MPICH_IGNORE_CXX_SEEK', 'name of macro to ignore MPI settings of C++ SEEK macro (for MPICH)', 'MPICH_IGNORE_CXX_SEEK'),
  ('mpi_path', 'Path to MPI includes', '/usr/include'),
  ('mpi_run', 'mpirun name', 'mpiexec -np 1'),
  ('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)', usr_lib),
  ('mpi_libs', 'MPI libraries to link with (need to be shared!)', []),
  ('mpi_flavour', 'Type of MPI execution environment', 'none'),
# ParMETIS
  BoolVariable('useparmetis', 'Compile parallel version using ParMETIS', 'yes'),
  ('parmetis_path', 'Path to ParMETIS includes', '/usr/include'),
  ('parmetis_lib_path', 'Path to ParMETIS library', usr_lib),
  ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
# PAPI
  BoolVariable('usepapi', 'switch on/off the usage of PAPI', 'no'),
  ('papi_path', 'Path to PAPI includes', '/usr/include'),
  ('papi_lib_path', 'Path to PAPI libs', usr_lib),
  ('papi_libs', 'PAPI libraries to link with', ['papi']),
  BoolVariable('papi_instrument_solver', 'use PAPI in Solver.c to instrument each iteration of the solver', False),
# MKL
  BoolVariable('usemkl', 'switch on/off the usage of MKL', 'no'),
  ('mkl_path', 'Path to MKL includes', '/sw/sdev/cmkl/10.0.2.18/include'),
  ('mkl_lib_path', 'Path to MKL libs', '/sw/sdev/cmkl/10.0.2.18/lib/em64t'),
  ('mkl_libs', 'MKL libraries to link with', ['mkl_solver', 'mkl_em64t', 'guide', 'pthread']),
# UMFPACK
  BoolVariable('useumfpack', 'switch on/off the usage of UMFPACK', 'no'),
  ('ufc_path', 'Path to UFconfig includes', '/usr/include/suitesparse'),
  ('umf_path', 'Path to UMFPACK includes', '/usr/include/suitesparse'),
  ('umf_lib_path', 'Path to UMFPACK libs', usr_lib),
  ('umf_libs', 'UMFPACK libraries to link with', ['umfpack']),
# Silo
  BoolVariable('usesilo', 'switch on/off the usage of Silo', 'yes'),
  ('silo_path', 'Path to Silo includes', '/usr/include'),
  ('silo_lib_path', 'Path to Silo libs', usr_lib),
  ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
# AMD (used by UMFPACK)
  ('amd_path', 'Path to AMD includes', '/usr/include/suitesparse'),
  ('amd_lib_path', 'Path to AMD libs', usr_lib),
  ('amd_libs', 'AMD libraries to link with', ['amd']),
# BLAS (used by UMFPACK)
  ('blas_path', 'Path to BLAS includes', '/usr/include/suitesparse'),
  ('blas_lib_path', 'Path to BLAS libs', usr_lib),
  ('blas_libs', 'BLAS libraries to link with', ['blas']),
# Lapack options
  BoolVariable('uselapack', 'switch on/off use of Lapack', 'no'),
  ('lapack_path', 'Path to Lapack includes', '/usr/include'),
  ('lapack_lib_path', 'Path to Lapack libs', usr_lib),
  ('lapack_libs', 'Lapack libraries to link with', []),
  ('lapack_type', '{clapack,mkl}', 'clapack'),
# An option for specifying the compiler tools set (see windows branch).
  ('tools_names', 'allow control over the tools in the env setup', ['default']),
# Finer control over library building; intel aggressive global optimisation
# works with dynamic libraries on windows.
  ('share_esysUtils', 'control static or dynamic esysUtils lib', False),
  ('share_paso', 'control static or dynamic paso lib', False),
  ('env_export', 'Environment variables to be passed to children', []),
# To enable passing function pointers through python
  BoolVariable('iknowwhatimdoing', 'allow nonstandard C', False)
)
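
# For reference, a host options file simply assigns to any of the variables
# declared above. A minimal sketch (host name and paths below are hypothetical,
# not taken from this repository), saved as scons/myhost_options.py:
#
#   cc_flags = '-fPIC -Wall'
#   boost_path = '/opt/boost/include'
#   boost_lib_path = '/opt/boost/lib'
#   useopenmp = 'yes'
#   usempi = 'no'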


###################

# This is only to support old versions of scons which don't accept
# the variant_dir parameter (older than 0.98 I think).
# Once these are no longer an issue we can go back to a direct call
# to obj.SConscript
import SCons
vs = SCons.__version__.split('.')
cantusevariantdir = float(vs[0]+'.'+vs[1]) < 0.98


def CallSConscript(obj, **kw):
    if cantusevariantdir:
        if 'variant_dir' in kw:
            kw['build_dir'] = kw['variant_dir']
            del kw['variant_dir']
    obj.SConscript(**kw)
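
# CallSConscript is used below instead of calling env.SConscript directly so
# that the same call works with both the old build_dir and the newer
# variant_dir keyword.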


############ Specify which compilers to use ####################

# intelc uses regular expressions improperly and emits a warning about
# failing to find the compilers. This warning can be safely ignored.

if IS_WINDOWS_PLATFORM:
    env = Environment(options = opts)
    env = Environment(tools = ['default'] + env['tools_names'],
                      options = opts)
else:
    if os.uname()[4] == 'ia64':
        env = Environment(tools = ['default', 'intelc'], options = opts)
        if env['CXX'] == 'icpc':
            env['LINK'] = env['CXX'] # version >=9 of intel c++ compiler requires use of icpc to link in C++ runtimes (icc does not)
    else:
        env = Environment(tools = ['default'], options = opts)
        if env['tools_names'] != 'default':
            env = Environment(tools = ['default'] + env['tools_names'], options = opts)

# Override compiler choice if provided
if env['cc'] != 'DEFAULT': env['CC'] = env['cc']
if env['cxx'] != 'DEFAULT': env['CXX'] = env['cxx']

Help(opts.GenerateHelpText(env))

############ Make sure target directories exist ################

if not os.path.isdir(env['bininstall']):
    os.makedirs(env['bininstall'])
if not os.path.isdir(env['libinstall']):
    os.makedirs(env['libinstall'])
if not os.path.isdir(env['pyinstall']):
    os.makedirs(env['pyinstall'])

########## Copy required environment vars ######################

for i in env['env_export']:
    env.Append(ENV = {i:os.environ[i]})
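
# A sketch of how env_export is typically used (the variable names are
# illustrative only): the options file can list environment variables to
# forward to child processes, e.g.
#   env_export = ['MODULESHOME', 'INTEL_LICENSE_FILE']
# and each listed variable is copied from os.environ into env['ENV'] above.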

############ Fill in compiler options if not set above #########

# Backwards compatibility: allow dodebug=yes and useMPI=yes
if env['dodebug']: env['usedebug'] = 1
if env['useMPI']: env['usempi'] = 1

# Default compiler options (override allowed in hostname_options.py, but should not be necessary)
# For both C and C++ you get: cc_flags and either the optim flags or debug flags

sysheaderopt = ""  # compiler option used to mark a directory as containing system headers; "" means no special treatment

cc_flags = ""
cc_optim = ""
cc_debug = ""
omp_optim = ""
omp_debug = ""
omp_libs = []

if env["CC"] == "icc":
    # Intel compilers
    cc_flags = "-std=c99 -fPIC -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1"
    cc_optim = "-O3 -ftz -IPF_ftlacc- -IPF_fma -fno-alias"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_optim = "-openmp -openmp_report0"
    omp_debug = "-openmp -openmp_report0"
    omp_libs = ['guide', 'pthread']
    pedantic = ""
    fatalwarning = ""  # Switch to turn warnings into errors
    sysheaderopt = ""
elif env["CC"][:3] == "gcc":
    # GNU C on any system
    cc_flags = "-pedantic -Wall -fPIC -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing"
    # the long long warning occurs on the Mac
    cc_optim = "-O3"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_optim = "-fopenmp"
    omp_debug = "-fopenmp"
    omp_libs = []
    pedantic = "-pedantic-errors -Wno-long-long"
    fatalwarning = "-Werror"
    sysheaderopt = "-isystem "
elif env["CC"] == "cl":
    # Microsoft Visual C on Windows
    cc_flags = "/FD /EHsc /GR /wd4068 -D_USE_MATH_DEFINES -DDLL_NETCDF"
    cc_optim = "/O2 /Op /MT /W3"
    cc_debug = "/Od /RTC1 /MTd /ZI -DBOUNDS_CHECK"
    omp_optim = ""
    omp_debug = ""
    omp_libs = []
    pedantic = ""
    fatalwarning = ""
    sysheaderopt = ""
elif env["CC"] == "icl":
    # Intel C on Windows, see windows_intelc_options.py for a start
    pedantic = ""
    fatalwarning = ""
    sysheaderopt = ""


# If not specified in hostname_options.py then set them here
if env["cc_flags"] == "-DEFAULT_1": env['cc_flags'] = cc_flags
if env["cc_optim"] == "-DEFAULT_2": env['cc_optim'] = cc_optim
if env["cc_debug"] == "-DEFAULT_3": env['cc_debug'] = cc_debug
if env["omp_optim"] == "-DEFAULT_4": env['omp_optim'] = omp_optim
if env["omp_debug"] == "-DEFAULT_5": env['omp_debug'] = omp_debug
if env["omp_libs"] == "-DEFAULT_6": env['omp_libs'] = omp_libs
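
# Note: the comparisons above rely on the "-DEFAULT_n" sentinel strings declared
# with the options, so the platform defaults only take effect when neither the
# command line nor the host options file supplied a value.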

# set up the autolazy values
if env['forcelazy'] != "leave_alone":
    if env['forcelazy'] == 'on':
        env.Append(CPPDEFINES=['FAUTOLAZYON'])
    else:
        if env['forcelazy'] == 'off':
            env.Append(CPPDEFINES=['FAUTOLAZYOFF'])

# set up the collective resolve values
if env['forcecollres'] != "leave_alone":
    print env['forcecollres']
    if env['forcecollres'] == 'on':
        env.Append(CPPDEFINES=['FRESCOLLECTON'])
    else:
        if env['forcecollres'] == 'off':
            env.Append(CPPDEFINES=['FRESCOLLECTOFF'])


if env['iknowwhatimdoing']:
    env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])

# OpenMP is disabled if useopenmp=no or both variables omp_optim and omp_debug are empty
if not env["useopenmp"]:
    env['omp_optim'] = ""
    env['omp_debug'] = ""
    env['omp_libs'] = []

if env['omp_optim'] == "" and env['omp_debug'] == "": env["useopenmp"] = 0

# Windows doesn't use LD_LIBRARY_PATH but PATH instead
if IS_WINDOWS_PLATFORM:
    LD_LIBRARY_PATH_KEY = 'PATH'
    env['ENV']['LD_LIBRARY_PATH'] = ''
else:
    LD_LIBRARY_PATH_KEY = 'LD_LIBRARY_PATH'
############ Copy environment variables into scons env #########

try: env['ENV']['OMP_NUM_THREADS'] = os.environ['OMP_NUM_THREADS']
except KeyError: env['ENV']['OMP_NUM_THREADS'] = 1

try: env['ENV']['ESCRIPT_NUM_THREADS'] = os.environ['ESCRIPT_NUM_THREADS']
except KeyError: pass

try: env['ENV']['ESCRIPT_NUM_PROCS'] = os.environ['ESCRIPT_NUM_PROCS']
except KeyError: env['ENV']['ESCRIPT_NUM_PROCS'] = 1

try: env['ENV']['ESCRIPT_NUM_NODES'] = os.environ['ESCRIPT_NUM_NODES']
except KeyError: env['ENV']['ESCRIPT_NUM_NODES'] = 1

try: env['ENV']['ESCRIPT_HOSTFILE'] = os.environ['ESCRIPT_HOSTFILE']
except KeyError: pass

try: env['ENV']['PATH'] = os.environ['PATH']
except KeyError: pass

try: env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
except KeyError: pass

try: env['ENV']['C_INCLUDE_PATH'] = os.environ['C_INCLUDE_PATH']
except KeyError: pass

try: env['ENV']['CPLUS_INCLUDE_PATH'] = os.environ['CPLUS_INCLUDE_PATH']
except KeyError: pass

try: env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ['LD_LIBRARY_PATH'])
except KeyError: pass

try: env['ENV']['LIBRARY_PATH'] = os.environ['LIBRARY_PATH']
except KeyError: pass

try: env['ENV']['DISPLAY'] = os.environ['DISPLAY']
except KeyError: pass

try: env['ENV']['XAUTHORITY'] = os.environ['XAUTHORITY']
except KeyError: pass

try: env['ENV']['HOME'] = os.environ['HOME']
except KeyError: pass

# Configure for test suite


env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
env.PrependENVPath('PYTHONPATH', prefix)
env['ENV']['ESCRIPT_ROOT'] = prefix

############ Set up paths for Configure() ######################

# Make a copy of an environment
# Use env.Clone if available, but fall back on env.Copy for older versions of scons
def clone_env(env):
    if 'Clone' in dir(env): return env.Clone()  # scons-0.98
    else: return env.Copy()                     # scons-0.96

# Add cc option -I<Escript>/trunk/include
env.Append(CPPPATH = [Dir('include')])

# Add cc option -L<Escript>/trunk/lib
env.Append(LIBPATH = [Dir(env['libinstall'])])

if env['cc_extra'] != '': env.Append(CFLAGS = env['cc_extra'])
if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra'])

if env['usepedantic']: env.Append(CCFLAGS = pedantic)

# MS Windows
if IS_WINDOWS_PLATFORM:
    env.AppendENVPath('PATH', [env['boost_lib_path']])
    env.AppendENVPath('PATH', [env['libinstall']])
    if not env['share_esysUtils']:
        env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
    if not env['share_paso']:
        env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])

    if env['usenetcdf']:
        env.AppendENVPath('PATH', [env['netCDF_lib_path']])

env.Append(ARFLAGS = env['ar_flags'])

# Get the global Subversion revision number for the getVersion() method
try:
    global_revision = os.popen("svnversion -n .").read()
    global_revision = re.sub(":.*", "", global_revision)
    global_revision = re.sub("[^0-9]", "", global_revision)
except:
    global_revision = "-1"
if global_revision == "": global_revision = "-2"
env.Append(CPPDEFINES = ["SVN_VERSION="+global_revision])

############ numpy (required) ###############################

try:
    from numpy import identity
except ImportError:
    print "Cannot import numpy, you need to set your PYTHONPATH"
    sys.exit(1)

############ C compiler (required) #############################

# Create a Configure() environment for checking existence of required libraries and headers
conf = Configure(clone_env(env))

# Test that the compiler is working
if not conf.CheckFunc('printf'):
    print "Cannot run C compiler '%s' (or libc is missing)" % (env['CC'])
    sys.exit(1)

if conf.CheckFunc('gethostname'):
    conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])

############ python libraries (required) #######################


if sysheaderopt != "":
    conf.env.Append(CCFLAGS=sysheaderopt+env['python_path'])
else:
    conf.env.AppendUnique(CPPPATH = [env['python_path']])

conf.env.AppendUnique(LIBPATH = [env['python_lib_path']])
conf.env.AppendUnique(LIBS = [env['python_libs']])

conf.env.PrependENVPath('PYTHONPATH', prefix)
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['python_lib_path'])  # The wrapper script needs to find these libs
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if not conf.CheckCHeader('Python.h'):
    print "Cannot find python include files (tried 'Python.h' in directory %s)" % (env['python_path'])
    sys.exit(1)
if not conf.CheckFunc('Py_Exit'):
    print "Cannot find python library method Py_Exit (tried lib %s in directory %s)" % (env['python_libs'], env['python_lib_path'])
    sys.exit(1)

############ boost (required) ##################################

if sysheaderopt != "":
    # This is required because we can't -isystem /usr/include because it breaks std includes
    if os.path.normpath(env['boost_path']) == "/usr/include":
        conf.env.Append(CCFLAGS=sysheaderopt+os.path.join(env['boost_path'],'boost'))
    else:
        conf.env.Append(CCFLAGS=sysheaderopt+env['boost_path'])
else:
    conf.env.AppendUnique(CPPPATH = [env['boost_path']])

conf.env.AppendUnique(LIBPATH = [env['boost_lib_path']])
conf.env.AppendUnique(LIBS = [env['boost_libs']])

conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['boost_lib_path'])  # The wrapper script needs to find these libs
# ensure that our path entries remain at the front
conf.env.PrependENVPath('PYTHONPATH', prefix)
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])


# Yep, we still can't figure this one out. - working on it.
if not IS_WINDOWS_PLATFORM:
    if not conf.CheckCXXHeader('boost/python.hpp'):
        print "Cannot find boost include files (tried boost/python.hpp in directory %s)" % (env['boost_path'])
        sys.exit(1)

    if not conf.CheckFunc('PyObject_SetAttr'):
        print "Cannot find boost library method PyObject_SetAttr (tried method PyObject_SetAttr in library %s in directory %s)" % (env['boost_libs'], env['boost_lib_path'])
        sys.exit(1)


# Commit changes to environment
env = conf.Finish()

############ VTK (optional) ####################################

if env['usevtk']:
    try:
        import vtk
        env['usevtk'] = 1
    except ImportError:
        env['usevtk'] = 0

# Add VTK to environment env if it was found
if env['usevtk']:
    env.Append(CPPDEFINES = ['USE_VTK'])

############ NetCDF (optional) #################################

conf = Configure(clone_env(env))

if env['usenetcdf']:
    conf.env.AppendUnique(CPPPATH = [env['netCDF_path']])
    conf.env.AppendUnique(LIBPATH = [env['netCDF_lib_path']])
    conf.env.AppendUnique(LIBS = [env['netCDF_libs']])
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['netCDF_lib_path'])  # The wrapper script needs to find these libs
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env['usenetcdf'] and not conf.CheckCHeader('netcdf.h'): env['usenetcdf'] = 0
if env['usenetcdf'] and not conf.CheckFunc('nc_open'): env['usenetcdf'] = 0

# Add NetCDF to environment env if it was found
if env['usenetcdf']:
    env = conf.Finish()
    env.Append(CPPDEFINES = ['USE_NETCDF'])
else:
    conf.Finish()

############ PAPI (optional) ###################################

# Start a new configure environment that reflects what we've already found
conf = Configure(clone_env(env))

if env['usepapi']:
    conf.env.AppendUnique(CPPPATH = [env['papi_path']])
    conf.env.AppendUnique(LIBPATH = [env['papi_lib_path']])
    conf.env.AppendUnique(LIBS = [env['papi_libs']])
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['papi_lib_path'])  # The wrapper script needs to find these libs
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env['usepapi'] and not conf.CheckCHeader('papi.h'): env['usepapi'] = 0
if env['usepapi'] and not conf.CheckFunc('PAPI_start_counters'): env['usepapi'] = 0

# Add PAPI to environment env if it was found
if env['usepapi']:
    env = conf.Finish()
    env.Append(CPPDEFINES = ['BLOCKPAPI'])
else:
    conf.Finish()

############ MKL (optional) ####################################

# Start a new configure environment that reflects what we've already found
conf = Configure(clone_env(env))

if env['usemkl']:
    conf.env.AppendUnique(CPPPATH = [env['mkl_path']])
    conf.env.AppendUnique(LIBPATH = [env['mkl_lib_path']])
    conf.env.AppendUnique(LIBS = [env['mkl_libs']])
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['mkl_lib_path'])  # The wrapper script needs to find these libs
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env['usemkl'] and not conf.CheckCHeader('mkl_solver.h'): env['usemkl'] = 0
if env['usemkl'] and not conf.CheckFunc('pardiso'): env['usemkl'] = 0


# Add MKL to environment env if it was found
if env['usemkl']:
    env = conf.Finish()
    env.Append(CPPDEFINES = ['MKL'])
else:
    conf.Finish()

############ UMFPACK (optional) ################################

# Start a new configure environment that reflects what we've already found
conf = Configure(clone_env(env))

if env['useumfpack']:
    conf.env.AppendUnique(CPPPATH = [env['ufc_path']])
    conf.env.AppendUnique(CPPPATH = [env['umf_path']])
    conf.env.AppendUnique(LIBPATH = [env['umf_lib_path']])
    conf.env.AppendUnique(LIBS = [env['umf_libs']])
    conf.env.AppendUnique(CPPPATH = [env['amd_path']])
    conf.env.AppendUnique(LIBPATH = [env['amd_lib_path']])
    conf.env.AppendUnique(LIBS = [env['amd_libs']])
    conf.env.AppendUnique(CPPPATH = [env['blas_path']])
    conf.env.AppendUnique(LIBPATH = [env['blas_lib_path']])
    conf.env.AppendUnique(LIBS = [env['blas_libs']])
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['umf_lib_path'])   # The wrapper script needs to find these libs
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['amd_lib_path'])   # The wrapper script needs to find these libs
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['blas_lib_path'])  # The wrapper script needs to find these libs
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env['useumfpack'] and not conf.CheckCHeader('umfpack.h'): env['useumfpack'] = 0
if env['useumfpack'] and not conf.CheckFunc('umfpack_di_symbolic'): env['useumfpack'] = 0
# if env['useumfpack'] and not conf.CheckFunc('daxpy'): env['useumfpack'] = 0 # this does not work on shake73?

# Add UMFPACK to environment env if it was found
if env['useumfpack']:
    env = conf.Finish()
    env.Append(CPPDEFINES = ['UMFPACK'])
else:
    conf.Finish()

############ Silo (optional) ###################################

if env['usesilo']:
    conf = Configure(clone_env(env))
    conf.env.AppendUnique(CPPPATH = [env['silo_path']])
    conf.env.AppendUnique(LIBPATH = [env['silo_lib_path']])
    conf.env.AppendUnique(LIBS = [env['silo_libs']])
    if not conf.CheckCHeader('silo.h'): env['usesilo'] = 0
    if not conf.CheckFunc('DBMkDir'): env['usesilo'] = 0
    conf.Finish()

# Add the path to Silo to environment env if it was found.
# Note that we do not add the libs since they are only needed for the
# weipa library and tools.
if env['usesilo']:
    env.AppendUnique(CPPPATH = [env['silo_path']])
    env.AppendUnique(LIBPATH = [env['silo_lib_path']])

########### Lapack (optional) ##################################

if env['uselapack']:
    env.AppendUnique(CPPDEFINES='USE_LAPACK')
    env.AppendUnique(CPPPATH = [env['lapack_path']])
    env.AppendUnique(LIBPATH = [env['lapack_lib_path']])

    env.Append(LIBPATH = '/usr/lib/atlas')
    env.Append(LIBS = [env['lapack_libs']])
    if env['lapack_type'] == 'mkl':
        if not env['usemkl']:
            env['uselapack'] = 0
            print "mkl_lapack requires mkl"
        else:
            env.AppendUnique(CPPDEFINES='MKL_LAPACK')


############ Add the compiler flags ############################

# Enable debug by choosing either cc_debug or cc_optim
if env['usedebug']:
    env.Append(CCFLAGS = env['cc_debug'])
    env.Append(CCFLAGS = env['omp_debug'])
else:
    env.Append(CCFLAGS = env['cc_optim'])
    env.Append(CCFLAGS = env['omp_optim'])

# Always use cc_flags
env.Append(CCFLAGS = env['cc_flags'])
env.Append(LIBS = [env['omp_libs']])

############ Add some custom builders ##########################

py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
env.Append(BUILDERS = {'PyCompile' : py_builder});

runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});

runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});

epstopdfbuilder = Builder(action = scons_extensions.eps2pdf, suffix=".pdf", src_suffix=".eps", single_source=True)
env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
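
# Usage sketch for these builders (target names are purely illustrative): an
# SConscript can turn a test program or a python test script into a ".passed"
# marker file, e.g.
#   env.RunUnitTest('mytest')           # runs the compiled test, creates mytest.passed
#   env.RunPyUnitTest('run_mytest.py')  # runs the script, creates run_mytest.passed
# The actual actions are defined in scons_extensions.py.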

############ MPI (optional) ####################################
if not env['usempi']: env['mpi_flavour'] = 'none'

# Create a modified environment for MPI programs (identical to env if usempi=no)
env_mpi = clone_env(env)

# Start a new configure environment that reflects what we've already found
conf = Configure(clone_env(env_mpi))

if env_mpi['usempi']:
    VALID_MPIs = [ "MPT", "MPICH", "MPICH2", "OPENMPI", "INTELMPI" ]
    if not env_mpi['mpi_flavour'] in VALID_MPIs:
        raise ValueError, "MPI is enabled but mpi_flavour = %s is not a valid key from %s." % (env_mpi['mpi_flavour'], VALID_MPIs)
    conf.env.AppendUnique(CPPPATH = [env_mpi['mpi_path']])
    conf.env.AppendUnique(LIBPATH = [env_mpi['mpi_lib_path']])
    conf.env.AppendUnique(LIBS = [env_mpi['mpi_libs']])
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['mpi_lib_path'])  # The wrapper script needs to find these libs
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env_mpi['usempi'] and not conf.CheckCHeader('mpi.h'): env_mpi['usempi'] = 0
# if env_mpi['usempi'] and not conf.CheckFunc('MPI_Init'): env_mpi['usempi'] = 0

# Add MPI to environment env_mpi if it was found
if env_mpi['usempi']:
    env_mpi = conf.Finish()
    env_mpi.Append(CPPDEFINES = ['PASO_MPI', 'MPI_NO_CPPBIND', env_mpi['MPICH_IGNORE_CXX_SEEK']])
    # NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined!
    # On the other hand MPT and OpenMPI don't define the latter so we have to
    # do that here
    if env['usenetcdf'] and env_mpi['mpi_flavour'] in ["MPT", "OPENMPI"]:
        env_mpi.Append(CPPDEFINES = ['MPI_INCLUDED'])
else:
    conf.Finish()

env['usempi'] = env_mpi['usempi']

############ ParMETIS (optional) ###############################

# Start a new configure environment that reflects what we've already found
conf = Configure(clone_env(env_mpi))

if not env_mpi['usempi']: env_mpi['useparmetis'] = 0

if env_mpi['useparmetis']:
    conf.env.AppendUnique(CPPPATH = [env_mpi['parmetis_path']])
    conf.env.AppendUnique(LIBPATH = [env_mpi['parmetis_lib_path']])
    conf.env.AppendUnique(LIBS = [env_mpi['parmetis_libs']])
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['parmetis_lib_path'])  # The wrapper script needs to find these libs
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env_mpi['useparmetis'] and not conf.CheckCHeader('parmetis.h'): env_mpi['useparmetis'] = 0
if env_mpi['useparmetis'] and not conf.CheckFunc('ParMETIS_V3_PartGeomKway'): env_mpi['useparmetis'] = 0

# Add ParMETIS to environment env_mpi if it was found
if env_mpi['useparmetis']:
    env_mpi = conf.Finish()
    env_mpi.Append(CPPDEFINES = ['USE_PARMETIS'])
else:
    conf.Finish()

env['useparmetis'] = env_mpi['useparmetis']

############ Summarize our environment #########################

print ""
print "Summary of configuration (see ./config.log for information)"
print " Using python libraries"
print " Using numpy"
print " Using boost"
if env['usenetcdf']: print " Using NetCDF"
else: print " Not using NetCDF"
if env['usevtk']: print " Using VTK"
else: print " Not using VTK"
if env['usemkl']: print " Using MKL"
else: print " Not using MKL"
if env['useumfpack']: print " Using UMFPACK"
else: print " Not using UMFPACK"
if env['usesilo']: print " Using Silo"
else: print " Not using Silo"
if env['useopenmp']: print " Using OpenMP"
else: print " Not using OpenMP"
if env['usempi']: print " Using MPI (flavour = %s)" % env['mpi_flavour']
else: print " Not using MPI"
if env['useparmetis']: print " Using ParMETIS"
else: print " Not using ParMETIS (requires MPI)"
if env['usepapi']: print " Using PAPI"
else: print " Not using PAPI"
if env['uselapack']: print " Using Lapack"
else: print " Not using Lapack"
if env['usedebug']: print " Compiling for debug"
else: print " Not compiling for debug"
print " Installing in", prefix
if ((fatalwarning != "") and (env['usewarnings'])): print " Treating warnings as errors"
else: print " Not treating warnings as errors"
print ""

############ Delete option-dependent files #####################

Execute(Delete(os.path.join(env['libinstall'], "Compiled.with.debug")))
Execute(Delete(os.path.join(env['libinstall'], "Compiled.with.mpi")))
Execute(Delete(os.path.join(env['libinstall'], "Compiled.with.openmp")))
Execute(Delete(os.path.join(env['libinstall'], "pyversion")))
Execute(Delete(os.path.join(env['libinstall'], "buildvars")))
if not env['usempi']: Execute(Delete(os.path.join(env['libinstall'], "pythonMPI")))


############ Build the subdirectories ##########################

if env['usepedantic']: env_mpi.Append(CCFLAGS = pedantic)


from grouptest import *

TestGroups = []

dodgy_env = clone_env(env_mpi)  # Environment without pedantic options

############ Now we switch on Warnings as errors ###############

# This needs to be done after configuration because the scons test files have warnings in them.

if ((fatalwarning != "") and (env['usewarnings'])):
    env.Append(CCFLAGS = fatalwarning)
    env_mpi.Append(CCFLAGS = fatalwarning)


Export(
  ["env",
   "env_mpi",
   "clone_env",
   "dodgy_env",
   "IS_WINDOWS_PLATFORM",
   "TestGroups",
   "CallSConscript",
   "cantusevariantdir"
  ]
)
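
# Everything listed in Export() above becomes available to the SConscript files
# called below through SCons' Import(), e.g. a call such as Import('env env_mpi').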

CallSConscript(env, dirs = ['tools/CppUnitTest/src'], variant_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0)
CallSConscript(env, dirs = ['tools/escriptconvert'], variant_dir='build/$PLATFORM/tools/escriptconvert', duplicate=0)
CallSConscript(env, dirs = ['paso/src'], variant_dir='build/$PLATFORM/paso', duplicate=0)
CallSConscript(env, dirs = ['weipa/src'], variant_dir='build/$PLATFORM/weipa', duplicate=0)
CallSConscript(env, dirs = ['escript/src'], variant_dir='build/$PLATFORM/escript', duplicate=0)
CallSConscript(env, dirs = ['esysUtils/src'], variant_dir='build/$PLATFORM/esysUtils', duplicate=0)
CallSConscript(env, dirs = ['finley/src'], variant_dir='build/$PLATFORM/finley', duplicate=0)
CallSConscript(env, dirs = ['modellib/py_src'], variant_dir='build/$PLATFORM/modellib', duplicate=0)
CallSConscript(env, dirs = ['doc'], variant_dir='build/$PLATFORM/doc', duplicate=0)
CallSConscript(env, dirs = ['pyvisi/py_src'], variant_dir='build/$PLATFORM/pyvisi', duplicate=0)
CallSConscript(env, dirs = ['pycad/py_src'], variant_dir='build/$PLATFORM/pycad', duplicate=0)
CallSConscript(env, dirs = ['pythonMPI/src'], variant_dir='build/$PLATFORM/pythonMPI', duplicate=0)
CallSConscript(env, dirs = ['scripts'], variant_dir='build/$PLATFORM/scripts', duplicate=0)
CallSConscript(env, dirs = ['paso/profiling'], variant_dir='build/$PLATFORM/paso/profiling', duplicate=0)


############ Remember what optimizations we used ###############

remember_list = []

if env['usedebug']:
    remember_list += env.Command(os.path.join(env['libinstall'], "Compiled.with.debug"), None, Touch('$TARGET'))

if env['usempi']:
    remember_list += env.Command(os.path.join(env['libinstall'], "Compiled.with.mpi"), None, Touch('$TARGET'))

if env['useopenmp']:
    remember_list += env.Command(os.path.join(env['libinstall'], "Compiled.with.openmp"), None, Touch('$TARGET'))

env.Alias('remember_options', remember_list)


############### Record python interpreter version ##############

if not IS_WINDOWS_PLATFORM:

    versionstring = "Python "+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])
    if sys.version_info[4] > 0: versionstring += "rc%s" % sys.version_info[4]
    os.system("echo "+versionstring+" > "+os.path.join(env['libinstall'], "pyversion"))

############## Populate the buildvars file #####################

buildvars = open(os.path.join(env['libinstall'], 'buildvars'), 'w')
buildvars.write('python='+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+'\n')

# Find the boost version by extracting it from version.hpp
boosthpp = open(os.path.join(env['boost_path'], 'boost', 'version.hpp'))
boostversion = 'unknown'
try:
    for line in boosthpp:
        ver = re.match(r'#define BOOST_VERSION (\d+)', line)
        if ver:
            boostversion = ver.group(1)
except StopIteration:
    pass
buildvars.write("boost="+boostversion+"\n")
buildvars.write("svn_revision="+str(global_revision)+"\n")
out = "usedebug="
if env['usedebug']:
    out += "y"
else:
    out += "n"
out += "\nusempi="
if env['usempi']:
    out += "y"
else:
    out += "n"
out += "\nuseopenmp="
if env['useopenmp']:
    out += "y"
else:
    out += "n"
buildvars.write(out+"\n")
buildvars.write("mpi_flavour="+env['mpi_flavour']+'\n')
out = "lapack="
if env['uselapack']:
    out += "y"
else:
    out += "n"
out += "\nsilo="
if env['usesilo']:
    out += "y"
else:
    out += "n"
buildvars.write(out+"\n")
buildvars.close()
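
# For reference, the resulting buildvars file is a plain key=value listing; its
# shape is roughly as follows (the values here are made up):
#   python=2.6.2
#   boost=104000
#   svn_revision=3079
#   usedebug=n
#   usempi=y
#   useopenmp=y
#   mpi_flavour=OPENMPI
#   lapack=n
#   silo=y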


############ Targets to build and install libraries ############

target_init = env.Command(env['pyinstall']+'/__init__.py', None, Touch('$TARGET'))
env.Alias('target_init', [target_init])

# The headers have to be installed prior to build in order to satisfy #include <paso/Common.h>
env.Alias('build_esysUtils', ['target_install_esysUtils_headers', 'target_esysUtils_a'])
env.Alias('install_esysUtils', ['build_esysUtils', 'target_install_esysUtils_a'])

env.Alias('build_paso', ['target_install_paso_headers', 'target_paso_a'])
env.Alias('install_paso', ['build_paso', 'target_install_paso_a'])

env.Alias('build_weipa', ['target_install_weipa_headers', 'target_weipa_so', 'target_weipacpp_so'])
env.Alias('install_weipa', ['build_weipa', 'target_install_weipa_so', 'target_install_weipacpp_so', 'target_install_weipa_py'])

env.Alias('build_escriptreader', ['target_install_weipa_headers', 'target_escriptreader_a'])
env.Alias('install_escriptreader', ['build_escriptreader', 'target_install_escriptreader_a'])

env.Alias('build_escript', ['target_install_escript_headers', 'target_escript_so', 'target_escriptcpp_so'])
env.Alias('install_escript', ['build_escript', 'target_install_escript_so', 'target_install_escriptcpp_so', 'target_install_escript_py'])

env.Alias('build_finley', ['target_install_finley_headers', 'target_finley_so', 'target_finleycpp_so'])
env.Alias('install_finley', ['build_finley', 'target_install_finley_so', 'target_install_finleycpp_so', 'target_install_finley_py'])

# Now gather all the above into a couple of easy targets: build_all and install_all
build_all_list = []
build_all_list += ['build_esysUtils']
build_all_list += ['build_paso']
build_all_list += ['build_weipa']
build_all_list += ['build_escript']
build_all_list += ['build_finley']
if env['usempi']: build_all_list += ['target_pythonMPI_exe']
#if not IS_WINDOWS_PLATFORM: build_all_list += ['target_escript_wrapper']
build_all_list += ['target_escriptconvert']
env.Alias('build_all', build_all_list)

install_all_list = []
install_all_list += ['target_init']
install_all_list += ['install_esysUtils']
install_all_list += ['install_paso']
install_all_list += ['install_weipa']
install_all_list += ['install_escript']
install_all_list += ['install_finley']
install_all_list += ['target_install_pyvisi_py']
install_all_list += ['target_install_modellib_py']
install_all_list += ['target_install_pycad_py']
if env['usempi']: install_all_list += ['target_install_pythonMPI_exe']
#if not IS_WINDOWS_PLATFORM: install_all_list += ['target_install_escript_wrapper']
if env['usesilo']: install_all_list += ['target_install_escriptconvert']
install_all_list += ['remember_options']
env.Alias('install_all', install_all_list)

# Default target is install
env.Default('install_all')

############ Targets to build and run the test suite ###########

env.Alias('build_cppunittest', ['target_install_cppunittest_headers', 'target_cppunittest_a'])
env.Alias('install_cppunittest', ['build_cppunittest', 'target_install_cppunittest_a'])
env.Alias('run_tests', ['install_all', 'target_install_cppunittest_a'])
env.Alias('all_tests', ['install_all', 'target_install_cppunittest_a', 'run_tests', 'py_tests'])
env.Alias('build_full', ['install_all', 'build_tests', 'build_py_tests'])


############ Targets to build the documentation ################

env.Alias('api_epydoc', 'install_all')

env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'guide_pdf', 'guide_html', 'install_pdf', 'cookbook_pdf'])

build_platform = os.name

if not IS_WINDOWS_PLATFORM:
    try:
        utest = open("utest.sh", "w")
        # Sometimes Mac python says it is posix
        if (build_platform == 'posix') and platform.system() == "Darwin":
            build_platform = 'darwin'
        utest.write(GroupTest.makeHeader(build_platform))
        for tests in TestGroups:
            utest.write(tests.makeString())
        utest.close()
        os.chmod("utest.sh", stat.S_IRWXU|stat.S_IRGRP|stat.S_IXGRP|stat.S_IROTH|stat.S_IXOTH)
        print "utest.sh written"
    except IOError:
        print "Error attempting to write unittests file."
        sys.exit(1)

    # Make sure that the escript wrapper is in place
    if not os.path.isfile(os.path.join(env['bininstall'], 'escript')):
        print "Copying escript wrapper"
        shutil.copy("bin/escript", os.path.join(env['bininstall'], 'escript'))

############ Targets to build PasoTests suite ################

env.Alias('build_PasoTests', 'build/'+build_platform+'/paso/profiling/PasoTests')

env.Alias('release_prep', ['docs', 'install_all'])
