Contents of /trunk/SConstruct
Revision 3097 - Fri Aug 20 04:59:12 2010 UTC by gross
File size: 41071 byte(s)
Log message: some modifications to the GaussSeidel

########################################################
#
# Copyright (c) 2003-2010 by University of Queensland
# Earth Systems Science Computational Center (ESSCC)
# http://www.uq.edu.au/esscc
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
########################################################


EnsureSConsVersion(0,96,91)
EnsurePythonVersion(2,3)

import sys, os, re, socket, platform, stat
# For copy()
import shutil

# Add our extensions
if os.path.isdir('scons'): sys.path.append('scons')
import scons_extensions

# Use /usr/lib64 if available, else /usr/lib
usr_lib = '/usr/lib'
if os.path.isfile('/usr/lib64/libc.so'): usr_lib = '/usr/lib64'

# The string python2.4 or python2.5
python_version = 'python%s.%s' % (sys.version_info[0], sys.version_info[1])

# MS Windows support, many thanks to PH
IS_WINDOWS_PLATFORM = (os.name == "nt")

prefix = ARGUMENTS.get('prefix', Dir('#.').abspath)

# Holds names of variables from the calling environment which need to be passed to tools
env_export=[]

# Determine where to read options from, in order of preference:
#   1. the command line (options_file=...)
#   2. scons/<hostname>_options.py
#   3. the host's name as part of a cluster
options_file=ARGUMENTS.get('options_file', None)
effective_hostname=socket.gethostname().split('.')[0]
if not options_file:
    mangledhostname = re.sub("[^0-9a-zA-Z]", "_", effective_hostname)
    options_file = os.path.join("scons", mangledhostname+"_options.py")
    # If there is no options file with that name see if there is a substitute
    if not os.path.isfile(options_file):
        effective_hostname = scons_extensions.effectiveName(effective_hostname)
        mangledhostname = re.sub("[^0-9a-zA-Z]", "_", effective_hostname)
        options_file = os.path.join("scons", mangledhostname+"_options.py")

if not os.path.isfile(options_file):
    print "Options file not found (expected '%s')" % options_file
    options_file = False
else:
    print "Options file is", options_file
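# As an illustration only (no such file ships with the source), a minimal
# scons/<hostname>_options.py is just a set of Python assignments to the
# option names registered with adder() below, e.g.:
#
#   cc_optim = '-O3'
#   useopenmp = 'yes'
#   boost_path = '/opt/boost/include'
#   boost_lib_path = '/opt/boost/lib'
#   boost_libs = ['boost_python']
#
# The values above are made-up examples; anything not set here falls back to
# the defaults declared below.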

# Does our scons support the newer Variables class or do we need to use Options?

try:
    dummyvar=Variables
    opts = Variables(options_file, ARGUMENTS)
    adder = opts.AddVariables
except:
    opts = Options(options_file, ARGUMENTS)
    adder = opts.AddOptions
    BoolVariable = BoolOption

############ Load build options ################################

adder(
#opts.AddOptions(
# Where to install esys stuff
  ('prefix', 'where everything will be installed', Dir('#.').abspath),
  ('incinstall', 'where the esys headers will be installed', os.path.join(Dir('#.').abspath,'include')),
  ('bininstall', 'where the esys binaries will be installed', os.path.join(prefix,'bin')),
  ('libinstall', 'where the esys libraries will be installed', os.path.join(prefix,'lib')),
  ('pyinstall', 'where the esys python modules will be installed', os.path.join(prefix,'esys')),
# Compilation options
  BoolVariable('dodebug', 'For backwards compatibility', 'no'),
  BoolVariable('usedebug', 'Do you want a debug build?', 'no'),
  BoolVariable('usevtk', 'Do you want to use VTK?', 'yes'),
  ('options_file', 'File of paths/options. Default: scons/<hostname>_options.py', options_file),
  ('cc', 'path to C compiler', 'DEFAULT'),
  ('cxx', 'path to C++ compiler', 'DEFAULT'),
  ('win_cc_name', 'windows C compiler name if needed', 'msvc'),
# The strings -DDEFAULT_ get replaced by scons/<hostname>_options.py or by defaults below
  ('cc_flags', 'C/C++ compiler flags to use', '-DEFAULT_1'),
  ('cc_optim', 'C/C++ optimization flags to use', '-DEFAULT_2'),
  ('cc_debug', 'C/C++ debug flags to use', '-DEFAULT_3'),
  ('omp_optim', 'OpenMP compiler flags to use (Release build)', '-DEFAULT_4'),
  ('omp_debug', 'OpenMP compiler flags to use (Debug build)', '-DEFAULT_5'),
  ('omp_libs', 'OpenMP compiler libraries to link with', '-DEFAULT_6'),
  ('cc_extra', 'Extra C compiler flags', ''),
  ('cxx_extra', 'Extra C++ compiler flags', ''),
  ('ld_extra', 'Extra linker flags', ''),
  ('sys_libs', 'System libraries to link with', []),
  ('ar_flags', 'Static library archiver flags to use', ''),
  BoolVariable('useopenmp', 'Compile parallel version using OpenMP', 'no'),
  BoolVariable('usepedantic', 'Compile with -pedantic if using gcc', 'no'),
  BoolVariable('usewarnings','Compile with warnings as errors if using gcc','yes'),
  ('forcelazy','for testing use only - set the default value for autolazy','leave_alone'),
  ('forcecollres','for testing use only - set the default value for force resolving collective ops','leave_alone'),
# Python
  ('python_path', 'Path to Python includes', '/usr/include/'+python_version),
  ('python_lib_path', 'Path to Python libs', usr_lib),
  ('python_libs', 'Python libraries to link with', [python_version]),
  ('python_cmd', 'Python command', 'python'),
# Boost
  ('boost_path', 'Path to Boost includes', '/usr/include'),
  ('boost_lib_path', 'Path to Boost libs', usr_lib),
  ('boost_libs', 'Boost libraries to link with', ['boost_python']),
# NetCDF
  BoolVariable('usenetcdf', 'switch on/off the usage of netCDF', 'yes'),
  ('netCDF_path', 'Path to netCDF includes', '/usr/include'),
  ('netCDF_lib_path', 'Path to netCDF libs', usr_lib),
  ('netCDF_libs', 'netCDF C++ libraries to link with', ['netcdf_c++', 'netcdf']),
# MPI
  BoolVariable('useMPI', 'For backwards compatibility', 'no'),
  BoolVariable('usempi', 'Compile parallel version using MPI', 'no'),
  ('MPICH_IGNORE_CXX_SEEK', 'name of macro to ignore MPI settings of C++ SEEK macro (for MPICH)' , 'MPICH_IGNORE_CXX_SEEK'),
  ('mpi_path', 'Path to MPI includes', '/usr/include'),
  ('mpi_run', 'mpirun name' , 'mpiexec -np 1'),
  ('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)', usr_lib),
  ('mpi_libs', 'MPI libraries to link with (needs to be shared!)', []),
  ('mpi_flavour','Type of MPI execution environment','none'),
# ParMETIS
  BoolVariable('useparmetis', 'Compile parallel version using ParMETIS', 'yes'),
  ('parmetis_path', 'Path to ParMETIS includes', '/usr/include'),
  ('parmetis_lib_path', 'Path to ParMETIS library', usr_lib),
  ('parmetis_libs', 'ParMETIS library to link with', ['parmetis', 'metis']),
# PAPI
  BoolVariable('usepapi', 'switch on/off the usage of PAPI', 'no'),
  ('papi_path', 'Path to PAPI includes', '/usr/include'),
  ('papi_lib_path', 'Path to PAPI libs', usr_lib),
  ('papi_libs', 'PAPI libraries to link with', ['papi']),
  BoolVariable('papi_instrument_solver', 'use PAPI in Solver.c to instrument each iteration of the solver', False),
# MKL
  BoolVariable('usemkl', 'switch on/off the usage of MKL', 'no'),
  ('mkl_path', 'Path to MKL includes', '/sw/sdev/cmkl/10.0.2.18/include'),
  ('mkl_lib_path', 'Path to MKL libs', '/sw/sdev/cmkl/10.0.2.18/lib/em64t'),
  ('mkl_libs', 'MKL libraries to link with', ['mkl_solver', 'mkl_em64t', 'guide', 'pthread']),
# UMFPACK
  BoolVariable('useumfpack', 'switch on/off the usage of UMFPACK', 'no'),
  ('ufc_path', 'Path to UFconfig includes', '/usr/include/suitesparse'),
  ('umf_path', 'Path to UMFPACK includes', '/usr/include/suitesparse'),
  ('umf_lib_path', 'Path to UMFPACK libs', usr_lib),
  ('umf_libs', 'UMFPACK libraries to link with', ['umfpack']),
# Silo
  BoolVariable('usesilo', 'switch on/off the usage of Silo', 'yes'),
  ('silo_path', 'Path to Silo includes', '/usr/include'),
  ('silo_lib_path', 'Path to Silo libs', usr_lib),
  ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
# VisIt
  BoolVariable('usevisit', 'switch on/off the usage of the VisIt sim library', 'no'),
  ('visit_path', 'Path to VisIt libsim includes', '/usr/include'),
  ('visit_lib_path', 'Path to VisIt sim library', usr_lib),
# AMD (used by UMFPACK)
  ('amd_path', 'Path to AMD includes', '/usr/include/suitesparse'),
  ('amd_lib_path', 'Path to AMD libs', usr_lib),
  ('amd_libs', 'AMD libraries to link with', ['amd']),
# BLAS (used by UMFPACK)
  ('blas_path', 'Path to BLAS includes', '/usr/include/suitesparse'),
  ('blas_lib_path', 'Path to BLAS libs', usr_lib),
  ('blas_libs', 'BLAS libraries to link with', ['blas']),
# Lapack options
  BoolVariable('uselapack','switch on/off use of Lapack','no'),
  ('lapack_path', 'Path to Lapack includes','/usr/include'),
  ('lapack_lib_path', 'Path to Lapack libs', usr_lib),
  ('lapack_libs', 'Lapack libraries to link with', []),
  ('lapack_type', '{clapack,mkl}','clapack'),
# An option for specifying the compiler tools set (see windows branch).
  ('tools_names', 'allow control over the tools in the env setup', ['default']),
# finer control over library building, intel aggressive global optimisation
# works with dynamic libraries on windows.
  ('share_esysUtils', 'control static or dynamic esysUtils lib', False),
  ('share_paso', 'control static or dynamic paso lib', False),
  ('env_export','Environment variables to be passed to children',[]),
# To enable passing function pointers through python
  BoolVariable('iknowwhatimdoing','allow nonstandard C',False)
)
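# Any option declared above can also be given on the scons command line, where
# it overrides the value from the options file; for example (values are
# illustrative only):
#
#   scons prefix=/opt/escript usedebug=yes useopenmp=yes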


###################

# This is only to support old versions of scons which don't accept
# the variant_dir parameter (older than 0.98 I think).
# Once these are no longer an issue we can go back to a direct call
# to obj.SConscript
import SCons
vs=SCons.__version__.split('.')
cantusevariantdir=float(vs[0]+'.'+vs[1])<0.98


def CallSConscript(obj, **kw):
    if cantusevariantdir:
        if 'variant_dir' in kw:
            kw['build_dir']=kw['variant_dir']
            del kw['variant_dir']
    obj.SConscript(**kw)
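# CallSConscript is used instead of env.SConscript throughout this file, e.g.
#   CallSConscript(env, dirs = ['paso/src'], variant_dir='build/$PLATFORM/paso', duplicate=0)
# On pre-0.98 scons the 'variant_dir' keyword is transparently renamed to the
# older 'build_dir' spelling before the call is forwarded.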


############ Specify which compilers to use ####################

# intelc uses regular expressions improperly and emits a warning about
# failing to find the compilers. This warning can be safely ignored.

if IS_WINDOWS_PLATFORM:
    env = Environment(options = opts)
    env = Environment(tools = ['default'] + env['tools_names'],
                      options = opts)
else:
    if os.uname()[4]=='ia64':
        env = Environment(tools = ['default', 'intelc'], options = opts)
        if env['CXX'] == 'icpc':
            env['LINK'] = env['CXX']  # version >=9 of intel c++ compiler requires use of icpc to link in C++ runtimes (icc does not)
    else:
        env = Environment(tools = ['default'], options = opts)
        if env['tools_names']!='default':
            env=Environment(tools = ['default'] +env['tools_names'], options=opts)

# Override compiler choice if provided
if env['cc'] != 'DEFAULT': env['CC']=env['cc']
if env['cxx'] != 'DEFAULT': env['CXX']=env['cxx']

Help(opts.GenerateHelpText(env))

############ Make sure target directories exist ################

if not os.path.isdir(env['bininstall']):
    os.makedirs(env['bininstall'])
if not os.path.isdir(env['libinstall']):
    os.makedirs(env['libinstall'])
if not os.path.isdir(env['pyinstall']):
    os.makedirs(env['pyinstall'])

########## Copy required environment vars ######################

for i in env['env_export']:
    env.Append(ENV = {i:os.environ[i]})
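# For example, an options file might forward licence or module variables to
# the build tools (the variable names here are purely illustrative):
#
#   env_export = ['INTEL_LICENSE_FILE', 'MODULEPATH']
#
# Note that each listed variable must exist in the calling environment,
# otherwise the os.environ lookup above raises a KeyError.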

############ Fill in compiler options if not set above #########

# Backwards compatibility: allow dodebug=yes and useMPI=yes
if env['dodebug']: env['usedebug'] = 1
if env['useMPI']: env['usempi'] = 1

# Default compiler options (override allowed in hostname_options.py, but should not be necessary)
# For both C and C++ you get: cc_flags and either the optim flags or debug flags

sysheaderopt = ""  # how do we indicate that a header is a system header. Use "" for no action.

cc_flags = ""
cc_optim = ""
cc_debug = ""
omp_optim = ""
omp_debug = ""
omp_libs = []

if env["CC"] == "icc":
    # Intel compilers
    cc_flags = "-std=c99 -fPIC -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1"
    cc_optim = "-O3 -ftz -IPF_ftlacc- -IPF_fma -fno-alias -ip"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_optim = "-openmp -openmp_report0"
    omp_debug = "-openmp -openmp_report0"
    omp_libs = ['guide', 'pthread']
    pedantic = ""
    fatalwarning = ""  # Switch to turn warnings into errors
    sysheaderopt = ""
elif env["CC"][:3] == "gcc":
    # GNU C on any system
    cc_flags = "-pedantic -Wall -fPIC -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
    # the long long warning occurs on the Mac
    cc_optim = "-O3"
    cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
    omp_optim = "-fopenmp"
    omp_debug = "-fopenmp"
    omp_libs = []
    pedantic = "-pedantic-errors -Wno-long-long"
    fatalwarning = "-Werror"
    sysheaderopt = "-isystem "
elif env["CC"] == "cl":
    # Microsoft Visual C on Windows
    cc_flags = "/FD /EHsc /GR /wd4068 -D_USE_MATH_DEFINES -DDLL_NETCDF"
    cc_optim = "/O2 /Op /MT /W3"
    cc_debug = "/Od /RTC1 /MTd /ZI -DBOUNDS_CHECK"
    omp_optim = ""
    omp_debug = ""
    omp_libs = []
    pedantic = ""
    fatalwarning = ""
    sysheaderopt = ""
elif env["CC"] == "icl":
    # intel C on Windows, see windows_intelc_options.py for a start
    pedantic = ""
    fatalwarning = ""
    sysheaderopt = ""


# If not specified in hostname_options.py then set them here
if env["cc_flags"] == "-DEFAULT_1": env['cc_flags'] = cc_flags
if env["cc_optim"] == "-DEFAULT_2": env['cc_optim'] = cc_optim
if env["cc_debug"] == "-DEFAULT_3": env['cc_debug'] = cc_debug
if env["omp_optim"] == "-DEFAULT_4": env['omp_optim'] = omp_optim
if env["omp_debug"] == "-DEFAULT_5": env['omp_debug'] = omp_debug
if env["omp_libs"] == "-DEFAULT_6": env['omp_libs'] = omp_libs

# set up the autolazy values
if env['forcelazy'] != "leave_alone":
    if env['forcelazy'] == 'on':
        env.Append(CPPDEFINES=['FAUTOLAZYON'])
    else:
        if env['forcelazy'] == 'off':
            env.Append(CPPDEFINES=['FAUTOLAZYOFF'])

# set up the collective resolve values
if env['forcecollres'] != "leave_alone":
    print env['forcecollres']
    if env['forcecollres'] == 'on':
        env.Append(CPPDEFINES=['FRESCOLLECTON'])
    else:
        if env['forcecollres'] == 'off':
            env.Append(CPPDEFINES=['FRESCOLLECTOFF'])


if env['iknowwhatimdoing']:
    env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])

# OpenMP is disabled if useopenmp=no or both variables omp_optim and omp_debug are empty
if not env["useopenmp"]:
    env['omp_optim'] = ""
    env['omp_debug'] = ""
    env['omp_libs'] = []

if env['omp_optim'] == "" and env['omp_debug'] == "": env["useopenmp"] = 0

# Windows doesn't use LD_LIBRARY_PATH but PATH instead
if IS_WINDOWS_PLATFORM:
    LD_LIBRARY_PATH_KEY='PATH'
    env['ENV']['LD_LIBRARY_PATH']=''
else:
    LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'

############ Copy environment variables into scons env #########

try: env['ENV']['OMP_NUM_THREADS'] = os.environ['OMP_NUM_THREADS']
except KeyError: env['ENV']['OMP_NUM_THREADS'] = 1

try: env['ENV']['ESCRIPT_NUM_THREADS'] = os.environ['ESCRIPT_NUM_THREADS']
except KeyError: pass

try: env['ENV']['ESCRIPT_NUM_PROCS'] = os.environ['ESCRIPT_NUM_PROCS']
except KeyError: env['ENV']['ESCRIPT_NUM_PROCS']=1

try: env['ENV']['ESCRIPT_NUM_NODES'] = os.environ['ESCRIPT_NUM_NODES']
except KeyError: env['ENV']['ESCRIPT_NUM_NODES']=1

try: env['ENV']['ESCRIPT_HOSTFILE'] = os.environ['ESCRIPT_HOSTFILE']
except KeyError: pass

try: env['ENV']['PATH'] = os.environ['PATH']
except KeyError: pass

try: env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
except KeyError: pass

try: env['ENV']['C_INCLUDE_PATH'] = os.environ['C_INCLUDE_PATH']
except KeyError: pass

try: env['ENV']['CPLUS_INCLUDE_PATH'] = os.environ['CPLUS_INCLUDE_PATH']
except KeyError: pass

try: env.PrependENVPath(LD_LIBRARY_PATH_KEY,os.environ['LD_LIBRARY_PATH'])
except KeyError: pass

try: env['ENV']['LIBRARY_PATH'] = os.environ['LIBRARY_PATH']
except KeyError: pass

try: env['ENV']['DISPLAY'] = os.environ['DISPLAY']
except KeyError: pass

try: env['ENV']['XAUTHORITY'] = os.environ['XAUTHORITY']
except KeyError: pass

try: env['ENV']['HOME'] = os.environ['HOME']
except KeyError: pass

# Configure for test suite


env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
env.PrependENVPath('PYTHONPATH', prefix)
env['ENV']['ESCRIPT_ROOT'] = prefix

############ Set up paths for Configure() ######################

# Make a copy of an environment
# Use env.Clone if available, but fall back on env.Copy for older versions of scons
def clone_env(env):
    if 'Clone' in dir(env): return env.Clone()  # scons-0.98
    else: return env.Copy()  # scons-0.96

# Add cc option -I<Escript>/trunk/include
env.Append(CPPPATH = [Dir('include')])

# Add cc option -L<Escript>/trunk/lib
env.Append(LIBPATH = [Dir(env['libinstall'])])

if env['cc_extra'] != '': env.Append(CFLAGS = env['cc_extra'])
if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra'])

if env['usepedantic']: env.Append(CCFLAGS = pedantic)

# MS Windows
if IS_WINDOWS_PLATFORM:
    env.AppendENVPath('PATH', [env['boost_lib_path']])
    env.AppendENVPath('PATH', [env['libinstall']])
    if not env['share_esysUtils']:
        env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
    if not env['share_paso']:
        env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])

    if env['usenetcdf']:
        env.AppendENVPath('PATH', [env['netCDF_lib_path']])

env.Append(ARFLAGS = env['ar_flags'])

# Get the global Subversion revision number for getVersion() method
try:
    global_revision = os.popen("svnversion -n .").read()
    global_revision = re.sub(":.*", "", global_revision)
    global_revision = re.sub("[^0-9]", "", global_revision)
except:
    global_revision="-1"
if global_revision == "": global_revision="-2"
env.Append(CPPDEFINES = ["SVN_VERSION="+global_revision])
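# Note: 'svnversion -n .' may report a mixed-revision working copy such as
# "3090:3097M"; the first substitution keeps only the part before the ':' and
# the second strips any status letters, so SVN_VERSION is always a plain
# number (or -1/-2 when no usable revision could be determined).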

############ numpy (required) ###############################

try:
    from numpy import identity
except ImportError:
    print "Cannot import numpy, you need to set your PYTHONPATH"
    sys.exit(1)

############ C compiler (required) #############################

# Create a Configure() environment for checking existence of required libraries and headers
conf = Configure(clone_env(env))

# Test that the compiler is working
if not conf.CheckFunc('printf'):
    print "Cannot run C compiler '%s' (or libc is missing)" % (env['CC'])
    sys.exit(1)

if conf.CheckFunc('gethostname'):
    conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])

############ python libraries (required) #######################


if not sysheaderopt == "":
    conf.env.Append(CCFLAGS=sysheaderopt+env['python_path'])
else:
    conf.env.AppendUnique(CPPPATH = [env['python_path']])

conf.env.AppendUnique(LIBPATH = [env['python_lib_path']])
conf.env.AppendUnique(LIBS = [env['python_libs']])

conf.env.PrependENVPath('PYTHONPATH', prefix)
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['python_lib_path'])  # The wrapper script needs to find these libs
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if not conf.CheckCHeader('Python.h'):
    print "Cannot find python include files (tried 'Python.h' in directory %s)" % (env['python_path'])
    sys.exit(1)
if not conf.CheckFunc('Py_Exit'):
    print "Cannot find python library method Py_Exit (tried lib %s in directory %s)" % (env['python_libs'], env['python_lib_path'])
    sys.exit(1)

############ boost (required) ##################################

if not sysheaderopt == "":
    # This is required because we can't use -isystem on /usr/include itself since it breaks std includes
    if os.path.normpath(env['boost_path']) == "/usr/include":
        conf.env.Append(CCFLAGS=sysheaderopt+os.path.join(env['boost_path'],'boost'))
    else:
        conf.env.Append(CCFLAGS=sysheaderopt+env['boost_path'])
else:
    conf.env.AppendUnique(CPPPATH = [env['boost_path']])

conf.env.AppendUnique(LIBPATH = [env['boost_lib_path']])
conf.env.AppendUnique(LIBS = [env['boost_libs']])

conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['boost_lib_path'])  # The wrapper script needs to find these libs
# ensure that our path entries remain at the front
conf.env.PrependENVPath('PYTHONPATH', prefix)
conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])


# Yep, we still can't figure this one out. - working on it.
if not IS_WINDOWS_PLATFORM:
    if not conf.CheckCXXHeader('boost/python.hpp'):
        print "Cannot find boost include files (tried boost/python.hpp in directory %s)" % (env['boost_path'])
        sys.exit(1)

    if not conf.CheckFunc('PyObject_SetAttr'):
        print "Cannot find boost library method PyObject_SetAttr (tried method PyObject_SetAttr in library %s in directory %s)" % (env['boost_libs'], env['boost_lib_path'])
        sys.exit(1)


# Commit changes to environment
env = conf.Finish()

############ VTK (optional) ####################################

if env['usevtk']:
    try:
        import vtk
        env['usevtk'] = 1
    except ImportError:
        env['usevtk'] = 0

# Add VTK to environment env if it was found
if env['usevtk']:
    env.Append(CPPDEFINES = ['USE_VTK'])

############ NetCDF (optional) #################################

conf = Configure(clone_env(env))

if env['usenetcdf']:
    conf.env.AppendUnique(CPPPATH = [env['netCDF_path']])
    conf.env.AppendUnique(LIBPATH = [env['netCDF_lib_path']])
    conf.env.AppendUnique(LIBS = [env['netCDF_libs']])
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['netCDF_lib_path'])  # The wrapper script needs to find these libs
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env['usenetcdf'] and not conf.CheckCHeader('netcdf.h'): env['usenetcdf'] = 0
if env['usenetcdf'] and not conf.CheckFunc('nc_open'): env['usenetcdf'] = 0

# Add NetCDF to environment env if it was found
if env['usenetcdf']:
    env = conf.Finish()
    env.Append(CPPDEFINES = ['USE_NETCDF'])
else:
    conf.Finish()

############ PAPI (optional) ###################################

# Start a new configure environment that reflects what we've already found
conf = Configure(clone_env(env))

if env['usepapi']:
    conf.env.AppendUnique(CPPPATH = [env['papi_path']])
    conf.env.AppendUnique(LIBPATH = [env['papi_lib_path']])
    conf.env.AppendUnique(LIBS = [env['papi_libs']])
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['papi_lib_path'])  # The wrapper script needs to find these libs
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env['usepapi'] and not conf.CheckCHeader('papi.h'): env['usepapi'] = 0
if env['usepapi'] and not conf.CheckFunc('PAPI_start_counters'): env['usepapi'] = 0

# Add PAPI to environment env if it was found
if env['usepapi']:
    env = conf.Finish()
    env.Append(CPPDEFINES = ['BLOCKPAPI'])
else:
    conf.Finish()

############ MKL (optional) ####################################

# Start a new configure environment that reflects what we've already found
conf = Configure(clone_env(env))

if env['usemkl']:
    conf.env.AppendUnique(CPPPATH = [env['mkl_path']])
    conf.env.AppendUnique(LIBPATH = [env['mkl_lib_path']])
    conf.env.AppendUnique(LIBS = [env['mkl_libs']])
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['mkl_lib_path'])  # The wrapper script needs to find these libs
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env['usemkl'] and not conf.CheckCHeader('mkl_solver.h'): env['usemkl'] = 0
if env['usemkl'] and not conf.CheckFunc('pardiso'): env['usemkl'] = 0


# Add MKL to environment env if it was found
if env['usemkl']:
    env = conf.Finish()
    env.Append(CPPDEFINES = ['MKL'])
else:
    conf.Finish()

############ UMFPACK (optional) ################################

# Start a new configure environment that reflects what we've already found
conf = Configure(clone_env(env))

if env['useumfpack']:
    conf.env.AppendUnique(CPPPATH = [env['ufc_path']])
    conf.env.AppendUnique(CPPPATH = [env['umf_path']])
    conf.env.AppendUnique(LIBPATH = [env['umf_lib_path']])
    conf.env.AppendUnique(LIBS = [env['umf_libs']])
    conf.env.AppendUnique(CPPPATH = [env['amd_path']])
    conf.env.AppendUnique(LIBPATH = [env['amd_lib_path']])
    conf.env.AppendUnique(LIBS = [env['amd_libs']])
    conf.env.AppendUnique(CPPPATH = [env['blas_path']])
    conf.env.AppendUnique(LIBPATH = [env['blas_lib_path']])
    conf.env.AppendUnique(LIBS = [env['blas_libs']])
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['umf_lib_path'])  # The wrapper script needs to find these libs
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['amd_lib_path'])  # The wrapper script needs to find these libs
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['blas_lib_path'])  # The wrapper script needs to find these libs
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env['useumfpack'] and not conf.CheckCHeader('umfpack.h'): env['useumfpack'] = 0
if env['useumfpack'] and not conf.CheckFunc('umfpack_di_symbolic'): env['useumfpack'] = 0
# if env['useumfpack'] and not conf.CheckFunc('daxpy'): env['useumfpack'] = 0 # this does not work on shake73?

# Add UMFPACK to environment env if it was found
if env['useumfpack']:
    env = conf.Finish()
    env.Append(CPPDEFINES = ['UMFPACK'])
else:
    conf.Finish()

############ Silo (optional) ###################################

if env['usesilo']:
    conf = Configure(clone_env(env))
    conf.env.AppendUnique(CPPPATH = [env['silo_path']])
    conf.env.AppendUnique(LIBPATH = [env['silo_lib_path']])
    conf.env.AppendUnique(LIBS = [env['silo_libs']])
    if not conf.CheckCHeader('silo.h'): env['usesilo'] = 0
    if not conf.CheckFunc('DBMkDir'): env['usesilo'] = 0
    conf.Finish()

# Add the path to Silo to environment env if it was found.
# Note that we do not add the libs since they are only needed for the
# weipa library and tools.
if env['usesilo']:
    env.AppendUnique(CPPPATH = [env['silo_path']])
    env.AppendUnique(LIBPATH = [env['silo_lib_path']])

############ VisIt (optional) ###################################

if env['usevisit']:
    env.AppendUnique(CPPPATH = [env['visit_path']])
    env.AppendUnique(LIBPATH = [env['visit_lib_path']])

########### Lapack (optional) ##################################

if env['uselapack']:
    env.AppendUnique(CPPDEFINES='USE_LAPACK')
    env.AppendUnique(CPPPATH = [env['lapack_path']])
    env.AppendUnique(LIBPATH = [env['lapack_lib_path']])

    env.Append(LIBPATH = '/usr/lib/atlas')
    env.Append(LIBS = [env['lapack_libs']])
    if env['lapack_type']=='mkl':
        if not env['usemkl']:
            env['uselapack']=0
            print "mkl_lapack requires mkl"
        else:
            env.AppendUnique(CPPDEFINES='MKL_LAPACK')


############ Add the compiler flags ############################

# Enable debug by choosing either cc_debug or cc_optim
if env['usedebug']:
    env.Append(CCFLAGS = env['cc_debug'])
    env.Append(CCFLAGS = env['omp_debug'])
else:
    env.Append(CCFLAGS = env['cc_optim'])
    env.Append(CCFLAGS = env['omp_optim'])

# Always use cc_flags
env.Append(CCFLAGS = env['cc_flags'])
env.Append(LIBS = [env['omp_libs']])

############ Add some custom builders ##########################

py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
env.Append(BUILDERS = {'PyCompile' : py_builder});

runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});

runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});

epstopdfbuilder = Builder(action = scons_extensions.eps2pdf, suffix=".pdf", src_suffix=".eps", single_source=True)
env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
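# These builders are invoked from the SConscript files in the subdirectories;
# a typical (illustrative) call would be
#   env.RunPyUnitTest('some_test.py')
# which is expected to produce a matching '.passed' target once the test
# script has run successfully.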

############ MPI (optional) ####################################
if not env['usempi']: env['mpi_flavour']='none'

# Create a modified environment for MPI programs (identical to env if usempi=no)
env_mpi = clone_env(env)

# Start a new configure environment that reflects what we've already found
conf = Configure(clone_env(env_mpi))

if env_mpi['usempi']:
    VALID_MPIs=[ "MPT", "MPICH", "MPICH2", "OPENMPI", "INTELMPI" ]
    if not env_mpi['mpi_flavour'] in VALID_MPIs:
        raise ValueError,"MPI is enabled but mpi_flavour = %s is not a valid key from %s."%( env_mpi['mpi_flavour'],VALID_MPIs)
    conf.env.AppendUnique(CPPPATH = [env_mpi['mpi_path']])
    conf.env.AppendUnique(LIBPATH = [env_mpi['mpi_lib_path']])
    conf.env.AppendUnique(LIBS = [env_mpi['mpi_libs']])
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['mpi_lib_path'])  # The wrapper script needs to find these libs
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env_mpi['usempi'] and not conf.CheckCHeader('mpi.h'): env_mpi['usempi'] = 0
# if env_mpi['usempi'] and not conf.CheckFunc('MPI_Init'): env_mpi['usempi'] = 0

# Add MPI to environment env_mpi if it was found
if env_mpi['usempi']:
    env_mpi = conf.Finish()
    env_mpi.Append(CPPDEFINES = ['PASO_MPI', 'MPI_NO_CPPBIND', env_mpi['MPICH_IGNORE_CXX_SEEK']])
    # NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined!
    # On the other hand MPT and OpenMPI don't define the latter so we have to
    # do that here
    if env['usenetcdf'] and env_mpi['mpi_flavour'] in ["MPT","OPENMPI"]:
        env_mpi.Append(CPPDEFINES = ['MPI_INCLUDED'])
else:
    conf.Finish()

env['usempi'] = env_mpi['usempi']
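# An MPI build therefore needs usempi=yes plus a valid mpi_flavour, usually
# set in the hostname options file; for example (paths and library names are
# illustrative only):
#
#   usempi = 'yes'
#   mpi_flavour = 'OPENMPI'
#   mpi_path = '/usr/include/openmpi'
#   mpi_lib_path = '/usr/lib/openmpi/lib'
#   mpi_libs = ['mpi_cxx', 'mpi']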

############ ParMETIS (optional) ###############################

# Start a new configure environment that reflects what we've already found
conf = Configure(clone_env(env_mpi))

if not env_mpi['usempi']: env_mpi['useparmetis'] = 0

if env_mpi['useparmetis']:
    conf.env.AppendUnique(CPPPATH = [env_mpi['parmetis_path']])
    conf.env.AppendUnique(LIBPATH = [env_mpi['parmetis_lib_path']])
    conf.env.AppendUnique(LIBS = [env_mpi['parmetis_libs']])
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['parmetis_lib_path'])  # The wrapper script needs to find these libs
    # ensure that our path entries remain at the front
    conf.env.PrependENVPath('PYTHONPATH', prefix)
    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])

if env_mpi['useparmetis'] and not conf.CheckCHeader('parmetis.h'): env_mpi['useparmetis'] = 0
if env_mpi['useparmetis'] and not conf.CheckFunc('ParMETIS_V3_PartGeomKway'): env_mpi['useparmetis'] = 0

# Add ParMETIS to environment env_mpi if it was found
if env_mpi['useparmetis']:
    env_mpi = conf.Finish()
    env_mpi.Append(CPPDEFINES = ['USE_PARMETIS'])
else:
    conf.Finish()

env['useparmetis'] = env_mpi['useparmetis']

############ Summarize our environment #########################

print ""
print "Summary of configuration (see ./config.log for information)"
print "   Using python libraries"
print "   Using numpy"
print "   Using boost"
if env['usenetcdf']: print "   Using NetCDF"
else: print "   Not using NetCDF"
if env['usevtk']: print "   Using VTK"
else: print "   Not using VTK"
if env['usevisit']: print "   Using VisIt"
else: print "   Not using VisIt"
if env['usemkl']: print "   Using MKL"
else: print "   Not using MKL"
if env['useumfpack']: print "   Using UMFPACK"
else: print "   Not using UMFPACK"
if env['usesilo']: print "   Using Silo"
else: print "   Not using Silo"
if env['useopenmp']: print "   Using OpenMP"
else: print "   Not using OpenMP"
if env['usempi']: print "   Using MPI (flavour = %s)"%env['mpi_flavour']
else: print "   Not using MPI"
if env['useparmetis']: print "   Using ParMETIS"
else: print "   Not using ParMETIS (requires MPI)"
if env['usepapi']: print "   Using PAPI"
else: print "   Not using PAPI"
if env['uselapack']: print "   Using Lapack"
else: print "   Not using Lapack"
if env['usedebug']: print "   Compiling for debug"
else: print "   Not compiling for debug"
print "   Installing in", prefix
if ((fatalwarning != "") and (env['usewarnings'])): print "   Treating warnings as errors"
else: print "   Not treating warnings as errors"
print ""

############ Delete option-dependent files #####################

Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.debug")))
Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.mpi")))
Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.openmp")))
Execute(Delete(os.path.join(env['libinstall'],"pyversion")))
Execute(Delete(os.path.join(env['libinstall'],"buildvars")))
if not env['usempi']: Execute(Delete(os.path.join(env['libinstall'],"pythonMPI")))


############ Build the subdirectories ##########################

if env['usepedantic']: env_mpi.Append(CCFLAGS = pedantic)


from grouptest import *

TestGroups=[]

dodgy_env=clone_env(env_mpi)  # Environment without pedantic options

############ Now we switch on Warnings as errors ###############

# this needs to be done after configuration because the scons test files have warnings in them

if ((fatalwarning != "") and (env['usewarnings'])):
    env.Append(CCFLAGS = fatalwarning)
    env_mpi.Append(CCFLAGS = fatalwarning)


Export(
  ["env",
   "env_mpi",
   "clone_env",
   "dodgy_env",
   "IS_WINDOWS_PLATFORM",
   "TestGroups",
   "CallSConscript",
   "cantusevariantdir"
  ]
)

CallSConscript(env, dirs = ['tools/CppUnitTest/src'], variant_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0)
CallSConscript(env, dirs = ['tools/escriptconvert'], variant_dir='build/$PLATFORM/tools/escriptconvert', duplicate=0)
CallSConscript(env, dirs = ['paso/src'], variant_dir='build/$PLATFORM/paso', duplicate=0)
CallSConscript(env, dirs = ['weipa/src'], variant_dir='build/$PLATFORM/weipa', duplicate=0)
CallSConscript(env, dirs = ['escript/src'], variant_dir='build/$PLATFORM/escript', duplicate=0)
CallSConscript(env, dirs = ['esysUtils/src'], variant_dir='build/$PLATFORM/esysUtils', duplicate=0)
CallSConscript(env, dirs = ['finley/src'], variant_dir='build/$PLATFORM/finley', duplicate=0)
CallSConscript(env, dirs = ['modellib/py_src'], variant_dir='build/$PLATFORM/modellib', duplicate=0)
CallSConscript(env, dirs = ['doc'], variant_dir='build/$PLATFORM/doc', duplicate=0)
CallSConscript(env, dirs = ['pyvisi/py_src'], variant_dir='build/$PLATFORM/pyvisi', duplicate=0)
CallSConscript(env, dirs = ['pycad/py_src'], variant_dir='build/$PLATFORM/pycad', duplicate=0)
CallSConscript(env, dirs = ['pythonMPI/src'], variant_dir='build/$PLATFORM/pythonMPI', duplicate=0)
CallSConscript(env, dirs = ['scripts'], variant_dir='build/$PLATFORM/scripts', duplicate=0)
CallSConscript(env, dirs = ['paso/profiling'], variant_dir='build/$PLATFORM/paso/profiling', duplicate=0)


############ Remember what optimizations we used ###############

remember_list = []

if env['usedebug']:
    remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.debug"), None, Touch('$TARGET'))

if env['usempi']:
    remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.mpi"), None, Touch('$TARGET'))

if env['useopenmp']:
    remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.openmp"), None, Touch('$TARGET'))

env.Alias('remember_options', remember_list)


############### Record python interpreter version ##############

if not IS_WINDOWS_PLATFORM:

    versionstring="Python "+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])
    if sys.version_info[4] >0 : versionstring+="rc%s"%sys.version_info[4]
    os.system("echo "+versionstring+" > "+os.path.join(env['libinstall'],"pyversion"))
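# The pyversion file ends up holding a single line such as "Python 2.6.5"
# (the version shown is only an example), recording which interpreter was
# used for this build.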

############## Populate the buildvars file #####################

buildvars=open(os.path.join(env['libinstall'],'buildvars'),'w')
buildvars.write('python='+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+'\n')

# Find the boost version by extracting it from version.hpp
boosthpp=open(os.path.join(env['boost_path'],'boost','version.hpp'))
boostversion='unknown'
try:
    for line in boosthpp:
        ver=re.match(r'#define BOOST_VERSION (\d+)',line)
        if ver:
            boostversion=ver.group(1)
except StopIteration:
    pass
buildvars.write("boost="+boostversion+"\n")
buildvars.write("svn_revision="+str(global_revision)+"\n")
out="usedebug="
if env['usedebug']:
    out+="y"
else:
    out+="n"
out+="\nusempi="
if env['usempi']:
    out+="y"
else:
    out+="n"
out+="\nuseopenmp="
if env['useopenmp']:
    out+="y"
else:
    out+="n"
buildvars.write(out+"\n")
buildvars.write("mpi_flavour="+env['mpi_flavour']+'\n')
out="lapack="
if env['uselapack']:
    out+="y"
else:
    out+="n"
out+="\nsilo="
if env['usesilo']:
    out+="y"
else:
    out+="n"
out+="\nusevisit="
if env['usevisit']:
    out+="y"
else:
    out+="n"
buildvars.write(out+"\n")
buildvars.close()
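# The resulting buildvars file is a simple key=value listing, for example
# (values depend on the actual configuration):
#
#   python=2.6.5
#   boost=104000
#   svn_revision=3097
#   usedebug=n
#   usempi=y
#   useopenmp=y
#   mpi_flavour=OPENMPI
#   lapack=n
#   silo=y
#   usevisit=n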


############ Targets to build and install libraries ############

target_init = env.Command(env['pyinstall']+'/__init__.py', None, Touch('$TARGET'))
env.Alias('target_init', [target_init])

# The headers have to be installed prior to build in order to satisfy #include <paso/Common.h>
env.Alias('build_esysUtils', ['target_install_esysUtils_headers', 'target_esysUtils_a'])
env.Alias('install_esysUtils', ['build_esysUtils', 'target_install_esysUtils_a'])

env.Alias('build_paso', ['target_install_paso_headers', 'target_paso_a'])
env.Alias('install_paso', ['build_paso', 'target_install_paso_a'])

env.Alias('build_weipa', ['target_install_weipa_headers', 'target_weipa_so', 'target_weipacpp_so'])
env.Alias('install_weipa', ['build_weipa', 'target_install_weipa_so', 'target_install_weipacpp_so', 'target_install_weipa_py'])


env.Alias('build_escriptreader', ['target_install_weipa_headers', 'target_escriptreader_a'])
env.Alias('install_escriptreader', ['build_escriptreader', 'target_install_escriptreader_a'])

env.Alias('build_escript', ['target_install_escript_headers', 'target_escript_so', 'target_escriptcpp_so'])
env.Alias('install_escript', ['build_escript', 'target_install_escript_so', 'target_install_escriptcpp_so', 'target_install_escript_py'])

env.Alias('build_finley', ['target_install_finley_headers', 'target_finley_so', 'target_finleycpp_so'])
env.Alias('install_finley', ['build_finley', 'target_install_finley_so', 'target_install_finleycpp_so', 'target_install_finley_py'])

# Now gather all the above into a couple easy targets: build_all and install_all
build_all_list = []
build_all_list += ['build_esysUtils']
build_all_list += ['build_paso']
build_all_list += ['build_weipa']
build_all_list += ['build_escript']
build_all_list += ['build_finley']
if env['usempi']: build_all_list += ['target_pythonMPI_exe']
#if not IS_WINDOWS_PLATFORM: build_all_list += ['target_escript_wrapper']
build_all_list += ['target_escriptconvert']
env.Alias('build_all', build_all_list)

install_all_list = []
install_all_list += ['target_init']
install_all_list += ['install_esysUtils']
install_all_list += ['install_paso']
install_all_list += ['install_weipa']
install_all_list += ['install_escript']
install_all_list += ['install_finley']
install_all_list += ['target_install_pyvisi_py']
install_all_list += ['target_install_modellib_py']
install_all_list += ['target_install_pycad_py']
if env['usempi']: install_all_list += ['target_install_pythonMPI_exe']
#if not IS_WINDOWS_PLATFORM: install_all_list += ['target_install_escript_wrapper']
if env['usesilo']: install_all_list += ['target_install_escriptconvert']
install_all_list += ['remember_options']
env.Alias('install_all', install_all_list)

# Default target is install
env.Default('install_all')
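# With install_all as the default target, running scons with no arguments
# performs a full build and install; the intermediate aliases can also be
# requested explicitly, for example:
#
#   scons build_all
#   scons install_escript
#   scons -j 4 install_all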

############ Targets to build and run the test suite ###########

env.Alias('build_cppunittest', ['target_install_cppunittest_headers', 'target_cppunittest_a'])
env.Alias('install_cppunittest', ['build_cppunittest', 'target_install_cppunittest_a'])
env.Alias('run_tests', ['install_all', 'target_install_cppunittest_a'])
env.Alias('all_tests', ['install_all', 'target_install_cppunittest_a', 'run_tests', 'py_tests'])
env.Alias('build_full',['install_all','build_tests','build_py_tests'])


############ Targets to build the documentation ################

env.Alias('api_epydoc','install_all')

env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'guide_pdf', 'guide_html','install_pdf', 'cookbook_pdf'])

build_platform=os.name

if not IS_WINDOWS_PLATFORM:
    try:
        utest=open("utest.sh","w")
        # Sometimes Mac python says it is posix
        if (build_platform=='posix') and platform.system()=="Darwin":
            build_platform='darwin'
        utest.write(GroupTest.makeHeader(build_platform))
        for tests in TestGroups:
            utest.write(tests.makeString())
        utest.close()
        os.chmod("utest.sh",stat.S_IRWXU|stat.S_IRGRP|stat.S_IXGRP|stat.S_IROTH|stat.S_IXOTH)
        print "utest.sh written"
    except IOError:
        print "Error attempting to write unittests file."
        sys.exit(1)

    # Make sure that the escript wrapper is in place
    if not os.path.isfile(os.path.join(env['bininstall'],'escript')):
        print "Copying escript wrapper"
        shutil.copy("bin/escript",os.path.join(env['bininstall'],'escript'))

############ Targets to build PasoTests suite ################

env.Alias('build_PasoTests','build/'+build_platform+'/paso/profiling/PasoTests')

env.Alias('release_prep', ['docs', 'install_all'])
