/[escript]/branches/domexper/SConstruct

Contents of /branches/domexper/SConstruct



Revision 3234
Mon Oct 4 01:46:30 2010 UTC by jfenwick
File size: 41443 byte(s)
Some subdirs need to have changes pulled over, but all of the unit tests
except for modellib appear to work

1
2 ########################################################
3 #
4 # Copyright (c) 2003-2010 by University of Queensland
5 # Earth Systems Science Computational Center (ESSCC)
6 # http://www.uq.edu.au/esscc
7 #
8 # Primary Business: Queensland, Australia
9 # Licensed under the Open Software License version 3.0
10 # http://www.opensource.org/licenses/osl-3.0.php
11 #
12 ########################################################
13
14
15 EnsureSConsVersion(0,96,91)
16 EnsurePythonVersion(2,3)
17
18 import sys, os, re, socket, platform, stat
19 # For copy()
20 import shutil
21
22 # Add our extensions
23 if os.path.isdir('scons'): sys.path.append('scons')
24 import scons_extensions
25
26 # Use /usr/lib64 if available, else /usr/lib
27 usr_lib = '/usr/lib'
28 if os.path.isfile('/usr/lib64/libc.so'): usr_lib = '/usr/lib64'
29
30 # The string python<major>.<minor>, e.g. python2.4 or python2.5
31 python_version = 'python%s.%s' % (sys.version_info[0], sys.version_info[1])
32
33 # MS Windows support, many thanks to PH
34 IS_WINDOWS_PLATFORM = (os.name== "nt")
35
36 prefix = ARGUMENTS.get('prefix', Dir('#.').abspath)
37
38 #Holds names of variables from the calling environment which need to be passed
39 #to tools
40 env_export=[]
41
42 #Determine where to read options from, in order of preference:
43 #1. command line
44 #2. scons/<hostname>_options.py
45 #3. name as part of a cluster
46 options_file=ARGUMENTS.get('options_file', None)
47 effective_hostname=socket.gethostname().split('.')[0]
48 if not options_file:
49 mangledhostname = re.sub("[^0-9a-zA-Z]", "_", effective_hostname)
50 options_file = os.path.join("scons",mangledhostname+"_options.py")
51 #If there is no options file with that name, see if there is a substitute
52 if not os.path.isfile(options_file):
53 effective_hostname = scons_extensions.effectiveName(effective_hostname)
54 mangledhostname = re.sub("[^0-9a-zA-Z]", "_", effective_hostname)
55 options_file = os.path.join("scons",mangledhostname+"_options.py")
56
57 if not os.path.isfile(options_file):
58 print "Options file not found (expected '%s')" % options_file
59 options_file = False
60 else:
61 print "Options file is", options_file
62
63 #Does our scons support the newer Variables class or do we need to use Options?
64
65 try:
66 dummyvar=Variables
67 opts = Variables(options_file, ARGUMENTS)
68 adder = opts.AddVariables
69 except:
70 opts = Options(options_file, ARGUMENTS)
71 adder = opts.AddOptions
72 BoolVariable = BoolOption
73
74 ############ Load build options ################################
75
76 adder(
77 #opts.AddOptions(
78 # Where to install esys stuff
79 ('prefix', 'where everything will be installed', Dir('#.').abspath),
80 ('incinstall', 'where the esys headers will be installed', os.path.join(Dir('#.').abspath,'include')),
81 ('bininstall', 'where the esys binaries will be installed', os.path.join(prefix,'bin')),
82 ('libinstall', 'where the esys libraries will be installed', os.path.join(prefix,'lib')),
83 ('pyinstall', 'where the esys python modules will be installed', os.path.join(prefix,'esys')),
84 # Compilation options
85 BoolVariable('dodebug', 'For backwards compatibility', 'no'),
86 BoolVariable('usedebug', 'Do you want a debug build?', 'no'),
87 BoolVariable('usevtk', 'Do you want to use VTK?', 'yes'),
88 ('options_file', 'File of paths/options. Default: scons/<hostname>_options.py', options_file),
89 ('cc', 'path to C compiler', 'DEFAULT'),
90 ('cxx', 'path to C++ compiler', 'DEFAULT'),
91 ('win_cc_name', 'windows C compiler name if needed', 'msvc'),
92 # The strings -DEFAULT_ get replaced by scons/<hostname>_options.py or by defaults below
93 ('cc_flags', 'C/C++ compiler flags to use', '-DEFAULT_1'),
94 ('cc_optim', 'C/C++ optimization flags to use', '-DEFAULT_2'),
95 ('cc_debug', 'C/C++ debug flags to use', '-DEFAULT_3'),
96 ('omp_optim', 'OpenMP compiler flags to use (Release build)', '-DEFAULT_4'),
97 ('omp_debug', 'OpenMP compiler flags to use (Debug build)', '-DEFAULT_5'),
98 ('omp_libs', 'OpenMP compiler libraries to link with', '-DEFAULT_6'),
99 ('cc_extra', 'Extra C compiler flags', ''),
100 ('cxx_extra', 'Extra C++ compiler flags', ''),
101 ('ld_extra', 'Extra linker flags', ''),
102 ('sys_libs', 'System libraries to link with', []),
103 ('ar_flags', 'Static library archiver flags to use', ''),
104 BoolVariable('useopenmp', 'Compile parallel version using OpenMP', 'no'),
105 BoolVariable('usepedantic', 'Compile with -pedantic if using gcc', 'no'),
106 BoolVariable('usewarnings','Compile with warnings as errors if using gcc','yes'),
107 ('forcelazy','for testing use only - set the default value for autolazy','leave_alone'),
108 ('forcecollres','for testing use only - set the default value for force resolving collective ops','leave_alone'),
109 # Python
110 ('python_path', 'Path to Python includes', '/usr/include/'+python_version),
111 ('python_lib_path', 'Path to Python libs', usr_lib),
112 ('python_libs', 'Python libraries to link with', [python_version]),
113 ('python_cmd', 'Python command', 'python'),
114 # Boost
115 ('boost_path', 'Path to Boost includes', '/usr/include'),
116 ('boost_lib_path', 'Path to Boost libs', usr_lib),
117 ('boost_libs', 'Boost libraries to link with', ['boost_python']),
118 # NetCDF
119 BoolVariable('usenetcdf', 'switch on/off the usage of netCDF', 'yes'),
120 ('netCDF_path', 'Path to netCDF includes', '/usr/include'),
121 ('netCDF_lib_path', 'Path to netCDF libs', usr_lib),
122 ('netCDF_libs', 'netCDF C++ libraries to link with', ['netcdf_c++', 'netcdf']),
123 # MPI
124 BoolVariable('useMPI', 'For backwards compatibility', 'no'),
125 BoolVariable('usempi', 'Compile parallel version using MPI', 'no'),
126 ('MPICH_IGNORE_CXX_SEEK', 'name of macro to ignore MPI settings of C++ SEEK macro (for MPICH)' , 'MPICH_IGNORE_CXX_SEEK'),
127 ('mpi_path', 'Path to MPI includes', '/usr/include'),
128 ('mpi_run', 'mpirun name' , 'mpiexec -np 1'),
129 ('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)', usr_lib),
130 ('mpi_libs', 'MPI libraries to link with (needs to be shared!)', []),
131 ('mpi_flavour','Type of MPI execution environment','none'),
132 # ParMETIS
133 BoolVariable('useparmetis', 'Compile parallel version using ParMETIS', 'yes'),
134 ('parmetis_path', 'Path to ParMETIS includes', '/usr/include'),
135 ('parmetis_lib_path', 'Path to ParMETIS library', usr_lib),
136 ('parmetis_libs', 'ParMETIS library to link with', ['parmetis', 'metis']),
137 # PAPI
138 BoolVariable('usepapi', 'switch on/off the usage of PAPI', 'no'),
139 ('papi_path', 'Path to PAPI includes', '/usr/include'),
140 ('papi_lib_path', 'Path to PAPI libs', usr_lib),
141 ('papi_libs', 'PAPI libraries to link with', ['papi']),
142 BoolVariable('papi_instrument_solver', 'use PAPI in Solver.c to instrument each iteration of the solver', False),
143 # MKL
144 BoolVariable('usemkl', 'switch on/off the usage of MKL', 'no'),
145 ('mkl_path', 'Path to MKL includes', '/sw/sdev/cmkl/10.0.2.18/include'),
146 ('mkl_lib_path', 'Path to MKL libs', '/sw/sdev/cmkl/10.0.2.18/lib/em64t'),
147 ('mkl_libs', 'MKL libraries to link with', ['mkl_solver', 'mkl_em64t', 'guide', 'pthread']),
148 # UMFPACK
149 BoolVariable('useumfpack', 'switch on/off the usage of UMFPACK', 'no'),
150 ('ufc_path', 'Path to UFconfig includes', '/usr/include/suitesparse'),
151 ('umf_path', 'Path to UMFPACK includes', '/usr/include/suitesparse'),
152 ('umf_lib_path', 'Path to UMFPACK libs', usr_lib),
153 ('umf_libs', 'UMFPACK libraries to link with', ['umfpack']),
154 # Silo
155 BoolVariable('usesilo', 'switch on/off the usage of Silo', 'yes'),
156 ('silo_path', 'Path to Silo includes', '/usr/include'),
157 ('silo_lib_path', 'Path to Silo libs', usr_lib),
158 ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
159 # VisIt
160 BoolVariable('usevisit', 'switch on/off the usage of the VisIt sim library', 'no'),
161 ('visit_path', 'Path to VisIt libsim includes', '/usr/include'),
162 ('visit_lib_path', 'Path to VisIt sim library', usr_lib),
163 # AMD (used by UMFPACK)
164 ('amd_path', 'Path to AMD includes', '/usr/include/suitesparse'),
165 ('amd_lib_path', 'Path to AMD libs', usr_lib),
166 ('amd_libs', 'AMD libraries to link with', ['amd']),
167 # BLAS (used by UMFPACK)
168 ('blas_path', 'Path to BLAS includes', '/usr/include/suitesparse'),
169 ('blas_lib_path', 'Path to BLAS libs', usr_lib),
170 ('blas_libs', 'BLAS libraries to link with', ['blas']),
171 #Lapack options
172 BoolVariable('uselapack','switch on/off use of Lapack','no'),
173 ('lapack_path', 'Path to Lapack includes','/usr/include'),
174 ('lapack_lib_path', 'Path to Lapack libs', usr_lib),
175 ('lapack_libs', 'Lapack libraries to link with', []),
176 ('lapack_type', '{clapack,mkl}','clapack'),
177 # An option for specifying the compiler tools set (see windows branch).
178 ('tools_names', 'allow control over the tools in the env setup', ['default']),
179 # finer control over library building, intel aggressive global optimisation
180 # works with dynamic libraries on windows.
181 ('share_esysUtils', 'control static or dynamic esysUtils lib', False),
182 ('share_paso', 'control static or dynamic paso lib', False),
183 ('env_export','Environment variables to be passed to children',[]),
184 #To enable passing function pointers through python
185 BoolVariable('iknowwhatimdoing','allow nonstandard C',False),
186 #set dudley_assemble_flags to e.g. -funroll-loops for these optimisations to actually do something
187 ('dudley_assemble_flags', 'compiler flags for some dudley optimisations', '')
188 )
189
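# For illustration only: any of the variables declared above can also be set
# on the scons command line, which takes precedence over the options file,
# e.g.
#
#   scons usedebug=yes useopenmp=yes prefix=/opt/escript install_all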
190
191 ###################
192
193 # This is only to support old versions of scons which don't accept
194 # the variant_dir parameter (older than 0.98 I think).
195 # Once these are no longer an issue we can go back to a direct call
196 # to obj.SConscript
197 import SCons
198 vs=SCons.__version__.split('.')
199 cantusevariantdir=float(vs[0]+'.'+vs[1])<0.98
200
201
202 def CallSConscript(obj, **kw):
203 if cantusevariantdir:
204 if 'variant_dir' in kw:
205 kw['build_dir']=kw['variant_dir']
206 del kw['variant_dir']
207 obj.SConscript(**kw)
208
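# Illustrative call only (the real invocations are at the bottom of this
# file); on SCons releases without variant_dir support the keyword is simply
# renamed to the older build_dir spelling before being passed on:
#
#   CallSConscript(env, dirs = ['paso/src'], variant_dir='build/$PLATFORM/paso', duplicate=0)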
209
210 ############ Specify which compilers to use ####################
211
212 # intelc uses regular expressions improperly and emits a warning about
213 # failing to find the compilers. This warning can be safely ignored.
214
215 if IS_WINDOWS_PLATFORM:
216 env = Environment(options = opts)
217 env = Environment(tools = ['default'] + env['tools_names'],
218 options = opts)
219 else:
220 if os.uname()[4]=='ia64':
221 env = Environment(tools = ['default', 'intelc'], options = opts)
222 if env['CXX'] == 'icpc':
223 env['LINK'] = env['CXX'] # version >=9 of intel c++ compiler requires use of icpc to link in C++ runtimes (icc does not)
224 else:
225 env = Environment(tools = ['default'], options = opts)
226 if env['tools_names']!='default':
227 env=Environment(tools = ['default'] +env['tools_names'], options=opts)
228
229 # Override compiler choice if provided
230 if env['cc'] != 'DEFAULT': env['CC']=env['cc']
231 if env['cxx'] != 'DEFAULT': env['CXX']=env['cxx']
232
233 Help(opts.GenerateHelpText(env))
234
235 ############ Make sure target directories exist ################
236
237 if not os.path.isdir(env['bininstall']):
238 os.makedirs(env['bininstall'])
239 if not os.path.isdir(env['libinstall']):
240 os.makedirs(env['libinstall'])
241 if not os.path.isdir(env['pyinstall']):
242 os.makedirs(env['pyinstall'])
243
244 ########## Copy required environment vars ######################
245
246 for i in env['env_export']:
247 env.Append(ENV = {i:os.environ[i]})
248
249 ############ Fill in compiler options if not set above #########
250
251 # Backwards compatibility: allow dodebug=yes and useMPI=yes
252 if env['dodebug']: env['usedebug'] = 1
253 if env['useMPI']: env['usempi'] = 1
254
255 # Default compiler options (override allowed in hostname_options.py, but should not be necessary)
256 # For both C and C++ you get: cc_flags and either the optim flags or debug flags
257
258 sysheaderopt = "" # compiler option used to mark a header as a system header; "" means take no special action.
259
260 cc_flags = ""
261 cc_optim = ""
262 cc_debug = ""
263 omp_optim = ""
264 omp_debug = ""
265 omp_libs = []
266
267 if env["CC"] == "icc":
268 # Intel compilers
269 cc_flags = "-std=c99 -fPIC -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1"
270 cc_optim = "-O3 -ftz -IPF_ftlacc- -IPF_fma -fno-alias -ip"
271 cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
272 omp_optim = "-openmp -openmp_report0"
273 omp_debug = "-openmp -openmp_report0"
274 omp_libs = ['guide', 'pthread']
275 pedantic = ""
276 fatalwarning = "" # Switch to turn warnings into errors
277 sysheaderopt = ""
278 elif env["CC"][:3] == "gcc":
279 # GNU C on any system
280 cc_flags = "-pedantic -Wall -fPIC -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
281 #the long long warning occurs on the Mac
282 cc_optim = "-O3"
283 cc_debug = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
284 omp_optim = "-fopenmp"
285 omp_debug = "-fopenmp"
286 omp_libs = []
287 pedantic = "-pedantic-errors -Wno-long-long"
288 fatalwarning = "-Werror"
289 sysheaderopt = "-isystem "
290 elif env["CC"] == "cl":
291 # Microsoft Visual C on Windows
292 cc_flags = "/FD /EHsc /GR /wd4068 -D_USE_MATH_DEFINES -DDLL_NETCDF"
293 cc_optim = "/O2 /Op /MT /W3"
294 cc_debug = "/Od /RTC1 /MTd /ZI -DBOUNDS_CHECK"
295 omp_optim = ""
296 omp_debug = ""
297 omp_libs = []
298 pedantic = ""
299 fatalwarning = ""
300 sysheaderopt = ""
301 elif env["CC"] == "icl":
302 # intel C on Windows, see windows_intelc_options.py for a start
303 pedantic = ""
304 fatalwarning = ""
305 sysheaderopt = ""
306
307
308 # If not specified in hostname_options.py then set them here
309 if env["cc_flags"] == "-DEFAULT_1": env['cc_flags'] = cc_flags
310 if env["cc_optim"] == "-DEFAULT_2": env['cc_optim'] = cc_optim
311 if env["cc_debug"] == "-DEFAULT_3": env['cc_debug'] = cc_debug
312 if env["omp_optim"] == "-DEFAULT_4": env['omp_optim'] = omp_optim
313 if env["omp_debug"] == "-DEFAULT_5": env['omp_debug'] = omp_debug
314 if env["omp_libs"] == "-DEFAULT_6": env['omp_libs'] = omp_libs
315
316 #set up the autolazy values
317 if env['forcelazy'] != "leave_alone":
318 if env['forcelazy'] == 'on':
319 env.Append(CPPDEFINES=['FAUTOLAZYON'])
320 else:
321 if env['forcelazy'] == 'off':
322 env.Append(CPPDEFINES=['FAUTOLAZYOFF'])
323
324 #set up the collective resolve values
325 if env['forcecollres'] != "leave_alone":
326 print env['forcecollres']
327 if env['forcecollres'] == 'on':
328 env.Append(CPPDEFINES=['FRESCOLLECTON'])
329 else:
330 if env['forcecollres'] == 'off':
331 env.Append(CPPDEFINES=['FRESCOLLECTOFF'])
332
333
334 if env['iknowwhatimdoing']:
335 env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])
336
337 # OpenMP is disabled if useopenmp=no or both variables omp_optim and omp_debug are empty
338 if not env["useopenmp"]:
339 env['omp_optim'] = ""
340 env['omp_debug'] = ""
341 env['omp_libs'] = []
342
343 if env['omp_optim'] == "" and env['omp_debug'] == "": env["useopenmp"] = 0
344
345 # Windows doesn't use LD_LIBRARY_PATH but PATH instead
346 if IS_WINDOWS_PLATFORM:
347 LD_LIBRARY_PATH_KEY='PATH'
348 env['ENV']['LD_LIBRARY_PATH']=''
349 else:
350 LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'
351 ############ Copy environment variables into scons env #########
352
353 try: env['ENV']['OMP_NUM_THREADS'] = os.environ['OMP_NUM_THREADS']
354 except KeyError: env['ENV']['OMP_NUM_THREADS'] = 1
355
356 try: env['ENV']['ESCRIPT_NUM_THREADS'] = os.environ['ESCRIPT_NUM_THREADS']
357 except KeyError: pass
358
359 try: env['ENV']['ESCRIPT_NUM_PROCS'] = os.environ['ESCRIPT_NUM_PROCS']
360 except KeyError: env['ENV']['ESCRIPT_NUM_PROCS']=1
361
362 try: env['ENV']['ESCRIPT_NUM_NODES'] = os.environ['ESCRIPT_NUM_NODES']
363 except KeyError: env['ENV']['ESCRIPT_NUM_NODES']=1
364
365 try: env['ENV']['ESCRIPT_HOSTFILE'] = os.environ['ESCRIPT_HOSTFILE']
366 except KeyError: pass
367
368 try: env['ENV']['PATH'] = os.environ['PATH']
369 except KeyError: pass
370
371 try: env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
372 except KeyError: pass
373
374 try: env['ENV']['C_INCLUDE_PATH'] = os.environ['C_INCLUDE_PATH']
375 except KeyError: pass
376
377 try: env['ENV']['CPLUS_INCLUDE_PATH'] = os.environ['CPLUS_INCLUDE_PATH']
378 except KeyError: pass
379
380 try: env.PrependENVPath(LD_LIBRARY_PATH_KEY,os.environ['LD_LIBRARY_PATH'])
381 except KeyError: pass
382
383 try: env['ENV']['LIBRARY_PATH'] = os.environ['LIBRARY_PATH']
384 except KeyError: pass
385
386 try: env['ENV']['DISPLAY'] = os.environ['DISPLAY']
387 except KeyError: pass
388
389 try: env['ENV']['XAUTHORITY'] = os.environ['XAUTHORITY']
390 except KeyError: pass
391
392 try: env['ENV']['HOME'] = os.environ['HOME']
393 except KeyError: pass
394
395 # Configure for test suite
396
397
398 env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
399 env.PrependENVPath('PYTHONPATH', prefix)
400 env['ENV']['ESCRIPT_ROOT'] = prefix
401
402 ############ Set up paths for Configure() ######################
403
404 # Make a copy of an environment
405 # Use env.Clone if available, but fall back on env.Copy for older versions of scons
406 def clone_env(env):
407 if 'Clone' in dir(env): return env.Clone() # scons-0.98
408 else: return env.Copy() # scons-0.96
409
410 # Add cc option -I<Escript>/trunk/include
411 env.Append(CPPPATH = [Dir('include')])
412
413 # Add cc option -L<Escript>/trunk/lib
414 env.Append(LIBPATH = [Dir(env['libinstall'])])
415
416 if env['cc_extra'] != '': env.Append(CFLAGS = env['cc_extra'])
417 if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
418 if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra'])
419
420 if env['usepedantic']: env.Append(CCFLAGS = pedantic)
421
422 # MS Windows
423 if IS_WINDOWS_PLATFORM:
424 env.AppendENVPath('PATH', [env['boost_lib_path']])
425 env.AppendENVPath('PATH', [env['libinstall']])
426 if not env['share_esysUtils'] :
427 env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
428 if not env['share_paso'] :
429 env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])
430
431 if env['usenetcdf']:
432 env.AppendENVPath('PATH', [env['netCDF_lib_path']])
433
434 env.Append(ARFLAGS = env['ar_flags'])
435
436 # Get the global Subversion revision number for getVersion() method
437 try:
438 global_revision = os.popen("svnversion -n .").read()
439 global_revision = re.sub(":.*", "", global_revision)
440 global_revision = re.sub("[^0-9]", "", global_revision)
441 except:
442 global_revision="-1"
443 if global_revision == "": global_revision="-2"
444 env.Append(CPPDEFINES = ["SVN_VERSION="+global_revision])
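# For reference: "svnversion -n ." prints e.g. "3234" for a clean checkout,
# "3230:3234" for a mixed one or "3234M" for a modified one; the two
# substitutions above keep only the leading digits so SVN_VERSION is always
# defined to a plain number.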
445
446 ############ numpy (required) ###############################
447
448 try:
449 from numpy import identity
450 except ImportError:
451 print "Cannot import numpy, you need to set your PYTHONPATH"
452 sys.exit(1)
453
454 ############ C compiler (required) #############################
455
456 # Create a Configure() environment for checking existence of required libraries and headers
457 conf = Configure(clone_env(env))
458
459 # Test that the compiler is working
460 if not conf.CheckFunc('printf'):
461 print "Cannot run C compiler '%s' (or libc is missing)" % (env['CC'])
462 sys.exit(1)
463
464 if conf.CheckFunc('gethostname'):
465 conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])
466
467 ############ python libraries (required) #######################
468
469
470 if not sysheaderopt =="":
471 conf.env.Append(CCFLAGS=sysheaderopt+env['python_path'])
472 else:
473 conf.env.AppendUnique(CPPPATH = [env['python_path']])
474
475 conf.env.AppendUnique(LIBPATH = [env['python_lib_path']])
476 conf.env.AppendUnique(LIBS = [env['python_libs']])
477
478 conf.env.PrependENVPath('PYTHONPATH', prefix)
479 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['python_lib_path']) # The wrapper script needs to find these libs
480 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
481
482 if not conf.CheckCHeader('Python.h'):
483 print "Cannot find python include files (tried 'Python.h' in directory %s)" % (env['python_path'])
484 sys.exit(1)
485 if not conf.CheckFunc('Py_Exit'):
486 print "Cannot find python library method Py_Main (tried lib %s in directory %s)" % (env['python_libs'], env['python_lib_path'])
487 sys.exit(1)
488
489 ############ boost (required) ##################################
490
491 if not sysheaderopt =="":
492 # This is required because we can't use -isystem /usr/include as it breaks the standard includes
493 if os.path.normpath(env['boost_path']) =="/usr/include":
494 conf.env.Append(CCFLAGS=sysheaderopt+os.path.join(env['boost_path'],'boost'))
495 else:
496 conf.env.Append(CCFLAGS=sysheaderopt+env['boost_path'])
497 else:
498 conf.env.AppendUnique(CPPPATH = [env['boost_path']])
499
500 conf.env.AppendUnique(LIBPATH = [env['boost_lib_path']])
501 conf.env.AppendUnique(LIBS = [env['boost_libs']])
502
503 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['boost_lib_path']) # The wrapper script needs to find these libs
504 #ensure that our path entries remain at the front
505 conf.env.PrependENVPath('PYTHONPATH', prefix)
506 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
507
508
509 #We still can't get this check to work on Windows, so it is skipped there for now.
510 if not IS_WINDOWS_PLATFORM:
511 if not conf.CheckCXXHeader('boost/python.hpp'):
512 print "Cannot find boost include files (tried boost/python.hpp in directory %s)" % (env['boost_path'])
513 sys.exit(1)
514
515 if not conf.CheckFunc('PyObject_SetAttr'):
516 print "Cannot find boost library method PyObject_SetAttr (tried method PyObject_SetAttr in library %s in directory %s)" % (env['boost_libs'], env['boost_lib_path'])
517 sys.exit(1)
518
519
520 # Commit changes to environment
521 env = conf.Finish()
522
523 ############ VTK (optional) ####################################
524
525 if env['usevtk']:
526 try:
527 import vtk
528 env['usevtk'] = 1
529 except ImportError:
530 env['usevtk'] = 0
531
532 # Add VTK to environment env if it was found
533 if env['usevtk']:
534 env.Append(CPPDEFINES = ['USE_VTK'])
535
536 ############ NetCDF (optional) #################################
537
538 conf = Configure(clone_env(env))
539
540 if env['usenetcdf']:
541 conf.env.AppendUnique(CPPPATH = [env['netCDF_path']])
542 conf.env.AppendUnique(LIBPATH = [env['netCDF_lib_path']])
543 conf.env.AppendUnique(LIBS = [env['netCDF_libs']])
544 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['netCDF_lib_path']) # The wrapper script needs to find these libs
545 #ensure that our path entries remain at the front
546 conf.env.PrependENVPath('PYTHONPATH', prefix)
547 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
548
549 if env['usenetcdf'] and not conf.CheckCHeader('netcdf.h'): env['usenetcdf'] = 0
550 if env['usenetcdf'] and not conf.CheckFunc('nc_open'): env['usenetcdf'] = 0
551
552 # Add NetCDF to environment env if it was found
553 if env['usenetcdf']:
554 env = conf.Finish()
555 env.Append(CPPDEFINES = ['USE_NETCDF'])
556 else:
557 conf.Finish()
558
559 ############ PAPI (optional) ###################################
560
561 # Start a new configure environment that reflects what we've already found
562 conf = Configure(clone_env(env))
563
564 if env['usepapi']:
565 conf.env.AppendUnique(CPPPATH = [env['papi_path']])
566 conf.env.AppendUnique(LIBPATH = [env['papi_lib_path']])
567 conf.env.AppendUnique(LIBS = [env['papi_libs']])
568 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['papi_lib_path']) # The wrapper script needs to find these libs
569 #ensure that our path entries remain at the front
570 conf.env.PrependENVPath('PYTHONPATH', prefix)
571 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
572
573 if env['usepapi'] and not conf.CheckCHeader('papi.h'): env['usepapi'] = 0
574 if env['usepapi'] and not conf.CheckFunc('PAPI_start_counters'): env['usepapi'] = 0
575
576 # Add PAPI to environment env if it was found
577 if env['usepapi']:
578 env = conf.Finish()
579 env.Append(CPPDEFINES = ['BLOCKPAPI'])
580 else:
581 conf.Finish()
582
583 ############ MKL (optional) ####################################
584
585 # Start a new configure environment that reflects what we've already found
586 conf = Configure(clone_env(env))
587
588 if env['usemkl']:
589 conf.env.AppendUnique(CPPPATH = [env['mkl_path']])
590 conf.env.AppendUnique(LIBPATH = [env['mkl_lib_path']])
591 conf.env.AppendUnique(LIBS = [env['mkl_libs']])
592 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['mkl_lib_path']) # The wrapper script needs to find these libs
593 #ensure that our path entries remain at the front
594 conf.env.PrependENVPath('PYTHONPATH', prefix)
595 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
596
597 if env['usemkl'] and not conf.CheckCHeader('mkl_solver.h'): env['usemkl'] = 0
598 if env['usemkl'] and not conf.CheckFunc('pardiso'): env['usemkl'] = 0
599
600
601 # Add MKL to environment env if it was found
602 if env['usemkl']:
603 env = conf.Finish()
604 env.Append(CPPDEFINES = ['MKL'])
605 else:
606 conf.Finish()
607
608 ############ UMFPACK (optional) ################################
609
610 # Start a new configure environment that reflects what we've already found
611 conf = Configure(clone_env(env))
612
613 if env['useumfpack']:
614 conf.env.AppendUnique(CPPPATH = [env['ufc_path']])
615 conf.env.AppendUnique(CPPPATH = [env['umf_path']])
616 conf.env.AppendUnique(LIBPATH = [env['umf_lib_path']])
617 conf.env.AppendUnique(LIBS = [env['umf_libs']])
618 conf.env.AppendUnique(CPPPATH = [env['amd_path']])
619 conf.env.AppendUnique(LIBPATH = [env['amd_lib_path']])
620 conf.env.AppendUnique(LIBS = [env['amd_libs']])
621 conf.env.AppendUnique(CPPPATH = [env['blas_path']])
622 conf.env.AppendUnique(LIBPATH = [env['blas_lib_path']])
623 conf.env.AppendUnique(LIBS = [env['blas_libs']])
624 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['umf_lib_path']) # The wrapper script needs to find these libs
625 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['amd_lib_path']) # The wrapper script needs to find these libs
626 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['blas_lib_path']) # The wrapper script needs to find these libs
627 #ensure that our path entries remain at the front
628 conf.env.PrependENVPath('PYTHONPATH', prefix)
629 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
630
631 if env['useumfpack'] and not conf.CheckCHeader('umfpack.h'): env['useumfpack'] = 0
632 if env['useumfpack'] and not conf.CheckFunc('umfpack_di_symbolic'): env['useumfpack'] = 0
633 # if env['useumfpack'] and not conf.CheckFunc('daxpy'): env['useumfpack'] = 0 # this does not work on shake73?
634
635 # Add UMFPACK to environment env if it was found
636 if env['useumfpack']:
637 env = conf.Finish()
638 env.Append(CPPDEFINES = ['UMFPACK'])
639 else:
640 conf.Finish()
641
642 ############ Silo (optional) ###################################
643
644 if env['usesilo']:
645 conf = Configure(clone_env(env))
646 conf.env.AppendUnique(CPPPATH = [env['silo_path']])
647 conf.env.AppendUnique(LIBPATH = [env['silo_lib_path']])
648 conf.env.AppendUnique(LIBS = [env['silo_libs']])
649 if not conf.CheckCHeader('silo.h'): env['usesilo'] = 0
650 if not conf.CheckFunc('DBMkDir'): env['usesilo'] = 0
651 conf.Finish()
652
653 # Add the path to Silo to environment env if it was found.
654 # Note that we do not add the libs since they are only needed for the
655 # weipa library and tools.
656 if env['usesilo']:
657 env.AppendUnique(CPPPATH = [env['silo_path']])
658 env.AppendUnique(LIBPATH = [env['silo_lib_path']])
659
660 ############ VisIt (optional) ###################################
661
662 if env['usevisit']:
663 env.AppendUnique(CPPPATH = [env['visit_path']])
664 env.AppendUnique(LIBPATH = [env['visit_lib_path']])
665
666 ########### Lapack (optional) ##################################
667
668 if env['uselapack']:
669 env.AppendUnique(CPPDEFINES='USE_LAPACK')
670 env.AppendUnique(CPPPATH = [env['lapack_path']])
671 env.AppendUnique(LIBPATH =[env['lapack_lib_path']])
672
673 env.Append(LIBPATH = '/usr/lib/atlas')
674 env.Append(LIBS = [env['lapack_libs']])
675 if env['lapack_type']=='mkl':
676 if not env['usemkl']:
677 env['uselapack']=0
678 print "mkl_lapack requires mkl"
679 else:
680 env.AppendUnique(CPPDEFINES='MKL_LAPACK')
681
682
683 ############ Add the compiler flags ############################
684
685 # Enable debug by choosing either cc_debug or cc_optim
686 if env['usedebug']:
687 env.Append(CCFLAGS = env['cc_debug'])
688 env.Append(CCFLAGS = env['omp_debug'])
689 else:
690 env.Append(CCFLAGS = env['cc_optim'])
691 env.Append(CCFLAGS = env['omp_optim'])
692
693 # Always use cc_flags
694 env.Append(CCFLAGS = env['cc_flags'])
695 env.Append(LIBS = [env['omp_libs']])
696
697 ############ Add some custom builders ##########################
698
699 py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
700 env.Append(BUILDERS = {'PyCompile' : py_builder});
701
702 runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
703 env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});
704
705 runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
706 env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});
707
708 epstopdfbuilder = Builder(action = scons_extensions.eps2pdf, suffix=".pdf", src_suffix=".eps", single_source=True)
709 env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
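# Sketch of how the SConscripts in the subdirectories are expected to use
# these builders (target and source names here are made up for illustration):
#
#   prog = env.Program('some_test', ['some_test.c'])
#   env.RunUnitTest('some_test.passed', prog)
#   env.RunPyUnitTest('run_example.passed', 'run_example.py')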
710
711 ############ MPI (optional) ####################################
712 if not env['usempi']: env['mpi_flavour']='none'
713
714 # Create a modified environment for MPI programs (identical to env if usempi=no)
715 env_mpi = clone_env(env)
716
717 # Start a new configure environment that reflects what we've already found
718 conf = Configure(clone_env(env_mpi))
719
720 if env_mpi['usempi']:
721 VALID_MPIs=[ "MPT", "MPICH", "MPICH2", "OPENMPI", "INTELMPI" ]
722 if not env_mpi['mpi_flavour'] in VALID_MPIs:
723 raise ValueError,"MPI is enabled but mpi_flavour = %s is not a valid key from %s."%( env_mpi['mpi_flavour'],VALID_MPIs)
724 conf.env.AppendUnique(CPPPATH = [env_mpi['mpi_path']])
725 conf.env.AppendUnique(LIBPATH = [env_mpi['mpi_lib_path']])
726 conf.env.AppendUnique(LIBS = [env_mpi['mpi_libs']])
727 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['mpi_lib_path']) # The wrapper script needs to find these libs
728 #ensure that our path entries remain at the front
729 conf.env.PrependENVPath('PYTHONPATH', prefix)
730 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
731
732 if env_mpi['usempi'] and not conf.CheckCHeader('mpi.h'): env_mpi['usempi'] = 0
733 # if env_mpi['usempi'] and not conf.CheckFunc('MPI_Init'): env_mpi['usempi'] = 0
734
735 # Add MPI to environment env_mpi if it was found
736 if env_mpi['usempi']:
737 env_mpi = conf.Finish()
738 env_mpi.Append(CPPDEFINES = ['ESYS_MPI', 'MPI_NO_CPPBIND', env_mpi['MPICH_IGNORE_CXX_SEEK']])
739 # NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined!
740 # On the other hand MPT and OpenMPI don't define the latter so we have to
741 # do that here
742 if env['usenetcdf'] and env_mpi['mpi_flavour'] in ["MPT","OPENMPI"]:
743 env_mpi.Append(CPPDEFINES = ['MPI_INCLUDED'])
744 else:
745 conf.Finish()
746
747 env['usempi'] = env_mpi['usempi']
748
749 ############ ParMETIS (optional) ###############################
750
751 # Start a new configure environment that reflects what we've already found
752 conf = Configure(clone_env(env_mpi))
753
754 if not env_mpi['usempi']: env_mpi['useparmetis'] = 0
755
756 if env_mpi['useparmetis']:
757 conf.env.AppendUnique(CPPPATH = [env_mpi['parmetis_path']])
758 conf.env.AppendUnique(LIBPATH = [env_mpi['parmetis_lib_path']])
759 conf.env.AppendUnique(LIBS = [env_mpi['parmetis_libs']])
760 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['parmetis_lib_path']) # The wrapper script needs to find these libs
761 #ensure that our path entries remain at the front
762 conf.env.PrependENVPath('PYTHONPATH', prefix)
763 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
764
765 if env_mpi['useparmetis'] and not conf.CheckCHeader('parmetis.h'): env_mpi['useparmetis'] = 0
766 if env_mpi['useparmetis'] and not conf.CheckFunc('ParMETIS_V3_PartGeomKway'): env_mpi['useparmetis'] = 0
767
768 # Add ParMETIS to environment env_mpi if it was found
769 if env_mpi['useparmetis']:
770 env_mpi = conf.Finish()
771 env_mpi.Append(CPPDEFINES = ['USE_PARMETIS'])
772 else:
773 conf.Finish()
774
775 env['useparmetis'] = env_mpi['useparmetis']
776
777 ############ Summarize our environment #########################
778
779 print ""
780 print "Summary of configuration (see ./config.log for information)"
781 print " Using python libraries"
782 print " Using numpy"
783 print " Using boost"
784 if env['usenetcdf']: print " Using NetCDF"
785 else: print " Not using NetCDF"
786 if env['usevtk']: print " Using VTK"
787 else: print " Not using VTK"
788 if env['usevisit']: print " Using VisIt"
789 else: print " Not using VisIt"
790 if env['usemkl']: print " Using MKL"
791 else: print " Not using MKL"
792 if env['useumfpack']: print " Using UMFPACK"
793 else: print " Not using UMFPACK"
794 if env['usesilo']: print " Using Silo"
795 else: print " Not using Silo"
796 if env['useopenmp']: print " Using OpenMP"
797 else: print " Not using OpenMP"
798 if env['usempi']: print " Using MPI (flavour = %s)"%env['mpi_flavour']
799 else: print " Not using MPI"
800 if env['useparmetis']: print " Using ParMETIS"
801 else: print " Not using ParMETIS (requires MPI)"
802 if env['usepapi']: print " Using PAPI"
803 else: print " Not using PAPI"
804 if env['uselapack']: print " Using Lapack"
805 else: print " Not using Lapack"
806 if env['usedebug']: print " Compiling for debug"
807 else: print " Not compiling for debug"
808 print " Installing in", prefix
809 if ((fatalwarning != "") and (env['usewarnings'])): print " Treating warnings as errors"
810 else: print " Not treating warnings as errors"
811 print ""
812
813 ############ Delete option-dependent files #####################
814
815 Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.debug")))
816 Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.mpi")))
817 Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.openmp")))
818 Execute(Delete(os.path.join(env['libinstall'],"buildvars")))
819 if not env['usempi']: Execute(Delete(os.path.join(env['libinstall'],"pythonMPI")))
820
821
822 ############ Build the subdirectories ##########################
823
824 if env['usepedantic']: env_mpi.Append(CCFLAGS = pedantic)
825
826
827 from grouptest import *
828
829 TestGroups=[]
830
831 dodgy_env=clone_env(env_mpi) # Environment without pedantic options
832
833 ############ Now we switch on Warnings as errors ###############
834
835 #this needs to be done after configuration because the scons test files have warnings in them
836
837 if ((fatalwarning != "") and (env['usewarnings'])):
838 env.Append(CCFLAGS = fatalwarning)
839 env_mpi.Append(CCFLAGS = fatalwarning)
840
841
842 Export(
843 ["env",
844 "env_mpi",
845 "clone_env",
846 "dodgy_env",
847 "IS_WINDOWS_PLATFORM",
848 "TestGroups",
849 "CallSConscript",
850 "cantusevariantdir"
851 ]
852 )
853
854 CallSConscript(env, dirs = ['tools/CppUnitTest/src'], variant_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0)
855 CallSConscript(env, dirs = ['tools/escriptconvert'], variant_dir='build/$PLATFORM/tools/escriptconvert', duplicate=0)
856 CallSConscript(env, dirs = ['paso/src'], variant_dir='build/$PLATFORM/paso', duplicate=0)
857 CallSConscript(env, dirs = ['weipa/src'], variant_dir='build/$PLATFORM/weipa', duplicate=0)
858 CallSConscript(env, dirs = ['escript/src'], variant_dir='build/$PLATFORM/escript', duplicate=0)
859 CallSConscript(env, dirs = ['esysUtils/src'], variant_dir='build/$PLATFORM/esysUtils', duplicate=0)
860 CallSConscript(env, dirs = ['dudley/src'], variant_dir='build/$PLATFORM/dudley', duplicate=0)
861 CallSConscript(env, dirs = ['finley/src'], variant_dir='build/$PLATFORM/finley', duplicate=0)
862 CallSConscript(env, dirs = ['modellib/py_src'], variant_dir='build/$PLATFORM/modellib', duplicate=0)
863 CallSConscript(env, dirs = ['doc'], variant_dir='build/$PLATFORM/doc', duplicate=0)
864 CallSConscript(env, dirs = ['pyvisi/py_src'], variant_dir='build/$PLATFORM/pyvisi', duplicate=0)
865 CallSConscript(env, dirs = ['pycad/py_src'], variant_dir='build/$PLATFORM/pycad', duplicate=0)
866 CallSConscript(env, dirs = ['pythonMPI/src'], variant_dir='build/$PLATFORM/pythonMPI', duplicate=0)
867 CallSConscript(env, dirs = ['scripts'], variant_dir='build/$PLATFORM/scripts', duplicate=0)
868 CallSConscript(env, dirs = ['paso/profiling'], variant_dir='build/$PLATFORM/paso/profiling', duplicate=0)
869
870
871 ############ Remember what optimizations we used ###############
872
873 remember_list = []
874
875 if env['usedebug']:
876 remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.debug"), None, Touch('$TARGET'))
877
878 if env['usempi']:
879 remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.mpi"), None, Touch('$TARGET'))
880
881 if env['useopenmp']:
882 remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.openmp"), None, Touch('$TARGET'))
883
884 env.Alias('remember_options', remember_list)
885
886
887 ############### Record python interpreter version ##############
888
889 if not IS_WINDOWS_PLATFORM:
890
891 versionstring="Python "+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])
892 # if sys.version_info[4] >0 : versionstring+="rc%s"%sys.version_info[4]
893
894 ############## Populate the buildvars file #####################
895
896 buildvars=open(os.path.join(env['libinstall'],'buildvars'),'w')
897 buildvars.write('python='+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+'\n')
898
899 # Find the boost version by extracting it from version.hpp
900 boosthpp=open(os.path.join(env['boost_path'],'boost','version.hpp'))
901 boostversion='unknown'
902 try:
903 for line in boosthpp:
904 ver=re.match(r'#define BOOST_VERSION (\d+)',line)
905 if ver:
906 boostversion=ver.group(1)
907 except StopIteration:
908 pass
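# Note: BOOST_VERSION is a single integer, major*100000 + minor*100 + patch
# (e.g. 104100 for Boost 1.41.0), and that raw number is what gets recorded.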
909 buildvars.write("boost="+boostversion+"\n")
910 buildvars.write("svn_revision="+str(global_revision)+"\n")
911 out="usedebug="
912 if env['usedebug']:
913 out+="y"
914 else:
915 out+="n"
916 out+="\nusempi="
917 if env['usempi']:
918 out+="y"
919 else:
920 out+="n"
921 out+="\nuseopenmp="
922 if env['useopenmp']:
923 out+="y"
924 else:
925 out+="n"
926 buildvars.write(out+"\n")
927 buildvars.write("mpi_flavour="+env['mpi_flavour']+'\n')
928 out="lapack="
929 if env['uselapack']:
930 out+="y"
931 else:
932 out+="n"
933 out+="\nsilo="
934 if env['usesilo']:
935 out+="y"
936 else:
937 out+="n"
938 out+="\nusevisit="
939 if env['usevisit']:
940 out+="y"
941 else:
942 out+="n"
943 buildvars.write(out+"\n")
944 buildvars.close()
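# For reference, the resulting <libinstall>/buildvars file is a plain
# key=value list along these lines (values depend on the configuration
# detected above):
#
#   python=2.6.4
#   boost=104100
#   svn_revision=3234
#   usedebug=n
#   usempi=n
#   useopenmp=y
#   mpi_flavour=none
#   lapack=n
#   silo=y
#   usevisit=n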
945
946
947 ############ Targets to build and install libraries ############
948
949 target_init = env.Command(env['pyinstall']+'/__init__.py', None, Touch('$TARGET'))
950 env.Alias('target_init', [target_init])
951
952 # The headers have to be installed prior to build in order to satisfy #include <paso/Common.h>
953 env.Alias('build_esysUtils', ['target_install_esysUtils_headers', 'target_esysUtils_a'])
954 env.Alias('install_esysUtils', ['build_esysUtils', 'target_install_esysUtils_a'])
955
956 env.Alias('build_paso', ['target_install_paso_headers', 'target_paso_a'])
957 env.Alias('install_paso', ['build_paso', 'target_install_paso_a'])
958
959 env.Alias('build_weipa', ['target_install_weipa_headers', 'target_weipa_so', 'target_weipacpp_so'])
960 env.Alias('install_weipa', ['build_weipa', 'target_install_weipa_so', 'target_install_weipacpp_so', 'target_install_weipa_py'])
961
962
963 env.Alias('build_escriptreader', ['target_install_weipa_headers', 'target_escriptreader_a'])
964 env.Alias('install_escriptreader', ['build_escriptreader', 'target_install_escriptreader_a'])
965
966 env.Alias('build_escript', ['target_install_escript_headers', 'target_escript_so', 'target_escriptcpp_so'])
967 env.Alias('install_escript', ['build_escript', 'target_install_escript_so', 'target_install_escriptcpp_so', 'target_install_escript_py'])
968
969 env.Alias('build_finley', ['target_install_finley_headers', 'target_finley_so', 'target_finleycpp_so'])
970 env.Alias('install_finley', ['build_finley', 'target_install_finley_so', 'target_install_finleycpp_so', 'target_install_finley_py'])
971
972 env.Alias('build_dudley', ['target_install_dudley_headers', 'target_dudley_so', 'target_dudleycpp_so'])
973 env.Alias('install_dudley', ['build_dudley', 'target_install_dudley_so', 'target_install_dudleycpp_so', 'target_dudley_finley_py'])
974
975
976 # Now gather all the above into a couple easy targets: build_all and install_all
977 build_all_list = []
978 build_all_list += ['build_esysUtils']
979 build_all_list += ['build_paso']
980 build_all_list += ['build_weipa']
981 build_all_list += ['build_escript']
982 build_all_list += ['build_dudley']
983 build_all_list += ['build_finley']
984 if env['usempi']: build_all_list += ['target_pythonMPI_exe']
985 #if not IS_WINDOWS_PLATFORM: build_all_list += ['target_escript_wrapper']
986 build_all_list += ['target_escriptconvert']
987 env.Alias('build_all', build_all_list)
988
989 install_all_list = []
990 install_all_list += ['target_init']
991 install_all_list += ['install_esysUtils']
992 install_all_list += ['install_paso']
993 install_all_list += ['install_weipa']
994 install_all_list += ['install_escript']
995 install_all_list += ['install_finley']
996 install_all_list += ['target_install_pyvisi_py']
997 install_all_list += ['target_install_modellib_py']
998 install_all_list += ['target_install_pycad_py']
999 if env['usempi']: install_all_list += ['target_install_pythonMPI_exe']
1000 #if not IS_WINDOWS_PLATFORM: install_all_list += ['target_install_escript_wrapper']
1001 if env['usesilo']: install_all_list += ['target_install_escriptconvert']
1002 install_all_list += ['remember_options']
1003 env.Alias('install_all', install_all_list)
1004
1005 # Default target is install_all
1006 env.Default('install_all')
1007
1008 ############ Targets to build and run the test suite ###########
1009
1010 env.Alias('build_cppunittest', ['target_install_cppunittest_headers', 'target_cppunittest_a'])
1011 env.Alias('install_cppunittest', ['build_cppunittest', 'target_install_cppunittest_a'])
1012 env.Alias('run_tests', ['install_all', 'target_install_cppunittest_a'])
1013 env.Alias('all_tests', ['install_all', 'target_install_cppunittest_a', 'run_tests', 'py_tests'])
1014 env.Alias('build_full',['install_all','build_tests','build_py_tests'])
1015
1016
1017 ############ Targets to build the documentation ################
1018
1019 env.Alias('api_epydoc','install_all')
1020
1021 env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'guide_pdf', 'guide_html','install_pdf', 'cookbook_pdf'])
1022
1023 build_platform=os.name
1024
1025 if not IS_WINDOWS_PLATFORM:
1026 try:
1027 utest=open("utest.sh","w")
1028 #Sometimes Mac python says it is posix
1029 if (build_platform=='posix') and platform.system()=="Darwin":
1030 build_platform='darwin'
1031 utest.write(GroupTest.makeHeader(build_platform))
1032 for tests in TestGroups:
1033 utest.write(tests.makeString())
1034 utest.close()
1035 os.chmod("utest.sh",stat.S_IRWXU|stat.S_IRGRP|stat.S_IXGRP|stat.S_IROTH|stat.S_IXOTH)
1036 print "utest.sh written"
1037 except IOError:
1038 print "Error attempting to write unittests file."
1039 sys.exit(1)
1040
1041 #Make sure that the escript wrapper is in place
1042 if not os.path.isfile(os.path.join(env['bininstall'],'escript')):
1043 print "Copying escript wrapper"
1044 shutil.copy("bin/escript",os.path.join(env['bininstall'],'escript'))
1045
1046 ############ Targets to build PasoTests suite ################
1047
1048 env.Alias('build_PasoTests','build/'+build_platform+'/paso/profiling/PasoTests')
1049
1050 env.Alias('release_prep', ['docs', 'install_all'])
