/[escript]/trunk/SConstruct

Diff of /trunk/SConstruct


revision 3178 by caltinay, Tue Sep 14 00:31:59 2010 UTC vs. revision 4249 by caltinay, Tue Feb 26 01:01:59 2013 UTC
# Line 1 (rev 3178) / Line 1 (rev 4249)
1    ##############################################################################
 ########################################################  
2  #  #
3  # Copyright (c) 2003-2010 by University of Queensland  # Copyright (c) 2003-2013 by University of Queensland
4  # Earth Systems Science Computational Center (ESSCC)  # http://www.uq.edu.au
 # http://www.uq.edu.au/esscc  
5  #  #
6  # Primary Business: Queensland, Australia  # Primary Business: Queensland, Australia
7  # Licensed under the Open Software License version 3.0  # Licensed under the Open Software License version 3.0
8  # http://www.opensource.org/licenses/osl-3.0.php  # http://www.opensource.org/licenses/osl-3.0.php
9  #  #
10  ########################################################  # Development until 2012 by Earth Systems Science Computational Center (ESSCC)
11    # Development since 2012 by School of Earth Sciences
12    #
13  EnsureSConsVersion(0,96,91)  ##############################################################################
 EnsurePythonVersion(2,3)  
   
 import sys, os, re, socket, platform, stat  
 # For copy()  
 import shutil  
   
 # Add our extensions  
 if os.path.isdir('scons'): sys.path.append('scons')  
 import scons_extensions  
14    
15  # Use /usr/lib64 if available, else /usr/lib  EnsureSConsVersion(0,98,1)
16  usr_lib = '/usr/lib'  EnsurePythonVersion(2,5)
 if os.path.isfile('/usr/lib64/libc.so'): usr_lib = '/usr/lib64'  
17    
18  # The string python2.4 or python2.5  import sys, os, platform, re
19  python_version = 'python%s.%s' % (sys.version_info[0], sys.version_info[1])  from distutils import sysconfig
20    from site_init import *
21    from subprocess import PIPE, Popen
22    
23    # Version number to check for in options file. Increment when new features are
24    # added or existing options changed.
25    REQUIRED_OPTS_VERSION=201
26    
27  # MS Windows support, many thanks to PH  # MS Windows support, many thanks to PH
28  IS_WINDOWS_PLATFORM = (os.name== "nt")  IS_WINDOWS = (os.name == 'nt')
   
 prefix = ARGUMENTS.get('prefix', Dir('#.').abspath)  
29    
30  #Holds names of variables from the calling environment which need to be passed  ########################## Determine options file ############################
31  #to tools  # 1. command line
32  env_export=[]  # 2. scons/<hostname>_options.py
33    # 3. name as part of a cluster
 #Determine where to read options from use:  
 #1. command line  
 #2. scons/<hostname>_options.py  
 #3. name as part of a cluster  
34  options_file=ARGUMENTS.get('options_file', None)  options_file=ARGUMENTS.get('options_file', None)
 effective_hostname=socket.gethostname().split('.')[0]  
35  if not options_file:  if not options_file:
36    mangledhostname = re.sub("[^0-9a-zA-Z]", "_", effective_hostname)      ext_dir = os.path.join(os.getcwd(), 'scons')
37    options_file = os.path.join("scons",mangledhostname+"_options.py")      hostname = platform.node().split('.')[0]
38    #If there is no options file with that name see if there is a substitute      for name in hostname, effectiveName(hostname):
39    if not os.path.isfile(options_file):          mangledhostname = re.sub('[^0-9a-zA-Z]', '_', hostname)
40      effective_hostname = scons_extensions.effectiveName(effective_hostname)          options_file = os.path.join(ext_dir, mangledhostname+'_options.py')
41      mangledhostname = re.sub("[^0-9a-zA-Z]", "_", effective_hostname)          if os.path.isfile(options_file): break
     options_file = os.path.join("scons",mangledhostname+"_options.py")  
42    
43  if not os.path.isfile(options_file):  if not os.path.isfile(options_file):
44    print "Options file not found (expected '%s')" % options_file      print("\nWARNING:\nOptions file %s" % options_file)
45    options_file = False      print("not found! Default options will be used which is most likely suboptimal.")
46  else:      print("It is recommended that you copy one of the TEMPLATE files in the scons/")
47    print "Options file is", options_file      print("subdirectory and customize it to your needs.\n")
48        options_file = None
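For illustration, a sketch of how the logic above turns the host name into an options file name (the host name is made up; effectiveName() is imported from site_init.py and may map a node name to a cluster-wide alias):

    import os, re, platform
    hostname = platform.node().split('.')[0]          # e.g. 'savanna-01'
    mangled  = re.sub('[^0-9a-zA-Z]', '_', hostname)  # -> 'savanna_01'
    options_file = os.path.join('scons', mangled + '_options.py')
    # -> scons/savanna_01_options.py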
49  #Does our scons support the newer Variables class or do we need to use Options?  
50    ############################### Build options ################################
51  try:  
52     dummyvar=Variables  default_prefix='/usr'
53     opts = Variables(options_file, ARGUMENTS)  mpi_flavours=('no', 'none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI')
54     adder = opts.AddVariables  lapack_flavours=('none', 'clapack', 'mkl')
55  except:  
56     opts = Options(options_file, ARGUMENTS)  vars = Variables(options_file, ARGUMENTS)
57     adder = opts.AddOptions  vars.AddVariables(
58     BoolVariable = BoolOption    PathVariable('options_file', 'Path to options file', options_file, PathVariable.PathIsFile),
59      PathVariable('prefix', 'Installation prefix', Dir('#.').abspath, PathVariable.PathIsDirCreate),
60  ############ Load build options ################################    PathVariable('build_dir', 'Top-level build directory', Dir('#/build').abspath, PathVariable.PathIsDirCreate),
61      BoolVariable('verbose', 'Output full compile/link lines', False),
62  adder(  # Compiler/Linker options
63  #opts.AddOptions(    ('cc', 'Path to C compiler', 'default'),
64  # Where to install esys stuff    ('cxx', 'Path to C++ compiler', 'default'),
65    ('prefix', 'where everything will be installed',                       Dir('#.').abspath),    ('cc_flags', 'Base C/C++ compiler flags', 'default'),
66    ('incinstall', 'where the esys headers will be installed',             os.path.join(Dir('#.').abspath,'include')),    ('cc_optim', 'Additional C/C++ flags for a non-debug build', 'default'),
67    ('bininstall', 'where the esys binaries will be installed',            os.path.join(prefix,'bin')),    ('cc_debug', 'Additional C/C++ flags for a debug build', 'default'),
   ('libinstall', 'where the esys libraries will be installed',           os.path.join(prefix,'lib')),  
   ('pyinstall', 'where the esys python modules will be installed',       os.path.join(prefix,'esys')),  
 # Compilation options  
   BoolVariable('dodebug', 'For backwards compatibility', 'no'),  
   BoolVariable('usedebug', 'Do you want a debug build?', 'no'),  
   BoolVariable('usevtk', 'Do you want to use VTK?', 'yes'),  
   ('options_file', 'File of paths/options. Default: scons/<hostname>_options.py', options_file),  
   ('cc', 'path to C compiler', 'DEFAULT'),  
   ('cxx', 'path to C++ compiler', 'DEFAULT'),  
   ('win_cc_name', 'windows C compiler name if needed', 'msvc'),  
   # The strings -DDEFAULT_ get replaced by scons/<hostname>_options.py or by defaults below  
   ('cc_flags', 'C/C++ compiler flags to use', '-DEFAULT_1'),  
   ('cc_optim', 'C/C++ optimization flags to use', '-DEFAULT_2'),  
   ('cc_debug', 'C/C++ debug flags to use', '-DEFAULT_3'),  
   ('omp_optim', 'OpenMP compiler flags to use (Release build)', '-DEFAULT_4'),  
   ('omp_debug', 'OpenMP compiler flags to use (Debug build)', '-DEFAULT_5'),  
   ('omp_libs', 'OpenMP compiler libraries to link with', '-DEFAULT_6'),  
68    ('cc_extra', 'Extra C compiler flags', ''),    ('cc_extra', 'Extra C compiler flags', ''),
69    ('cxx_extra', 'Extra C++ compiler flags', ''),    ('cxx_extra', 'Extra C++ compiler flags', ''),
70    ('ld_extra', 'Extra linker flags', ''),    ('ld_extra', 'Extra linker flags', ''),
71    ('sys_libs', 'System libraries to link with', []),    BoolVariable('werror','Treat compiler warnings as errors', True),
72    ('ar_flags', 'Static library archiver flags to use', ''),    BoolVariable('debug', 'Compile with debug flags', False),
73    BoolVariable('useopenmp', 'Compile parallel version using OpenMP', 'no'),    BoolVariable('openmp', 'Compile parallel version using OpenMP', False),
74    BoolVariable('usepedantic', 'Compile with -pedantic if using gcc', 'no'),    ('omp_flags', 'OpenMP compiler flags', 'default'),
75    BoolVariable('usewarnings','Compile with warnings as errors if using gcc','yes'),    ('omp_ldflags', 'OpenMP linker flags', 'default'),
76    ('forcelazy','for testing use only - set the default value for autolazy','leave_alone'),  # Mandatory libraries
77    ('forcecollres','for testing use only - set the default value for force resolving collective ops','leave_alone'),    ('boost_prefix', 'Prefix/Paths of boost installation', default_prefix),
78  # Python    ('boost_libs', 'Boost libraries to link with', ['boost_python-mt']),
79    ('python_path', 'Path to Python includes', '/usr/include/'+python_version),  # Mandatory for tests
80    ('python_lib_path', 'Path to Python libs', usr_lib),    ('cppunit_prefix', 'Prefix/Paths of CppUnit installation', default_prefix),
81    ('python_libs', 'Python libraries to link with', [python_version]),    ('cppunit_libs', 'CppUnit libraries to link with', ['cppunit']),
82    ('python_cmd', 'Python command', 'python'),  # Optional libraries and options
83  # Boost    EnumVariable('mpi', 'Compile parallel version using MPI flavour', 'none', allowed_values=mpi_flavours),
84    ('boost_path', 'Path to Boost includes', '/usr/include'),    ('mpi_prefix', 'Prefix/Paths of MPI installation', default_prefix),
85    ('boost_lib_path', 'Path to Boost libs', usr_lib),    ('mpi_libs', 'MPI shared libraries to link with', ['mpi']),
86    ('boost_libs', 'Boost libraries to link with', ['boost_python']),    BoolVariable('netcdf', 'Enable netCDF file support', False),
87  # NetCDF    ('netcdf_prefix', 'Prefix/Paths of netCDF installation', default_prefix),
88    BoolVariable('usenetcdf', 'switch on/off the usage of netCDF', 'yes'),    ('netcdf_libs', 'netCDF libraries to link with', ['netcdf_c++', 'netcdf']),
89    ('netCDF_path', 'Path to netCDF includes', '/usr/include'),    BoolVariable('parmetis', 'Enable ParMETIS (requires MPI)', False),
90    ('netCDF_lib_path', 'Path to netCDF libs', usr_lib),    ('parmetis_prefix', 'Prefix/Paths of ParMETIS installation', default_prefix),
91    ('netCDF_libs', 'netCDF C++ libraries to link with', ['netcdf_c++', 'netcdf']),    ('parmetis_libs', 'ParMETIS libraries to link with', ['parmetis', 'metis']),
92  # MPI    BoolVariable('papi', 'Enable PAPI', False),
93    BoolVariable('useMPI', 'For backwards compatibility', 'no'),    ('papi_prefix', 'Prefix/Paths to PAPI installation', default_prefix),
   BoolVariable('usempi', 'Compile parallel version using MPI', 'no'),  
   ('MPICH_IGNORE_CXX_SEEK', 'name of macro to ignore MPI settings of C++ SEEK macro (for MPICH)' , 'MPICH_IGNORE_CXX_SEEK'),  
   ('mpi_path', 'Path to MPI includes', '/usr/include'),  
   ('mpi_run', 'mpirun name' , 'mpiexec -np 1'),  
   ('mpi_lib_path', 'Path to MPI libs (needs to be added to the LD_LIBRARY_PATH)', usr_lib),  
   ('mpi_libs', 'MPI libraries to link with (needs to be shared!)', []),  
   ('mpi_flavour','Type of MPI execution environment','none'),  
 # ParMETIS  
   BoolVariable('useparmetis', 'Compile parallel version using ParMETIS', 'yes'),  
   ('parmetis_path', 'Path to ParMETIS includes', '/usr/include'),  
   ('parmetis_lib_path', 'Path to ParMETIS library', usr_lib),  
   ('parmetis_libs', 'ParMETIS library to link with', ['parmetis', 'metis']),  
 # PAPI  
   BoolVariable('usepapi', 'switch on/off the usage of PAPI', 'no'),  
   ('papi_path', 'Path to PAPI includes', '/usr/include'),  
   ('papi_lib_path', 'Path to PAPI libs', usr_lib),  
94    ('papi_libs', 'PAPI libraries to link with', ['papi']),    ('papi_libs', 'PAPI libraries to link with', ['papi']),
95    BoolVariable('papi_instrument_solver', 'use PAPI in Solver.c to instrument each iteration of the solver', False),    BoolVariable('papi_instrument_solver', 'Use PAPI to instrument each iteration of the solver', False),
96  # MKL    BoolVariable('mkl', 'Enable the Math Kernel Library', False),
97    BoolVariable('usemkl', 'switch on/off the usage of MKL', 'no'),    ('mkl_prefix', 'Prefix/Paths to MKL installation', default_prefix),
98    ('mkl_path', 'Path to MKL includes', '/sw/sdev/cmkl/10.0.2.18/include'),    ('mkl_libs', 'MKL libraries to link with', ['mkl_solver','mkl_em64t','guide','pthread']),
99    ('mkl_lib_path', 'Path to MKL libs', '/sw/sdev/cmkl/10.0.2.18/lib/em64t'),    BoolVariable('umfpack', 'Enable UMFPACK', False),
100    ('mkl_libs', 'MKL libraries to link with', ['mkl_solver', 'mkl_em64t', 'guide', 'pthread']),    ('umfpack_prefix', 'Prefix/Paths to UMFPACK installation', default_prefix),
101  # UMFPACK    ('umfpack_libs', 'UMFPACK libraries to link with', ['umfpack']),
102    BoolVariable('useumfpack', 'switch on/off the usage of UMFPACK', 'no'),    BoolVariable('boomeramg', 'Enable BoomerAMG', False),
103    ('ufc_path', 'Path to UFconfig includes', '/usr/include/suitesparse'),    ('boomeramg_prefix', 'Prefix/Paths to BoomerAMG installation', default_prefix),
104    ('umf_path', 'Path to UMFPACK includes', '/usr/include/suitesparse'),    ('boomeramg_libs', 'BoomerAMG libraries to link with', ['boomeramg']),
105    ('umf_lib_path', 'Path to UMFPACK libs', usr_lib),    EnumVariable('lapack', 'Set LAPACK flavour', 'none', allowed_values=lapack_flavours),
106    ('umf_libs', 'UMFPACK libraries to link with', ['umfpack']),    ('lapack_prefix', 'Prefix/Paths to LAPACK installation', default_prefix),
107  # Silo    ('lapack_libs', 'LAPACK libraries to link with', []),
108    BoolVariable('usesilo', 'switch on/off the usage of Silo', 'yes'),    BoolVariable('silo', 'Enable the Silo file format in weipa', False),
109    ('silo_path', 'Path to Silo includes', '/usr/include'),    ('silo_prefix', 'Prefix/Paths to Silo installation', default_prefix),
   ('silo_lib_path', 'Path to Silo libs', usr_lib),  
110    ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),    ('silo_libs', 'Silo libraries to link with', ['siloh5', 'hdf5']),
111  # VisIt    BoolVariable('visit', 'Enable the VisIt simulation interface', False),
112    BoolVariable('usevisit', 'switch on/off the usage of the VisIt sim library', 'no'),    ('visit_prefix', 'Prefix/Paths to VisIt installation', default_prefix),
113    ('visit_path', 'Path to VisIt libsim includes', '/usr/include'),    ('visit_libs', 'VisIt libraries to link with', ['simV2']),
114    ('visit_lib_path', 'Path to VisIt sim library', usr_lib),    BoolVariable('vsl_random', 'Use VSL from intel for random data', False),
115  # AMD (used by UMFPACK)  # Advanced settings
116    ('amd_path', 'Path to AMD includes', '/usr/include/suitesparse'),    # e.g. dudley_assemble_flags = '-funroll-loops' to actually do something
117    ('amd_lib_path', 'Path to AMD libs', usr_lib),    ('dudley_assemble_flags', 'compiler flags for some dudley optimisations', ''),
118    ('amd_libs', 'AMD libraries to link with', ['amd']),    # To enable passing function pointers through python
119  # BLAS (used by UMFPACK)    BoolVariable('iknowwhatimdoing', 'Allow non-standard C', False),
120    ('blas_path', 'Path to BLAS includes', '/usr/include/suitesparse'),    # An option for specifying the compiler tools (see windows branch)
121    ('blas_lib_path', 'Path to BLAS libs', usr_lib),    ('tools_names', 'Compiler tools to use', ['default']),
122    ('blas_libs', 'BLAS libraries to link with', ['blas']),    ('env_export', 'Environment variables to be passed to tools',[]),
123  #Lapack options    EnumVariable('forcelazy', 'For testing use only - set the default value for autolazy', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
124    BoolVariable('uselapack','switch on/off use of Lapack','no'),    EnumVariable('forcecollres', 'For testing use only - set the default value for force resolving collective ops', 'leave_alone', allowed_values=('leave_alone', 'on', 'off')),
125    ('lapack_path', 'Path to Lapack includes','/usr/include'),    # finer control over library building, intel aggressive global optimisation
126    ('lapack_lib_path', 'Path to Lapack libs', usr_lib),    # works with dynamic libraries on windows.
127    ('lapack_libs', 'Lapack libraries to link with', []),    ('build_shared', 'Build dynamic libraries only', False),
128    ('lapack_type', '{clapack,mkl}','clapack'),    ('sys_libs', 'Extra libraries to link with', []),
129  # An option for specifying the compiler tools set (see windows branch).    ('escript_opts_version', 'Version of options file (do not specify on command line)'),
130    ('tools_names', 'allow control over the tools in the env setup', ['default']),    ('SVN_VERSION', 'Do not use from options file', -2),
131  # finer control over library building, intel aggressive global optimisation    ('pythoncmd', 'which python to compile with','python'),
132  # works with dynamic libraries on windows.    ('usepython3', 'Is this a python3 build? (experimental)', False),
133    ('share_esysUtils', 'control static or dynamic esysUtils lib', False),    ('pythonlibname', 'Name of the python library to link. (This is found automatically for python2.X.)', ''),
134    ('share_paso', 'control static or dynamic paso lib', False),    ('pythonlibpath', 'Path to the python library. (You should not need to set this unless your python has moved)',''),
135    ('env_export','Environment variables to be passed to children',[]),    ('pythonincpath','Path to python include files. (You should not need to set this unless your python has moved)',''),
136  #To enable passing function pointers through python    BoolVariable('BADPYTHONMACROS','Extra \#include to get around a python bug.', True),
   BoolVariable('iknowwhatimdoing','allow nonstandard C',False)  
137  )  )
138    
139    ##################### Create environment and help text #######################
140    
141  ###################  # Intel's compiler uses regular expressions improperly and emits a warning
142    # about failing to find the compilers. This warning can be safely ignored.
 # This is only to support old versions of scons which don't accept  
 # the variant_dir parameter (older than 0.98 I think).  
 # Once these are no longer an issue we can go back to a direct call  
 # to obj.SConscript  
 import SCons  
 vs=SCons.__version__.split('.')  
 cantusevariantdir=float(vs[0]+'.'+vs[1])<0.98  
   
   
 def CallSConscript(obj, **kw):  
     if cantusevariantdir:  
         if 'variant_dir' in kw:  
         kw['build_dir']=kw['variant_dir']  
         del kw['variant_dir']  
     obj.SConscript(**kw)  
   
   
 ############ Specify which compilers to use ####################  
   
 # intelc uses regular expressions improperly and emits a warning about  
 # failing to find the compilers.  This warning can be safely ignored.  
   
 if IS_WINDOWS_PLATFORM:  
       env = Environment(options = opts)  
       env = Environment(tools = ['default'] + env['tools_names'],  
                         options = opts)  
 else:  
    if os.uname()[4]=='ia64':  
       env = Environment(tools = ['default', 'intelc'], options = opts)  
       if env['CXX'] == 'icpc':  
          env['LINK'] = env['CXX'] # version >=9 of intel c++ compiler requires use of icpc to link in C++ runtimes (icc does not)  
    else:  
       env = Environment(tools = ['default'], options = opts)  
       if env['tools_names']!='default':  
     env=Environment(tools = ['default'] +env['tools_names'], options=opts)  
   
 # Override compiler choice if provided  
 if env['cc'] != 'DEFAULT': env['CC']=env['cc']  
 if env['cxx'] != 'DEFAULT': env['CXX']=env['cxx']  
   
 Help(opts.GenerateHelpText(env))  
   
 ############ Make sure target directories exist ################  
143    
144    # PATH is needed so the compiler, linker and tools are found if they are not
145    # in default locations.
146    env = Environment(tools = ['default'], options = vars,
147                      ENV = {'PATH': os.environ['PATH']})
148                      
149    
150    #set the vars for clang
151    def mkclang(env):
152      env['CC']='clang'
153      env['CXX']='clang++'
154                      
155                      
156    if env['tools_names'] != 'default':
157        zz=env['tools_names']
158        if 'clang' in zz:
159            zz.remove('clang')
160            zz.insert(0, mkclang)
161        env = Environment(tools = ['default'] + env['tools_names'], options = vars,
162                          ENV = {'PATH' : os.environ['PATH']})
163    
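A sketch of how the tools_names option interacts with the clang substitution above; the options file shown is hypothetical:

    # scons/<hostname>_options.py
    tools_names = ['clang']   # swapped for the mkclang() tool function above,
                              # which sets env['CC']='clang' and env['CXX']='clang++'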
164    if options_file:
165        opts_valid=False
166        if 'escript_opts_version' in env.Dictionary() and \
167            int(env['escript_opts_version']) >= REQUIRED_OPTS_VERSION:
168                opts_valid=True
169        if opts_valid:
170            print("Using options in %s." % options_file)
171        else:
172            print("\nOptions file %s" % options_file)
173            print("is outdated! Please update the file by examining one of the TEMPLATE")
174            print("files in the scons/ subdirectory and setting escript_opts_version to %d.\n"%REQUIRED_OPTS_VERSION)
175            Exit(1)
176    
177    # Generate help text (scons -h)
178    Help(vars.GenerateHelpText(env))
179    
180    # Check for superfluous options
181    if len(vars.UnknownVariables())>0:
182        for k in vars.UnknownVariables():
183            print("Unknown option '%s'" % k)
184        Exit(1)
185    
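A minimal sketch of an options file that passes the version check above; all values are placeholders, not recommendations. Individual settings can also be overridden on the scons command line (e.g. scons openmp=1 debug=1).

    # scons/<hostname>_options.py (illustrative values only)
    escript_opts_version = 201          # must be at least REQUIRED_OPTS_VERSION
    prefix = '/home/user/esys'          # hypothetical install prefix
    openmp = True
    boost_prefix = '/usr/local'         # hypothetical boost location
    boost_libs = ['boost_python-mt']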
186    #################### Make sure install directories exist #####################
187    
188    env['BUILD_DIR']=Dir(env['build_dir']).abspath
189    prefix=Dir(env['prefix']).abspath
190    env['incinstall'] = os.path.join(prefix, 'include')
191    env['bininstall'] = os.path.join(prefix, 'bin')
192    env['libinstall'] = os.path.join(prefix, 'lib')
193    env['pyinstall']  = os.path.join(prefix, 'esys')
194  if not os.path.isdir(env['bininstall']):  if not os.path.isdir(env['bininstall']):
195      os.makedirs(env['bininstall'])      os.makedirs(env['bininstall'])
196  if not os.path.isdir(env['libinstall']):  if not os.path.isdir(env['libinstall']):
# Line 239 (rev 3178) / Line 198 (rev 4249)  if not os.path.isdir(env['libinstall']):
198  if not os.path.isdir(env['pyinstall']):  if not os.path.isdir(env['pyinstall']):
199      os.makedirs(env['pyinstall'])      os.makedirs(env['pyinstall'])
200    
201  ########## Copy required environment vars ######################  env.Append(CPPPATH = [env['incinstall']])
202    env.Append(LIBPATH = [env['libinstall']])
 for i in env['env_export']:  
    env.Append(ENV = {i:os.environ[i]})  
   
 ############ Fill in compiler options if not set above #########  
   
 # Backwards compatibility: allow dodebug=yes and useMPI=yes  
 if env['dodebug']: env['usedebug'] = 1  
 if env['useMPI']: env['usempi'] = 1  
   
 # Default compiler options (override allowed in hostname_options.py, but should not be necessary)  
 # For both C and C++ you get: cc_flags and either the optim flags or debug flags  
   
 sysheaderopt = ""       # how do we indicate that a header is a system header. Use "" for no action.  
   
 cc_flags = ""  
 cc_optim = ""  
 cc_debug = ""  
 omp_optim = ""  
 omp_debug = ""  
 omp_libs = []  
   
 if env["CC"] == "icc":  
   # Intel compilers  
   cc_flags      = "-std=c99 -fPIC -wd161 -w1 -vec-report0 -DBLOCKTIMER -DCORE_ID1"  
   cc_optim      = "-O3 -ftz -IPF_ftlacc- -IPF_fma -fno-alias -ip"  
   cc_debug      = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"  
   omp_optim     = "-openmp -openmp_report0"  
   omp_debug     = "-openmp -openmp_report0"  
   omp_libs      = ['guide', 'pthread']  
   pedantic      = ""  
   fatalwarning      = ""        # Switch to turn warnings into errors  
   sysheaderopt      = ""  
 elif env["CC"][:3] == "gcc":  
   # GNU C on any system  
   cc_flags      = "-pedantic -Wall -fPIC -ffast-math -Wno-unknown-pragmas -DBLOCKTIMER  -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"  
 #the long long warning occurs on the Mac  
   cc_optim      = "-O3"  
   cc_debug      = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"  
   omp_optim     = "-fopenmp"  
   omp_debug     = "-fopenmp"  
   omp_libs      = []  
   pedantic      = "-pedantic-errors -Wno-long-long"  
   fatalwarning      = "-Werror"  
   sysheaderopt      = "-isystem "  
 elif env["CC"] == "cl":  
   # Microsoft Visual C on Windows  
   cc_flags      = "/FD /EHsc /GR /wd4068 -D_USE_MATH_DEFINES -DDLL_NETCDF"  
   cc_optim      = "/O2 /Op /MT /W3"  
   cc_debug      = "/Od /RTC1 /MTd /ZI -DBOUNDS_CHECK"  
   omp_optim     = ""  
   omp_debug     = ""  
   omp_libs      = []  
   pedantic      = ""  
   fatalwarning      = ""  
   sysheaderopt      = ""  
 elif env["CC"] == "icl":  
   # intel C on Windows, see windows_intelc_options.py for a start  
   pedantic      = ""  
   fatalwarning      = ""  
   sysheaderopt      = ""  
   
   
 # If not specified in hostname_options.py then set them here  
 if env["cc_flags"]  == "-DEFAULT_1": env['cc_flags'] = cc_flags  
 if env["cc_optim"]  == "-DEFAULT_2": env['cc_optim'] = cc_optim  
 if env["cc_debug"]  == "-DEFAULT_3": env['cc_debug'] = cc_debug  
 if env["omp_optim"] == "-DEFAULT_4": env['omp_optim'] = omp_optim  
 if env["omp_debug"] == "-DEFAULT_5": env['omp_debug'] = omp_debug  
 if env["omp_libs"]  == "-DEFAULT_6": env['omp_libs'] = omp_libs  
   
 #set up the autolazy values  
 if env['forcelazy']    != "leave_alone":  
   if env['forcelazy'] == 'on':  
     env.Append(CPPDEFINES=['FAUTOLAZYON'])  
   else:  
      if env['forcelazy'] == 'off':  
     env.Append(CPPDEFINES=['FAUTOLAZYOFF'])  
   
#set up the collective resolve values
 if env['forcecollres']    != "leave_alone":  
   print env['forcecollres']  
   if env['forcecollres'] == 'on':  
     env.Append(CPPDEFINES=['FRESCOLLECTON'])  
   else:  
      if env['forcecollres'] == 'off':  
     env.Append(CPPDEFINES=['FRESCOLLECTOFF'])  
   
   
 if env['iknowwhatimdoing']:  
     env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])  
203    
204  # OpenMP is disabled if useopenmp=no or both variables omp_optim and omp_debug are empty  ################# Fill in compiler options if not set above ##################
 if not env["useopenmp"]:  
   env['omp_optim'] = ""  
   env['omp_debug'] = ""  
   env['omp_libs'] = []  
205    
206  if env['omp_optim'] == "" and env['omp_debug'] == "": env["useopenmp"] = 0  if env['cc'] != 'default': env['CC']=env['cc']
207    if env['cxx'] != 'default': env['CXX']=env['cxx']
208    
209  # Windows doesn't use LD_LIBRARY_PATH but PATH instead  # version >=9 of intel C++ compiler requires use of icpc to link in C++
210  if IS_WINDOWS_PLATFORM:  # runtimes (icc does not)
211      LD_LIBRARY_PATH_KEY='PATH'  if not IS_WINDOWS and os.uname()[4]=='ia64' and env['CXX']=='icpc':
212      env['ENV']['LD_LIBRARY_PATH']=''      env['LINK'] = env['CXX']
213  else:  
214      LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'  # default compiler/linker options
215  ############ Copy environment variables into scons env #########  cc_flags = ''
216    cc_optim = ''
217  try: env['ENV']['OMP_NUM_THREADS'] = os.environ['OMP_NUM_THREADS']  cc_debug = ''
218  except KeyError: env['ENV']['OMP_NUM_THREADS'] = 1  omp_flags = ''
219    omp_ldflags = ''
220  try: env['ENV']['ESCRIPT_NUM_THREADS'] = os.environ['ESCRIPT_NUM_THREADS']  fatalwarning = '' # switch to turn warnings into errors
221  except KeyError: pass  sysheaderopt = '' # how to indicate that a header is a system header
222    
223    # env['CC'] might be a full path
224    cc_name=os.path.basename(env['CC'])
225    
226    if cc_name == 'icc':
227        # Intel compiler
228        # #1875: offsetof applied to non-POD types is nonstandard (in boost)
229        cc_flags    = "-std=c99 -fPIC -w2 -wd1875 -Wno-unknown-pragmas -DBLOCKTIMER -DCORE_ID1"
230        cc_optim    = "-O3 -ftz -fno-alias -ipo -xHost"
231        cc_debug    = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
232        omp_flags   = "-openmp"
233        omp_ldflags = "-openmp -openmp_report=1"
234        fatalwarning = "-Werror"
235    elif cc_name[:3] == 'gcc':
236        # GNU C on any system
237        # note that -ffast-math is not used because it breaks isnan(),
238        # see mantis #691
239        cc_flags     = "-pedantic -Wall -fPIC -Wno-unknown-pragmas -DBLOCKTIMER  -Wno-sign-compare -Wno-system-headers -Wno-long-long -Wno-strict-aliasing -finline-functions"
240        cc_optim     = "-O3"
241        cc_debug     = "-g -O0 -DDOASSERT -DDOPROF -DBOUNDS_CHECK"
242        omp_flags    = "-fopenmp"
243        omp_ldflags  = "-fopenmp"
244        fatalwarning = "-Werror"
245        sysheaderopt = "-isystem"
246    elif cc_name == 'cl':
247        # Microsoft Visual C on Windows
248        cc_flags     = "/EHsc /MD /GR /wd4068 /D_USE_MATH_DEFINES /DDLL_NETCDF"
249        cc_optim     = "/O2 /Op /W3"
250        cc_debug     = "/Od /RTCcsu /ZI /DBOUNDS_CHECK"
251        fatalwarning = "/WX"
252    elif cc_name == 'icl':
253        # Intel C on Windows
254        cc_flags     = '/EHsc /GR /MD'
255        cc_optim     = '/fast /Oi /W3 /Qssp /Qinline-factor- /Qinline-min-size=0 /Qunroll'
256        cc_debug     = '/Od /RTCcsu /Zi /Y- /debug:all /Qtrapuv'
257        omp_flags    = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
258        omp_ldflags  = '/Qvec-report0 /Qopenmp /Qopenmp-report0 /Qparallel'
259    
260    # set defaults if not otherwise specified
261    if env['cc_flags']    == 'default': env['cc_flags'] = cc_flags
262    if env['cc_optim']    == 'default': env['cc_optim'] = cc_optim
263    if env['cc_debug']    == 'default': env['cc_debug'] = cc_debug
264    if env['omp_flags']   == 'default': env['omp_flags'] = omp_flags
265    if env['omp_ldflags'] == 'default': env['omp_ldflags'] = omp_ldflags
266    if env['cc_extra']  != '': env.Append(CFLAGS = env['cc_extra'])
267    if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])
268    if env['ld_extra']  != '': env.Append(LINKFLAGS = env['ld_extra'])
269    
270  try: env['ENV']['ESCRIPT_NUM_PROCS'] = os.environ['ESCRIPT_NUM_PROCS']  if env['BADPYTHONMACROS']: env.Append(CXXFLAGS = ' -DBADPYTHONMACROS')
 except KeyError: env['ENV']['ESCRIPT_NUM_PROCS']=1  
271    
272  try: env['ENV']['ESCRIPT_NUM_NODES'] = os.environ['ESCRIPT_NUM_NODES']  if env['usepython3']:
273  except KeyError: env['ENV']['ESCRIPT_NUM_NODES']=1      env.Append(CPPDEFINES=['ESPYTHON3'])
274    
275  try: env['ENV']['ESCRIPT_HOSTFILE'] = os.environ['ESCRIPT_HOSTFILE']  # set up the autolazy values
276  except KeyError: pass  if env['forcelazy'] == 'on':
277        env.Append(CPPDEFINES=['FAUTOLAZYON'])
278    elif env['forcelazy'] == 'off':
279        env.Append(CPPDEFINES=['FAUTOLAZYOFF'])
280    
281    # set up the collective resolve values
282    if env['forcecollres'] == 'on':
283        env.Append(CPPDEFINES=['FRESCOLLECTON'])
284    elif env['forcecollres'] == 'off':
285        env.Append(CPPDEFINES=['FRESCOLLECTOFF'])
286    
287  try: env['ENV']['PATH'] = os.environ['PATH']  # allow non-standard C if requested
288  except KeyError: pass  if env['iknowwhatimdoing']:
289        env.Append(CPPDEFINES=['IKNOWWHATIMDOING'])
290    
291  try: env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']  # Disable OpenMP if no flags provided
292  except KeyError: pass  if env['openmp'] and env['omp_flags'] == '':
293       print("OpenMP requested but no flags provided - disabling OpenMP!")
294       env['openmp'] = False
295    
296  try: env['ENV']['C_INCLUDE_PATH'] = os.environ['C_INCLUDE_PATH']  if env['openmp']:
297  except KeyError: pass      env.Append(CCFLAGS = env['omp_flags'])
298        if env['omp_ldflags'] != '': env.Append(LINKFLAGS = env['omp_ldflags'])
299    else:
300        env['omp_flags']=''
301        env['omp_ldflags']=''
302    
303  try: env['ENV']['CPLUS_INCLUDE_PATH'] = os.environ['CPLUS_INCLUDE_PATH']  # add debug/non-debug compiler flags
304  except KeyError: pass  if env['debug']:
305        env.Append(CCFLAGS = env['cc_debug'])
306    else:
307        env.Append(CCFLAGS = env['cc_optim'])
308    
309  try: env.PrependENVPath(LD_LIBRARY_PATH_KEY,os.environ['LD_LIBRARY_PATH'])  # always add cc_flags
310  except KeyError: pass  env.Append(CCFLAGS = env['cc_flags'])
311    
312  try: env['ENV']['LIBRARY_PATH'] = os.environ['LIBRARY_PATH']  # add system libraries
313  except KeyError: pass  env.AppendUnique(LIBS = env['sys_libs'])
314    
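To summarise the flag handling above: CCFLAGS is assembled from omp_flags (if openmp is enabled), then cc_debug or cc_optim, then cc_flags; cc_extra, cxx_extra and ld_extra were added to CFLAGS, CXXFLAGS and LINKFLAGS earlier. A sketch using the gcc defaults above with openmp=True and debug=False:

    ccflags = ' '.join(['-fopenmp',     # env['omp_flags']
                        '-O3',          # env['cc_optim'] (cc_debug if debug=True)
                        '-pedantic -Wall -fPIC ...'])  # env['cc_flags'], truncated here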
 try: env['ENV']['DISPLAY'] = os.environ['DISPLAY']  
 except KeyError: pass  
315    
316  try: env['ENV']['XAUTHORITY'] = os.environ['XAUTHORITY']  global_revision=ARGUMENTS.get('SVN_VERSION', None)
317  except KeyError: pass  if global_revision:
318        global_revision = re.sub(':.*', '', global_revision)
319        global_revision = re.sub('[^0-9]', '', global_revision)
320        if global_revision == '': global_revision='-2'
321    else:
322      # Get the global Subversion revision number for the getVersion() method
323      try:
324        global_revision = os.popen('svnversion -n .').read()
325        global_revision = re.sub(':.*', '', global_revision)
326        global_revision = re.sub('[^0-9]', '', global_revision)
327        if global_revision == '': global_revision='-2'
328      except:
329        global_revision = '-1'
330    env['svn_revision']=global_revision
331    env.Append(CPPDEFINES=['SVN_VERSION='+global_revision])
332    
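A standalone sketch of the sanitisation applied to the svnversion output above; the working-copy string is made up:

    import re
    raw = '4249:4250M'                  # hypothetical output of 'svnversion -n .'
    rev = re.sub(':.*', '', raw)        # drop the mixed-revision tail -> '4249'
    rev = re.sub('[^0-9]', '', rev)     # strip any remaining status letters
    if rev == '': rev = '-2'            # same fallback as the build script uses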
333    if IS_WINDOWS:
334        if not env['build_shared']:
335            env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])
336            env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])
337    
338  try: env['ENV']['HOME'] = os.environ['HOME']  ###################### Copy required environment vars ########################
 except KeyError: pass  
339    
340  # Configure for test suite  # Windows doesn't use LD_LIBRARY_PATH but PATH instead
341    if IS_WINDOWS:
342        LD_LIBRARY_PATH_KEY='PATH'
343        env['ENV']['LD_LIBRARY_PATH']=''
344    else:
345        LD_LIBRARY_PATH_KEY='LD_LIBRARY_PATH'
346    
347    # the following env variables are exported for the unit tests
348    
349  env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])  for key in 'OMP_NUM_THREADS', 'ESCRIPT_NUM_PROCS', 'ESCRIPT_NUM_NODES':
350  env.PrependENVPath('PYTHONPATH', prefix)      try:
351  env['ENV']['ESCRIPT_ROOT'] = prefix          env['ENV'][key] = os.environ[key]
352        except KeyError:
353            env['ENV'][key] = 1
354    
355    env_export=env['env_export']
356    env_export.extend(['ESCRIPT_NUM_THREADS','ESCRIPT_HOSTFILE','DISPLAY','XAUTHORITY','PATH','HOME','KMP_MONITOR_STACKSIZE','TMPDIR','TEMP','TMP'])
357    
358    for key in set(env_export):
359        try:
360            env['ENV'][key] = os.environ[key]
361        except KeyError:
362            pass
363    
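The env_export list above can be extended from the options file so that additional environment variables reach the unit tests; a sketch with a made-up variable name:

    # scons/<hostname>_options.py
    env_export = ['MY_LICENSE_SERVER']  # hypothetical; copied from os.environ
                                        # into env['ENV'] by the loop above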
364  ############ Set up paths for Configure() ######################  try:
365        env.PrependENVPath(LD_LIBRARY_PATH_KEY, os.environ[LD_LIBRARY_PATH_KEY])
366    except KeyError:
367        pass
368    
369  # Make a copy of an environment  # these shouldn't be needed
370  # Use env.Clone if available, but fall back on env.Copy for older version of scons  #for key in 'C_INCLUDE_PATH','CPLUS_INCLUDE_PATH','LIBRARY_PATH':
371  def clone_env(env):  #    try:
372    if 'Clone' in dir(env): return env.Clone()    # scons-0.98  #        env['ENV'][key] = os.environ[key]
373    else:                   return env.Copy() # scons-0.96  #    except KeyError:
374    #        pass
375    
376  # Add cc option -I<Escript>/trunk/include  try:
377  env.Append(CPPPATH      = [Dir('include')])      env['ENV']['PYTHONPATH'] = os.environ['PYTHONPATH']
378    except KeyError:
379        pass
380    
381  # Add cc option -L<Escript>/trunk/lib  ######################## Add some custom builders ############################
 env.Append(LIBPATH      = [Dir(env['libinstall'])])  
382    
383  if env['cc_extra'] != '': env.Append(CFLAGS = env['cc_extra'])  if env['pythoncmd']=='python':
384  if env['cxx_extra'] != '': env.Append(CXXFLAGS = env['cxx_extra'])      py_builder = Builder(action = build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)
385  if env['ld_extra'] != '': env.Append(LINKFLAGS = env['ld_extra'])  else:
386        py_builder = Builder(action = env['pythoncmd']+" scripts/py_comp.py $SOURCE $TARGET", suffix = '.pyc', src_suffix = '.py', single_source=True)
387    env.Append(BUILDERS = {'PyCompile' : py_builder});
388    
389  if env['usepedantic']: env.Append(CCFLAGS = pedantic)  runUnitTest_builder = Builder(action = runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)
390    env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});
391    
392  # MS Windows  runPyUnitTest_builder = Builder(action = runPyUnitTest, suffix = '.passed', src_suffix='.py', single_source=True)
393  if IS_WINDOWS_PLATFORM:  env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});
   env.AppendENVPath('PATH', [env['boost_lib_path']])  
   env.AppendENVPath('PATH', [env['libinstall']])  
   if not env['share_esysUtils'] :  
     env.Append(CPPDEFINES = ['ESYSUTILS_STATIC_LIB'])  
   if not env['share_paso'] :  
     env.Append(CPPDEFINES = ['PASO_STATIC_LIB'])  
394    
395    if env['usenetcdf']:  epstopdfbuilder = Builder(action = eps2pdf, suffix='.pdf', src_suffix='.eps', single_source=True)
396      env.AppendENVPath('PATH',   [env['netCDF_lib_path']])  env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});
397    
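A sketch of how the builders registered above are typically invoked from an SConscript; the target names are illustrative, and build_py, runUnitTest, runPyUnitTest and eps2pdf are the action functions imported from site_init.py:

    # hypothetical SConscript fragment
    pyc    = env.PyCompile('utils.py')          # byte-compiles to utils.pyc
    passed = env.RunUnitTest('paso_UnitTest')   # touches paso_UnitTest.passed on success
    env.RunPyUnitTest('run_simplesolve.py')     # same idea for a python test script
    env.EpsToPDF('figure1.eps')                 # converts to figure1.pdf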
398  env.Append(ARFLAGS = env['ar_flags'])  ############################ Dependency checks ###############################
399    
400  # Get the global Subversion revision number for getVersion() method  # Create a Configure() environment to check for compilers and python
401  try:  conf = Configure(env.Clone())
    global_revision = os.popen("svnversion -n .").read()  
    global_revision = re.sub(":.*", "", global_revision)  
    global_revision = re.sub("[^0-9]", "", global_revision)  
 except:  
    global_revision="-1"  
 if global_revision == "": global_revision="-2"  
 env.Append(CPPDEFINES = ["SVN_VERSION="+global_revision])  
402    
403  ############ numpy (required) ###############################  ######## Test that the compilers work
404    
405  try:  if 'CheckCC' in dir(conf): # exists since scons 1.1.0
406    from numpy import identity      if not conf.CheckCC():
407  except ImportError:          print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
408    print "Cannot import numpy, you need to set your PYTHONPATH"          Exit(1)
409    sys.exit(1)      if not conf.CheckCXX():
410            print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
411  ############ C compiler (required) #############################          Exit(1)
412    else:
413  # Create a Configure() environment for checking existence of required libraries and headers      if not conf.CheckFunc('printf', language='c'):
414  conf = Configure(clone_env(env))          print("Cannot run C compiler '%s' (check config.log)" % (env['CC']))
415            Exit(1)
416  # Test that the compiler is working      if not conf.CheckFunc('printf', language='c++'):
417  if not conf.CheckFunc('printf'):          print("Cannot run C++ compiler '%s' (check config.log)" % (env['CXX']))
418     print "Cannot run C compiler '%s' (or libc is missing)" % (env['CC'])          Exit(1)
    sys.exit(1)  
419    
420  if conf.CheckFunc('gethostname'):  if conf.CheckFunc('gethostname'):
421    conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])      conf.env.Append(CPPDEFINES = ['HAVE_GETHOSTNAME'])
   
 ############ python libraries (required) #######################  
422    
423    ######## Python headers & library (required)
424    
425  if not sysheaderopt =="":  #First we check to see if the config file has specified
426    conf.env.Append(CCFLAGS=sysheaderopt+env['python_path'])  # Where to find the file. Ideally, this should be automatic
427  else:  #But we need to deal with the case where python is not in its INSTALL
428    conf.env.AppendUnique(CPPPATH     = [env['python_path']])  #Directory
429    # Use the python scons is running
430  conf.env.AppendUnique(LIBPATH       = [env['python_lib_path']])  if env['pythoncmd']=='python':
431  conf.env.AppendUnique(LIBS      = [env['python_libs']])      python_inc_path=sysconfig.get_python_inc()
432        if IS_WINDOWS:
433  conf.env.PrependENVPath('PYTHONPATH', prefix)          python_lib_path=os.path.join(sysconfig.get_config_var('prefix'), 'libs')
434  conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['python_lib_path'])    # The wrapper script needs to find these libs      elif env['PLATFORM']=='darwin':
435  conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])          python_lib_path=sysconfig.get_config_var('LIBPL')
436        else:
437            python_lib_path=sysconfig.get_config_var('LIBDIR')
438    
439        #python_libs=[sysconfig.get_config_var('LDLIBRARY')] # only on linux
440        if IS_WINDOWS:
441            python_libs=['python%s%s'%(sys.version_info[0], sys.version_info[1])]
442        else:
443            python_libs=['python'+sysconfig.get_python_version()]
444    
445    #if we want to use a python other than the one scons is running
446    else:
447        initstring='from __future__ import print_function;from distutils import sysconfig;'
448        if env['pythonlibname']!='':
449            python_libs=env['pythonlibname']
450        else:   # work it out by calling python    
451            if IS_WINDOWS:
452                cmd='print("python%s%s"%(sys.version_info[0], sys.version_info[1]))'
453            else:
454                cmd='print("python"+sysconfig.get_python_version())'
455            p=Popen([env['pythoncmd'], '-c', initstring+cmd], stdout=PIPE)
456            python_libs=p.stdout.readline()
457            if env['usepython3']:       # This is to convert unicode str into py2 string
458                python_libs=python_libs.encode() # If scons runs on py3 then this must be rethought
459            p.wait()
460            python_libs=python_libs.strip()
461    
462      
463        # Now we know whether we are using python3 or not
464        p=Popen([env['pythoncmd'], '-c',  initstring+'print(sysconfig.get_python_inc())'], stdout=PIPE)
465        python_inc_path=p.stdout.readline()
466        if env['usepython3']:
467             python_inc_path=python_inc_path.encode()
468        p.wait()  
469        python_inc_path=python_inc_path.strip()
470        if IS_WINDOWS:
471            cmd="os.path.join(sysconfig.get_config_var('prefix'), 'libs')"
472        elif env['PLATFORM']=='darwin':
473            cmd="sysconfig.get_config_var(\"LIBPL\")"
474        else:
475            cmd="sysconfig.get_config_var(\"LIBDIR\")"
476    
477        p=Popen([env['pythoncmd'], '-c', initstring+'print('+cmd+')'], stdout=PIPE)
478        python_lib_path=p.stdout.readline()
479        if env['usepython3']:
480            python_lib_path=python_lib_path.decode()
481        p.wait()
482        python_lib_path=python_lib_path.strip()
483    
484    #Check for an override from the config file.
485    #Ideally, this should be automatic
486    #But we need to deal with the case where python is not in its INSTALL
487    #Directory
488    if env['pythonlibpath']!='':
489        python_lib_path=env['pythonlibpath']
490    
491    if env['pythonincpath']!='':
492        python_inc_path=env['pythonincpath']
493    
494    
495    if sysheaderopt == '':
496        conf.env.AppendUnique(CPPPATH = [python_inc_path])
497    else:
498        conf.env.Append(CCFLAGS = [sysheaderopt, python_inc_path])
499    
500    conf.env.AppendUnique(LIBPATH = [python_lib_path])
501    conf.env.AppendUnique(LIBS = python_libs)
502    # The wrapper script needs to find the libs
503    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, python_lib_path)
504    
505  if not conf.CheckCHeader('Python.h'):  if not conf.CheckCHeader('Python.h'):
506    print "Cannot find python include files (tried 'Python.h' in directory %s)" % (env['python_path'])      print("Cannot find python include files (tried 'Python.h' in directory %s)" % (python_inc_path))
507    sys.exit(1)      Exit(1)
508  if not conf.CheckFunc('Py_Exit'):  if not conf.CheckFunc('Py_Exit'):
509    print "Cannot find python library method Py_Main (tried lib %s in directory %s)" % (env['python_libs'], env['python_lib_path'])      print("Cannot find python library method Py_Main (tried %s in directory %s)" % (python_libs, python_lib_path))
510    sys.exit(1)      Exit(1)
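A standalone sketch of the subprocess pattern used above when pythoncmd names a different interpreter; the interpreter path is hypothetical:

    from subprocess import PIPE, Popen
    pythoncmd = '/opt/python/bin/python'   # hypothetical interpreter
    init = 'from __future__ import print_function;from distutils import sysconfig;'
    p = Popen([pythoncmd, '-c', init+'print(sysconfig.get_python_inc())'], stdout=PIPE)
    python_inc_path = p.stdout.readline().strip()
    p.wait()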
   
 ############ boost (required) ##################################  
   
 if not sysheaderopt =="":  
 # This is required because we can't -isystem /usr/system because it breaks std includes  
   if os.path.normpath(env['boost_path']) =="/usr/include":  
     conf.env.Append(CCFLAGS=sysheaderopt+os.path.join(env['boost_path'],'boost'))  
   else:  
     conf.env.Append(CCFLAGS=sysheaderopt+env['boost_path'])  
 else:  
   conf.env.AppendUnique(CPPPATH     = [env['boost_path']])  
   
 conf.env.AppendUnique(LIBPATH       = [env['boost_lib_path']])  
 conf.env.AppendUnique(LIBS      = [env['boost_libs']])  
   
 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['boost_lib_path']) # The wrapper script needs to find these libs  
 #ensure that our path entries remain at the front  
 conf.env.PrependENVPath('PYTHONPATH', prefix)  
 conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])  
   
   
#Yep, we still can't figure this one out - working on it.
 if not IS_WINDOWS_PLATFORM:  
   if not conf.CheckCXXHeader('boost/python.hpp'):  
     print "Cannot find boost include files (tried boost/python.hpp in directory %s)" % (env['boost_path'])  
     sys.exit(1)  
   
   if not conf.CheckFunc('PyObject_SetAttr'):  
     print "Cannot find boost library method PyObject_SetAttr (tried method PyObject_SetAttr in library %s in directory %s)" % (env['boost_libs'], env['boost_lib_path'])  
     sys.exit(1)  
511    
512    ## reuse conf to check for numpy header (optional)
513    if env['usepython3']:
514        # FIXME: This is until we can work out how to make the checks in python 3
515        conf.env['numpy_h']=False
516    else:
517        if conf.CheckCXXHeader(['Python.h','numpy/ndarrayobject.h']):
518            conf.env.Append(CPPDEFINES = ['HAVE_NUMPY_H'])
519            conf.env['numpy_h']=True
520        else:
521            conf.env['numpy_h']=False
522    
523  # Commit changes to environment  # Commit changes to environment
524  env = conf.Finish()  env = conf.Finish()
525    
526  ############ VTK (optional) ####################################  ######## boost (required)
   
 if env['usevtk']:  
   try:  
     import vtk  
     env['usevtk'] = 1  
   except ImportError:  
     env['usevtk'] = 0  
   
 # Add VTK to environment env if it was found  
 if env['usevtk']:  
   env.Append(CPPDEFINES = ['USE_VTK'])  
   
 ############ NetCDF (optional) #################################  
   
 conf = Configure(clone_env(env))  
   
 if env['usenetcdf']:  
   conf.env.AppendUnique(CPPPATH = [env['netCDF_path']])  
   conf.env.AppendUnique(LIBPATH = [env['netCDF_lib_path']])  
   conf.env.AppendUnique(LIBS    = [env['netCDF_libs']])  
   conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['netCDF_lib_path'])  # The wrapper script needs to find these libs  
   #ensure that our path entries remain at the front  
   conf.env.PrependENVPath('PYTHONPATH', prefix)  
   conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])  
   
 if env['usenetcdf'] and not conf.CheckCHeader('netcdf.h'): env['usenetcdf'] = 0  
 if env['usenetcdf'] and not conf.CheckFunc('nc_open'): env['usenetcdf'] = 0  
   
 # Add NetCDF to environment env if it was found  
 if env['usenetcdf']:  
   env = conf.Finish()  
   env.Append(CPPDEFINES = ['USE_NETCDF'])  
 else:  
   conf.Finish()  
   
 ############ PAPI (optional) ###################################  
527    
528  # Start a new configure environment that reflects what we've already found  boost_inc_path,boost_lib_path=findLibWithHeader(env, env['boost_libs'], 'boost/python.hpp', env['boost_prefix'], lang='c++')
529  conf = Configure(clone_env(env))  if sysheaderopt == '':
530        env.AppendUnique(CPPPATH = [boost_inc_path])
531    else:
532        # This is required because we can't -isystem /usr/include since it breaks
533        # std includes
534        if os.path.normpath(boost_inc_path) == '/usr/include':
535            conf.env.Append(CCFLAGS=[sysheaderopt, os.path.join(boost_inc_path,'boost')])
536        else:
537            env.Append(CCFLAGS=[sysheaderopt, boost_inc_path])
538    
539    env.AppendUnique(LIBPATH = [boost_lib_path])
540    env.AppendUnique(LIBS = env['boost_libs'])
541    env.PrependENVPath(LD_LIBRARY_PATH_KEY, boost_lib_path)
542    
543    ######## numpy (required)
544    
545    if not detectModule(env, 'numpy'):
546        print("Cannot import numpy. If it is installed try setting your PYTHONPATH and probably %s"%LD_LIBRARY_PATH_KEY)
547        Exit(1)
548    
549  if env['usepapi']:  ######## CppUnit (required for tests)
   conf.env.AppendUnique(CPPPATH = [env['papi_path']])  
   conf.env.AppendUnique(LIBPATH = [env['papi_lib_path']])  
   conf.env.AppendUnique(LIBS    = [env['papi_libs']])  
   conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['papi_lib_path'])    # The wrapper script needs to find these libs  
   #ensure that our path entries remain at the front  
   conf.env.PrependENVPath('PYTHONPATH', prefix)  
   conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])  
   
 if env['usepapi'] and not conf.CheckCHeader('papi.h'): env['usepapi'] = 0  
 if env['usepapi'] and not conf.CheckFunc('PAPI_start_counters'): env['usepapi'] = 0  
   
 # Add PAPI to environment env if it was found  
 if env['usepapi']:  
   env = conf.Finish()  
   env.Append(CPPDEFINES = ['BLOCKPAPI'])  
 else:  
   conf.Finish()  
550    
551  ############ MKL (optional) ####################################  try:
552        cppunit_inc_path,cppunit_lib_path=findLibWithHeader(env, env['cppunit_libs'], 'cppunit/TestFixture.h', env['cppunit_prefix'], lang='c++')
553        env.AppendUnique(CPPPATH = [cppunit_inc_path])
554        env.AppendUnique(LIBPATH = [cppunit_lib_path])
555        env.PrependENVPath(LD_LIBRARY_PATH_KEY, cppunit_lib_path)
556        env['cppunit']=True
557    except:
558        env['cppunit']=False
559    
560  # Start a new configure environment that reflects what we've already found  ######## sympy (optional)
 conf = Configure(clone_env(env))  
561    
562  if env['usemkl']:  if detectModule(env, 'sympy'):
563    conf.env.AppendUnique(CPPPATH = [env['mkl_path']])      env['sympy'] = True
   conf.env.AppendUnique(LIBPATH = [env['mkl_lib_path']])  
   conf.env.AppendUnique(LIBS    = [env['mkl_libs']])  
   conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['mkl_lib_path']) # The wrapper script needs to find these libs  
   #ensure that our path entries remain at the front  
   conf.env.PrependENVPath('PYTHONPATH', prefix)  
   conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])  
   
 if env['usemkl'] and not conf.CheckCHeader('mkl_solver.h'): env['usemkl'] = 0  
 if env['usemkl'] and not conf.CheckFunc('pardiso'): env['usemkl'] = 0  
   
   
 # Add MKL to environment env if it was found  
 if env['usemkl']:  
   env = conf.Finish()  
   env.Append(CPPDEFINES = ['MKL'])  
564  else:  else:
565    conf.Finish()      print("Cannot import sympy. Symbolic toolbox and nonlinear PDEs will not be available.")
566        env['sympy'] = False
567  ############ UMFPACK (optional) ################################  
568    ######## pyproj (optional)
569    
570    if detectModule(env, 'pyproj'):
571        env['pyproj'] = True
572    else:
573        print("Cannot import pyproj. Inversions may not work.")
574        env['pyproj'] = False
575    
576    ######## netCDF (optional)
577    
578    netcdf_inc_path=''
579    netcdf_lib_path=''
580    if env['netcdf']:
581        netcdf_inc_path,netcdf_lib_path=findLibWithHeader(env, env['netcdf_libs'], 'netcdf.h', env['netcdf_prefix'], lang='c++')
582        env.AppendUnique(CPPPATH = [netcdf_inc_path])
583        env.AppendUnique(LIBPATH = [netcdf_lib_path])
584        env.AppendUnique(LIBS = env['netcdf_libs'])
585        env.PrependENVPath(LD_LIBRARY_PATH_KEY, netcdf_lib_path)
586        env.Append(CPPDEFINES = ['USE_NETCDF'])
587    
588    ######## PAPI (optional)
589    
590    papi_inc_path=''
591    papi_lib_path=''
592    if env['papi']:
593        papi_inc_path,papi_lib_path=findLibWithHeader(env, env['papi_libs'], 'papi.h', env['papi_prefix'], lang='c')
594        env.AppendUnique(CPPPATH = [papi_inc_path])
595        env.AppendUnique(LIBPATH = [papi_lib_path])
596        env.AppendUnique(LIBS = env['papi_libs'])
597        env.PrependENVPath(LD_LIBRARY_PATH_KEY, papi_lib_path)
598        env.Append(CPPDEFINES = ['BLOCKPAPI'])
599    
600    ######## MKL (optional)
601    
602    mkl_inc_path=''
603    mkl_lib_path=''
604    if env['mkl']:
605        mkl_inc_path,mkl_lib_path=findLibWithHeader(env, env['mkl_libs'], 'mkl_solver.h', env['mkl_prefix'], lang='c')
606        env.AppendUnique(CPPPATH = [mkl_inc_path])
607        env.AppendUnique(LIBPATH = [mkl_lib_path])
608        env.AppendUnique(LIBS = env['mkl_libs'])
609        env.PrependENVPath(LD_LIBRARY_PATH_KEY, mkl_lib_path)
610        env.Append(CPPDEFINES = ['MKL'])
611    
612    ######## UMFPACK (optional)
613    
614    umfpack_inc_path=''
615    umfpack_lib_path=''
616    if env['umfpack']:
617        umfpack_inc_path,umfpack_lib_path=findLibWithHeader(env, env['umfpack_libs'], 'umfpack.h', env['umfpack_prefix'], lang='c')
618        env.AppendUnique(CPPPATH = [umfpack_inc_path])
619        env.AppendUnique(LIBPATH = [umfpack_lib_path])
620        env.AppendUnique(LIBS = env['umfpack_libs'])
621        env.PrependENVPath(LD_LIBRARY_PATH_KEY, umfpack_lib_path)
622        env.Append(CPPDEFINES = ['UMFPACK'])
623    
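All of the optional solver packages above follow the same pattern: a boolean switch, a prefix and a library list, resolved through findLibWithHeader() from site_init.py. An illustrative options-file fragment; the paths are placeholders:

    # scons/<hostname>_options.py (illustrative)
    mkl = True
    mkl_prefix = '/opt/intel/mkl'       # hypothetical install location
    umfpack = True
    umfpack_prefix = '/usr'             # umfpack.h under include/, libs under lib/
    lapack = 'mkl'                      # one of 'none', 'clapack', 'mkl'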
624    ######## LAPACK (optional)
625    
626    if env['lapack']=='mkl' and not env['mkl']:
627        print("mkl_lapack requires MKL!")
628        Exit(1)
629    
630    env['uselapack'] = env['lapack']!='none'
631    lapack_inc_path=''
632    lapack_lib_path=''
633    if env['uselapack']:
634        header='clapack.h'
635        if env['lapack']=='mkl':
636            env.AppendUnique(CPPDEFINES = ['MKL_LAPACK'])
637            header='mkl_lapack.h'
638        lapack_inc_path,lapack_lib_path=findLibWithHeader(env, env['lapack_libs'], header, env['lapack_prefix'], lang='c')
639        env.AppendUnique(CPPPATH = [lapack_inc_path])
640        env.AppendUnique(LIBPATH = [lapack_lib_path])
641        env.AppendUnique(LIBS = env['lapack_libs'])
642        env.Append(CPPDEFINES = ['USE_LAPACK'])
643    
644    ######## Silo (optional)
645    
646    silo_inc_path=''
647    silo_lib_path=''
648    if env['silo']:
649        silo_inc_path,silo_lib_path=findLibWithHeader(env, env['silo_libs'], 'silo.h', env['silo_prefix'], lang='c')
650        env.AppendUnique(CPPPATH = [silo_inc_path])
651        env.AppendUnique(LIBPATH = [silo_lib_path])
652        # Note that we do not add the libs since they are only needed for the
653        # weipa library and tools.
654        #env.AppendUnique(LIBS = [env['silo_libs']])
655    
656    ######## VSL random numbers (optional)
657    if env['vsl_random']:
658        env.Append(CPPDEFINES = ['MKLRANDOM'])
659    
660    ######## VisIt (optional)
661    
662    visit_inc_path=''
663    visit_lib_path=''
664    if env['visit']:
665        visit_inc_path,visit_lib_path=findLibWithHeader(env, env['visit_libs'], 'VisItControlInterface_V2.h', env['visit_prefix'], lang='c')
666        env.AppendUnique(CPPPATH = [visit_inc_path])
667        env.AppendUnique(LIBPATH = [visit_lib_path])
668    
669    ######## MPI (optional)
670    
671    if env['mpi']=='no':
672        env['mpi']='none'
673    
674    env['usempi'] = env['mpi']!='none'
675    mpi_inc_path=''
676    mpi_lib_path=''
677    if env['usempi']:
678        mpi_inc_path,mpi_lib_path=findLibWithHeader(env, env['mpi_libs'], 'mpi.h', env['mpi_prefix'], lang='c')
679        env.AppendUnique(CPPPATH = [mpi_inc_path])
680        env.AppendUnique(LIBPATH = [mpi_lib_path])
681        env.AppendUnique(LIBS = env['mpi_libs'])
682        env.PrependENVPath(LD_LIBRARY_PATH_KEY, mpi_lib_path)
683        env.Append(CPPDEFINES = ['ESYS_MPI', 'MPI_NO_CPPBIND', 'MPICH_IGNORE_CXX_SEEK'])
684        # NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined!
685        # On the other hand MPT and OpenMPI don't define the latter so we have to
686        # do that here
687        if env['netcdf'] and env['mpi'] in ['MPT','OPENMPI']:
688            env.Append(CPPDEFINES = ['MPI_INCLUDED'])
689    
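Unlike the earlier revision, which validated mpi_flavour against a fixed list (see the removed block further down), the new code only maps 'no' to 'none' and otherwise trusts the value of the mpi option. If that sanity check is still wanted, a sketch along the old lines (placement and wording hypothetical, flavour list taken from the removed code) would be:

    # Hypothetical guard: reject unknown values of the 'mpi' option early.
    VALID_MPIs = ['none', 'MPT', 'MPICH', 'MPICH2', 'OPENMPI', 'INTELMPI']
    if env['mpi'] not in VALID_MPIs:
        print("Unknown MPI flavour '%s', expected one of %s" % (env['mpi'], VALID_MPIs))
        Exit(1)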
690    ######## BOOMERAMG (optional)
691    
692    if env['mpi'] == 'none': env['boomeramg'] = False
693    
694    boomeramg_inc_path=''
695    boomeramg_lib_path=''
696    if env['boomeramg']:
697        boomeramg_inc_path,boomeramg_lib_path=findLibWithHeader(env, env['boomeramg_libs'], 'HYPRE.h', env['boomeramg_prefix'], lang='c')
698        env.AppendUnique(CPPPATH = [boomeramg_inc_path])
699        env.AppendUnique(LIBPATH = [boomeramg_lib_path])
700        env.AppendUnique(LIBS = env['boomeramg_libs'])
701        env.PrependENVPath(LD_LIBRARY_PATH_KEY, boomeramg_lib_path)
702        env.Append(CPPDEFINES = ['BOOMERAMG'])
703    
704    ######## ParMETIS (optional)
705    
706    if not env['usempi']: env['parmetis'] = False
707    
708    parmetis_inc_path=''
709    parmetis_lib_path=''
710    if env['parmetis']:
711        parmetis_inc_path,parmetis_lib_path=findLibWithHeader(env, env['parmetis_libs'], 'parmetis.h', env['parmetis_prefix'], lang='c')
712        env.AppendUnique(CPPPATH = [parmetis_inc_path])
713        env.AppendUnique(LIBPATH = [parmetis_lib_path])
714        env.AppendUnique(LIBS = env['parmetis_libs'])
715        env.PrependENVPath(LD_LIBRARY_PATH_KEY, parmetis_lib_path)
716        env.Append(CPPDEFINES = ['USE_PARMETIS'])
717    
718  # Start a new configure environment that reflects what we've already found  ######## gmsh (optional, for tests)
 conf = Configure(clone_env(env))  
719    
720  if env['useumfpack']:  try:
721    conf.env.AppendUnique(CPPPATH = [env['ufc_path']])      p=Popen(['gmsh', '-info'], stderr=PIPE)
722    conf.env.AppendUnique(CPPPATH = [env['umf_path']])      _,e=p.communicate()
723    conf.env.AppendUnique(LIBPATH = [env['umf_lib_path']])      if e.split().count("MPI"):
724    conf.env.AppendUnique(LIBS    = [env['umf_libs']])          env['gmsh']='m'
725    conf.env.AppendUnique(CPPPATH = [env['amd_path']])      else:
726    conf.env.AppendUnique(LIBPATH = [env['amd_lib_path']])          env['gmsh']='s'
727    conf.env.AppendUnique(LIBS    = [env['amd_libs']])  except OSError:
728    conf.env.AppendUnique(CPPPATH = [env['blas_path']])      env['gmsh']=False
729    conf.env.AppendUnique(LIBPATH = [env['blas_lib_path']])  
730    conf.env.AppendUnique(LIBS    = [env['blas_libs']])  ######## PDFLaTeX (for documentation)
731    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['umf_lib_path']) # The wrapper script needs to find these libs  if 'PDF' in dir(env) and '.tex' in env.PDF.builder.src_suffixes(env):
732    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['amd_lib_path']) # The wrapper script needs to find these libs      env['pdflatex']=True
   conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['blas_lib_path'])    # The wrapper script needs to find these libs  
   #ensure that our path entries remain at the front  
   conf.env.PrependENVPath('PYTHONPATH', prefix)  
   conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])  
   
 if env['useumfpack'] and not conf.CheckCHeader('umfpack.h'): env['useumfpack'] = 0  
 if env['useumfpack'] and not conf.CheckFunc('umfpack_di_symbolic'): env['useumfpack'] = 0  
 # if env['useumfpack'] and not conf.CheckFunc('daxpy'): env['useumfpack'] = 0 # this does not work on shake73?  
   
 # Add UMFPACK to environment env if it was found  
 if env['useumfpack']:  
   env = conf.Finish()  
   env.Append(CPPDEFINES = ['UMFPACK'])  
733  else:  else:
734    conf.Finish()      env['pdflatex']=False
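The gmsh probe above classifies the installed gmsh by scanning the stderr of 'gmsh -info' for the word MPI. A self-contained variant of the same idea (the helper name is made up; the comparison uses a bytes literal so it works on the raw stderr under both Python 2 and 3):

    from subprocess import PIPE, Popen

    def detect_gmsh():
        try:
            p = Popen(['gmsh', '-info'], stderr=PIPE)
            _, err = p.communicate()
        except OSError:        # gmsh not installed or not on the PATH
            return False
        if b'MPI' in err:      # MPI-enabled gmsh build
            return 'm'
        return 's'             # serial gmsh build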
   
 ############ Silo (optional) ###################################  
735    
736  if env['usesilo']:  ######################## Summarize our environment ###########################
   conf = Configure(clone_env(env))  
   conf.env.AppendUnique(CPPPATH = [env['silo_path']])  
   conf.env.AppendUnique(LIBPATH = [env['silo_lib_path']])  
   conf.env.AppendUnique(LIBS = [env['silo_libs']])  
   if not conf.CheckCHeader('silo.h'): env['usesilo'] = 0  
   if not conf.CheckFunc('DBMkDir'): env['usesilo'] = 0  
   conf.Finish()  
   
 # Add the path to Silo to environment env if it was found.  
 # Note that we do not add the libs since they are only needed for the  
 # weipa library and tools.  
 if env['usesilo']:  
   env.AppendUnique(CPPPATH = [env['silo_path']])  
   env.AppendUnique(LIBPATH = [env['silo_lib_path']])  
   
 ############ VisIt (optional) ###################################  
   
 if env['usevisit']:  
   env.AppendUnique(CPPPATH = [env['visit_path']])  
   env.AppendUnique(LIBPATH = [env['visit_lib_path']])  
737    
738  ########### Lapack (optional) ##################################  # keep some of our install paths first in the list for the unit tests
739    env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])
740    env.PrependENVPath('PYTHONPATH', prefix)
741    env['ENV']['ESCRIPT_ROOT'] = prefix
742    
743  if env['uselapack']:  if not env['verbose']:
744      env.AppendUnique(CPPDEFINES='USE_LAPACK')      env['CCCOMSTR'] = "Compiling $TARGET"
745      env.AppendUnique(CPPPATH = [env['lapack_path']])      env['CXXCOMSTR'] = "Compiling $TARGET"
746      env.AppendUnique(LIBPATH =[env['lapack_lib_path']])      env['SHCCCOMSTR'] = "Compiling $TARGET"
747        env['SHCXXCOMSTR'] = "Compiling $TARGET"
748      env.Append(LIBPATH = '/usr/lib/atlas')      env['ARCOMSTR'] = "Linking $TARGET"
749      env.Append(LIBS = [env['lapack_libs']])      env['LINKCOMSTR'] = "Linking $TARGET"
750      if env['lapack_type']=='mkl':      env['SHLINKCOMSTR'] = "Linking $TARGET"
751         if not env['usemkl']:      env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
752          env['uselapack']=0      env['BIBTEXCOMSTR'] = "Generating bibliography $TARGET"
753          print "mkl_lapack requires mkl"      env['MAKEINDEXCOMSTR'] = "Generating index $TARGET"
754         else:      env['PDFLATEXCOMSTR'] = "Building $TARGET from LaTeX input $SOURCES"
755          env.AppendUnique(CPPDEFINES='MKL_LAPACK')      #Progress(['Checking -\r', 'Checking \\\r', 'Checking |\r', 'Checking /\r'], interval=17)
756          
757    print("")
758  ############ Add the compiler flags ############################  print("*** Config Summary (see config.log and lib/buildvars for details) ***")
759    print("Escript/Finley revision %s"%global_revision)
760  # Enable debug by choosing either cc_debug or cc_optim  print("  Install prefix:  %s"%env['prefix'])
761  if env['usedebug']:  print("          Python:  %s"%sysconfig.PREFIX)
762    env.Append(CCFLAGS        = env['cc_debug'])  print("           boost:  %s"%env['boost_prefix'])
763    env.Append(CCFLAGS        = env['omp_debug'])  print("           numpy:  YES")
764    if env['usempi']:
765        print("             MPI:  YES (flavour: %s)"%env['mpi'])
766  else:  else:
767    env.Append(CCFLAGS        = env['cc_optim'])      print("             MPI:  DISABLED")
768    env.Append(CCFLAGS        = env['omp_optim'])  if env['uselapack']:
769        print("          LAPACK:  YES (flavour: %s)"%env['lapack'])
 # Always use cc_flags  
 env.Append(CCFLAGS      = env['cc_flags'])  
 env.Append(LIBS         = [env['omp_libs']])  
   
 ############ Add some custom builders ##########################  
   
 py_builder = Builder(action = scons_extensions.build_py, suffix = '.pyc', src_suffix = '.py', single_source=True)  
 env.Append(BUILDERS = {'PyCompile' : py_builder});  
   
 runUnitTest_builder = Builder(action = scons_extensions.runUnitTest, suffix = '.passed', src_suffix=env['PROGSUFFIX'], single_source=True)  
 env.Append(BUILDERS = {'RunUnitTest' : runUnitTest_builder});  
   
 runPyUnitTest_builder = Builder(action = scons_extensions.runPyUnitTest, suffix = '.passed', src_suffic='.py', single_source=True)  
 env.Append(BUILDERS = {'RunPyUnitTest' : runPyUnitTest_builder});  
   
 epstopdfbuilder = Builder(action = scons_extensions.eps2pdf, suffix=".pdf", src_suffix=".eps", single_source=True)  
 env.Append(BUILDERS = {'EpsToPDF' : epstopdfbuilder});  
   
 ############ MPI (optional) ####################################  
 if not env['usempi']: env['mpi_flavour']='none'  
   
 # Create a modified environment for MPI programs (identical to env if usempi=no)  
 env_mpi = clone_env(env)  
   
 # Start a new configure environment that reflects what we've already found  
 conf = Configure(clone_env(env_mpi))  
   
 if env_mpi['usempi']:  
   VALID_MPIs=[ "MPT", "MPICH", "MPICH2", "OPENMPI", "INTELMPI" ]  
   if not env_mpi['mpi_flavour'] in VALID_MPIs:  
       raise ValueError,"MPI is enabled but mpi_flavour = %s is not a valid key from %s."%( env_mpi['mpi_flavour'],VALID_MPIs)  
   conf.env.AppendUnique(CPPPATH = [env_mpi['mpi_path']])  
   conf.env.AppendUnique(LIBPATH = [env_mpi['mpi_lib_path']])  
   conf.env.AppendUnique(LIBS    = [env_mpi['mpi_libs']])  
   conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['mpi_lib_path']) # The wrapper script needs to find these libs  
   #ensure that our path entries remain at the front  
   conf.env.PrependENVPath('PYTHONPATH', prefix)  
   conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])  
   
 if env_mpi['usempi'] and not conf.CheckCHeader('mpi.h'): env_mpi['usempi'] = 0  
 # if env_mpi['usempi'] and not conf.CheckFunc('MPI_Init'): env_mpi['usempi'] = 0  
   
 # Add MPI to environment env_mpi if it was found  
 if env_mpi['usempi']:  
   env_mpi = conf.Finish()  
   env_mpi.Append(CPPDEFINES = ['PASO_MPI', 'MPI_NO_CPPBIND', env_mpi['MPICH_IGNORE_CXX_SEEK']])  
   # NetCDF 4.1 defines MPI_Comm et al. if MPI_INCLUDED is not defined!  
   # On the other hand MPT and OpenMPI don't define the latter so we have to  
   # do that here  
   if env['usenetcdf'] and env_mpi['mpi_flavour'] in ["MPT","OPENMPI"]:  
     env_mpi.Append(CPPDEFINES = ['MPI_INCLUDED'])  
770  else:  else:
771    conf.Finish()      print("          LAPACK:  DISABLED")
772    d_list=[]
773  env['usempi'] = env_mpi['usempi']  e_list=[]
774    for i in 'debug','openmp','boomeramg','mkl','netcdf','papi','parmetis','pyproj','silo','sympy','umfpack','visit','vsl_random':
775  ############ ParMETIS (optional) ###############################      if env[i]: e_list.append(i)
776        else: d_list.append(i)
777  # Start a new configure environment that reflects what we've already found  for i in e_list:
778  conf = Configure(clone_env(env_mpi))      print("%16s:  YES"%i)
779    for i in d_list:
780  if not env_mpi['usempi']: env_mpi['useparmetis'] = 0      print("%16s:  DISABLED"%i)
781    if env['cppunit']:
782  if env_mpi['useparmetis']:      print("         CppUnit:  FOUND")
783    conf.env.AppendUnique(CPPPATH = [env_mpi['parmetis_path']])  else:
784    conf.env.AppendUnique(LIBPATH = [env_mpi['parmetis_lib_path']])      print("         CppUnit:  NOT FOUND")
785    conf.env.AppendUnique(LIBS    = [env_mpi['parmetis_libs']])  if env['gmsh']=='m':
786    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['parmetis_lib_path'])    # The wrapper script needs to find these libs      print("            gmsh:  FOUND, MPI-ENABLED")
787    #ensure that our path entries remain at the front  elif env['gmsh']=='s':
788    conf.env.PrependENVPath('PYTHONPATH', prefix)      print("            gmsh:  FOUND")
789    conf.env.PrependENVPath(LD_LIBRARY_PATH_KEY, env['libinstall'])  else:
790        print("            gmsh:  NOT FOUND")
791  if env_mpi['useparmetis'] and not conf.CheckCHeader('parmetis.h'): env_mpi['useparmetis'] = 0  if env['numpy_h']:
792  if env_mpi['useparmetis'] and not conf.CheckFunc('ParMETIS_V3_PartGeomKway'): env_mpi['useparmetis'] = 0      print("   numpy headers:  FOUND")
793    else:
794  # Add ParMETIS to environment env_mpi if it was found      print("   numpy headers:  NOT FOUND")
795  if env_mpi['useparmetis']:  print("   vsl_random:  %s"%env['vsl_random'])
796    env_mpi = conf.Finish()      
797    env_mpi.Append(CPPDEFINES = ['USE_PARMETIS'])  if ((fatalwarning != '') and (env['werror'])):
798        print("  Treating warnings as errors")
799  else:  else:
800    conf.Finish()      print("  NOT treating warnings as errors")
801    print("")
 env['useparmetis'] = env_mpi['useparmetis']  
   
 ############ Summarize our environment #########################  
   
 print ""  
 print "Summary of configuration (see ./config.log for information)"  
 print " Using python libraries"  
 print " Using numpy"  
 print " Using boost"  
 if env['usenetcdf']: print "    Using NetCDF"  
 else: print "   Not using NetCDF"  
 if env['usevtk']: print "   Using VTK"  
 else: print "   Not using VTK"  
 if env['usevisit']: print " Using VisIt"  
 else: print "   Not using VisIt"  
 if env['usemkl']: print "   Using MKL"  
 else: print "   Not using MKL"  
 if env['useumfpack']: print "   Using UMFPACK"  
 else: print "   Not using UMFPACK"  
 if env['usesilo']: print "  Using Silo"  
 else: print "   Not using Silo"  
 if env['useopenmp']: print "    Using OpenMP"  
 else: print "   Not using OpenMP"  
 if env['usempi']: print "   Using MPI (flavour = %s)"%env['mpi_flavour']  
 else: print "   Not using MPI"  
 if env['useparmetis']: print "  Using ParMETIS"  
 else: print "   Not using ParMETIS (requires MPI)"  
 if env['usepapi']: print "  Using PAPI"  
 else: print "   Not using PAPI"  
 if env['uselapack']: print "    Using Lapack"  
 else: print "   Not using Lapack"  
 if env['usedebug']: print " Compiling for debug"  
 else: print "   Not compiling for debug"  
 print " Installing in", prefix  
 if ((fatalwarning != "") and (env['usewarnings'])): print " Treating warnings as errors"  
 else: print "   Not treating warnings as errors"  
 print ""  
   
 ############ Delete option-dependent files #####################  
   
 Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.debug")))  
 Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.mpi")))  
 Execute(Delete(os.path.join(env['libinstall'],"Compiled.with.openmp")))  
 Execute(Delete(os.path.join(env['libinstall'],"buildvars")))  
 if not env['usempi']: Execute(Delete(os.path.join(env['libinstall'],"pythonMPI")))  
   
   
 ############ Build the subdirectories ##########################  
   
 if env['usepedantic']: env_mpi.Append(CCFLAGS = pedantic)  
802    
803    ####################### Configure the subdirectories #########################
804    
805  from grouptest import *  from grouptest import *
806    
807  TestGroups=[]  TestGroups=[]
808    
809  dodgy_env=clone_env(env_mpi)    # Environment without pedantic options  # keep an environment without warnings-as-errors
810    dodgy_env=env.Clone()
 ############ Now we switch on Warnings as errors ###############  
   
 #this needs to be done after configuration because the scons test files have warnings in them  
   
 if ((fatalwarning != "") and (env['usewarnings'])):  
   env.Append(CCFLAGS        = fatalwarning)  
   env_mpi.Append(CCFLAGS        = fatalwarning)  
811    
812    # now add warnings-as-errors flags. This needs to be done after configuration
813    # because the scons test files have warnings in them
814    if ((fatalwarning != '') and (env['werror'])):
815        env.Append(CCFLAGS = fatalwarning)
816    
817  Export(  Export(
818    ["env",    ['env',
819     "env_mpi",     'dodgy_env',
820     "clone_env",     'IS_WINDOWS',
821     "dodgy_env",     'TestGroups'
822     "IS_WINDOWS_PLATFORM",    ]
823     "TestGroups",  )
    "CallSConscript",  
    "cantusevariantdir"  
    ]  
   )  
   
 CallSConscript(env, dirs = ['tools/CppUnitTest/src'], variant_dir='build/$PLATFORM/tools/CppUnitTest', duplicate=0)  
 CallSConscript(env, dirs = ['tools/escriptconvert'], variant_dir='build/$PLATFORM/tools/escriptconvert', duplicate=0)  
 CallSConscript(env, dirs = ['paso/src'], variant_dir='build/$PLATFORM/paso', duplicate=0)  
 CallSConscript(env, dirs = ['weipa/src'], variant_dir='build/$PLATFORM/weipa', duplicate=0)  
 CallSConscript(env, dirs = ['escript/src'], variant_dir='build/$PLATFORM/escript', duplicate=0)  
 CallSConscript(env, dirs = ['esysUtils/src'], variant_dir='build/$PLATFORM/esysUtils', duplicate=0)  
 CallSConscript(env, dirs = ['finley/src'], variant_dir='build/$PLATFORM/finley', duplicate=0)  
 CallSConscript(env, dirs = ['modellib/py_src'], variant_dir='build/$PLATFORM/modellib', duplicate=0)  
 CallSConscript(env, dirs = ['doc'], variant_dir='build/$PLATFORM/doc', duplicate=0)  
 CallSConscript(env, dirs = ['pyvisi/py_src'], variant_dir='build/$PLATFORM/pyvisi', duplicate=0)  
 CallSConscript(env, dirs = ['pycad/py_src'], variant_dir='build/$PLATFORM/pycad', duplicate=0)  
 CallSConscript(env, dirs = ['pythonMPI/src'], variant_dir='build/$PLATFORM/pythonMPI', duplicate=0)  
 CallSConscript(env, dirs = ['scripts'], variant_dir='build/$PLATFORM/scripts', duplicate=0)  
 CallSConscript(env, dirs = ['paso/profiling'], variant_dir='build/$PLATFORM/paso/profiling', duplicate=0)  
   
   
 ############ Remember what optimizations we used ###############  
   
 remember_list = []  
   
 if env['usedebug']:  
   remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.debug"), None, Touch('$TARGET'))  
   
 if env['usempi']:  
   remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.mpi"), None, Touch('$TARGET'))  
   
 if env['useopenmp']:  
   remember_list += env.Command(os.path.join(env['libinstall'],"Compiled.with.openmp"), None, Touch('$TARGET'))  
   
 env.Alias('remember_options', remember_list)  
   
   
 ############### Record python interpreter version ##############  
   
 if not IS_WINDOWS_PLATFORM:  
   
   versionstring="Python "+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])  
 #  if sys.version_info[4] >0 : versionstring+="rc%s"%sys.version_info[4]  
   
 ############## Populate the buildvars file #####################  
824    
825  buildvars=open(os.path.join(env['libinstall'],'buildvars'),'w')  env.SConscript(dirs = ['tools/escriptconvert'], variant_dir='$BUILD_DIR/$PLATFORM/tools/escriptconvert', duplicate=0)
826  buildvars.write('python='+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+'\n')  env.SConscript(dirs = ['paso/src'], variant_dir='$BUILD_DIR/$PLATFORM/paso', duplicate=0)
827    env.SConscript(dirs = ['weipa/src'], variant_dir='$BUILD_DIR/$PLATFORM/weipa', duplicate=0)
828    env.SConscript(dirs = ['escript/src'], variant_dir='$BUILD_DIR/$PLATFORM/escript', duplicate=0)
829    env.SConscript(dirs = ['esysUtils/src'], variant_dir='$BUILD_DIR/$PLATFORM/esysUtils', duplicate=0)
830    env.SConscript(dirs = ['pasowrap/src'], variant_dir='$BUILD_DIR/$PLATFORM/pasowrap', duplicate=0)
831    env.SConscript(dirs = ['dudley/src'], variant_dir='$BUILD_DIR/$PLATFORM/dudley', duplicate=0)
832    env.SConscript(dirs = ['finley/src'], variant_dir='$BUILD_DIR/$PLATFORM/finley', duplicate=0)
833    env.SConscript(dirs = ['ripley/src'], variant_dir='$BUILD_DIR/$PLATFORM/ripley', duplicate=0)
834    env.SConscript(dirs = ['downunder/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/downunder', duplicate=0)
835    env.SConscript(dirs = ['modellib/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/modellib', duplicate=0)
836    env.SConscript(dirs = ['pycad/py_src'], variant_dir='$BUILD_DIR/$PLATFORM/pycad', duplicate=0)
837    env.SConscript(dirs = ['pythonMPI/src'], variant_dir='$BUILD_DIR/$PLATFORM/pythonMPI', duplicate=0)
838    env.SConscript(dirs = ['doc'], variant_dir='$BUILD_DIR/$PLATFORM/doc', duplicate=0)
839    env.SConscript(dirs = ['paso/profiling'], variant_dir='$BUILD_DIR/$PLATFORM/paso/profiling', duplicate=0)
840    
841    
842    ######################## Populate the buildvars file #########################
843    
844    # remove obsolete file
845    if not env['usempi']:
846        Execute(Delete(os.path.join(env['libinstall'], 'pythonMPI')))
847        Execute(Delete(os.path.join(env['libinstall'], 'pythonMPIredirect')))
848    
849  # Find the boost version by extracting it from version.hpp  # Try to extract the boost version from version.hpp
850  boosthpp=open(os.path.join(env['boost_path'],'boost','version.hpp'))  boosthpp=open(os.path.join(boost_inc_path, 'boost', 'version.hpp'))
851  boostversion='unknown'  boostversion='unknown'
852  try:  try:
853      for line in boosthpp:      for line in boosthpp:
854          ver=re.match(r'#define BOOST_VERSION (\d+)',line)          ver=re.match(r'#define BOOST_VERSION (\d+)',line)
855          if ver:          if ver:
856              boostversion=ver.group(1)              boostversion=ver.group(1)
857  except StopIteration:  except StopIteration:
858      pass      pass
859  buildvars.write("boost="+boostversion+"\n")  boosthpp.close()
860    
861    
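BOOST_VERSION in version.hpp is a single integer encoding the release as patch + 100*minor + 100000*major, so the value recorded in buildvars below is e.g. 104900 rather than 1.49.0. Should a human-readable form ever be needed, it can be derived with a few lines (optional sketch, not part of the build script):

    def decode_boost_version(v):
        v = int(v)                    # e.g. '104900' as written to buildvars
        return '%d.%d.%d' % (v // 100000, (v // 100) % 1000, v % 100)

    # decode_boost_version('104900') -> '1.49.0'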
862    buildvars=open(os.path.join(env['libinstall'], 'buildvars'), 'w')
863  buildvars.write("svn_revision="+str(global_revision)+"\n")  buildvars.write("svn_revision="+str(global_revision)+"\n")
864  out="usedebug="  buildvars.write("prefix="+prefix+"\n")
865  if env['usedebug']:  buildvars.write("cc="+env['CC']+"\n")
866      out+="y"  buildvars.write("cxx="+env['CXX']+"\n")
867  else:  if env['pythoncmd']=='python':
868      out+="n"      buildvars.write("python="+sys.executable+"\n")
869  out+="\nusempi="      buildvars.write("python_version="+str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2])+"\n")
870  if env['usempi']:  else:
871      out+="y"      buildvars.write("python="+env['pythoncmd']+"\n")
872  else:      p=Popen([env['pythoncmd'], '-c', 'from __future__ import print_function;import sys;print(str(sys.version_info[0])+"."+str(sys.version_info[1])+"."+str(sys.version_info[2]))'], stdout=PIPE)
873      out+="n"      verstring=p.stdout.readline().strip()
874  out+="\nuseopenmp="      p.wait()
875  if env['useopenmp']:      buildvars.write("python_version="+verstring+"\n")
876      out+="y"  buildvars.write("boost_inc_path="+boost_inc_path+"\n")
877  else:  buildvars.write("boost_lib_path="+boost_lib_path+"\n")
878      out+="n"  buildvars.write("boost_version="+boostversion+"\n")
879  buildvars.write(out+"\n")  buildvars.write("debug=%d\n"%int(env['debug']))
880  buildvars.write("mpi_flavour="+env['mpi_flavour']+'\n')  buildvars.write("openmp=%d\n"%int(env['openmp']))
881  out="lapack="  buildvars.write("mpi=%s\n"%env['mpi'])
882  if env['uselapack']:  buildvars.write("mpi_inc_path=%s\n"%mpi_inc_path)
883     out+="y"  buildvars.write("mpi_lib_path=%s\n"%mpi_lib_path)
884  else:  buildvars.write("lapack=%s\n"%env['lapack'])
885     out+="n"  buildvars.write("vsl_random=%d\n"%int(env['vsl_random']))
886  out+="\nsilo="  for i in 'netcdf','parmetis','papi','mkl','umfpack','boomeramg','silo','visit':
887  if env['usesilo']:      buildvars.write("%s=%d\n"%(i, int(env[i])))
888     out+="y"      if env[i]:
889  else:          buildvars.write("%s_inc_path=%s\n"%(i, eval(i+'_inc_path')))
890     out+="n"          buildvars.write("%s_lib_path=%s\n"%(i, eval(i+'_lib_path')))
 out+="\nusevisit="  
 if env['usevisit']:  
    out+="y"  
 else:  
    out+="n"  
 buildvars.write(out+"\n")  
891  buildvars.close()  buildvars.close()
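The package loop a few lines above recovers names such as mkl_inc_path via eval(). An equivalent eval-free formulation (just a sketch; it reuses the probe variables defined earlier in this script, with netcdf and papi following the same pattern) keeps the results in a dictionary keyed by package name:

    # Sketch: collect the (include, lib) pairs once, then iterate over them.
    pkg_paths = {
        'mkl':       (mkl_inc_path, mkl_lib_path),
        'umfpack':   (umfpack_inc_path, umfpack_lib_path),
        'boomeramg': (boomeramg_inc_path, boomeramg_lib_path),
        'silo':      (silo_inc_path, silo_lib_path),
        'visit':     (visit_inc_path, visit_lib_path),
        'parmetis':  (parmetis_inc_path, parmetis_lib_path),
    }
    for name, (inc, lib) in pkg_paths.items():
        buildvars.write("%s=%d\n" % (name, int(env[name])))
        if env[name]:
            buildvars.write("%s_inc_path=%s\n" % (name, inc))
            buildvars.write("%s_lib_path=%s\n" % (name, lib))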
892    
893    ################### Targets to build and install libraries ###################
894    
895  ############ Targets to build and install libraries ############  target_init = env.Command(os.path.join(env['pyinstall'],'__init__.py'), None, Touch('$TARGET'))
   
 target_init = env.Command(env['pyinstall']+'/__init__.py', None, Touch('$TARGET'))  
896  env.Alias('target_init', [target_init])  env.Alias('target_init', [target_init])
897    # delete buildvars upon cleanup
898    env.Clean('target_init', os.path.join(env['libinstall'], 'buildvars'))
899    
900    # The headers have to be installed prior to build in order to satisfy
901    # #include <paso/Common.h>
902    env.Alias('build_esysUtils', ['install_esysUtils_headers', 'build_esysUtils_lib'])
903    env.Alias('install_esysUtils', ['build_esysUtils', 'install_esysUtils_lib'])
904    
905  # The headers have to be installed prior to build in order to satisfy #include <paso/Common.h>  env.Alias('build_paso', ['install_paso_headers', 'build_paso_lib'])
906  env.Alias('build_esysUtils', ['target_install_esysUtils_headers', 'target_esysUtils_a'])  env.Alias('install_paso', ['build_paso', 'install_paso_lib'])
 env.Alias('install_esysUtils', ['build_esysUtils', 'target_install_esysUtils_a'])  
907    
908  env.Alias('build_paso', ['target_install_paso_headers', 'target_paso_a'])  env.Alias('build_escript', ['install_escript_headers', 'build_escript_lib', 'build_escriptcpp_lib'])
909  env.Alias('install_paso', ['build_paso', 'target_install_paso_a'])  env.Alias('install_escript', ['build_escript', 'install_escript_lib', 'install_escriptcpp_lib', 'install_escript_py'])
910    
911  env.Alias('build_weipa', ['target_install_weipa_headers', 'target_weipa_so', 'target_weipacpp_so'])  env.Alias('build_pasowrap', ['install_pasowrap_headers', 'build_pasowrap_lib', 'build_pasowrapcpp_lib'])
912  env.Alias('install_weipa', ['build_weipa', 'target_install_weipa_so', 'target_install_weipacpp_so', 'target_install_weipa_py'])  env.Alias('install_pasowrap', ['build_pasowrap', 'install_pasowrap_lib', 'install_pasowrapcpp_lib', 'install_pasowrap_py'])
913    
914    env.Alias('build_dudley', ['install_dudley_headers', 'build_dudley_lib', 'build_dudleycpp_lib'])
915    env.Alias('install_dudley', ['build_dudley', 'install_dudley_lib', 'install_dudleycpp_lib', 'install_dudley_py'])
916    
917  env.Alias('build_escriptreader', ['target_install_weipa_headers', 'target_escriptreader_a'])  env.Alias('build_finley', ['install_finley_headers', 'build_finley_lib', 'build_finleycpp_lib'])
918  env.Alias('install_escriptreader', ['build_escriptreader', 'target_install_escriptreader_a'])  env.Alias('install_finley', ['build_finley', 'install_finley_lib', 'install_finleycpp_lib', 'install_finley_py'])
919    
920  env.Alias('build_escript', ['target_install_escript_headers', 'target_escript_so', 'target_escriptcpp_so'])  env.Alias('build_ripley', ['install_ripley_headers', 'build_ripley_lib', 'build_ripleycpp_lib'])
921  env.Alias('install_escript', ['build_escript', 'target_install_escript_so', 'target_install_escriptcpp_so', 'target_install_escript_py'])  env.Alias('install_ripley', ['build_ripley', 'install_ripley_lib', 'install_ripleycpp_lib', 'install_ripley_py'])
922    
923  env.Alias('build_finley', ['target_install_finley_headers', 'target_finley_so', 'target_finleycpp_so'])  env.Alias('build_weipa', ['install_weipa_headers', 'build_weipa_lib', 'build_weipacpp_lib'])
924  env.Alias('install_finley', ['build_finley', 'target_install_finley_so', 'target_install_finleycpp_so', 'target_install_finley_py'])  env.Alias('install_weipa', ['build_weipa', 'install_weipa_lib', 'install_weipacpp_lib', 'install_weipa_py'])
925    
926  # Now gather all the above into a couple easy targets: build_all and install_all  env.Alias('build_escriptreader', ['install_weipa_headers', 'build_escriptreader_lib'])
927    env.Alias('install_escriptreader', ['build_escriptreader', 'install_escriptreader_lib'])
928    
929    # Now gather all the above into some easy targets: build_all and install_all
930  build_all_list = []  build_all_list = []
931  build_all_list += ['build_esysUtils']  build_all_list += ['build_esysUtils']
932  build_all_list += ['build_paso']  build_all_list += ['build_paso']
 build_all_list += ['build_weipa']  
933  build_all_list += ['build_escript']  build_all_list += ['build_escript']
934    build_all_list += ['build_pasowrap']
935    build_all_list += ['build_dudley']
936  build_all_list += ['build_finley']  build_all_list += ['build_finley']
937  if env['usempi']:       build_all_list += ['target_pythonMPI_exe']  build_all_list += ['build_ripley']
938  #if not IS_WINDOWS_PLATFORM:    build_all_list += ['target_escript_wrapper']  build_all_list += ['build_weipa']
939  build_all_list += ['target_escriptconvert']  if not IS_WINDOWS: build_all_list += ['build_escriptreader']
940    if env['usempi']:   build_all_list += ['build_pythonMPI']
941    build_all_list += ['build_escriptconvert']
942  env.Alias('build_all', build_all_list)  env.Alias('build_all', build_all_list)
943    
944  install_all_list = []  install_all_list = []
945  install_all_list += ['target_init']  install_all_list += ['target_init']
946  install_all_list += ['install_esysUtils']  install_all_list += ['install_esysUtils']
947  install_all_list += ['install_paso']  install_all_list += ['install_paso']
 install_all_list += ['install_weipa']  
948  install_all_list += ['install_escript']  install_all_list += ['install_escript']
949    install_all_list += ['install_pasowrap']
950    install_all_list += ['install_dudley']
951  install_all_list += ['install_finley']  install_all_list += ['install_finley']
952  install_all_list += ['target_install_pyvisi_py']  install_all_list += ['install_ripley']
953  install_all_list += ['target_install_modellib_py']  install_all_list += ['install_weipa']
954  install_all_list += ['target_install_pycad_py']  if not IS_WINDOWS: install_all_list += ['install_escriptreader']
955  if env['usempi']:       install_all_list += ['target_install_pythonMPI_exe']  install_all_list += ['install_downunder_py']
956  #if not IS_WINDOWS_PLATFORM:    install_all_list += ['target_install_escript_wrapper']  install_all_list += ['install_modellib_py']
957  if env['usesilo']:  install_all_list += ['target_install_escriptconvert']  install_all_list += ['install_pycad_py']
958  install_all_list += ['remember_options']  if env['usempi']:   install_all_list += ['install_pythonMPI']
959    install_all_list += ['install_escriptconvert']
960  env.Alias('install_all', install_all_list)  env.Alias('install_all', install_all_list)
961    
962  # Default target is install  # Default target is install
963  env.Default('install_all')  env.Default('install_all')
964    
965  ############ Targets to build and run the test suite ###########  ################## Targets to build and run the test suite ###################
966    
967  env.Alias('build_cppunittest', ['target_install_cppunittest_headers', 'target_cppunittest_a'])  if not env['cppunit']:
968  env.Alias('install_cppunittest', ['build_cppunittest', 'target_install_cppunittest_a'])      test_msg = env.Command('.dummy.', None, '@echo "Cannot run C/C++ unit tests, CppUnit not found!";exit 1')
969  env.Alias('run_tests', ['install_all', 'target_install_cppunittest_a'])      env.Alias('run_tests', test_msg)
970  env.Alias('all_tests', ['install_all', 'target_install_cppunittest_a', 'run_tests', 'py_tests'])      env.Alias('build_tests', '')
971    env.Alias('run_tests', ['install_all'])
972    env.Alias('all_tests', ['install_all', 'run_tests', 'py_tests'])
973  env.Alias('build_full',['install_all','build_tests','build_py_tests'])  env.Alias('build_full',['install_all','build_tests','build_py_tests'])
974    env.Alias('build_PasoTests','$BUILD_DIR/$PLATFORM/paso/profiling/PasoTests')
975    
976    ##################### Targets to build the documentation #####################
977    
978  ############ Targets to build the documentation ################  env.Alias('pdfdocs',['user_pdf', 'install_pdf', 'cookbook_pdf', 'inversion_pdf'])
979    env.Alias('basedocs', ['pdfdocs','examples_tarfile', 'examples_zipfile', 'api_doxygen'])
980  env.Alias('api_epydoc','install_all')  env.Alias('docs', ['basedocs', 'sphinxdoc'])
981    env.Alias('release_prep', ['docs', 'install_all'])
982  env.Alias('docs', ['examples_tarfile', 'examples_zipfile', 'api_epydoc', 'api_doxygen', 'guide_pdf', 'guide_html','install_pdf', 'cookbook_pdf'])  env.Alias('release_prep_old', ['basedocs', 'api_epydoc', 'install_all'])
   
 build_platform=os.name  
   
 if not IS_WINDOWS_PLATFORM:  
    try:  
     utest=open("utest.sh","w")  
     #Sometimes Mac python says it is posix  
     if (build_platform=='posix') and platform.system()=="Darwin":  
         build_platform='darwin'  
     utest.write(GroupTest.makeHeader(build_platform))  
     for tests in TestGroups:  
         utest.write(tests.makeString())  
     utest.close()  
     os.chmod("utest.sh",stat.S_IRWXU|stat.S_IRGRP|stat.S_IXGRP|stat.S_IROTH|stat.S_IXOTH)  
     print "utest.sh written"  
    except IOError:  
     print "Error attempting to write unittests file."  
     sys.exit(1)  
   
    #Make sure that the escript wrapper is in place  
    if not os.path.isfile(os.path.join(env['bininstall'],'escript')):  
        print "Copying escript wrapper"  
        shutil.copy("bin/escript",os.path.join(env['bininstall'],'escript'))  
983    
 ############ Targets to build PasoTests suite ################  
984    
985  env.Alias('build_PasoTests','build/'+build_platform+'/paso/profiling/PasoTests')  # The test scripts are always generated; this target allows us to
986    # generate the test scripts without doing a full build
987    env.Alias('testscripts',[])
988    
989    if not IS_WINDOWS:
990        try:
991            utest=open('utest.sh','w')
992            utest.write(GroupTest.makeHeader(env['PLATFORM'], prefix, False))
993            for tests in TestGroups:
994                utest.write(tests.makeString())
995            utest.close()
996            Execute(Chmod('utest.sh', 0o755))
997            print("Generated utest.sh.")
998            # This version contains only the python tests - it should remain usable
999            # from a binary-only install if you have the test files
1000            utest=open('itest.sh','w')
1001            utest.write(GroupTest.makeHeader(env['PLATFORM'], prefix, True))
1002            for tests in TestGroups:
1003              if tests.exec_cmd=='$PYTHONRUNNER ':
1004                utest.write(tests.makeString())
1005            utest.close()
1006            Execute(Chmod('itest.sh', 0o755))
1007            print("Generated itest.sh.")        
1008        except IOError:
1009            print("Error attempting to write unittests file.")
1010            Exit(1)
1011    
1012        # delete utest.sh upon cleanup
1013        env.Clean('target_init', 'utest.sh')
1014        env.Clean('target_init', 'itest.sh')
1015    
1016        # Make sure that the escript wrapper is in place
1017        if not os.path.isfile(os.path.join(env['bininstall'], 'run-escript')):
1018            print("Copying escript wrapper.")
1019            Execute(Copy(os.path.join(env['bininstall'],'run-escript'), 'bin/run-escript'))
1020    
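utest.sh and itest.sh above are produced with the same small recipe: write the script with ordinary file I/O, then hand the chmod to SCons' Execute()/Chmod() so it is applied immediately and logged like any other action. A minimal self-contained sketch of that recipe, with a made-up file name:

    # Minimal sketch of the generate-and-chmod recipe used for utest.sh/itest.sh.
    script = open('example_tests.sh', 'w')          # hypothetical file name
    script.write('#!/bin/sh\n')
    script.write('echo "generated test groups would be invoked here"\n')
    script.close()
    Execute(Chmod('example_tests.sh', 0o755))       # Execute/Chmod are SCons actions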
 env.Alias('release_prep', ['docs', 'install_all'])  
